Question

I would like to record sound with an Audio Queue. I have created a project and added all the frameworks I need (so that is not the problem). I have changed only two files:

  • ViewController.h
  • ViewController.m

Before I start: I am not sure about the return types, (void) or (IBAction) (I haven't been able to test this yet).

Here is the source code from my ViewController.h

#import <UIKit/UIKit.h>
//#import <AudioToolbox/AudioQueue.h>  // (not sure whether I need this)
//#import <AudioToolbox/AudioFile.h>   // (not sure whether I need this)
#import <AudioUnit/AudioUnit.h>
#import <AudioToolbox/AudioToolbox.h>

#define NUM_BUFFERS 3
#define SECONDS_TO_RECORD 10

typedef struct
{
    AudioStreamBasicDescription  dataFormat;
    AudioQueueRef                queue;
    AudioQueueBufferRef          buffers[NUM_BUFFERS];
    AudioFileID                  audioFile;
    SInt64                       currentPacket;
    bool                         recording;
} RecordState;

typedef struct
{
    AudioStreamBasicDescription  dataFormat;
    AudioQueueRef                queue;
    AudioQueueBufferRef          buffers[NUM_BUFFERS];
    AudioFileID                  audioFile;
    SInt64                       currentPacket;
    bool                         playing;
} PlayState;



@interface ViewController : UIViewController{
    IBOutlet UILabel* labelStatus;
    IBOutlet UIButton* buttonRecord;
    IBOutlet UIButton* buttonPlay;
    RecordState recordState;
    PlayState playState;
    CFURLRef fileURL;
}

- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength;
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format;
- (void)recordPressed:(id)sender;
- (void)playPressed:(id)sender;
- (IBAction)startRecording;
- (IBAction)stopRecording;
- (IBAction)startPlayback;
- (IBAction)stopPlayback;

@end

Here is the source code from my ViewController.m

(I get an error in the startPlayback method; see the comments there.)

#import "ViewController.h"
@interface ViewController ()
@end

@implementation ViewController

void AudioInputCallback(
                    void *inUserData,
                    AudioQueueRef inAQ,
                    AudioQueueBufferRef inBuffer,
                    const AudioTimeStamp *inStartTime,
                    UInt32 inNumberPacketDescriptions,
                    const AudioStreamPacketDescription *inPacketDescs)
{

    RecordState* recordState = (RecordState*)inUserData;

    if(!recordState->recording)
    {
        printf("Not recording, returning\n");
        return;
    }

    //if(inNumberPacketDescriptions == 0 && recordState->dataFormat.mBytesPerPacket != 0)
    //{
    //    inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / recordState->dataFormat.mBytesPerPacket;
    //}

    printf("Writing buffer %lld\n", recordState->currentPacket);
    OSStatus status = AudioFileWritePackets(recordState->audioFile,
                                        false,
                                        inBuffer->mAudioDataByteSize,
                                        inPacketDescs,
                                        recordState->currentPacket,
                                        &inNumberPacketDescriptions,
                                        inBuffer->mAudioData);
    if(status == 0)
    {
        recordState->currentPacket += inNumberPacketDescriptions;
    }

    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}

void AudioOutputCallback(
                     void* inUserData,
                     AudioQueueRef outAQ,
                     AudioQueueBufferRef outBuffer)
{
    PlayState* playState = (PlayState*)inUserData;
    if(!playState->playing)
    {
        printf("Not playing, returning\n");
        return;
    }

    printf("Queuing buffer %lld for playback\n", playState->currentPacket);

    AudioStreamPacketDescription* packetDescs = NULL;

    UInt32 bytesRead;
    UInt32 numPackets = 8000;
    OSStatus status;
    status = AudioFileReadPackets(
                              playState->audioFile,
                              false,
                              &bytesRead,
                              packetDescs,
                              playState->currentPacket,
                              &numPackets,
                              outBuffer->mAudioData);

    if(numPackets)
    {
        outBuffer->mAudioDataByteSize = bytesRead;
        status = AudioQueueEnqueueBuffer(
                                     playState->queue,
                                     outBuffer,
                                     0,
                                     packetDescs);

        playState->currentPacket += numPackets;
    }
    else
    {
        if(playState->playing)
        {
            AudioQueueStop(playState->queue, false);
            AudioFileClose(playState->audioFile);
            playState->playing = false;
        }

        AudioQueueFreeBuffer(playState->queue, outBuffer);
    }    
}

- (void)setupAudioFormat:(AudioStreamBasicDescription*)format
{
    format->mSampleRate = 8000.0;
    format->mFormatID = kAudioFormatLinearPCM;
    format->mFramesPerPacket = 1;
    format->mChannelsPerFrame = 1;
    format->mBytesPerFrame = 2;
    format->mBytesPerPacket = 2;
    format->mBitsPerChannel = 16;
    format->mReserved = 0;
    format->mFormatFlags = kLinearPCMFormatFlagIsBigEndian |
    kLinearPCMFormatFlagIsSignedInteger |
    kLinearPCMFormatFlagIsPacked;
}

- (void)recordPressed:(id)sender
{
    if(!playState.playing)
    {
        if(!recordState.recording)
        {
            printf("Starting recording\n");
            [self startRecording];
        }
        else
        {
            printf("Stopping recording\n");
            [self stopRecording];
        }
    }
    else
    {
        printf("Can't start recording, currently playing\n");
    }
}

- (void)playPressed:(id)sender
{
    if(!recordState.recording)
    {
        if(!playState.playing)
        {
            printf("Starting playback\n");
            [self startPlayback];
        }
        else
        {
            printf("Stopping playback\n");
            [self stopPlayback];
        }
    }
}

- (IBAction)startRecording
{
    [self setupAudioFormat:&recordState.dataFormat];

    recordState.currentPacket = 0;

    OSStatus status;
    status = AudioQueueNewInput(&recordState.dataFormat,
                                AudioInputCallback,
                                &recordState,
                                CFRunLoopGetCurrent(),
                                kCFRunLoopCommonModes,
                                0,
                                &recordState.queue);

    if(status == 0)
    {
        for(int i = 0; i < NUM_BUFFERS; i++)
        {
            AudioQueueAllocateBuffer(recordState.queue,
                                     16000, &recordState.buffers[i]);
            AudioQueueEnqueueBuffer(recordState.queue,
                                    recordState.buffers[i], 0, NULL);
        }

        status = AudioFileCreateWithURL(fileURL,
                                        kAudioFileAIFFType,
                                        &recordState.dataFormat,
                                        kAudioFileFlags_EraseFile,
                                        &recordState.audioFile);
        if(status == 0)
        {
            recordState.recording = true;
            status = AudioQueueStart(recordState.queue, NULL);
            if(status == 0)
            {
                labelStatus.text = @"Recording";
            }
        }
    }

    if(status != 0)
    {
        [self stopRecording];
        labelStatus.text = @"Record Failed";
    }
}

- (IBAction)stopRecording
{
    recordState.recording = false;

    AudioQueueStop(recordState.queue, true);
    for(int i = 0; i < NUM_BUFFERS; i++)
    {
        AudioQueueFreeBuffer(recordState.queue,
                             recordState.buffers[i]);
    }

    AudioQueueDispose(recordState.queue, true);
    AudioFileClose(recordState.audioFile);
    labelStatus.text = @"Idle";
}

- (IBAction)startPlayback
{
    playState.currentPacket = 0;

    [self setupAudioFormat:&playState.dataFormat];

    OSStatus status;


    // I get here an error
    // Use of undeclared identifier 'fsRdPerm'
    // How to fix that?
    status = AudioFileOpenURL(fileURL, fsRdPerm, kAudioFileAIFFType, &playState.audioFile);


    if(status == 0)
    {
        status = AudioQueueNewOutput(
                                     &playState.dataFormat,
                                     AudioOutputCallback,
                                     &playState,
                                     CFRunLoopGetCurrent(),
                                     kCFRunLoopCommonModes,
                                     0,
                                     &playState.queue);

        if(status == 0)
        {
            playState.playing = true;
            for(int i = 0; i < NUM_BUFFERS && playState.playing; i++)
            {
                if(playState.playing)
                {
                    AudioQueueAllocateBuffer(playState.queue, 16000, &playState.buffers[i]);
                    AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
                }
            }

            if(playState.playing)
            {
                status = AudioQueueStart(playState.queue, NULL);
                if(status == 0)
                {
                    labelStatus.text = @"Playing";
                }
            }
        }
    }

    if(status != 0)
    {
        [self stopPlayback];
        labelStatus.text = @"Play failed";
    }
}

- (void)stopPlayback
{
    playState.playing = false;

    for(int i = 0; i < NUM_BUFFERS; i++)
    {
        AudioQueueFreeBuffer(playState.queue, playState.buffers[i]);
    }

    AudioQueueDispose(playState.queue, true);
    AudioFileClose(playState.audioFile);
}

- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                         NSUserDomainMask, YES);
    NSString* docDir = [paths objectAtIndex:0];
    NSString* file = [docDir stringByAppendingString:@"/recording.aif"];
    return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
        // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
        // Dispose of any resources that can be recreated.
}

@end

I don't know how to fix this so that I can use my project. If I comment out the startPlayback method, I get this error instead:

Ld /Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator/recorder_test2.app/recorder_test2 normal i386 cd /Users/NAME/Desktop/recorder_test2 setenv IPHONEOS_DEPLOYMENT_TARGET 6.0 setenv PATH "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/usr/bin:/Applications/Xcode.app/Contents/Developer/usr/bin:/usr/bin:/bin:/usr/sbin:/sbin" /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/clang -arch i386 -isysroot /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator6.0.sdk -L/Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator -F/Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator -filelist /Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Intermediates/recorder_test2.build/Debug-iphonesimulator/recorder_test2.build/Objects-normal/i386/recorder_test2.LinkFileList -Xlinker -objc_abi_version -Xlinker 2 -fobjc-arc -fobjc-link-runtime -Xlinker -no_implicit_dylibs -mios-simulator-version-min=6.0 -framework AudioToolbox -framework AudioUnit -framework CoreAudio -framework UIKit -framework Foundation -framework CoreGraphics -o /Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator/recorder_test2.app/recorder_test2

ld: framework not found AudioUnit
clang: error: linker command failed with exit code 1 (use -v to see invocation)

Please try it with the two source files yourself and help me.


Solution

Just add AudioUnit to the frameworks in your project settings and make sure the path to it is correct.

(screenshot of the project settings with the framework added)

OTHER TIPS

Remove AudioUnit.framework from your project and replace fsRdPerm with kAudioFileReadPermission.
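
For reference, a minimal sketch of how the call in startPlayback would look after the change (same arguments as in the question, only the permission constant differs):

// Open the recorded AIFF file for reading, using the AudioFilePermissions
// constant instead of the old Carbon fsRdPerm value.
OSStatus status = AudioFileOpenURL(fileURL,
                                   kAudioFileReadPermission,
                                   kAudioFileAIFFType,
                                   &playState.audioFile);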

Long story:

Although I couldn't find any direct evidence after a long trip through Google, I'm almost sure that fsRdPerm is no longer present in any of the audio frameworks of iOS 6. I searched for it in the iOS 6 Simulator SDK and it appears only in CarbonCore.framework, which is a legacy framework:

s2622:Frameworks mac$ pwd
/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator6.0.sdk/System/Library/Frameworks
s2622:Frameworks mac$ grep -sr fsRdPerm .
./CoreServices.framework/Frameworks/CarbonCore.framework/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h:  fsRdPerm                      = 0x01,

I found forum posts suggesting kAudioFileReadPermission instead of fsRdPerm. This works, and indeed the documentation for kAudioFileReadPermission says it is one of the "Flags for use with the AudioFileOpenURL and AudioFileOpen functions." Read more at Audio File Permission Flags.
