Question

I'm trying to create a movie from an array of images and an audio file. To render the movie from the image array I used Zoul's great post here. That part works perfectly: I get my movie with my pictures. However, when I try to add an audio track, I run into a lot of problems. To explain, here is my code:

When I call this method, the image files and the song file are ready:

-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] directoryContentsAtPath:documentsDirectoryPath];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"essai.mp4"]) 
        {
            [[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];

        }
    }

    NSLog(@"Write Started");

    NSError *error = nil;

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                              error:&error];    
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0] ,AVSampleRateKey, 
                                   [NSNumber numberWithInt: 1] ,AVNumberOfChannelsKey,
                                   [NSNumber numberWithInt:192000],AVEncoderBitRateKey,
                                   [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],AVChannelLayoutKey,
                                   nil];

    AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
                                             assetWriterInputWithMediaType:AVMediaTypeVideo
                                             outputSettings:videoSettings] retain];

    AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput
                                             assetWriterInputWithMediaType:AVMediaTypeAudio
                                             outputSettings:audioSettings] retain];

    NSURL* fileURL = [[NSBundle mainBundle] URLForResource:@"Big_Voice_1" withExtension:@"caf"];

    NSLog(@"%@",fileURL);
    AVAsset *asset = [[AVURLAsset URLAssetWithURL:fileURL 
                                            options:nil] retain];


    AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:asset error:&error] retain];


    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];


    AVAssetTrack* audioTrack = [asset.tracks objectAtIndex:0]; 

    AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

    [audioReader addOutput:readerOutput];                                             


    NSParameterAssert(videoWriterInput);
    NSParameterAssert(audioWriterInput);
    NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    audioWriterInput.expectsMediaDataInRealTime = NO;
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:audioWriterInput];
    [videoWriter addInput:videoWriterInput];
    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];


    //Video encoding

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.

    int frameCount = 0;

    for(int i = 0; i<20; i++)
    {
        buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];


        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) 
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData) 
            {
                printf("appending %d attemp %d\n", frameCount, j);

                CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);

                //CVPixelBufferPoolCreatePixelBuffer (kCFAllocatorDefault, adaptor.pixelBufferPool, &buffer);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);

                [NSThread sleepForTimeInterval:0.05];
            } 
            else 
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
    }

    //Finish the session:
    [videoWriterInput markAsFinished];





//Start a session:
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        CVPixelBufferRef buffer = NULL;

        //Write all picture array in movie file.

        int frameCount = 0;

        for(int i = 0; i<[m_PictArray count]; i++)
        {
            buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];


            BOOL append_ok = NO;
            int j = 0;
            while (!append_ok && j < 30) 
            {
                if (adaptor.assetWriterInput.readyForMoreMediaData) 
                {
                    printf("appending %d attemp %d\n", frameCount, j);

                    CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);


                    append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                    CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                    NSParameterAssert(bufferPool != NULL);

                    [NSThread sleepForTimeInterval:0.05];
                } 
                else 
                {
                    printf("adaptor not ready %d, %d\n", frameCount, j);
                    [NSThread sleepForTimeInterval:0.1];
                }
                j++;
            }
            if (!append_ok) {
                printf("error appending image %d times %d\n", frameCount, j);
            }
            frameCount++;
        }

        //Finish writing picture:
        [videoWriterInput markAsFinished];

I finish writing the images into the movie file, and then I want to copy the audio into that file, so I do this:

[audioReader startReading];

    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
     {
         NSLog(@"Request");
         NSLog(@"Asset Writer ready :%d",audioWriterInput.readyForMoreMediaData);
         while (audioWriterInput.readyForMoreMediaData) {
             NSLog(@"Ready");
             CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
             if (nextBuffer) {
                 NSLog(@"NextBuffer");
                 [audioWriterInput appendSampleBuffer:nextBuffer];
             }
         }
     }
     ];

    [audioWriterInput markAsFinished];
    [videoWriter finishWriting];

However, the readyForMoreMediaData state of the audio assetWriterInput is always NO.
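One thing that can be checked (not shown in my code above, just a minimal diagnostic sketch) is whether the writer itself has already failed, because a failed writer keeps every input's readyForMoreMediaData at NO:

    if (videoWriter.status == AVAssetWriterStatusFailed) {
        //Assumed diagnostic: a failed AVAssetWriter never reports its inputs as ready.
        NSLog(@"AVAssetWriter failed: %@", videoWriter.error);
    }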

My question: how do I add audio to a video file using AVFoundation?

So please, can someone help me by telling me whether I'm forgetting something, or what is wrong?

Thanks a lot.


Solution

I finally found out how to make a movie from a picture array and an audio file. If you want to do the same thing, here is my code (be careful with memory):

  • First, make a movie file from your picture array using zoul's post here (the pixelBufferFromCGImage:andSize: helper it calls is sketched after these two steps):

    -(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
    {
      NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
      NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
      for (NSString *tString in dirContents) 
      {
        if ([tString isEqualToString:@"essai.mp4"]) 
        {
            [[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
    
        }
      }
    
      NSLog(@"Write Started");
    
      NSError *error = nil;
    
      AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                              error:&error];    
      NSParameterAssert(videoWriter);
    
      NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    
    
      AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
                                             assetWriterInputWithMediaType:AVMediaTypeVideo
                                             outputSettings:videoSettings] retain];
    
    
    
    
      AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    
      NSParameterAssert(videoWriterInput);
    
      NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
      videoWriterInput.expectsMediaDataInRealTime = YES;
      [videoWriter addInput:videoWriterInput];
      //Start a session:
      [videoWriter startWriting];
      [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    
      //Video encoding
    
      CVPixelBufferRef buffer = NULL;
    
      //convert uiimage to CGImage.
    
      int frameCount = 0;
    
      for(int i = 0; i<[m_PictArray count]; i++)
      {
        buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
    
    
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) 
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData) 
            {
                printf("appending %d attemp %d\n", frameCount, j);
    
                CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
    
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);
    
                [NSThread sleepForTimeInterval:0.05];
            } 
            else 
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) 
        {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
      }
    
      [videoWriterInput markAsFinished];
      [videoWriter finishWriting];
    
      [videoWriterInput release];
      [videoWriter release];
    
      [m_PictArray removeAllObjects];
    
      NSLog(@"Write Ended"); 
    }
    
  • After that, you must merge the movie file and the audio file. To do this, follow my code:

    -(void)CompileFilesToMakeMovie
    {
      AVMutableComposition* mixComposition = [AVMutableComposition composition];
    
      NSString* audio_inputFileName = @"deformed.caf";
      NSString* audio_inputFilePath = [Utilities documentsPath:audio_inputFileName];
      NSURL*    audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
    
      NSString* video_inputFileName = @"essai.mp4";
      NSString* video_inputFilePath = [Utilities documentsPath:video_inputFileName];
      NSURL*    video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
    
      NSString* outputFileName = @"outputFile.mov";
      NSString* outputFilePath = [Utilities documentsPath:outputFileName];
      NSURL*    outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    
      if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) 
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    
    
    
      CMTime nextClipStartTime = kCMTimeZero;
    
      AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
      CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
      AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
      [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    
      //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
    
      AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
      CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
      AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
      [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    
    
    
      AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];   
      _assetExport.outputFileType = @"com.apple.quicktime-movie";
      _assetExport.outputURL = outputFileUrl;
    
      [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void ) {
         [self saveVideoToAlbum:outputFilePath]; 
     }       
     ];  
    }
    

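Both loops above call a pixelBufferFromCGImage:andSize: helper that is not shown in the question or in the answer. A minimal sketch of such a helper, adapted from zoul's approach (the pixel format and drawing options here are assumptions, not taken from the original post), could look like this:

    -(CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
    {
      NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithBool:YES], (id)kCVPixelBufferCGImageCompatibilityKey,
                               [NSNumber numberWithBool:YES], (id)kCVPixelBufferCGBitmapContextCompatibilityKey,
                               nil];
      CVPixelBufferRef pxbuffer = NULL;
      CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                            kCVPixelFormatType_32ARGB,
                                            (CFDictionaryRef)options, &pxbuffer);
      NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

      CVPixelBufferLockBaseAddress(pxbuffer, 0);
      void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

      CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
      CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                   CVPixelBufferGetBytesPerRow(pxbuffer),
                                                   rgbColorSpace, kCGImageAlphaNoneSkipFirst);
      //Draw the CGImage into the pixel buffer.
      CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
      CGColorSpaceRelease(rgbColorSpace);
      CGContextRelease(context);

      CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
      //The caller owns the buffer and releases it with CVBufferRelease, as the loops above do.
      return pxbuffer;
    }
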
Sorry if there are some leaks; I'm still optimizing the memory management.
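
Under manual reference counting, one way to plug the most obvious leaks in CompileFilesToMakeMovie above is to release the two assets and the export session once the export has completed; a sketch, reusing the same variable names and replacing the export call at the end of the method:

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        [self saveVideoToAlbum:outputFilePath];
        //Balance the earlier alloc/init calls (MRC). The copied block retains
        //these objects, so they stay alive until this handler has run.
        [videoAsset release];
        [audioAsset release];
        [_assetExport release];
    }];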

Other tips

The AVChannelLayoutKey should point to an NSData instance containing an AudioChannelLayout.

Yours points to an NSNumber.
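
For example, reusing the names from the question's audioSettings (mono AAC assumed), the channel layout entry would look like this:

    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(channelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithInt:192000], AVEncoderBitRateKey,
                                   //NSData wrapping the AudioChannelLayout struct, not an NSNumber:
                                   [NSData dataWithBytes:&channelLayout length:sizeof(channelLayout)], AVChannelLayoutKey,
                                   nil];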

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow