Question

I have successfully created a video from an array of images with the following code:
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput* writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];


    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVPixelBufferRelease(buffer); // release the buffer once it has been appended

    //Write samples:
    for (int i = 0;i<[array count]; i++)
    {
        if([writerInput isReadyForMoreMediaData])
        {
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

            CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 24 of the loop above

            CMTime presentTime=CMTimeAdd(lastTime, frameTime);

            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];

            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            CVPixelBufferRelease(buffer); // release each frame's buffer after appending

        }
        else
        {
            // Input not ready yet; retry the same frame index (this busy-waits)
            NSLog(@"error");
            i--;
        }
    }
    NSLog(@"outside for loop");

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}

Here I used a CVPixelBufferRef. Instead, I want to use a CVPixelBufferPoolRef together with the AVAssetWriterInputPixelBufferAdaptor.

Can anyone give an example that I can debug and use?

Was it helpful?

Solution

You are passing nil for 'sourcePixelBufferAttributes', which is why the pixel buffer pool does not get created:

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
    assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
    sourcePixelBufferAttributes:nil];

Instead, pass some attributes, for example:

NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
    [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
    nil];
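
Passing these attributes (instead of nil) when creating the adaptor lets AVFoundation set up the pool for you; a minimal sketch based on the code above:

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
    assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
    sourcePixelBufferAttributes:bufferAttributes];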

Then you can create pixel buffers from the pool like this:

CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);
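
Note that adaptor.pixelBufferPool is typically NULL until the writer has actually started writing, so only create buffers from it after calling startWriting. A minimal sketch of the create/append/release cycle (presentTime is assumed to be computed as in the question's loop):

CVPixelBufferRef pixelBuffer = NULL;
CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                     adaptor.pixelBufferPool,
                                                     &pixelBuffer);
if (result == kCVReturnSuccess && pixelBuffer != NULL) {
    // draw the frame into pixelBuffer here, then append and release it
    [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentTime];
    CVPixelBufferRelease(pixelBuffer);
}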

Other Tips

@Atulkumar V. Jain: great! Good luck ^^ @Brian: you are right, thanks. I have corrected it and got it working now. Here is the working code (in case anyone else needs it :-)):

CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:0] CGImage]];
// Note: don't overwrite this buffer with CVPixelBufferPoolCreatePixelBuffer here;
// that would leak the image buffer and append an empty pool buffer instead.

[adaptor_ appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
CVPixelBufferRelease(buffer); // release the first frame's buffer after appending

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
__block int i = 1; // __block so the handler block can update it across invocations
int frameNumber = [imagesArray count];

[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    while (1){
        if (i == frameNumber) {
            break;
        }
        if ([writerInput isReadyForMoreMediaData]) {

            CVPixelBufferRef sampleBuffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]];
            NSLog(@"inside the loop %d", i);
            CMTime frameTime = CMTimeMake(1, 20);

            CMTime lastTime = CMTimeMake(i, 20); // i runs from 1 to frameNumber - 1

            CMTime presentTime = CMTimeAdd(lastTime, frameTime);

            if (sampleBuffer) {
                [adaptor_ appendPixelBuffer:sampleBuffer withPresentationTime:presentTime];
                i++;
                CFRelease(sampleBuffer);
            } else {
                break;
            }
        }
    }
    NSLog (@"done");
    [writerInput markAsFinished];
    [videoWriter finishWriting];     

    // Note: don't call CVPixelBufferPoolRelease on adaptor_.pixelBufferPool; the adaptor owns its pool
    [videoWriter release];
    [writerInput release];      
    [imagesArray removeAllObjects];


}];
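
As a side note, on iOS 6 and later finishWriting is deprecated in favor of finishWritingWithCompletionHandler:, which calls back once the file is completely written. A minimal sketch:

[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    NSLog(@"finished writing %@", videoWriter.outputURL);
    // the movie file is now safe to read or move
}];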

Instead of the "for" loop, use this code:

dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];

    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVPixelBufferRelease(buffer); // release the first frame's buffer after appending
    int i = 1;
     while (writerInput.readyForMoreMediaData) {
         NSLog(@"inside for loop %d",i);
         CMTime frameTime = CMTimeMake(1, 20);

         CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

         CMTime presentTime=CMTimeAdd(lastTime, frameTime);

         if (i >= [array count]) {
             buffer = NULL;
         }else {
              buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
         }          
         //CVBufferRetain(buffer);

         if (buffer) {
             // append buffer
             [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
             CVPixelBufferRelease(buffer); // release each appended buffer
             i++;
         } else {
             // done!

             //Finish the session:
             [writerInput markAsFinished];
             [videoWriter finishWriting];                

             // Note: don't call CVPixelBufferPoolRelease here; the adaptor owns its pool
             [videoWriter release];
             [writerInput release];
             NSLog (@"Done");
             [array removeAllObjects]; // assumes array is an NSMutableArray
             break;
         }
     }
}];
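
One caveat with this pattern: requestMediaDataWhenReadyOnQueue:usingBlock: may invoke the block several times, so per-session state such as the frame index (and the initial frame append) should live outside the block, or frame 0 gets re-appended on every invocation. A hypothetical restructuring, using the identifiers from the code above:

__block int frameIndex = 0;
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    while (writerInput.readyForMoreMediaData && frameIndex < [array count]) {
        CVPixelBufferRef buf = [self pixelBufferFromCGImage:[[array objectAtIndex:frameIndex] CGImage]];
        [adaptor appendPixelBuffer:buf withPresentationTime:CMTimeMake(frameIndex, 20)];
        CVPixelBufferRelease(buf);
        frameIndex++;
    }
    if (frameIndex >= [array count]) {
        [writerInput markAsFinished];
        [videoWriter finishWriting];
    }
}];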

I got it all working!

Here is the sample code link: git@github.com:RudyAramayo/AVAssetWriterInputPixelBufferAdaptorSample.git

Here is the code you need:

- (void) testCompressionSession
{
    CGSize size = CGSizeMake(480, 320);

    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];

    NSError *error = nil;

    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if(error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                               [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                         sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"i can't add this input");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie

    CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if(++frame >= 120)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (buffer)
            {
                if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success:%d", frame);
                CFRelease(buffer);
            }
        }
    }];

    NSLog(@"outside for loop");

}

- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
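
The commented-out CVPixelBufferPoolCreatePixelBuffer line above hints at the pool-based approach the question asked about. A hypothetical variant of the helper that draws into a buffer taken from the adaptor's pool (assuming the caller passes adaptor.pixelBufferPool only after writing has started):

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
                                      pool:(CVPixelBufferPoolRef)pool
                                      size:(CGSize)size
{
    CVPixelBufferRef pxbuffer = NULL;
    // Reuse a buffer from the pool instead of allocating a fresh one per frame
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer; // caller releases with CVPixelBufferRelease after appending
}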
Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow