iPhoneでAVAssetWriterInputPixelBufferAdaptorと一緒にCVPixelBufferPoolを使用するには?

StackOverflow https://stackoverflow.com/questions/4023842

質問

次のコードを使用して、画像からビデオを作成することに成功しました。

// Writes each image in `array` as one frame of a 20 fps QuickTime movie at
// `path`. NOTE(review): `duration` is accepted but never used, matching the
// original interface.
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    // The convenience constructor returns an autoreleased object; the extra
    // `retain` in the original leaked the input under MRC.
    AVAssetWriterInput *writerInput = [AVAssetWriterInput
                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Write samples. The original appended frame 0 twice (once before the loop
    //at time zero and again at 1/20 s inside the loop) and never released the
    //pixel buffers it created; both defects are fixed here.
    for (int i = 0; i < [array count]; i++)
    {
        // Back off briefly instead of the original `i--` busy-spin when the
        // input is not ready for more media data.
        while (![writerInput isReadyForMoreMediaData]) {
            [NSThread sleepForTimeInterval:0.05];
        }

        NSLog(@"inside for loop %d",i);
        CMTime presentTime = CMTimeMake(i, 20);  // frame i is shown at i/20 s

        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
        if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
            NSLog(@"append failed at frame %d", i);
        if (buffer)
            CVPixelBufferRelease(buffer);  // we own the buffer returned by the helper
    }
    NSLog(@"outside for loop");

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];  // synchronous here; safe to release afterwards
    [videoWriter release];        // balances alloc/init (MRC)
}

ここではCVPixelBufferRefを使用しています。その代わりに、AVAssetWriterInputPixelBufferAdaptorをCVPixelBufferPoolRefと一緒に使用したいのです。

どなたか、私がデバッグできる例とその使い方を提供していただけませんか?

役に立ちましたか?

解決

「sourcePixelBufferAttributes」にnilを渡しているため、ピクセルバッファプールが作成されることはありません:

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];

その代わりに、たとえば次のように何らかの属性を渡してください:

NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

そうすれば、次のようにプールを使用してピクセルバッファを作成できます:

CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);

他のヒント

@Atulkumar V. Jain:素晴らしい!幸運を祈ります^^ @Brian:ありがとう、おっしゃる通りでした。修正して動作するようになったコードを以下に載せます(他に必要な方のために:-))

// Answer snippet (MRC era): seeds frame 0 synchronously, then feeds the
// remaining frames from a background queue via requestMediaDataWhenReadyOnQueue:.
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:0] CGImage]];
// NOTE(review): this call overwrites `buffer` with a fresh (blank) buffer from
// the adaptor's pool, leaking the rendered buffer created above — frame 0 is
// appended from the blank pool buffer, not from the image.
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor_.pixelBufferPool, &buffer);

[adaptor_ appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

__block UInt64 convertedByteCount = 0;  // declared but never updated below
dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
// NOTE(review): `i` is static, so it survives re-invocations of the block but
// also never resets if this code runs a second time in the same process.
static int i = 1;
int frameNumber = [imagesArray count];

[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    // NOTE(review): busy-spins whenever the input is not ready instead of
    // returning and letting AVFoundation re-invoke the block.
    while (1){
        if (i == frameNumber) {
            break;
        }
        if ([writerInput isReadyForMoreMediaData]) {

            CVPixelBufferRef sampleBuffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]];
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

           CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

           CMTime presentTime=CMTimeAdd(lastTime, frameTime);       

        if (sampleBuffer) {
                [adaptor_ appendPixelBuffer:sampleBuffer withPresentationTime:presentTime];
                i++;
                CFRelease(sampleBuffer);
            } else {
                break;
            }
        }
    }
    NSLog (@"done");
    [writerInput markAsFinished];
    [videoWriter finishWriting];     

    CVPixelBufferPoolRelease(adaptor_.pixelBufferPool);
    [videoWriter release];
    [writerInput release];      
    [imagesArray removeAllObjects];


}];
「for」ループを使用する代わりに、このコードを使用してください:

// Answer snippet (MRC era): drives all frames from inside the
// requestMediaDataWhenReadyOnQueue: block.
dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{

    // NOTE(review): AVFoundation may invoke this block more than once; the
    // locals below are re-initialized each time, so frame 0 would be
    // re-appended and `i` restarted on a second invocation.
    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    // NOTE(review): overwrites `buffer` with a blank pool buffer, leaking the
    // rendered buffer above — same defect as the previous snippet.
    CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer);

    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    int i = 1;
     while (writerInput.readyForMoreMediaData) {
         NSLog(@"inside for loop %d",i);
         CMTime frameTime = CMTimeMake(1, 20);

         CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

         CMTime presentTime=CMTimeAdd(lastTime, frameTime);

         if (i >= [array count]) {
             buffer = NULL;  // signals "done" to the branch below
         }else {
              buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
         }          
         //CVBufferRetain(buffer);

         if (buffer) {
             // append buffer
             [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
             i++;
         } else {
             // done!

             //Finish the session:
             [writerInput markAsFinished];
             [videoWriter finishWriting];                

             CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
             [videoWriter release];
             [writerInput release];
             NSLog (@"Done");
             // NOTE(review): `imageArray` does not match the `array` variable
             // used above — likely a typo in the original answer.
             [imageArray removeAllObjects];              
             break;
         }
     }
}];

すべて動作するようになりました!

ここではサンプルコードのリンクは次のとおりです。git@github.com:RudyAramayo / AVAssetWriterInputPixelBufferAdaptorSample.git

ここでは、必要なコードがあります:

// Demo: writes the same bundled image 120 times as a 20 fps QuickTime movie
// to Documents/Movie.m4v, exercising the pool-backed pixel-buffer-adaptor
// pipeline end to end.
- (void) testCompressionSession
{
    CGSize size = CGSizeMake(480, 320);

    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];

    NSError *error = nil;

    // Remove any previous output; AVAssetWriter fails if the file exists.
    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if(error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    // Passing non-nil source attributes is what makes adaptor.pixelBufferPool
    // non-NULL after the session starts.
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                               [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                         sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"i can't add this input");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie

    // `imageNamed:` caches the UIImage, which keeps the backing CGImage alive
    // for the lifetime of the async block below.
    CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            // The original pre-incremented `frame` (`if (++frame >= 120)`), so
            // nothing was ever written at time zero even though the session
            // starts at kCMTimeZero, and only 119 frames were produced.
            // Append frames 0..119 (exactly 120), then finish.
            if (frame >= 120)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (buffer)
            {
                if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success:%d", frame);
                CFRelease(buffer);  // we own the buffer returned by the helper
            }
            frame++;
        }
    }];

    NSLog(@"outside for loop");

}

// Renders `image` into a newly created 32ARGB CVPixelBuffer of the given
// size. The caller owns the returned buffer and must release it with
// CVPixelBufferRelease/CFRelease.
// NOTE(review): the image is drawn at its native size (not scaled to `size`),
// matching the original behavior.
- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual bytes-per-row: CVPixelBuffer rows may be padded
    // for alignment, and the original's hard-coded 4*size.width skews the
    // rendered image whenever padding is present.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    // CVPixelBufferCreate does not zero the backing memory; clear it so any
    // area not covered by the image is transparent rather than garbage.
    CGContextClearRect(context, CGRectMake(0, 0, size.width, size.height));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
ライセンス: CC-BY-SA帰属
所属していません StackOverflow
scroll top