Question

I'm working on sampling the screen using AVCaptureScreenInput and outputting it with an AVCaptureVideoDataOutput, and it's not working. The images it does output are blank, even though it looks like I'm doing everything right according to all the documentation I've read.

I've made sure to set the AVCaptureVideoDataOutput's pixel format to something a CGImage can read (kCVPixelFormatType_32BGRA). When I run this same code and have it output to an AVCaptureMovieFileOutput, the movie renders fine and everything looks good, but what I really want is a series of images.

#import "ScreenRecorder.h"
#import <QuartzCore/QuartzCore.h>

@interface ScreenRecorder() <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>  {
    BOOL _isRecording;
@private
    AVCaptureSession *_session;
    AVCaptureOutput *_movieFileOutput;
    AVCaptureStillImageOutput *_imageFileOutput;

    NSUInteger _frameIndex;

    NSTimer *_timer;

    NSString *_outputDirectory;
}
@end

@implementation ScreenRecorder

- (BOOL)recordDisplayImages:(CGDirectDisplayID)displayId toURL:(NSURL *)fileURL windowBounds:(CGRect)windowBounds duration:(NSTimeInterval)duration {
    if (_isRecording) {
        return NO;
    }

    _frameIndex = 0;

    // Create a capture session
    _session = [[AVCaptureSession alloc] init];

    // Set the session preset as you wish
    _session.sessionPreset = AVCaptureSessionPresetHigh;

    // Create a ScreenInput with the display and add it to the session
    AVCaptureScreenInput *input = [[[AVCaptureScreenInput alloc] initWithDisplayID:displayId] autorelease];
    if (!input) {
        [_session release];
        _session = nil;
        return NO;
    }
    if ([_session canAddInput:input]) {
        [_session addInput:input];
    }

    input.cropRect = windowBounds;

    // Create a VideoDataOutput and add it to the session
    _movieFileOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [((AVCaptureVideoDataOutput *)_movieFileOutput) setVideoSettings:[NSDictionary dictionaryWithObjectsAndKeys:@(kCVPixelFormatType_32BGRA),kCVPixelBufferPixelFormatTypeKey, nil]];
//    ((AVCaptureVideoDataOutput *)_movieFileOutput).alwaysDiscardsLateVideoFrames = YES;

    if ([_session canAddOutput:_movieFileOutput])
        [_session addOutput:_movieFileOutput];

    // Start running the session
    [_session startRunning];

    // Delete any existing movie file first
    if ([[NSFileManager defaultManager] fileExistsAtPath:[fileURL path]])
    {
        NSError *err;
        if (![[NSFileManager defaultManager] removeItemAtPath:[fileURL path] error:&err])
        {
            NSLog(@"Error deleting existing movie %@",[err localizedDescription]);
        }
    }

    _outputDirectory = [[fileURL path] retain];
    [[NSFileManager defaultManager] createDirectoryAtPath:_outputDirectory withIntermediateDirectories:YES attributes:nil error:nil];    

    // Set the recording delegate to self
    dispatch_queue_t queue = dispatch_queue_create("com.schaefer.lolz", 0);
    [(AVCaptureVideoDataOutput *)_movieFileOutput setSampleBufferDelegate:self queue:queue];
    //dispatch_release(queue);

    if (0 != duration) {
        _timer = [[NSTimer scheduledTimerWithTimeInterval:duration target:self selector:@selector(finishRecord:) userInfo:nil repeats:NO] retain];
    }
    _isRecording = YES;

    return _isRecording;
}

- (void)dealloc
{
    if (nil != _session) {
        [_session stopRunning];
        [_session release];
    }

    [_outputDirectory release];
    _outputDirectory = nil;

    [super dealloc];
}

- (void)stopRecording {
    if (!_isRecording) {
        return;
    }
    _isRecording = NO;

    // Stop recording to the destination movie file
    if ([_movieFileOutput isKindOfClass:[AVCaptureFileOutput class]]) {
        [_movieFileOutput performSelector:@selector(stopRecording)];
    }
    [_session stopRunning];

    [_session release];
    _session = nil;

    [_timer release];
    _timer = nil;

}

-(void)finishRecord:(NSTimer *)timer
{
    [self stopRecording];
}

//AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);        // Lock the image buffer

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);   // Get information of the image
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef image = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);

    CGColorSpaceRelease(colorSpace);
    _frameIndex++;    
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    dispatch_async(dispatch_get_main_queue(), ^{
        NSURL *URL = [NSURL fileURLWithPath:[_outputDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.jpg", (int)_frameIndex]]];

        CGImageDestinationRef destination = CGImageDestinationCreateWithURL((CFURLRef)URL, kUTTypeJPEG, 1, NULL);
        CGImageDestinationAddImage(destination, image, nil);

        if (!CGImageDestinationFinalize(destination)) {
            NSLog(@"Failed to write image to %@", URL);
        }

        CFRelease(destination);
        CFRelease(image);
    });
}
@end

Solution

Your data isn't planar, so there is no base address for plane 0; there is no plane 0 at all. (To be sure, you can check with CVPixelBufferIsPlanar.) You'll need CVPixelBufferGetBaseAddress to get a pointer to the first pixel. All the data will be interleaved.
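As a minimal sketch of that fix, assuming the buffer really is non-planar 32BGRA (everything else in the question's delegate can stay as it was):

//AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // 32BGRA is a chunky (non-planar) format: there is exactly one
    // interleaved plane of data, so ask for its base address directly.
    NSAssert(!CVPixelBufferIsPlanar(imageBuffer), @"expected a non-planar pixel buffer");
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef image = CGBitmapContextCreateImage(newContext);  // copies the pixels out of the buffer
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // ... write `image` out as before, then CFRelease(image) ...
}

The only change from the question's version is the single address lookup. If CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0) returned NULL for this chunky buffer, CGBitmapContextCreate would have quietly allocated its own zero-filled backing store, which would explain the blank JPEGs.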
