Question

I'm making an app that adds theme music to a video.

Some users complain that if their music is in Apple Lossless format, the resulting video is too large.

I found this is because the AVMutableComposition I use just passes the original audio track through, unchanged, into the video I generate.

So is there any way to lower the bitrate of the music from the MPMediaItem, or to re-encode it in a different format?
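
One way to verify the source track's codec (using the same audioAsset as in the snippet below) is to inspect its format descriptions; this logs "alac" for an Apple Lossless track:

AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
for (id desc in audioTrack.formatDescriptions) {
    CMAudioFormatDescriptionRef fmt = (__bridge CMAudioFormatDescriptionRef)desc;
    FourCharCode codec = CMFormatDescriptionGetMediaSubType(fmt);
    NSLog(@"audio codec: %c%c%c%c",
          (char)(codec >> 24), (char)(codec >> 16), (char)(codec >> 8), (char)codec);
}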

This is a snippet of the code I use to add the music to the video.

AVMutableComposition* mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                               preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];

AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];

NSURL    *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];

if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]){
    [[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}

_assetExport.outputFileType = AVFileTypeQuickTimeMovie; // i.e. @"com.apple.quicktime-movie"

_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // check _assetExport.status / _assetExport.error here
}];

Solution

I finally got it working. The idea is to decode the lossless track to uncompressed PCM with an AVAssetReader, then re-encode it at a lower bitrate with an AVAssetWriter before adding it to the composition. This is the code I use:

#import <AVFoundation/AVFoundation.h>

static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";

@implementation AudioUtil
{
    AVAssetReader *_assetReader;
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_assetWriterInput;
    AVAssetReaderTrackOutput *_readerOutput;
    void (^_callback)(BOOL);
    CMSampleBufferRef _sampleBufferToAppend;
}

- (void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback
{
    NSError *error = nil;
    _callback = callback;

    [[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];

    // Initialize the reader: decode the source track to uncompressed PCM.
    AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
    _assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
    _assetReader.timeRange = timeRange;
    AVAssetTrack *track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
    audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
    audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);

    _readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
    NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
    [_assetReader addOutput:_readerOutput];

    // Initialize the writer: re-encode the PCM samples at the target bitrate.
    _assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:&error];

    NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
    audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
    audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
    audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
    audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);

    _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    NSAssert([_assetWriter canAddInput:_assetWriterInput], @"writer can't add input");
    [_assetWriter addInput:_assetWriterInput];

    // Start the transcode.
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    BOOL canStartReading = [_assetReader startReading];
    NSLog(@"can start reading %d", canStartReading);
    if (!canStartReading) {
        callback(NO);
        return;
    }

    // Observe readyForMoreMediaData so appending resumes whenever the input can accept more.
    [_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew context:NULL];
    _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];

    [self appendBufferToAppend];
}

- (void)appendBufferToAppend
{
    if ([_assetWriterInput isReadyForMoreMediaData]) {
        if (_sampleBufferToAppend) {
            [_assetWriterInput appendSampleBuffer:_sampleBufferToAppend];
            CFRelease(_sampleBufferToAppend);
        }
        _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
        if (_sampleBufferToAppend) {
            [self appendBufferToAppend];
        }
        else {
            // No more samples: stop observing, mark the input finished, and finalize the file.
            [_assetWriterInput removeObserver:self forKeyPath:kWriterInputIsReadyForMoreData];
            [_assetWriterInput markAsFinished];
            [_assetWriter finishWritingWithCompletionHandler:^{
                if (_callback) {
                    _callback(_assetWriter.status == AVAssetWriterStatusCompleted);
                }
            }];
        }
    }
    else {
        // The writer input is busy; the KVO notification will call us again when it is ready.
    }
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
        if ([change[NSKeyValueChangeNewKey] boolValue]) {
            [self appendBufferToAppend];
        }
    }
}
@end
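
QLVideoFormatProvider is a helper class from the author's project and isn't shown. A minimal stand-in, assuming AAC output in an M4A container at 128 kbps stereo (these concrete values are illustrative assumptions, not part of the original answer):

// Illustrative stand-in for the project-specific QLVideoFormatProvider.
// All values are assumptions: AAC, stereo, 44.1 kHz, 128 kbps, .m4a container.
@interface QLVideoFormatProvider : NSObject
+ (NSString *)audioFileType;
+ (NSNumber *)audioFormatKeyForEncoder;
+ (NSUInteger)audioChannelCount;
+ (double)audioSampleRate;
+ (NSUInteger)audioBitrate;
@end

@implementation QLVideoFormatProvider
+ (NSString *)audioFileType            { return AVFileTypeAppleM4A; }
+ (NSNumber *)audioFormatKeyForEncoder { return @(kAudioFormatMPEG4AAC); }
+ (NSUInteger)audioChannelCount        { return 2; }
+ (double)audioSampleRate              { return 44100.0; }
+ (NSUInteger)audioBitrate             { return 128000; }
@end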
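
For completeness, here is a sketch of how the two pieces fit together: re-encode the audio first, then feed the smaller file into the composition code from the question. originalAudioURL and compressedAudioPath are placeholder names, and videoAsset is the same asset as in the question:

AudioUtil *audioUtil = [[AudioUtil alloc] init];
NSURL *compressedURL = [NSURL fileURLWithPath:compressedAudioPath]; // placeholder path

[audioUtil downSamplingAudioWithSourceURL:originalAudioURL // placeholder: URL of the lossless track
                           destinationURL:compressedURL
                                timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                 callBack:^(BOOL success) {
    if (!success) { return; }
    // Build the composition from the re-encoded file; the passthrough
    // export now copies the smaller re-encoded track instead of the lossless one.
    AVURLAsset *audioAsset = [AVURLAsset assetWithURL:compressedURL];
    // ... same composition/export code as in the question ...
}];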