How can I compress a video on iOS using bit rate?

How can I compress a video using the bit rate? I tried the code below to compress a video, but it doesn't work; the app crashes with an error like:

*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetReader startReading] cannot be called again after reading has already started'

    - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
    {
        // Handle movie capture
        NSURL *movieURL = [info objectForKey:UIImagePickerControllerMediaURL];
        NSData *data = [NSData dataWithContentsOfURL:movieURL];
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *tempPath = [documentsDirectory stringByAppendingFormat:@"/vid1.mp4"];
        BOOL success = [data writeToFile:tempPath atomically:NO];
        if (success) {
            NSLog(@"Video successfully written");
        } else {
            NSLog(@"Video writing failed");
        }

        NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:@"1234"] stringByAppendingString:@".mp4"]];

        // Compress movie first
        [self convertVideoToLowQuailtyWithInputURL:movieURL outputURL:uploadURL];
    }

    - (void)convertVideoToLowQuailtyWithInputURL:(NSURL *)inputURL outputURL:(NSURL *)outputURL
    {
        // Set up the video writer
        AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CGSize videoSize = videoTrack.naturalSize;
        NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                        [NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
        NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             AVVideoCodecH264, AVVideoCodecKey,
                                             videoWriterCompressionSettings, AVVideoCompressionPropertiesKey,
                                             [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey,
                                             [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey,
                                             nil];
        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                                  outputSettings:videoWriterSettings];
        videoWriterInput.expectsMediaDataInRealTime = YES;
        videoWriterInput.transform = videoTrack.preferredTransform;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                                               fileType:AVFileTypeQuickTimeMovie
                                                                  error:nil];
        [videoWriter addInput:videoWriterInput];

        // Set up the video reader
        NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:
                                             [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                                        forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
                                                                                       outputSettings:videoReaderSettings];
        AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
        [videoReader addOutput:videoReaderOutput];

        // Set up the audio writer
        AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                                  outputSettings:nil];
        audioWriterInput.expectsMediaDataInRealTime = NO;
        [videoWriter addInput:audioWriterInput];

        // Set up the audio reader
        AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                                                                            outputSettings:nil];
        AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
        [audioReader addOutput:audioReaderOutput];

        [videoWriter startWriting];

        // Start reading from the video reader
        [videoReader startReading];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
        [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
            while ([videoWriterInput isReadyForMoreMediaData]) {
                CMSampleBufferRef sampleBuffer;
                if ([videoReader status] == AVAssetReaderStatusReading &&
                    (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
                    [videoWriterInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    [videoWriterInput markAsFinished];
                    if ([videoReader status] == AVAssetReaderStatusCompleted) {
                        [audioReader startReading];
                        [videoWriter startSessionAtSourceTime:kCMTimeZero];
                        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
                        [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
                            while (audioWriterInput.readyForMoreMediaData) {
                                CMSampleBufferRef sampleBuffer;
                                if ([audioReader status] == AVAssetReaderStatusReading &&
                                    (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                    [audioWriterInput appendSampleBuffer:sampleBuffer];
                                    CFRelease(sampleBuffer);
                                } else {
                                    [audioWriterInput markAsFinished];
                                    if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                        [videoWriter finishWritingWithCompletionHandler:^() {
                                            NSLog(@"Output URL : %@", outputURL);
                                        }];
                                    }
                                }
                            }
                        }];
                    }
                }
            }
        }];
    }

You can use the presets below to compress the video by quality level (a quick way to check which presets a given asset actually supports is sketched right after this list).

  • AVAssetExportPresetLowQuality
  • AVAssetExportPresetMediumQuality
  • AVAssetExportPresetHighestQuality
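
Not every preset is available for every asset, so it is worth asking AVFoundation what it offers before exporting. A minimal sketch, assuming `asset` is the AVAsset you intend to export:

    // List the export presets AVFoundation reports as compatible
    // with this particular asset (`asset` is assumed to exist).
    NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];
    NSLog(@"Compatible export presets: %@", presets);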

Code:

    - (void)CompressVideo
    {
        if (firstAsset != nil) {
            // Create an AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks.
            AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
            // http://stackoverflow.com/questions/22715881/merge-video-files-with-their-original-audio-in-ios

            // VIDEO TRACK
            AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
            [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                                ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                 atTime:kCMTimeZero
                                  error:nil];

            // AUDIO TRACK inclusion
            NSArray *arr = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
            AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                                ofTrack:[arr lastObject]
                                 atTime:kCMTimeZero
                                  error:nil];

            AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

            // FIXING ORIENTATION
            AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
            AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
            BOOL isFirstAssetPortrait_ = NO;
            CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
            if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
                FirstAssetOrientation_ = UIImageOrientationRight;
                isFirstAssetPortrait_ = YES;
            }
            if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
                FirstAssetOrientation_ = UIImageOrientationLeft;
                isFirstAssetPortrait_ = YES;
            }
            if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
                FirstAssetOrientation_ = UIImageOrientationUp;
            }
            if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
                FirstAssetOrientation_ = UIImageOrientationDown;
            }
            CGFloat FirstAssetScaleToFitRatio = VideoWidth / FirstAssetTrack.naturalSize.width;
            if (isFirstAssetPortrait_) {
                FirstAssetScaleToFitRatio = VideoWidth / FirstAssetTrack.naturalSize.height;
                CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
                [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor)
                                             atTime:kCMTimeZero];
            } else {
                CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
                [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160))
                                             atTime:kCMTimeZero];
            }
            [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];

            MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];
            AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
            MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
            MainCompositionInst.frameDuration = CMTimeMake(1, 30);
            // MainCompositionInst.renderSize = CGSizeMake(VideoWidth, 900);
            MainCompositionInst.renderSize = CGSizeMake(VideoWidth, [UIScreen mainScreen].bounds.size.height);

            NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
            NSString *documentsDirectory = [paths objectAtIndex:0];
            NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"CompressedVideo.mov"];
            NSLog(@"myPath Docs : %@", myPathDocs);
            NSURL *url = [NSURL fileURLWithPath:myPathDocs];
            if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
                NSError *error;
                [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:&error];
            }

            // Movie quality
            AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                              presetName:AVAssetExportPresetMediumQuality];
            exporter.outputURL = url;
            // Movie type
            exporter.outputFileType = AVFileTypeQuickTimeMovie;
            exporter.videoComposition = MainCompositionInst;
            exporter.shouldOptimizeForNetworkUse = YES;
            [exporter exportAsynchronouslyWithCompletionHandler:^{
                dispatch_async(dispatch_get_main_queue(), ^{
                    videoUrToUload = url;
                    [self exportDidFinish:exporter];
                });
            }];
        }
    }

    - (void)exportDidFinish:(AVAssetExportSession *)session
    {
        if (session.status == AVAssetExportSessionStatusCompleted) {
            // Store the URL somewhere, using session.outputURL
        }
    }
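
Note that exportDidFinish: above only reacts to the completed state. As a sketch (not from the original answer), the other terminal states can be handled the same way, with session.error explaining a failure:

    - (void)exportDidFinish:(AVAssetExportSession *)session
    {
        switch (session.status) {
            case AVAssetExportSessionStatusCompleted:
                // The compressed movie is now at session.outputURL.
                NSLog(@"Export finished: %@", session.outputURL);
                break;
            case AVAssetExportSessionStatusFailed:
                // session.error describes what went wrong.
                NSLog(@"Export failed: %@", session.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export cancelled");
                break;
            default:
                break;
        }
    }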

I ran into the same crash, but after making a few changes this method worked for me. The block passed to requestMediaDataWhenReadyOnQueue: can fire again after the video reader has completed, so without a guard [audioReader startReading] ends up being called a second time, which is exactly what the exception complains about. Just replace the method above with this one:

    - (void)convertVideoToLowQuailtyWithInputURL:(NSURL *)inputURL outputURL:(NSURL *)outputURL
    {
        // Set up the video writer
        AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CGSize videoSize = videoTrack.naturalSize;
        NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                        [NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
        NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             AVVideoCodecH264, AVVideoCodecKey,
                                             videoWriterCompressionSettings, AVVideoCompressionPropertiesKey,
                                             [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey,
                                             [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey,
                                             nil];
        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                                  outputSettings:videoWriterSettings];
        videoWriterInput.expectsMediaDataInRealTime = YES;
        videoWriterInput.transform = videoTrack.preferredTransform;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                                               fileType:AVFileTypeQuickTimeMovie
                                                                  error:nil];
        [videoWriter addInput:videoWriterInput];

        // Set up the video reader
        NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:
                                             [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                                        forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
                                                                                       outputSettings:videoReaderSettings];
        AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
        [videoReader addOutput:videoReaderOutput];

        // Set up the audio writer
        AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                                  outputSettings:nil];
        audioWriterInput.expectsMediaDataInRealTime = NO;
        [videoWriter addInput:audioWriterInput];

        // Set up the audio reader
        AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                                                                            outputSettings:nil];
        AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
        [audioReader addOutput:audioReaderOutput];

        [videoWriter startWriting];

        // Start reading from the video reader
        [videoReader startReading];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
        [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
            while ([videoWriterInput isReadyForMoreMediaData]) {
                CMSampleBufferRef sampleBuffer;
                if ([videoReader status] == AVAssetReaderStatusReading &&
                    (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
                    [videoWriterInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    [videoWriterInput markAsFinished];
                    if ([videoReader status] == AVAssetReaderStatusCompleted) {
                        // Guard: only start the audio reader once. Calling startReading
                        // again is what triggered the NSInvalidArgumentException.
                        if ([audioReader status] == AVAssetReaderStatusReading ||
                            [audioReader status] == AVAssetReaderStatusCompleted) {
                            // The audio pass has already started -- do nothing.
                        } else {
                            // Start writing from the audio reader
                            [audioReader startReading];
                            [videoWriter startSessionAtSourceTime:kCMTimeZero];
                            dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
                            [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
                                while (audioWriterInput.readyForMoreMediaData) {
                                    CMSampleBufferRef sampleBuffer;
                                    if ([audioReader status] == AVAssetReaderStatusReading &&
                                        (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                        [audioWriterInput appendSampleBuffer:sampleBuffer];
                                        CFRelease(sampleBuffer);
                                    } else {
                                        [audioWriterInput markAsFinished];
                                        if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                            [videoWriter finishWritingWithCompletionHandler:^() {
                                                // [self sendMovieFileAtURL:outputURL];
                                                NSString *moviePath = [outputURL path];
                                                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(moviePath)) {
                                                    UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self,
                                                        @selector(video:didFinishSavingWithError:contextInfo:), nil);
                                                }
                                            }];
                                            break;
                                        }
                                    }
                                }
                            }];
                        }
                    }
                }
            }
        }];
    }
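
For reference, a minimal sketch of calling this method from the picker callback shown in the question (movieURL is assumed to come from UIImagePickerControllerMediaURL, and the output file name is just an example). The bit rate is controlled by the AVVideoAverageBitRateKey value in the writer settings: lowering it from 1250000 produces a smaller file at lower quality.

    // movieURL is assumed to come from the image picker callback above.
    NSURL *uploadURL = [NSURL fileURLWithPath:
        [NSTemporaryDirectory() stringByAppendingPathComponent:@"compressed.mp4"]];
    // Lowering AVVideoAverageBitRateKey (e.g. from 1250000 to 640000)
    // in the writer settings trades quality for a smaller file.
    [self convertVideoToLowQuailtyWithInputURL:movieURL outputURL:uploadURL];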