AVMutableVideoComposition rotates video captured in portrait mode

I have used the following code to add an image overlay on top of a video and then export the new video to the documents directory. But, strangely, the video gets rotated by 90 degrees.

    - (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition
    {
        CMTime nextClipStartTime = kCMTimeZero;
        NSInteger i;

        // Make transitionDuration no greater than half the shortest clip duration.
        CMTime transitionDuration = self.transitionDuration;
        for (i = 0; i < [_clips count]; i++) {
            NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
            if (clipTimeRange) {
                CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration;
                halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator.
                transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration);
            }
        }

        // Add two video tracks and two audio tracks.
        AVMutableCompositionTrack *compositionVideoTracks[2];
        AVMutableCompositionTrack *compositionAudioTracks[2];
        compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
        CMTimeRange *transitionTimeRanges  = alloca(sizeof(CMTimeRange) * [_clips count]);

        // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
        for (i = 0; i < [_clips count]; i++) {
            NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
            AVURLAsset *asset = [_clips objectAtIndex:i];
            NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
            CMTimeRange timeRangeInAsset;
            if (clipTimeRange)
                timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
            else
                timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

            AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
            /*
            CGAffineTransform t = clipVideoTrack.preferredTransform;
            NSLog(@"Transform1 : %@", t);
            */
            AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];

            // Remember the time range in which this clip should pass through.
            // Every clip after the first begins with a transition.
            // Every clip before the last ends with a transition.
            // Exclude those transitions from the pass through time ranges.
            passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
            if (i > 0) {
                passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
                passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
            }
            if (i+1 < [_clips count]) {
                passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
            }

            // The end of this clip will overlap the start of the next by transitionDuration.
            // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
            nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
            nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

            // Remember the time range for the transition to the next item.
            transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
        }

        // Set up the video composition if we are to perform crossfade or push transitions between clips.
        NSMutableArray *instructions = [NSMutableArray array];

        // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
        for (i = 0; i < [_clips count]; i++) {
            NSInteger alternatingIndex = i % 2; // alternating targets

            // Pass through clip i.
            AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            passThroughInstruction.timeRange = passThroughTimeRanges[i];
            AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
            /*
            CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
            CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform, 320, 0);
            [passThroughLayer setTransform:rotateTranslate atTime:kCMTimeZero];
            */
            passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
            [instructions addObject:passThroughInstruction];

            if (i+1 < [_clips count]) {
                // Add transition from clip i to clip i+1.
                AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
                transitionInstruction.timeRange = transitionTimeRanges[i];
                AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
                AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];

                if (self.transitionType == SimpleEditorTransitionTypeCrossFade) {
                    // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
                    [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
                }
                else if (self.transitionType == SimpleEditorTransitionTypePush) {
                    // Set a transform ramp on fromLayer from identity to all the way left of the screen.
                    [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]];
                    // Set a transform ramp on toLayer from all the way right of the screen to identity.
                    [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]];
                }

                transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
                [instructions addObject:transitionInstruction];
            }
        }

        videoComposition.instructions = instructions;
    }

Please help, as I am not able to export portrait videos in the proper orientation. Any help is appreciated. Thanks.

By default, when you export video using AVAssetExportSession, the video is rotated away from its original orientation. You have to apply the track's transform to set its exact orientation. Please try the code below to do that.

    - (AVMutableVideoCompositionLayerInstruction *)layerInstructionAfterFixingOrientationForAsset:(AVAsset *)inAsset forTrack:(AVMutableCompositionTrack *)inTrack atTime:(CMTime)inTime
    {
        //FIXING ORIENTATION//
        AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:inTrack];
        AVAssetTrack *videoAssetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
        BOOL isVideoAssetPortrait_ = NO;
        CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;

        if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
            videoAssetOrientation_ = UIImageOrientationRight;
            isVideoAssetPortrait_ = YES;
        }
        if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
            videoAssetOrientation_ = UIImageOrientationLeft;
            isVideoAssetPortrait_ = YES;
        }
        if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
            videoAssetOrientation_ = UIImageOrientationUp;
        }
        if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
            videoAssetOrientation_ = UIImageOrientationDown;
        }

        CGFloat FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.width;
        if (isVideoAssetPortrait_) {
            FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.height;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [videolayerInstruction setTransform:CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        } else {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [videolayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
        }
        [videolayerInstruction setOpacity:0.0 atTime:inTime];

        return videolayerInstruction;
    }

Hope this helps.

    AVAssetTrack *assetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableCompositionTrack *mutableTrack = [mergeComposition mutableTrackCompatibleWithTrack:assetTrack];
    AVMutableVideoCompositionLayerInstruction *assetInstruction = [self layerInstructionAfterFixingOrientationForAsset:inAsset forTrack:myLocalVideoTrack atTime:videoTotalDuration];

Above is the code for calling the mentioned method, where inAsset is your video asset and videoTotalDuration is your video's total duration as a CMTime. mergeComposition is an object of the AVMutableComposition class.

Hope this helps you.

EDIT: This is not a callback method or event; you have to call it explicitly with the required parameters, as mentioned above.
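
For context, here is a minimal sketch (not part of the original answer) of how the returned layer instruction might be wired into a video composition and an export session. Identifiers such as mergeComposition, inAsset, myLocalVideoTrack, videoTotalDuration, and outputURL are assumed to already exist in your code, and the frame rate and render size are just example values.

    // Wrap the orientation-fixed layer instruction in a composition instruction.
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoTotalDuration);

    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [self layerInstructionAfterFixingOrientationForAsset:inAsset forTrack:myLocalVideoTrack atTime:videoTotalDuration];
    mainInstruction.layerInstructions = @[layerInstruction];

    // Build the video composition that the exporter will apply.
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[mainInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);       // example: 30 fps
    videoComposition.renderSize = CGSizeMake(320.0, 480.0);   // example size; match the 320-point scaling used above

    // Export the merged composition with the video composition applied.
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mergeComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = outputURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = videoComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // Check exporter.status / exporter.error here.
    }];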

Here is a slightly simpler way if you just want to keep the original rotation.

    // Grab the source track from AVURLAsset for example.
    AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].lastObject;
    // Grab the composition video track from AVMutableComposition you already made.
    AVMutableCompositionTrack *compositionVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].lastObject;
    // Apply the original transform.
    if (assetVideoTrack && compositionVideoTrack) {
        [compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
    }
    // Export...
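
The final `// Export...` step is left out above; a minimal sketch of it, assuming the `composition` from the snippet and an `outputURL` of your choosing, might look like this:

    // Export the composition; no video composition is needed here because the
    // preferredTransform on the track carries the original rotation.
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                           presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"Export finished with the original rotation preserved.");
        } else {
            NSLog(@"Export failed: %@", exportSession.error);
        }
    }];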

Use the method below to set the correct orientation, according to the video asset's orientation, in the AVMutableVideoComposition.

    - (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
    {
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        AVMutableComposition *composition = [AVMutableComposition composition];
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        CGSize videoSize = videoTrack.naturalSize;
        BOOL isPortrait_ = [self isVideoPortrait:asset];
        if (isPortrait_) {
            NSLog(@"video is portrait ");
            videoSize = CGSizeMake(videoSize.height, videoSize.width);
        }
        composition.naturalSize = videoSize;
        videoComposition.renderSize = videoSize;
        // videoComposition.renderSize = videoTrack.naturalSize; //
        videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / videoTrack.nominalFrameRate, 600);

        AVMutableCompositionTrack *compositionVideoTrack;
        compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

        AVMutableVideoCompositionLayerInstruction *layerInst;
        layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
        [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

        AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
        inst.layerInstructions = [NSArray arrayWithObject:layerInst];
        videoComposition.instructions = [NSArray arrayWithObject:inst];

        return videoComposition;
    }

    - (BOOL)isVideoPortrait:(AVAsset *)asset
    {
        BOOL isPortrait = FALSE;
        NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        if ([tracks count] > 0) {
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            CGAffineTransform t = videoTrack.preferredTransform;
            // Portrait
            if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
                isPortrait = YES;
            }
            // PortraitUpsideDown
            if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
                isPortrait = YES;
            }
            // LandscapeRight
            if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
                isPortrait = NO;
            }
            // LandscapeLeft
            if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
                isPortrait = NO;
            }
        }
        return isPortrait;
    }
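
As a usage sketch (assuming an `asset` and `outputURL` that you supply, with example preset and file type), the returned video composition can be handed to an export session like so:

    // Build the orientation-aware video composition and attach it to the exporter.
    AVMutableVideoComposition *videoComposition = [self getVideoComposition:asset];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                      presetName:AVAssetExportPresetMediumQuality];
    exporter.videoComposition = videoComposition;
    exporter.outputURL = outputURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Export status: %ld", (long)exporter.status);
    }];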

A Swift answer… this works for me:

    var assetVideoTrack = (sourceAsset.tracksWithMediaType(AVMediaTypeVideo)).last as! AVAssetTrack
    var compositionVideoTrack = (composition.tracksWithMediaType(AVMediaTypeVideo)).last as! AVMutableCompositionTrack

    if (assetVideoTrack.playable && compositionVideoTrack.playable) {
        compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform
    }

Swift 2:

    do {
        let paths = NSSearchPathForDirectoriesInDomains(
            NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
        let documentsDirectory: AnyObject = paths[0]
        // this will be changed to accommodate dynamic videos
        let dataPath = documentsDirectory.stringByAppendingPathComponent(videoFileName + ".MOV")
        let videoAsset = AVURLAsset(URL: NSURL(fileURLWithPath: dataPath), options: nil)
        let imgGenerator = AVAssetImageGenerator(asset: videoAsset)
        imgGenerator.appliesPreferredTrackTransform = true
        let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
        let uiImage = UIImage(CGImage: cgImage)

        videoThumb.image = uiImage
    } catch let err as NSError {
        print("Error generating thumbnail: \(err)")
    }