¿Por qué la grabación y la reproducción de video con AVCaptureSession, AVAssetWriter y AVPlayer tartamudean?

Estoy intentando hacer una aplicación que muestre la vista previa de la cámara, luego, en ciertas condiciones, comience a grabarla junto con la entrada de voz y, finalmente, reproduzca la película grabada.

Ya escribí las clases para previsualizar / grabar / reproducir y el controlador que gestiona su coordinación.

Parece que estas funciones funcionan perfectamente cuando se llaman de forma independiente, pero no puedo hacer que funcionen juntas: cuando se reproduce el video, el sonido se escucha correctamente, pero la imagen tarda unos cinco segundos en aparecer y luego se reproduce a tirones (tartamudea).

Aquí está mi código para esto:

Vista previa:

/// Builds the full capture pipeline: front camera (with fallback) + microphone
/// inputs, an on-screen preview layer, and video/audio data outputs whose
/// sample buffers are delivered to self on a shared serial queue.
/// Finishes by starting the session.
- (void) createSession {
    _session = [[AVCaptureSession alloc] init];

    // Prefer the front camera; fall back to the default video device.
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device)
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    // Cocoa convention: test the returned object, not the error pointer.
    if (_cVideoInput)
        [_session addInput:_cVideoInput];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (_cAudioInput)
        [_session addInput:_cAudioInput];

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];

    // dictionaryWithContentsOfFile: returns nil if the plist is missing or
    // malformed; only apply the settings when they actually loaded.
    NSDictionary *videoSettings = [NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS];
    if (videoSettings)
        [_videoOutput setVideoSettings:videoSettings];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    // A single serial queue for both outputs keeps audio and video callbacks
    // mutually ordered. NOTE(review): any slow work in the delegate callbacks
    // stalls capture on this queue and can itself cause dropped frames and a
    // stuttering recording.
    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];
    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];
    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);
    dispatch_release(queue); // the outputs keep the queue alive

    [_session startRunning];
}

/// Tears the pipeline down: stops the session, detaches the preview layer,
/// unhooks the sample-buffer delegates, and releases everything that
/// createSession retained.
- (void) deleteSession {
    [_session stopRunning];

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;

    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];
    [_audioOutput release];
    _audioOutput = nil;
    [_videoOutput release];
    _videoOutput = nil;

    [_cAudioInput release];
    _cAudioInput = nil;
    [_cVideoInput release];
    _cVideoInput = nil;

    // Detach whatever is still attached before letting the session go.
    for (AVCaptureInput *input in [_session inputs])
        [_session removeInput:input];
    for (AVCaptureOutput *output in [_session outputs])
        [_session removeOutput:output];

    [_session release];
    _session = nil;
}

Grabación:

 - (void) createWriter { NSString *file = [self file]; if ([[NSFileManager defaultManager] fileExistsAtPath:file]) [[NSFileManager defaultManager] removeItemAtPath:file error:NULL]; NSError *error = nil; _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain]; if (error) { [_writer release]; _writer = nil; NSLog(@"%@", error); return; } AudioChannelLayout acl; bzero( &acl, sizeof(acl)); acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey, [NSNumber numberWithFloat:44100.0], AVSampleRateKey, [NSNumber numberWithInt:1], AVNumberOfChannelsKey, [NSNumber numberWithInt:64000], AVEncoderBitRateKey, [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey, nil ]; _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain]; [_writer addInput:_wAudioInput]; settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:640], AVVideoWidthKey, [NSNumber numberWithInt:480], AVVideoHeightKey, nil]; _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain]; [_writer addInput:_wVideoInput]; } - (void) deleteWriter { [_wVideoInput release]; _wVideoInput = nil; [_wAudioInput release]; _wAudioInput = nil; [_writer release]; _writer = nil; } - (void) RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer { if (![self canRecordBuffer:sampleBuffer]) return; if ([_wAudioInput isReadyForMoreMediaData]) [_wAudioInput appendSampleBuffer:sampleBuffer]; } - (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer { if (![self canRecordBuffer:sampleBuffer]) return; if ([_wVideoInput isReadyForMoreMediaData]) [_wVideoInput appendSampleBuffer:sampleBuffer]; } 

Reproducción:

 - (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { AVPlayerItem *item = (AVPlayerItem *)object; [item removeObserver:self forKeyPath:@"status"]; switch (item.status) { case AVPlayerItemStatusReadyToPlay: [_player seekToTime:kCMTimeZero]; [_player play]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item]; break; case AVPlayerItemStatusUnknown: case AVPlayerItemStatusFailed: break; default: break; } } - (void) finishPlaying:(NSNotification *)notification { [_player pause]; [_playerLayer removeFromSuperlayer]; [_playerLayer release]; _playerLayer = nil; [_player release]; _player = nil; [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; } - (void) play:(NSString *)path { _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain]; _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain]; _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1); _playerLayer.frame = self.bounds; [self.layer addSublayer:_playerLayer]; [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL]; } 

Estaba teniendo un problema similar y, aunque esto no lo solucionó por completo, ayudó bastante: en lugar de descartar los búferes de muestra cuando el escritor no está listo para recibir más datos, intente esperar un poco y luego reevaluar si ya puede escribirlos.

 - (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer { if (![self canRecordBuffer:sampleBuffer]) return; if (!_wVideoInput.readyForMoreMediaData && _isRecording) { [self performSelector:@selector(RecordingVideoWithBuffer:) withObject:(__bridge id)(sampleBuffer) afterDelay:0.05]; return; } [_wVideoInput appendSampleBuffer:sampleBuffer]; 

Si no está utilizando ARC, basta con pasar sampleBuffer directamente; pero con ARC es necesario añadir el modificador __bridge.

EDITADO: Utilicé performSelector con retorno anticipado, en lugar de un bucle while con NSThread sleep, porque no bloquea el hilo; bloquear el hilo de captura sería contraproducente.