Record video with AVCaptureSession, add a CIFilter, and save it to the photos album.

I want to build a custom video recorder in my app. Right now I can record the video and save it, but I want to add filters to the video while recording and save the video with the new filter to the photos album. This is my code for recording the video and saving it.

let captureSession = AVCaptureSession()
let fileOutput = AVCaptureMovieFileOutput()

func initVideoRecording() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    let session = AVCaptureSession()
    session.beginConfiguration()
    session.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: session)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        session.addInput(input)
        session.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    session.addOutput(fileOutput)
    session.commitConfiguration()
    session.startRunning()
}

@IBAction func recordFunc() {
    if fileOutput.recording {
        myButton.setTitle("record", forState: .Normal)
        fileOutput.stopRecording()
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
        fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)
        myButton.setTitle("stop", forState: .Normal)
    }
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // save the recorded video to the photos album
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
}

I tried using AVCaptureVideoDataOutput

and in its delegate I use this code:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait

    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    let comicEffect = CIFilter(name: "CIComicEffect")
    comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

    let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = filteredImage
    }
}

With this code the filter is only displayed; it is not recorded.

======================= / this is the solution to my question \ ================ Please note that this code uses Swift 2 and Xcode 7.3.

let captureSession = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()

var adapter: AVAssetWriterInputPixelBufferAdaptor!
var record = false
var videoWriter: AVAssetWriter!
var writerInput: AVAssetWriterInput!
var audioWriterInput: AVAssetWriterInput!
var lastPath = ""
var starTime = kCMTimeZero
var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    video()
}

func video() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    //videoLayer.frame = myImage.bounds
    //myImage.layer.addSublayer(videoLayer)
    view.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        captureSession.addInput(input)
        captureSession.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)
    videoOutput.setSampleBufferDelegate(self, queue: queue)
    audioOutput.setSampleBufferDelegate(self, queue: queue)

    captureSession.addOutput(videoOutput)
    captureSession.addOutput(audioOutput)
    captureSession.commitConfiguration()
    captureSession.startRunning()
}

@IBAction func recordFunc() {
    if record {
        myButton.setTitle("record", forState: .Normal)
        record = false
        self.writerInput.markAsFinished()
        audioWriterInput.markAsFinished()
        self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
            print("FINISHED!!!!!")
            UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
        }
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")
        lastPath = fileUrl.path!
        videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

        let outputSettings = [AVVideoCodecKey: AVVideoCodecH264,
                              AVVideoWidthKey: NSNumber(float: Float(outputSize.width)),
                              AVVideoHeightKey: NSNumber(float: Float(outputSize.height))]

        writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        writerInput.expectsMediaDataInRealTime = true
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String: AnyObject])

        videoWriter.addInput(writerInput)
        videoWriter.addInput(audioWriterInput)

        adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String: AnyObject])

        videoWriter.startWriting()
        videoWriter.startSessionAtSourceTime(starTime)

        record = true
        myButton.setTitle("stop", forState: .Normal)
    }
}

func getCurrentDate() -> String {
    let format = NSDateFormatter()
    format.dateFormat = "dd-MM-yyyy hh:mm:ss"
    format.locale = NSLocale(localeIdentifier: "en")
    let date = format.stringFromDate(NSDate())
    return date
}

extension newCustomCameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if captureOutput == videoOutput {
            connection.videoOrientation = AVCaptureVideoOrientation.Portrait

            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

            let comicEffect = CIFilter(name: "CIHexagonalPixellate")
            comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

            let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
            //let filteredImage = UIImage(CIImage: cameraImage)

            if self.record == true {
                dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
                    if self.record == true {
                        if self.writerInput.readyForMoreMediaData {
                            let bo = self.adapter.appendPixelBuffer(DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)).takeRetainedValue() as CVPixelBufferRef, withPresentationTime: self.starTime)
                            print("video is \(bo)")
                        }
                    }
                })
            }

            dispatch_async(dispatch_get_main_queue()) {
                self.myImage.image = filteredImage
            }
        } else if captureOutput == audioOutput {
            if self.record == true {
                let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                print("audio is \(bo)")
            }
        }
    }

    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
        let context: CIContext? = CIContext(options: nil)
        if context != nil {
            return context!.createCGImage(inputImage, fromRect: inputImage.extent)
        }
        return nil
    }

    func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
        var title = "Success"
        var message = "Video was saved"
        if error != nil {
            title = "Error"
            message = "Video failed to save"
        }
        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}

These methods live in DejalActivityView, which is written in Objective-C, and I could not convert them to Swift, so if anyone can convert them, please edit my code with the conversion.

+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

+ (NSDictionary *)getAdapterDictionary {
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                                           nil];
    return sourcePixelBufferAttributesDictionary;
}

+ (NSDictionary *)getAudioDictionary {
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = nil;
    audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                           [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                           //[NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                           [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                           [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                           [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                           nil];

    // NSDictionary *audioOutputSettings = nil;
    // audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
    //                        [NSNumber numberWithInt:kAudioFormatMPEG4AAC_HE_V2], AVFormatIDKey,
    //                        [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
    //                        [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
    //                        nil];

    return audioOutputSettings;
}
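As a starting point, here is a rough Swift 2 translation of those three helpers. This is an untested sketch: the class name PixelBufferHelpers is a hypothetical stand-in for DejalActivityView, and the dictionary bridging relies on Swift 2's implicit Cocoa/CF bridging.

import AVFoundation
import CoreVideo

// Hypothetical Swift 2 stand-in for the DejalActivityView helpers above.
class PixelBufferHelpers {

    class func pixelBufferFromCGImage(image: CGImage, size: CGSize) -> CVPixelBuffer? {
        let options: [String: AnyObject] = [
            kCVPixelBufferCGImageCompatibilityKey as String: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
        ]
        var pxbuffer: CVPixelBuffer? = nil
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height),
                                         kCVPixelFormatType_32ARGB, options, &pxbuffer)
        // Mirrors the NSParameterAssert preconditions by returning nil on failure instead.
        guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

        CVPixelBufferLockBaseAddress(buffer, 0)
        let pxdata = CVPixelBufferGetBaseAddress(buffer)

        // ARGB, 8 bits per component, 4 bytes per pixel, matching the Objective-C version.
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGBitmapContextCreate(pxdata, Int(size.width), Int(size.height), 8,
                                            4 * Int(size.width), rgbColorSpace,
                                            CGImageAlphaInfo.PremultipliedFirst.rawValue)
        CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))), image)
        CVPixelBufferUnlockBaseAddress(buffer, 0)
        return buffer
    }

    class func getAdapterDictionary() -> [String: AnyObject] {
        return [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)]
    }

    class func getAudioDictionary() -> [String: AnyObject] {
        // AudioChannelLayout() is already zero-initialized, so no bzero is needed.
        var acl = AudioChannelLayout()
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
        return [
            AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 1,
            AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))
        ]
    }
}

Note that the call sites would change slightly: this Swift version returns a managed CVPixelBuffer? rather than an Unmanaged reference, so the .takeRetainedValue() call in the recording code would be dropped, and the target size has to be passed explicitly.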

You need to add an AVAssetWriter:

 var videoRecorder: AVAssetWriter? 

Then, in your delegate callback:

let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

if videoRecorder?.status == .Unknown {
    startRecordingTime = timeStamp
    videoRecorder?.startWriting()
    videoRecorder?.startSessionAtSourceTime(timeStamp)
}

You will need to set up the recorder for each recording you want to make, and you will also have to add your inputs to the recorder, as in the sketch below.
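A minimal per-recording setup could look like this (a Swift 2 sketch; the helper name makeRecorder and the H.264/AAC settings are assumptions, not taken from the question):

func makeRecorder(outputURL: NSURL, size: CGSize) -> AVAssetWriter? {
    // An AVAssetWriter cannot be reused, so build a fresh one per recording.
    guard let writer = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeMPEG4) else { return nil }

    let videoSettings: [String: AnyObject] = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: Int(size.width),
        AVVideoHeightKey: Int(size.height)
    ]
    let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    videoInput.expectsMediaDataInRealTime = true

    let audioSettings: [String: AnyObject] = [
        AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100.0,
        AVNumberOfChannelsKey: 1
    ]
    let audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
    audioInput.expectsMediaDataInRealTime = true

    // Inputs must be added before startWriting() is called.
    if writer.canAddInput(videoInput) { writer.addInput(videoInput) }
    if writer.canAddInput(audioInput) { writer.addInput(audioInput) }
    return writer
}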

You may start running into problems, since you don't appear to have any queue setup, which you will need. As a reference, this GitHub repo is a very good resource for it (a minimal queue sketch follows the link):

https://github.com/waleedka/rosywriterswift
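For instance, a minimal sketch of that queue setup (Swift 2; the queue label is a placeholder, and videoOutput/audioOutput are the data outputs from the code above):

// A serial queue keeps sample buffers arriving in order, off the main thread.
let bufferQueue = dispatch_queue_create("com.example.samplebuffers", DISPATCH_QUEUE_SERIAL)
videoOutput.setSampleBufferDelegate(self, queue: bufferQueue)
audioOutput.setSampleBufferDelegate(self, queue: bufferQueue)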

EDIT: Additional information

You create the AVAssetWriter and then add AVAssetWriterInput inputs to it for video/audio; the capture callback then appends buffers to those inputs, as in the sketch below.
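A hedged sketch of that appending step inside didOutputSampleBuffer (Swift 2; videoDataOutput, audioDataOutput, videoWriterInput, and audioWriterInput are assumed names, not from the answer above):

// Once the writer session has started (see the status check earlier),
// append each buffer to the matching input. readyForMoreMediaData must be
// checked before every append, or the writer can throw an exception.
if captureOutput == videoDataOutput {
    if videoWriterInput.readyForMoreMediaData {
        videoWriterInput.appendSampleBuffer(sampleBuffer)
    }
} else if captureOutput == audioDataOutput {
    if audioWriterInput.readyForMoreMediaData {
        audioWriterInput.appendSampleBuffer(sampleBuffer)
    }
}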