didOutputSampleBuffer delegate not called

The didOutputSampleBuffer function in my code never gets called, and I don't know why. Here is the code:

    import UIKit
    import AVFoundation
    import Accelerate

    class ViewController: UIViewController {
        var captureSession: AVCaptureSession?
        var dataOutput: AVCaptureVideoDataOutput?
        var customPreviewLayer: AVCaptureVideoPreviewLayer?

        @IBOutlet weak var camView: UIView!

        override func viewWillAppear(animated: Bool) {
            super.viewWillAppear(animated)
            captureSession?.startRunning()
            //setupCameraSession()
        }

        override func viewDidLoad() {
            super.viewDidLoad()
            // Do any additional setup after loading the view, typically from a nib.
            //captureSession?.startRunning()
            setupCameraSession()
        }

        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
            // Dispose of any resources that can be recreated.
        }

        func setupCameraSession() {
            // Session
            self.captureSession = AVCaptureSession()
            captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080

            // Capture device
            let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
            var deviceInput = AVCaptureDeviceInput()
            do {
                deviceInput = try AVCaptureDeviceInput(device: inputDevice)
            } catch let error as NSError {
                print(error)
            }
            if captureSession!.canAddInput(deviceInput) {
                captureSession!.addInput(deviceInput)
            }

            // Preview
            self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            self.customPreviewLayer!.frame = camView.bounds
            self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
            self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
            camView.layer.addSublayer(self.customPreviewLayer!)
            print("Cam layer added")

            // Data output
            self.dataOutput = AVCaptureVideoDataOutput()
            self.dataOutput!.videoSettings = [
                String(kCVPixelBufferPixelFormatTypeKey): Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            ]
            dataOutput!.alwaysDiscardsLateVideoFrames = true
            if captureSession!.canAddOutput(dataOutput) {
                captureSession!.addOutput(dataOutput)
            }
            captureSession!.commitConfiguration()

            let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
            let delegate = VideoDelegate()
            dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
        }

        func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
            let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
            CVPixelBufferLockBaseAddress(imageBuffer, 0)
            // On iOS the luma is contained in the full-size plane (8-bit)
            let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
            let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
            let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
            let lumaBuffer: UnsafeMutablePointer<Void> = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
            let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
            let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.NoneSkipFirst.rawValue)!
            let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
            dispatch_sync(dispatch_get_main_queue(), { () -> Void in
                self.customPreviewLayer!.contents = dstImageFilter as AnyObject
            })
        }
    }

And here is my VideoDelegate code:

    import Foundation
    import AVFoundation
    import UIKit

    // Video Delegate
    class VideoDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
        func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
            print("hihi")
        }

        func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
            print("LOL")
        }
    }

Why is my delegate not being called, and how can I fix it? I have looked at a similar question on Stack Overflow, but I could not find a way to solve it. Please help.

I found the cause of my error! The delegate has to be created in the same view controller; in other words, the view controller itself should act as the sample buffer delegate. The VideoDelegate instance I created as a local variable in setupCameraSession() apparently goes out of scope and is deallocated, since the data output does not seem to keep a strong reference to it, so there was nothing left for AVFoundation to call. Here is the modified code:

    import UIKit
    import AVFoundation
    import Accelerate

    var customPreviewLayer: AVCaptureVideoPreviewLayer?

    class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
        var captureSession: AVCaptureSession?
        var dataOutput: AVCaptureVideoDataOutput?
        //var customPreviewLayer: AVCaptureVideoPreviewLayer?

        @IBOutlet weak var camView: UIView!

        override func viewWillAppear(animated: Bool) {
            super.viewWillAppear(animated)
            //setupCameraSession()
        }

        override func viewDidLoad() {
            super.viewDidLoad()
            // Do any additional setup after loading the view, typically from a nib.
            //captureSession?.startRunning()
            setupCameraSession()
            self.captureSession?.startRunning()
        }

        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
            // Dispose of any resources that can be recreated.
        }

        func setupCameraSession() {
            // Session
            self.captureSession = AVCaptureSession()
            self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080

            // Capture device
            let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
            var deviceInput = AVCaptureDeviceInput()
            // Device input
            //var deviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(inputDevice, error: error)
            do {
                deviceInput = try AVCaptureDeviceInput(device: inputDevice)
            } catch let error as NSError {
                // Handle errors
                print(error)
            }
            if self.captureSession!.canAddInput(deviceInput) {
                self.captureSession!.addInput(deviceInput)
            }

            // Preview
            customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            customPreviewLayer!.frame = camView.bounds
            customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
            customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
            self.camView.layer.addSublayer(customPreviewLayer!)
            print("Cam layer added")

            // Data output
            self.dataOutput = AVCaptureVideoDataOutput()
            self.dataOutput!.videoSettings = [
                String(kCVPixelBufferPixelFormatTypeKey): Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            ]
            self.dataOutput!.alwaysDiscardsLateVideoFrames = true
            if self.captureSession!.canAddOutput(dataOutput) {
                self.captureSession!.addOutput(dataOutput)
            }
            self.captureSession!.commitConfiguration()

            let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
            //let delegate = VideoDelegate()
            self.dataOutput!.setSampleBufferDelegate(self, queue: queue)
        }

        func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
            print("buffered")
            let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
            CVPixelBufferLockBaseAddress(imageBuffer, 0)
            let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
            let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
            let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
            let lumaBuffer: UnsafeMutablePointer<Void> = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
            let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
            let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.PremultipliedLast.rawValue)! // problematic
            let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
            dispatch_sync(dispatch_get_main_queue(), { () -> Void in
                customPreviewLayer!.contents = dstImageFilter as AnyObject
            })
        }
    }
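If you would rather keep VideoDelegate as a separate class, another fix that should work is to store the delegate in a stored property so that something keeps it alive. This is a minimal sketch under the deallocation assumption above, not the exact code from my project; the RetainingViewController name is illustrative and the rest of the session setup is elided:

    import UIKit
    import AVFoundation

    // Minimal sketch (Swift 2 syntax, matching the code above): retain the
    // delegate in a property instead of a local variable, because the data
    // output by itself does not appear to keep the delegate alive.
    class RetainingViewController: UIViewController {
        var captureSession: AVCaptureSession?
        var dataOutput: AVCaptureVideoDataOutput?
        // Strong reference: a local `let delegate = VideoDelegate()` is
        // deallocated as soon as setupCameraSession() returns.
        let videoDelegate = VideoDelegate()

        func setupCameraSession() {
            // ... create the session, input, preview layer and output as above ...
            let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
            self.dataOutput!.setSampleBufferDelegate(videoDelegate, queue: queue)
        }
    }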

didOutputSampleBuffer delegate not called, but I can't find any problem in my camera setup code. Code on GitHub: demo code

 #import "DMVideoCamera.h" #import <UIKit/UIKit.h> @interface DMVideoCamera()<AVCaptureVideoDataOutputSampleBufferDelegate> @property (nonatomic, strong) dispatch_queue_t captureQueue; @property (nonatomic, strong) AVCaptureDevice *device; @property (nonatomic, strong) AVCaptureVideoDataOutput *output; @property (nonatomic, strong) AVCaptureDeviceInput *input; @end @implementation DMVideoCamera - (instancetype)init { if (self = [super init]) { [self setupCamera]; } return self; } - (void)setupCamera { if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) { return; } if([self isAVCaptureActive]) { _captureQueue = dispatch_queue_create("com.dmall.ScanQueue", DISPATCH_QUEUE_SERIAL); NSError *error = nil; _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; _session = [[AVCaptureSession alloc] init]; [self configSessionPreset]; _output = [[AVCaptureVideoDataOutput alloc] init]; [_output setVideoSettings:@{ (NSString *)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] }]; [_output setAlwaysDiscardsLateVideoFrames:YES]; [_output setSampleBufferDelegate:self queue:_captureQueue]; _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error]; if ([_session canAddInput:_input]) { [_session addInput:_input]; } if ([_session canAddOutput:_output]) { [_session addOutput:_output]; } } else { [self showAccessAlert]; } } 

And in the view controller:

    - (void)viewDidLoad {
        [super viewDidLoad];
        DMVideoCamera *camera = [DMVideoCamera new];
        AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:camera.session];
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        previewLayer.frame = self.view.bounds;
        [self.view.layer addSublayer:previewLayer];
        camera.zoomFactor = 1.6;
        [camera start];
    }
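This looks like the same lifetime problem as the Swift code above: camera is a local variable, so the DMVideoCamera instance, which is also the sample buffer delegate, can be deallocated as soon as viewDidLoad returns, and no frames are ever delivered. A minimal sketch of the likely fix, under that assumption, is to hold the camera in a strong property (the camera property name here is illustrative):

    // Assumed addition to the view controller's class extension:
    //   @property (nonatomic, strong) DMVideoCamera *camera;
    - (void)viewDidLoad {
        [super viewDidLoad];
        // A strong property keeps the camera (and therefore the sample buffer
        // delegate) alive after viewDidLoad returns, so callbacks can fire.
        self.camera = [DMVideoCamera new];
        AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.camera.session];
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        previewLayer.frame = self.view.bounds;
        [self.view.layer addSublayer:previewLayer];
        self.camera.zoomFactor = 1.6;
        [self.camera start];
    }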