didOutputSampleBuffer delegate not called

Problem Description

The didOutputSampleBuffer function in my code is never called, and I don't know why this happens. Here's the code:

import UIKit
import AVFoundation
import Accelerate

class ViewController: UIViewController {

var captureSession: AVCaptureSession?
var dataOutput: AVCaptureVideoDataOutput?
var customPreviewLayer: AVCaptureVideoPreviewLayer?

@IBOutlet weak var camView: UIView!

override func viewWillAppear(animated: Bool) {
    super.viewDidAppear(animated)
    captureSession?.startRunning()
    //setupCameraSession()
}

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    //captureSession?.startRunning()
    setupCameraSession()
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func setupCameraSession() {
    // Session
    self.captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
    // Capture device
    let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    var deviceInput = AVCaptureDeviceInput()

    do {
        deviceInput = try AVCaptureDeviceInput(device: inputDevice)
    } catch let error as NSError {
        print(error)
    }
    if captureSession!.canAddInput(deviceInput) {
        captureSession!.addInput(deviceInput)
    }
    // Preview

    self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.customPreviewLayer!.frame = camView.bounds
    self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
    self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    camView.layer.addSublayer(self.customPreviewLayer!)
    print("Cam layer added")

    self.dataOutput = AVCaptureVideoDataOutput()
    self.dataOutput!.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    ]

    dataOutput!.alwaysDiscardsLateVideoFrames = true
    if captureSession!.canAddOutput(dataOutput) {
        captureSession!.addOutput(dataOutput)
    }
    captureSession!.commitConfiguration()
    let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
    let delegate = VideoDelegate()
    dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
}




 func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    // On iOS the luma is in plane 0, one full-resolution 8-bit channel
    let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
    let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
    let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.NoneSkipFirst.rawValue)!
    let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
    dispatch_sync(dispatch_get_main_queue(), {() -> Void in
        self.customPreviewLayer!.contents = dstImageFilter as AnyObject
    })

}


}

And here is my VideoDelegate code:

import Foundation
import AVFoundation
import UIKit

// Video Delegate
class VideoDelegate : NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{

    func captureOutput(captureOutput: AVCaptureOutput!,
        didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
        fromConnection connection: AVCaptureConnection!){
            print("hihi")

    }


    func captureOutput(captureOutput: AVCaptureOutput!,
        didDropSampleBuffer sampleBuffer: CMSampleBuffer!,
        fromConnection connection: AVCaptureConnection!){

            print("LOL")
    }


}

Why doesn't my delegate get called, and how can I fix it? I've checked similar questions on Stack Overflow, but I can't find a way to solve this. Please help.

Recommended Answer

I found the cause of my error! The delegate has to be created in, and owned by, the same view controller: setSampleBufferDelegate does not keep a strong reference to its delegate, so the VideoDelegate instance created as a local variable was deallocated as soon as setupCameraSession() returned, and no frames were ever delivered. Here is the modified code:
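
If you would rather keep the separate VideoDelegate class from the question, the alternative fix is simply to retain that delegate yourself. A minimal sketch in the same Swift 2-era syntax as the rest of this post (the videoDelegate property name is my own):

import UIKit
import AVFoundation

class ViewController: UIViewController {

    var dataOutput: AVCaptureVideoDataOutput?

    // Strong reference: setSampleBufferDelegate does not retain its delegate,
    // so a local `let delegate = VideoDelegate()` is deallocated as soon as
    // setupCameraSession() returns and no callbacks ever fire.
    let videoDelegate = VideoDelegate()

    func setupCameraSession() {
        // ... same session, input, and output setup as in the question ...
        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        dataOutput!.setSampleBufferDelegate(videoDelegate, queue: queue)
    }
}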

import UIKit
import AVFoundation
import Accelerate

var customPreviewLayer: AVCaptureVideoPreviewLayer?

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

var captureSession: AVCaptureSession?
var dataOutput: AVCaptureVideoDataOutput?
//var customPreviewLayer: AVCaptureVideoPreviewLayer?

@IBOutlet weak var camView: UIView!

override func viewWillAppear(animated: Bool) {
    super.viewWillAppear(animated)
    //setupCameraSession()
}

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    //captureSession?.startRunning()
    setupCameraSession()
    self.captureSession?.startRunning()
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func setupCameraSession() {
    // Session
    self.captureSession = AVCaptureSession()
    self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
    // Capture device
    let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    var deviceInput = AVCaptureDeviceInput()
    // Device input
    //var deviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(inputDevice, error: error)
    do {
        deviceInput = try AVCaptureDeviceInput(device: inputDevice)

    } catch let error as NSError {
        // Handle errors
        print(error)
    }
    if self.captureSession!.canAddInput(deviceInput) {
        self.captureSession!.addInput(deviceInput)
    }
    // Preview
    customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    customPreviewLayer!.frame = camView.bounds
    customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
    customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    self.camView.layer.addSublayer(customPreviewLayer!)
    print("Cam layer added")

    self.dataOutput = AVCaptureVideoDataOutput()
    self.dataOutput!.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    ]

    self.dataOutput!.alwaysDiscardsLateVideoFrames = true
    if self.captureSession!.canAddOutput(dataOutput) {
        self.captureSession!.addOutput(dataOutput)
    }
    self.captureSession!.commitConfiguration()
    let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
    //let delegate = VideoDelegate()
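    // `self` is the delegate here; the view controller is retained by the
    // view hierarchy, so the delegate outlives setupCameraSession()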
    self.dataOutput!.setSampleBufferDelegate(self, queue: queue)
}




 func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    print("buffered")
    let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
    let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
    let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.PremultipliedLast.rawValue)! // flagged as problematic in the original; for an 8-bit grayscale context, CGImageAlphaInfo.None is likely the value CGBitmapContextCreate expects

    let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0) // balance the lock above; CGBitmapContextCreateImage copies the pixels, so unlocking here is safe
    dispatch_sync(dispatch_get_main_queue(), {() -> Void in
        customPreviewLayer!.contents = dstImageFilter as AnyObject
    })
}



}
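
One further tweak worth considering (my note, not part of the original answer): dispatch_sync blocks the video queue until the main thread has rendered the frame, and it can deadlock if the main thread is itself waiting on the capture session (for example inside stopRunning()). Dispatching asynchronously is usually the safer choice:

    dispatch_async(dispatch_get_main_queue(), {() -> Void in
        customPreviewLayer!.contents = dstImageFilter as AnyObject
    })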
