基本上,应该使用 AVCaptureVideoDataOutputSampleBufferDelegate(而不是 AVCaptureVideoPreviewLayer)来逐帧捕获相机画面。示例如下:
import Foundation
import UIKit
import AVFoundation

/// Receives each captured camera frame, already converted to a UIImage.
protocol CaptureManagerDelegate: class {
    func processCapturedImage(image: UIImage)
}

/// Runs an AVCaptureSession and forwards every video frame to its delegate
/// through AVCaptureVideoDataOutputSampleBufferDelegate.
class CaptureManager: NSObject {
    internal static let shared = CaptureManager()
    weak var delegate: CaptureManagerDelegate?
    var session: AVCaptureSession?

    override init() {
        super.init()
        session = AVCaptureSession()
        // Setup input: the default video camera.
        // NOTE(review): `try!` and the implicitly-unwrapped device will crash on
        // hardware without a camera (e.g. the Simulator) — acceptable for a demo,
        // but production code should handle the failure.
        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        let input = try! AVCaptureDeviceInput(device: device)
        session?.addInput(input)
        // Setup output: raw 32-bit BGRA frames, delivered on the main queue.
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: kCVPixelFormatType_32BGRA]
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session?.addOutput(output)
    }

    /// Starts the capture session.
    /// (Name kept as `statSession` — a typo for `startSession` — so existing callers keep compiling.)
    func statSession() {
        session?.startRunning()
    }

    /// Stops the capture session.
    func stopSession() {
        session?.stopRunning()
    }

    /// Converts a BGRA sample buffer into a UIImage.
    /// - Parameter sampleBuffer: A frame delivered by AVCaptureVideoDataOutput.
    /// - Returns: The rendered image, or nil if the buffer could not be drawn.
    func getImageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        // FIX: correct CoreVideo capitalization — the original `CVPixelBufferLockbaseAddress`,
        // `CVPixelBufferGetbaseAddress` and `CVPixelBufferUnlockbaseAddress` spellings do not
        // exist in CoreVideo and fail to compile.
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        // FIX: the original only unlocked on the success path, leaking the lock whenever
        // CGContext creation or makeImage() failed; `defer` balances the lock on every exit.
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }
        let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        // Matches the BGRA little-endian pixel format requested in init:
        // premultiplied alpha first + 32-bit little-endian byte order.
        let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
        guard let context = CGContext(data: baseAddress,
                                      width: width,
                                      height: height,
                                      bitsPerComponent: 8,
                                      bytesPerRow: bytesPerRow,
                                      space: colorSpace,
                                      bitmapInfo: bitmapInfo.rawValue) else {
            return nil
        }
        guard let cgImage = context.makeImage() else {
            return nil
        }
        // `.right` compensates for the sensor's landscape orientation when the UI is portrait.
        let image = UIImage(cgImage: cgImage, scale: 1, orientation: .right)
        return image
    }
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension CaptureManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput!,
                       didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                       from connection: AVCaptureConnection!) {
        guard let outputImage = getImageFromSampleBuffer(sampleBuffer: sampleBuffer) else {
            return
        }
        delegate?.processCapturedImage(image: outputImage)
    }
}
更新:
要处理图像,您应该在所需的任何其他类中实现CaptureManagerDelegate协议的processCapturedImage方法,例如:
import UIKit

/// Shows the live camera feed by rendering each frame from CaptureManager
/// into an image view.
class ViewController: UIViewController {
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // FIX: assign the delegate BEFORE starting the session; in the original order
        // any frames delivered between startRunning() and the delegate assignment
        // were silently dropped.
        CaptureManager.shared.delegate = self
        CaptureManager.shared.statSession()
    }
}

// MARK: - CaptureManagerDelegate
extension ViewController: CaptureManagerDelegate {
    func processCapturedImage(image: UIImage) {
        // CaptureManager delivers frames on the main queue (set in its init),
        // so touching UIKit directly here is safe.
        self.imageView.image = image
    }
}
欢迎分享,转载请注明来源:内存溢出
评论列表(0条)