AVCaptureMovieFileOutput 系列
Swift 版本
本篇不涉及 :视频输出质量
帧率设置
具体的设备格式
像素格式
光学防抖
...等等
这些都会在下一篇
中带你去认识。如果还不会用,就想了解这么多,就如同还不会走就要跑一样,是要跌大跟头的!
UIImagePickerController
AVFoundation
本文主要内容是: AVFoundation
AVFoundation
与 UIImagePickerController
的区别在于 在于对视频流的处理,显然前者会更高级一点。AVFoundation
中对视频的输出处理 又分为 AVCaptureMovieFileOutput
与 AVAssetWriter
。这里若是想要对视频的输出给出更多的操做那选择 AVAssetWriter
是个不错的选择。 AVFoundation
更多的区别,还是在代码中体验比较好,说太多都没用。就是 干
首先我们新建一个工程
并在工程中的 plist
文件中添加访问 权限
Privacy - Camera Usage Description
Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
Privacy - Media Library Usage Description
session
AVCaptureMovieFileOutput
关于 AVCaptureMovieFileOutput
看上图对号入座
首先新建一个 fileOutputViewController
控制器
控制器上放俩按钮: Close
Record
你能够用 storyboard
拖拽也可以用代码实现其点击事件
由上图咱们能够看到输出方式有两种 AVCaptureMovieFileOutput
与 AVAssetWriter
,在输出以前他们对视频的操做是同样的,因此咱们能够把 它俩公共的部分抽象出来一个类,对使用不一样的输出方式进行继承这个类就 OK
了
相同的部分抽象成 一个继承 NSObject
的 CaptureSessionCoordinator
公共类
该公共类不对采集后的视频作输出处理,因为输出有两种不同的处理结果。
每一种处理正是其继承 CaptureSessionCoordinator
类的 子类
完成其处理模块化
对 AVCaptureSession
类进行处理,相关属性以下:
AVCaptureSession
AVCaptureDevice
代理
视图
因为使用到线程,故有对资源加锁的问题。在 Swift
中无法像 OC
那样直接使用 @synchronized,
故在此利用闭包的特性达到相同的效果:
如何使用看文中代码
/// Runs `dispose` while holding an Obj-C runtime lock on `lock`,
/// mimicking Obj-C's `@synchronized`.
///
/// Generalized (backward-compatibly) over the original `()->()` version:
/// the critical section may now throw and may return a value.
///
/// - Parameters:
///   - lock: Any object to lock on; `objc_sync_*` keys on object identity.
///   - dispose: The critical section executed while the lock is held.
/// - Returns: Whatever `dispose` returns (`Void` for the existing callers).
/// - Note: `defer` guarantees `objc_sync_exit` runs even if `dispose` throws.
func synchronized<T>(_ lock: AnyObject, dispose: () throws -> T) rethrows -> T {
    objc_sync_enter(lock)
    defer { objc_sync_exit(lock) }
    return try dispose()
}
因为对视频的处理都不是在主控制器fileOutputViewController
里面执行的。故,对视频的输出都是须要代理来回调到控制器里面执行后续的相关操做。
因此这里须要一个代理:
/// Callbacks from a CaptureSessionCoordinator back to the hosting
/// view controller. Delivered on the coordinator's delegate queue
/// (a background queue), NOT on the main queue.
protocol captureSessionCoordinatorDelegate: class {
    /// Recording has actually started (file output began writing).
    func coordinatorDidBeginRecording(coordinator: CaptureSessionCoordinator)
    /// Recording finished; `url` is the temporary movie file that was written.
    /// Bug fix: the original declaration omitted `url: URL`, but both the
    /// caller (fileOutputCoordinator) and the conforming controller use
    /// `didFinishRecording(coordinator:url:)`.
    func didFinishRecording(coordinator: CaptureSessionCoordinator, url: URL)
}
上面的铺垫后,下面开始对 AVCaptureSession
进行相应的操做:
以咱们的常识,该类中必须有这些方法:
startRunning
结束运行 stopRunning
开始记录 startRecording
stopRecording
AVCaptureVideoPreviewLayer
其余的方法能够在初始中进行,也能够进行模块化拆分
该类一个完整的代码以下:
/// Base coordinator that owns the AVCaptureSession plus its default
/// camera and microphone inputs. It deliberately adds NO output —
/// output handling is left to subclasses (AVCaptureMovieFileOutput vs
/// AVAssetWriter), which override startRecording()/stopRecording().
class CaptureSessionCoordinator: NSObject {

    var captureSession: AVCaptureSession?
    var cameraDevice: AVCaptureDevice?
    /// Queue the delegate expects callbacks on (set via setDelegate(capDelegate:queue:)).
    /// NOTE(review): nothing in the visible code dispatches onto it yet — confirm subclasses do.
    var delegateCallQueue: DispatchQueue?
    weak var delegate: captureSessionCoordinatorDelegate?

    /// Serial queue for session start/stop so UI threads never block on AVFoundation.
    private var sessionQueue = DispatchQueue(label: "coordinator.Session")
    private var previewLayer: AVCaptureVideoPreviewLayer?

    override init() {
        super.init()
        captureSession = setupCaptureSession()
    }

    /// Stores the delegate and its callback queue under a lock (setDelegate can
    /// race with capture callbacks on other queues).
    public func setDelegate(capDelegate: captureSessionCoordinatorDelegate,queue: DispatchQueue) {
        synchronized(self) {
            delegate = capDelegate
            // Bug fix: identity comparison (===) is what is meant for queues;
            // the original `!=` relied on reference equality anyway.
            if delegateCallQueue !== queue {
                delegateCallQueue = queue
            }
        }
    }

    //MARK: ________________Session Setup________________

    /// Builds the session with the default camera and mic inputs attached.
    private func setupCaptureSession() -> AVCaptureSession {
        let session = AVCaptureSession()
        if !addDefaultCameraInputToCaptureSession(capSession: session) {
            printLogDebug("failed to add camera input to capture session")
        }
        // Bug fix: the original tested the UN-negated result here, logging
        // "failed" on success and staying silent on failure.
        if !addDefaultMicInputToCaptureSession(capSession: session) {
            printLogDebug("failed to add mic input to capture session")
        }
        return session
    }

    /// Attaches the default video device; remembers it in `cameraDevice`
    /// so subclasses/config code can adjust it later.
    private func addDefaultCameraInputToCaptureSession(capSession: AVCaptureSession) -> Bool {
        do {
            let cameraInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
            let success = addInput(input: cameraInput, capSession: capSession)
            cameraDevice = cameraInput.device
            return success
        } catch let error as NSError {
            printLogDebug("error configuring camera input: \(error.localizedDescription)")
            return false
        }
    }

    /// Attaches the default audio device.
    private func addDefaultMicInputToCaptureSession(capSession: AVCaptureSession) -> Bool {
        do {
            let micInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
            let success = addInput(input: micInput, capSession: capSession)
            return success
        } catch let error as NSError {
            printLogDebug("error configuring mic input: \(error.localizedDescription)")
            return false
        }
    }

    //MARK: ________________Public Api________________

    /// Adds `input` if the session accepts it; returns success.
    func addInput(input: AVCaptureDeviceInput,capSession: AVCaptureSession) -> Bool {
        if capSession.canAddInput(input) {
            capSession.addInput(input)
            return true
        }
        printLogDebug("input error")
        return false
    }

    /// Adds `output` if the session accepts it; returns success.
    func addOutput(output: AVCaptureOutput,capSession: AVCaptureSession) -> Bool {
        if capSession.canAddOutput(output) {
            capSession.addOutput(output)
            return true
        }
        printLogDebug("output error")
        return false
    }

    /// Lazily creates and returns the preview layer for this session.
    /// Added here because fileOutputViewController.confiureCamper() calls it
    /// but the original class never defined it.
    func previewLayerSetting() -> AVCaptureVideoPreviewLayer? {
        if previewLayer == nil, let session = captureSession {
            previewLayer = AVCaptureVideoPreviewLayer(session: session)
        }
        return previewLayer
    }

    /// Starts the session asynchronously on the session queue.
    func startRunning() {
        sessionQueue.async {
            self.captureSession?.startRunning()
        }
    }

    /// Stops the session asynchronously on the session queue.
    func stopRunning() {
        sessionQueue.async {
            // Bug fix: the original re-invoked self.stopRunning() here,
            // enqueueing itself forever. Only the session must be stopped.
            self.captureSession?.stopRunning()
        }
    }

    func startRecording() {
        // 子类继承后重写 (subclasses override)
    }

    func stopRecording() {
        // 子类继承后重写 (subclasses override)
    }
}
咱们建立以 AVCaptureMovieFileOutput 方式输出并继承 CaptureSessionCoordinator 的类:fileOutputCoordinator
由最上面的大图可知,AVFoundation
输出有两种:AVCaptureMovieFileOutput
与AVAssetWriter
。
而 AVCaptureMovieFileOutput
是对输出流没有作太多的处理,以AVCaptureMovieFileOutput
方式进行视频输出处理的类,不须要太多的处理。
故继承 CaptureSessionCoordinator
它的fileOutputCoordinator
子类只需以下:
重点即是对输出的处理
/// AVCaptureMovieFileOutput-based coordinator: routes the session straight
/// into a movie file and reports start/finish through the shared delegate.
class fileOutputCoordinator: CaptureSessionCoordinator,AVCaptureFileOutputRecordingDelegate {

    var movieFileOutput: AVCaptureMovieFileOutput?

    override init() {
        super.init()
        let fileOutput = AVCaptureMovieFileOutput()
        movieFileOutput = fileOutput
        // Attach the movie-file output to the session the superclass built.
        _ = addOutput(output: fileOutput, capSession: captureSession!)
    }

    /// Starts writing to a fresh temporary file.
    override func startRecording() {
        let destination = YfileManager().tempFileUrl()
        movieFileOutput?.startRecording(toOutputFileURL: destination, recordingDelegate: self)
    }

    override func stopRecording() {
        movieFileOutput?.stopRecording()
    }

    //MARK: AVCaptureFileOutputRecordingDelegate (Swift 3-era signatures)

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        delegate?.didFinishRecording(coordinator: self, url: outputFileURL)
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        delegate?.coordinatorDidBeginRecording(coordinator: self)
    }
}
上面代码中有一个对文件处理的路径操做类:YfileManager
它主要就是对文件路径的操做,与临时文件存储到系统相册中的操做:以上代码中牵扯到的只有以下:
/// Small helper for temp-file paths and for saving a finished movie
/// into the system photo album.
class YfileManager: NSObject {

    /// Returns a URL for a not-yet-existing "outputN.mov" in the temp directory,
    /// probing N = 0, 1, 2, ... until an unused name is found.
    func tempFileUrl() -> URL {
        let manager = FileManager.default
        var index = 0
        var candidate = NSTemporaryDirectory() + "output\(index.description).mov"
        while manager.fileExists(atPath: candidate) {
            index += 1
            candidate = NSTemporaryDirectory() + "output\(index.description).mov"
        }
        return URL(fileURLWithPath: candidate)
    }

    /// 对临时视频文件的存储操作 — saves the temp movie into the saved-photos
    /// album. NOTE: ALAssetsLibrary has been deprecated since iOS 9.
    func copFileToCameraRoll(fileUrl: URL) {
        let library = ALAssetsLibrary()
        if library.videoAtPathIs(compatibleWithSavedPhotosAlbum: fileUrl) == false {
            printLogDebug("video error")
        }
        // Best-effort write: incompatibility above is only logged, matching
        // the original behavior of still attempting the save.
        library.writeVideoAtPath(toSavedPhotosAlbum: fileUrl) { (savedUrl, saveError) in
            if (saveError != nil) {
                printLogDebug("error: \(saveError?.localizedDescription)")
            } else if savedUrl == nil {
                printLogDebug("url is empty")
            }
        }
    }
}
实现fileOutputViewController
控制器的方法
首当其冲的是相机视图与执行代理的方法:captureSessionCoordinatorDelegate
相关变量:
@IBOutlet weak var recordButton: UIBarButtonItem!
var captureSessionCoordinator: fileOutputCoordinator?
var recording: Bool = false
var dismissing: Bool = false复制代码
控制器具体代码:
/// Hosts the camera preview and drives a fileOutputCoordinator via two
/// buttons (Close / Record). Conforms to captureSessionCoordinatorDelegate
/// to learn when recording actually starts and finishes.
class fileOutputViewController: UIViewController,captureSessionCoordinatorDelegate {

    @IBOutlet weak var recordButton: UIBarButtonItem!
    var captureSessionCoordinator: fileOutputCoordinator?
    var recording: Bool = false
    /// Set when Close is tapped mid-recording: dismissal is deferred
    /// until didFinishRecording fires.
    var dismissing: Bool = false

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSessionCoordinator = fileOutputCoordinator()
        // Delegate callbacks arrive on this background queue, so any UI work
        // inside them must hop back to the main queue (see below).
        captureSessionCoordinator?.setDelegate(capDelegate: self, queue: DispatchQueue(label: "fileOutputCoordinator"))
        confiureCamper()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// 关闭当前视图 — defers dismissal if a recording is still in flight.
    @IBAction func closeCameral(_ sender: Any) {
        if recording {
            dismissing = true
        } else {
            stopPipelineAndDismiss()
        }
    }

    /// 开始记录 与中止记录 — toggles recording.
    /// Bug fix: the original fell through after stopRecording() and
    /// unconditionally called startRecording() again (and reset the title to
    /// "Stop"), so tapping "Stop" immediately restarted the capture.
    @IBAction func recording(_ sender: Any) {
        if recording {
            captureSessionCoordinator?.stopRecording()
            // Re-enabled (with title "Record") in didFinishRecording.
            recordButton.isEnabled = false
        } else {
            // Keep the screen awake while capturing.
            UIApplication.shared.isIdleTimerDisabled = true
            recordButton.isEnabled = false
            recordButton.title = "Stop"
            captureSessionCoordinator?.startRecording()
            recording = true
        }
    }

    /// Installs the preview layer beneath all other views and starts the
    /// session. (Name kept as-is — "confiureCamper" — to avoid breaking
    /// any external references such as storyboard selectors.)
    func confiureCamper() {
        let cameraViewlayer = captureSessionCoordinator?.previewLayerSetting()
        cameraViewlayer?.frame = view.bounds
        view.layer.insertSublayer(cameraViewlayer!, at: 0)
        captureSessionCoordinator?.startRunning()
    }

    func stopPipelineAndDismiss() {
        captureSessionCoordinator?.stopRunning()
        dismiss(animated: true, completion: nil)
        dismissing = false
    }

    //MARK: captureSessionCoordinatorDelegate (invoked on the delegate queue)

    func coordinatorDidBeginRecording(coordinator: CaptureSessionCoordinator) {
        // Bug fix: UIKit state must be mutated on the main queue; the
        // delegate queue is a background queue.
        DispatchQueue.main.async {
            self.recordButton.isEnabled = true
        }
    }

    func didFinishRecording(coordinator: CaptureSessionCoordinator, url: URL) {
        // File/album work can stay on the background delegate queue.
        let fm = YfileManager()
        fm.copFileToCameraRoll(fileUrl: url)
        // UI + app state back on the main queue.
        DispatchQueue.main.async {
            UIApplication.shared.isIdleTimerDisabled = false
            self.recordButton.title = "Record"
            self.recording = false
            if self.dismissing {
                self.stopPipelineAndDismiss()
            }
        }
    }
}
AVCaptureMovieFileOutput
类型的输出完成
Xcode上面的导航栏
->Window
->Devices
->点击你的设备
->找到右下的installed Apps
->点击你的要看的项目
->点击+ -右边图标
->Download
下载到桌面便可 而后选择显示包内容
就能够看到当前沙盒文件的状态啦!
如图: