AVFoundation 是苹果在 iOS 8.0 以后主推的一个音视频框架。
AVFoundation 最强大的功能是对照片和视频的捕捉。例如一些 APP 中的小视频、直播等，都可以通过 AVFoundation 来实现捕捉。
捕捉会话主要用到 AVCaptureSession 类，它类似于一个排插，各种设备都需要与捕捉会话关联起来。
通过 AVCaptureDevice 可以获取到手机的各种硬件设备，例如：麦克风、前后摄像头、闪光灯等。
通过 AVCaptureDeviceInput 可以捕捉到设备的输入。
在 AVFoundation 中，捕捉设备是无法直接添加到 Session 中的，因此需要先将捕捉设备封装成捕捉设备输入，再添加进会话中。
有输入就有输出。在 iOS 10.0 以后，可以通过 AVCapturePhotoOutput 来获取图片输出，通过 AVCaptureMovieFileOutput 来进行视频文件的输出。此外还有 AVCaptureAudioDataOutput、AVCaptureVideoDataOutput 等。
AVCaptureConnection 可以根据捕捉的媒体类型来建立一个连接。
AVCaptureVideoPreviewLayer 主要是一个图层，用来显示摄像头实时捕捉的内容。
这里涉及到摄像头、麦克风、相册，需要配置用户隐私权限。
#pragma mark - session设置
/// Configure the capture session: camera + microphone inputs, photo and movie outputs.
/// @param error On failure, receives the error produced while creating a device input.
/// @return YES when the session was configured, NO when a required input could not be created.
- (BOOL)setupSession:(NSError **)error {
    self.captureSession = [[AVCaptureSession alloc] init];
    // High-quality preset; the concrete resolution depends on the device.
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    /* Video input */
    // Default video capture device: on iOS this is the back camera.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // A device cannot join a session directly; it must be wrapped in an AVCaptureDeviceInput.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    if (!videoInput) {
        return NO;
    }
    // The camera is a shared system resource, so always ask the session before adding.
    if ([self.captureSession canAddInput:videoInput]) {
        [self.captureSession addInput:videoInput];
        // Keep a reference so we can switch between front and back cameras later.
        self.activeVideoInput = videoInput;
    }

    /* Audio input (microphone) */
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
    if (!audioInput) {
        return NO;
    }
    if ([self.captureSession canAddInput:audioInput]) {
        // Only one microphone exists, so no reference needs to be kept for switching.
        [self.captureSession addInput:audioInput];
    }

    /* Outputs: still photos and QuickTime movie files */
    self.imageOutput = [[AVCapturePhotoOutput alloc] init];
    if ([self.captureSession canAddOutput:self.imageOutput]) {
        [self.captureSession addOutput:self.imageOutput];
    }
    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.captureSession canAddOutput:self.movieOutput]) {
        [self.captureSession addOutput:self.movieOutput];
    }

    // Serial queue used to start/stop the session off the main thread.
    // (NULL also meant serial, but the explicit constant documents intent;
    // also fixes the stray ";;" on the original return statement.)
    self.videoQueue = dispatch_queue_create("glen.videoQueue", DISPATCH_QUEUE_SERIAL);
    return YES;
}
复制代码
配置完捕捉会话之后，就需要通过外界的按钮点击等操作来告诉 AVFoundation 开启或停止捕捉会话。
/// Start the capture session on the background video queue.
/// The isRunning check is performed on the same serial queue that starts the
/// session; checking it on the caller's thread (as before) raced with other
/// queued start/stop operations.
- (void)startSession {
    dispatch_async(self.videoQueue, ^{
        if (![self.captureSession isRunning]) {
            // startRunning blocks, which is why it runs off the main thread.
            [self.captureSession startRunning];
        }
    });
}
/// Stop the capture session on the background video queue.
/// As with -startSession, the isRunning check is moved onto the serial queue
/// so it cannot race with a queued start.
- (void)stopSession {
    dispatch_async(self.videoQueue, ^{
        if ([self.captureSession isRunning]) {
            // stopRunning blocks, which is why it runs off the main thread.
            [self.captureSession stopRunning];
        }
    });
}
复制代码
获取当前设备上可用的摄像头设备，并根据需求来获得指定位置（前置/后置）的摄像头设备。
/// Find the built-in wide-angle camera at the requested position.
/// @param positon The desired device position (front or back).
/// @return The matching device, or nil if none exists at that position.
- (AVCaptureDevice *)cameraWithPositon:(AVCaptureDevicePosition)positon {
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionUnspecified];
    // Enumerate every discovered camera and pick the first one at the wanted position.
    NSArray<AVCaptureDevice *> *cameras = discovery.devices;
    NSUInteger matchIndex = [cameras indexOfObjectPassingTest:^BOOL(AVCaptureDevice *candidate, NSUInteger idx, BOOL *stop) {
        return candidate.position == positon;
    }];
    return (matchIndex == NSNotFound) ? nil : cameras[matchIndex];
}
复制代码
因为摄像头有多个，所以必须要知道当前使用的是哪一个摄像头。
/// The camera device backing the currently active video input.
- (AVCaptureDevice *)activeCamera {
    AVCaptureDeviceInput *currentInput = self.activeVideoInput;
    return currentInput.device;
}
/// The camera that is NOT currently active (front when back is active, and vice versa).
/// @return The inactive camera, or nil on single-camera devices or when the
///         active position is unspecified. (Also fixes the stray ";;" on return.)
- (AVCaptureDevice *)inactiveCamera {
    AVCaptureDevice *device = nil;
    if (self.cameraCount > 1) {
        // Query the active position once instead of twice.
        AVCaptureDevicePosition activePosition = [self activeCamera].position;
        if (activePosition == AVCaptureDevicePositionBack) {
            device = [self cameraWithPositon:AVCaptureDevicePositionFront];
        } else if (activePosition == AVCaptureDevicePositionFront) {
            device = [self cameraWithPositon:AVCaptureDevicePositionBack];
        }
    }
    return device;
}
复制代码
在进行切换之前，必须要知道其他的摄像头是不是处于可以使用的状态。
/// Whether the device has more than one camera available to switch between.
- (BOOL)canSwitchCameras {
    NSUInteger availableCameras = self.cameraCount;
    return availableCameras > 1;
}
复制代码
接下来就是对摄像头进行切换
/// Switch the session's video input between the front and back cameras.
/// @return YES on success, NO when switching is impossible or the new
///         device input could not be created.
- (BOOL)switchCameras {
    // Bail out early on single-camera devices.
    if (![self canSwitchCameras]) {
        return NO;
    }

    // The camera opposite the active one.
    AVCaptureDevice *targetDevice = [self inactiveCamera];
    // Wrap it in a device input before it can join the session.
    NSError *error;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:targetDevice error:&error];
    if (!newInput) {
        // Device input creation failed; report failure to the caller.
        return NO;
    }

    // Batch the input swap into one atomic configuration change.
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.activeVideoInput];
    if ([self.captureSession canAddInput:newInput]) {
        [self.captureSession addInput:newInput];
        // Track the new active input for future switches.
        self.activeVideoInput = newInput;
    } else {
        // Roll back: restore the previous input when the new one is rejected.
        [self.captureSession addInput:self.activeVideoInput];
    }
    [self.captureSession commitConfiguration];
    return YES;
}
复制代码
/// Whether the currently active camera supports point-of-interest focusing.
- (BOOL)cameraSupportsTapToFocus {
    AVCaptureDevice *camera = [self activeCamera];
    return camera.isFocusPointOfInterestSupported;
}
/// Focus the active camera at a point of interest.
/// @param point Focus point in device coordinates: (0,0) top-left to (1,1) bottom-right.
- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];

    // Both point-of-interest focus and auto-focus mode must be supported.
    BOOL focusSupported = device.isFocusPointOfInterestSupported &&
                          [device isFocusModeSupported:AVCaptureFocusModeAutoFocus];
    if (!focusSupported) {
        return;
    }

    // Configuration changes require exclusive access to the device,
    // so lock it for the duration of the change.
    NSError *error;
    if ([device lockForConfiguration:&error]) {
        device.focusPointOfInterest = point;
        device.focusMode = AVCaptureFocusModeAutoFocus;
        [device unlockForConfiguration];
    } else {
        // Device could not be locked; `error` describes why.
    }
}
复制代码
static const NSString *CameraAdjustingExposureContext;
/// Whether the currently active camera supports point-of-interest exposure.
- (BOOL)cameraSupportsTapToExpose {
    AVCaptureDevice *camera = [self activeCamera];
    return camera.isExposurePointOfInterestSupported;
}
/// Set continuous auto-exposure at a point of interest on the active camera,
/// then observe `adjustingExposure` via KVO so exposure can be locked once
/// the adjustment settles (handled in -observeValueForKeyPath:...).
/// @param point Exposure point in device coordinates: (0,0) top-left to (1,1) bottom-right.
- (void)exposeAtPoint:(CGPoint)point {
//the currently active camera
AVCaptureDevice *device = [self activeCamera];
//expose continuously based on the scene
AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
//the camera must support point-of-interest exposure AND the continuous mode
if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
//configuration requires exclusive access, so lock the device
NSError *error;
if ([device lockForConfiguration:&error]) {
//exposure point
device.exposurePointOfInterest = point;
//exposure mode
device.exposureMode = exposureMode;
//only observe when locking exposure afterwards is actually supported
if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
//KVO on adjustingExposure tells us when the adjustment has finished;
//the observer is removed in the KVO callback.
//NOTE(review): tapping repeatedly before the adjustment finishes adds
//multiple observers while only one is removed — TODO confirm/guard.
[device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:&CameraAdjustingExposureContext];
}
//done configuring, release the lock
[device unlockForConfiguration];
}
}
}
/// KVO callback: locks exposure once the device finishes adjusting it.
/// Two fixes versus the original:
///  1. The settle condition now tests `!device.isAdjustingExposure` — the key
///     path actually being observed. The original tested
///     `!device.isExposurePointOfInterestSupported`, which never reflects the
///     end of an exposure adjustment.
///  2. Unrecognized contexts are forwarded to super in the else branch of the
///     context check; previously super was only called from inside the
///     matched-context branch, so foreign observations were swallowed.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context {
    if (context == &CameraAdjustingExposureContext) {
        AVCaptureDevice *device = (AVCaptureDevice *)object;
        // Once adjustment has settled and locking is supported, freeze the exposure.
        if (!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked]) {
            // Remove ourselves first so later changes do not re-enter this path.
            [object removeObserver:self forKeyPath:@"adjustingExposure" context:&CameraAdjustingExposureContext];
            // Apply the lock on the main queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                if ([device lockForConfiguration:nil]) {
                    device.exposureMode = AVCaptureExposureModeLocked;
                    [device unlockForConfiguration];
                } else {
                    // Device error callback would go here.
                }
            });
        }
    } else {
        // Not our observation — let the superclass handle it.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
/// Reset focus and exposure to continuous-auto modes, centered in the frame.
/// BUG FIX: `canResetExposure` previously tested `isFocusPointOfInterestSupported`;
/// it must test the *exposure* point-of-interest capability.
- (void)resetFocusAndExposureModes {
    AVCaptureDevice *device = [self activeCamera];

    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    // Focus can be reset only when both point-of-interest and continuous mode are supported.
    BOOL canResetFocus = [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode];

    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // Exposure can be reset only when both point-of-interest exposure and continuous mode are supported.
    BOOL canResetExposure = [device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode];

    // Device coordinate space: (0,0) top-left, (1,1) bottom-right, so (0.5,0.5) is the center.
    CGPoint centerPoint = CGPointMake(0.5f, 0.5f);

    NSError *error;
    // Exclusive access is required before changing device configuration.
    if ([device lockForConfiguration:&error]) {
        if (canResetFocus) {
            device.focusMode = focusMode;
            device.focusPointOfInterest = centerPoint;
        }
        if (canResetExposure) {
            device.exposureMode = exposureMode;
            device.exposurePointOfInterest = centerPoint;
        }
        // Release the lock.
        [device unlockForConfiguration];
    } else {
        // Device error callback would go here.
    }
}
复制代码
#pragma mark - 拍照
/// Capture a still photo through the photo output, encoded as JPEG.
/// Delivery happens via the AVCapturePhotoCaptureDelegate callback.
- (void)captureStillImage {
    NSDictionary *format = @{AVVideoCodecKey: AVVideoCodecTypeJPEG};
    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettingsWithFormat:format];
    [self.imageOutput capturePhotoWithSettings:settings delegate:self];
}
/// AVCapturePhotoCaptureDelegate: invoked when the photo has been processed.
/// Previously the error and a nil data representation were ignored, so a
/// failed capture could pass nil into UIImage and the photo library.
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error {
    if (error) {
        // Capture failed — nothing to save.
        return;
    }
    // Encoded image bytes (may be nil, e.g. for unsupported formats).
    NSData *imageData = photo.fileDataRepresentation;
    if (!imageData) {
        return;
    }
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    if (image) {
        // Persist the captured photo to the user's library.
        [self writeImageToAssetsLibrary:image];
    }
}
/// Save an image to the user's photo library and, on success, broadcast a
/// thumbnail notification on the main queue.
/// Previously the completion handler ignored `success`/`error` (posting the
/// notification even when the save failed) and left a stray NSLog.
/// @param image The image to save.
- (void)writeImageToAssetsLibrary:(UIImage *)image {
    __block PHObjectPlaceholder *assetPlaceholder = nil;
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        PHAssetChangeRequest *changeRequest = [PHAssetChangeRequest creationRequestForAssetFromImage:image];
        assetPlaceholder = changeRequest.placeholderForCreatedAsset;
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        if (!success) {
            // Save failed (e.g. photo-library permission denied) — do not announce a thumbnail.
            return;
        }
        // Notify observers on the main queue so UI can display the new thumbnail.
        dispatch_async(dispatch_get_main_queue(), ^{
            [[NSNotificationCenter defaultCenter] postNotificationName:ThumbnailCreatedNotification object:image];
        });
    }];
}
复制代码