Introduction
This article explains how to use the camera on iOS to read the ambient light level. The walkthrough is fairly detailed; interested readers can use it as a reference, and hopefully it will be helpful.
```objc
#import "LightSensitiveViewController.h"
@import AVFoundation;
#import <ImageIO/ImageIO.h>

@interface LightSensitiveViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Keep a strong reference so the session is not deallocated while running
@property (nonatomic, strong) AVCaptureSession *session;

@end

@implementation LightSensitiveViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    self.view.backgroundColor = [UIColor whiteColor];
    self.navigationItem.title = @"Light Sensor";
    [self lightSensitive];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

#pragma mark - Light sensing

- (void)lightSensitive {
    // 1. Get the hardware device (the default video capture device)
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // 2. Create the input stream
    AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:nil];

    // 3. Create the device output stream and deliver sample buffers on the main queue
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    // 4. Set up the AVCaptureSession property
    self.session = [[AVCaptureSession alloc] init];
    // Use a high-quality capture preset
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];
    // Add the session input and output
    if ([self.session canAddInput:input]) {
        [self.session addInput:input];
    }
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }

    // 5. Start the session
    [self.session startRunning];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Read the metadata attached to this sample buffer
    CFDictionaryRef metadataDict = CMCopyDictionaryOfAttachments(NULL, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
    NSDictionary *metadata = [[NSMutableDictionary alloc] initWithDictionary:(__bridge NSDictionary *)metadataDict];
    CFRelease(metadataDict);
    NSDictionary *exifMetadata = [[metadata objectForKey:(NSString *)kCGImagePropertyExifDictionary] mutableCopy];
    // The EXIF BrightnessValue reflects the ambient light level of the current frame
    float brightnessValue = [[exifMetadata objectForKey:(NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
    NSLog(@"%f", brightnessValue);

    // Turn the torch on or off depending on brightnessValue
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    BOOL result = [device hasTorch]; // Check whether the device has a torch
    if ((brightnessValue < 0) && result) {
        // Dark environment: turn the torch on
        [device lockForConfiguration:nil];
        [device setTorchMode:AVCaptureTorchModeOn];
        [device unlockForConfiguration];
    } else if ((brightnessValue > 0) && result) {
        // Bright environment: turn the torch off
        [device lockForConfiguration:nil];
        [device setTorchMode:AVCaptureTorchModeOff];
        [device unlockForConfiguration];
    }
}

@end
```
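The sample above starts the capture session but never stops it. As a minimal sketch that is not part of the original tutorial, one could stop capturing and make sure the torch is switched off when the view controller leaves the screen; this assumes the same `session` property and class shown above:

```objc
// Hypothetical cleanup, added to LightSensitiveViewController (assumption, not in the original code)
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];

    // Stop delivering sample buffers once the screen is no longer visible
    [self.session stopRunning];

    // Avoid leaving the torch on when navigating away
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device hasTorch] && [device lockForConfiguration:nil]) {
        [device setTorchMode:AVCaptureTorchModeOff];
        [device unlockForConfiguration];
    }
}
```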
Notes:

- First, import the AVFoundation framework and the ImageIO/ImageIO.h header.
- Conform to the AVCaptureVideoDataOutputSampleBufferDelegate protocol.
- The AVCaptureSession object must be declared as a property so that something keeps a strong reference to it. If the session were created and initialized as a local variable inside the lightSensitive method, it would be released prematurely and [self.session startRunning] would have no effect.
- Implement the AVCaptureVideoDataOutputSampleBufferDelegate callback. The brightnessValue read there is the ambient brightness, roughly in the range -5 to 12; the larger the value, the brighter the environment (a hypothetical way to normalize this reading is sketched after this list).
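To illustrate the last note, the sketch below maps a raw brightnessValue onto a 0–1 level using the approximate -5 to 12 range mentioned above. The helper name NormalizedBrightnessLevel and the range constants are hypothetical additions, not part of the original code:

```objc
// Hypothetical helper: map an EXIF BrightnessValue reading to a 0..1 level,
// assuming the roughly -5 (very dark) to 12 (very bright) range noted above.
static float NormalizedBrightnessLevel(float brightnessValue) {
    const float kMinBrightness = -5.0f; // approximate lower bound (dark)
    const float kMaxBrightness = 12.0f; // approximate upper bound (bright)
    float clamped = MAX(kMinBrightness, MIN(kMaxBrightness, brightnessValue));
    return (clamped - kMinBrightness) / (kMaxBrightness - kMinBrightness);
}

// Example use inside the delegate method: log a percentage instead of the raw value.
// NSLog(@"ambient light: %.0f%%", NormalizedBrightnessLevel(brightnessValue) * 100);
```

Since the value is read per frame, averaging it over a few frames before acting on it tends to give a steadier result than reacting to a single reading.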