iOS 原生 AVFoundation 掃描在同時支持二維碼和條碼掃描時,二維碼總是很快得到數據,條碼則有時有、有時無。在坑裏待了一段時間後終於找到原因:AVCaptureMetadataOutput 有一個屬性 rectOfInterest,
就是設置元數據識別搜索的區域。
這個屬性有點問題,不是普通的CGRect,四個值都需要在0~1之間。
AVCaptureVideoPreviewLayer 有個方法
- (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInLayerCoordinates
可以看看這個方法的說明:
描述就是:把一個在previewlayer座標系中的rect 轉換成 一個在metadataoutputs座標系中的rect。
這個方法需要的rect參數是我們系統座標系中的rect.
所以,這樣就得到了我們需要的 rectOfInterest。
代碼:
// Configures an AVCaptureSession that scans for QR codes and common 1D
// barcodes, shows a live preview with a scan box + animated line, and
// restricts recognition to the on-screen scan area.
// Returns NO if no video input could be created (e.g. camera unavailable
// or permission denied); YES once the session is running.
- (BOOL)startReading {
    NSError *error;

    // 1. Default video capture device (the back camera).
    AVCaptureDevice *captureDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // 2. Wrap the device in an input for the session.
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }

    // 3. Metadata output that will deliver decoded codes to the delegate.
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];

    // 4. Build the session and wire up input and output.
    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession addInput:input];
    [_captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    [_captureSession addOutput:captureMetadataOutput];

    // 5. Deliver metadata callbacks on the main queue — the delegate
    // updates UI, so a background queue would need an explicit hop anyway.
    [captureMetadataOutput setMetadataObjectsDelegate:self
                                                queue:dispatch_get_main_queue()];

    // 5.2. Recognize QR plus the common 1D barcode symbologies.
    // NOTE: metadataObjectTypes must be set AFTER the output is added to
    // the session, otherwise AVFoundation throws.
    [captureMetadataOutput setMetadataObjectTypes:@[ AVMetadataObjectTypeQRCode,
                                                     AVMetadataObjectTypeEAN13Code,
                                                     AVMetadataObjectTypeEAN8Code,
                                                     AVMetadataObjectTypeCode128Code ]];

    // 6-9. Preview layer filling the host view.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:_vPreview.layer.bounds];
    [_vPreview.layer addSublayer:_videoPreviewLayer];

    // 10.1. Visual scan box: a centered square inset 60pt from each side,
    // shifted up 30pt from the vertical middle.
    CGFloat screenWidth = [[UIScreen mainScreen] bounds].size.width;
    CGFloat screenHeight = [[UIScreen mainScreen] bounds].size.height;
    CGFloat boxSide = screenWidth - 60 * 2;
    _boxView = [[UIView alloc] initWithFrame:CGRectMake(60,
                                                        screenHeight / 2 - 30 - boxSide / 2,
                                                        boxSide,
                                                        boxSide)];
    _boxView.layer.borderColor = mainColor.CGColor;
    _boxView.layer.borderWidth = 1.0f;
    [_vPreview addSubview:_boxView];

    // 10.2. Animated 1pt scan line inside the box.
    _scanLayer = [[CALayer alloc] init];
    _scanLayer.frame = CGRectMake(0, 0, _boxView.bounds.size.width, 1);
    _scanLayer.backgroundColor =
        [UIColor colorWithRed:190.0 / 255 green:190.0 / 255 blue:190.0 / 255 alpha:1.0].CGColor;
    [_boxView.layer addSublayer:_scanLayer];

    // FIXME(review): the run loop retains this repeating timer (and self as
    // its target), so neither is ever released — keep the timer in an ivar
    // and invalidate it in stopReading/dealloc.
    NSTimer *timer = [NSTimer scheduledTimerWithTimeInterval:0.2
                                                      target:self
                                                    selector:@selector(moveScanLayer:)
                                                    userInfo:nil
                                                     repeats:YES];
    [timer fire];

    // Zoom in a little so small barcodes decode more reliably; clamp to the
    // format's maximum supported zoom. lockForConfiguration: can fail, so
    // check its return value instead of configuring blindly.
    if ([captureDevice lockForConfiguration:&error]) {
        captureDevice.videoZoomFactor =
            MIN((CGFloat)2.0, captureDevice.activeFormat.videoMaxZoomFactor);
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"lockForConfiguration failed: %@", [error localizedDescription]);
    }

    // 10. Start the session.
    [_captureSession startRunning];

    // 11. rectOfInterest only takes effect when set AFTER startRunning, and
    // it is NOT an ordinary CGRect: all four components are fractions in
    // 0..1 expressed in the metadata output's coordinate space. Never assign
    // it a view-coordinate rect directly — convert from layer coordinates
    // with metadataOutputRectOfInterestForRect:.
    CGRect scanRect = CGRectMake(30, 100, self.view.frame.size.width - 60, 300);
    CGRect interestRect = [_videoPreviewLayer metadataOutputRectOfInterestForRect:scanRect];
    captureMetadataOutput.rectOfInterest = interestRect;

    return YES;
}