@@ -10,12 +10,10 @@ @implementation ScannerViewController {
     AVCaptureDevice *_device;
     AVCaptureVideoPreviewLayer *_prevLayer;
     bool running;
-    NSString * lastFormat;
-
+    NSString *lastFormat;
     MainScreenState state;
-
-    CGImageRef decodeImage;
-    NSString * decodeResult;
+    CGImageRef decodeImage;
+    NSString *decodeResult;
     size_t width;
     size_t height;
     size_t bytesPerRow;
@@ -32,7 +30,7 @@ @implementation ScannerViewController {
 
 - (void)viewWillAppear:(BOOL)animated {
     [super viewWillAppear:animated];
-
+
 #if TARGET_IPHONE_SIMULATOR
     NSLog(@"IDScanner: On iOS simulator camera is not supported");
     [self.delegate returnScanResult:self scanResult:nil];
@@ -51,7 +49,7 @@ - (void)viewWillDisappear:(BOOL)animated {
 
 - (void)viewDidLoad {
     [super viewDidLoad];
-
+
     self.prevLayer = nil;
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(decodeResultNotification:) name:DecoderResultNotification object:nil];
 }
@@ -83,10 +81,10 @@ - (void)toggleTorch
             [self.device setTorchMode:AVCaptureTorchModeOff];
         else
             [self.device setTorchMode:AVCaptureTorchModeOn];
-
+
         if ([self.device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
             self.device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
-
+
         [self.device unlockForConfiguration];
     }
 }
@@ -95,7 +93,7 @@ - (void)toggleTorch
 - (void)initCapture
 {
     self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
-
+
     if (@available(iOS 13.0, *)) {
         AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInUltraWideCamera]
                                                                                                                                  mediaType:AVMediaTypeVideo
@@ -105,32 +103,31 @@ - (void)initCapture
 
         if (captureDevices.count > 0) {
             NSLog(@"Supports ultrawide camera");
-
+
             self.device = captureDevices[0];
         }
     }
 
-
     AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
     AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
     captureOutput.alwaysDiscardsLateVideoFrames = YES;
     [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
-
+
     // Configure the pixel format for frames delivered by the video output
     NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
-
+
     // 420YpCbCr8 bi-planar video-range is faster to deliver and decode than BGRA
     NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
     NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
     [captureOutput setVideoSettings:videoSettings];
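     // (Note: in this bi-planar format plane 0 is a plain 8-bit luma plane,
     // which is typically all a barcode/MRZ detector reads, so no color
     // conversion is paid per frame.)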
-
+
     // Create a capture session
     self.captureSession = [[AVCaptureSession alloc] init];
-
+
     // We add input and output
     [self.captureSession addInput:captureInput];
     [self.captureSession addOutput:captureOutput];
-
+
     if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
         NSLog(@"Set preview port to 1280X720");
         self.captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
@@ -140,61 +137,45 @@ - (void)initCapture
         NSLog(@"Set preview port to 640X480");
         self.captureSession.sessionPreset = AVCaptureSessionPreset640x480;
     }
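     // (Note: 1280x720 is 16:9 while the 640x480 fallback is 4:3; the preview
     // layer below uses aspect-fill, so either preset fills the screen.)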
-
-    // Limit camera FPS to 15 for single core devices (iPhone 4 and older) so more CPU power is available for decoder
-    host_basic_info_data_t hostInfo;
-    mach_msg_type_number_t infoCount;
-    infoCount = HOST_BASIC_INFO_COUNT;
-    host_info(mach_host_self(), HOST_BASIC_INFO, (host_info_t)&hostInfo, &infoCount);
-
-    if (hostInfo.max_cpus < 2) {
-        if ([self.device respondsToSelector:@selector(setActiveVideoMinFrameDuration:)]) {
-            [self.device lockForConfiguration:nil];
-            [self.device setActiveVideoMinFrameDuration:CMTimeMake(1, 15)];
-            [self.device unlockForConfiguration];
-        } else {
-            AVCaptureConnection *conn = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
-            [conn setVideoMinFrameDuration:CMTimeMake(1, 15)];
-        }
-    }
-
+
     // We add the preview layer
     self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
-
-    if (self.interfaceOrientation == UIInterfaceOrientationLandscapeLeft) {
+
+    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
+    if (orientation == UIInterfaceOrientationLandscapeLeft) {
         self.prevLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
         self.prevLayer.frame = CGRectMake(0, 0, MAX(self.view.frame.size.width, self.view.frame.size.height), MIN(self.view.frame.size.width, self.view.frame.size.height));
     }
-    if (self.interfaceOrientation == UIInterfaceOrientationLandscapeRight) {
+    if (orientation == UIInterfaceOrientationLandscapeRight) {
         self.prevLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
         self.prevLayer.frame = CGRectMake(0, 0, MAX(self.view.frame.size.width, self.view.frame.size.height), MIN(self.view.frame.size.width, self.view.frame.size.height));
     }
-
-    if (self.interfaceOrientation == UIInterfaceOrientationPortrait) {
+
+    if (orientation == UIInterfaceOrientationPortrait) {
         self.prevLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
         self.prevLayer.frame = CGRectMake(0, 0, MIN(self.view.frame.size.width, self.view.frame.size.height), MAX(self.view.frame.size.width, self.view.frame.size.height));
     }
-    if (self.interfaceOrientation == UIInterfaceOrientationPortraitUpsideDown) {
+    if (orientation == UIInterfaceOrientationPortraitUpsideDown) {
         self.prevLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
         self.prevLayer.frame = CGRectMake(0, 0, MIN(self.view.frame.size.width, self.view.frame.size.height), MAX(self.view.frame.size.width, self.view.frame.size.height));
     }
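     // (Note: statusBarOrientation replaces the removed UIViewController
     // interfaceOrientation property; it is itself deprecated since iOS 13 in
     // favor of UIWindowScene.interfaceOrientation, sketched after the diff.)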
-
+
     self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
     [self.view.layer addSublayer:self.prevLayer];
 #if USE_MWOVERLAY
     [MWOverlay addToPreviewLayer:self.prevLayer];
 #endif
 
     self.focusTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(reFocus) userInfo:nil repeats:YES];
-
+
     [self CustomOverlay];
 }
 
 - (void)CustomOverlay
 {
     CGRect bounds = self.view.bounds;
     bounds = CGRectMake(0, 0, bounds.size.width, bounds.size.height);
-
+
     UIView *overlayView = [[UIView alloc] initWithFrame:bounds];
     overlayView.autoresizesSubviews = YES;
     overlayView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
@@ -239,7 +220,7 @@ - (void)onVideoStart:(NSNotification *)note
         return;
     }
     running = YES;
-
+
     // lock device and set focus mode
     NSError *error = nil;
     if ([self.device lockForConfiguration:&error]) {
@@ -271,23 +252,21 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput
     if (self.state != CAMERA_DECODING) {
         self.state = CAMERA_DECODING;
     }
-
+
     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-
-    // get image
     CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
-
+
     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
         NSUserDefaults *settings = [NSUserDefaults standardUserDefaults];
-
+
         // read the scanner type and license activation keys from settings
         NSString *scannerType = [settings objectForKey:@"scannerType"];
-
+
         IDScanPDFDetector *pdfDetector = [IDScanPDFDetector detectorWithActivationKey:[settings objectForKey:@"scannerPDFKey"]];
         IDScanMRZDetector *mrzDetector = [IDScanMRZDetector detectorWithActivationKey:[settings objectForKey:@"scannerMRZKey"]];
-
+
         NSString *result = @"";
-
+
         // detect based on scanner type
         if ([scannerType isEqualToString:@"pdf"]) {
             result = [pdfDetector detectFromImage:ciImage][@"string"];
@@ -296,19 +275,19 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput
         } else {
             // combined scanner
             result = [pdfDetector detectFromImage:ciImage][@"string"];
-
+
             if ([result length] < 4) {
                 result = [mrzDetector detectFromImage:ciImage][@"string"];
             }
         }
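         // (Note: combined mode tries the PDF417 detector first and only runs
         // the MRZ detector when the result is too short to be a real payload.)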
-
+
         // Ignore results of 4 characters or fewer - probably a false detection
         if ([result length] > 4) {
             self.state = CAMERA;
-
-            if (decodeImage != nil) {
-                CGImageRelease(decodeImage);
-                decodeImage = nil;
+
+            if (self->decodeImage != nil) {
+                CGImageRelease(self->decodeImage);
+                self->decodeImage = nil;
             }
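             // (Note: inside a block an ivar has to be reached through an
             // explicit self->; a bare reference implicitly captures self,
             // which clang flags under -Wimplicit-retain-self.)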
 
             dispatch_async(dispatch_get_main_queue(), ^(void) {
@@ -322,18 +301,17 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput
                 self.state = CAMERA;
             }
         });
-
 }
 
 #pragma mark -
 #pragma mark Memory management
 
-- (void)viewDidUnload
+- (void)didReceiveMemoryWarning
 {
     [self stopScanning];
-
+
     self.prevLayer = nil;
-    [super viewDidUnload];
+    [super didReceiveMemoryWarning];
 }
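 // (Note: viewDidUnload has been a no-op since iOS 6, so teardown moves to the
 // memory-warning path; didReceiveMemoryWarning can fire while the view is
 // still visible, which will stop an in-progress scan.)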
 
 - (void)dealloc {
@@ -366,7 +344,7 @@ - (void)deinitCapture {
 #if USE_MWOVERLAY
     [MWOverlay removeFromPreviewLayer];
 #endif
-
+
 #if !__has_feature(objc_arc)
     [self.captureSession release];
 #endif
@@ -378,28 +356,22 @@ - (void)deinitCapture {
 
 
 - (void)decodeResultNotification:(NSNotification *)notification {
-
+
     if ([notification.object isKindOfClass:[DecoderResult class]]) {
         DecoderResult *obj = (DecoderResult *)notification.object;
         if (obj.succeeded) {
             decodeResult = [[NSString alloc] initWithString:obj.result];
-
+
             // Call the delegate to return the decodeResult and dismiss the camera view
             [self.delegate returnScanResult:self scanResult:decodeResult];
             [self dismissViewControllerAnimated:YES completion:nil];
         }
     }
 }
 
-- (void)alertView:(UIAlertView *)alertView didDismissWithButtonIndex:(NSInteger)buttonIndex {
-    if (buttonIndex == 0) {
-        [self startScanning];
-    }
-}
+- (UIInterfaceOrientationMask)supportedInterfaceOrientations {
+    UIInterfaceOrientation interfaceOrientation = [[UIApplication sharedApplication] statusBarOrientation];
 
-- (NSUInteger)supportedInterfaceOrientations {
-    UIInterfaceOrientation interfaceOrientation = [[UIApplication sharedApplication] statusBarOrientation];
-
     switch (interfaceOrientation) {
         case UIInterfaceOrientationPortrait:
             return UIInterfaceOrientationMaskPortrait;
@@ -416,18 +388,14 @@ - (NSUInteger)supportedInterfaceOrientations {
         default:
             break;
     }
-
+
     return UIInterfaceOrientationMaskAll;
 }
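 // (Note: the signature change matches the iOS 9+ SDK, where this method is
 // typed UIInterfaceOrientationMask instead of NSUInteger.)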
 
 - (BOOL)shouldAutorotate {
     return YES;
 }
 
-- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
-    return (interfaceOrientation == UIInterfaceOrientationPortrait);
-}
-
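 // (Note: shouldAutorotateToInterfaceOrientation: has not been called since
 // iOS 6; shouldAutorotate plus supportedInterfaceOrientations above now
 // cover rotation.)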
 - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
     [self toggleTorch];
 }
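
Since statusBarOrientation is itself deprecated on iOS 13+, the orientation
checks in initCapture could later move to a scene-based lookup. A minimal
sketch under that assumption, for a single-scene app; the helper name
currentInterfaceOrientation is hypothetical, not part of this commit:

    // Hypothetical helper: read the orientation from the window scene
    // instead of the deprecated statusBarOrientation (iOS 13+ only).
    - (UIInterfaceOrientation)currentInterfaceOrientation {
        UIScene *scene = [UIApplication sharedApplication].connectedScenes.anyObject;
        if ([scene isKindOfClass:[UIWindowScene class]]) {
            return ((UIWindowScene *)scene).interfaceOrientation;
        }
        // Fall back to portrait when no window scene is attached yet.
        return UIInterfaceOrientationPortrait;
    }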