1 | #import <AVFoundation/AVFoundation.h>
|
2 |
|
3 | #import <UMBarCodeScannerInterface/UMBarCodeScannerProviderInterface.h>
|
4 | #import <EXCamera/EXCamera.h>
|
5 | #import <EXCamera/EXCameraUtils.h>
|
6 | #import <EXCamera/EXCameraManager.h>
|
7 | #import <EXCamera/EXCameraPermissionRequester.h>
|
8 | #import <UMCore/UMAppLifecycleService.h>
|
9 | #import <UMCore/UMUtilities.h>
|
10 | #import <UMFaceDetectorInterface/UMFaceDetectorManagerProvider.h>
|
11 | #import <UMFileSystemInterface/UMFileSystemInterface.h>
|
12 | #import <UMPermissionsInterface/UMPermissionsInterface.h>
|
13 |
|
@interface EXCamera ()

// Collaborators resolved from the module registry; `weak` because the registry owns them.
@property (nonatomic, weak) id<UMFileSystemInterface> fileSystem;
@property (nonatomic, weak) UMModuleRegistry *moduleRegistry;
// Optional feature managers created per camera view; owned by this instance
// (nil when the corresponding expo package isn't installed).
@property (nonatomic, strong) id<UMFaceDetectorManager> faceDetectorManager;
@property (nonatomic, strong) id<UMBarCodeScannerInterface> barCodeScanner;
@property (nonatomic, weak) id<UMPermissionsInterface> permissionsManager;
@property (nonatomic, weak) id<UMAppLifecycleService> lifecycleManager;

// YES while the session is stopped because the app went to the background.
@property (nonatomic, assign, getter=isSessionPaused) BOOL paused;

// Pending take-picture promise state. `copy` (instead of `strong`) is the
// conventional attribute for block and NSDictionary properties — it documents
// ownership semantics and protects against mutable-subclass callers.
@property (nonatomic, copy) NSDictionary *photoCaptureOptions;
@property (nonatomic, copy) UMPromiseResolveBlock photoCapturedResolve;
@property (nonatomic, copy) UMPromiseRejectBlock photoCapturedReject;

// Pending record-video promise state.
@property (nonatomic, copy) UMPromiseResolveBlock videoRecordedResolve;
@property (nonatomic, copy) UMPromiseRejectBlock videoRecordedReject;

// Direct events emitted to JS.
@property (nonatomic, copy) UMDirectEventBlock onCameraReady;
@property (nonatomic, copy) UMDirectEventBlock onMountError;
@property (nonatomic, copy) UMDirectEventBlock onPictureSaved;

@property (nonatomic, copy) UMDirectEventBlock onBarCodeScanned;
@property (nonatomic, copy) UMDirectEventBlock onFacesDetected;

@end
|
40 |
|
41 | @implementation EXCamera
|
42 |
|
// NOTE(review): this static appears unused in this file — candidate for removal
// after confirming no category or extension references it.
static NSDictionary *defaultFaceDetectorOptions = nil;
|
44 |
|
// Initializer used by EXCameraManager. Resolves collaborating modules from the
// registry, builds the AVCaptureSession + preview layer, configures the input
// for the current camera and starts the session.
- (id)initWithModuleRegistry:(UMModuleRegistry *)moduleRegistry
{
  if ((self = [super init])) {
    _moduleRegistry = moduleRegistry;
    _session = [AVCaptureSession new];
    // Serial queue: all session (re)configuration work is funneled through it.
    _sessionQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    // Optional integrations — nil when expo-face-detector / expo-barcode-scanner aren't installed.
    _faceDetectorManager = [self createFaceDetectorManager];
    _barCodeScanner = [self createBarCodeScanner];
    _lifecycleManager = [moduleRegistry getModuleImplementingProtocol:@protocol(UMAppLifecycleService)];
    _fileSystem = [moduleRegistry getModuleImplementingProtocol:@protocol(UMFileSystemInterface)];
    _permissionsManager = [moduleRegistry getModuleImplementingProtocol:@protocol(UMPermissionsInterface)];
#if !(TARGET_IPHONE_SIMULATOR)
    // The simulator has no capture hardware, so the preview layer is device-only.
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _previewLayer.needsDisplayOnBoundsChange = YES;
#endif
    _paused = NO;
    _pictureSize = AVCaptureSessionPresetHigh;
    // NOTE(review): statusBarOrientation must be read on the main thread —
    // presumably view initialization happens there; confirm with the caller.
    [self changePreviewOrientation:[UIApplication sharedApplication].statusBarOrientation];
    [self initializeCaptureSessionInput];
    [self startSession];
    // Re-orient the preview whenever the device rotates; observer is removed in removeFromSuperview.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(orientationChanged:)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];
    // Lifecycle registration drives onAppForegrounded / onAppBackgrounded below.
    [_lifecycleManager registerAppLifecycleListener:self];
  }
  return self;
}
|
74 |
|
- (void)dealloc
{
  // In very rare case EXCamera might be unmounted (and thus deallocated) after starting taking a photo,
  // but still before callbacks from AVCapturePhotoCaptureDelegate are fired (that means before results from taking a photo are handled).
  // This scenario leads to a state when AVCapturePhotoCaptureDelegate is `nil` and
  // neither self.photoCapturedResolve nor self.photoCapturedReject is called.
  // To prevent hanging promise let's reject here.
  if (_photoCapturedReject) {
    _photoCapturedReject(@"E_IMAGE_CAPTURE_FAILED", @"Camera unmounted during taking photo process.", nil);
  }
}
|
86 |
|
// Emits the `onCameraReady` event to JS if a listener is attached.
// The incoming payload is intentionally ignored — the JS callback receives nil.
- (void)onReady:(NSDictionary *)event
{
  if (!_onCameraReady) {
    return;
  }
  _onCameraReady(nil);
}
|
93 |
|
// Forwards a mounting error payload to JS if an `onMountError` listener is attached.
- (void)onMountingError:(NSDictionary *)event
{
  if (!_onMountError) {
    return;
  }
  _onMountError(event);
}
|
100 |
|
// Forwards a scanned-barcode payload to JS if an `onBarCodeScanned` listener is attached.
- (void)onBarCodeScanned:(NSDictionary *)event
{
  if (!_onBarCodeScanned) {
    return;
  }
  _onBarCodeScanned(event);
}
|
107 |
|
// Forwards a picture-saved payload to JS if an `onPictureSaved` listener is attached.
// Used by fast-mode captures, which resolve their promise before the file is written.
- (void)onPictureSaved:(NSDictionary *)event
{
  if (!_onPictureSaved) {
    return;
  }
  _onPictureSaved(event);
}
|
114 |
|
// Keeps the capture preview layer glued to this view's bounds and below all other sublayers.
- (void)layoutSubviews
{
  [super layoutSubviews];
  self.previewLayer.frame = self.bounds;
  self.backgroundColor = [UIColor blackColor];
  [self.layer insertSublayer:self.previewLayer atIndex:0];
}
|
122 |
|
// Tears down everything init set up when the view leaves the hierarchy:
// lifecycle listener, capture session and the orientation observer.
- (void)removeFromSuperview
{
  [_lifecycleManager unregisterAppLifecycleListener:self];
  [self stopSession];
  [super removeFromSuperview];
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
}
|
130 |
|
// Switches the capture input to the currently selected camera (front/back),
// (re)starting the session if needed. All work happens on the session queue.
- (void)updateType
{
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);
    [self initializeCaptureSessionInput];
    if (![self.session isRunning]) {
      [self startSession];
    }
  });
}
|
142 |
|
// Applies the component's flash mode to the device torch.
// Torch mode turns the torch on continuously; every other mode turns it off
// (the actual still-capture flash is requested per-photo in takePicture:).
//
// BUGFIX: the previous implementation called lockForConfiguration twice
// (nested) on each path and then unlockForConfiguration again at the bottom,
// leaving lock/unlock calls unbalanced. Each path now takes the lock exactly
// once and releases it exactly once.
- (void)updateFlashMode
{
  AVCaptureDevice *device = [_videoCaptureDeviceInput device];
  NSError *error = nil;

  if (_flashMode == EXCameraFlashModeTorch) {
    if (![device hasTorch] || ![device isTorchModeSupported:AVCaptureTorchModeOn]) {
      return;
    }

    if (![device lockForConfiguration:&error]) {
      if (error) {
        UMLogInfo(@"%s: %@", __func__, error);
      }
      return;
    }

    [device setTorchMode:AVCaptureTorchModeOn];
    [device unlockForConfiguration];
  } else {
    if (![device hasFlash]) {
      return;
    }

    if (![device lockForConfiguration:&error]) {
      if (error) {
        UMLogInfo(@"%s: %@", __func__, error);
      }
      return;
    }

    if ([device isTorchModeSupported:AVCaptureTorchModeOff]) {
      [device setTorchMode:AVCaptureTorchModeOff];
    }
    [device unlockForConfiguration];
  }
}
|
199 |
|
// Applies the component's autofocus setting (`_autoFocus`) to the capture device.
//
// BUGFIX: the previous implementation nested a second lockForConfiguration
// inside an already-held lock and only unlocked once, leaving the calls
// unbalanced. The lock is now taken and released exactly once.
- (void)updateFocusMode
{
  AVCaptureDevice *device = [_videoCaptureDeviceInput device];
  NSError *error = nil;

  if (![device lockForConfiguration:&error]) {
    if (error) {
      UMLogInfo(@"%s: %@", __func__, error);
    }
    return;
  }

  if ([device isFocusModeSupported:_autoFocus]) {
    [device setFocusMode:_autoFocus];
  }

  [device unlockForConfiguration];
}
|
224 |
|
// Applies `_focusDepth` (a 0..1 lens position) when the camera is in manual focus mode.
// NOTE(review): this compares an AVCaptureFocusMode against EXCameraAutoFocusOff —
// it assumes the EXCamera enum mirrors AVFoundation's raw values; confirm.
- (void)updateFocusDepth
{
  AVCaptureDevice *device = [_videoCaptureDeviceInput device];
  NSError *error = nil;

  // Focus depth only makes sense with autofocus disabled.
  if (device == nil || device.focusMode != EXCameraAutoFocusOff) {
    return;
  }

  if ([device isLockingFocusWithCustomLensPositionSupported]) {
    if (![device lockForConfiguration:&error]) {
      if (error) {
        UMLogInfo(@"%s: %@", __func__, error);
      }
      return;
    }

    // The configuration lock is held until the lens move completes;
    // it is released inside the completion handler.
    UM_WEAKIFY(device);
    [device setFocusModeLockedWithLensPosition:_focusDepth completionHandler:^(CMTime syncTime) {
      UM_ENSURE_STRONGIFY(device);
      [device unlockForConfiguration];
    }];
    return;
  }

  UMLogInfo(@"%s: Setting focusDepth isn't supported for this camera device", __func__);
  return;
}
|
253 |
|
// Maps the component's normalized zoom (0..1) onto the device's supported
// videoZoomFactor range [1, videoMaxZoomFactor].
- (void)updateZoom {
  AVCaptureDevice *device = [_videoCaptureDeviceInput device];
  NSError *lockError = nil;

  if (![device lockForConfiguration:&lockError]) {
    if (lockError) {
      UMLogInfo(@"%s: %@", __func__, lockError);
    }
    return;
  }

  CGFloat maxZoomFactor = device.activeFormat.videoMaxZoomFactor;
  device.videoZoomFactor = 1.0 + (maxZoomFactor - 1.0) * _zoom;

  [device unlockForConfiguration];
}
|
269 |
|
// Applies the component's white balance: continuous auto mode, or gains locked
// to a temperature derived from the selected EXCameraWhiteBalance preset.
//
// BUGFIX: the previous implementation unlocked twice on the auto path and
// nested a second lockForConfiguration on the manual path while also unlocking
// at the bottom, leaving lock/unlock unbalanced. The lock is now taken once;
// the manual path releases it in the gains-applied completion handler.
- (void)updateWhiteBalance
{
  AVCaptureDevice *device = [_videoCaptureDeviceInput device];
  NSError *error = nil;

  if (![device lockForConfiguration:&error]) {
    if (error) {
      UMLogInfo(@"%s: %@", __func__, error);
    }
    return;
  }

  if (_whiteBalance == EXCameraWhiteBalanceAuto) {
    [device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
    [device unlockForConfiguration];
  } else {
    AVCaptureWhiteBalanceTemperatureAndTintValues temperatureAndTint = {
      .temperature = [EXCameraUtils temperatureForWhiteBalance:_whiteBalance],
      .tint = 0,
    };
    AVCaptureWhiteBalanceGains rgbGains = [device deviceWhiteBalanceGainsForTemperatureAndTintValues:temperatureAndTint];
    // The lock is held until the gains are applied; released in the handler.
    UM_WEAKIFY(device);
    [device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:rgbGains completionHandler:^(CMTime syncTime) {
      UM_ENSURE_STRONGIFY(device);
      [device unlockForConfiguration];
    }];
  }
}
|
306 |
|
// Applies the currently selected picture size as the session preset.
- (void)updatePictureSize
{
  [self updateSessionPreset:_pictureSize];
}
|
311 |
|
// Enables/disables barcode scanning; logs an error when scanning is requested
// but the optional barcode-scanner module is not installed.
- (void)setIsScanningBarCodes:(BOOL)barCodeScanning
{
  if (!_barCodeScanner) {
    if (barCodeScanning) {
      UMLogError(@"BarCodeScanner module not found. Make sure `expo-barcode-scanner` is installed and linked correctly.");
    }
    return;
  }
  [_barCodeScanner setIsEnabled:barCodeScanning];
}
|
320 |
|
// Forwards scanner settings to the barcode module; a no-op when it isn't installed.
- (void)setBarCodeScannerSettings:(NSDictionary *)settings
{
  if (!_barCodeScanner) {
    return;
  }
  [_barCodeScanner setSettings:settings];
}
|
327 |
|
// Enables/disables face detection; logs an error when detection is requested
// but the optional face-detector module is not installed.
- (void)setIsDetectingFaces:(BOOL)faceDetecting
{
  if (!_faceDetectorManager) {
    if (faceDetecting) {
      UMLogError(@"FaceDetector module not found. Make sure `expo-face-detector` is installed and linked correctly.");
    }
    return;
  }
  [_faceDetectorManager setIsEnabled:faceDetecting];
}
|
336 |
|
// Forwards detector settings to the face-detector module; a no-op when it isn't installed.
- (void)updateFaceDetectorSettings:(NSDictionary *)settings
{
  if (!_faceDetectorManager) {
    return;
  }
  [_faceDetectorManager updateSettings:settings];
}
|
343 |
|
// Initiates a still capture via AVCapturePhotoOutput. The promise blocks are
// stored on the instance and fulfilled later from the
// AVCapturePhotoCaptureDelegate callbacks (see captureOutput:... below).
- (void)takePicture:(NSDictionary *)options resolve:(UMPromiseResolveBlock)resolve reject:(UMPromiseRejectBlock)reject
{
  // Only one capture may be in flight — a stored resolve block marks a pending one.
  if (_photoCapturedResolve) {
    reject(@"E_ANOTHER_CAPTURE", @"Another photo capture is already being processed. Await the first call.", nil);
    return;
  }
  if (!_photoOutput) {
    reject(@"E_IMAGE_CAPTURE_FAILED", @"Camera is not ready yet. Wait for 'onCameraReady' callback.", nil);
    return;
  }
  // Match the still's orientation to the current physical device orientation.
  AVCaptureConnection *connection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo];
  [connection setVideoOrientation:[EXCameraUtils videoOrientationForDeviceOrientation:[[UIDevice currentDevice] orientation]]];

  _photoCapturedReject = reject;
  _photoCapturedResolve = resolve;
  _photoCaptureOptions = options;

  // NOTE(review): AVVideoCodecJPEG is deprecated in favor of AVVideoCodecTypeJPEG
  // (iOS 11+); kept as-is pending a deployment-target check.
  AVCapturePhotoSettings *outputSettings = [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];
  outputSettings.highResolutionPhotoEnabled = YES;
  // Map the component's flash mode to AVFoundation's; "torch" behaves like "on" for stills.
  AVCaptureFlashMode requestedFlashMode = AVCaptureFlashModeOff;
  switch (_flashMode) {
    case EXCameraFlashModeOff:
      requestedFlashMode = AVCaptureFlashModeOff;
      break;
    case EXCameraFlashModeAuto:
      requestedFlashMode = AVCaptureFlashModeAuto;
      break;
    case EXCameraFlashModeOn:
    case EXCameraFlashModeTorch:
      requestedFlashMode = AVCaptureFlashModeOn;
      break;
  }
  // Only request a flash mode the output actually supports, otherwise capture would throw.
  if ([[_photoOutput supportedFlashModes] containsObject:@(requestedFlashMode)]) {
    outputSettings.flashMode = requestedFlashMode;
  }
  [_photoOutput capturePhotoWithSettings:outputSettings delegate:self];
}
|
381 |
|
// AVCapturePhotoCaptureDelegate callback (pre-iOS 11 API). Converts the sample
// buffer into JPEG data + image-source metadata and hands off to
// handleCapturedImageData:..., settling the promise stored by takePicture:.
//
// BUGFIX: CGImageSourceCopyPropertiesAtIndex follows the CF "Copy" rule (+1),
// but the result was bridged with a plain __bridge cast, leaking the
// dictionary on every capture. It is now transferred to ARC via
// CFBridgingRelease. Also guards against a NULL image source, which would
// previously have crashed in CFRelease(NULL).
- (void)captureOutput:(AVCapturePhotoOutput *)output
didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
     resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
      bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
                error:(NSError *)error
{
  // Capture and clear the pending promise state first, so a new capture can
  // start no matter how this one ends.
  NSDictionary *options = _photoCaptureOptions;
  UMPromiseRejectBlock reject = _photoCapturedReject;
  UMPromiseResolveBlock resolve = _photoCapturedResolve;
  _photoCapturedResolve = nil;
  _photoCapturedReject = nil;
  _photoCaptureOptions = nil;

  if (error || !photoSampleBuffer) {
    reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error);
    return;
  }

  if (!self.fileSystem) {
    reject(@"E_IMAGE_CAPTURE_FAILED", @"No file system module", nil);
    return;
  }

  NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];

  CGImageSourceRef sourceCGIImageRef = CGImageSourceCreateWithData((CFDataRef)imageData, NULL);
  if (!sourceCGIImageRef) {
    reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", nil);
    return;
  }
  // CFBridgingRelease transfers the +1 Copy-rule result to ARC (fixes the leak).
  NSDictionary *sourceMetadata = (NSDictionary *)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(sourceCGIImageRef, 0, NULL));
  CFRelease(sourceCGIImageRef);

  [self handleCapturedImageData:imageData metadata:sourceMetadata options:options resolver:resolve reject:reject];
}
|
413 |
|
// AVCapturePhotoCaptureDelegate callback (iOS 11+ API). Settles the promise
// stored by takePicture: with the captured photo's data and metadata.
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error API_AVAILABLE(ios(11.0))
{
  // Grab and clear the pending promise state up front, so a new capture can
  // start regardless of how this one finishes.
  UMPromiseResolveBlock resolve = _photoCapturedResolve;
  UMPromiseRejectBlock reject = _photoCapturedReject;
  NSDictionary *options = _photoCaptureOptions;
  _photoCapturedResolve = nil;
  _photoCapturedReject = nil;
  _photoCaptureOptions = nil;

  if (!photo || error) {
    reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error);
    return;
  }
  if (!self.fileSystem) {
    reject(@"E_IMAGE_CAPTURE_FAILED", @"No file system module", nil);
    return;
  }

  [self handleCapturedImageData:[photo fileDataRepresentation]
                       metadata:photo.metadata
                        options:options
                       resolver:resolve
                         reject:reject];
}
|
436 |
|
// Shared post-processing for both photo delegate paths: crops the capture to
// the preview's aspect ratio, optionally merges EXIF, writes a JPEG to the
// cache directory, and settles the promise (or emits onPictureSaved in fast mode).
- (void)handleCapturedImageData:(NSData *)imageData metadata:(NSDictionary *)metadata options:(NSDictionary *)options resolver:(UMPromiseResolveBlock)resolve reject:(UMPromiseRejectBlock)reject
{
  UIImage *takenImage = [UIImage imageWithData:imageData];
  BOOL useFastMode = [options[@"fastMode"] boolValue];
  // Fast mode resolves immediately; the full result is delivered later via onPictureSaved.
  if (useFastMode) {
    resolve(nil);
  }

  // Preview size in sensor space — swap width/height in portrait so the crop
  // rect matches the sensor's landscape orientation.
  CGSize previewSize;
  if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation])) {
    previewSize = CGSizeMake(self.previewLayer.frame.size.height, self.previewLayer.frame.size.width);
  } else {
    previewSize = CGSizeMake(self.previewLayer.frame.size.width, self.previewLayer.frame.size.height);
  }

  // Crop the capture to what was actually visible in the preview (aspect-fill).
  CGImageRef takenCGImage = takenImage.CGImage;
  CGRect cropRect = CGRectMake(0, 0, CGImageGetWidth(takenCGImage), CGImageGetHeight(takenCGImage));
  CGRect croppedSize = AVMakeRectWithAspectRatioInsideRect(previewSize, cropRect);
  takenImage = [EXCameraUtils cropImage:takenImage toRect:croppedSize];

  NSString *path = [self.fileSystem generatePathInDirectory:[self.fileSystem.cachesDirectory stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"];
  float width = takenImage.size.width;
  float height = takenImage.size.height;
  NSData *processedImageData = nil;
  float quality = [options[@"quality"] floatValue];

  NSMutableDictionary *response = [[NSMutableDictionary alloc] init];
  if ([options[@"exif"] boolValue]) {
    NSMutableDictionary *updatedExif = [EXCameraUtils updateExifMetadata:metadata[(NSString *)kCGImagePropertyExifDictionary] withAdditionalData:@{ @"Orientation": @([EXCameraUtils exportImageOrientation:takenImage.imageOrientation]) }];
    // NOTE(review): Y dimension is set from width and X from height — Exif
    // convention is X=width, Y=height, so this looks swapped; confirm whether
    // it intentionally compensates for image orientation before changing.
    updatedExif[(NSString *)kCGImagePropertyExifPixelYDimension] = @(width);
    updatedExif[(NSString *)kCGImagePropertyExifPixelXDimension] = @(height);
    response[@"exif"] = updatedExif;

    NSMutableDictionary *updatedMetadata = [metadata mutableCopy];
    updatedMetadata[(NSString *)kCGImagePropertyExifDictionary] = updatedExif;

    // UIImage does not contain metadata information. We need to add them to CGImage manually.
    processedImageData = [EXCameraUtils dataFromImage:takenImage withMetadata:updatedMetadata imageQuality:quality];
  } else {
    processedImageData = UIImageJPEGRepresentation(takenImage, quality);
  }

  if (!processedImageData) {
    return reject(@"E_IMAGE_SAVE_FAILED", @"Could not save the image.", nil);
  }

  response[@"uri"] = [EXCameraUtils writeImage:processedImageData toPath:path];
  response[@"width"] = @(width);
  response[@"height"] = @(height);

  if ([options[@"base64"] boolValue]) {
    response[@"base64"] = [processedImageData base64EncodedStringWithOptions:0];
  }

  // Fast mode already resolved above — deliver the full payload as an event instead.
  if ([options[@"fastMode"] boolValue]) {
    [self onPictureSaved:@{@"data": response, @"id": options[@"id"]}];
  } else {
    resolve(response);
  }
}
|
497 |
|
// Starts video recording to a .mov in the cache directory; the promise is
// settled from the AVCaptureFileOutputRecordingDelegate callback below.
// Supported options: maxDuration, maxFileSize, quality, mute, mirror.
//
// BUGFIX: `preset` was declared uninitialized and later compared with
// `preset != nil` — undefined behavior (garbage pointer read) whenever no
// `quality` option was given and the session preset wasn't Photo. It is now
// explicitly initialized to nil.
- (void)record:(NSDictionary *)options resolve:(UMPromiseResolveBlock)resolve reject:(UMPromiseRejectBlock)reject
{
  if (_movieFileOutput == nil) {
    // At the time of writing AVCaptureMovieFileOutput and AVCaptureVideoDataOutput (> GMVDataOutput)
    // cannot coexist on the same AVSession (see: https://stackoverflow.com/a/4986032/1123156).
    // We stop face detection here and restart it in when AVCaptureMovieFileOutput finishes recording.
    if (_faceDetectorManager) {
      [_faceDetectorManager stopFaceDetection];
    }
    [self setupMovieFileCapture];
  }

  if (_movieFileOutput != nil && !_movieFileOutput.isRecording && _videoRecordedResolve == nil && _videoRecordedReject == nil) {
    if (options[@"maxDuration"]) {
      Float64 maxDuration = [options[@"maxDuration"] floatValue];
      _movieFileOutput.maxRecordedDuration = CMTimeMakeWithSeconds(maxDuration, 30);
    }

    if (options[@"maxFileSize"]) {
      _movieFileOutput.maxRecordedFileSize = [options[@"maxFileSize"] integerValue];
    }

    AVCaptureSessionPreset preset = nil;
    if (options[@"quality"]) {
      EXCameraVideoResolution resolution = [options[@"quality"] integerValue];
      preset = [EXCameraUtils captureSessionPresetForVideoResolution:resolution];
    } else if ([_session.sessionPreset isEqual:AVCaptureSessionPresetPhoto]) {
      // The Photo preset cannot record video — fall back to High.
      preset = AVCaptureSessionPresetHigh;
    }

    if (preset != nil) {
      [self updateSessionPreset:preset];
    }

    bool shouldBeMuted = options[@"mute"] && [options[@"mute"] boolValue];
    [self updateSessionAudioIsMuted:shouldBeMuted];

    AVCaptureConnection *connection = [_movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    // TODO: Add support for videoStabilizationMode (right now it is not only read, never written to)
    if (connection.isVideoStabilizationSupported == NO) {
      UMLogWarn(@"%s: Video Stabilization is not supported on this device.", __func__);
    } else {
      [connection setPreferredVideoStabilizationMode:self.videoStabilizationMode];
    }
    [connection setVideoOrientation:[EXCameraUtils videoOrientationForDeviceOrientation:[[UIDevice currentDevice] orientation]]];

    bool canBeMirrored = connection.isVideoMirroringSupported;
    bool shouldBeMirrored = options[@"mirror"] && [options[@"mirror"] boolValue];
    if (canBeMirrored && shouldBeMirrored) {
      [connection setVideoMirrored:shouldBeMirrored];
    }

    UM_WEAKIFY(self);
    dispatch_async(self.sessionQueue, ^{
      UM_STRONGIFY(self);
      if (!self) {
        reject(@"E_IMAGE_SAVE_FAILED", @"Camera view has been unmounted.", nil);
        return;
      }
      if (!self.fileSystem) {
        reject(@"E_IMAGE_SAVE_FAILED", @"No file system module", nil);
        return;
      }
      NSString *directory = [self.fileSystem.cachesDirectory stringByAppendingPathComponent:@"Camera"];
      NSString *path = [self.fileSystem generatePathInDirectory:directory withExtension:@".mov"];
      NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:path];
      [self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
      self.videoRecordedResolve = resolve;
      self.videoRecordedReject = reject;
    });
  }
}
|
570 |
|
// Starts face detection on the current session when the face-detector module
// is installed; a no-op otherwise.
- (void)maybeStartFaceDetection:(BOOL)mirrored {
  if (!self.faceDetectorManager) {
    return;
  }
  AVCaptureConnection *connection = [self.photoOutput connectionWithMediaType:AVMediaTypeVideo];
  connection.videoOrientation = [EXCameraUtils videoOrientationForDeviceOrientation:[UIDevice currentDevice].orientation];
  [self.faceDetectorManager maybeStartFaceDetectionOnSession:self.session withPreviewLayer:self.previewLayer mirrored:mirrored];
}
|
578 |
|
// Stores the selected camera position and updates face-detection mirroring.
// Any non-back camera (i.e. the front camera) produces a mirrored preview.
// Replaces the magic number `1` with the named AVCaptureDevicePositionBack
// constant it stood for (same raw value).
- (void)setPresetCamera:(NSInteger)presetCamera
{
  _presetCamera = presetCamera;
  [self.faceDetectorManager updateMirrored:_presetCamera != AVCaptureDevicePositionBack];
}
|
584 |
|
// Stops an in-flight video recording; the delegate callback settles the promise.
- (void)stopRecording
{
  [self.movieFileOutput stopRecording];
}
|
589 |
|
// Re-enables the (previously paused) preview layer connection.
- (void)resumePreview
{
  self.previewLayer.connection.enabled = YES;
}
|
594 |
|
// Freezes the preview by disabling the preview layer connection.
- (void)pausePreview
{
  self.previewLayer.connection.enabled = NO;
}
|
599 |
|
// Starts the capture session: checks camera permission, attaches the photo
// output, installs a runtime-error observer that restarts the session, then
// (slightly deferred) starts face detection / barcode scanning and the session
// itself on the session queue.
- (void)startSession
{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunreachable-code"
#if TARGET_IPHONE_SIMULATOR
  // No capture hardware on the simulator — everything below is dead code there.
  return;
#endif
  if (![_permissionsManager hasGrantedPermissionUsingRequesterClass:[EXCameraPermissionRequester class]]) {
    [self onMountingError:@{@"message": @"Camera permissions not granted - component could not be rendered."}];
    return;
  }
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);

    // AVCaptureDevicePositionUnspecified means no camera has been selected yet.
    if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
      return;
    }

    AVCapturePhotoOutput *photoOutput = [AVCapturePhotoOutput new];
    photoOutput.highResolutionCaptureEnabled = YES;
    photoOutput.livePhotoCaptureEnabled = NO;
    if ([self.session canAddOutput:photoOutput]) {
      [self.session addOutput:photoOutput];
      self.photoOutput = photoOutput;
    }

    // If the session dies with a runtime error, restart it and re-apply the
    // post-start configuration (e.g. torch mode).
    [self setRuntimeErrorHandlingObserver:
     [[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
      UM_ENSURE_STRONGIFY(self);
      dispatch_async(self.sessionQueue, ^{
        UM_ENSURE_STRONGIFY(self)
        // Manually restarting the session since it must
        // have been stopped due to an error.
        [self.session startRunning];
        [self ensureSessionConfiguration];
        [self onReady:nil];
      });
    }]];

    // when BarCodeScanner is enabled since the beginning of camera component lifecycle,
    // some race condition occurs in reconfiguration and barcodes aren't scanned at all
    // NOTE(review): the delay is 50 microseconds (NSEC_PER_USEC) — suspiciously
    // short for a debounce; confirm whether NSEC_PER_MSEC was intended.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 50 * NSEC_PER_USEC), self.sessionQueue, ^{
      UM_ENSURE_STRONGIFY(self);
      // NOTE(review): `1` here is presumably AVCaptureDevicePositionBack —
      // mirrored when not using the back camera; confirm.
      [self maybeStartFaceDetection:self.presetCamera!=1];
      if (self.barCodeScanner) {
        [self.barCodeScanner maybeStartBarCodeScanning];
      }

      [self.session startRunning];
      [self ensureSessionConfiguration];
      [self onReady:nil];
    });
  });
#pragma clang diagnostic pop
}
|
656 |
|
// Stops the capture session and detaches detectors, the preview layer, and
// all inputs/outputs. Runs on the session queue.
//
// BUGFIX: the previous implementation called commitConfiguration with no
// matching beginConfiguration (the AVCaptureSession API requires them to be
// paired), and removed inputs/outputs outside any configuration transaction.
// The removals are now wrapped in a proper begin/commit pair.
- (void)stopSession
{
#if TARGET_IPHONE_SIMULATOR
  return;
#endif
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);

    if (self.faceDetectorManager) {
      [self.faceDetectorManager stopFaceDetection];
    }
    if (self.barCodeScanner) {
      [self.barCodeScanner stopBarCodeScanning];
    }
    [self.previewLayer removeFromSuperlayer];
    [self.session stopRunning];

    [self.session beginConfiguration];
    for (AVCaptureInput *input in self.session.inputs) {
      [self.session removeInput:input];
    }
    for (AVCaptureOutput *output in self.session.outputs) {
      [self.session removeOutput:output];
    }
    [self.session commitConfiguration];
  });
}
|
684 |
|
// Replaces the session's video input with a device matching the selected
// camera position, then re-applies zoom/focus/white-balance and orientation.
//
// BUGFIX: the device-input error path previously returned while still inside
// beginConfiguration, leaving the session in an open configuration transaction;
// it now commits before bailing out.
- (void)initializeCaptureSessionInput
{
  // Already using the requested camera — nothing to do.
  if (_videoCaptureDeviceInput.device.position == _presetCamera) {
    return;
  }

  // statusBarOrientation must be read on the main thread.
  __block UIInterfaceOrientation interfaceOrientation;
  [UMUtilities performSynchronouslyOnMainThread:^{
    interfaceOrientation = [[UIApplication sharedApplication] statusBarOrientation];
  }];
  AVCaptureVideoOrientation orientation = [EXCameraUtils videoOrientationForInterfaceOrientation:interfaceOrientation];

  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);

    [self.session beginConfiguration];

    NSError *error = nil;
    AVCaptureDevice *captureDevice = [EXCameraUtils deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
    AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

    if (error || captureDeviceInput == nil) {
      NSString *errorMessage = @"Camera could not be started - ";
      if (error) {
        errorMessage = [errorMessage stringByAppendingString:[error description]];
      } else {
        errorMessage = [errorMessage stringByAppendingString:@"there's no captureDeviceInput available"];
      }
      // Balance beginConfiguration before bailing out (previously missing).
      [self.session commitConfiguration];
      [self onMountingError:@{@"message": errorMessage}];
      return;
    }

    [self.session removeInput:self.videoCaptureDeviceInput];
    if ([self.session canAddInput:captureDeviceInput]) {
      [self.session addInput:captureDeviceInput];

      self.videoCaptureDeviceInput = captureDeviceInput;
      // Re-apply per-device settings for the newly attached camera.
      [self updateZoom];
      [self updateFocusMode];
      [self updateFocusDepth];
      [self updateWhiteBalance];
      [self.previewLayer.connection setVideoOrientation:orientation];
    }

    [self.session commitConfiguration];
  });
}
|
733 |
|
// Some configuration needs to be (re)applied after the session has started —
// currently torch mode (see: https://stackoverflow.com/a/53666293/4337317).
- (void)ensureSessionConfiguration
{
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);
    [self updateFlashMode];
  });
}
|
744 |
|
745 | #pragma mark - internal
|
746 |
|
// Applies a session preset (if the session supports it) inside a configuration
// transaction on the session queue. No-op on the simulator and for nil presets.
- (void)updateSessionPreset:(AVCaptureSessionPreset)preset
{
#if !(TARGET_IPHONE_SIMULATOR)
  if (!preset) {
    return;
  }
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);
    [self.session beginConfiguration];
    if ([self.session canSetSessionPreset:preset]) {
      self.session.sessionPreset = preset;
    }
    [self.session commitConfiguration];
  });
#endif
}
|
763 |
|
// Adds or removes the microphone input so the session matches the requested
// mute state. Runs inside a configuration transaction on the session queue.
//
// BUGFIX: the audio-device-creation error path previously returned without
// calling commitConfiguration, leaving the session inside an open
// configuration transaction; it now commits before returning.
- (void)updateSessionAudioIsMuted:(BOOL)isMuted
{
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);
    [self.session beginConfiguration];

    // Look for an already-attached audio input.
    AVCaptureDeviceInput *attachedAudioInput = nil;
    for (AVCaptureDeviceInput *input in [self.session inputs]) {
      if ([input.device hasMediaType:AVMediaTypeAudio]) {
        attachedAudioInput = input;
        break;
      }
    }

    if (attachedAudioInput != nil) {
      // An audio input exists: drop it when muting, keep it otherwise.
      if (isMuted) {
        [self.session removeInput:attachedAudioInput];
      }
      [self.session commitConfiguration];
      return;
    }

    if (!isMuted) {
      NSError *error = nil;

      AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
      AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];

      if (error || audioDeviceInput == nil) {
        UMLogInfo(@"%s: %@", __func__, error);
        // Balance beginConfiguration before bailing out (previously missing).
        [self.session commitConfiguration];
        return;
      }

      if ([self.session canAddInput:audioDeviceInput]) {
        [self.session addInput:audioDeviceInput];
      }
    }

    [self.session commitConfiguration];
  });
}
|
800 |
|
// UMAppLifecycleListener: resumes the session if it was paused by backgrounding.
- (void)onAppForegrounded
{
  if ([_session isRunning] || ![self isSessionPaused]) {
    return;
  }
  _paused = NO;
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);
    [self.session startRunning];
    [self ensureSessionConfiguration];
  });
}
|
813 |
|
// UMAppLifecycleListener: pauses a running session when the app backgrounds,
// remembering that we did so in `_paused` so foregrounding can resume it.
- (void)onAppBackgrounded
{
  if (![_session isRunning] || [self isSessionPaused]) {
    return;
  }
  _paused = YES;
  UM_WEAKIFY(self);
  dispatch_async(_sessionQueue, ^{
    UM_ENSURE_STRONGIFY(self);
    [self.session stopRunning];
  });
}
|
825 |
|
// UIDeviceOrientationDidChangeNotification handler — re-orients the preview
// to match the current interface orientation.
- (void)orientationChanged:(NSNotification *)notification
{
  [self changePreviewOrientation:[[UIApplication sharedApplication] statusBarOrientation]];
}
|
831 |
|
// Rotates the preview layer's video connection to match the given interface
// orientation. Layer mutation happens synchronously on the main thread.
- (void)changePreviewOrientation:(UIInterfaceOrientation)orientation
{
  AVCaptureVideoOrientation videoOrientation = [EXCameraUtils videoOrientationForInterfaceOrientation:orientation];
  UM_WEAKIFY(self);
  [UMUtilities performSynchronouslyOnMainThread:^{
    UM_ENSURE_STRONGIFY(self);
    AVCaptureConnection *connection = self.previewLayer.connection;
    if (connection.isVideoOrientationSupported) {
      connection.videoOrientation = videoOrientation;
    }
  }];
}
|
843 |
|
844 | # pragma mark - AVCaptureMovieFileOutput
|
845 |
|
// Lazily attaches an AVCaptureMovieFileOutput for video recording.
// Only stored when the session actually accepts the output.
- (void)setupMovieFileCapture
{
  AVCaptureMovieFileOutput *output = [[AVCaptureMovieFileOutput alloc] init];

  if ([_session canAddOutput:output]) {
    [_session addOutput:output];
    _movieFileOutput = output;
  }
}
|
855 |
|
// Detaches and releases the movie file output once recording has finished
// (mirrors setupMovieFileCapture).
- (void)cleanupMovieFileCapture
{
  if (![_session.outputs containsObject:_movieFileOutput]) {
    return;
  }
  [_session removeOutput:_movieFileOutput];
  _movieFileOutput = nil;
}
|
863 |
|
// AVCaptureFileOutputRecordingDelegate callback — settles the recording
// promise, tears down the movie output, restarts face detection and restores
// the picture-size session preset.
//
// BUGFIX: the preset check used `!=` on AVCaptureSessionPreset (NSString)
// values, comparing pointers instead of contents; it now uses isEqual:.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
  BOOL success = YES;
  // Hitting maxDuration/maxFileSize reports an error whose userInfo still
  // marks the recording as successfully finished — honor that flag.
  if ([error code] != noErr) {
    NSNumber *value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
    if (value) {
      success = [value boolValue];
    }
  }
  if (success && _videoRecordedResolve != nil) {
    _videoRecordedResolve(@{ @"uri": outputFileURL.absoluteString });
  } else if (_videoRecordedReject != nil) {
    _videoRecordedReject(@"E_RECORDING_FAILED", @"An error occurred while recording a video.", error);
  }
  _videoRecordedResolve = nil;
  _videoRecordedReject = nil;

  [self cleanupMovieFileCapture];
  // If face detection has been running prior to recording to file
  // we reenable it here (see comment in -record).
  [self maybeStartFaceDetection:false];

  // Restore the still-capture preset if recording changed it (compare by value).
  if (![_session.sessionPreset isEqual:_pictureSize]) {
    [self updateSessionPreset:_pictureSize];
  }
}
|
890 |
|
891 | # pragma mark - Face detector
|
892 |
|
// Builds a face detector via the optional provider module, wiring its results
// to the `onFacesDetected` JS event. Returns nil when the provider is absent.
- (id)createFaceDetectorManager
{
  id<UMFaceDetectorManagerProvider> provider = [_moduleRegistry getModuleImplementingProtocol:@protocol(UMFaceDetectorManagerProvider)];
  if (!provider) {
    return nil;
  }

  id<UMFaceDetectorManager> faceDetector = [provider createFaceDetectorManager];
  if (faceDetector) {
    // Weak self: the detector retains this block, and self retains the detector.
    UM_WEAKIFY(self);
    [faceDetector setOnFacesDetected:^(NSArray<NSDictionary *> *faces) {
      UM_ENSURE_STRONGIFY(self);
      if (self.onFacesDetected) {
        self.onFacesDetected(@{
          @"type": @"face",
          @"faces": faces
        });
      }
    }];
    [faceDetector setSessionQueue:_sessionQueue];
  }
  return faceDetector;
}
|
916 |
|
917 | # pragma mark - BarCode scanner
|
918 |
|
// Builds a barcode scanner via the optional provider module, attaching it to
// this camera's session/queue and forwarding scans to `onBarCodeScanned:`.
// Returns nil when the provider is absent.
- (id)createBarCodeScanner
{
  id<UMBarCodeScannerProviderInterface> provider = [_moduleRegistry getModuleImplementingProtocol:@protocol(UMBarCodeScannerProviderInterface)];
  if (!provider) {
    return nil;
  }

  id<UMBarCodeScannerInterface> barCodeScanner = [provider createBarCodeScanner];
  if (barCodeScanner) {
    // Weak self: the scanner retains this block, and self retains the scanner.
    UM_WEAKIFY(self);
    [barCodeScanner setSession:_session];
    [barCodeScanner setSessionQueue:_sessionQueue];
    [barCodeScanner setOnBarCodeScanned:^(NSDictionary *body) {
      UM_ENSURE_STRONGIFY(self);
      [self onBarCodeScanned:body];
    }];
  }
  return barCodeScanner;
}
|
937 |
|
938 | @end
|