// Copyright 2017-present 650 Industries. All rights reserved.

#import <AVFoundation/AVFoundation.h>

#import <UMCore/UMUIManager.h>
#import <UMCore/UMEventEmitterService.h>
#import <UMCore/UMAppLifecycleService.h>
#import <UMFileSystemInterface/UMFileSystemInterface.h>
#import <UMPermissionsInterface/UMPermissionsInterface.h>

#import <EXAV/EXAV.h>
#import <EXAV/EXAVPlayerData.h>
#import <EXAV/EXVideoView.h>

NSString *const EXAudioRecordingOptionsKey = @"ios";
NSString *const EXAudioRecordingOptionExtensionKey = @"extension";
NSString *const EXAudioRecordingOptionOutputFormatKey = @"outputFormat";
NSString *const EXAudioRecordingOptionAudioQualityKey = @"audioQuality";
NSString *const EXAudioRecordingOptionSampleRateKey = @"sampleRate";
NSString *const EXAudioRecordingOptionNumberOfChannelsKey = @"numberOfChannels";
NSString *const EXAudioRecordingOptionBitRateKey = @"bitRate";
NSString *const EXAudioRecordingOptionBitRateStrategyKey = @"bitRateStrategy";
NSString *const EXAudioRecordingOptionBitDepthHintKey = @"bitDepthHint";
NSString *const EXAudioRecordingOptionLinearPCMBitDepthKey = @"linearPCMBitDepth";
NSString *const EXAudioRecordingOptionLinearPCMIsBigEndianKey = @"linearPCMIsBigEndian";
NSString *const EXAudioRecordingOptionLinearPCMIsFloatKey = @"linearPCMIsFloat";

NSString *const EXDidUpdatePlaybackStatusEventName = @"didUpdatePlaybackStatus";

@interface EXAV ()

@property (nonatomic, weak) id kernelAudioSessionManagerDelegate;
@property (nonatomic, weak) id kernelPermissionsServiceDelegate;

@property (nonatomic, assign) BOOL audioIsEnabled;
@property (nonatomic, assign) EXAVAudioSessionMode currentAudioSessionMode;
@property (nonatomic, assign) BOOL isBackgrounded;

@property (nonatomic, assign) EXAudioInterruptionMode audioInterruptionMode;
@property (nonatomic, assign) BOOL playsInSilentMode;
@property (nonatomic, assign) BOOL allowsAudioRecording;
@property (nonatomic, assign) BOOL staysActiveInBackground;

@property (nonatomic, assign) int soundDictionaryKeyCount;
@property (nonatomic, strong) NSMutableDictionary <NSNumber *, EXAVPlayerData *> *soundDictionary;
@property (nonatomic, assign) BOOL isBeingObserved;
@property (nonatomic, strong) NSMutableSet <NSObject<EXAVObject> *> *videoSet;

@property (nonatomic, strong) NSString *audioRecorderFilename;
@property (nonatomic, strong) NSDictionary *audioRecorderSettings;
@property (nonatomic, strong) AVAudioRecorder *audioRecorder;
@property (nonatomic, assign) BOOL audioRecorderIsPreparing;
@property (nonatomic, assign) BOOL audioRecorderShouldBeginRecording;
@property (nonatomic, assign) int audioRecorderDurationMillis;

@property (nonatomic, weak) UMModuleRegistry *moduleRegistry;

@end

@implementation EXAV

UM_EXPORT_MODULE(ExponentAV);

- (instancetype)init
{
  if (self = [super init]) {
    _audioIsEnabled = YES;
    _currentAudioSessionMode = EXAVAudioSessionModeInactive;
    _isBackgrounded = NO;

    _audioInterruptionMode = EXAudioInterruptionModeMixWithOthers;
    _playsInSilentMode = false;
    _allowsAudioRecording = false;
    _staysActiveInBackground = false;

    _soundDictionaryKeyCount = 0;
    _soundDictionary = [NSMutableDictionary new];
    _isBeingObserved = NO;
    _videoSet = [NSMutableSet new];

    _audioRecorderFilename = nil;
    _audioRecorderSettings = nil;
    _audioRecorder = nil;
    _audioRecorderIsPreparing = false;
    _audioRecorderShouldBeginRecording = false;
    _audioRecorderDurationMillis = 0;
  }
  return self;
}

+ (const NSArray<Protocol *> *)exportedInterfaces
{
  return @[@protocol(EXAVInterface)];
}

- (NSDictionary *)constantsToExport
{
  return @{
    @"Qualities": @{
      @"Low": AVAudioTimePitchAlgorithmLowQualityZeroLatency,
      @"Medium": AVAudioTimePitchAlgorithmTimeDomain,
      @"High": AVAudioTimePitchAlgorithmSpectral
    }
  };
}
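// The exported "Qualities" map hands AVAudioTimePitchAlgorithm values to JS. A minimal sketch
// of how such a value is typically applied on the native side (illustrative only; the actual
// wiring lives in the player/video classes, not in this module):
//
//   AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url];
//   item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; // the "Medium" quality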
#pragma mark - Expo experience lifecycle

- (void)setModuleRegistry:(UMModuleRegistry *)moduleRegistry
{
  [[_moduleRegistry getModuleImplementingProtocol:@protocol(UMAppLifecycleService)] unregisterAppLifecycleListener:self];
  _moduleRegistry = moduleRegistry;
  _kernelAudioSessionManagerDelegate = [_moduleRegistry getSingletonModuleForName:@"AudioSessionManager"];
  // [_kernelAudioSessionManagerDelegate moduleDidForeground:self];
  [[_moduleRegistry getModuleImplementingProtocol:@protocol(UMAppLifecycleService)] registerAppLifecycleListener:self];
}

- (void)onAppForegrounded
{
  [_kernelAudioSessionManagerDelegate moduleDidForeground:self];
  _isBackgrounded = NO;
  [self _runBlockForAllAVObjects:^(NSObject<EXAVObject> *exAVObject) {
    [exAVObject appDidForeground];
  }];
}

- (void)onAppBackgrounded
{
  _isBackgrounded = YES;
  if (!_staysActiveInBackground) {
    [self _deactivateAudioSession]; // This will pause all players and stop all recordings.
    [self _runBlockForAllAVObjects:^(NSObject<EXAVObject> *exAVObject) {
      [exAVObject appDidBackground];
    }];
    [_kernelAudioSessionManagerDelegate moduleDidBackground:self];
  }
}

#pragma mark - RCTEventEmitter

- (void)startObserving
{
  _isBeingObserved = YES;
}

- (void)stopObserving
{
  _isBeingObserved = NO;
}

#pragma mark - Global audio state control API

- (void)registerVideoForAudioLifecycle:(NSObject<EXAVObject> *)video
{
  [_videoSet addObject:video];
}

- (void)unregisterVideoForAudioLifecycle:(NSObject<EXAVObject> *)video
{
  [_videoSet removeObject:video];
}

- (void)_runBlockForAllAVObjects:(void (^)(NSObject<EXAVObject> *exAVObject))block
{
  for (EXAVPlayerData *data in [_soundDictionary allValues]) {
    block(data);
  }
  for (NSObject<EXAVObject> *video in [_videoSet allObjects]) {
    block(video);
  }
}

// This method is placed here so that it is easily referable from _updateAudioSessionCategoryForAudioSessionMode.
- (NSError *)_setAudioMode:(NSDictionary *)mode
{
  BOOL playsInSilentMode = ((NSNumber *)mode[@"playsInSilentModeIOS"]).boolValue;
  EXAudioInterruptionMode interruptionMode = ((NSNumber *)mode[@"interruptionModeIOS"]).intValue;
  BOOL allowsRecording = ((NSNumber *)mode[@"allowsRecordingIOS"]).boolValue;
  BOOL shouldPlayInBackground = ((NSNumber *)mode[@"staysActiveInBackground"]).boolValue;

  if (!playsInSilentMode && interruptionMode == EXAudioInterruptionModeDuckOthers) {
    return UMErrorWithMessage(@"Impossible audio mode: playsInSilentMode == false and duckOthers == true cannot be set on iOS.");
  } else if (!playsInSilentMode && allowsRecording) {
    return UMErrorWithMessage(@"Impossible audio mode: playsInSilentMode == false and allowsRecording == true cannot be set on iOS.");
  } else if (!playsInSilentMode && shouldPlayInBackground) {
    return UMErrorWithMessage(@"Impossible audio mode: playsInSilentMode == false and staysActiveInBackground == true cannot be set on iOS.");
  } else {
    if (!allowsRecording) {
      if (_audioRecorder && [_audioRecorder isRecording]) {
        [_audioRecorder pause];
      }
    }

    _playsInSilentMode = playsInSilentMode;
    _audioInterruptionMode = interruptionMode;
    _allowsAudioRecording = allowsRecording;
    _staysActiveInBackground = shouldPlayInBackground;

    if (_currentAudioSessionMode != EXAVAudioSessionModeInactive) {
      return [self _updateAudioSessionCategoryForAudioSessionMode:[self _getAudioSessionModeRequired]];
    }
    return nil;
  }
}
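// Summary of the category selection implemented below, as read from the code (not an official
// table): when playsInSilentMode is false the session becomes Ambient (or SoloAmbient for
// "do not mix"); when it is true the session becomes Playback, or PlayAndRecord if recording
// is allowed, with MixWithOthers/DuckOthers applied as category options. A muted session is
// forced to MixWithOthers, presumably so that other audio is not interrupted unnecessarily.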
- (NSError *)_updateAudioSessionCategoryForAudioSessionMode:(EXAVAudioSessionMode)audioSessionMode
{
  AVAudioSessionCategory requiredAudioCategory;
  AVAudioSessionCategoryOptions requiredAudioCategoryOptions = 0;

  if (!_playsInSilentMode) {
    // _allowsRecording is guaranteed to be false, and _interruptionMode is guaranteed to not be EXAudioInterruptionModeDuckOthers (see above).
    if (_audioInterruptionMode == EXAudioInterruptionModeDoNotMix) {
      requiredAudioCategory = AVAudioSessionCategorySoloAmbient;
    } else {
      requiredAudioCategory = AVAudioSessionCategoryAmbient;
    }
  } else {
    EXAudioInterruptionMode activeInterruptionMode = audioSessionMode == EXAVAudioSessionModeActiveMuted
      ? EXAudioInterruptionModeMixWithOthers
      : _audioInterruptionMode;
    NSString *category = _allowsAudioRecording ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback;
    requiredAudioCategory = category;
    switch (activeInterruptionMode) {
      case EXAudioInterruptionModeDoNotMix:
        break;
      case EXAudioInterruptionModeDuckOthers:
        requiredAudioCategoryOptions = AVAudioSessionCategoryOptionDuckOthers;
        break;
      case EXAudioInterruptionModeMixWithOthers:
      default:
        requiredAudioCategoryOptions = AVAudioSessionCategoryOptionMixWithOthers;
        break;
    }
  }

  if ([[_kernelAudioSessionManagerDelegate activeCategory] isEqual:requiredAudioCategory]
      && [_kernelAudioSessionManagerDelegate activeCategoryOptions] == requiredAudioCategoryOptions) {
    return nil;
  }

  return [_kernelAudioSessionManagerDelegate setCategory:requiredAudioCategory withOptions:requiredAudioCategoryOptions forModule:self];
}

- (EXAVAudioSessionMode)_getAudioSessionModeRequired
{
  __block EXAVAudioSessionMode audioSessionModeRequired = EXAVAudioSessionModeInactive;

  [self _runBlockForAllAVObjects:^(NSObject<EXAVObject> *exAVObject) {
    EXAVAudioSessionMode audioSessionModeRequiredByThisObject = [exAVObject getAudioSessionModeRequired];
    if (audioSessionModeRequiredByThisObject > audioSessionModeRequired) {
      audioSessionModeRequired = audioSessionModeRequiredByThisObject;
    }
  }];

  if (_audioRecorder) {
    if (_audioRecorderShouldBeginRecording || [_audioRecorder isRecording]) {
      audioSessionModeRequired = EXAVAudioSessionModeActive;
    } else if (_audioRecorderIsPreparing && audioSessionModeRequired == EXAVAudioSessionModeInactive) {
      audioSessionModeRequired = EXAVAudioSessionModeActiveMuted;
    }
  }

  return audioSessionModeRequired;
}

- (NSError *)promoteAudioSessionIfNecessary
{
  if (!_audioIsEnabled) {
    return UMErrorWithMessage(@"Expo Audio is disabled, so the audio session could not be activated.");
  }
  if (_isBackgrounded && ![_kernelAudioSessionManagerDelegate isActiveForModule:self]) {
    return UMErrorWithMessage(@"This experience is currently in the background, so the audio session could not be activated.");
  }

  EXAVAudioSessionMode audioSessionModeRequired = [self _getAudioSessionModeRequired];

  if (audioSessionModeRequired == EXAVAudioSessionModeInactive) {
    return nil;
  }

  NSError *error;

  error = [self _updateAudioSessionCategoryForAudioSessionMode:audioSessionModeRequired];
  if (error) {
    return error;
  }

  error = [_kernelAudioSessionManagerDelegate setActive:YES forModule:self];
  if (error) {
    return error;
  }

  _currentAudioSessionMode = audioSessionModeRequired;
  return nil;
}

- (NSError *)_deactivateAudioSession
{
  if (_currentAudioSessionMode == EXAVAudioSessionModeInactive) {
    return nil;
  }

  // We must have all players, recorders, and videos paused in order to effectively deactivate the session.
  [self _runBlockForAllAVObjects:^(NSObject<EXAVObject> *exAVObject) {
    [exAVObject pauseImmediately];
  }];
  if (_audioRecorder && [_audioRecorder isRecording]) {
    [_audioRecorder pause];
  }

  NSError *error = [_kernelAudioSessionManagerDelegate setActive:NO forModule:self];
  if (!error) {
    _currentAudioSessionMode = EXAVAudioSessionModeInactive;
  }
  return error;
}
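// Both the promote and demote paths rely on EXAVAudioSessionMode being an ordered enum —
// presumably Inactive < ActiveMuted < Active — so that plain integer comparisons can pick the
// "strongest" mode that any registered player, video, or recorder currently needs.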
- (NSError *)demoteAudioSessionIfPossible
{
  EXAVAudioSessionMode audioSessionModeRequired = [self _getAudioSessionModeRequired];

  // Current audio session mode is lower than the required one
  // (we should rather promote the session than demote it).
  if (_currentAudioSessionMode <= audioSessionModeRequired) {
    return nil;
  }

  // We require the session to be muted and it is active.
  // Let's only update the category.
  if (audioSessionModeRequired == EXAVAudioSessionModeActiveMuted) {
    NSError *error = [self _updateAudioSessionCategoryForAudioSessionMode:audioSessionModeRequired];
    if (!error) {
      _currentAudioSessionMode = EXAVAudioSessionModeActiveMuted;
    }
    return error;
  }

  // We require the session to be inactive and it is active, let's deactivate it!
  return [self _deactivateAudioSession];
}

- (void)handleAudioSessionInterruption:(NSNotification *)notification
{
  NSNumber *interruptionType = [[notification userInfo] objectForKey:AVAudioSessionInterruptionTypeKey];

  if (interruptionType.unsignedIntegerValue == AVAudioSessionInterruptionTypeBegan) {
    _currentAudioSessionMode = EXAVAudioSessionModeInactive;
  }

  [self _runBlockForAllAVObjects:^(NSObject<EXAVObject> *exAVObject) {
    [exAVObject handleAudioSessionInterruption:notification];
  }];
}

- (void)handleMediaServicesReset:(NSNotification *)notification
{
  // See here: https://developer.apple.com/library/content/qa/qa1749/_index.html
  // (this is an unlikely notification to receive, but best practices suggest that we catch it just in case)

  _currentAudioSessionMode = EXAVAudioSessionModeInactive;

  [self _runBlockForAllAVObjects:^(NSObject<EXAVObject> *exAVObject) {
    [exAVObject handleMediaServicesReset:nil];
  }];

  if (_audioRecorder) {
    [self _removeAudioRecorder:NO];
    [self _createNewAudioRecorder];
    [_audioRecorder prepareToRecord];
  }
}

#pragma mark - Internal sound playback helper methods

- (void)_runBlock:(void (^)(EXAVPlayerData *data))block
  withSoundForKey:(nonnull NSNumber *)key
     withRejecter:(UMPromiseRejectBlock)reject
{
  EXAVPlayerData *data = _soundDictionary[key];
  if (data) {
    block(data);
  } else {
    reject(@"E_AUDIO_NOPLAYER", nil, UMErrorWithMessage(@"Player does not exist."));
  }
}

- (void)_removeSoundForKey:(NSNumber *)key
{
  EXAVPlayerData *data = _soundDictionary[key];
  if (data) {
    [data pauseImmediately];
    [self demoteAudioSessionIfPossible];
  }
  _soundDictionary[key] = nil;
}

#pragma mark - Internal video playback helper method

- (void)_runBlock:(void (^)(EXVideoView *view))block
withEXVideoViewForTag:(nonnull NSNumber *)reactTag
     withRejecter:(UMPromiseRejectBlock)reject
{
  // TODO check that the bridge is still valid after the dispatch
  // TODO check if the queues are ok
  [[_moduleRegistry getModuleImplementingProtocol:@protocol(UMUIManager)] executeUIBlock:^(id view) {
    if ([view isKindOfClass:[EXVideoView class]]) {
      block(view);
    } else {
      NSString *errorMessage = [NSString stringWithFormat:@"Invalid view returned from registry, expecting EXVideo, got: %@", view];
      reject(@"E_VIDEO_TAGINCORRECT", errorMessage, UMErrorWithMessage(errorMessage));
    }
  } forView:reactTag ofClass:[EXVideoView class]];
}

#pragma mark - Internal audio recording helper methods

- (NSString *)_getBitRateStrategyFromEnum:(NSNumber *)bitRateEnumSelected
{
  if (bitRateEnumSelected) {
    switch ([bitRateEnumSelected integerValue]) {
      case EXAudioRecordingOptionBitRateStrategyConstant:
        return AVAudioBitRateStrategy_Constant;
      case EXAudioRecordingOptionBitRateStrategyLongTermAverage:
        return AVAudioBitRateStrategy_LongTermAverage;
      case EXAudioRecordingOptionBitRateStrategyVariableConstrained:
        return AVAudioBitRateStrategy_VariableConstrained;
      case EXAudioRecordingOptionBitRateStrategyVariable:
        return AVAudioBitRateStrategy_Variable;
      default:
        return nil;
    }
  }
  return nil;
}
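// The method below maps the JS-side recording option keys (nested under the "ios" key of the
// options dictionary) onto the corresponding AVFoundation recorder settings keys. For
// illustration only, a hypothetical options payload from JS might look like:
//
//   {
//     "ios": {
//       "extension": ".caf",
//       "outputFormat": "lpcm",
//       "audioQuality": 96,        // an AVAudioQuality value
//       "sampleRate": 44100,
//       "numberOfChannels": 2,
//       "bitRate": 128000
//     }
//   }
//
// (The field values above only sketch the expected shape; they are not canonical defaults.)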
- (NSDictionary *)_getAVKeysForRecordingOptionsKeys:(NSString *)bitRateStrategy
{
  return @{EXAudioRecordingOptionOutputFormatKey: AVFormatIDKey,
           EXAudioRecordingOptionAudioQualityKey: bitRateStrategy == AVAudioBitRateStrategy_Variable ? AVEncoderAudioQualityForVBRKey : AVEncoderAudioQualityKey,
           EXAudioRecordingOptionSampleRateKey: AVSampleRateKey,
           EXAudioRecordingOptionNumberOfChannelsKey: AVNumberOfChannelsKey,
           EXAudioRecordingOptionBitRateKey: AVEncoderBitRateKey,
           EXAudioRecordingOptionBitDepthHintKey: AVEncoderBitDepthHintKey,
           EXAudioRecordingOptionLinearPCMBitDepthKey: AVLinearPCMBitDepthKey,
           EXAudioRecordingOptionLinearPCMIsBigEndianKey: AVLinearPCMIsBigEndianKey,
           EXAudioRecordingOptionLinearPCMIsFloatKey: AVLinearPCMIsFloatKey};
}

- (UInt32)_getFormatIDFromString:(NSString *)typeString
{
  const char *s = typeString.UTF8String;
  UInt32 typeCode = s[3] | (s[2] << 8) | (s[1] << 16) | (s[0] << 24);
  return typeCode;
}
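// _getFormatIDFromString packs a four-character string into a big-endian FourCharCode, the
// representation AVFormatIDKey expects. For example, @"lpcm" becomes
// ('l' << 24) | ('p' << 16) | ('c' << 8) | 'm' = 0x6C70636D, which equals kAudioFormatLinearPCM.
// Note this assumes the incoming string is at least four ASCII characters long.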
- (void)_setNewAudioRecorderFilenameAndSettings:(NSDictionary *)optionsFromJS
{
  NSDictionary *iosOptionsFromJS = optionsFromJS[EXAudioRecordingOptionsKey];

  NSString *extension = iosOptionsFromJS[EXAudioRecordingOptionExtensionKey];
  _audioRecorderFilename = [NSString stringWithFormat:@"recording-%@%@", [[NSUUID UUID] UUIDString], extension];

  NSString *bitRateStrategy = [self _getBitRateStrategyFromEnum:iosOptionsFromJS[EXAudioRecordingOptionBitRateStrategyKey]];
  NSDictionary *avKeysForRecordingOptionsKeys = [self _getAVKeysForRecordingOptionsKeys:bitRateStrategy];

  NSMutableDictionary *recorderSettings = [NSMutableDictionary new];
  for (NSString *recordingOptionsKey in avKeysForRecordingOptionsKeys) {
    if (iosOptionsFromJS[recordingOptionsKey]) {
      recorderSettings[avKeysForRecordingOptionsKeys[recordingOptionsKey]] = iosOptionsFromJS[recordingOptionsKey];
    }
  }
  recorderSettings[AVEncoderBitRateStrategyKey] = bitRateStrategy;

  if (
      iosOptionsFromJS[EXAudioRecordingOptionOutputFormatKey] &&
      [iosOptionsFromJS[EXAudioRecordingOptionOutputFormatKey] isKindOfClass:[NSString class]]
  ) {
    recorderSettings[AVFormatIDKey] = @([self _getFormatIDFromString:iosOptionsFromJS[EXAudioRecordingOptionOutputFormatKey]]);
  }

  _audioRecorderSettings = recorderSettings;
}

- (NSError *)_createNewAudioRecorder
{
  if (_audioRecorder) {
    return UMErrorWithMessage(@"Recorder already exists.");
  }

  id<UMFileSystemInterface> fileSystem = [_moduleRegistry getModuleImplementingProtocol:@protocol(UMFileSystemInterface)];
  if (!fileSystem) {
    return UMErrorWithMessage(@"No FileSystem module.");
  }
  NSString *directory = [fileSystem.cachesDirectory stringByAppendingPathComponent:@"AV"];
  [fileSystem ensureDirExistsWithPath:directory];
  NSString *soundFilePath = [directory stringByAppendingPathComponent:_audioRecorderFilename];
  NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];

  NSError *error;
  AVAudioRecorder *recorder = [[AVAudioRecorder alloc] initWithURL:soundFileURL
                                                          settings:_audioRecorderSettings
                                                             error:&error];
  if (error == nil) {
    _audioRecorder = recorder;
  }
  return error;
}

- (int)_getDurationMillisOfRecordingAudioRecorder
{
  return _audioRecorder ? (int) (_audioRecorder.currentTime * 1000) : 0;
}

- (NSDictionary *)_getAudioRecorderStatus
{
  if (_audioRecorder) {
    int durationMillisFromRecorder = [self _getDurationMillisOfRecordingAudioRecorder];
    // After stop, the recorder's duration goes to zero, so we replace it with the correct duration in this case.
    int durationMillis = durationMillisFromRecorder == 0 ? _audioRecorderDurationMillis : durationMillisFromRecorder;
    return @{@"canRecord": @(YES),
             @"isRecording": @([_audioRecorder isRecording]),
             @"durationMillis": @(durationMillis)};
  } else {
    return nil;
  }
}

- (BOOL)_checkAudioRecorderExistsOrReject:(UMPromiseRejectBlock)reject
{
  if (_audioRecorder == nil) {
    reject(@"E_AUDIO_NORECORDER", nil, UMErrorWithMessage(@"Recorder does not exist."));
  }
  return _audioRecorder != nil;
}

- (void)_removeAudioRecorder:(BOOL)removeFilenameAndSettings
{
  if (_audioRecorder) {
    [_audioRecorder stop];
    [self demoteAudioSessionIfPossible];
    _audioRecorder = nil;
  }
  if (removeFilenameAndSettings) {
    _audioRecorderFilename = nil;
    _audioRecorderSettings = nil;
    _audioRecorderDurationMillis = 0;
  }
}

- (NSArray<NSString *> *)supportedEvents
{
  return @[EXDidUpdatePlaybackStatusEventName, @"ExponentAV.onError"];
}

#pragma mark - Audio API: Global settings

UM_EXPORT_METHOD_AS(setAudioIsEnabled,
                    setAudioIsEnabled:(BOOL)value
                             resolver:(UMPromiseResolveBlock)resolve
                             rejecter:(UMPromiseRejectBlock)reject)
{
  _audioIsEnabled = value;

  if (!value) {
    [self _deactivateAudioSession];
  }
  resolve(nil);
}

UM_EXPORT_METHOD_AS(setAudioMode,
                    setAudioMode:(NSDictionary *)mode
                        resolver:(UMPromiseResolveBlock)resolve
                        rejecter:(UMPromiseRejectBlock)reject)
{
  NSError *error = [self _setAudioMode:mode];

  if (error) {
    reject(@"E_AUDIO_AUDIOMODE", nil, error);
  } else {
    resolve(nil);
  }
}

#pragma mark - Unified playback API - Audio
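// Sounds are identified by monotonically increasing integer keys handed back to JS from
// loadForSound; every subsequent sound call looks the player data up in _soundDictionary by
// that key. Playback status updates are forwarded to JS through the
// EXDidUpdatePlaybackStatusEventName event, and only while JS is observing this emitter.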
UM_EXPORT_METHOD_AS(loadForSound,
                    loadForSound:(NSDictionary *)source
                      withStatus:(NSDictionary *)status
                        resolver:(UMPromiseResolveBlock)loadSuccess
                        rejecter:(UMPromiseRejectBlock)loadError)
{
  NSNumber *key = @(_soundDictionaryKeyCount++);

  __weak __typeof__(self) weakSelf = self;
  EXAVPlayerData *data = [[EXAVPlayerData alloc] initWithEXAV:self
                                                   withSource:source
                                                   withStatus:status
                                          withLoadFinishBlock:^(BOOL success, NSDictionary *successStatus, NSString *error) {
    if (success) {
      loadSuccess(@[key, successStatus]);
    } else {
      [weakSelf _removeSoundForKey:key];
      loadError(@"EXAV", error, nil);
    }
  }];
  data.errorCallback = ^(NSString *error) {
    __strong __typeof__(self) strongSelf = weakSelf;

    if (strongSelf) {
      [strongSelf sendEventWithName:@"ExponentAV.onError" body:@{
        @"key": key,
        @"error": error
      }];
      [strongSelf _removeSoundForKey:key];
    }
  };

  data.statusUpdateCallback = ^(NSDictionary *status) {
    __strong __typeof__(weakSelf) strongSelf = weakSelf;
    if (strongSelf && strongSelf.isBeingObserved) {
      NSDictionary *response = @{@"key": key, @"status": status};
      [strongSelf sendEventWithName:EXDidUpdatePlaybackStatusEventName body:response];
    }
  };

  _soundDictionary[key] = data;
}

- (void)sendEventWithName:(NSString *)eventName body:(NSDictionary *)body
{
  [[_moduleRegistry getModuleImplementingProtocol:@protocol(UMEventEmitterService)] sendEventWithName:eventName body:body];
}

UM_EXPORT_METHOD_AS(unloadForSound,
                    unloadForSound:(NSNumber *)key
                          resolver:(UMPromiseResolveBlock)resolve
                          rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXAVPlayerData *data) {
    [self _removeSoundForKey:key];
    resolve([EXAVPlayerData getUnloadedStatus]);
  } withSoundForKey:key withRejecter:reject];
}

UM_EXPORT_METHOD_AS(setStatusForSound,
                    setStatusForSound:(NSNumber *)key
                           withStatus:(NSDictionary *)status
                             resolver:(UMPromiseResolveBlock)resolve
                             rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXAVPlayerData *data) {
    [data setStatus:status resolver:resolve rejecter:reject];
  } withSoundForKey:key withRejecter:reject];
}

UM_EXPORT_METHOD_AS(getStatusForSound,
                    getStatusForSound:(NSNumber *)key
                             resolver:(UMPromiseResolveBlock)resolve
                             rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXAVPlayerData *data) {
    NSDictionary *status = [data getStatus];
    resolve(status);
  } withSoundForKey:key withRejecter:reject];
}

UM_EXPORT_METHOD_AS(replaySound,
                    replaySound:(NSNumber *)key
                     withStatus:(NSDictionary *)status
                       resolver:(UMPromiseResolveBlock)resolve
                       rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXAVPlayerData *data) {
    [data replayWithStatus:status resolver:resolve rejecter:reject];
  } withSoundForKey:key withRejecter:reject];
}

#pragma mark - Unified playback API - Video

UM_EXPORT_METHOD_AS(loadForVideo,
                    loadForVideo:(NSNumber *)reactTag
                          source:(NSDictionary *)source
                      withStatus:(NSDictionary *)status
                        resolver:(UMPromiseResolveBlock)resolve
                        rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXVideoView *view) {
    [view setSource:source withStatus:status resolver:resolve rejecter:reject];
  } withEXVideoViewForTag:reactTag withRejecter:reject];
}

UM_EXPORT_METHOD_AS(unloadForVideo,
                    unloadForVideo:(NSNumber *)reactTag
                          resolver:(UMPromiseResolveBlock)resolve
                          rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXVideoView *view) {
    [view setSource:nil withStatus:nil resolver:resolve rejecter:reject];
  } withEXVideoViewForTag:reactTag withRejecter:reject];
}

UM_EXPORT_METHOD_AS(setStatusForVideo,
                    setStatusForVideo:(NSNumber *)reactTag
                           withStatus:(NSDictionary *)status
                             resolver:(UMPromiseResolveBlock)resolve
                             rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXVideoView *view) {
    [view setStatus:status resolver:resolve rejecter:reject];
  } withEXVideoViewForTag:reactTag withRejecter:reject];
}

UM_EXPORT_METHOD_AS(replayVideo,
                    replayVideo:(NSNumber *)reactTag
                     withStatus:(NSDictionary *)status
                       resolver:(UMPromiseResolveBlock)resolve
                       rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXVideoView *view) {
    [view replayWithStatus:status resolver:resolve rejecter:reject];
  } withEXVideoViewForTag:reactTag withRejecter:reject];
}

UM_EXPORT_METHOD_AS(getStatusForVideo,
                    getStatusForVideo:(NSNumber *)reactTag
                             resolver:(UMPromiseResolveBlock)resolve
                             rejecter:(UMPromiseRejectBlock)reject)
{
  [self _runBlock:^(EXVideoView *view) {
    resolve(view.status);
  } withEXVideoViewForTag:reactTag withRejecter:reject];
}

// Note that setStatusUpdateCallback happens in the JS for video via onStatusUpdate.

#pragma mark - Audio API: Recording

UM_EXPORT_METHOD_AS(prepareAudioRecorder,
                    prepareAudioRecorder:(NSDictionary *)options
                                resolver:(UMPromiseResolveBlock)resolve
                                rejecter:(UMPromiseRejectBlock)reject)
{
  if (![[_moduleRegistry getModuleImplementingProtocol:@protocol(UMPermissionsInterface)] hasGrantedPermission:@"audioRecording"]) {
    reject(@"E_MISSING_PERMISSION", @"Missing audio recording permission.", nil);
    return;
  }

  [self _setNewAudioRecorderFilenameAndSettings:options];
  NSError *error = [self _createNewAudioRecorder];

  if (_audioRecorder && !error) {
    _audioRecorderIsPreparing = true;
    error = [self promoteAudioSessionIfNecessary];
    if (error) {
      reject(@"E_AUDIO_RECORDERNOTCREATED", @"Prepare encountered an error: audio session not activated!", error);
    } else if ([_audioRecorder prepareToRecord]) {
      resolve(@{@"uri": [[_audioRecorder url] absoluteString],
                @"status": [self _getAudioRecorderStatus]});
    } else {
      reject(@"E_AUDIO_RECORDERNOTCREATED", nil, UMErrorWithMessage(@"Prepare encountered an error: recorder not prepared."));
    }
    _audioRecorderIsPreparing = false;
    [self demoteAudioSessionIfPossible];
  } else {
    reject(@"E_AUDIO_RECORDERNOTCREATED", @"Prepare encountered an error: recorder not created.", error);
  }
}
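// Recorder lifecycle, as implemented by the surrounding methods: prepareAudioRecorder builds
// the settings and the AVAudioRecorder, startAudioRecording promotes the audio session and
// begins recording, pause/stop demote the session again (stop also caches the final duration,
// since AVAudioRecorder resets currentTime to 0 on stop), and unloadAudioRecorder tears
// everything down.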
recorder not created.", error); } } UM_EXPORT_METHOD_AS(startAudioRecording, startAudioRecording:(UMPromiseResolveBlock)resolve rejecter:(UMPromiseRejectBlock)reject) { if (![[_moduleRegistry getModuleImplementingProtocol:@protocol(UMPermissionsInterface)] hasGrantedPermission:@"audioRecording"]) { reject(@"E_MISSING_PERMISSION", @"Missing audio recording permission.", nil); return; } if ([self _checkAudioRecorderExistsOrReject:reject]) { if (!_allowsAudioRecording) { reject(@"E_AUDIO_AUDIOMODE", nil, UMErrorWithMessage(@"Recording not allowed on iOS.")); } else if (!_audioRecorder.recording) { _audioRecorderShouldBeginRecording = true; NSError *error = [self promoteAudioSessionIfNecessary]; if (!error) { if ([_audioRecorder record]) { resolve([self _getAudioRecorderStatus]); } else { reject(@"E_AUDIO_RECORDING", nil, UMErrorWithMessage(@"Start encountered an error: recording not started.")); } } else { reject(@"E_AUDIO_RECORDING", @"Start encountered an error: audio session not activated.", error); } } else { resolve([self _getAudioRecorderStatus]); } } _audioRecorderShouldBeginRecording = false; } UM_EXPORT_METHOD_AS(pauseAudioRecording, pauseAudioRecording:(UMPromiseResolveBlock)resolve rejecter:(UMPromiseRejectBlock)reject) { if ([self _checkAudioRecorderExistsOrReject:reject]) { if (_audioRecorder.recording) { [_audioRecorder pause]; [self demoteAudioSessionIfPossible]; } resolve([self _getAudioRecorderStatus]); } } UM_EXPORT_METHOD_AS(stopAudioRecording, stopAudioRecording:(UMPromiseResolveBlock)resolve rejecter:(UMPromiseRejectBlock)reject) { if ([self _checkAudioRecorderExistsOrReject:reject]) { if (_audioRecorder.recording) { _audioRecorderDurationMillis = [self _getDurationMillisOfRecordingAudioRecorder]; [_audioRecorder stop]; [self demoteAudioSessionIfPossible]; } resolve([self _getAudioRecorderStatus]); } } UM_EXPORT_METHOD_AS(getAudioRecordingStatus, getAudioRecordingStatus:(UMPromiseResolveBlock)resolve rejecter:(UMPromiseRejectBlock)reject) { if ([self _checkAudioRecorderExistsOrReject:reject]) { resolve([self _getAudioRecorderStatus]); } } UM_EXPORT_METHOD_AS(unloadAudioRecorder, unloadAudioRecorder:(UMPromiseResolveBlock)resolve rejecter:(UMPromiseRejectBlock)reject) { if ([self _checkAudioRecorderExistsOrReject:reject]) { [self _removeAudioRecorder:YES]; resolve(nil); } } #pragma mark - Lifecycle - (void)dealloc { [_kernelAudioSessionManagerDelegate moduleWillDeallocate:self]; [[_moduleRegistry getModuleImplementingProtocol:@protocol(UMAppLifecycleService)] unregisterAppLifecycleListener:self]; [[NSNotificationCenter defaultCenter] removeObserver:self]; // This will clear all @properties and deactivate the audio session: for (NSObject *video in [_videoSet allObjects]) { [video pauseImmediately]; [_videoSet removeObject:video]; } [self _removeAudioRecorder:YES]; for (NSNumber *key in [_soundDictionary allKeys]) { [self _removeSoundForKey:key]; } } @end