From 5a6fc95184001a1371ba07b73e4af851beac6a94 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Sat, 4 Oct 2025 21:23:31 +0800 Subject: [PATCH 1/8] Implement --- ios/RCTWebRTC/AudioDeviceModuleObserver.h | 20 ++ ios/RCTWebRTC/AudioDeviceModuleObserver.m | 219 ++++++++++++++++ .../WebRTCModule+RTCAudioDeviceModule.h | 5 + .../WebRTCModule+RTCAudioDeviceModule.m | 215 +++++++++++++++ ios/RCTWebRTC/WebRTCModule.h | 13 + ios/RCTWebRTC/WebRTCModule.m | 43 +-- src/AudioDeviceModule.ts | 246 ++++++++++++++++++ src/AudioDeviceModuleEvents.ts | 199 ++++++++++++++ src/index.ts | 5 + 9 files changed, 948 insertions(+), 17 deletions(-) create mode 100644 ios/RCTWebRTC/AudioDeviceModuleObserver.h create mode 100644 ios/RCTWebRTC/AudioDeviceModuleObserver.m create mode 100644 ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h create mode 100644 ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m create mode 100644 src/AudioDeviceModule.ts create mode 100644 src/AudioDeviceModuleEvents.ts diff --git a/ios/RCTWebRTC/AudioDeviceModuleObserver.h b/ios/RCTWebRTC/AudioDeviceModuleObserver.h new file mode 100644 index 000000000..c2c0e2500 --- /dev/null +++ b/ios/RCTWebRTC/AudioDeviceModuleObserver.h @@ -0,0 +1,20 @@ +#import +#import "WebRTCModule.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface AudioDeviceModuleObserver : NSObject + +- (instancetype)initWithWebRTCModule:(WebRTCModule *)module; + +// Methods to receive results from JS +- (void)resolveEngineCreatedWithResult:(NSInteger)result; +- (void)resolveWillEnableEngineWithResult:(NSInteger)result; +- (void)resolveWillStartEngineWithResult:(NSInteger)result; +- (void)resolveDidStopEngineWithResult:(NSInteger)result; +- (void)resolveDidDisableEngineWithResult:(NSInteger)result; +- (void)resolveWillReleaseEngineWithResult:(NSInteger)result; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/RCTWebRTC/AudioDeviceModuleObserver.m b/ios/RCTWebRTC/AudioDeviceModuleObserver.m new file mode 100644 
index 000000000..9a28d8cff --- /dev/null +++ b/ios/RCTWebRTC/AudioDeviceModuleObserver.m @@ -0,0 +1,219 @@ +#import "AudioDeviceModuleObserver.h" +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface AudioDeviceModuleObserver () + +@property(weak, nonatomic) WebRTCModule *module; +@property(nonatomic, strong) dispatch_semaphore_t engineCreatedSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t willEnableEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t willStartEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t didStopEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t didDisableEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t willReleaseEngineSemaphore; + +@property(nonatomic, assign) NSInteger engineCreatedResult; +@property(nonatomic, assign) NSInteger willEnableEngineResult; +@property(nonatomic, assign) NSInteger willStartEngineResult; +@property(nonatomic, assign) NSInteger didStopEngineResult; +@property(nonatomic, assign) NSInteger didDisableEngineResult; +@property(nonatomic, assign) NSInteger willReleaseEngineResult; + +@end + +@implementation AudioDeviceModuleObserver + +- (instancetype)initWithWebRTCModule:(WebRTCModule *)module { + self = [super init]; + if (self) { + self.module = module; + _engineCreatedSemaphore = dispatch_semaphore_create(0); + _willEnableEngineSemaphore = dispatch_semaphore_create(0); + _willStartEngineSemaphore = dispatch_semaphore_create(0); + _didStopEngineSemaphore = dispatch_semaphore_create(0); + _didDisableEngineSemaphore = dispatch_semaphore_create(0); + _willReleaseEngineSemaphore = dispatch_semaphore_create(0); + } + return self; +} + +#pragma mark - RTCAudioDeviceModuleDelegate + +- (void)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + didReceiveSpeechActivityEvent:(RTCSpeechActivityEvent)speechActivityEvent { + NSString *eventType = speechActivityEvent == RTCSpeechActivityEventStarted ? 
@"started" : @"ended"; + + [self.module sendEventWithName:kEventAudioDeviceModuleSpeechActivity + body:@{ + @"event" : eventType, + }]; + + RCTLog(@"[AudioDeviceModuleObserver] Speech activity event: %@", eventType); +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule didCreateEngine:(AVAudioEngine *)engine { + RCTLog(@"[AudioDeviceModuleObserver] Engine created - waiting for JS response"); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineCreated body:@{}]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.engineCreatedSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine created - JS returned: %ld", (long)self.engineCreatedResult); + return self.engineCreatedResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + willEnableEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillEnable + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willEnableEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - JS returned: %ld", (long)self.willEnableEngineResult); + return self.willEnableEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + willStartEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine will start - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, isRecordingEnabled); + + [self.module 
sendEventWithName:kEventAudioDeviceModuleEngineWillStart + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willStartEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine will start - JS returned: %ld", (long)self.willStartEngineResult); + return self.willStartEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + didStopEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidStop + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.didStopEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - JS returned: %ld", (long)self.didStopEngineResult); + return self.didStopEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + didDisableEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidDisable + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.didDisableEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - JS 
returned: %ld", (long)self.didDisableEngineResult); + return self.didDisableEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule willReleaseEngine:(AVAudioEngine *)engine { + RCTLog(@"[AudioDeviceModuleObserver] Engine will release - waiting for JS response"); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillRelease body:@{}]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willReleaseEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine will release - JS returned: %ld", (long)self.willReleaseEngineResult); + return self.willReleaseEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + engine:(AVAudioEngine *)engine + configureInputFromSource:(nullable AVAudioNode *)source + toDestination:(AVAudioNode *)destination + withFormat:(AVAudioFormat *)format + context:(NSDictionary *)context { + RCTLog(@"[AudioDeviceModuleObserver] Configure input - format: %@", format); + return 0; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + engine:(AVAudioEngine *)engine + configureOutputFromSource:(AVAudioNode *)source + toDestination:(nullable AVAudioNode *)destination + withFormat:(AVAudioFormat *)format + context:(NSDictionary *)context { + RCTLog(@"[AudioDeviceModuleObserver] Configure output - format: %@", format); + return 0; +} + +- (void)audioDeviceModuleDidUpdateDevices:(RTCAudioDeviceModule *)audioDeviceModule { + [self.module sendEventWithName:kEventAudioDeviceModuleDevicesUpdated body:@{}]; + + RCTLog(@"[AudioDeviceModuleObserver] Devices updated"); +} + +#pragma mark - Resolve methods from JS + +- (void)resolveEngineCreatedWithResult:(NSInteger)result { + self.engineCreatedResult = result; + dispatch_semaphore_signal(self.engineCreatedSemaphore); +} + +- (void)resolveWillEnableEngineWithResult:(NSInteger)result { + self.willEnableEngineResult = result; + 
dispatch_semaphore_signal(self.willEnableEngineSemaphore); +} + +- (void)resolveWillStartEngineWithResult:(NSInteger)result { + self.willStartEngineResult = result; + dispatch_semaphore_signal(self.willStartEngineSemaphore); +} + +- (void)resolveDidStopEngineWithResult:(NSInteger)result { + self.didStopEngineResult = result; + dispatch_semaphore_signal(self.didStopEngineSemaphore); +} + +- (void)resolveDidDisableEngineWithResult:(NSInteger)result { + self.didDisableEngineResult = result; + dispatch_semaphore_signal(self.didDisableEngineSemaphore); +} + +- (void)resolveWillReleaseEngineWithResult:(NSInteger)result { + self.willReleaseEngineResult = result; + dispatch_semaphore_signal(self.willReleaseEngineSemaphore); +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h new file mode 100644 index 000000000..32fcd47f5 --- /dev/null +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h @@ -0,0 +1,5 @@ +#import "WebRTCModule.h" + +@interface WebRTCModule (RTCAudioDeviceModule) + +@end diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m new file mode 100644 index 000000000..9870253bd --- /dev/null +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -0,0 +1,215 @@ +#import + +#import +#import + +#import "AudioDeviceModuleObserver.h" +#import "WebRTCModule.h" + +@implementation WebRTCModule (RTCAudioDeviceModule) + +#pragma mark - Recording & Playback Control + +RCT_EXPORT_METHOD(audioDeviceModuleStartPlayout : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) + reject) { + NSInteger result = [self.audioDeviceModule startPlayout]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"playout_error", [NSString stringWithFormat:@"Failed to start playout: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStopPlayout : (RCTPromiseResolveBlock)resolve 
rejecter : (RCTPromiseRejectBlock) + reject) { + NSInteger result = [self.audioDeviceModule stopPlayout]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"playout_error", [NSString stringWithFormat:@"Failed to stop playout: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStartRecording : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) + reject) { + NSInteger result = [self.audioDeviceModule startRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to start recording: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStopRecording : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) + reject) { + NSInteger result = [self.audioDeviceModule stopRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to stop recording: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStartLocalRecording : (RCTPromiseResolveBlock) + resolve rejecter : (RCTPromiseRejectBlock)reject) { + NSError *error = nil; + AVAudioSession *session = [AVAudioSession sharedInstance]; + + // Set category to PlayAndRecord with some options + [session setCategory:AVAudioSessionCategoryPlayAndRecord + withOptions:(AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth) + error:&error]; + if (error) { + NSLog(@"Error setting category: %@", error); + } + + // Activate the session + [session setActive:YES error:&error]; + if (error) { + NSLog(@"Error activating session: %@", error); + } + + NSInteger result = [self.audioDeviceModule initAndStartRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to start local recording: %ld", (long)result], nil); + } +} + 
+RCT_EXPORT_METHOD(audioDeviceModuleStopLocalRecording : (RCTPromiseResolveBlock) + resolve rejecter : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule stopRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to stop local recording: %ld", (long)result], nil); + } +} + +#pragma mark - Microphone Control + +RCT_EXPORT_METHOD(audioDeviceModuleSetMicrophoneMuted : (BOOL)muted resolver : (RCTPromiseResolveBlock) + resolve rejecter : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setMicrophoneMuted:muted]; + if (result == 0) { + resolve(@{@"success" : @YES, @"muted" : @(muted)}); + } else { + reject(@"mute_error", [NSString stringWithFormat:@"Failed to set microphone mute: %ld", (long)result], nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsMicrophoneMuted) { + return @(self.audioDeviceModule.isMicrophoneMuted); +} + +#pragma mark - Voice Processing + +RCT_EXPORT_METHOD(audioDeviceModuleSetVoiceProcessingEnabled : (BOOL)enabled resolver : (RCTPromiseResolveBlock) + resolve rejecter : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setVoiceProcessingEnabled:enabled]; + if (result == 0) { + resolve(@{@"success" : @YES, @"enabled" : @(enabled)}); + } else { + reject(@"voice_processing_error", [NSString stringWithFormat:@"Failed to set voice processing: %ld", (long)result], + nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingEnabled) { + return @(self.audioDeviceModule.isVoiceProcessingEnabled); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingBypassed : (BOOL)bypassed) { + self.audioDeviceModule.voiceProcessingBypassed = bypassed; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingBypassed) { + return @(self.audioDeviceModule.isVoiceProcessingBypassed); +} + 
+RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingAGCEnabled : (BOOL)enabled) { + self.audioDeviceModule.voiceProcessingAGCEnabled = enabled; + return @{@"success" : @YES, @"enabled" : @(enabled)}; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingAGCEnabled) { + return @(self.audioDeviceModule.isVoiceProcessingAGCEnabled); +} + +#pragma mark - Status + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsPlaying) { return @(self.audioDeviceModule.isPlaying); } + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsRecording) { return @(self.audioDeviceModule.isRecording); } + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsEngineRunning) { + return @(self.audioDeviceModule.isEngineRunning); +} + +#pragma mark - Advanced Features + +RCT_EXPORT_METHOD(audioDeviceModuleSetMuteMode : (NSInteger)mode resolver : (RCTPromiseResolveBlock) + resolve rejecter : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setMuteMode:(RTCAudioEngineMuteMode)mode]; + if (result == 0) { + resolve(@{@"success" : @YES, @"mode" : @(mode)}); + } else { + reject(@"mute_mode_error", [NSString stringWithFormat:@"Failed to set mute mode: %ld", (long)result], nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetMuteMode) { return @(self.audioDeviceModule.muteMode); } + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetAdvancedDuckingEnabled : (BOOL)enabled) { + self.audioDeviceModule.advancedDuckingEnabled = enabled; + return @{@"success" : @YES, @"enabled" : @(enabled)}; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsAdvancedDuckingEnabled) { + return @(self.audioDeviceModule.isAdvancedDuckingEnabled); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetDuckingLevel : (NSInteger)level) { + self.audioDeviceModule.duckingLevel = level; + return @{@"success" : @YES, @"level" : @(level)}; +} + 
+RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetDuckingLevel) { + return @(self.audioDeviceModule.duckingLevel); +} + +#pragma mark - Observer Delegate Response Methods + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveEngineCreated : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveEngineCreatedWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillEnableEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveWillEnableEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillStartEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveWillStartEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveDidStopEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveDidStopEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveDidDisableEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveDidDisableEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillReleaseEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveWillReleaseEngineWithResult:result]; + return nil; +} + +@end diff --git a/ios/RCTWebRTC/WebRTCModule.h b/ios/RCTWebRTC/WebRTCModule.h index 4e0767743..7badb2552 100644 --- a/ios/RCTWebRTC/WebRTCModule.h +++ b/ios/RCTWebRTC/WebRTCModule.h @@ -22,6 +22,16 @@ static NSString *const kEventMediaStreamTrackEnded = @"mediaStreamTrackEnded"; static NSString *const kEventPeerConnectionOnRemoveTrack = @"peerConnectionOnRemoveTrack"; static NSString *const kEventPeerConnectionOnTrack = @"peerConnectionOnTrack"; static NSString *const kEventFrameCryptionStateChanged = @"frameCryptionStateChanged"; +static NSString *const kEventAudioDeviceModuleSpeechActivity = 
@"audioDeviceModuleSpeechActivity"; +static NSString *const kEventAudioDeviceModuleEngineCreated = @"audioDeviceModuleEngineCreated"; +static NSString *const kEventAudioDeviceModuleEngineWillEnable = @"audioDeviceModuleEngineWillEnable"; +static NSString *const kEventAudioDeviceModuleEngineWillStart = @"audioDeviceModuleEngineWillStart"; +static NSString *const kEventAudioDeviceModuleEngineDidStop = @"audioDeviceModuleEngineDidStop"; +static NSString *const kEventAudioDeviceModuleEngineDidDisable = @"audioDeviceModuleEngineDidDisable"; +static NSString *const kEventAudioDeviceModuleEngineWillRelease = @"audioDeviceModuleEngineWillRelease"; +static NSString *const kEventAudioDeviceModuleDevicesUpdated = @"audioDeviceModuleDevicesUpdated"; + +@class AudioDeviceModuleObserver; @interface WebRTCModule : RCTEventEmitter @@ -39,6 +49,9 @@ static NSString *const kEventFrameCryptionStateChanged = @"frameCryptionStateCha @property(nonatomic, strong) NSMutableDictionary *keyProviders; @property(nonatomic, strong) NSMutableDictionary *dataPacketCryptors; +@property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; +@property(nonatomic, strong) AudioDeviceModuleObserver *audioDeviceModuleObserver; + - (RTCMediaStream *)streamForReactTag:(NSString *)reactTag; @end diff --git a/ios/RCTWebRTC/WebRTCModule.m b/ios/RCTWebRTC/WebRTCModule.m index 51397a17b..36798e17c 100644 --- a/ios/RCTWebRTC/WebRTCModule.m +++ b/ios/RCTWebRTC/WebRTCModule.m @@ -10,6 +10,7 @@ #import "WebRTCModule+RTCPeerConnection.h" #import "WebRTCModule.h" #import "WebRTCModuleOptions.h" +#import "AudioDeviceModuleObserver.h" @interface WebRTCModule () @end @@ -70,23 +71,19 @@ - (instancetype)init { RCTLogInfo(@"Using video encoder factory: %@", NSStringFromClass([encoderFactory class])); RCTLogInfo(@"Using video decoder factory: %@", NSStringFromClass([decoderFactory class])); - if (audioProcessingModule != nil) { - if (audioDevice != nil) { - NSLog(@"Both audioProcessingModule and audioDevice are 
provided, but only one can be used. Ignoring " - @"audioDevice."); - } - RCTLogInfo(@"Using audio processing module: %@", NSStringFromClass([audioProcessingModule class])); - _peerConnectionFactory = - [[RTCPeerConnectionFactory alloc] initWithAudioDeviceModuleType:RTCAudioDeviceModuleTypeAudioEngine - bypassVoiceProcessing:NO - encoderFactory:encoderFactory - decoderFactory:decoderFactory - audioProcessingModule:audioProcessingModule]; + if (audioDevice == nil) { + RCTLogInfo(@"Using audio processing module: %@", NSStringFromClass([audioProcessingModule class])); + _peerConnectionFactory = + [[RTCPeerConnectionFactory alloc] initWithAudioDeviceModuleType:RTCAudioDeviceModuleTypeAudioEngine + bypassVoiceProcessing:NO + encoderFactory:encoderFactory + decoderFactory:decoderFactory + audioProcessingModule:audioProcessingModule]; } else { - RCTLogInfo(@"Using audio device: %@", NSStringFromClass([audioDevice class])); - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory - audioDevice:audioDevice]; + RCTLogInfo(@"Using audio device: %@", NSStringFromClass([audioDevice class])); + _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory + decoderFactory:decoderFactory + audioDevice:audioDevice]; } _peerConnections = [NSMutableDictionary new]; @@ -97,6 +94,10 @@ - (instancetype)init { _keyProviders = [NSMutableDictionary new]; _dataPacketCryptors = [NSMutableDictionary new]; + _audioDeviceModule = _peerConnectionFactory.audioDeviceModule; + _audioDeviceModuleObserver = [[AudioDeviceModuleObserver alloc] initWithWebRTCModule:self]; + _audioDeviceModule.observer = _audioDeviceModuleObserver; + dispatch_queue_attr_t attributes = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INITIATED, -1); _workerQueue = dispatch_queue_create("WebRTCModule.queue", attributes); @@ -141,7 +142,15 @@ - (dispatch_queue_t)methodQueue { 
kEventMediaStreamTrackEnded, kEventPeerConnectionOnRemoveTrack, kEventPeerConnectionOnTrack, - kEventFrameCryptionStateChanged + kEventFrameCryptionStateChanged, + kEventAudioDeviceModuleSpeechActivity, + kEventAudioDeviceModuleEngineCreated, + kEventAudioDeviceModuleEngineWillEnable, + kEventAudioDeviceModuleEngineWillStart, + kEventAudioDeviceModuleEngineDidStop, + kEventAudioDeviceModuleEngineDidDisable, + kEventAudioDeviceModuleEngineWillRelease, + kEventAudioDeviceModuleDevicesUpdated ]; } diff --git a/src/AudioDeviceModule.ts b/src/AudioDeviceModule.ts new file mode 100644 index 000000000..2e5a8c973 --- /dev/null +++ b/src/AudioDeviceModule.ts @@ -0,0 +1,246 @@ +import { NativeModules, Platform } from 'react-native'; + +const { WebRTCModule } = NativeModules; + +export enum AudioEngineMuteMode { + Unknown = -1, + VoiceProcessing = 0, + RestartEngine = 1, + InputMixer = 2, +} + +/** + * Audio Device Module API for controlling audio devices and settings. + * iOS/macOS only - will throw on Android. 
+ */ +export class AudioDeviceModule { + /** + * Start audio playback + */ + static async startPlayout(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleStartPlayout(); + } + + /** + * Stop audio playback + */ + static async stopPlayout(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleStopPlayout(); + } + + /** + * Start audio recording + */ + static async startRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleStartRecording(); + } + + /** + * Stop audio recording + */ + static async stopRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleStopRecording(); + } + + /** + * Initialize and start local audio recording (calls initAndStartRecording) + */ + static async startLocalRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleStartLocalRecording(); + } + + /** + * Stop local audio recording + */ + static async stopLocalRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleStopLocalRecording(); + } + + /** + * Mute or unmute the microphone + */ + static async setMicrophoneMuted(muted: boolean): Promise<{ success: boolean; muted: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + 
} + return WebRTCModule.audioDeviceModuleSetMicrophoneMuted(muted); + } + + /** + * Check if microphone is currently muted + */ + static isMicrophoneMuted(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsMicrophoneMuted(); + } + + /** + * Enable or disable voice processing (requires engine restart) + */ + static async setVoiceProcessingEnabled(enabled: boolean): Promise<{ success: boolean; enabled: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleSetVoiceProcessingEnabled(enabled); + } + + /** + * Check if voice processing is enabled + */ + static isVoiceProcessingEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsVoiceProcessingEnabled(); + } + + /** + * Temporarily bypass voice processing without restarting the engine + */ + static setVoiceProcessingBypassed(bypassed: boolean): void { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + WebRTCModule.audioDeviceModuleSetVoiceProcessingBypassed(bypassed); + } + + /** + * Check if voice processing is currently bypassed + */ + static isVoiceProcessingBypassed(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsVoiceProcessingBypassed(); + } + + /** + * Enable or disable Automatic Gain Control (AGC) + */ + static setVoiceProcessingAGCEnabled(enabled: boolean): { success: boolean; enabled: boolean } { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleSetVoiceProcessingAGCEnabled(enabled); + } + + /** + * Check 
if AGC is enabled + */ + static isVoiceProcessingAGCEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsVoiceProcessingAGCEnabled(); + } + + /** + * Check if audio is currently playing + */ + static isPlaying(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsPlaying(); + } + + /** + * Check if audio is currently recording + */ + static isRecording(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsRecording(); + } + + /** + * Check if the audio engine is running + */ + static isEngineRunning(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsEngineRunning(); + } + + /** + * Set the microphone mute mode + */ + static async setMuteMode(mode: AudioEngineMuteMode): Promise<{ success: boolean; mode: AudioEngineMuteMode }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleSetMuteMode(mode); + } + + /** + * Get the current mute mode + */ + static getMuteMode(): AudioEngineMuteMode { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleGetMuteMode(); + } + + /** + * Enable or disable advanced audio ducking + */ + static setAdvancedDuckingEnabled(enabled: boolean): { success: boolean; enabled: boolean } { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleSetAdvancedDuckingEnabled(enabled); + } + + /** + * Check if advanced ducking 
is enabled + */ + static isAdvancedDuckingEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleIsAdvancedDuckingEnabled(); + } + + /** + * Set the audio ducking level (0-100) + */ + static setDuckingLevel(level: number): { success: boolean; level: number } { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleSetDuckingLevel(level); + } + + /** + * Get the current ducking level + */ + static getDuckingLevel(): number { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + return WebRTCModule.audioDeviceModuleGetDuckingLevel(); + } +} diff --git a/src/AudioDeviceModuleEvents.ts b/src/AudioDeviceModuleEvents.ts new file mode 100644 index 000000000..a695f95ce --- /dev/null +++ b/src/AudioDeviceModuleEvents.ts @@ -0,0 +1,199 @@ +import { NativeEventEmitter, NativeModules, Platform } from 'react-native'; + +const { WebRTCModule } = NativeModules; + +export type SpeechActivityEvent = 'started' | 'ended'; + +export interface SpeechActivityEventData { + event: SpeechActivityEvent; +} + +export interface EngineStateEventData { + isPlayoutEnabled: boolean; + isRecordingEnabled: boolean; +} + +export type AudioDeviceModuleEventType = + | 'speechActivity' + | 'devicesUpdated'; + +export type AudioDeviceModuleEventData = + | SpeechActivityEventData + | EngineStateEventData + | Record; // Empty object for events with no data + +export type AudioDeviceModuleEventListener = (data: AudioDeviceModuleEventData) => void; + +/** + * Handler function that must return a number (0 for success, non-zero for error) + */ +export type AudioEngineEventNoParamsHandler = () => Promise; +export type AudioEngineEventHandler = ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => 
Promise; + +/** + * Event emitter for RTCAudioDeviceModule delegate callbacks. + * iOS/macOS only. + */ +class AudioDeviceModuleEventEmitter { + private eventEmitter: NativeEventEmitter | null = null; + private engineCreatedHandler: AudioEngineEventNoParamsHandler | null = null; + private willEnableEngineHandler: AudioEngineEventHandler | null = null; + private willStartEngineHandler: AudioEngineEventHandler | null = null; + private didStopEngineHandler: AudioEngineEventHandler | null = null; + private didDisableEngineHandler: AudioEngineEventHandler | null = null; + private willReleaseEngineHandler: AudioEngineEventNoParamsHandler | null = null; + + constructor() { + if (Platform.OS !== 'android' && WebRTCModule) { + this.eventEmitter = new NativeEventEmitter(WebRTCModule); + + // Setup handlers for blocking delegate methods + this.eventEmitter.addListener('audioDeviceModuleEngineCreated', async () => { + let result = 0; + if (this.engineCreatedHandler) { + try { + await this.engineCreatedHandler(); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + WebRTCModule.audioDeviceModuleResolveEngineCreated(result); + }); + + this.eventEmitter.addListener('audioDeviceModuleEngineWillEnable', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { + let result = 0; + if (this.willEnableEngineHandler) { + try { + await this.willEnableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? 
error : -1; + } + } + WebRTCModule.audioDeviceModuleResolveWillEnableEngine(result); + }); + + this.eventEmitter.addListener('audioDeviceModuleEngineWillStart', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { + let result = 0; + if (this.willStartEngineHandler) { + try { + await this.willStartEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + WebRTCModule.audioDeviceModuleResolveWillStartEngine(result); + }); + + this.eventEmitter.addListener('audioDeviceModuleEngineDidStop', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { + let result = 0; + if (this.didStopEngineHandler) { + try { + await this.didStopEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + WebRTCModule.audioDeviceModuleResolveDidStopEngine(result); + }); + + this.eventEmitter.addListener('audioDeviceModuleEngineDidDisable', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { + let result = 0; + if (this.didDisableEngineHandler) { + try { + await this.didDisableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? 
error : -1; + } + } + WebRTCModule.audioDeviceModuleResolveDidDisableEngine(result); + }); + + this.eventEmitter.addListener('audioDeviceModuleEngineWillRelease', async () => { + let result = 0; + if (this.willReleaseEngineHandler) { + try { + await this.willReleaseEngineHandler(); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + WebRTCModule.audioDeviceModuleResolveWillReleaseEngine(result); + }); + } + } + /** + * Subscribe to speech activity events (started/ended) + */ + addSpeechActivityListener(listener: (data: SpeechActivityEventData) => void) { + if (!this.eventEmitter) { + throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + } + return this.eventEmitter.addListener('audioDeviceModuleSpeechActivity', listener); + } + + /** + * Subscribe to devices updated event (input/output devices changed) + */ + addDevicesUpdatedListener(listener: () => void) { + if (!this.eventEmitter) { + throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + } + return this.eventEmitter.addListener('audioDeviceModuleDevicesUpdated', listener); + } + + /** + * Set handler for engine created delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns + */ + setEngineCreatedHandler(handler: AudioEngineEventNoParamsHandler | null) { + this.engineCreatedHandler = handler; + } + + /** + * Set handler for will enable engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns + */ + setWillEnableEngineHandler(handler: AudioEngineEventHandler | null) { + this.willEnableEngineHandler = handler; + } + + /** + * Set handler for will start engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns + */ + setWillStartEngineHandler(handler: AudioEngineEventHandler | null) { + 
this.willStartEngineHandler = handler; + } + + /** + * Set handler for did stop engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns + */ + setDidStopEngineHandler(handler: AudioEngineEventHandler | null) { + this.didStopEngineHandler = handler; + } + + /** + * Set handler for did disable engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns + */ + setDidDisableEngineHandler(handler: AudioEngineEventHandler | null) { + this.didDisableEngineHandler = handler; + } + + /** + * Set handler for will release engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns + */ + setWillReleaseEngineHandler(handler: AudioEngineEventNoParamsHandler | null) { + this.willReleaseEngineHandler = handler; + } +} + +export const audioDeviceModuleEvents = new AudioDeviceModuleEventEmitter(); diff --git a/src/index.ts b/src/index.ts index eaedc773e..344cf4a5c 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,6 +8,8 @@ if (WebRTCModule === null) { }`); } +import { AudioDeviceModule, AudioEngineMuteMode } from './AudioDeviceModule'; +import { audioDeviceModuleEvents } from './AudioDeviceModuleEvents'; import { setupNativeEvents } from './EventEmitter'; import Logger from './Logger'; import mediaDevices from './MediaDevices'; @@ -68,6 +70,9 @@ export { registerGlobals, startIOSPIP, stopIOSPIP, + AudioDeviceModule, + AudioEngineMuteMode, + audioDeviceModuleEvents, }; declare const global: any; From 7696e78b334dd2c0450c4d4554cda302727e6e49 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 12:26:03 +0800 Subject: [PATCH 2/8] Remove test code --- .../WebRTCModule+RTCAudioDeviceModule.m | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m 
b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m index 9870253bd..ad7bf7bd8 100644 --- a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -52,23 +52,6 @@ @implementation WebRTCModule (RTCAudioDeviceModule) RCT_EXPORT_METHOD(audioDeviceModuleStartLocalRecording : (RCTPromiseResolveBlock) resolve rejecter : (RCTPromiseRejectBlock)reject) { - NSError *error = nil; - AVAudioSession *session = [AVAudioSession sharedInstance]; - - // Set category to PlayAndRecord with some options - [session setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:(AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth) - error:&error]; - if (error) { - NSLog(@"Error setting category: %@", error); - } - - // Activate the session - [session setActive:YES error:&error]; - if (error) { - NSLog(@"Error activating session: %@", error); - } - NSInteger result = [self.audioDeviceModule initAndStartRecording]; if (result == 0) { resolve(@{@"success" : @YES}); From 770a2353c4f6e5bf42145adbb6dd9e7b8d555db2 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 12:39:25 +0800 Subject: [PATCH 3/8] Update format --- ios/RCTWebRTC/AudioDeviceModuleObserver.m | 190 +++++------ .../WebRTCModule+RTCAudioDeviceModule.m | 179 ++++++----- src/AudioDeviceModule.ts | 299 ++++++++++-------- src/AudioDeviceModuleEvents.ts | 287 +++++++++-------- 4 files changed, 514 insertions(+), 441 deletions(-) diff --git a/ios/RCTWebRTC/AudioDeviceModuleObserver.m b/ios/RCTWebRTC/AudioDeviceModuleObserver.m index 9a28d8cff..b619a5c37 100644 --- a/ios/RCTWebRTC/AudioDeviceModuleObserver.m +++ b/ios/RCTWebRTC/AudioDeviceModuleObserver.m @@ -25,135 +25,143 @@ @interface AudioDeviceModuleObserver () @implementation AudioDeviceModuleObserver - (instancetype)initWithWebRTCModule:(WebRTCModule *)module { - self = [super init]; - if (self) { - self.module = module; - 
_engineCreatedSemaphore = dispatch_semaphore_create(0); - _willEnableEngineSemaphore = dispatch_semaphore_create(0); - _willStartEngineSemaphore = dispatch_semaphore_create(0); - _didStopEngineSemaphore = dispatch_semaphore_create(0); - _didDisableEngineSemaphore = dispatch_semaphore_create(0); - _willReleaseEngineSemaphore = dispatch_semaphore_create(0); - } - return self; + self = [super init]; + if (self) { + self.module = module; + _engineCreatedSemaphore = dispatch_semaphore_create(0); + _willEnableEngineSemaphore = dispatch_semaphore_create(0); + _willStartEngineSemaphore = dispatch_semaphore_create(0); + _didStopEngineSemaphore = dispatch_semaphore_create(0); + _didDisableEngineSemaphore = dispatch_semaphore_create(0); + _willReleaseEngineSemaphore = dispatch_semaphore_create(0); + } + return self; } #pragma mark - RTCAudioDeviceModuleDelegate - (void)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule didReceiveSpeechActivityEvent:(RTCSpeechActivityEvent)speechActivityEvent { - NSString *eventType = speechActivityEvent == RTCSpeechActivityEventStarted ? @"started" : @"ended"; + NSString *eventType = speechActivityEvent == RTCSpeechActivityEventStarted ? 
@"started" : @"ended"; - [self.module sendEventWithName:kEventAudioDeviceModuleSpeechActivity - body:@{ - @"event" : eventType, - }]; + [self.module sendEventWithName:kEventAudioDeviceModuleSpeechActivity + body:@{ + @"event" : eventType, + }]; - RCTLog(@"[AudioDeviceModuleObserver] Speech activity event: %@", eventType); + RCTLog(@"[AudioDeviceModuleObserver] Speech activity event: %@", eventType); } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule didCreateEngine:(AVAudioEngine *)engine { - RCTLog(@"[AudioDeviceModuleObserver] Engine created - waiting for JS response"); + RCTLog(@"[AudioDeviceModuleObserver] Engine created - waiting for JS response"); - [self.module sendEventWithName:kEventAudioDeviceModuleEngineCreated body:@{}]; + [self.module sendEventWithName:kEventAudioDeviceModuleEngineCreated body:@{}]; - // Wait indefinitely for JS to respond - dispatch_semaphore_wait(self.engineCreatedSemaphore, DISPATCH_TIME_FOREVER); + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.engineCreatedSemaphore, DISPATCH_TIME_FOREVER); - RCTLog(@"[AudioDeviceModuleObserver] Engine created - JS returned: %ld", (long)self.engineCreatedResult); - return self.engineCreatedResult; + RCTLog(@"[AudioDeviceModuleObserver] Engine created - JS returned: %ld", (long)self.engineCreatedResult); + return self.engineCreatedResult; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule willEnableEngine:(AVAudioEngine *)engine isPlayoutEnabled:(BOOL)isPlayoutEnabled isRecordingEnabled:(BOOL)isRecordingEnabled { - RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - playout: %d, recording: %d - waiting for JS response", - isPlayoutEnabled, isRecordingEnabled); + RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); - [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillEnable - body:@{ - @"isPlayoutEnabled" : 
@(isPlayoutEnabled), - @"isRecordingEnabled" : @(isRecordingEnabled), - }]; + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillEnable + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; - // Wait indefinitely for JS to respond - dispatch_semaphore_wait(self.willEnableEngineSemaphore, DISPATCH_TIME_FOREVER); + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willEnableEngineSemaphore, DISPATCH_TIME_FOREVER); - RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - JS returned: %ld", (long)self.willEnableEngineResult); - return self.willEnableEngineResult; + RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - JS returned: %ld", (long)self.willEnableEngineResult); + + AVAudioSession *audioSession = [AVAudioSession sharedInstance]; + RCTLog(@"[AudioDeviceModuleObserver] Audio session category: %@", audioSession.category); + + return self.willEnableEngineResult; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule willStartEngine:(AVAudioEngine *)engine isPlayoutEnabled:(BOOL)isPlayoutEnabled isRecordingEnabled:(BOOL)isRecordingEnabled { - RCTLog(@"[AudioDeviceModuleObserver] Engine will start - playout: %d, recording: %d - waiting for JS response", - isPlayoutEnabled, isRecordingEnabled); + RCTLog(@"[AudioDeviceModuleObserver] Engine will start - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); - [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillStart - body:@{ - @"isPlayoutEnabled" : @(isPlayoutEnabled), - @"isRecordingEnabled" : @(isRecordingEnabled), - }]; + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillStart + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; - // Wait indefinitely for JS to respond - dispatch_semaphore_wait(self.willStartEngineSemaphore, DISPATCH_TIME_FOREVER); + // Wait indefinitely for 
JS to respond + dispatch_semaphore_wait(self.willStartEngineSemaphore, DISPATCH_TIME_FOREVER); - RCTLog(@"[AudioDeviceModuleObserver] Engine will start - JS returned: %ld", (long)self.willStartEngineResult); - return self.willStartEngineResult; + RCTLog(@"[AudioDeviceModuleObserver] Engine will start - JS returned: %ld", (long)self.willStartEngineResult); + return self.willStartEngineResult; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule didStopEngine:(AVAudioEngine *)engine isPlayoutEnabled:(BOOL)isPlayoutEnabled isRecordingEnabled:(BOOL)isRecordingEnabled { - RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - playout: %d, recording: %d - waiting for JS response", - isPlayoutEnabled, isRecordingEnabled); + RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); - [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidStop - body:@{ - @"isPlayoutEnabled" : @(isPlayoutEnabled), - @"isRecordingEnabled" : @(isRecordingEnabled), - }]; + [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidStop + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; - // Wait indefinitely for JS to respond - dispatch_semaphore_wait(self.didStopEngineSemaphore, DISPATCH_TIME_FOREVER); + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.didStopEngineSemaphore, DISPATCH_TIME_FOREVER); - RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - JS returned: %ld", (long)self.didStopEngineResult); - return self.didStopEngineResult; + RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - JS returned: %ld", (long)self.didStopEngineResult); + return self.didStopEngineResult; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule didDisableEngine:(AVAudioEngine *)engine isPlayoutEnabled:(BOOL)isPlayoutEnabled isRecordingEnabled:(BOOL)isRecordingEnabled { - 
RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - playout: %d, recording: %d - waiting for JS response", - isPlayoutEnabled, isRecordingEnabled); + RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); - [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidDisable - body:@{ - @"isPlayoutEnabled" : @(isPlayoutEnabled), - @"isRecordingEnabled" : @(isRecordingEnabled), - }]; + [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidDisable + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; - // Wait indefinitely for JS to respond - dispatch_semaphore_wait(self.didDisableEngineSemaphore, DISPATCH_TIME_FOREVER); + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.didDisableEngineSemaphore, DISPATCH_TIME_FOREVER); - RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - JS returned: %ld", (long)self.didDisableEngineResult); - return self.didDisableEngineResult; + RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - JS returned: %ld", (long)self.didDisableEngineResult); + return self.didDisableEngineResult; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule willReleaseEngine:(AVAudioEngine *)engine { - RCTLog(@"[AudioDeviceModuleObserver] Engine will release - waiting for JS response"); + RCTLog(@"[AudioDeviceModuleObserver] Engine will release - waiting for JS response"); - [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillRelease body:@{}]; + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillRelease body:@{}]; - // Wait indefinitely for JS to respond - dispatch_semaphore_wait(self.willReleaseEngineSemaphore, DISPATCH_TIME_FOREVER); + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willReleaseEngineSemaphore, DISPATCH_TIME_FOREVER); - RCTLog(@"[AudioDeviceModuleObserver] Engine will 
release - JS returned: %ld", (long)self.willReleaseEngineResult); - return self.willReleaseEngineResult; + RCTLog(@"[AudioDeviceModuleObserver] Engine will release - JS returned: %ld", (long)self.willReleaseEngineResult); + return self.willReleaseEngineResult; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule @@ -162,8 +170,8 @@ - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule toDestination:(AVAudioNode *)destination withFormat:(AVAudioFormat *)format context:(NSDictionary *)context { - RCTLog(@"[AudioDeviceModuleObserver] Configure input - format: %@", format); - return 0; + RCTLog(@"[AudioDeviceModuleObserver] Configure input - format: %@", format); + return 0; } - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule @@ -172,46 +180,46 @@ - (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule toDestination:(nullable AVAudioNode *)destination withFormat:(AVAudioFormat *)format context:(NSDictionary *)context { - RCTLog(@"[AudioDeviceModuleObserver] Configure output - format: %@", format); - return 0; + RCTLog(@"[AudioDeviceModuleObserver] Configure output - format: %@", format); + return 0; } - (void)audioDeviceModuleDidUpdateDevices:(RTCAudioDeviceModule *)audioDeviceModule { - [self.module sendEventWithName:kEventAudioDeviceModuleDevicesUpdated body:@{}]; + [self.module sendEventWithName:kEventAudioDeviceModuleDevicesUpdated body:@{}]; - RCTLog(@"[AudioDeviceModuleObserver] Devices updated"); + RCTLog(@"[AudioDeviceModuleObserver] Devices updated"); } #pragma mark - Resolve methods from JS - (void)resolveEngineCreatedWithResult:(NSInteger)result { - self.engineCreatedResult = result; - dispatch_semaphore_signal(self.engineCreatedSemaphore); + self.engineCreatedResult = result; + dispatch_semaphore_signal(self.engineCreatedSemaphore); } - (void)resolveWillEnableEngineWithResult:(NSInteger)result { - self.willEnableEngineResult = result; - 
dispatch_semaphore_signal(self.willEnableEngineSemaphore); + self.willEnableEngineResult = result; + dispatch_semaphore_signal(self.willEnableEngineSemaphore); } - (void)resolveWillStartEngineWithResult:(NSInteger)result { - self.willStartEngineResult = result; - dispatch_semaphore_signal(self.willStartEngineSemaphore); + self.willStartEngineResult = result; + dispatch_semaphore_signal(self.willStartEngineSemaphore); } - (void)resolveDidStopEngineWithResult:(NSInteger)result { - self.didStopEngineResult = result; - dispatch_semaphore_signal(self.didStopEngineSemaphore); + self.didStopEngineResult = result; + dispatch_semaphore_signal(self.didStopEngineSemaphore); } - (void)resolveDidDisableEngineWithResult:(NSInteger)result { - self.didDisableEngineResult = result; - dispatch_semaphore_signal(self.didDisableEngineSemaphore); + self.didDisableEngineResult = result; + dispatch_semaphore_signal(self.didDisableEngineSemaphore); } - (void)resolveWillReleaseEngineWithResult:(NSInteger)result { - self.willReleaseEngineResult = result; - dispatch_semaphore_signal(self.willReleaseEngineSemaphore); + self.willReleaseEngineResult = result; + dispatch_semaphore_signal(self.willReleaseEngineSemaphore); } @end diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m index ad7bf7bd8..dc5b308f7 100644 --- a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -12,187 +12,196 @@ @implementation WebRTCModule (RTCAudioDeviceModule) RCT_EXPORT_METHOD(audioDeviceModuleStartPlayout : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) reject) { - NSInteger result = [self.audioDeviceModule startPlayout]; - if (result == 0) { - resolve(@{@"success" : @YES}); - } else { - reject(@"playout_error", [NSString stringWithFormat:@"Failed to start playout: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule startPlayout]; + if (result == 0) { + 
resolve(@{@"success" : @YES}); + } else { + reject(@"playout_error", [NSString stringWithFormat:@"Failed to start playout: %ld", (long)result], nil); + } } RCT_EXPORT_METHOD(audioDeviceModuleStopPlayout : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) reject) { - NSInteger result = [self.audioDeviceModule stopPlayout]; - if (result == 0) { - resolve(@{@"success" : @YES}); - } else { - reject(@"playout_error", [NSString stringWithFormat:@"Failed to stop playout: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule stopPlayout]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"playout_error", [NSString stringWithFormat:@"Failed to stop playout: %ld", (long)result], nil); + } } RCT_EXPORT_METHOD(audioDeviceModuleStartRecording : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) reject) { - NSInteger result = [self.audioDeviceModule startRecording]; - if (result == 0) { - resolve(@{@"success" : @YES}); - } else { - reject(@"recording_error", [NSString stringWithFormat:@"Failed to start recording: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule startRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to start recording: %ld", (long)result], nil); + } } RCT_EXPORT_METHOD(audioDeviceModuleStopRecording : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) reject) { - NSInteger result = [self.audioDeviceModule stopRecording]; - if (result == 0) { - resolve(@{@"success" : @YES}); - } else { - reject(@"recording_error", [NSString stringWithFormat:@"Failed to stop recording: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule stopRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to stop recording: %ld", (long)result], nil); + } } 
RCT_EXPORT_METHOD(audioDeviceModuleStartLocalRecording : (RCTPromiseResolveBlock) resolve rejecter : (RCTPromiseRejectBlock)reject) { - NSInteger result = [self.audioDeviceModule initAndStartRecording]; - if (result == 0) { - resolve(@{@"success" : @YES}); - } else { - reject(@"recording_error", [NSString stringWithFormat:@"Failed to start local recording: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule initAndStartRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject( + @"recording_error", [NSString stringWithFormat:@"Failed to start local recording: %ld", (long)result], nil); + } } RCT_EXPORT_METHOD(audioDeviceModuleStopLocalRecording : (RCTPromiseResolveBlock) resolve rejecter : (RCTPromiseRejectBlock)reject) { - NSInteger result = [self.audioDeviceModule stopRecording]; - if (result == 0) { - resolve(@{@"success" : @YES}); - } else { - reject(@"recording_error", [NSString stringWithFormat:@"Failed to stop local recording: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule stopRecording]; + if (result == 0) { + resolve(@{@"success" : @YES}); + } else { + reject( + @"recording_error", [NSString stringWithFormat:@"Failed to stop local recording: %ld", (long)result], nil); + } } #pragma mark - Microphone Control RCT_EXPORT_METHOD(audioDeviceModuleSetMicrophoneMuted : (BOOL)muted resolver : (RCTPromiseResolveBlock) resolve rejecter : (RCTPromiseRejectBlock)reject) { - NSInteger result = [self.audioDeviceModule setMicrophoneMuted:muted]; - if (result == 0) { - resolve(@{@"success" : @YES, @"muted" : @(muted)}); - } else { - reject(@"mute_error", [NSString stringWithFormat:@"Failed to set microphone mute: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule setMicrophoneMuted:muted]; + if (result == 0) { + resolve(@{@"success" : @YES, @"muted" : @(muted)}); + } else { + reject(@"mute_error", [NSString stringWithFormat:@"Failed to set microphone mute: %ld", 
(long)result], nil); + } } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsMicrophoneMuted) { - return @(self.audioDeviceModule.isMicrophoneMuted); + return @(self.audioDeviceModule.isMicrophoneMuted); } #pragma mark - Voice Processing RCT_EXPORT_METHOD(audioDeviceModuleSetVoiceProcessingEnabled : (BOOL)enabled resolver : (RCTPromiseResolveBlock) resolve rejecter : (RCTPromiseRejectBlock)reject) { - NSInteger result = [self.audioDeviceModule setVoiceProcessingEnabled:enabled]; - if (result == 0) { - resolve(@{@"success" : @YES, @"enabled" : @(enabled)}); - } else { - reject(@"voice_processing_error", [NSString stringWithFormat:@"Failed to set voice processing: %ld", (long)result], - nil); - } + NSInteger result = [self.audioDeviceModule setVoiceProcessingEnabled:enabled]; + if (result == 0) { + resolve(@{@"success" : @YES, @"enabled" : @(enabled)}); + } else { + reject(@"voice_processing_error", + [NSString stringWithFormat:@"Failed to set voice processing: %ld", (long)result], + nil); + } } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingEnabled) { - return @(self.audioDeviceModule.isVoiceProcessingEnabled); + return @(self.audioDeviceModule.isVoiceProcessingEnabled); } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingBypassed : (BOOL)bypassed) { - self.audioDeviceModule.voiceProcessingBypassed = bypassed; - return nil; + self.audioDeviceModule.voiceProcessingBypassed = bypassed; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingBypassed) { - return @(self.audioDeviceModule.isVoiceProcessingBypassed); + return @(self.audioDeviceModule.isVoiceProcessingBypassed); } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingAGCEnabled : (BOOL)enabled) { - self.audioDeviceModule.voiceProcessingAGCEnabled = enabled; - return @{@"success" : @YES, @"enabled" : @(enabled)}; + self.audioDeviceModule.voiceProcessingAGCEnabled = enabled; + return @{@"success" : 
@YES, @"enabled" : @(enabled)}; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingAGCEnabled) { - return @(self.audioDeviceModule.isVoiceProcessingAGCEnabled); + return @(self.audioDeviceModule.isVoiceProcessingAGCEnabled); } #pragma mark - Status -RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsPlaying) { return @(self.audioDeviceModule.isPlaying); } +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsPlaying) { + return @(self.audioDeviceModule.isPlaying); +} -RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsRecording) { return @(self.audioDeviceModule.isRecording); } +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsRecording) { + return @(self.audioDeviceModule.isRecording); +} RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsEngineRunning) { - return @(self.audioDeviceModule.isEngineRunning); + return @(self.audioDeviceModule.isEngineRunning); } #pragma mark - Advanced Features RCT_EXPORT_METHOD(audioDeviceModuleSetMuteMode : (NSInteger)mode resolver : (RCTPromiseResolveBlock) resolve rejecter : (RCTPromiseRejectBlock)reject) { - NSInteger result = [self.audioDeviceModule setMuteMode:(RTCAudioEngineMuteMode)mode]; - if (result == 0) { - resolve(@{@"success" : @YES, @"mode" : @(mode)}); - } else { - reject(@"mute_mode_error", [NSString stringWithFormat:@"Failed to set mute mode: %ld", (long)result], nil); - } + NSInteger result = [self.audioDeviceModule setMuteMode:(RTCAudioEngineMuteMode)mode]; + if (result == 0) { + resolve(@{@"success" : @YES, @"mode" : @(mode)}); + } else { + reject(@"mute_mode_error", [NSString stringWithFormat:@"Failed to set mute mode: %ld", (long)result], nil); + } } -RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetMuteMode) { return @(self.audioDeviceModule.muteMode); } +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetMuteMode) { + return @(self.audioDeviceModule.muteMode); +} 
RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetAdvancedDuckingEnabled : (BOOL)enabled) { - self.audioDeviceModule.advancedDuckingEnabled = enabled; - return @{@"success" : @YES, @"enabled" : @(enabled)}; + self.audioDeviceModule.advancedDuckingEnabled = enabled; + return @{@"success" : @YES, @"enabled" : @(enabled)}; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsAdvancedDuckingEnabled) { - return @(self.audioDeviceModule.isAdvancedDuckingEnabled); + return @(self.audioDeviceModule.isAdvancedDuckingEnabled); } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetDuckingLevel : (NSInteger)level) { - self.audioDeviceModule.duckingLevel = level; - return @{@"success" : @YES, @"level" : @(level)}; + self.audioDeviceModule.duckingLevel = level; + return @{@"success" : @YES, @"level" : @(level)}; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetDuckingLevel) { - return @(self.audioDeviceModule.duckingLevel); + return @(self.audioDeviceModule.duckingLevel); } #pragma mark - Observer Delegate Response Methods RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveEngineCreated : (NSInteger)result) { - [self.audioDeviceModuleObserver resolveEngineCreatedWithResult:result]; - return nil; + [self.audioDeviceModuleObserver resolveEngineCreatedWithResult:result]; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillEnableEngine : (NSInteger)result) { - [self.audioDeviceModuleObserver resolveWillEnableEngineWithResult:result]; - return nil; + [self.audioDeviceModuleObserver resolveWillEnableEngineWithResult:result]; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillStartEngine : (NSInteger)result) { - [self.audioDeviceModuleObserver resolveWillStartEngineWithResult:result]; - return nil; + [self.audioDeviceModuleObserver resolveWillStartEngineWithResult:result]; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveDidStopEngine : 
(NSInteger)result) { - [self.audioDeviceModuleObserver resolveDidStopEngineWithResult:result]; - return nil; + [self.audioDeviceModuleObserver resolveDidStopEngineWithResult:result]; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveDidDisableEngine : (NSInteger)result) { - [self.audioDeviceModuleObserver resolveDidDisableEngineWithResult:result]; - return nil; + [self.audioDeviceModuleObserver resolveDidDisableEngineWithResult:result]; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillReleaseEngine : (NSInteger)result) { - [self.audioDeviceModuleObserver resolveWillReleaseEngineWithResult:result]; - return nil; + [self.audioDeviceModuleObserver resolveWillReleaseEngineWithResult:result]; + return nil; } @end diff --git a/src/AudioDeviceModule.ts b/src/AudioDeviceModule.ts index 2e5a8c973..4c2073186 100644 --- a/src/AudioDeviceModule.ts +++ b/src/AudioDeviceModule.ts @@ -14,233 +14,256 @@ export enum AudioEngineMuteMode { * iOS/macOS only - will throw on Android. 
*/ export class AudioDeviceModule { - /** + /** * Start audio playback */ - static async startPlayout(): Promise<{ success: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async startPlayout(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStartPlayout(); } - return WebRTCModule.audioDeviceModuleStartPlayout(); - } - /** + /** * Stop audio playback */ - static async stopPlayout(): Promise<{ success: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async stopPlayout(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStopPlayout(); } - return WebRTCModule.audioDeviceModuleStopPlayout(); - } - /** + /** * Start audio recording */ - static async startRecording(): Promise<{ success: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async startRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStartRecording(); } - return WebRTCModule.audioDeviceModuleStartRecording(); - } - /** + /** * Stop audio recording */ - static async stopRecording(): Promise<{ success: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async stopRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStopRecording(); } - return 
WebRTCModule.audioDeviceModuleStopRecording(); - } - /** + /** * Initialize and start local audio recording (calls initAndStartRecording) */ - static async startLocalRecording(): Promise<{ success: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async startLocalRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStartLocalRecording(); } - return WebRTCModule.audioDeviceModuleStartLocalRecording(); - } - /** + /** * Stop local audio recording */ - static async stopLocalRecording(): Promise<{ success: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async stopLocalRecording(): Promise<{ success: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStopLocalRecording(); } - return WebRTCModule.audioDeviceModuleStopLocalRecording(); - } - /** + /** * Mute or unmute the microphone */ - static async setMicrophoneMuted(muted: boolean): Promise<{ success: boolean; muted: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async setMicrophoneMuted(muted: boolean): Promise<{ success: boolean; muted: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetMicrophoneMuted(muted); } - return WebRTCModule.audioDeviceModuleSetMicrophoneMuted(muted); - } - /** + /** * Check if microphone is currently muted */ - static isMicrophoneMuted(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isMicrophoneMuted(): boolean { 
+ if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsMicrophoneMuted(); } - return WebRTCModule.audioDeviceModuleIsMicrophoneMuted(); - } - /** + /** * Enable or disable voice processing (requires engine restart) */ - static async setVoiceProcessingEnabled(enabled: boolean): Promise<{ success: boolean; enabled: boolean }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async setVoiceProcessingEnabled(enabled: boolean): Promise<{ success: boolean; enabled: boolean }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetVoiceProcessingEnabled(enabled); } - return WebRTCModule.audioDeviceModuleSetVoiceProcessingEnabled(enabled); - } - /** + /** * Check if voice processing is enabled */ - static isVoiceProcessingEnabled(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isVoiceProcessingEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsVoiceProcessingEnabled(); } - return WebRTCModule.audioDeviceModuleIsVoiceProcessingEnabled(); - } - /** + /** * Temporarily bypass voice processing without restarting the engine */ - static setVoiceProcessingBypassed(bypassed: boolean): void { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static setVoiceProcessingBypassed(bypassed: boolean): void { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + WebRTCModule.audioDeviceModuleSetVoiceProcessingBypassed(bypassed); } - WebRTCModule.audioDeviceModuleSetVoiceProcessingBypassed(bypassed); - } - /** + 
/** * Check if voice processing is currently bypassed */ - static isVoiceProcessingBypassed(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isVoiceProcessingBypassed(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsVoiceProcessingBypassed(); } - return WebRTCModule.audioDeviceModuleIsVoiceProcessingBypassed(); - } - /** + /** * Enable or disable Automatic Gain Control (AGC) */ - static setVoiceProcessingAGCEnabled(enabled: boolean): { success: boolean; enabled: boolean } { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static setVoiceProcessingAGCEnabled(enabled: boolean): { success: boolean; enabled: boolean } { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetVoiceProcessingAGCEnabled(enabled); } - return WebRTCModule.audioDeviceModuleSetVoiceProcessingAGCEnabled(enabled); - } - /** + /** * Check if AGC is enabled */ - static isVoiceProcessingAGCEnabled(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isVoiceProcessingAGCEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsVoiceProcessingAGCEnabled(); } - return WebRTCModule.audioDeviceModuleIsVoiceProcessingAGCEnabled(); - } - /** + /** * Check if audio is currently playing */ - static isPlaying(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isPlaying(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + 
return WebRTCModule.audioDeviceModuleIsPlaying(); } - return WebRTCModule.audioDeviceModuleIsPlaying(); - } - /** + /** * Check if audio is currently recording */ - static isRecording(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isRecording(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsRecording(); } - return WebRTCModule.audioDeviceModuleIsRecording(); - } - /** + /** * Check if the audio engine is running */ - static isEngineRunning(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isEngineRunning(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsEngineRunning(); } - return WebRTCModule.audioDeviceModuleIsEngineRunning(); - } - /** + /** * Set the microphone mute mode */ - static async setMuteMode(mode: AudioEngineMuteMode): Promise<{ success: boolean; mode: AudioEngineMuteMode }> { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static async setMuteMode(mode: AudioEngineMuteMode): Promise<{ success: boolean; mode: AudioEngineMuteMode }> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetMuteMode(mode); } - return WebRTCModule.audioDeviceModuleSetMuteMode(mode); - } - /** + /** * Get the current mute mode */ - static getMuteMode(): AudioEngineMuteMode { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static getMuteMode(): AudioEngineMuteMode { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + 
+ return WebRTCModule.audioDeviceModuleGetMuteMode(); } - return WebRTCModule.audioDeviceModuleGetMuteMode(); - } - /** + /** * Enable or disable advanced audio ducking */ - static setAdvancedDuckingEnabled(enabled: boolean): { success: boolean; enabled: boolean } { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static setAdvancedDuckingEnabled(enabled: boolean): { success: boolean; enabled: boolean } { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetAdvancedDuckingEnabled(enabled); } - return WebRTCModule.audioDeviceModuleSetAdvancedDuckingEnabled(enabled); - } - /** + /** * Check if advanced ducking is enabled */ - static isAdvancedDuckingEnabled(): boolean { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static isAdvancedDuckingEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsAdvancedDuckingEnabled(); } - return WebRTCModule.audioDeviceModuleIsAdvancedDuckingEnabled(); - } - /** + /** * Set the audio ducking level (0-100) */ - static setDuckingLevel(level: number): { success: boolean; level: number } { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + static setDuckingLevel(level: number): { success: boolean; level: number } { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetDuckingLevel(level); } - return WebRTCModule.audioDeviceModuleSetDuckingLevel(level); - } - /** + /** * Get the current ducking level */ - static getDuckingLevel(): number { - if (Platform.OS === 'android') { - throw new Error('AudioDeviceModule is only available on iOS/macOS'); + 
static getDuckingLevel(): number { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleGetDuckingLevel(); } - return WebRTCModule.audioDeviceModuleGetDuckingLevel(); - } } diff --git a/src/AudioDeviceModuleEvents.ts b/src/AudioDeviceModuleEvents.ts index a695f95ce..9f81dddcb 100644 --- a/src/AudioDeviceModuleEvents.ts +++ b/src/AudioDeviceModuleEvents.ts @@ -28,172 +28,205 @@ export type AudioDeviceModuleEventListener = (data: AudioDeviceModuleEventData) * Handler function that must return a number (0 for success, non-zero for error) */ export type AudioEngineEventNoParamsHandler = () => Promise; -export type AudioEngineEventHandler = ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => Promise; +export type AudioEngineEventHandler = (params: { + isPlayoutEnabled: boolean; + isRecordingEnabled: boolean; +}) => Promise; /** * Event emitter for RTCAudioDeviceModule delegate callbacks. * iOS/macOS only. 
*/ class AudioDeviceModuleEventEmitter { - private eventEmitter: NativeEventEmitter | null = null; - private engineCreatedHandler: AudioEngineEventNoParamsHandler | null = null; - private willEnableEngineHandler: AudioEngineEventHandler | null = null; - private willStartEngineHandler: AudioEngineEventHandler | null = null; - private didStopEngineHandler: AudioEngineEventHandler | null = null; - private didDisableEngineHandler: AudioEngineEventHandler | null = null; - private willReleaseEngineHandler: AudioEngineEventNoParamsHandler | null = null; - - constructor() { - if (Platform.OS !== 'android' && WebRTCModule) { - this.eventEmitter = new NativeEventEmitter(WebRTCModule); - - // Setup handlers for blocking delegate methods - this.eventEmitter.addListener('audioDeviceModuleEngineCreated', async () => { - let result = 0; - if (this.engineCreatedHandler) { - try { - await this.engineCreatedHandler(); - } catch (error) { - // If error is a number, use it as the error code, otherwise use -1 - result = typeof error === 'number' ? 
error : -1; - } + private eventEmitter: NativeEventEmitter | null = null; + private engineCreatedHandler: AudioEngineEventNoParamsHandler | null = null; + private willEnableEngineHandler: AudioEngineEventHandler | null = null; + private willStartEngineHandler: AudioEngineEventHandler | null = null; + private didStopEngineHandler: AudioEngineEventHandler | null = null; + private didDisableEngineHandler: AudioEngineEventHandler | null = null; + private willReleaseEngineHandler: AudioEngineEventNoParamsHandler | null = null; + + constructor() { + if (Platform.OS !== 'android' && WebRTCModule) { + this.eventEmitter = new NativeEventEmitter(WebRTCModule); + + // Setup handlers for blocking delegate methods + this.eventEmitter.addListener('audioDeviceModuleEngineCreated', async () => { + let result = 0; + + if (this.engineCreatedHandler) { + try { + await this.engineCreatedHandler(); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveEngineCreated(result); + }); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineWillEnable', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.willEnableEngineHandler) { + try { + await this.willEnableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? 
error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveWillEnableEngine(result); + }, + ); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineWillStart', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.willStartEngineHandler) { + try { + await this.willStartEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveWillStartEngine(result); + }, + ); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineDidStop', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.didStopEngineHandler) { + try { + await this.didStopEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveDidStopEngine(result); + }, + ); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineDidDisable', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.didDisableEngineHandler) { + try { + await this.didDisableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? 
error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveDidDisableEngine(result); + }, + ); + + this.eventEmitter.addListener('audioDeviceModuleEngineWillRelease', async () => { + let result = 0; + + if (this.willReleaseEngineHandler) { + try { + await this.willReleaseEngineHandler(); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveWillReleaseEngine(result); + }); } - WebRTCModule.audioDeviceModuleResolveEngineCreated(result); - }); - - this.eventEmitter.addListener('audioDeviceModuleEngineWillEnable', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { - let result = 0; - if (this.willEnableEngineHandler) { - try { - await this.willEnableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); - } catch (error) { - // If error is a number, use it as the error code, otherwise use -1 - result = typeof error === 'number' ? error : -1; - } - } - WebRTCModule.audioDeviceModuleResolveWillEnableEngine(result); - }); - - this.eventEmitter.addListener('audioDeviceModuleEngineWillStart', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { - let result = 0; - if (this.willStartEngineHandler) { - try { - await this.willStartEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); - } catch (error) { - // If error is a number, use it as the error code, otherwise use -1 - result = typeof error === 'number' ? 
error : -1; - } - } - WebRTCModule.audioDeviceModuleResolveWillStartEngine(result); - }); - - this.eventEmitter.addListener('audioDeviceModuleEngineDidStop', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { - let result = 0; - if (this.didStopEngineHandler) { - try { - await this.didStopEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); - } catch (error) { - // If error is a number, use it as the error code, otherwise use -1 - result = typeof error === 'number' ? error : -1; - } - } - WebRTCModule.audioDeviceModuleResolveDidStopEngine(result); - }); - - this.eventEmitter.addListener('audioDeviceModuleEngineDidDisable', async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { - let result = 0; - if (this.didDisableEngineHandler) { - try { - await this.didDisableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); - } catch (error) { - // If error is a number, use it as the error code, otherwise use -1 - result = typeof error === 'number' ? error : -1; - } - } - WebRTCModule.audioDeviceModuleResolveDidDisableEngine(result); - }); - - this.eventEmitter.addListener('audioDeviceModuleEngineWillRelease', async () => { - let result = 0; - if (this.willReleaseEngineHandler) { - try { - await this.willReleaseEngineHandler(); - } catch (error) { - // If error is a number, use it as the error code, otherwise use -1 - result = typeof error === 'number' ? 
error : -1; - } - } - WebRTCModule.audioDeviceModuleResolveWillReleaseEngine(result); - }); } - } - /** + /** * Subscribe to speech activity events (started/ended) */ - addSpeechActivityListener(listener: (data: SpeechActivityEventData) => void) { - if (!this.eventEmitter) { - throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + addSpeechActivityListener(listener: (data: SpeechActivityEventData) => void) { + if (!this.eventEmitter) { + throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + } + + return this.eventEmitter.addListener('audioDeviceModuleSpeechActivity', listener); } - return this.eventEmitter.addListener('audioDeviceModuleSpeechActivity', listener); - } - /** + /** * Subscribe to devices updated event (input/output devices changed) */ - addDevicesUpdatedListener(listener: () => void) { - if (!this.eventEmitter) { - throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + addDevicesUpdatedListener(listener: () => void) { + if (!this.eventEmitter) { + throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + } + + return this.eventEmitter.addListener('audioDeviceModuleDevicesUpdated', listener); } - return this.eventEmitter.addListener('audioDeviceModuleDevicesUpdated', listener); - } - /** + /** * Set handler for engine created delegate - MUST return 0 for success or error code * This handler blocks the native thread until it returns */ - setEngineCreatedHandler(handler: AudioEngineEventNoParamsHandler | null) { - this.engineCreatedHandler = handler; - } + setEngineCreatedHandler(handler: AudioEngineEventNoParamsHandler | null) { + this.engineCreatedHandler = handler; + } - /** + /** * Set handler for will enable engine delegate - MUST return 0 for success or error code * This handler blocks the native thread until it returns */ - setWillEnableEngineHandler(handler: AudioEngineEventHandler | null) { - this.willEnableEngineHandler = handler; - } + 
setWillEnableEngineHandler(handler: AudioEngineEventHandler | null) { + this.willEnableEngineHandler = handler; + } - /** + /** * Set handler for will start engine delegate - MUST return 0 for success or error code * This handler blocks the native thread until it returns */ - setWillStartEngineHandler(handler: AudioEngineEventHandler | null) { - this.willStartEngineHandler = handler; - } + setWillStartEngineHandler(handler: AudioEngineEventHandler | null) { + this.willStartEngineHandler = handler; + } - /** + /** * Set handler for did stop engine delegate - MUST return 0 for success or error code * This handler blocks the native thread until it returns */ - setDidStopEngineHandler(handler: AudioEngineEventHandler | null) { - this.didStopEngineHandler = handler; - } + setDidStopEngineHandler(handler: AudioEngineEventHandler | null) { + this.didStopEngineHandler = handler; + } - /** + /** * Set handler for did disable engine delegate - MUST return 0 for success or error code * This handler blocks the native thread until it returns */ - setDidDisableEngineHandler(handler: AudioEngineEventHandler | null) { - this.didDisableEngineHandler = handler; - } + setDidDisableEngineHandler(handler: AudioEngineEventHandler | null) { + this.didDisableEngineHandler = handler; + } - /** + /** * Set handler for will release engine delegate - MUST return 0 for success or error code * This handler blocks the native thread until it returns */ - setWillReleaseEngineHandler(handler: AudioEngineEventNoParamsHandler | null) { - this.willReleaseEngineHandler = handler; - } + setWillReleaseEngineHandler(handler: AudioEngineEventNoParamsHandler | null) { + this.willReleaseEngineHandler = handler; + } } export const audioDeviceModuleEvents = new AudioDeviceModuleEventEmitter(); From 648c595808808217613107510dff0ccdbe5161a8 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 13:16:09 +0800 Subject: [PATCH 4/8] Update format --- 
ios/RCTWebRTC/AudioDeviceModuleObserver.h | 2 +- .../WebRTCModule+RTCAudioDeviceModule.m | 48 ++++++++------ ios/RCTWebRTC/WebRTCModule.m | 24 +++---- src/AudioDeviceModule.ts | 64 +++++++++---------- 4 files changed, 75 insertions(+), 63 deletions(-) diff --git a/ios/RCTWebRTC/AudioDeviceModuleObserver.h b/ios/RCTWebRTC/AudioDeviceModuleObserver.h index c2c0e2500..a1f415909 100644 --- a/ios/RCTWebRTC/AudioDeviceModuleObserver.h +++ b/ios/RCTWebRTC/AudioDeviceModuleObserver.h @@ -3,7 +3,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface AudioDeviceModuleObserver : NSObject +@interface AudioDeviceModuleObserver : NSObject - (instancetype)initWithWebRTCModule:(WebRTCModule *)module; diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m index dc5b308f7..154f91aeb 100644 --- a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -10,8 +10,9 @@ @implementation WebRTCModule (RTCAudioDeviceModule) #pragma mark - Recording & Playback Control -RCT_EXPORT_METHOD(audioDeviceModuleStartPlayout : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) - reject) { +RCT_EXPORT_METHOD(audioDeviceModuleStartPlayout + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule startPlayout]; if (result == 0) { resolve(@{@"success" : @YES}); @@ -20,8 +21,9 @@ @implementation WebRTCModule (RTCAudioDeviceModule) } } -RCT_EXPORT_METHOD(audioDeviceModuleStopPlayout : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) - reject) { +RCT_EXPORT_METHOD(audioDeviceModuleStopPlayout + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule stopPlayout]; if (result == 0) { resolve(@{@"success" : @YES}); @@ -30,8 +32,9 @@ @implementation WebRTCModule (RTCAudioDeviceModule) } } -RCT_EXPORT_METHOD(audioDeviceModuleStartRecording : 
(RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) - reject) { +RCT_EXPORT_METHOD(audioDeviceModuleStartRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule startRecording]; if (result == 0) { resolve(@{@"success" : @YES}); @@ -40,8 +43,9 @@ @implementation WebRTCModule (RTCAudioDeviceModule) } } -RCT_EXPORT_METHOD(audioDeviceModuleStopRecording : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock) - reject) { +RCT_EXPORT_METHOD(audioDeviceModuleStopRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule stopRecording]; if (result == 0) { resolve(@{@"success" : @YES}); @@ -50,8 +54,9 @@ @implementation WebRTCModule (RTCAudioDeviceModule) } } -RCT_EXPORT_METHOD(audioDeviceModuleStartLocalRecording : (RCTPromiseResolveBlock) - resolve rejecter : (RCTPromiseRejectBlock)reject) { +RCT_EXPORT_METHOD(audioDeviceModuleStartLocalRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule initAndStartRecording]; if (result == 0) { resolve(@{@"success" : @YES}); @@ -61,8 +66,9 @@ @implementation WebRTCModule (RTCAudioDeviceModule) } } -RCT_EXPORT_METHOD(audioDeviceModuleStopLocalRecording : (RCTPromiseResolveBlock) - resolve rejecter : (RCTPromiseRejectBlock)reject) { +RCT_EXPORT_METHOD(audioDeviceModuleStopLocalRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule stopRecording]; if (result == 0) { resolve(@{@"success" : @YES}); @@ -74,8 +80,10 @@ @implementation WebRTCModule (RTCAudioDeviceModule) #pragma mark - Microphone Control -RCT_EXPORT_METHOD(audioDeviceModuleSetMicrophoneMuted : (BOOL)muted resolver : (RCTPromiseResolveBlock) - resolve rejecter : (RCTPromiseRejectBlock)reject) { 
+RCT_EXPORT_METHOD(audioDeviceModuleSetMicrophoneMuted + : (BOOL)muted resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule setMicrophoneMuted:muted]; if (result == 0) { resolve(@{@"success" : @YES, @"muted" : @(muted)}); @@ -90,8 +98,10 @@ @implementation WebRTCModule (RTCAudioDeviceModule) #pragma mark - Voice Processing -RCT_EXPORT_METHOD(audioDeviceModuleSetVoiceProcessingEnabled : (BOOL)enabled resolver : (RCTPromiseResolveBlock) - resolve rejecter : (RCTPromiseRejectBlock)reject) { +RCT_EXPORT_METHOD(audioDeviceModuleSetVoiceProcessingEnabled + : (BOOL)enabled resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule setVoiceProcessingEnabled:enabled]; if (result == 0) { resolve(@{@"success" : @YES, @"enabled" : @(enabled)}); @@ -140,8 +150,10 @@ @implementation WebRTCModule (RTCAudioDeviceModule) #pragma mark - Advanced Features -RCT_EXPORT_METHOD(audioDeviceModuleSetMuteMode : (NSInteger)mode resolver : (RCTPromiseResolveBlock) - resolve rejecter : (RCTPromiseRejectBlock)reject) { +RCT_EXPORT_METHOD(audioDeviceModuleSetMuteMode + : (NSInteger)mode resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule setMuteMode:(RTCAudioEngineMuteMode)mode]; if (result == 0) { resolve(@{@"success" : @YES, @"mode" : @(mode)}); diff --git a/ios/RCTWebRTC/WebRTCModule.m b/ios/RCTWebRTC/WebRTCModule.m index 36798e17c..ce54d8db9 100644 --- a/ios/RCTWebRTC/WebRTCModule.m +++ b/ios/RCTWebRTC/WebRTCModule.m @@ -7,10 +7,10 @@ #import #import +#import "AudioDeviceModuleObserver.h" #import "WebRTCModule+RTCPeerConnection.h" #import "WebRTCModule.h" #import "WebRTCModuleOptions.h" -#import "AudioDeviceModuleObserver.h" @interface WebRTCModule () @end @@ -72,18 +72,18 @@ - (instancetype)init { RCTLogInfo(@"Using video decoder factory: %@", 
NSStringFromClass([decoderFactory class])); if (audioDevice == nil) { - RCTLogInfo(@"Using audio processing module: %@", NSStringFromClass([audioProcessingModule class])); - _peerConnectionFactory = - [[RTCPeerConnectionFactory alloc] initWithAudioDeviceModuleType:RTCAudioDeviceModuleTypeAudioEngine - bypassVoiceProcessing:NO - encoderFactory:encoderFactory - decoderFactory:decoderFactory - audioProcessingModule:audioProcessingModule]; + RCTLogInfo(@"Using audio processing module: %@", NSStringFromClass([audioProcessingModule class])); + _peerConnectionFactory = + [[RTCPeerConnectionFactory alloc] initWithAudioDeviceModuleType:RTCAudioDeviceModuleTypeAudioEngine + bypassVoiceProcessing:NO + encoderFactory:encoderFactory + decoderFactory:decoderFactory + audioProcessingModule:audioProcessingModule]; } else { - RCTLogInfo(@"Using audio device: %@", NSStringFromClass([audioDevice class])); - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory - audioDevice:audioDevice]; + RCTLogInfo(@"Using audio device: %@", NSStringFromClass([audioDevice class])); + _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory + decoderFactory:decoderFactory + audioDevice:audioDevice]; } _peerConnections = [NSMutableDictionary new]; diff --git a/src/AudioDeviceModule.ts b/src/AudioDeviceModule.ts index 4c2073186..90fa280b6 100644 --- a/src/AudioDeviceModule.ts +++ b/src/AudioDeviceModule.ts @@ -15,8 +15,8 @@ export enum AudioEngineMuteMode { */ export class AudioDeviceModule { /** - * Start audio playback - */ + * Start audio playback + */ static async startPlayout(): Promise<{ success: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -26,8 +26,8 @@ export class AudioDeviceModule { } /** - * Stop audio playback - */ + * Stop audio playback + */ static async stopPlayout(): Promise<{ success: boolean }> { 
if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -37,8 +37,8 @@ export class AudioDeviceModule { } /** - * Start audio recording - */ + * Start audio recording + */ static async startRecording(): Promise<{ success: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -48,8 +48,8 @@ export class AudioDeviceModule { } /** - * Stop audio recording - */ + * Stop audio recording + */ static async stopRecording(): Promise<{ success: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -59,8 +59,8 @@ export class AudioDeviceModule { } /** - * Initialize and start local audio recording (calls initAndStartRecording) - */ + * Initialize and start local audio recording (calls initAndStartRecording) + */ static async startLocalRecording(): Promise<{ success: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -70,8 +70,8 @@ export class AudioDeviceModule { } /** - * Stop local audio recording - */ + * Stop local audio recording + */ static async stopLocalRecording(): Promise<{ success: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -81,8 +81,8 @@ export class AudioDeviceModule { } /** - * Mute or unmute the microphone - */ + * Mute or unmute the microphone + */ static async setMicrophoneMuted(muted: boolean): Promise<{ success: boolean; muted: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -92,8 +92,8 @@ export class AudioDeviceModule { } /** - * Check if microphone is currently muted - */ + * Check if microphone is currently muted + */ static isMicrophoneMuted(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -103,8 +103,8 
@@ export class AudioDeviceModule { } /** - * Enable or disable voice processing (requires engine restart) - */ + * Enable or disable voice processing (requires engine restart) + */ static async setVoiceProcessingEnabled(enabled: boolean): Promise<{ success: boolean; enabled: boolean }> { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -114,8 +114,8 @@ export class AudioDeviceModule { } /** - * Check if voice processing is enabled - */ + * Check if voice processing is enabled + */ static isVoiceProcessingEnabled(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -125,8 +125,8 @@ export class AudioDeviceModule { } /** - * Temporarily bypass voice processing without restarting the engine - */ + * Temporarily bypass voice processing without restarting the engine + */ static setVoiceProcessingBypassed(bypassed: boolean): void { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -136,8 +136,8 @@ export class AudioDeviceModule { } /** - * Check if voice processing is currently bypassed - */ + * Check if voice processing is currently bypassed + */ static isVoiceProcessingBypassed(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -147,8 +147,8 @@ export class AudioDeviceModule { } /** - * Enable or disable Automatic Gain Control (AGC) - */ + * Enable or disable Automatic Gain Control (AGC) + */ static setVoiceProcessingAGCEnabled(enabled: boolean): { success: boolean; enabled: boolean } { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -158,8 +158,8 @@ export class AudioDeviceModule { } /** - * Check if AGC is enabled - */ + * Check if AGC is enabled + */ static isVoiceProcessingAGCEnabled(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is 
only available on iOS/macOS'); @@ -169,8 +169,8 @@ export class AudioDeviceModule { } /** - * Check if audio is currently playing - */ + * Check if audio is currently playing + */ static isPlaying(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -180,8 +180,8 @@ export class AudioDeviceModule { } /** - * Check if audio is currently recording - */ + * Check if audio is currently recording + */ static isRecording(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); From 8201f25dfb8dffcd8d92f99b8e396380d14eeb10 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 14:49:04 +0800 Subject: [PATCH 5/8] Setup listeners early --- src/index.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/index.ts b/src/index.ts index 344cf4a5c..dd171b9f1 100644 --- a/src/index.ts +++ b/src/index.ts @@ -103,4 +103,10 @@ function registerGlobals(): void { global.RTCRtpReceiver = RTCRtpReceiver; global.RTCRtpSender = RTCRtpSender; global.RTCErrorEvent = RTCErrorEvent; + + // Ensure audioDeviceModuleEvents is initialized and event listeners are registered + // This forces the constructor to run and set up native event listeners. + // We use void operator to explicitly indicate we're intentionally evaluating the expression + // without using its value, which prevents tree-shaking from removing this reference. 
+ void audioDeviceModuleEvents; } From 30999db41203fecb8d5c0a783beb5bf148830963 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 15:57:06 +0800 Subject: [PATCH 6/8] Fixes --- .../WebRTCModule+RTCAudioDeviceModule.m | 24 +++++++++---------- src/AudioDeviceModule.ts | 24 +++++++++---------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m index 154f91aeb..cda1a4774 100644 --- a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -15,7 +15,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule startPlayout]; if (result == 0) { - resolve(@{@"success" : @YES}); + resolve(nil); } else { reject(@"playout_error", [NSString stringWithFormat:@"Failed to start playout: %ld", (long)result], nil); } @@ -26,7 +26,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule stopPlayout]; if (result == 0) { - resolve(@{@"success" : @YES}); + resolve(nil); } else { reject(@"playout_error", [NSString stringWithFormat:@"Failed to stop playout: %ld", (long)result], nil); } @@ -37,7 +37,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule startRecording]; if (result == 0) { - resolve(@{@"success" : @YES}); + resolve(nil); } else { reject(@"recording_error", [NSString stringWithFormat:@"Failed to start recording: %ld", (long)result], nil); } @@ -48,7 +48,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule stopRecording]; if (result == 0) { - resolve(@{@"success" : @YES}); + resolve(nil); } else { reject(@"recording_error", [NSString 
stringWithFormat:@"Failed to stop recording: %ld", (long)result], nil); } @@ -59,7 +59,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule initAndStartRecording]; if (result == 0) { - resolve(@{@"success" : @YES}); + resolve(nil); } else { reject( @"recording_error", [NSString stringWithFormat:@"Failed to start local recording: %ld", (long)result], nil); @@ -71,7 +71,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule stopRecording]; if (result == 0) { - resolve(@{@"success" : @YES}); + resolve(nil); } else { reject( @"recording_error", [NSString stringWithFormat:@"Failed to stop local recording: %ld", (long)result], nil); @@ -86,7 +86,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule setMicrophoneMuted:muted]; if (result == 0) { - resolve(@{@"success" : @YES, @"muted" : @(muted)}); + resolve(nil); } else { reject(@"mute_error", [NSString stringWithFormat:@"Failed to set microphone mute: %ld", (long)result], nil); } @@ -104,7 +104,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule setVoiceProcessingEnabled:enabled]; if (result == 0) { - resolve(@{@"success" : @YES, @"enabled" : @(enabled)}); + resolve(nil); } else { reject(@"voice_processing_error", [NSString stringWithFormat:@"Failed to set voice processing: %ld", (long)result], @@ -127,7 +127,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingAGCEnabled : (BOOL)enabled) { self.audioDeviceModule.voiceProcessingAGCEnabled = enabled; - return @{@"success" : @YES, @"enabled" : @(enabled)}; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingAGCEnabled) { @@ -156,7 +156,7 @@ 
@implementation WebRTCModule (RTCAudioDeviceModule) : (RCTPromiseRejectBlock)reject) { NSInteger result = [self.audioDeviceModule setMuteMode:(RTCAudioEngineMuteMode)mode]; if (result == 0) { - resolve(@{@"success" : @YES, @"mode" : @(mode)}); + resolve(nil); } else { reject(@"mute_mode_error", [NSString stringWithFormat:@"Failed to set mute mode: %ld", (long)result], nil); } @@ -168,7 +168,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetAdvancedDuckingEnabled : (BOOL)enabled) { self.audioDeviceModule.advancedDuckingEnabled = enabled; - return @{@"success" : @YES, @"enabled" : @(enabled)}; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsAdvancedDuckingEnabled) { @@ -177,7 +177,7 @@ @implementation WebRTCModule (RTCAudioDeviceModule) RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetDuckingLevel : (NSInteger)level) { self.audioDeviceModule.duckingLevel = level; - return @{@"success" : @YES, @"level" : @(level)}; + return nil; } RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetDuckingLevel) { diff --git a/src/AudioDeviceModule.ts b/src/AudioDeviceModule.ts index 90fa280b6..1507bb084 100644 --- a/src/AudioDeviceModule.ts +++ b/src/AudioDeviceModule.ts @@ -17,7 +17,7 @@ export class AudioDeviceModule { /** * Start audio playback */ - static async startPlayout(): Promise<{ success: boolean }> { + static async startPlayout(): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -28,7 +28,7 @@ export class AudioDeviceModule { /** * Stop audio playback */ - static async stopPlayout(): Promise<{ success: boolean }> { + static async stopPlayout(): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -39,7 +39,7 @@ export class AudioDeviceModule { /** * Start audio recording */ - static async startRecording(): Promise<{ success: boolean }> 
{ + static async startRecording(): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -50,7 +50,7 @@ export class AudioDeviceModule { /** * Stop audio recording */ - static async stopRecording(): Promise<{ success: boolean }> { + static async stopRecording(): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -61,7 +61,7 @@ export class AudioDeviceModule { /** * Initialize and start local audio recording (calls initAndStartRecording) */ - static async startLocalRecording(): Promise<{ success: boolean }> { + static async startLocalRecording(): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -72,7 +72,7 @@ export class AudioDeviceModule { /** * Stop local audio recording */ - static async stopLocalRecording(): Promise<{ success: boolean }> { + static async stopLocalRecording(): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -83,7 +83,7 @@ export class AudioDeviceModule { /** * Mute or unmute the microphone */ - static async setMicrophoneMuted(muted: boolean): Promise<{ success: boolean; muted: boolean }> { + static async setMicrophoneMuted(muted: boolean): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -105,7 +105,7 @@ export class AudioDeviceModule { /** * Enable or disable voice processing (requires engine restart) */ - static async setVoiceProcessingEnabled(enabled: boolean): Promise<{ success: boolean; enabled: boolean }> { + static async setVoiceProcessingEnabled(enabled: boolean): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -149,7 +149,7 @@ export class AudioDeviceModule { /** * Enable or disable Automatic Gain Control (AGC) */ - static 
setVoiceProcessingAGCEnabled(enabled: boolean): { success: boolean; enabled: boolean } { + static setVoiceProcessingAGCEnabled(enabled: boolean): void { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -204,7 +204,7 @@ export class AudioDeviceModule { /** * Set the microphone mute mode */ - static async setMuteMode(mode: AudioEngineMuteMode): Promise<{ success: boolean; mode: AudioEngineMuteMode }> { + static async setMuteMode(mode: AudioEngineMuteMode): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -226,7 +226,7 @@ export class AudioDeviceModule { /** * Enable or disable advanced audio ducking */ - static setAdvancedDuckingEnabled(enabled: boolean): { success: boolean; enabled: boolean } { + static setAdvancedDuckingEnabled(enabled: boolean): void { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } @@ -248,7 +248,7 @@ export class AudioDeviceModule { /** * Set the audio ducking level (0-100) */ - static setDuckingLevel(level: number): { success: boolean; level: number } { + static setDuckingLevel(level: number): void { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); } From cedd5ce5032f1fc1de28c186a9c74826f72c3593 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 20:50:36 +0800 Subject: [PATCH 7/8] Availability APIs --- .../WebRTCModule+RTCAudioDeviceModule.m | 44 ++++++++++ src/AudioDeviceModule.ts | 88 ++++++++++++++++--- src/index.ts | 3 +- 3 files changed, 120 insertions(+), 15 deletions(-) diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m index cda1a4774..24a0199ba 100644 --- a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -184,6 +184,50 
@@ @implementation WebRTCModule (RTCAudioDeviceModule) return @(self.audioDeviceModule.duckingLevel); } +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsRecordingAlwaysPreparedMode) { + return @(self.audioDeviceModule.recordingAlwaysPreparedMode); +} + +RCT_EXPORT_METHOD(audioDeviceModuleSetRecordingAlwaysPreparedMode + : (BOOL)enabled resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setRecordingAlwaysPreparedMode:enabled]; + if (result == 0) { + resolve(nil); + } else { + reject(@"recording_always_prepared_mode_error", + [NSString stringWithFormat:@"Failed to set recording always prepared mode: %ld", (long)result], + nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetEngineAvailability) { + RTCAudioEngineAvailability availability = self.audioDeviceModule.engineAvailability; + return @{ + @"isInputAvailable" : @(availability.isInputAvailable), + @"isOutputAvailable" : @(availability.isOutputAvailable) + }; +} + +RCT_EXPORT_METHOD(audioDeviceModuleSetEngineAvailability + : (NSDictionary *)availabilityDict resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + RTCAudioEngineAvailability availability; + availability.isInputAvailable = [availabilityDict[@"isInputAvailable"] boolValue]; + availability.isOutputAvailable = [availabilityDict[@"isOutputAvailable"] boolValue]; + // setEngineAvailability returns 0 on success; call once and propagate failures to JS. + NSInteger result = [self.audioDeviceModule setEngineAvailability:availability]; + if (result == 0) { + resolve(nil); + } else { + reject(@"engine_availability_error", + [NSString stringWithFormat:@"Failed to set engine availability: %ld", (long)result], + nil); + } +} + #pragma mark - Observer Delegate Response Methods RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveEngineCreated : (NSInteger)result) { diff --git a/src/AudioDeviceModule.ts b/src/AudioDeviceModule.ts
index 1507bb084..a00fdce13 100644 --- a/src/AudioDeviceModule.ts +++ b/src/AudioDeviceModule.ts @@ -9,6 +9,22 @@ export enum AudioEngineMuteMode { InputMixer = 2, } +export interface AudioEngineAvailability { + isInputAvailable: boolean; + isOutputAvailable: boolean; +} + +export const AudioEngineAvailability = { + default: { + isInputAvailable: true, + isOutputAvailable: true, + }, + none: { + isInputAvailable: false, + isOutputAvailable: false, + }, +} as const; + /** * Audio Device Module API for controlling audio devices and settings. * iOS/macOS only - will throw on Android. @@ -191,8 +207,8 @@ export class AudioDeviceModule { } /** - * Check if the audio engine is running - */ + * Check if the audio engine is running + */ static isEngineRunning(): boolean { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -202,8 +218,8 @@ export class AudioDeviceModule { } /** - * Set the microphone mute mode - */ + * Set the microphone mute mode + */ static async setMuteMode(mode: AudioEngineMuteMode): Promise { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -213,8 +229,8 @@ export class AudioDeviceModule { } /** - * Get the current mute mode - */ + * Get the current mute mode + */ static getMuteMode(): AudioEngineMuteMode { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -224,8 +240,8 @@ export class AudioDeviceModule { } /** - * Enable or disable advanced audio ducking - */ + * Enable or disable advanced audio ducking + */ static setAdvancedDuckingEnabled(enabled: boolean): void { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -235,8 +251,8 @@ export class AudioDeviceModule { } /** - * Check if advanced ducking is enabled - */ + * Check if advanced ducking is enabled + */ static isAdvancedDuckingEnabled(): boolean { if (Platform.OS === 'android') { 
throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -246,8 +262,8 @@ export class AudioDeviceModule { } /** - * Set the audio ducking level (0-100) - */ + * Set the audio ducking level (0-100) + */ static setDuckingLevel(level: number): void { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -257,8 +273,8 @@ export class AudioDeviceModule { } /** - * Get the current ducking level - */ + * Get the current ducking level + */ static getDuckingLevel(): number { if (Platform.OS === 'android') { throw new Error('AudioDeviceModule is only available on iOS/macOS'); @@ -266,4 +282,48 @@ export class AudioDeviceModule { return WebRTCModule.audioDeviceModuleGetDuckingLevel(); } + + /** + * Check if recording always prepared mode is enabled + */ + static isRecordingAlwaysPreparedMode(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsRecordingAlwaysPreparedMode(); + } + + /** + * Enable or disable recording always prepared mode (promise-backed native method) + */ + static async setRecordingAlwaysPreparedMode(enabled: boolean): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetRecordingAlwaysPreparedMode(enabled); + } + + /** + * Get the current engine availability (input/output availability) + */ + static getEngineAvailability(): AudioEngineAvailability { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleGetEngineAvailability(); + } + + /** + * Set the engine availability (input/output availability, promise-backed native method) + */ + static async setEngineAvailability(availability: AudioEngineAvailability): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return
WebRTCModule.audioDeviceModuleSetEngineAvailability(availability); + } } diff --git a/src/index.ts b/src/index.ts index dd171b9f1..94c9300ba 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,7 +8,7 @@ if (WebRTCModule === null) { }`); } -import { AudioDeviceModule, AudioEngineMuteMode } from './AudioDeviceModule'; +import { AudioDeviceModule, AudioEngineMuteMode, type AudioEngineAvailability } from './AudioDeviceModule'; import { audioDeviceModuleEvents } from './AudioDeviceModuleEvents'; import { setupNativeEvents } from './EventEmitter'; import Logger from './Logger'; @@ -72,6 +72,7 @@ export { stopIOSPIP, AudioDeviceModule, AudioEngineMuteMode, + type AudioEngineAvailability, audioDeviceModuleEvents, }; From cff35e8f862b94c4fdd8ab5d89559523267955ec Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 21:29:17 +0800 Subject: [PATCH 8/8] Minor fixes --- src/AudioDeviceModuleEvents.ts | 47 +++++++++++++++++----------------- src/index.ts | 5 +--- 2 files changed, 25 insertions(+), 27 deletions(-) diff --git a/src/AudioDeviceModuleEvents.ts b/src/AudioDeviceModuleEvents.ts index 9f81dddcb..e0997d87e 100644 --- a/src/AudioDeviceModuleEvents.ts +++ b/src/AudioDeviceModuleEvents.ts @@ -46,7 +46,7 @@ class AudioDeviceModuleEventEmitter { private didDisableEngineHandler: AudioEngineEventHandler | null = null; private willReleaseEngineHandler: AudioEngineEventNoParamsHandler | null = null; - constructor() { + public setupListeners() { if (Platform.OS !== 'android' && WebRTCModule) { this.eventEmitter = new NativeEventEmitter(WebRTCModule); @@ -158,9 +158,10 @@ class AudioDeviceModuleEventEmitter { }); } } + /** - * Subscribe to speech activity events (started/ended) - */ + * Subscribe to speech activity events (started/ended) + */ addSpeechActivityListener(listener: (data: SpeechActivityEventData) => void) { if (!this.eventEmitter) { throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); @@ 
-170,8 +171,8 @@ class AudioDeviceModuleEventEmitter { } /** - * Subscribe to devices updated event (input/output devices changed) - */ + * Subscribe to devices updated event (input/output devices changed) + */ addDevicesUpdatedListener(listener: () => void) { if (!this.eventEmitter) { throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); @@ -181,49 +182,49 @@ class AudioDeviceModuleEventEmitter { } /** - * Set handler for engine created delegate - MUST return 0 for success or error code - * This handler blocks the native thread until it returns - */ + * Set handler for engine created delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns, throw to cancel audio engine's operation + */ setEngineCreatedHandler(handler: AudioEngineEventNoParamsHandler | null) { this.engineCreatedHandler = handler; } /** - * Set handler for will enable engine delegate - MUST return 0 for success or error code - * This handler blocks the native thread until it returns - */ + * Set handler for will enable engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns, throw to cancel audio engine's operation + */ setWillEnableEngineHandler(handler: AudioEngineEventHandler | null) { this.willEnableEngineHandler = handler; } /** - * Set handler for will start engine delegate - MUST return 0 for success or error code - * This handler blocks the native thread until it returns - */ + * Set handler for will start engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns, throw to cancel audio engine's operation + */ setWillStartEngineHandler(handler: AudioEngineEventHandler | null) { this.willStartEngineHandler = handler; } /** - * Set handler for did stop engine delegate - MUST return 0 for success or error code - * This handler blocks the native thread until it returns - */ + * Set handler for did stop 
engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns, throw to cancel audio engine's operation + */ setDidStopEngineHandler(handler: AudioEngineEventHandler | null) { this.didStopEngineHandler = handler; } /** - * Set handler for did disable engine delegate - MUST return 0 for success or error code - * This handler blocks the native thread until it returns - */ + * Set handler for did disable engine delegate - MUST return 0 for success or error code + * This handler blocks the native thread until it returns, throw to cancel audio engine's operation + */ setDidDisableEngineHandler(handler: AudioEngineEventHandler | null) { this.didDisableEngineHandler = handler; } /** - * Set handler for will release engine delegate - MUST return 0 for success or error code - * This handler blocks the native thread until it returns - */ + * Set handler for will release engine delegate + * This handler blocks the native thread until it returns, throw to cancel audio engine's operation + */ setWillReleaseEngineHandler(handler: AudioEngineEventNoParamsHandler | null) { this.willReleaseEngineHandler = handler; } diff --git a/src/index.ts b/src/index.ts index 94c9300ba..dacb06d08 100644 --- a/src/index.ts +++ b/src/index.ts @@ -106,8 +106,5 @@ function registerGlobals(): void { global.RTCErrorEvent = RTCErrorEvent; // Ensure audioDeviceModuleEvents is initialized and event listeners are registered - // This forces the constructor to run and set up native event listeners. - // We use void operator to explicitly indicate we're intentionally evaluating the expression - // without using its value, which prevents tree-shaking from removing this reference. - void audioDeviceModuleEvents; + audioDeviceModuleEvents.setupListeners(); }