Merged · Changes from 5 commits
4 changes: 3 additions & 1 deletion example/index.tsx
@@ -1,7 +1,7 @@
import { AppRegistry } from 'react-native';
import App from './src/App';
import { name as appName } from './app.json';
import { registerGlobals, setLogLevel } from '@livekit/react-native';
import { registerGlobals, setLogLevel, useIOSAudioManagement } from '@livekit/react-native';
import { LogLevel } from 'livekit-client';
import { setupErrorLogHandler } from './src/utils/ErrorLogHandler';
import { setupCallService } from './src/callservice/CallService';
@@ -16,3 +16,5 @@ setupCallService();
// Required React-Native setup for app
registerGlobals();
AppRegistry.registerComponent(appName, () => App);

useIOSAudioManagement();
1 change: 0 additions & 1 deletion example/src/RoomPage.tsx
@@ -106,7 +106,6 @@ const RoomView = ({ navigation, e2ee }: RoomViewProps) => {
return () => {};
}, [room, e2ee]);

useIOSAudioManagement(room, true);
// Setup room listeners
const { send } = useDataChannel(
(dataMessage: ReceivedDataMessage<string>) => {
41 changes: 38 additions & 3 deletions ios/AudioUtils.swift
@@ -6,7 +6,11 @@ public class AudioUtils {
case "default_":
.default
case "voicePrompt":
.voicePrompt
if #available(iOS 12.0, *) {
.voicePrompt
} else {
.default
}
case "videoRecording":
.videoRecording
case "videoChat":
@@ -26,7 +30,7 @@
}
return retMode
}

public static func audioSessionCategoryFromString(_ category: String) -> AVAudioSession.Category {
let retCategory: AVAudioSession.Category = switch category {
case "ambient":
@@ -42,8 +46,39 @@
case "multiRoute":
.multiRoute
default:
.ambient
.soloAmbient
}
return retCategory
}

public static func audioSessionCategoryOptionsFromStrings(_ options: [String]) -> AVAudioSession.CategoryOptions {
var categoryOptions: AVAudioSession.CategoryOptions = []
for option in options {
switch option {
case "mixWithOthers":
categoryOptions.insert(.mixWithOthers)
case "duckOthers":
categoryOptions.insert(.duckOthers)
case "allowBluetooth":
categoryOptions.insert(.allowBluetooth)
case "allowBluetoothA2DP":
categoryOptions.insert(.allowBluetoothA2DP)
case "allowAirPlay":
categoryOptions.insert(.allowAirPlay)
case "defaultToSpeaker":
categoryOptions.insert(.defaultToSpeaker)
case "interruptSpokenAudioAndMixWithOthers":
if #available(iOS 13.0, *) {
categoryOptions.insert(.interruptSpokenAudioAndMixWithOthers)
}
case "overrideMutedMicrophoneInterruption":
if #available(iOS 14.5, *) {
categoryOptions.insert(.overrideMutedMicrophoneInterruption)
}
default:
break
}
}
return categoryOptions
}
}
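
For reviewers who want to try the new helpers in isolation: a minimal sketch, assuming only the two static methods from this file. The call site and the chosen strings are illustrative, not part of the PR.

```swift
import AVFoundation

// Illustrative call site; AudioUtils is the class from this diff.
let category = AudioUtils.audioSessionCategoryFromString("playAndRecord")
let options = AudioUtils.audioSessionCategoryOptionsFromStrings([
    "allowBluetoothA2DP",
    "defaultToSpeaker",
    "interruptSpokenAudioAndMixWithOthers", // silently dropped below iOS 13
])

do {
    // Unknown category strings fall back to .soloAmbient, and unknown
    // option strings are ignored (see the switch statements above).
    try AVAudioSession.sharedInstance().setCategory(category, options: options)
} catch {
    print("setCategory failed: \(error.localizedDescription)")
}
```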
157 changes: 77 additions & 80 deletions ios/LiveKitReactNativeModule.swift
@@ -11,30 +11,30 @@ struct LKEvents {

@objc(LivekitReactNativeModule)
public class LivekitReactNativeModule: RCTEventEmitter {

// This cannot be initialized in init as self.bridge is given afterwards.
private var _audioRendererManager: AudioRendererManager? = nil
public var audioRendererManager: AudioRendererManager {
get {
if _audioRendererManager == nil {
_audioRendererManager = AudioRendererManager(bridge: self.bridge)
}

return _audioRendererManager!
}
}

@objc
public override init() {
super.init()
let config = RTCAudioSessionConfiguration()
config.category = AVAudioSession.Category.playAndRecord.rawValue
config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
config.categoryOptions = [.allowAirPlay, .allowBluetoothHFP, .allowBluetoothA2DP, .defaultToSpeaker]
config.mode = AVAudioSession.Mode.videoChat.rawValue

RTCAudioSessionConfiguration.setWebRTC(config)
}

@objc
override public static func requiresMainQueueSetup() -> Bool {
return false
@@ -48,19 +48,19 @@ public class LivekitReactNativeModule: RCTEventEmitter {
options.videoEncoderFactory = simulcastVideoEncoderFactory
options.audioProcessingModule = LKAudioProcessingManager.sharedInstance().audioProcessingModule
}

@objc(configureAudio:)
public func configureAudio(_ config: NSDictionary) {
guard let iOSConfig = config["ios"] as? NSDictionary
else {
return
}

let defaultOutput = iOSConfig["defaultOutput"] as? String ?? "speaker"

let rtcConfig = RTCAudioSessionConfiguration()
rtcConfig.category = AVAudioSession.Category.playAndRecord.rawValue

if (defaultOutput == "earpiece") {
rtcConfig.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP];
rtcConfig.mode = AVAudioSession.Mode.voiceChat.rawValue
@@ -70,17 +70,39 @@ }
}
RTCAudioSessionConfiguration.setWebRTC(rtcConfig)
}
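
As a reference point for the session changes below, a sketch of the dictionary shape configureAudio reads; the keys ("ios", "defaultOutput") come from the code above, while the literal itself is an assumption.

```swift
// Illustrative only; normally this arrives from the JS side of the bridge.
let audioConfig: NSDictionary = [
    "ios": [
        "defaultOutput": "earpiece", // any other value takes the speaker branch
    ],
]

let module = LivekitReactNativeModule()
module.configureAudio(audioConfig)
```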

@objc(startAudioSession)
public func startAudioSession() {
// intentionally left empty
}

@objc(startAudioSession:withRejecter:)
public func startAudioSession(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
let session = RTCAudioSession.sharedInstance()
session.lockForConfiguration()
defer {
session.unlockForConfiguration()
}

do {
try session.setActive(true)
Comment (Member Author): Moved setActive(true) here. I haven't confirmed whether this gets invoked before the internal AVAudioEngine starts; I assume it does, but I need to double-check.

resolve(nil)
} catch {
reject("startAudioSession", "Error activating audio session: \(error.localizedDescription)", error)
}
}

@objc(stopAudioSession)
public func stopAudioSession() {
// intentionally left empty
}

@objc(stopAudioSession:withRejecter:)
public func stopAudioSession(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
let session = RTCAudioSession.sharedInstance()
session.lockForConfiguration()
defer {
session.unlockForConfiguration()
}

do {
try session.setActive(false)
Comment (Member Author): Moved setActive(false) here.

resolve(nil)
} catch {
reject("stopAudioSession", "Error deactivating audio session: \(error.localizedDescription)", error)
}
}
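
Since startAudioSession/stopAudioSession now do real work and report failures through the promise, a minimal sketch of driving them natively; the closures are stand-ins for the resolve/reject blocks the React Native bridge normally supplies.

```swift
// Illustrative: in the app these are invoked via the React Native bridge.
let module = LivekitReactNativeModule()

module.startAudioSession(
    resolve: { _ in print("audio session activated") },
    reject: { code, message, _ in
        print("activation failed [\(code ?? "?")]: \(message ?? "")")
    }
)

// ... on teardown:
module.stopAudioSession(
    resolve: { _ in print("audio session deactivated") },
    reject: { _, message, _ in print("deactivation failed: \(message ?? "")") }
)
```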

@objc(showAudioRoutePicker)
public func showAudioRoutePicker() {
if #available(iOS 11.0, *) {
@@ -95,12 +117,12 @@ }
}
}
}

@objc(getAudioOutputsWithResolver:withRejecter:)
public func getAudioOutputs(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock){
resolve(["default", "force_speaker"])
}

@objc(selectAudioOutput:withResolver:withRejecter:)
public func selectAudioOutput(_ deviceId: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
let session = AVAudioSession.sharedInstance()
@@ -114,86 +136,61 @@ public class LivekitReactNativeModule: RCTEventEmitter {
reject("selectAudioOutput error", error.localizedDescription, error)
return
}

resolve(nil)
}
@objc(setAppleAudioConfiguration:)
public func setAppleAudioConfiguration(_ configuration: NSDictionary) {

@objc(setAppleAudioConfiguration:withResolver:withRejecter:)
public func setAppleAudioConfiguration(_ configuration: NSDictionary, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
let session = RTCAudioSession.sharedInstance()
let config = RTCAudioSessionConfiguration.webRTC()

let appleAudioCategory = configuration["audioCategory"] as? String
let appleAudioCategoryOptions = configuration["audioCategoryOptions"] as? [String]
let appleAudioMode = configuration["audioMode"] as? String

session.lockForConfiguration()

var categoryChanged = false

if let appleAudioCategoryOptions = appleAudioCategoryOptions {
categoryChanged = true

var newOptions: AVAudioSession.CategoryOptions = []
for option in appleAudioCategoryOptions {
if option == "mixWithOthers" {
newOptions.insert(.mixWithOthers)
} else if option == "duckOthers" {
newOptions.insert(.duckOthers)
} else if option == "allowBluetooth" {
newOptions.insert(.allowBluetooth)
} else if option == "allowBluetoothA2DP" {
newOptions.insert(.allowBluetoothA2DP)
} else if option == "allowAirPlay" {
newOptions.insert(.allowAirPlay)
} else if option == "defaultToSpeaker" {
newOptions.insert(.defaultToSpeaker)
}
}
config.categoryOptions = newOptions
defer {
session.unlockForConfiguration()
}

if let appleAudioCategory = appleAudioCategory {
categoryChanged = true
config.category = AudioUtils.audioSessionCategoryFromString(appleAudioCategory).rawValue
}

if categoryChanged {
do {
try session.setCategory(AVAudioSession.Category(rawValue: config.category), with: config.categoryOptions)
} catch {
NSLog("Error setting category: %@", error.localizedDescription)
}

if let appleAudioCategoryOptions = appleAudioCategoryOptions {
config.categoryOptions = AudioUtils.audioSessionCategoryOptionsFromStrings(appleAudioCategoryOptions)
}

if let appleAudioMode = appleAudioMode {
let mode = AudioUtils.audioSessionModeFromString(appleAudioMode)
config.mode = mode.rawValue
do {
try session.setMode(mode)
} catch {
NSLog("Error setting mode: %@", error.localizedDescription)
}
config.mode = AudioUtils.audioSessionModeFromString(appleAudioMode).rawValue
}

session.unlockForConfiguration()

do {
try session.setConfiguration(config)
Comment (Member Author): Using RTCAudioSession's setConfiguration to simplify here.

resolve(nil)
} catch {
reject("setAppleAudioConfiguration", "Error setting category: \(error.localizedDescription)", error)
return
}

}
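
For completeness, the configuration path end to end: the keys ("audioCategory", "audioCategoryOptions", "audioMode") and the string values below are what this method and AudioUtils actually parse; the call site itself is hypothetical.

```swift
let appleConfig: NSDictionary = [
    "audioCategory": "playAndRecord",
    "audioCategoryOptions": ["allowBluetoothA2DP", "defaultToSpeaker"],
    "audioMode": "videoChat",
]

// Illustrative: normally reached through the JS API rather than called directly.
let module = LivekitReactNativeModule()
module.setAppleAudioConfiguration(
    appleConfig,
    resolve: { _ in print("apple audio configuration applied") },
    reject: { _, message, _ in print("configuration failed: \(message ?? "")") }
)
```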

@objc(createAudioSinkListener:trackId:)
public func createAudioSinkListener(_ pcId: NSNumber, trackId: String) -> String {
let renderer = AudioSinkRenderer(eventEmitter: self)
let reactTag = self.audioRendererManager.registerRenderer(renderer)
renderer.reactTag = reactTag
self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)

return reactTag
}

@objc(deleteAudioSinkListener:pcId:trackId:)
public func deleteAudioSinkListener(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)

return nil
}

@@ -203,15 +200,15 @@ public class LivekitReactNativeModule: RCTEventEmitter {
let reactTag = self.audioRendererManager.registerRenderer(renderer)
renderer.reactTag = reactTag
self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)

return reactTag
}

@objc(deleteVolumeProcessor:pcId:trackId:)
public func deleteVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)

return nil
}

@@ -221,7 +218,7 @@ public class LivekitReactNativeModule: RCTEventEmitter {
let minFrequency = (options["minFrequency"] as? NSNumber)?.floatValue ?? 1000
let maxFrequency = (options["maxFrequency"] as? NSNumber)?.floatValue ?? 8000
let intervalMs = (options["updateInterval"] as? NSNumber)?.floatValue ?? 40

let renderer = MultibandVolumeAudioRenderer(
bands: bands,
minFrequency: minFrequency,
@@ -232,26 +229,26 @@ public class LivekitReactNativeModule: RCTEventEmitter {
let reactTag = self.audioRendererManager.registerRenderer(renderer)
renderer.reactTag = reactTag
self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)

return reactTag
}

@objc(deleteMultibandVolumeProcessor:pcId:trackId:)
public func deleteMultibandVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)

return nil
}

@objc(setDefaultAudioTrackVolume:)
public func setDefaultAudioTrackVolume(_ volume: NSNumber) -> Any? {
let options = WebRTCModuleOptions.sharedInstance()
options.defaultTrackVolume = volume.doubleValue

return nil
}

override public func supportedEvents() -> [String]! {
return [
LKEvents.kEventVolumeProcessed,