Skip to content

Commit 4737e4d

Browse files
committed
Add comments
1 parent f068eb9 commit 4737e4d

21 files changed

+139
-121
lines changed

Sources/StreamVideo/CallKit/CallKitService.swift

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,8 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
104104
private var callEndedNotificationCancellable: AnyCancellable?
105105
private var ringingTimerCancellable: AnyCancellable?
106106

107+
/// Debounces CallKit mute toggles that arrive in bursts when the app moves
108+
/// between foreground and background states.
107109
private let muteActionSubject = PassthroughSubject<MuteRequest, Never>()
108110
private var muteActionCancellable: AnyCancellable?
109111
private let muteProcessingQueue = OperationQueue(maxConcurrentOperationCount: 1)
@@ -120,12 +122,9 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
120122
.compactMap { $0.object as? Call }
121123
.sink { [weak self] in self?.callEnded($0.cId, ringingTimedOut: false) }
122124

123-
/// - Important:
124-
/// It used to debounce System's attempts to mute/unmute the call. It seems that the system
125-
/// performs rapid mute/unmute attempts when the call is being joined or moving to foreground.
126-
/// The observation below is in place to guard and normalise those attempts to avoid
127-
/// - rapid speaker and mic toggles
128-
/// - unnecessary attempts to mute/unmute the mic
125+
/// - Important: CallKit can rapidly toggle the mute state while the app
126+
/// moves between foreground and background. This observation smooths
127+
/// those bursts so we do not end up thrashing the audio pipeline.
129128
muteActionCancellable = muteActionSubject
130129
.removeDuplicates()
131130
.filter { [weak self] _ in self?.applicationStateAdapter.state != .foreground }
@@ -787,6 +786,8 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
787786
}
788787
}
789788

789+
/// Normalises mute requests triggered by call settings so CallKit stays in
790+
/// sync with the in-app toggle while avoiding redundant transactions.
790791
private func performCallSettingMuteRequest(
791792
_ muted: Bool,
792793
callUUID: UUID
@@ -808,6 +809,8 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
808809
}
809810
}
810811

812+
/// Applies the debounced mute request once CallKit and permissions agree
813+
/// that the action is allowed.
811814
private func performMuteRequest(_ request: MuteRequest) {
812815
muteProcessingQueue.addTaskOperation { [weak self] in
813816
guard

Sources/StreamVideo/Models/CallSettings.swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ import Foundation
77

88
/// Represents the settings for a call.
99
public final class CallSettings: ObservableObject, Sendable, Equatable, CustomStringConvertible {
10+
/// Canonical baseline settings used when we need a placeholder before the
11+
/// backend sends the definitive values.
1012
public static let `default` = CallSettings()
1113

1214
/// Whether the audio is on for the current user.

Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift

Lines changed: 6 additions & 65 deletions
Original file line numberDiff line numberDiff line change
@@ -131,15 +131,12 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
131131
/// - Parameter source: The audio device module implementation to observe.
132132
init(
133133
_ source: any RTCAudioDeviceModuleControlling,
134-
isPlaying: Bool = false,
135-
isRecording: Bool = false,
136-
isMicrophoneMuted: Bool = false,
137134
audioLevelsNodeAdapter: AudioEngineNodeAdapting = AudioEngineLevelNodeAdapter()
138135
) {
139136
self.source = source
140-
self.isPlayingSubject = .init(isPlaying)
141-
self.isRecordingSubject = .init(isRecording)
142-
self.isMicrophoneMutedSubject = .init(isMicrophoneMuted)
137+
self.isPlayingSubject = .init(source.isPlaying)
138+
self.isRecordingSubject = .init(source.isRecording)
139+
self.isMicrophoneMutedSubject = .init(source.isMicrophoneMuted)
143140
self.isStereoPlayoutEnabledSubject = .init(source.isStereoPlayoutEnabled)
144141
self.isVoiceProcessingBypassedSubject = .init(source.isVoiceProcessingBypassed)
145142
self.isVoiceProcessingEnabledSubject = .init(source.isVoiceProcessingEnabled)
@@ -162,28 +159,6 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
162159

163160
audioLevelsAdapter.subject = audioLevelSubject
164161
source.observer = self
165-
166-
source
167-
.microphoneMutedPublisher()
168-
.receive(on: dispatchQueue)
169-
.sink { [weak self] in self?.isMicrophoneMutedSubject.send($0) }
170-
.store(in: disposableBag)
171-
172-
source
173-
.isVoiceProcessingBypassedPublisher()
174-
.receive(on: dispatchQueue)
175-
.sink { [weak self] in self?.isVoiceProcessingBypassedSubject.send($0) }
176-
.store(in: disposableBag)
177-
source
178-
.isVoiceProcessingEnabledPublisher()
179-
.receive(on: dispatchQueue)
180-
.sink { [weak self] in self?.isVoiceProcessingEnabledSubject.send($0) }
181-
.store(in: disposableBag)
182-
source
183-
.isVoiceProcessingAGCEnabledPublisher()
184-
.receive(on: dispatchQueue)
185-
.sink { [weak self] in self?.isVoiceProcessingAGCEnabledSubject.send($0) }
186-
.store(in: disposableBag)
187162
}
188163

189164
// MARK: - Recording
@@ -207,10 +182,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
207182
(source as? RTCAudioDeviceModule)?.setRecordingAlwaysPreparedMode(true)
208183
source.prefersStereoPlayout = isPreferred
209184

185+
/// We store the mic state before we perform `initAndStartRecording` so we can restore
186+
/// the state at the end.
210187
let isMuted = isMicrophoneMuted
211188

212189
_ = source.stopRecording()
213190
_ = source.initAndStartRecording()
191+
214192
if isMuted {
215193
_ = source.setMicrophoneMuted(isMuted)
216194
}
@@ -519,40 +497,3 @@ extension AVAudioEngine {
519497
return "\(asbd.mChannelsPerFrame) ch @ \(asbd.mSampleRate) Hz"
520498
}
521499
}
522-
523-
enum RetriableTask {
524-
static func run(
525-
iterations: Int,
526-
operation: () throws -> Void
527-
) throws {
528-
try execute(
529-
currentIteration: 0,
530-
iterations: iterations,
531-
operation: operation
532-
)
533-
}
534-
535-
private static func execute(
536-
currentIteration: Int,
537-
iterations: Int,
538-
operation: () throws -> Void
539-
) throws {
540-
do {
541-
return try operation()
542-
} catch {
543-
if currentIteration < iterations - 1 {
544-
do {
545-
return try execute(
546-
currentIteration: currentIteration + 1,
547-
iterations: iterations,
548-
operation: operation
549-
)
550-
} catch {
551-
throw error
552-
}
553-
} else {
554-
throw error
555-
}
556-
}
557-
}
558-
}

Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift

Lines changed: 5 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,11 @@ import Combine
66
import StreamWebRTC
77

88
/// Abstraction over `RTCAudioDeviceModule` so tests can provide fakes while
9-
/// production code keeps using the WebRTC implementation.
9+
/// production code continues to rely on the WebRTC-backed implementation.
1010
protocol RTCAudioDeviceModuleControlling: AnyObject {
1111
var observer: RTCAudioDeviceModuleDelegate? { get set }
12+
var isPlaying: Bool { get }
13+
var isRecording: Bool { get }
1214
var isMicrophoneMuted: Bool { get }
1315
var isStereoPlayoutEnabled: Bool { get }
1416
var isVoiceProcessingBypassed: Bool { get }
@@ -24,15 +26,11 @@ protocol RTCAudioDeviceModuleControlling: AnyObject {
2426
func setMicrophoneMuted(_ isMuted: Bool) -> Int
2527
func stopRecording() -> Int
2628
func refreshStereoPlayoutState()
27-
28-
/// Publisher that emits whenever the microphone mute state changes.
29-
func microphoneMutedPublisher() -> AnyPublisher<Bool, Never>
30-
func isVoiceProcessingBypassedPublisher() -> AnyPublisher<Bool, Never>
31-
func isVoiceProcessingEnabledPublisher() -> AnyPublisher<Bool, Never>
32-
func isVoiceProcessingAGCEnabledPublisher() -> AnyPublisher<Bool, Never>
3329
}
3430

3531
extension RTCAudioDeviceModule: RTCAudioDeviceModuleControlling {
32+
/// Convenience wrapper that mirrors the old `initPlayout` and
33+
/// `startPlayout` sequence so the caller can request playout in one call.
3634
func initAndStartPlayout() -> Int {
3735
let result = initPlayout()
3836
if result == 0 {
@@ -41,24 +39,4 @@ extension RTCAudioDeviceModule: RTCAudioDeviceModuleControlling {
4139
return result
4240
}
4341
}
44-
45-
func microphoneMutedPublisher() -> AnyPublisher<Bool, Never> {
46-
publisher(for: \.isMicrophoneMuted)
47-
.eraseToAnyPublisher()
48-
}
49-
50-
func isVoiceProcessingBypassedPublisher() -> AnyPublisher<Bool, Never> {
51-
publisher(for: \.isVoiceProcessingBypassed)
52-
.eraseToAnyPublisher()
53-
}
54-
55-
func isVoiceProcessingEnabledPublisher() -> AnyPublisher<Bool, Never> {
56-
publisher(for: \.isVoiceProcessingEnabled)
57-
.eraseToAnyPublisher()
58-
}
59-
60-
func isVoiceProcessingAGCEnabledPublisher() -> AnyPublisher<Bool, Never> {
61-
publisher(for: \.isVoiceProcessingAGCEnabled)
62-
.eraseToAnyPublisher()
63-
}
6442
}

Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ extension StreamCallAudioRecorder.Namespace {
2222
/// ensure thread safety when accessing the recorder instance.
2323
final class AVAudioRecorderMiddleware: Middleware<StreamCallAudioRecorder.Namespace>, @unchecked Sendable {
2424

25+
/// Tracks which metering backend is active so we can flip between
26+
/// `AVAudioRecorder` and the audio device module seamlessly.
2527
enum Mode: Equatable {
2628
case invalid
2729
case audioRecorder(AVAudioRecorder)
@@ -39,6 +41,8 @@ extension StreamCallAudioRecorder.Namespace {
3941

4042
/// Subscription for publishing meter updates at refresh rate.
4143
private var updateMetersCancellable: AnyCancellable?
44+
/// Listens for ADM availability and pivots the metering source on the
45+
/// fly when stereo playout is enabled.
4246
private var audioDeviceModuleCancellable: AnyCancellable?
4347

4448
init(audioRecorder: AVAudioRecorder? = nil) {

Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@ extension StreamCallAudioRecorder.Namespace {
3333

3434
// Monitor for category changes that are incompatible with recording
3535
cancellable = audioStore
36+
// Observe the derived configuration so system-driven category
37+
// changes also stop the local recorder.
3638
.publisher(\.audioSessionConfiguration.category)
3739
.filter { $0 != .playAndRecord && $0 != .record }
3840
.sink { [weak self] _ in

Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@ final class CallAudioSession: @unchecked Sendable {
1212

1313
@Injected(\.audioStore) private var audioStore
1414

15+
/// Bundles the reactive inputs we need to evaluate whenever call
16+
/// capabilities or settings change, keeping log context attached.
1517
private struct Input {
1618
var callSettings: CallSettings
1719
var ownCapabilities: Set<OwnCapability>
@@ -50,6 +52,8 @@ final class CallAudioSession: @unchecked Sendable {
5052
private let disposableBag = DisposableBag()
5153
private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
5254

55+
/// Serialises policy evaluations so the AVAudioSession only receives one
56+
/// configuration at a time even when upstream publishers fire in bursts.
5357
private let processingPipeline = PassthroughSubject<Input, Never>()
5458

5559
private var lastAppliedConfiguration: AudioSessionConfiguration?
@@ -91,6 +95,8 @@ final class CallAudioSession: @unchecked Sendable {
9195
self.delegate = delegate
9296
self.statsAdapter = statsAdapter
9397

98+
// Expose the policy's stereo preference so the audio device module can
99+
// reconfigure itself before WebRTC starts playout.
94100
audioStore.dispatch(.stereo(.setPlayoutPreferred(policy is LivestreamAudioSessionPolicy)))
95101
audioStore.dispatch(.webRTCAudioSession(.setAudioEnabled(true)))
96102

@@ -162,6 +168,8 @@ final class CallAudioSession: @unchecked Sendable {
162168
)
163169
}
164170

171+
/// Wires call setting and capability updates into the processing queue so
172+
/// downstream work always executes serially.
165173
private func configureCallSettingsAndCapabilitiesObservation(
166174
callSettingsPublisher: AnyPublisher<CallSettings, Never>,
167175
ownCapabilitiesPublisher: AnyPublisher<Set<OwnCapability>, Never>
@@ -184,6 +192,8 @@ final class CallAudioSession: @unchecked Sendable {
184192
.store(in: disposableBag)
185193
}
186194

195+
/// Reapplies the last known category options when the system clears them,
196+
/// which happens after some CallKit activations.
187197
private func configureCallOptionsObservation() {
188198
audioStore
189199
.publisher(\.audioSessionConfiguration.options)
@@ -195,6 +205,8 @@ final class CallAudioSession: @unchecked Sendable {
195205
.store(in: disposableBag)
196206
}
197207

208+
/// Keeps the delegate informed of hardware flips while also re-evaluating
209+
/// the policy when we detect a reconfiguration-worthy route change.
198210
private func configureCurrentRouteObservation() {
199211
audioStore
200212
.publisher(\.currentRoute)
@@ -246,6 +258,8 @@ final class CallAudioSession: @unchecked Sendable {
246258
)
247259
}
248260

261+
/// Breaks the configuration into store actions so reducers update the
262+
/// audio session and our own bookkeeping in a single dispatch.
249263
private func applyConfiguration(
250264
_ configuration: AudioSessionConfiguration,
251265
callSettings: CallSettings,

Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,8 @@ extension AVAudioSession.CategoryOptions {
2323
static let playback: AVAudioSession.CategoryOptions = []
2424

2525
#if !canImport(AVFoundation, _version: 2360.61.4.11)
26+
/// Older SDKs only expose ``allowBluetooth`` so we map the HFP alias to it
27+
/// to avoid peppering the codebase with availability checks.
2628
public static let allowBluetoothHFP = Self.allowBluetooth
2729
#endif
2830
}

Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,8 @@ import Foundation
77

88
extension AVAudioSession.RouteChangeReason {
99

10-
/// Taken from https://chromium.googlesource.com/external/webrtc/+/34911ad55c4c4c549fe60e1b4cc127420b15666b/webrtc/modules/audio_device/ios/audio_device_ios.mm#557
11-
/// in the routeChange logic. Useful to ignore route changes that don't really matter for our
12-
/// webrtc sessions.
10+
/// Mirrors the filtering logic used by WebRTC so we ignore redundant
11+
/// callbacks such as `categoryChange` that would otherwise spam the store.
1312
var isValidRouteChange: Bool {
1413
switch self {
1514
case .categoryChange, .routeConfigurationChange:
@@ -19,7 +18,8 @@ extension AVAudioSession.RouteChangeReason {
1918
}
2019
}
2120

22-
/// https://github.com/TheWidlarzGroup/react-native-video/blob/fbb260e9164194a55d2b26404aea000e924e2f04/ios/Video/AudioSessionManager.swift#L357
21+
/// Flags reasons that represent real hardware transitions so we can rebuild
22+
/// the audio graph when necessary.
2323
var requiresReconfiguration: Bool {
2424
switch self {
2525
case .categoryChange, .override, .wakeFromSleep, .newDeviceAvailable, .oldDeviceUnavailable:

Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@ extension AVAudioSession.Mode {
1212
rawValue
1313
}
1414

15+
/// Indicates whether the mode keeps stereo playout active or if WebRTC
16+
/// should fall back to mono because of voice-processing constraints.
1517
var supportsStereoPlayout: Bool {
1618
switch self {
1719
case .videoChat, .voiceChat, .gameChat:

0 commit comments

Comments
 (0)