diff --git a/DemoApp/Sources/Components/AppEnvironment.swift b/DemoApp/Sources/Components/AppEnvironment.swift index ea66f24eb..d2d6eb832 100644 --- a/DemoApp/Sources/Components/AppEnvironment.swift +++ b/DemoApp/Sources/Components/AppEnvironment.swift @@ -554,7 +554,7 @@ extension AppEnvironment { extension AppEnvironment { enum AudioSessionPolicyDebugConfiguration: Hashable, Debuggable, Sendable { - case `default`, ownCapabilities + case `default`, ownCapabilities, livestream var title: String { switch self { @@ -562,6 +562,8 @@ extension AppEnvironment { return "Default" case .ownCapabilities: return "OwnCapabilities" + case .livestream: + return "Livestream" } } @@ -571,12 +573,14 @@ extension AppEnvironment { return DefaultAudioSessionPolicy() case .ownCapabilities: return OwnCapabilitiesAudioSessionPolicy() + case .livestream: + return LivestreamAudioSessionPolicy() } } } static var audioSessionPolicy: AudioSessionPolicyDebugConfiguration = { - .default + .livestream }() } @@ -616,7 +620,7 @@ extension AppEnvironment { } static var proximityPolicies: Set = { - [.speaker, .video] + [.video, .speaker] }() } @@ -634,6 +638,19 @@ extension ClientCapability: Debuggable { } } +extension Logger.WebRTC.LogMode: Debuggable { + var title: String { + switch self { + case .none: + return "None" + case .validFilesOnly: + return "Valid Files only" + case .all: + return "All" + } + } +} + extension String: Debuggable { var title: String { self diff --git a/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift b/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift index 76e90d693..c9580d6eb 100644 --- a/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift +++ b/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift @@ -6,7 +6,12 @@ import Foundation import StreamVideo enum LogQueue { - static let queue: Queue = .init(maxCount: 3000) + #if DEBUG + private static let queueCapacity = 10000 + #else + private static let queueCapacity = 1000 + 
#endif + static let queue: Queue = .init(maxCount: queueCapacity) static func insert(_ element: LogDetails) { queue.insert(element) } diff --git a/DemoApp/Sources/Views/Login/DebugMenu.swift b/DemoApp/Sources/Views/Login/DebugMenu.swift index 2954bc722..9028d4e1c 100644 --- a/DemoApp/Sources/Views/Login/DebugMenu.swift +++ b/DemoApp/Sources/Views/Login/DebugMenu.swift @@ -231,7 +231,7 @@ struct DebugMenu: View { } makeMenu( - for: [.default, .ownCapabilities], + for: [.default, .ownCapabilities, .livestream], currentValue: audioSessionPolicy, label: "AudioSession policy" ) { self.audioSessionPolicy = $0 } @@ -302,10 +302,10 @@ struct DebugMenu: View { ) { LogConfig.level = $0 } makeMenu( - for: [true, false], - currentValue: LogConfig.webRTCLogsEnabled, + for: [.none, .validFilesOnly, .all], + currentValue: Logger.WebRTC.mode, label: "WebRTC Logs" - ) { LogConfig.webRTCLogsEnabled = $0 } + ) { Logger.WebRTC.mode = $0 } Button { isLogsViewerVisible = true diff --git a/Package.swift b/Package.swift index 9e6fc7881..f7c764395 100644 --- a/Package.swift +++ b/Package.swift @@ -23,7 +23,7 @@ let package = Package( ], dependencies: [ .package(url: "https://github.com/apple/swift-protobuf.git", exact: "1.30.0"), - .package(url: "https://github.com/GetStream/stream-video-swift-webrtc.git", exact: "137.0.43") + .package(url: "https://github.com/GetStream/stream-video-swift-webrtc.git", exact: "137.0.51") ], targets: [ .target( diff --git a/Sources/StreamVideo/Errors/Errors.swift b/Sources/StreamVideo/Errors/Errors.swift index 739c1cf11..857123b30 100644 --- a/Sources/StreamVideo/Errors/Errors.swift +++ b/Sources/StreamVideo/Errors/Errors.swift @@ -7,10 +7,11 @@ import Foundation extension Stream_Video_Sfu_Models_Error: Error, ReflectiveStringConvertible {} /// A Client error. 
-public class ClientError: Error, ReflectiveStringConvertible, @unchecked Sendable { - public struct Location: Equatable, Sendable { +public class ClientError: Error, CustomStringConvertible, @unchecked Sendable { + public struct Location: Equatable, Sendable, CustomStringConvertible { public let file: String public let line: Int + public var description: String { "{ file:\(file), line:\(line) }" } } /// The file and line number which emitted the error. @@ -33,7 +34,26 @@ public class ClientError: Error, ReflectiveStringConvertible, @unchecked Sendabl /// Retrieve the localized description for this error. public var localizedDescription: String { message ?? errorDescription ?? "" } - + + public var description: String { + var result = "ClientError {" + result += " location:\(location)" + if let message { + result += " message:\(message)" + } + if let apiError { + result += ", apiError:\(apiError)" + } + if let underlyingError { + result += ", underlyingError:\(underlyingError)" + } + if let errorDescription { + result += ", errorDescription:\(errorDescription)" + } + result += " }" + return result + } + /// A client error based on an external general error. /// - Parameters: /// - error: an external error. diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift index 3cfb2b917..5b723122d 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift @@ -2,6 +2,8 @@ // Copyright © 2025 Stream.io Inc. All rights reserved. // +import AudioToolbox +import AVFAudio import AVFoundation import Combine import Foundation @@ -11,56 +13,172 @@ import StreamWebRTC /// audio pipeline can stay in sync with application logic. 
final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable, @unchecked Sendable { + /// Helper constants used across the module. enum Constant { - // WebRTC interfaces are returning integer result codes. We use this typed/named - // constant to define the Success of an operation. + /// WebRTC interfaces return integer result codes. We use this typed/named + /// constant to define the success of an operation. static let successResult = 0 - // The down limit of audio pipeline in DB that is considered silence. + /// Audio pipeline floor in dB that we interpret as silence. static let silenceDB: Float = -160 } /// Events emitted as the underlying audio engine changes state. - enum Event: Equatable { + enum Event: Equatable, CustomStringConvertible { + /// Outbound audio surpassed the silence threshold. case speechActivityStarted + /// Outbound audio dropped back to silence. case speechActivityEnded + /// A new `AVAudioEngine` instance has been created. case didCreateAudioEngine(AVAudioEngine) - case willEnableAudioEngine(AVAudioEngine) - case willStartAudioEngine(AVAudioEngine) - case didStopAudioEngine(AVAudioEngine) - case didDisableAudioEngine(AVAudioEngine) + /// The engine is about to enable playout/recording paths. + case willEnableAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) + /// The engine is about to start rendering. + case willStartAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) + /// The engine has fully stopped. + case didStopAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) + /// The engine was disabled after stopping. + case didDisableAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) + /// The engine will be torn down. case willReleaseAudioEngine(AVAudioEngine) + /// The input graph is configured with a new source node. 
+ case configureInputFromSource(AVAudioEngine, source: AVAudioNode?, destination: AVAudioNode, format: AVAudioFormat) + /// The output graph is configured with a destination node. + case configureOutputFromSource(AVAudioEngine, source: AVAudioNode, destination: AVAudioNode?, format: AVAudioFormat) + /// Voice processing knobs changed. + case didUpdateAudioProcessingState( + voiceProcessingEnabled: Bool, + voiceProcessingBypassed: Bool, + voiceProcessingAGCEnabled: Bool, + stereoPlayoutEnabled: Bool + ) + + var description: String { + switch self { + case .speechActivityStarted: + return ".speechActivityStarted" + + case .speechActivityEnded: + return ".speechActivityEnded" + + case .didCreateAudioEngine(let engine): + return ".didCreateAudioEngine(\(engine))" + + case .willEnableAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled): + return ".willEnableAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))" + + case .willStartAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled): + return ".willStartAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))" + + case .didStopAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled): + return ".didStopAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))" + + case .didDisableAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled): + return ".didDisableAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))" + + case .willReleaseAudioEngine(let engine): + return ".willReleaseAudioEngine(\(engine))" + + case .configureInputFromSource(let engine, let source, let destination, let format): + return ".configureInputFromSource(\(engine), source:\(source), destination:\(destination), format:\(format))" + + case .configureOutputFromSource(let engine, let source, let destination, let 
format): + return ".configureOutputFromSource(\(engine), source:\(source), destination:\(destination), format:\(format))" + + case let .didUpdateAudioProcessingState( + voiceProcessingEnabled, + voiceProcessingBypassed, + voiceProcessingAGCEnabled, + stereoPlayoutEnabled + ): + return ".didUpdateAudioProcessingState(voiceProcessingEnabled:\(voiceProcessingEnabled), voiceProcessingBypassed:\(voiceProcessingBypassed), voiceProcessingAGCEnabled:\(voiceProcessingAGCEnabled), stereoPlayoutEnabled:\(stereoPlayoutEnabled))" + } + } } + /// Tracks whether WebRTC is currently playing back audio. private let isPlayingSubject: CurrentValueSubject + /// `true` while audio playout is active. var isPlaying: Bool { isPlayingSubject.value } + /// Publisher that reflects playout activity changes. var isPlayingPublisher: AnyPublisher { isPlayingSubject.eraseToAnyPublisher() } + /// Tracks whether WebRTC is capturing microphone samples. private let isRecordingSubject: CurrentValueSubject + /// `true` while audio capture is active. var isRecording: Bool { isRecordingSubject.value } + /// Publisher that reflects recording activity changes. var isRecordingPublisher: AnyPublisher { isRecordingSubject.eraseToAnyPublisher() } + /// Tracks whether the microphone is muted at the ADM layer. private let isMicrophoneMutedSubject: CurrentValueSubject + /// `true` if the microphone is muted. var isMicrophoneMuted: Bool { isMicrophoneMutedSubject.value } + /// Publisher that reflects microphone mute changes. var isMicrophoneMutedPublisher: AnyPublisher { isMicrophoneMutedSubject.eraseToAnyPublisher() } + /// Tracks whether stereo playout is configured. + private let isStereoPlayoutEnabledSubject: CurrentValueSubject + /// `true` if stereo playout is available and active. + var isStereoPlayoutEnabled: Bool { isStereoPlayoutEnabledSubject.value } + /// Publisher emitting stereo playout state. 
+ var isStereoPlayoutEnabledPublisher: AnyPublisher { isStereoPlayoutEnabledSubject.eraseToAnyPublisher() } + + /// Tracks whether VP processing is currently bypassed. + private let isVoiceProcessingBypassedSubject: CurrentValueSubject + /// `true` if the voice processing unit is bypassed. + var isVoiceProcessingBypassed: Bool { isVoiceProcessingBypassedSubject.value } + /// Publisher emitting VP bypass changes. + var isVoiceProcessingBypassedPublisher: AnyPublisher { isVoiceProcessingBypassedSubject.eraseToAnyPublisher() } + + /// Tracks whether voice processing is enabled. + private let isVoiceProcessingEnabledSubject: CurrentValueSubject + /// `true` when Apple VP is active. + var isVoiceProcessingEnabled: Bool { isVoiceProcessingEnabledSubject.value } + /// Publisher emitting VP enablement changes. + var isVoiceProcessingEnabledPublisher: AnyPublisher { isVoiceProcessingEnabledSubject.eraseToAnyPublisher() } + + /// Tracks whether automatic gain control is enabled inside VP. + private let isVoiceProcessingAGCEnabledSubject: CurrentValueSubject + /// `true` while AGC is active. + var isVoiceProcessingAGCEnabled: Bool { isVoiceProcessingAGCEnabledSubject.value } + /// Publisher emitting AGC changes. + var isVoiceProcessingAGCEnabledPublisher: AnyPublisher { isVoiceProcessingAGCEnabledSubject.eraseToAnyPublisher() } + + /// Observes RMS audio levels (in dB) derived from the input tap. private let audioLevelSubject = CurrentValueSubject(Constant.silenceDB) // default to silence + /// Latest measured audio level. var audioLevel: Float { audioLevelSubject.value } + /// Publisher emitting audio level updates. var audioLevelPublisher: AnyPublisher { audioLevelSubject.eraseToAnyPublisher() } + /// Wrapper around WebRTC `RTCAudioDeviceModule`. private let source: any RTCAudioDeviceModuleControlling + /// Manages Combine subscriptions generated by this module. private let disposableBag: DisposableBag = .init() + /// Serial queue used to deliver events to observers. 
private let dispatchQueue: DispatchQueue + /// Internal relay that feeds `publisher`. private let subject: PassthroughSubject + /// Object that taps engine nodes and publishes audio level data. private var audioLevelsAdapter: AudioEngineNodeAdapting + /// Public stream of `Event` values describing engine transitions. let publisher: AnyPublisher + /// Strong reference to the current engine so we can introspect it if needed. + private var engine: AVAudioEngine? + + /// Textual diagnostics for logging and debugging. override var description: String { "{ " + "isPlaying:\(isPlaying)" + ", isRecording:\(isRecording)" + ", isMicrophoneMuted:\(isMicrophoneMuted)" + + ", isStereoPlayoutEnabled:\(isStereoPlayoutEnabled)" + + ", isVoiceProcessingBypassed:\(isVoiceProcessingBypassed)" + + ", isVoiceProcessingEnabled:\(isVoiceProcessingEnabled)" + + ", isVoiceProcessingAGCEnabled:\(isVoiceProcessingAGCEnabled)" + ", audioLevel:\(audioLevel)" + ", source:\(source)" + " }" @@ -70,15 +188,16 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable /// - Parameter source: The audio device module implementation to observe. 
init( _ source: any RTCAudioDeviceModuleControlling, - isPlaying: Bool = false, - isRecording: Bool = false, - isMicrophoneMuted: Bool = false, audioLevelsNodeAdapter: AudioEngineNodeAdapting = AudioEngineLevelNodeAdapter() ) { self.source = source - self.isPlayingSubject = .init(isPlaying) - self.isRecordingSubject = .init(isRecording) - self.isMicrophoneMutedSubject = .init(isMicrophoneMuted) + self.isPlayingSubject = .init(source.isPlaying) + self.isRecordingSubject = .init(source.isRecording) + self.isMicrophoneMutedSubject = .init(source.isMicrophoneMuted) + self.isStereoPlayoutEnabledSubject = .init(source.isStereoPlayoutEnabled) + self.isVoiceProcessingBypassedSubject = .init(source.isVoiceProcessingBypassed) + self.isVoiceProcessingEnabledSubject = .init(source.isVoiceProcessingEnabled) + self.isVoiceProcessingAGCEnabledSubject = .init(source.isVoiceProcessingAGCEnabled) self.audioLevelsAdapter = audioLevelsNodeAdapter let dispatchQueue = DispatchQueue(label: "io.getstream.audiodevicemodule", qos: .userInteractive) @@ -90,18 +209,65 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable .eraseToAnyPublisher() super.init() + subject + .log(.debug, subsystems: .audioSession) { "\($0)" } + .sink { _ in } + .store(in: disposableBag) + audioLevelsAdapter.subject = audioLevelSubject source.observer = self - source - .microphoneMutedPublisher() - .receive(on: dispatchQueue) - .sink { [weak self] in self?.isMicrophoneMutedSubject.send($0) } - .store(in: disposableBag) + source.isVoiceProcessingBypassed = true } // MARK: - Recording + /// Reinitializes the ADM, clearing its internal audio graph state. + func reset() { + _ = source.reset() + } + + /// Switches between stereo and mono playout while keeping the recording + /// state consistent across reinitializations. + /// - Parameter isPreferred: `true` when stereo output should be used. 
+ func setStereoPlayoutPreference(_ isPreferred: Bool) { + /// - Important: `.voiceProcessing` requires VP to be enabled in order to mute and + /// `.restartEngine` rebuilds the whole graph. Each of them has different issues: + /// - `.voiceProcessing`: as it requires VP to be enabled in order to mute/unmute that + /// means that for outputs where VP is disabled (e.g. stereo) we cannot mute/unmute. + /// - `.restartEngine`: rebuilds the whole graph and requires explicit calling of + /// `initAndStartRecording` . + _ = source.setMuteMode(isPreferred ? .inputMixer : .voiceProcessing) + /// - Important: We can probably set this one to false when the user doesn't have + /// sendAudio capability. + _ = source.setRecordingAlwaysPreparedMode(false) + source.prefersStereoPlayout = isPreferred + } + + /// Starts or stops speaker playout on the ADM, retrying transient failures. + /// - Parameter isActive: `true` to start playout, `false` to stop. + /// - Throws: `ClientError` when WebRTC returns a non-zero status. + func setPlayout(_ isActive: Bool) throws { + guard isActive != isPlaying else { + return + } + if isActive { + if source.isPlayoutInitialized { + try throwingExecution("Unable to start playout") { + source.startPlayout() + } + } else { + try throwingExecution("Unable to initAndStart playout") { + source.initAndStartPlayout() + } + } + } else { + try throwingExecution("Unable to stop playout") { + source.stopPlayout() + } + } + } + /// Enables or disables recording on the wrapped audio device module. /// - Parameter isEnabled: When `true` recording starts, otherwise stops. /// - Throws: `ClientError` when the underlying module reports a failure. 
@@ -109,23 +275,18 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable guard isEnabled != isRecording else { return } - if isEnabled { - let isMicrophoneMuted = source.isMicrophoneMuted - - try throwingExecution("Unable to initAndStartRecording.") { - source.initAndStartRecording() - } - - // After restarting the ADM it always returns with microphoneMute:false. - // Here we reinstate the muted condition after restarting ADM. - if isMicrophoneMuted { - try throwingExecution("Unable to setMicrophoneMuted:\(isEnabled).") { - source.setMicrophoneMuted(isMicrophoneMuted) + if source.isRecordingInitialized { + try throwingExecution("Unable to start recording") { + source.startRecording() + } + } else { + try throwingExecution("Unable to initAndStart recording") { + source.initAndStartRecording() } } } else { - try throwingExecution("Unable to stopRecording.") { + try throwingExecution("Unable to stop recording") { source.stopRecording() } } @@ -137,18 +298,29 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable /// - Parameter isMuted: `true` to mute the microphone, `false` to unmute. /// - Throws: `ClientError` when the underlying module reports a failure. func setMuted(_ isMuted: Bool) throws { - guard isMuted != isMicrophoneMuted else { + guard isMuted != source.isMicrophoneMuted else { return } + if !isMuted, !isRecording { + try setRecording(true) + } + try throwingExecution("Unable to setMicrophoneMuted:\(isMuted)") { source.setMicrophoneMuted(isMuted) } + isMicrophoneMutedSubject.send(isMuted) } + /// Forces the ADM to recompute whether stereo output is supported. + func refreshStereoPlayoutState() { + source.refreshStereoPlayoutState() + } + // MARK: - RTCAudioDeviceModuleDelegate + /// Receives speech activity notifications emitted by WebRTC VAD. 
func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, didReceiveSpeechActivityEvent speechActivityEvent: RTCSpeechActivityEvent @@ -163,73 +335,111 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable } } + /// Stores the created engine reference and emits an event so observers can + /// hook into the audio graph configuration. func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, didCreateEngine engine: AVAudioEngine ) -> Int { + self.engine = engine subject.send(.didCreateAudioEngine(engine)) return Constant.successResult } + /// Keeps local playback/recording state in sync as WebRTC enables the + /// corresponding engine paths. func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, willEnableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool ) -> Int { - subject.send(.willEnableAudioEngine(engine)) + subject.send( + .willEnableAudioEngine( + engine, + isPlayoutEnabled: isPlayoutEnabled, + isRecordingEnabled: isRecordingEnabled + ) + ) isPlayingSubject.send(isPlayoutEnabled) isRecordingSubject.send(isRecordingEnabled) return Constant.successResult } + /// Mirrors state when the engine is about to start running and delivering + /// audio samples. func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, willStartEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool ) -> Int { - subject.send(.willStartAudioEngine(engine)) + subject.send( + .willStartAudioEngine( + engine, + isPlayoutEnabled: isPlayoutEnabled, + isRecordingEnabled: isRecordingEnabled + ) + ) isPlayingSubject.send(isPlayoutEnabled) isRecordingSubject.send(isRecordingEnabled) + return Constant.successResult } + /// Updates state and notifies observers once the engine has completely + /// stopped. 
func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, didStopEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool ) -> Int { - subject.send(.didStopAudioEngine(engine)) - audioLevelsAdapter.uninstall(on: 0) + subject.send( + .didStopAudioEngine( + engine, + isPlayoutEnabled: isPlayoutEnabled, + isRecordingEnabled: isRecordingEnabled + ) + ) isPlayingSubject.send(isPlayoutEnabled) isRecordingSubject.send(isRecordingEnabled) return Constant.successResult } + /// Tracks when the engine has been disabled after stopping so clients can + /// react (e.g., rebuilding audio graphs). func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, didDisableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool ) -> Int { - subject.send(.didDisableAudioEngine(engine)) - audioLevelsAdapter.uninstall(on: 0) + subject.send( + .didDisableAudioEngine( + engine, + isPlayoutEnabled: isPlayoutEnabled, + isRecordingEnabled: isRecordingEnabled + ) + ) isPlayingSubject.send(isPlayoutEnabled) isRecordingSubject.send(isRecordingEnabled) return Constant.successResult } + /// Clears internal references before WebRTC disposes the engine. func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, willReleaseEngine engine: AVAudioEngine ) -> Int { + self.engine = nil subject.send(.willReleaseAudioEngine(engine)) audioLevelsAdapter.uninstall(on: 0) return Constant.successResult } + /// Keeps observers informed when WebRTC sets up the input graph and installs + /// an audio level tap to monitor microphone activity. 
func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, engine: AVAudioEngine, @@ -238,6 +448,14 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable format: AVAudioFormat, context: [AnyHashable: Any] ) -> Int { + subject.send( + .configureInputFromSource( + engine, + source: source, + destination: destination, + format: format + ) + ) audioLevelsAdapter.installInputTap( on: destination, format: format, @@ -247,6 +465,7 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable return Constant.successResult } + /// Emits an event whenever WebRTC reconfigures the output graph. func audioDeviceModule( _ audioDeviceModule: RTCAudioDeviceModule, engine: AVAudioEngine, @@ -255,32 +474,74 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable format: AVAudioFormat, context: [AnyHashable: Any] ) -> Int { - Constant.successResult + subject.send( + .configureOutputFromSource( + engine, + source: source, + destination: destination, + format: format + ) + ) + return Constant.successResult } + /// Currently unused: CallKit/RoutePicker own the device selection UX. func audioDeviceModuleDidUpdateDevices( _ audioDeviceModule: RTCAudioDeviceModule ) { - /* No-op */ + // No-op } + /// Mirrors state changes coming from CallKit/WebRTC voice-processing + /// controls so UI can reflect the correct toggles. 
+ func audioDeviceModule( + _ module: RTCAudioDeviceModule, + didUpdateAudioProcessingState state: RTCAudioProcessingState + ) { + subject.send( + .didUpdateAudioProcessingState( + voiceProcessingEnabled: state.voiceProcessingEnabled, + voiceProcessingBypassed: state.voiceProcessingBypassed, + voiceProcessingAGCEnabled: state.voiceProcessingAGCEnabled, + stereoPlayoutEnabled: state.stereoPlayoutEnabled + ) + ) + isVoiceProcessingEnabledSubject.send(state.voiceProcessingEnabled) + isVoiceProcessingBypassedSubject.send(state.voiceProcessingBypassed) + isVoiceProcessingAGCEnabledSubject.send(state.voiceProcessingAGCEnabled) + isStereoPlayoutEnabledSubject.send(state.stereoPlayoutEnabled) + } + + /// Mirrors the subset of properties that can be encoded for debugging. private enum CodingKeys: String, CodingKey { case isPlaying case isRecording case isMicrophoneMuted + case isStereoPlayoutEnabled + case isVoiceProcessingBypassed + case isVoiceProcessingEnabled + case isVoiceProcessingAGCEnabled + case audioLevel } + /// Serializes the module state, primarily for diagnostic payloads. func encode(to encoder: Encoder) throws { var container = encoder.container(keyedBy: CodingKeys.self) try container.encode(isPlaying, forKey: .isPlaying) try container.encode(isRecording, forKey: .isRecording) try container.encode(isMicrophoneMuted, forKey: .isMicrophoneMuted) + try container.encode(isStereoPlayoutEnabled, forKey: .isStereoPlayoutEnabled) + try container.encode(isVoiceProcessingBypassed, forKey: .isVoiceProcessingBypassed) + try container.encode(isVoiceProcessingEnabled, forKey: .isVoiceProcessingEnabled) + try container.encode(isVoiceProcessingAGCEnabled, forKey: .isVoiceProcessingAGCEnabled) try container.encode(audioLevel, forKey: .audioLevel) } // MARK: - Private helpers + /// Runs a WebRTC ADM call and translates its integer result into a + /// `ClientError` enriched with call-site metadata. 
private func throwingExecution( _ message: @autoclosure () -> String, file: StaticString = #file, diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift index 4894bea2a..15bd57b71 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift @@ -32,7 +32,6 @@ final class AudioEngineLevelNodeAdapter: AudioEngineNodeAdapting { var subject: CurrentValueSubject? -// private let publisher: (Float) -> Void private var inputTap: AVAudioMixerNode? /// Installs a tap on the supplied audio node to monitor input levels. diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift index 2889c0990..e64d82028 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift @@ -6,22 +6,42 @@ import Combine import StreamWebRTC /// Abstraction over `RTCAudioDeviceModule` so tests can provide fakes while -/// production code keeps using the WebRTC implementation. +/// production code continues to rely on the WebRTC-backed implementation. protocol RTCAudioDeviceModuleControlling: AnyObject { var observer: RTCAudioDeviceModuleDelegate? 
{ get set } + var isPlaying: Bool { get } + var isRecording: Bool { get } + var isPlayoutInitialized: Bool { get } + var isRecordingInitialized: Bool { get } var isMicrophoneMuted: Bool { get } + var isStereoPlayoutEnabled: Bool { get } + var isVoiceProcessingBypassed: Bool { get set } + var isVoiceProcessingEnabled: Bool { get } + var isVoiceProcessingAGCEnabled: Bool { get } + var prefersStereoPlayout: Bool { get set } + func reset() -> Int + func initAndStartPlayout() -> Int + func startPlayout() -> Int + func stopPlayout() -> Int func initAndStartRecording() -> Int func setMicrophoneMuted(_ isMuted: Bool) -> Int + func startRecording() -> Int func stopRecording() -> Int - - /// Publisher that emits whenever the microphone mute state changes. - func microphoneMutedPublisher() -> AnyPublisher + func refreshStereoPlayoutState() + func setMuteMode(_ mode: RTCAudioEngineMuteMode) -> Int + func setRecordingAlwaysPreparedMode(_ alwaysPreparedRecording: Bool) -> Int } extension RTCAudioDeviceModule: RTCAudioDeviceModuleControlling { - func microphoneMutedPublisher() -> AnyPublisher { - publisher(for: \.isMicrophoneMuted) - .eraseToAnyPublisher() + /// Convenience wrapper that mirrors the old `initPlayout` and + /// `startPlayout` sequence so the caller can request playout in one call. 
+ func initAndStartPlayout() -> Int { + let result = initPlayout() + if result == 0 { + return startPlayout() + } else { + return result + } } } diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift index 97d09f51b..93b41c164 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift @@ -22,6 +22,8 @@ extension StreamCallAudioRecorder.Namespace { /// ensure thread safety when accessing the recorder instance. final class AVAudioRecorderMiddleware: Middleware, @unchecked Sendable { + /// Tracks which metering backend is active so we can flip between + /// `AVAudioRecorder` and the audio device module seamlessly. enum Mode: Equatable { case invalid case audioRecorder(AVAudioRecorder) @@ -39,6 +41,8 @@ extension StreamCallAudioRecorder.Namespace { /// Subscription for publishing meter updates at refresh rate. private var updateMetersCancellable: AnyCancellable? + /// Listens for ADM availability and pivots the metering source on the + /// fly when stereo playout is enabled. private var audioDeviceModuleCancellable: AnyCancellable? init(audioRecorder: AVAudioRecorder? = nil) { @@ -50,18 +54,24 @@ extension StreamCallAudioRecorder.Namespace { mode = .invalid } + let initialMode = self.mode + super.init() audioDeviceModuleCancellable = audioStore .publisher(\.audioDeviceModule) .receive(on: processingQueue) .sink { [weak self] in + if self?.updateMetersCancellable != nil { + self?.stopRecording() + self?.startRecording() + } + + // We restore the mode to whatever we had before the call. 
if let audioDeviceModule = $0 { self?.mode = .audioDeviceModule(audioDeviceModule) - if self?.updateMetersCancellable != nil { - self?.stopRecording() - self?.startRecording() - } + } else { + self?.mode = initialMode } } } diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift index 0f2d4d49c..1f04e3ba7 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift @@ -33,6 +33,8 @@ extension StreamCallAudioRecorder.Namespace { // Monitor for category changes that are incompatible with recording cancellable = audioStore + // Observe the derived configuration so system-driven category + // changes also stop the local recorder. .publisher(\.audioSessionConfiguration.category) .filter { $0 != .playAndRecord && $0 != .record } .sink { [weak self] _ in diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift index 9f3e4d06a..dfb279022 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift @@ -5,7 +5,7 @@ import AVFoundation /// Represents the audio session configuration. -public struct AudioSessionConfiguration: ReflectiveStringConvertible, Equatable, Sendable { +public struct AudioSessionConfiguration: CustomStringConvertible, Equatable, Sendable { var isActive: Bool /// The audio session category. 
var category: AVAudioSession.Category @@ -16,6 +16,17 @@ public struct AudioSessionConfiguration: ReflectiveStringConvertible, Equatable, /// The audio session port override. var overrideOutputAudioPort: AVAudioSession.PortOverride? + public var description: String { + var result = "{ " + result += "isActive:\(isActive)" + result += ", category:\(category)" + result += ", mode:\(mode)" + result += ", options:\(options)" + result += ", overrideOutputAudioPort:\(overrideOutputAudioPort)" + result += " }" + return result + } + /// Compares two `AudioSessionConfiguration` instances for equality. public static func == (lhs: Self, rhs: Self) -> Bool { lhs.isActive == rhs.isActive && diff --git a/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift index 2c2d9f652..5bf9db5c9 100644 --- a/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift +++ b/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift @@ -12,6 +12,33 @@ final class CallAudioSession: @unchecked Sendable { @Injected(\.audioStore) private var audioStore + /// Bundles the reactive inputs we need to evaluate whenever call + /// capabilities or settings change, keeping log context attached. + private struct Input { + var callSettings: CallSettings + var ownCapabilities: Set + var currentRoute: RTCAudioStore.StoreState.AudioRoute? + var file: StaticString + var function: StaticString + var line: UInt + + init( + callSettings: CallSettings, + ownCapabilities: Set, + currentRoute: RTCAudioStore.StoreState.AudioRoute? 
= nil, + file: StaticString = #file, + function: StaticString = #function, + line: UInt = #line + ) { + self.callSettings = callSettings + self.ownCapabilities = ownCapabilities + self.currentRoute = currentRoute + self.file = file + self.function = function + self.line = line + } + } + var currentRouteIsExternal: Bool { audioStore.state.currentRoute.isExternal } private(set) weak var delegate: StreamAudioSessionAdapterDelegate? @@ -25,11 +52,15 @@ final class CallAudioSession: @unchecked Sendable { private let disposableBag = DisposableBag() private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) - private var lastCallSettingSpeakerOn: Bool? + /// Serialises policy evaluations so the AVAudioSession only receives one + /// configuration at a time even when upstream publishers fire in bursts. + private let processingPipeline = PassthroughSubject() - init( - policy: AudioSessionPolicy = DefaultAudioSessionPolicy() - ) { + private var lastAppliedConfiguration: AudioSessionConfiguration? + private var lastCallSettings: CallSettings? + private var lastOwnCapabilities: Set? 
+ + init(policy: AudioSessionPolicy = DefaultAudioSessionPolicy()) { self.policy = policy /// - Important: This runs whenever an CallAudioSession is created and ensures that @@ -40,7 +71,7 @@ final class CallAudioSession: @unchecked Sendable { .setCategoryAndModeAndCategoryOptions( .playAndRecord, mode: .voiceChat, - categoryOptions: [.allowBluetooth, .allowBluetoothA2DP] + categoryOptions: [.allowBluetoothHFP, .allowBluetoothA2DP] ) ) ) @@ -55,34 +86,26 @@ final class CallAudioSession: @unchecked Sendable { ) { disposableBag.removeAll() + processingPipeline + .debounce(for: .milliseconds(250), scheduler: processingQueue) + .receive(on: processingQueue) + .sink { [weak self] in self?.process($0) } + .store(in: disposableBag) + self.delegate = delegate self.statsAdapter = statsAdapter - audioStore.dispatch(.webRTCAudioSession(.setAudioEnabled(true))) + // Expose the policy's stereo preference so the audio device module can + // reconfigure itself before WebRTC starts playout. + audioStore.dispatch(.stereo(.setPlayoutPreferred(policy is LivestreamAudioSessionPolicy))) - Publishers - .CombineLatest(callSettingsPublisher, ownCapabilitiesPublisher) - .receive(on: processingQueue) - .sink { [weak self] in self?.didUpdate(callSettings: $0, ownCapabilities: $1) } - .store(in: disposableBag) + configureCallSettingsAndCapabilitiesObservation( + callSettingsPublisher: callSettingsPublisher, + ownCapabilitiesPublisher: ownCapabilitiesPublisher + ) + configureCurrentRouteObservation() + configureCallOptionsObservation() - audioStore - .publisher(\.currentRoute) - .removeDuplicates() - // We want to start listening on route changes **once** we have - // expressed our initial preference. 
- .drop { [weak self] _ in self?.lastCallSettingSpeakerOn == nil } - .receive(on: processingQueue) - .sink { - [weak self] in self?.delegate?.audioSessionAdapterDidUpdateSpeakerOn( - $0.isSpeaker, - file: #file, - function: #function, - line: #line - ) - } - .store(in: disposableBag) - statsAdapter?.trace(.init(audioSession: traceRepresentation)) } @@ -114,51 +137,180 @@ final class CallAudioSession: @unchecked Sendable { return } - processingQueue.addOperation { [weak self] in - self?.didUpdate( + processingPipeline.send( + .init( callSettings: callSettings, - ownCapabilities: ownCapabilities + ownCapabilities: ownCapabilities, + currentRoute: audioStore.state.currentRoute ) - } + ) } // MARK: - Private Helpers + private func process( + _ input: Input + ) { + log.debug( + "⚙️ Processing input:\(input).", + functionName: input.function, + fileName: input.file, + lineNumber: input.line + ) + didUpdate( + callSettings: input.callSettings, + ownCapabilities: input.ownCapabilities, + currentRoute: input.currentRoute ?? audioStore.state.currentRoute, + file: input.file, + function: input.function, + line: input.line + ) + } + + /// Wires call setting and capability updates into the processing queue so + /// downstream work always executes serially. + private func configureCallSettingsAndCapabilitiesObservation( + callSettingsPublisher: AnyPublisher, + ownCapabilitiesPublisher: AnyPublisher, Never> + ) { + Publishers + .CombineLatest(callSettingsPublisher, ownCapabilitiesPublisher) + .receive(on: processingQueue) + .sink { [weak self] in + guard let self else { + return + } + + processingPipeline.send( + .init( + callSettings: $0, + ownCapabilities: $1 + ) + ) + } + .store(in: disposableBag) + } + + /// Reapplies the last known category options when the system clears them, + /// which happens after some CallKit activations. 
+ private func configureCallOptionsObservation() { + audioStore + .publisher(\.audioSessionConfiguration.options) + .removeDuplicates() + .filter { $0.isEmpty } + .receive(on: processingQueue) + .compactMap { [weak self] _ in self?.lastAppliedConfiguration?.options } + .sink { [weak self] in self?.audioStore.dispatch(.avAudioSession(.setCategoryOptions($0))) } + .store(in: disposableBag) + } + + /// Keeps the delegate informed of hardware flips while also re-evaluating + /// the policy when we detect a reconfiguration-worthy route change. + private func configureCurrentRouteObservation() { + audioStore + .publisher(\.currentRoute) + .removeDuplicates() + .filter { $0.reason.requiresReconfiguration } + .receive(on: processingQueue) + .sink { [weak self] in + guard let self, let lastCallSettings, let lastOwnCapabilities else { return } + if lastCallSettings.speakerOn != $0.isSpeaker { + self.delegate?.audioSessionAdapterDidUpdateSpeakerOn( + $0.isSpeaker, + file: #file, + function: #function, + line: #line + ) + } else { + processingPipeline.send( + .init( + callSettings: lastCallSettings, + ownCapabilities: lastOwnCapabilities, + currentRoute: $0 + ) + ) + } + } + .store(in: disposableBag) + } + private func didUpdate( callSettings: CallSettings, - ownCapabilities: Set + ownCapabilities: Set, + currentRoute: RTCAudioStore.StoreState.AudioRoute, + file: StaticString = #file, + function: StaticString = #function, + line: UInt = #line ) { defer { statsAdapter?.trace(.init(audioSession: traceRepresentation)) } - let configuration = policy.configuration( - for: callSettings, - ownCapabilities: ownCapabilities + applyConfiguration( + policy.configuration( + for: callSettings, + ownCapabilities: ownCapabilities + ), + callSettings: callSettings, + ownCapabilities: ownCapabilities, + file: file, + function: function, + line: line + ) + } + + /// Breaks the configuration into store actions so reducers update the + /// audio session and our own bookkeeping in a single 
dispatch. + private func applyConfiguration( + _ configuration: AudioSessionConfiguration, + callSettings: CallSettings, + ownCapabilities: Set, + file: StaticString = #file, + function: StaticString = #function, + line: UInt = #line + ) { + log.debug( + "CallAudioSession will apply configuration:\(configuration)", + subsystems: .audioSession, + functionName: function, + fileName: file, + lineNumber: line ) - var actions: [RTCAudioStore.Namespace.Action] = [ - .avAudioSession( - .setCategoryAndModeAndCategoryOptions( - configuration.category, - mode: configuration.mode, - categoryOptions: configuration.options + var actions: [StoreActionBox] = [] + + actions.append(.normal(.setMicrophoneMuted(!callSettings.audioOn || !ownCapabilities.contains(.sendAudio)))) + + actions.append( + .normal( + .avAudioSession( + .setCategoryAndModeAndCategoryOptions( + configuration.category, + mode: configuration.mode, + categoryOptions: configuration.options + ) ) - ), - .avAudioSession( - .setOverrideOutputAudioPort(configuration.overrideOutputAudioPort ?? .none) - ), - .setActive(configuration.isActive) - ] - - if ownCapabilities.contains(.sendAudio) { - actions.append(.setShouldRecord(true)) - actions.append(.setMicrophoneMuted(!callSettings.audioOn)) - } else { - actions.append(.setShouldRecord(false)) - actions.append(.setMicrophoneMuted(true)) - } + ) + ) + + actions.append(contentsOf: [ + // Setting only the audioEnabled doesn't stop the audio playout + // as if a new track gets added later on WebRTC will try to restart + // the playout. However, the combination of audioEnabled:false + // and AVAudioSession.active:false seems to work. + .normal(.webRTCAudioSession(.setAudioEnabled(configuration.isActive))), + .normal(.setActive(configuration.isActive)), + .normal(.avAudioSession(.setOverrideOutputAudioPort(configuration.overrideOutputAudioPort ?? 
.none))) + ]) + + audioStore.dispatch( + actions, + file: file, + function: function, + line: line + ) - audioStore.dispatch(actions) - lastCallSettingSpeakerOn = configuration.overrideOutputAudioPort == .speaker + lastAppliedConfiguration = configuration + lastCallSettings = callSettings + lastOwnCapabilities = ownCapabilities } } diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift index 7f14fc7c9..c6afe56e1 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift @@ -14,11 +14,17 @@ extension AVAudioSession.CategoryOptions { appIsInForeground: Bool ) -> AVAudioSession.CategoryOptions { [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] } /// Category options for playback. static let playback: AVAudioSession.CategoryOptions = [] + + #if !canImport(AVFoundation, _version: 2360.61.4.11) + /// Older SDKs only expose ``allowBluetooth`` so we map the HFP alias to it + /// to avoid peppering the codebase with availability checks. + public static let allowBluetoothHFP = Self.allowBluetooth + #endif } diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift new file mode 100644 index 000000000..67a4404b6 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift @@ -0,0 +1,31 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import Foundation + +extension AVAudioSession.RouteChangeReason { + + /// Mirrors the filtering logic used by WebRTC so we ignore redundant + /// callbacks such as `categoryChange` that would otherwise spam the store. + var isValidRouteChange: Bool { + switch self { + case .categoryChange, .routeConfigurationChange: + return false + default: + return true + } + } + + /// Flags reasons that represent real hardware transitions so we can rebuild + /// the audio graph when necessary. + var requiresReconfiguration: Bool { + switch self { + case .categoryChange, .override, .wakeFromSleep, .newDeviceAvailable, .oldDeviceUnavailable: + return true + default: + return false + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift index ac753beae..5ea33caf7 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift @@ -27,11 +27,19 @@ extension AVAudioSession.CategoryOptions { options.append(".duckOthers") } + #if canImport(AVFoundation, _version: 2360.61.4.11) + // Adds ".allowBluetoothHFP" if present, permitting audio playback through + // Bluetooth devices. + if contains(.allowBluetoothHFP) { + options.append(".allowBluetoothHFP") + } + #else // Adds ".allowBluetooth" if present, permitting audio playback through // Bluetooth devices. if contains(.allowBluetooth) { options.append(".allowBluetooth") } + #endif // Adds ".defaultToSpeaker" if present, enabling speaker output by default. 
if contains(.defaultToSpeaker) { diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift index 2939fb57a..2bd39992f 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift @@ -11,4 +11,16 @@ extension AVAudioSession.Mode { public var description: String { rawValue } + + /// Indicates whether the mode keeps stereo playout active or if WebRTC + /// should fall back to mono because of voice-processing constraints. + var supportsStereoPlayout: Bool { + switch self { + case .videoChat, .voiceChat, .gameChat: + return false + + default: + return true + } + } } diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift index 27476d9ef..80dea145a 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift @@ -28,7 +28,7 @@ public struct DefaultAudioSessionPolicy: AudioSessionPolicy { category: .playAndRecord, mode: .voiceChat, options: [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ], overrideOutputAudioPort: callSettings.speakerOn diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/LivestreamAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/LivestreamAudioSessionPolicy.swift new file mode 100644 index 000000000..007c21ac2 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/LivestreamAudioSessionPolicy.swift @@ -0,0 +1,29 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import Foundation + +/// Provides an audio session configuration tailored for livestream calls, +/// keeping stereo playout active while respecting the local capabilities. +public struct LivestreamAudioSessionPolicy: AudioSessionPolicy { + + public init() {} + + /// Builds the configuration used when a call toggles livestream mode. + /// Stereo playout is preferred (thus the category and the options), but the policy falls back to playback + /// category if the current user cannot transmit audio. A2DP is required to allow external devices + /// to play stereo. + public func configuration( + for callSettings: CallSettings, + ownCapabilities: Set + ) -> AudioSessionConfiguration { + .init( + isActive: callSettings.audioOutputOn, + category: ownCapabilities.contains(.sendAudio) ? .playAndRecord : .playback, + mode: .default, + options: .allowBluetoothA2DP, + overrideOutputAudioPort: callSettings.speakerOn ? .speaker : nil + ) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift index 7f51fcf4a..14aff5284 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift @@ -11,7 +11,7 @@ protocol AVAudioSessionProtocol { /// - Parameters: /// - category: The audio category (e.g., `.playAndRecord`). /// - mode: The audio mode (e.g., `.voiceChat`). - /// - categoryOptions: The options for the category (e.g., `.allowBluetooth`). + /// - categoryOptions: The options for the category (e.g., `.allowBluetoothHFP`). /// - Throws: An error if setting the category fails. 
func setCategory( _ category: AVAudioSession.Category, diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver.swift new file mode 100644 index 000000000..66d62fea6 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver.swift @@ -0,0 +1,126 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation + +extension AVAudioSession { + /// Captures a stable view of the session so state changes can be diffed + /// outside of the AVAudioSession API, which otherwise exposes mutable + /// objects. + struct Snapshot: Equatable, CustomStringConvertible { + var category: AVAudioSession.Category + var mode: AVAudioSession.Mode + var categoryOptions: AVAudioSession.CategoryOptions + var routeSharingPolicy: AVAudioSession.RouteSharingPolicy + var availableModes: [AVAudioSession.Mode] + var preferredInput: RTCAudioStore.StoreState.AudioRoute.Port? + var renderingMode: String + var prefersEchoCancelledInput: Bool + var isEchoCancelledInputEnabled: Bool + var isEchoCancelledInputAvailable: Bool + var maximumOutputNumberOfChannels: Int + var outputNumberOfChannels: Int + var preferredOutputNumberOfChannels: Int + + /// Produces a compact string payload that is easy to log when + /// diagnosing audio route transitions. 
+ var description: String { + var result = "{" + result += "category:\(category)" + result += ", mode:\(mode)" + result += ", categoryOptions:\(categoryOptions)" + result += ", routeSharingPolicy:\(routeSharingPolicy)" + result += ", availableModes:\(availableModes)" + result += ", preferredInput:\(preferredInput)" + result += ", renderingMode:\(renderingMode)" + result += ", prefersEchoCancelledInput:\(prefersEchoCancelledInput)" + result += ", isEchoCancelledInputEnabled:\(isEchoCancelledInputEnabled)" + result += ", isEchoCancelledInputAvailable:\(isEchoCancelledInputAvailable)" + result += ", maximumOutputNumberOfChannels:\(maximumOutputNumberOfChannels)" + result += ", outputNumberOfChannels:\(outputNumberOfChannels)" + result += ", preferredOutputNumberOfChannels:\(preferredOutputNumberOfChannels)" + result += " }" + return result + } + + /// Builds a new snapshot by pulling the latest values from the shared + /// AVAudioSession instance. + init(_ source: AVAudioSession = .sharedInstance()) { + self.category = source.category + self.mode = source.mode + self.categoryOptions = source.categoryOptions + self.routeSharingPolicy = source.routeSharingPolicy + self.availableModes = source.availableModes + self.preferredInput = source.preferredInput.map { .init($0) } ?? 
nil + #if compiler(>=6.0) + if #available(iOS 17.2, *) { self.renderingMode = "\(source.renderingMode)" } + else { self.renderingMode = "" } + #else + self.renderingMode = "" + #endif + + #if compiler(>=6.0) + if #available(iOS 18.2, *) { self.prefersEchoCancelledInput = source.prefersEchoCancelledInput + } else { self.prefersEchoCancelledInput = false } + #else + self.prefersEchoCancelledInput = false + #endif + + #if compiler(>=6.0) + if #available(iOS 18.2, *) { self.isEchoCancelledInputEnabled = source.isEchoCancelledInputEnabled + } else { self.isEchoCancelledInputEnabled = false } + #else + self.isEchoCancelledInputEnabled = false + #endif + + #if compiler(>=6.0) + if #available(iOS 18.2, *) { self.isEchoCancelledInputAvailable = source.isEchoCancelledInputAvailable + } else { self.isEchoCancelledInputAvailable = false } + #else + self.isEchoCancelledInputAvailable = false + #endif + self.maximumOutputNumberOfChannels = source.maximumOutputNumberOfChannels + self.outputNumberOfChannels = source.outputNumberOfChannels + self.preferredOutputNumberOfChannels = source.preferredOutputNumberOfChannels + } + } +} + +/// Polls the shared AVAudioSession on a timer so stores can react using Combine. +final class AVAudioSessionObserver { + + var publisher: AnyPublisher { subject.eraseToAnyPublisher() } + + private let subject: CurrentValueSubject = .init(.init()) + private var cancellable: AnyCancellable? + + /// Starts emitting snapshots roughly every 100ms, which is fast enough to + /// catch rapid route transitions without adding noticeable overhead. + func startObserving() { + cancellable = DefaultTimer + .publish(every: 0.1) + .sink { [weak self] _ in self?.subject.send(.init()) } + } + + /// Cancels the observation timer and stops sending snapshot updates. 
+ func stopObserving() { + cancellable?.cancel() + cancellable = nil + } +} + +extension AVAudioSessionObserver: InjectionKey { + nonisolated(unsafe) static var currentValue: AVAudioSessionObserver = .init() +} + +extension InjectedValues { + /// Injects the audio session observer so effects can subscribe without + /// hard-coding their own polling logic. + var avAudioSessionObserver: AVAudioSessionObserver { + get { InjectedValues[AVAudioSessionObserver.self] } + set { InjectedValues[AVAudioSessionObserver.self] = newValue } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift index dd0611db6..b6cb0435e 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift @@ -55,6 +55,8 @@ protocol AudioSessionProtocol: AnyObject { func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws + func setPreferredOutputNumberOfChannels(_ noOfChannels: Int) throws + /// Applies the provided configuration to the audio session. /// - Parameter configuration: Desired audio session configuration. 
func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift index 57669c312..7761cd382 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift @@ -73,7 +73,7 @@ extension RTCAudioStore.StoreState.AVAudioSessionConfiguration { .duckOthers, .interruptSpokenAudioAndMixWithOthers, .defaultToSpeaker, - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] map[.playAndRecord] = AllowedConfiguration( diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift new file mode 100644 index 000000000..42af665a9 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift @@ -0,0 +1,73 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation +import StreamWebRTC + +extension RTCAudioStore { + + /// Mirrors the system audio session into the store so reducers can keep a + /// coherent view of category, mode, and options that were set by other + /// actors such as CallKit or Control Center. + final class AVAudioSessionEffect: StoreEffect, @unchecked Sendable { + + @Injected(\.avAudioSessionObserver) private var avAudioSessionObserver + private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) + private var audioDeviceModuleCancellable: AnyCancellable? 
+ private var avAudioSessionObserverCancellable: AnyCancellable? + + override init() { + super.init() + } + + /// Subscribes to adm availability changes and starts forwarding + /// snapshots once we have an audio device module configured. + override func set( + statePublisher: AnyPublisher? + ) { + avAudioSessionObserverCancellable?.cancel() + avAudioSessionObserverCancellable = nil + audioDeviceModuleCancellable?.cancel() + audioDeviceModuleCancellable = nil + avAudioSessionObserver.stopObserving() + + guard let statePublisher else { + return + } + + audioDeviceModuleCancellable = statePublisher + .map(\.audioDeviceModule) + .removeDuplicates() + .compactMap { $0 } + .sink { [weak self] in self?.didUpdate($0) } + } + + // MARK: - Private Helpers + + private func didUpdate(_ audioDeviceModule: AudioDeviceModule) { + avAudioSessionObserverCancellable?.cancel() + avAudioSessionObserverCancellable = nil + avAudioSessionObserver.stopObserving() + + avAudioSessionObserverCancellable = avAudioSessionObserver + .publisher + .removeDuplicates() + .sink { [weak self] in self?.didUpdate($0) } + + avAudioSessionObserver.startObserving() + } + + private func didUpdate(_ state: AVAudioSession.Snapshot) { + dispatcher?.dispatch( + [ + .normal(.avAudioSession(.systemSetCategory(state.category))), + .normal(.avAudioSession(.systemSetMode(state.mode))), + .normal(.avAudioSession(.systemSetCategoryOptions(state.categoryOptions))) + ] + ) + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+InterruptionsMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+InterruptionsEffect.swift similarity index 93% rename from Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+InterruptionsMiddleware.swift rename to Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+InterruptionsEffect.swift index df50b338a..0b1db6b93 
100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+InterruptionsMiddleware.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+InterruptionsEffect.swift @@ -10,7 +10,7 @@ extension RTCAudioStore { /// Converts audio session interruption callbacks into store actions so the /// audio pipeline can gracefully pause and resume. - final class InterruptionsMiddleware: Middleware, @unchecked Sendable { + final class InterruptionsEffect: StoreEffect, @unchecked Sendable { private let audioSessionObserver: RTCAudioSessionPublisher private let disposableBag = DisposableBag() @@ -61,7 +61,7 @@ extension RTCAudioStore { } dispatcher?.dispatch(actions.map(\.box)) - case .didChangeRoute: + default: break } } diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+RouteChangeEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+RouteChangeEffect.swift new file mode 100644 index 000000000..22cf6e109 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+RouteChangeEffect.swift @@ -0,0 +1,49 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation +import StreamWebRTC + +extension RTCAudioStore { + + /// Bridges `RTCAudioSession` route updates into store state so downstream + /// features can react to speaker/headset transitions. 
+ final class RouteChangeEffect: StoreEffect, @unchecked Sendable { + + private let audioSessionObserver: RTCAudioSessionPublisher + private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) + private var disposableBag = DisposableBag() + + convenience init(_ source: RTCAudioSession) { + self.init(.init(source)) + } + + init(_ audioSessionObserver: RTCAudioSessionPublisher) { + self.audioSessionObserver = audioSessionObserver + super.init() + + audioSessionObserver + .publisher + .compactMap { + switch $0 { + case let .didChangeRoute(reason, from, to): + return ( + reason, + RTCAudioStore.StoreState.AudioRoute(from), + RTCAudioStore.StoreState.AudioRoute(to, reason: reason) + ) + default: + return nil + } + } + .receive(on: processingQueue) + .log(.debug, subsystems: .audioSession) { "AudioRoute updated \($1) → \($2) due to reason:\($0)." } + .map { $0.2 } + .sink { [weak self] in self?.dispatcher?.dispatch(.setCurrentRoute($0)) } + .store(in: disposableBag) + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+StereoPlayoutEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+StereoPlayoutEffect.swift new file mode 100644 index 000000000..a6a720ca0 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+StereoPlayoutEffect.swift @@ -0,0 +1,68 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation + +extension RTCAudioStore { + + /// Observes the audio device module to detect when stereo playout becomes + /// available, keeping the store's stereo state aligned with WebRTC. + final class StereoPlayoutEffect: StoreEffect, @unchecked Sendable { + + private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) + private let disposableBag = DisposableBag() + private var audioDeviceModuleCancellable: AnyCancellable? 
+ + override func set( + statePublisher: AnyPublisher? + ) { + audioDeviceModuleCancellable?.cancel() + audioDeviceModuleCancellable = nil + processingQueue.cancelAllOperations() + disposableBag.removeAll() + + guard let statePublisher else { + return + } + + audioDeviceModuleCancellable = statePublisher + .map(\.audioDeviceModule) + .removeDuplicates() + .receive(on: processingQueue) + .sink { [weak self] in self?.didUpdate(audioDeviceModule: $0, statePublisher: statePublisher) } + } + + // MARK: - Private Helpers + + private func didUpdate( + audioDeviceModule: AudioDeviceModule?, + statePublisher: AnyPublisher + ) { + disposableBag.removeAll() + + guard let audioDeviceModule else { + return + } + + /// This is important to support cases (e.g. a wired headphone) that do not trigger a valid + /// route change for WebRTC causing the user to join the call without stereo and requiring + /// either toggling the speaker or reconnect their wired headset. + statePublisher + .map(\.currentRoute) + .removeDuplicates() + .debounce(for: .seconds(2), scheduler: processingQueue) + .sink { [weak audioDeviceModule] _ in audioDeviceModule?.refreshStereoPlayoutState() } + .store(in: disposableBag) + + audioDeviceModule + .isStereoPlayoutEnabledPublisher + .removeDuplicates() + .receive(on: processingQueue) + .sink { [weak self] in self?.dispatcher?.dispatch(.stereo(.setPlayoutEnabled($0))) } + .store(in: disposableBag) + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift index 79f3442e5..3aa71ec34 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift @@ -39,17 
+39,13 @@ extension RTCAudioStore { } } - case .setShouldRecord(let value): + case .setRecording(let value): if let audioDeviceModule = state.audioDeviceModule { log.throwing( - "Unable to process setShouldRecord:\(value).", + "Unable to process setRecording:\(value).", subsystems: .audioSession ) { - try didSetShouldRecord( - value, - state: state, - audioDeviceModule: audioDeviceModule - ) + try audioDeviceModule.setRecording(value) } } @@ -78,19 +74,18 @@ extension RTCAudioStore { ) } - case .setActive: - break - case .setRecording: - break - case .setHasRecordingPermission: - break - case .setCurrentRoute: - break - case .avAudioSession: - break - case .webRTCAudioSession: - break - case .callKit: + case .stereo(.setPlayoutPreferred(let value)): + state.audioDeviceModule?.setStereoPlayoutPreference(value) + + case let .webRTCAudioSession(.setAudioEnabled(value)): + log.throwing( + "Unable to process setPlayout:\(value).", + subsystems: .audioSession + ) { + try state.audioDeviceModule?.setPlayout(value) + } + + default: break } } @@ -105,32 +100,16 @@ extension RTCAudioStore { audioDeviceModule: AudioDeviceModule ) throws { guard + !value, state.isActive, - state.shouldRecord + state.isRecording else { return } - if value { - try audioDeviceModule.setRecording(false) - } else { - // Restart the ADM - try audioDeviceModule.setRecording(false) - try audioDeviceModule.setRecording(true) - } - } - - /// Starts or stops ADM recording when `shouldRecord` changes. - private func didSetShouldRecord( - _ value: Bool, - state: RTCAudioStore.StoreState, - audioDeviceModule: AudioDeviceModule - ) throws { - guard audioDeviceModule.isRecording != value else { - return - } - - try audioDeviceModule.setRecording(value) + // Restart the ADM + try audioDeviceModule.setRecording(false) + try audioDeviceModule.setRecording(true) } /// Applies the store's microphone muted state to the ADM. 
@@ -139,12 +118,6 @@ extension RTCAudioStore { state: RTCAudioStore.StoreState, audioDeviceModule: AudioDeviceModule ) throws { - guard - state.shouldRecord - else { - return - } - try audioDeviceModule.setMuted(value) } @@ -154,7 +127,7 @@ extension RTCAudioStore { _ audioDeviceModule: AudioDeviceModule?, state: RTCAudioStore.StoreState ) throws { - try state.audioDeviceModule?.setRecording(false) + state.audioDeviceModule?.reset() disposableBag.removeAll() @@ -162,6 +135,10 @@ extension RTCAudioStore { return } + audioDeviceModule.setStereoPlayoutPreference( + state.stereoConfiguration.playout.preferred + ) + audioDeviceModule .isRecordingPublisher .removeDuplicates() @@ -171,7 +148,6 @@ extension RTCAudioStore { audioDeviceModule .isMicrophoneMutedPublisher .removeDuplicates() - .log(.debug) { "ADM sent isMicrophoneMuted:\($0)." } .sink { [weak self] in self?.dispatcher?.dispatch(.setMicrophoneMuted($0)) } .store(in: disposableBag) } diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+RouteChangeMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+RouteChangeMiddleware.swift deleted file mode 100644 index d7015cde6..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+RouteChangeMiddleware.swift +++ /dev/null @@ -1,61 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Foundation -import StreamWebRTC - -extension RTCAudioStore { - - /// Bridges `RTCAudioSession` route updates into store state so downstream - /// features can react to speaker/headset transitions. 
- final class RouteChangeMiddleware: Middleware, @unchecked Sendable { - - private let audioSessionObserver: RTCAudioSessionPublisher - private let disposableBag = DisposableBag() - - convenience init(_ source: RTCAudioSession) { - self.init(.init(source)) - } - - init(_ audioSessionObserver: RTCAudioSessionPublisher) { - self.audioSessionObserver = audioSessionObserver - super.init() - - audioSessionObserver - .publisher - .compactMap { - guard - case let .didChangeRoute(reason, from, to) = $0 - else { - return nil - } - return (reason, from, to) - } - .sink { [weak self] in self?.didChangeRoute(reason: $0, from: $1, to: $2) } - .store(in: disposableBag) - } - - // MARK: - Private Helpers - - /// Handles route changes by persisting the new route and adapting the - /// output port override. - private func didChangeRoute( - reason: AVAudioSession.RouteChangeReason, - from: AVAudioSessionRouteDescription, - to: AVAudioSessionRouteDescription - ) { - let currentRoute = StoreState.AudioRoute(to) - let previousRoute = StoreState.AudioRoute(from) - dispatcher?.dispatch([ - .normal(.setCurrentRoute(currentRoute)), - .normal(.avAudioSession(.setOverrideOutputAudioPort(currentRoute.isSpeaker ? .speaker : .none))) - ]) - log.debug( - "AudioSession route changed from \(previousRoute) to \(currentRoute) due to:\(reason)", - subsystems: .audioSession - ) - } - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift index 7bf43c830..a32687a0d 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift @@ -13,9 +13,27 @@ extension RTCAudioStore { /// via middleware responsible for requesting permissions. 
public enum StoreAction: Sendable, Equatable, StoreActionBoxProtocol, CustomStringConvertible { + enum StereoAction: Equatable, Sendable, CustomStringConvertible { + case setPlayoutPreferred(Bool) + case setPlayoutEnabled(Bool) + + var description: String { + switch self { + case .setPlayoutPreferred(let value): + return ".setPlayoutPreferred(\(value))" + + case .setPlayoutEnabled(let value): + return ".setPlayoutEnabled(\(value))" + } + } + } + enum AVAudioSessionAction: Equatable, Sendable, CustomStringConvertible { + case systemSetCategory(AVAudioSession.Category) case setCategory(AVAudioSession.Category) + case systemSetMode(AVAudioSession.Mode) case setMode(AVAudioSession.Mode) + case systemSetCategoryOptions(AVAudioSession.CategoryOptions) case setCategoryOptions(AVAudioSession.CategoryOptions) case setCategoryAndMode(AVAudioSession.Category, mode: AVAudioSession.Mode) @@ -36,12 +54,21 @@ extension RTCAudioStore { var description: String { switch self { + case .systemSetCategory(let category): + return ".systemSetCategory(\(category))" + case .setCategory(let category): return ".setCategory(\(category))" + case .systemSetMode(let mode): + return ".systemSetMode(\(mode))" + case .setMode(let mode): return ".setMode(\(mode))" + case .systemSetCategoryOptions(let categoryOptions): + return ".systemSetCategoryOptions(\(categoryOptions))" + case .setCategoryOptions(let categoryOptions): return ".setCategoryOptions(\(categoryOptions))" @@ -99,7 +126,6 @@ extension RTCAudioStore { case setActive(Bool) case setInterrupted(Bool) - case setShouldRecord(Bool) case setRecording(Bool) case setMicrophoneMuted(Bool) case setHasRecordingPermission(Bool) @@ -109,6 +135,7 @@ extension RTCAudioStore { case avAudioSession(AVAudioSessionAction) case webRTCAudioSession(WebRTCAudioSessionAction) + case stereo(StereoAction) case callKit(CallKitAction) var description: String { @@ -119,9 +146,6 @@ extension RTCAudioStore { case .setInterrupted(let value): return 
".setInterrupted(\(value))" - case .setShouldRecord(let value): - return ".setShouldRecord(\(value))" - case .setRecording(let value): return ".setRecording(\(value))" @@ -143,6 +167,9 @@ extension RTCAudioStore { case .webRTCAudioSession(let value): return ".webRTCAudioSession(\(value))" + case .stereo(let value): + return ".stereo(\(value))" + case .callKit(let value): return ".callKit(\(value))" } diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift index 1054ac325..d72ad2225 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift @@ -22,9 +22,6 @@ extension RTCAudioStore { case let .setInterrupted(value): return value != state.isInterrupted - case let .setShouldRecord(value): - return value != state.shouldRecord - case let .setRecording(value): return value != state.isRecording @@ -54,6 +51,12 @@ extension RTCAudioStore { case .callKit: return true + + case let .stereo(value): + return shouldExecute( + action: value, + state: state.stereoConfiguration + ) } } @@ -65,6 +68,15 @@ extension RTCAudioStore { state: StoreState.AVAudioSessionConfiguration ) -> Bool { switch action { + case let .systemSetCategory(value): + return value != state.category + + case let .systemSetMode(value): + return value != state.mode + + case let .systemSetCategoryOptions(value): + return value != state.options + case let .setCategory(value): return value != state.category @@ -107,5 +119,18 @@ extension RTCAudioStore { return value != state.prefersNoInterruptionsFromSystemAlerts } } + + private func shouldExecute( + action: StoreAction.StereoAction, + state: StoreState.StereoConfiguration + ) -> Bool { + switch action { + case let .setPlayoutPreferred(value): + state.playout.preferred 
!= value + + case let .setPlayoutEnabled(value): + state.playout.enabled != value + } + } } } diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift index ba07ead84..103e289c1 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift @@ -27,12 +27,23 @@ extension RTCAudioStore { static func middleware(audioSession: RTCAudioSession) -> [Middleware] { [ - InterruptionsMiddleware(audioSession), - RouteChangeMiddleware(audioSession), AudioDeviceModuleMiddleware() ] } + static func effects(audioSession: RTCAudioSession) -> Set> { + [ + InterruptionsEffect(audioSession), + StereoPlayoutEffect(), + RouteChangeEffect(audioSession), + AVAudioSessionEffect() + ] + } + + static func logger() -> StoreLogger { + .init(logSkipped: false) + } + static func coordinator() -> StoreCoordinator { Coordinator() } diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift index 68f10dbfa..a90c8b201 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift @@ -10,6 +10,21 @@ extension RTCAudioStore { /// The state container for all permission statuses. 
struct StoreState: CustomStringConvertible, Encodable, Hashable, Sendable { + struct StereoConfiguration: CustomStringConvertible, Encodable, Hashable, Sendable { + struct Playout: CustomStringConvertible, Encodable, Hashable, Sendable { + var preferred: Bool + var enabled: Bool + + var description: String { "{ preferred:\(preferred), enabled:\(enabled) }" } + } + + var playout: Playout + + var description: String { + "{ playout:\(playout) }" + } + } + struct AVAudioSessionConfiguration: CustomStringConvertible, Encodable, Hashable, Sendable { var category: AVAudioSession.Category /// The AVAudioSession mode. Encoded as its string value. @@ -99,6 +114,12 @@ extension RTCAudioStore { .bluetoothA2DP, .bluetoothLE, .bluetoothHFP, .carAudio, .headphones ] + private enum CodingKeys: String, CodingKey { + case type + case name + case id + } + var type: String var name: String var id: String @@ -106,6 +127,9 @@ extension RTCAudioStore { var isExternal: Bool var isSpeaker: Bool var isReceiver: Bool + var channels: Int + + let source: AVAudioSessionPortDescription? var description: String { " { id:\(id), name:\(name), type:\(type) }" @@ -118,6 +142,8 @@ extension RTCAudioStore { self.isExternal = Self.externalPorts.contains(source.portType) self.isSpeaker = source.portType == .builtInSpeaker self.isReceiver = source.portType == .builtInReceiver + self.channels = source.channels?.endIndex ?? 
0 + self.source = source } init( @@ -126,7 +152,8 @@ extension RTCAudioStore { id: String, isExternal: Bool, isSpeaker: Bool, - isReceiver: Bool + isReceiver: Bool, + channels: Int ) { self.type = type self.name = name @@ -134,36 +161,57 @@ extension RTCAudioStore { self.isExternal = isExternal self.isSpeaker = isSpeaker self.isReceiver = isReceiver + self.channels = channels + self.source = nil } } let inputs: [Port] let outputs: [Port] + let reason: AVAudioSession.RouteChangeReason var isExternal: Bool var isSpeaker: Bool var isReceiver: Bool + var supportsStereoOutput: Bool + var supportsStereoInput: Bool + var description: String { - " { inputs:\(inputs), outputs:\(outputs) }" + var result = "{ " + result += "inputs:\(inputs)" + result += ", outputs:\(outputs)" + result += ", reason:\(reason)" + result += ", supportsStereoInput:\(supportsStereoInput)" + result += ", supportsStereoOutput:\(supportsStereoOutput)" + result += " }" + return result } - init(_ source: AVAudioSessionRouteDescription) { + init( + _ source: AVAudioSessionRouteDescription, + reason: AVAudioSession.RouteChangeReason = .unknown + ) { self.init( inputs: source.inputs.map(Port.init), - outputs: source.outputs.map(Port.init) + outputs: source.outputs.map(Port.init), + reason: reason ) } init( inputs: [Port], - outputs: [Port] + outputs: [Port], + reason: AVAudioSession.RouteChangeReason = .unknown ) { self.inputs = inputs self.outputs = outputs + self.reason = reason self.isExternal = outputs.first { $0.isExternal } != nil self.isSpeaker = outputs.first { $0.isSpeaker } != nil self.isReceiver = outputs.first { $0.isReceiver } != nil + self.supportsStereoInput = inputs.first { $0.channels > 1 } != nil + self.supportsStereoOutput = outputs.first { $0.channels > 1 } != nil } static let empty = AudioRoute(inputs: [], outputs: []) @@ -171,7 +219,6 @@ extension RTCAudioStore { var isActive: Bool var isInterrupted: Bool - var shouldRecord: Bool var isRecording: Bool var isMicrophoneMuted: Bool var 
hasRecordingPermission: Bool @@ -181,17 +228,18 @@ extension RTCAudioStore { var audioSessionConfiguration: AVAudioSessionConfiguration var webRTCAudioSessionConfiguration: WebRTCAudioSessionConfiguration + var stereoConfiguration: StereoConfiguration var description: String { " { " + "isActive:\(isActive)" + ", isInterrupted:\(isInterrupted)" + - ", shouldRecord:\(shouldRecord)" + ", isRecording:\(isRecording)" + ", isMicrophoneMuted:\(isMicrophoneMuted)" + ", hasRecordingPermission:\(hasRecordingPermission)" + ", audioSessionConfiguration:\(audioSessionConfiguration)" + ", webRTCAudioSessionConfiguration:\(webRTCAudioSessionConfiguration)" + + ", stereoConfiguration:\(stereoConfiguration)" + ", audioDeviceModule:\(audioDeviceModule)" + ", currentRoute:\(currentRoute)" + " }" @@ -200,12 +248,12 @@ extension RTCAudioStore { private enum CodingKeys: String, CodingKey { case isActive case isInterrupted - case shouldRecord case isRecording case isMicrophoneMuted case hasRecordingPermission case audioSessionConfiguration case webRTCAudioSessionConfiguration + case stereoConfiguration case audioDeviceModule case currentRoute } @@ -214,7 +262,6 @@ extension RTCAudioStore { var container = encoder.container(keyedBy: CodingKeys.self) try container.encode(isActive, forKey: .isActive) try container.encode(isInterrupted, forKey: .isInterrupted) - try container.encode(shouldRecord, forKey: .shouldRecord) try container.encode(isRecording, forKey: .isRecording) try container.encode(isMicrophoneMuted, forKey: .isMicrophoneMuted) try container.encode( @@ -229,6 +276,10 @@ extension RTCAudioStore { webRTCAudioSessionConfiguration, forKey: .webRTCAudioSessionConfiguration ) + try container.encode( + stereoConfiguration, + forKey: .stereoConfiguration + ) try container.encodeIfPresent( audioDeviceModule, forKey: .audioDeviceModule @@ -239,13 +290,12 @@ extension RTCAudioStore { static func == (lhs: StoreState, rhs: StoreState) -> Bool { lhs.isActive == rhs.isActive && 
lhs.isInterrupted == rhs.isInterrupted - && lhs.shouldRecord == rhs.shouldRecord && lhs.isRecording == rhs.isRecording && lhs.isMicrophoneMuted == rhs.isMicrophoneMuted && lhs.hasRecordingPermission == rhs.hasRecordingPermission && lhs.audioSessionConfiguration == rhs.audioSessionConfiguration - && lhs.webRTCAudioSessionConfiguration - == rhs.webRTCAudioSessionConfiguration + && lhs.webRTCAudioSessionConfiguration == rhs.webRTCAudioSessionConfiguration + && lhs.stereoConfiguration == rhs.stereoConfiguration && lhs.audioDeviceModule === rhs.audioDeviceModule && lhs.currentRoute == rhs.currentRoute } @@ -253,12 +303,12 @@ extension RTCAudioStore { func hash(into hasher: inout Hasher) { hasher.combine(isActive) hasher.combine(isInterrupted) - hasher.combine(shouldRecord) hasher.combine(isRecording) hasher.combine(isMicrophoneMuted) hasher.combine(hasRecordingPermission) hasher.combine(audioSessionConfiguration) hasher.combine(webRTCAudioSessionConfiguration) + hasher.combine(stereoConfiguration) if let audioDeviceModule { hasher.combine(ObjectIdentifier(audioDeviceModule)) } else { diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift index 8b6a1199b..09fc0ecbf 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift @@ -27,13 +27,26 @@ extension RTCAudioStore.Namespace { function: StaticString, line: UInt ) async throws -> State { - guard case let .avAudioSession(action) = action else { - return state + var updatedState = state + + if case let .setCurrentRoute(value) = action { + updatedState.audioSessionConfiguration.overrideOutputAudioPort = value.isSpeaker ? 
.speaker : .none } - var updatedState = state + guard case let .avAudioSession(action) = action else { + return updatedState + } switch action { + case let .systemSetCategory(value): + updatedState.audioSessionConfiguration.category = value + + case let .systemSetMode(value): + updatedState.audioSessionConfiguration.mode = value + + case let .systemSetCategoryOptions(value): + updatedState.audioSessionConfiguration.options = value + case let .setCategory(value): try performUpdate( state: state.audioSessionConfiguration, @@ -42,6 +55,7 @@ extension RTCAudioStore.Namespace { categoryOptions: state.audioSessionConfiguration.options ) updatedState.audioSessionConfiguration.category = value + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setMode(value): try performUpdate( @@ -51,6 +65,7 @@ extension RTCAudioStore.Namespace { categoryOptions: state.audioSessionConfiguration.options ) updatedState.audioSessionConfiguration.mode = value + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setCategoryOptions(value): try performUpdate( @@ -60,6 +75,7 @@ extension RTCAudioStore.Namespace { categoryOptions: value ) updatedState.audioSessionConfiguration.options = value + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setCategoryAndMode(category, mode): try performUpdate( @@ -70,6 +86,7 @@ extension RTCAudioStore.Namespace { ) updatedState.audioSessionConfiguration.category = category updatedState.audioSessionConfiguration.mode = mode + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setCategoryAndCategoryOptions(category, categoryOptions): try performUpdate( @@ -80,6 +97,7 @@ extension RTCAudioStore.Namespace { ) updatedState.audioSessionConfiguration.category = category updatedState.audioSessionConfiguration.options = categoryOptions + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setModeAndCategoryOptions(mode, 
categoryOptions): try performUpdate( @@ -90,6 +108,7 @@ extension RTCAudioStore.Namespace { ) updatedState.audioSessionConfiguration.mode = mode updatedState.audioSessionConfiguration.options = categoryOptions + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setCategoryAndModeAndCategoryOptions(category, mode, categoryOptions): try performUpdate( @@ -101,6 +120,7 @@ extension RTCAudioStore.Namespace { updatedState.audioSessionConfiguration.category = category updatedState.audioSessionConfiguration.mode = mode updatedState.audioSessionConfiguration.options = categoryOptions + updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none case let .setOverrideOutputAudioPort(value): if state.audioSessionConfiguration.category == .playAndRecord { @@ -134,6 +154,10 @@ extension RTCAudioStore.Namespace { || state.mode != mode || state.options != categoryOptions else { + log.debug( + "AVAudioSession configuration didn't change category:\(category), mode:\(mode), categoryOptions:\(categoryOptions).", + subsystems: .audioSession + ) return } @@ -206,13 +230,7 @@ extension RTCAudioStore.Namespace { return try await reduce( state: state, - action: .avAudioSession( - .setCategoryAndModeAndCategoryOptions( - state.audioSessionConfiguration.category, - mode: state.audioSessionConfiguration.mode, - categoryOptions: categoryOptions - ) - ), + action: .avAudioSession(.setCategoryOptions(categoryOptions)), file: #file, function: #function, line: #line diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift index 9a5d7d866..8c3578e60 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift @@ -38,13 
+38,11 @@ extension RTCAudioStore.Namespace { } } updatedState.isActive = value + try updatedState.audioDeviceModule?.setPlayout(value) case let .setInterrupted(value): updatedState.isInterrupted = value - case let .setShouldRecord(value): - updatedState.shouldRecord = value - case let .setRecording(value): updatedState.isRecording = value @@ -57,20 +55,34 @@ extension RTCAudioStore.Namespace { case let .setAudioDeviceModule(value): updatedState.audioDeviceModule = value if value == nil { - updatedState.shouldRecord = false updatedState.isRecording = false - updatedState.isMicrophoneMuted = false + updatedState.isMicrophoneMuted = true + updatedState.stereoConfiguration = .init( + playout: .init( + preferred: false, + enabled: false + ) + ) } case let .setCurrentRoute(value): updatedState.currentRoute = value + case let .stereo(.setPlayoutPreferred(value)): + updatedState.stereoConfiguration.playout.preferred = value + + case let .stereo(.setPlayoutEnabled(value)): + updatedState.stereoConfiguration.playout.enabled = value + case .avAudioSession: break case .webRTCAudioSession: break + case .stereo: + break + case .callKit: break } diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift index 0d878c1af..1e3e32ab4 100644 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift @@ -28,9 +28,8 @@ final class RTCAudioStore: @unchecked Sendable { initialState: .init( isActive: false, isInterrupted: false, - shouldRecord: false, isRecording: false, - isMicrophoneMuted: false, + isMicrophoneMuted: true, hasRecordingPermission: false, audioDeviceModule: nil, currentRoute: .init(audioSession.currentRoute), @@ -44,10 +43,17 @@ final class RTCAudioStore: @unchecked Sendable { isAudioEnabled: false, useManualAudio: false, prefersNoInterruptionsFromSystemAlerts: false + ), + 
stereoConfiguration: .init( + playout: .init( + preferred: false, + enabled: false + ) ) ), reducers: Namespace.reducers(audioSession: audioSession), - middleware: Namespace.middleware(audioSession: audioSession) + middleware: Namespace.middleware(audioSession: audioSession), + effects: Namespace.effects(audioSession: audioSession) ) store.dispatch([ diff --git a/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift b/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift index 24d34e16a..54e4377c6 100644 --- a/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift +++ b/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift @@ -6,8 +6,10 @@ import Foundation extension Logger { + /// Executes a throwing operation and routes any failures to the logging + /// backend using the supplied metadata. func throwing( - _ message: @autoclosure () -> String, + _ message: @autoclosure () -> String = "", subsystems: LogSubsystem, file: StaticString = #file, function: StaticString = #function, diff --git a/Sources/StreamVideo/Utils/Logger/Logger+WebRTC.swift b/Sources/StreamVideo/Utils/Logger/Logger+WebRTC.swift new file mode 100644 index 000000000..4ff3291c1 --- /dev/null +++ b/Sources/StreamVideo/Utils/Logger/Logger+WebRTC.swift @@ -0,0 +1,121 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import Foundation +import StreamWebRTC + +extension Logger { + + public enum WebRTC { + public enum LogMode { case none, validFilesOnly, all } + + public nonisolated(unsafe) static var mode: LogMode = .all { + didSet { RTCLogger.default.didUpdate(mode: mode) } + } + + nonisolated(unsafe) static var severity: RTCLoggingSeverity = .init(LogConfig.level) { + didSet { RTCLogger.default.didUpdate(severity: severity) } + } + + enum ValidFile: String { + case audioEngineDevice = "audio_engine_device.mm" + } + + nonisolated(unsafe) static var validFiles: [ValidFile] = [ + .audioEngineDevice + ] + } +} + +extension RTCLoggingSeverity { + + init(_ logLevel: LogLevel) { + switch logLevel { + case .debug: + self = .verbose + case .info: + self = .info + case .warning: + self = .warning + case .error: + self = .error + } + } +} + +extension Logger.WebRTC { + final class RTCLogger: @unchecked Sendable { + static let `default` = RTCLogger() + + private let logger = RTCCallbackLogger() + private var isRunning = false + private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) + + private init() { + didUpdate(mode: mode) + } + + func didUpdate(severity: RTCLoggingSeverity) { + processingQueue.addOperation { [weak self] in + self?.logger.severity = severity + } + } + + func didUpdate(mode: LogMode) { + processingQueue.addOperation { [weak self] in + guard let self else { + return + } + + guard mode != .none else { + return + } + + guard !self.isRunning else { + return + } + + logger.start { [weak self] in self?.process($0) } + + self.isRunning = true + } + } + + private func process(_ message: String) { + let trimmedMessage = message.trimmingCharacters( + in: .whitespacesAndNewlines + ) + + switch severity { + case .none, .verbose: + if isMessageFromValidFile(trimmedMessage) { + log.debug(trimmedMessage, subsystems: .webRTCInternal) + } + case .info: + if isMessageFromValidFile(trimmedMessage) { + log.info(trimmedMessage, subsystems: .webRTCInternal) + } + 
case .warning: + log.warning(trimmedMessage, subsystems: .webRTCInternal) + case .error: + log.error(trimmedMessage, subsystems: .webRTCInternal) + @unknown default: + log.debug(trimmedMessage, subsystems: .webRTCInternal) + } + } + + private func isMessageFromValidFile(_ message: String) -> Bool { + guard mode == .validFilesOnly, !validFiles.isEmpty else { + return true + } + + for validFile in validFiles { + if message.contains(validFile.rawValue) { + return true + } + } + return false + } + } +} diff --git a/Sources/StreamVideo/Utils/Logger/Logger.swift b/Sources/StreamVideo/Utils/Logger/Logger.swift index 72b4ddfd3..60832558b 100644 --- a/Sources/StreamVideo/Utils/Logger/Logger.swift +++ b/Sources/StreamVideo/Utils/Logger/Logger.swift @@ -151,6 +151,7 @@ public enum LogConfig { public nonisolated(unsafe) static var level: LogLevel = .error { didSet { invalidateLogger() + Logger.WebRTC.severity = .init(level) } } @@ -298,8 +299,8 @@ public enum LogConfig { } public static var webRTCLogsEnabled: Bool { - get { WebRTCLogger.default.enabled } - set { WebRTCLogger.default.enabled = newValue } + get { Logger.WebRTC.mode != .none } + set { Logger.WebRTC.mode = newValue ? .all : .none } } /// Invalidates the current logger instance so it can be recreated. diff --git a/Sources/StreamVideo/Utils/Logger/WebRTCLogger.swift b/Sources/StreamVideo/Utils/Logger/WebRTCLogger.swift deleted file mode 100644 index 3d248740f..000000000 --- a/Sources/StreamVideo/Utils/Logger/WebRTCLogger.swift +++ /dev/null @@ -1,50 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. 
-// - -import Foundation -import OSLog -import StreamWebRTC - -final class WebRTCLogger: @unchecked Sendable { - - static let `default` = WebRTCLogger() - - var enabled: Bool = false { - didSet { didUpdate(enabled) } - } - - var severity: RTCLoggingSeverity = .error { - didSet { webRTCLogger.severity = severity } - } - - private let webRTCLogger: RTCCallbackLogger = .init() - - private init() { - webRTCLogger.severity = .verbose - } - - private func didUpdate(_ enabled: Bool) { - guard enabled else { - webRTCLogger.stop() - return - } - webRTCLogger.start { message, severity in - let trimmedMessage = message.trimmingCharacters( - in: .whitespacesAndNewlines - ) - switch severity { - case .none, .verbose: - log.debug(trimmedMessage, subsystems: .webRTCInternal) - case .info: - log.info(trimmedMessage, subsystems: .webRTCInternal) - case .warning: - log.warning(trimmedMessage, subsystems: .webRTCInternal) - case .error: - log.error(trimmedMessage, subsystems: .webRTCInternal) - @unknown default: - log.debug(trimmedMessage, subsystems: .webRTCInternal) - } - } - } -} diff --git a/Sources/StreamVideo/Utils/RetriableTask.swift b/Sources/StreamVideo/Utils/RetriableTask.swift new file mode 100644 index 000000000..fe593dfce --- /dev/null +++ b/Sources/StreamVideo/Utils/RetriableTask.swift @@ -0,0 +1,52 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// A helper that retries synchronous operations a fixed number of times. +enum RetriableTask { + /// Runs the provided throwing operation up to the requested number of iterations. + /// The call stops as soon as the operation succeeds, or rethrows the last error + /// if all attempts fail. + /// - Parameters: + /// - iterations: Maximum number of times the operation should be executed. + /// - operation: The work item to execute repeatedly until it succeeds. + /// - Throws: The final error thrown by `operation` if it never succeeds. 
+ static func run( + iterations: Int, + operation: () throws -> Void + ) throws { + try execute( + currentIteration: 0, + iterations: iterations, + operation: operation + ) + } + + /// Recursively executes the operation, incrementing the iteration until + /// the maximum is reached or the call succeeds. + private static func execute( + currentIteration: Int, + iterations: Int, + operation: () throws -> Void + ) throws { + do { + return try operation() + } catch { + if currentIteration < iterations - 1 { + do { + return try execute( + currentIteration: currentIteration + 1, + iterations: iterations, + operation: operation + ) + } catch { + throw error + } + } else { + throw error + } + } + } +} diff --git a/Sources/StreamVideo/Utils/Store/Reducer.swift b/Sources/StreamVideo/Utils/Store/Reducer.swift index a1ce504ea..8fa662e0b 100644 --- a/Sources/StreamVideo/Utils/Store/Reducer.swift +++ b/Sources/StreamVideo/Utils/Store/Reducer.swift @@ -51,6 +51,16 @@ import Foundation /// state. They are executed in sequence, with each reducer receiving the /// state produced by the previous one. class Reducer: @unchecked Sendable { + /// Closure for dispatching new actions to the store. + /// + /// Use this to trigger additional actions in response to the current + /// action. The dispatcher is automatically set when the reducer is + /// added to a store. + /// + /// - Warning: Avoid creating infinite loops by dispatching actions + /// that trigger the same reducer repeatedly. + var dispatcher: Store.Dispatcher? + /// Processes an action to produce a new state. /// /// Override this method to implement state transformation logic. The diff --git a/Sources/StreamVideo/Utils/Store/Store.swift b/Sources/StreamVideo/Utils/Store/Store.swift index a28bd3d5d..db8f0878d 100644 --- a/Sources/StreamVideo/Utils/Store/Store.swift +++ b/Sources/StreamVideo/Utils/Store/Store.swift @@ -77,6 +77,8 @@ final class Store: @unchecked Sendable { /// Array of middleware that handle side effects. 
private var middleware: [Middleware] + private var effects: Set> + /// Initializes a new store with the specified configuration. /// /// - Parameters: @@ -92,6 +94,7 @@ final class Store: @unchecked Sendable { initialState: Namespace.State, reducers: [Reducer], middleware: [Middleware], + effects: Set>, logger: StoreLogger, executor: StoreExecutor, coordinator: StoreCoordinator @@ -100,13 +103,16 @@ final class Store: @unchecked Sendable { let stateSubject = CurrentValueSubject(initialState) self.stateSubject = stateSubject self.statePublisher = stateSubject.eraseToAnyPublisher() - self.reducers = reducers + self.reducers = [] self.middleware = [] + self.effects = [] self.logger = logger self.executor = executor self.coordinator = coordinator + reducers.forEach { add($0) } middleware.forEach { add($0) } + effects.forEach { add($0) } } // MARK: - Middleware Management @@ -168,6 +174,7 @@ final class Store: @unchecked Sendable { return } reducers.append(value) + value.dispatcher = .init(self) } } @@ -182,6 +189,45 @@ final class Store: @unchecked Sendable { return } reducers = reducers.filter { $0 !== value } + value.dispatcher = nil + } + } + + // MARK: - Effects Management + + /// Adds an effect to respond to state changes. + /// + /// Effects are executed every time the store's state gets updated. + /// + /// - Parameter value: The effect to add. + func add>(_ value: T) { + processingQueue.addOperation { [weak self] in + guard + let self + else { + return + } + effects.insert(value) + value.dispatcher = .init(self) + value.set(statePublisher: statePublisher) + value.stateProvider = { [weak self] in self?.state } + } + } + + /// Removes a previously added reducer. + /// + /// - Parameter value: The reducer to remove. 
+    func remove<T: StoreEffect<Namespace>>(_ value: T) {
+        processingQueue.addOperation { [weak self] in
+            guard
+                let self
+            else {
+                return
+            }
+            effects.remove(value)
+            value.dispatcher = nil
+            value.set(statePublisher: nil)
+            value.stateProvider = nil
         }
     }
diff --git a/Sources/StreamVideo/Utils/Store/StoreEffect.swift b/Sources/StreamVideo/Utils/Store/StoreEffect.swift
new file mode 100644
index 000000000..636db57af
--- /dev/null
+++ b/Sources/StreamVideo/Utils/Store/StoreEffect.swift
@@ -0,0 +1,51 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Combine
+import Foundation
+
+/// Base type for async side-effects that observe the store and can dispatch
+/// follow-up actions without touching reducers directly.
+class StoreEffect<Namespace: StoreNamespace>: @unchecked Sendable, Hashable {
+    private lazy var identifier = "store.\(type(of: self))"
+
+    /// Closure for dispatching new actions to the store.
+    ///
+    /// Use this to trigger additional actions in response to the current
+    /// action. The dispatcher is automatically set when the effect is
+    /// added to a store.
+    ///
+    /// - Warning: Avoid creating infinite loops by dispatching actions
+    /// that trigger the same effect repeatedly.
+    var dispatcher: Store<Namespace>.Dispatcher?
+
+    /// Closure for accessing the current store state.
+    ///
+    /// This provider is automatically set when the effect is added to
+    /// a store. It returns the current state at the time of access.
+    var stateProvider: (() -> Namespace.State?)?
+
+    /// The current store state, if available.
+    ///
+    /// Returns `nil` if the effect hasn't been added to a store yet.
+    /// Use this property to make decisions based on the current state.
+    var state: Namespace.State? { stateProvider?() }
+
+    /// Supplies the state publisher once the effect is attached to a store,
+    /// giving subclasses a hook to start or stop their observations.
+    func set(statePublisher: AnyPublisher<Namespace.State, Never>?)
{ + // No-op + } + + func hash(into hasher: inout Hasher) { + hasher.combine(identifier) + } + + static func == ( + lhs: StoreEffect, + rhs: StoreEffect + ) -> Bool { + lhs.identifier == rhs.identifier && lhs === rhs + } +} diff --git a/Sources/StreamVideo/Utils/Store/StoreLogger.swift b/Sources/StreamVideo/Utils/Store/StoreLogger.swift index f9079bb10..87b31feaf 100644 --- a/Sources/StreamVideo/Utils/Store/StoreLogger.swift +++ b/Sources/StreamVideo/Utils/Store/StoreLogger.swift @@ -52,12 +52,18 @@ class StoreLogger { /// throughput. let statistics: StoreStatistics = .init() + let logSkipped: Bool + /// Initializes a new store logger. /// /// - Parameter logSubsystem: The subsystem for categorizing logs. /// Defaults to `.other`. - init(logSubsystem: LogSubsystem = .other) { + init( + logSubsystem: LogSubsystem = .other, + logSkipped: Bool = true + ) { self.logSubsystem = logSubsystem + self.logSkipped = logSkipped #if DEBUG statistics.enable(interval: 60) { @@ -119,6 +125,9 @@ class StoreLogger { line: UInt ) { defer { statistics.record(action) } + + guard logSkipped else { return } + log.debug( "Store identifier:\(identifier) skipped action:\(action).", subsystems: logSubsystem, diff --git a/Sources/StreamVideo/Utils/Store/StoreNamespace.swift b/Sources/StreamVideo/Utils/Store/StoreNamespace.swift index 215a9e28c..c9813eb9b 100644 --- a/Sources/StreamVideo/Utils/Store/StoreNamespace.swift +++ b/Sources/StreamVideo/Utils/Store/StoreNamespace.swift @@ -74,6 +74,8 @@ protocol StoreNamespace: Sendable { /// - Returns: Array of middleware for this store. static func middleware() -> [Middleware] + static func effects() -> Set> + /// Creates the logger for this store. /// /// Override to provide custom logging behavior. 
@@ -113,6 +115,7 @@ protocol StoreNamespace: Sendable { initialState: State, reducers: [Reducer], middleware: [Middleware], + effects: Set>, logger: StoreLogger, executor: StoreExecutor, coordinator: StoreCoordinator @@ -129,6 +132,8 @@ extension StoreNamespace { /// Default implementation returns empty array. static func middleware() -> [Middleware] { [] } + static func effects() -> Set> { [] } + /// Default implementation returns basic logger. static func logger() -> StoreLogger { .init() } @@ -152,6 +157,7 @@ extension StoreNamespace { initialState: State, reducers: [Reducer] = Self.reducers(), middleware: [Middleware] = Self.middleware(), + effects: Set> = Self.effects(), logger: StoreLogger = Self.logger(), executor: StoreExecutor = Self.executor(), coordinator: StoreCoordinator = Self.coordinator() @@ -161,6 +167,7 @@ extension StoreNamespace { initialState: initialState, reducers: reducers, middleware: middleware, + effects: effects, logger: logger, executor: executor, coordinator: coordinator diff --git a/Sources/StreamVideo/Utils/Store/StoreStatistics.swift b/Sources/StreamVideo/Utils/Store/StoreStatistics.swift index 98e5d5940..76e83a511 100644 --- a/Sources/StreamVideo/Utils/Store/StoreStatistics.swift +++ b/Sources/StreamVideo/Utils/Store/StoreStatistics.swift @@ -10,7 +10,7 @@ final class StoreStatistics { typealias Reporter = (Int, TimeInterval) -> Void private let processingQueue = UnfairQueue() - private var actions: [Namespace.Action] = [] + private var actions: [String] = [] private var cancellable: AnyCancellable? 
private var interval: TimeInterval = 0 @@ -31,7 +31,7 @@ final class StoreStatistics { } func record(_ action: Namespace.Action) { - processingQueue.sync { actions.append(action) } + processingQueue.sync { actions.append("\(action)") } } private func flush() { diff --git a/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift b/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift index e2833ae4e..8017746f0 100644 --- a/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift +++ b/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift @@ -19,6 +19,7 @@ extension RTCSessionDescription: @retroactive Encodable {} extension RTCConfiguration: @retroactive Encodable {} extension RTCIceServer: @retroactive Encodable {} extension RTCCryptoOptions: @retroactive Encodable {} +extension AVAudioSession.RouteChangeReason: @retroactive Encodable {} #else extension RTCSignalingState: Encodable {} extension RTCMediaStream: Encodable {} @@ -33,6 +34,7 @@ extension RTCSessionDescription: Encodable {} extension RTCConfiguration: Encodable {} extension RTCIceServer: Encodable {} extension RTCCryptoOptions: Encodable {} +extension AVAudioSession.RouteChangeReason: Encodable {} #endif extension RTCSignalingState { diff --git a/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift b/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift index 8a251b938..47c8f735f 100644 --- a/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift +++ b/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift @@ -9,7 +9,17 @@ extension RTCMediaConstraints { nonisolated(unsafe) static let defaultConstraints = RTCMediaConstraints( mandatoryConstraints: nil, - optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue] + optionalConstraints: [ + "DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue, + /// Added support for Google's media constraints to improve transmitted audio + /// 
https://github.com/GetStream/react-native-webrtc/pull/20/commits/6476119456005dc35ba00e9bf4d4c4124c6066e8 + "googAutoGainControl": kRTCMediaConstraintsValueTrue, + "googNoiseSuppression": kRTCMediaConstraintsValueTrue, + "googEchoCancellation": kRTCMediaConstraintsValueTrue, + "googHighpassFilter": kRTCMediaConstraintsValueTrue, + "googTypingNoiseDetection": kRTCMediaConstraintsValueTrue, + "googAudioMirroring": kRTCMediaConstraintsValueFalse + ] ) nonisolated(unsafe) static let iceRestartConstraints = RTCMediaConstraints( diff --git a/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift b/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift index 3df412ae7..5305f545c 100644 --- a/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift +++ b/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift @@ -51,28 +51,26 @@ final class PeerConnectionFactory: @unchecked Sendable { /// - Parameter audioProcessingModule: The RTCAudioProcessingModule to use. /// - Returns: A PeerConnectionFactory instance. static func build( - audioProcessingModule: RTCAudioProcessingModule + audioProcessingModule: RTCAudioProcessingModule, + audioDeviceModuleSource: RTCAudioDeviceModuleControlling? = nil ) -> PeerConnectionFactory { - if let factory = PeerConnectionFactoryStorage.shared.factory( - for: audioProcessingModule - ) { - return factory - } else { - return .init(audioProcessingModule) - } + return .init(audioProcessingModule, audioDeviceModuleSource: audioDeviceModuleSource) } /// Private initializer to ensure instances are created through the `build` method. /// - Parameter audioProcessingModule: The RTCAudioProcessingModule to use. - private init(_ audioProcessingModule: RTCAudioProcessingModule) { + private init( + _ audioProcessingModule: RTCAudioProcessingModule, + audioDeviceModuleSource: RTCAudioDeviceModuleControlling? 
+ ) { self.audioProcessingModule = audioProcessingModule _ = factory - _ = audioDeviceModule - PeerConnectionFactoryStorage.shared.store(self, for: audioProcessingModule) - } - - deinit { - PeerConnectionFactoryStorage.shared.remove(for: audioProcessingModule) + + if let audioDeviceModuleSource { + audioDeviceModule = .init(audioDeviceModuleSource) + } else { + _ = audioDeviceModule + } } // MARK: - Builders @@ -213,56 +211,3 @@ final class PeerConnectionFactory: @unchecked Sendable { .baseline(for: videoCodec) } } - -/// A thread-safe storage class for managing PeerConnectionFactory instances. -final class PeerConnectionFactoryStorage: @unchecked Sendable { - /// Shared singleton instance of PeerConnectionFactoryStorage. - static let shared = PeerConnectionFactoryStorage() - - /// Dictionary to store PeerConnectionFactory instances, keyed by module address. - private var storage: [String: PeerConnectionFactory] = [:] - - /// Queue to ensure thread-safe access to the storage. - private let queue = UnfairQueue() - - /// Stores a PeerConnectionFactory instance for a given RTCAudioProcessingModule. - /// - Parameters: - /// - factory: The PeerConnectionFactory to store. - /// - module: The RTCAudioProcessingModule associated with the factory. - func store( - _ factory: PeerConnectionFactory, - for module: RTCAudioProcessingModule - ) { - queue.sync { - storage[key(for: module)] = factory - } - } - - /// Retrieves a PeerConnectionFactory instance for a given RTCAudioProcessingModule. - /// - Parameter module: The RTCAudioProcessingModule to lookup. - /// - Returns: The associated PeerConnectionFactory, if found. - func factory(for module: RTCAudioProcessingModule) -> PeerConnectionFactory? { - queue.sync { - storage[key(for: module)] - } - } - - /// Removes a PeerConnectionFactory instance for a given RTCAudioProcessingModule. - /// If the storage becomes empty after removal, it cleans up SSL. - /// - Parameter module: The RTCAudioProcessingModule to remove. 
- func remove(for module: RTCAudioProcessingModule) { - queue.sync { - storage[key(for: module)] = nil - if storage.isEmpty { - /// SSL cleanUp should only occur when no factory is active. During tests where - /// factories are being created on demand this is causing failures. The storage ensures - /// that only when there is no other factory the SSL will be cleaned up. - RTCCleanupSSL() - } - } - } - - private func key(for object: AnyObject) -> String { - "\(Unmanaged.passUnretained(object).toOpaque())" - } -} diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift index 40149b5e8..869cebe4a 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift @@ -53,6 +53,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { private var callSettings: CallSettings + private let mungeSubscriberStereo = true + /// A publisher that we use to observe setUp status. Once the setUp has been completed we expect /// a `true` value to be sent. After that, any subsequent observations will rely on the `currentValue` /// to know that the setUp completed, without having to wait for it. 
@@ -808,8 +810,21 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
         )
 
-        let answer = try await createAnswer()
-        try await setLocalDescription(answer)
+        var answer = try await createAnswer()
+        if mungeSubscriberStereo {
+            let munger = SDPParser()
+            let visitor = StereoEnableVisitor()
+            munger.registerVisitor(visitor)
+            await munger.parse(sdp: answer.sdp)
+            let munged = visitor.applyStereoUpdates(to: answer.sdp)
+            let mungedAnswer = RTCSessionDescription(type: answer.type, sdp: munged)
+            try await setLocalDescription(mungedAnswer)
+            log.debug("Munged Subscriber answer: \(mungedAnswer)", subsystems: subsystem)
+
+            answer = mungedAnswer
+        } else {
+            try await setLocalDescription(answer)
+        }
 
         try await sfuAdapter.sendAnswer(
             sessionDescription: answer.sdp,
diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift
index 5ac0261e2..8176fb985 100644
--- a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift
+++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift
@@ -14,8 +14,18 @@ final class StereoEnableVisitor: SDPLineVisitor {
         case foundOpus(mid: String, payload: String)
     }
 
+    private enum Constants {
+        static let fmtpDelimiter: Character = ";"
+        static let keyValueSeparator: Character = "="
+        static let stereoParameters: [(key: String, value: String)] = [
+            ("stereo", "1"),
+            ("sprop-stereo", "1")
+        ]
+    }
+
     private var state: State = .idle
     private(set) var found: [String: MidStereoInformation] = [:]
+    private(set) var fmtpLineReplacements: [String: String] = [:]
 
     /// Prefixes handled by this visitor: mid, rtpmap, and fmtp lines.
var supportedPrefixes: Set { @@ -43,6 +53,7 @@ final class StereoEnableVisitor: SDPLineVisitor { state = .foundOpus(mid: mid, payload: String(parts[0])) case let (_, .foundOpus(mid, codecPayload)) where line.hasPrefix(SupportedPrefix.fmtp.rawValue): + let originalLine = line let parts = line .replacingOccurrences(of: SupportedPrefix.fmtp.rawValue, with: "") .split(separator: " ", maxSplits: 1) @@ -55,18 +66,23 @@ final class StereoEnableVisitor: SDPLineVisitor { let payload = String(parts[0]) let config = String(parts[1]) - guard - payload == codecPayload, - config.contains("stereo=1") - else { + guard payload == codecPayload else { state = .idle return } + let (updatedConfig, didMutate) = ensureStereoConfiguration(in: config) + if didMutate { + let updatedLine = "\(SupportedPrefix.fmtp.rawValue)\(payload) \(updatedConfig)" + fmtpLineReplacements[originalLine] = updatedLine + } else { + fmtpLineReplacements.removeValue(forKey: originalLine) + } + found[mid] = .init( mid: mid, codecPayload: codecPayload, - isStereoEnabled: true + isStereoEnabled: updatedConfig.contains("stereo=1") ) state = .idle @@ -74,4 +90,77 @@ final class StereoEnableVisitor: SDPLineVisitor { break } } + + /// Applies the computed stereo updates to the provided SDP, returning a new SDP string. + /// - Parameter sdp: The original SDP string. + /// - Returns: The SDP string with stereo parameters enforced where required. + func applyStereoUpdates(to sdp: String) -> String { + guard fmtpLineReplacements.isEmpty == false else { return sdp } + + let delimiter = "\r\n" + var lines = sdp.components(separatedBy: delimiter) + + for index in lines.indices { + let line = lines[index] + if let replacement = fmtpLineReplacements[line] { + lines[index] = replacement + } + } + + return lines.joined(separator: delimiter) + } + + /// Resets the internal state allowing the visitor to be reused. 
+ func reset() { + state = .idle + found.removeAll() + fmtpLineReplacements.removeAll() + } + + private func ensureStereoConfiguration(in config: String) -> (String, Bool) { + let components = config + .split(separator: Constants.fmtpDelimiter) + .map { $0.trimmingCharacters(in: .whitespaces) } + .filter { !$0.isEmpty } + + var order: [String] = [] + var values: [String: String] = [:] + + for component in components { + let keyValue = component.split(separator: Constants.keyValueSeparator, maxSplits: 1) + let key = keyValue[0].trimmingCharacters(in: .whitespaces) + let value = keyValue.count > 1 + ? keyValue[1].trimmingCharacters(in: .whitespaces) + : "" + + if values[key] == nil { + order.append(key) + } + values[key] = value + } + + var didMutate = false + + for (key, value) in Constants.stereoParameters { + if let existing = values[key] { + if existing != value { + values[key] = value + didMutate = true + } + } else { + values[key] = value + order.append(key) + didMutate = true + } + } + + let updatedConfig = order.map { key -> String in + guard let value = values[key], value.isEmpty == false else { + return key + } + return "\(key)=\(value)" + }.joined(separator: String(Constants.fmtpDelimiter)) + + return (updatedConfig, didMutate) + } } diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift index 37114964e..6268e0f12 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift @@ -359,18 +359,26 @@ extension WebRTCCoordinator.StateMachine.Stage { try Task.checkCancellation() if !isFastReconnecting { - /// Configures the audio session for the current call using the provided - /// join source. 
This ensures the session setup reflects whether the
-                    /// join was triggered in-app or via CallKit and applies the correct
-                    /// audio routing and category.
-                    try await coordinator.stateAdapter.configureAudioSession(
-                        source: context.joinSource
-                    )
+                    try await withThrowingTaskGroup(of: Void.self) { [context] group in
+                        group.addTask { [context] in
+                            /// Configures the audio session for the current call using the provided
+                            /// join source. This ensures the session setup reflects whether the
+                            /// join was triggered in-app or via CallKit and applies the correct
+                            /// audio routing and category.
+                            try await coordinator.stateAdapter.configureAudioSession(
+                                source: context.joinSource
+                            )
+                        }
 
-                    /// Configures all peer connections after the audio session is ready.
-                    /// Ensures signaling, media, and routing are correctly established for
-                    /// all tracks as part of the join process.
-                    try await coordinator.stateAdapter.configurePeerConnections()
+                        group.addTask {
+                            /// Configures all peer connections concurrently with the audio session
+                            /// setup. Ensures signaling, media, and routing are correctly
+                            /// established for all tracks as part of the join process.
+                            try await coordinator.stateAdapter.configurePeerConnections()
+                        }
+
+                        try await group.waitForAll()
+                    }
 
                     // Once our PeerConnection have been created we consume the
                     // eventBucket we created above in order to re-apply any event
diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift
index 309a8b1c3..179718eb7 100644
--- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift
+++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift
@@ -9,7 +9,7 @@ extension WebRTCCoordinator.StateMachine {
     class Stage: StreamStateMachineStage, @unchecked Sendable {
         /// Context holding the state and dependencies for the stage.
- struct Context { + struct Context: @unchecked Sendable { weak var coordinator: WebRTCCoordinator? var authenticator: WebRTCAuthenticating = WebRTCAuthenticator() var sfuEventObserver: SFUEventAdapter? diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift index c61c6c7b6..e9e8a5ded 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift @@ -38,6 +38,8 @@ protocol WebRTCAuthenticating { /// Concrete implementation of WebRTCAuthenticating. struct WebRTCAuthenticator: WebRTCAuthenticating { + @Injected(\.audioStore) private var audioStore + /// Authenticates the WebRTC connection. /// - Parameters: /// - coordinator: The WebRTC coordinator. @@ -90,9 +92,7 @@ struct WebRTCAuthenticator: WebRTCAuthenticating { let remoteCallSettings = CallSettings(response.call.settings) let callSettings = { var result = initialCallSettings ?? remoteCallSettings - if - coordinator.stateAdapter.audioSession.currentRouteIsExternal, - result.speakerOn { + if audioStore.state.currentRoute.isExternal, result.speakerOn { result = result.withUpdatedSpeakerState(false) } return result diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift index 7e9777cf8..46d92a188 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift @@ -138,11 +138,11 @@ final class WebRTCPermissionsAdapter: @unchecked Sendable { } var updatedCallSettings = callSettings - if callSettings.audioOn, !permissions.hasMicrophonePermission { + if callSettings.audioOn, permissions.state.microphonePermission != .granted { updatedCallSettings = updatedCallSettings.withUpdatedAudioState(false) } - if callSettings.videoOn, !permissions.hasCameraPermission { + if callSettings.videoOn, permissions.state.cameraPermission != 
.granted { updatedCallSettings = updatedCallSettings.withUpdatedVideoState(false) } diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift index c7c999c19..88d409d43 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift @@ -117,14 +117,50 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W rtcPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinatorProviding, videoCaptureSessionProvider: VideoCaptureSessionProvider = .init(), screenShareSessionProvider: ScreenShareSessionProvider = .init() + ) { + self.init( + user: user, + apiKey: apiKey, + callCid: callCid, + videoConfig: videoConfig, + peerConnectionFactory: PeerConnectionFactory.build( + audioProcessingModule: videoConfig.audioProcessingModule + ), + rtcPeerConnectionCoordinatorFactory: rtcPeerConnectionCoordinatorFactory, + videoCaptureSessionProvider: videoCaptureSessionProvider, + screenShareSessionProvider: screenShareSessionProvider + ) + } + + /// Initializes the WebRTC state adapter with user details and connection + /// configurations. + /// + /// - Parameters: + /// - user: The user participating in the call. + /// - apiKey: The API key for authenticating WebRTC calls. + /// - callCid: The call identifier (callCid). + /// - videoConfig: Configuration for video settings. + /// - peerConnectionFactory: The factory to use when constructing peerConnection and for the + /// audioSession.. + /// - rtcPeerConnectionCoordinatorFactory: Factory for peer connection + /// creation. + /// - videoCaptureSessionProvider: Provides sessions for video capturing. + /// - screenShareSessionProvider: Provides sessions for screen sharing. 
+ init( + user: User, + apiKey: String, + callCid: String, + videoConfig: VideoConfig, + peerConnectionFactory: PeerConnectionFactory, + rtcPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinatorProviding, + videoCaptureSessionProvider: VideoCaptureSessionProvider = .init(), + screenShareSessionProvider: ScreenShareSessionProvider = .init() ) { self.user = user self.apiKey = apiKey self.callCid = callCid self.videoConfig = videoConfig - let peerConnectionFactory = PeerConnectionFactory.build( - audioProcessingModule: videoConfig.audioProcessingModule - ) + let peerConnectionFactory = peerConnectionFactory self.peerConnectionFactory = peerConnectionFactory self.rtcPeerConnectionCoordinatorFactory = rtcPeerConnectionCoordinatorFactory self.videoCaptureSessionProvider = videoCaptureSessionProvider @@ -681,10 +717,9 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W func configureAudioSession(source: JoinSource?) async throws { try await audioStore.dispatch([ - .setRecording(peerConnectionFactory.audioDeviceModule.isRecording), - .setMicrophoneMuted(peerConnectionFactory.audioDeviceModule.isMicrophoneMuted), .setAudioDeviceModule(peerConnectionFactory.audioDeviceModule) ]).result() + audioSession.activate( callSettingsPublisher: $callSettings.removeDuplicates().eraseToAnyPublisher(), ownCapabilitiesPublisher: $ownCapabilities.removeDuplicates().eraseToAnyPublisher(), diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj index 1101cd5dc..bfb3e8e04 100644 --- a/StreamVideo.xcodeproj/project.pbxproj +++ b/StreamVideo.xcodeproj/project.pbxproj @@ -134,7 +134,6 @@ 40245F652BE27B2000FCF075 /* StatelessAudioOutputIconView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40245F642BE27B2000FCF075 /* StatelessAudioOutputIconView_Tests.swift */; }; 40245F672BE27B8400FCF075 /* StatelessSpeakerIconView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40245F662BE27B8400FCF075 /* 
StatelessSpeakerIconView_Tests.swift */; }; 40245F692BE27CCB00FCF075 /* StatelessParticipantsListButton_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40245F682BE27CCB00FCF075 /* StatelessParticipantsListButton_Tests.swift */; }; - 4026BEEA2EA79FD400360AD0 /* CallFlow_PerformanceTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4026BEE92EA79FD400360AD0 /* CallFlow_PerformanceTests.swift */; }; 402778832BD13C62002F4399 /* NoiseCancellationFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402778822BD13C62002F4399 /* NoiseCancellationFilter.swift */; }; 4028FE982DC4F638001F9DC3 /* ConsumableBucket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4028FE972DC4F638001F9DC3 /* ConsumableBucket.swift */; }; 4028FE9A2DC4FC8E001F9DC3 /* ConsumableBucketItemTransformer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4028FE992DC4FC8E001F9DC3 /* ConsumableBucketItemTransformer.swift */; }; @@ -169,12 +168,12 @@ 402C544B2B6B9FF000672BFB /* CallButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4091460A2B690AA4007F3C17 /* CallButtonView.swift */; }; 402C545B2B6BE50500672BFB /* MockStreamStatistics.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C545A2B6BE50500672BFB /* MockStreamStatistics.swift */; }; 402C545D2B6BE5E200672BFB /* StreamCallStatisticsFormatter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C545C2B6BE5E200672BFB /* StreamCallStatisticsFormatter_Tests.swift */; }; + 402C5C5F2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C5C5E2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift */; }; + 402C5C612ECB96D30096F212 /* AVAudioSessionObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C5C602ECB96D30096F212 /* AVAudioSessionObserver.swift */; }; + 402C5C632ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C5C622ECB988A0096F212 /* 
RTCAudioStore+AVAudioSessionEffect.swift */; }; 402D0E882D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402D0E872D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift */; }; 402D0E8A2D0C94E600E9B83F /* RTCVideoTrack+Clone.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402D0E892D0C94E600E9B83F /* RTCVideoTrack+Clone.swift */; }; 402D0E8C2D0C94F900E9B83F /* CallSettings+Audio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402D0E8B2D0C94F900E9B83F /* CallSettings+Audio.swift */; }; - 402E69A22EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402E69A02EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift */; }; - 402E69A32EA65FF90082F7FA /* BatteryStore_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402E699E2EA65FF90082F7FA /* BatteryStore_Tests.swift */; }; - 402E69A42EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402E699F2EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift */; }; 402EE1302AA8861B00312632 /* DemoChatViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402EE12F2AA8861B00312632 /* DemoChatViewModel.swift */; }; 402F04A92B70ED8600CA1986 /* StreamCallStatisticsReporter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A62B70ED8600CA1986 /* StreamCallStatisticsReporter.swift */; }; 402F04AA2B70ED8600CA1986 /* Statistics+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A72B70ED8600CA1986 /* Statistics+Convenience.swift */; }; @@ -211,6 +210,8 @@ 40382F472C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */; }; 40382F482C89D03700C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F412C89CF9300C2D00F /* 
Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */; }; 40382F502C8B3DAE00C2D00F /* StreamRTCPeerConnection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F4F2C8B3DA800C2D00F /* StreamRTCPeerConnection.swift */; }; + 4039088D2EC2311A00B19FA1 /* StoreEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039088C2EC2311A00B19FA1 /* StoreEffect.swift */; }; + 403908AC2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403908AB2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift */; }; 4039F0C02D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */; }; 4039F0CA2D0222E40078159E /* Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0C92D0222E40078159E /* Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift */; }; 4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0CB2D0241120078159E /* AudioCodec.swift */; }; @@ -268,6 +269,7 @@ 404A81342DA3CB66001F7FA8 /* CallStateMachine_RejectedStageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403FB1612BFE22A40047A696 /* CallStateMachine_RejectedStageTests.swift */; }; 404A81362DA3CBF0001F7FA8 /* CallConfigurationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404A81352DA3CBF0001F7FA8 /* CallConfigurationTests.swift */; }; 404A81382DA3CC0C001F7FA8 /* CallConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404A81372DA3CC0C001F7FA8 /* CallConfiguration.swift */; }; + 404B546B2ED06D8C009378F2 /* RetriableTask.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404B546A2ED06D8C009378F2 /* RetriableTask.swift */; }; 404C27CB2BF2552800DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* 
XCTestCase+PredicateFulfillment.swift */; }; 404C27CC2BF2552900DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; }; 404CAEE72B8F48F6007087BC /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; }; @@ -475,7 +477,6 @@ 4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4097B3822BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift */; }; 40986C3A2CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C392CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift */; }; 40986C3C2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C3B2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift */; }; - 40986C3E2CD1148F00510F88 /* AudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */; }; 409AF6E62DAFAC4700EE7BF6 /* PictureInPictureReconnectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409AF6E52DAFAC4700EE7BF6 /* PictureInPictureReconnectionView.swift */; }; 409AF6E82DAFC80200EE7BF6 /* PictureInPictureContent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409AF6E72DAFC80200EE7BF6 /* PictureInPictureContent.swift */; }; 409AF6EA2DAFE1B000EE7BF6 /* PictureInPictureContentProviderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409AF6E92DAFE1B000EE7BF6 /* PictureInPictureContentProviderTests.swift */; }; @@ -496,15 +497,7 @@ 40A0E9602B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; }; 40A0E9622B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef 
= 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */; }; 40A0E9682B88E04D0089E8D3 /* CIImage_Resize_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E9672B88E04D0089E8D3 /* CIImage_Resize_Tests.swift */; }; - 40A0FFB12EA63CB900F39D8F /* BatteryStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB02EA63CB900F39D8F /* BatteryStore.swift */; }; - 40A0FFB42EA63D3C00F39D8F /* BatteryStore+Namespace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB32EA63D3C00F39D8F /* BatteryStore+Namespace.swift */; }; - 40A0FFB62EA63D8F00F39D8F /* BatteryStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB52EA63D8F00F39D8F /* BatteryStore+State.swift */; }; - 40A0FFB82EA63D9700F39D8F /* BatteryStore+Action.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB72EA63D9700F39D8F /* BatteryStore+Action.swift */; }; - 40A0FFBB2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFBA2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift */; }; - 40A0FFBE2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFBD2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift */; }; 40A0FFC02EA6418000F39D8F /* Sequence+AsyncReduce.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFBF2EA6418000F39D8F /* Sequence+AsyncReduce.swift */; }; - 40A317E82EB504C900733948 /* ModerationBlurViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A317E72EB504C900733948 /* ModerationBlurViewModifier.swift */; }; - 40A317EB2EB5081500733948 /* ModerationWarningViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A317EA2EB5081500733948 /* ModerationWarningViewModifier.swift */; }; 40A7C5B52E099B4600EEDF9C /* ParticipantEventResetAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A7C5B22E099B1000EEDF9C /* ParticipantEventResetAdapter.swift */; }; 40A7C5B82E099D6200EEDF9C /* 
ParticipantEventResetAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A7C5B72E099D6200EEDF9C /* ParticipantEventResetAdapter_Tests.swift */; }; 40A9416E2B4D959F006D6965 /* StreamPictureInPictureAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A9416D2B4D959F006D6965 /* StreamPictureInPictureAdapter.swift */; }; @@ -665,7 +658,24 @@ 40B575D42DCCECE800F489B8 /* MockAVPictureInPictureController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B575D22DCCECDA00F489B8 /* MockAVPictureInPictureController.swift */; }; 40B575D82DCCF00200F489B8 /* StreamPictureInPictureControllerProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B575D52DCCEFB500F489B8 /* StreamPictureInPictureControllerProtocol.swift */; }; 40B713692A275F1400D1FE67 /* AppState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8456E6C5287EB55F004E180E /* AppState.swift */; }; - 40BAD0B32EA7CE3200CCD3D7 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40BAD0B22EA7CE3200CCD3D7 /* StreamWebRTC */; }; + 40B8FFA72EC393A80061E3F6 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40B8FFA62EC393A80061E3F6 /* StreamWebRTC */; }; + 40B8FFA92EC393B50061E3F6 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40B8FFA82EC393B50061E3F6 /* StreamWebRTC */; }; + 40B8FFAB2EC393BB0061E3F6 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40B8FFAA2EC393BB0061E3F6 /* StreamWebRTC */; }; + 40B8FFB62EC3949F0061E3F6 /* BatteryStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB42EC3949F0061E3F6 /* BatteryStore.swift */; }; + 40B8FFB72EC3949F0061E3F6 /* BatteryStore+Action.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB02EC3949F0061E3F6 /* BatteryStore+Action.swift */; }; + 40B8FFB82EC3949F0061E3F6 /* BatteryStore+Namespace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB12EC3949F0061E3F6 /* BatteryStore+Namespace.swift */; }; + 40B8FFB92EC3949F0061E3F6 /* 
BatteryStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB22EC3949F0061E3F6 /* BatteryStore+State.swift */; }; + 40B8FFBA2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFAC2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift */; }; + 40B8FFBB2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFAE2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift */; }; + 40B8FFC02EC394AA0061E3F6 /* CallModerationBlurEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBC2EC394AA0061E3F6 /* CallModerationBlurEvent.swift */; }; + 40B8FFC12EC394AA0061E3F6 /* RingCallRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBE2EC394AA0061E3F6 /* RingCallRequest.swift */; }; + 40B8FFC22EC394AA0061E3F6 /* CallModerationWarningEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBD2EC394AA0061E3F6 /* CallModerationWarningEvent.swift */; }; + 40B8FFC32EC394AA0061E3F6 /* RingCallResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBF2EC394AA0061E3F6 /* RingCallResponse.swift */; }; + 40B8FFC72EC394C50061E3F6 /* ModerationWarningViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFC52EC394C50061E3F6 /* ModerationWarningViewModifier.swift */; }; + 40B8FFC82EC394C50061E3F6 /* ModerationBlurViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFC42EC394C50061E3F6 /* ModerationBlurViewModifier.swift */; }; + 40B8FFCD2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFCA2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift */; }; + 40B8FFCE2EC394D30061E3F6 /* BatteryStore_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFC92EC394D30061E3F6 /* BatteryStore_Tests.swift */; }; + 40B8FFCF2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift in Sources */ = {isa = PBXBuildFile; 
fileRef = 40B8FFCB2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift */; }; 40BBC4792C6227DC002AEF92 /* DemoNoiseCancellationButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40BBC4782C6227DC002AEF92 /* DemoNoiseCancellationButtonView.swift */; }; 40BBC47C2C6227F1002AEF92 /* View+PresentDemoMoreMenu.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40BBC47B2C6227F1002AEF92 /* View+PresentDemoMoreMenu.swift */; }; 40BBC47E2C62287F002AEF92 /* DemoReconnectionButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40BBC47D2C62287F002AEF92 /* DemoReconnectionButtonView.swift */; }; @@ -835,8 +845,7 @@ 40E1C8B12EA15A9200AC3647 /* RTCAudioStore_DefaultReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B02EA15A9200AC3647 /* RTCAudioStore_DefaultReducerTests.swift */; }; 40E1C8B32EA18C8400AC3647 /* RTCAudioStore_CallKitReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B22EA18C8400AC3647 /* RTCAudioStore_CallKitReducerTests.swift */; }; 40E1C8B62EA18E4D00AC3647 /* RTCAudioStore_AVAudioSessionReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B52EA18E4D00AC3647 /* RTCAudioStore_AVAudioSessionReducerTests.swift */; }; - 40E1C8B82EA1934000AC3647 /* RTCAudioStore_RouteChangeMiddlewareTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B72EA1934000AC3647 /* RTCAudioStore_RouteChangeMiddlewareTests.swift */; }; - 40E1C8BA2EA1946300AC3647 /* RTCAudioStore_InterruptionsMiddlewareTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsMiddlewareTests.swift */; }; + 40E1C8BA2EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift */; }; 40E1C8BC2EA195E000AC3647 /* RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8BB2EA195E000AC3647 
/* RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift */; }; 40E1C8BF2EA1992500AC3647 /* CallAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8BD2EA1992500AC3647 /* CallAudioSession_Tests.swift */; }; 40E3632E2D09DBFA0028C52A /* Int+DefaultValues.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3632D2D09DBFA0028C52A /* Int+DefaultValues.swift */; }; @@ -861,11 +870,11 @@ 40E363752D0A2C6B0028C52A /* CGSize+Adapt.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */; }; 40E363772D0A2E320028C52A /* BroadcastBufferReaderKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */; }; 40E741FF2D553ACD0044C955 /* CurrentDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */; }; - 40E7A45B2E29495500E8AB8B /* WebRTCLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E7A4582E29487700E8AB8B /* WebRTCLogger.swift */; }; 40E9B3B12BCD755F00ACF18F /* MemberResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */; }; 40E9B3B32BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */; }; 40E9B3B52BCD93F500ACF18F /* Credentials+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */; }; 40E9B3B72BCD941600ACF18F /* SFUResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B62BCD941600ACF18F /* SFUResponse+Dummy.swift */; }; + 40ED20E92EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40ED20E82EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift */; }; 40ED6D4B2B14F0E600FB5F69 /* Launch Screen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 
40ED6D4A2B14F0E600FB5F69 /* Launch Screen.storyboard */; }; 40EDA17C2C13792D00583A65 /* View+AlertWithTextField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 408937902C134305000EEB69 /* View+AlertWithTextField.swift */; }; 40EE9D2B2E969F010000EA92 /* AudioDeviceModule.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D282E969F010000EA92 /* AudioDeviceModule.swift */; }; @@ -879,11 +888,23 @@ 40EE9D4A2E97BCB20000EA92 /* RTCAudioStore+WebRTCAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D492E97BCB20000EA92 /* RTCAudioStore+WebRTCAudioSessionReducer.swift */; }; 40EE9D4D2E97C3910000EA92 /* RTCAudioStore+AVAudioSessionConfigurationValidator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D4C2E97C3910000EA92 /* RTCAudioStore+AVAudioSessionConfigurationValidator.swift */; }; 40EE9D4F2E97C7500000EA92 /* RTCAudioStore+CallKitReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D4E2E97C7500000EA92 /* RTCAudioStore+CallKitReducer.swift */; }; - 40EE9D512E97C7C60000EA92 /* RTCAudioStore+RouteChangeMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeMiddleware.swift */; }; + 40EE9D512E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift */; }; 40EE9D532E97C8B70000EA92 /* RTCAudioSessionPublisher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D522E97C8B70000EA92 /* RTCAudioSessionPublisher.swift */; }; - 40EE9D552E97CA7F0000EA92 /* RTCAudioStore+InterruptionsMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsMiddleware.swift */; }; + 40EE9D552E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift */; }; 40EE9D572E97FC980000EA92 /* 
RTCAudioStore+AudioDeviceModuleMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D562E97FC980000EA92 /* RTCAudioStore+AudioDeviceModuleMiddleware.swift */; }; 40EE9D5B2E991C6A0000EA92 /* RTCAudioStore+Coordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D5A2E991C6A0000EA92 /* RTCAudioStore+Coordinator.swift */; }; + 40EF61A32ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A22ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift */; }; + 40EF61A52ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A42ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift */; }; + 40EF61AA2ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A62ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift */; }; + 40EF61AB2ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A82ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift */; }; + 40EF61AC2ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A72ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift */; }; + 40EF61AE2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61AD2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift */; }; + 40EF61B02ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61AF2ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift */; }; + 40EF61B22ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B12ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift */; }; + 40EF61B72ED8917100ED1F04 /* 
RTCAudioStoreState+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B62ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift */; }; + 40EF61B82ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B62ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift */; }; + 40EF61BA2ED893A400ED1F04 /* MockStoreDispatcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B92ED893A400ED1F04 /* MockStoreDispatcher.swift */; }; + 40EF61BE2ED8B01300ED1F04 /* Logger+WebRTC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61BD2ED8B01300ED1F04 /* Logger+WebRTC.swift */; }; 40F017392BBEAF6400E89FD1 /* MockCallKitService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F017382BBEAF6400E89FD1 /* MockCallKitService.swift */; }; 40F0173B2BBEB1A900E89FD1 /* CallKitAdapterTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0173A2BBEB1A900E89FD1 /* CallKitAdapterTests.swift */; }; 40F0173E2BBEB86800E89FD1 /* TestsAuthenticationProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0173D2BBEB86800E89FD1 /* TestsAuthenticationProvider.swift */; }; @@ -1103,12 +1124,6 @@ 82E3BA552A0BAF4B001AB93E /* WebSocketClientEnvironment_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82E3BA522A0BAF4B001AB93E /* WebSocketClientEnvironment_Mock.swift */; }; 82E3BA562A0BAF64001AB93E /* WebSocketEngine_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F58B8A29EEACAD00010C4C /* WebSocketEngine_Mock.swift */; }; 82E3BA572A0BAF65001AB93E /* WebSocketEngine_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F58B8A29EEACAD00010C4C /* WebSocketEngine_Mock.swift */; }; - 82EB8F572B0277730038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F562B0277730038B5A2 /* StreamWebRTC */; }; - 82EB8F592B0277E70038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F582B0277E70038B5A2 /* StreamWebRTC */; }; - 82EB8F5B2B0277EC0038B5A2 /* 
StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F5A2B0277EC0038B5A2 /* StreamWebRTC */; }; - 82EB8F5D2B0277F10038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F5C2B0277F10038B5A2 /* StreamWebRTC */; }; - 82EB8F5F2B0277F60038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F5E2B0277F60038B5A2 /* StreamWebRTC */; }; - 82EB8F612B0277FB0038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F602B0277FB0038B5A2 /* StreamWebRTC */; }; 82FB89372A702A9200AC16A1 /* Authentication_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82FB89362A702A9200AC16A1 /* Authentication_Tests.swift */; }; 82FF40B52A17C6C200B4D95E /* CallControlsView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82FF40B42A17C6C200B4D95E /* CallControlsView_Tests.swift */; }; 82FF40B72A17C6CD00B4D95E /* ReconnectionView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82FF40B62A17C6CD00B4D95E /* ReconnectionView_Tests.swift */; }; @@ -1154,8 +1169,6 @@ 8414081129F284A800FF2D7C /* AssertJSONEqual.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8414081029F284A800FF2D7C /* AssertJSONEqual.swift */; }; 8414081329F28B5700FF2D7C /* RTCConfiguration_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8414081229F28B5600FF2D7C /* RTCConfiguration_Tests.swift */; }; 8414081529F28FFC00FF2D7C /* CallSettings_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8414081429F28FFC00FF2D7C /* CallSettings_Tests.swift */; }; - 841457372EBE5BF100D0D034 /* RingCallResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 841457362EBE5BF100D0D034 /* RingCallResponse.swift */; }; - 841457382EBE5BF100D0D034 /* RingCallRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 841457352EBE5BF100D0D034 /* RingCallRequest.swift */; }; 8415D3E1290B2AF2006E53CB /* outgoing.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 8415D3E0290B2AF2006E53CB /* outgoing.m4a */; 
}; 8415D3E3290BC882006E53CB /* Sounds.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8415D3E2290BC882006E53CB /* Sounds.swift */; }; 841947982886D9CD0007B36E /* BundleExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 841947972886D9CD0007B36E /* BundleExtensions.swift */; }; @@ -1331,8 +1344,6 @@ 845C09952C10A7D700F725B3 /* SessionTimer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845C09902C0E0B7600F725B3 /* SessionTimer.swift */; }; 845C09972C11AAA200F725B3 /* RejectCallRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845C09962C11AAA100F725B3 /* RejectCallRequest.swift */; }; 845E31062A7121D6004DC470 /* BroadcastObserver_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845E31052A7121D6004DC470 /* BroadcastObserver_Tests.swift */; }; - 8464FBA92EB3832000933768 /* CallModerationBlurEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8464FBA72EB3832000933768 /* CallModerationBlurEvent.swift */; }; - 8464FBAA2EB3832000933768 /* CallModerationWarningEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8464FBA82EB3832000933768 /* CallModerationWarningEvent.swift */; }; 8468821328DFA448003BA9EE /* UnsecureRepository.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8468821228DFA448003BA9EE /* UnsecureRepository.swift */; }; 8469593229BB3D7500134EA0 /* SignalServer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8469593129BB3D7500134EA0 /* SignalServer_Tests.swift */; }; 8469593429BB5CE200134EA0 /* HTTPConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8469593329BB5CE200134EA0 /* HTTPConfig.swift */; }; @@ -1918,7 +1929,6 @@ 40245F642BE27B2000FCF075 /* StatelessAudioOutputIconView_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StatelessAudioOutputIconView_Tests.swift; sourceTree = ""; }; 40245F662BE27B8400FCF075 /* StatelessSpeakerIconView_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
StatelessSpeakerIconView_Tests.swift; sourceTree = ""; }; 40245F682BE27CCB00FCF075 /* StatelessParticipantsListButton_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StatelessParticipantsListButton_Tests.swift; sourceTree = ""; }; - 4026BEE92EA79FD400360AD0 /* CallFlow_PerformanceTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallFlow_PerformanceTests.swift; sourceTree = ""; }; 402778822BD13C62002F4399 /* NoiseCancellationFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NoiseCancellationFilter.swift; sourceTree = ""; }; 4028FE972DC4F638001F9DC3 /* ConsumableBucket.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConsumableBucket.swift; sourceTree = ""; }; 4028FE992DC4FC8E001F9DC3 /* ConsumableBucketItemTransformer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConsumableBucketItemTransformer.swift; sourceTree = ""; }; @@ -1943,12 +1953,12 @@ 402C2A402DCE184400E60006 /* Combine+Sendable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Combine+Sendable.swift"; sourceTree = ""; }; 402C545A2B6BE50500672BFB /* MockStreamStatistics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockStreamStatistics.swift; sourceTree = ""; }; 402C545C2B6BE5E200672BFB /* StreamCallStatisticsFormatter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamCallStatisticsFormatter_Tests.swift; sourceTree = ""; }; + 402C5C5E2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivestreamAudioSessionPolicy.swift; sourceTree = ""; }; + 402C5C602ECB96D30096F212 /* AVAudioSessionObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioSessionObserver.swift; sourceTree = ""; }; + 
402C5C622ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = "RTCAudioStore+AVAudioSessionEffect.swift"; path = "Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift"; sourceTree = SOURCE_ROOT; }; 402D0E872D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioTrack+Clone.swift"; sourceTree = ""; }; 402D0E892D0C94E600E9B83F /* RTCVideoTrack+Clone.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCVideoTrack+Clone.swift"; sourceTree = ""; }; 402D0E8B2D0C94F900E9B83F /* CallSettings+Audio.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CallSettings+Audio.swift"; sourceTree = ""; }; - 402E699E2EA65FF90082F7FA /* BatteryStore_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore_Tests.swift; sourceTree = ""; }; - 402E699F2EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStoreDefaultReducer_Tests.swift; sourceTree = ""; }; - 402E69A02EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStoreObservationMiddleware_Tests.swift; sourceTree = ""; }; 402EE12F2AA8861B00312632 /* DemoChatViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoChatViewModel.swift; sourceTree = ""; }; 402F04A62B70ED8600CA1986 /* StreamCallStatisticsReporter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StreamCallStatisticsReporter.swift; sourceTree = ""; }; 402F04A72B70ED8600CA1986 /* Statistics+Convenience.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = 
"Statistics+Convenience.swift"; sourceTree = ""; }; @@ -1975,6 +1985,8 @@ 40382F412C89CF9300C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift"; sourceTree = ""; }; 40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_Participant+Convenience.swift"; sourceTree = ""; }; 40382F4F2C8B3DA800C2D00F /* StreamRTCPeerConnection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCPeerConnection.swift; sourceTree = ""; }; + 4039088C2EC2311A00B19FA1 /* StoreEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StoreEffect.swift; sourceTree = ""; }; + 403908AB2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+StereoPlayoutEffect.swift"; sourceTree = ""; }; 4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCRtpCodecCapability+Convenience.swift"; sourceTree = ""; }; 4039F0C92D0222E40078159E /* Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift"; sourceTree = ""; }; 4039F0CB2D0241120078159E /* AudioCodec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodec.swift; sourceTree = ""; }; @@ -2029,6 +2041,7 @@ 404A81302DA3C5F0001F7FA8 /* MockDefaultAPI.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockDefaultAPI.swift; sourceTree = ""; }; 404A81352DA3CBF0001F7FA8 /* CallConfigurationTests.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = CallConfigurationTests.swift; sourceTree = ""; }; 404A81372DA3CC0C001F7FA8 /* CallConfiguration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallConfiguration.swift; sourceTree = ""; }; + 404B546A2ED06D8C009378F2 /* RetriableTask.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RetriableTask.swift; sourceTree = ""; }; 4050725F2E5F49D5003D2109 /* CallKitMissingPermissionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitMissingPermissionPolicy.swift; sourceTree = ""; }; 405072612E5F4CCA003D2109 /* CallKitMissingPermissionPolicyProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitMissingPermissionPolicyProtocol.swift; sourceTree = ""; }; 405072642E5F4CDD003D2109 /* CallKitMissingPermissionPolicy+NoOp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CallKitMissingPermissionPolicy+NoOp.swift"; sourceTree = ""; }; @@ -2194,7 +2207,6 @@ 4097B3822BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OnChangeViewModifier_iOS13.swift; sourceTree = ""; }; 40986C392CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCRtpEncodingParameters_Test.swift; sourceTree = ""; }; 40986C3B2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCRtpTransceiverInit_Tests.swift; sourceTree = ""; }; - 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSession_Tests.swift; sourceTree = ""; }; 409AF6E52DAFAC4700EE7BF6 /* PictureInPictureReconnectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
PictureInPictureReconnectionView.swift; sourceTree = ""; }; 409AF6E72DAFC80200EE7BF6 /* PictureInPictureContent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureContent.swift; sourceTree = ""; }; 409AF6E92DAFE1B000EE7BF6 /* PictureInPictureContentProviderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureContentProviderTests.swift; sourceTree = ""; }; @@ -2211,15 +2223,7 @@ 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoBackgroundEffectSelector.swift; sourceTree = ""; }; 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+CGOrientation.swift"; sourceTree = ""; }; 40A0E9672B88E04D0089E8D3 /* CIImage_Resize_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CIImage_Resize_Tests.swift; sourceTree = ""; }; - 40A0FFB02EA63CB900F39D8F /* BatteryStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore.swift; sourceTree = ""; }; - 40A0FFB32EA63D3C00F39D8F /* BatteryStore+Namespace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Namespace.swift"; sourceTree = ""; }; - 40A0FFB52EA63D8F00F39D8F /* BatteryStore+State.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+State.swift"; sourceTree = ""; }; - 40A0FFB72EA63D9700F39D8F /* BatteryStore+Action.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Action.swift"; sourceTree = ""; }; - 40A0FFBA2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+DefaultReducer.swift"; sourceTree = ""; }; - 40A0FFBD2EA63FE500F39D8F /* 
BatteryStore+ObservationMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+ObservationMiddleware.swift"; sourceTree = ""; }; 40A0FFBF2EA6418000F39D8F /* Sequence+AsyncReduce.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Sequence+AsyncReduce.swift"; sourceTree = ""; }; - 40A317E72EB504C900733948 /* ModerationBlurViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModerationBlurViewModifier.swift; sourceTree = ""; }; - 40A317EA2EB5081500733948 /* ModerationWarningViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModerationWarningViewModifier.swift; sourceTree = ""; }; 40A7C5B22E099B1000EEDF9C /* ParticipantEventResetAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParticipantEventResetAdapter.swift; sourceTree = ""; }; 40A7C5B72E099D6200EEDF9C /* ParticipantEventResetAdapter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParticipantEventResetAdapter_Tests.swift; sourceTree = ""; }; 40A9416D2B4D959F006D6965 /* StreamPictureInPictureAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPictureInPictureAdapter.swift; sourceTree = ""; }; @@ -2335,6 +2339,21 @@ 40B575CF2DCCEBA900F489B8 /* PictureInPictureEnforcedStopAdapterTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureEnforcedStopAdapterTests.swift; sourceTree = ""; }; 40B575D22DCCECDA00F489B8 /* MockAVPictureInPictureController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAVPictureInPictureController.swift; sourceTree = ""; }; 40B575D52DCCEFB500F489B8 /* StreamPictureInPictureControllerProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPictureInPictureControllerProtocol.swift; sourceTree = 
""; }; + 40B8FFAC2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+ObservationMiddleware.swift"; sourceTree = ""; }; + 40B8FFAE2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+DefaultReducer.swift"; sourceTree = ""; }; + 40B8FFB02EC3949F0061E3F6 /* BatteryStore+Action.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Action.swift"; sourceTree = ""; }; + 40B8FFB12EC3949F0061E3F6 /* BatteryStore+Namespace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Namespace.swift"; sourceTree = ""; }; + 40B8FFB22EC3949F0061E3F6 /* BatteryStore+State.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+State.swift"; sourceTree = ""; }; + 40B8FFB42EC3949F0061E3F6 /* BatteryStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore.swift; sourceTree = ""; }; + 40B8FFBC2EC394AA0061E3F6 /* CallModerationBlurEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallModerationBlurEvent.swift; sourceTree = ""; }; + 40B8FFBD2EC394AA0061E3F6 /* CallModerationWarningEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallModerationWarningEvent.swift; sourceTree = ""; }; + 40B8FFBE2EC394AA0061E3F6 /* RingCallRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RingCallRequest.swift; sourceTree = ""; }; + 40B8FFBF2EC394AA0061E3F6 /* RingCallResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RingCallResponse.swift; sourceTree = ""; }; + 40B8FFC42EC394C50061E3F6 /* ModerationBlurViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
ModerationBlurViewModifier.swift; sourceTree = ""; }; + 40B8FFC52EC394C50061E3F6 /* ModerationWarningViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModerationWarningViewModifier.swift; sourceTree = ""; }; + 40B8FFC92EC394D30061E3F6 /* BatteryStore_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore_Tests.swift; sourceTree = ""; }; + 40B8FFCA2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStoreDefaultReducer_Tests.swift; sourceTree = ""; }; + 40B8FFCB2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStoreObservationMiddleware_Tests.swift; sourceTree = ""; }; 40BBC4782C6227DC002AEF92 /* DemoNoiseCancellationButtonView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoNoiseCancellationButtonView.swift; sourceTree = ""; }; 40BBC47B2C6227F1002AEF92 /* View+PresentDemoMoreMenu.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "View+PresentDemoMoreMenu.swift"; sourceTree = ""; }; 40BBC47D2C62287F002AEF92 /* DemoReconnectionButtonView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoReconnectionButtonView.swift; sourceTree = ""; }; @@ -2495,8 +2514,7 @@ 40E1C8B02EA15A9200AC3647 /* RTCAudioStore_DefaultReducerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_DefaultReducerTests.swift; sourceTree = ""; }; 40E1C8B22EA18C8400AC3647 /* RTCAudioStore_CallKitReducerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_CallKitReducerTests.swift; sourceTree = ""; }; 40E1C8B52EA18E4D00AC3647 /* RTCAudioStore_AVAudioSessionReducerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = RTCAudioStore_AVAudioSessionReducerTests.swift; sourceTree = ""; }; - 40E1C8B72EA1934000AC3647 /* RTCAudioStore_RouteChangeMiddlewareTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_RouteChangeMiddlewareTests.swift; sourceTree = ""; }; - 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsMiddlewareTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_InterruptionsMiddlewareTests.swift; sourceTree = ""; }; + 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_InterruptionsEffectTests.swift; sourceTree = ""; }; 40E1C8BB2EA195E000AC3647 /* RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift; sourceTree = ""; }; 40E1C8BD2EA1992500AC3647 /* CallAudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallAudioSession_Tests.swift; sourceTree = ""; }; 40E3632D2D09DBFA0028C52A /* Int+DefaultValues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Int+DefaultValues.swift"; sourceTree = ""; }; @@ -2521,11 +2539,11 @@ 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGSize+Adapt.swift"; sourceTree = ""; }; 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastBufferReaderKey.swift; sourceTree = ""; }; 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentDevice.swift; sourceTree = ""; }; - 40E7A4582E29487700E8AB8B /* WebRTCLogger.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path 
= WebRTCLogger.swift; sourceTree = ""; }; 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "MemberResponse+Dummy.swift"; sourceTree = ""; }; 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "JoinCallResponse+Dummy.swift"; sourceTree = ""; }; 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Credentials+Dummy.swift"; sourceTree = ""; }; 40E9B3B62BCD941600ACF18F /* SFUResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "SFUResponse+Dummy.swift"; sourceTree = ""; }; + 40ED20E82EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession.RouteChangeReason+Convenience.swift"; sourceTree = ""; }; 40ED6D4A2B14F0E600FB5F69 /* Launch Screen.storyboard */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; path = "Launch Screen.storyboard"; sourceTree = ""; }; 40EE9D282E969F010000EA92 /* AudioDeviceModule.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioDeviceModule.swift; sourceTree = ""; }; 40EE9D292E969F010000EA92 /* AudioEngineLevelNodeAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioEngineLevelNodeAdapter.swift; sourceTree = ""; }; @@ -2538,11 +2556,22 @@ 40EE9D492E97BCB20000EA92 /* RTCAudioStore+WebRTCAudioSessionReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+WebRTCAudioSessionReducer.swift"; sourceTree = ""; }; 40EE9D4C2E97C3910000EA92 /* RTCAudioStore+AVAudioSessionConfigurationValidator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+AVAudioSessionConfigurationValidator.swift"; sourceTree = 
""; }; 40EE9D4E2E97C7500000EA92 /* RTCAudioStore+CallKitReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+CallKitReducer.swift"; sourceTree = ""; }; - 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+RouteChangeMiddleware.swift"; sourceTree = ""; }; + 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+RouteChangeEffect.swift"; sourceTree = ""; }; 40EE9D522E97C8B70000EA92 /* RTCAudioSessionPublisher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionPublisher.swift; sourceTree = ""; }; - 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+InterruptionsMiddleware.swift"; sourceTree = ""; }; + 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+InterruptionsEffect.swift"; sourceTree = ""; }; 40EE9D562E97FC980000EA92 /* RTCAudioStore+AudioDeviceModuleMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+AudioDeviceModuleMiddleware.swift"; sourceTree = ""; }; 40EE9D5A2E991C6A0000EA92 /* RTCAudioStore+Coordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+Coordinator.swift"; sourceTree = ""; }; + 40EF61A22ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioEngineLevelNodeAdapter_Tests.swift; sourceTree = ""; }; + 40EF61A42ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
LivestreamAudioSessionPolicyTests.swift; sourceTree = ""; }; + 40EF61A62ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioSessionObserver_Tests.swift; sourceTree = ""; }; + 40EF61A72ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionPublisher_Tests.swift; sourceTree = ""; }; + 40EF61A82ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift; sourceTree = ""; }; + 40EF61AD2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_AVAudioSessionEffectTests.swift; sourceTree = ""; }; + 40EF61AF2ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_RouteChangeEffectTests.swift; sourceTree = ""; }; + 40EF61B12ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_StereoPlayoutEffectTests.swift; sourceTree = ""; }; + 40EF61B62ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreState+Dummy.swift"; sourceTree = ""; }; + 40EF61B92ED893A400ED1F04 /* MockStoreDispatcher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockStoreDispatcher.swift; sourceTree = ""; }; + 40EF61BD2ED8B01300ED1F04 /* Logger+WebRTC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Logger+WebRTC.swift"; sourceTree = ""; }; 40F017382BBEAF6400E89FD1 /* MockCallKitService.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = MockCallKitService.swift; sourceTree = ""; }; 40F0173A2BBEB1A900E89FD1 /* CallKitAdapterTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAdapterTests.swift; sourceTree = ""; }; 40F0173D2BBEB86800E89FD1 /* TestsAuthenticationProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestsAuthenticationProvider.swift; sourceTree = ""; }; @@ -2754,8 +2783,6 @@ 8414081029F284A800FF2D7C /* AssertJSONEqual.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AssertJSONEqual.swift; sourceTree = ""; }; 8414081229F28B5600FF2D7C /* RTCConfiguration_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCConfiguration_Tests.swift; sourceTree = ""; }; 8414081429F28FFC00FF2D7C /* CallSettings_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallSettings_Tests.swift; sourceTree = ""; }; - 841457352EBE5BF100D0D034 /* RingCallRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RingCallRequest.swift; sourceTree = ""; }; - 841457362EBE5BF100D0D034 /* RingCallResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RingCallResponse.swift; sourceTree = ""; }; 8415D3E0290B2AF2006E53CB /* outgoing.m4a */ = {isa = PBXFileReference; lastKnownFileType = file; path = outgoing.m4a; sourceTree = ""; }; 8415D3E2290BC882006E53CB /* Sounds.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Sounds.swift; sourceTree = ""; }; 841947972886D9CD0007B36E /* BundleExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BundleExtensions.swift; sourceTree = ""; }; @@ -2926,8 +2953,6 @@ 845C09922C0E1BF900F725B3 /* DemoSessionTimerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoSessionTimerView.swift; sourceTree = ""; }; 
845C09962C11AAA100F725B3 /* RejectCallRequest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RejectCallRequest.swift; sourceTree = ""; }; 845E31052A7121D6004DC470 /* BroadcastObserver_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastObserver_Tests.swift; sourceTree = ""; }; - 8464FBA72EB3832000933768 /* CallModerationBlurEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallModerationBlurEvent.swift; sourceTree = ""; }; - 8464FBA82EB3832000933768 /* CallModerationWarningEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallModerationWarningEvent.swift; sourceTree = ""; }; 8468821228DFA448003BA9EE /* UnsecureRepository.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UnsecureRepository.swift; sourceTree = ""; }; 8469593129BB3D7500134EA0 /* SignalServer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SignalServer_Tests.swift; sourceTree = ""; }; 8469593329BB5CE200134EA0 /* HTTPConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HTTPConfig.swift; sourceTree = ""; }; @@ -3253,7 +3278,6 @@ buildActionMask = 2147483647; files = ( 40C4DF522C1C60A80035DBC2 /* StreamVideo.framework in Frameworks */, - 40BAD0B32EA7CE3200CCD3D7 /* StreamWebRTC in Frameworks */, 822FF7212AEAD100000202A7 /* StreamSwiftTestHelpers in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -3277,7 +3301,7 @@ 8423B7562950BB0B00012F8D /* Sentry in Frameworks */, 40AC73B42BE0062B00C57517 /* StreamVideoNoiseCancellation in Frameworks */, 401A64A82A9DF7B400534ED1 /* EffectsLibrary in Frameworks */, - 82EB8F592B0277E70038B5A2 /* StreamWebRTC in Frameworks */, + 40B8FFA92EC393B50061E3F6 /* StreamWebRTC in Frameworks */, 4035913C2BC53D2A00B5B767 /* Accelerate.framework in Frameworks */, 84F7381A287C141000A363F4 /* 
StreamVideoSwiftUI.framework in Frameworks */, 84F7384D287C198500A363F4 /* StreamVideo.framework in Frameworks */, @@ -3306,7 +3330,7 @@ 848A805D290A808E00F3079B /* StreamVideoUIKit.framework in Frameworks */, 848A8058290A808A00F3079B /* StreamVideo.framework in Frameworks */, 848A805B290A808C00F3079B /* StreamVideoSwiftUI.framework in Frameworks */, - 82EB8F5B2B0277EC0038B5A2 /* StreamWebRTC in Frameworks */, + 40B8FFAB2EC393BB0061E3F6 /* StreamWebRTC in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -3315,7 +3339,7 @@ buildActionMask = 2147483647; files = ( 84BE8A5628BE314000B34D2F /* SwiftProtobuf in Frameworks */, - 82EB8F572B0277730038B5A2 /* StreamWebRTC in Frameworks */, + 40B8FFA72EC393A80061E3F6 /* StreamWebRTC in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -3324,7 +3348,6 @@ buildActionMask = 2147483647; files = ( 84F737F5287C13AD00A363F4 /* StreamVideo.framework in Frameworks */, - 82EB8F5D2B0277F10038B5A2 /* StreamWebRTC in Frameworks */, 822FF71B2AEAD0B4000202A7 /* StreamSwiftTestHelpers in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -3342,7 +3365,6 @@ buildActionMask = 2147483647; files = ( 84F7380F287C141000A363F4 /* StreamVideoSwiftUI.framework in Frameworks */, - 82EB8F5F2B0277F60038B5A2 /* StreamWebRTC in Frameworks */, 822FF71D2AEAD0BE000202A7 /* StreamSwiftTestHelpers in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -3361,7 +3383,6 @@ buildActionMask = 2147483647; files = ( 84F73830287C146D00A363F4 /* StreamVideoUIKit.framework in Frameworks */, - 82EB8F612B0277FB0038B5A2 /* StreamWebRTC in Frameworks */, 822FF71F2AEAD0C4000202A7 /* StreamSwiftTestHelpers in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -3831,16 +3852,6 @@ path = SerialActorQueue; sourceTree = ""; }; - 402E69A12EA65FF90082F7FA /* Battery */ = { - isa = PBXGroup; - children = ( - 402E699E2EA65FF90082F7FA /* BatteryStore_Tests.swift */, - 402E699F2EA65FF90082F7FA /* 
BatteryStoreDefaultReducer_Tests.swift */, - 402E69A02EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift */, - ); - path = Battery; - sourceTree = ""; - }; 402F04A52B70ED8600CA1986 /* Statistics */ = { isa = PBXGroup; children = ( @@ -4041,6 +4052,17 @@ path = Protocols; sourceTree = ""; }; + 4039088B2EC230F100B19FA1 /* Effects */ = { + isa = PBXGroup; + children = ( + 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift */, + 403908AB2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift */, + 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift */, + 402C5C622ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift */, + ); + path = Effects; + sourceTree = ""; + }; 4039F0CD2D024DCE0078159E /* Utilities */ = { isa = PBXGroup; children = ( @@ -4317,6 +4339,7 @@ 4067F3092CDA330E002E28BD /* Extensions */ = { isa = PBXGroup; children = ( + 40ED20E82EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift */, 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */, 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */, 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */, @@ -4788,6 +4811,7 @@ children = ( 40944D1E2E4E37E600088AF0 /* StoreNamespace.swift */, 40944D142E4E2D8F00088AF0 /* Store.swift */, + 4039088C2EC2311A00B19FA1 /* StoreEffect.swift */, 40944D162E4E352800088AF0 /* Reducer.swift */, 40944D282E532D4100088AF0 /* StoreDelay.swift */, 40944D182E4E353F00088AF0 /* Middleware.swift */, @@ -4866,52 +4890,6 @@ path = Extensions; sourceTree = ""; }; - 40A0FFAF2EA63CB000F39D8F /* Battery */ = { - isa = PBXGroup; - children = ( - 40A0FFB22EA63D1C00F39D8F /* Namespace */, - 40A0FFB02EA63CB900F39D8F /* BatteryStore.swift */, - ); - path = Battery; - sourceTree = ""; - }; - 40A0FFB22EA63D1C00F39D8F /* Namespace */ = { - isa = PBXGroup; - children = ( - 40A0FFBC2EA63F6F00F39D8F /* Middleware */, - 40A0FFB92EA63E9200F39D8F /* 
Reducers */, - 40A0FFB32EA63D3C00F39D8F /* BatteryStore+Namespace.swift */, - 40A0FFB52EA63D8F00F39D8F /* BatteryStore+State.swift */, - 40A0FFB72EA63D9700F39D8F /* BatteryStore+Action.swift */, - ); - path = Namespace; - sourceTree = ""; - }; - 40A0FFB92EA63E9200F39D8F /* Reducers */ = { - isa = PBXGroup; - children = ( - 40A0FFBA2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift */, - ); - path = Reducers; - sourceTree = ""; - }; - 40A0FFBC2EA63F6F00F39D8F /* Middleware */ = { - isa = PBXGroup; - children = ( - 40A0FFBD2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift */, - ); - path = Middleware; - sourceTree = ""; - }; - 40A317E62EB504B900733948 /* Moderation */ = { - isa = PBXGroup; - children = ( - 40A317E72EB504C900733948 /* ModerationBlurViewModifier.swift */, - 40A317EA2EB5081500733948 /* ModerationWarningViewModifier.swift */, - ); - path = Moderation; - sourceTree = ""; - }; 40A7C5B42E099B1600EEDF9C /* ParticipantEventResetAdapter */ = { isa = PBXGroup; children = ( @@ -5270,6 +5248,62 @@ path = Visitors; sourceTree = ""; }; + 40B8FFAD2EC3949F0061E3F6 /* Middleware */ = { + isa = PBXGroup; + children = ( + 40B8FFAC2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift */, + ); + path = Middleware; + sourceTree = ""; + }; + 40B8FFAF2EC3949F0061E3F6 /* Reducers */ = { + isa = PBXGroup; + children = ( + 40B8FFAE2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift */, + ); + path = Reducers; + sourceTree = ""; + }; + 40B8FFB32EC3949F0061E3F6 /* Namespace */ = { + isa = PBXGroup; + children = ( + 40B8FFAD2EC3949F0061E3F6 /* Middleware */, + 40B8FFAF2EC3949F0061E3F6 /* Reducers */, + 40B8FFB02EC3949F0061E3F6 /* BatteryStore+Action.swift */, + 40B8FFB12EC3949F0061E3F6 /* BatteryStore+Namespace.swift */, + 40B8FFB22EC3949F0061E3F6 /* BatteryStore+State.swift */, + ); + path = Namespace; + sourceTree = ""; + }; + 40B8FFB52EC3949F0061E3F6 /* Battery */ = { + isa = PBXGroup; + children = ( + 40B8FFB32EC3949F0061E3F6 /* Namespace */, + 
40B8FFB42EC3949F0061E3F6 /* BatteryStore.swift */, + ); + path = Battery; + sourceTree = ""; + }; + 40B8FFC62EC394C50061E3F6 /* Moderation */ = { + isa = PBXGroup; + children = ( + 40B8FFC42EC394C50061E3F6 /* ModerationBlurViewModifier.swift */, + 40B8FFC52EC394C50061E3F6 /* ModerationWarningViewModifier.swift */, + ); + path = Moderation; + sourceTree = ""; + }; + 40B8FFCC2EC394D30061E3F6 /* Battery */ = { + isa = PBXGroup; + children = ( + 40B8FFC92EC394D30061E3F6 /* BatteryStore_Tests.swift */, + 40B8FFCA2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift */, + 40B8FFCB2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift */, + ); + path = Battery; + sourceTree = ""; + }; 40BBC47A2C6227DF002AEF92 /* Extensions */ = { isa = PBXGroup; children = ( @@ -5553,7 +5587,7 @@ 40C7B82A2B612D5100FB9DB2 /* ViewModifiers */ = { isa = PBXGroup; children = ( - 40A317E62EB504B900733948 /* Moderation */, + 40B8FFC62EC394C50061E3F6 /* Moderation */, 403EFC9E2BDBFE050057C248 /* CallEndedViewModifier.swift */, 408D29A02B6D208700885473 /* Snapshot */, 409145E92B68FDD2007F3C17 /* ReadableContentGuide */, @@ -5651,6 +5685,7 @@ 40D287392DB12CAA006AD8C7 /* Policies */ = { isa = PBXGroup; children = ( + 40EF61A42ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift */, 40D2873A2DB12CAD006AD8C7 /* DefaultAudioSessionPolicyTests.swift */, 40D2873C2DB12E46006AD8C7 /* OwnCapabilitiesAudioSessionPolicyTests.swift */, ); @@ -5694,6 +5729,7 @@ 40D75C4F2E437FAE000E0438 /* RTCAudioStore */ = { isa = PBXGroup; children = ( + 40EF61A92ED8801600ED1F04 /* Components */, 40E1C8AA2EA1561D00AC3647 /* Namespace */, 40E1C8A32EA14CFA00AC3647 /* Components */, 40D75C622E4396D2000E0438 /* RTCAudioStore_Tests.swift */, @@ -5752,6 +5788,7 @@ 40E1C89E2EA1176200AC3647 /* AudioDeviceModule */ = { isa = PBXGroup; children = ( + 40EF61A22ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift */, 40E1C89F2EA1176C00AC3647 /* AudioDeviceModule_Tests.swift */, ); path = AudioDeviceModule; @@ 
-5769,6 +5806,7 @@ 40E1C8AA2EA1561D00AC3647 /* Namespace */ = { isa = PBXGroup; children = ( + 40EF61A12ED85E2B00ED1F04 /* Effects */, 40E1C8B42EA18C9400AC3647 /* Middleware */, 40E1C8AE2EA157FD00AC3647 /* Reducers */, 40E1C8A92EA1561D00AC3647 /* RTCAudioStore_CoordinatorTests.swift */, @@ -5791,8 +5829,6 @@ isa = PBXGroup; children = ( 40E1C8BB2EA195E000AC3647 /* RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift */, - 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsMiddlewareTests.swift */, - 40E1C8B72EA1934000AC3647 /* RTCAudioStore_RouteChangeMiddlewareTests.swift */, ); path = Middleware; sourceTree = ""; @@ -5930,6 +5966,7 @@ 40EE9D362E97B34C0000EA92 /* Namespace */ = { isa = PBXGroup; children = ( + 4039088B2EC230F100B19FA1 /* Effects */, 40EE9D442E97B3A60000EA92 /* Reducers */, 40EE9D432E97B3A10000EA92 /* Middleware */, 40EE9D3D2E97B38F0000EA92 /* RTCAudioStore+Namespace.swift */, @@ -5943,8 +5980,6 @@ 40EE9D432E97B3A10000EA92 /* Middleware */ = { isa = PBXGroup; children = ( - 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeMiddleware.swift */, - 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsMiddleware.swift */, 40EE9D562E97FC980000EA92 /* RTCAudioStore+AudioDeviceModuleMiddleware.swift */, ); path = Middleware; @@ -5967,6 +6002,28 @@ 4019A2812E4352CB00CE70A4 /* AudioSessions */, 40EE9D4C2E97C3910000EA92 /* RTCAudioStore+AVAudioSessionConfigurationValidator.swift */, 40EE9D522E97C8B70000EA92 /* RTCAudioSessionPublisher.swift */, + 402C5C602ECB96D30096F212 /* AVAudioSessionObserver.swift */, + ); + path = Components; + sourceTree = ""; + }; + 40EF61A12ED85E2B00ED1F04 /* Effects */ = { + isa = PBXGroup; + children = ( + 40EF61B12ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift */, + 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift */, + 40EF61AD2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift */, + 40EF61AF2ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift */, + ); + 
path = Effects; + sourceTree = ""; + }; + 40EF61A92ED8801600ED1F04 /* Components */ = { + isa = PBXGroup; + children = ( + 40EF61A62ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift */, + 40EF61A72ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift */, + 40EF61A82ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift */, ); path = Components; sourceTree = ""; @@ -6032,6 +6089,7 @@ 40AB34C62C5D3F0400B5B6B3 /* CallStatsReport+Dummy.swift */, 40AB34C82C5D3F2E00B5B6B3 /* ParticipantsStats+Dummy.swift */, 843060FF2D38203D000E14D5 /* SessionSettingsResponse+Dummy.swift */, + 40EF61B62ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift */, ); path = Dummy; sourceTree = ""; @@ -6087,6 +6145,7 @@ 40F101672D5A653200C49481 /* AudioSessionPolicy.swift */, 40F1016B2D5A654300C49481 /* DefaultAudioSessionPolicy.swift */, 40F101652D5A324700C49481 /* OwnCapabilitiesAudioSessionPolicy.swift */, + 402C5C5E2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift */, ); path = Policies; sourceTree = ""; @@ -6433,7 +6492,6 @@ 824DBA9F29F6D77B005ACD09 /* ReconnectionTests.swift */, 82FB89362A702A9200AC16A1 /* Authentication_Tests.swift */, 40B499CB2AC1A90F00A53B60 /* DeeplinkTests.swift */, - 4026BEE92EA79FD400360AD0 /* CallFlow_PerformanceTests.swift */, ); path = Tests; sourceTree = ""; @@ -6590,7 +6648,7 @@ 842747F429EEDACB00E063AD /* Utils */ = { isa = PBXGroup; children = ( - 402E69A12EA65FF90082F7FA /* Battery */, + 40B8FFCC2EC394D30061E3F6 /* Battery */, 40064BD72E5C88DC007CDB33 /* PermissionStore */, 40C71B572E5355F800733BF6 /* Store */, 40FAAC852DDC9B2D007BF93A /* AnyEncodable.swift */, @@ -6764,10 +6822,10 @@ 8456E6C7287EC343004E180E /* Logger */ = { isa = PBXGroup; children = ( - 40E7A4582E29487700E8AB8B /* WebRTCLogger.swift */, 40BBC4A92C6270F5002AEF92 /* Array+Logger.swift */, 40AB34E02C5E73F900B5B6B3 /* Publisher+Logger.swift */, 8456E6C8287EC343004E180E /* Logger.swift */, + 40EF61BD2ED8B01300ED1F04 /* Logger+WebRTC.swift */, 
40E1C8982EA1080100AC3647 /* Logger+ThrowingExecution.swift */, 406568932E05515700A67EAC /* Signposting.swift */, 8456E6C9287EC343004E180E /* Destination */, @@ -7024,6 +7082,7 @@ 4019A2862E43565A00CE70A4 /* MockAudioSession.swift */, 4019A2882E4357B200CE70A4 /* MockRTCAudioStore.swift */, 40E1C89C2EA115AB00AC3647 /* MockRTCAudioDeviceModule.swift */, + 40EF61B92ED893A400ED1F04 /* MockStoreDispatcher.swift */, 40E1C8A12EA13C9700AC3647 /* MockAudioEngineNodeAdapter.swift */, ); path = Mock; @@ -7122,7 +7181,7 @@ 84AF64D3287C79220012A503 /* Utils */ = { isa = PBXGroup; children = ( - 40A0FFAF2EA63CB000F39D8F /* Battery */, + 40B8FFB52EC3949F0061E3F6 /* Battery */, 40C8F0042E55D21D00A0CC87 /* PermissionsStore */, 40944D132E4E2D8800088AF0 /* Store */, 406568852E0426F600A67EAC /* IdleTimerAdapter */, @@ -7155,6 +7214,7 @@ 8456E6DA287EC530004E180E /* StreamRuntimeCheck.swift */, 8268615F290A7556005BFFED /* SystemEnvironment.swift */, 841FF51A2A5FED4800809BBB /* SystemEnvironment+XStreamClient.swift */, + 404B546A2ED06D8C009378F2 /* RetriableTask.swift */, 401A0F022AB1C1B600BE2DBD /* ThermalStateObserver.swift */, 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */, 84C2997C28784BB30034B735 /* Utils.swift */, @@ -7249,7 +7309,6 @@ 845E31042A7121BE004DC470 /* Screensharing */, 8414081229F28B5600FF2D7C /* RTCConfiguration_Tests.swift */, 8446AF902A4D84F4002AB07B /* Retries_Tests.swift */, - 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */, 400C9FCC2D9D648100DB26DC /* RTCConfiguration_DefaultsTests.swift */, ); path = WebRTC; @@ -7258,10 +7317,10 @@ 84DC383E29ADFCFC00946713 /* Models */ = { isa = PBXGroup; children = ( - 841457352EBE5BF100D0D034 /* RingCallRequest.swift */, - 841457362EBE5BF100D0D034 /* RingCallResponse.swift */, - 8464FBA72EB3832000933768 /* CallModerationBlurEvent.swift */, - 8464FBA82EB3832000933768 /* CallModerationWarningEvent.swift */, + 40B8FFBC2EC394AA0061E3F6 /* CallModerationBlurEvent.swift */, + 40B8FFBD2EC394AA0061E3F6 
/* CallModerationWarningEvent.swift */, + 40B8FFBE2EC394AA0061E3F6 /* RingCallRequest.swift */, + 40B8FFBF2EC394AA0061E3F6 /* RingCallResponse.swift */, 8438AB042E5F3A2900BA834F /* AppEventResponse.swift */, 8438AB052E5F3A2900BA834F /* FileUploadConfig.swift */, 84DD68F22E5F24A9001A1DF5 /* AppUpdatedEvent.swift */, @@ -7799,7 +7858,6 @@ name = SwiftUIDemoAppUITests; packageProductDependencies = ( 822FF7202AEAD100000202A7 /* StreamSwiftTestHelpers */, - 40BAD0B22EA7CE3200CCD3D7 /* StreamWebRTC */, ); productName = SwiftUIDemoAppUITests; productReference = 82392D512993C9E100941435 /* SwiftUIDemoAppUITests.xctest */; @@ -7847,9 +7905,9 @@ 401A64A72A9DF7B400534ED1 /* EffectsLibrary */, 4046DEEF2A9F469100CA6D2F /* GDPerformanceView-Swift */, 844ADA642AD3F1AB00769F6A /* GoogleSignInSwift */, - 82EB8F582B0277E70038B5A2 /* StreamWebRTC */, 40AC73B32BE0062B00C57517 /* StreamVideoNoiseCancellation */, 4014F1022D8C2EBC004E7EFD /* Gleap */, + 40B8FFA82EC393B50061E3F6 /* StreamWebRTC */, ); productName = StreamVideoSwiftUI; productReference = 842D8BC32865B31B00801910 /* StreamVideoCallApp-Debug.app */; @@ -7893,10 +7951,10 @@ 4029A6292AB069600065DAFB /* StreamChatSwiftUI */, 406303442AD942ED0091AE77 /* GoogleSignInSwift */, 82E1C14E2AEA7DD50076D7BE /* GDPerformanceView-Swift */, - 82EB8F5A2B0277EC0038B5A2 /* StreamWebRTC */, 40AB35682B738D3D00E465CC /* EffectsLibrary */, 40F017892BC014EC00E89FD1 /* Sentry */, 40C708D52D8D729500D3501F /* Gleap */, + 40B8FFAA2EC393BB0061E3F6 /* StreamWebRTC */, ); productName = DemoAppUIKit; productReference = 8493224C290837890013C029 /* DemoAppUIKit.app */; @@ -7919,7 +7977,7 @@ name = StreamVideo; packageProductDependencies = ( 84BE8A5528BE314000B34D2F /* SwiftProtobuf */, - 82EB8F562B0277730038B5A2 /* StreamWebRTC */, + 40B8FFA62EC393A80061E3F6 /* StreamWebRTC */, ); productName = StreamVideo; productReference = 84F737ED287C13AC00A363F4 /* StreamVideo.framework */; @@ -7941,7 +7999,6 @@ name = StreamVideoTests; packageProductDependencies = ( 
822FF71A2AEAD0B4000202A7 /* StreamSwiftTestHelpers */, - 82EB8F5C2B0277F10038B5A2 /* StreamWebRTC */, ); productName = StreamVideoTests; productReference = 84F737F4287C13AD00A363F4 /* StreamVideoTests.xctest */; @@ -7985,7 +8042,6 @@ name = StreamVideoSwiftUITests; packageProductDependencies = ( 822FF71C2AEAD0BE000202A7 /* StreamSwiftTestHelpers */, - 82EB8F5E2B0277F60038B5A2 /* StreamWebRTC */, ); productName = StreamVideoSwiftUITests; productReference = 84F7380E287C141000A363F4 /* StreamVideoSwiftUITests.xctest */; @@ -8029,7 +8085,6 @@ name = StreamVideoUIKitTests; packageProductDependencies = ( 822FF71E2AEAD0C4000202A7 /* StreamSwiftTestHelpers */, - 82EB8F602B0277FB0038B5A2 /* StreamWebRTC */, ); productName = StreamVideoUIKitTests; productReference = 84F7382F287C146D00A363F4 /* StreamVideoUIKitTests.xctest */; @@ -8101,9 +8156,9 @@ 40F445C32A9E1D91004BE3DA /* XCRemoteSwiftPackageReference "stream-chat-swift-test-helpers" */, 4046DEEC2A9F404300CA6D2F /* XCRemoteSwiftPackageReference "GDPerformanceView-Swift" */, 844ADA612AD3F1AB00769F6A /* XCRemoteSwiftPackageReference "GoogleSignIn-iOS" */, - 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */, 40AC73B22BE0062B00C57517 /* XCRemoteSwiftPackageReference "stream-video-noise-cancellation-swift" */, 4014F1012D8C2EBC004E7EFD /* XCRemoteSwiftPackageReference "Gleap-iOS-SDK" */, + 40B8FFA52EC393A80061E3F6 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */, ); productRefGroup = 842D8BC42865B31B00801910 /* Products */; projectDirPath = ""; @@ -8273,7 +8328,6 @@ files = ( 82392D5F2993CCB300941435 /* ParticipantRobot.swift in Sources */, 82C837E429A5333700CB6B0E /* CallDetailsPage.swift in Sources */, - 4026BEEA2EA79FD400360AD0 /* CallFlow_PerformanceTests.swift in Sources */, 82C837E229A532C000CB6B0E /* LoginPage.swift in Sources */, 82392D542993C9E100941435 /* StreamTestCase.swift in Sources */, 82C837E029A531ED00CB6B0E /* CallPage.swift in Sources */, @@ -8526,12 
+8580,14 @@ 40DFA8902CC11146003DCE05 /* APIKey.swift in Sources */, 40DFA88F2CC11137003DCE05 /* UpdateCallResponse.swift in Sources */, 40EE9D2B2E969F010000EA92 /* AudioDeviceModule.swift in Sources */, + 40EF61BE2ED8B01300ED1F04 /* Logger+WebRTC.swift in Sources */, 40EE9D2C2E969F010000EA92 /* AudioEngineLevelNodeAdapter.swift in Sources */, 40DFA88E2CC1111E003DCE05 /* CallState.swift in Sources */, 403793C72D3670BC00C752DF /* ApplicationLifecycleVideoMuteAdapter.swift in Sources */, 4029E94E2CB8162900E1D571 /* IncomingVideoQualitySettings.swift in Sources */, 40AAD1802D27FC5E00D10330 /* RTCPeerConnectionTrackInfoCollectionType.swift in Sources */, 846D16242A52C3D50036CE4C /* CameraManager.swift in Sources */, + 40ED20E92EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift in Sources */, 848CCCE52AB8ED8F002E83A2 /* StopHLSBroadcastingResponse.swift in Sources */, 84F07BD12CB4804900422E58 /* NoiseCancellationSettingsRequest.swift in Sources */, 84DC389729ADFCFD00946713 /* StopLiveResponse.swift in Sources */, @@ -8544,8 +8600,6 @@ 8478EB13288A054B00525538 /* VideoConfig.swift in Sources */, 841BAA372BD15CDE000C73E4 /* Coordinates.swift in Sources */, 8492B875290808AE00006649 /* StreamVideoEnvironment.swift in Sources */, - 8464FBA92EB3832000933768 /* CallModerationBlurEvent.swift in Sources */, - 8464FBAA2EB3832000933768 /* CallModerationWarningEvent.swift in Sources */, 841BAA492BD15CDE000C73E4 /* CollectUserFeedbackRequest.swift in Sources */, 406583902B877A0500B4F979 /* ImageBackgroundVideoFilter.swift in Sources */, 8454A3192AAB374B00A012C6 /* CallStatsReport.swift in Sources */, @@ -8556,7 +8610,6 @@ 84A7E184288362DF00526C98 /* Atomic.swift in Sources */, 8449824E2C738A830029734D /* StopAllRTMPBroadcastsResponse.swift in Sources */, 40E363522D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift in Sources */, - 40A0FFB42EA63D3C00F39D8F /* BatteryStore+Namespace.swift in Sources */, 84D2E37729DC856D001D2118 /* CallMemberUpdatedEvent.swift in 
Sources */, 40DFA88D2CC10FF3003DCE05 /* Stream_Video_Sfu_Models_AppleThermalState+Convenience.swift in Sources */, 8409465B29AF4EEC007AF5BF /* ListRecordingsResponse.swift in Sources */, @@ -8638,7 +8691,6 @@ 841BAA462BD15CDE000C73E4 /* CallStatsReportSummaryResponse.swift in Sources */, 84DC38D829ADFCFD00946713 /* JoinCallRequest.swift in Sources */, 40ADB8612D65DFD700B06AAF /* String.StringInterpolation+Nil.swift in Sources */, - 40A0FFB62EA63D8F00F39D8F /* BatteryStore+State.swift in Sources */, 84AF64D2287C78E70012A503 /* User.swift in Sources */, 84274F482884251600CF8794 /* InternetConnection.swift in Sources */, 84DC389129ADFCFD00946713 /* VideoSettings.swift in Sources */, @@ -8658,6 +8710,7 @@ 842D3B5F29F6D3720051698A /* DeviceData.swift in Sources */, 842E70D02B91BE1700D2D68B /* ClosedCaptionEvent.swift in Sources */, 40E3632E2D09DBFA0028C52A /* Int+DefaultValues.swift in Sources */, + 4039088D2EC2311A00B19FA1 /* StoreEffect.swift in Sources */, 40E363382D09E6560028C52A /* Array+Prepare.swift in Sources */, 842D3B5829F667660051698A /* CreateDeviceRequest.swift in Sources */, 84BBF62B28AFC24000387A02 /* PeerConnectionFactory.swift in Sources */, @@ -8667,7 +8720,6 @@ 8490DD1F298D39D9007E53D2 /* JsonEventDecoder.swift in Sources */, 40FB15192BF77EE700D5E580 /* Call+Idle.swift in Sources */, 40ADB85C2D64B00E00B06AAF /* CGSize+Hashable.swift in Sources */, - 40A0FFBE2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift in Sources */, 8438AB062E5F3A2900BA834F /* FileUploadConfig.swift in Sources */, 8438AB072E5F3A2900BA834F /* AppEventResponse.swift in Sources */, 40E3633E2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift in Sources */, @@ -8692,7 +8744,6 @@ 842E70D72B91BE1700D2D68B /* CallRecordingFailedEvent.swift in Sources */, 40EE9D532E97C8B70000EA92 /* RTCAudioSessionPublisher.swift in Sources */, 40F1016A2D5A653B00C49481 /* AudioSessionConfiguration.swift in Sources */, - 40A0FFBB2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift in 
Sources */, 40BBC4BE2C6280E4002AEF92 /* LocalScreenShareMediaAdapter.swift in Sources */, 841BAA332BD15CDE000C73E4 /* SFULocationResponse.swift in Sources */, 84DC38D129ADFCFD00946713 /* Credentials.swift in Sources */, @@ -8707,8 +8758,6 @@ 84DC38DB29ADFCFD00946713 /* JSONDataEncoding.swift in Sources */, 40FB15112BF77D5800D5E580 /* StreamStateMachineStage.swift in Sources */, 8496A9A629CC500F00F15FF1 /* StreamVideoCaptureHandler.swift in Sources */, - 841457372EBE5BF100D0D034 /* RingCallResponse.swift in Sources */, - 841457382EBE5BF100D0D034 /* RingCallRequest.swift in Sources */, 406568872E0426FD00A67EAC /* IdleTimerAdapter.swift in Sources */, 84CD12162C73831000056640 /* CallRtmpBroadcastStartedEvent.swift in Sources */, 40944D232E4E3D7D00088AF0 /* StreamCallAudioRecorder+State.swift in Sources */, @@ -8747,6 +8796,7 @@ 84DCA2142A38A428000C3411 /* CoordinatorModels.swift in Sources */, 4061288B2CF33088007F5CDC /* SupportedPrefix.swift in Sources */, 40BBC4C02C629408002AEF92 /* RTCTemporaryPeerConnection.swift in Sources */, + 402C5C612ECB96D30096F212 /* AVAudioSessionObserver.swift in Sources */, 84B0091B2A4C521100CF1FA7 /* Retries.swift in Sources */, 4019A2802E43529000CE70A4 /* AudioSessionProtocol.swift in Sources */, 405BFFD22DBB8BE8005B2BE4 /* ProximityManager.swift in Sources */, @@ -8777,6 +8827,7 @@ 40BBC4C42C638789002AEF92 /* RTCPeerConnectionCoordinator.swift in Sources */, 40BBC4C62C638915002AEF92 /* WebRTCCoordinator.swift in Sources */, 40802AE92DD2A7C700B9F970 /* AVAudioSessionProtocol.swift in Sources */, + 404B546B2ED06D8C009378F2 /* RetriableTask.swift in Sources */, 841BAA392BD15CDE000C73E4 /* UserSessionStats.swift in Sources */, 406B3BD72C8F332200FC93A1 /* RTCVideoTrack+Sendable.swift in Sources */, 406128812CF32FEF007F5CDC /* SDPLineVisitor.swift in Sources */, @@ -8786,10 +8837,15 @@ 406128832CF33000007F5CDC /* SDPParser.swift in Sources */, 40A0FFC02EA6418000F39D8F /* Sequence+AsyncReduce.swift in Sources */, 84B9A56D29112F39004DE31A /* 
EndpointConfig.swift in Sources */, + 402C5C5F2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift in Sources */, 405072672E5F4CF7003D2109 /* CallKitMissingPermissionPolicy+EndCall.swift in Sources */, 4039F0CF2D024DDF0078159E /* MediaTransceiverStorage.swift in Sources */, 8469593829BB6B4E00134EA0 /* GetEdgesResponse.swift in Sources */, 40AB34AE2C5D02D400B5B6B3 /* SFUAdapter.swift in Sources */, + 40B8FFC02EC394AA0061E3F6 /* CallModerationBlurEvent.swift in Sources */, + 40B8FFC12EC394AA0061E3F6 /* RingCallRequest.swift in Sources */, + 40B8FFC22EC394AA0061E3F6 /* CallModerationWarningEvent.swift in Sources */, + 40B8FFC32EC394AA0061E3F6 /* RingCallResponse.swift in Sources */, 84DC389A29ADFCFD00946713 /* APIError.swift in Sources */, 8449824B2C738A830029734D /* DeleteCallRequest.swift in Sources */, 84AF64DB287C7A2C0012A503 /* ErrorPayload.swift in Sources */, @@ -8830,18 +8886,23 @@ 40C9E4442C94740600802B28 /* Stream_Video_Sfu_Models_VideoLayer+Convenience.swift in Sources */, 84CD12252C73840300056640 /* CallUserMutedEvent.swift in Sources */, 84DC38AC29ADFCFD00946713 /* CallAcceptedEvent.swift in Sources */, + 40B8FFB62EC3949F0061E3F6 /* BatteryStore.swift in Sources */, + 40B8FFB72EC3949F0061E3F6 /* BatteryStore+Action.swift in Sources */, + 40B8FFB82EC3949F0061E3F6 /* BatteryStore+Namespace.swift in Sources */, + 40B8FFB92EC3949F0061E3F6 /* BatteryStore+State.swift in Sources */, + 40B8FFBA2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift in Sources */, + 40B8FFBB2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift in Sources */, 84FC2C2828AD350100181490 /* WebRTCEvents.swift in Sources */, 4091158E2E06A1EA00F9135C /* OperationQueue+TaskOperations.swift in Sources */, 40E3635D2D0A17C10028C52A /* CameraVideoOutputHandler.swift in Sources */, 40FEA2C92DA4015300AC523B /* (null) in Sources */, 4159F17B2C86FA41002B94D3 /* RTMPSettingsRequest.swift in Sources */, - 40EE9D552E97CA7F0000EA92 /* RTCAudioStore+InterruptionsMiddleware.swift in Sources */, 
+ 40EE9D552E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift in Sources */, 84DC38A129ADFCFD00946713 /* BlockUserResponse.swift in Sources */, 40E363362D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift in Sources */, 4012B1942BFCAC1C006B0031 /* Call+RejectingStage.swift in Sources */, 40BBC4D22C639158002AEF92 /* WebRTCCoordinator+Connecting.swift in Sources */, 40BBC4AF2C627692002AEF92 /* LocalMediaAdapting.swift in Sources */, - 40A0FFB82EA63D9700F39D8F /* BatteryStore+Action.swift in Sources */, 84DCA2152A38A79E000C3411 /* Token.swift in Sources */, 40FB151B2BF77EEE00D5E580 /* Call+JoiningStage.swift in Sources */, 402B34C02DCDEE9000574663 /* WebRTCUpdateSubscriptionsAdapter.swift in Sources */, @@ -8875,6 +8936,7 @@ 40FB8FF62D661DC400F4390A /* Call+Identifiable.swift in Sources */, 40944D252E4E3D9100088AF0 /* StreamCallAudioRecorder+Logger.swift in Sources */, 4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */, + 402C5C632ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift in Sources */, 4028FE982DC4F638001F9DC3 /* ConsumableBucket.swift in Sources */, 4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */, 84A7E1862883632100526C98 /* ConnectionStatus.swift in Sources */, @@ -8922,12 +8984,12 @@ 40E1C89B2EA1156600AC3647 /* RTCAudioDeviceModuleControlling.swift in Sources */, 40BBC48C2C623C6E002AEF92 /* MediaAdapter.swift in Sources */, 40FB151D2BF77EFA00D5E580 /* Call+JoinedStage.swift in Sources */, + 403908AC2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift in Sources */, 40BBC4B52C627761002AEF92 /* LocalVideoMediaAdapter.swift in Sources */, 84DC38C429ADFCFD00946713 /* MemberResponse.swift in Sources */, 40E363452D09F2BD0028C52A /* AVCaptureDevice.Format+Convenience.swift in Sources */, 84DC38CB29ADFCFD00946713 /* SortParamRequest.swift in Sources */, 8490032529D308A000AD9BB4 /* GetCallResponse.swift in Sources */, - 40A0FFB12EA63CB900F39D8F /* BatteryStore.swift in Sources */, 
841947982886D9CD0007B36E /* BundleExtensions.swift in Sources */, 40483CB82C9B1DEE00B4FCA8 /* WebRTCCoordinatorProviding.swift in Sources */, 4028FEAB2DC536DE001F9DC3 /* Date+millisecondsSince1970.swift in Sources */, @@ -9114,7 +9176,7 @@ 842B8E1D2A2DFED900863A87 /* EgressHLSResponse.swift in Sources */, 40FF825D2D63527D0029AA80 /* Comparator.swift in Sources */, 848CCCE82AB8ED8F002E83A2 /* StartHLSBroadcastingResponse.swift in Sources */, - 40EE9D512E97C7C60000EA92 /* RTCAudioStore+RouteChangeMiddleware.swift in Sources */, + 40EE9D512E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift in Sources */, 84D2E37629DC856D001D2118 /* CallMemberRemovedEvent.swift in Sources */, 40E3636C2D0A24390028C52A /* ScreenShareCaptureHandler.swift in Sources */, 408722372E13C91F006A68CB /* AVCaptureDevice.Format+MediaSubType.swift in Sources */, @@ -9169,7 +9231,6 @@ 40151FA02E7446FC00326540 /* AudioProcessingStore+CapturedChannelsMiddleware.swift in Sources */, 843DAB9929E695CF00E0EB63 /* CreateGuestResponse.swift in Sources */, 84DC389229ADFCFD00946713 /* RequestPermissionRequest.swift in Sources */, - 40E7A45B2E29495500E8AB8B /* WebRTCLogger.swift in Sources */, 84C28C922A84D16A00742E33 /* GoLiveRequest.swift in Sources */, 84FC2C1328ACDF3A00181490 /* ProtoModel.swift in Sources */, 40BBC4CE2C639054002AEF92 /* WebRTCCoordinator+Error.swift in Sources */, @@ -9238,6 +9299,7 @@ 8490031929D2E0DF00AD9BB4 /* Sorting_Tests.swift in Sources */, 406B3C4C2C91EFA700FC93A1 /* MockCallAuthenticator.swift in Sources */, 40F0174D2BBEEFD500E89FD1 /* TranscriptionSettings+Dummy.swift in Sources */, + 40EF61B72ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift in Sources */, 406B3C4A2C91EE9700FC93A1 /* MockWebRTCCoordinatorStack.swift in Sources */, 40C71B692E535D7400733BF6 /* StreamCallAudioRecorder_DefaultReducerTests.swift in Sources */, 40AB34C92C5D3F2E00B5B6B3 /* ParticipantsStats+Dummy.swift in Sources */, @@ -9280,9 +9342,6 @@ 8414080F29F2838F00FF2D7C /* RawJSON_Tests.swift in 
Sources */, 40E9B3B72BCD941600ACF18F /* SFUResponse+Dummy.swift in Sources */, 40382F432C89CF9700C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift in Sources */, - 402E69A22EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift in Sources */, - 402E69A32EA65FF90082F7FA /* BatteryStore_Tests.swift in Sources */, - 402E69A42EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift in Sources */, 40FAAC862DDC9B2D007BF93A /* AnyEncodable.swift in Sources */, 40B48C1F2D14CAFC002C4EAB /* Comparable_ClampedTests.swift in Sources */, 404A812F2DA3C4FC001F7FA8 /* CallStateMachine_AcceptingStageTests.swift in Sources */, @@ -9324,7 +9383,6 @@ 40F017392BBEAF6400E89FD1 /* MockCallKitService.swift in Sources */, 40D2873B2DB12CAD006AD8C7 /* DefaultAudioSessionPolicyTests.swift in Sources */, 40F017402BBEBC6500E89FD1 /* MockCallKitPushNotificationAdapter.swift in Sources */, - 40986C3E2CD1148F00510F88 /* AudioSession_Tests.swift in Sources */, 403FB1512BFE1AA90047A696 /* CallStateMachine_Tests.swift in Sources */, 406B3C532C92007900FC93A1 /* WebRTCCoordinatorStateMachine_ConnectedStageTests.swift in Sources */, 40B48C172D14C97F002C4EAB /* CGSize_DefaultValuesTests.swift in Sources */, @@ -9382,6 +9440,7 @@ 8446AF912A4D84F4002AB07B /* Retries_Tests.swift in Sources */, 406B3C272C904F7100FC93A1 /* LocalScreenShareMediaAdapter_Tests.swift in Sources */, 84F58B7429EE928400010C4C /* TestError.swift in Sources */, + 40EF61B22ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift in Sources */, 403CA9BB2CCA548D001A88C2 /* Stream_Video_Sfu_Event_VideoLayerSetting+Dummy.swift in Sources */, 84D6E53A2B3AD10000D0056C /* RepeatingTimer_Tests.swift in Sources */, 40AF6A372C93423400BA2935 /* WebRTCCoordinatorStateMachine_FastReconnectingStageTests.swift in Sources */, @@ -9415,6 +9474,7 @@ 40AB31262A49838000C270E1 /* EventTests.swift in Sources */, 4065688A2E04275F00A67EAC /* IdleTimerAdapter_Tests.swift in Sources */, 84F58B7C29EE979F00010C4C /* 
VirtualTime.swift in Sources */, + 40EF61BA2ED893A400ED1F04 /* MockStoreDispatcher.swift in Sources */, 40B3E5492DBBD2CA00DE8F50 /* SpeakerProximityPolicy_Tests.swift in Sources */, 40F0173E2BBEB86800E89FD1 /* TestsAuthenticationProvider.swift in Sources */, 401338762BF2489C007318BD /* MockCXCallController.swift in Sources */, @@ -9426,6 +9486,7 @@ 40D36AC82DDDF39F00972D75 /* WebRTCTrace+Dummy.swift in Sources */, 84DCA2112A389160000C3411 /* AssertDelay.swift in Sources */, 40C71B7B2E536F0F00733BF6 /* MockAVAudioRecorder.swift in Sources */, + 40EF61AE2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift in Sources */, 40F0173B2BBEB1A900E89FD1 /* CallKitAdapterTests.swift in Sources */, 40C71B662E535CFB00733BF6 /* StreamCallAudioRecorder_StateTests.swift in Sources */, 403FB1492BFDF3950047A696 /* CallCache_Tests.swift in Sources */, @@ -9483,11 +9544,15 @@ 407E67592DC101DF00878FFC /* CallCRUDTests.swift in Sources */, 8414081529F28FFC00FF2D7C /* CallSettings_Tests.swift in Sources */, 40D36AE42DDE02D100972D75 /* MockWebRTCStatsCollector.swift in Sources */, + 40EF61AA2ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift in Sources */, + 40EF61AB2ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift in Sources */, + 40EF61AC2ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift in Sources */, 8492B87829081D1600006649 /* HTTPClient_Mock.swift in Sources */, 40F0175F2BBEF11600E89FD1 /* AudioSettings+Dummy.swift in Sources */, 40986C3A2CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift in Sources */, 40F0176F2BBEF22D00E89FD1 /* CallResponse+Dummy.swift in Sources */, 40E1C8B12EA15A9200AC3647 /* RTCAudioStore_DefaultReducerTests.swift in Sources */, + 40EF61A32ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift in Sources */, 40064BD62E5C7703007CDB33 /* MockPermissionsStore.swift in Sources */, 40B48C372D14D424002C4EAB /* RTCAudioTrack_CloneTests.swift in Sources */, 404A81362DA3CBF0001F7FA8 /* 
CallConfigurationTests.swift in Sources */, @@ -9513,10 +9578,14 @@ 40B3E5472DBBCB2A00DE8F50 /* VideoProximityPolicy_Tests.swift in Sources */, 406B3C3F2C919BB300FC93A1 /* MockSFUStack.swift in Sources */, 403FB14C2BFE14760047A696 /* Publisher_NextTests.swift in Sources */, + 40B8FFCD2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift in Sources */, + 40B8FFCE2EC394D30061E3F6 /* BatteryStore_Tests.swift in Sources */, + 40B8FFCF2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift in Sources */, 84A4DCBB2A41DC6E00B1D1BF /* AsyncAssert.swift in Sources */, 406B3C162C90343300FC93A1 /* LocalVideoMediaAdapter_Tests.swift in Sources */, 40B48C3D2D14D7EF002C4EAB /* RTCRtpTransceiverInit_ConvenienceTests.swift in Sources */, 4045F84B2E716DF000074FB3 /* StoreTask_Tests.swift in Sources */, + 40EF61A52ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift in Sources */, 84CBBE0B29228BA900D0DA61 /* StreamVideoTestCase.swift in Sources */, 40F017512BBEF00500E89FD1 /* ScreensharingSettings+Dummy.swift in Sources */, 40AF6A492C935EB600BA2935 /* WebRTCCoordinatorStateMachine_CleanUpStageTests.swift in Sources */, @@ -9530,7 +9599,6 @@ 40AB34C72C5D3F0400B5B6B3 /* CallStatsReport+Dummy.swift in Sources */, 40986C3C2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift in Sources */, 40B48C2C2D14D0FF002C4EAB /* StreamVideoSfuModelsPublishOption_ConvenienceTests.swift in Sources */, - 40E1C8B82EA1934000AC3647 /* RTCAudioStore_RouteChangeMiddlewareTests.swift in Sources */, 406B3C1C2C903A2B00FC93A1 /* MockVideoCapturerFactory.swift in Sources */, 84BB570E2A20D7BB0002C123 /* Mapping_Tests.swift in Sources */, 27293A6712944001B2C5E10D /* LoggerConcurrency_Tests.swift in Sources */, @@ -9541,7 +9609,8 @@ 4063033F2AD847EC0091AE77 /* CallState_Tests.swift in Sources */, 406B3C2F2C90864900FC93A1 /* VideoMediaAdapter_Tests.swift in Sources */, 40AAD18F2D2EEAD500D10330 /* MockCaptureDeviceProvider.swift in Sources */, - 40E1C8BA2EA1946300AC3647 /* 
RTCAudioStore_InterruptionsMiddlewareTests.swift in Sources */, + 40E1C8BA2EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift in Sources */, + 40EF61B02ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift in Sources */, 843DAB9C29E6FFCD00E0EB63 /* StreamVideo_Tests.swift in Sources */, 4031D7F82B83C087002EC6E4 /* StreamCallAudioRecorder_Tests.swift in Sources */, 40E1C8A02EA1176C00AC3647 /* AudioDeviceModule_Tests.swift in Sources */, @@ -9584,7 +9653,6 @@ 84F3B0DE28913E0F0088751D /* CallControlsView.swift in Sources */, 8435EB9029CDAADA00E02651 /* ParticipantsGridLayout.swift in Sources */, 8434C52D289AA41D0001490A /* ImageExtensions.swift in Sources */, - 40A317E82EB504C900733948 /* ModerationBlurViewModifier.swift in Sources */, 849EDA8B297AFCC80072A12D /* PreJoiningView.swift in Sources */, 84D425082AA61E9900473150 /* LivestreamPlayer.swift in Sources */, 40A941762B4D9F16006D6965 /* PictureInPictureSourceView.swift in Sources */, @@ -9598,7 +9666,6 @@ 843697D228C7A25F00839D99 /* ParticipantsGridView.swift in Sources */, 840042CF2A70212D00917B30 /* ScreensharingControls.swift in Sources */, 40C7B8342B613A8200FB9DB2 /* ControlBadgeView.swift in Sources */, - 40A317EB2EB5081500733948 /* ModerationWarningViewModifier.swift in Sources */, 8406269A2A37A5E2004B8748 /* CallEvents.swift in Sources */, 40C7B8362B613C7800FB9DB2 /* ParticipantsListButton.swift in Sources */, 40AA2EE22AE0137E000DCA5C /* ClipCorners.swift in Sources */, @@ -9618,6 +9685,8 @@ 840A5A5629054F69006A1E4B /* UserListProvider.swift in Sources */, 40FAF3D32B10F611003F8029 /* UIDevice+Convenience.swift in Sources */, 8458872A28A3F935002A81BF /* OutgoingCallView.swift in Sources */, + 40B8FFC72EC394C50061E3F6 /* ModerationWarningViewModifier.swift in Sources */, + 40B8FFC82EC394C50061E3F6 /* ModerationBlurViewModifier.swift in Sources */, 40245F3A2BE26F7200FCF075 /* StatelessAudioOutputIconView.swift in Sources */, 8457CF9128BB835F00E8CF50 /* CallView.swift in Sources */, 
846E4AFD29D1DDE8003733AB /* LayoutMenuView.swift in Sources */, @@ -9732,6 +9801,7 @@ 84DCA20B2A382FE0000C3411 /* CallViewModel_Tests.swift in Sources */, 40245F5F2BE279D300FCF075 /* StatelessToggleCameraIconView_Tests.swift in Sources */, 4067A5D82AE1249400CFDEB1 /* CornerClipper_Tests.swift in Sources */, + 40EF61B82ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift in Sources */, 82E3BA542A0BAF4B001AB93E /* WebSocketClientEnvironment_Mock.swift in Sources */, 82E3BA372A0BAD59001AB93E /* StreamVideo_Mock.swift in Sources */, 40F0C3AC2BC8138A00AB75AD /* ReusePool_Tests.swift in Sources */, @@ -11471,20 +11541,20 @@ minimumVersion = 1.0.3; }; }; - 40F445C32A9E1D91004BE3DA /* XCRemoteSwiftPackageReference "stream-chat-swift-test-helpers" */ = { + 40B8FFA52EC393A80061E3F6 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */ = { isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/GetStream/stream-chat-swift-test-helpers"; + repositoryURL = "https://github.com/GetStream/stream-video-swift-webrtc.git"; requirement = { kind = exactVersion; - version = 0.3.5; + version = 137.0.51; }; }; - 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */ = { + 40F445C32A9E1D91004BE3DA /* XCRemoteSwiftPackageReference "stream-chat-swift-test-helpers" */ = { isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/GetStream/stream-video-swift-webrtc"; + repositoryURL = "https://github.com/GetStream/stream-chat-swift-test-helpers"; requirement = { kind = exactVersion; - version = 137.0.43; + version = 0.3.5; }; }; 8423B7542950BB0A00012F8D /* XCRemoteSwiftPackageReference "sentry-cocoa" */ = { @@ -11554,9 +11624,19 @@ package = 40AC73B22BE0062B00C57517 /* XCRemoteSwiftPackageReference "stream-video-noise-cancellation-swift" */; productName = StreamVideoNoiseCancellation; }; - 40BAD0B22EA7CE3200CCD3D7 /* StreamWebRTC */ = { + 40B8FFA62EC393A80061E3F6 /* StreamWebRTC */ = { + isa = 
XCSwiftPackageProductDependency; + package = 40B8FFA52EC393A80061E3F6 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; + productName = StreamWebRTC; + }; + 40B8FFA82EC393B50061E3F6 /* StreamWebRTC */ = { isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; + package = 40B8FFA52EC393A80061E3F6 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; + productName = StreamWebRTC; + }; + 40B8FFAA2EC393BB0061E3F6 /* StreamWebRTC */ = { + isa = XCSwiftPackageProductDependency; + package = 40B8FFA52EC393A80061E3F6 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; productName = StreamWebRTC; }; 40C708D52D8D729500D3501F /* Gleap */ = { @@ -11594,36 +11674,6 @@ package = 4046DEEC2A9F404300CA6D2F /* XCRemoteSwiftPackageReference "GDPerformanceView-Swift" */; productName = "GDPerformanceView-Swift"; }; - 82EB8F562B0277730038B5A2 /* StreamWebRTC */ = { - isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; - productName = StreamWebRTC; - }; - 82EB8F582B0277E70038B5A2 /* StreamWebRTC */ = { - isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; - productName = StreamWebRTC; - }; - 82EB8F5A2B0277EC0038B5A2 /* StreamWebRTC */ = { - isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; - productName = StreamWebRTC; - }; - 82EB8F5C2B0277F10038B5A2 /* StreamWebRTC */ = { - isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; - productName = StreamWebRTC; - }; - 82EB8F5E2B0277F60038B5A2 /* StreamWebRTC */ = { - isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* 
XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; - productName = StreamWebRTC; - }; - 82EB8F602B0277FB0038B5A2 /* StreamWebRTC */ = { - isa = XCSwiftPackageProductDependency; - package = 82EB8F552B0277730038B5A2 /* XCRemoteSwiftPackageReference "stream-video-swift-webrtc" */; - productName = StreamWebRTC; - }; 8423B7552950BB0B00012F8D /* Sentry */ = { isa = XCSwiftPackageProductDependency; package = 8423B7542950BB0A00012F8D /* XCRemoteSwiftPackageReference "sentry-cocoa" */; diff --git a/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift b/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift index dbcbce9c7..31c8ddae5 100644 --- a/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift +++ b/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift @@ -71,8 +71,7 @@ final class ParticipantEventResetAdapter_Tests: XCTestCase, @unchecked Sendable await self.wait(for: self.interval) XCTAssertNotNil(self.viewModel.participantEvent) - await self.wait(for: self.interval) - XCTAssertNil(self.viewModel.participantEvent) + await self.fulfilmentInMainActor { self.viewModel.participantEvent == nil } } try await group.waitForAll() diff --git a/StreamVideoTests/CallStateMachine/CallStateMachine/CallStateMachine_Tests.swift b/StreamVideoTests/CallStateMachine/CallStateMachine/CallStateMachine_Tests.swift index 9ba33a316..702d11c1c 100644 --- a/StreamVideoTests/CallStateMachine/CallStateMachine/CallStateMachine_Tests.swift +++ b/StreamVideoTests/CallStateMachine/CallStateMachine/CallStateMachine_Tests.swift @@ -27,8 +27,13 @@ final class CallStateMachineTests: StreamVideoTestCase, @unchecked Sendable { // MARK: - Test Transition func testValidTransition() { + /// The test case is expected to log the following ClientError + /// ClientError { location:{ 
file:StreamVideoTests/HTTPClient_Mock.swift, line:28 } message:Please setup responses } + // Given - let nextState = Call.StateMachine.Stage.AcceptingStage(.init(call: mockCall)) + let nextState = Call.StateMachine.Stage.AcceptingStage( + .init(call: mockCall, input: .accepting(deliverySubject: .init())) + ) XCTAssertEqual(subject.currentStage.id, .idle) // When diff --git a/StreamVideoTests/Mock/MockAudioSession.swift b/StreamVideoTests/Mock/MockAudioSession.swift index 5276c33ff..7c294d8dd 100644 --- a/StreamVideoTests/Mock/MockAudioSession.swift +++ b/StreamVideoTests/Mock/MockAudioSession.swift @@ -21,6 +21,7 @@ final class MockAudioSession: AudioSessionProtocol, Mockable, @unchecked Sendabl case setActive case overrideOutputAudioPort case setConfiguration + case setPreferredOutputNumberOfChannels } enum MockFunctionInputKey: Payloadable { @@ -33,6 +34,7 @@ final class MockAudioSession: AudioSessionProtocol, Mockable, @unchecked Sendabl case setActive(Bool) case overrideOutputAudioPort(AVAudioSession.PortOverride) case setConfiguration(RTCAudioSessionConfiguration) + case setPreferredOutputNumberOfChannels(Int) var payload: Any { switch self { @@ -62,6 +64,9 @@ final class MockAudioSession: AudioSessionProtocol, Mockable, @unchecked Sendabl case let .setConfiguration(configuration): return configuration + + case let .setPreferredOutputNumberOfChannels(value): + return value } } } @@ -183,4 +188,13 @@ final class MockAudioSession: AudioSessionProtocol, Mockable, @unchecked Sendabl mode = configuration.mode categoryOptions = configuration.categoryOptions } + + func setPreferredOutputNumberOfChannels(_ noOfChannels: Int) throws { + stubbedFunctionInput[.setPreferredOutputNumberOfChannels]? + .append(.setPreferredOutputNumberOfChannels(noOfChannels)) + + if let error = stubbedFunction[.setPreferredOutputNumberOfChannels] as? 
Error { + throw error + } + } } diff --git a/StreamVideoTests/Mock/MockRTCAudioDeviceModule.swift b/StreamVideoTests/Mock/MockRTCAudioDeviceModule.swift index 7534ff390..2d1988699 100644 --- a/StreamVideoTests/Mock/MockRTCAudioDeviceModule.swift +++ b/StreamVideoTests/Mock/MockRTCAudioDeviceModule.swift @@ -15,31 +15,72 @@ final class MockRTCAudioDeviceModule: RTCAudioDeviceModuleControlling, Mockable, typealias FunctionInputKey = MockFunctionInputKey enum MockFunctionKey: Hashable, CaseIterable { - case initAndStartRecording - case stopRecording case setMicrophoneMuted case microphoneMutedPublisher + case reset + case initAndStartPlayout + case startPlayout + case stopPlayout + case initAndStartRecording + case startRecording + case stopRecording + case refreshStereoPlayoutState + case setMuteMode + case setRecordingAlwaysPreparedMode } enum MockFunctionInputKey: Payloadable { - case initAndStartRecording - case stopRecording case setMicrophoneMuted(Bool) case microphoneMutedPublisher + case reset + case initAndStartPlayout + case startPlayout + case stopPlayout + case initAndStartRecording + case startRecording + case stopRecording + case refreshStereoPlayoutState + case setMuteMode(RTCAudioEngineMuteMode) + case setRecordingAlwaysPreparedMode(Bool) var payload: Any { switch self { + + case .setMicrophoneMuted(let value): + return value + + case .microphoneMutedPublisher: + return () + + case .reset: + return () + + case .initAndStartPlayout: + return () + + case .startPlayout: + return () + + case .stopPlayout: + return () + case .initAndStartRecording: return () + case .startRecording: + return () + case .stopRecording: return () - case .setMicrophoneMuted(let value): + case .refreshStereoPlayoutState: + return () + + case let .setMuteMode(value): return value - case .microphoneMutedPublisher: - return () + case let .setRecordingAlwaysPreparedMode(value): + return value } } } @@ -59,6 +100,28 @@ final class MockRTCAudioDeviceModule: 
RTCAudioDeviceModuleControlling, Mockable, init() { stub(for: \.isMicrophoneMuted, with: false) + stub(for: \.isPlaying, with: false) + stub(for: \.isRecording, with: false) + stub(for: \.isPlayoutInitialized, with: false) + stub(for: \.isRecordingInitialized, with: false) + stub(for: \.isMicrophoneMuted, with: false) + stub(for: \.isStereoPlayoutEnabled, with: false) + stub(for: \.isVoiceProcessingBypassed, with: false) + stub(for: \.isVoiceProcessingEnabled, with: false) + stub(for: \.isVoiceProcessingAGCEnabled, with: false) + stub(for: \.prefersStereoPlayout, with: false) + + stub(for: .initAndStartRecording, with: 0) + stub(for: .setMicrophoneMuted, with: 0) + stub(for: .stopRecording, with: 0) + stub(for: .reset, with: 0) + stub(for: .initAndStartPlayout, with: 0) + stub(for: .startPlayout, with: 0) + stub(for: .stopPlayout, with: 0) + stub(for: .startRecording, with: 0) + stub(for: .refreshStereoPlayoutState, with: 0) + stub(for: .setMuteMode, with: 0) + stub(for: .setRecordingAlwaysPreparedMode, with: 0) } // MARK: - RTCAudioDeviceModuleControlling @@ -67,9 +130,46 @@ final class MockRTCAudioDeviceModule: RTCAudioDeviceModuleControlling, Mockable, var observer: (any RTCAudioDeviceModuleDelegate)? 
+ var isPlaying: Bool { + self[dynamicMember: \.isPlaying] + } + + var isRecording: Bool { + self[dynamicMember: \.isRecording] + } + + var isPlayoutInitialized: Bool { + self[dynamicMember: \.isPlayoutInitialized] + } + + var isRecordingInitialized: Bool { + self[dynamicMember: \.isRecordingInitialized] + } + var isMicrophoneMuted: Bool { - get { self[dynamicMember: \.isMicrophoneMuted] } - set { _ = newValue } + self[dynamicMember: \.isMicrophoneMuted] + } + + var isStereoPlayoutEnabled: Bool { + self[dynamicMember: \.isStereoPlayoutEnabled] + } + + var isVoiceProcessingBypassed: Bool { + get { self[dynamicMember: \.isVoiceProcessingBypassed] } + set { stub(for: \.isVoiceProcessingBypassed, with: newValue) } + } + + var isVoiceProcessingEnabled: Bool { + self[dynamicMember: \.isVoiceProcessingEnabled] + } + + var isVoiceProcessingAGCEnabled: Bool { + self[dynamicMember: \.isVoiceProcessingAGCEnabled] + } + + var prefersStereoPlayout: Bool { + get { self[dynamicMember: \.prefersStereoPlayout] } + set { stub(for: \.prefersStereoPlayout, with: newValue) } } func initAndStartRecording() -> Int { @@ -81,7 +181,7 @@ final class MockRTCAudioDeviceModule: RTCAudioDeviceModuleControlling, Mockable, func setMicrophoneMuted(_ isMuted: Bool) -> Int { stubbedFunctionInput[.setMicrophoneMuted]? .append(.setMicrophoneMuted(isMuted)) - return stubbedFunction[.setMicrophoneMuted] as? Int ?? 0 + return stubbedFunction[.setMicrophoneMuted] as! Int } func stopRecording() -> Int { @@ -95,4 +195,51 @@ final class MockRTCAudioDeviceModule: RTCAudioDeviceModuleControlling, Mockable, .append(.microphoneMutedPublisher) return microphoneMutedSubject.eraseToAnyPublisher() } + + func reset() -> Int { + stubbedFunctionInput[.reset]? + .append(.reset) + return stubbedFunction[.reset] as! Int + } + + func initAndStartPlayout() -> Int { + stubbedFunctionInput[.initAndStartPlayout]? + .append(.initAndStartPlayout) + return stubbedFunction[.initAndStartPlayout] as! 
Int + } + + func startPlayout() -> Int { + stubbedFunctionInput[.startPlayout]? + .append(.startPlayout) + return stubbedFunction[.startPlayout] as! Int + } + + func stopPlayout() -> Int { + stubbedFunctionInput[.stopPlayout]? + .append(.stopPlayout) + return stubbedFunction[.stopPlayout] as! Int + } + + func startRecording() -> Int { + stubbedFunctionInput[.startRecording]? + .append(.startRecording) + return stubbedFunction[.startRecording] as! Int + } + + func refreshStereoPlayoutState() { + stubbedFunctionInput[.refreshStereoPlayoutState]? + .append(.refreshStereoPlayoutState) + } + + func setMuteMode(_ mode: RTCAudioEngineMuteMode) -> Int { + stubbedFunctionInput[.setMuteMode]? + .append(.setMuteMode(mode)) + return stubbedFunction[.setMuteMode] as! Int + } + + func setRecordingAlwaysPreparedMode(_ alwaysPreparedRecording: Bool) -> Int { + stubbedFunctionInput[.setRecordingAlwaysPreparedMode]? + .append(.setRecordingAlwaysPreparedMode(alwaysPreparedRecording)) + return stubbedFunction[.setRecordingAlwaysPreparedMode] as! Int + } } diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift index b14dd4002..cbfcecd2c 100644 --- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift +++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift @@ -9,6 +9,14 @@ import StreamWebRTC final class MockRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinatorProviding, @unchecked Sendable { var stubbedBuildCoordinatorResult: [PeerConnectionType: MockRTCPeerConnectionCoordinator] = [:] + var stubbedPeerConnectionFactory: PeerConnectionFactory? + + init( + peerConnectionFactory: PeerConnectionFactory? 
= nil + ) { + self.stubbedPeerConnectionFactory = peerConnectionFactory + } + func buildCoordinator( sessionId: String, peerType: PeerConnectionType, @@ -28,7 +36,7 @@ final class MockRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinato sessionId: sessionId, peerType: peerType, peerConnection: peerConnection, - peerConnectionFactory: peerConnectionFactory, + peerConnectionFactory: stubbedPeerConnectionFactory ?? peerConnectionFactory, videoOptions: videoOptions, videoConfig: videoConfig, callSettings: callSettings, diff --git a/StreamVideoTests/Mock/MockStoreDispatcher.swift b/StreamVideoTests/Mock/MockStoreDispatcher.swift new file mode 100644 index 000000000..787d71b52 --- /dev/null +++ b/StreamVideoTests/Mock/MockStoreDispatcher.swift @@ -0,0 +1,28 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Combine +import Foundation +@testable import StreamVideo + +extension StoreNamespace { + + static func makeMockDispatcher() -> MockStoreDispatcher { + .init() + } +} + +struct MockStoreDispatcher: @unchecked Sendable { + + var recordedActions: [StoreActionBox] { subject.value } + var publisher: AnyPublisher<[StoreActionBox], Never> { subject.eraseToAnyPublisher() } + private let subject: CurrentValueSubject<[StoreActionBox], Never> = .init([]) + + func handle( + actions: [StoreActionBox] + ) { + let value = subject.value + subject.send(value + actions) + } +} diff --git a/StreamVideoTests/Mock/MockWebRTCCoordinatorStack.swift b/StreamVideoTests/Mock/MockWebRTCCoordinatorStack.swift index cc76503e1..f17c1bbb5 100644 --- a/StreamVideoTests/Mock/MockWebRTCCoordinatorStack.swift +++ b/StreamVideoTests/Mock/MockWebRTCCoordinatorStack.swift @@ -16,8 +16,10 @@ final class MockWebRTCCoordinatorStack: @unchecked Sendable { let webRTCAuthenticator: MockWebRTCAuthenticator let coordinator: WebRTCCoordinator let sfuStack: MockSFUStack + let mockAudioDeviceModule: MockRTCAudioDeviceModule let rtcPeerConnectionCoordinatorFactory: 
MockRTCPeerConnectionCoordinatorFactory let internetConnection: MockInternetConnection + let peerConenctionFactory: PeerConnectionFactory private var healthCheckCancellable: AnyCancellable? @@ -29,7 +31,7 @@ final class MockWebRTCCoordinatorStack: @unchecked Sendable { callAuthenticator: MockCallAuthenticator = .init(), webRTCAuthenticator: MockWebRTCAuthenticator = .init(), sfuStack: MockSFUStack = .init(), - rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory = .init(), + rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory? = nil, internetConnection: MockInternetConnection = .init() ) { self.user = user @@ -39,6 +41,14 @@ final class MockWebRTCCoordinatorStack: @unchecked Sendable { self.callAuthenticator = callAuthenticator self.webRTCAuthenticator = webRTCAuthenticator self.sfuStack = sfuStack + let mockAudioDeviceModule = MockRTCAudioDeviceModule() + self.mockAudioDeviceModule = mockAudioDeviceModule + self.peerConenctionFactory = .build( + audioProcessingModule: videoConfig.audioProcessingModule, + audioDeviceModuleSource: mockAudioDeviceModule + ) + let rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory = rtcPeerConnectionCoordinatorFactory ?? + .init(peerConnectionFactory: peerConenctionFactory) self.rtcPeerConnectionCoordinatorFactory = rtcPeerConnectionCoordinatorFactory self.internetConnection = internetConnection coordinator = .init( diff --git a/StreamVideoTests/Utilities/Dummy/RTCAudioStoreState+Dummy.swift b/StreamVideoTests/Utilities/Dummy/RTCAudioStoreState+Dummy.swift new file mode 100644 index 000000000..4b92fc738 --- /dev/null +++ b/StreamVideoTests/Utilities/Dummy/RTCAudioStoreState+Dummy.swift @@ -0,0 +1,130 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import Foundation +@testable import StreamVideo + +extension RTCAudioStore.StoreState.AudioRoute { + + static func dummy( + inputs: [RTCAudioStore.StoreState.AudioRoute.Port] = [], + outputs: [RTCAudioStore.StoreState.AudioRoute.Port] = [], + reason: AVAudioSession.RouteChangeReason = .unknown + ) -> RTCAudioStore.StoreState.AudioRoute { + .init( + inputs: inputs, + outputs: outputs, + reason: reason + ) + } +} + +extension RTCAudioStore.StoreState.AudioRoute.Port { + + static func dummy( + type: String = .unique, + name: String = .unique, + id: String = .unique, + isExternal: Bool = false, + isSpeaker: Bool = false, + isReceiver: Bool = false, + channels: Int = 0 + ) -> RTCAudioStore.StoreState.AudioRoute.Port { + .init( + type: type, + name: name, + id: id, + isExternal: isExternal, + isSpeaker: isSpeaker, + isReceiver: isReceiver, + channels: channels + ) + } +} + +extension RTCAudioStore.StoreState.AVAudioSessionConfiguration { + + static func dummy( + category: AVAudioSession.Category = .soloAmbient, + mode: AVAudioSession.Mode = .default, + options: AVAudioSession.CategoryOptions = [], + overrideOutputAudioPort: AVAudioSession.PortOverride = .none + ) -> RTCAudioStore.StoreState.AVAudioSessionConfiguration { + .init( + category: category, + mode: mode, + options: options, + overrideOutputAudioPort: overrideOutputAudioPort + ) + } +} + +extension RTCAudioStore.StoreState.WebRTCAudioSessionConfiguration { + + static func dummy( + isAudioEnabled: Bool = false, + useManualAudio: Bool = false, + prefersNoInterruptionsFromSystemAlerts: Bool = false + ) -> RTCAudioStore.StoreState.WebRTCAudioSessionConfiguration { + .init( + isAudioEnabled: isAudioEnabled, + useManualAudio: useManualAudio, + prefersNoInterruptionsFromSystemAlerts: prefersNoInterruptionsFromSystemAlerts + ) + } +} + +extension RTCAudioStore.StoreState.StereoConfiguration { + + static func dummy( + playout: RTCAudioStore.StoreState.StereoConfiguration.Playout = .dummy() + ) 
-> RTCAudioStore.StoreState.StereoConfiguration { + .init( + playout: playout + ) + } +} + +extension RTCAudioStore.StoreState.StereoConfiguration.Playout { + + static func dummy( + preferred: Bool = false, + enabled: Bool = false + ) -> RTCAudioStore.StoreState.StereoConfiguration.Playout { + .init( + preferred: preferred, + enabled: enabled + ) + } +} + +extension RTCAudioStore.StoreState { + + static func dummy( + isActive: Bool = false, + isInterrupted: Bool = false, + isRecording: Bool = false, + isMicrophoneMuted: Bool = false, + hasRecordingPermission: Bool = false, + audioDeviceModule: AudioDeviceModule? = nil, + currentRoute: AudioRoute = .dummy(), + audioSessionConfiguration: AVAudioSessionConfiguration = .dummy(), + webRTCAudioSessionConfiguration: WebRTCAudioSessionConfiguration = .dummy(), + stereoConfiguration: StereoConfiguration = .dummy() + ) -> RTCAudioStore.StoreState { + .init( + isActive: isActive, + isInterrupted: isInterrupted, + isRecording: isRecording, + isMicrophoneMuted: isMicrophoneMuted, + hasRecordingPermission: hasRecordingPermission, + audioDeviceModule: audioDeviceModule, + currentRoute: currentRoute, + audioSessionConfiguration: audioSessionConfiguration, + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: stereoConfiguration + ) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift index 51d77c259..50af9cb17 100644 --- a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift +++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift @@ -11,550 +11,382 @@ import XCTest final class AudioDeviceModule_Tests: XCTestCase, @unchecked Sendable { - private lazy var source: MockRTCAudioDeviceModule! = .init() - private lazy var audioEngineNodeAdapter: MockAudioEngineNodeAdapter! 
= .init() - private lazy var subject: AudioDeviceModule! = .init(source, audioLevelsNodeAdapter: audioEngineNodeAdapter) + private var source: MockRTCAudioDeviceModule! + private var audioEngineNodeAdapter: MockAudioEngineNodeAdapter! + private var subject: AudioDeviceModule! + private var cancellables: Set! + + override func setUp() { + super.setUp() + source = .init() + audioEngineNodeAdapter = .init() + cancellables = [] + } override func tearDown() { + cancellables = nil subject = nil - source = nil audioEngineNodeAdapter = nil + source = nil super.tearDown() } - // MARK: - init + // MARK: - setPlayout + + func test_setPlayout_whenActivatingInitialized_callsStartPlayout() throws { + makeSubject() + source.stub(for: \.isPlayoutInitialized, with: true) - func test_init_subscribesOnMicrophoneMutePublisher() { - _ = subject + try subject.setPlayout(true) - XCTAssertEqual(source.timesCalled(.microphoneMutedPublisher), 1) + XCTAssertEqual(source.timesCalled(.startPlayout), 1) + XCTAssertEqual(source.timesCalled(.initAndStartPlayout), 0) } - // MARK: setRecording + func test_setPlayout_whenActivatingNotInitialized_callsInitAndStartPlayout() throws { + makeSubject() + source.stub(for: \.isPlayoutInitialized, with: false) - func test_setRecording_isEnabledTrueIsRecordingTrue_noAction() throws { - subject = .init(source, isRecording: true) + try subject.setPlayout(true) - try subject.setRecording(true) + XCTAssertEqual(source.timesCalled(.initAndStartPlayout), 1) + XCTAssertEqual(source.timesCalled(.startPlayout), 0) + } - XCTAssertEqual(source.timesCalled(.initAndStartRecording), 0) - XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 0) - XCTAssertEqual(source.timesCalled(.stopRecording), 0) + func test_setPlayout_whenDeactivating_callsStopPlayout() throws { + source.stub(for: \.isPlaying, with: true) + makeSubject() + + try subject.setPlayout(false) + + XCTAssertEqual(source.timesCalled(.stopPlayout), 1) + } + + func 
test_setPlayout_whenAlreadyPlaying_doesNothing() throws { + source.stub(for: \.isPlaying, with: true) + makeSubject() + + try subject.setPlayout(true) + + XCTAssertEqual(source.timesCalled(.startPlayout), 0) + XCTAssertEqual(source.timesCalled(.initAndStartPlayout), 0) } - func test_setRecording_isEnabledTrueIsRecordingFalseIsMicrophoneMutedFalse_initAndStartRecording() throws { - subject = .init(source, isRecording: false) + func test_setPlayout_whenOperationFails_throwsClientError() { + makeSubject() + source.stub(for: \.isPlayoutInitialized, with: true) + source.stub(for: .startPlayout, with: -1) + + XCTAssertThrowsError(try subject.setPlayout(true)) { error in + XCTAssertTrue(error is ClientError) + } + } + + // MARK: - setRecording + + func test_setRecording_whenActivatingInitialized_callsStartRecording() throws { + makeSubject() + source.stub(for: \.isRecordingInitialized, with: true) try subject.setRecording(true) - XCTAssertEqual(source.timesCalled(.initAndStartRecording), 1) - XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 0) - XCTAssertEqual(source.timesCalled(.stopRecording), 0) + XCTAssertEqual(source.timesCalled(.startRecording), 1) + XCTAssertEqual(source.timesCalled(.initAndStartRecording), 0) } - func test_setRecording_isEnabledTrueIsRecordingFalseIsMicrophoneMutedTrue_initAndStartRecordingAndSetMicrophoneMuted() throws { - subject = .init(source, isRecording: false) - source.stub(for: \.isMicrophoneMuted, with: true) + func test_setRecording_whenActivatingNotInitialized_callsInitAndStartRecording() throws { + makeSubject() + source.stub(for: \.isRecordingInitialized, with: false) try subject.setRecording(true) XCTAssertEqual(source.timesCalled(.initAndStartRecording), 1) - XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 1) - XCTAssertEqual(source.timesCalled(.stopRecording), 0) + XCTAssertEqual(source.timesCalled(.startRecording), 0) } - func test_setRecording_isEnabledFalseIsRecordingFalse_noAction() throws { - subject = 
.init(source, isRecording: false) + func test_setRecording_whenDeactivating_callsStopRecording() throws { + source.stub(for: \.isRecording, with: true) + makeSubject() try subject.setRecording(false) + XCTAssertEqual(source.timesCalled(.stopRecording), 1) + } + + func test_setRecording_whenAlreadyRecording_doesNothing() throws { + source.stub(for: \.isRecording, with: true) + makeSubject() + + try subject.setRecording(true) + + XCTAssertEqual(source.timesCalled(.startRecording), 0) XCTAssertEqual(source.timesCalled(.initAndStartRecording), 0) - XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 0) XCTAssertEqual(source.timesCalled(.stopRecording), 0) } // MARK: - setMuted - func test_setMuted_isMutedTrueIsMicrophoneMutedTrue_noAction() throws { - source.microphoneMutedSubject.send(true) - subject = .init(source, isMicrophoneMuted: true) + func test_setMuted_whenStateUnchanged_doesNothing() throws { + source.stub(for: \.isMicrophoneMuted, with: true) + makeSubject() try subject.setMuted(true) XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 0) } - func test_setMuted_isMutedTrueIsMicrophoneMutedFalse_setMicrophoneMutedAndSubjectSend() async throws { - source.microphoneMutedSubject.send(false) - subject = .init(source, isMicrophoneMuted: false) - - let sinkExpectation = expectation(description: "Sink was called.") - let cancellable = subject - .isMicrophoneMutedPublisher - .filter { $0 == true } - .sink { _ in sinkExpectation.fulfill() } + func test_setMuted_whenMuting_updatesStateAndPublisher() throws { + source.stub(for: \.isMicrophoneMuted, with: false) + makeSubject() try subject.setMuted(true) XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 1) - await safeFulfillment(of: [sinkExpectation]) - cancellable.cancel() + XCTAssertTrue(subject.isMicrophoneMuted) } - func test_setMuted_isMutedFalseIsMicrophoneMutedTrue_setMicrophoneMutedAndSubjectSend() async throws { - source.microphoneMutedSubject.send(true) - subject = .init(source, 
isMicrophoneMuted: true) - - let sinkExpectation = expectation(description: "Sink was called.") - let cancellable = subject - .isMicrophoneMutedPublisher - .filter { $0 == false } - .sink { _ in sinkExpectation.fulfill() } + func test_setMuted_whenUnmutingWhileRecordingStopped_startsRecordingBeforeUnmuting() throws { + source.stub(for: \.isMicrophoneMuted, with: true) + source.stub(for: \.isRecordingInitialized, with: false) + makeSubject() try subject.setMuted(false) + XCTAssertEqual(source.timesCalled(.initAndStartRecording), 1) XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 1) - await safeFulfillment(of: [sinkExpectation]) - cancellable.cancel() + XCTAssertFalse(subject.isMicrophoneMuted) } - func test_setMuted_isMutedFalseIsMicrophoneMutedFalse_noAction() throws { - source.microphoneMutedSubject.send(false) - subject = .init(source, isMicrophoneMuted: false) - - try subject.setMuted(false) - - XCTAssertEqual(source.timesCalled(.setMicrophoneMuted), 0) - } + // MARK: - Stereo playout - // MARK: - didReceiveSpeechActivityEvent + func test_setStereoPlayoutPreference_updatesMuteModeAndPreference() { + makeSubject() - func test_didReceiveSpeechActivityEvent_speechActivityStarted_publishesEvent() async throws { - try await assertEvent(.speechActivityStarted) { - subject.audioDeviceModule($0, didReceiveSpeechActivityEvent: .started) - } - } + subject.setStereoPlayoutPreference(true) + XCTAssertTrue(source.prefersStereoPlayout) - func test_didReceiveSpeechActivityEvent_speechActivityEnded_publishesEvent() async throws { - try await assertEvent(.speechActivityEnded) { - subject.audioDeviceModule($0, didReceiveSpeechActivityEvent: .ended) - } - } + subject.setStereoPlayoutPreference(false) + XCTAssertFalse(source.prefersStereoPlayout) - // MARK: - didCreateEngine + let recordedModes = source.recordedInputPayload(RTCAudioEngineMuteMode.self, for: .setMuteMode) + XCTAssertEqual(recordedModes, [.inputMixer, .voiceProcessing]) - func 
test_didCreateEngine_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - try await assertEvent(.didCreateAudioEngine(audioEngine)) { - _ = subject.audioDeviceModule($0, didCreateEngine: audioEngine) - } + let recordedPreparedFlags = source.recordedInputPayload(Bool.self, for: .setRecordingAlwaysPreparedMode) + XCTAssertEqual(recordedPreparedFlags, [false, false]) } - // MARK: - willEnableAudioEngine + func test_refreshStereoPlayoutState_invokesUnderlyingModule() { + makeSubject() - func test_willEnableEngine_isPlayoutEnabledFalse_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = false - try await assertEvent( - .willEnableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willEnableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + subject.refreshStereoPlayoutState() - func test_willEnableEngine_isPlayoutEnabledTrue_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = false - try await assertEvent( - .willEnableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willEnableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } + XCTAssertEqual(source.timesCalled(.refreshStereoPlayoutState), 1) } - func test_willEnableEngine_isPlayoutEnabledFalse_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = true - try await assertEvent( - .willEnableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - 
_ = subject.audioDeviceModule( - $0, - willEnableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + // MARK: - Reset - func test_willEnableEngine_isPlayoutEnabledTrue_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = true - try await assertEvent( - .willEnableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willEnableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + func test_reset_invokesUnderlyingModule() { + makeSubject() - // MARK: - willStartEngine + subject.reset() - func test_willStartEngine_isPlayoutEnabledFalse_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = false - try await assertEvent( - .willStartAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willStartEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } + XCTAssertEqual(source.timesCalled(.reset), 1) } - func test_willStartEngine_isPlayoutEnabledTrue_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = false - try await assertEvent( - .willStartAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willStartEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + // MARK: - Delegate callbacks - func 
test_willStartEngine_isPlayoutEnabledFalse_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = true - try await assertEvent( - .willStartAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willStartEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) + func test_didReceiveSpeechActivityEvent_started_emitsEvent() async { + makeSubject() + await expectEvent(.speechActivityStarted) { + subject.audioDeviceModule($0, didReceiveSpeechActivityEvent: .started) } } - func test_willStartEngine_isPlayoutEnabledTrue_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = true - try await assertEvent( - .willStartAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - willStartEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) + func test_didReceiveSpeechActivityEvent_ended_emitsEvent() async { + makeSubject() + await expectEvent(.speechActivityEnded) { + subject.audioDeviceModule($0, didReceiveSpeechActivityEvent: .ended) } } - // MARK: - didStopEngine + func test_willEnableEngine_emitsEventAndUpdatesState() async { + makeSubject() + let engine = AVAudioEngine() + let expectedEvent = AudioDeviceModule.Event.willEnableAudioEngine( + engine, + isPlayoutEnabled: true, + isRecordingEnabled: false + ) - func test_didStopEngine_isPlayoutEnabledFalse_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = false - try await assertEvent( - .didStopAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - 
isRecordingEnabled: isRecordingEnabled + await expectEvent( + expectedEvent, + isPlayoutEnabled: true, + isRecordingEnabled: false ) { - _ = subject.audioDeviceModule( + subject.audioDeviceModule( $0, - didStopEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled + willEnableEngine: engine, + isPlayoutEnabled: true, + isRecordingEnabled: false ) } - } - func test_didStopEngine_isPlayoutEnabledTrue_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = false - try await assertEvent( - .didStopAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - didStopEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } + XCTAssertTrue(subject.isPlaying) + XCTAssertFalse(subject.isRecording) } - func test_didStopEngine_isPlayoutEnabledFalse_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = true - try await assertEvent( - .didStopAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - didStopEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + func test_willReleaseEngine_emitsEventAndUninstallsTap() async { + makeSubject() + let engine = AVAudioEngine() - func test_didStopEngine_isPlayoutEnabledTrue_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = true - try await assertEvent( - .didStopAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - 
didStopEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) + await expectEvent(.willReleaseAudioEngine(engine)) { + _ = subject.audioDeviceModule($0, willReleaseEngine: engine) } - } - - func test_didStopEngine_uninstallWasCalled() async throws { - _ = subject.audioDeviceModule( - .init(), - didStopEngine: .init(), - isPlayoutEnabled: false, - isRecordingEnabled: false - ) XCTAssertEqual(audioEngineNodeAdapter.timesCalled(.uninstall), 1) XCTAssertEqual(audioEngineNodeAdapter.recordedInputPayload(Int.self, for: .uninstall)?.first, 0) } - // MARK: - didDisableEngine - - func test_didDisableEngine_isPlayoutEnabledFalse_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = false - try await assertEvent( - .didDisableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - didDisableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + func test_configureInputFromSource_installsTap() { + makeSubject() + let engine = AVAudioEngine() + let destination = AVAudioMixerNode() + let format = AVAudioFormat( + commonFormat: .pcmFormatFloat32, + sampleRate: 48000, + channels: 1, + interleaved: false + )! 
- func test_didDisableEngine_isPlayoutEnabledTrue_isRecordingEnabledFalse_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = false - try await assertEvent( - .didDisableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - didDisableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + _ = subject.audioDeviceModule( + .init(), + engine: engine, + configureInputFromSource: nil, + toDestination: destination, + format: format, + context: [:] + ) - func test_didDisableEngine_isPlayoutEnabledFalse_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = false - let isRecordingEnabled = true - try await assertEvent( - .didDisableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { - _ = subject.audioDeviceModule( - $0, - didDisableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) - } - } + XCTAssertEqual(audioEngineNodeAdapter.timesCalled(.installInputTap), 1) + let payload = audioEngineNodeAdapter + .recordedInputPayload((Int, UInt32).self, for: .installInputTap)? + .first + XCTAssertEqual(payload?.0, 0) + XCTAssertEqual(payload?.1, 1024) + } + + func test_configureOutputFromSource_emitsEvent() async { + makeSubject() + let engine = AVAudioEngine() + let sourceNode = AVAudioPlayerNode() + let destination = AVAudioMixerNode() + let format = AVAudioFormat( + commonFormat: .pcmFormatFloat32, + sampleRate: 48000, + channels: 2, + interleaved: false + )! 
+ let expectedEvent = AudioDeviceModule.Event.configureOutputFromSource( + engine, + source: sourceNode, + destination: destination, + format: format + ) - func test_didDisableEngine_isPlayoutEnabledTrue_isRecordingEnabledTrue_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - let isPlayoutEnabled = true - let isRecordingEnabled = true - try await assertEvent( - .didDisableAudioEngine(audioEngine), - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled - ) { + await expectEvent(expectedEvent) { _ = subject.audioDeviceModule( $0, - didDisableEngine: audioEngine, - isPlayoutEnabled: isPlayoutEnabled, - isRecordingEnabled: isRecordingEnabled + engine: engine, + configureOutputFromSource: sourceNode, + toDestination: destination, + format: format, + context: [:] ) } } - func test_didDisableEngine_uninstallWasCalled() async throws { - _ = subject.audioDeviceModule( - .init(), - didDisableEngine: .init(), - isPlayoutEnabled: false, - isRecordingEnabled: false + func test_didUpdateAudioProcessingState_updatesPublishersAndEmitsEvent() async { + makeSubject() + let expectedEvent = AudioDeviceModule.Event.didUpdateAudioProcessingState( + voiceProcessingEnabled: true, + voiceProcessingBypassed: false, + voiceProcessingAGCEnabled: true, + stereoPlayoutEnabled: true ) - XCTAssertEqual(audioEngineNodeAdapter.timesCalled(.uninstall), 1) - XCTAssertEqual(audioEngineNodeAdapter.recordedInputPayload(Int.self, for: .uninstall)?.first, 0) - } - - // MARK: - willReleaseEngine - - func test_willReleaseEngine_publishesEvent() async throws { - let audioEngine = AVAudioEngine() - try await assertEvent(.willReleaseAudioEngine(audioEngine)) { - _ = subject.audioDeviceModule($0, willReleaseEngine: audioEngine) + await expectEvent(expectedEvent) { + subject.audioDeviceModule( + $0, + didUpdateAudioProcessingState: RTCAudioProcessingState( + voiceProcessingEnabled: true, + voiceProcessingBypassed: false, + voiceProcessingAGCEnabled: true, + 
stereoPlayoutEnabled: true + ) + ) } - } - - func test_willReleaseEngine_uninstallWasCalled() async throws { - _ = subject.audioDeviceModule(.init(), willReleaseEngine: .init()) - XCTAssertEqual(audioEngineNodeAdapter.timesCalled(.uninstall), 1) - XCTAssertEqual(audioEngineNodeAdapter.recordedInputPayload(Int.self, for: .uninstall)?.first, 0) + XCTAssertTrue(subject.isVoiceProcessingEnabled) + XCTAssertFalse(subject.isVoiceProcessingBypassed) + XCTAssertTrue(subject.isVoiceProcessingAGCEnabled) + XCTAssertTrue(subject.isStereoPlayoutEnabled) } - // MARK: - configureInputFromSource + // MARK: - Helpers - func test_configureInputFromSource_installWasCalled() async throws { - _ = subject.audioDeviceModule( - .init(), - engine: .init(), - configureInputFromSource: nil, - toDestination: .init(), - format: .init(), - context: [:] - ) - - XCTAssertEqual(audioEngineNodeAdapter.timesCalled(.installInputTap), 1) - let rawInput = try XCTUnwrap( - audioEngineNodeAdapter.recordedInputPayload( - Any.self, - for: .installInputTap - )?.first + @discardableResult + private func makeSubject() -> AudioDeviceModule { + let module = AudioDeviceModule( + source, + audioLevelsNodeAdapter: audioEngineNodeAdapter ) - let input = try XCTUnwrap(rawInput as? (Int, UInt32)) - XCTAssertEqual(input.0, 0) - XCTAssertEqual(input.1, 1024) + subject = module + return module } - // MARK: - Private Helpers - - private func assertEvent( - _ event: AudioDeviceModule.Event, + private func expectEvent( + _ expectedEvent: AudioDeviceModule.Event, isPlayoutEnabled: Bool? = nil, isRecordingEnabled: Bool? 
= nil, operation: (RTCAudioDeviceModule) -> Void, file: StaticString = #file, - function: StaticString = #function, line: UInt = #line - ) async throws { - let sinkExpectation = expectation(description: "Sink was called.") - let disposableBag = DisposableBag() - subject - .publisher - .filter { $0 == event } - .sink { _ in sinkExpectation.fulfill() } - .store(in: disposableBag) + ) async { + guard subject != nil else { + XCTFail("Subject not initialized", file: file, line: line) + return + } + + let eventExpectation = expectation(description: "Expect \(expectedEvent)") + subject.publisher + .filter { $0 == expectedEvent } + .sink { _ in eventExpectation.fulfill() } + .store(in: &cancellables) - var expectations = [sinkExpectation] + var expectations = [eventExpectation] if let isPlayoutEnabled { - let isPlayoutExpectation = expectation(description: "isPlayout:\(isPlayoutEnabled) failed.") - subject - .isPlayingPublisher + let playoutExpectation = expectation(description: "isPlaying updated") + subject.isPlayingPublisher .dropFirst() .filter { $0 == isPlayoutEnabled } - .sink { _ in isPlayoutExpectation.fulfill() } - .store(in: disposableBag) - expectations.append(isPlayoutExpectation) + .sink { _ in playoutExpectation.fulfill() } + .store(in: &cancellables) + expectations.append(playoutExpectation) } if let isRecordingEnabled { - let isRecordingEnabledExpectation = expectation(description: "isRecording:\(isRecordingEnabled) failed.") - subject - .isRecordingPublisher + let recordingExpectation = expectation(description: "isRecording updated") + subject.isRecordingPublisher .dropFirst() .filter { $0 == isRecordingEnabled } - .sink { _ in isRecordingEnabledExpectation.fulfill() } - .store(in: disposableBag) - expectations.append(isRecordingEnabledExpectation) + .sink { _ in recordingExpectation.fulfill() } + .store(in: &cancellables) + expectations.append(recordingExpectation) } operation(.init()) await safeFulfillment(of: expectations, file: file, line: line) - 
disposableBag.removeAll() + cancellables.removeAll() } } diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter_Tests.swift new file mode 100644 index 000000000..7462f2759 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter_Tests.swift @@ -0,0 +1,157 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +@testable import StreamVideo +import XCTest + +final class AudioEngineLevelNodeAdapter_Tests: XCTestCase, @unchecked Sendable { + + private var subject: CurrentValueSubject! + private var sut: AudioEngineLevelNodeAdapter! + private var cancellables: Set! + + override func setUp() { + super.setUp() + subject = .init(-100) + sut = AudioEngineLevelNodeAdapter() + sut.subject = subject + cancellables = [] + } + + override func tearDown() { + cancellables = nil + sut = nil + subject = nil + super.tearDown() + } + + // MARK: - installInputTap + + func test_installInputTap_configuresMixerTapOnce() { + let mixer = TestMixerNode() + let format = makeAudioFormat() + + sut.installInputTap(on: mixer, format: format, bus: 1, bufferSize: 2048) + + XCTAssertEqual(mixer.installTapCount, 1) + XCTAssertEqual(mixer.capturedBus, 1) + XCTAssertEqual(mixer.capturedBufferSize, 2048) + XCTAssertTrue(mixer.capturedFormat === format) + } + + func test_installInputTap_whenAlreadyInstalled_doesNotInstallTwice() { + let mixer = TestMixerNode() + let format = makeAudioFormat() + + sut.installInputTap(on: mixer, format: format) + sut.installInputTap(on: mixer, format: format) + + XCTAssertEqual(mixer.installTapCount, 1) + } + + func test_installInputTap_whenTapReceivesSamples_publishesDecibelValue() { + let mixer = TestMixerNode() + let format = makeAudioFormat() + sut.installInputTap(on: mixer, format: format) + let expectation = expectation(description: 
"Received audio level") + + var recordedValue: Float? + subject + .dropFirst() + .sink { value in + recordedValue = value + expectation.fulfill() + } + .store(in: &cancellables) + + let samples: [Float] = Array(repeating: 0.5, count: 4) + mixer.emit(bufferWith: samples, format: format) + + wait(for: [expectation], timeout: 1) + XCTAssertEqual(recordedValue ?? 0, 20 * log10(0.5), accuracy: 0.001) + } + + // MARK: - uninstall + + func test_uninstall_removesTapAndSendsSilence() { + let mixer = TestMixerNode() + sut.installInputTap(on: mixer, format: makeAudioFormat()) + let expectation = expectation(description: "Received silence") + + subject + .dropFirst() + .sink { value in + if value == AudioEngineLevelNodeAdapter.Constant.silenceDB { + expectation.fulfill() + } + } + .store(in: &cancellables) + + sut.uninstall() + wait(for: [expectation], timeout: 1) + + XCTAssertEqual(mixer.removeTapCount, 1) + } + + // MARK: - Helpers + + private func makeAudioFormat() -> AVAudioFormat { + AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 48000, channels: 1, interleaved: false)! + } +} + +private final class TestMixerNode: AVAudioMixerNode { + + private(set) var installTapCount = 0 + private(set) var removeTapCount = 0 + private(set) var capturedBus: AVAudioNodeBus? + private(set) var capturedBufferSize: AVAudioFrameCount? + private(set) var capturedFormat: AVAudioFormat? + private var tapBlock: AVAudioNodeTapBlock? + var stubbedEngine: AVAudioEngine? + + override var engine: AVAudioEngine? { stubbedEngine } + + init(engine: AVAudioEngine? 
= .init()) { + stubbedEngine = engine + super.init() + } + + override func installTap( + onBus bus: AVAudioNodeBus, + bufferSize: AVAudioFrameCount, + format: AVAudioFormat?, + block tapBlock: @escaping AVAudioNodeTapBlock + ) { + installTapCount += 1 + capturedBus = bus + capturedBufferSize = bufferSize + capturedFormat = format + self.tapBlock = tapBlock + } + + override func removeTap(onBus bus: AVAudioNodeBus) { + removeTapCount += 1 + tapBlock = nil + } + + func emit(bufferWith samples: [Float], format: AVAudioFormat) { + guard let tapBlock else { + XCTFail("Tap block not installed") + return + } + + let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(samples.count))! + buffer.frameLength = AVAudioFrameCount(samples.count) + if let pointer = buffer.floatChannelData?[0] { + for (index, sample) in samples.enumerated() { + pointer[index] = sample + } + } + + tapBlock(buffer, AVAudioTime(hostTime: 0)) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder_AVAudioRecorderMiddlewareTests.swift b/StreamVideoTests/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder_AVAudioRecorderMiddlewareTests.swift index 87e7bb05b..c5f681f39 100644 --- a/StreamVideoTests/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder_AVAudioRecorderMiddlewareTests.swift +++ b/StreamVideoTests/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder_AVAudioRecorderMiddlewareTests.swift @@ -12,12 +12,14 @@ final class StreamCallAudioRecorder_AVAudioRecorderMiddlewareTests: StreamVideoT private var actionsReceived: [(StreamCallAudioRecorder.Namespace.Action, StoreDelay)]! = [] private var audioRecorder: MockAVAudioRecorder! private lazy var mockPermissions: MockPermissionsStore! = .init() + private lazy var mockAudioStore: MockRTCAudioStore! 
= .init() private lazy var subject: StreamCallAudioRecorder .Namespace .AVAudioRecorderMiddleware! = .init(audioRecorder: audioRecorder) override func setUp() async throws { try await super.setUp() + mockAudioStore.makeShared() _ = mockPermissions audioRecorder = try .build() _ = subject @@ -25,7 +27,8 @@ final class StreamCallAudioRecorder_AVAudioRecorderMiddlewareTests: StreamVideoT override func tearDown() { mockPermissions.dismantle() - + mockAudioStore.dismantle() + subject = nil audioRecorder = nil actionsReceived = nil diff --git a/StreamVideoTests/Utils/AudioSession/CallAudioSession/CallAudioSession_Tests.swift b/StreamVideoTests/Utils/AudioSession/CallAudioSession/CallAudioSession_Tests.swift index 8270a3e22..637764af2 100644 --- a/StreamVideoTests/Utils/AudioSession/CallAudioSession/CallAudioSession_Tests.swift +++ b/StreamVideoTests/Utils/AudioSession/CallAudioSession/CallAudioSession_Tests.swift @@ -37,7 +37,7 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { isActive: true, category: .playAndRecord, mode: .voiceChat, - options: [.allowBluetooth, .allowBluetoothA2DP] + options: [.allowBluetoothHFP, .allowBluetoothA2DP] ) ) @@ -47,7 +47,7 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { let configuration = self.mockAudioStore.audioStore.state.audioSessionConfiguration return configuration.category == .playAndRecord && configuration.mode == .voiceChat - && configuration.options.contains(.allowBluetooth) + && configuration.options.contains(.allowBluetoothHFP) && configuration.options.contains(.allowBluetoothA2DP) } } @@ -58,11 +58,15 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { let delegate = SpyAudioSessionAdapterDelegate() let statsAdapter = MockWebRTCStatsAdapter() let policy = MockAudioSessionPolicy() + let mockAudioDeviceModule = MockRTCAudioDeviceModule() + mockAudioDeviceModule.stub(for: \.isRecording, with: true) + mockAudioDeviceModule.stub(for: \.isMicrophoneMuted, with: 
false) + mockAudioStore.audioStore.dispatch(.setAudioDeviceModule(.init(mockAudioDeviceModule))) let policyConfiguration = AudioSessionConfiguration( isActive: true, category: .playAndRecord, mode: .voiceChat, - options: [.allowBluetooth, .allowBluetoothA2DP], + options: [.allowBluetoothHFP, .allowBluetoothA2DP], overrideOutputAudioPort: .speaker ) policy.stub(for: .configuration, with: policyConfiguration) @@ -76,11 +80,6 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { shouldSetActive: true ) - // Initial enable dispatch. - await fulfillment { - self.mockAudioStore.audioStore.state.webRTCAudioSessionConfiguration.isAudioEnabled - } - // Provide call settings to trigger policy application. callSettingsSubject.send(CallSettings(audioOn: true, speakerOn: true)) capabilitiesSubject.send([.sendAudio]) @@ -90,20 +89,9 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { return state.audioSessionConfiguration.category == policyConfiguration.category && state.audioSessionConfiguration.mode == policyConfiguration.mode && state.audioSessionConfiguration.options == policyConfiguration.options - && state.shouldRecord + && state.isRecording && state.isMicrophoneMuted == false - } - - // Simulate route change to trigger delegate notification. - let speakerRoute = RTCAudioStore.StoreState.AudioRoute( - MockAVAudioSessionRouteDescription( - outputs: [MockAVAudioSessionPortDescription(portType: .builtInSpeaker)] - ) - ) - mockAudioStore.audioStore.dispatch(.setCurrentRoute(speakerRoute)) - - await fulfillment { - delegate.speakerUpdates.contains(true) + && state.webRTCAudioSessionConfiguration.isAudioEnabled } let traces = statsAdapter.stubbedFunctionInput[.trace]?.compactMap { input -> WebRTCTrace? 
in @@ -158,7 +146,7 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { isActive: true, category: .playAndRecord, mode: .voiceChat, - options: [.allowBluetooth], + options: [.allowBluetoothHFP], overrideOutputAudioPort: .speaker ) ) @@ -176,7 +164,7 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { capabilitiesSubject.send([.sendAudio]) await fulfillment { - self.mockAudioStore.audioStore.state.audioSessionConfiguration.options.contains(.allowBluetooth) + self.mockAudioStore.audioStore.state.audioSessionConfiguration.options.contains(.allowBluetoothHFP) } let updatedPolicy = MockAudioSessionPolicy() @@ -200,11 +188,144 @@ final class CallAudioSession_Tests: XCTestCase, @unchecked Sendable { await fulfillment { let state = self.mockAudioStore.audioStore.state return state.audioSessionConfiguration.options == [.allowBluetoothA2DP] - && state.shouldRecord == false + && state.isRecording == false && state.isMicrophoneMuted == true } } + func test_activate_setsStereoPreference_whenPolicyPrefersStereoPlayout() async { + let callSettingsSubject = PassthroughSubject() + let capabilitiesSubject = PassthroughSubject, Never>() + let delegate = SpyAudioSessionAdapterDelegate() + subject = .init(policy: LivestreamAudioSessionPolicy()) + + subject.activate( + callSettingsPublisher: callSettingsSubject.eraseToAnyPublisher(), + ownCapabilitiesPublisher: capabilitiesSubject.eraseToAnyPublisher(), + delegate: delegate, + statsAdapter: nil, + shouldSetActive: true + ) + + await fulfillment { + self.mockAudioStore.audioStore.state.stereoConfiguration.playout.preferred + } + } + + func test_routeChangeWithMatchingSpeaker_reappliesPolicy() async { + let callSettingsSubject = PassthroughSubject() + let capabilitiesSubject = PassthroughSubject, Never>() + let delegate = SpyAudioSessionAdapterDelegate() + let policy = MockAudioSessionPolicy() + let policyConfiguration = AudioSessionConfiguration( + isActive: true, + category: .playAndRecord, + 
mode: .voiceChat, + options: [.allowBluetoothHFP], + overrideOutputAudioPort: .speaker + ) + policy.stub(for: .configuration, with: policyConfiguration) + + subject = .init(policy: policy) + subject.activate( + callSettingsPublisher: callSettingsSubject.eraseToAnyPublisher(), + ownCapabilitiesPublisher: capabilitiesSubject.eraseToAnyPublisher(), + delegate: delegate, + statsAdapter: nil, + shouldSetActive: true + ) + + callSettingsSubject.send(CallSettings(audioOn: true, speakerOn: true)) + capabilitiesSubject.send([.sendAudio]) + + await fulfillment { + (policy.stubbedFunctionInput[.configuration]?.count ?? 0) == 1 + } + + let initialCount = policy.stubbedFunctionInput[.configuration]?.count ?? 0 + mockAudioStore.audioStore.dispatch( + .setCurrentRoute( + makeRoute(reason: .oldDeviceUnavailable, speakerOn: true) + ) + ) + + await fulfillment { + (policy.stubbedFunctionInput[.configuration]?.count ?? 0) == initialCount + 1 + } + } + + func test_routeChangeWithDifferentSpeaker_notifiesDelegate() async { + let callSettingsSubject = PassthroughSubject() + let capabilitiesSubject = PassthroughSubject, Never>() + let delegate = SpyAudioSessionAdapterDelegate() + let policy = MockAudioSessionPolicy() + subject = .init(policy: policy) + subject.activate( + callSettingsPublisher: callSettingsSubject.eraseToAnyPublisher(), + ownCapabilitiesPublisher: capabilitiesSubject.eraseToAnyPublisher(), + delegate: delegate, + statsAdapter: nil, + shouldSetActive: true + ) + + callSettingsSubject.send(CallSettings(audioOn: true, speakerOn: true)) + capabilitiesSubject.send([.sendAudio]) + + await fulfillment { + (policy.stubbedFunctionInput[.configuration]?.count ?? 0) == 1 + } + + mockAudioStore.audioStore.dispatch( + .setCurrentRoute( + makeRoute(reason: .oldDeviceUnavailable, speakerOn: false) + ) + ) + + await fulfillment { + delegate.speakerUpdates.contains(false) + } + + XCTAssertEqual(policy.stubbedFunctionInput[.configuration]?.count ?? 
0, 1) + } + + func test_callOptionsCleared_reappliesLastOptions() async { + let callSettingsSubject = PassthroughSubject() + let capabilitiesSubject = PassthroughSubject, Never>() + let delegate = SpyAudioSessionAdapterDelegate() + let policy = MockAudioSessionPolicy() + let policyConfiguration = AudioSessionConfiguration( + isActive: true, + category: .playAndRecord, + mode: .voiceChat, + options: [.allowBluetoothHFP] + ) + policy.stub(for: .configuration, with: policyConfiguration) + + subject = .init(policy: policy) + subject.activate( + callSettingsPublisher: callSettingsSubject.eraseToAnyPublisher(), + ownCapabilitiesPublisher: capabilitiesSubject.eraseToAnyPublisher(), + delegate: delegate, + statsAdapter: nil, + shouldSetActive: true + ) + + callSettingsSubject.send(CallSettings(audioOn: true, speakerOn: true)) + capabilitiesSubject.send([.sendAudio]) + + await fulfillment { + self.mockAudioStore.audioStore.state.audioSessionConfiguration.options == policyConfiguration.options + } + + mockAudioStore.audioStore.dispatch( + .avAudioSession(.systemSetCategoryOptions([])) + ) + + await fulfillment { + self.mockAudioStore.audioStore.state.audioSessionConfiguration.options == policyConfiguration.options + } + } + func test_currentRouteIsExternal_matchesAudioStoreState() async { let policy = MockAudioSessionPolicy() subject = .init(policy: policy) @@ -235,3 +356,25 @@ private final class SpyAudioSessionAdapterDelegate: StreamAudioSessionAdapterDel speakerUpdates.append(speakerOn) } } + +// MARK: - Helpers + +private func makeRoute( + reason: AVAudioSession.RouteChangeReason, + speakerOn: Bool +) -> RTCAudioStore.StoreState.AudioRoute { + let port = RTCAudioStore.StoreState.AudioRoute.Port( + type: speakerOn ? AVAudioSession.Port.builtInSpeaker.rawValue : AVAudioSession.Port.builtInReceiver.rawValue, + name: speakerOn ? "speaker" : "receiver", + id: UUID().uuidString, + isExternal: !speakerOn, + isSpeaker: speakerOn, + isReceiver: !speakerOn, + channels: speakerOn ? 
2 : 1 + ) + return .init( + inputs: [], + outputs: [port], + reason: reason + ) +} diff --git a/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift b/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift index fb2a54cfa..121043b02 100644 --- a/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift +++ b/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift @@ -18,7 +18,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: false ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -32,7 +32,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: false ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -46,7 +46,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: false ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -60,7 +60,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: true ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -74,7 +74,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: true ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -88,7 +88,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: true ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -102,7 +102,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: true ), [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -116,7 +116,7 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable appIsInForeground: false ), [ - .allowBluetooth, + 
.allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -127,4 +127,13 @@ final class AVAudioSessionCategoryOptionsTests: XCTestCase, @unchecked Sendable func test_playback_whenAccessed_thenReturnsEmptyOptions() { XCTAssertEqual(AVAudioSession.CategoryOptions.playback, []) } + + #if !canImport(AVFoundation, _version: 2360.61.4.11) + func test_allowBluetoothHFPAliasesBluetoothOnLegacySDKs() { + XCTAssertEqual( + AVAudioSession.CategoryOptions.allowBluetoothHFP, + AVAudioSession.CategoryOptions.allowBluetooth + ) + } + #endif } diff --git a/StreamVideoTests/Utils/AudioSession/Policies/DefaultAudioSessionPolicyTests.swift b/StreamVideoTests/Utils/AudioSession/Policies/DefaultAudioSessionPolicyTests.swift index d7370fc9c..ea116d76e 100644 --- a/StreamVideoTests/Utils/AudioSession/Policies/DefaultAudioSessionPolicyTests.swift +++ b/StreamVideoTests/Utils/AudioSession/Policies/DefaultAudioSessionPolicyTests.swift @@ -39,7 +39,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -61,7 +61,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -82,7 +82,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -104,7 +104,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -127,7 +127,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -149,7 +149,7 @@ final class DefaultAudioSessionPolicyTests: 
XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -171,7 +171,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -193,7 +193,7 @@ final class DefaultAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) diff --git a/StreamVideoTests/Utils/AudioSession/Policies/LivestreamAudioSessionPolicyTests.swift b/StreamVideoTests/Utils/AudioSession/Policies/LivestreamAudioSessionPolicyTests.swift new file mode 100644 index 000000000..15682bef4 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/Policies/LivestreamAudioSessionPolicyTests.swift @@ -0,0 +1,60 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +@testable import StreamVideo +import XCTest + +final class LivestreamAudioSessionPolicyTests: XCTestCase, @unchecked Sendable { + + private var subject: LivestreamAudioSessionPolicy! 
+ + override func setUp() { + super.setUp() + subject = LivestreamAudioSessionPolicy() + } + + override func tearDown() { + subject = nil + super.tearDown() + } + + func test_configuration_whenCanSendAudio_prefersPlayAndRecord() { + let callSettings = CallSettings( + audioOn: true, + videoOn: true, + speakerOn: true, + audioOutputOn: true + ) + let configuration = subject.configuration( + for: callSettings, + ownCapabilities: [.sendAudio] + ) + + XCTAssertEqual(configuration.isActive, callSettings.audioOutputOn) + XCTAssertEqual(configuration.category, .playAndRecord) + XCTAssertEqual(configuration.mode, .default) + XCTAssertEqual(configuration.options, [.allowBluetoothA2DP]) + XCTAssertEqual(configuration.overrideOutputAudioPort, .speaker) + } + + func test_configuration_whenCannotSendAudio_fallsBackToPlayback() { + let callSettings = CallSettings( + audioOn: false, + videoOn: false, + speakerOn: false, + audioOutputOn: false + ) + let configuration = subject.configuration( + for: callSettings, + ownCapabilities: [] + ) + + XCTAssertEqual(configuration.isActive, callSettings.audioOutputOn) + XCTAssertEqual(configuration.category, .playback) + XCTAssertEqual(configuration.mode, .default) + XCTAssertEqual(configuration.options, [.allowBluetoothA2DP]) + XCTAssertNil(configuration.overrideOutputAudioPort) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicyTests.swift b/StreamVideoTests/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicyTests.swift index 941896157..2d3c452f8 100644 --- a/StreamVideoTests/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicyTests.swift +++ b/StreamVideoTests/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicyTests.swift @@ -71,7 +71,7 @@ final class OwnCapabilitiesAudioSessionPolicyTests: XCTestCase, @unchecked Senda XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -97,7 +97,7 @@ final class 
OwnCapabilitiesAudioSessionPolicyTests: XCTestCase, @unchecked Senda XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -164,7 +164,7 @@ final class OwnCapabilitiesAudioSessionPolicyTests: XCTestCase, @unchecked Senda XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -188,7 +188,7 @@ final class OwnCapabilitiesAudioSessionPolicyTests: XCTestCase, @unchecked Senda XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -213,7 +213,7 @@ final class OwnCapabilitiesAudioSessionPolicyTests: XCTestCase, @unchecked Senda XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) @@ -238,7 +238,7 @@ final class OwnCapabilitiesAudioSessionPolicyTests: XCTestCase, @unchecked Senda XCTAssertEqual( configuration.options, [ - .allowBluetooth, + .allowBluetoothHFP, .allowBluetoothA2DP ] ) diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver_Tests.swift new file mode 100644 index 000000000..876162af3 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver_Tests.swift @@ -0,0 +1,68 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +@testable import StreamVideo +import XCTest + +final class AVAudioSessionObserver_Tests: XCTestCase, @unchecked Sendable { + + private var cancellables: Set! 
+ + override func setUp() { + super.setUp() + cancellables = [] + } + + override func tearDown() { + cancellables = nil + super.tearDown() + } + + func test_startObserving_emitsSnapshotsFromTimer() async { + let observer = AVAudioSessionObserver() + let expectation = expectation(description: "snapshots") + expectation.expectedFulfillmentCount = 2 + + observer.publisher + .prefix(2) + .sink { snapshot in + XCTAssertEqual(snapshot.category, AVAudioSession.sharedInstance().category) + expectation.fulfill() + } + .store(in: &cancellables) + + observer.startObserving() + + await fulfillment(of: [expectation], timeout: 1) + observer.stopObserving() + } + + func test_stopObserving_preventsFurtherEmissions() async { + let observer = AVAudioSessionObserver() + let firstTwo = expectation(description: "first snapshots") + let noMoreSnapshots = expectation(description: "no extra snapshots") + noMoreSnapshots.isInverted = true + + observer.publisher + .prefix(2) + .sink( + receiveCompletion: { _ in firstTwo.fulfill() }, + receiveValue: { _ in } + ) + .store(in: &cancellables) + + observer.publisher + .dropFirst(2) + .sink { _ in noMoreSnapshots.fulfill() } + .store(in: &cancellables) + + observer.startObserving() + await fulfillment(of: [firstTwo], timeout: 1) + + observer.stopObserving() + await fulfillment(of: [noMoreSnapshots], timeout: 0.3) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift index 6c0bca8a1..630334be4 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift @@ -31,7 +31,7 @@ final class 
RTCAudioStore_AVAudioSessionConfigurationValidatorTests: XCTestCase, subject = .init( category: .playAndRecord, mode: .voiceChat, - options: [.allowBluetooth, .defaultToSpeaker], + options: [.allowBluetoothHFP, .defaultToSpeaker], overrideOutputAudioPort: .none ) @@ -64,7 +64,7 @@ final class RTCAudioStore_AVAudioSessionConfigurationValidatorTests: XCTestCase, subject = .init( category: .playback, mode: .default, - options: [.allowBluetooth], + options: [.allowBluetoothHFP], overrideOutputAudioPort: .speaker ) diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_AVAudioSessionEffectTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_AVAudioSessionEffectTests.swift new file mode 100644 index 000000000..4fcf5ed18 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_AVAudioSessionEffectTests.swift @@ -0,0 +1,113 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RTCAudioStore_AVAudioSessionEffectTests: XCTestCase, @unchecked Sendable { + + private var effect: RTCAudioStore.AVAudioSessionEffect! + private var stateSubject: PassthroughSubject! + private var dispatchedActions: [[StoreActionBox]]! + private var dispatcher: Store.Dispatcher! + private var dispatcherExpectation: XCTestExpectation? + private var originalObserver: AVAudioSessionObserver! + private var testObserver: AVAudioSessionObserver! 
+ + override func setUp() { + super.setUp() + effect = .init() + stateSubject = .init() + dispatchedActions = [] + dispatcher = .init { [weak self] actions, _, _, _ in + self?.dispatchedActions.append(actions) + self?.dispatcherExpectation?.fulfill() + } + effect.dispatcher = dispatcher + originalObserver = InjectedValues[\.avAudioSessionObserver] + testObserver = AVAudioSessionObserver() + InjectedValues[\.avAudioSessionObserver] = testObserver + } + + override func tearDown() { + effect.set(statePublisher: nil) + testObserver.stopObserving() + InjectedValues[\.avAudioSessionObserver] = originalObserver + dispatcherExpectation = nil + dispatchedActions = nil + stateSubject = nil + effect = nil + testObserver = nil + originalObserver = nil + super.tearDown() + } + + func test_whenAudioDeviceModuleAvailable_dispatchesSystemCategoryUpdates() async { + dispatcherExpectation = expectation(description: "Dispatch category updates") + effect.set(statePublisher: stateSubject.eraseToAnyPublisher()) + + stateSubject.send(makeState(audioDeviceModule: makeAudioDeviceModule())) + + await fulfillment(of: [dispatcherExpectation!], timeout: 2) + + XCTAssertTrue( + dispatchedActions.contains { actions in + actions.contains { box in + if case let .normal(action) = box, + case .avAudioSession(.systemSetCategory) = action { + return true + } + return false + } + } + ) + } + + func test_whenAudioDeviceModuleMissing_doesNotDispatch() async { + let inverted = expectation(description: "No dispatch") + inverted.isInverted = true + dispatcherExpectation = inverted + + effect.set(statePublisher: stateSubject.eraseToAnyPublisher()) + stateSubject.send(makeState(audioDeviceModule: nil)) + + await fulfillment(of: [inverted], timeout: 0.5) + XCTAssertTrue(dispatchedActions.isEmpty) + } + + // MARK: - Helpers + + private func makeAudioDeviceModule() -> AudioDeviceModule { + AudioDeviceModule(MockRTCAudioDeviceModule()) + } + + private func makeState( + audioDeviceModule: AudioDeviceModule? 
+ ) -> RTCAudioStore.StoreState { + .init( + isActive: false, + isInterrupted: false, + isRecording: false, + isMicrophoneMuted: false, + hasRecordingPermission: true, + audioDeviceModule: audioDeviceModule, + currentRoute: .empty, + audioSessionConfiguration: .init( + category: .playAndRecord, + mode: .default, + options: [], + overrideOutputAudioPort: .none + ), + webRTCAudioSessionConfiguration: .init( + isAudioEnabled: true, + useManualAudio: false, + prefersNoInterruptionsFromSystemAlerts: false + ), + stereoConfiguration: .init(playout: .init(preferred: false, enabled: false)) + ) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_InterruptionsMiddlewareTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_InterruptionsEffectTests.swift similarity index 96% rename from StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_InterruptionsMiddlewareTests.swift rename to StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_InterruptionsEffectTests.swift index eaa6417eb..682a04468 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_InterruptionsMiddlewareTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_InterruptionsEffectTests.swift @@ -6,13 +6,13 @@ import StreamWebRTC import XCTest -final class RTCAudioStore_InterruptionsMiddlewareTests: XCTestCase, @unchecked Sendable { +final class RTCAudioStore_InterruptionsEffectTests: XCTestCase, @unchecked Sendable { private enum TestError: Error { case stub } private var session: RTCAudioSession! private var publisher: RTCAudioSessionPublisher! - private var subject: RTCAudioStore.InterruptionsMiddleware! + private var subject: RTCAudioStore.InterruptionsEffect! private var dispatched: [[StoreActionBox]]! 
override func setUp() { @@ -151,7 +151,6 @@ final class RTCAudioStore_InterruptionsMiddlewareTests: XCTestCase, @unchecked S private func makeState( isActive: Bool = false, isInterrupted: Bool = false, - shouldRecord: Bool = false, isRecording: Bool = false, isMicrophoneMuted: Bool = false, hasRecordingPermission: Bool = false, @@ -172,14 +171,14 @@ final class RTCAudioStore_InterruptionsMiddlewareTests: XCTestCase, @unchecked S .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, audioDeviceModule: audioDeviceModule, currentRoute: currentRoute, audioSessionConfiguration: audioSessionConfiguration, - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: .init(playout: .init(preferred: false, enabled: false)) ) } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_RouteChangeEffectTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_RouteChangeEffectTests.swift new file mode 100644 index 000000000..579500805 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_RouteChangeEffectTests.swift @@ -0,0 +1,78 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RTCAudioStore_RouteChangeEffectTests: XCTestCase, @unchecked Sendable { + + private var session: RTCAudioSession! + private var publisher: RTCAudioSessionPublisher! + private var effect: RTCAudioStore.RouteChangeEffect! + private var dispatcher: Store.Dispatcher! + private var dispatchedActions: [[StoreActionBox]]! + private var dispatcherExpectation: XCTestExpectation? 
+ + override func setUp() { + super.setUp() + session = .sharedInstance() + publisher = .init(session) + effect = .init(publisher) + dispatchedActions = [] + dispatcher = .init { [weak self] actions, _, _, _ in + self?.dispatchedActions.append(actions) + self?.dispatcherExpectation?.fulfill() + } + effect.dispatcher = dispatcher + } + + override func tearDown() { + effect.dispatcher = nil + dispatcherExpectation = nil + dispatchedActions = nil + dispatcher = nil + effect = nil + publisher = nil + session = nil + super.tearDown() + } + + func test_routeChange_dispatchesSetCurrentRoute() async { + dispatcherExpectation = expectation(description: "Dispatches setCurrentRoute") + let reason: AVAudioSession.RouteChangeReason = .noSuitableRouteForCategory + let previousRoute = AVAudioSessionRouteDescription.dummy() + + publisher.audioSessionDidChangeRoute( + session, + reason: reason, + previousRoute: previousRoute + ) + + await safeFulfillment(of: [dispatcherExpectation!], timeout: 1) + + let actions = dispatchedActions.flatMap { $0.map(\.wrappedValue) } + guard case let .setCurrentRoute(route) = actions.first else { + return XCTFail("Expected setCurrentRoute action.") + } + + let expectedRoute = RTCAudioStore.StoreState.AudioRoute( + session.currentRoute, + reason: reason + ) + XCTAssertEqual(route, expectedRoute) + } + + func test_nonRouteEvents_doNotDispatch() async { + let invertedExpectation = expectation(description: "No dispatch") + invertedExpectation.isInverted = true + dispatcherExpectation = invertedExpectation + + publisher.audioSessionDidBeginInterruption(session) + + await safeFulfillment(of: [invertedExpectation], timeout: 0.5) + XCTAssertTrue(dispatchedActions.isEmpty) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_StereoPlayoutEffectTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_StereoPlayoutEffectTests.swift new file mode 100644 index 
000000000..5536c8e20 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore_StereoPlayoutEffectTests.swift @@ -0,0 +1,76 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RTCAudioStore_StereoPlayoutEffectTests: XCTestCase, @unchecked Sendable { + + func test_stereoPlayoutChanges_dispatchesStereoAction() async throws { + let expectation = self.expectation(description: "Expected action dispatched.") + let subject = RTCAudioStore.StereoPlayoutEffect() + + let mockDispatcher = MockStoreDispatcher() + subject.dispatcher = .init { actions, _, _, _ in mockDispatcher.handle(actions: actions) } + + let mockAudioDeviceModule = MockRTCAudioDeviceModule() + let audioDeviceModule = AudioDeviceModule(mockAudioDeviceModule) + let stateSubject = CurrentValueSubject(.dummy(audioDeviceModule: audioDeviceModule)) + + let cancellable = mockDispatcher + .publisher + .filter { !$0.isEmpty } + .map { $0.map(\.wrappedValue) } + .filter { actions in + for action in actions { + guard case let .stereo(.setPlayoutEnabled(value)) = action else { + continue + } + return value + } + return false + } + .sink { _ in expectation.fulfill() } + + subject.set(statePublisher: stateSubject.eraseToAnyPublisher()) + audioDeviceModule.audioDeviceModule( + .init(), + didUpdateAudioProcessingState: RTCAudioProcessingState( + voiceProcessingEnabled: true, + voiceProcessingBypassed: false, + voiceProcessingAGCEnabled: true, + stereoPlayoutEnabled: true + ) + ) + + await fulfillment(of: [expectation]) + + cancellable.cancel() + } + + func test_routeChanges_refreshStereoState() async throws { + let subject = RTCAudioStore.StereoPlayoutEffect() + + let mockAudioDeviceModule = MockRTCAudioDeviceModule() + let audioDeviceModule = AudioDeviceModule(mockAudioDeviceModule) + let stateSubject = 
CurrentValueSubject(.dummy(audioDeviceModule: audioDeviceModule)) + + subject.set(statePublisher: stateSubject.eraseToAnyPublisher()) + audioDeviceModule.audioDeviceModule( + .init(), + didUpdateAudioProcessingState: RTCAudioProcessingState( + voiceProcessingEnabled: true, + voiceProcessingBypassed: false, + voiceProcessingAGCEnabled: true, + stereoPlayoutEnabled: true + ) + ) + stateSubject.send(.dummy(audioDeviceModule: audioDeviceModule, currentRoute: .dummy(inputs: [.dummy()]))) + + await fulfillment { mockAudioDeviceModule.timesCalled(.refreshStereoPlayoutState) == 1 } + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift index aaeaea021..28f0d14a4 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift @@ -23,11 +23,12 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck super.tearDown() } - func test_setInterrupted_whenActiveAndShouldRecordTrue_stopsRecording() { + // MARK: - setInterrupted + + func test_setInterrupted_whenActiveAndRecordingTrue_nothingHappens() { let (module, mock) = makeModule(isRecording: true) let state = makeState( isActive: true, - shouldRecord: true, isRecording: true, audioDeviceModule: module ) @@ -40,7 +41,7 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck line: #line ) - XCTAssertEqual(mock.timesCalled(.stopRecording), 1) + XCTAssertEqual(mock.timesCalled(.stopRecording), 0) XCTAssertEqual(mock.timesCalled(.initAndStartRecording), 0) } @@ -48,7 +49,6 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck let (module, 
mock) = makeModule(isRecording: true) let state = makeState( isActive: true, - shouldRecord: true, isRecording: true, audioDeviceModule: module ) @@ -65,17 +65,18 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck XCTAssertEqual(mock.timesCalled(.initAndStartRecording), 1) } - func test_setShouldRecord_whenEnabled_startsRecording() { + // MARK: - setRecording + + func test_setRecording_whenEnabled_startsRecording() { let (module, mock) = makeModule(isRecording: false) let state = makeState( - shouldRecord: false, isRecording: false, audioDeviceModule: module ) subject.apply( state: state, - action: .setShouldRecord(true), + action: .setRecording(true), file: #file, function: #function, line: #line @@ -84,17 +85,16 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck XCTAssertEqual(mock.timesCalled(.initAndStartRecording), 1) } - func test_setShouldRecord_whenDisabled_stopsRecording() { + func test_setRecording_whenDisabled_stopsRecording() { let (module, mock) = makeModule(isRecording: true) let state = makeState( - shouldRecord: true, isRecording: true, audioDeviceModule: module ) subject.apply( state: state, - action: .setShouldRecord(false), + action: .setRecording(false), file: #file, function: #function, line: #line @@ -103,13 +103,14 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck XCTAssertEqual(mock.timesCalled(.stopRecording), 1) } - func test_setMicrophoneMuted_whenShouldRecordTrue_updatesModule() { + // MARK: - setMicrophoneMuted + + func test_setMicrophoneMuted_whenRecordingTrue_updatesModule() { let (module, mock) = makeModule( isRecording: true, isMicrophoneMuted: false ) let state = makeState( - shouldRecord: true, isRecording: true, isMicrophoneMuted: false, audioDeviceModule: module @@ -123,16 +124,16 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck line: #line ) + 
XCTAssertEqual(mock.timesCalled(.initAndStartRecording), 0) XCTAssertEqual(mock.timesCalled(.setMicrophoneMuted), 1) } - func test_setMicrophoneMuted_whenShouldRecordFalse_noInteraction() { + func test_setMicrophoneMuted_whenRecordingFalse_updatesModule() { let (module, mock) = makeModule( isRecording: false, isMicrophoneMuted: false ) let state = makeState( - shouldRecord: false, audioDeviceModule: module ) @@ -144,30 +145,75 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck line: #line ) - XCTAssertEqual(mock.timesCalled(.setMicrophoneMuted), 0) + XCTAssertEqual(mock.timesCalled(.setMicrophoneMuted), 1) } - func test_setAudioDeviceModule_replacesModuleAndDispatchesPublishers() throws { - let (currentModule, currentMock) = makeModule(isRecording: true) - let (replacementModule, _) = makeModule(isRecording: false) + func test_setMicrophoneUnMuted_whenRecordingTrue_updatesModule() { + let (module, mock) = makeModule( + isRecording: true, + isMicrophoneMuted: true + ) + let state = makeState( + isRecording: true, + isMicrophoneMuted: true, + audioDeviceModule: module + ) + + subject.apply( + state: state, + action: .setMicrophoneMuted(false), + file: #file, + function: #function, + line: #line + ) - let dispatchExpectation = expectation(description: "Dispatched expected actions") - dispatchExpectation.expectedFulfillmentCount = 2 + XCTAssertEqual(mock.timesCalled(.initAndStartRecording), 0) + XCTAssertEqual(mock.timesCalled(.setMicrophoneMuted), 1) + } + func test_setMicrophoneUnMuted_whenRecordingFalse_updatesModule() { + let (module, mock) = makeModule( + isRecording: false, + isMicrophoneMuted: true + ) + let state = makeState( + isRecording: false, + isMicrophoneMuted: true, + audioDeviceModule: module + ) + + subject.apply( + state: state, + action: .setMicrophoneMuted(false), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(mock.timesCalled(.initAndStartRecording), 1) + 
XCTAssertEqual(mock.timesCalled(.setMicrophoneMuted), 1) + } + + // MARK: - setAudioDeviceModule + + func test_setAudioDeviceModule_updatesModule() throws { + let (currentModule, currentMock) = makeModule( + isRecording: true, + isMicrophoneMuted: false + ) + + let setRecordingExpectation = expectation(description: "setRecording called from AudioDeviceModule value.") + let setMicrophoneMutedExpectation = expectation(description: "setMicrophoneMuted called from AudioDeviceModule value.") subject.dispatcher = .init { actions, _, _, _ in actions .map(\.wrappedValue) .forEach { action in switch action { case .setRecording(true): - guard self.recordedSetRecording == false else { return } - self.recordedSetRecording = true - dispatchExpectation.fulfill() + setRecordingExpectation.fulfill() - case .setMicrophoneMuted(true): - guard self.recordedSetMicrophoneMuted == false else { return } - self.recordedSetMicrophoneMuted = true - dispatchExpectation.fulfill() + case .setMicrophoneMuted(false): + setMicrophoneMutedExpectation.fulfill() default: break @@ -175,8 +221,26 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck } } + subject.apply( + state: makeState(), + action: .setAudioDeviceModule(currentModule), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(currentMock.timesCalled(.reset), 0) + XCTAssertEqual(currentMock.timesCalled(.setMuteMode), 1) + XCTAssertEqual(currentMock.timesCalled(.setRecordingAlwaysPreparedMode), 1) + + wait(for: [setRecordingExpectation, setMicrophoneMutedExpectation], timeout: 1) + } + + func test_setAudioDeviceModule_replacesModuleAndDispatchesPublishers() throws { + let (currentModule, currentMock) = makeModule(isRecording: true) + let (replacementModule, replacementMock) = makeModule(isRecording: false) + let state = makeState( - shouldRecord: true, isRecording: true, isMicrophoneMuted: false, audioDeviceModule: currentModule @@ -190,35 +254,94 @@ final class 
RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck line: #line ) - XCTAssertEqual(currentMock.timesCalled(.stopRecording), 1) + XCTAssertEqual(currentMock.timesCalled(.reset), 1) + XCTAssertEqual(replacementMock.timesCalled(.reset), 0) + XCTAssertEqual(replacementMock.timesCalled(.setMuteMode), 1) + XCTAssertEqual(replacementMock.timesCalled(.setRecordingAlwaysPreparedMode), 1) + } + + // MARK: - setPlayoutPreferred + + func test_setPlayoutPreferred_updatesModule() throws { + let (module, mock) = makeModule(isRecording: false) - // Trigger publisher output. - let engine = AVAudioEngine() - _ = replacementModule.audioDeviceModule( - .init(), - willEnableEngine: engine, - isPlayoutEnabled: false, - isRecordingEnabled: true + subject.apply( + state: makeState(audioDeviceModule: module), + action: .stereo(.setPlayoutPreferred(true)), + file: #file, + function: #function, + line: #line ) - try replacementModule.setMuted(true) + XCTAssertTrue(mock.prefersStereoPlayout) + } + + func test_setPlayoutPreferred_false_updatesModule() throws { + let (module, mock) = makeModule(isRecording: false) + + subject.apply( + state: makeState(audioDeviceModule: module), + action: .stereo(.setPlayoutPreferred(false)), + file: #file, + function: #function, + line: #line + ) - wait(for: [dispatchExpectation], timeout: 1) + XCTAssertFalse(mock.prefersStereoPlayout) + } + + // MARK: - setAudioEnabled + + func test_setAudioEnabled_whenEnabled_updatesModule() throws { + let (module, mock) = makeModule(isRecording: false) + let state = makeState( + isRecording: false, + audioDeviceModule: module + ) + + subject.apply( + state: state, + action: .webRTCAudioSession(.setAudioEnabled(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(mock.timesCalled(.initAndStartPlayout), 1) + } + + func test_setAudioEnabled_whenDisabled_updatesModule() throws { + let (module, mock) = makeModule(isRecording: false, isPlaying: true) + let state = makeState( + 
isRecording: false, + audioDeviceModule: module + ) + + subject.apply( + state: state, + action: .webRTCAudioSession(.setAudioEnabled(false)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(mock.timesCalled(.stopPlayout), 1) } // MARK: - Helpers private func makeModule( isRecording: Bool, - isMicrophoneMuted: Bool = false + isMicrophoneMuted: Bool = false, + isPlaying: Bool = false ) -> (AudioDeviceModule, MockRTCAudioDeviceModule) { let source = MockRTCAudioDeviceModule() - source.microphoneMutedSubject.send(isMicrophoneMuted) + source.stub(for: \.isRecording, with: isRecording) + source.stub(for: \.isPlaying, with: isPlaying) + source.stub(for: \.isMicrophoneMuted, with: isMicrophoneMuted) let module = AudioDeviceModule( - source, - isRecording: isRecording, - isMicrophoneMuted: isMicrophoneMuted + source ) return (module, source) } @@ -226,7 +349,6 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck private func makeState( isActive: Bool = false, isInterrupted: Bool = false, - shouldRecord: Bool = false, isRecording: Bool = false, isMicrophoneMuted: Bool = false, hasRecordingPermission: Bool = false, @@ -247,14 +369,14 @@ final class RTCAudioStore_AudioDeviceModuleMiddlewareTests: XCTestCase, @uncheck .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, audioDeviceModule: audioDeviceModule, currentRoute: currentRoute, audioSessionConfiguration: audioSessionConfiguration, - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: .init(playout: .init(preferred: false, enabled: false)) ) } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_RouteChangeMiddlewareTests.swift 
b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_RouteChangeMiddlewareTests.swift deleted file mode 100644 index 7d502c194..000000000 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore_RouteChangeMiddlewareTests.swift +++ /dev/null @@ -1,77 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -@testable import StreamVideo -import StreamWebRTC -import XCTest - -final class RTCAudioStore_RouteChangeMiddlewareTests: XCTestCase, @unchecked Sendable { - - private var session: RTCAudioSession! - private var publisher: RTCAudioSessionPublisher! - private var subject: RTCAudioStore.RouteChangeMiddleware! - private var dispatched: [[StoreActionBox]]! - - override func setUp() { - super.setUp() - session = RTCAudioSession.sharedInstance() - publisher = .init(session) - subject = .init(publisher) - dispatched = [] - } - - override func tearDown() { - subject.dispatcher = nil - subject = nil - publisher = nil - session = nil - dispatched = nil - super.tearDown() - } - - func test_routeChange_dispatchesSetCurrentRouteAndOverrideActions() { - let dispatcherExpectation = expectation(description: "Dispatcher called") - dispatcherExpectation.assertForOverFulfill = false - - subject.dispatcher = .init { [weak self] actions, _, _, _ in - self?.dispatched.append(actions) - dispatcherExpectation.fulfill() - } - - let previousRoute = MockAVAudioSessionRouteDescription( - outputs: [MockAVAudioSessionPortDescription(portType: .builtInReceiver)] - ) - - publisher.audioSessionDidChangeRoute( - session, - reason: .oldDeviceUnavailable, - previousRoute: previousRoute - ) - - wait(for: [dispatcherExpectation], timeout: 1) - - guard let actions = dispatched.first(where: { $0.count == 2 }) else { - return XCTFail("Expected dispatched actions.") - } - - XCTAssertEqual(actions.count, 2) - - guard case let .setCurrentRoute(route) = actions[0].wrappedValue else { - return 
XCTFail("Expected first action to be setCurrentRoute.") - } - - guard - case let .avAudioSession(.setOverrideOutputAudioPort(port)) = actions[1].wrappedValue - else { - return XCTFail("Expected second action to setOverrideOutputAudioPort.") - } - - let expectedRoute = RTCAudioStore.StoreState.AudioRoute(session.currentRoute) - XCTAssertEqual(route, expectedRoute) - - let expectedPort: AVAudioSession.PortOverride = expectedRoute.isSpeaker ? .speaker : .none - XCTAssertEqual(port, expectedPort) - } -} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore_CoordinatorTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore_CoordinatorTests.swift index c328286e3..ae8ee81e5 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore_CoordinatorTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore_CoordinatorTests.swift @@ -78,7 +78,15 @@ final class RTCAudioStore_CoordinatorTests: XCTestCase, @unchecked Sendable { func test_setCurrentRoute_differentValue_returnsTrue() { let route = RTCAudioStore.StoreState.AudioRoute( - inputs: [.init(type: .unique, name: .unique, id: .unique, isExternal: false, isSpeaker: true, isReceiver: false)], + inputs: [.init( + type: .unique, + name: .unique, + id: .unique, + isExternal: false, + isSpeaker: true, + isReceiver: false, + channels: 1 + )], outputs: [] ) let state = makeState(currentRoute: .empty) @@ -195,12 +203,59 @@ final class RTCAudioStore_CoordinatorTests: XCTestCase, @unchecked Sendable { ) } + func test_stereo_setPlayoutPreferred_sameValue_returnsFalse() { + let stereoConfiguration = makeStereoConfiguration(preferred: true, enabled: false) + let state = makeState(stereoConfiguration: stereoConfiguration) + + XCTAssertFalse( + subject.shouldExecute( + action: .stereo(.setPlayoutPreferred(true)), + state: state + ) + ) + } + + func test_stereo_setPlayoutPreferred_differentValue_returnsTrue() { + let 
stereoConfiguration = makeStereoConfiguration(preferred: false, enabled: false) + let state = makeState(stereoConfiguration: stereoConfiguration) + + XCTAssertTrue( + subject.shouldExecute( + action: .stereo(.setPlayoutPreferred(true)), + state: state + ) + ) + } + + func test_stereo_setPlayoutEnabled_sameValue_returnsFalse() { + let stereoConfiguration = makeStereoConfiguration(preferred: false, enabled: true) + let state = makeState(stereoConfiguration: stereoConfiguration) + + XCTAssertFalse( + subject.shouldExecute( + action: .stereo(.setPlayoutEnabled(true)), + state: state + ) + ) + } + + func test_stereo_setPlayoutEnabled_differentValue_returnsTrue() { + let stereoConfiguration = makeStereoConfiguration(preferred: false, enabled: false) + let state = makeState(stereoConfiguration: stereoConfiguration) + + XCTAssertTrue( + subject.shouldExecute( + action: .stereo(.setPlayoutEnabled(true)), + state: state + ) + ) + } + // MARK: - Helpers private func makeState( isActive: Bool = false, isInterrupted: Bool = false, - shouldRecord: Bool = false, isRecording: Bool = false, isMicrophoneMuted: Bool = false, hasRecordingPermission: Bool = false, @@ -216,19 +271,22 @@ final class RTCAudioStore_CoordinatorTests: XCTestCase, @unchecked Sendable { isAudioEnabled: false, useManualAudio: false, prefersNoInterruptionsFromSystemAlerts: false + ), + stereoConfiguration: RTCAudioStore.StoreState.StereoConfiguration = .init( + playout: .init(preferred: false, enabled: false) ) ) -> RTCAudioStore.StoreState { .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, audioDeviceModule: audioDeviceModule, currentRoute: currentRoute, audioSessionConfiguration: audioSessionConfiguration, - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: 
stereoConfiguration ) } @@ -257,4 +315,16 @@ final class RTCAudioStore_CoordinatorTests: XCTestCase, @unchecked Sendable { prefersNoInterruptionsFromSystemAlerts: prefersNoInterruptionsFromSystemAlerts ) } + + private func makeStereoConfiguration( + preferred: Bool, + enabled: Bool + ) -> RTCAudioStore.StoreState.StereoConfiguration { + .init( + playout: .init( + preferred: preferred, + enabled: enabled + ) + ) + } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_AVAudioSessionReducerTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_AVAudioSessionReducerTests.swift index bab7d6b3c..204636429 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_AVAudioSessionReducerTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_AVAudioSessionReducerTests.swift @@ -106,16 +106,16 @@ final class RTCAudioStore_AVAudioSessionReducerTests: XCTestCase, @unchecked Sen let state = makeState( category: .playAndRecord, mode: .voiceChat, - options: [.allowBluetooth] + options: [.allowBluetoothHFP] ) session.category = AVAudioSession.Category.playAndRecord.rawValue session.mode = AVAudioSession.Mode.voiceChat.rawValue - session.categoryOptions = [.allowBluetooth] + session.categoryOptions = [.allowBluetoothHFP] session.isActive = true let result = try await subject.reduce( state: state, - action: .avAudioSession(.setCategoryOptions([.allowBluetooth, .defaultToSpeaker])), + action: .avAudioSession(.setCategoryOptions([.allowBluetoothHFP, .defaultToSpeaker])), file: #file, function: #function, line: #line @@ -193,12 +193,58 @@ final class RTCAudioStore_AVAudioSessionReducerTests: XCTestCase, @unchecked Sen XCTAssertEqual(session.timesCalled(.setConfiguration), 1) } + func test_reduce_systemSetCategory_updatesStateWithoutCallingSession() async throws { + let state = makeState( + category: .playback, + mode: 
.default, + options: [] + ) + + let result = try await subject.reduce( + state: state, + action: .avAudioSession(.systemSetCategory(.playAndRecord)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(result.audioSessionConfiguration.category, .playAndRecord) + XCTAssertEqual(session.timesCalled(.setConfiguration), 0) + } + + func test_reduce_setCurrentRoute_updatesOverridePort() async throws { + let state = makeState(overrideOutput: .none) + let speakerRoute = RTCAudioStore.StoreState.AudioRoute( + inputs: [], + outputs: [ + .init( + type: .unique, + name: .unique, + id: .unique, + isExternal: false, + isSpeaker: true, + isReceiver: false, + channels: 2 + ) + ] + ) + + let result = try await subject.reduce( + state: state, + action: .setCurrentRoute(speakerRoute), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(result.audioSessionConfiguration.overrideOutputAudioPort, .speaker) + } + // MARK: - Helpers private func makeState( isActive: Bool = false, isInterrupted: Bool = false, - shouldRecord: Bool = false, isRecording: Bool = false, isMicrophoneMuted: Bool = false, hasRecordingPermission: Bool = false, @@ -217,7 +263,6 @@ final class RTCAudioStore_AVAudioSessionReducerTests: XCTestCase, @unchecked Sen .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, @@ -229,7 +274,8 @@ final class RTCAudioStore_AVAudioSessionReducerTests: XCTestCase, @unchecked Sen options: options, overrideOutputAudioPort: overrideOutput ), - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: .init(playout: .init(preferred: false, enabled: false)) ) } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_CallKitReducerTests.swift 
b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_CallKitReducerTests.swift index 82e86c9e7..4906bb0a5 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_CallKitReducerTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_CallKitReducerTests.swift @@ -109,14 +109,14 @@ final class RTCAudioStore_CallKitReducerTests: XCTestCase, @unchecked Sendable { .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, audioDeviceModule: audioDeviceModule, currentRoute: currentRoute, audioSessionConfiguration: audioSessionConfiguration, - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: .init(playout: .init(preferred: false, enabled: false)) ) } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_DefaultReducerTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_DefaultReducerTests.swift index e0691cce3..19fcad34f 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_DefaultReducerTests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_DefaultReducerTests.swift @@ -95,12 +95,31 @@ final class RTCAudioStore_DefaultReducerTests: XCTestCase, @unchecked Sendable { } } + func test_reduce_setActive_updatesAudioDeviceModulePlayout() async throws { + session.isActive = false + let (audioDeviceModule, mockModule) = makeAudioDeviceModule() + mockModule.stub(for: \.isPlayoutInitialized, with: false) + let state = makeState( + isActive: false, + audioDeviceModule: audioDeviceModule + ) + + _ = try await subject.reduce( + state: state, + action: 
.setActive(true), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(mockModule.timesCalled(.initAndStartPlayout), 1) + } + // MARK: - setAudioDeviceModule func test_reduce_setAudioDeviceModule_nil_resetsRecordingFlags() async throws { let module = AudioDeviceModule(MockRTCAudioDeviceModule()) let state = makeState( - shouldRecord: true, isRecording: true, isMicrophoneMuted: true, audioDeviceModule: module @@ -115,16 +134,14 @@ final class RTCAudioStore_DefaultReducerTests: XCTestCase, @unchecked Sendable { ) XCTAssertNil(result.audioDeviceModule) - XCTAssertFalse(result.shouldRecord) XCTAssertFalse(result.isRecording) - XCTAssertFalse(result.isMicrophoneMuted) + XCTAssertTrue(result.isMicrophoneMuted) } func test_reduce_setAudioDeviceModule_nonNil_preservesRecordingFlags() async throws { let currentModule = AudioDeviceModule(MockRTCAudioDeviceModule()) let replacement = AudioDeviceModule(MockRTCAudioDeviceModule()) let state = makeState( - shouldRecord: true, isRecording: true, isMicrophoneMuted: true, audioDeviceModule: currentModule @@ -139,11 +156,32 @@ final class RTCAudioStore_DefaultReducerTests: XCTestCase, @unchecked Sendable { ) XCTAssertTrue(result.audioDeviceModule === replacement) - XCTAssertTrue(result.shouldRecord) XCTAssertTrue(result.isRecording) XCTAssertTrue(result.isMicrophoneMuted) } + func test_reduce_setAudioDeviceModule_nil_resetsStereoConfiguration() async throws { + let module = AudioDeviceModule(MockRTCAudioDeviceModule()) + let stereoConfiguration = RTCAudioStore.StoreState.StereoConfiguration( + playout: .init(preferred: true, enabled: true) + ) + let state = makeState( + audioDeviceModule: module, + stereoConfiguration: stereoConfiguration + ) + + let result = try await subject.reduce( + state: state, + action: .setAudioDeviceModule(nil), + file: #file, + function: #function, + line: #line + ) + + XCTAssertFalse(result.stereoConfiguration.playout.preferred) + 
XCTAssertFalse(result.stereoConfiguration.playout.enabled) + } + // MARK: - Passthrough actions func test_reduce_avAudioSessionAction_returnsUnchangedState() async throws { @@ -165,7 +203,6 @@ final class RTCAudioStore_DefaultReducerTests: XCTestCase, @unchecked Sendable { private func makeState( isActive: Bool = false, isInterrupted: Bool = false, - shouldRecord: Bool = false, isRecording: Bool = false, isMicrophoneMuted: Bool = false, hasRecordingPermission: Bool = false, @@ -181,19 +218,31 @@ final class RTCAudioStore_DefaultReducerTests: XCTestCase, @unchecked Sendable { isAudioEnabled: false, useManualAudio: false, prefersNoInterruptionsFromSystemAlerts: false + ), + stereoConfiguration: RTCAudioStore.StoreState.StereoConfiguration = .init( + playout: .init( + preferred: false, + enabled: false + ) ) ) -> RTCAudioStore.StoreState { .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, audioDeviceModule: audioDeviceModule, currentRoute: currentRoute, audioSessionConfiguration: audioSessionConfiguration, - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: stereoConfiguration ) } + + private func makeAudioDeviceModule() -> (AudioDeviceModule, MockRTCAudioDeviceModule) { + let mock = MockRTCAudioDeviceModule() + let module = AudioDeviceModule(mock) + return (module, mock) + } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_WebRTCAudioSessionReducerTests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_WebRTCAudioSessionReducerTests.swift index c7aa04dba..9881e6f32 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_WebRTCAudioSessionReducerTests.swift +++ 
b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore_WebRTCAudioSessionReducerTests.swift @@ -145,7 +145,6 @@ final class RTCAudioStore_WebRTCAudioSessionReducerTests: XCTestCase, @unchecked private func makeState( isActive: Bool = false, isInterrupted: Bool = false, - shouldRecord: Bool = false, isRecording: Bool = false, isMicrophoneMuted: Bool = false, hasRecordingPermission: Bool = false, @@ -166,14 +165,14 @@ final class RTCAudioStore_WebRTCAudioSessionReducerTests: XCTestCase, @unchecked .init( isActive: isActive, isInterrupted: isInterrupted, - shouldRecord: shouldRecord, isRecording: isRecording, isMicrophoneMuted: isMicrophoneMuted, hasRecordingPermission: hasRecordingPermission, audioDeviceModule: audioDeviceModule, currentRoute: currentRoute, audioSessionConfiguration: audioSessionConfiguration, - webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration + webRTCAudioSessionConfiguration: webRTCAudioSessionConfiguration, + stereoConfiguration: .init(playout: .init(preferred: false, enabled: false)) ) } } diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift index 9098c8337..7b8e14f48 100644 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift @@ -52,13 +52,11 @@ final class RTCAudioStore_Tests: XCTestCase, @unchecked Sendable { func test_dispatch_multipleActions_updatesState() async { subject.dispatch([ - .setInterrupted(true), - .setShouldRecord(true) + .setInterrupted(true) ]) await fulfillment { self.subject.state.isInterrupted - && self.subject.state.shouldRecord } } diff --git a/StreamVideoTests/Utils/Store/Store_Tests.swift b/StreamVideoTests/Utils/Store/Store_Tests.swift index 447db0d82..d844dac6f 100644 --- a/StreamVideoTests/Utils/Store/Store_Tests.swift +++ 
b/StreamVideoTests/Utils/Store/Store_Tests.swift @@ -50,7 +50,7 @@ final class Store_Tests: XCTestCase, @unchecked Sendable { } } - func test_dispatch_allReduceresWereCalled() async { + func test_dispatch_allReducersWereCalled() async { subject.dispatch(.callReducersWithStep) await fulfillment { @@ -80,6 +80,40 @@ XCTAssertEqual(reducerB.timesCalled, 0) XCTAssertEqual(subject.state.reducersCalled, 0) } + + // MARK: - Effects + + func test_addEffect_configuresDependenciesAndReceivesStateUpdates() async { + let effect = TestStoreEffect() + subject.add(effect) + + await fulfillment(timeout: 2) { + effect.didReceivePublisher + && effect.dispatcher != nil + && effect.state != nil + } + + subject.dispatch(.callReducersWithStep) + + await fulfillment(timeout: 2) { + effect.receivedStates.contains { $0.reducersCalled == 2 } + } + } + + func test_removeEffect_clearsDependencies() async { + let effect = TestStoreEffect() + subject.add(effect) + + await fulfillment(timeout: 2) { effect.didReceivePublisher } + + subject.remove(effect) + + await fulfillment(timeout: 2) { + effect.dispatcher == nil + && effect.stateProvider == nil + && effect.didReceiveNilPublisher + } + } } // MARK: - Private Types @@ -144,3 +178,31 @@ private enum TestStoreNamespace: StoreNamespace, Sendable { static let identifier: String = .unique } + +private final class TestStoreEffect: StoreEffect<TestStoreNamespace>, @unchecked Sendable { + + private var cancellable: AnyCancellable? + + private(set) var receivedStates: [TestStoreState] = [] + private(set) var didReceivePublisher = false + private(set) var didReceiveNilPublisher = false + + override func set( + statePublisher: AnyPublisher<TestStoreState, Never>? 
+ ) { + cancellable?.cancel() + guard let statePublisher else { + didReceiveNilPublisher = true + didReceivePublisher = false + cancellable = nil + return + } + + didReceivePublisher = true + didReceiveNilPublisher = false + cancellable = statePublisher + .sink { [weak self] state in + self?.receivedStates.append(state) + } + } +} diff --git a/StreamVideoTests/WebRTC/AudioSession_Tests.swift b/StreamVideoTests/WebRTC/AudioSession_Tests.swift deleted file mode 100644 index 09f1b3688..000000000 --- a/StreamVideoTests/WebRTC/AudioSession_Tests.swift +++ /dev/null @@ -1,159 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -@testable import StreamVideo -import StreamWebRTC -import XCTest - -final class AudioSession_Tests: XCTestCase, @unchecked Sendable { - -// private lazy var subject: StreamAudioSessionAdapter! = StreamAudioSessionAdapter() -// private lazy var rtcAudioSession: RTCAudioSession! = .sharedInstance() -// -// private var updatedCallSettings: CallSettings? 
-// private var didReceiveUpdateCallSettings: Bool = false - - // MARK: - Lifecycle - -// override func setUp() { -// super.setUp() -// subject.delegate = self -// } - -// override func tearDown() { -// subject = nil -// rtcAudioSession = nil -// updatedCallSettings = nil -// super.tearDown() -// } - -// // MARK: - StreamAudioSessionAdapterDelegate -// -// func audioSessionDidUpdateCallSettings( -// _ audioSession: StreamAudioSessionAdapter, -// callSettings: CallSettings -// ) { -// didReceiveUpdateCallSettings = true -// updatedCallSettings = callSettings -// } - - // MARK: - didUpdateCallSettings - -// func test_didUpdateCallSettings_updatesActiveCallSettings() { -// // Given -// let callSettings = CallSettings(speakerOn: true, audioOutputOn: true) -// -// // When -// subject.didUpdateCallSettings(callSettings) -// -// // Then -// XCTAssertEqual(subject.activeCallSettings, callSettings) -// } - -// func test_didUpdateCallSettings_respectsCallSettingsIfAlreadyActive() { -// // Given -// let initialSettings = CallSettings(speakerOn: true, audioOutputOn: true) -// subject.didUpdateCallSettings(initialSettings) -// let newSettings = initialSettings // No change -// -// // When -// subject.didUpdateCallSettings(newSettings) -// -// // Then -// XCTAssertEqual(subject.activeCallSettings, initialSettings) -// XCTAssertFalse(didReceiveUpdateCallSettings) -// } - - // MARK: - audioSessionDidChangeRoute - -// func test_audioSessionDidChangeRoute_updatesRouteOnNewDeviceAvailable() { -// // Given -// let previousRoute = AVAudioSessionRouteDescription() -// let callSettings = CallSettings(speakerOn: true, audioOutputOn: true) -// subject.didUpdateCallSettings(callSettings) -// -// // When -// subject.audioSessionDidChangeRoute( -// rtcAudioSession, -// reason: .newDeviceAvailable, -// previousRoute: previousRoute -// ) -// -// // Then -// XCTAssertNotNil(updatedCallSettings) -// } - -// func test_audioSessionDidChangeRoute_respectsCallSettingsOnOldDeviceUnavailable() { -// // 
Given -// let previousRoute = AVAudioSessionRouteDescription() -// let callSettings = CallSettings(audioOutputOn: true, speakerOn: true) -// subject.didUpdateCallSettings(callSettings) -// -// // When -// subject.audioSessionDidChangeRoute( -// mockAudioSession, -// reason: .oldDeviceUnavailable, -// previousRoute: previousRoute -// ) -// -// // Then -// XCTAssertEqual(mockDelegate.updatedCallSettings?.speakerOn, callSettings.speakerOn) -// } - - // MARK: - audioSession(didChangeCanPlayOrRecord:) - -// func test_audioSession_didChangeCanPlayOrRecord_logsCorrectly() { -// // When -// subject.audioSession( -// mockAudioSession, -// didChangeCanPlayOrRecord: true -// ) -// -// // Then -// XCTAssertTrue(mockAudioSession.loggedInfo.contains("can playOrRecord:true")) -// } - - // MARK: - audioSessionDidStopPlayOrRecord - -// func test_audioSessionDidStopPlayOrRecord_logsCorrectly() { -// // When -// subject.audioSessionDidStopPlayOrRecord(mockAudioSession) -// -// // Then -// XCTAssertTrue(mockAudioSession.loggedInfo.contains("cannot playOrRecord")) -// } - - // MARK: - audioSession(didSetActive:) - -// func test_audioSession_didSetActive_appliesCorrectCallSettings() { -// // Given -// let callSettings = CallSettings(audioOutputOn: true, speakerOn: true) -// subject.didUpdateCallSettings(callSettings) -// -// // When -// subject.audioSession( -// mockAudioSession, -// didSetActive: true -// ) -// -// // Then -// XCTAssertEqual(mockDelegate.updatedCallSettings?.speakerOn, callSettings.speakerOn) -// } - - // MARK: - Private Helpers - -// func test_performAudioSessionOperation_executesOperationOnProcessingQueue() { -// // Given -// let expectation = self.expectation(description: "Operation executed") -// -// // When -// subject.performAudioSessionOperation { -// _ in -// expectation.fulfill() -// } -// -// // Then -// waitForExpectations(timeout: 1.0) -// } -} diff --git a/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift 
b/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift index 7017d2018..ebdf19670 100644 --- a/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift @@ -18,7 +18,12 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable { private lazy var callCid: String! = .unique private lazy var mockCallAuthenticator: MockCallAuthenticator! = .init() private lazy var mockWebRTCAuthenticator: MockWebRTCAuthenticator! = .init() - private lazy var rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory! = .init() + private lazy var mockPeerConnectionFactory: PeerConnectionFactory! = .build( + audioProcessingModule: Self.videoConfig.audioProcessingModule, + audioDeviceModuleSource: MockRTCAudioDeviceModule() + ) + private lazy var rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory! = + .init(peerConnectionFactory: mockPeerConnectionFactory) private lazy var mockSFUStack: MockSFUStack! = .init() private lazy var subject: WebRTCCoordinator! 
= .init( user: user, @@ -45,6 +50,7 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable { callCid = nil apiKey = nil user = nil + mockPeerConnectionFactory = nil try await super.tearDown() } diff --git a/StreamVideoTests/WebRTC/v2/WebRTCPermissionsAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCPermissionsAdapter_Tests.swift index 6098267e6..2a8b27fba 100644 --- a/StreamVideoTests/WebRTC/v2/WebRTCPermissionsAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/WebRTCPermissionsAdapter_Tests.swift @@ -74,7 +74,6 @@ final class WebRTCPermissionsAdapter_Tests: StreamVideoTestCase, @unchecked Send } func test_willSet_videoOnTrue_unknownCamera_inForeground_requestsPermission_andKeepsVideoOnWhenGranted() async { - defer { mockAppStateAdapter.dismante() } mockAppStateAdapter.makeShared() mockAppStateAdapter.stubbedState = .foreground mockPermissions.stubCameraPermission(.unknown) @@ -82,6 +81,12 @@ final class WebRTCPermissionsAdapter_Tests: StreamVideoTestCase, @unchecked Send await withTaskGroup(of: Void.self) { group in group.addTask { + await self.fulfillment { self.mockPermissions.timesCalled(.requestCameraPermission) == 1 } + self.mockPermissions.stubCameraPermission(.granted) + } + + group.addTask { + await self.wait(for: 0.5) let input = CallSettings(audioOn: false, videoOn: true) let output = await self.subject.willSet(callSettings: input) XCTAssertEqual(output.videoOn, true) @@ -89,13 +94,10 @@ final class WebRTCPermissionsAdapter_Tests: StreamVideoTestCase, @unchecked Send await self.fulfillment { self.delegate.videoOnValues.contains(true) } } - group.addTask { - await self.fulfillment { self.mockPermissions.timesCalled(.requestCameraPermission) == 1 } - self.mockPermissions.stubCameraPermission(.granted) - await self.wait(for: 0.5) - } - await group.waitForAll() } + + mockAppStateAdapter?.dismante() + mockPermissions?.dismantle() } } diff --git a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift 
b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift index 9c298113a..edea62f15 100644 --- a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift @@ -13,7 +13,12 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { private lazy var user: User! = .dummy() private lazy var apiKey: String! = .unique private lazy var callCid: String! = .unique - private lazy var rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory! = .init() + private lazy var mockPeerConnectionFactory: PeerConnectionFactory! = .build( + audioProcessingModule: Self.videoConfig.audioProcessingModule, + audioDeviceModuleSource: MockRTCAudioDeviceModule() + ) + private lazy var rtcPeerConnectionCoordinatorFactory: MockRTCPeerConnectionCoordinatorFactory! = + .init(peerConnectionFactory: mockPeerConnectionFactory) private lazy var mockPermissions: MockPermissionsStore! = .init() private lazy var mockAudioStore: MockRTCAudioStore! = .init() private lazy var subject: WebRTCStateAdapter! = .init( @@ -21,6 +26,7 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { apiKey: apiKey, callCid: callCid, videoConfig: Self.videoConfig, + peerConnectionFactory: mockPeerConnectionFactory, rtcPeerConnectionCoordinatorFactory: rtcPeerConnectionCoordinatorFactory ) @@ -32,7 +38,8 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { _ = mockPermissions } - override func tearDown() { + override func tearDown() async throws { + await subject.cleanUp() mockAudioStore.dismantle() mockPermissions.dismantle() subject = nil @@ -40,7 +47,8 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { callCid = nil apiKey = nil user = nil - super.tearDown() + mockPeerConnectionFactory = nil + try await super.tearDown() } override class func tearDown() {