Audio engine observer fixes (#571)
hiroshihorie authored Feb 1, 2025
1 parent 9e750d1 commit 20f8c3e
Showing 6 changed files with 62 additions and 35 deletions.
16 changes: 8 additions & 8 deletions Sources/LiveKit/Audio/AudioDeviceModuleDelegateAdapter.swift
@@ -40,49 +40,49 @@ class AudioDeviceModuleDelegateAdapter: NSObject, LKRTCAudioDeviceModuleDelegate

func audioDeviceModule(_: LKRTCAudioDeviceModule, didCreateEngine engine: AVAudioEngine) {
guard let audioManager else { return }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
entryPoint?.engineDidCreate(engine)
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, willEnableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
guard let audioManager else { return }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
entryPoint?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, willStartEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
guard let audioManager else { return }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
entryPoint?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, didStopEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
guard let audioManager else { return }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
entryPoint?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, didDisableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
guard let audioManager else { return }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
entryPoint?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, willReleaseEngine engine: AVAudioEngine) {
guard let audioManager else { return }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
entryPoint?.engineWillRelease(engine)
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureInputFromSource src: AVAudioNode?, toDestination dst: AVAudioNode, format: AVAudioFormat) -> Bool {
guard let audioManager else { return false }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
return entryPoint?.engineWillConnectInput(engine, src: src, dst: dst, format: format) ?? false
}

func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureOutputFromSource src: AVAudioNode, toDestination dst: AVAudioNode?, format: AVAudioFormat) -> Bool {
guard let audioManager else { return false }
- let entryPoint = audioManager._state.engineObservers.buildChain()
+ let entryPoint = audioManager.buildEngineObserverChain()
return entryPoint?.engineWillConnectOutput(engine, src: src, dst: dst, format: format) ?? false
}
}
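Each delegate callback above resolves the current head of the observer chain and fires a single hook; the hook then travels down the chain through each observer's `next`. A minimal sketch of that flow, assuming a hypothetical `PrintObserver` that is not part of the SDK or this commit:

import AVFAudio
import LiveKit

// Example (illustration only; not part of this commit).
final class PrintObserver: AudioEngineObserver, @unchecked Sendable {
    var next: (any AudioEngineObserver)?
    let label: String
    init(_ label: String) { self.label = label }

    func engineDidCreate(_ engine: AVAudioEngine) {
        print("\(label): engine created")
        next?.engineDidCreate(engine) // keep the event moving down the chain
    }
}

let first = PrintObserver("first")
first.next = PrintObserver("second")

let engine = AVAudioEngine()
// Mirrors what the adapter does with the resolved entry point:
first.engineDidCreate(engine)   // prints "first: …" then "second: …"
first.engineWillRelease(engine) // not overridden, so the default simply forwards via `next`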
47 changes: 29 additions & 18 deletions Sources/LiveKit/Audio/AudioEngineObserver.swift
@@ -18,7 +18,8 @@ import AVFAudio

/// Do not retain the engine object.
public protocol AudioEngineObserver: NextInvokable, Sendable {
- func setNext(_ handler: any AudioEngineObserver)
+ associatedtype Next = any AudioEngineObserver
+ var next: (any AudioEngineObserver)? { get set }

func engineDidCreate(_ engine: AVAudioEngine)
func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
@@ -39,25 +40,35 @@ public protocol AudioEngineObserver: NextInvokable, Sendable {

/// Default implementation to make it optional.
public extension AudioEngineObserver {
-    func engineDidCreate(_: AVAudioEngine) {}
-    func engineWillEnable(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
-    func engineWillStart(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
-    func engineDidStop(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
-    func engineDidDisable(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
-    func engineWillRelease(_: AVAudioEngine) {}
-
-    func engineWillConnectOutput(_: AVAudioEngine, src _: AVAudioNode, dst _: AVAudioNode?, format _: AVAudioFormat) -> Bool { false }
-    func engineWillConnectInput(_: AVAudioEngine, src _: AVAudioNode?, dst _: AVAudioNode, format _: AVAudioFormat) -> Bool { false }
-}
-
-extension [any AudioEngineObserver] {
-    func buildChain() -> Element? {
-        guard let first else { return nil }
-
-        for i in 0 ..< count - 1 {
-            self[i].setNext(self[i + 1])
-        }
-
-        return first
-    }
+    func engineDidCreate(_ engine: AVAudioEngine) {
+        next?.engineDidCreate(engine)
+    }
+
+    func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
+        next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+    }
+
+    func engineWillStart(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
+        next?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+    }
+
+    func engineDidStop(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
+        next?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+    }
+
+    func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
+        next?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+    }
+
+    func engineWillRelease(_ engine: AVAudioEngine) {
+        next?.engineWillRelease(engine)
+    }
+
+    func engineWillConnectOutput(_ engine: AVAudioEngine, src: AVAudioNode, dst: AVAudioNode?, format: AVAudioFormat) -> Bool {
+        next?.engineWillConnectOutput(engine, src: src, dst: dst, format: format) ?? false
+    }
+
+    func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat) -> Bool {
+        next?.engineWillConnectInput(engine, src: src, dst: dst, format: format) ?? false
+    }
}
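With the defaults above forwarding to `next`, a custom observer only declares the `next` property and overrides the hooks it needs; any hook it does override takes over the forwarding and should call `next` itself. A sketch under that assumption, using a hypothetical `TapObserver`:

import AVFAudio
import LiveKit

// Example (illustration only; not part of this commit).
final class TapObserver: AudioEngineObserver, @unchecked Sendable {
    var next: (any AudioEngineObserver)?
    private let mixer = AVAudioMixerNode()

    func engineDidCreate(_ engine: AVAudioEngine) {
        engine.attach(mixer)
        // Overriding replaces the forwarding default, so call `next` explicitly
        // if downstream observers should still see this event.
        next?.engineDidCreate(engine)
    }

    func engineWillRelease(_ engine: AVAudioEngine) {
        engine.detach(mixer)
        next?.engineWillRelease(engine)
    }
}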
13 changes: 7 additions & 6 deletions Sources/LiveKit/Audio/DefaultAudioSessionObserver.swift
@@ -24,7 +24,7 @@ internal import LiveKitWebRTC
@_implementationOnly import LiveKitWebRTC
#endif

- public final class DefaultAudioSessionObserver: AudioEngineObserver, Loggable {
+ public class DefaultAudioSessionObserver: AudioEngineObserver, Loggable, @unchecked Sendable {
struct State {
var isSessionActive = false
var next: (any AudioEngineObserver)?
@@ -36,7 +36,12 @@ public final class DefaultAudioSessionObserver: AudioEngineObserver, Loggable {

let _state = StateSync(State())

- init() {
+ public var next: (any AudioEngineObserver)? {
+     get { _state.next }
+     set { _state.mutate { $0.next = newValue } }
+ }
+
+ public init() {
// Backward compatibility with `customConfigureAudioSessionFunc`.
_state.onDidMutate = { new_, old_ in
if let config_func = AudioManager.shared._state.customConfigureFunc,
@@ -51,10 +56,6 @@ public final class DefaultAudioSessionObserver: AudioEngineObserver, Loggable {
}
}

- public func setNext(_ nextHandler: any AudioEngineObserver) {
-     _state.mutate { $0.next = nextHandler }
- }

public func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
if AudioManager.shared._state.customConfigureFunc == nil {
log("Configuring audio session...")
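The `next` property above is backed by `_state` (a `StateSync`), which is what allows dropping `final` while claiming `@unchecked Sendable`. A rough sketch of the same idea using a plain lock; `LockedBox` and `SessionGate` are hypothetical names, not SDK types:

import AVFAudio
import Foundation
import LiveKit

// Example (illustration only; not part of this commit).
final class LockedBox<Value>: @unchecked Sendable {
    private let lock = NSLock()
    private var value: Value
    init(_ value: Value) { self.value = value }
    func withLock<R>(_ body: (inout Value) -> R) -> R {
        lock.lock()
        defer { lock.unlock() }
        return body(&value)
    }
}

final class SessionGate: AudioEngineObserver, @unchecked Sendable {
    private let _next = LockedBox<(any AudioEngineObserver)?>(nil)

    // All reads and writes of `next` go through the lock, mirroring the
    // StateSync-backed property above.
    var next: (any AudioEngineObserver)? {
        get { _next.withLock { $0 } }
        set { _next.withLock { $0 = newValue } }
    }

    func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        // Configure the audio session here before the engine is enabled,
        // then hand the event to the rest of the chain.
        next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
    }
}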
2 changes: 1 addition & 1 deletion Sources/LiveKit/Protocols/NextInvokable.swift
@@ -18,5 +18,5 @@ import Foundation

public protocol NextInvokable {
associatedtype Next
- func setNext(_ handler: Next)
+ var next: Next? { get set }
}
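A conforming type now just stores the `next` link, and the associated `Next` type is inferred from that property. A tiny illustrative sketch (the `Step` type is hypothetical):

import LiveKit

// Example (illustration only; not part of this commit).
final class Step: NextInvokable {
    var next: Step? // `Next` is inferred as `Step`

    func run() {
        // …do this step's work, then hand off to the next one.
        next?.run()
    }
}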
13 changes: 13 additions & 0 deletions Sources/LiveKit/Track/AudioManager.swift
@@ -341,3 +341,16 @@ public extension AudioManager {
renderPreProcessingDelegateAdapter.remove(delegate: delegate)
}
}

+ extension AudioManager {
+     func buildEngineObserverChain() -> (any AudioEngineObserver)? {
+         var objects = _state.engineObservers
+         guard !objects.isEmpty else { return nil }
+
+         for i in 0 ..< objects.count - 1 {
+             objects[i].next = objects[i + 1]
+         }
+
+         return objects.first
+     }
+ }
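The helper links observers in array order, so the first element becomes the entry point that receives every hook before forwarding it along. A standalone sketch of the same linking logic and how a chain built this way is invoked; the free function `link` is illustrative only, and `TapObserver` is the hypothetical observer from the earlier sketch:

import AVFAudio
import LiveKit

// Example (illustration only; not part of this commit).
func link(_ observers: [any AudioEngineObserver]) -> (any AudioEngineObserver)? {
    guard !observers.isEmpty else { return nil }
    var observers = observers
    // Same idea as buildEngineObserverChain(): wire each element to the one after it.
    for i in 0 ..< observers.count - 1 {
        observers[i].next = observers[i + 1]
    }
    return observers.first
}

// Usage: DefaultAudioSessionObserver sees each hook first, then forwards it
// to whatever follows it in the array (here the hypothetical TapObserver).
let entryPoint = link([DefaultAudioSessionObserver(), TapObserver()])
entryPoint?.engineDidCreate(AVAudioEngine())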
6 changes: 4 additions & 2 deletions Tests/LiveKitTests/AudioEngineTests.swift
@@ -282,9 +282,10 @@ class AudioEngineTests: XCTestCase {
}

final class SineWaveNodeHook: AudioEngineObserver {
+ var next: (any LiveKit.AudioEngineObserver)?
+
[CI warning — GitHub Actions / test (macos-14, 15.4, macOS), Tests/LiveKitTests/AudioEngineTests.swift line 285: stored property 'next' of 'Sendable'-conforming class 'SineWaveNodeHook' is mutable]
let sineWaveNode = SineWaveSourceNode()

- func setNext(_: any LiveKit.AudioEngineObserver) {}
func engineDidCreate(_ engine: AVAudioEngine) {
engine.attach(sineWaveNode)
}
@@ -301,6 +302,8 @@ final class SineWaveNodeHook: AudioEngineObserver {
}

final class PlayerNodeHook: AudioEngineObserver {
+ var next: (any LiveKit.AudioEngineObserver)?
+
[CI warning — GitHub Actions / test (macos-14, 15.4, macOS), Tests/LiveKitTests/AudioEngineTests.swift line 305: stored property 'next' of 'Sendable'-conforming class 'PlayerNodeHook' is mutable]
public let playerNode = AVAudioPlayerNode()
public let playerMixerNode = AVAudioMixerNode()
public let playerNodeFormat: AVAudioFormat
@@ -309,7 +312,6 @@ final class PlayerNodeHook: AudioEngineObserver {
self.playerNodeFormat = playerNodeFormat
}

- func setNext(_: any LiveKit.AudioEngineObserver) {}
public func engineDidCreate(_ engine: AVAudioEngine) {
engine.attach(playerNode)
engine.attach(playerMixerNode)
