Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main' into broadcast-ipc
Browse files (browse the repository at this point in the history)
  • Loading branch information
ladvoc committed Feb 3, 2025
2 parents 39224a3 + 9e750d1 commit a779853
Show file tree
Hide file tree
Showing 26 changed files with 1,118 additions and 209 deletions.
4 changes: 2 additions & 2 deletions LiveKitClient.podspec
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = "LiveKitClient"
spec.version = "2.0.20"
spec.version = "2.1.0"
spec.summary = "LiveKit Swift Client SDK. Easily build live audio or video experiences into your mobile app, game or website."
spec.homepage = "https://github.com/livekit/client-sdk-swift"
spec.license = {:type => "Apache 2.0", :file => "LICENSE"}
Expand All @@ -10,7 +10,7 @@ Pod::Spec.new do |spec|
spec.osx.deployment_target = "10.15"

spec.swift_versions = ["5.7"]
spec.source = {:git => "https://github.com/livekit/client-sdk-swift.git", :tag => "2.0.20"}
spec.source = {:git => "https://github.com/livekit/client-sdk-swift.git", :tag => "2.1.0"}

spec.source_files = "Sources/**/*"

Expand Down
2 changes: 1 addition & 1 deletion Package.swift
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ let package = Package(
],
dependencies: [
// LK-Prefixed Dynamic WebRTC XCFramework
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11"),
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.15"),
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
// Only used for DocC generation
Expand Down
2 changes: 1 addition & 1 deletion Package@swift-5.9.swift
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ let package = Package(
],
dependencies: [
// LK-Prefixed Dynamic WebRTC XCFramework
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11"),
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.15"),
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
// Only used for DocC generation
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ Add the dependency and also to your target
let package = Package(
...
dependencies: [
.package(name: "LiveKit", url: "https://github.com/livekit/client-sdk-swift.git", .upToNextMajor("2.0.20")),
.package(name: "LiveKit", url: "https://github.com/livekit/client-sdk-swift.git", .upToNextMajor("2.1.0")),
],
targets: [
.target(
Expand Down
88 changes: 88 additions & 0 deletions Sources/LiveKit/Audio/AudioDeviceModuleDelegateAdapter.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
/*
* Copyright 2025 LiveKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import Foundation

#if swift(>=5.9)
internal import LiveKitWebRTC
#else
@_implementationOnly import LiveKitWebRTC
#endif

// Invoked on WebRTC's worker thread, do not block.
class AudioDeviceModuleDelegateAdapter: NSObject, LKRTCAudioDeviceModuleDelegate {
    weak var audioManager: AudioManager?

    /// Head of the manager's engine-observer chain, or nil when the manager
    /// has been deallocated or no observers are registered.
    private var observerChain: (any AudioEngineObserver)? {
        audioManager?._state.engineObservers.buildChain()
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, didReceiveSpeechActivityEvent speechActivityEvent: RTCSpeechActivityEvent) {
        guard let manager = audioManager else { return }
        manager._state.onMutedSpeechActivity?(manager, speechActivityEvent.toLKType())
    }

    func audioDeviceModuleDidUpdateDevices(_: LKRTCAudioDeviceModule) {
        guard let manager = audioManager else { return }
        manager._state.onDevicesDidUpdate?(manager)
    }

    // MARK: - Engine events

    func audioDeviceModule(_: LKRTCAudioDeviceModule, didCreateEngine engine: AVAudioEngine) {
        observerChain?.engineDidCreate(engine)
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, willEnableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        observerChain?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, willStartEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        observerChain?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, didStopEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        observerChain?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, didDisableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        observerChain?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, willReleaseEngine engine: AVAudioEngine) {
        observerChain?.engineWillRelease(engine)
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureInputFromSource src: AVAudioNode?, toDestination dst: AVAudioNode, format: AVAudioFormat) -> Bool {
        // False (also when the manager is gone) selects the default input configuration.
        observerChain?.engineWillConnectInput(engine, src: src, dst: dst, format: format) ?? false
    }

    func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureOutputFromSource src: AVAudioNode, toDestination dst: AVAudioNode?, format: AVAudioFormat) -> Bool {
        // False (also when the manager is gone) selects the default output configuration.
        observerChain?.engineWillConnectOutput(engine, src: src, dst: dst, format: format) ?? false
    }
}
63 changes: 63 additions & 0 deletions Sources/LiveKit/Audio/AudioEngineObserver.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
/*
* Copyright 2025 LiveKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import AVFAudio

/// Observer hooks for the lifecycle of the SDK's internal AVAudioEngine.
/// Observers are linked into a chain via ``setNext(_:)`` (see `buildChain()`),
/// so implementations are expected to forward each event to the next handler.
/// Do not retain the engine object.
public protocol AudioEngineObserver: NextInvokable, Sendable {
    /// Installs the next observer in the chain.
    func setNext(_ handler: any AudioEngineObserver)

    func engineDidCreate(_ engine: AVAudioEngine)
    func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
    func engineWillStart(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
    func engineDidStop(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
    func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
    func engineWillRelease(_ engine: AVAudioEngine)

    /// Provide custom implementation for internal AVAudioEngine's output configuration.
    /// Buffers flow from `src` to `dst`. Preferred format to connect node is provided as `format`.
    /// Return true if custom implementation is provided, otherwise default implementation will be used.
    func engineWillConnectOutput(_ engine: AVAudioEngine, src: AVAudioNode, dst: AVAudioNode?, format: AVAudioFormat) -> Bool
    /// Provide custom implementation for internal AVAudioEngine's input configuration.
    /// Buffers flow from `src` to `dst`. Preferred format to connect node is provided as `format`.
    /// Return true if custom implementation is provided, otherwise default implementation will be used.
    func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat) -> Bool
}

/// Default (no-op) implementations so conforming types only need to override
/// the events they care about.
public extension AudioEngineObserver {
    func engineDidCreate(_: AVAudioEngine) {}
    func engineWillEnable(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
    func engineWillStart(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
    func engineDidStop(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
    func engineDidDisable(_: AVAudioEngine, isPlayoutEnabled _: Bool, isRecordingEnabled _: Bool) {}
    func engineWillRelease(_: AVAudioEngine) {}

    // Returning false requests the default input/output connection behavior.
    func engineWillConnectOutput(_: AVAudioEngine, src _: AVAudioNode, dst _: AVAudioNode?, format _: AVAudioFormat) -> Bool { false }
    func engineWillConnectInput(_: AVAudioEngine, src _: AVAudioNode?, dst _: AVAudioNode, format _: AVAudioFormat) -> Bool { false }
}

extension [any AudioEngineObserver] {
    /// Links each observer to its successor via `setNext(_:)` and returns the
    /// head of the resulting chain, or nil when the array is empty.
    func buildChain() -> Element? {
        // Pairwise linking; zip is empty for 0- and 1-element arrays,
        // so no explicit bounds handling is needed.
        for (current, successor) in zip(self, dropFirst()) {
            current.setNext(successor)
        }
        return first
    }
}
127 changes: 127 additions & 0 deletions Sources/LiveKit/Audio/DefaultAudioSessionObserver.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@
/*
* Copyright 2025 LiveKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#if os(iOS) || os(visionOS) || os(tvOS)

import AVFoundation

#if swift(>=5.9)
internal import LiveKitWebRTC
#else
@_implementationOnly import LiveKitWebRTC
#endif

/// Default engine observer that configures and activates/deactivates the shared
/// `LKRTCAudioSession` around engine enable/disable, unless the user has installed
/// a legacy `customConfigureAudioSessionFunc`, in which case that function is
/// invoked instead (via the `_state.onDidMutate` hook below).
public final class DefaultAudioSessionObserver: AudioEngineObserver, Loggable {
    struct State {
        // True while this observer holds the audio session active.
        var isSessionActive = false
        // Next observer in the chain; called last on enable, first on disable.
        var next: (any AudioEngineObserver)?

        // Used for backward compatibility with `customConfigureAudioSessionFunc`.
        var isPlayoutEnabled: Bool = false
        var isRecordingEnabled: Bool = false
    }

    let _state = StateSync(State())

    init() {
        // Backward compatibility with `customConfigureAudioSessionFunc`.
        // When either flag flips, synthesize the legacy track-count-based
        // AudioManager.State values and hand them to the custom function.
        _state.onDidMutate = { new_, old_ in
            if let config_func = AudioManager.shared._state.customConfigureFunc,
               new_.isPlayoutEnabled != old_.isPlayoutEnabled ||
               new_.isRecordingEnabled != old_.isRecordingEnabled
            {
                // Simulate state and invoke custom config func.
                let old_state = AudioManager.State(localTracksCount: old_.isRecordingEnabled ? 1 : 0, remoteTracksCount: old_.isPlayoutEnabled ? 1 : 0)
                let new_state = AudioManager.State(localTracksCount: new_.isRecordingEnabled ? 1 : 0, remoteTracksCount: new_.isPlayoutEnabled ? 1 : 0)
                config_func(new_state, old_state)
            }
        }
    }

    public func setNext(_ nextHandler: any AudioEngineObserver) {
        _state.mutate { $0.next = nextHandler }
    }

    public func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        // Only manage the session ourselves when no legacy custom func is installed.
        if AudioManager.shared._state.customConfigureFunc == nil {
            log("Configuring audio session...")
            let session = LKRTCAudioSession.sharedInstance()
            session.lockForConfiguration()
            defer { session.unlockForConfiguration() }

            // Recording needs play-and-record; otherwise playback-only.
            let config: AudioSessionConfiguration = isRecordingEnabled ? .playAndRecordSpeaker : .playback
            do {
                if _state.isSessionActive {
                    log("AudioSession deactivating due to category switch")
                    try session.setActive(false) // Deactivate first
                    _state.mutate { $0.isSessionActive = false }
                }

                log("AudioSession activating category to: \(config.category)")
                try session.setConfiguration(config.toRTCType(), active: true)
                _state.mutate { $0.isSessionActive = true }
            } catch {
                log("AudioSession failed to configure with error: \(error)", .error)
            }

            log("AudioSession activationCount: \(session.activationCount), webRTCSessionCount: \(session.webRTCSessionCount)")
        }

        // Record the flags; this mutation may trigger the legacy custom func (see init).
        _state.mutate {
            $0.isPlayoutEnabled = isPlayoutEnabled
            $0.isRecordingEnabled = isRecordingEnabled
        }

        // Call next last
        _state.next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
    }

    public func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
        // Call next first
        _state.next?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)

        // Record the flags; this mutation may trigger the legacy custom func (see init).
        _state.mutate {
            $0.isPlayoutEnabled = isPlayoutEnabled
            $0.isRecordingEnabled = isRecordingEnabled
        }

        if AudioManager.shared._state.customConfigureFunc == nil {
            log("Configuring audio session...")
            let session = LKRTCAudioSession.sharedInstance()
            session.lockForConfiguration()
            defer { session.unlockForConfiguration() }

            do {
                if isPlayoutEnabled, !isRecordingEnabled {
                    // Recording stopped but playout continues: downgrade to playback-only.
                    let config: AudioSessionConfiguration = .playback
                    log("AudioSession switching category to: \(config.category)")
                    try session.setConfiguration(config.toRTCType())
                }
                if !isPlayoutEnabled, !isRecordingEnabled {
                    // Nothing left running: release the session entirely.
                    log("AudioSession deactivating")
                    try session.setActive(false)
                    _state.mutate { $0.isSessionActive = false }
                }
            } catch {
                log("AudioSession failed to configure with error: \(error)", .error)
            }

            log("AudioSession activationCount: \(session.activationCount), webRTCSessionCount: \(session.webRTCSessionCount)")
        }
    }
}

#endif
16 changes: 1 addition & 15 deletions Sources/LiveKit/Core/RTC.swift
Original file line number Diff line number Diff line change
Expand Up @@ -50,19 +50,6 @@ private class VideoEncoderFactorySimulcast: LKRTCVideoEncoderFactorySimulcast {
}

class RTC {
private static var _bypassVoiceProcessing: Bool = false
private static var _peerConnectionFactoryInitialized = false

static var bypassVoiceProcessing: Bool {
get { _bypassVoiceProcessing }
set {
if _peerConnectionFactoryInitialized {
logger.log("Warning: Setting bypassVoiceProcessing after PeerConnectionFactory initialization has no effect. Set it at application launch.", .warning, type: Room.self)
}
_bypassVoiceProcessing = newValue
}
}

static let h264BaselineLevel5CodecInfo: LKRTCVideoCodecInfo = {
// this should never happen
guard let profileLevelId = LKRTCH264ProfileLevelId(profile: .constrainedBaseline, level: .level5) else {
Expand Down Expand Up @@ -100,8 +87,7 @@ class RTC {

logger.log("Initializing PeerConnectionFactory...", type: Room.self)

_peerConnectionFactoryInitialized = true
return LKRTCPeerConnectionFactory(bypassVoiceProcessing: bypassVoiceProcessing,
return LKRTCPeerConnectionFactory(bypassVoiceProcessing: false,
encoderFactory: encoderFactory,
decoderFactory: decoderFactory,
audioProcessingModule: audioProcessingModule)
Expand Down
7 changes: 3 additions & 4 deletions Sources/LiveKit/Core/Room.swift
Original file line number Diff line number Diff line change
Expand Up @@ -518,12 +518,11 @@ extension Room: AppStateDelegate {

public extension Room {
    /// Set this to true to bypass initialization of voice processing.
    /// The most reliable place to set this is in your application's initialization process.
    /// Deprecated: forwards to `AudioManager.shared.isVoiceProcessingBypassed`,
    /// which replaces this property.
    @available(*, deprecated, renamed: "AudioManager.shared.isVoiceProcessingBypassed")
    @objc
    static var bypassVoiceProcessing: Bool {
        get { AudioManager.shared.isVoiceProcessingBypassed }
        set { AudioManager.shared.isVoiceProcessingBypassed = newValue }
    }
}

Expand Down
11 changes: 11 additions & 0 deletions Sources/LiveKit/Extensions/CustomStringConvertible.swift
Original file line number Diff line number Diff line change
Expand Up @@ -178,3 +178,14 @@ extension AVCaptureDevice.Format {
return "Format(\(values.joined(separator: ", ")))"
}
}

extension LKRTCAudioProcessingConfig {
    /// Human-readable summary of the audio-processing flags, for logging.
    func toDebugString() -> String {
        let fields = [
            "isEchoCancellationEnabled: \(isEchoCancellationEnabled)",
            "isNoiseSuppressionEnabled: \(isNoiseSuppressionEnabled)",
            "isAutoGainControl1Enabled: \(isAutoGainControl1Enabled)",
            "isHighpassFilterEnabled: \(isHighpassFilterEnabled)",
        ]
        return "RTCAudioProcessingConfig(" + fields.joined(separator: ", ") + ")"
    }
}
2 changes: 1 addition & 1 deletion Sources/LiveKit/LiveKit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ let logger = Logger(label: "LiveKitSDK")
@objc
public class LiveKitSDK: NSObject {
@objc(sdkVersion)
public static let version = "2.0.20"
public static let version = "2.1.0"

@objc
public static func setLoggerStandardOutput() {
Expand Down
Loading

0 comments on commit a779853

Please sign in to comment.