Fail to publish instead of crash when mic not available #615

Merged · 18 commits · Mar 7, 2025
Changes from all commits
1 change: 1 addition & 0 deletions .nanpa/audioengine-avoid-crash.kdl
@@ -0,0 +1 @@
+patch type="fix" "Avoid audio engine crash"
2 changes: 1 addition & 1 deletion LiveKitClient.podspec
@@ -16,7 +16,7 @@ Pod::Spec.new do |spec|

spec.source_files = "Sources/**/*"

-spec.dependency("LiveKitWebRTC", "= 125.6422.19")
+spec.dependency("LiveKitWebRTC", "= 125.6422.22")
spec.dependency("SwiftProtobuf")
spec.dependency("Logging")
spec.dependency("DequeModule", "= 1.1.4")
2 changes: 1 addition & 1 deletion Package.swift
@@ -18,7 +18,7 @@ let package = Package(
],
dependencies: [
// LK-Prefixed Dynamic WebRTC XCFramework
-.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.19"),
+.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.22"),
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
.package(url: "https://github.com/apple/swift-collections.git", from: "1.1.0"),
2 changes: 1 addition & 1 deletion Package@swift-5.9.swift
@@ -20,7 +20,7 @@ let package = Package(
],
dependencies: [
// LK-Prefixed Dynamic WebRTC XCFramework
-.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.19"),
+.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.22"),
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
.package(url: "https://github.com/apple/swift-collections.git", from: "1.1.0"),
48 changes: 24 additions & 24 deletions Sources/LiveKit/Audio/AudioDeviceModuleDelegateAdapter.swift
@@ -38,51 +38,51 @@ class AudioDeviceModuleDelegateAdapter: NSObject, LKRTCAudioDeviceModuleDelegate

// Engine events

-func audioDeviceModule(_: LKRTCAudioDeviceModule, didCreateEngine engine: AVAudioEngine) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, didCreateEngine engine: AVAudioEngine) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineDidCreate(engine)
+return entryPoint?.engineDidCreate(engine) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, willEnableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, willEnableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+return entryPoint?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, willStartEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, willStartEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+return entryPoint?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, didStopEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, didStopEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+return entryPoint?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, didDisableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, didDisableEngine engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+return entryPoint?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, willReleaseEngine engine: AVAudioEngine) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, willReleaseEngine engine: AVAudioEngine) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineWillRelease(engine)
+return entryPoint?.engineWillRelease(engine) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureInputFromSource src: AVAudioNode?, toDestination dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureInputFromSource src: AVAudioNode?, toDestination dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context)
+return entryPoint?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context) ?? 0
}

-func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureOutputFromSource src: AVAudioNode, toDestination dst: AVAudioNode?, format: AVAudioFormat, context: [AnyHashable: Any]) {
-guard let audioManager else { return }
+func audioDeviceModule(_: LKRTCAudioDeviceModule, engine: AVAudioEngine, configureOutputFromSource src: AVAudioNode, toDestination dst: AVAudioNode?, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int {
+guard let audioManager else { return 0 }
let entryPoint = audioManager.buildEngineObserverChain()
-entryPoint?.engineWillConnectOutput(engine, src: src, dst: dst, format: format, context: context)
+return entryPoint?.engineWillConnectOutput(engine, src: src, dst: dst, format: format, context: context) ?? 0
}
}
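
Note: each delegate callback above now returns an `Int` status rather than `Void` — `0` means the observer chain handled the event, and any non-zero value is an error code the audio device module can react to instead of crashing later. The sketch below illustrates only that propagation contract; the `EngineEventObserver` protocol and both observers are simplified stand-ins written for this example, not SDK types.

```swift
import AVFoundation

// Simplified stand-in for the chained-observer pattern used by the adapter above.
protocol EngineEventObserver {
    var next: EngineEventObserver? { get set }
    // Return 0 on success; any non-zero value is treated as an error code.
    func engineWillEnable(_ engine: AVAudioEngine) -> Int
}

struct LoggingObserver: EngineEventObserver {
    var next: EngineEventObserver?
    func engineWillEnable(_ engine: AVAudioEngine) -> Int {
        print("engine will enable")
        // Forward to the next link; an empty tail counts as success (0).
        return next?.engineWillEnable(engine) ?? 0
    }
}

struct RejectingObserver: EngineEventObserver {
    var next: EngineEventObserver?
    func engineWillEnable(_ engine: AVAudioEngine) -> Int {
        // Returning a non-zero code lets the caller abort cleanly.
        -4100
    }
}

// The non-zero code from the tail propagates back through the chain.
var chain = LoggingObserver()
chain.next = RejectingObserver()
print(chain.engineWillEnable(AVAudioEngine())) // prints -4100; the caller can bail out instead of crashing
```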
48 changes: 24 additions & 24 deletions Sources/LiveKit/Audio/AudioEngineObserver.swift
@@ -29,54 +29,54 @@ public protocol AudioEngineObserver: NextInvokable, Sendable {
associatedtype Next = any AudioEngineObserver
var next: (any AudioEngineObserver)? { get set }

-func engineDidCreate(_ engine: AVAudioEngine)
-func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
-func engineWillStart(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
-func engineDidStop(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
-func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
-func engineWillRelease(_ engine: AVAudioEngine)
+func engineDidCreate(_ engine: AVAudioEngine) -> Int
+func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int
+func engineWillStart(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int
+func engineDidStop(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int
+func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int
+func engineWillRelease(_ engine: AVAudioEngine) -> Int

/// Provide custom implementation for internal AVAudioEngine's output configuration.
/// Buffers flow from `src` to `dst`. Preferred format to connect node is provided as `format`.
/// Return true if custom implementation is provided, otherwise default implementation will be used.
-func engineWillConnectOutput(_ engine: AVAudioEngine, src: AVAudioNode, dst: AVAudioNode?, format: AVAudioFormat, context: [AnyHashable: Any])
+func engineWillConnectOutput(_ engine: AVAudioEngine, src: AVAudioNode, dst: AVAudioNode?, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int
/// Provide custom implementation for internal AVAudioEngine's input configuration.
/// Buffers flow from `src` to `dst`. Preferred format to connect node is provided as `format`.
/// Return true if custom implementation is provided, otherwise default implementation will be used.
-func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any])
+func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int
}

/// Default implementation to make it optional.
public extension AudioEngineObserver {
-func engineDidCreate(_ engine: AVAudioEngine) {
-next?.engineDidCreate(engine)
+func engineDidCreate(_ engine: AVAudioEngine) -> Int {
+next?.engineDidCreate(engine) ?? 0
}

-func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func engineWillStart(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-next?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+func engineWillStart(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+next?.engineWillStart(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func engineDidStop(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-next?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+func engineDidStop(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+next?.engineDidStop(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
-next?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
+next?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-func engineWillRelease(_ engine: AVAudioEngine) {
-next?.engineWillRelease(engine)
+func engineWillRelease(_ engine: AVAudioEngine) -> Int {
+next?.engineWillRelease(engine) ?? 0
}

-func engineWillConnectOutput(_ engine: AVAudioEngine, src: AVAudioNode, dst: AVAudioNode?, format: AVAudioFormat, context: [AnyHashable: Any]) {
-next?.engineWillConnectOutput(engine, src: src, dst: dst, format: format, context: context)
+func engineWillConnectOutput(_ engine: AVAudioEngine, src: AVAudioNode, dst: AVAudioNode?, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int {
+next?.engineWillConnectOutput(engine, src: src, dst: dst, format: format, context: context) ?? 0
}

-func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) {
-next?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context)
+func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int {
+next?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context) ?? 0
}
}
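
Note: because the protocol requirements now return `Int` (with defaults that simply forward to `next`), a custom observer can veto engine startup by returning a non-zero code. Below is a minimal sketch under the new signatures — the `-1` code and the `hasUsableInput` closure are illustrative, it assumes the default implementations and a `setNext` requirement like the one used by the built-in observers cover the remaining protocol requirements, and how the observer gets registered depends on your SDK version (e.g. something like `AudioManager.shared.set(engineObservers:)`, if available).

```swift
import AVFoundation
import LiveKit

/// Sketch of a custom observer under the new Int-returning protocol.
/// Returning 0 keeps the default behavior; a non-zero value reports failure.
final class PreflightObserver: AudioEngineObserver, @unchecked Sendable {
    var next: (any AudioEngineObserver)?

    /// Hypothetical app-provided check, e.g. "is any input device present?".
    private let hasUsableInput: () -> Bool

    init(hasUsableInput: @escaping () -> Bool) {
        self.hasUsableInput = hasUsableInput
    }

    func setNext(_ handler: any AudioEngineObserver) {
        next = handler
    }

    func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
        if isRecordingEnabled, !hasUsableInput() {
            return -1 // Illustrative non-zero code: refuse to enable recording instead of crashing later
        }
        return next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
    }
}
```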
26 changes: 18 additions & 8 deletions Sources/LiveKit/Audio/DefaultAudioSessionObserver.swift
@@ -14,6 +14,8 @@
* limitations under the License.
*/

+let kFailedToConfigureAudioSessionErrorCode = -4100

#if os(iOS) || os(visionOS) || os(tvOS)

import AVFoundation
@@ -56,26 +58,32 @@ public class DefaultAudioSessionObserver: AudioEngineObserver, Loggable, @unchec
}
}

-public func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
+public func engineWillEnable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
if AudioManager.shared._state.customConfigureFunc == nil {
log("Configuring audio session...")
let session = LKRTCAudioSession.sharedInstance()
session.lockForConfiguration()
defer { session.unlockForConfiguration() }

-let config: AudioSessionConfiguration = isRecordingEnabled ? .playAndRecordSpeaker : .playback
-do {
-if _state.isSessionActive {
+if _state.isSessionActive {
+do {
log("AudioSession deactivating due to category switch")
try session.setActive(false) // Deactivate first
_state.mutate { $0.isSessionActive = false }
+} catch {
+log("Failed to deactivate AudioSession with error: \(error)", .error)
+}
}

+let config: AudioSessionConfiguration = isRecordingEnabled ? .playAndRecordSpeaker : .playback
+do {
log("AudioSession activating category to: \(config.category)")
try session.setConfiguration(config.toRTCType(), active: true)
_state.mutate { $0.isSessionActive = true }
} catch {
log("AudioSession failed to configure with error: \(error)", .error)
+// Pass error code to audio engine
+return kFailedToConfigureAudioSessionErrorCode
}

log("AudioSession activationCount: \(session.activationCount), webRTCSessionCount: \(session.webRTCSessionCount)")
@@ -87,12 +95,12 @@
}

// Call next last
-_state.next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+return _state.next?.engineWillEnable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled) ?? 0
}

-public func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) {
+public func engineDidDisable(_ engine: AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool) -> Int {
// Call next first
-_state.next?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)
+let nextResult = _state.next?.engineDidDisable(engine, isPlayoutEnabled: isPlayoutEnabled, isRecordingEnabled: isRecordingEnabled)

_state.mutate {
$0.isPlayoutEnabled = isPlayoutEnabled
@@ -111,7 +119,7 @@ public class DefaultAudioSessionObserver: AudioEngineObserver, Loggable, @unchec
log("AudioSession switching category to: \(config.category)")
try session.setConfiguration(config.toRTCType())
}
-if !isPlayoutEnabled, !isRecordingEnabled {
+if !isPlayoutEnabled, !isRecordingEnabled, _state.isSessionActive {
log("AudioSession deactivating")
try session.setActive(false)
_state.mutate { $0.isSessionActive = false }
@@ -122,6 +130,8 @@ public class DefaultAudioSessionObserver: AudioEngineObserver, Loggable, @unchec

log("AudioSession activationCount: \(session.activationCount), webRTCSessionCount: \(session.webRTCSessionCount)")
}

+return nextResult ?? 0
}
}
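
Note: at the call site, the practical effect of this change (together with the WebRTC framework bump above) is that a failed audio-session configuration — for example when no usable microphone is available — should surface as a thrown error from the publish path instead of a crash. A hedged sketch of handling it; `setMicrophone(enabled:)` is assumed to be the publish entry point here, and the exact error surfaced may vary by SDK version.

```swift
import LiveKit

func publishMicrophone(in room: Room) async {
    do {
        // Attempt to publish the microphone track.
        try await room.localParticipant.setMicrophone(enabled: true)
    } catch {
        // With this change the failure is reported here (e.g. an audio engine /
        // audio session error) rather than terminating the process.
        print("Failed to publish microphone: \(error)")
    }
}
```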

17 changes: 9 additions & 8 deletions Sources/LiveKit/Audio/DefaultMixerAudioObserver.swift
@@ -75,7 +75,7 @@ public final class DefaultMixerAudioObserver: AudioEngineObserver, Loggable {
next = handler
}

-public func engineDidCreate(_ engine: AVAudioEngine) {
+public func engineDidCreate(_ engine: AVAudioEngine) -> Int {
let (appNode, appMixerNode, micNode, micMixerNode) = _state.read {
($0.appNode, $0.appMixerNode, $0.micNode, $0.micMixerNode)
}
@@ -86,12 +86,12 @@
engine.attach(micMixerNode)

// Invoke next
-next?.engineDidCreate(engine)
+return next?.engineDidCreate(engine) ?? 0
}

-public func engineWillRelease(_ engine: AVAudioEngine) {
+public func engineWillRelease(_ engine: AVAudioEngine) -> Int {
// Invoke next
-next?.engineWillRelease(engine)
+let nextResult = next?.engineWillRelease(engine)

let (appNode, appMixerNode, micNode, micMixerNode) = _state.read {
($0.appNode, $0.appMixerNode, $0.micNode, $0.micMixerNode)
@@ -101,14 +101,15 @@
engine.detach(appMixerNode)
engine.detach(micNode)
engine.detach(micMixerNode)

+return nextResult ?? 0
}

-public func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) {
+public func engineWillConnectInput(_ engine: AVAudioEngine, src: AVAudioNode?, dst: AVAudioNode, format: AVAudioFormat, context: [AnyHashable: Any]) -> Int {
// Get the main mixer
guard let mainMixerNode = context[kRTCAudioEngineInputMixerNodeKey] as? AVAudioMixerNode else {
// If failed to get main mixer, call next and return.
-next?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context)
-return
+return next?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context) ?? 0
}

// Read nodes from state lock.
@@ -140,7 +141,7 @@
}

// Invoke next
-next?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context)
+return next?.engineWillConnectInput(engine, src: src, dst: dst, format: format, context: context) ?? 0
}
}

8 changes: 8 additions & 0 deletions Sources/LiveKit/Errors.swift
@@ -53,6 +53,10 @@ public enum LiveKitErrorType: Int, Sendable {
case unableToResolveFPSRange = 703
case capturerDimensionsNotResolved = 704
case deviceAccessDenied = 705

+// Audio
+case audioEngine = 801
+case audioSession = 802
}

extension LiveKitErrorType: CustomStringConvertible {
@@ -96,6 +100,10 @@ extension LiveKitErrorType: CustomStringConvertible {
return "Unable to resolve FPS range"
case .capturerDimensionsNotResolved:
return "Capturer dimensions not resolved"
+case .audioEngine:
+return "Audio Engine Error"
+case .audioSession:
+return "Audio Session Error"
default: return "Unknown"
}
}
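
Note: the two new error types give callers something concrete to match on when audio setup fails. A sketch of distinguishing them, assuming `LiveKitError` exposes its `type` property as used elsewhere in the SDK (treat the exact pattern as illustrative):

```swift
import LiveKit

func describeAudioFailure(_ error: Error) -> String {
    guard let lkError = error as? LiveKitError else { return "Other error: \(error)" }
    switch lkError.type {
    case .audioEngine: return "Audio engine failed (e.g. microphone unavailable)"
    case .audioSession: return "Audio session could not be configured"
    default: return "LiveKit error: \(lkError.type)"
    }
}
```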