diff --git a/.changes/audio-processing-lifecycle b/.changes/audio-processing-lifecycle
new file mode 100644
index 000000000..a738535c6
--- /dev/null
+++ b/.changes/audio-processing-lifecycle
@@ -0,0 +1 @@
+patch type="fixed" "Audio processing delegate lifecycle"
diff --git a/Sources/LiveKit/Audio/Manager/AudioManager.swift b/Sources/LiveKit/Audio/Manager/AudioManager.swift
index 658f5db47..01acd83bc 100644
--- a/Sources/LiveKit/Audio/Manager/AudioManager.swift
+++ b/Sources/LiveKit/Audio/Manager/AudioManager.swift
@@ -117,17 +117,9 @@ public class AudioManager: Loggable {
 
     // MARK: - AudioProcessingModule
 
-    private lazy var capturePostProcessingDelegateAdapter: AudioCustomProcessingDelegateAdapter = {
-        let adapter = AudioCustomProcessingDelegateAdapter(label: "capturePost")
-        RTC.audioProcessingModule.capturePostProcessingDelegate = adapter
-        return adapter
-    }()
-
-    private lazy var renderPreProcessingDelegateAdapter: AudioCustomProcessingDelegateAdapter = {
-        let adapter = AudioCustomProcessingDelegateAdapter(label: "renderPre")
-        RTC.audioProcessingModule.renderPreProcessingDelegate = adapter
-        return adapter
-    }()
+    private lazy var capturePostProcessingDelegateAdapter = AudioCustomProcessingDelegateAdapter(label: "capturePost")
+
+    private lazy var renderPreProcessingDelegateAdapter = AudioCustomProcessingDelegateAdapter(label: "renderPre")
 
     let capturePostProcessingDelegateSubject = CurrentValueSubject<AudioCustomProcessingDelegate?, Never>(nil)
 
@@ -135,10 +127,17 @@ public class AudioManager: Loggable {
     /// - Note: Only one delegate can be set at a time, but you can create one to wrap others if needed
     /// - Note: If you only need to observe the buffer (rather than modify it), use ``add(localAudioRenderer:)`` instead
     public var capturePostProcessingDelegate: AudioCustomProcessingDelegate? {
-        get { capturePostProcessingDelegateAdapter.target }
-        set {
-            capturePostProcessingDelegateAdapter.set(target: newValue)
-            capturePostProcessingDelegateSubject.send(newValue)
+        didSet {
+            if let capturePostProcessingDelegate {
+                // Clear WebRTC delegate first - this triggers audioProcessingRelease() on the old target
+                RTC.audioProcessingModule.capturePostProcessingDelegate = nil
+                capturePostProcessingDelegateAdapter.set(target: capturePostProcessingDelegate)
+                RTC.audioProcessingModule.capturePostProcessingDelegate = capturePostProcessingDelegateAdapter
+            } else {
+                RTC.audioProcessingModule.capturePostProcessingDelegate = nil
+                capturePostProcessingDelegateAdapter.set(target: nil)
+            }
+            capturePostProcessingDelegateSubject.send(capturePostProcessingDelegate)
         }
     }
 
@@ -147,8 +146,17 @@ public class AudioManager: Loggable {
     /// - Note: If you only need to observe the buffer (rather than modify it), use ``add(remoteAudioRenderer:)`` instead
     /// - Note: If you need to observe the buffer for individual tracks, use ``RemoteAudioTrack/add(audioRenderer:)`` instead
     public var renderPreProcessingDelegate: AudioCustomProcessingDelegate? {
-        get { renderPreProcessingDelegateAdapter.target }
-        set { renderPreProcessingDelegateAdapter.set(target: newValue) }
+        didSet {
+            if let renderPreProcessingDelegate {
+                // Clear WebRTC delegate first - this triggers release() on the old target
+                RTC.audioProcessingModule.renderPreProcessingDelegate = nil
+                renderPreProcessingDelegateAdapter.set(target: renderPreProcessingDelegate)
+                RTC.audioProcessingModule.renderPreProcessingDelegate = renderPreProcessingDelegateAdapter
+            } else {
+                RTC.audioProcessingModule.renderPreProcessingDelegate = nil
+                renderPreProcessingDelegateAdapter.set(target: nil)
+            }
+        }
     }
 
     // MARK: - AudioDeviceModule
diff --git a/Sources/LiveKit/Protocols/AudioCustomProcessingDelegate.swift b/Sources/LiveKit/Protocols/AudioCustomProcessingDelegate.swift
index d916bcada..d76bf467c 100644
--- a/Sources/LiveKit/Protocols/AudioCustomProcessingDelegate.swift
+++ b/Sources/LiveKit/Protocols/AudioCustomProcessingDelegate.swift
@@ -52,7 +52,7 @@ class AudioCustomProcessingDelegateAdapter: MulticastDelegate<AudioRenderer>, @u
     // MARK: - Private
 
     private struct State {
-        weak var target: AudioCustomProcessingDelegate?
+        var target: AudioCustomProcessingDelegate?
     }
 
     private var _state = StateSync(State())
diff --git a/Tests/LiveKitAudioTests/AudioProcessingLifecycle.swift b/Tests/LiveKitAudioTests/AudioProcessingLifecycle.swift
new file mode 100644
index 000000000..23384f588
--- /dev/null
+++ b/Tests/LiveKitAudioTests/AudioProcessingLifecycle.swift
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2025 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@testable import LiveKit
+#if canImport(LiveKitTestSupport)
+import LiveKitTestSupport
+#endif
+import LiveKitWebRTC
+
+private enum CallType {
+    case initialize
+    case process
+    case release
+}
+
+private class ProcessingDelegateTester: AudioCustomProcessingDelegate, @unchecked Sendable {
+    let label: String
+    struct State {
+        var entries: [CallType] = []
+    }
+
+    let _state = StateSync(State())
+
+    init(label: String) {
+        self.label = label
+    }
+
+    func audioProcessingInitialize(sampleRate sampleRateHz: Int, channels: Int) {
+        _state.mutate { $0.entries.append(.initialize) }
+        print("ProcessingDelegate(\(label)).Initialize(sampleRate: \(sampleRateHz), channels: \(channels))")
+    }
+
+    func audioProcessingProcess(audioBuffer: LiveKit.LKAudioBuffer) {
+        _state.mutate { $0.entries.append(.process) }
+        print("ProcessingDelegate(\(label)).Process(audioBuffer: \(audioBuffer.frames))")
+    }
+
+    func audioProcessingRelease() {
+        _state.mutate { $0.entries.append(.release) }
+        print("ProcessingDelegate(\(label)).Release")
+    }
+}
+
+class AudioProcessingLifecycle: LKTestCase {
+    func testAudioProcessing() async throws {
+        let processorA = ProcessingDelegateTester(label: "A")
+        let processorB = ProcessingDelegateTester(label: "B")
+        // Set processing delegate
+        AudioManager.shared.capturePostProcessingDelegate = processorA
+
+        try await withRooms([RoomTestingOptions(canPublish: true)]) { rooms in
+            // Alias to Room1
+            let room1 = rooms[0]
+            // Publish mic
+            try await room1.localParticipant.setMicrophone(enabled: true)
+            do {
+                // 1 sec...
+                let ns = UInt64(1 * 1_000_000_000)
+                try await Task.sleep(nanoseconds: ns)
+            }
+
+            // Verify processorA was initialized and received audio
+            let stateA = processorA._state.copy()
+            XCTAssertTrue(stateA.entries.contains(.initialize), "Processor A should have been initialized")
+            XCTAssertTrue(stateA.entries.contains(.process), "Processor A should have processed audio")
+
+            // Switch to processorB
+            AudioManager.shared.capturePostProcessingDelegate = processorB
+            do {
+                // 1 sec...
+                let ns = UInt64(1 * 1_000_000_000)
+                try await Task.sleep(nanoseconds: ns)
+            }
+
+            // Verify processorA was released
+            let stateA2 = processorA._state.copy()
+            XCTAssertTrue(stateA2.entries.contains(.release), "Processor A should have been released")
+
+            // Verify processorB was initialized and received audio
+            let stateB = processorB._state.copy()
+            XCTAssertTrue(stateB.entries.contains(.initialize), "Processor B should have been initialized")
+            XCTAssertTrue(stateB.entries.contains(.process), "Processor B should have processed audio")
+        }
+
+        // Remove processing delegate
+        AudioManager.shared.capturePostProcessingDelegate = nil
+
+        // Verify processorB was released
+        let stateB2 = processorB._state.copy()
+        XCTAssertTrue(stateB2.entries.contains(.release), "Processor B should have been released")
+    }
+}
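
---

Usage note (illustrative, not part of the patch): with the `didSet` setters above, assigning a new processing delegate now clears the WebRTC-side delegate first, so the outgoing target receives `audioProcessingRelease()` before the replacement is installed; that ordering is what the new lifecycle test asserts. A minimal sketch against the public API, where `LogProcessor` is a hypothetical type used only for this example:

```swift
import LiveKit

// Hypothetical delegate for illustration; any AudioCustomProcessingDelegate works.
final class LogProcessor: AudioCustomProcessingDelegate, @unchecked Sendable {
    func audioProcessingInitialize(sampleRate sampleRateHz: Int, channels: Int) {
        print("initialize: \(sampleRateHz) Hz, \(channels) ch")
    }

    func audioProcessingProcess(audioBuffer: LKAudioBuffer) {
        // Read or modify the capture buffer in place here.
    }

    func audioProcessingRelease() {
        print("release")
    }
}

let first = LogProcessor()
let second = LogProcessor()

// Installing a delegate wires it into the capture-post processing chain.
AudioManager.shared.capturePostProcessingDelegate = first

// Swapping delegates: `first` receives audioProcessingRelease(), then
// `second` is initialized once audio flows again.
AudioManager.shared.capturePostProcessingDelegate = second

// Clearing the delegate releases it as well.
AudioManager.shared.capturePostProcessingDelegate = nil
```

Note that dropping `weak` from `State.target` means the adapter now keeps the delegate alive while it is installed, which is what makes the explicit release call on swap or removal reliable.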