example/index.tsx (4 changes: 3 additions & 1 deletion)
@@ -1,7 +1,7 @@
import { AppRegistry } from 'react-native';
import App from './src/App';
import { name as appName } from './app.json';
import { registerGlobals, setLogLevel } from '@livekit/react-native';
import { registerGlobals, setLogLevel, useIOSAudioManagement } from '@livekit/react-native';
import { LogLevel } from 'livekit-client';
import { setupErrorLogHandler } from './src/utils/ErrorLogHandler';
import { setupCallService } from './src/callservice/CallService';
@@ -16,3 +16,5 @@ setupCallService();
// Required React-Native setup for app
registerGlobals();
AppRegistry.registerComponent(appName, () => App);

useIOSAudioManagement();
example/src/RoomPage.tsx (1 change: 0 additions & 1 deletion)
@@ -106,7 +106,6 @@ const RoomView = ({ navigation, e2ee }: RoomViewProps) => {
return () => {};
}, [room, e2ee]);

useIOSAudioManagement(room, true);
// Setup room listeners
const { send } = useDataChannel(
(dataMessage: ReceivedDataMessage<string>) => {
ios/LiveKitReactNativeModule.swift (2 changes: 1 addition & 1 deletion)
@@ -29,7 +29,7 @@ public class LivekitReactNativeModule: RCTEventEmitter {
super.init()
let config = RTCAudioSessionConfiguration()
config.category = AVAudioSession.Category.playAndRecord.rawValue
config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
config.categoryOptions = [.allowAirPlay, .allowBluetoothHFP, .allowBluetoothA2DP, .defaultToSpeaker]
config.mode = AVAudioSession.Mode.videoChat.rawValue

RTCAudioSessionConfiguration.setWebRTC(config)
src/audio/AudioManager.ts (177 changes: 61 additions & 116 deletions)
@@ -1,141 +1,86 @@
import { useState, useEffect, useMemo } from 'react';
import { Platform } from 'react-native';
import {
RoomEvent,
Room,
type LocalTrackPublication,
type RemoteTrackPublication,
} from 'livekit-client';
import AudioSession, {
getDefaultAppleAudioConfigurationForMode,
type AppleAudioConfiguration,
type AudioTrackState,
} from './AudioSession';
import { log } from '..';
import { audioDeviceModuleEvents } from '@livekit/react-native-webrtc';

Check failure on line 5 in src/audio/AudioManager.ts (GitHub Actions / test):
Module '"@livekit/react-native-webrtc"' has no exported member 'audioDeviceModuleEvents'.

export type AudioEngineConfigurationState = {
isPlayoutEnabled: boolean;
isRecordingEnabled: boolean;
preferSpeakerOutput: boolean;
};

/**
* Handles setting the appropriate AVAudioSession options automatically
* depending on the audio track states of the Room.
*
* @param room
* @param preferSpeakerOutput
* @param onConfigureNativeAudio A custom method for determining options used.
*/
export function useIOSAudioManagement(
Contributor:

This changes the behavior quite a bit. Better to create a separate method and deprecate the old one.

The `use` method prefix is sort of special in React and indicates a React Hook (something that'd only live for the lifecycle of the containing component), whereas this one looks like it's a one-and-done setup method. Something like `setupIOSAudioManagement` would work instead.

Member (Author):

You're right, I think deprecating it is better 👍

room: Room,
preferSpeakerOutput: boolean = true,
onConfigureNativeAudio?: (
trackState: AudioTrackState,
preferSpeakerOutput: boolean
) => AppleAudioConfiguration
preferSpeakerOutput = true,
onConfigureNativeAudio?: (configurationState: AudioEngineConfigurationState) => AppleAudioConfiguration
) {
const [localTrackCount, setLocalTrackCount] = useState(0);
const [remoteTrackCount, setRemoteTrackCount] = useState(0);
const trackState = useMemo(
() => computeAudioTrackState(localTrackCount, remoteTrackCount),
[localTrackCount, remoteTrackCount]
);

useEffect(() => {
let recalculateTrackCounts = () => {
setLocalTrackCount(getLocalAudioTrackCount(room));
setRemoteTrackCount(getRemoteAudioTrackCount(room));
};

recalculateTrackCounts();

room.on(RoomEvent.Connected, recalculateTrackCounts);

return () => {
room.off(RoomEvent.Connected, recalculateTrackCounts);
};
}, [room]);
useEffect(() => {
if (Platform.OS !== 'ios') {
return () => {};
}
let audioEngineState: AudioEngineConfigurationState = {
isPlayoutEnabled: false,
isRecordingEnabled: false,
preferSpeakerOutput: preferSpeakerOutput,
};

let onLocalPublished = (publication: LocalTrackPublication) => {
if (publication.kind === 'audio') {
setLocalTrackCount(localTrackCount + 1);
}
};
let onLocalUnpublished = (publication: LocalTrackPublication) => {
if (publication.kind === 'audio') {
if (localTrackCount - 1 < 0) {
log.warn(
'mismatched local audio track count! attempted to reduce track count below zero.'
);
}
setLocalTrackCount(Math.max(localTrackCount - 1, 0));
}
};
let onRemotePublished = (publication: RemoteTrackPublication) => {
if (publication.kind === 'audio') {
setRemoteTrackCount(remoteTrackCount + 1);
const tryConfigure = async (newState: AudioEngineConfigurationState, oldState: AudioEngineConfigurationState) => {
if ((!newState.isPlayoutEnabled && !newState.isRecordingEnabled) && (oldState.isPlayoutEnabled || oldState.isRecordingEnabled)) {
log.info("AudioSession deactivating...")
await AudioSession.stopAudioSession()
Contributor:

Hmm, users will already need to call start/stopAudioSession() manually for Android, will this have any issues with that?

Member (Author):

Ahh... yes, in that case it could be funky...

} else if (newState.isRecordingEnabled || newState.isPlayoutEnabled) {
const config = onConfigureNativeAudio ? onConfigureNativeAudio(newState) : getDefaultAppleAudioConfigurationForAudioState(newState);
log.info("AudioSession configuring category:", config.audioCategory)
await AudioSession.setAppleAudioConfiguration(config)
if (!oldState.isPlayoutEnabled && !oldState.isRecordingEnabled) {
log.info("AudioSession activating...")
await AudioSession.startAudioSession()
}
};
let onRemoteUnpublished = (publication: RemoteTrackPublication) => {
if (publication.kind === 'audio') {
if (remoteTrackCount - 1 < 0) {
log.warn(
'mismatched remote audio track count! attempted to reduce track count below zero.'
);
}
setRemoteTrackCount(Math.max(remoteTrackCount - 1, 0));
}
};

room
.on(RoomEvent.LocalTrackPublished, onLocalPublished)
.on(RoomEvent.LocalTrackUnpublished, onLocalUnpublished)
.on(RoomEvent.TrackPublished, onRemotePublished)
.on(RoomEvent.TrackUnpublished, onRemoteUnpublished);
}
};

return () => {
room
.off(RoomEvent.LocalTrackPublished, onLocalPublished)
.off(RoomEvent.LocalTrackUnpublished, onLocalUnpublished)
.off(RoomEvent.TrackPublished, onRemotePublished)
.off(RoomEvent.TrackUnpublished, onRemoteUnpublished);
const handleEngineStateUpdate = async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => {
const oldState = audioEngineState;
const newState = {
isPlayoutEnabled,
isRecordingEnabled,
preferSpeakerOutput: audioEngineState.preferSpeakerOutput,
};
}, [room, localTrackCount, remoteTrackCount]);

useEffect(() => {
if (Platform.OS !== 'ios') {
return;
}
// If this throws, the audio engine will not continue its operation
await tryConfigure(newState, oldState);
// Update the audio state only if configure succeeds
audioEngineState = newState;
};

let configFunc =
onConfigureNativeAudio ?? getDefaultAppleAudioConfigurationForMode;
let audioConfig = configFunc(trackState, preferSpeakerOutput);
AudioSession.setAppleAudioConfiguration(audioConfig);
}, [trackState, onConfigureNativeAudio, preferSpeakerOutput]);
// Attach audio engine events
audioDeviceModuleEvents.setWillEnableEngineHandler(handleEngineStateUpdate);
audioDeviceModuleEvents.setDidDisableEngineHandler(handleEngineStateUpdate);
}

function computeAudioTrackState(
localTracks: number,
remoteTracks: number
): AudioTrackState {
if (localTracks > 0 && remoteTracks > 0) {
return 'localAndRemote';
} else if (localTracks > 0 && remoteTracks === 0) {
return 'localOnly';
} else if (localTracks === 0 && remoteTracks > 0) {
return 'remoteOnly';
} else {
return 'none';
function getDefaultAppleAudioConfigurationForAudioState(
configurationState: AudioEngineConfigurationState,
): AppleAudioConfiguration {
if (configurationState.isRecordingEnabled) {
return {
audioCategory: 'playAndRecord',
audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'],
audioMode: configurationState.preferSpeakerOutput ? 'videoChat' : 'voiceChat',
};
} else if (configurationState.isPlayoutEnabled) {
return {
audioCategory: 'playback',
audioCategoryOptions: ['mixWithOthers'],
audioMode: 'spokenAudio',
};
}
}

function getLocalAudioTrackCount(room: Room): number {
return room.localParticipant.audioTrackPublications.size;
}

function getRemoteAudioTrackCount(room: Room): number {
var audioTracks = 0;
room.remoteParticipants.forEach((participant) => {
audioTracks += participant.audioTrackPublications.size;
});
return audioTracks;
return {
audioCategory: 'soloAmbient',
audioCategoryOptions: [],
audioMode: 'default',
};
}
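
A minimal sketch of the split suggested in the review discussion above: keep the existing Room-based hook but mark it deprecated, and move the new one-and-done behavior into a plain setup function. The name `setupIOSAudioManagement` comes from the reviewer's comment; the delegation shape, the simplified signatures, and the omitted handler body are assumptions for illustration, not code from this PR.

```typescript
import { useEffect } from 'react';
import { Platform } from 'react-native';
import type { Room } from 'livekit-client';

/**
 * One-time setup of the audio-engine event handlers added in this PR
 * (signature simplified for the sketch).
 */
export function setupIOSAudioManagement(preferSpeakerOutput = true): void {
  if (Platform.OS !== 'ios') {
    return;
  }
  // The body of this PR's useIOSAudioManagement would live here unchanged:
  // attach the willEnableEngine/didDisableEngine handlers and drive
  // AudioSession.setAppleAudioConfiguration / startAudioSession /
  // stopAudioSession from the resulting state transitions, honoring
  // preferSpeakerOutput when picking the audio mode.
}

/**
 * @deprecated Use setupIOSAudioManagement instead.
 * Kept so existing component-level callers (such as the example's RoomPage)
 * keep working; in this sketch it simply delegates, and `room` is accepted
 * but no longer used.
 */
export function useIOSAudioManagement(
  _room: Room,
  preferSpeakerOutput: boolean = true
): void {
  useEffect(() => {
    setupIOSAudioManagement(preferSpeakerOutput);
  }, [preferSpeakerOutput]);
}
```

Delegating the deprecated hook to the new function is only one option; the alternative is to leave the old track-count-based behavior in place for the deprecation period and let the two code paths coexist until the next major release.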
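
For context on the reviewer's question about start/stopAudioSession(): cross-platform apps already drive the session lifecycle manually because Android requires it, roughly as below. This is an illustrative usage pattern, not code from this PR; only the AudioSession.startAudioSession()/stopAudioSession() calls are the existing @livekit/react-native API, and the hook name is hypothetical.

```typescript
import { useEffect } from 'react';
import { AudioSession } from '@livekit/react-native';

// Manual lifecycle that apps use today, typically around a call screen:
// start the session before connecting to a Room, stop it on unmount.
// With this PR, iOS would also start and stop the session automatically
// from audio-engine events, so on iOS these manual calls would overlap
// with the automatic ones; that interaction is what the comment above
// is asking about.
export function useManualAudioSession() {
  useEffect(() => {
    // Fire and forget; startAudioSession resolves once the native
    // session is active.
    AudioSession.startAudioSession();
    return () => {
      AudioSession.stopAudioSession();
    };
  }, []);
}
```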
src/audio/AudioSession.ts (31 changes: 0 additions & 31 deletions)
@@ -197,37 +197,6 @@ export type AppleAudioConfiguration = {
audioMode?: AppleAudioMode;
};

export type AudioTrackState =
| 'none'
| 'remoteOnly'
| 'localOnly'
| 'localAndRemote';

export function getDefaultAppleAudioConfigurationForMode(
mode: AudioTrackState,
preferSpeakerOutput: boolean = true
): AppleAudioConfiguration {
if (mode === 'remoteOnly') {
return {
audioCategory: 'playback',
audioCategoryOptions: ['mixWithOthers'],
audioMode: 'spokenAudio',
};
} else if (mode === 'localAndRemote' || mode === 'localOnly') {
return {
audioCategory: 'playAndRecord',
audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'],
audioMode: preferSpeakerOutput ? 'videoChat' : 'voiceChat',
};
}

return {
audioCategory: 'soloAmbient',
audioCategoryOptions: [],
audioMode: 'default',
};
}

export default class AudioSession {
/**
* Applies the provided audio configuration to the underlying AudioSession.