diff --git a/.changeset/tender-eyes-run.md b/.changeset/tender-eyes-run.md
new file mode 100644
index 000000000..22df7e007
--- /dev/null
+++ b/.changeset/tender-eyes-run.md
@@ -0,0 +1,5 @@
+---
+"@livekit/components-react": patch
+---
+
+Pull audio track from avatar worker in useVoiceAssistant
diff --git a/packages/core/etc/components-core.api.md b/packages/core/etc/components-core.api.md
index 6ba2034d8..2b6dce8eb 100644
--- a/packages/core/etc/components-core.api.md
+++ b/packages/core/etc/components-core.api.md
@@ -86,10 +86,7 @@ export type ChatOptions = {
 };
 
 // @public (undocumented)
-export function computeMenuPosition(button: HTMLElement, menu: HTMLElement): Promise<{
-    x: number;
-    y: number;
-}>;
+export function computeMenuPosition(button: HTMLElement, menu: HTMLElement, onUpdate?: (x: number, y: number) => void): () => void;
 
 // @public (undocumented)
 export function connectedParticipantObserver(room: Room, identity: string, options?: ConnectedParticipantObserverOptions): Observable<RemoteParticipant | undefined>;
diff --git a/packages/react/etc/components-react.api.md b/packages/react/etc/components-react.api.md
index a070ddf38..55a5e5ba3 100644
--- a/packages/react/etc/components-react.api.md
+++ b/packages/react/etc/components-react.api.md
@@ -1256,16 +1256,12 @@ export interface VideoTrackProps extends React_2.VideoHTMLAttributes<HTMLVideoElement> {
diff --git a/packages/react/src/hooks/useVoiceAssistant.ts b/packages/react/src/hooks/useVoiceAssistant.ts
-  const agent = useRemoteParticipants().find((p) => p.kind === ParticipantKind.AGENT);
-  const audioTrack = useParticipantTracks([Track.Source.Microphone], agent?.identity)[0];
+  const remoteParticipants = useRemoteParticipants();
+  const agent = remoteParticipants.find(
+    (p) => p.kind === ParticipantKind.AGENT && !('lk.publish_on_behalf' in p.attributes),
+  );
+  const worker = remoteParticipants.find(
+    (p) =>
+      p.kind === ParticipantKind.AGENT && p.attributes['lk.publish_on_behalf'] === agent?.identity,
+  );
+  const agentTracks = useParticipantTracks(
+    [Track.Source.Microphone, Track.Source.Camera],
+    agent?.identity,
+  );
+  const workerTracks = useParticipantTracks(
+    [Track.Source.Microphone, Track.Source.Camera],
+    worker?.identity,
+  );
+  const audioTrack =
+    agentTracks.find((t) => t.source === Track.Source.Microphone) ??
+    workerTracks.find((t) => t.source === Track.Source.Microphone);
+  const videoTrack =
+    agentTracks.find((t) => t.source === Track.Source.Camera) ??
+    workerTracks.find((t) => t.source === Track.Source.Camera);
   const { segments: agentTranscriptions } = useTrackTranscription(audioTrack);
   const connectionState = useConnectionState();
   const { attributes } = useParticipantAttributes({ participant: agent });
@@ -66,6 +105,7 @@ export function useVoiceAssistant(): VoiceAssistant {
     agent,
     state,
     audioTrack,
+    videoTrack,
     agentTranscriptions,
     agentAttributes: attributes,
   };
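
The sketch below is not part of the diff; it illustrates how a consumer might pick up the new `videoTrack` reference that `useVoiceAssistant` now resolves from either the agent or the avatar worker publishing on its behalf (`lk.publish_on_behalf`). It assumes the hook runs inside a `LiveKitRoom`/room context and that `VideoTrack` and `BarVisualizer` accept a `trackRef` prop as in current `@livekit/components-react` releases; the component name `AssistantView` is made up for illustration.

```tsx
import { BarVisualizer, VideoTrack, useVoiceAssistant } from '@livekit/components-react';

// Illustrative consumer: render the avatar video when a track is available,
// otherwise fall back to an audio-only visualizer driven by the agent state.
export function AssistantView() {
  const { state, audioTrack, videoTrack } = useVoiceAssistant();

  if (videoTrack) {
    // videoTrack may now come from the avatar worker rather than the agent itself.
    return <VideoTrack trackRef={videoTrack} />;
  }
  return <BarVisualizer state={state} trackRef={audioTrack} />;
}
```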
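
The `computeMenuPosition` change in `components-core.api.md` replaces the one-shot `Promise<{ x, y }>` with a callback plus a returned cleanup function. A minimal sketch of the new shape, based only on the signature in the API report; the element lookup and transform-based positioning are assumptions for illustration, not the library's own wiring:

```ts
import { computeMenuPosition } from '@livekit/components-core';

// Hypothetical anchor/menu elements; any pair of positioned elements works.
const button = document.getElementById('menu-button') as HTMLElement;
const menu = document.getElementById('menu') as HTMLElement;

// onUpdate is presumably invoked whenever the menu position is (re)computed;
// the returned function tears the positioning logic down again.
const cleanup = computeMenuPosition(button, menu, (x, y) => {
  menu.style.transform = `translate(${x}px, ${y}px)`;
});

// Later, e.g. when the menu closes or the component unmounts:
cleanup();
```

Callers that previously awaited a single `{ x, y }` result would instead apply coordinates in the callback and hold on to the returned cleanup handle.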