import { AudioAnalyserOptions } from 'livekit-client'; import { AudioCaptureOptions } from 'livekit-client'; import { CaptureOptionsBySource } from '@livekit/components-core'; import { ChatMessage } from '@livekit/components-core'; import { ChatOptions } from '@livekit/components-core'; import { ConnectionQuality } from 'livekit-client'; import { ConnectionState as ConnectionState_2 } from 'livekit-client'; import { CreateLocalTracksOptions } from 'livekit-client'; import { DataPublishOptions } from 'livekit-client'; import { default as default_2 } from 'typed-emitter'; import { DisconnectReason } from 'livekit-client'; import { EventMap } from 'typed-emitter'; import { GridLayoutDefinition } from '@livekit/components-core'; import { GridLayoutInfo } from '@livekit/components-core'; import { HTMLAttributes } from 'react'; import { isTrackReference } from '@livekit/components-core'; import { KrispNoiseFilterProcessor } from '@livekit/krisp-noise-filter'; import { LocalAudioTrack } from 'livekit-client'; import { LocalParticipant } from 'livekit-client'; import { LocalTrackPublication } from 'livekit-client'; import { LocalUserChoices } from '@livekit/components-core'; import { LocalVideoTrack } from 'livekit-client'; import { MediaDeviceFailure } from 'livekit-client'; import { MessageDecoder } from '@livekit/components-core'; import { MessageEncoder } from '@livekit/components-core'; import { NoiseFilterOptions } from '@livekit/krisp-noise-filter'; import { Participant } from 'livekit-client'; import { ParticipantClickEvent } from '@livekit/components-core'; import { ParticipantEvent } from 'livekit-client'; import { ParticipantIdentifier } from '@livekit/components-core'; import { ParticipantPermission } from '@livekit/protocol'; import { PinState } from '@livekit/components-core'; import * as React_2 from 'react'; import { ReceivedAgentTranscriptionMessage } from '@livekit/components-core'; import { ReceivedChatMessage } from '@livekit/components-core'; import { 
ReceivedDataMessage } from '@livekit/components-core'; import { ReceivedMessage } from '@livekit/components-core'; import { ReceivedTranscriptionSegment } from '@livekit/components-core'; import { ReceivedUserTranscriptionMessage } from '@livekit/components-core'; import { RemoteAudioTrack } from 'livekit-client'; import { RemoteParticipant } from 'livekit-client'; import { Room } from 'livekit-client'; import { RoomConnectOptions } from 'livekit-client'; import { RoomEvent } from 'livekit-client'; import { RoomOptions } from 'livekit-client'; import { ScreenShareCaptureOptions } from 'livekit-client'; import { SendTextOptions } from 'livekit-client'; import { setLogExtension } from '@livekit/components-core'; import { setLogLevel } from '@livekit/components-core'; import { SetMediaDeviceOptions } from '@livekit/components-core'; import { SourcesArray } from '@livekit/components-core'; import { SVGProps } from 'react'; import { TextStreamData } from '@livekit/components-core'; import { ToggleSource } from '@livekit/components-core'; import { TokenSourceConfigurable } from 'livekit-client'; import { TokenSourceFetchOptions } from 'livekit-client'; import { TokenSourceFixed } from 'livekit-client'; import { Track } from 'livekit-client'; import { TrackProcessor } from 'livekit-client'; import { TrackPublication } from 'livekit-client'; import { TrackPublishOptions } from 'livekit-client'; import { TrackReference } from '@livekit/components-core'; import { TrackReferenceOrPlaceholder } from '@livekit/components-core'; import { TrackSourceWithOptions } from '@livekit/components-core'; import { TranscriptionSegment } from 'livekit-client'; import { VideoCaptureOptions } from 'livekit-client'; import { WidgetState } from '@livekit/components-core'; declare type AgentActions = { /** Returns a promise that resolves once the agent is connected and available for user input */ waitUntilConnected: (signal?: AbortSignal) => Promise; /** * Returns a promise that resolves once 
the client could be listening for user speech (`canListen` is true) * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. * */ waitUntilCouldBeListening: (signal?: AbortSignal) => Promise; /** Returns a promise that resolves once the client has disconnected from the agent either for an expected or unexpected reason. */ waitUntilFinished: (signal?: AbortSignal) => Promise; /** Returns a promise that resolves once the agent has published a camera track */ waitUntilCamera: (signal?: AbortSignal) => Promise; /** Returns a promise that resolves once the agent has published a microphone track */ waitUntilMicrophone: (signal?: AbortSignal) => Promise; }; /** @beta */ export declare type AgentCallbacks = { [AgentEvent.CameraChanged]: (newTrack: TrackReference | undefined) => void; [AgentEvent.MicrophoneChanged]: (newTrack: TrackReference | undefined) => void; [AgentEvent.StateChanged]: (newAgentState: AgentState) => void; }; /** @beta */ export declare enum AgentEvent { CameraChanged = "cameraChanged", MicrophoneChanged = "microphoneChanged", StateChanged = "stateChanged" } /** @see https://github.com/livekit/agents/blob/65170238db197f62f479eb7aaef1c0e18bfad6e7/livekit-agents/livekit/agents/voice/events.py#L97 */ declare type AgentSdkStates = 'initializing' | 'idle' | 'listening' | 'thinking' | 'speaking'; /** * State representing the current status of the agent, whether it is ready for speach, etc * * For most agents (which have the preconnect audio buffer feature enabled), this is the lifecycle: * connecting ➡️ pre-connect-buffering ➡️ initializing/listening/thinking/speaking * * For agents without the preconnect audio feature enabled: * connecting ➡️ initializing ➡️ idle/listening/thinking/speaking * * If an agent fails to connect: * connecting ➡️ pre-connect-buffering/initializing ➡️ failed * * Legacy useVoiceAssistant hook: * disconnected ➡️ 
connecting ➡️ initializing ➡️ listening/thinking/speaking * * @beta * */ export declare type AgentState = 'disconnected' | 'connecting' | 'pre-connect-buffering' | 'failed' | AgentSdkStates; declare type AgentStateAvailable = AgentStateCommon & { state: 'listening' | 'thinking' | 'speaking'; failureReasons: null; /** The agent's assigned identity, coming from the JWT token. */ identity: Participant['identity']; name: Participant['name']; metadata: Participant['metadata']; /** Is the agent connected to the client? */ isConnected: true; /** * Could the client be listening for user speech? * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. * */ canListen: true; /** Has the client disconnected from the agent either for an expected or unexpected reason? */ isFinished: false; /** Is the agent currently connecting or setting itself up? */ isPending: false; cameraTrack?: TrackReference; microphoneTrack?: TrackReference; }; declare type AgentStateCases = AgentStateConnecting | AgentStateDisconnected | AgentStateAvailable | AgentStatePreConnectBuffering | AgentStateUnAvailable | AgentStateFailed; declare type AgentStateCommon = { attributes: Participant['attributes']; internal: { emitter: default_2; agentParticipant: RemoteParticipant | null; workerParticipant: RemoteParticipant | null; }; }; declare type AgentStateConnecting = AgentStateCommon & { state: 'connecting'; failureReasons: null; /** The client's assigned identity, coming from the JWT token. */ identity: undefined; name: undefined; metadata: undefined; /** Is the agent connected to the client? */ isConnected: false; /** * Could the client be listening for user speech? * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. 
* */ canListen: false; /** Has the client disconnected from the agent either for an expected or unexpected reason? */ isFinished: false; /** Is the agent currently connecting or setting itself up? */ isPending: true; cameraTrack: undefined; microphoneTrack: undefined; }; declare type AgentStateDisconnected = AgentStateCommon & { state: 'disconnected'; failureReasons: null; /** The client's assigned identity, coming from the JWT token. */ identity: undefined; name: undefined; metadata: undefined; /** Is the agent connected to the client? */ isConnected: false; /** * Could the client be listening for user speech? * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. * */ canListen: false; /** Has the client disconnected from the agent either for an expected or unexpected reason? */ isFinished: true; /** Is the agent currently connecting or setting itself up? */ isPending: false; cameraTrack: undefined; microphoneTrack: undefined; }; declare type AgentStateFailed = AgentStateCommon & { state: 'failed'; failureReasons: Array; /** The client's assigned identity, coming from the JWT token. */ identity: undefined; name: undefined; metadata: undefined; /** Is the agent connected to the client? */ isConnected: false; /** * Could the client be listening for user speech? * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. * */ canListen: false; /** Has the client disconnected from the agent either for an expected or unexpected reason? */ isFinished: true; /** Is the agent currently connecting or setting itself up? 
*/ isPending: false; cameraTrack: undefined; microphoneTrack: undefined; }; declare type AgentStatePreConnectBuffering = AgentStateCommon & { state: 'pre-connect-buffering'; failureReasons: null; /** The client's assigned identity, coming from the JWT token. */ identity: Participant['identity']; name: Participant['name']; metadata: Participant['metadata']; /** Is the agent connected to the client? */ isConnected: false; /** * Could the client be listening for user speech? * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. * */ canListen: true; /** Has the client disconnected from the agent either for an expected or unexpected reason? */ isFinished: false; /** Is the agent currently connecting or setting itself up? */ isPending: false; cameraTrack?: TrackReference; microphoneTrack?: TrackReference; }; declare type AgentStateUnAvailable = AgentStateCommon & { state: 'initializing' | 'idle'; failureReasons: null; /** The client's assigned identity, coming from the JWT token. */ identity: Participant['identity']; name: Participant['name']; metadata: Participant['metadata']; /** Is the agent connected to the client? */ isConnected: false; /** * Could the client be listening for user speech? * * Note that this may not mean that the agent is actually connected - the audio pre-connect * buffer could be active and recording user input before the agent actually connects. * */ canListen: false; /** Has the client disconnected from the agent either for an expected or unexpected reason? */ isFinished: false; /** Is the agent currently connecting or setting itself up? 
*/ isPending: true; cameraTrack?: TrackReference; microphoneTrack?: TrackReference; }; /** @public */ export declare interface AllowAudioPlaybackProps extends React_2.ButtonHTMLAttributes { room?: Room; label: string; } /** @public */ export declare interface AllowMediaPlaybackProps extends React_2.ButtonHTMLAttributes { label?: string; } /** * This component is the default setup of a classic LiveKit audio conferencing app. * It provides functionality like switching between participant grid view and focus view. * * @remarks * The component is implemented with other LiveKit components like `FocusContextProvider`, * `GridLayout`, `ControlBar`, `FocusLayoutContainer` and `FocusLayout`. * * @example * ```tsx * * * * ``` * @public */ export declare function AudioConference({ ...props }: AudioConferenceProps): React_2.JSX.Element; /** @public */ export declare interface AudioConferenceProps extends React_2.HTMLAttributes { } /** * The AudioTrack component is responsible for rendering participant audio tracks. * This component must have access to the participant's context, or alternatively pass it a `Participant` as a property. * * @example * ```tsx * * * * ``` * * @see `ParticipantTile` component * @public */ export declare const AudioTrack: (props: AudioTrackProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface AudioTrackProps extends React_2.AudioHTMLAttributes { /** The track reference of the track from which the audio is to be rendered. */ trackRef?: TrackReference; onSubscriptionStatusChanged?: (subscribed: boolean) => void; /** Sets the volume of the audio track. By default, the range is between `0.0` and `1.0`. */ volume?: number; /** * Mutes the audio track if set to `true`. * @remarks * If set to `true`, the server will stop sending audio track data to the client. * @alpha */ muted?: boolean; } /** * The AudioVisualizer component is used to visualize the audio volume of a given audio track. 
* @remarks * Requires a `TrackReferenceOrPlaceholder` to be provided either as a property or via the `TrackRefContext`. * @example * ```tsx * * ``` * @public * @deprecated Use BarVisualizer instead */ export declare const AudioVisualizer: (props: AudioVisualizerProps & React_2.RefAttributes) => React_2.ReactNode; /** * @public * @deprecated Use BarVisualizer instead */ export declare interface AudioVisualizerProps extends React_2.HTMLAttributes { trackRef?: TrackReference; } /** * @alpha */ export declare interface AudioWaveformOptions { barCount?: number; volMultiplier?: number; updateInterval?: number; } /** * Visualizes audio signals from a TrackReference as bars. * If the `state` prop is set, it automatically transitions between VoiceAssistant states. * @beta * * @remarks For VoiceAssistant state transitions this component requires a voice assistant agent running with livekit-agents \>= 0.9.0 * * @example * ```tsx * function SimpleVoiceAssistant() { * const { state, audioTrack } = useVoiceAssistant(); * return ( * * ); * } * ``` * * @example * Styling the BarVisualizer using CSS classes * ```css * .lk-audio-bar { * // Styles for "idle" bars * } * .lk-audio-bar.lk-highlighted { * // Styles for "active" bars * } * ``` * * @example * Styling the BarVisualizer using CSS custom properties * ```css * --lk-fg // for the "active" colour, note that this defines the main foreground colour for the whole "theme" * --lk-va-bg // for "idle" colour * ``` * * @example * Using a custom bar template for the BarVisualizer * ```tsx * *
* * ``` * the highlighted children will get a data prop of data-lk-highlighted for them to switch between active and idle bars in their own template bar */ export declare const BarVisualizer: React_2.ForwardRefExoticComponent & React_2.RefAttributes>; /** * @beta */ export declare type BarVisualizerOptions = { /** in percentage */ maxHeight?: number; /** in percentage */ minHeight?: number; }; /** * @beta */ export declare interface BarVisualizerProps extends React_2.HTMLProps { /** If set, the visualizer will transition between different voice assistant states */ state?: AgentState; /** Number of bars that show up in the visualizer */ barCount?: number; /** @deprecated use `track` field instead */ trackRef?: TrackReferenceOrPlaceholder; track?: TrackReferenceOrPlaceholder | LocalAudioTrack | RemoteAudioTrack; options?: BarVisualizerOptions; /** The template component to be used in the visualizer. */ children?: React_2.ReactNode; } /** * @internal */ export declare const CameraDisabledIcon: (props: SVGProps) => React_2.JSX.Element; /** * @internal */ export declare const CameraIcon: (props: SVGProps) => React_2.JSX.Element; /** * The `CarouselLayout` component displays a list of tracks in a scroll container. * It will display as many tiles as possible and overflow the rest. * @remarks * To ensure visual stability when tiles are reordered due to track updates, * the component uses the `useVisualStableUpdate` hook. * @example * ```tsx * const tracks = useTracks([Track.Source.Camera]); * * * * ``` * @public */ export declare function CarouselLayout({ tracks, orientation, ...props }: CarouselLayoutProps): React_2.JSX.Element; /** @public */ export declare interface CarouselLayoutProps extends React_2.HTMLAttributes { tracks: TrackReferenceOrPlaceholder[]; children: React_2.ReactNode; /** Place the tiles vertically or horizontally next to each other. * If undefined orientation is guessed by the dimensions of the container. 
*/ orientation?: 'vertical' | 'horizontal'; } /** * The Chat component provides ready-to-use chat functionality in a LiveKit room. * Messages are distributed to all participants in the room in real-time. * * @remarks * - Only users who are in the room at the time of dispatch will receive messages * - Message history is not persisted between sessions * - Requires `@livekit/components-styles` to be imported for styling * * @example * ```tsx * import '@livekit/components-styles'; * * function Room() { * return ( * * * * ); * } * ``` * * For custom styling, refer to: https://docs.livekit.io/reference/components/react/concepts/style-components/ * * @public */ export declare function Chat({ messageFormatter, messageDecoder, messageEncoder, channelTopic, ...props }: ChatProps): React_2.JSX.Element; /** * @internal */ export declare const ChatCloseIcon: (props: SVGProps) => React_2.JSX.Element; /** @internal */ declare type ChatContextAction = { msg: 'show_chat'; } | { msg: 'hide_chat'; } | { msg: 'toggle_chat'; } | { msg: 'unread_msg'; count: number; } | { msg: 'toggle_settings'; }; /** * The `ChatEntry` component holds and displays one chat message. * * @example * ```tsx * * * * ``` * @see `Chat` * @public */ export declare const ChatEntry: (props: ChatEntryProps & React_2.RefAttributes) => React_2.ReactNode; /** * ChatEntry composes the HTML div element under the hood, so you can pass all its props. * These are the props specific to the ChatEntry component: * @public */ export declare interface ChatEntryProps extends React_2.HTMLAttributes { /** The chat massage object to display. */ entry: ReceivedChatMessage; /** Hide sender name. Useful when displaying multiple consecutive chat messages from the same person. */ hideName?: boolean; /** Hide message timestamp. */ hideTimestamp?: boolean; /** An optional formatter for the message body. 
*/ messageFormatter?: MessageFormatter; } /** * @internal */ export declare const ChatIcon: (props: SVGProps) => React_2.JSX.Element; export { ChatMessage } /** @public */ export declare interface ChatProps extends React_2.HTMLAttributes, ChatOptions { messageFormatter?: MessageFormatter; } /** * The `ChatToggle` component is a button that toggles the visibility of the `Chat` component. * @remarks * For the component to have any effect it has to live inside a `LayoutContext` context. * * @example * ```tsx * * * * ``` * @public */ export declare const ChatToggle: (props: ChatToggleProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface ChatToggleProps extends React_2.ButtonHTMLAttributes { } /** * @internal */ export declare const Chevron: (props: SVGProps) => React_2.JSX.Element; /** * The `ClearPinButton` is a basic html button with the added ability to signal * the `LayoutContext` that it should display the grid view again. * @remarks * This component works only inside a `LayoutContext`. * * @example * ```tsx * * Back to grid view * * ``` * @public */ export declare const ClearPinButton: (props: ClearPinButtonProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface ClearPinButtonProps extends React_2.ButtonHTMLAttributes { } /** * The `ConnectionQualityIndicator` shows the individual connection quality of a participant. 
* * @example * ```tsx * * ``` * @public */ export declare const ConnectionQualityIndicator: (props: ConnectionQualityIndicatorProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface ConnectionQualityIndicatorOptions { participant?: Participant; } /** @public */ export declare interface ConnectionQualityIndicatorProps extends React_2.HTMLAttributes, ConnectionQualityIndicatorOptions { } /** * The `ConnectionState` component displays the connection status of the room as strings * (`"connected" | "connecting" | "disconnected" | "reconnecting"`). * * @example * ```tsx * * * * ``` * @public */ export declare const ConnectionState: (props: ConnectionStatusProps & React_2.RefAttributes) => React_2.ReactNode; /** * The `ConnectionStateToast` component displays a toast * notification indicating the current connection state of the room. * @public */ export declare function ConnectionStateToast(props: ConnectionStateToastProps): React_2.JSX.Element; /** @public */ export declare interface ConnectionStateToastProps extends React_2.HTMLAttributes { room?: Room; } /** @public */ export declare interface ConnectionStatusProps extends React_2.HTMLAttributes { /** * The room from which the connection status should be displayed. */ room?: Room; } /** * The `ControlBar` prefab gives the user the basic user interface to control their * media devices (camera, microphone and screen share), open the `Chat` and leave the room. * * @remarks * This component is build with other LiveKit components like `TrackToggle`, * `DeviceSelectorButton`, `DisconnectButton` and `StartAudio`. 
* * @example * ```tsx * * * * ``` * @public */ export declare function ControlBar({ variation, controls, saveUserChoices, onDeviceError, ...props }: ControlBarProps): React_2.JSX.Element; /** @public */ export declare type ControlBarControls = { microphone?: boolean; camera?: boolean; chat?: boolean; screenShare?: boolean; leave?: boolean; settings?: boolean; }; /** @public */ export declare interface ControlBarProps extends React_2.HTMLAttributes { onDeviceError?: (error: { source: Track.Source; error: Error; }) => void; variation?: 'minimal' | 'verbose' | 'textOnly'; controls?: ControlBarControls; /** * If `true`, the user's device choices will be persisted. * This will enable the user to have the same device choices when they rejoin the room. * @defaultValue true * @alpha */ saveUserChoices?: boolean; } /** * The `DisconnectButton` is a basic html button with the added ability to disconnect from a LiveKit room. * Normally this is the big red button that allows end users to leave the video or audio call. * * @example * ```tsx * * Leave room * * ``` * @public */ export declare const DisconnectButton: (props: DisconnectButtonProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface DisconnectButtonProps extends React_2.ButtonHTMLAttributes { stopTracks?: boolean; } declare type FeatureContext = T extends true ? FeatureFlags : FeatureFlags | undefined; /** @internal */ export declare interface FeatureFlags { autoSubscription?: boolean; } /** * The `FocusLayout` component is just a light wrapper around the `ParticipantTile` to display a single participant. * @public */ export declare function FocusLayout({ trackRef, ...htmlProps }: FocusLayoutProps): React_2.JSX.Element; /** * The `FocusLayoutContainer` is a layout component that expects two children: * A small side component: In a video conference, this is usually a carousel of participants * who are not in focus. And a larger main component to display the focused participant. 
* For example, with the `FocusLayout` component. * @public */ export declare function FocusLayoutContainer(props: FocusLayoutContainerProps): React_2.JSX.Element; /** @public */ export declare interface FocusLayoutContainerProps extends React_2.HTMLAttributes { } /** @public */ export declare interface FocusLayoutProps extends React_2.HTMLAttributes { /** The track to display in the focus layout. */ trackRef?: TrackReferenceOrPlaceholder; onParticipantClick?: (evt: ParticipantClickEvent) => void; } /** * The `FocusToggle` puts the `ParticipantTile` in focus or removes it from focus. * @remarks * This component needs to live inside `LayoutContext` to work properly. * * @example * ```tsx * * * * ``` * @public */ export declare const FocusToggle: (props: FocusToggleProps & React_2.RefAttributes) => React_2.ReactNode; /** * @internal */ export declare const FocusToggleIcon: (props: SVGProps) => React_2.JSX.Element; /** @public */ export declare interface FocusToggleProps extends React_2.ButtonHTMLAttributes { trackRef?: TrackReferenceOrPlaceholder; } /** @public */ export declare function formatChatMessageLinks(message: string): React_2.ReactNode; /** * @internal */ export declare const GearIcon: (props: SVGProps) => React_2.JSX.Element; /** * The `GridLayout` component displays the nested participants in a grid where every participants has the same size. * It also supports pagination if there are more participants than the grid can display. * @remarks * To ensure visual stability when tiles are reordered due to track updates, * the component uses the `useVisualStableUpdate` hook. 
* @example * ```tsx * * * * * * ``` * @public */ export declare function GridLayout({ tracks, ...props }: GridLayoutProps): React_2.JSX.Element; export { GridLayoutDefinition } /** @public */ export declare interface GridLayoutProps extends React_2.HTMLAttributes, Pick { children: React_2.ReactNode; tracks: TrackReferenceOrPlaceholder[]; } export { isTrackReference } /** @public */ export declare const LayoutContext: React_2.Context; /** @alpha */ export declare function LayoutContextProvider({ value, onPinChange, onWidgetChange, children, }: React_2.PropsWithChildren): React_2.JSX.Element; /** @alpha */ export declare interface LayoutContextProviderProps { value?: LayoutContextType; onPinChange?: (state: PinState) => void; onWidgetChange?: (state: WidgetState) => void; } /** @public */ export declare type LayoutContextType = { pin: PinContextType; widget: WidgetContextType; }; /** * @internal */ export declare const LeaveIcon: (props: SVGProps) => React_2.JSX.Element; /** * The `LiveKitRoom` component provides the room context to all its child components. * It is generally the starting point of your LiveKit app and the root of the LiveKit component tree. * It provides the room state as a React context to all child components, so you don't have to pass it yourself. * * @example * ```tsx * * ... * * ``` * @public */ export declare const LiveKitRoom: (props: React_2.PropsWithChildren & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface LiveKitRoomProps extends Omit, 'onError'> { /** * URL to the LiveKit server. * For example: `wss://.livekit.cloud` * To simplify the implementation, `undefined` is also accepted as an intermediate value, but only with a valid string url can the connection be established. */ serverUrl: string | undefined; /** * A user specific access token for a client to authenticate to the room. * This token is necessary to establish a connection to the room. 
* To simplify the implementation, `undefined` is also accepted as an intermediate value, but only with a valid string token can the connection be established. * * @see https://docs.livekit.io/cloud/project-management/keys-and-tokens/#generating-access-tokens */ token: string | undefined; /** * Publish audio immediately after connecting to your LiveKit room. * @defaultValue `false` * @see https://docs.livekit.io/client-sdk-js/interfaces/AudioCaptureOptions.html */ audio?: AudioCaptureOptions | boolean; /** * Publish video immediately after connecting to your LiveKit room. * @defaultValue `false` * @see https://docs.livekit.io/client-sdk-js/interfaces/VideoCaptureOptions.html */ video?: VideoCaptureOptions | boolean; /** * Publish screen share immediately after connecting to your LiveKit room. * @defaultValue `false` * @see https://docs.livekit.io/client-sdk-js/interfaces/ScreenShareCaptureOptions.html */ screen?: ScreenShareCaptureOptions | boolean; /** * If set to true a connection to LiveKit room is initiated. * @defaultValue `true` */ connect?: boolean; /** * Options for when creating a new room. * When you pass your own room instance to this component, these options have no effect. * Instead, set the options directly in the room instance. * * @see https://docs.livekit.io/client-sdk-js/interfaces/RoomOptions.html */ options?: RoomOptions; /** * Define options how to connect to the LiveKit server. * * @see https://docs.livekit.io/client-sdk-js/interfaces/RoomConnectOptions.html */ connectOptions?: RoomConnectOptions; onConnected?: () => void; onDisconnected?: (reason?: DisconnectReason) => void; onError?: (error: Error) => void; onMediaDeviceFailure?: (failure?: MediaDeviceFailure, kind?: MediaDeviceKind) => void; onEncryptionError?: (error: Error) => void; /** * Optional room instance. * By passing your own room instance you overwrite the `options` parameter, * make sure to set the options directly on the room instance itself. 
*/ room?: Room; simulateParticipants?: number | undefined; /** * @internal */ featureFlags?: FeatureFlags; } /** @internal */ export declare const LKFeatureContext: React_2.Context; export { LocalUserChoices } /** * @internal */ export declare const LockLockedIcon: (props: SVGProps) => React_2.JSX.Element; /** * The `MediaDeviceMenu` component is a button that opens a menu that lists * all media devices and allows the user to select them. * * @remarks * This component is implemented with the `MediaDeviceSelect` LiveKit components. * * @example * ```tsx * * * * ``` * @public */ export declare function MediaDeviceMenu({ kind, initialSelection, onActiveDeviceChange, tracks, requestPermissions, ...props }: MediaDeviceMenuProps): React_2.JSX.Element; /** @public */ export declare interface MediaDeviceMenuProps extends React_2.ButtonHTMLAttributes { kind?: MediaDeviceKind; initialSelection?: string; onActiveDeviceChange?: (kind: MediaDeviceKind, deviceId: string) => void; tracks?: Partial>; /** * this will call getUserMedia if the permissions are not yet given to enumerate the devices with device labels. * in some browsers multiple calls to getUserMedia result in multiple permission prompts. * It's generally advised only flip this to true, once a (preview) track has been acquired successfully with the * appropriate permissions. * * @see {@link PreJoin} * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices | MDN enumerateDevices} */ requestPermissions?: boolean; } /** * The `MediaDeviceSelect` list all media devices of one kind. * Clicking on one of the listed devices make it the active media device. 
* * @example * ```tsx * * * * ``` * @public */ export declare const MediaDeviceSelect: (props: MediaDeviceSelectProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface MediaDeviceSelectProps extends Omit, 'onError'> { kind: MediaDeviceKind; onActiveDeviceChange?: (deviceId: string) => void; onDeviceListChange?: (devices: MediaDeviceInfo[]) => void; onDeviceSelectError?: (e: Error) => void; initialSelection?: string; /** will force the browser to only return the specified device * will call `onDeviceSelectError` with the error in case this fails */ exactMatch?: boolean; track?: LocalAudioTrack | LocalVideoTrack; /** * this will call getUserMedia if the permissions are not yet given to enumerate the devices with device labels. * in some browsers multiple calls to getUserMedia result in multiple permission prompts. * It's generally advised only flip this to true, once a (preview) track has been acquired successfully with the * appropriate permissions. * * @see {@link MediaDeviceMenu} * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices | MDN enumerateDevices} */ requestPermissions?: boolean; onError?: (e: Error) => void; } export { MessageDecoder } export { MessageEncoder } /** @public */ export declare type MessageFormatter = (message: string) => React_2.ReactNode; /** @beta */ export declare type MessagesCallbacks = { [MessagesEvent.MessageReceived]: (message: ReceivedMessage) => void; }; /** @beta */ export declare enum MessagesEvent { /** * Emits when a new message is received from a participant * args: (message: ReceivedMessage) */ MessageReceived = "messageReceived" } /** * @internal */ export declare const MicDisabledIcon: (props: SVGProps) => React_2.JSX.Element; /** * @internal */ export declare const MicIcon: (props: SVGProps) => React_2.JSX.Element; /** * Interface for configuring options for the useMultibandTrackVolume hook. 
* @alpha */ export declare interface MultiBandTrackVolumeOptions { bands?: number; /** * cut off of frequency bins on the lower end * Note: this is not a frequency measure, but in relation to analyserOptions.fftSize, */ loPass?: number; /** * cut off of frequency bins on the higher end * Note: this is not a frequency measure, but in relation to analyserOptions.fftSize, */ hiPass?: number; /** * update should run every x ms */ updateInterval?: number; analyserOptions?: AnalyserOptions; } /** * The `ParticipantAudioTile` component is the base utility wrapper for displaying a visual representation of a participant. * This component can be used as a child of the `TileLoop` or independently if a participant is passed as a property. * * @example * ```tsx * * ``` * @public */ export declare const ParticipantAudioTile: (props: ParticipantTileProps & React_2.RefAttributes) => React_2.ReactNode; export { ParticipantClickEvent } /** @public */ export declare const ParticipantContext: React_2.Context; /** * The `ParticipantContextIfNeeded` component only creates a `ParticipantContext` * if there is no `ParticipantContext` already. * @example * ```tsx * * ... * * ``` * @public */ export declare function ParticipantContextIfNeeded(props: React_2.PropsWithChildren<{ participant?: Participant; }>): React_2.JSX.Element; export { ParticipantIdentifier } /** * The `ParticipantLoop` component loops over an array of participants to create a context for every participant. * This component takes exactly one child component as a template. * By providing your own template as a child you have full control over the look and feel of your participant representations. * * @remarks * If you want to loop over individual tracks instead of participants, you can use the `TrackLoop` component. 
* * @example * ```tsx * const participants = useParticipants(); * * * * ``` * @public */ export declare function ParticipantLoop({ participants, ...props }: ParticipantLoopProps): React_2.JSX.Element; /** @public */ export declare interface ParticipantLoopProps { /** The participants to loop over. Use `useParticipants()` hook to get participants. */ participants: Participant[]; /** The template component to be used in the loop. */ children: React_2.ReactNode; } /** * The `ParticipantName` component displays the name of the participant as a string within an HTML span element. * If no participant name is undefined the participant identity string is displayed. * * @example * ```tsx * * ``` * @public */ export declare const ParticipantName: (props: ParticipantNameProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface ParticipantNameProps extends React_2.HTMLAttributes, UseParticipantInfoOptions { } /** * @internal */ export declare const ParticipantPlaceholder: (props: SVGProps) => React_2.JSX.Element; /** * The `ParticipantTile` component is the base utility wrapper for displaying a visual representation of a participant. * This component can be used as a child of the `TrackLoop` component or by passing a track reference as property. * * @example Using the `ParticipantTile` component with a track reference: * ```tsx * * ``` * @example Using the `ParticipantTile` component as a child of the `TrackLoop` component: * ```tsx * * * * ``` * @public */ export declare const ParticipantTile: (props: ParticipantTileProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface ParticipantTileProps extends React_2.HTMLAttributes { /** The track reference to display. 
*/ trackRef?: TrackReferenceOrPlaceholder; disableSpeakingIndicator?: boolean; onParticipantClick?: (event: ParticipantClickEvent) => void; } /** @internal */ declare type PinAction = { msg: 'set_pin'; trackReference: TrackReferenceOrPlaceholder; } | { msg: 'clear_pin'; }; /** @internal */ declare type PinContextType = { dispatch?: React_2.Dispatch; state?: PinState; }; export { PinState } /** * The `PreJoin` prefab component is normally presented to the user before he enters a room. * This component allows the user to check and select the preferred media device (camera und microphone). * On submit the user decisions are returned, which can then be passed on to the `LiveKitRoom` so that the user enters the room with the correct media devices. * * @remarks * This component is independent of the `LiveKitRoom` component and should not be nested within it. * Because it only accesses the local media tracks this component is self-contained and works without connection to the LiveKit server. * * @example * ```tsx * * ``` * @public */ export declare function PreJoin({ defaults, onValidate, onSubmit, onError, debug, joinLabel, micLabel, camLabel, userLabel, persistUserChoices, videoProcessor, ...htmlProps }: PreJoinProps): React_2.JSX.Element; /** * Props for the PreJoin component. * @public */ export declare interface PreJoinProps extends Omit, 'onSubmit' | 'onError'> { /** This function is called with the `LocalUserChoices` if validation is passed. */ onSubmit?: (values: LocalUserChoices) => void; /** * Provide your custom validation function. Only if validation is successful the user choices are past to the onSubmit callback. */ onValidate?: (values: LocalUserChoices) => boolean; onError?: (error: Error) => void; /** Prefill the input form with initial values. */ defaults?: Partial; /** Display a debug window for your convenience. 
*/ debug?: boolean; joinLabel?: string; micLabel?: string; camLabel?: string; userLabel?: string; /** * If true, user choices are persisted across sessions. * @defaultValue true * @alpha */ persistUserChoices?: boolean; videoProcessor?: TrackProcessor; } /** * @internal */ export declare const QualityExcellentIcon: (props: SVGProps) => React_2.JSX.Element; /** * @internal */ export declare const QualityGoodIcon: (props: SVGProps) => React_2.JSX.Element; /** * @internal */ export declare const QualityPoorIcon: (props: SVGProps) => React_2.JSX.Element; /** * @internal */ export declare const QualityUnknownIcon: (props: SVGProps) => React_2.JSX.Element; export { ReceivedAgentTranscriptionMessage } export { ReceivedChatMessage } export { ReceivedMessage } export { ReceivedUserTranscriptionMessage } /** * The `RoomAudioRenderer` component is a drop-in solution for adding audio to your LiveKit app. * It takes care of handling remote participants’ audio tracks and makes sure that microphones and screen share are audible. * * @example * ```tsx * * * * ``` * @public */ export declare function RoomAudioRenderer({ room, volume, muted }: RoomAudioRendererProps): React_2.JSX.Element; /** @public */ export declare interface RoomAudioRendererProps { room?: Room; /** Sets the volume for all audio tracks rendered by this component. By default, the range is between `0.0` and `1.0`. */ volume?: number; /** * If set to `true`, mutes all audio tracks rendered by the component. * @remarks * If set to `true`, the server will stop sending audio track data to the client. * @alpha */ muted?: boolean; } /** @public */ export declare const RoomContext: React_2.Context; /** * The `RoomName` component renders the name of the connected LiveKit room inside a span tag. 
* * @example * ```tsx * * * * ``` * @public * * @param props - RoomNameProps */ export declare const RoomName: React_2.FC>; /** @public */ export declare interface RoomNameProps extends React_2.HTMLAttributes { childrenPosition?: 'before' | 'after'; } /** * @internal */ export declare const ScreenShareIcon: (props: SVGProps) => React_2.JSX.Element; /** * @internal */ export declare const ScreenShareStopIcon: (props: SVGProps) => React_2.JSX.Element; declare type SessionActions = { /** Returns a promise that resolves once the room connects. */ waitUntilConnected: (signal?: AbortSignal) => void; /** Returns a promise that resolves once the room disconnects */ waitUntilDisconnected: (signal?: AbortSignal) => void; prepareConnection: () => Promise; /** Connect to the underlying room and dispatch any agents */ start: (options?: SessionConnectOptions) => Promise; /** Disconnect from the underlying room */ end: () => Promise; }; /** @beta */ export declare type SessionCallbacks = { [SessionEvent.ConnectionStateChanged]: (newAgentConnectionState: ConnectionState_2) => void; [SessionEvent.MediaDevicesError]: (error: Error) => void; [SessionEvent.EncryptionError]: (error: Error) => void; }; /** @beta */ export declare type SessionConnectOptions = { /** Optional abort signal which if triggered will terminate connecting even if it isn't complete */ signal?: AbortSignal; tracks?: { microphone?: { enabled?: boolean; publishOptions?: TrackPublishOptions; }; camera?: { enabled?: boolean; publishOptions?: TrackPublishOptions; }; screenShare?: { enabled?: boolean; publishOptions?: TrackPublishOptions; }; }; /** Options for Room.connect(.., .., opts) */ roomConnectOptions?: RoomConnectOptions; }; /** @beta */ export declare enum SessionEvent { ConnectionStateChanged = "connectionStateChanged", /** * Emits when an error is encountered while attempting to create a track. * Use MediaDeviceFailure.getFailure(error) to get the reason of failure. 
* args: (error: Error, kind: MediaDeviceKind) */ MediaDevicesError = "mediaDevicesError", /** * Emits when an error is received while decrypting frame received frame information. * args: (error: Error) */ EncryptionError = "encryptionError" } /** * The `SessionProvider` component instantiates a SessionContext from the return of useSession * @beta */ export declare function SessionProvider(props: SessionProviderProps): React_2.JSX.Element; /** @beta */ export declare type SessionProviderProps = { session: UseSessionReturn; children: React_2.ReactNode; }; declare type SessionStateCommon = { room: Room; internal: { emitter: default_2; tokenSource: TokenSourceConfigurable | TokenSourceFixed; agentConnectTimeoutMilliseconds?: number; agentTimeoutFailureReason: string | null; startAgentTimeout: (agentConnectTimeoutMilliseconds?: number) => void; clearAgentTimeout: () => void; clearAgentTimeoutFailureReason: () => void; updateAgentTimeoutState: (agentState: AgentState) => void; updateAgentTimeoutParticipantExists: (agentParticipantExists: boolean) => void; }; }; declare type SessionStateConnected = SessionStateCommon & { connectionState: ConnectionState_2.Connected | ConnectionState_2.Reconnecting | ConnectionState_2.SignalReconnecting; isConnected: true; local: { cameraTrack?: TrackReference; microphoneTrack?: TrackReference; screenShareTrack?: TrackReference; }; }; declare type SessionStateConnecting = SessionStateCommon & { connectionState: ConnectionState_2.Connecting; isConnected: false; local: { cameraTrack: undefined; microphoneTrack: undefined; screenShareTrack: undefined; }; }; declare type SessionStateDisconnected = SessionStateCommon & { connectionState: ConnectionState_2.Disconnected; isConnected: false; local: { cameraTrack: undefined; microphoneTrack: undefined; screenShareTrack: undefined; }; }; declare type SessionStub = Pick; export { setLogExtension } export { setLogLevel } /** * @internal */ export declare const SpinnerIcon: (props: SVGProps) => 
React_2.JSX.Element; /** * The `StartAudio` component is only visible when the browser blocks audio playback. This is due to some browser implemented autoplay policies. * To start audio playback, the user must perform a user-initiated event such as clicking this button. * As soon as audio playback starts, the button hides itself again. * * @example * ```tsx * * * * ``` * * @see Autoplay policy on MDN web docs: {@link https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Best_practices#autoplay_policy} * @public */ export declare const StartAudio: (props: AllowAudioPlaybackProps & React_2.RefAttributes) => React_2.ReactNode; /** * The `StartMediaButton` component is only visible when the browser blocks media playback. This is due to some browser implemented autoplay policies. * To start media playback, the user must perform a user-initiated event such as clicking this button. * As soon as media playback starts, the button hides itself again. * * @example * ```tsx * * * * ``` * * @see Autoplay policy on MDN web docs: {@link https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Best_practices#autoplay_policy} * @public */ export declare const StartMediaButton: (props: AllowMediaPlaybackProps & React_2.RefAttributes) => React_2.ReactNode; /** @beta */ export declare type SwitchActiveDeviceOptions = { /** * If true, adds an `exact` constraint to the getUserMedia request. * The request will fail if this option is true and the device specified is not actually available */ exact?: boolean; }; export { TextStreamData } /** * The `Toast` component is a rudimentary way to display a message to the user. * This message should be short lived and not require user interaction. * For example, displaying the current connection state like `ConnectionStateToast` does. * * @example * ```tsx * Connecting... * ``` * @public */ export declare function Toast(props: React_2.HTMLAttributes): React_2.JSX.Element; /** * The `TrackLoop` component loops over tracks. 
It is, for example, an easy way to loop over all participant camera and screen share tracks.
* The component uses an html button element under the hood so you can treat it like a button. * * @example * ```tsx * * * * * ``` * @public */ export declare const TrackToggle: (props: TrackToggleProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface TrackToggleProps extends Omit, 'onChange'> { source: T; showIcon?: boolean; initialState?: boolean; /** * Function that is called when the enabled state of the toggle changes. * The second function argument `isUserInitiated` is `true` if the change was initiated by a user interaction, such as a click. */ onChange?: (enabled: boolean, isUserInitiated: boolean) => void; captureOptions?: CaptureOptionsBySource; publishOptions?: TrackPublishOptions; onDeviceError?: (error: Error) => void; } /** * @alpha * @deprecated Use useTranscription instead */ export declare interface TrackTranscriptionOptions { /** * how many transcription segments should be buffered in state * @defaultValue 100 */ bufferSize?: number; /** * optional callback for retrieving newly incoming transcriptions only */ onTranscription?: (newSegments: TranscriptionSegment[]) => void; } /** * @internal */ export declare const UnfocusToggleIcon: (props: SVGProps) => React_2.JSX.Element; /** * useAgent encapculates all agent state, normalizing some quirks around how LiveKit Agents work. * @beta */ export declare function useAgent(session?: SessionStub): UseAgentReturn; /** @beta */ export declare type UseAgentReturn = AgentStateCases & AgentActions; /** * In many browsers to start audio playback, the user must perform a user-initiated event such as clicking a button. * The `useAudioPlayback` hook returns an object with a boolean `canPlayAudio` flag that indicates whether audio * playback is allowed in the current context, as well as a `startAudio` function that can be called in a button * `onClick` callback to start audio playback in the current context. 
* * @see Autoplay policy on MDN web docs for more info: {@link https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Best_practices#autoplay_policy} * @alpha */ export declare function useAudioPlayback(room?: Room): { canPlayAudio: boolean; startAudio: () => Promise; }; /** * @alpha */ export declare function useAudioWaveform(trackOrTrackReference?: LocalAudioTrack | RemoteAudioTrack | TrackReferenceOrPlaceholder, options?: AudioWaveformOptions): { bars: number[]; }; /** * The `useChat` hook provides chat functionality for a LiveKit room. * * @remarks * Message history is not persisted and will be lost if the component is refreshed. * You may want to persist message history in the browser, a cache or a database. * * @returns An object containing: * - `chatMessages` - Array of received chat messages * - `send` - Function to send a new message * - `isSending` - Boolean indicating if a message is currently being sent * * @example * ```tsx * function ChatComponent() { * const { chatMessages, send, isSending } = useChat(); * * return ( *
* {chatMessages.map((msg) => ( *
* {msg.from?.identity}: {msg.message} *
* ))} * *
* ); * } * ``` * @public */ export declare function useChat(options?: ChatOptions & { room?: Room; }): { send: (message: string, options?: SendTextOptions) => Promise; chatMessages: ReceivedChatMessage[]; isSending: boolean; }; /** * The `useChatToggle` hook provides state and functions for toggling the chat window. * @remarks * Depends on the `LayoutContext` to work properly. * @see {@link ChatToggle}, {@link Chat} * @public */ export declare function useChatToggle({ props }: UseChatToggleProps): { mergedProps: React_2.ButtonHTMLAttributes & { className: string; onClick: () => void; 'aria-pressed': string; 'data-lk-unread-msgs': string; }; }; /** @public */ export declare interface UseChatToggleProps { props: React_2.ButtonHTMLAttributes; } /** * The `useClearPinButton` hook provides props for the {@link ClearPinButton} * or your custom implementation of it component. It adds the `onClick` handler * to signal the `LayoutContext` that the tile in focus should be cleared. * @public */ export declare function useClearPinButton(props: ClearPinButtonProps): { buttonProps: ClearPinButtonProps & { className: string; disabled: boolean; onClick: () => void; }; }; /** * The `useConnectionQualityIndicator` hook provides props for the `ConnectionQualityIndicator` or your custom implementation of it component. * @example * ```tsx * const { quality } = useConnectionQualityIndicator(); * // or * const { quality } = useConnectionQualityIndicator({ participant }); * ``` * @public */ export declare function useConnectionQualityIndicator(options?: ConnectionQualityIndicatorOptions): { className: "lk-connection-quality"; quality: ConnectionQuality; }; /** * The `useConnectionState` hook allows you to simply implement your own `ConnectionState` component. 
* * @example * ```tsx * const connectionState = useConnectionState(room); * ``` * @public */ export declare function useConnectionState(room?: Room): ConnectionState_2; /** @public */ export declare function useCreateLayoutContext(): LayoutContextType; /** * The `useDataChannel` hook returns the ability to send and receive messages. * Pass an optional `topic` to narrow down which messages are returned in the messages array. * * @remarks * There is only one data channel. Passing a `topic` does not open a new data channel. * It is only used to filter out messages with no or a different `topic`. * * @example * ```tsx * // Send messages to all participants via the 'chat' topic. * const { message: latestMessage, send } = useDataChannel('chat', (msg) => console.log("message received", msg)); * ``` * * @example * ```tsx * // Receive all messages (no topic filtering) * const { message: latestMessage, send } = useDataChannel((msg) => console.log("message received", msg)); * ``` * * @public */ export declare function useDataChannel(topic: T, onMessage?: (msg: ReceivedDataMessage) => void): UseDataChannelReturnType; /** * Overload for `useDataChannel` without a topic. See {@link (useDataChannel:1)} for information and usage examples. * * @public */ export declare function useDataChannel(onMessage?: (msg: ReceivedDataMessage) => void): UseDataChannelReturnType; declare type UseDataChannelReturnType = { isSending: boolean; send: (payload: Uint8Array, options: DataPublishOptions) => Promise; message: ReceivedDataMessage | undefined; }; /** * The `useDisconnectButton` hook is used to implement the `DisconnectButton` or your * custom implementation of it. It adds onClick handler to the button to disconnect * from the room. 
* * @example * ```tsx * const { buttonProps } = useDisconnectButton(buttonProps); * return ; * ``` * @public */ export declare function useDisconnectButton(props: DisconnectButtonProps): { buttonProps: DisconnectButtonProps & { className: string; onClick: () => void; disabled: boolean; }; }; /** @public */ export declare function useEnsureCreateLayoutContext(layoutContext?: LayoutContextType): LayoutContextType; /** * Ensures that a layout context is provided, either via context or explicitly as a parameter. * If not inside a `LayoutContext` and no layout context is provided, an error is thrown. * @public */ export declare function useEnsureLayoutContext(layoutContext?: LayoutContextType): LayoutContextType; /** * Ensures that a participant is provided, either via context or explicitly as a parameter. * If not inside a `ParticipantContext` and no participant is provided, an error is thrown. * @public */ export declare function useEnsureParticipant(participant?: Participant): Participant; /** * Ensures that a room is provided, either via context or explicitly as a parameter. * If no room is provided, an error is thrown. * @public */ export declare function useEnsureRoom(room?: Room): Room; /** * Ensures that a session is provided, either via context or explicitly as a parameter. * If no session is provided, an error is thrown. * @beta */ export declare function useEnsureSession(session?: UseSessionReturn): UseSessionReturn; /** * Ensures that a track reference is provided, either via context or explicitly as a parameter. * If not inside a `TrackRefContext` and no track reference is provided, an error is thrown. * @public */ export declare function useEnsureTrackRef(trackRef?: TrackReferenceOrPlaceholder): TrackReferenceOrPlaceholder; /** @public */ export declare function useEvents, EmitterEventMap extends Emitter extends default_2 ? 
EM : never, Event extends Parameters[0], Callback extends EmitterEventMap[Event]>(instance: Emitter | { internal: { emitter: Emitter; }; } | null | undefined, event: Event, handlerFn: Callback | undefined, dependencies?: React_2.DependencyList): void; /** * Try to determine the `facingMode` of a local participant video track. * @remarks * Works only on local video tracks. * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode | MDN docs on facingMode} * @alpha */ export declare function useFacingMode(trackReference: TrackReferenceOrPlaceholder): 'user' | 'environment' | 'left' | 'right' | 'undefined'; /** * @internal */ export declare function useFeatureContext(require?: T): FeatureContext; /** * The `useFocusToggle` hook is used to implement the `FocusToggle` or your custom implementation of it. * The `TrackReferenceOrPlaceholder` is used to register a onClick handler and to identify the track to focus on. * * @example * ```tsx * const { mergedProps, inFocus } = useFocusToggle({ trackRef, props: yourButtonProps }); * return ; * ``` * @public */ export declare function useFocusToggle({ trackRef, props }: UseFocusToggleProps): { mergedProps: React_2.ButtonHTMLAttributes & { className: string; onClick: (event: React_2.MouseEvent) => void; }; inFocus: boolean; }; /** @public */ export declare interface UseFocusToggleProps { trackRef?: TrackReferenceOrPlaceholder; props: React_2.ButtonHTMLAttributes; } /** * The `useGridLayout` hook tries to select the best layout to fit all tiles. * If the available screen space is not enough, it will reduce the number of maximum visible * tiles and select a layout that still works visually within the given limitations. * As the order of tiles changes over time, the hook tries to keep visual updates to a minimum * while trying to display important tiles such as speaking participants or screen shares. 
* * @example * ```tsx * const { layout } = useGridLayout(gridElement, trackCount); * ``` * @public */ export declare function useGridLayout( /** HTML element that contains the grid. */ gridElement: React_2.RefObject, /** Count of tracks that should get layed out */ trackCount: number, options?: { gridLayouts?: GridLayoutDefinition[]; }): { layout: GridLayoutInfo; containerWidth: number; containerHeight: number; }; /** * @alpha */ export declare function useIsEncrypted(participant?: Participant, options?: UseIsEncryptedOptions): boolean; /** * @alpha */ export declare interface UseIsEncryptedOptions { room?: Room; } /** * The `useIsMuted` hook is used to implement the `TrackMutedIndicator` or your custom implementation of it. * It returns a `boolean` that indicates if the track is muted or not. * * @example With a track reference * ```tsx * const isMuted = useIsMuted(track); * ``` * * @param trackRef - A `TrackReference` indicating the track to monitor. * @returns boolean indicating if the track is muted * * @public */ export declare function useIsMuted(trackRef: TrackReferenceOrPlaceholder): boolean; /** @public */ export declare interface UseIsMutedOptions { participant?: Participant; } /** * The `useIsRecording` hook returns a `boolean` that indicates if the room is currently being recorded. * @example * ```tsx * const isRecording = useIsRecording(); * ``` * @public */ export declare function useIsRecording(room?: Room): boolean; /** * The `useIsSpeaking` hook returns a `boolean` that indicates if the participant is speaking or not. * @example * ```tsx * const isSpeaking = useIsSpeaking(participant); * ``` * @public */ export declare function useIsSpeaking(participant?: Participant): boolean; /** * Enable the Krisp enhanced noise cancellation feature for local audio tracks. * * Defaults to the localParticipant's microphone track publication, but you can override this behavior by passing in a different track reference. 
* * @package \@livekit/components-react/krisp * @remarks This filter requires that you install the `@livekit/krisp-noise-filter` package and is supported only on {@link https://cloud.livekit.io | LiveKit Cloud}. * @beta * @example * ```tsx * const krisp = useKrispNoiseFilter(); * return krisp.setNoiseFilterEnabled(ev.target.checked)} * checked={krisp.isNoiseFilterEnabled} * disabled={krisp.isNoiseFilterPending} * /> * ``` * @returns Use `setIsNoiseFilterEnabled` to enable/disable the noise filter. */ export declare function useKrispNoiseFilter(options?: useKrispNoiseFilterOptions): { setNoiseFilterEnabled: (enable: boolean) => Promise; isNoiseFilterEnabled: boolean; isNoiseFilterPending: boolean; processor: KrispNoiseFilterProcessor | undefined; }; /** * @beta */ export declare interface useKrispNoiseFilterOptions { /** * The track reference to use for the noise filter (defaults: local microphone track) */ trackRef?: TrackReferenceOrPlaceholder; /** * @internal */ filterOptions?: NoiseFilterOptions; } /** * Ensures that a layout context is provided via context. * If no layout context is provided, an error is thrown. * @public */ export declare function useLayoutContext(): LayoutContextType; /** * The `useLiveKitRoom` hook is used to implement the `LiveKitRoom` or your custom implementation of it. * It returns a `Room` instance and HTML props that should be applied to the root element of the component. * * @example * ```tsx * const { room, htmlProps } = useLiveKitRoom(); * return
...
; * ``` * @public */ export declare function useLiveKitRoom(props: LiveKitRoomProps): { room: Room | undefined; htmlProps: HTMLAttributes; }; /** * The `useLocalParticipant` hook returns the local participant and the associated state * around the participant. * * @example * ```tsx * const { localParticipant } = useLocalParticipant(); * ``` * @public */ export declare function useLocalParticipant(options?: UseLocalParticipantOptions): { isMicrophoneEnabled: boolean; isScreenShareEnabled: boolean; isCameraEnabled: boolean; microphoneTrack: TrackPublication | undefined; cameraTrack: TrackPublication | undefined; lastMicrophoneError: Error | undefined; lastCameraError: Error | undefined; localParticipant: LocalParticipant; }; /** @public */ export declare interface UseLocalParticipantOptions { /** * The room to use. If not provided, the hook will use the room from the context. */ room?: Room; } /** * The `useLocalParticipantPermissions` hook returns the local participant's permissions. * * @example * ```tsx * const { canPublish, canPublishData } = useLocalParticipantPermissions(); * ``` * @public */ export declare function useLocalParticipantPermissions(): ParticipantPermission | undefined; /** * Returns a layout context from the `LayoutContext` if it exists, otherwise `undefined`. * @public */ export declare function useMaybeLayoutContext(): LayoutContextType | undefined; /** * Returns a participant from the `ParticipantContext` if it exists, otherwise `undefined`. * @public */ export declare function useMaybeParticipantContext(): Participant | undefined; /** * Returns the room context if it exists, otherwise undefined. * @public */ export declare function useMaybeRoomContext(): Room | undefined; /** * Returns the session context if it exists, otherwise undefined. * @beta */ export declare function useMaybeSessionContext(): UseSessionReturn | undefined; /** * Returns a track reference from the `TrackRefContext` if it exists, otherwise `undefined`. 
* @public */ export declare function useMaybeTrackRefContext(): TrackReferenceOrPlaceholder | undefined; /** * The `useMediaDevices` hook returns the list of media devices of a given kind. * * @example * ```tsx * const videoDevices = useMediaDevices({ kind: 'videoinput' }); * const audioDevices = useMediaDevices({ kind: 'audioinput' }); * ``` * @public */ export declare function useMediaDevices({ kind, onError, }: { kind: MediaDeviceKind; onError?: (e: Error) => void; }): MediaDeviceInfo[]; /** * The `useMediaDeviceSelect` hook is used to implement the `MediaDeviceSelect` component and * returns o.a. the list of devices of a given kind (audioinput or videoinput), the currently active device * and a function to set the the active device. * * @example * ```tsx * const { devices, activeDeviceId, setActiveMediaDevice } = useMediaDeviceSelect({kind: 'audioinput'}); * ``` * @public */ export declare function useMediaDeviceSelect({ kind, room, track, requestPermissions, onError, }: UseMediaDeviceSelectProps): { devices: MediaDeviceInfo[]; className: string; activeDeviceId: string; setActiveMediaDevice: (id: string, options?: SetMediaDeviceOptions) => Promise; }; /** @public */ export declare interface UseMediaDeviceSelectProps { kind: MediaDeviceKind; room?: Room; track?: LocalAudioTrack | LocalVideoTrack; /** * this will call getUserMedia if the permissions are not yet given to enumerate the devices with device labels. * in some browsers multiple calls to getUserMedia result in multiple permission prompts. * It's generally advised only flip this to true, once a (preview) track has been acquired successfully with the * appropriate permissions. 
* * @see {@link MediaDeviceMenu} * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices | MDN enumerateDevices} */ requestPermissions?: boolean; /** * this callback gets called if an error is thrown when failing to select a device and also if a user * denied permissions, even though the `requestPermissions` option is set to `true`. * Most commonly this will emit a MediaDeviceError */ onError?: (e: Error) => void; } /** * Hook for tracking the volume of an audio track across multiple frequency bands using the Web Audio API. * @alpha */ export declare function useMultibandTrackVolume(trackOrTrackReference?: LocalAudioTrack | RemoteAudioTrack | TrackReferenceOrPlaceholder, options?: MultiBandTrackVolumeOptions): number[]; /** * The `usePagination` hook implements simple pagination logic for use with arrays. * @example * ```tsx * const tracks = useTracks(); * const pagination = usePagination(4, tracks); * * * ``` * @alpha */ export declare function usePagination(itemPerPage: number, trackReferences: TrackReferenceOrPlaceholder[]): { totalPageCount: number; nextPage: () => void; prevPage: () => void; setPage: (num: number) => void; firstItemIndex: number; lastItemIndex: number; tracks: TrackReferenceOrPlaceholder[]; currentPage: number; }; /** * The `useParticipantAttribute` hook returns the latest value of a given attribute key of a participant. * It requires a `Participant` object passed as property in the `UseParticipantAttributesOptions` or via the `ParticipantContext`. 
* * @example * ```tsx * const myAttributeValue = useParticipantAttribute('targetAttributeName'); * ``` * @public */ export declare function useParticipantAttribute(attributeKey: string, options?: UseParticipantAttributesOptions): string; /** @public */ export declare function useParticipantAttributes(props?: UseParticipantAttributesOptions): { attributes: Readonly> | undefined; }; /** * The `useParticipantAttributes` hook returns the attributes of a given participant and allows to set them. * It requires a `Participant` object passed as property or via the `ParticipantContext`. * * @example * ```tsx * const { attributes } = useParticipantAttributes({ participant }); * ``` * @public */ export declare interface UseParticipantAttributesOptions { participant?: Participant; } /** * Ensures that a participant is provided via context. * If not inside a `ParticipantContext`, an error is thrown. * @public */ export declare function useParticipantContext(): Participant; /** @public */ export declare function useParticipantInfo(props?: UseParticipantInfoOptions): { identity: string | undefined; name: string | undefined; metadata: string | undefined; }; /** * The `useParticipantInfo` hook returns the identity, name, and metadata of a given participant. * It requires a `Participant` object passed as property or via the `ParticipantContext`. * * @example * ```tsx * const { identity, name, metadata } = useParticipantInfo({ participant }); * ``` * @public */ export declare interface UseParticipantInfoOptions { participant?: Participant; } /** @public */ export declare function useParticipantPermissions(options?: UseParticipantPermissionsOptions): ParticipantPermission | undefined; /** * The `useParticipantPermissions` hook returns the permissions of a given participant. 
* * @example * ```tsx * const permissions = useParticipantPermissions({ participant }); * ``` * @public */ export declare interface UseParticipantPermissionsOptions { participant?: Participant; } /** * The `useParticipants` hook returns all participants (local and remote) of the current room. * @remarks * To optimize performance, you can use the `updateOnlyOn` property to decide on what `RoomEvents` the hook updates. * * @example * ```tsx * const participants = useParticipants(); * * * * ``` * @public */ export declare function useParticipants(options?: UseParticipantsOptions): (RemoteParticipant | LocalParticipant)[]; /** @public */ export declare interface UseParticipantsOptions { /** * To optimize performance, you can use the `updateOnlyOn` property to decide on what RoomEvents the hook updates. * By default it updates on all relevant RoomEvents to keep the returned participants array up to date. * The minimal set of non-overwriteable `RoomEvents` is: `[RoomEvent.ParticipantConnected, RoomEvent.ParticipantDisconnected, RoomEvent.ConnectionStateChanged]` */ updateOnlyOn?: RoomEvent[]; /** * The room to use. If not provided, the hook will use the room from the context. */ room?: Room; } /** * The `useParticipantTile` hook is used to implement the `ParticipantTile` and returns the props needed to render the tile. * @remarks * The returned props include many data attributes that are useful for CSS styling purposes because they * indicate the state of the participant and the track. * For example: `data-lk-audio-muted`, `data-lk-video-muted`, `data-lk-speaking`, `data-lk-local-participant`, `data-lk-source`, `data-lk-facing-mode`. * @public */ export declare function useParticipantTile({ trackRef, onParticipantClick, disableSpeakingIndicator, htmlProps, }: UseParticipantTileProps): { elementProps: React_2.HTMLAttributes; }; /** @public */ export declare interface UseParticipantTileProps extends React_2.HTMLAttributes { /** The track reference to display. 
*/ trackRef?: TrackReferenceOrPlaceholder; disableSpeakingIndicator?: boolean; onParticipantClick?: (event: ParticipantClickEvent) => void; htmlProps: React_2.HTMLAttributes; } /** * `useParticipantTracks` is a custom React hook that allows you to get tracks of a specific participant only, by specifying the participant's identity. * If the participant identity is not passed the hook will try to get the participant from a participant context. * @public */ export declare function useParticipantTracks(sources: Array, optionsOrParticipantIdentity?: UseParticipantTracksOptions | UseParticipantTracksOptions['participantIdentity']): Array; declare type UseParticipantTracksOptions = { participantIdentity?: string; room?: Room; }; /** * A hook that provides access to user choices stored in local storage, such as * selected media devices and their current state (on or off), as well as the user name. * @alpha */ export declare function usePersistentUserChoices(options?: UsePersistentUserChoicesOptions): { userChoices: LocalUserChoices; saveAudioInputEnabled: (isEnabled: boolean) => void; saveVideoInputEnabled: (isEnabled: boolean) => void; saveAudioInputDeviceId: (deviceId: string) => void; saveVideoInputDeviceId: (deviceId: string) => void; saveUsername: (username: string) => void; }; /** * Options for the `usePersistentUserChoices` hook. * @alpha */ export declare interface UsePersistentUserChoicesOptions { /** * The default value to use if reading from local storage returns no results or fails. */ defaults?: Partial; /** * Whether to prevent saving to persistent storage. * @defaultValue false */ preventSave?: boolean; /** * Whether to prevent loading user choices from persistent storage and use `defaults` instead. * @defaultValue false */ preventLoad?: boolean; } /** * The `usePinnedTracks` hook returns an array of the pinned tracks of the current room. * @remarks * To function properly, this hook must be called within a `LayoutContext`. 
* @example * ```tsx * const pinnedTracks = usePinnedTracks(); * ``` * @public */ export declare function usePinnedTracks(layoutContext?: LayoutContextType): TrackReferenceOrPlaceholder[]; /** * @public * @deprecated use `usePreviewTracks` instead */ export declare function usePreviewDevice(enabled: boolean, deviceId: string, kind: 'videoinput' | 'audioinput'): { selectedDevice: MediaDeviceInfo | undefined; localTrack: T | undefined; deviceError: Error | null; }; /** @public */ export declare function usePreviewTracks(options: CreateLocalTracksOptions, onError?: (err: Error) => void): (LocalAudioTrack | LocalVideoTrack)[] | undefined; /** * The `useRemoteParticipant` hook returns the first RemoteParticipant by either identity and/or based on the participant kind. * @remarks * To optimize performance, you can use the `updateOnlyOn` property to decide on what `ParticipantEvents` the hook updates. * * @example * ```tsx * const participant = useRemoteParticipant({kind: ParticipantKind.Agent, identity: 'myAgent'}); * ``` * @public */ export declare function useRemoteParticipant(identifier: ParticipantIdentifier, options?: UseRemoteParticipantOptions): RemoteParticipant | undefined; /** * The `useRemoteParticipant` hook returns the first RemoteParticipant by either identity or based on the participant kind. * @remarks * To optimize performance, you can use the `updateOnlyOn` property to decide on what `ParticipantEvents` the hook updates. * * @example * ```tsx * const participant = useRemoteParticipant('Russ'); * ``` * @public */ export declare function useRemoteParticipant(identity: string, options?: UseRemoteParticipantOptions): RemoteParticipant | undefined; /** @public */ export declare interface UseRemoteParticipantOptions { /** * To optimize performance, you can use the `updateOnlyOn` property to decide on what `ParticipantEvents` the hook updates. * By default it updates on all relevant ParticipantEvents to keep the returned participant up to date. 
*/ updateOnlyOn?: ParticipantEvent[]; } /** * The `useRemoteParticipants` hook returns all remote participants (without the local) of the current room. * @remarks * To optimize performance, you can use the `updateOnlyOn` property to decide on what `RoomEvents` the hook updates. * * @example * ```tsx * const participants = useRemoteParticipants(); * * * * ``` * @public */ export declare function useRemoteParticipants(options?: UseRemoteParticipantsOptions): RemoteParticipant[]; /** @public */ export declare interface UseRemoteParticipantsOptions { /** * To optimize performance, you can use the `updateOnlyOn` property to decide on what RoomEvents the hook updates. * By default it updates on all relevant RoomEvents to keep the returned participants array up to date. * The minimal set of non-overwriteable `RoomEvents` is: `[RoomEvent.ParticipantConnected, RoomEvent.ParticipantDisconnected, RoomEvent.ConnectionStateChanged]` */ updateOnlyOn?: RoomEvent[]; /** * The room to use. If not provided, the hook will use the room from the context. */ room?: Room; } /** @public */ export declare interface UserInfo { identity?: string; name?: string; metadata?: string; } /** * Ensures that a room is provided via context. * If no room is provided, an error is thrown. * @public */ export declare function useRoomContext(): Room; /** @public */ export declare function useRoomInfo(options?: UseRoomInfoOptions): { name: string; metadata: string | undefined; }; /** * The `useRoomInfo` hook returns the name and metadata of the given `Room`. * @remarks * Needs to be called inside a `RoomContext` or by passing a `Room` instance. * * @example * ```tsx * const { name, metadata } = useRoomInfo(); * ``` * @public */ export declare interface UseRoomInfoOptions { room?: Room; } /** * When calling room.disconnect() as part of a React useEffect cleanup function, it is possible for * a room.connect(...) in the effect body to start running while the room.disconnect() is still * running. 
This hook sequentializes these two operations, so they always happen in order and * never overlap. * * @example * ```ts * const { connect, disconnect } = useSequentialRoomConnectDisconnect(room); * * // Connecting to a room: * useEffect(() => { * connect(); * return () => disconnect(); * }, [connect, disconnect]); * ``` * * @public */ export declare function useSequentialRoomConnectDisconnect(room: R): UseSequentialRoomConnectDisconnectResults; /** @public */ export declare type UseSequentialRoomConnectDisconnectResults = { connect: typeof Room.prototype.connect & (R extends undefined ? null : unknown); disconnect: typeof Room.prototype.disconnect & (R extends undefined ? null : unknown); }; /** * A Session represents a managed connection to a Room which can contain Agents. * @beta */ export declare function useSession(tokenSource: TokenSourceConfigurable, options?: UseSessionConfigurableOptions): UseSessionReturn; /** * A Session represents a managed connection to a Room which can contain Agents. * @beta */ export declare function useSession(tokenSource: TokenSourceFixed, options?: UseSessionFixedOptions): UseSessionReturn; declare type UseSessionCommonOptions = { room?: Room; /** * Amount of time in milliseconds the system will wait for an agent to join the room, before * transitioning to the "failure" state. */ agentConnectTimeoutMilliseconds?: number; }; declare type UseSessionConfigurableOptions = UseSessionCommonOptions & TokenSourceFetchOptions; /** * Ensures that a session is provided via context. * If no session is provided, an error is thrown. * @beta */ export declare function useSessionContext(): UseSessionReturn; declare type UseSessionFixedOptions = UseSessionCommonOptions; /** @beta */ export declare function useSessionMessages(session?: UseSessionReturn): UseSessionMessagesReturn; /** @beta */ export declare type UseSessionMessagesReturn = { messages: Array; /** Is a send operation currently in progress? 
*/ isSending: boolean; send: (message: string, options?: SendTextOptions) => Promise; internal: { emitter: default_2; }; }; /** @beta */ export declare type UseSessionReturn = (SessionStateConnecting | SessionStateConnected | SessionStateDisconnected) & SessionActions; /** * The `useSortedParticipants` hook returns the participants sorted by importance. * @public */ export declare function useSortedParticipants(participants: Array): Participant[]; /** * The `useSpeakingParticipants` hook returns only the active speakers of all participants. * * @example * ```tsx * const activeSpeakers = useSpeakingParticipants(); * ``` * @public */ export declare function useSpeakingParticipants(options?: UseSpeakingParticipantsOptions): Participant[]; /** @public */ export declare type UseSpeakingParticipantsOptions = { room?: Room; }; /** * In many browsers to start audio playback, the user must perform a user-initiated event such as clicking a button. * The `useStartAudio` hook returns an object with a boolean `canPlayAudio` flag * that indicates whether audio playback is allowed in the current context, * as well as a `startAudio` function that can be called in a button `onClick` callback to start audio playback in the current context. * * @see Autoplay policy on MDN web docs for more info: {@link https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Best_practices#autoplay_policy} * @alpha */ export declare function useStartAudio({ room, props }: UseStartAudioProps): { mergedProps: React_2.ButtonHTMLAttributes & { className: string; onClick: () => void; style: { display: string; }; }; canPlayAudio: boolean; }; /** @alpha */ export declare interface UseStartAudioProps { room?: Room; props: React_2.ButtonHTMLAttributes; } /** * In some browsers to start video playback in low power mode, the user must perform a user-initiated event such as clicking a button. 
* The `useStartVideo` hook returns an object with a boolean `canPlayVideo` flag * that indicates whether video playback is allowed in the current context, * as well as a `startVideo` function that can be called in a button `onClick` callback to start video playback in the current context. * * @alpha */ export declare function useStartVideo({ room, props }: UseStartVideoProps): { mergedProps: React_2.ButtonHTMLAttributes & { className: string; onClick: () => void; style: { display: string; }; }; canPlayVideo: boolean; }; /** @alpha */ export declare interface UseStartVideoProps { room?: Room; props: React_2.ButtonHTMLAttributes; } /** * Simple implementation to detect horizontal swipe actions. * Accepts callbacks for on right and left swipes. * @example * ```tsx *
* ``` * @alpha */ export declare function useSwipe(element: React_2.RefObject, options?: UseSwipeOptions): void; /** * @alpha */ export declare type UseSwipeOptions = { minSwipeDistance?: number; onLeftSwipe?: () => void; onRightSwipe?: () => void; }; /** * @beta * @param topic - the topic to listen to * @returns an array of TextStreamData that holds the text, participantInfo, and streamInfo * @example * ```tsx * const { textStreams } = useTextStream('my-topic'); * return
{textStreams.map((textStream) => textStream.text)}
; * ``` */ export declare function useTextStream(topic: string, options?: UseTextStreamOptions): { textStreams: TextStreamData[]; }; /** @beta */ export declare type UseTextStreamOptions = { room?: Room; }; /** * The `useToken` hook fetches a token from the given token endpoint with the given user info. * * @example * ```tsx * const token = useToken(tokenEndpoint, roomName, { userInfo: { identity, name }}); * ``` * @public */ export declare function useToken(tokenEndpoint: string | undefined, roomName: string, options?: UseTokenOptions): string | undefined; /** @public */ export declare interface UseTokenOptions { userInfo?: UserInfo; } /** * This function `useTrackByName` allows you to access a track by referencing its track name. * Inside the function, it ensures that a valid `participant` reference is available by checking * for both a passed participant argument and, if not available, a valid participant context. * * @public */ export declare function useTrackByName(name: string, participant?: Participant): TrackReferenceOrPlaceholder; /** * The `useTrackMutedIndicator` hook is used to implement the `TrackMutedIndicator` component * and returns the muted state of the given track. * * @example * ```tsx * const { isMuted } = useTrackMutedIndicator(trackRef); * ``` * @public */ export declare function useTrackMutedIndicator(trackRef?: TrackReferenceOrPlaceholder): TrackMutedIndicatorReturnType; /** * Ensures that a track reference is provided via context. * If not inside a `TrackRefContext`, an error is thrown. * @public */ export declare function useTrackRefContext(): TrackReferenceOrPlaceholder; /** * The `useTracks` hook returns an array of `TrackReference` or `TrackReferenceOrPlaceholder` depending on the provided `sources` property. * If only subscribed tracks are desired, set the `onlySubscribed` property to `true`. * @example * ```ts * // Return all camera track publications. 
* const trackReferences: TrackReference[] = useTracks([Track.Source.Camera]) * ``` * @example * ```ts * // Return all subscribed camera tracks as well as placeholders for * // participants without a camera subscription. * const trackReferencesWithPlaceholders: TrackReferenceOrPlaceholder[] = useTracks([{source: Track.Source.Camera, withPlaceholder: true}]) * ``` * @public */ export declare function useTracks(sources?: T, options?: UseTracksOptions): UseTracksHookReturnType; /** @public */ export declare type UseTracksHookReturnType = T extends Track.Source[] ? TrackReference[] : T extends TrackSourceWithOptions[] ? TrackReferenceOrPlaceholder[] : never; /** @public */ export declare type UseTracksOptions = { updateOnlyOn?: RoomEvent[]; onlySubscribed?: boolean; room?: Room; }; /** * The `useTrackToggle` hook is used to implement the `TrackToggle` component and returns state * and functionality of the given track. * * @example * ```tsx * const { buttonProps, enabled } = useTrackToggle(trackRef); * return ; * ``` * @public */ export declare function useTrackToggle({ source, onChange, initialState, captureOptions, publishOptions, onDeviceError, room, ...rest }: UseTrackToggleProps): { toggle: ((forceState?: boolean) => Promise) | ((forceState?: boolean, captureOptions?: CaptureOptionsBySource | undefined) => Promise); enabled: boolean; pending: boolean; track: LocalTrackPublication | undefined; buttonProps: React_2.ButtonHTMLAttributes; }; /** @public */ export declare interface UseTrackToggleProps extends Omit, 'showIcon'> { room?: Room; } /** * @returns An object consisting of `segments` with maximum length of opts.bufferSize * @alpha * @deprecated Use useTranscription instead */ export declare function useTrackTranscription(trackRef: TrackReferenceOrPlaceholder | undefined, options?: TrackTranscriptionOptions): { segments: ReceivedTranscriptionSegment[]; }; /** * @alpha * Hook for tracking the volume of an audio track using the Web Audio API. 
*/ export declare function useTrackVolume(trackOrTrackReference?: LocalAudioTrack | RemoteAudioTrack | TrackReference, options?: AudioAnalyserOptions): number; /** * @beta * useTranscriptions is a hook that returns the transcriptions for the given participant identities and track sids, * if no options are provided, it will return all transcriptions * @example * ```tsx * const transcriptions = useTranscriptions(); * return
{transcriptions.map((transcription) => transcription.text)}
; * ``` */ export declare function useTranscriptions(opts?: UseTranscriptionsOptions): TextStreamData[]; /** * @beta */ export declare interface UseTranscriptionsOptions { room?: Room; participantIdentities?: string[]; trackSids?: string[]; } /** * The `useVisualStableUpdate` hook is used to prevent visually jarring jumps and shifts of elements * in an array. The algorithm only starts to update when there are more items than visually fit * on a page. If this is the case, it will make sure that speaking participants move to the first * page and are always visible. * @remarks * Updating the array can occur because attendees leave or join a room, or because they mute/unmute * or start speaking. * The hook is used for the `GridLayout` and `CarouselLayout` components. * * @example * ```tsx * const trackRefs = useTracks(); * const updatedTrackRefs = useVisualStableUpdate(trackRefs, itemPerPage); * ``` * @public */ export declare function useVisualStableUpdate( /** `TrackReference`s to display in the grid. */ trackReferences: TrackReferenceOrPlaceholder[], maxItemsOnPage: number, options?: UseVisualStableUpdateOptions): TrackReferenceOrPlaceholder[]; /** @public */ export declare interface UseVisualStableUpdateOptions { /** Overwrites the default sort function. */ customSortFunction?: (trackReferences: TrackReferenceOrPlaceholder[]) => TrackReferenceOrPlaceholder[]; } /** * This hook looks for the first agent-participant in the room. * @remarks This hook requires an agent running with livekit-agents \>= 0.9.0 * @example * ```tsx * const { state, audioTrack, agentTranscriptions, agentAttributes } = useVoiceAssistant(); * ``` * @beta */ export declare function useVoiceAssistant(): VoiceAssistant; /** * The `VideoConference` ready-made component is your drop-in solution for a classic video conferencing application. 
* It provides functionality such as focusing on one participant, grid view with pagination to handle large numbers * of participants, basic non-persistent chat, screen sharing, and more. * * @remarks * The component is implemented with other LiveKit components like `FocusContextProvider`, * `GridLayout`, `ControlBar`, `FocusLayoutContainer` and `FocusLayout`. * You can use these components as a starting point for your own custom video conferencing application. * * @example * ```tsx * * * * ``` * @public */ export declare function VideoConference({ chatMessageFormatter, chatMessageDecoder, chatMessageEncoder, SettingsComponent, ...props }: VideoConferenceProps): React_2.JSX.Element; /** * @public */ export declare interface VideoConferenceProps extends React_2.HTMLAttributes { chatMessageFormatter?: MessageFormatter; chatMessageEncoder?: MessageEncoder; chatMessageDecoder?: MessageDecoder; /** @alpha */ SettingsComponent?: React_2.ComponentType; } /** * The `VideoTrack` component is responsible for rendering participant video tracks like `camera` and `screen_share`. * This component must have access to the participant's context, or alternatively pass it a `Participant` as a property. * * @example * ```tsx * * ``` * @see {@link @livekit/components-react#ParticipantTile | ParticipantTile} * @public */ export declare const VideoTrack: (props: VideoTrackProps & React_2.RefAttributes) => React_2.ReactNode; /** @public */ export declare interface VideoTrackProps extends React_2.VideoHTMLAttributes { /** The track reference of the track to render. */ trackRef?: TrackReference; onTrackClick?: (evt: ParticipantClickEvent) => void; onSubscriptionStatusChanged?: (subscribed: boolean) => void; manageSubscription?: boolean; } /** * @beta */ export declare interface VoiceAssistant { /** * The agent participant. */ agent: RemoteParticipant | undefined; /** * The current state of the agent. 
*/ state: AgentState; /** * The microphone track published by the agent or associated avatar worker (if any). */ audioTrack: TrackReference | undefined; /** * The camera track published by the agent or associated avatar worker (if any). */ videoTrack: TrackReference | undefined; /** * The transcriptions of the agent's microphone track (if any). */ agentTranscriptions: ReceivedTranscriptionSegment[]; /** * The agent's participant attributes. */ agentAttributes: RemoteParticipant['attributes'] | undefined; } /** * @example * ```tsx * * * * ``` * @beta */ export declare function VoiceAssistantControlBar({ controls, saveUserChoices, onDeviceError, ...props }: VoiceAssistantControlBarProps): React_2.JSX.Element; /** @beta */ export declare type VoiceAssistantControlBarControls = { microphone?: boolean; leave?: boolean; }; /** @beta */ export declare interface VoiceAssistantControlBarProps extends React_2.HTMLAttributes { onDeviceError?: (error: { source: Track.Source; error: Error; }) => void; controls?: VoiceAssistantControlBarControls; /** * If `true`, the user's device choices will be persisted. * This will enable the user to have the same device choices when they rejoin the room. * @defaultValue true */ saveUserChoices?: boolean; } /** @internal */ declare type WidgetContextType = { dispatch?: React_2.Dispatch; state?: WidgetState; }; export { WidgetState } export { }