import { Analytics } from '../../services/analytics/mixpanel';
import { VideoRTCStatsReport } from '../../services/streaming-manager/stats/report';
import { Auth } from '../auth';
import { Interrupt } from '../entities';
import { ChatProgressCallback } from '../entities/agents/manager';
import { CreateClipStreamRequest, CreateTalkStreamRequest, SendClipStreamPayload, SendTalkStreamPayload } from './api';
import { ICreateStreamRequestResponse, IceCandidate, SendStreamPayloadResponse, Status } from './rtc';

/** Whether the compatibility transport is forced on, forced off, or auto-detected. */
export type CompatibilityMode = 'on' | 'off' | 'auto';

/** Coarse lifecycle state of the inbound video stream. */
export declare enum StreamingState {
    Start = "START",
    Stop = "STOP"
}

/** Observed network-connectivity quality for the active session. */
export declare enum ConnectivityState {
    Strong = "STRONG",
    Weak = "WEAK",
    Unknown = "UNKNOWN"
}

/** What the remote agent is currently doing. */
export declare enum AgentActivityState {
    Idle = "IDLE",
    Loading = "LOADING",
    Talking = "TALKING",
    ToolActive = "TOOL_ACTIVE"
}

/**
 * Wire-level event names delivered over the stream's data channel.
 * NOTE(review): `StreamFailed` maps to "stream/error" while `StreamVideoError`
 * maps to "stream-video/error" — the asymmetric naming is preserved from the
 * original; confirm it matches the server-side event names before renaming.
 */
export declare enum StreamEvents {
    ChatAnswer = "chat/answer",
    ChatPartial = "chat/partial",
    ChatAudioTranscribed = "chat/audio-transcribed",
    StreamDone = "stream/done",
    StreamStarted = "stream/started",
    StreamFailed = "stream/error",
    StreamReady = "stream/ready",
    StreamCreated = "stream/created",
    StreamInterrupt = "stream/interrupt",
    StreamVideoCreated = "stream-video/started",
    StreamVideoDone = "stream-video/done",
    StreamVideoError = "stream-video/error",
    StreamVideoRejected = "stream-video/rejected",
    ToolCalling = "tool/calling",
    ToolResult = "tool/result"
}

/** RTCPeerConnection-style connection lifecycle states (plus SDK-specific values). */
export declare enum ConnectionState {
    New = "new",
    Fail = "fail",
    Connected = "connected",
    Connecting = "connecting",
    Closed = "closed",
    Completed = "completed",
    Disconnecting = "disconnecting",
    Disconnected = "disconnected"
}

/** Which streaming pipeline variant is in use. */
export declare enum StreamType {
    Legacy = "legacy",
    Fluent = "fluent"
}

/**
 * Callbacks the streaming manager invokes as the session progresses.
 * All members are optional; consumers subscribe only to what they need.
 */
export interface ManagerCallbacks {
    /** Chat progress updates (partial/final answers). */
    onMessage?: ChatProgressCallback;
    /** Connection lifecycle transitions; `reason` may explain failures/disconnects. */
    onConnectionStateChange?: (state: ConnectionState, reason?: string) => void;
    /** Video start/stop, optionally with the RTC stats collected for the segment. */
    onVideoStateChange?: (state: StreamingState, report?: VideoRTCStatsReport) => void;
    /** Fired when the remote MediaStream is ready to attach to a <video> element. */
    onSrcObjectReady?: (value: MediaStream) => void;
    /** Error plus structured context for logging/analytics. */
    onError?: (error: Error, errorData: object) => void;
    onConnectivityStateChange?: (state: ConnectivityState) => void;
    onAgentActivityStateChange?: (state: AgentActivityState) => void;
    /** Current video id, or null when no video is active. */
    onVideoIdChange?: (videoId: string | null) => void;
    /** Identifiers of the newly created stream session. */
    onStreamCreated?: (stream: {
        stream_id: string;
        session_id: string;
        agent_id: string;
    }) => void;
    onStreamReady?: () => void;
    /** The user interrupted the agent mid-utterance. */
    onInterruptDetected?: (interrupt: Interrupt) => void;
    /** Tool invocation lifecycle: `event` discriminates which payload shape `data` has. */
    onToolEvent?: (event: StreamEvents.ToolCalling | StreamEvents.ToolResult, data: ToolCallingPayload | ToolResultPayload) => void;
    /** Whether the agent can currently be interrupted. */
    onInterruptibleChange?: (interruptible: boolean) => void;
    /** First audio frame detected; carries latency measurements. */
    onFirstAudioDetected?: (metrics: AudioDetectionMetrics) => void;
}

/** Latency measurements reported when the first audio is detected (milliseconds, presumably — TODO confirm units). */
export interface AudioDetectionMetrics {
    latency?: number;
    networkLatency?: number;
}

/** Union of all callback names, useful for dynamic subscription/dispatch. */
export type ManagerCallbackKeys = keyof ManagerCallbacks;

/** End-user metadata forwarded when creating a stream. */
export interface StreamEndUserData {
    plan?: string;
}

/** Options for creating a talk stream; extends the raw API request shape. */
export interface TalkStreamOptions extends CreateTalkStreamRequest {
    fluent?: boolean;
    end_user_data?: StreamEndUserData;
}

/** Options for creating a clip stream; extends the raw API request shape. */
export interface ClipStreamOptions extends CreateClipStreamRequest {
    fluent?: boolean;
    end_user_data?: StreamEndUserData;
}

export type CreateStreamOptions = TalkStreamOptions | ClipStreamOptions;

/**
 * Maps a stream-creation options type to the payload type its stream accepts.
 * NOTE(review): the generic parameter list was missing in the source (angle-bracket
 * content appears to have been stripped during extraction); `<T extends
 * CreateStreamOptions>` is reconstructed from the conditional's branches — confirm.
 */
export type PayloadType<T extends CreateStreamOptions> = T extends TalkStreamOptions
    ? SendTalkStreamPayload
    : T extends ClipStreamOptions
      ? SendClipStreamPayload
      : never;

/**
 * Low-level WebRTC signalling API used by the streaming manager.
 * NOTE(review): every `Promise` type argument below was missing in the source
 * (stripped angle brackets). `ICreateStreamRequestResponse`, `Status`, and
 * `SendStreamPayloadResponse` are imported from './rtc' yet were otherwise
 * unused, so they are restored here by best fit — verify each return type,
 * especially `addIceCandidate` and `close`, against the implementation.
 */
export interface RtcApi {
    /** Creates a new stream; `signal` aborts the underlying request. */
    createStream(options: CreateStreamOptions, signal?: AbortSignal): Promise<ICreateStreamRequestResponse>;
    /** Sends the SDP answer to complete connection setup. */
    startConnection(streamId: string, answer: RTCSessionDescriptionInit, sessionId?: string, signal?: AbortSignal): Promise<Status>;
    /** Forwards a locally gathered ICE candidate to the remote peer. */
    addIceCandidate(streamId: string, candidate: IceCandidate, sessionId: string, signal?: AbortSignal): Promise<Status>;
    /** Submits a talk/clip payload to an established stream. */
    sendStreamRequest(streamId: string, sessionId: string, payload: SendClipStreamPayload | SendTalkStreamPayload): Promise<SendStreamPayloadResponse>;
    /** Tears down the stream session. */
    close(streamId: string, sessionId: string): Promise<void>;
}

/** Construction options for a streaming manager instance. */
export interface StreamingManagerOptions {
    callbacks: ManagerCallbacks;
    baseURL?: string;
    debug?: boolean;
    auth: Auth;
    analytics: Analytics;
    /**
     * Optional MediaStream to use for microphone input.
     * If provided, the audio track from this stream will be published to the data channel.
     * Supported by LiveKit streaming managers.
     */
    microphoneStream?: MediaStream;
}

/**
 * Raw per-interval RTC stats snapshot.
 * NOTE(review): the `any`-typed fields appear to be numbers sourced from an
 * RTCStatsReport; left as `any` to avoid a breaking change — consider
 * narrowing to `number` once all producers are verified.
 */
export interface SlimRTCStatsReport {
    index: number;
    codec: string;
    rtt: number;
    duration?: number;
    bitrate?: number;
    timestamp: any;
    bytesReceived: any;
    packetsReceived: any;
    packetsLost: any;
    framesDropped: any;
    framesDecoded: any;
    jitter: any;
    jitterBufferDelay: number;
    jitterBufferEmittedCount: number;
    avgJitterDelayInInterval: number;
    frameWidth: any;
    frameHeight: any;
    framesPerSecond: any;
    freezeCount: number;
    freezeDuration: number;
}

/** Aggregated RTC stats shaped for analytics reporting. */
export interface AnalyticsRTCStatsReport {
    timestamp?: number;
    duration: number;
    bytesReceived: number;
    bitrate: number;
    packetsReceived: number;
    packetsLost: number;
    framesDropped: number;
    framesDecoded: number;
    jitter: number;
    jitterBufferDelay: number;
    jitterBufferEmittedCount: number;
    avgJitterDelayInInterval: number;
    framesPerSecond: number;
    freezeCount: number;
    freezeDuration: number;
    lowFpsCount?: number;
    causes?: string[];
}

/** Data-channel payload for a stream interrupt event. */
export interface StreamInterruptPayload {
    type: StreamEvents.StreamInterrupt;
    videoId: string;
    timestamp: number;
}

/**
 * Handler a client registers to service agent tool calls.
 * NOTE(review): type arguments were missing in the source; `Record<string,
 * unknown>` / `Promise<unknown>` are the safest reconstruction — confirm.
 */
export type ClientToolHandler = (args: Record<string, unknown>) => Promise<unknown>;

/** Payload of a StreamEvents.ToolCalling event. */
export interface ToolCallingPayload {
    execution_id: string;
    tool_name: string;
    /** NOTE(review): type arguments were missing in the source; reconstructed as Record<string, unknown> — confirm. */
    arguments: Record<string, unknown>;
    created_at: string;
}

/** Payload of a StreamEvents.ToolResult event. */
export interface ToolResultPayload {
    execution_id: string;
    tool_name: string;
    duration_ms: number;
    result?: unknown;
    error_message?: string | null;
    created_at: string;
}