import { BaseConnection, type SessionConfig, type FormatConfig } from "./BaseConnection.js";
import { type OutgoingSocketEvent } from "./events.js";
import { Room } from "livekit-client";
import type { InputController } from "../InputController.js";
import type { OutputController } from "../OutputController.js";
import { type VolumeProvider } from "./volumeProvider.js";

/**
 * Configuration for establishing a WebRTC-based session.
 * Extends the shared {@link SessionConfig} with an optional debug callback
 * that receives opaque diagnostic payloads (typed `unknown`; callers narrow).
 */
export type ConnectionConfig = SessionConfig & {
    onDebug?: (info: unknown) => void;
};

/**
 * WebRTC transport implementation of {@link BaseConnection}, backed by a
 * LiveKit {@link Room}.
 *
 * Instances are obtained exclusively through the async factory
 * {@link WebRTCConnection.create} — the constructor is `private`, so the
 * factory can complete asynchronous setup (room connection, analyser wiring)
 * before handing the instance to callers.
 */
export declare class WebRTCConnection extends BaseConnection {
    conversationId: string;
    readonly inputFormat: FormatConfig;
    readonly outputFormat: FormatConfig;
    /** Underlying LiveKit room for this session. Exposed via {@link getRoom}. */
    private room;
    private isConnected;
    private audioEventId;
    private audioCaptureContext;
    private audioElements;
    private outputDeviceId;
    /** Analyser/context pair feeding the input-side volume provider. */
    private inputAnalyser;
    private inputAudioContext;
    private inputVolumeProvider;
    private outputAnalyser;
    private outputVolumeProvider;
    private _isMuted;
    readonly input: InputController;
    readonly output: OutputController;
    private constructor();
    /**
     * Async factory: connects and fully initializes a WebRTCConnection.
     * (Restored type argument — bare `Promise` is invalid TS; the
     * `private constructor()` + static factory pattern implies the factory
     * resolves with the constructed instance.)
     */
    static create(config: ConnectionConfig): Promise<WebRTCConnection>;
    private setupRoomEventListeners;
    close(): void;
    /**
     * Sends an outgoing socket event over the connection.
     * NOTE(review): type argument restored as `Promise<void>` — resolution
     * payload is not visible in this declaration; confirm against the
     * implementation in WebRTCConnection.ts.
     */
    sendMessage(message: OutgoingSocketEvent): Promise<void>;
    getRoom(): Room;
    /**
     * (Re-)creates an AudioContext + AnalyserNode from the given track and
     * installs the corresponding VolumeProvider. Called once during create()
     * and again after an input device switch so the analyser follows the
     * active mic track.
     */
    private setupInputAnalyser;
    setInputVolumeProvider(provider: VolumeProvider): void;
    setOutputVolumeProvider(provider: VolumeProvider): void;
    private setupAudioCapture;
    setAudioVolume(volume: number): void;
    /**
     * Switches audio output to the given device.
     * NOTE(review): `Promise<void>` restored (argument was stripped); confirm
     * against the implementation.
     */
    setAudioOutputDevice(deviceId: string): Promise<void>;
    /**
     * Switches audio input (microphone) to the given device.
     * NOTE(review): `Promise<void>` restored (argument was stripped); confirm
     * against the implementation.
     */
    setAudioInputDevice(deviceId: string): Promise<void>;
}
//# sourceMappingURL=WebRTCConnection.d.ts.map