import type * as GoogleGenAITypes from '@google/genai';
import * as THREE from 'three';
import * as xb from 'xrblocks';

/**
 * Events emitted by GeminiManager, extending the standard three.js
 * Object3D event map.
 */
export interface GeminiManagerEventMap extends THREE.Object3DEventMap {
    /** Transcription text for the user's (input) audio. */
    inputTranscription: {
        message: string;
    };
    /** Transcription text for the model's (output) audio. */
    outputTranscription: {
        message: string;
    };
    /** The model finished its conversational turn. No payload. */
    turnComplete: object;
    /** The model's output was interrupted. No payload. */
    interrupted: object;
}

/**
 * Manages a Gemini Live session inside an XR scene: captures microphone
 * audio and periodic camera screenshots, streams them to the model, and
 * schedules playback of the model's audio responses.
 *
 * NOTE(review): the original declaration used bare `Set` and bare `Promise`
 * types, which do not compile under strict TypeScript ("Generic type
 * requires 1 type argument(s)"). `Set<AudioBufferSourceNode>` and
 * `Promise<void>` are assumed below — confirm the concrete element and
 * resolution types against the implementation before relying on them.
 */
export declare class GeminiManager extends xb.Script {
    /** Device camera used for screenshot capture, when available. */
    xrDeviceCamera?: xb.XRDeviceCamera;
    /** xrblocks AI facade used to talk to the Gemini Live API. */
    ai: xb.AI;
    /** Microphone capture stream; null until audio capture is set up. */
    audioStream: MediaStream | null;
    /** Web Audio context for capture/playback; null until initialized. */
    audioContext: AudioContext | null;
    /** Source node wrapping audioStream; null until capture starts. */
    sourceNode: MediaStreamAudioSourceNode | null;
    /** Worklet node processing captured audio; null until capture starts. */
    processorNode: AudioWorkletNode | null;
    /** Source nodes currently queued/playing. */
    // NOTE(review): element type assumed — original was a bare `Set`.
    queuedSourceNodes: Set<AudioBufferSourceNode>;
    /** True while a live AI session is active. */
    isAIRunning: boolean;
    /** Decoded audio chunks waiting to be scheduled for playback. */
    audioQueue: AudioBuffer[];
    /** AudioContext time at which the next queued buffer should start. */
    nextAudioStartTime: number;
    private screenshotInterval?;
    /** Accumulated input transcription for the current turn. */
    currentInputText: string;
    /** Accumulated output transcription for the current turn. */
    currentOutputText: string;
    /** Tools exposed to the model during the live session. */
    tools: xb.Tool[];
    /** How far ahead (seconds) audio buffers are scheduled. */
    scheduleAheadTime: number;
    /** MIME type used when encoding camera screenshots. */
    cameraMimeType: string;
    /** Encoder quality for screenshots — presumably in 0..1; verify. */
    cameraQuality: number;
    constructor();
    init(): void;
    /**
     * Starts a Gemini Live session.
     * @param liveParams - Optional live-connection configuration.
     * @param model - Optional model identifier override.
     */
    startGeminiLive({ liveParams, model, }?: {
        liveParams?: GoogleGenAITypes.LiveConnectConfig;
        model?: string;
    }): Promise<void>;
    /** Stops the live session and releases associated resources. */
    stopGeminiLive(): Promise<void>;
    /** Requests microphone access and wires up the capture graph. */
    setupAudioCapture(): Promise<void>;
    /** Connects to the live AI backend with the given configuration. */
    startLiveAI(params: GoogleGenAITypes.LiveConnectConfig, model?: string): Promise<void>;
    /** Begins periodic screenshot capture at the given interval. */
    startScreenshotCapture(intervalMs?: number): void;
    /** Captures one screenshot and sends it to the model. */
    captureAndSendScreenshot(): Promise<void>;
    /** Sends raw PCM audio bytes to the live session. */
    sendAudioData(audioBuffer: ArrayBuffer): void;
    /** Sends one base64-encoded video frame to the live session. */
    sendVideoFrame(base64Image: string): void;
    /** Lazily creates/resumes the AudioContext used for playback. */
    initializeAudioContext(): Promise<void>;
    /** Decodes a base64 audio chunk and queues it for playback. */
    playAudioChunk(audioData: string): Promise<void>;
    /** Schedules queued buffers onto the audio timeline. */
    scheduleAudioBuffers(): void;
    /** Stops all queued/playing audio immediately. */
    stopPlayingAudio(): void;
    /** Tears down capture/playback resources. */
    cleanup(): void;
    /** Dispatches transcription/turn events from a live server message. */
    handleAIMessage(message: GoogleGenAITypes.LiveServerMessage): void;
    /** Encodes binary audio/image data as base64 for transport. */
    arrayBufferToBase64(buffer: ArrayBuffer): string;
    /** Decodes base64 payloads received from the model. */
    base64ToArrayBuffer(base64: string): ArrayBuffer;
    dispose(): void;
}