/**
 * WebCodecs Player Implementation
 *
 * Low-latency WebSocket streaming using WebCodecs API for video/audio decoding.
 * Decoding runs in a Web Worker for optimal performance.
 *
 * Features:
 * - Ultra-low latency streaming (configurable via profiles)
 * - Worker-based VideoDecoder/AudioDecoder
 * - Adaptive playback speed for live catchup/slowdown
 * - Jitter compensation
 * - Firefox polyfill for MediaStreamTrackGenerator
 *
 * Protocol: MistServer raw WebSocket frames (12-byte header + data)
 */
import { BasePlayer } from "../../core/PlayerInterface";
import type {
  StreamSource,
  StreamInfo,
  PlayerOptions,
  PlayerCapability,
} from "../../core/PlayerInterface";
import type {
  TrackInfo,
  CodecDataMessage,
  InfoMessage,
  OnTimeMessage,
  SetSpeedMessage,
  RawChunk,
  WebCodecsPlayerOptions,
  WebCodecsStats,
  MainToWorkerMessage,
  WorkerToMainMessage,
  RenderMode,
  WorkerStats,
} from "./types";
import type { MetaTrackEvent } from "../../types";
import { WebSocketController } from "./WebSocketController";
import { SyncController } from "./SyncController";
import { MetadataWebSocket, buildMetadataWsUrl } from "./MetadataWebSocket";
import { getPresentationTimestamp, isInitData } from "./RawChunkParser";
import { mergeLatencyProfile, selectDefaultProfile } from "./LatencyProfiles";
import {
  createTrackGenerator,
  hasNativeMediaStreamTrackGenerator,
} from "./polyfills/MediaStreamTrackGenerator";
import { translateCodec, buildDescription } from "../../core/CodecUtils";
import { WebGLRenderer } from "../../rendering/WebGLRenderer";
import { CanvasRenderer } from "../../rendering/CanvasRenderer";
import { AudioWorkletRenderer } from "../../rendering/AudioWorkletRenderer";

/**
 * Detect if running on Safari (which has VideoTrackGenerator in worker
 * but not MediaStreamTrackGenerator on main thread)
 */
function isSafari(): boolean {
  if (typeof navigator === "undefined") return false;
  const ua = navigator.userAgent;
  return /^((?!chrome|android).)*safari/i.test(ua);
}

// Import inline worker (bundled via rollup-plugin-web-worker-loader)

/**
 * Convert string (ASCII with escaped chars) to Uint8Array
 * Reference: rawws.js:76-84 - init data is raw ASCII from stream info JSON
 */
function str2bin(str: string): Uint8Array {
  const out = new Uint8Array(str.length);
  for (let i = 0; i < str.length; i++) {
    out[i] = str.charCodeAt(i);
  }
  return out;
}
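// Example (illustrative): MistServer delivers codec init data as an escaped ASCII
// string inside the stream-info JSON, e.g. an AVCC record beginning "\u0001d\u0000\u001f".
// str2bin("\u0001d\u0000\u001f") → Uint8Array [0x01, 0x64, 0x00, 0x1f]. The exact
// bytes here are hypothetical and depend on the stream.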
/**
 * Create a TimeRanges-like object from an array of [start, end] pairs
 */
function createTimeRanges(ranges: [number, number][]): TimeRanges {
  return {
    length: ranges.length,
    start(index: number): number {
      if (index < 0 || index >= ranges.length) throw new DOMException("Index out of bounds");
      return ranges[index][0];
    },
    end(index: number): number {
      if (index < 0 || index >= ranges.length) throw new DOMException("Index out of bounds");
      return ranges[index][1];
    },
  };
}

/**
 * Type for requestVideoFrameCallback metadata
 */
interface VideoFrameCallbackMetadata {
  presentationTime: DOMHighResTimeStamp;
  expectedDisplayTime: DOMHighResTimeStamp;
  width: number;
  height: number;
  mediaTime: number;
  presentedFrames: number;
  processingDuration?: number;
}

/**
 * Pipeline state for tracking per-track resources
 */
interface PipelineInfo {
  idx: number;
  track: TrackInfo;
  generator: ReturnType<typeof createTrackGenerator> | null;
  configured: boolean;
  /** Video: drop delta frames until first keyframe after configure */
  awaitingKeyframe: boolean;
  /** Safari audio: writer for audio frames relayed from worker */
  safariAudioWriter?: WritableStreamDefaultWriter<AudioData>;
  /** Safari audio: the audio generator created on main thread */
  safariAudioGenerator?: MediaStreamTrack;
  /** Direct rendering audio: writer for routing AudioData through polyfill → video.srcObject */
  audioWriter?: WritableStreamDefaultWriter<AudioData>;
}

/**
 * WebCodecsPlayerImpl - WebCodecs-based low-latency player
 */
export class WebCodecsPlayerImpl extends BasePlayer {
  readonly capability: PlayerCapability = {
    name: "WebCodecs Player",
    shortname: "webcodecs",
    priority: 0, // Highest priority - lowest latency option
    // Raw WebSocket (12-byte header + codec frames) - NOT MP4-muxed
    // MistServer's output_wsraw.cpp provides full codec negotiation (audio + video)
    // MistServer's output_h264.cpp uses same 12-byte header but Annex B payload (video-only)
    // NOTE: ws/video/mp4 is MP4-fragmented which needs MEWS player (uses MSE)
    mimes: [
      "ws/video/raw",
      "wss/video/raw", // Raw codec frames: AVCC init, Annex B frame data (audio + video)
      "ws/video/h264",
      "wss/video/h264", // Annex B H264/HEVC, no init frame (video-only, same 12-byte header)
    ],
  };

  private wsController: WebSocketController | null = null;
  private syncController: SyncController | null = null;
  private worker: Worker | null = null;
  private mediaStream: MediaStream | null = null;
  private container: HTMLElement | null = null;

  // Rendering pipeline (WebGL mode)
  private renderCanvas: HTMLCanvasElement | null = null;
  private webglRenderer: WebGLRenderer | null = null;
  private canvasRenderer: CanvasRenderer | null = null;
  private audioRenderer: AudioWorkletRenderer | null = null;
  private useDirectRendering = false;

  private pipelines = new Map<number, PipelineInfo>();
  private tracks: TrackInfo[] = [];
  private tracksByIndex = new Map<number, TrackInfo>(); // Track metadata indexed by track idx
  private queuedInitData = new Map<number, Uint8Array>(); // Queued INIT data waiting for track info
  private queuedChunks = new Map<number, RawChunk[]>(); // Queued chunks waiting for decoder config
  private isDestroyed = false;
  private debugging = false;
  private verboseDebugging = false;
  private streamType: "live" | "vod" = "live";
  /** Payload format: 'avcc' for ws/video/raw, 'annexb' for ws/video/h264 */
  private payloadFormat: "avcc" | "annexb" = "avcc";
  private workerUidCounter = 0;
  private workerListeners = new Map<number, (msg: WorkerToMainMessage) => void>();

  // Playback state
  private _duration = Infinity;
  private _currentTime = 0;
  private _bufferMs = 0;
  private _seekableBeginS: number | null = null;
  private _seekableEndS: number | null = null;
  /** Date.now() when last on_time was received — for moving live edge (OG rawws.js:1477) */
  private _onTimeReceivedAt: number = 0;
  private _activeSeekId: number | undefined = undefined;
  private _seekSafetyTimeout: ReturnType<typeof setTimeout> | null = null;
  private _avDrift = 0;
  private _frameCallbackId: number | null = null;
  private _statsInterval: ReturnType<typeof setInterval> | null = null;
  private _framesDropped = 0;
  private _framesDecoded = 0;
  private _bytesReceived = 0;
  private _messagesReceived = 0;
  private _isPaused = true;
  private _lastWorkerStats: WorkerStats | null = null;
  private _suppressPlayPauseSync = false;
  private _onVideoPlay?: () => void;
  private _onVideoPause?: () => void;
  private _pendingStepPause = false;
  private _stepPauseTimeout: ReturnType<typeof setTimeout> | null = null;

  // Metadata/subtitle track support
  private metadataWs: MetadataWebSocket | null = null;
  private activeSubtitleTrackId: string | null = null;
  private pendingMetaSubs: Array<{
    trackId: string;
    callback: (event: MetaTrackEvent) => void;
  }> = [];

  // Codec support cache - keyed by "codec|init_hash"
  private static codecCache = new Map<string, boolean>();

  /**
   * Get cache key for a track's codec configuration
   */
  private static getCodecCacheKey(track: {
    codec: string;
    codecstring?: string;
    init?: string;
  }): string {
    const codecStr = track.codecstring || track.codec?.toLowerCase() || "";
    // Simple hash of init data for cache key (just first/last bytes + length)
    const init = track.init ?? "";
    const initHash =
      init.length > 0
        ? `${init.length}_${init.charCodeAt(0)}_${init.charCodeAt(init.length - 1)}`
        : "";
    return `${codecStr}|${initHash}`;
  }
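  // Example (illustrative): an H264 track with codecstring "avc1.640028" and a
  // 32-byte AVCC init record whose first byte is 1 and last byte is 104 would be
  // keyed as "avc1.640028|32_1_104". The byte values here are hypothetical.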
  /**
   * Test if a track's codec is supported by WebCodecs
   * Reference: rawws.js:75-137 - isTrackSupported()
   */
  static async isTrackSupported(track: TrackInfo): Promise<{ supported: boolean; config: any }> {
    const cacheKey = WebCodecsPlayerImpl.getCodecCacheKey(track);

    // Check cache first
    if (WebCodecsPlayerImpl.codecCache.has(cacheKey)) {
      const cached = WebCodecsPlayerImpl.codecCache.get(cacheKey)!;
      return { supported: cached, config: { codec: track.codecstring || track.codec } };
    }

    // Build codec config using translateCodec for proper WebCodecs codec strings
    const codecStr = translateCodec(track as any);
    const config: any = { codec: codecStr };

    // Build properly formatted description from init data
    const desc =
      track.init && track.init !== "" ? buildDescription(track.codec, str2bin(track.init)) : null;
    if (desc) config.description = desc;

    // Codecs that REQUIRE description per W3C WebCodecs registrations
    const DESCRIPTION_REQUIRED = ["vorbis"];
    if (DESCRIPTION_REQUIRED.includes(codecStr) && !config.description) {
      WebCodecsPlayerImpl.codecCache.set(cacheKey, false);
      return { supported: false, config };
    }

    let result: { supported: boolean; config: any };
    try {
      switch (track.type) {
        case "video": {
          // Special handling for JPEG - uses ImageDecoder
          if (track.codec === "JPEG") {
            if (!("ImageDecoder" in window)) {
              result = { supported: false, config: { codec: "image/jpeg" } };
            } else {
              // @ts-ignore - ImageDecoder may not have types
              const isSupported = await (window as any).ImageDecoder.isTypeSupported("image/jpeg");
              result = { supported: isSupported, config: { codec: "image/jpeg" } };
            }
          } else {
            // Use VideoDecoder.isConfigSupported()
            const videoResult = await VideoDecoder.isConfigSupported(config as VideoDecoderConfig);
            result = { supported: videoResult.supported === true, config: videoResult.config };
          }
          break;
        }
        case "audio": {
          // Audio requires numberOfChannels and sampleRate
          config.numberOfChannels = track.channels ?? 2;
          config.sampleRate = track.rate ?? 48000;
          const audioResult = await AudioDecoder.isConfigSupported(config as AudioDecoderConfig);
          result = { supported: audioResult.supported === true, config: audioResult.config };
          break;
        }
        default:
          result = { supported: false, config };
      }
    } catch (err) {
      console.warn(`[WebCodecs] isConfigSupported failed for ${track.codec}:`, err);
      result = { supported: false, config };
    }

    // Cache the result
    WebCodecsPlayerImpl.codecCache.set(cacheKey, result.supported);
    return result;
  }
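  // Usage sketch (illustrative): callers await the probe before negotiating codecs.
  //   const { supported } = await WebCodecsPlayerImpl.isTrackSupported(track);
  //   if (!supported) { /* fall back to another player or drop the track */ }
  // Repeated probes for the same codec+init pair are answered from codecCache.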
  /**
   * Validate all tracks and return which are supported
   * Returns array of supported track types ('video', 'audio')
   */
  static async validateTracks(tracks: TrackInfo[]): Promise<string[]> {
    const supportedTypes: Set<string> = new Set();

    const validationPromises = tracks
      .filter((t) => t.type === "video" || t.type === "audio")
      .map(async (track) => {
        const result = await WebCodecsPlayerImpl.isTrackSupported(track);
        if (result.supported) {
          supportedTypes.add(track.type);
        }
        return { track, supported: result.supported };
      });

    const results = await Promise.all(validationPromises);

    // Log validation results for debugging
    for (const { track, supported } of results) {
      console.debug(
        `[WebCodecs] Track ${track.idx} (${track.type} ${track.codec}): ${supported ? "supported" : "UNSUPPORTED"}`
      );
    }

    return Array.from(supportedTypes);
  }

  /**
   * Determine whether to use WebGL direct rendering based on render mode and browser.
   * Auto mode: use WebGL on Firefox/Safari (replaces polyfills), native on Chrome.
   */
  private static shouldUseDirectRendering(mode: RenderMode): boolean {
    if (mode === "webgl") return true;
    if (mode === "native") return false;
    // Auto: use direct rendering when native MediaStreamTrackGenerator is absent
    // (Firefox, Safari without polyfill) since WebGL avoids the canvas captureStream hack
    if (!hasNativeMediaStreamTrackGenerator()) return true;
    // On Safari, prefer direct rendering to avoid the frame relay path
    if (isSafari()) return true;
    // Chrome has native MediaStreamTrackGenerator — use it
    return false;
  }

  isMimeSupported(mimetype: string): boolean {
    return this.capability.mimes.includes(mimetype);
  }

  isBrowserSupported(
    mimetype: string,
    source: StreamSource,
    streamInfo: StreamInfo
  ): boolean | string[] {
    // Basic requirements
    if (!("WebSocket" in window)) {
      return false;
    }
    if (!("Worker" in window)) {
      return false;
    }
    if (!("VideoDecoder" in window) || !("AudioDecoder" in window)) {
      // WebCodecs not available (requires HTTPS)
      return false;
    }

    // Check for HTTP/HTTPS mismatch
    const sourceUrl = new URL(source.url.replace(/^ws/, "http"), location.href);
    if (location.protocol === "https:" && sourceUrl.protocol === "http:") {
      return false;
    }

    // Assume all codecs are available — real validation is async and runs in initialize()
    // via checkCodecSupport(). Reference rawws.js:29-32 uses the same pattern:
    // "for now, assume all codecs are available. we will perform the actual testing
    //  in player buildup and nextCombo if it's bad"
    const playableTracks: Record<string, boolean> = {};
    for (const track of streamInfo.meta.tracks) {
      if (track.type === "video" || track.type === "audio") {
        playableTracks[track.type] = true;
      } else if (track.type === "meta" && track.codec === "subtitle") {
        playableTracks["subtitle"] = true;
      }
    }

    // Annex B H264 WebSocket is video-only (no audio payloads)
    if (mimetype.includes("video/h264")) {
      delete playableTracks.audio;
    }

    if (Object.keys(playableTracks).length === 0) {
      return false;
    }
    return Object.keys(playableTracks);
  }
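  // Example (illustrative): for a stream with one H264 track, one AAC track and a
  // subtitle track, isBrowserSupported() returns ["video", "audio", "subtitle"];
  // the same stream over ws/video/h264 yields ["video", "subtitle"] (audio dropped).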
  async initialize(
    container: HTMLElement,
    source: StreamSource,
    options: PlayerOptions,
    streamInfo?: StreamInfo
  ): Promise<HTMLVideoElement> {
    // Clear any leftover state from previous initialization FIRST
    // This fixes a race condition where an async destroy() clears state after a new initialize()
    this.tracksByIndex.clear();
    this.pipelines.clear();
    this.tracks = [];
    this.queuedInitData.clear();
    this.queuedChunks.clear();
    this.isDestroyed = false;
    this.useDirectRendering = false;
    this._duration = Infinity;
    this._currentTime = 0;
    this._bufferMs = 0;
    this._seekableBeginS = null;
    this._seekableEndS = null;
    this._onTimeReceivedAt = 0;
    this._avDrift = 0;
    this._framesDropped = 0;
    this._framesDecoded = 0;
    this._bytesReceived = 0;
    this._messagesReceived = 0;
    this.metadataWs?.destroy();
    this.metadataWs = null;
    this.activeSubtitleTrackId = null;
    this.pendingMetaSubs = [];

    // ws/video/h264 sends no INIT frame — SPS/PPS inline in Annex B bitstream, no description needed.
    // ws/video/raw sends AVCC init data — used as decoder description. Frame data may be Annex B
    // (detected and converted at runtime in the worker).
    this.payloadFormat = source.type?.includes("h264") ? "annexb" : "avcc";
    if (this.payloadFormat === "annexb") {
      this.log("Using Annex B payload format (ws/video/h264)");
    }

    this.container = container;
    container.classList.add("fw-player-container");

    // Pre-populate track metadata from streamInfo (fetched via HTTP before WebSocket)
    // This is how the reference player (rawws.js) gets track info - from MistVideo.info.meta.tracks
    if (streamInfo?.meta?.tracks) {
      this.log(`Pre-populating ${streamInfo.meta.tracks.length} tracks from streamInfo`);
      for (const track of streamInfo.meta.tracks) {
        if (track.idx !== undefined) {
          // Convert StreamTrack to TrackInfo (WebCodecs format)
          const trackInfo: TrackInfo = {
            idx: track.idx,
            type: track.type as TrackInfo["type"],
            codec: track.codec,
            codecstring: track.codecstring,
            init: track.init,
            width: track.width,
            height: track.height,
            fpks: track.fpks,
            channels: track.channels,
            rate: track.rate,
            size: track.size,
          };
          this.tracksByIndex.set(track.idx, trackInfo);
          this.log(`Pre-registered track ${track.idx}: ${track.type} ${track.codec}`);
        }
      }
    }

    // Parse WebCodecs-specific options
    const wcOptions = options as PlayerOptions & WebCodecsPlayerOptions;
    this.debugging = wcOptions.debug ?? wcOptions.devMode ?? false;
    this.verboseDebugging = wcOptions.verboseDebug ?? false;

    // Determine stream type from server API (streamInfo.type), not MIME (source.type)
    this.streamType = streamInfo?.type === "live" ? "live" : "vod";

    // Select latency profile
    const profileName =
      wcOptions.latencyProfile ?? selectDefaultProfile(this.streamType === "live");
    const profile = mergeLatencyProfile(profileName, wcOptions.customLatencyProfile);
    this.log(`Initializing WebCodecs player with ${profile.name} profile`);

    // Create video element
    const video = document.createElement("video");
    video.classList.add("fw-player-video");
    video.setAttribute("playsinline", "");
    video.setAttribute("crossorigin", "anonymous");
    if (options.autoplay) video.autoplay = true;
    if (options.muted) video.muted = true;
    video.controls = options.controls === true;
    if (options.loop && this.streamType !== "live") video.loop = true;
    if (options.poster) video.poster = options.poster;
    this.videoElement = video;
    container.appendChild(video);

    // Keep paused state in sync with actual element state
    this._onVideoPlay = () => {
      if (this._suppressPlayPauseSync) return;
      this._isPaused = false;
      this.sendToWorker({
        type: "frametiming",
        action: "setPaused",
        paused: false,
        uid: this.workerUidCounter++,
      }).catch(() => {});
    };
    this._onVideoPause = () => {
      if (this._suppressPlayPauseSync) return;
      this._isPaused = true;
      this.sendToWorker({
        type: "frametiming",
        action: "setPaused",
        paused: true,
        uid: this.workerUidCounter++,
      }).catch(() => {});
    };
    video.addEventListener("play", this._onVideoPlay);
    video.addEventListener("pause", this._onVideoPause);

    // Determine render mode
    const renderMode: RenderMode = wcOptions.renderMode ?? "auto";
    this.useDirectRendering = WebCodecsPlayerImpl.shouldUseDirectRendering(renderMode);
    if (this.useDirectRendering) {
      this.log(`Using WebGL direct rendering (mode=${renderMode})`);
      this.setupDirectRendering(container, video);
    } else {
      this.log(`Using native MediaStreamTrackGenerator (mode=${renderMode})`);
    }

    // Create MediaStream for output (only used in native mode)
    this.mediaStream = new MediaStream();
    if (!this.useDirectRendering) {
      video.srcObject = this.mediaStream;
    }

    // Initialize worker
    await this.initializeWorker();

    // Initialize sync controller
    this.syncController = new SyncController({
      profile,
      isLive: this.streamType === "live",
      audioClockProvider: this.audioRenderer
        ? () => this.audioRenderer?.getCurrentTime() ?? 0
        : undefined,
      onSpeedChange: (main, tweak) => {
        this.sendToWorker({
          type: "frametiming",
          action: "setSpeed",
          speed: main,
          tweak,
          uid: this.workerUidCounter++,
        });
        if (this.videoElement) {
          this.videoElement.playbackRate = main * tweak;
        }
      },
      onFastForwardRequest: (ms) => {
        this.wsController?.fastForward(ms);
      },
    });
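    // Worked example (illustrative): with a user-selected rate main = 1.0 and a live
    // catch-up tweak = 1.02 from the sync controller, the effective element rate is
    // 1.0 * 1.02 = 1.02 — playing 2% fast until the buffer drains back to target.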
    // Initialize WebSocket - URL should already be .raw from source selection
    this.wsController = new WebSocketController(source.url, {
      debug: this.debugging,
    });
    this.setupWebSocketHandlers();

    // Validate track codecs using isConfigSupported() BEFORE connecting
    // Reference: rawws.js:75-137 tests each track's codec support
    // This fixes "codec unsupported" errors by only sending verified codecs
    const supportedAudioCodecs: Set<string> = new Set();
    const supportedVideoCodecs: Set<string> = new Set();

    if (streamInfo?.meta?.tracks) {
      this.log("Validating track codecs with isConfigSupported()...");
      for (const track of streamInfo.meta.tracks) {
        if (track.type === "video" || track.type === "audio") {
          const trackInfo: TrackInfo = {
            idx: track.idx ?? 0,
            type: track.type as "video" | "audio",
            codec: track.codec,
            codecstring: track.codecstring,
            init: track.init,
            width: track.width,
            height: track.height,
            channels: track.channels,
            rate: track.rate,
          };
          const result = await WebCodecsPlayerImpl.isTrackSupported(trackInfo);
          if (result.supported) {
            if (track.type === "audio") {
              supportedAudioCodecs.add(track.codec);
            } else {
              supportedVideoCodecs.add(track.codec);
            }
            this.log(`Track ${track.idx} (${track.type} ${track.codec}): SUPPORTED`);
          } else {
            this.log(`Track ${track.idx} (${track.type} ${track.codec}): NOT SUPPORTED`, "warn");
          }
        }
      }
    }

    // If the stream has audio tracks but none validated, reject so PlayerManager falls back
    // to a player that can handle the audio (e.g., HLS.js, native HTML5)
    const hasAudioTracks = streamInfo?.meta?.tracks?.some((t) => t.type === "audio");
    if (hasAudioTracks && supportedAudioCodecs.size === 0 && supportedVideoCodecs.size > 0) {
      const audioCodecs = streamInfo!.meta.tracks
        .filter((t) => t.type === "audio")
        .map((t) => t.codec);
      throw new Error(
        `WebCodecs cannot decode audio codec(s): ${audioCodecs.join(", ")}. Falling back.`
      );
    }

    // If no codecs validated, check if we have any tracks at all
    if (supportedAudioCodecs.size === 0 && supportedVideoCodecs.size === 0) {
      // Fallback: Use default codec list if no tracks provided or all failed
      // This handles streams where track info isn't available until WebSocket connects
      this.log("No validated codecs, using default codec list");
      ["AAC", "MP3", "opus", "FLAC", "AC3"].forEach((c) => supportedAudioCodecs.add(c));
      ["H264", "HEVC", "VP8", "VP9", "AV1", "JPEG"].forEach((c) => supportedVideoCodecs.add(c));
    }

    // Connect and request codec data
    // Per MistServer rawws.js line 1544, we need to tell the server what codecs we support
    // Format: [[ [audio codecs], [video codecs] ]] - audio FIRST per Object.values({audio:[], video:[]}) order
    const supportedCombinations: string[][][] = [
      [
        Array.from(supportedAudioCodecs), // Audio codecs (position 0)
        Array.from(supportedVideoCodecs), // Video codecs (position 1)
      ],
    ];
    this.log(
      `Requesting codecs: audio=[${supportedCombinations[0][0].join(", ")}], video=[${supportedCombinations[0][1].join(", ")}]`
    );
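    // Example (illustrative): a stream validating AAC audio and H264 video yields
    //   supportedCombinations = [[["AAC"], ["H264"]]]
    // i.e. one combination, audio codecs at position 0 and video codecs at position 1.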
    try {
      await this.wsController.connect();
      this.wsController.requestCodecData(supportedCombinations);
    } catch (err) {
      this.log(`Failed to connect: ${err}`, "error");
      this.emit("error", err instanceof Error ? err : new Error(String(err)));
      throw err;
    }

    // Proactively create pipelines for pre-populated tracks
    // This ensures pipelines exist when first chunks arrive; they just need init data
    for (const [idx, track] of this.tracksByIndex) {
      if (track.type === "video" || track.type === "audio") {
        this.log(`Creating pipeline proactively for track ${idx} (${track.type} ${track.codec})`);
        await this.createPipeline(track);
      }
    }

    // Initialize metadata WebSocket for subtitle/meta tracks (separate socket)
    this.initMetadataWebSocket(source.url);

    // Set up video event listeners
    this.setupVideoEventListeners(video, options);

    // Set up requestVideoFrameCallback for accurate frame timing
    this.setupFrameCallback();

    this.isDestroyed = false;
    return video;
  }

  /**
   * Set up WebGL canvas + AudioWorklet for direct rendering mode.
   * The canvas overlays the hidden video element.
   */
  private setupDirectRendering(container: HTMLElement, video: HTMLVideoElement): void {
    // Hide the video element (kept for IPlayer interface contract)
    video.style.position = "absolute";
    video.style.width = "0";
    video.style.height = "0";
    video.style.opacity = "0";
    video.style.pointerEvents = "none";

    // Create rendering canvas
    this.renderCanvas = document.createElement("canvas");
    this.renderCanvas.classList.add("fw-player-video", "fw-player-canvas");
    this.renderCanvas.style.width = "100%";
    this.renderCanvas.style.height = "100%";
    this.renderCanvas.style.objectFit = "contain";
    container.appendChild(this.renderCanvas);

    // Try WebGL, fall back to Canvas2D
    try {
      this.webglRenderer = new WebGLRenderer(this.renderCanvas, { prefer16bit: true });
      this.log(
        "WebGL renderer initialized" + (this.webglRenderer.hasWebGL2 ? " (WebGL2)" : " (WebGL1)")
      );
    } catch (err) {
      this.log(`WebGL failed, falling back to Canvas2D: ${err}`, "warn");
      this.canvasRenderer = new CanvasRenderer(this.renderCanvas);
    }

    // AudioWorklet renderer — only for browsers where audio routes through it.
    // Firefox/Safari route audio through polyfill → video.srcObject instead.
    if (hasNativeMediaStreamTrackGenerator() && !isSafari()) {
      this.audioRenderer = new AudioWorkletRenderer();
      this.audioRenderer.onUnderrun = () => {
        this.log("Audio underrun", "warn");
      };
    }
  }
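  // Renderer fallback chain (sketch): WebGLRenderer (WebGL2, else WebGL1) is tried
  // first; if its constructor throws, CanvasRenderer (2D) takes over. A later WebGL
  // context loss is handled again in handleTransferFrame/handleTransferYUV below.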
  /**
   * Handle transferred frame from worker in direct rendering mode.
   */
  private handleTransferFrame(msg: any): void {
    if (this.isDestroyed) {
      try {
        msg.frame?.close();
      } catch {}
      return;
    }

    if (msg.trackType === "video") {
      const frame = msg.frame as VideoFrame;
      if (this.webglRenderer && !this.webglRenderer.isContextLost) {
        this.webglRenderer.render(frame);
      } else if (this.canvasRenderer) {
        this.canvasRenderer.render(frame);
      } else {
        // WebGL context was lost and no Canvas2D fallback — try to create one
        if (this.renderCanvas && this.webglRenderer?.isContextLost) {
          this.log("WebGL context permanently lost, creating Canvas2D fallback");
          this.webglRenderer.destroy();
          this.webglRenderer = null;
          this.canvasRenderer = new CanvasRenderer(this.renderCanvas);
          this.canvasRenderer.render(frame);
        } else {
          frame.close();
        }
      }
    } else if (msg.trackType === "audio") {
      const audioData = msg.frame as AudioData;
      // Check if this audio pipeline uses polyfill → video.srcObject (Firefox/Safari)
      const pipeline = this.pipelines.get(msg.idx);
      if (pipeline?.audioWriter) {
        pipeline.audioWriter.write(audioData).catch(() => {
          try {
            audioData.close();
          } catch {}
        });
      } else if (this.audioRenderer) {
        // Chromium/Brave: AudioWorkletRenderer → speakers directly
        if (!this.audioRenderer.getAudioContext()) {
          this.audioRenderer.start().catch((err) => {
            this.log(`AudioWorklet start failed: ${err}`, "warn");
          });
        }
        this.audioRenderer.feed(audioData);
      } else {
        audioData.close();
      }
    }
  }

  private handleTransferYUV(msg: {
    y: Uint8Array;
    u: Uint8Array;
    v: Uint8Array;
    width: number;
    height: number;
    format: string;
    timestamp: number;
    colorPrimaries?: string;
    transferFunction?: string;
  }): void {
    if (this.isDestroyed) return;

    const planes = {
      y: msg.y,
      u: msg.u,
      v: msg.v,
      width: msg.width,
      height: msg.height,
      format: msg.format as any,
    };
    if (msg.colorPrimaries || msg.transferFunction) {
      this.webglRenderer?.setColorSpace(msg.colorPrimaries as any, msg.transferFunction as any);
    }
    if (this.webglRenderer && !this.webglRenderer.isContextLost) {
      this.webglRenderer.renderYUV(planes);
    } else if (this.canvasRenderer) {
      this.canvasRenderer.renderYUV(planes);
    } else if (this.renderCanvas && this.webglRenderer?.isContextLost) {
      this.log("WebGL context permanently lost, creating Canvas2D fallback");
      this.webglRenderer.destroy();
      this.webglRenderer = null;
      this.canvasRenderer = new CanvasRenderer(this.renderCanvas);
      this.canvasRenderer.renderYUV(planes);
    }
  }
this.log("WebGL context permanently lost, creating Canvas2D fallback"); this.webglRenderer.destroy(); this.webglRenderer = null; this.canvasRenderer = new CanvasRenderer(this.renderCanvas); this.canvasRenderer.renderYUV(planes); } } async destroy(): Promise { if (this.isDestroyed) return; this.log("Destroying WebCodecs player"); // Cancel frame callback this.cancelFrameCallback(); // Stop stats interval if (this._statsInterval) { clearInterval(this._statsInterval); this._statsInterval = null; } // Stop metadata WebSocket this.metadataWs?.destroy(); this.metadataWs = null; this.pendingMetaSubs = []; this.activeSubtitleTrackId = null; // Stop WebSocket this.wsController?.disconnect(); this.wsController = null; // Close all pipelines (before setting isDestroyed so sendToWorker doesn't reject) for (const pipeline of this.pipelines.values()) { await this.closePipeline(pipeline.idx, false); } this.pipelines.clear(); // Set after closePipeline loop — WS is disconnected and frame callbacks cancelled, // so no new messages will arrive that would call sendToWorker. this.isDestroyed = true; // Terminate worker this.worker?.terminate(); this.worker = null; this.workerListeners.clear(); // Clean up MediaStream if (this.mediaStream) { for (const track of this.mediaStream.getTracks()) { track.stop(); this.mediaStream.removeTrack(track); } this.mediaStream = null; } // Clean up direct renderers if (this.webglRenderer) { this.webglRenderer.destroy(); this.webglRenderer = null; } if (this.canvasRenderer) { this.canvasRenderer.destroy(); this.canvasRenderer = null; } if (this.audioRenderer) { this.audioRenderer.destroy(); this.audioRenderer = null; } if (this.renderCanvas) { this.renderCanvas.remove(); this.renderCanvas = null; } // Clean up video element if (this.videoElement) { if (this._onVideoPlay) { this.videoElement.removeEventListener("play", this._onVideoPlay); this._onVideoPlay = undefined; } if (this._onVideoPause) { this.videoElement.removeEventListener("pause", this._onVideoPause); this._onVideoPause = undefined; } if (this._stepPauseTimeout) { clearTimeout(this._stepPauseTimeout); this._stepPauseTimeout = null; } this._pendingStepPause = false; this.videoElement.srcObject = null; this.videoElement.remove(); this.videoElement = null; } if (this._seekSafetyTimeout) { clearTimeout(this._seekSafetyTimeout); this._seekSafetyTimeout = null; } this._activeSeekId = undefined; this.syncController = null; this.useDirectRendering = false; // NOTE: Don't clear tracks/tracksByIndex/queues here! // Since PlayerManager reuses instances, a concurrent initialize() may have // already pre-populated these. Clearing happens at the START of initialize(). } // ============================================================================ // Worker Management // ============================================================================ /** * Try to load a worker from a URL with proper async error detection. * new Worker() doesn't throw on invalid URLs - it fires error events async. 
  private async initializeWorker(): Promise<void> {
    // Worker paths to try in order:
    // 1. Dev server path (Vite plugin serves /workers/* from source)
    // 2. Production npm package path (relative to built module)
    const paths = ["/workers/decoder.worker.js"];

    // Add production path (may fail in dev but that's ok)
    try {
      paths.push(new URL("../workers/decoder.worker.js", import.meta.url).href);
    } catch {
      // import.meta.url may not work in all environments
    }

    let lastError: Error | null = null;
    for (const path of paths) {
      try {
        this.log(`Trying worker path: ${path}`);
        this.worker = await this.tryLoadWorker(path);
        this.log(`Worker loaded from: ${path}`);
        break;
      } catch (e) {
        lastError = e instanceof Error ? e : new Error(String(e));
        this.log(`Worker path failed: ${path} - ${lastError.message}`, "warn");
      }
    }

    if (!this.worker) {
      throw new Error(
        "Failed to initialize WebCodecs worker. " +
          `Last error: ${lastError?.message ?? "unknown"}`
      );
    }

    // Set up worker event handlers (replace the ones from tryLoadWorker)
    this.worker.onmessage = (event: MessageEvent) => {
      this.handleWorkerMessage(event.data);
    };
    this.worker.onerror = (err) => {
      this.log(`Worker error: ${err?.message ?? "unknown error"}`, "error");
      this.emit("error", new Error(`Worker error: ${err?.message ?? "unknown"}`));
    };

    // Configure debugging mode in worker
    this.sendToWorker({
      type: "debugging",
      value: this.verboseDebugging ? "verbose" : this.debugging,
      uid: this.workerUidCounter++,
    });
  }
"verbose" : this.debugging, uid: this.workerUidCounter++, }); } private sendToWorker( msg: MainToWorkerMessage & { uid: number }, transfer?: Transferable[] ): Promise { return new Promise((resolve, reject) => { // Reject with proper error if destroyed or no worker // This prevents silent failures and allows callers to handle errors appropriately if (this.isDestroyed) { reject(new Error("Player destroyed")); return; } if (!this.worker) { reject(new Error("Worker not initialized")); return; } const uid = msg.uid; // Register listener for response this.workerListeners.set(uid, (response) => { this.workerListeners.delete(uid); if (response.type === "ack" && response.status === "error") { reject(new Error(response.error)); } else { resolve(response); } }); if (transfer) { this.worker.postMessage(msg, transfer); } else { this.worker.postMessage(msg); } }); } private handleWorkerMessage(msg: WorkerToMainMessage): void { // Check for specific listener if (msg.uid !== undefined && this.workerListeners.has(msg.uid)) { this.workerListeners.get(msg.uid)!(msg); } // Handle message by type switch (msg.type) { case "addtrack": { const pipeline = this.pipelines.get(msg.idx); if (pipeline && this.mediaStream) { // If track was created in worker (Safari), use it directly if (msg.track) { this.mediaStream.addTrack(msg.track); } else if (pipeline.generator) { // Otherwise use generator's track this.mediaStream.addTrack(pipeline.generator.getTrack()); } } break; } case "removetrack": { const pipeline = this.pipelines.get(msg.idx); if (pipeline?.generator && this.mediaStream) { const track = pipeline.generator.getTrack(); this.mediaStream.removeTrack(track); } break; } case "setplaybackrate": { if (this.videoElement) { this.videoElement.playbackRate = msg.speed; } break; } case "sendevent": { if (msg.kind === "timeupdate") { if (this._pendingStepPause) { this.finishStepPause(); } if ( typeof msg.time === "number" && Number.isFinite(msg.time) && !this.syncController?.isSeeking() ) { this._currentTime = msg.time; this.emit("timeupdate", this._currentTime * 1000); } else if (this.videoElement) { this.emit("timeupdate", this.videoElement.currentTime * 1000); } } else if (msg.kind === "seeked") { // Worker confirmed decoder reached seek target frame. // OG webcodecsworker.js:492 emits 'seeked' when target frame is decoded; // rawws.js:1055 forwards it to the video event bus. if ( this._activeSeekId !== undefined && this.syncController?.isSeekActive(this._activeSeekId) ) { this.syncController.completeSeek(this._activeSeekId); this._activeSeekId = undefined; if (this._seekSafetyTimeout) { clearTimeout(this._seekSafetyTimeout); this._seekSafetyTimeout = null; } } if (typeof msg.time === "number" && Number.isFinite(msg.time)) { this._currentTime = msg.time; this.emit("timeupdate", this._currentTime * 1000); } this.emit("seeked", undefined); } else if (msg.kind === "error") { this.emit("error", new Error(msg.message ?? 
"Unknown error")); } break; } case "writeframe": { // Safari audio: worker sends frames via postMessage, we write them here // Reference: rawws.js line 897-918 const pipeline = this.pipelines.get(msg.idx); if (pipeline?.safariAudioWriter) { const frame = msg.frame; const frameUid = msg.uid; pipeline.safariAudioWriter .write(frame) .then(() => { this.worker?.postMessage({ type: "writeframe", idx: msg.idx, uid: frameUid, status: "ok", }); }) .catch((err: Error) => { this.worker?.postMessage({ type: "writeframe", idx: msg.idx, uid: frameUid, status: "error", error: err.message, }); }); } else { this.worker?.postMessage({ type: "writeframe", idx: msg.idx, uid: msg.uid, status: "error", error: "Pipeline not active or no audio writer", }); } break; } case "transferframe": { this.handleTransferFrame(msg); break; } case "transferyuv": { this.handleTransferYUV(msg as any); break; } case "log": { if (this.debugging) { const level = (msg as any).level ?? "info"; const logFn = level === "error" ? console.error : level === "warn" ? console.warn : console.log; logFn(`[WebCodecs Worker] ${msg.msg}`); } break; } case "stats": { this._lastWorkerStats = (msg as any).stats ?? null; break; } case "closed": { this.pipelines.delete(msg.idx); break; } } } // ============================================================================ // WebSocket Handlers // ============================================================================ private setupWebSocketHandlers(): void { if (!this.wsController) return; this.wsController.on("codecdata", (msg) => this.handleCodecData(msg)); this.wsController.on("info", (msg) => this.handleInfo(msg)); this.wsController.on("ontime", (msg) => this.handleOnTime(msg)); this.wsController.on("setspeed", (msg) => this.handleSetSpeed(msg)); this.wsController.on("tracks", (tracks) => this.handleTracksChange(tracks)); this.wsController.on("chunk", (chunk) => this.handleChunk(chunk)); this.wsController.on("pause", (msg) => this.handleServerPause(msg)); this.wsController.on("stop", () => this.handleStop()); this.wsController.on("error", (err) => this.handleError(err)); this.wsController.on("statechange", (state) => { this.log(`Connection state: ${state}`); if (state === "error") { this.emit("error", new Error("WebSocket connection failed")); } }); } private async handleCodecData(msg: CodecDataMessage): Promise { const codecs = msg.codecs ?? []; const trackIndices = msg.tracks ?? []; // Array of track indices (numbers), NOT TrackInfo this.log( `Received codec data: codecs=[${codecs.join(", ") || "none"}], tracks=[${trackIndices.join(", ") || "none"}]` ); if (codecs.length === 0 || trackIndices.length === 0) { this.log("No playable codecs/tracks selected by server", "warn"); // Still start playback - info message may populate tracks later this.wsController?.play(); return; } // Store codec strings by track index for later lookup // Per rawws.js: codecs[i] corresponds to tracks[i] for (let i = 0; i < trackIndices.length; i++) { const trackIdx = trackIndices[i]; const codec = codecs[i]; if (codec) { // If we have track metadata from info message, update it with codec const existingTrack = this.tracksByIndex.get(trackIdx); if (existingTrack) { existingTrack.codec = codec; } else { // Create minimal track info - will be filled in by info message this.tracksByIndex.set(trackIdx, { idx: trackIdx, type: codec.match(/^(H264|HEVC|VP[89]|AV1|JPEG)/i) ? "video" : codec.match(/^(AAC|MP3|opus|FLAC|AC3|pcm)/i) ? 
"audio" : "meta", codec, }); } this.log(`Track ${trackIdx}: codec=${codec}`); } } // Create pipelines for selected tracks that have metadata for (const trackIdx of trackIndices) { const track = this.tracksByIndex.get(trackIdx); if (track && (track.type === "video" || track.type === "audio")) { await this.createPipeline(track); } } // Start playback this.wsController?.play(); } /** * Handle stream info message containing track metadata * This is sent by MistServer with full track information */ private async handleInfo(msg: InfoMessage): Promise { this.log("Received stream info"); // Extract tracks from meta.tracks object if (msg.meta?.tracks) { const tracksObj = msg.meta.tracks; this.log(`Info contains ${Object.keys(tracksObj).length} tracks`); for (const [_name, track] of Object.entries(tracksObj)) { // Store track by its index for lookup when chunks arrive if (track.idx !== undefined) { this.tracksByIndex.set(track.idx, track); this.log(`Registered track ${track.idx}: ${track.type} ${track.codec}`); // Process any queued init data for this track if (this.queuedInitData.has(track.idx)) { if (track.type === "video" || track.type === "audio") { this.log(`Processing queued INIT data for track ${track.idx}`); await this.createPipeline(track); const initData = this.queuedInitData.get(track.idx)!; this.configurePipeline(track.idx, initData); this.queuedInitData.delete(track.idx); } } } } // Also update tracks array this.tracks = Object.values(tracksObj); } } private handleOnTime(msg: OnTimeMessage): void { // MistServer on_time values are in milliseconds — convert to seconds // for internal state (matching video element / rVFC conventions) const currentSec = msg.current / 1000; // Track when on_time was received (for moving live edge in jumpToLive) this._onTimeReceivedAt = Date.now(); // Update sync controller with server time (seconds) this.syncController?.updateServerTime(currentSec); this.syncController?.setServerPlayRate(msg.play_rate_curr); // Update current time if no frame callback available and not seeking // (seek completion now comes from worker 'seeked' event, not on_time) if (this._frameCallbackId === null && !this.syncController?.isSeeking()) { this._currentTime = currentSec; } // Record server delay const delay = this.wsController?.getServerDelay() ?? 0; if (delay > 0) { this.syncController?.recordServerDelay(delay); } // Update duration from server (VOD streams have finite duration) if (msg.total !== undefined && isFinite(msg.total) && msg.total > 0) { this._duration = msg.total / 1000; } // Update seekable range from server (live DVR window) if (msg.begin !== undefined) { this._seekableBeginS = msg.begin / 1000; } if (msg.end !== undefined) { this._seekableEndS = msg.end / 1000; } // Buffer management: compute buffer from worker frame timing and evaluate. 
    if (
      this.syncController &&
      this._lastWorkerStats?.frameTiming &&
      !this.syncController.isSeeking()
    ) {
      const ft = this._lastWorkerStats.frameTiming;
      if (ft.out && ft.decoded) {
        const bufferMs = Math.round(ft.decoded * 1e-3 - ft.out * 1e-3);
        const syncState = this.syncController.evaluateBuffer(bufferMs, {
          playRateCurr: msg.play_rate_curr,
          serverCurrentMs: msg.current,
          serverEndMs: msg.end,
          serverJitterMs: msg.jitter,
        });
        this._bufferMs = syncState.buffer.current;
      }
    }

    // Create pipelines for tracks mentioned in on_time.tracks (like reference player)
    if (msg.tracks && msg.tracks.length > 0) {
      for (const trackIdx of msg.tracks) {
        if (!this.pipelines.has(trackIdx)) {
          const track = this.tracksByIndex.get(trackIdx);
          if (track && (track.type === "video" || track.type === "audio")) {
            this.log(
              `Creating pipeline from on_time for track ${track.idx} (${track.type} ${track.codec})`
            );
            this.createPipeline(track).then(() => {
              // Process any queued init data
              const queuedInit = this.queuedInitData.get(track.idx);
              if (queuedInit) {
                this.configurePipeline(track.idx, queuedInit);
                this.queuedInitData.delete(track.idx);
              }
            });
          }
        }
      }
    }
  }

  /**
   * Handle set_speed updates from server.
   * Upstream rawws.js uses this signal to maintain rate/jitter state.
   */
  private handleSetSpeed(msg: SetSpeedMessage): void {
    this.syncController?.setServerPlayRate(msg.play_rate_curr, msg.play_rate_prev, {
      fromSetSpeed: true,
    });
  }

  private async handleTracksChange(tracks: TrackInfo[]): Promise<void> {
    this.log(`Tracks changed: ${tracks.map((t) => `${t.idx}:${t.type}`).join(", ")}`);

    // Check if codecs changed
    const newTrackIds = new Set(tracks.map((t) => t.idx));
    const oldTrackIds = new Set(this.pipelines.keys());

    // Remove old pipelines
    for (const idx of oldTrackIds) {
      if (!newTrackIds.has(idx)) {
        await this.closePipeline(idx, true);
      }
    }

    // Update tracksByIndex and create new pipelines
    for (const track of tracks) {
      this.tracksByIndex.set(track.idx, track);
      if (track.type === "video" || track.type === "audio") {
        if (!this.pipelines.has(track.idx)) {
          await this.createPipeline(track);
        }
      }
    }

    this.tracks = tracks;
  }
  private handleChunk(chunk: RawChunk): void {
    if (this.isDestroyed) return;

    const pipeline = this.pipelines.get(chunk.trackIndex);

    // Create pipeline if missing - look up track from tracksByIndex (populated by info message)
    if (!pipeline) {
      const track = this.tracksByIndex.get(chunk.trackIndex);

      // If track info not available, try to infer from chunk type
      // MistServer track indices: video typically 1, audio typically 2, meta typically 9
      if (!track) {
        // INIT data for an unknown track - we need to infer the track type
        // For now, create a placeholder track entry based on common MistServer patterns
        if (isInitData(chunk)) {
          this.log(`Received INIT for unknown track ${chunk.trackIndex}, queuing for later`);
          // Queue the init data - it will be processed when track info becomes available
          this.queuedInitData.set(chunk.trackIndex, chunk.data);
          return;
        }
        // For regular chunks without track info, we can't decode without codec config
        this.log(`Received chunk for unknown track ${chunk.trackIndex} without track info`, "warn");
        return;
      }

      if (track.type === "video" || track.type === "audio") {
        this.log(
          `Creating pipeline for discovered track ${track.idx} (${track.type} ${track.codec})`
        );
        this.createPipeline(track).then(() => {
          if (this.isDestroyed) return; // Guard against async completion after destroy
          // Process any queued init data for this track
          const queuedInit = this.queuedInitData.get(track!.idx);
          if (queuedInit) {
            this.configurePipeline(track!.idx, queuedInit);
            this.queuedInitData.delete(track!.idx);
          }
          // Re-process this chunk now that pipeline exists
          this.handleChunk(chunk);
        });
      }
      return;
    }

    // Handle init data
    if (isInitData(chunk)) {
      this.configurePipeline(pipeline.idx, chunk.data);
      return;
    }

    // Queue chunks until pipeline is configured (decoder needs init data first)
    // Per rawws.js: frames are queued when decoder is "unconfigured" (line 1408-1410)
    if (!pipeline.configured) {
      // For AUDIO tracks: configure on FIRST frame (audio doesn't have key/delta distinction)
      // Audio chunks are sent as type 0 (delta) by the server even though they're independent
      // Reference: rawws.js line 768-769 forces audio type to 'key'
      const isAudioTrack = pipeline.track.type === "audio";
      // For VIDEO tracks: wait for KEY frame before configuring
      // This handles Annex B streams where SPS/PPS is inline with keyframes
      const shouldConfigure = isAudioTrack || chunk.type === "key";

      if (shouldConfigure) {
        this.log(
          `Received ${chunk.type.toUpperCase()} frame for unconfigured ${pipeline.track.type} track ${chunk.trackIndex}, configuring`
        );
        // Queue this frame at the FRONT so it's sent before any DELTAs
        if (!this.queuedChunks.has(chunk.trackIndex)) {
          this.queuedChunks.set(chunk.trackIndex, []);
        }
        this.queuedChunks.get(chunk.trackIndex)!.unshift(chunk);

        // Configure without description (or with description from track.init if available)
        // For audio codecs like opus/mp3 that don't need init data, this works fine
        // For AAC, the description should come from track.init or the server will send INIT
        const initData = pipeline.track.init ? str2bin(pipeline.track.init) : new Uint8Array(0);
        this.configurePipeline(chunk.trackIndex, initData).catch((err) => {
          this.log(`Failed to configure track ${chunk.trackIndex}: ${err}`, "error");
        });
        return;
      }

      // Otherwise queue the chunk (video delta before first keyframe)
      if (!this.queuedChunks.has(chunk.trackIndex)) {
        this.queuedChunks.set(chunk.trackIndex, []);
      }
      this.queuedChunks.get(chunk.trackIndex)!.push(chunk);
      if (this.verboseDebugging) {
        this.log(`Queued chunk for track ${chunk.trackIndex} (waiting for decoder config)`);
      }
      return;
    }

    // Video keyframe gate: drop delta frames until first keyframe after configure.
    // Joining a live stream mid-GOP means the server sends deltas before the next
    // keyframe. Decoding those without a reference produces moshing artifacts.
    if (pipeline.awaitingKeyframe) {
      if (chunk.type === "key") {
        pipeline.awaitingKeyframe = false;
        this.log(`First keyframe received for track ${chunk.trackIndex}, starting decode`);
      } else {
        return;
      }
    }

    // Track jitter
    this.syncController?.recordChunkArrival(chunk.trackIndex, chunk.timestamp);

    // Send to worker for decoding
    this.sendChunkToWorker(chunk);
  }

  private sendChunkToWorker(chunk: RawChunk): void {
    const msg: MainToWorkerMessage = {
      type: "receive",
      idx: chunk.trackIndex,
      chunk: {
        type: chunk.type === "key" ? "key" : "delta",
        timestamp: getPresentationTimestamp(chunk),
        data: chunk.data,
      },
      uid: this.workerUidCounter++,
    };
    this.worker?.postMessage(msg, [chunk.data.buffer]);
  }
  /**
   * Handle server-initiated pause (e.g., server-side buffer underrun).
   * OG util.js:1696-1710: if reason is "at_dead_point", seek to midpoint of available buffer.
   * Otherwise, per rawws.js:661-662, pause frame timing so the worker stops outputting frames.
   */
  private handleServerPause(msg: {
    paused: boolean;
    reason?: string;
    begin?: number;
    end?: number;
  }): void {
    if (msg.reason === "at_dead_point" && msg.begin !== undefined && msg.end !== undefined) {
      const seekTo = (msg.begin + msg.end) / 2;
      if (!isNaN(seekTo) && seekTo > 0) {
        this.log("At dead point: seeking to midpoint of available buffer");
        this.wsController?.seek(seekTo);
        return;
      }
      this.log("At dead point: seek target invalid — pausing");
    }
    if (msg.paused) {
      this.log("Server requested pause");
      this.sendToWorker({
        type: "frametiming",
        action: "setPaused",
        paused: true,
        uid: this.workerUidCounter++,
      }).catch(() => {});
    }
  }

  private handleStop(): void {
    this.log("Stream stopped");
    this.emit("ended", undefined);
  }

  private handleError(err: Error): void {
    this.log(`WebSocket error: ${err.message}`, "error");
    this.emit("error", err);
  }

  // ============================================================================
  // Metadata WebSocket + Subtitle/Text Track API
  // ============================================================================

  /**
   * Initialize the metadata WebSocket for subtitle/meta track delivery.
   * Uses a separate socket at the same URL with ?rate=1.
   */
  private initMetadataWebSocket(sourceUrl: string): void {
    const hasMetaTracks = Array.from(this.tracksByIndex.values()).some((t) => t.type === "meta");
    if (!hasMetaTracks && this.pendingMetaSubs.length === 0) {
      return;
    }

    const metaUrl = buildMetadataWsUrl(sourceUrl);
    this.metadataWs = new MetadataWebSocket(
      metaUrl,
      () => this._currentTime,
      () => this.videoElement?.playbackRate ?? 1,
      () => this._isPaused,
      { debug: this.debugging }
    );

    // Drain pending subscriptions that were registered before the socket existed
    for (const sub of this.pendingMetaSubs) {
      this.metadataWs.subscribe(sub.trackId, sub.callback);
    }
    this.pendingMetaSubs = [];

    this.log(`Metadata WebSocket initialized at ${metaUrl}`);
  }

  /**
   * Subscribe to metadata track events (subtitles, scores, etc.).
   * Returns an unsubscribe function.
   */
  subscribeToMetaTrack(trackId: string, callback: (event: MetaTrackEvent) => void): () => void {
    if (this.metadataWs) {
      return this.metadataWs.subscribe(trackId, callback);
    }
    // Queue subscription until metadata socket is ready
    this.pendingMetaSubs.push({ trackId, callback });
    return () => {
      this.pendingMetaSubs = this.pendingMetaSubs.filter(
        (s) => s.trackId !== trackId || s.callback !== callback
      );
    };
  }

  /**
   * Get available text/subtitle tracks.
   */
  getTextTracks(): Array<{
    id: string;
    label: string;
    lang?: string;
    active: boolean;
  }> {
    const tracks: Array<{
      id: string;
      label: string;
      lang?: string;
      active: boolean;
    }> = [];
    for (const [idx, track] of this.tracksByIndex) {
      if (track.type === "meta" && track.codec?.toLowerCase() === "subtitle") {
        tracks.push({
          id: String(idx),
          label: track.codecstring || `Subtitle ${idx}`,
          active: this.activeSubtitleTrackId === String(idx),
        });
      }
    }
    return tracks;
  }
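  // Usage sketch (illustrative): UI code pairs these APIs roughly like so —
  //   const subs = player.getTextTracks();  // e.g. [{ id: "9", label: "Subtitle 9", active: false }]
  //   const unsub = player.subscribeToMetaTrack("9", (ev) => renderCue(ev));
  //   player.selectTextTrack("9");           // mark it active
  // renderCue is a hypothetical consumer, not part of this module.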
  /**
   * Select a subtitle track by ID, or null to disable.
   */
  selectTextTrack(id: string | null): void {
    // Unsubscribe from current track
    if (this.activeSubtitleTrackId !== null) {
      this.activeSubtitleTrackId = null;
    }
    if (id === null) {
      return;
    }
    this.activeSubtitleTrackId = id;
    this.log(`Selected subtitle track: ${id}`);
  }

  // ============================================================================
  // Pipeline Management
  // ============================================================================

  private async createPipeline(track: TrackInfo): Promise<void> {
    if (this.pipelines.has(track.idx)) return;

    this.log(`Creating pipeline for track ${track.idx} (${track.type} ${track.codec})`);

    const pipeline: PipelineInfo = {
      idx: track.idx,
      track,
      generator: null,
      configured: false,
      awaitingKeyframe: false,
    };
    this.pipelines.set(track.idx, pipeline);
    this.syncController?.addTrack(track.idx, track);

    // Create worker pipeline
    await this.sendToWorker({
      type: "create",
      idx: track.idx,
      track,
      opts: {
        optimizeForLatency: this.streamType === "live",
        payloadFormat: this.payloadFormat, // 'avcc' for ws/video/raw, 'annexb' for ws/video/h264
      },
      uid: this.workerUidCounter++,
    });

    // Track generator routing (MediaStreamTrackGenerator is Chrome-only, non-standard):
    // - Direct rendering video: postMessage → WebGL canvas (Firefox, Safari, or explicit webgl mode)
    // - Direct rendering audio (Firefox/Safari): polyfill AudioWorklet → MediaStreamDestination → video.srcObject
    // - Direct rendering audio (Chromium): AudioWorkletRenderer → AudioContext.destination
    // - Chrome/Edge native: MediaStreamTrackGenerator writable transferred to worker
    // - Safari native: VideoTrackGenerator in worker (video) or frame relay (audio)
    // - Firefox polyfill: canvas/AudioWorklet polyfill (non-direct-rendering fallback)
    if (this.useDirectRendering && track.type === "video") {
      // Direct rendering for video: worker sends VideoFrames via postMessage → WebGL canvas
      this.log(`Direct rendering for track ${track.idx} (video)`);
      await this.sendToWorker({
        type: "setrendermode",
        idx: track.idx,
        directTransfer: true,
        uid: this.workerUidCounter++,
      });
    } else if (
      this.useDirectRendering &&
      track.type === "audio" &&
      (isSafari() || !hasNativeMediaStreamTrackGenerator())
    ) {
      // Firefox/Safari audio in direct rendering mode: route through polyfill → video.srcObject.
      // Chrome's MediaStreamTrackGenerator({kind:'audio'}) is non-standard and Chromium-only.
      // Safari may expose MediaStreamTrackGenerator for video (VideoTrackGenerator) but NOT audio.
      // Firefox has neither. Always use polyfill on non-Chromium browsers.
      this.log(`Audio via polyfill for track ${track.idx} (direct rendering mode)`);
      await this.sendToWorker({
        type: "setrendermode",
        idx: track.idx,
        directTransfer: true,
        uid: this.workerUidCounter++,
      });
      pipeline.generator = createTrackGenerator("audio");
      if (pipeline.generator.waitForInit) {
        await pipeline.generator.waitForInit();
      }
      // Get a writer so handleTransferFrame can feed AudioData from the worker
      pipeline.audioWriter = (pipeline.generator.writable as WritableStream<AudioData>).getWriter();
      if (this.mediaStream) {
        this.mediaStream.addTrack(pipeline.generator.getTrack());
      }
      // Set video.srcObject for audio output (video display uses WebGL canvas)
      if (this.videoElement && !this.videoElement.srcObject) {
        this.videoElement.srcObject = this.mediaStream;
      }
    } else if (this.useDirectRendering) {
      // Chromium/Brave audio in direct rendering mode: AudioWorkletRenderer handles it
      this.log(`Direct rendering for track ${track.idx} (${track.type})`);
      await this.sendToWorker({
        type: "setrendermode",
        idx: track.idx,
        directTransfer: true,
        uid: this.workerUidCounter++,
      });
    } else if (hasNativeMediaStreamTrackGenerator()) {
      // Chrome/Edge: Create generator and transfer writable to worker
      // @ts-ignore
      const generator = new MediaStreamTrackGenerator({ kind: track.type });
      pipeline.generator = {
        writable: generator.writable,
        getTrack: () => generator,
        close: () => generator.stop?.(),
      };
      await this.sendToWorker(
        {
          type: "setwritable",
          idx: track.idx,
          writable: generator.writable,
          uid: this.workerUidCounter++,
        },
        [generator.writable]
      );
    } else if (isSafari()) {
      // Safari: Worker uses VideoTrackGenerator (video) or frame relay (audio)
      // Reference: rawws.js line 1012-1037
      this.log(`Safari detected - using worker-based track generator for ${track.type}`);
      if (track.type === "audio") {
        // Safari audio: create generator on main thread, frames relayed from worker
        // @ts-ignore - Safari has MediaStreamTrackGenerator for audio
        if (typeof MediaStreamTrackGenerator !== "undefined") {
          // @ts-ignore
          const audioGen = new MediaStreamTrackGenerator({ kind: "audio" });
          pipeline.safariAudioGenerator = audioGen;
          pipeline.safariAudioWriter = audioGen.writable.getWriter();
          // Add track to stream
          if (this.mediaStream) {
            this.mediaStream.addTrack(audioGen);
          }
        }
      }
      // Ask worker to create generator (video uses VideoTrackGenerator, audio sets up relay)
      await this.sendToWorker({
        type: "creategenerator",
        idx: track.idx,
        uid: this.workerUidCounter++,
      });
    } else {
      // Firefox/other: Use canvas/AudioWorklet polyfill
      pipeline.generator = createTrackGenerator(track.type as "video" | "audio");
      if (pipeline.generator.waitForInit) {
        await pipeline.generator.waitForInit();
      }
      // For polyfill, writable stays on main thread
      // Worker would need different architecture - for now, fall back to main thread decode
      this.log("Using MediaStreamTrackGenerator polyfill - main thread decode");
      // Add track to stream directly
      if (this.mediaStream && pipeline.generator) {
        this.mediaStream.addTrack(pipeline.generator.getTrack());
      }
    }

    // Per rawws.js: Do NOT configure from HTTP info automatically.
    // Wait for WebSocket binary INIT frames to configure decoders.
    // This ensures we use the exact init data the server sends for this session.
    //
    // However, if track.init is empty/undefined, the codec doesn't need init data
    // and we can configure immediately (per rawws.js line 1239-1241).
    // This applies to codecs like opus, mp3, vp8, vp9 that don't need init data.
    if (!track.init || track.init === "") {
      this.log(
        `Track ${track.idx} (${track.codec}) doesn't need init data, configuring immediately`
      );
      await this.configurePipeline(track.idx, new Uint8Array(0));
    } else {
      // For codecs that need init data (H264, HEVC, AAC), we have two paths:
      // 1. WebSocket sends INIT frame -> handleChunk triggers configurePipeline
      // 2. First frame arrives without prior INIT -> handleChunk uses track.init
      this.log(
        `Track ${track.idx} (${track.codec}) has init data (${track.init.length} bytes), waiting for first frame`
      );
    }
  }

  private async configurePipeline(idx: number, header: Uint8Array): Promise<void> {
    const pipeline = this.pipelines.get(idx);
    if (!pipeline || pipeline.configured) return;
    pipeline.configured = true; // Prevent re-entry during await
    if (pipeline.track.type === "video") {
      pipeline.awaitingKeyframe = true;
    }

    this.log(`Configuring decoder for track ${idx}`);

    // Copy the header to avoid transfer issues (neutered buffers)
    const headerCopy = new Uint8Array(header);

    // For video: pass raw init data as description (matches reference rawws.js: description = header).
    // MistServer HTTP info provides valid AVCDecoderConfigurationRecord / HEVCDecoderConfigurationRecord.
    // For audio: normalize init data (e.g., Vorbis header validation, AAC AudioSpecificConfig).
    const normalizedHeader =
      pipeline.track && pipeline.track.type === "audio"
        ? (buildDescription(pipeline.track.codec, headerCopy) ?? headerCopy)
        : headerCopy;

    try {
      await this.sendToWorker({
        type: "configure",
        idx,
        header: normalizedHeader,
        uid: this.workerUidCounter++,
      });
    } catch (err) {
      pipeline.configured = false; // Allow retry on failure
      throw err;
    }

    // Flush any queued chunks now that decoder is configured
    const queued = this.queuedChunks.get(idx);
    if (queued && queued.length > 0) {
      this.log(`Flushing ${queued.length} queued chunks for track ${idx}`);
      if (pipeline.track.type === "video") {
        // Find first keyframe — decoder can't produce clean output from deltas alone
        const keyIdx = queued.findIndex((c) => c.type === "key");
        if (keyIdx === -1) {
          this.log(`No keyframe in queue for track ${idx}, dropping ${queued.length} delta frames`);
          this.queuedChunks.delete(idx);
          return;
        }
        if (keyIdx > 0) {
          this.log(`Skipping ${keyIdx} delta frames, starting from keyframe`);
        }
        pipeline.awaitingKeyframe = false;
        for (let i = keyIdx; i < queued.length; i++) {
          this.sendChunkToWorker(queued[i]);
        }
      } else {
        for (const chunk of queued) {
          this.sendChunkToWorker(chunk);
        }
      }
      this.queuedChunks.delete(idx);
    }
  }
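  // Worked example (illustrative): with queued = [delta, delta, key, delta] the flush
  // finds keyIdx = 2, drops the two leading deltas, and sends chunks 2..3 to the
  // worker — decode always starts at a keyframe.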
  async play(): Promise<void> {
    this._isPaused = false;
    this.wsController?.play();
    this.metadataWs?.notifyPlay();
    this.sendToWorker({
      type: "frametiming",
      action: "setPaused",
      paused: false,
      uid: this.workerUidCounter++,
    });
    await this.videoElement?.play();
  }

  pause(): void {
    this._isPaused = true;
    this.wsController?.hold();
    this.metadataWs?.notifyPause();
    this.sendToWorker({
      type: "frametiming",
      action: "setPaused",
      paused: true,
      uid: this.workerUidCounter++,
    });
    this.videoElement?.pause();
  }

  private finishStepPause(): void {
    if (!this.videoElement) {
      this._pendingStepPause = false;
      this._suppressPlayPauseSync = false;
      if (this._stepPauseTimeout) {
        clearTimeout(this._stepPauseTimeout);
        this._stepPauseTimeout = null;
      }
      return;
    }
    if (this._stepPauseTimeout) {
      clearTimeout(this._stepPauseTimeout);
      this._stepPauseTimeout = null;
    }
    this._pendingStepPause = false;
    this._suppressPlayPauseSync = false;
    try {
      this.videoElement.pause();
    } catch {}
  }

  frameStep(direction: -1 | 1, _seconds?: number): void {
    if (!this._isPaused) return;
    if (!this.videoElement) return;
    this.log(
      `Frame step requested dir=${direction} paused=${this._isPaused} videoPaused=${this.videoElement.paused}`
    );

    // Ensure the worker is paused (in case pause() didn't flow through)
    this.sendToWorker({
      type: "frametiming",
      action: "setPaused",
      paused: true,
      uid: this.workerUidCounter++,
    }).catch(() => {});

    // MediaStream-backed video elements don't present new frames while paused.
    // Pulse playback briefly so the stepped frame can render, then pause again.
    if (this.videoElement.paused) {
      const video = this.videoElement;
      this._suppressPlayPauseSync = true;
      this._pendingStepPause = true;
      try {
        const maybePromise = video.play();
        if (maybePromise && typeof (maybePromise as Promise<void>).catch === "function") {
          (maybePromise as Promise<void>).catch(() => {});
        }
      } catch {}
      if ("requestVideoFrameCallback" in video) {
        (video as any).requestVideoFrameCallback(() => this.finishStepPause());
      }
      // Failsafe: avoid staying in a suppressed state if no frame is delivered
      this._stepPauseTimeout = setTimeout(() => this.finishStepPause(), 200);
    }

    this.sendToWorker({
      type: "framestep",
      direction,
      uid: this.workerUidCounter++,
    });
  }
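  // Seek lifecycle sketch (illustrative; follows the message flow implemented below):
  //
  //   seek(60_000)
  //     -> syncController.startSeek()       // returns a seekId; UI gets an optimistic timeupdate
  //     -> worker  { type: "seek" }         // flush queues, skip frames until >= 60s
  //     -> server  wsController.seek(...)   // request stream data around the target
  //     <- worker  'seeked' event           // completes the seek, or the 3s failsafe below fires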
  seek(timeMs: number): void {
    if (!this.wsController || !this.syncController) return;
    const seekId = this.syncController.startSeek(timeMs);
    this._activeSeekId = seekId;

    // Optimistically update the current time for immediate UI feedback
    this._currentTime = timeMs / 1000;
    this.emit("timeupdate", timeMs);

    // Flush worker queues and set the seek target for frame skipping.
    // The worker self-terminates the seek when a frame at/after the target is decoded.
    // No "frametiming reset" here: that would clear the seeking flag prematurely.
    this.sendToWorker({
      type: "seek",
      seekTime: timeMs,
      uid: this.workerUidCounter++,
    }).catch(() => {
      this.syncController?.cancelSeek();
      this._activeSeekId = undefined;
    });

    // Send the seek to the server (wsController.seek already takes ms)
    const desiredBuffer = this.syncController.getDesiredBuffer();
    this.wsController.seek(timeMs, desiredBuffer);

    // Sync the metadata socket
    this.metadataWs?.notifySeek();

    // Safety timeout: if the worker's 'seeked' event doesn't arrive within 3s, force-complete
    if (this._seekSafetyTimeout) clearTimeout(this._seekSafetyTimeout);
    this._seekSafetyTimeout = setTimeout(() => {
      this._seekSafetyTimeout = null;
      if (
        this._activeSeekId !== undefined &&
        this.syncController?.isSeekActive(this._activeSeekId)
      ) {
        this.syncController.completeSeek(this._activeSeekId);
        this._activeSeekId = undefined;
      }
    }, 3000);
  }

  setPlaybackRate(rate: number): void {
    this.syncController?.setMainSpeed(rate);
  }

  isPaused(): boolean {
    return this._isPaused;
  }

  isLive(): boolean {
    return this.streamType === "live";
  }

  jumpToLive(): void {
    if (this.streamType === "live") {
      // OG rawws.js:1476: duration = (info.end + now - received) * 1e-3
      // OG skins.js:3271: api.currentTime = api.duration
      // The live edge moves forward by the elapsed time since the last on_time.
      let liveEdgeMs: number;
      if (this._seekableEndS !== null && this._onTimeReceivedAt > 0) {
        const elapsedSec = (Date.now() - this._onTimeReceivedAt) / 1000;
        liveEdgeMs = (this._seekableEndS + elapsedSec) * 1000;
      } else if (this._seekableEndS !== null) {
        liveEdgeMs = this._seekableEndS * 1000;
      } else {
        liveEdgeMs = this.getDuration();
      }
      if (!Number.isFinite(liveEdgeMs) || liveEdgeMs <= 0) return;
      this.seek(liveEdgeMs);
      this.log(`Jump to live: seek to ${liveEdgeMs.toFixed(0)}ms`);
    }
  }
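  // Worked example of the live-edge math above (values are illustrative):
  // if the last on_time reported end = 120.0s and it arrived 2.5s ago, then
  //   liveEdgeMs = (120.0 + 2.5) * 1000 = 122500
  // so jumpToLive() issues seek(122500) instead of seeking to a stale edge.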
  /**
   * Check if seeking is supported.
   * WebCodecs can seek via server commands when connected.
   * Reference: rawws.js line 1294-1304 implements seeking via the control channel
   */
  canSeek(): boolean {
    // WebCodecs CAN seek via server commands while the WebSocket is connected.
    // This overrides the default MediaStream check in SeekingUtils.
    return this.wsController !== null && !this.isDestroyed;
  }

  getSeekableRange(): { start: number; end: number } | null {
    if (this._seekableBeginS !== null && this._seekableEndS !== null) {
      return { start: this._seekableBeginS * 1000, end: this._seekableEndS * 1000 };
    }
    return null;
  }

  // ============================================================================
  // Media Properties (Phase 2A)
  // ============================================================================

  /**
   * Get stream duration (Infinity for live streams)
   */
  get duration(): number {
    return this._duration;
  }

  getDuration(): number {
    // OG rawws.js:1476: live duration = (on_time.end + elapsed_since_received) * 1e-3
    if (this.streamType === "live" && this._seekableEndS !== null && this._onTimeReceivedAt > 0) {
      const elapsedSec = (Date.now() - this._onTimeReceivedAt) / 1000;
      return (this._seekableEndS + elapsedSec) * 1000;
    }
    if (!Number.isFinite(this._duration)) return this._duration; // preserve Infinity
    return this._duration * 1000;
  }

  /**
   * Get current playback time in ms.
   * Uses requestVideoFrameCallback for accurate timing when available.
   */
  get currentTime(): number {
    return this._currentTime * 1000;
  }

  getCurrentTime(): number {
    return this._currentTime * 1000;
  }

  /**
   * Get buffered time ranges.
   * Returns a single range from the current time to current + buffer.
   */
  get buffered(): TimeRanges {
    if (this._bufferMs <= 0) {
      return createTimeRanges([]);
    }
    // Return TimeRanges in seconds (browser API convention)
    const start = this._currentTime;
    const end = start + this._bufferMs / 1000;
    return createTimeRanges([[start, end]]);
  }

  /**
   * Get comprehensive player statistics
   */
  async getStats(): Promise<WebCodecsStats> {
    const syncState = this.syncController?.getState();
    const workerPipelines = this._lastWorkerStats?.pipelines;

    // Extract per-track queue sizes from the worker stats
    let videoQueueSize = 0;
    let audioQueueSize = 0;
    if (workerPipelines) {
      for (const [idx, pipeStats] of Object.entries(workerPipelines)) {
        const track = this.tracksByIndex.get(Number(idx));
        if (track?.type === "video") {
          videoQueueSize = pipeStats.queues.decoder;
        } else if (track?.type === "audio") {
          audioQueueSize = pipeStats.queues.decoder;
        }
      }
    }

    return {
      latency: {
        buffer: syncState?.buffer.current ?? 0,
        target: syncState?.buffer.desired ?? 0,
        jitter: syncState?.jitter.weighted ?? 0,
      },
      sync: {
        avDrift: this._avDrift,
        playbackSpeed: syncState?.playbackSpeed ?? 1,
      },
      decoder: {
        videoQueueSize,
        audioQueueSize,
        framesDropped: this._framesDropped,
        framesDecoded: this._framesDecoded,
      },
      network: {
        bytesReceived: this._bytesReceived,
        messagesReceived: this._messagesReceived,
      },
      pipelines: workerPipelines ?? undefined,
    };
  }

  // ============================================================================
  // Rendering API
  // ============================================================================

  /**
   * Capture the current video frame as a data URL.
   * Only available in WebGL/Canvas2D render mode.
   */
  snapshot(type: "png" | "jpeg" | "webp" = "png", quality = 0.92): string | null {
    if (this.webglRenderer) return this.webglRenderer.snapshot(type, quality);
    if (this.canvasRenderer) return this.canvasRenderer.snapshot(type, quality);
    return null;
  }
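  // Usage sketch for the rendering API (illustrative only; requires direct rendering mode):
  //
  //   const jpeg = player.snapshot("jpeg", 0.8); // data URL, or null outside WebGL/Canvas2D mode
  //   player.setRotation(90);                    // rotate presentation (WebGL only)
  //   player.setMirror(true);                    // horizontal flip (WebGL only)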
  /**
   * Set video rotation (0, 90, 180, 270 degrees).
   * Only available in WebGL render mode.
   */
  setRotation(degrees: number): void {
    this.webglRenderer?.setRotation(degrees);
  }

  /**
   * Set video mirror/flip.
   * Only available in WebGL render mode.
   */
  setMirror(horizontal: boolean, vertical?: boolean): void {
    this.webglRenderer?.setMirror(horizontal, vertical);
  }

  /**
   * Whether this player instance is using WebGL/Canvas2D direct rendering.
   */
  get isDirectRendering(): boolean {
    return this.useDirectRendering;
  }

  // ============================================================================
  // Frame Timing (requestVideoFrameCallback)
  // ============================================================================

  /**
   * Set up requestVideoFrameCallback for accurate frame timing.
   * This provides vsync-aligned frame metadata for A/V sync.
   */
  private setupFrameCallback(): void {
    if (!this.videoElement) return;

    // Check if requestVideoFrameCallback is available
    if ("requestVideoFrameCallback" in HTMLVideoElement.prototype) {
      const callback = (_now: DOMHighResTimeStamp, metadata: VideoFrameCallbackMetadata) => {
        if (this.isDestroyed || !this.videoElement) return;
        this.onVideoFrame(metadata);
        // Schedule the next callback
        this._frameCallbackId = (this.videoElement as any).requestVideoFrameCallback(callback);
      };
      this._frameCallbackId = (this.videoElement as any).requestVideoFrameCallback(callback);
      this.log("requestVideoFrameCallback enabled for accurate frame timing");
    } else {
      // Fallback: use the video element's currentTime directly
      this.log("requestVideoFrameCallback not available, using fallback timing");
    }
  }

  /**
   * Handle the video frame presentation callback.
   * Updates the current time.
   */
  private onVideoFrame(metadata: VideoFrameCallbackMetadata): void {
    // Don't overwrite the optimistic seek position with stale pre-seek frames
    if (this.syncController?.isSeeking()) return;

    // Update the current time from the actual frame presentation (mediaTime is in seconds)
    this._currentTime = metadata.mediaTime;

    // Update the buffer level from the sync controller
    const syncState = this.syncController?.getState();
    if (syncState) {
      this._bufferMs = syncState.buffer.current;
    }

    // Emit the timeupdate event in ms
    this.emit("timeupdate", this._currentTime * 1000);

    // Update frame stats
    this._framesDecoded = metadata.presentedFrames;
  }

  /**
   * Cancel the frame callback on cleanup
   */
  private cancelFrameCallback(): void {
    if (this._frameCallbackId !== null && this.videoElement) {
      if ("cancelVideoFrameCallback" in HTMLVideoElement.prototype) {
        (this.videoElement as any).cancelVideoFrameCallback(this._frameCallbackId);
      }
      this._frameCallbackId = null;
    }
  }

  // ============================================================================
  // Logging
  // ============================================================================

  private log(message: string, level: "info" | "warn" | "error" = "info"): void {
    if (!this.debugging && level === "info") return;
    console[level](`[WebCodecs] ${message}`);
  }
}

// Export for direct use
export { WebSocketController } from "./WebSocketController";
export { SyncController } from "./SyncController";
export { MetadataWebSocket, buildMetadataWsUrl } from "./MetadataWebSocket";
export { JitterTracker, MultiTrackJitterTracker } from "./JitterBuffer";
export { getLatencyProfile, mergeLatencyProfile, LATENCY_PROFILES } from "./LatencyProfiles";
export { parseRawChunk, RawChunkParser } from "./RawChunkParser";
export * from "./types";
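// Stats polling sketch (illustrative only; `player` is a constructed WebCodecsPlayerImpl,
// e.g. for driving a latency/debug overlay):
//
//   const statsTimer = setInterval(async () => {
//     const stats = await player.getStats();
//     console.log(
//       `buffer ${stats.latency.buffer}ms / target ${stats.latency.target}ms,`,
//       `speed x${stats.sync.playbackSpeed}, dropped ${stats.decoder.framesDropped}`
//     );
//   }, 1000);
//   // clearInterval(statsTimer) on teardown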