import { isDevEnvironment, showBalloonError, showBalloonWarning } from "../engine/debug/index.js";
import { Application } from "../engine/engine_application.js";
import { RoomEvents } from "../engine/engine_networking.js";
import { disposeStream, NetworkedStreamEvents, NetworkedStreams, StreamEndedEvent, StreamReceivedEvent } from "../engine/engine_networking_streams.js";
import { serializable } from "../engine/engine_serialization_decorator.js";
import { delay, DeviceUtilities, getParam } from "../engine/engine_utils.js";
import { getIconElement } from "../engine/webcomponents/icons.js";
import { Behaviour } from "./Component.js";

export const noVoip = "noVoip";

const debugParam = getParam("debugvoip");

/**
 * The Voice over IP (VoIP) component allows you to send and receive audio streams to and from other users in the same networked room.
 * It requires a working networking connection (e.g. an active SyncedRoom component in the scene, or connecting to a room manually).
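 *
 * @example A minimal usage sketch. It assumes `voip` is a reference to a `Voip` instance that you obtained elsewhere (how you obtain it is outside the scope of this file):
 * ```ts
 * // Toggle the local microphone on/off:
 * if (voip.isSending) voip.disconnect({ remember: true });
 * else voip.connect();
 *
 * // Temporarily silence the outgoing microphone without stopping the stream:
 * voip.setMuted(!voip.isMuted);
 * ```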
 * @category Networking
 * @group Components
 */
export class Voip extends Behaviour {

    /** When enabled, VoIP will start when a room is joined or when this component is enabled while already in a room.
     * @default true
     */
    @serializable()
    autoConnect: boolean = true;

    /**
     * When enabled, VoIP will stay connected even when the browser tab is not focused/active anymore.
     * @default true
     */
    @serializable()
    runInBackground: boolean = true;

    /**
     * When enabled, a menu button will be created to allow the user to toggle VoIP on and off.
     */
    @serializable()
    createMenuButton: boolean = true;

    /**
     * When enabled, debug messages will be printed to the console. This is useful for debugging audio issues.
     * You can also append ?debugvoip to the URL to enable this.
     */
    debug: boolean = false;

    private _net?: NetworkedStreams;
    private _menubutton?: HTMLElement;

    /** @internal */
    awake() {
        if (debugParam) this.debug = true;

        if (this.debug) {
            console.log("VOIP debugging: press 'v' to toggle mute or 'c' to toggle connect/disconnect");
            window.addEventListener("keydown", async (evt) => {
                const key = evt.key.toLowerCase();
                switch (key) {
                    case "v":
                        console.log("MUTE?", !this.isMuted);
                        this.setMuted(!this.isMuted);
                        break;
                    case "c":
                        if (this.isSending) this.disconnect();
                        else this.connect();
                        break;
                }
            });
            // mute while the window is unfocused
            window.addEventListener("blur", () => {
                console.log("VOIP: MUTE ON BLUR");
                this.setMuted(true);
            });
            window.addEventListener("focus", () => {
                console.log("VOIP: UNMUTE ON FOCUS");
                this.setMuted(false);
            });
        }
    }

    /** @internal */
    onEnable(): void {
        if (!this._net) this._net = NetworkedStreams.create(this);
        if (this.debug) this._net.debug = true;

        this._net.addEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
        this._net.addEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded);
        this._net.enable();

        if (this.autoConnect) {
            if (this.context.connection.isConnected) this.connect();
        }

        this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
        this.context.connection.beginListen(RoomEvents.LeftRoom, this.onLeftRoom);
        this.onEnabledChanged();
        this.updateButton();
        window.addEventListener("visibilitychange", this.onVisibilityChanged);
    }

    /** @internal */
    onDisable(): void {
        if (this._net) {
            this._net.stopSendingStream(this._outputStream);
            //@ts-ignore
            this._net.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
            //@ts-ignore
            this._net.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded);
            this._net.disable();
        }
        this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
        this.context.connection.stopListen(RoomEvents.LeftRoom, this.onLeftRoom);
        this.onEnabledChanged();
        this.updateButton();
        window.removeEventListener("visibilitychange", this.onVisibilityChanged);
    }

    /** @internal */
    onDestroy(): void {
        this._menubutton?.remove();
        this._menubutton = undefined;
    }

    /** Set via the mic button (e.g. when the websocket connection closes and rejoins but the user was muted before, we don't want to enable VoIP again automatically) */
    private _allowSending = true;

    private _outputStream: MediaStream | null = null;

    /**
     * @returns true if the component is currently sending audio
     */
    get isSending() {
        return this._outputStream != null && this._outputStream.active;
    }

    /**
     * Start sending audio.
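     *
     * A sketch of passing custom audio constraints (standard `MediaTrackConstraints`; which constraints a device honors varies by browser). Assumes `voip` references this component instance:
     * ```ts
     * voip.connect({ echoCancellation: true, noiseSuppression: true });
     * ```
     * @param audioSource Optional constraints forwarded to `getUserMedia` when requesting the microphone. When omitted, the default audio input is used.
     * @returns true if an audio stream was acquired and is now being sent, false otherwise.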
     */
    async connect(audioSource?: MediaTrackConstraints) {
        if (!this._net) {
            console.error("Cannot connect to voice chat - NetworkedStreams not initialized. Make sure the component is enabled before calling this method.");
            return false;
        }
        if (!this.context.connection.isConnected) {
            console.error("Cannot connect to voice chat - not connected to server");
            this.updateButton();
            return false;
        }
        else if (!await DeviceUtilities.microphonePermissionsGranted()) {
            console.error("Cannot connect to voice chat - microphone permissions not granted");
            this.updateButton();
            return false;
        }

        this._allowSending = true;

        this._net?.stopSendingStream(this._outputStream);
        disposeStream(this._outputStream);

        this._outputStream = await this.getAudioStream(audioSource);
        if (this._outputStream) {
            if (this.debug) console.log("VOIP: Got audio stream");
            this._net?.startSendingStream(this._outputStream);
            this.updateButton();
            return true;
        }
        else {
            this.updateButton();
            if (!await DeviceUtilities.microphonePermissionsGranted()) {
                showBalloonError("Microphone permissions not granted: Please grant microphone permissions to use voice chat");
            }
            else console.error("VOIP: Could not get audio stream - please make sure to connect an audio device and grant microphone permissions");
        }
        if (this.debug || isDevEnvironment()) console.log("VOIP: Failed to get audio stream");
        return false;
    }

    /** Stop sending audio (muting your own microphone) */
    disconnect(opts?: { remember: boolean }) {
        if (opts?.remember) {
            this._allowSending = false;
        }
        this._net?.stopSendingStream(this._outputStream);
        disposeStream(this._outputStream);
        this._outputStream = null;
        this.updateButton();
    }

    /**
     * Mute or unmute the outgoing audio stream. This disables the local microphone tracks while keeping the stream alive.
     * Use disconnect() to stop sending audio entirely.
     */
    setMuted(mute: boolean) {
        const audio = this._outputStream?.getAudioTracks();
        if (audio) {
            for (const track of audio) {
                track.enabled = !mute;
            }
        }
    }

    /** Returns true if the outgoing audio stream is currently muted */
    get isMuted() {
        if (this._outputStream === null) return false;
        const audio = this._outputStream?.getAudioTracks();
        if (audio) {
            for (const track of audio) {
                if (!track.enabled) return true;
            }
        }
        return false;
    }

    private async updateButton() {
        if (this.createMenuButton) {
            if (!this._menubutton) {
                this._menubutton = document.createElement("button");
                this._menubutton.addEventListener("click", () => {
                    if (this.isSending) {
                        this.disconnect({ remember: true });
                    }
                    else this.connect();

                    DeviceUtilities.microphonePermissionsGranted().then(res => {
                        if (!res) showBalloonWarning("Microphone permissions not granted. Please allow your browser to use the microphone to be able to talk. Click on the button on the left side of your browser's address bar to allow microphone permissions.");
                    });
                });
            }
            if (this._menubutton) {
                this.context.menu.appendChild(this._menubutton);
                if (this.activeAndEnabled) {
                    this._menubutton.style.display = "";
                }
                else {
                    this._menubutton.style.display = "none";
                }
                this._menubutton.title = this.isSending ? "Click to disable your microphone" : "Click to enable your microphone";
                let label = this.isSending ? "" : "";
                let icon = this.isSending ? "mic" : "mic_off";
                const hasPermission = await DeviceUtilities.microphonePermissionsGranted();
                if (!hasPermission) {
                    label = "No Permission";
                    icon = "mic_off";
                    this._menubutton.title = "Microphone permissions not granted. Please allow your browser to use the microphone to be able to talk. This can usually be done in the address bar of the webpage.";
                }
                this._menubutton.innerText = label;
                this._menubutton.prepend(getIconElement(icon));
                if (this.context.connection.isConnected == false) this._menubutton.setAttribute("disabled", "");
                else this._menubutton.removeAttribute("disabled");
            }
        }
        else if (!this.activeAndEnabled) {
            this._menubutton?.remove();
        }
    }

    // private _analyzer?: AudioAnalyser;

    /** @deprecated */
    public getFrequency(_userId: string | null): number | null {
        if (!this["unsupported_getfrequency"]) {
            this["unsupported_getfrequency"] = true;
            if (isDevEnvironment()) showBalloonWarning("VOIP: getFrequency is currently not supported");
            console.warn("VOIP: getFrequency is currently not supported");
        }
        // passing null returns the first stream with some data
        // if (userId === null) {
        //     for (const c in this._incomingStreams) {
        //         const call = this._incomingStreams[c];
        //         if (call && call.currentAnalyzer) return call.currentAnalyzer.getAverageFrequency();
        //     }
        //     return null;
        // }
        // const call = this._incomingStreams.get(userId);
        // if (call && call.currentAnalyzer) return call.currentAnalyzer.getAverageFrequency();
        return null;
    }

    private async getAudioStream(audio?: MediaTrackConstraints) {
        if (!navigator.mediaDevices.getUserMedia) {
            console.error("No getUserMedia support");
            return null;
        }

        const getUserMedia = async (constraints?: MediaTrackConstraints): Promise<MediaStream | null> => {
            return await navigator.mediaDevices.getUserMedia({ audio: constraints ?? true, video: false })
                .catch((err) => {
                    console.warn("VOIP failed getting audio stream", err);
                    return null;
                });
        };

        const stream = await getUserMedia(audio);
        if (!stream) return null;

        // NE-5445: on iOS, calling `getUserMedia` automatically switches the audio to the built-in microphone and speakers even if headphones are connected.
        // If no device is selected explicitly we try to automatically select an external device.
        if (DeviceUtilities.isiOS() && audio?.deviceId === undefined) {
            const devices = await navigator.mediaDevices.enumerateDevices();
            // anything whose label doesn't contain "iPhone" is likely AirPods or other bluetooth headphones
            const nonBuiltInAudioSource = devices.find((device) => (device.kind === "audioinput" || device.kind === "audiooutput") && !device.label.includes("iPhone"));
            if (nonBuiltInAudioSource) {
                const constraints = Object.assign({}, audio);
                constraints.deviceId = nonBuiltInAudioSource.deviceId;
                return await getUserMedia(constraints);
            }
        }

        return stream;
    }

    // we have to wait for the user to connect to a room when "auto connect" is enabled
    private onJoinedRoom = async () => {
        if (this.debug) console.log("VOIP: Joined room");
        // Wait a moment for the user list to be populated
        await delay(300);
        if (this.autoConnect && !this.isSending && this._allowSending) {
            this.connect();
        }
    }

    private onLeftRoom = () => {
        if (this.debug) console.log("VOIP: Left room");
        this.disconnect();
        for (const incoming of this._incomingStreams.values()) {
            disposeStream(incoming.srcObject as MediaStream);
        }
        this._incomingStreams.clear();
    }

    private _incomingStreams: Map<string, HTMLAudioElement> = new Map();

    private onReceiveStream = (evt: StreamReceivedEvent) => {
        const userId = evt.target.userId;
        const stream = evt.stream;
        let audioElement = this._incomingStreams.get(userId);
        if (!audioElement) {
            audioElement = new Audio();
            this._incomingStreams.set(userId, audioElement);
        }
        audioElement.srcObject = stream;
        audioElement.setAttribute("autoplay", "true");
        // On mobile we need to wait for user interaction before playing audio;
        // autoplay doesn't work on Android when the page is refreshed.
        Application.registerWaitForInteraction(() => {
            audioElement?.play().catch((err) => {
                console.error("VOIP: Failed to play audio", err);
            });
        });
    }

    private onStreamEnded = (evt: StreamEndedEvent) => {
        const existing = this._incomingStreams.get(evt.userId);
        disposeStream(existing?.srcObject as MediaStream);
        this._incomingStreams.delete(evt.userId);
    }

    private onEnabledChanged = () => {
        for (const [, element] of this._incomingStreams) {
            element.muted = !this.enabled;
        }
    }

    private onVisibilityChanged = () => {
        if (this.runInBackground) return;
        const visible = document.visibilityState === "visible";
        const muted = !visible;
        this.setMuted(muted);
        for (const [, element] of this._incomingStreams) {
            element.muted = muted;
        }
    };
}