import { Material, Mesh, Object3D, ShaderMaterial, SRGBColorSpace, Texture, Vector2, Vector4, VideoTexture } from "three";
import { isDevEnvironment } from "../engine/debug/index.js";
import { ObjectUtils, PrimitiveType } from "../engine/engine_create_objects.js";
import { awaitInput } from "../engine/engine_input_utils.js";
import { serializable } from "../engine/engine_serialization_decorator.js";
import { Context } from "../engine/engine_setup.js";
import { getWorldScale } from "../engine/engine_three_utils.js";
import { getParam } from "../engine/engine_utils.js";
import { Behaviour, GameObject } from "./Component.js";
import { Renderer } from "./Renderer.js";
const debug = getParam("debugvideo");
export enum AspectMode {
None = 0,
AdjustHeight = 1,
AdjustWidth = 2,
}
export enum VideoSource {
/** Use the current clip as the video content source */
VideoClip = 0,
/** Use the current URL as the video content source */
Url = 1,
}
export enum VideoAudioOutputMode {
None = 0,
AudioSource = 1,
Direct = 2,
APIOnly = 3,
}
export enum VideoRenderMode {
CameraFarPlane = 0,
CameraNearPlane = 1,
RenderTexture = 2,
MaterialOverride = 3,
}
/**
* The VideoPlayer component can be used to play back video clips from URLs, streams or m3u8 playlists (livestreams).
* @example Add a VideoPlayer component to a game object and set the url to a video file. The video will start playing once the object becomes active in your scene:
* ```typescript
* const videoPlayer = addComponent(obj, VideoPlayer, {
* url: "https://www.w3schools.com/html/mov_bbb.mp4",
* playOnAwake: true,
* });
* ```
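* @example HLS livestreams (m3u8) are supported as well; hls.js is loaded on demand when needed. The playlist URL below is just a placeholder:
* ```typescript
* const livestream = addComponent(obj, VideoPlayer, {
*     url: "https://example.com/livestream.m3u8",
*     playOnAwake: true,
* });
* ```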
* @category Multimedia
* @group Components
*/
export class VideoPlayer extends Behaviour {
/**
* When true the video will start playing as soon as the component is enabled
*/
@serializable()
playOnAwake: boolean = true;
/**
* The aspect mode to use for the video. If set to {@link AspectMode.AdjustHeight} or {@link AspectMode.AdjustWidth} the object's scale is adjusted so the video keeps its aspect ratio.
*/
@serializable()
aspectMode: AspectMode = AspectMode.None;
@serializable(URL)
private clip?: string | MediaStream | null = null;
// Set a default source; this should not be undefined.
@serializable()
private source: VideoSource = VideoSource.Url;
/**
* The video clip URL to play.
*/
@serializable(URL)
get url() { return this._url }
/**
* Set the video clip URL to play.
*/
set url(val: string | null) {
const prev = this._url;
const changed = prev !== val;
if (this.__didAwake) {
if (changed) {
this.setClipURL(val ?? "");
}
}
else this._url = val;
}
private _url: string | null = null;
@serializable()
private renderMode?: VideoRenderMode;
@serializable()
private targetMaterialProperty?: string;
@serializable(Renderer)
private targetMaterialRenderer?: Renderer;
@serializable(Texture)
private targetTexture?: Texture;
@serializable()
private time: number = 0;
private _playbackSpeed: number = 1;
/**
* Get the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
* @default 1
*/
@serializable()
get playbackSpeed(): number {
return this._videoElement?.playbackRate ?? this._playbackSpeed;
}
/**
* Set the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
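* @example
* ```typescript
* videoPlayer.playbackSpeed = 0.5; // play the video at half speed
* ```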
*/
set playbackSpeed(val: number) {
this._playbackSpeed = val;
if (this._videoElement)
this._videoElement.playbackRate = val;
}
private _isLooping: boolean = false;
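/**
* When true the video restarts from the beginning when it has ended
* @default false
*/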
get isLooping(): boolean {
return this._videoElement?.loop ?? this._isLooping;
}
@serializable()
set isLooping(val: boolean) {
this._isLooping = val;
if (this._videoElement)
this._videoElement.loop = val;
}
/**
* @returns the current time of the video in seconds
*/
get currentTime(): number {
return this._videoElement?.currentTime ?? this.time;
}
/**
* Set the current time of the video in seconds.
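* @example
* ```typescript
* videoPlayer.currentTime = 10; // seek to 10 seconds
* ```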
*/
set currentTime(val: number) {
if (this._videoElement) {
this._videoElement.currentTime = val;
}
else this.time = val;
}
/**
* @returns true if the video is currently playing
*/
get isPlaying(): boolean {
const video = this._videoElement;
if (video) {
if (video.currentTime > 0 && !video.paused && !video.ended
&& video.readyState > video.HAVE_CURRENT_DATA)
return true;
else if (video.srcObject) {
const stream = video.srcObject as MediaStream;
if (stream.active) return true;
}
}
return false;
}
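/**
* The crossorigin attribute that is applied to the underlying video element
* @default "anonymous"
*/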
get crossOrigin(): string | null {
return this._videoElement?.crossOrigin ?? this._crossOrigin;
}
set crossOrigin(val: string | null) {
this._crossOrigin = val;
if (this._videoElement) {
if (val !== null) this._videoElement.setAttribute("crossorigin", val);
else this._videoElement.removeAttribute("crossorigin");
}
}
/**
* the material that is used to render the video
*/
get videoMaterial() {
if (!this._videoMaterial) if (!this.create(false)) return null;
return this._videoMaterial;
}
/**
* the video texture that is used to render the video
*/
get videoTexture() {
if (!this._videoTexture) if (!this.create(false)) return null;
return this._videoTexture;
}
/**
* the HTMLVideoElement that is used to play the video
*/
get videoElement() {
if (!this._videoElement) if (!this.create(false)) return null;
return this._videoElement!;
}
/**
* Request the browser to enter picture in picture mode
* @link https://developer.mozilla.org/en-US/docs/Web/API/Picture-in-Picture_API
* @returns the promise returned by the browser
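* @example The browser may reject the request (e.g. without a user gesture), so a minimal sketch guards the call:
* ```typescript
* videoPlayer.requestPictureInPicture()?.catch(err => console.warn("Picture in picture request failed", err));
* ```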
*/
requestPictureInPicture() {
if (this._videoElement) return this._videoElement.requestPictureInPicture();
return null;
}
/**
* @returns true if the video is muted
*/
get muted() {
return this._videoElement?.muted ?? this._muted;
}
/**
* set the video to be muted
*/
set muted(val: boolean) {
this._muted = val;
if (this._videoElement) this._videoElement.muted = val;
}
private _muted: boolean = false;
/**
* The current video clip that is being played
*/
get currentVideo() {
return this.clip;
}
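/**
* Controls how the video audio is output.
* Note that {@link VideoAudioOutputMode.AudioSource} is not yet implemented; {@link VideoAudioOutputMode.Direct} plays audio through the video element.
*/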
@serializable()
private set audioOutputMode(mode: VideoAudioOutputMode) {
if (mode !== this._audioOutputMode) {
if (mode === VideoAudioOutputMode.AudioSource && isDevEnvironment()) console.warn("VideoAudioOutputMode.AudioSource is not yet implemented");
this._audioOutputMode = mode;
this.updateVideoElementSettings();
}
}
private get audioOutputMode() { return this._audioOutputMode; }
private _audioOutputMode: VideoAudioOutputMode = VideoAudioOutputMode.Direct;
/** Set this to false to pause video playback while the tab is not active
* @default true
*/
playInBackground: boolean = true;
private _crossOrigin: string | null = "anonymous";
private _videoElement: HTMLVideoElement | null = null;
private _videoTexture: VideoTexture | null = null;
private _videoMaterial: Material | null = null;
private _isPlaying: boolean = false;
private wasPlaying: boolean = false;
/** Ensures the video element has been created and starts loading the clip */
preloadVideo() {
if (debug) console.log("Video Preload: " + this.name, this.clip);
this.create(false);
}
/** @deprecated use `preloadVideo()` */
preload() { this.preloadVideo(); }
/** Set a new video stream as the video source.
* Starts playing automatically if the video player hasn't been active before and {@link playOnAwake} is true.
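* @example A minimal sketch, assuming a webcam stream requested via getUserMedia:
* ```typescript
* const stream = await navigator.mediaDevices.getUserMedia({ video: true });
* videoPlayer.setVideo(stream);
* ```
*/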
setVideo(video: MediaStream) {
this.clip = video;
this.source = VideoSource.VideoClip;
if (!this._videoElement) this.create(this.playOnAwake);
else {
// TODO: how to prevent interruption error when another video is already playing
this._videoElement.srcObject = video;
if (this._isPlaying)
this.play();
this.updateAspect();
}
}
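/** Set a new video URL to play. Switches the source to {@link VideoSource.Url}; m3u8 playlists are played via hls.js, which is loaded on demand.
* @example
* ```typescript
* videoPlayer.setClipURL("https://www.w3schools.com/html/mov_bbb.mp4");
* ```
*/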
setClipURL(url: string) {
if (this._url === url) return;
this._url = url;
this.source = VideoSource.Url;
if (debug) console.log("set url", url);
if (!this._videoElement) this.create(this.playOnAwake);
else {
if (url.endsWith(".m3u8") || url.includes(".m3u")) {
this.ensureM3UCanBePlayed();
}
else {
this._videoElement.src = url;
if (this._isPlaying) {
this.stop();
this.play();
}
}
}
}
/** @internal */
onEnable(): void {
if (debug) console.log("VideoPlayer.onEnable", VideoSource[this.source], this.clip, this.url, this)
window.addEventListener('visibilitychange', this.visibilityChanged);
if (this.playOnAwake === true) {
this.create(true);
}
else {
this.preloadVideo();
}
if (this.screenspace) {
this._overlay?.start();
}
else this._overlay?.stop();
}
/** @internal */
onDisable(): void {
window.removeEventListener('visibilitychange', this.visibilityChanged);
this._overlay?.stop();
this.pause();
}
private visibilityChanged = (_: Event) => {
switch (document.visibilityState) {
case "hidden":
if (!this.playInBackground) {
this.wasPlaying = this._isPlaying;
this.pause();
}
break;
case "visible":
if (this.wasPlaying && !this._isPlaying) this.play();
break;
}
}
/** @internal */
onDestroy(): void {
if (this._videoElement) {
this.videoElement?.remove();
this._videoElement = null;
}
if (this._videoTexture) {
this._videoTexture.dispose();
this._videoTexture = null;
}
}
private _receivedInput: boolean = false;
/**
* @internal
*/
constructor() {
super();
awaitInput(() => {
this._receivedInput = true;
this.updateVideoElementSettings();
});
this._targetObjects = [];
if (getParam("videoscreenspace")) {
window.addEventListener("keydown", evt => {
if (evt.key === "f") {
this.screenspace = !this.screenspace;
}
});
}
}
/** Start playing the video source. If no user input has been received yet, the video is muted so that browser autoplay policies allow playback. */
play() {
if (!this._videoElement) this.create(false);
if (!this._videoElement) {
if (debug) console.warn("Can not play: no video element found", this);
return
}
if (this._isPlaying && !this._videoElement?.ended && !this._videoElement?.paused) return;
this._isPlaying = true;
if (!this._receivedInput) this._videoElement.muted = true;
this.handleBeginPlaying(false);
if (this.shouldUseM3U) {
this.ensureM3UCanBePlayed();
return;
}
if (debug) console.log("Video Play()", this.clip, this._videoElement, this.time);
this._videoElement.currentTime = this.time;
this._videoElement.play().catch(err => {
console.log(err);
// https://developer.chrome.com/blog/play-request-was-interrupted/
if (debug)
console.error("Error playing video", err, "CODE=" + err.code, this.videoElement?.src, this);
setTimeout(() => {
if (this._isPlaying && !this.destroyed && this.activeAndEnabled)
this.play();
}, 1000);
});
if (debug) console.log("play", this._videoElement, this.time);
}
/**
* Stop the video playback. This will reset the video to the beginning
*/
stop() {
this._isPlaying = false;
this.time = 0;
if (!this._videoElement) return;
this._videoElement.currentTime = 0;
this._videoElement.pause();
if (debug) console.log("STOP", this);
}
/**
* Pause the video playback
*/
pause(): void {
this.time = this._videoElement?.currentTime ?? 0;
this._isPlaying = false;
this._videoElement?.pause();
if (debug) console.log("PAUSE", this, this.currentTime);
}
/** Create the video element and assign the video source URL or stream */
create(playAutomatically: boolean): boolean {
let src;
switch (this.source) {
case VideoSource.VideoClip:
src = this.clip;
break;
case VideoSource.Url:
src = this.url;
if (!src?.length && typeof this.clip === "string")
src = this.clip;
break;
}
if (!src) {
if (debug) console.warn("No video source set", this);
return false;
}
if (!this._videoElement) {
if (debug)
console.warn("Create VideoElement", this);
this._videoElement = this.createVideoElement();
this.context.domElement!.shadowRoot!.prepend(this._videoElement);
// hide it because otherwise it would overlay the website with default css
this.updateVideoElementStyles();
}
if (typeof src === "string") {
if (debug) console.log("Set Video src", src);
this._videoElement.src = src;
// Not sure why we did this here; with this code the video does not restart when paused or when enabled is toggled
// const str = this._videoElement["captureStream"]?.call(this._videoElement);
// this.clip = str;
}
else {
if (debug) console.log("Set Video srcObject", src);
this._videoElement.srcObject = src;
}
if (!this._videoTexture)
this._videoTexture = new VideoTexture(this._videoElement);
this._videoTexture.flipY = false;
this._videoTexture.colorSpace = SRGBColorSpace;
if (playAutomatically)
this.handleBeginPlaying(playAutomatically);
if (debug)
console.log("Video: handle playing done...", src, playAutomatically);
return true;
}
updateAspect() {
if (this.aspectMode === AspectMode.None) return;
this.startCoroutine(this.updateAspectImpl());
}
private _overlay: VideoOverlay | null = null;
/**
* If true the video will be rendered in screenspace mode and overlaid on top of the scene.
* Alternatively you can request picture-in-picture playback by calling {@link requestPictureInPicture}.
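* @example The constructor already wires this up behind the `videoscreenspace` URL parameter; toggling it manually looks like this:
* ```typescript
* window.addEventListener("keydown", evt => {
*     if (evt.key === "f") videoPlayer.screenspace = !videoPlayer.screenspace;
* });
* ```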
*/
get screenspace(): boolean {
return this._overlay?.enabled ?? false;
}
set screenspace(val: boolean) {
if (val) {
if (!this._videoTexture) return;
if (!this._overlay) this._overlay = new VideoOverlay(this.context);
this._overlay.add(this._videoTexture);
}
else this._overlay?.remove(this._videoTexture);
if (this._overlay) this._overlay.enabled = val;
}
private _targetObjects: Object3D[];
private createVideoElement(): HTMLVideoElement {
const video = document.createElement("video") as HTMLVideoElement;
if (this._crossOrigin)
video.setAttribute("crossorigin", this._crossOrigin);
if (debug) console.log("created video element", video);
return video;
}
private handleBeginPlaying(playAutomatically: boolean) {
if (!this.activeAndEnabled) return;
if (!this._videoElement) return;
this._targetObjects.length = 0;
let target: Object3D | undefined = this.gameObject;
switch (this.renderMode) {
case VideoRenderMode.MaterialOverride:
target = this.targetMaterialRenderer?.gameObject;
if (!target) target = GameObject.getComponent(this.gameObject, Renderer)?.gameObject;
break;
case VideoRenderMode.RenderTexture:
console.error("VideoPlayer renderTexture not implemented yet. Please use material override instead");
return;
}
if (!target) {
console.error("Missing target for video material renderer", this.name, VideoRenderMode[this.renderMode!], this);
return;
}
const mat = target["material"];
if (mat) {
this._targetObjects.push(target);
if (mat !== this._videoMaterial) {
this._videoMaterial = mat.clone();
target["material"] = this._videoMaterial;
}
const fieldName = "map";
const videoMaterial = this._videoMaterial as any;
if (!this.targetMaterialProperty) {
videoMaterial[fieldName] = this._videoTexture;
}
else {
switch (this.targetMaterialProperty) {
default:
videoMaterial[fieldName] = this._videoTexture;
break;
// doesn't render:
// case "emissiveTexture":
// console.log(this.videoMaterial);
// // (this.videoMaterial as any).map = this.videoTexture;
// (this.videoMaterial as any).emissive?.set(1,1,1);// = this.videoTexture;
// (this.videoMaterial as any).emissiveMap = this.videoTexture;
// break;
}
}
}
else {
console.warn("Can not play video, no material found, this might be a multimaterial case which is not supported yet");
return;
}
this.updateVideoElementSettings();
this.updateVideoElementStyles();
if (playAutomatically) {
if (this.shouldUseM3U) {
this.ensureM3UCanBePlayed();
}
this.play();
}
}
private updateVideoElementSettings() {
if (!this._videoElement) return;
this._videoElement.loop = this._isLooping;
this._videoElement.currentTime = this.currentTime;
this._videoElement.playbackRate = this._playbackSpeed;
// don't open fullscreen on iOS; play inline instead
this._videoElement.playsInline = true;
let muted = !this._receivedInput || this.audioOutputMode === VideoAudioOutputMode.None;
if (!muted && this._muted) muted = true;
this._videoElement.muted = muted;
if (this.playOnAwake)
this._videoElement.autoplay = true;
}
private updateVideoElementStyles() {
if (!this._videoElement) return;
// set the style here so the preview frame is rendered
// set display, visibility and user-select because otherwise the element interferes with input/focus (e.g. it breaks orbit controls)
this._videoElement.style.userSelect = "none";
this._videoElement.style.visibility = "hidden";
this._videoElement.style.display = "none";
this.updateAspect();
}
private _updateAspectRoutineId: number = -1;
private *updateAspectImpl() {
const id = ++this._updateAspectRoutineId;
let lastAspect: number | undefined = undefined;
const stream = this.clip;
while (id === this._updateAspectRoutineId && this.aspectMode !== AspectMode.None && this.clip && stream === this.clip && this._isPlaying) {
if (!stream || typeof stream === "string") {
return;
}
let aspect: number | undefined = undefined;
for (const track of stream.getVideoTracks()) {
const settings = track.getSettings();
if (settings && settings.width && settings.height) {
aspect = settings.width / settings.height;
break;
}
// on Firefox capturing a canvas stream works, but it looks like
// the canvas stream track doesn't contain settings
else {
aspect = this.context.renderer.domElement.clientWidth / this.context.renderer.domElement.clientHeight;
}
}
if (aspect === undefined) {
for (let i = 0; i < 10; i++)
yield;
if (!this.isPlaying) break;
continue;
}
if (lastAspect === aspect) {
yield;
continue;
}
for (const obj of this._targetObjects) {
let worldAspect = 1;
if (obj.parent) {
const parentScale = getWorldScale(obj.parent);
worldAspect = parentScale.x / parentScale.y;
}
switch (this.aspectMode) {
case AspectMode.AdjustHeight:
obj.scale.y = 1 / aspect * obj.scale.x * worldAspect;
break;
case AspectMode.AdjustWidth:
obj.scale.x = aspect * obj.scale.y * worldAspect;
break;
}
}
lastAspect = aspect;
for (let i = 0; i < 3; i++)
yield;
}
}
private get shouldUseM3U(): boolean { return this.url != undefined && (this.url.endsWith(".m3u8") || this.url.endsWith(".m3u")) && this.source === VideoSource.Url; }
private ensureM3UCanBePlayed() {
if (!this.shouldUseM3U) return;
let hls_script = document.head.querySelector("script[data-hls_library]") as HTMLScriptElement;
if (!hls_script) {
if (debug) console.log("HLS: load script");
hls_script = document.createElement("script");
hls_script.dataset["hls_library"] = "hls.js";
hls_script.src = "https://cdn.jsdelivr.net/npm/hls.js@1";
hls_script.addEventListener("load", this.onHlsAvailable);
document.head.append(hls_script);
}
else if (globalThis["Hls"]) {
this.onHlsAvailable();
}
else {
hls_script.addEventListener("load", this.onHlsAvailable);
}
}
private _hls?: Hls;
private onHlsAvailable = () => {
if (debug) console.log("HLS: available", this.clip);
if (!this.shouldUseM3U || !this.url) return;
if (!this._hls)
this._hls = new Hls();
this.videoElement!.autoplay = true;
this._hls.loadSource(this.url);
this._hls.attachMedia(this.videoElement!);
this._videoElement?.play();
if (debug) console.log("HLS: loaded", this.clip);
}
}
/** Minimal ambient declaration for the hls.js library, which is loaded on demand from a CDN */
declare class Hls {
constructor();
loadSource(url: string): void;
attachMedia(videoElement: HTMLVideoElement): void;
}
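/** Renders the most recently added video texture on a fullscreen quad on top of the scene. Used by {@link VideoPlayer.screenspace}. */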
class VideoOverlay {
readonly context: Context;
constructor(context: Context) {
this.context = context;
this._input = new VideoOverlayInput(this);
}
get enabled() {
return this._isInScreenspaceMode;
}
set enabled(val: boolean) {
if (val) this.start();
else this.stop();
}
add(video: VideoTexture) {
if (this._videos.indexOf(video) === -1) {
this._videos.push(video);
}
}
remove(video: VideoTexture | null | undefined) {
if (!video) return;
const index = this._videos.indexOf(video);
if (index >= 0) {
this._videos.splice(index, 1);
}
}
start() {
if (this._isInScreenspaceMode) return;
if (this._videos.length <= 0) return;
const texture = this._videos[this._videos.length - 1];
if (!texture) return;
this._isInScreenspaceMode = true;
if (!this._screenspaceModeQuad) {
this._screenspaceModeQuad = ObjectUtils.createPrimitive(PrimitiveType.Quad, {
material: new ScreenspaceTexture(texture)
});
if (!this._screenspaceModeQuad) return;
this._screenspaceModeQuad.geometry.scale(2, 2, 2);
}
const quad = this._screenspaceModeQuad;
this.context.scene.add(quad);
this.updateScreenspaceMaterialUniforms();
const mat = quad.material as ScreenspaceTexture;
mat?.reset();
this._input?.enable(mat);
}
stop() {
this._isInScreenspaceMode = false;
if (this._screenspaceModeQuad) {
this._input?.disable();
this._screenspaceModeQuad.removeFromParent();
}
}
updateScreenspaceMaterialUniforms() {
const mat = this._screenspaceModeQuad?.material as ScreenspaceTexture;
if (!mat) return;
// mat.videoAspect = this.videoTexture?.image?.width / this.videoTexture?.image?.height;
mat.screenAspect = this.context.domElement.clientWidth / this.context.domElement.clientHeight;
}
private _videos: VideoTexture[] = [];
private _screenspaceModeQuad: Mesh | undefined;
private _isInScreenspaceMode: boolean = false;
private _input: VideoOverlayInput;
}
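/** Handles window input for the screenspace overlay: resize keeps the aspect uniform up to date, Escape and double-tap exit, mouse wheel and pinch zoom, dragging pans the video */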
class VideoOverlayInput {
private _onResizeScreenFn?: () => void;
private _onKeyUpFn?: (e: KeyboardEvent) => void;
private _onMouseWheelFn?: (e: WheelEvent) => void;
private readonly context: Context;
private readonly overlay: VideoOverlay;
constructor(overlay: VideoOverlay) {
this.overlay = overlay;
this.context = overlay.context;
}
private _material?: ScreenspaceTexture;
enable(mat: ScreenspaceTexture) {
this._material = mat;
window.addEventListener("resize", this._onResizeScreenFn = () => {
this.overlay.updateScreenspaceMaterialUniforms();
});
window.addEventListener("keyup", this._onKeyUpFn = (args) => {
if (args.key === "Escape")
this.overlay.stop();
});
window.addEventListener("wheel", this._onMouseWheelFn = (args) => {
if (this.overlay.enabled) {
mat.zoom += args.deltaY * .0005;
args.preventDefault();
}
}, { passive: false });
const delta: Vector2 = new Vector2();
window.addEventListener("mousemove", (args: MouseEvent) => {
if (this.overlay.enabled && this.context.input.getPointerPressed(0)) {
const normalizedMovement = new Vector2(args.movementX, args.movementY);
normalizedMovement.x /= this.context.domElement.clientWidth;
normalizedMovement.y /= this.context.domElement.clientHeight;
delta.set(normalizedMovement.x, normalizedMovement.y);
delta.multiplyScalar(mat.zoom / -this.context.time.deltaTime * .01);
mat.offset = mat.offset.add(delta);
}
});
window.addEventListener("pointermove", (args: PointerEvent) => {
if (this.overlay.enabled && this.context.input.getPointerPressed(0)) {
const count = this.context.input.getTouchesPressedCount();
if (count === 1) {
delta.set(args.movementX, args.movementY);
delta.multiplyScalar(mat.zoom * -this.context.time.deltaTime * .05);
mat.offset = mat.offset.add(delta);
}
}
});
let lastTouchStartTime = 0;
window.addEventListener("touchstart", e => {
if (e.touches.length < 2) {
if (this.context.time.time - lastTouchStartTime < .3) {
this.overlay.stop();
}
lastTouchStartTime = this.context.time.time;
return;
}
this._isPinching = true;
this._lastPinch = 0;
})
window.addEventListener("touchmove", e => {
if (!this._isPinching || !this._material) return;
const touch1 = e.touches[0];
const touch2 = e.touches[1];
const dx = touch1.clientX - touch2.clientX;
const dy = touch1.clientY - touch2.clientY;
const distance = Math.sqrt(dx * dx + dy * dy);
if (this._lastPinch !== 0) {
const delta = distance - this._lastPinch;
this._material.zoom -= delta * .004;
}
this._lastPinch = distance;
})
window.addEventListener("touchend", () => {
this._isPinching = false;
})
}
private _isPinching: boolean = false;
private _lastPinch = 0;
disable() {
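// Note: the mousemove/pointermove/touch handlers registered in enable() are currently not removed here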
if (this._onResizeScreenFn) {
window.removeEventListener("resize", this._onResizeScreenFn);
this._onResizeScreenFn = undefined;
}
if (this._onKeyUpFn) {
window.removeEventListener("keyup", this._onKeyUpFn);
this._onKeyUpFn = undefined;
}
if (this._onMouseWheelFn) {
window.removeEventListener("wheel", this._onMouseWheelFn);
this._onMouseWheelFn = undefined;
}
}
}
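/** Fullscreen shader material that fits the video into the screen while preserving its aspect ratio. Pan and zoom are packed into the `offsetScale` uniform (xy = offset, z = zoom). */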
class ScreenspaceTexture extends ShaderMaterial {
set screenAspect(val: number) {
this.uniforms["screenAspect"].value = val;
this.needsUpdate = true;
}
set offset(vec: Vector2 | { x: number, y: number }) {
const val = this.uniforms["offsetScale"].value;
val.x = vec.x;
val.y = vec.y;
// console.log(val);
this.uniforms["offsetScale"].value = val;
this.needsUpdate = true;
}
private readonly _offset: Vector2 = new Vector2();
get offset(): Vector2 {
const val = this.uniforms["offsetScale"].value;
this._offset.set(val.x, val.y);
return this._offset;
}
set zoom(val: number) {
const zoom = this.uniforms["offsetScale"].value;
if (val < .001) val = .001;
zoom.z = val;
// zoom.z = this.maxZoom - val;
// zoom.z /= this.maxZoom;
this.needsUpdate = true;
}
get zoom(): number {
return this.uniforms["offsetScale"].value.z;// * this.maxZoom;
}
reset() {
this.offset = this.offset.set(0, 0);
this.zoom = 1;
this.needsUpdate = true;
}
// maxZoom : number = 10
constructor(tex: Texture) {
super();
this.uniforms = {
map: { value: tex },
screenAspect: { value: 1 },
offsetScale: { value: new Vector4(0, 0, 1, 1) }
};
this.vertexShader = `
uniform sampler2D map;
uniform float screenAspect;
uniform vec4 offsetScale;
varying vec2 vUv;
void main() {
gl_Position = vec4( position , 1.0 );
vUv = uv;
vUv.y = 1. - vUv.y;
// fit into screen
ivec2 res = textureSize(map, 0);
float videoAspect = float(res.x) / float(res.y);
float aspect = videoAspect / screenAspect;
if(aspect >= 1.0)
{
vUv.y = vUv.y * aspect;
float offset = (1. - aspect) * .5;
vUv.y = vUv.y + offset;
}
else
{
vUv.x = vUv.x / aspect;
float offset = (1. - 1. / aspect) * .5;
vUv.x = vUv.x + offset;
}
vUv.x -= .5;
vUv.y -= .5;
vUv.x *= offsetScale.z;
vUv.y *= offsetScale.z;
vUv.x += offsetScale.x;
vUv.y += offsetScale.y;
vUv.x += .5;
vUv.y += .5;
}
`
this.fragmentShader = `
uniform sampler2D map;
varying vec2 vUv;
void main() {
if(vUv.x < 0. || vUv.x > 1. || vUv.y < 0. || vUv.y > 1.)
gl_FragColor = vec4(0., 0., 0., 1.);
else
{
vec4 texcolor = texture2D(map, vUv);
gl_FragColor = texcolor;
}
}
`
}
}