import { HMSPluginSupportResult, HMSVideoPlugin, HMSVideoPluginType } from '@100mslive/hms-video-store';
import '@tensorflow/tfjs-backend-webgl';
/**
 * Ambient type declaration for the 100ms virtual-background video plugin.
 *
 * Implements the {@link HMSVideoPlugin} contract: frames arrive on an input
 * canvas, a person-segmentation mask is inferred (TFLite model, WebGL backend),
 * and the composited result — with the configured image/GIF/video background or
 * a blur — is drawn onto the output canvas.
 *
 * NOTE(review): several members are typed `any` (tfLite, filters, gifFrames, …).
 * Their concrete types are not visible from this declaration; they are left
 * unchanged because narrowing them here could break downstream consumers.
 */
export declare class HMSVirtualBackgroundPlugin implements HMSVideoPlugin {
    /** Current background source: a URL/keyword string or a preloaded image element. */
    background: string | HTMLImageElement;
    /** Width of the segmentation mask produced by the model, in pixels. */
    personMaskWidth: number;
    /** Height of the segmentation mask produced by the model, in pixels. */
    personMaskHeight: number;
    /** True when a replacement background (as opposed to no effect) is active. */
    isVirtualBackground: boolean;
    /** Decoded image element when the background is a static image, else null. */
    backgroundImage: HTMLImageElement | null;
    /** Video element when the background is a video, else null. */
    backgroundVideo: HTMLVideoElement | null;
    /** Discriminator for the kind of background currently set (e.g. image/video/gif/blur). */
    backgroundType: string;
    /** Guard so the segmentation model is only loaded once. */
    loadModelCalled: boolean;
    /** Blur strength used when the background type is blur. */
    blurValue: any;
    /** Handle to the loaded TFLite segmentation model. */
    tfLite: any;
    /** Pending promise for the TFLite model load. */
    tfLitePromise: any;
    /** Identifier of the segmentation model in use. */
    modelName: string;
    /** Canvas the SDK feeds source frames into, once processing starts. */
    input: HTMLCanvasElement | null;
    /** Canvas the composited frame is rendered onto. */
    output: HTMLCanvasElement | null;
    /** 2D context of the output canvas. */
    outputCtx: CanvasRenderingContext2D | null;
    /** Timer handle for scheduled work (cleared in stop()). */
    timerID: number;
    /** Aspect ratio used to fit the background to the frame. */
    imageAspectRatio: number;
    /** personMaskWidth * personMaskHeight — pixel count of the mask buffer. */
    personMaskPixelCount: number;
    /** Raw mask pixels produced by post-processing the model output. */
    personMask: ImageData;
    /** Offscreen canvas the person mask is drawn into before compositing. */
    personMaskCanvas: HTMLCanvasElement;
    /** Context of personMaskCanvas. */
    personMaskCtx: any;
    /** Image-filter state (used by the optional sharpening pass). */
    filters: any;
    /** Optional sharpening of the segmented person edge; off by default. */
    enableSharpening?: boolean;
    /** Decoded frames when the background is an animated GIF. */
    gifFrames: any;
    /** Index of the GIF frame currently being drawn. */
    gifFramesIndex: number;
    /** Pixel data of the current GIF frame. */
    gifFrameImageData: any;
    /** Scratch canvas used while assembling GIF frames. */
    tempGifCanvas: HTMLCanvasElement;
    /** Context of tempGifCanvas. */
    tempGifContext: any;
    /** Frame-pacing counter for GIF playback. */
    giflocalCount: number;
    /**
     * @param background       initial background (URL/keyword or image element)
     * @param enableSharpening enable the edge-sharpening filter pass
     */
    constructor(background: string, enableSharpening?: boolean);
    /** Loads the segmentation model and prepares internal canvases. */
    init(): Promise<void>;
    /** Whether the current browser can run this plugin. */
    isSupported(): boolean;
    /** Detailed support result, per the HMSVideoPlugin contract. */
    checkSupport(): HMSPluginSupportResult;
    /** Unique plugin name used for registration with the SDK. */
    getName(): string;
    /** Plugin category (TRANSFORM) used by the SDK pipeline. */
    getPluginType(): HMSVideoPluginType;
    /** Switches the active background; accepts an image, video, or string source. */
    setBackground(path?: string | HTMLImageElement | HTMLVideoElement): Promise<void>;
    /** Releases model/canvas resources and cancels scheduled work. */
    stop(): void;
    /**
     * Processes one video frame: segments the person from `input` and draws the
     * composited result to `output`. May be synchronous or asynchronous, per the
     * HMSVideoPlugin interface.
     */
    processVideoFrame(input: HTMLCanvasElement, output: HTMLCanvasElement, skipProcessing?: boolean): Promise<void> | void;
    private setImage;
    private setGiF;
    private log;
    private resizeInputData;
    private infer;
    private postProcessing;
    private sharpenFilter;
    private drawPersonMask;
    private drawSegmentedBackground;
    private runSegmentation;
    private fitVideoToBackground;
    private fitImageToBackground;
    private fitGifToBackground;
    private fitData;
    private addBlurToBackground;
    private initSharpenFilter;
}