import * as i0 from "@angular/core";

/** Settings applied by the transcoder when re-encoding audio. */
export type TranscoderConfig = {
    sampleRate: number;
};
/** Transcoder settings plus the audio blob to transcode. */
export type TranscodeParams = TranscoderConfig & {
    blob: Blob;
};
/** Arguments for writing the 44-byte RIFF/WAVE header into `arrayBuffer`. */
type WriteWaveHeaderParams = {
    arrayBuffer: ArrayBuffer;
    channelCount: number;
    sampleRate: number;
};
/** Arguments for writing PCM samples (one Float32Array per channel) into `arrayBuffer`. */
type WriteAudioDataParams = {
    arrayBuffer: ArrayBuffer;
    dataByChannel: Float32Array[];
};
/**
 * The `TranscoderService` is used to transcode audio recordings to a format that's supported by all major browsers. The SDK uses this to create voice messages.
 *
 * If you want to use your own transcoder you can provide a `customTranscoder`.
 */
export declare class TranscoderService {
    config: TranscoderConfig;
    /** Optional replacement for the built-in transcoder; may return the result synchronously or as a promise. */
    customTranscoder?: (blob: Blob) => Blob | Promise<Blob>;
    constructor();
    /**
     * The default transcoder will leave audio/mp4 files as is, and transcode webm files to wav. If you want to customize this, you can provide your own transcoder using the `customTranscoder` field
     * @param blob the recorded audio file
     * @returns the transcoded file
     */
    transcode(blob: Blob): Promise<Blob>;
    // NOTE(review): generic argument restored — presumably the encoded WAV bytes, given the
    // ArrayBuffer-based writeWav* helpers below; confirm against the implementation.
    protected renderAudio(audioBuffer: AudioBuffer, sampleRate: number): Promise<ArrayBuffer>;
    /** Decodes the given blob into a Web Audio `AudioBuffer`. */
    protected toAudioBuffer(blob: Blob): Promise<AudioBuffer>;
    protected writeWavAudioData({ arrayBuffer, dataByChannel, }: WriteAudioDataParams): void;
    protected writeWavHeader({ arrayBuffer, channelCount, sampleRate, }: WriteWaveHeaderParams): void;
    protected splitDataByChannel: (audioBuffer: AudioBuffer) => Float32Array[];
    static ɵfac: i0.ɵɵFactoryDeclaration<TranscoderService, never>;
    static ɵprov: i0.ɵɵInjectableDeclaration<TranscoderService>;
}
export {};