// Copyright © 2022 BytePlusRTC All rights reserved.
// SPDX-License-Identifier: MIT

// Auto-generated ambient declarations for the BytePlus RTC cross-platform SDK.
// `$p_a` / `$p_i` expose Android/iOS platform-specific types referenced by the
// `android_*` / `ios_*` members declared throughout this file.
import * as $p_a from '../android/index';
import * as $p_i from '../ios/index';
import { double, float, View, ByteBuffer, Surface } from './types';
import { IMediaPlayerCustomSourceProvider } from './callback';

/** {en}
 * @brief Network connection type.
 */
export declare enum RTCNetworkType {
  UNKNOWN = 0,
  LAN = 1,
  MOBILE_2G = 2,
  MOBILE_3G = 3,
  WIFI = 4,
  MOBILE_4G = 5,
  MOBILE_5G = 6,
  /**
   * @platform android
   */
  NONE = 7,
  ByteRTCNetworkTypeDisconnected = 8
}

/** {en}
 * @brief Reason for a change of media-stream subscription state.
 */
export declare enum SubscribeStateChangeReason {
  SUBSCRIBE = 0,
  UNSUBSCRIBE = 1,
  REMOTE_PUBLISH = 2,
  REMOTE_UNPUBLISH = 3,
  STREAM_FAILED_5XX = 4,
  STREAM_FAILED_404 = 5,
  OVER_STREAM_SUBSCRIBE_LIMIT = 6,
  NO_SUBSCRIBE_PERMISSION = 7
}

/** {en}
 * @brief Reason for a change of media-stream publish state.
 * NOTE(review): `PUBLISH_STREAM_FORBIDEN` is misspelled ("FORBIDDEN"), but it is
 * part of the shipped API surface and must not be renamed in a declaration file.
 */
export declare enum PublishStateChangeReason {
  PUBLISH = 0,
  UNPUBLISH = 1,
  NO_PUBLISH_PERMISSION = 2,
  OVER_STREAM_PUBLISH_LIMIT = 3,
  MULTIROOM_UNPUBLISH_FAILED = 4,
  PUBLISH_STREAM_FAILED = 5,
  PUBLISH_STREAM_FORBIDEN = 6,
  USER_IN_PUBLISH = 7
}

export declare enum PublishState {
  /** {en}
   * @brief Successfully published.
   */
  PUBLISHED = 0,
  /** {en}
   * @brief Failed to publish.
   */
  UNPUBLISHED = 1
}

/** {en}
 * @brief Reason a remote user went offline.
 */
export declare enum RTCUserOfflineReason {
  ByteRTCUserOfflineReasonQuit = 0,
  ByteRTCUserOfflineReasonDropped = 1,
  ByteRTCUserOfflineReasonSwitchToInvisible = 2,
  ByteRTCUserOfflineReasonKickedByAdmin = 3
}

/** {en}
 * @brief Events during single-stream CDN push.
 * NOTE(review): members appear duplicated in SCREAMING_SNAKE and ByteRTC-prefixed
 * forms with distinct values — presumably merged Android/iOS constant sets from
 * the generator; confirm before relying on numeric values cross-platform.
 */
export declare enum RTCStreamSinglePushEvent {
  SINGLE_STREAM_PUSH_START = 0,
  SINGLE_STREAM_PUSH_SUCCESS = 1,
  SINGLE_STREAM_PUSH_FAILED = 2,
  SINGLE_STREAM_PUSH_STOP = 3,
  SINGLE_STREAM_PUSH_TIMEOUT = 4,
  SINGLE_STREAM_PUSH_PARAM_ERROR = 5,
  ByteRTCSingleStreamPushEventStart = 7,
  ByteRTCSingleStreamPushEventStartSuccess = 8,
  ByteRTCSingleStreamPushEventStartFailed = 9,
  ByteRTCSingleStreamPushEventStop = 10,
  ByteRTCSingleStreamPushEventTimeout = 11,
  ByteRTCSingleStreamPushEventParamError = 12
}

/** {en}
 * @brief Errors occurring during pushing streams mixed by server to CDN.
 */
export declare enum RTCStreamMixingErrorCode {
  TRANSCODER_ERROR_OK = 0,
  TRANSCODER_ERROR_BASE = 1,
  TRANSCODER_ERROR_INVALID_PARAM = 2,
  TRANSCODER_ERROR_INVALID_STATE = 3,
  TRANSCODER_ERROR_INVALID_OPERATOR = 4,
  TRANSCODER_ERROR_TIMEOUT = 5,
  TRANSCODER_ERROR_INVALID_PARAM_BY_SERVER = 6,
  TRANSCODER_ERROR_SUB_TIMEOUT_BY_SERVER = 7,
  TRANSCODER_ERROR_INVALID_STATE_BY_SERVER = 8,
  TRANSCODER_ERROR_AUTHENTICATION_BY_CDN = 9,
  TRANSCODER_ERROR_TIMEOUT_BY_SIGNALING = 10,
  TRANSCODER_ERROR_MIX_IMAGE_FAIL = 11,
  TRANSCODER_ERROR_UNKNOW_ERROR_BY_SERVER = 12,
  ByteRTCStreamMixingErrorCodeOK = 13,
  ByteRTCStreamMixingErrorCodeBase = 14,
  ByteRTCStreamMixingErrorCodeInvalidParam = 15,
  ByteRTCStreamMixingErrorCodeInvalidState = 16,
  ByteRTCStreamMixingErrorCodeInvalidOperator = 17,
  ByteRTCStreamMixingErrorCodeTimeOut = 18,
  ByteRTCStreamMixingErrorCodeInvalidParamByServer = 19,
  ByteRTCStreamMixingErrorCodeSubTimeoutByServer = 20,
  ByteRTCStreamMixingErrorCodeInvalidStateByServer = 21,
  ByteRTCStreamMixingErrorCodeAuthenticationByCDN = 22,
  ByteRTCStreamMixingErrorCodeTimeoutBySignaling = 23,
  ByteRTCStreamMixingErrorCodeMixImageFail = 24,
  ByteRTCStreamMixingErrorCodeUnKnownErrorByServer = 25
}

/** {en}
 * @brief Encoder preference for the screen-sharing stream.
 */
export declare enum RTCScreenVideoEncoderPreference {
  MAINTAIN_FRAMERATE = 0,
  MAINTAIN_QUALITY
= 1,
  DISABLED = 2,
  BALANCE = 3
}

/** {en}
 * @brief Encoder preference for the camera stream.
 */
export declare enum RTCVideoEncoderPreference {
  DISABLED = 0,
  MAINTAIN_FRAMERATE = 1,
  MAINTAIN_QUALITY = 2,
  BALANCE = 3
}

export declare enum MixedStreamAlternateImageFillMode {
  FIT = 0,
  FILL = 1
}

export declare enum PauseResumeControlMediaType {
  AUDIO = 0,
  VIDEO = 1,
  AUDIO_AND_VIDEO = 2
}

export declare enum VideoSourceType {
  VIDEO_SOURCE_TYPE_EXTERNAL = 0,
  VIDEO_SOURCE_TYPE_INTERNAL = 1,
  VIDEO_SOURCE_TYPE_ENCODED_WITH_SIMULCAST = 2,
  VIDEO_SOURCE_TYPE_ENCODED_WITHOUT_SIMULCAST = 3
}

/** {en}
 * @brief Reliability/ordering configuration for messages.
 */
export declare enum MessageConfig {
  RELIABLE_ORDERED = 0,
  UNRELIABLE_ORDERED = 1,
  UNRELIABLE_UNORDERED = 2
}

/** {en}
 * @brief User feedback describing an in-call problem.
 */
export declare class ProblemFeedbackInfo {
  constructor(problemDesc: string);
  constructor();
  constructor();
  // Handle of the underlying platform (Android/iOS) object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief Text description
   */
  get problemDesc(): string;
  set problemDesc(value: string);
  /** {en}
   * @brief Room info. See {@link ProblemFeedbackRoomInfo ProblemFeedbackRoomInfo}.
   * NOTE(review): bare `Array` lacks its type argument (TS2314 under a default
   * config); per the link above this is presumably
   * `Array<ProblemFeedbackRoomInfo>` — confirm against the generator before
   * tightening, since changing it alters the public API surface.
   */
  get roomInfo(): Array;
  set roomInfo(value: Array);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Media device type. Note: values 3 and 10 are intentionally absent in
 * the generated constants.
 */
export declare enum MediaDeviceType {
  /** {en}
   * @platform android
   * @brief Unknown device
   */
  MEDIA_DEVICE_TYPE_AUDIO_UNKNOWN = 0,
  /** {en}
   * @platform android
   * @brief Audio render device
   */
  MEDIA_DEVICE_TYPE_AUDIO_RENDER_DEVICE = 1,
  /** {en}
   * @platform android
   * @brief Audio capture device
   */
  MEDIA_DEVICE_TYPE_AUDIO_CAPTURE_DEVICE = 2,
  /** {en}
   * @platform android
   * @brief Video capture device
   */
  MEDIA_DEVICE_TYPE_VIDEO_CAPTURE_DEVICE = 4,
  /** {en}
   * @platform android
   * @brief Screen video device
   */
  MEDIA_DEVICE_TYPE_SCREEN_VIDEO_CAPTURE_DEVICE = 5,
  /** {en}
   * @platform android
   * @brief Screen audio device
   */
  MEDIA_DEVICE_TYPE_SCREEN_AUDIO_CAPTURE_DEVICE = 6,
  ByteRTCMediaDeviceTypeAudioUnknown = 7,
  ByteRTCMediaDeviceTypeAudioRenderDevice = 8,
  ByteRTCMediaDeviceTypeAudioCaptureDevice = 9,
  ByteRTCMediaDeviceTypeVideoCaptureDevice = 11,
  ByteRTCMediaDeviceTypeScreenVideoCaptureDevice = 12,
  ByteRTCMediaDeviceTypeScreenAudioCaptureDevice = 13
}

/** {en}
 * @brief Identity of a user in a room.
 */
export declare class UserInfo {
  constructor(uid: string, extraInfo: string);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief User ID. The string matches the regular expression: `[a-zA-Z0-9_\@\\-\\.]{1,128}`.
   * You must set or manage the uid yourself and ensure that each uid within the same room is unique.
   */
  get userId(): string;
  set userId(value: string);
  /** {en}
   * @brief Additional information of the user. The maximum length is 200 bytes. The remote user will receive the info in `onUserJoined`.
   */
  get extraInfo(): string;
  set extraInfo(value: string);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Events during media-stream relay (forwarding) across rooms.
 */
export declare enum ForwardStreamEvent {
  FORWARD_STREAM_EVENT_DISCONNECTED = 0,
  FORWARD_STREAM_EVENT_CONNECTED = 1,
  FORWARD_STREAM_EVENT_DST_ROOM_UPDATED = 2,
  FORWARD_STREAM_EVENT_UN_EXPECT_API_CALL = 3,
  FORWARD_STREAM_EVENT_INTERRUPT = 4,
  ByteRTCForwardStreamEventInterrupt = 5
}

export declare enum SubscribeMediaType {
  NONE = 0,
  AUDIO_ONLY = 1,
  VIDEO_ONLY = 2,
  AUDIO_AND_VIDEO = 3
}

/** {en}
 * @brief SDK connection state with the server.
 */
export declare enum ConnectionState {
  CONNECTION_STATE_DISCONNECTED = 0,
  CONNECTION_STATE_CONNECTING = 1,
  CONNECTION_STATE_CONNECTED = 2,
  CONNECTION_STATE_RECONNECTING = 3,
  CONNECTION_STATE_RECONNECTED = 4,
  CONNECTION_STATE_LOST = 5,
  CONNECTION_STATE_FAILED = 6
}

export declare enum RecordingType {
  RECORD_AUDIO_ONLY = 0,
  RECORD_VIDEO_ONLY = 1,
  RECORD_VIDEO_AND_AUDIO = 2,
  ByteRTCRecordingTypeAudioOnly = 3,
  ByteRTCRecordingTypeVideoOnly = 4,
  ByteRTCRecordingTypeVideoAndAudio = 5
}

/** {en}
 * @brief Voice-equalization band center frequencies.
 */
export declare enum BandFrequency {
  VOICE_EQUALIZATION_BAND_FREQUENCY_31 = 0,
  VOICE_EQUALIZATION_BAND_FREQUENCY_62 = 1,
  VOICE_EQUALIZATION_BAND_FREQUENCY_125 = 2,
  VOICE_EQUALIZATION_BAND_FREQUENCY_250 = 3,
  VOICE_EQUALIZATION_BAND_FREQUENCY_500 = 4,
  VOICE_EQUALIZATION_BAND_FREQUENCY_1K = 5,
  VOICE_EQUALIZATION_BAND_FREQUENCY_2K = 6,
  VOICE_EQUALIZATION_BAND_FREQUENCY_4K = 7,
  VOICE_EQUALIZATION_BAND_FREQUENCY_8K = 8,
  VOICE_EQUALIZATION_BAND_FREQUENCY_16K = 9,
  ByteRTCBandFrequency31 = 10,
  ByteRTCBandFrequency62 = 11,
  ByteRTCBandFrequency125 = 12,
  ByteRTCBandFrequency250 = 13,
  ByteRTCBandFrequency500 = 14,
  ByteRTCBandFrequency1k = 15,
  ByteRTCBandFrequency2k = 16,
ByteRTCBandFrequency4k = 17,
  ByteRTCBandFrequency8k = 18,
  ByteRTCBandFrequency16k = 19
}

export declare enum MixedStreamPushMode {
  ON_STREAM = 0,
  ON_START_REQUEST = 1
}

export declare enum EarMonitorMode {
  EAR_MONITOR_MODE_OFF = 0,
  EAR_MONITOR_MODE_ON = 1
}

/** {en}
 * @brief Sending state of the first frame.
 * NOTE(review): `FIRST_FRAME_SEND_STAT_END` drops the "E" of "STATE"; it is part
 * of the shipped API and must not be renamed here.
 */
export declare enum FirstFrameSendState {
  FIRST_FRAME_SEND_STATE_SENDING = 0,
  FIRST_FRAME_SEND_STATE_SENT = 1,
  FIRST_FRAME_SEND_STAT_END = 2
}

export declare enum MixedStreamMediaType {
  MIXED_STREAM_MEDIA_TYPE_AUDIO_AND_VIDEO = 0,
  MIXED_STREAM_MEDIA_TYPE_AUDIO_ONLY = 1,
  MIXED_STREAM_MEDIA_TYPE_VIDEO_ONLY = 2
}

/** {en}
 * @brief Subtitle mode (recognition vs. translation).
 * NOTE(review): `SUBTITLE_MODE_RECOGINTE` is misspelled ("RECOGNIZE") but is part
 * of the shipped API and must not be renamed here.
 */
export declare enum SubtitleMode {
  SUBTITLE_MODE_TRANSLATION = 0,
  SUBTITLE_MODE_RECOGINTE = 1,
  ByteRTCSubtitleModeRecognition = 2
}

/** {en}
 * @brief Video parameters recommended (or capped) by the SDK for the publisher.
 */
export declare class SourceWantedData {
  constructor();
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief If send performance fallback is not turned on, this value represents the recommended video input width;
   * If send performance fallback is turned on, this value represents the maximum width of the current push stream.
   */
  get width(): number;
  set width(value: number);
  /** {en}
   * @brief If send performance fallback is not turned on, this value represents the recommended video input height;
   * If send performance fallback is turned on, this value represents the maximum height of the current push stream.
   */
  get height(): number;
  set height(value: number);
  /** {en}
   * @brief If send performance fallback is not turned on, this value represents the recommended video input frame rate in fps;
   * If send performance fallback is turned on, this value represents the maximum frame rate of the current push stream in fps.
   */
  get frameRate(): number;
  set frameRate(value: number);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Voice changer effect.
 */
export declare enum VoiceChangerType {
  VOICE_CHANGER_ORIGINAL = 0,
  VOICE_CHANGER_GIANT = 1,
  VOICE_CHANGER_CHIPMUNK = 2,
  VOICE_CHANGER_MINIONST = 3,
  VOICE_CHANGER_VIBRATO = 4,
  VOICE_CHANGER_ROBOT = 5
}

export declare enum EffectBeautyMode {
  WHITE = 0,
  SMOOTH = 1,
  SHARPEN = 2,
  CLEAR = 3
}

/** {en}
 * @brief A direction vector in 3-D space.
 */
export declare class Orientation {
  constructor(x: number, y: number, z: number);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief X-coordinate
   */
  get x(): number;
  set x(value: number);
  /** {en}
   * @brief Y-coordinate
   */
  get y(): number;
  set y(value: number);
  /** {en}
   * @brief Z-coordinate
   */
  get z(): number;
  set z(value: number);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

export declare enum MusicHotType {
  CONTENT_CENTER = 0,
  PROJECT = 1
}

export declare enum MixedStreamAudioCodecType {
  MIXED_STREAM_AUDIO_CODEC_TYPE_AAC = 0
}

export declare enum LocalLogLevel {
  INFO = 0,
  WARNING = 1,
  ERROR = 2,
  NONE = 3
}

export declare enum ForwardStreamState {
  FORWARD_STREAM_STATE_SUCCESS = 0,
  FORWARD_STREAM_STATE_FAILURE = 1,
  FORWARD_STREAM_STATE_IDLE = 2,
  ByteRTCForwardStreamStateIdle = 3
}

export declare enum TorchState {
  TORCH_STATE_OFF = 0,
  TORCH_STATE_ON = 1
}

export declare enum VideoCodecType {
  VIDEO_CODEC_TYPE_H264 = 0,
  VIDEO_CODEC_TYPE_BYTEVC1 = 1,
  ByteRTCVideoCodecTypeUnknown = 2
}

/** {en}
 * @brief Configuration for automatic speech recognition (ASR).
 */
export declare class RTCASRConfig {
  constructor(userId: string, accessToken: string, secretKey: string, authorizationType: ASRAuthorizationType, cluster: string, appId: string);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief User ID
   */
  get userId(): string;
  set userId(value: string);
  /** {en}
   * @brief Access token
   */
  get accessToken(): string;
  set
accessToken(value: string);
  /** {en}
   * @brief Private key. Signature cannot be empty in authentication mode, and it is empty in token authentication mode. See [Authentication Method](https://docs.byteplus.com/speech/docs/authentication-method).
   */
  get secretKey(): string;
  set secretKey(value: string);
  /** {en}
   * @brief Authentication method
   */
  get authorizationType(): ASRAuthorizationType;
  set authorizationType(value: ASRAuthorizationType);
  /** {en}
   * @brief Scene information. See [business cluster](https://docs.byteplus.com/en/speech/docs/real-time-speech-recog)
   */
  get cluster(): string;
  set cluster(value: string);
  /** {en}
   * @brief Application ID
   */
  get appId(): string;
  set appId(value: string);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief ASR error codes.
 * NOTE(review): value 11 is `ByteRTCErrorSignatureKeyEmpty` (no "ASR" infix) —
 * inconsistent with its siblings, but it is the shipped identifier; keep as-is.
 */
export declare enum RTCASRErrorCode {
  NETWORK_INTERRUPTED = 0,
  ALREADY_STARTED = 1,
  TOKEN_EMPTY = 2,
  SIGNATURE_KEY_EMPTY = 3,
  USERID_NULL = 4,
  APPID_NULL = 5,
  CLUSTER_NULL = 6,
  OPERATION_DENIED = 7,
  ByteRTCASRErrorNetworkInterrupted = 8,
  ByteRTCASRErrorAlreadyStarted = 9,
  ByteRTCASRErrorTokenEmpty = 10,
  ByteRTCErrorSignatureKeyEmpty = 11,
  ByteRTCASRErrorUserIdNull = 12,
  ByteRTCASRErrorAPPIDNull = 13,
  ByteRTCASRErrorClusterNull = 14,
  ByteRTCASRErrorOperationDenied = 15
}

/** {en}
 * @brief Event information reported while relaying a media stream to other rooms.
 */
export declare class ForwardStreamEventInfo {
  constructor(roomId: string, event: ForwardStreamEvent);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief ID of the room where the media stream aims to relay to
   * An empty string is for all rooms.
   */
  get roomId(): string;
  set roomId(value: string);
  /** {en}
   * @brief Event from the room during relaying.
   */
  get event(): ForwardStreamEvent;
  set event(value: ForwardStreamEvent);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Local recording configuration.
 */
export declare class RecordingConfig {
  constructor();
  constructor(dirPath: string, recordingFileType: RecordingFileType);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief The absolute folder path to save the recording file. The filename will be generated by RTC. You need to ensure that you have read and write permissions to this path.
   */
  get dirPath(): string;
  set dirPath(value: string);
  /** {en}
   * @brief Recording stored file format. See {@link RecordingFileType RecordingFileType}
   */
  get recordingFileType(): RecordingFileType;
  set recordingFileType(value: RecordingFileType);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Video capture configuration. Resolution is exposed as platform-specific
 * members (`android_width`/`android_height` vs. `ios_videoSize`); only
 * `frameRate` is shared.
 */
export declare class VideoCaptureConfig {
  constructor(w: number, h: number, fps: number);
  constructor();
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @platform android
   * @brief Video capture preference. See {@link CapturePreference CapturePreference}.
   */
  get android_capturePreference(): $p_a.CapturePreference;
  set android_capturePreference(value: $p_a.CapturePreference);
  /** {en}
   * @platform android
   * @brief The width of video capture resolution in px.
   */
  get android_width(): $p_a.int;
  set android_width(value: $p_a.int);
  /** {en}
   * @platform android
   * @brief The height of video capture resolution in px.
   */
  get android_height(): $p_a.int;
  set android_height(value: $p_a.int);
  /** {en}
   * @brief Video capture frame rate in fps.
   */
  get frameRate(): number;
  set frameRate(value: number);
  /** {en}
   * @platform ios
   * @brief Video capture preference. See {@link ByteRTCVideoCapturePreference ByteRTCVideoCapturePreference}
   */
  get ios_preference(): $p_i.ByteRTCVideoCapturePreference;
  set ios_preference(value: $p_i.ByteRTCVideoCapturePreference);
  /** {en}
   * @platform ios
   * @brief Video capture resolution
   */
  get ios_videoSize(): $p_i.CGSize;
  set ios_videoSize(value: $p_i.CGSize);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Head orientation expressed as three orthogonal direction vectors.
 */
export declare class HumanOrientation {
  constructor();
  constructor(forward: Orientation, right: Orientation, up: Orientation);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief Forward orientation, the default value is `{1,0,0}`, i.e., the forward orientation is in the positive direction of x-axis.
   */
  get forward(): Orientation;
  set forward(value: Orientation);
  /** {en}
   * @brief Rightward orientation, the default value is `{0,1,0}`, i.e., the rightward orientation is in the positive direction of y-axis.
   */
  get right(): Orientation;
  set right(value: Orientation);
  /** {en}
   * @brief Upward orientation, the default value is `{0,0,1}`, i.e., the upward orientation is in the positive direction of z-axis.
   */
  get up(): Orientation;
  set up(value: Orientation);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Audio properties of a local stream.
 */
export declare class LocalAudioPropertiesInfo {
  constructor(streamIndex: StreamIndex, audioPropertiesInfo: AudioPropertiesInfo);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief Refer to [StreamIndex](#streamindex-2) for details.
   */
  get streamIndex(): StreamIndex;
  set streamIndex(value: StreamIndex);
  /** {en}
   * @detail keytype
   * @brief See {@link AudioPropertiesInfo AudioPropertiesInfo}.
   */
  get audioPropertiesInfo(): AudioPropertiesInfo;
  set audioPropertiesInfo(value: AudioPropertiesInfo);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Events during pushing streams mixed by server to CDN. Note: value 16 is
 * intentionally absent in the generated constants.
 */
export declare enum ByteRTCStreamMixingEvent {
  STREAM_MIXING_START = 0,
  STREAM_MIXING_START_SUCCESS = 1,
  STREAM_MIXING_START_FAILED = 2,
  STREAM_MIXING_UPDATE = 3,
  STREAM_MIXING_UPDATE_SUCCESS = 4,
  STREAM_MIXING_UPDATE_FAILED = 5,
  STREAM_MIXING_STOP = 6,
  STREAM_MIXING_STOP_SUCCESS = 7,
  STREAM_MIXING_STOP_FAILED = 8,
  STREAM_MIXING_CHANGE_MIX_TYPE = 9,
  STREAM_MIXING_FIRST_AUDIO_FRAME_BY_CLIENT_MIX = 10,
  STREAM_MIXING_FIRST_VIDEO_FRAME_BY_CLIENT_MIX = 11,
  STREAM_MIXING_UPDATE_TIMEOUT = 12,
  STREAM_MIXING_START_TIMEOUT = 13,
  STREAM_MIXING_REQUEST_PARAM_ERROR = 14,
  STREAM_MIXING_MIX_IMAGE_EVENT = 15,
  ByteRTCStreamMixingEventStart = 17,
  ByteRTCStreamMixingEventStartSuccess = 18,
  ByteRTCStreamMixingEventStartFailed = 19,
  ByteRTCStreamMixingEventUpdate = 20,
  ByteRTCStreamMixingEventUpdateSuccess = 21,
  ByteRTCStreamMixingEventUpdateFailed = 22,
  ByteRTCStreamMixingEventStop = 23,
  ByteRTCStreamMixingEventStopSuccess = 24,
  ByteRTCStreamMixingEventStopFailed = 25,
  ByteRTCStreamMixingEventChangeMixType = 26,
  ByteRTCStreamMixingEventFirstAudioFrameByClientMixer = 27,
  ByteRTCStreamMixingEventFirstVideoFrameByClientMixer = 28,
  ByteRTCStreamMixingEventUpdateTimeout = 29,
  ByteRTCStreamMixingEventStartTimeout = 30,
  ByteRTCStreamMixingEventRequestParamError = 31,
  ByteRTCStreamMixingEventMixImage = 32
}

export declare enum AudioAlignmentMode {
  AUDIO_ALIGNMENT_MODE_OFF = 0,
  AUDIO_ALIGNMENT_MODE_AUDIOMIXING = 1
}

/** {en}
 * @brief Reason for a fallback or recovery of a media stream.
 */
export declare enum FallbackOrRecoverReason {
  FALLBACK_OR_RECOVER_REASON_UNKNOWN = 0,
  FALLBACK_OR_RECOVER_REASON_SUBSCRIBE_FALLBACK_BY_BANDWIDTH = 1,
  FALLBACK_OR_RECOVER_REASON_SUBSCRIBE_FALLBACK_BY_PERFORMANCE = 2,
  FALLBACK_OR_RECOVER_REASON_SUBSCRIBE_RECOVER_BY_BANDWIDTH = 3,
  FALLBACK_OR_RECOVER_REASON_SUBSCRIBE_RECOVER_BY_PERFORMANCE = 4,
FALLBACK_OR_RECOVER_REASON_PUBLISH_FALLBACK_BY_BANDWIDTH = 5,
  FALLBACK_OR_RECOVER_REASON_PUBLISH_FALLBACK_BY_PERFORMANCE = 6,
  FALLBACK_OR_RECOVER_REASON_PUBLISH_RECOVER_BY_BANDWIDTH = 7,
  FALLBACK_OR_RECOVER_REASON_PUBLISH_RECOVER_BY_PERFORMANCE = 8
}

/** {en}
 * @brief Statistics for a remote audio/video stream.
 */
export declare class RemoteStreamStats {
  constructor();
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief User ID. The remote user ID of the audio/video source.
   */
  get uid(): string;
  set uid(value: string);
  /** {en}
   * @brief For statistics on remote audio streams. See {@link RemoteAudioStats RemoteAudioStats}
   */
  get audioStats(): RemoteAudioStats;
  set audioStats(value: RemoteAudioStats);
  /** {en}
   * @brief For statistics on remote video streams. See {@link RemoteVideoStats RemoteVideoStats}
   */
  get videoStats(): RemoteVideoStats;
  set videoStats(value: RemoteVideoStats);
  /** {en}
   * @brief Whether the media stream belongs to the user is a screen stream. You can know whether the current statistics come from mainstream or screen stream.
   */
  get isScreen(): boolean;
  set isScreen(value: boolean);
  /** {en}
   * @brief For the uplink network quality of the media stream owned by the user. See {@link NetworkQuality NetworkQuality}.
   * @deprecated since 3.45 and will be deleted in 3.51, use {@link onNetworkQuality onNetworkQuality} instead.
   */
  get txQuality(): NetworkQuality;
  set txQuality(value: NetworkQuality);
  /** {en}
   * @brief The downlink network quality of the media stream belongs to the user. See {@link NetworkQuality NetworkQuality}.
   * @deprecated since 3.45 and will be deleted in 3.51, use {@link onNetworkQuality onNetworkQuality} instead.
   */
  get rxQuality(): NetworkQuality;
  set rxQuality(value: NetworkQuality);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Media-device warnings. Note: values 5, 7-10 and 20, 22-25 are
 * intentionally absent in the generated constants.
 */
export declare enum MediaDeviceWarning {
  /** {en}
   * @platform android
   * @brief No warning
   */
  MEDIA_DEVICE_WARNING_OK = 0,
  /** {en}
   * @platform android
   * @brief Illegal device operation. Calls the API for internal device when using the external device.
   */
  MEDIA_DEVICE_WARNING_OPERATION_DENIED = 1,
  /** {en}
   * @platform android
   * @brief The media device is capturing silent frames.
   */
  MEDIA_DEVICE_WARNING_CAPTURE_SILENCE = 2,
  /** {en}
   * @platform android
   * @brief Silence warning by Android system.
   */
  MEDIA_DEVICE_WARNING_ANDROID_SYS_SILENCE = 3,
  /** {en}
   * @platform android
   * @brief Silence disappearing warning by Android system.
   */
  MEDIA_DEVICE_WARNING_ANDROID_SYS_SILENCE_DISAPPEAR = 4,
  /** {en}
   * @platform android
   * @brief Echos between mics and speakers are detected during a call.
   * `onAudioDeviceWarning` notifies you with this enum of echo issue. During a call, SDK will detect echo issue only when {@link ChannelProfile ChannelProfile} is set to `CHANNEL_PROFIEL_MEETING` or `CHANNEL_PROFILE_MEETING_ROOM` and AEC is disabled.
   */
  MEDIA_DEVICE_WARNING_DETECT_LEAK_ECHO = 6,
  /** {en}
   * @platform android
   * @brief Howling detected.
   * You will receive this callback in the following scenarios: 1) Howling is detected under the room profiles that do not support howling suppression; 2) Detect howling that is not suppressed under the room profiles that support howling suppression.
   * You can only enable howling suppression when the room profile is CHANNEL_PROFILE_COMMUNICATION(0), CHANNEL_PROFIEL_MEETING(16), or CHANNEL_PROFILE_MEETING_ROOM(17).
   * We recommend that you remind your users to adjust the physical distance between two devices or disable all unused devices except the connecting one.
   */
  MEDIA_DEVICE_WARNING_CAPTURE_DETECT_HOWLING = 11,
  /** {en}
   * @platform android
   * @brief Setting audio route failed because the audio route can not be changed in the current Audio Scenario.
   */
  MEDIA_DEVICE_WARNING_SET_AUDIO_ROUTE_INVALID_SCENARIO = 12,
  /** {en}
   * @platform android
   * @brief Setting audio route failed because the routing device did not exist.
   */
  MEDIA_DEVICE_WARNING_SET_AUDIO_ROUTE_NOT_EXISTS = 13,
  /** {en}
   * @platform android
   * @brief Setting audio route failed because the audio route was occupied by the system or the other Apps.
   */
  MEDIA_DEVICE_WARNING_SET_AUDIO_ROUTE_FAILED_BY_PRIORITY = 14,
  /** {en}
   * @platform android
   * @brief Setting audio route failed because the audio route can only be changed in AUDIO_SCENARIO_COMMUNICATION(2) mode.
   */
  MEDIA_DEVICE_WARNING_SET_AUDIO_ROUTE_NOT_VOIP_MODE = 15,
  /** {en}
   * @platform android
   * @brief Setting audio route failed because the audio route did not initiate.
   */
  MEDIA_DEVICE_WARNING_SET_AUDIO_ROUTE_DEVICE_NOT_START = 16,
  ByteRTCMediaDeviceWarningOK = 17,
  ByteRTCMediaDeviceWarningOperationDenied = 18,
  ByteRTCMediaDeviceWarningCaptureSilence = 19,
  ByteRTCMediaDeviceWarningDetectLeakEcho = 21,
  ByteRTCMediaDeviceWarningCaptureDetectHowling = 26,
  ByteRTCMediaDeviceWarningSetAudioRouteInvalidScenario = 27,
  ByteRTCMediaDeviceWarningSetAudioRouteNotExists = 28,
  ByteRTCMediaDeviceWarningSetAudioRouteFailedByPriority = 29,
  ByteRTCMediaDeviceWarningSetAudioRouteNotVoipMode = 30,
  ByteRTCMediaDeviceWarningSetAudioRouteDeviceNotStart = 31,
  ByteRTCMediaDeviceWarningSetBluetoothModeScenarioUnsupport = 32,
  ByteRTCMediaDeviceWarningSetBluetoothModeUnsupport = 33
}

/** {en}
 * @brief Rendering configuration for a remote video stream.
 */
export declare class RemoteVideoRenderConfig {
  constructor();
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief Rendering mode.
 * - 1(`RENDER_MODE_HIDDEN`): Fill and Crop. Default setting. The video frame is scaled with a fixed aspect ratio and completely fills the canvas. The region of the video exceeding the canvas will be cropped.
   * - 2(`RENDER_MODE_FIT`): Fit. The video frame is scaled with a fixed aspect ratio and is shown completely on the canvas. The region of the canvas not filled with the video frame will be filled with `backgroundColor`.
   * - 3(`RENDER_MODE_FILL`): Fill the canvas. The video frame is scaled to fill the canvas. During the process, the aspect ratio may change.
   */
  get renderMode(): RenderMode;
  set renderMode(value: RenderMode);
  /** {en}
   * @brief Set the background color of the canvas which is not filled with video frame. The range is `[0x00000000, 0xFFFFFFFF]`. The default is `0x00000000`. The Alpha index is ignored.
   */
  get backgroundColor(): number;
  set backgroundColor(value: number);
  /** {en}
   * @brief Video frame rotation angle. See {@link VideoRotation VideoRotation}. The default value is 0, which means no rotation is applied.
   */
  get renderRotation(): VideoRotation;
  set renderRotation(value: VideoRotation);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

export declare enum AudioProfileType {
  AUDIO_PROFILE_DEFAULT = 0,
  AUDIO_PROFILE_FLUENT = 1,
  AUDIO_PROFILE_STANDARD = 2,
  AUDIO_PROFILE_HD = 3,
  AUDIO_PROFILE_STANDARD_STEREO = 4,
  AUDIO_PROFILE_HD_MONO = 5
}

export declare enum CameraId {
  CAMERA_ID_FRONT = 0,
  CAMERA_ID_BACK = 1,
  CAMERA_ID_INVALID = 2
}

/** {en}
 * @brief Real-time singing-scoring information.
 */
export declare class SingScoringRealtimeInfo {
  /**
   * @platform android
   */
  static android_create(currentPosition: number, userPitch: number, standardPitch: number, sentenceIndex: number, sentenceScore: number, totalScore: number, averageScore: number): $p_a.SingScoringRealtimeInfo;
  constructor(currentPosition: number, userPitch: number, standardPitch: number, sentenceIndex: number, sentenceScore: number, totalScore: number, averageScore: number);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief Current playback position.
   */
  get currentPosition(): number;
  set currentPosition(value: number);
  /** {en}
   * @brief The user's pitch.
   */
  get userPitch(): number;
  set userPitch(value: number);
  /** {en}
   * @brief Standard pitch.
   */
  get standardPitch(): number;
  set standardPitch(value: number);
  /** {en}
   * @brief Lyric index.
   */
  get sentenceIndex(): number;
  set sentenceIndex(value: number);
  /** {en}
   * @brief The score for the previous lyric.
   */
  get sentenceScore(): number;
  set sentenceScore(value: number);
  /** {en}
   * @brief The total score for the user's current performance.
   */
  get totalScore(): number;
  set totalScore(value: number);
  /** {en}
   * @brief The average score for the user's current performance.
   */
  get averageScore(): number;
  set averageScore(value: number);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

export declare enum RemoteVideoRenderPosition {
  AFTER_DECODER = 0,
  AFTER_POST_PROCESS = 1
}

export declare enum TranscoderLayoutRegionType {
  LAYOUT_REGION_TYPE_VIDEO_STREAM = 0,
  LAYOUT_REGION_TYPE_IMAGE = 1,
  ByteRTCTranscoderLayoutRegionTypeVideoStream = 2,
  ByteRTCTranscoderLayoutRegionTypeImage = 3
}

/** {en}
 * @brief Configuration for the audio-effect player.
 */
export declare class AudioEffectPlayerConfig {
  constructor(type: AudioMixingType, playCount: number, startPos: number, pitch: number);
  constructor();
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief Mixing playback types. See {@link AudioMixingType AudioMixingType}
   */
  get type(): AudioMixingType;
  set type(value: AudioMixingType);
  /** {en}
   * @brief Mix playback times
   * - Play_count < = 0: Infinite loop
   * - Play_count == 1: Play once (default)
   * - Play_count > 1: Play play_count times
   */
  get playCount(): number;
  set playCount(value: number);
  /** {en}
   * @brief The starting position in ms. 0 by default.
   */
  get startPos(): number;
  set startPos(value: number);
  /** {en}
   * @brief The increase or decrease value compared with the original pitch of the music file. The range is `[-12, 12]`. The default value is 0. The pitch distance between two adjacent values is half a step. A positive value indicates a rising pitch, and a negative value indicates a falling pitch.
   */
  get pitch(): number;
  set pitch(value: number);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
}

/** {en}
 * @brief Image watermark configuration for a mixed-stream layout region.
 * iOS exposes properties; Android exposes chainable setters/getters.
 */
export declare class MixedStreamLayoutRegionImageWaterMarkConfig {
  constructor(width: number, height: number);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @platform ios
   * @brief Width of the original image in px.
   */
  get ios_imageWidth(): $p_i.NSInteger;
  set ios_imageWidth(value: $p_i.NSInteger);
  /** {en}
   * @platform ios
   * @brief Height of the original image in px.
   */
  get ios_imageHeight(): $p_i.NSInteger;
  set ios_imageHeight(value: $p_i.NSInteger);
  protected __init(...args: any[]): void;
  protected __new_instance(...args: any[]): any;
  /** {en}
   * @platform android
   * @brief Sets the height of the original image.
   * @param imageHeight The original image height in px.
   */
  android_setImageHeight(imageHeight: number): this;
  /** {en}
   * @platform android
   * @brief Gets the height of the original image.
   */
  android_getImageHeight(): number;
  /** {en}
   * @platform android
   * @brief Sets the width of the original image.
   * @param imageWidth The original image width in px.
   */
  android_setImageWidth(imageWidth: number): this;
  /** {en}
   * @platform android
   * @brief Gets the width of the original image.
   */
  android_getImageWidth(): number;
}

/** {en}
 * @brief Local video stream error codes.
 */
export declare enum LocalVideoStreamError {
  LOCAL_VIDEO_STREAM_ERROR_OK = 0,
  LOCAL_VIDEO_STREAM_ERROR_FAILURE = 1,
  LOCAL_VIDEO_STREAM_ERROR_DEVICE_NO_PERMISSION = 2,
  LOCAL_VIDEO_STREAM_ERROR_DEVICE_BUSY = 3,
  LOCAL_VIDEO_STREAM_ERROR_DEVICE_NOT_FOUND = 4,
  LOCAL_VIDEO_STREAM_ERROR_CAPTURE_FAILURE = 5,
  LOCAL_VIDEO_STREAM_ERROR_ENCODE_FAILURE = 6,
  LOCAL_VIDEO_STREAM_ERROR_DEVICE_DISCONNECTED = 7
}

/** {en}
 * @brief Per-user network quality statistics.
 */
export declare class NetworkQualityStats {
  constructor(uid: string, lost: double, rtt: number, bandwidth: number, txQuality: number, rxQuality: number);
  constructor();
  // Handle of the underlying platform object this wrapper delegates to.
  protected _instance: any;
  /** {en}
   * @brief User ID
   */
  get uid(): string;
  set uid(value: string);
  /** {en}
   * @platform android
   * @brief Packet loss ratio of the local client, ranging [0.0,1.0]
* For a local user, it is the sent-packet loss ratio.
* For a remote user, it is the loss ratio of all the packets received. * */ get android_fractionLost(): $p_a.double; set android_fractionLost(value: $p_a.double); /** {en} * @brief Round-trip time (RTT) from client to server. Effective for the local user. Unit: ms * */ get rtt(): number; set rtt(value: number); /** {en} * @brief Average transmission rate of the media RTP packages in 2s. unit: bps
* For a local user, it is the packet-transmitting speed.
* For a more user, it is the speed of receiving all the subscribed medias. * */ get totalBandwidth(): number; set totalBandwidth(value: number); /** {en} * @brief Tx network quality grade. The higher grade the worst network quality. Refer to {@link NetworkQuality NetworkQuality} for details. * */ get txQuality(): NetworkQuality; set txQuality(value: NetworkQuality); /** {en} * @brief Rx network quality grade. The higher grade the worst network quality. Refer to {@link NetworkQuality NetworkQuality} for details. * */ get rxQuality(): NetworkQuality; set rxQuality(value: NetworkQuality); /** {en} * @platform ios * @brief Packet loss ratio of the local client, ranging [0.0,1.0]
* For a local user, it is the sent-packet loss ratio.
* For a remote user, it is the loss ratio of all the packets received. */ get ios_lossRatio(): $p_i.double; set ios_lossRatio(value: $p_i.double); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum UserVisibilityChangeError { OK = 0, UNKNOWN = 1, TOO_MANY_VISIBLE_USER = 2 } export declare enum PublishFallbackOption { DISABLE = 0, SIMULCAST_SMALL_VIDEO_ONLY = 1, ByteRTCPublishFallbackOptionDisabled = 2, ByteRTCPublishFallbackOptionSimulcast = 3 } export declare enum LocalAudioStreamError { LOCAL_AUDIO_STREAM_ERROR_OK = 0, LOCAL_AUDIO_STREAM_ERROR_FAILURE = 1, LOCAL_AUDIO_STREAM_ERROR_DEVICE_NO_PERMISSION = 2, LOCAL_AUDIO_STREAM_ERROR_RECORD_FAILURE = 3, LOCAL_AUDIO_STREAM_ERROR_ENCODE_FAILURE = 4, LOCAL_AUDIO_STREAM_ERROR_NO_RECORDING_DEVICE = 5 } export declare class SubtitleMessage { /** * @platform android */ /** * @platform android */ static android_create(userId: string, text: string, language: string, mode: number, sequence: number, definite: boolean): $p_a.SubtitleMessage; constructor(userId: string, text: string, language: string, mode: SubtitleMode, sequence: number, definite: boolean); constructor(); protected _instance: any; /** {en} * @brief The speaker's ID. * */ get userId(): string; set userId(value: string); /** {en} * @brief Subtitles encoded in UTF-8 format. * */ get text(): string; set text(value: string); /** {en} * @brief The language of the transcribed or translated text according to the subtitle mode. * */ get language(): string; set language(value: string); /** {en} * @brief Subtitle mode. Refer to {@link SubtitleMode SubtitleMode} for more details. * */ get mode(): SubtitleMode; set mode(value: SubtitleMode); /** {en} * @brief Incremental sequence numbers of the piece of the subtitle. Complete and incomplete sentences are numbered individually. * */ get sequence(): number; set sequence(value: number); /** {en} * @brief Whether transcribed texts are complete sentences. 
True means yes and False means no. * */ get definite(): boolean; set definite(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class RecordingInfo { constructor(filePath: string, videoCodecType: VideoCodecType, width: number, height: number); constructor(); protected _instance: any; /** {en} * @brief The absolute path of the recorded file, including the file name and file suffix * */ get filePath(): string; set filePath(value: string); /** {en} * @platform android * @brief The video encoding type of the recorded file. See {@link VideoCodecType VideoCodecType} * */ get android_videoCodecType(): $p_a.VideoCodecType; set android_videoCodecType(value: $p_a.VideoCodecType); /** {en} * @brief The width of the recorded video, in pixels. Please ignore this field for audio-only recording * */ get width(): number; set width(value: number); /** {en} * @brief The height of the recorded video, the unit: pixels. Please ignore this field for audio-only recording * */ get height(): number; set height(value: number); /** {en} * @platform ios * @brief For the video encoding type of the recorded file. See {@link ByteRTCVideoCodecType ByteRTCVideoCodecType} */ get ios_codecType(): $p_i.ByteRTCVideoCodecType; set ios_codecType(value: $p_i.ByteRTCVideoCodecType); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class AudioMixingConfig { constructor(type: AudioMixingType, playCount: number, position: number, progressInterval: number); constructor(type: AudioMixingType, playCount: number); constructor(); protected _instance: any; /** {en} * @brief For mixing playback types. See {@link AudioMixingType AudioMixingType} * */ get type(): AudioMixingType; set type(value: AudioMixingType); /** {en} * @brief Mix playback times
* - Play_count < = 0: Infinite loop * - Play_count == 1: Play once (default) * - Play_count > 1: Play play_count times * */ get playCount(): number; set playCount(value: number); /** {en} * @brief The position of the audio file playback progress bar during audio mixing, the parameter should be an integer, in milliseconds. * */ get position(): number; set position(value: number); /** {en} * @brief Set the time interval (ms) for the audio file playing progress callback. The `onAudioMixingPlayingProgress` callback then will be triggered according to the set value, no callback by default.
* - The value of interval is a multiple of 10 greater than 0. When the value set is not divisible by 10, the default is rounded up by 10. For example, if the value is set to 52ms, it will be automatically adjusted to 60ms, then the SDK will trigger `onAudioMixingPlayingProgress` callback at the set interval. * - If the value is less than or equals to 0, the callback will not be triggered. * */ get callbackOnProgressInterval(): number; set callbackOnProgressInterval(value: number); /** {en} * @brief Attach the process information of local audio file mixing to the captured audio data. Enable the function to enhance the synchronicity of the remote audio mixing.
 * - The function is effective when mixing a single audio file. * - Use `true` for enabling the function and `false` for disable the function. The default is `false`. * */ get syncProgressToRecordFrame(): boolean; set syncProgressToRecordFrame(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum SyncInfoStreamType { SYNC_INFO_STREAM_TYPE_AUDIO = 0 } export declare enum MixedStreamAudioProfile { MIXED_STREAM_AUDIO_PROFILE_LC = 0, MIXED_STREAM_AUDIO_PROFILE_HEV1 = 1, MIXED_STREAM_AUDIO_PROFILE_HEV2 = 2 } export declare class RTCRoomStats { constructor(); constructor(); protected _instance: any; /** {en} * @brief The instantaneous value of Tx bitrate in kbps * */ get txKBitRate(): number; set txKBitRate(value: number); /** {en} * @brief The instantaneous value of Rx bitrate in kbps * */ get rxKBitRate(): number; set rxKBitRate(value: number); /** {en} * @brief The instantaneous value of audio Tx bitrate in kbps * */ get txAudioKBitRate(): number; set txAudioKBitRate(value: number); /** {en} * @brief The instantaneous value of audio Rx bitrate in kbps * */ get rxAudioKBitRate(): number; set rxAudioKBitRate(value: number); /** {en} * @brief The instantaneous value of video Tx bitrate in kbps * */ get txVideoKBitRate(): number; set txVideoKBitRate(value: number); /** {en} * @brief The instantaneous value of video Rx bitrate in kbps * */ get rxVideoKBitRate(): number; set rxVideoKBitRate(value: number); /** {en} * @brief The instantaneous TX bitrate of screen-sharing video in Kbps * */ get txScreenKBitRate(): number; set txScreenKBitRate(value: number); /** {en} * @brief The instantaneous RX bitrate of screen-sharing video in Kbps * */ get rxScreenKBitRate(): number; set rxScreenKBitRate(value: number); /** {en} * @platform android * @brief Cumulative time between the user joining the room and leaving the room in seconds. 
* */ get android_totalDuration(): $p_a.int; set android_totalDuration(value: $p_a.int); /** {en} * @brief Cumulative data sent by the user in bytes. * */ get txBytes(): number; set txBytes(value: number); /** {en} * @brief Cumulative data received by the user in bytes. * */ get rxBytes(): number; set rxBytes(value: number); /** {en} * @platform android * @brief Number of visible users in the current room * */ get android_users(): $p_a.int; set android_users(value: $p_a.int); /** {en} * @platform android * @brief Current CPU usage (\%) * */ get android_cpuTotalUsage(): $p_a.double; set android_cpuTotalUsage(value: $p_a.double); /** {en} * @platform android * @brief CPU usage (\%) of the application * */ get android_cpuAppUsage(): $p_a.double; set android_cpuAppUsage(value: $p_a.double); /** {en} * @brief Current Tx packet loss rate. The range is [0,1]. * */ get txLostrate(): number; set txLostrate(value: number); /** {en} * @brief Current Rx packet loss rate. The range is [0,1]. * */ get rxLostrate(): number; set rxLostrate(value: number); /** {en} * @brief Round-trip time (in ms) from client side to server side * */ get rtt(): number; set rtt(value: number); /** {en} * @brief Tx cellular bandwidth consumption * */ get txCellularKBitrate(): number; set txCellularKBitrate(value: number); /** {en} * @brief Rx cellular bandwidth consumption * */ get rxCellularKBitrate(): number; set rxCellularKBitrate(value: number); /** {en} * @platform ios * @brief The total time after the user enters the room and starts the call, in s, the cumulative value */ get ios_duration(): $p_i.NSInteger; set ios_duration(value: $p_i.NSInteger); /** {en} * @platform ios * @brief Number of visible users in the current room, including local users themselves */ get ios_userCount(): $p_i.NSInteger; set ios_userCount(value: $p_i.NSInteger); /** {en} * @platform ios * @hidden currently not available * @brief The system downlink network jitter(ms) */ get ios_txJitter(): $p_i.NSInteger; set 
ios_txJitter(value: $p_i.NSInteger); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @region Video Management * @brief Reset {@link RTCRoomStats RTCRoomStats} * */ android_reset(): void; } export declare enum AudioTrackType { ORIGINAL = 0, ACCOMPANY = 1, ByteRTCAudioTrackTypeAccompy = 2 } export declare class ProblemFeedbackRoomInfo { constructor(roomId: string, userId: string); constructor(); constructor(); protected _instance: any; /** {en} * @brief Room ID. * */ get roomId(): string; set roomId(value: string); /** {en} * @brief User ID. * */ get userId(): string; set userId(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum AudioSourceType { AUDIO_SOURCE_TYPE_EXTERNAL = 0, AUDIO_SOURCE_TYPE_INTERNAL = 1 } export declare enum PerformanceAlarmMode { NORMAL = 0, SIMULCAST = 1 } export declare enum RecordingErrorCode { RECORDING_ERROR_CODE_OK = 0, RECORDING_ERROR_CODE_NO_PERMISSION = 1, RECORDING_ERROR_CODE_NOT_SUPPORT = 2, RECORDING_ERROR_CODE_NO_OTHER = 3, ByteRTCRecordingErrorCodeOk = 4, ByteRTCRecordingErrorCodeNoPermission = 5, ByteRTCRecordingErrorCodeNotSupport = 6, ByteRTCRecordingErrorCodeOther = 7 } export declare enum EchoTestResult { ECHO_TEST_SUCCESS = 0, ECHO_TEST_TIMEOUT = 1, ECHO_TEST_INTERVAL_SHORT = 2, ECHO_TEST_AUDIO_DEVICE_ERROR = 3, ECHO_TEST_VIDEO_DEVICE_ERROR = 4, ECHO_TEST_AUDIO_RECEIVE_ERROR = 5, ECHO_TEST_VIDEO_RECEIVE_ERROR = 6, ECHO_TEST_INTERNAL_ERROR = 7, ByteRTCEchoTestResultSuccess = 8, ByteRTCEchoTestResultTimeout = 9, ByteRTCEchoTestResultIntervalShort = 10, ByteRTCEchoTestResultAudioDeviceError = 11, ByteRTCEchoTestResultVideoDeviceError = 12, ByteRTCEchoTestResultAudioReceiveError = 13, ByteRTCEchoTestResultVideoReceiveError = 14, ByteRTCEchoTestResultInternalError = 15 } export declare class NetworkTimeInfo { protected _instance: any; /** {en} * @brief Network time. 
Unit: ms * */ get timestamp(): number; set timestamp(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum RemoteMirrorType { NONE = 0, RENDER = 1 } export declare enum SubscribeState { /** {en} * @platform android * @brief Successfully subscribed. * */ SUBSCRIBED = 0, /** {en} * @brief Failed to subscribe. * */ UNSUBSCRIBED = 1 } export declare class VirtualBackgroundSource { protected _instance: any; /** {en} * @brief See {@link VirtualBackgroundSourceType VirtualBackgroundSourceType}. * */ get sourceType(): VirtualBackgroundSourceType; set sourceType(value: VirtualBackgroundSourceType); /** {en} * @brief The solid color of the background.
* The format is 0xAARRGGBB. * */ get sourceColor(): number; set sourceColor(value: number); /** {en} * @brief The absolute path of the specified image.
* - You can use the absolute path of the local file (file://xxx) and the Asset path (asset://xxx). * - You can upload a .JPG, .PNG, or .JPEG file. * - The image with a resolution higher than 1080p(Full HD) will be rescaled proportionally to fit in the video. * - If the image's aspect ratio matches the video's, the image will be rescaled proportionally to fit in the video. * - If the image’s aspect ratio doesn't match the video's, the shortest side (either height or width) of the image will be stretched proportionally to match the video. Then the image will be cropped to fill in the video. * - The transparent area in the image will be filled with black. * */ get sourcePath(): string; set sourcePath(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class MixedStreamClientMixConfig { protected _instance: any; /** {en} * @platform ios * @brief Whether to use audio mixing. Default is true. */ get ios_useAudioMixer(): $p_i.BOOL; set ios_useAudioMixer(value: $p_i.BOOL); /** {en} * @platform ios * @brief The video format to be set. See {@link ByteRTCMixedStreamClientMixVideoFormat ByteRTCMixedStreamClientMixVideoFormat}. */ get ios_videoFormat(): $p_i.ByteRTCMixedStreamClientMixVideoFormat; set ios_videoFormat(value: $p_i.ByteRTCMixedStreamClientMixVideoFormat); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @brief Sets the video format for client stream mixing callback. If the format you set is not valid, the format will be set to the default value. * @param videoFormat The video format to be set. See {@link MixedStreamClientMixVideoFormat MixedStreamClientMixVideoFormat}. * */ android_setVideoFormat(videoFormat: $p_a.MixedStreamClientMixVideoFormat): this; /** {en} * @platform android * @detail api * @brief Gets the video format for client stream mixing callback. 
* */ android_getVideoFormat(): $p_a.MixedStreamClientMixVideoFormat; /** {en} * @platform android * @detail api * @brief Set whether the client mixing uses audio mixing. * @param useAudioMixer Whether to use audio mixing. True by default. * @return See {@link MixedStreamClientMixConfig MixedStreamClientMixConfig}. * */ android_setUseAudioMixer(useAudioMixer: boolean): this; /** {en} * @platform android * @detail api * @brief Get the setting of whether the client mixing uses audio mixing. * */ android_getUseAudioMixer(): boolean; } export declare class PublicStreaming { /** {en} * @platform android * @type api * @brief Get the default setting of a public stream. * @return The default setting of a public stream. Refer to {@link PublicStreaming PublicStreaming} for more details. * */ static android_getDefualtPublicStreaming(): $p_a.PublicStreaming; /** {en} * @platform ios * @brief Get the default setting of a public stream. * @return {@link ByteRTCPublicStreaming ByteRTCPublicStreaming} struct with default values. */ static ios_defaultPublicStreaming(): PublicStreaming; protected _instance: any; /** {en} * @platform ios * @brief Required. Layout configuration of the public stream. Refer to {@link ByteRTCPublicStreamLayout ByteRTCPublicStreamLayout} for details. */ get ios_layout(): $p_i.ByteRTCPublicStreamLayout; set ios_layout(value: $p_i.ByteRTCPublicStreamLayout); /** {en} * @platform ios * @brief Encoding properties of the public video stream. Refer to {@link ByteRTCPublicStreamVideoConfig ByteRTCPublicStreamVideoConfig} for details. */ get ios_video(): $p_i.ByteRTCPublicStreamVideoConfig; set ios_video(value: $p_i.ByteRTCPublicStreamVideoConfig); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @brief Set the ID of the room. 
* @param roomId roomID * */ android_setRoomId(roomId: string): void; /** {en} * @platform android * @detail api * @brief Set the action taken to the public stream. * @param action The action taken to the public stream.
* - ACTION_START: Starts * - ACTION_CHANGED: Updates * - ACTION_STOPPED: Stops * */ android_setAction(action: string): void; /** {en} * @platform android * @detail api * @brief Set the layout of the public stream * @param layout Required. Layout configuration of the public stream. Refer to [Layout](#layout-2) for details. * */ android_setLayout(layout: $p_a.Layout): void; /** {en} * @platform android * @detail api * @brief Get the layout of the public stream * @return Layout configuration of the public stream. Refer to [Layout](#layout-2) for details. * */ android_getLayout(): $p_a.Layout; } export declare enum StreamIndex { STREAM_INDEX_MAIN = 0, STREAM_INDEX_SCREEN = 1 } export declare enum RemoteAudioStateChangeReason { REMOTE_AUDIO_STATE_CHANGE_REASON_NETWORK_CONGESTION = 0, REMOTE_AUDIO_STATE_CHANGE_REASON_NETWORK_RECOVERY = 1, REMOTE_AUDIO_STATE_CHANGE_REASON_LOCAL_MUTED = 2, REMOTE_AUDIO_STATE_CHANGE_REASON_LOCAL_UNMUTED = 3, REMOTE_AUDIO_STATE_CHANGE_REASON_REMOTE_MUTED = 4, REMOTE_AUDIO_STATE_CHANGE_REASON_REMOTE_UNMUTED = 5, REMOTE_AUDIO_STATE_CHANGE_REASON_REMOTE_OFFLINE = 6, REMOTE_AUDIO_STATE_CHANGE_REASON_INTERNAL = 7, ByteRTCRemoteAudioStateChangeReasonInternal = 8 } export declare enum PlayerError { OK = 0, FORMAT_NOT_SUPPORT = 1, INVALID_PATH = 2, INVALID_STATE = 3, INVALID_POSITION = 4, INVALID_VOLUME = 5, INVALID_PITCH = 6, INVALID_AUDIO_TRACK_INDEX = 7, INVALID_PLAYBACK_SPEED = 8, INVALID_EFFECT_ID = 9, ByteRTCPlayerErrorInvalidPath = 10, ByteRTCPlayerErrorInvalidState = 11, ByteRTCPlayerErrorInvalidPosition = 12, ByteRTCPlayerErrorInvalidVolume = 13, ByteRTCPlayerErrorInvalidPitch = 14, ByteRTCPlayerErrorInvalidAudioTrackIndex = 15, ByteRTCPlayerErrorInvalidPlaybackSpeed = 16, ByteRTCPlayerErrorInvalidEffectId = 17 } export declare enum ReturnStatus { RETURN_STATUS_SUCCESS = 0, RETURN_STATUS_FAILURE = 1, RETURN_STATUS_PARAMETER_ERR = 2, RETURN_STATUS_WRONG_STATE = 3, RETURN_STATUS_HAS_IN_ROOM = 4, RETURN_STATUS_HAS_IN_LOGIN = 5, 
RETURN_STATUS_HAS_IN_ECHO_TEST = 6, RETURN_STATUS_NEITHER_VIDEO_NOR_AUDIO = 7, RETURN_STATUS_ROOMID_IN_USE = 8, RETURN_STATUS_SCREEN_NOT_SUPPORT = 9, RETURN_STATUS_NOT_SUPPORT = 10, RETURN_STATUS_RESOURCE_OVERFLOW = 11, RETURN_STATUS_AUDIO_NO_FRAME = 12, RETURN_STATUS_AUDIO_NOT_IMPLEMENTED = 13, RETURN_STATUS_AUDIO_NO_PERMISSION = 14, RETURN_STATUS_AUDIO_DEVICE_NOT_EXISTS = 15, RETURN_STATUS_AUDIO_DEVICE_FORMAT_NOT_SUPPORT = 16, RETURN_STATUS_AUDIO_DEVICE_NO_DEVICE = 17, RETURN_STATUS_AUDIO_DEVICE_CAN_NOT_USE = 18, RETURN_STATUS_AUDIO_DEVICE_INIT_FAILED = 19, RETURN_STATUS_AUDIO_DEVICE_START_FAILED = 20, RETURN_STATUS_NATIVE_IN_VALID = 21, RETURN_STATUS_VIDEO_NOT_SUPPORT = 22, RETURN_STATUS_VIDEO_TIMESTAMP_WARNING = 23 } export declare class ScreenVideoEncoderConfig { constructor(width: number, height: number, frameRate: number, maxBitrate: number, minBitrate: number); constructor(width: number, height: number, frameRate: number, maxBitrate: number, minBitrate: number, codecName: number, codecMode: number, encodePrefer: number); constructor(); constructor(); protected _instance: any; /** {en} * @brief The preference for encoding screen-sharing streams. It defaults to the high-frame-rate mode.
 * See {@link RTCScreenVideoEncoderPreference RTCScreenVideoEncoderPreference}. * */ get encodePreference(): RTCScreenVideoEncoderPreference; set encodePreference(value: RTCScreenVideoEncoderPreference); /** {en} * @brief The maximum width of the captured screen, measured in pixel. * */ get width(): number; set width(value: number); /** {en} * @brief The maximum height of the captured screen, measured in pixel. * */ get height(): number; set height(value: number); /** {en} * @brief The frame rate of the screen capture and encoding, measured in fps. * */ get frameRate(): number; set frameRate(value: number); /** {en} * @brief The maximum bitrate(in kbps). Optional for internal capture while mandatory for custom capture.
* If you set this value to -1, RTC will automatically recommend the bitrate based on the input resolution and frame rate.
* If you set this value to 0, the streams will not be encoded and published.
* On Version 3.44 or later, the default value for internal capture is -1. On versions earlier than 3.44, you must set the maximum bit rate because there is no default value. * */ get maxBitrate(): number; set maxBitrate(value: number); /** {en} * @brief The minimum bitrate(in kbps).Optional for internal capture while mandatory for custom capture.
* The minimum bitrate must be set lower than the maximum bitrate. Otherwise, the streams will not be encoded and published. * */ get minBitrate(): number; set minBitrate(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class MediaPlayerConfig { constructor(type: AudioMixingType, playCount: number); constructor(type: AudioMixingType, playCount: number, startPos: number, autoPlay: boolean, progressInterval: number, syncProgressToRecordFrame: boolean); constructor(); constructor(); protected _instance: any; /** {en} * @brief For mixing playback types. See {@link AudioMixingType AudioMixingType} * */ get type(): AudioMixingType; set type(value: AudioMixingType); /** {en} * @brief Mix playback times
* - Play_count < = 0: Infinite loop * - Play_count == 1: Play once (default) * - Play_count > 1: Play play_count times * */ get playCount(): number; set playCount(value: number); /** {en} * @brief The starting position in ms. 0 by default. * */ get startPos(): number; set startPos(value: number); /** {en} * @brief Set the interval of the periodic callback {@link onMediaPlayerPlayingProgress onMediaPlayerPlayingProgress} during audio mixing in ms.
* - interval > 0: The callback is enabled. The actual interval is `10*(mod(10)+1)`. * - interval <= 0: The callback is disabled. * */ get callbackOnProgressInterval(): number; set callbackOnProgressInterval(value: number); /** {en} * @brief Attach the process information of local audio file mixing to the captured audio data. Enable the function to enhance the synchronicity of the remote audio mixing.
* - The function is effective when mixing a single audio file. * - Use `true` for enabling the function and `false` for disable the function. The default is `false`. * */ get syncProgressToRecordFrame(): boolean; set syncProgressToRecordFrame(value: boolean); /** {en} * @brief Play the audio automatically. If not, call {@link start start} to play the audio. * */ get autoPlay(): boolean; set autoPlay(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoPictureType { VIDEO_PICTURE_TYPE_UNKNOWN = 0, VIDEO_PICTURE_TYPE_I = 1, VIDEO_PICTURE_TYPE_P = 2, VIDEO_PICTURE_TYPE_B = 3 } export declare enum ColorSpace { UNKNOWN = 0, BT601_LIMITED_RANGE = 1, BT601_FULL_RANGE = 2, BT709_LIMITED_RANGE = 3, BT709_FULL_RANGE = 4, ByteRTCColorSpaceYCbCrBT601LimitedRange = 5, ByteRTCColorSpaceYCbCrBT601FullRange = 6, ByteRTCColorSpaceYCbCrBT709LimitedRange = 7, ByteRTCColorSpaceYCbCrBT709FullRange = 8 } export declare class AudioPropertiesInfo { constructor(linearVolume: number, nonlinearVolume: number, spectrum: Array, vad: number); constructor(); protected _instance: any; /** {en} * @brief linear volume. The value is in linear relation to the original volume. The higher the value, the higher the volume. The range is [0,255].
* - [0, 25]: Silence * - [26, 75]: Low volume * - [76, 204]: Medium volume * - [205, 255]: High volume * */ get linearVolume(): number; set linearVolume(value: number); /** {en} * @brief non-linear volume in dB. The value is in proportion to the log value of the original volume. You can use the value to recognize the Active Speaker in the room. The range is [-127, 0].
* - [-127, -60]: Silence * - [-59, -40]: Low volume * - [-39, -20]: Medium volume * - [-19, 0]: High volume * */ get nonlinearVolume(): number; set nonlinearVolume(value: number); /** {en} * @brief Spectrum array * */ get spectrum(): Array; set spectrum(value: Array); /** {en} * @brief Voice Activity Detection (VAD) result
* - 1: Voice activity detected. * - 0: No voice activity detected. * - -1: VAD not activated. * */ get vad(): number; set vad(value: number); /** {en} * @brief The vocal pitch of the local user, in Hertz.
* When the following two conditions are met at the same time, the vocal pitch of the local user will be returned:
* - Calls {@link enableAudioPropertiesReport enableAudioPropertiesReport}, and sets the value of enableVoicePitch to `true`. * - The local user's voice is included in the locally captured audio data. * In other situations, `0` will be returned. * * */ get voicePitch(): number; set voicePitch(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum RemoteVideoState { REMOTE_VIDEO_STATE_STOPPED = 0, REMOTE_VIDEO_STATE_STARTING = 1, REMOTE_VIDEO_STATE_DECODING = 2, REMOTE_VIDEO_STATE_FROZEN = 3, ByteRTCRemoteVideoStateFailed = 4 } export declare enum HardwareEchoDetectionResult { HARDWARE_ECHO_RESULT_CANCELED = 0, HARDWARE_ECHO_RESULT_UNKNOWN = 1, HARDWARE_ECHO_RESULT_NORMAL = 2, HARDWARE_ECHO_RESULT_POOR = 3, ByteRTCHardwareEchoDetectionCanceled = 4, ByteRTCHardwareEchoDetectionUnknown = 5, ByteRTCHardwareEchoDetectionNormal = 6, ByteRTCHardwareEchoDetectionPoor = 7 } export declare class SubscribeVideoConfig { constructor(videoIndex: number, priority: number); constructor(); protected _instance: any; /** {en} * @platform ios * @brief Subscribed video stream resolution subscript.
* Quality level of the video stream subscribed to.
* In Simulcast mode, use a number to specify the expected quality of the video stream to be subscribed to. In Simulcast mode, a video has a diversity of encoded qualities that ranking from 0 to 3. Call {@link enableSimulcastMode enableSimulcastMode:} to enable Simulcast mode on the publisher's clients.
* Ranging from -1 to 3
* 0 (Default): The best quality
 * -1: Use the previous settings. */ get ios_videoIndex(): $p_i.NSInteger; set ios_videoIndex(value: $p_i.NSInteger); /** {en} * @platform ios * @brief Remote user priority. See {@link ByteRTCRemoteUserPriority ByteRTCRemoteUserPriority}, the default value is 0. */ get ios_priority(): $p_i.NSInteger; set ios_priority(value: $p_i.NSInteger); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum RecordingState { RECORDING_STATE_PROCESSING = 0, RECORDING_STATE_SUCCESS = 1, /** NOTE(review): "ERROE" is a misspelling of "ERROR"; the name is part of the public API and is kept as-is for backward compatibility. */ RECORDING_STATE_ERROE = 2, ByteRTCRecordingStateError = 3 } export declare enum NetworkDetectionLinkType { UP = 0, DOWN = 1 } export declare class RTCStream { /** * @platform android */ static android_create(userId: string, streamIndex: $p_a.StreamIndex, isScreen: boolean, hasVideo: boolean, hasAudio: boolean): $p_a.RTCStream; /** * @platform android */ static android_createWithStreamDescriptions(userId: string, streamIndex: $p_a.StreamIndex, isScreen: boolean, hasVideo: boolean, hasAudio: boolean, videoStreamDescriptions: Array<$p_a.VideoStreamDescription>): $p_a.RTCStream; protected _instance: any; /** {en} * @brief The user ID that published this stream. * */ get userId(): string; /** {en} * @brief Whether this stream is a shared screen stream. * */ get isScreen(): boolean; /** {en} * @brief Whether this stream includes a video stream. * */ get hasVideo(): boolean; /** {en} * @brief Whether the stream includes an audio stream. * */ get hasAudio(): boolean; /** {en} * @brief Properties of the video stream.
* When a remote user calls the {@link setVideoEncoderConfig setVideoEncoderConfig} method to publish multiple configured video streams, this will contain attribute information for all video streams published by the user.
* See {@link VideoStreamDescription VideoStreamDescription}. * */ get videoStreamDescriptions(): $p_a.List<$p_a.VideoStreamDescription> | $p_i.NSArray<$p_i.ByteRTCVideoSolution>; /** {en} * @platform ios * @brief The maximum resolution of the video stream, the maximum publish resolution that can be supported by the callback publisher when multi-resolution publish subscription is enabled. */ get ios_maxVideoStreamDescription(): $p_i.ByteRTCVideoSolution; /** * @platform ios */ /** * @platform ios */ get ios_index(): $p_i.ByteRTCStreamIndex; } export declare enum VideoDenoiseModeChangedReason { VIDEO_DENOISE_MODE_CHANGED_REASON_NULL = 0, VIDEO_DENOISE_MODE_CHANGED_REASON_API_OFF = 1, VIDEO_DENOISE_MODE_CHANGED_REASON_API_ON = 2, VIDEO_DENOISE_MODE_CHANGED_REASON_CONFIG_DISABLED = 3, VIDEO_DENOISE_MODE_CHANGED_REASON_CONFIG_ENABLED = 4, VIDEO_DENOISE_MODE_CHANGED_REASON_DYNAMIC_CLOSE = 5, VIDEO_DENOISE_MODE_CHANGED_REASON_DYNAMIC_OPEN = 6, VIDEO_DENOISE_MODE_CHANGED_REASON_RESOLUTION = 7, VIDEO_DENOISE_MODE_CHANGED_REASON_INTERNAL_EXCEPTION = 8, ByteRTCVideoDenoiseModeChangedReasonInternalException = 9 } export declare enum AudioDeviceType { AUDIO_DEVICE_TYPE_UNKNOWN = 0, AUDIO_DEVICE_TYPE_RENDER_DEVICE = 1, AUDIO_DEVICE_TYPE_CAPTURE_DEVICE = 2, AUDIO_DEVICE_TYPE_SCREEN_CAPTURE_DEVICE = 3 } export declare enum SetRoomExtraInfoResult { SUCCESS = 0, NOT_JOIN_ROOM = 1, KEY_IS_NULL = 2, VALUE_IS_NULL = 3, UNKNOW = 4, KEY_IS_EMPTY = 5, TOO_OFTEN = 6, SILENT_USER = 7, KEY_TOO_LONG = 8, VALUE_TOO_LONG = 9, SERVER_ERROR = 10 } export declare enum LocalProxyState { CONNECTED = 0, ERROR = 1, INITED = 2, ByteRTCLocalProxyStateInited = 3 } export declare enum AVSyncState { AV_SYNC_STATE_AUDIO_STREAM_REMOVE = 0, AV_SYNC_STATE_VIDEO_STREAM_REMOVE = 1, AV_SYNC_STATE_STREAM_SYNC_BEGIN = 2 } export declare class RTCRoomConfig { constructor(channelProfile: ChannelProfile, isPublishAudio: boolean, isPublishVideo: boolean, isAutoSubscribeAudio: boolean, isAutoSubscribeVideo: boolean); 
constructor(); protected _instance: any; /** {en} * @brief Room profile. See ChannelProfile{@link #ChannelProfile}. The default is `CHANNEL_PROFILE_COMMUNICATION`. The setting cannot be changed after joining the room. * */ get profile(): ChannelProfile; set profile(value: ChannelProfile); /** {en} * @brief Whether to publish media streams automatically. The default is automatic publishing. * + Changing the user role to audience via `setUserVisibility` will void this setting. * + You can publish streams in only one of the rooms you have participated. If you have joined multiple rooms with the automatically publishing setting, the stream will be added into the first room you have joined only. * */ get isPublishAudio(): boolean; set isPublishAudio(value: boolean); /** {en} * @brief Whether to publish media streams automatically. The default is automatic publishing. * + Changing the user role to audience via `setUserVisibility` will void this setting. * + You can publish streams in only one of the rooms you have participated. If you have joined multiple rooms with the automatically publishing setting, the stream will be added into the first room you have joined only. * */ get isPublishVideo(): boolean; set isPublishVideo(value: boolean); /** {en} * @brief Whether to automatically subscribe to the audio stream. The default is automatic subscription.
* This setting affects both the main stream and the screen-sharing stream. * */ get isAutoSubscribeAudio(): boolean; set isAutoSubscribeAudio(value: boolean); /** {en} * @brief Whether to automatically subscribe to the main video stream. The default is automatic subscription.
* This setting affects both the main stream and the screen-sharing stream. * */ get isAutoSubscribeVideo(): boolean; set isAutoSubscribeVideo(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** * @platform android */ /** * @platform android */ android_isPublishAudio$(): boolean; } export declare class Rectangle { constructor(x: number, y: number, w: number, h: number); constructor(); protected _instance: any; /** {en} * @brief The x coordinate of the upper left corner of the rectangular area * */ get x(): number; set x(value: number); /** {en} * @brief The y coordinate of the upper left corner of the rectangular area * */ get y(): number; set y(value: number); /** {en} * @brief Rectangle width in px * */ get width(): number; set width(value: number); /** {en} * @brief Rectangular height in px * */ get height(): number; set height(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum MixedStreamClientMixVideoFormat { MIXED_STREAM_CLIENT_MIX_VIDEO_FORMAT_TEXTURE_2D = 0, MIXED_STREAM_CLIENT_MIX_VIDEO_FORMAT_CVPIXEL_BUFFER_BGRA = 1, MIXED_STREAM_CLIENT_MIX_VIDEO_FORMAT_YUV_I420 = 2, MIXED_STREAM_CLIENT_MIX_VIDEO_FORMAT_YUV_NV12 = 3, ByteRTCMixedStreamClientMixVideoFormatI420 = 4, ByteRTCMixedStreamClientMixVideoFormatNV12 = 5 } export declare enum VideoRotationMode { FOLLOW_APP = 0, FOLLOW_GSENSOR = 1 } export declare enum PlayerState { PRELOADED = 0, OPENED = 1, PLAYING = 2, PAUSED = 3, STOPPED = 4, FAILED = 5, FINISHED = 6, IDLE = 7, ByteRTCPlayerStateIdle = 8 } export declare enum MixedStreamVideoType { MIXED_STREAM_VIDEO_TYPE_MAIN = 0, MIXED_STREAM_VIDEO_TYPE_SCREEN = 1 } export declare class MixedStreamLayoutRegionConfig { protected _instance: any; /** {en} * @platform ios * @brief The user ID of the user who publishes the video stream. It's recommended to be set. 
*/ get ios_userID(): $p_i.NSString; set ios_userID(value: $p_i.NSString); /** {en} * @platform ios * @brief The room ID of the media stream. It's recommended to be set.
* If the media stream is the stream forwarded by {@link startForwardStreamToRooms startForwardStreamToRooms:}, you must set the roomID to the room ID of the target room. */ get ios_roomID(): $p_i.NSString; set ios_roomID(value: $p_i.NSString); /** {en} * @platform ios * @brief The X-coordinate in pixels of the upper-left corner of the user's frame in the entire canvas coordinate system, where the origin is at the upper-left corner of the canvas. It represents the horizontal displacement of the upper-left corner of the user's frame relative to the origin.
* The value range is [0, the width of the canvas). The default value is 0. */ get ios_locationX(): $p_i.NSInteger; set ios_locationX(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The Y-coordinate in pixels of the upper-left corner of the user's frame in the entire canvas coordinate system, where the origin is at the upper-left corner of the canvas. It represents the vertical displacement of the upper-left corner of the user's frame relative to the origin.
* The value range is [0, the height of the canvas). The default value is 0. */ get ios_locationY(): $p_i.NSInteger; set ios_locationY(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The width of the user's frame in pixels. The value range is [0, the width of the canvas]. The default value is 360. */ get ios_width(): $p_i.NSInteger; set ios_width(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The height of the user's frame in pixels. The value range is [0, the height of the canvas]. The default value is 640. */ get ios_height(): $p_i.NSInteger; set ios_height(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The layer on which the video is rendered. The range is [0, 100]. 0 for the bottom layer, and 100 for the top layer. The default value is 0. It's recommended to be set. */ get ios_zOrder(): $p_i.NSInteger; set ios_zOrder(value: $p_i.NSInteger); /** {en} * @platform ios * @brief Whether the source user of the stream is a local user. */ get ios_isLocalUser(): $p_i.BOOL; set ios_isLocalUser(value: $p_i.BOOL); /** {en} * @platform ios * @brief The mixed stream type, the default value is Main. */ get ios_streamType(): $p_i.ByteRTCMixedStreamVideoType; set ios_streamType(value: $p_i.ByteRTCMixedStreamVideoType); /** {en} * @platform ios * @brief The opacity in range of (0.0, 1.0]. The lower value, the more transparent. The default value is 1.0. */ get ios_alpha(): $p_i.CGFloat; set ios_alpha(value: $p_i.CGFloat); /** {en} * @platform ios * @brief The proportion of the radius to the width of the canvas. `0.0` by default.
* After you set the value, `width_px`, `height_px`, and `cornerRadius_px` are calculated based on `width`, `height`, `cornerRadius`, and the width of the canvas. If `cornerRadius_px < min(width_px/2, height_px/2)` is met, the value of `cornerRadius` is set valid; if not, `cornerRadius_px` is set to `min(width_px/2, height_px/2)`, and `cornerRadius` is set to the proportion of `cornerRadius_px` to the width of the canvas. */ get ios_cornerRadius(): $p_i.CGFloat; set ios_cornerRadius(value: $p_i.CGFloat); /** {en} * @platform ios * @brief The stream mixing content type. The default value is `ByteRTCTranscoderContentControlTypeHasAudioAndVideo`. See {@link ByteRTCMixedStreamMediaType ByteRTCMixedStreamMediaType}. */ get ios_mediaType(): $p_i.ByteRTCMixedStreamMediaType; set ios_mediaType(value: $p_i.ByteRTCMixedStreamMediaType); /** {en} * @platform ios * @brief The render mode. See {@link ByteRTCMixedStreamRenderMode ByteRTCMixedStreamRenderMode}. The default value is 1. */ get ios_renderMode(): $p_i.ByteRTCMixedStreamRenderMode; set ios_renderMode(value: $p_i.ByteRTCMixedStreamRenderMode); /** {en} * @platform ios * @brief Stream mixing region type. See {@link ByteRTCMixedStreamLayoutRegionType ByteRTCMixedStreamLayoutRegionType}. It's recommended to be set. */ get ios_regionContentType(): $p_i.ByteRTCMixedStreamLayoutRegionType; set ios_regionContentType(value: $p_i.ByteRTCMixedStreamLayoutRegionType); /** {en} * @platform ios * @brief The RGBA data of the mixing image. Put in null when mixing video streams. */ get ios_imageWaterMark(): $p_i.NSData; set ios_imageWaterMark(value: $p_i.NSData); /** {en} * @platform ios * @brief Image parameters for stream mixing. See {@link ByteRTCMixedStreamLayoutRegionImageWaterMarkConfig ByteRTCMixedStreamLayoutRegionImageWaterMarkConfig}. Put in null when mixing video streams. 
*/ get ios_imageWaterMarkConfig(): $p_i.ByteRTCMixedStreamLayoutRegionImageWaterMarkConfig; set ios_imageWaterMarkConfig(value: $p_i.ByteRTCMixedStreamLayoutRegionImageWaterMarkConfig); /** {en} * @platform ios * @brief spatial position. See {@link ByteRTCPosition ByteRTCPosition}. */ get ios_spatialPosition(): $p_i.ByteRTCPosition; set ios_spatialPosition(value: $p_i.ByteRTCPosition); /** {en} * @platform ios * @brief Sets whether a user applies spatial audio effects:
* - Yes: Yes (default setting) * - No: No */ get ios_applySpatialAudio(): $p_i.BOOL; set ios_applySpatialAudio(value: $p_i.BOOL); /** {en} * @platform ios * @valid since 3.57 * @brief Sets the fill mode of the placeholder image.
* This parameter is used to control the fill mode of the placeholder image after the user stops publishing video streams and the screen reverts to the placeholder image. See {@link ByteRTCMixedStreamAlternateImageFillMode ByteRTCMixedStreamAlternateImageFillMode}. */ get ios_alternateImageFillMode(): $p_i.ByteRTCMixedStreamAlternateImageFillMode; set ios_alternateImageFillMode(value: $p_i.ByteRTCMixedStreamAlternateImageFillMode); /** {en} * @platform ios * @valid since 3.57 * @brief Sets the URL of the placeholder image, limited to a maximum of 1024 characters. */ get ios_alternateImageUrl(): $p_i.NSString; set ios_alternateImageUrl(value: $p_i.NSString); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @brief Sets the room ID of the user who publishes the video stream. You are recommended to set the value. * @param roomID The room ID to be set. Required. * */ android_setRoomID(roomID: string): this; /** {en} * @platform android * @detail api * @brief Gets the room ID of the user who publishes the video stream. * */ android_getRoomID(): string; /** {en} * @platform android * @detail api * @brief Sets the user ID of the user who publishes the video stream. You are recommended to set the value. * @param userID The user ID to be set. * */ android_setUserID(userID: string): this; /** {en} * @platform android * @detail api * @brief Gets the user ID of the user who publishes the video stream. * */ android_getUserID(): string; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the X-coordinate in pixels of the upper-left corner of the user's frame in the entire canvas coordinate system, where the origin is at the upper-left corner of the canvas. It represents the horizontal displacement of the upper-left corner of the user's frame relative to the origin. * @param locationX The X-coordinate of the upper-left corner of the user's frame. 
The value range is [0, the width of the canvas). The default value is 0. * */ android_setLocationX(locationX: number): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the X-coordinate of the upper-left corner of the user's frame. * */ android_getLocationX(): number; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the Y-coordinate in pixels of the upper-left corner of the user's frame in the entire canvas coordinate system, where the origin is at the upper-left corner of the canvas. It represents the vertical displacement of the upper-left corner of the user's frame relative to the origin. * @param locationY The Y-coordinate of the upper-left corner of the user's frame. The value range is [0, the height of the canvas). The default value is 0. * */ android_setLocationY(locationY: number): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the Y-coordinate of the upper-left corner of the user's frame. * */ android_getLocationY(): number; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the width of the user's frame in pixels. * @param width The width of the user's frame. The value range is [0, the width of the canvas]. The default value is 360. * */ android_setWidth(width: number): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the width of the user's frame in pixels. * */ android_getWidth(): number; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the height of the user's frame in pixels. * @param height The height of the user's frame. The value range is [0, the height of the canvas]. The default value is 640. * */ android_setHeight(height: number): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the height of the user's frame in pixels. 
* */ android_getHeight(): $p_a.double; /** {en} * @platform android * @detail api * @brief Sets the layer on which the video is rendered. * @param zOrder The layer to be set. The range is [0, 100]. 0 for the bottom layer, and 100 for the top layer. The default value is 0. * */ android_setZOrder(zOrder: number): this; /** {en} * @platform android * @detail api * @brief Gets the layer on which the video is rendered. * */ android_getZOrder(): number; /** {en} * @platform android * @detail api * @brief Sets the transparency. * @param alpha (Only server-side stream mixing can set this parameter.) The opacity in range of (0.0, 1.0]. The lower value, the more transparent. The default value is 1.0. * */ android_setAlpha(alpha: number): this; /** {en} * @platform android * @detail api * @brief Gets the transparency. * */ android_getAlpha(): $p_a.double; /** {en} * @platform android * @detail api * @brief Sets the corner radius. * @param cornerRadius (Only server-side stream mixing can set this parameter.) The proportion of the radius to the width of the canvas. The default value is 0.0. * @note After you set the value, `width_px`, `height_px`, and `radius_px` are calculated based on `width`, `height`, `radius`, and the width of the canvas. If `radius_px < min(width_px/2, height_px/2)` is met, the value of `radius` is set valid; if not, `radius_px` is set to `min(width_px/2, height_px/2)`, and `radius` is set to the proportion of `radius_px` to the width of the canvas. * */ android_setCornerRadius(cornerRadius: number): this; /** {en} * @platform android * @detail api * @brief Gets the corner radius. * @return (Only server-side stream mixing can set this parameter.) The proportion of the radius to the width of the canvas. The default value is 0.0. * @note After you set the value, `width_px`, `height_px`, and `radius_px` are calculated based on `width`, `height`, `radius`, and the width of the canvas. 
If `radius_px < min(width_px/2, height_px/2)` is met, the value of `radius` is set valid; if not, `radius_px` is set to `min(width_px/2, height_px/2)`, and `radius` is set to the proportion of `radius_px` to the width of the canvas. * */ android_getCornerRadius(): $p_a.double; /** {en} * @platform android * @detail api * @brief (Only server-side stream mixing can set this parameter.) Sets the stream mixing content type. * @param mediaType The stream mixing content type. The default value is `MIXED_STREAM_MEDIA_TYPE_AUDIO_AND_VIDEO(0)`. See {@link MixedStreamMediaType MixedStreamMediaType}. * */ android_setMediaType(mediaType: $p_a.MixedStreamMediaType): this; /** {en} * @platform android * @detail api * @brief Gets the stream mixing content type. * */ android_getMediaType(): $p_a.MixedStreamMediaType; /** {en} * @platform android * @detail api * @brief Sets the stream mixing region type. * @param regionContentType Stream mixing region type. See {@link MixedStreamLayoutRegionType MixedStreamLayoutRegionType}. * */ android_setRegionContentType(regionContentType: $p_a.MixedStreamLayoutRegionType): this; /** {en} * @platform android * @detail api * @brief Gets the stream mixing region type. * */ android_getRegionContentType(): $p_a.MixedStreamLayoutRegionType; /** {en} * @platform android * @detail api * @brief Sets the render mode. You are recommended to set the value. * @param renderMode The render mode. See {@link MixedStreamRenderMode MixedStreamRenderMode}. The default value is 1. * */ android_setRenderMode(renderMode: $p_a.MixedStreamRenderMode): this; /** {en} * @platform android * @detail api * @brief Gets the render mode. * */ android_getRenderMode(): $p_a.MixedStreamRenderMode; /** {en} * @platform android * @detail api * @brief By calling this API, you can know whether the source user of the stream is the local user. 
* @return * - true: Yes * - false: No * */ android_getIsLocalUser(): boolean; /** {en} * @platform android * @detail api * @brief Sets whether the source user of the stream is a local user. * @param islocalUser
* - true: Yes * - false: No * */ android_setIsLocalUser(islocalUser: boolean): this; /** {en} * @platform android * @detail api * @brief Sets the stream type in the region. * @param streamType Stream type, see {@link MixedStreamVideoType MixedStreamVideoType}. Only server-side stream mixing can set this parameter to "MIXED_STREAM_VIDEO_TYPE_SCREEN". * */ android_setStreamType(streamType: $p_a.MixedStreamVideoType): this; /** {en} * @platform android * @detail api * @brief Gets the stream type in the region. * */ android_getStreamType(): $p_a.MixedStreamVideoType; /** {en} * @platform android * @detail api * @brief Sets the data of the mixing image. * @param imageWaterMark The RGBA data of the mixing image. Put in null when mixing video streams. * */ android_setImageWaterMark(imageWaterMark: ArrayBuffer): this; /** {en} * @platform android * @detail api * @brief Gets the data of the mixing image. * */ android_getImageWaterMark(): ArrayBuffer; /** {en} * @platform android * @detail api * @brief Sets the image parameters for stream mixing. * @param imageWaterMarkConfig Image parameters for stream mixing. See {@link MixedStreamLayoutRegionImageWaterMarkConfig MixedStreamLayoutRegionImageWaterMarkConfig}. Put in null when mixing video streams. * */ android_setImageWaterMarkConfig(imageWaterMarkConfig: $p_a.MixedStreamLayoutRegionImageWaterMarkConfig): this; /** {en} * @platform android * @detail api * @brief Gets the image parameters for stream mixing. * */ android_getImageWaterMarkConfig(): $p_a.MixedStreamLayoutRegionImageWaterMarkConfig; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the fill mode of the placeholder image.
* This method is used to control the fill mode of the placeholder image after the user stops publishing video streams and the screen reverts to the placeholder image. * @param alternateImageFillMode See {@link MixedStreamAlternateImageFillMode MixedStreamAlternateImageFillMode}. * */ android_setAlternateImageFillMode(alternateImageFillMode: $p_a.MixedStreamAlternateImageFillMode): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the setting on the fill mode of the placeholder image. * */ android_getAlternateImageFillMode(): $p_a.MixedStreamAlternateImageFillMode; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the URL of the placeholder image, limited to a maximum of 1024 characters. * */ android_setAlternateImageURL(alternateImageUrl: string): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the URL of the placeholder image, limited to a maximum of 1024 characters. * */ android_getAlternateImageURL(): string; /** {en} * @platform android * @detail api * @brief Sets spatial audio position of the current region. * @param spatialPosition Spatial position of current region. * */ android_setSpatialPosition(spatialPosition: $p_a.Position): this; /** {en} * @platform android * @detail api * @brief Gets spatial audio position of the current region. * */ android_getSpatialPosition(): $p_a.Position; /** {en} * @platform android * @detail api * @brief Sets whether a user applies spatial audio effects. * @param applySpatialAudio Whether the user applies spatial audio effects:
* - true: Yes (default setting) * - false: No * */ android_setApplySpatialAudio(applySpatialAudio: boolean): this; /** {en} * @platform android * @detail api * @brief Gets the setting of whether a user applies spatial audio effects. * */ android_getApplySpatialAudio(): boolean; } export declare enum AudioScenarioType { AUDIO_SCENARIO_MUSIC = 0, AUDIO_SCENARIO_HIGHQUALITY_COMMUNICATION = 1, AUDIO_SCENARIO_COMMUNICATION = 2, AUDIO_SCENARIO_MEDIA = 3, AUDIO_SCENARIO_GAME_STREAMING = 4, AUDIO_SCENARIO_HIGHQUALITY_CHAT = 5 } export declare enum AudioPropertiesMode { AUDIO_PROPERTIES_MODE_MICROPHONE = 0, AUDIO_PROPERTIES_MODE_AUDIOMIXING = 1 } export declare enum ASRAuthorizationType { ASR_AUTHORIZATION_TYPE_TOKEN = 0, ASR_AUTHORIZATION_TYPE_SIGNATURE = 1 } export declare class SysStats { constructor(cpuCores: number, cpuAppUsage: double, cpuTotalUsage: double, memoryUsage: double, fullMemory: number, totalMemoryUsage: number, freeMemory: number, memoryRatio: double, totalMemoryRatio: double); constructor(); constructor(); protected _instance: any; /** {en} * @brief Device CPU cores * */ get cpuCores(): number; set cpuCores(value: number); /** {en} * @brief The CPU usage of the application, the value range is [0,1]. * */ get cpuAppUsage(): number; set cpuAppUsage(value: number); /** {en} * @platform android * @brief The CPU usage of the system, the value range is [0,1]. 
* */ get android_cpuTotalUsage(): $p_a.double; set android_cpuTotalUsage(value: $p_a.double); /** {en} * @brief Application's memory occupancy in MB * */ get memoryUsage(): number; set memoryUsage(value: number); /** {en} * @brief Device memory size unit: MB * */ get fullMemory(): number; set fullMemory(value: number); /** {en} * @brief System used memory (in MB) * */ get totalMemoryUsage(): number; set totalMemoryUsage(value: number); /** {en} * @brief System current free memory (in MB) * */ get freeMemory(): number; set freeMemory(value: number); /** {en} * @brief Memory usage of the current application (in \%) * */ get memoryRatio(): number; set memoryRatio(value: number); /** {en} * @brief System memory usage (in \%) * */ get totalMemoryRatio(): number; set totalMemoryRatio(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoContentType { NORMAL_FRAME = 0, BLACK_FRAME = 1 } export declare class SubscribeConfig { /** * @platform android */ static android_create(isScreen: boolean, subVideo: boolean, subAudio: boolean, videoIndex: number, svcLayer: number, subWidth: number, subHeight: number, subVideoIndex: number): $p_a.SubscribeConfig; constructor(oldConfig: SubscribeConfig); constructor(isScreen: boolean, subVideo: boolean, subAudio: boolean, videoIndex: number); constructor(isScreen: boolean, subVideo: boolean, subAudio: boolean, videoIndex: number, svcLayer: number); constructor(isScreen: boolean, subVideo: boolean, subAudio: boolean, videoIndex: number, svcLayer: number, subWidth: number, subHeight: number, subVideoIndex: number); constructor(); constructor(); protected _instance: any; /** {en} * @brief Whether the stream is from screen sharing * * */ get isScreen(): boolean; set isScreen(value: boolean); /** {en} * @platform android * @brief Whether video stream is included * * */ get android_subVideo(): boolean; set android_subVideo(value: boolean); /** {en} * @platform android * 
@brief Whether audio stream is included * * */ get android_subAudio(): boolean; set android_subAudio(value: boolean); /** {en} * @brief Subscribed video stream resolution subscript.
* Users can publish multiple videos of different resolutions in a stream. Therefore, when subscribing to a stream, you need to specify the specific resolution of the subscription. This parameter is used to specify the subscript of the resolution to be subscribed to, and the default value is 0. * */ get videoIndex(): number; set videoIndex(value: number); /** {en} * @platform android * @brief Width of the video stream in px * */ get android_subWidth(): $p_a.int; set android_subWidth(value: $p_a.int); /** {en} * @platform android * @brief Height of the video stream in px * */ get android_subHeight(): $p_a.int; set android_subHeight(value: $p_a.int); /** {en} * @brief Expected maximum frame rate of the subscribed stream in fps. The default value is 0, values greater than 10 are valid.
* If the frame rate of the stream published is higher than the value set by the subscriber, the subscriber receives the video of the frame rate set by this API; if the frame rate of the stream published is lower than the value set by the subscriber, the subscriber receives the video of the same frame rate of the video published.
* Only valid if the stream is coded with SVC technique. * */ get framerate(): number; set framerate(value: number); /** {en} * @platform ios * @brief Whether to subscribe to videos.
* The user selects whether to subscribe to videos in the remote stream by setting this parameter. YES is a subscription video, NO is a non-subscription video, the default value is YES. */ get ios_subscribeVideo(): $p_i.BOOL; set ios_subscribeVideo(value: $p_i.BOOL); /** {en} * @platform ios * @brief Whether to subscribe to audio.
* The user selects whether to subscribe to audio in the remote stream by setting this parameter. YES is subscribed audio, NO is not subscribed audio, the default value is YES. */ get ios_subscribeAudio(): $p_i.BOOL; set ios_subscribeAudio(value: $p_i.BOOL); /** {en} * @platform ios * @brief Time domain hierarchy of the subscribed video stream, default value is 0. */ get ios_svcLayer(): $p_i.NSInteger; set ios_svcLayer(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The width in px of the subscription, the default value is 0. */ get ios_width(): $p_i.NSInteger; set ios_width(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The height in px of the subscription, the default value is 0. */ get ios_height(): $p_i.NSInteger; set ios_height(value: $p_i.NSInteger); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class EchoTestConfig { constructor(view: View, uid: string, roomid: string, token: string, enableAudio: boolean, enableVideo: boolean, interval: number); constructor(); protected _instance: any; /** {en} * @brief User ID for testing * */ get userId(): string; set userId(value: string); /** {en} * @brief ID of the room that the tested user will join. * */ get roomId(): string; set roomId(value: string); /** {en} * @brief Token used for authenticating users when they enter the room. * */ get token(): string; set token(value: string); /** {en} * @brief Whether to test audio. The device to be tested is the default audio device.
* - true: Yes * - If you use internal capture, the device microphone will automatically turn on and `onLocalAudioPropertiesReport` will be triggered when the value of audioReportInterval is set to greater than 0, from which you can judge the working status of the microphone. * - If you choose custom capture, you also need to call {@link pushExternalAudioFrame pushExternalAudioFrame} to push the captured audio to the SDK. * - false: No * */ get enableAudio(): boolean; set enableAudio(value: boolean); /** {en} * @brief Whether to test video. If you are using a desktop PC, the device to be tested is by default the first video device in the list.
* - true: Yes * - If you use internal capture, the device camera will automatically turn on. * - If you choose custom capture, you also need to call {@link pushExternalVideoFrame pushExternalVideoFrame} to push the captured video to the SDK. * - false: No * @note The video is published with fixed parameters: resolution 640px × 360px, frame rate 15fps. * */ get enableVideo(): boolean; set enableVideo(value: boolean); /** {en} * @brief Volume prompt interval in ms, the default value is 100.
* - `<= 0`: Turn off prompt * - `(0,100]` Invalid interval value, and will be automatically reset to 100ms. * - `> 100`: the actual value of interval * */ get audioReportInterval(): number; set audioReportInterval(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** * @platform android */ /** * @platform android */ android_getEchoRenderView(): $p_a.View; /** * @platform android */ /** * @platform android */ android_getEchoUid(): string; /** * @platform android */ /** * @platform android */ android_getEchoRoomId(): string; /** * @platform android */ /** * @platform android */ android_getEchoToken(): string; /** * @platform android */ /** * @platform android */ android_getEchoEnabledAudio(): boolean; /** * @platform android */ /** * @platform android */ android_getEchoEnabledVideo(): boolean; /** * @platform android */ /** * @platform android */ android_getAudioReportInterval(): number; } export declare enum AudioSampleRate { AUDIO_SAMPLE_RATE_AUTO = 0, AUDIO_SAMPLE_RATE_8000 = 1, AUDIO_SAMPLE_RATE_16000 = 2, AUDIO_SAMPLE_RATE_32000 = 3, AUDIO_SAMPLE_RATE_44100 = 4, AUDIO_SAMPLE_RATE_48000 = 5 } export declare class MusicInfo { constructor(musicId: string, musicName: string, singer: string, vendorId: string, vendorName: string, updateTimestamp: number, posterUrl: string, lyricType: LyricStatus, duration: number, enableScore: boolean, climaxStart: number, climaxEnd: number); constructor(); protected _instance: any; /** {en} * @brief Music ID. * */ get musicId(): string; set musicId(value: string); /** {en} * @brief Music name. * */ get musicName(): string; set musicName(value: string); /** {en} * @brief Singer. * */ get singer(): string; set singer(value: string); /** {en} * @brief Vendor ID. * */ get vendorId(): string; set vendorId(value: string); /** {en} * @brief Vendor name. * */ get vendorName(): string; set vendorName(value: string); /** {en} * @brief Latest update timestamp in milliseconds. 
* */ get updateTimestamp(): number; set updateTimestamp(value: number); /** {en} * @brief The URL of the music cover. * */ get posterUrl(): string; set posterUrl(value: string); /** {en} * @brief The lyrics type. See {@link LyricStatus LyricStatus}. * */ get lyricStatus(): LyricStatus; set lyricStatus(value: LyricStatus); /** {en} * @brief The length of the song in milliseconds. * */ get duration(): number; set duration(value: number); /** {en} * @brief Whether the song supports scoring. * */ get enableScore(): boolean; set enableScore(value: boolean); /** {en} * @brief The starting time of the climax part in milliseconds. * */ get climaxStartTime(): number; set climaxStartTime(value: number); /** {en} * @brief The ending time of the climax part in milliseconds. * */ get climaxEndTime(): number; set climaxEndTime(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum MixedStreamVideoCodecType { MIXED_STREAM_VIDEO_CODEC_TYPE_H264 = 0, MIXED_STREAM_VIDEO_CODEC_TYPE_BYTEVC1 = 1 } export declare enum MuteState { MUTE_STATE_OFF = 0, MUTE_STATE_ON = 1 } export declare enum ByteRTCStreamMixingType { STREAM_MIXING_BY_SERVER = 0, STREAM_MIXING_BY_CLIENT = 1, ByteRTCStreamMixingTypeByServer = 2, ByteRTCStreamMixingTypeByClient = 3 } export declare enum MediaDeviceError { /** {en} * @platform android * @brief Normal * */ MEDIA_DEVICE_ERROR_OK = 0, /** {en} * @platform android * @brief No permission * */ MEDIA_DEVICE_ERROR_NOPERMISSION = 1, /** {en} * @platform android * @brief Occupied by the other application * */ MEDIA_DEVICE_ERROR_DEVICEBUSY = 2, /** {en} * @platform android * @brief Error * */ MEDIA_DEVICE_ERROR_DEVICEFAILURE = 3, /** {en} * @platform android * @brief Not found * */ MEDIA_DEVICE_ERROR_DEVICENOTFOUND = 4, /** {en} * @platform android * @brief Disconnected * */ MEDIA_DEVICE_ERROR_DEVICEDISCONNECTED = 5, /** {en} * @platform android * @brief No data callback from the capture devices.
* RTC emits this error when the media device is expected to be working but no data is received. * */ MEDIA_DEVICE_ERROR_DEVICENOCALLBACK = 6, /** {en} * @platform android * @brief Not supported sample rate * */ MEDIA_DEVICE_ERROR_UNSUPPORTFORMAT = 7, ByteRTCMediaDeviceErrorOK = 8, ByteRTCMediaDeviceErrorDeviceNoPermission = 9, ByteRTCMediaDeviceErrorDeviceBusy = 10, ByteRTCMediaDeviceErrorDeviceFailure = 11, ByteRTCMediaDeviceErrorDeviceNotFound = 12, ByteRTCMediaDeviceErrorDeviceDisconnected = 13, ByteRTCMediaDeviceErrorDeviceNoCallback = 14, ByteRTCMediaDeviceErrorUNSupportFormat = 15, ByteRTCMediaDeviceErrorNotFindGroupId = 16, ByteRTCMediaDeviceErrorNotAvailableInBackground = 17, ByteRTCMediaDeviceErrorVideoInUseByAnotherClient = 18, ByteRTCMediaDeviceErrorNotAvailableWithMultipleForegroundApps = 19, ByteRTCMediaDeviceErrorNotAvailableDueToSystemPressure = 20 } export declare enum AudioFrameSource { AUDIO_FRAME_SOURCE_MIC = 0, AUDIO_FRAME_SOURCE_PLAYBACK = 1, AUDIO_FRAME_SOURCE_MIXED = 2, ByteRTCAudioFrameSourceTypeMic = 3, ByteRTCAudioFrameSourceTypePlayback = 4, ByteRTCAudioFrameSourceTypeMixed = 5 } export declare enum StreamRemoveReason { STREAM_REMOVE_REASON_UNPUBLISH = 0, STREAM_REMOVE_REASON_PUBLISH_FAILED = 1, STREAM_REMOVE_REASON_KEEP_LIVE_FAILED = 2, STREAM_REMOVE_REASON_CLIENT_DISCONNECTED = 3, STREAM_REMOVE_REASON_REPUBLISH = 4, STREAM_REMOVE_REASON_OTHER = 5, STREAM_REMOVE_REASON_PUBLISH_PRIVILEGE_TOKEN_EXPIRED = 6, ByteRTCStreamRemoveReasonPublishPrivilegeExpired = 7 } export declare enum ForwardStreamError { FORWARD_STREAM_ERROR_OK = 0, FORWARD_STREAM_ERROR_RESPONSE = 1, FORWARD_STREAM_ERROR_REMOTE_KICKED = 2, FORWARD_STREAM_ERROR_NOT_SUPPORT = 3, FORWARD_STREAM_ERROR_INVALID_ARGUMENT = 4, FORWARD_STREAM_ERROR_INVALID_TOKEN = 5, ByteRTCForwardStreamErrorInvalidArgument = 6, ByteRTCForwardStreamErrorInvalidToken = 7 } export declare enum NetworkDetectionStopReason { USER = 0, TIMEOUT = 1, CONNECTION_LOST = 2, STREAMING = 3, INNER_ERR = 4, 
ByteRTCNetworkDetectionStopReasonInnerErr = 5 } export declare class ReceiveRange { constructor(min: number, max: number); constructor(); protected _instance: any; /** {en} * @brief The minimum distance at which the local user can hear the attenuated audio from remote users.
* The value must be ≥ 0, but ≤ max.
* No attenuation effect for audio from distances less than this value you set. * */ get min(): number; set min(value: number); /** {en} * @brief The maximum distance at which the local user can hear audio from remote users.
* The value must be ≥ min, and > 0.
* Audio from distances larger than the value you set cannot be heard. * */ get max(): number; set max(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum NetworkQuality { /** {en} * @detail keytype * @brief Media stream network quality unknown. * */ NETWORK_QUALITY_UNKNOWN = 0, /** {en} * @detail keytype * @brief Media Streaming Network quality is excellent. * */ NETWORK_QUALITY_EXCELLENT = 1, /** {en} * @detail keytype * @brief Media streaming network quality is good. * */ NETWORK_QUALITY_GOOD = 2, /** {en} * @detail keytype * @brief Media streaming The network quality is poor but does not affect communication. * */ NETWORK_QUALITY_POOR = 3, /** {en} * @detail keytype * @brief Media streaming Poor network quality Poor communication. * */ NETWORK_QUALITY_BAD = 4, /** {en} * @detail keytype * @brief Media streaming network quality is very poor. * */ NETWORK_QUALITY_VERY_BAD = 5, /** {en} * @detail keytype * @brief The network is down. It may be down due to no answer within 12s, airplane mode on, disconnected cable, etc.
* Refer to [Get connection state](https://docs.byteplus.com/byteplus-rtc/docs/95376) for more details about getting connection state. * */ NETWORK_QUALITY_DOWN = 6 } export declare enum RenderMode { ByteRTCRenderModeHidden = 0, ByteRTCRenderModeFit = 1, ByteRTCRenderModeFill = 2 } export declare enum ZoomConfigType { ZOOM_FOCUS_OFFSET = 0, ZOOM_MOVE_OFFSET = 1, ByteRTCZoomConfigTypeFocusOffset = 2, ByteRTCZoomConfigTypeMoveOffset = 3 } export declare class StreamSycnInfoConfig { constructor(streamIndex: StreamIndex, repeatCount: number, streamType: SyncInfoStreamType); constructor(); protected _instance: any; /** {en} * @brief Stream properties, mainstream or screen streams. Refer to the enumeration class [StreamIndex](#streamindex-2)
* - `STREAM_INDEX_MAIN(0)`: Main stream * - `STREAM_INDEX_SCREEN(1)`: Screen-sharing stream * */ get streamIndex(): StreamIndex; set streamIndex(value: StreamIndex); /** {en} * @brief Repeated number of messages sent * */ get repeatCount(): number; set repeatCount(value: number); /** {en} * @brief Stream types for media stream information synchronization. See {@link SyncInfoStreamType SyncInfoStreamType} * */ get streamType(): SyncInfoStreamType; set streamType(value: SyncInfoStreamType); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class LocalVideoStats { constructor(); constructor(); protected _instance: any; /** {en} * @brief TX bitrate in Kbps of the video stream with the highest resolution within the reference period * */ get sentKBitrate(): number; set sentKBitrate(value: number); /** {en} * @brief Sampling frame rate in fps of video capture during this reference period * */ get inputFrameRate(): number; set inputFrameRate(value: number); /** {en} * @brief TX frame rate in fps of the video stream with the highest resolution within the reference period * */ get sentFrameRate(): number; set sentFrameRate(value: number); /** {en} * @brief Encoder-output frame rate in fps of the video stream with the highest resolution within the reference period * */ get encoderOutputFrameRate(): number; set encoderOutputFrameRate(value: number); /** {en} * @brief Local-rendering frame rate in fps during this reference period * */ get rendererOutputFrameRate(): number; set rendererOutputFrameRate(value: number); /** {en} * @brief Reference period in ms.
* This field is used to set the reference period for the callback, which is 2 s by default. * */ get statsInterval(): number; set statsInterval(value: number); /** {en} * @brief Video packet loss rate. The video uplink packet loss rate in this reference period ranges from [0,1]. * */ get videoLossRate(): number; set videoLossRate(value: number); /** {en} * @brief Round-trip time in ms. * */ get rtt(): number; set rtt(value: number); /** {en} * @brief Video encoding bitrate in Kbps of the video stream with the highest resolution within the reference period. * */ get encodedBitrate(): number; set encodedBitrate(value: number); /** {en} * @brief Video encoding width in px of the video stream with the highest resolution within the reference period * */ get encodedFrameWidth(): number; set encodedFrameWidth(value: number); /** {en} * @brief Video encoding height in px of the video stream with the highest resolution within the reference period * */ get encodedFrameHeight(): number; set encodedFrameHeight(value: number); /** {en} * @brief The total number of the video stream with the highest resolution within the reference period sent in the reference period. * */ get encodedFrameCount(): number; set encodedFrameCount(value: number); /** {en} * @brief For the encoding type of the video, please refer to {@link VideoCodecType VideoCodecType}. * */ get codecType(): VideoCodecType; set codecType(value: VideoCodecType); /** {en} * @brief Whether the media stream belongs to the user is a screen stream. You can know whether the current statistics come from mainstream or screen stream. * */ get isScreen(): boolean; set isScreen(value: boolean); /** {en} * @brief Video uplink network jitter in ms. * */ get jitter(): number; set jitter(value: number); /** {en} * @brief The current state of the video noise reduction mode of local video (0: off/1: on). 
* */ get videoDenoiseMode(): VideoDenoiseMode; set videoDenoiseMode(value: VideoDenoiseMode); /** {en} * @platform android * @brief Presumably whether PSNR (peak signal-to-noise ratio) statistics are enabled for local video — not documented in source; confirm against the SDK reference. * */ get android_isPSNROn(): boolean; set android_isPSNROn(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum AudioRecordingState { AUDIO_RECORDING_STATE_ERROR = 0, AUDIO_RECORDING_STATE_PROCESSING = 1, AUDIO_RECORDING_STATE_SUCCESS = 2 } export declare enum MulDimSingScoringMode { MUL_DIM_SING_SCORING_MODE_NOTE = 0 } export declare enum AudioPlaybackDevice { AUDIO_PLAYBACK_DEVICE_HEADSET = 0, AUDIO_PLAYBACK_DEVICE_EARPIECE = 1, AUDIO_PLAYBACK_DEVICE_SPEAKERPHONE = 2, AUDIO_PLAYBACK_DEVICE_HEADSET_BLUETOOTH = 3, AUDIO_PLAYBACK_DEVICE_HEADSET_USB = 4 } export declare class VideoPreprocessorConfig { protected _instance: any; /** {en} * @brief To set the requested pixel format, see `VideoPixelFormat` in {@link VideoPixelFormat VideoPixelFormat}.
 * Only `I420`, `TEXTURE_2D` and `UNKNOWN` are supported. * */ get requiredPixelFormat(): VideoPixelFormat; set requiredPixelFormat(value: VideoPixelFormat); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class AudioFrame { constructor(buffer: ArrayBuffer, samples: number, sampleRate: AudioSampleRate, channel: AudioChannel); constructor(); constructor(); protected _instance: any; /** {en} * @brief PCM data * */ get buffer(): ArrayBuffer; set buffer(value: ArrayBuffer); /** {en} * @brief Total sampling number * */ get samples(): number; set samples(value: number); /** {en} * @brief Sample rate, see {@link AudioSampleRate AudioSampleRate}. * */ get sampleRate(): AudioSampleRate; set sampleRate(value: AudioSampleRate); /** {en} * @brief Audio channel, see {@link AudioChannel AudioChannel}. * */ get channel(): AudioChannel; set channel(value: AudioChannel); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class DownloadResult { constructor(musicId: string, fileType: DownloadFileType, filePath: string); constructor(); protected _instance: any; /** {en} * @brief Music ID. * */ get musicId(): string; set musicId(value: string); /** {en} * @brief Download file type. See {@link DownloadFileType DownloadFileType}. * */ get fileType(): DownloadFileType; set fileType(value: DownloadFileType); /** {en} * @brief Download file path. 
* */ get filePath(): string; set filePath(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum ProblemFeedbackOption { NONE = 0, OTHER_MESSAGE = 1, DISCONNECTED = 2, EAR_BACK_DELAY = 3, LOCAL_NOISE = 4, LOCAL_AUDIO_LAGGING = 5, LOCAL_NO_AUDIO = 6, LOCAL_AUDIO_STRENGTH = 7, LOCAL_ECHO = 8, LOCAL_VIDEO_FUZZY = 9, LOCAL_NOT_SYNC = 10, LOCAL_VIDEO_LAGGING = 11, LOCAL_NO_VIDEO = 12, REMOTE_NOISE = 13, REMOTE_AUDIO_LAGGING = 14, REMOTE_NO_AUDIO = 15, REMOTE_AUDIO_STRENGTH = 16, REMOTE_ECHO = 17, REMOTE_VIDEO_FUZZY = 18, REMOTE_NOT_SYNC = 19, REMOTE_VIDEO_LAGGING = 20, REMOTE_NO_VIDEO = 21 } export declare enum UserOnlineStatus { /** {en} * @platform android * @brief Counterpart user is offline
 * Counterpart user has called `logout`, or has not called `login` to log in. * */ USER_ONLINE_STATUS_OFFLINE = 0, /** {en} * @platform android * @brief The peer user is online
 * The peer user calls `login` to log in, and the connection status is normal. * */ USER_ONLINE_STATUS_ONLINE = 1, /** {en} * @platform android * @brief Unable to get the online state of the peer user
* Returned when a cascade error occurs and the online state of the peer user is abnormal * */ USER_ONLINE_STATUS_UNREACHABLE = 2, ByteRTCUserOnlineStatusOffline = 3, ByteRTCUserOnlineStatusOnline = 4, ByteRTCUserOnlineStatusUnreachable = 5 } export declare enum VideoDecoderConfig { VIDEO_DECODER_CONFIG_RAW = 0, VIDEO_DECODER_CONFIG_ENCODE = 1, VIDEO_DECODER_CONFIG_BOTH = 2 } export declare class PositionInfo { constructor(position: Position, orientation: HumanOrientation); constructor(); protected _instance: any; /** {en} * @brief 3D coordinate values of the user's position in the rectangular coordinate system for the spatial audio. You need to build your own rectangular coordinate system. Refer to {@link Position Position} for details. * */ get position(): Position; set position(value: Position); /** {en} * @brief Information on the three-dimensional orientation of the user in the rectangular coordinate system for the spatial audio. Any two of the 3D coordinate vectors of the user's position need to be perpendicular to each other. Refer to {@link HumanOrientation HumanOrientation} for details. * */ get orientation(): HumanOrientation; set orientation(value: HumanOrientation); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class LocalStreamStats { constructor(); constructor(); protected _instance: any; /** {en} * @brief For statistics on audio streams sent by local devices. See {@link LocalAudioStats LocalAudioStats}. * */ get audioStats(): LocalAudioStats; set audioStats(value: LocalAudioStats); /** {en} * @brief For statistics on video streams sent by local devices. See {@link LocalVideoStats LocalVideoStats}. * */ get videoStats(): LocalVideoStats; set videoStats(value: LocalVideoStats); /** {en} * @brief Whether the media stream belongs to the user is a screen stream. You can know whether the current statistics come from mainstream or screen stream. 
* */ get isScreen(): boolean; set isScreen(value: boolean); /** {en} * @brief For local media uplink network quality. See {@link NetworkQuality NetworkQuality}. * @deprecated since 3.45 and will be deleted in 3.51, use {@link onNetworkQuality onNetworkQuality} instead. * */ get txQuality(): NetworkQuality; set txQuality(value: NetworkQuality); /** {en} * @brief Local media downlink network quality. See {@link NetworkQuality NetworkQuality}. * @deprecated since 3.45 and will be deleted in 3.51, use {@link onNetworkQuality onNetworkQuality} instead. * */ get rxQuality(): NetworkQuality; set rxQuality(value: NetworkQuality); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class VoiceEqualizationConfig { constructor(frequency: BandFrequency, gain: number); constructor(); protected _instance: any; /** {en} * @brief Frequency band. See {@link VoiceEqualizationBandFrequency VoiceEqualizationBandFrequency}. * */ get frequency(): BandFrequency; set frequency(value: BandFrequency); /** {en} * @brief Gain of the frequency band in dB. The range is `[-15, 15]`. 
* */ get gain(): number; set gain(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoFrameType { RAW_MEMORY = 0, GL_TEXTURE = 1, ByteRTCVideoFrameTypePixelBuffer = 2 } export declare enum TranscoderContentControlType { HAS_AUDIO_AND_VIDEO = 0, HAS_AUDIO_ONLY = 1, HAS_VIDEO_ONLY = 2 } export declare enum ChannelProfile { CHANNEL_PROFILE_COMMUNICATION = 0, CHANNEL_PROFILE_GAME = 1, CHANNEL_PROFILE_CLOUD_GAME = 2, CHANNEL_PROFILE_LOW_LATENCY = 3, CHANNEL_PROFILE_CHAT = 4, CHANNEL_PROFILE_CHAT_ROOM = 5, CHANNEL_PROFILE_LW_TOGETHER = 6, CHANNEL_PROFILE_GAME_HD = 7, CHANNEL_PROFILE_CO_HOST = 8, CHANNEL_PROFILE_KTV = 9, CHANNEL_PROFILE_CHORUS = 10, CHANNEL_PROFILE_GAME_STREAMING = 11, CHANNEL_PROFILE_LAN_LIVE_STREAMING = 12, CHANNEL_PROFILE_MEETING_ROOM = 13, CHANNEL_PROFILE_CLASSROOM = 14, CHANNEL_PROFILE_INTERACTIVE_PODCAST = 15, /* NOTE(review): "PROFIEL" misspelling in the next two members is part of the published API surface; renaming would break callers. */ CHANNEL_PROFIEL_VR_CHAT = 16, CHANNEL_PROFIEL_MEETING = 17, CHANNEL_PROFILE_LIVE_BROADCASTING = 18 } export declare class MixedStreamLayoutConfig { protected _instance: any; /** {en} * @platform ios * @brief Background-color of the mixed stream in hexadecimal values such as #FFFFFF and #000000. The default value is #000000 (black). It's recommended to be set.
* With invalid or empty input, the configurations will be set as the default values. */ get ios_backgroundColor(): $p_i.NSString; set ios_backgroundColor(value: $p_i.NSString); /** {en} * @platform ios * @brief List of user's video layout information. The specific layout of each stream is detailed in {@link ByteRTCMixedStreamLayoutRegionConfig ByteRTCMixedStreamLayoutRegionConfig}. It's recommended to be set.
* With invalid or empty input, the configurations will be set as the default values. */ get ios_regions(): $p_i.NSArray<$p_i.ByteRTCMixedStreamLayoutRegionConfig>; set ios_regions(value: $p_i.NSArray<$p_i.ByteRTCMixedStreamLayoutRegionConfig>); /** {en} * @platform ios * @brief Info passed through from the user. */ get ios_userConfigExtraInfo(): $p_i.NSString; set ios_userConfigExtraInfo(value: $p_i.NSString); /** {en} * @platform ios * @valid since 3.57 * @brief Sets the URL of the background image for the canvas that renders the mixed stream, with a maximum length of 1024 bytes.
* You can input images in the following supported formats: JPG, JPEG, PNG.
* If the width and height of the background image are different from the screen dimensions, the background image will be scaled to fill the screen. */ get ios_backgroundImageUrl(): $p_i.NSString; set ios_backgroundImageUrl(value: $p_i.NSString); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @brief Sets the background color of the canvas. With invalid or empty input, the configurations will be set as the default values. You are recommended to set the value. * @param backgroundColor Background-color of the mixed stream in hexadecimal values such as #FFFFFF and #000000. The default value is #000000 (black). * */ android_setBackgroundColor(backgroundColor: string): this; /** {en} * @platform android * @detail api * @brief Gets the background color of the canvas. * */ android_getBackgroundColor(): string; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the URL of the background image for the canvas that renders the mixed stream * @param backgroundImageURL URL of the background image with a maximum length of 1024 bytes.
* You can input images in the following supported formats: JPG, JPEG, PNG.
* If the width and height of the background image are different from the screen dimensions, the background image will be scaled to fill the screen. * */ android_setBackgroundImageURL(backgroundImageURL: string): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the background image URL. * */ android_getBackgroundImageURL(): string; /** {en} * @platform android * @detail api * @brief Gets the user layout information list. * */ android_getRegions(): Array<$p_a.MixedStreamLayoutRegionConfig>; /** {en} * @platform android * @detail api * @brief Sets the data that you want to be passed through from the App and carried by the video stream. * @param userConfigExtraInfo The data you need, up to 4KB in length. * */ android_setUserConfigExtraInfo(userConfigExtraInfo: string): this; /** {en} * @platform android * @detail api * @brief Gets the data passed through from the App. * */ android_getUserConfigExtraInfo(): string; } export declare class IAudioFrame { protected _instance: any; /** {en} * @platform ios * @brief PCM data */ get ios_buffer(): $p_i.NSData; set ios_buffer(value: $p_i.NSData); /** {en} * @platform ios * @brief Total sampling number */ get ios_samples(): $p_i.int; set ios_samples(value: $p_i.int); /** {en} * @platform ios * @brief Sample rate. See {@link ByteRTCAudioSampleRate ByteRTCAudioSampleRate}. */ get ios_sampleRate(): $p_i.ByteRTCAudioSampleRate; set ios_sampleRate(value: $p_i.ByteRTCAudioSampleRate); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @author majun.lvhiei * @brief Get the audio timestamp. * @return Audio timestamp in microseconds. * */ android_timestamp_us(): number; /** {en} * @platform android * @detail api * @author majun.lvhiei * @brief Get the audio sample rate. See {@link AudioSampleRate AudioSampleRate}. 
* @return AudioSampleRate * */ android_sample_rate(): $p_a.AudioSampleRate; /** {en} * @detail api * @author majun.lvhiei * @brief Get the audio channel. See {@link AudioChannel AudioChannel}. * @return Audio channel * @note For dual channels, the audio frames are interleaved. * */ channel(): AudioChannel; /** {en} * @platform android * @detail api * @author majun.lvhiei * @brief Get the audio frame's temporary storage address * @return Audio frame's ByteBuffer * */ android_getDataBuffer(): $p_a.ByteBuffer; /** {en} * @platform android * @detail api * @author majun.lvhiei * @brief Get audio frame data size. * @return Audio frame data size in bytes. * */ android_data_size(): number; /** {en} * @platform android * @detail api * @brief Get audio frame type, only PCM is supported currently. See {@link AudioFrameType AudioFrameType}. * @return Audio frame type. * */ android_frame_type(): $p_a.AudioFrameType; /** {en} * @platform android * @detail api * @brief Release audio frame. * */ android_release(): void; } export declare class ForwardStreamStateInfo { constructor(roomId: string, state: ForwardStreamState, error: ForwardStreamError); constructor(); protected _instance: any; /** {en} * @brief ID of the room where the media stream aims to relay to
 * An empty string is for all rooms. * */ get roomId(): string; set roomId(value: string); /** {en} * @brief State of the room during relaying. Refer to {@link ForwardStreamState ForwardStreamState} for more information. * */ get state(): ForwardStreamState; set state(value: ForwardStreamState); /** {en} * @brief Error code from the room during relaying. Refer to {@link ForwardStreamError ForwardStreamError} for more information. * */ get error(): ForwardStreamError; set error(value: ForwardStreamError); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class RemoteVideoConfig { constructor(width: number, height: number, framerate: number); constructor(); protected _instance: any; /** {en} * @brief Width of the video frame in px * */ get width(): number; set width(value: number); /** {en} * @brief Height of the video frame in px * */ get height(): number; set height(value: number); /** {en} * @brief Expected maximum frame rate of the subscribed stream in fps. The default value is 0, which represents full-frame-rate, values greater than 0 are valid.
* If the frame rate of the stream published is higher than the value set by the subscriber, the subscriber receives the video of the frame rate set by this API; if the frame rate of the stream published is lower than the value set by the subscriber, the subscriber receives the video of the same frame rate of the video published.
* Only valid if the stream is coded with SVC technique. * */ get framerate(): number; set framerate(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class RTCEncodedVideoFrame { constructor(buffer: ByteBuffer, timestampUs: number, timestampDtsUs: number, width: number, height: number, videoCodecType: VideoCodecType, videoPictureType: VideoPictureType, videoRotation: VideoRotation); constructor(); protected _instance: any; /** {en} * @platform android * @brief The pointer to the video frame
* The buffer must be Direct. And the buffer size is its capacity. * */ get android_buffer(): $p_a.ByteBuffer; set android_buffer(value: $p_a.ByteBuffer); /** {en} * @brief Video capture timestamp in microseconds * */ get timestampUs(): number; set timestampUs(value: number); /** {en} * @brief Video encoding timestamp in microseconds * */ get timestampDtsUs(): number; set timestampDtsUs(value: number); /** {en} * @brief Width of the video in px * */ get width(): number; set width(value: number); /** {en} * @brief Height of the video in px * */ get height(): number; set height(value: number); /** {en} * @platform android * @brief Video encoding type. See {@link VideoCodecType VideoCodecType} * */ get android_videoCodecType(): $p_a.VideoCodecType; set android_videoCodecType(value: $p_a.VideoCodecType); /** {en} * @platform android * @brief Video compression picture type. See {@link VideoPictureType VideoPictureType} * */ get android_videoPictureType(): $p_a.VideoPictureType; set android_videoPictureType(value: $p_a.VideoPictureType); /** {en} * @platform android * @brief Video frame rotation angle, the default value is 0 degrees. See {@link VideoRotation VideoRotation} * */ get android_videoRotation(): $p_a.VideoRotation; set android_videoRotation(value: $p_a.VideoRotation); /** {en} * @platform ios * @brief Video encoding type. See {@link ByteRTCVideoCodecType ByteRTCVideoCodecType} */ get ios_codecType(): $p_i.ByteRTCVideoCodecType; set ios_codecType(value: $p_i.ByteRTCVideoCodecType); /** {en} * @platform ios * @brief Video compression picture type. See {@link ByteRTCVideoPictureType ByteRTCVideoPictureType} */ get ios_pictureType(): $p_i.ByteRTCVideoPictureType; set ios_pictureType(value: $p_i.ByteRTCVideoPictureType); /** {en} * @platform ios * @brief Video frame rotation angle. 
See {@link ByteRTCVideoRotation ByteRTCVideoRotation} */ get ios_rotation(): $p_i.ByteRTCVideoRotation; set ios_rotation(value: $p_i.ByteRTCVideoRotation); /** {en} * @platform ios * @brief The pointer to the video frame */ get ios_data(): $p_i.NSData; set ios_data(value: $p_i.NSData); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum AudioMixingState { AUDIO_MIXING_STATE_PRELOADED = 0, AUDIO_MIXING_STATE_PLAYING = 1, AUDIO_MIXING_STATE_PAUSED = 2, AUDIO_MIXING_STATE_STOPPED = 3, AUDIO_MIXING_STATE_FAILED = 4, AUDIO_MIXING_STATE_FINISHED = 5, AUDIO_MIXING_STATE_PCM_ENABLED = 6, AUDIO_MIXING_STATE_PCM_DISABLED = 7 } export declare enum DownloadLyricType { KRC = 0, LRC = 1 } export declare enum MixedStreamType { MIXED_STREAM_TYPE_BY_SERVER = 0, MIXED_STREAM_TYPE_BY_CLIENT = 1, ByteRTCMixedStreamByServer = 2, ByteRTCMixedStreamByClient = 3 } export declare class RemoteAudioStats { constructor(); constructor(); protected _instance: any; /** {en} * @brief Audio packet loss rate. The audio downlink packet loss rate in the reference period. The value range is [0,1]. * */ get audioLossRate(): number; set audioLossRate(value: number); /** {en} * @brief Receiving bit rate. The audio reception rate in the reference period in kbps. * */ get receivedKBitrate(): number; set receivedKBitrate(value: number); /** {en} * @brief Number of audio stalls. * */ get stallCount(): number; set stallCount(value: number); /** {en} * @brief Audio stall duration. Stall duration in the reference period in ms. * */ get stallDuration(): number; set stallDuration(value: number); /** {en} * @brief End-to-end latency at the user experience level. The delay from the start of encoding at the sending end to the start of decoding at the receiving end, in units of ms. * */ get e2eDelay(): number; set e2eDelay(value: number); /** {en} * @brief Play sample rate. Audio playback sample rate information within the reference period in Hz. 
* */ get playoutSampleRate(): number; set playoutSampleRate(value: number); /** {en} * @brief Statistical interval. The interval of this reference period is in ms. * */ get statsInterval(): number; set statsInterval(value: number); /** {en} * @brief Round-trip time for client side to server level data transfer in ms. * */ get rtt(): number; set rtt(value: number); /** {en} * @brief The sender-server level-the receiver-link data transmission round-trip time. The unit is ms. * */ get totalRtt(): number; set totalRtt(value: number); /** {en} * @brief The quality of the audio stream sent by the remote user. Value meaning reference {@link NetworkQuality NetworkQuality}. * */ get quality(): number; set quality(value: number); /** {en} * @brief The delay caused by the introduction of the jitter buffer mechanism. The unit is ms. * */ get jitterBufferDelay(): number; set jitterBufferDelay(value: number); /** {en} * @brief Number of audio channels. * */ get numChannels(): number; set numChannels(value: number); /** {en} * @brief Audio reception sampling rate. Remote audio sampling rate information received within the reference period, in Hz. * */ get receivedSampleRate(): number; set receivedSampleRate(value: number); /** {en} * @brief The accumulated duration of audio stalls after the remote user joins the room, as a percentage of the total effective duration of the audio. The effective duration of audio refers to the duration of audio other than stopping sending audio streams and disabling audio modules after remote users enter the room to publish audio streams. * */ get frozenRate(): number; set frozenRate(value: number); /** {en} * @brief Audio packet loss compensation (PLC) total number of sample points. * */ get concealedSamples(): number; set concealedSamples(value: number); /** {en} * @brief Audio packet loss compensation (PLC) cumulative times. * */ get concealmentEvent(): number; set concealmentEvent(value: number); /** {en} * @brief Audio decoding sample rate. 
Audio decoding sample rate information in the reference period in Hz. * */ get decSampleRate(): number; set decSampleRate(value: number); /** {en} * @brief Cumulative decoding time in seconds of the remote audio stream in this subscription. * */ get decDuration(): number; set decDuration(value: number); /** {en} * @brief Audio downlink network jitter in ms. * */ get jitter(): number; set jitter(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoOrientation { ADAPTIVE = 0, PORTRAIT = 1, LANDSCAPE = 2 } export declare class AudioRecordingConfig { constructor(); constructor(absoluteFileName: string, sampleRate: AudioSampleRate, channel: AudioChannel, frameSource: AudioFrameSource, quality: AudioQuality); constructor(); protected _instance: any; /** {en} * @brief Absolute path of the recorded file, file name included. The App must have the write and read permission of the path.
 * The file format is restricted to .aac and .wav. * */ get absoluteFileName(): string; set absoluteFileName(value: string); /** {en} * @brief Sample rate of the recording. See {@link AudioSampleRate AudioSampleRate}. * */ get sampleRate(): AudioSampleRate; set sampleRate(value: AudioSampleRate); /** {en} * @brief Number of audio channels. See {@link AudioChannel AudioChannel}.
* If number of audio channels of recording is different than that of audio capture, the behavior is:
* - If the number of capture is 1, and the number of recording is 2, the recorded audio is two-channel data after copying mono-channel data. * - If the number of capture is 2, and the number of recording is 1, the recorded audio is recorded by mixing the audio of the two channels. * */ get channel(): AudioChannel; set channel(value: AudioChannel); /** {en} * @brief The source of the recording. See {@link AudioFrameSource AudioFrameSource}.
* It is AUDIO_FRAME_SOURCE_MIXED(2) by default. * */ get frameSource(): AudioFrameSource; set frameSource(value: AudioFrameSource); /** {en} * @brief Recording quality. Only valid for .aac file. See {@link AudioQuality AudioQuality}.
* When the sample rate is 32kHz, the file (10min) size for different qualities are:
* - low: 1.2MB; * - [By Default] medium: 2MB; * - high: 3.75MB; * - ultra high: 7.5MB. * */ get quality(): AudioQuality; set quality(value: AudioQuality); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum AudioRoute { AUDIO_ROUTE_DEFAULT = 0, AUDIO_ROUTE_HEADSET = 1, AUDIO_ROUTE_EARPIECE = 2, AUDIO_ROUTE_SPEAKERPHONE = 3, AUDIO_ROUTE_HEADSET_BLUETOOTH = 4, AUDIO_ROUTE_HEADSET_USB = 5 } export declare class Position { constructor(x: number, y: number, z: number); constructor(); constructor(); protected _instance: any; /** {en} * @brief X-coordinate * */ get x(): number; set x(value: number); /** {en} * @brief Y-coordinate * */ get y(): number; set y(value: number); /** {en} * @brief Z-coordinate * */ get z(): number; set z(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum AudioPlayType { LOCAL = 0, REMOTE = 1, LOCAL_AND_REMOTE = 2 } export declare enum DownloadFileType { MUSIC = 0, KRC = 1, LRC = 2, MIDI = 3 } export declare enum MediaPlayerCustomSourceSeekWhence { SET = 0, CUR = 1, END = 2, SIZE = 3 } export declare class SingScoringConfig { constructor(sampleRate: AudioSampleRate, mode: MulDimSingScoringMode, lyricsFilepath: string, midiFilepath: string); constructor(); protected _instance: any; /** {en} * @brief Sampling rate. Only 44,100 Hz and 48,000 Hz are supported. * */ get sampleRate(): AudioSampleRate; set sampleRate(value: AudioSampleRate); /** {en} * @brief Scoring mode, see {@link MulDimSingScoringMode MulDimSingScoringMode}. * */ get mode(): MulDimSingScoringMode; set mode(value: MulDimSingScoringMode); /** {en} * @brief The file path of the lyrics. The scoring feature only supports KRC lyrics file. * */ get lyricsFilepath(): string; set lyricsFilepath(value: string); /** {en} * @brief The path of the midi file. 
* */ get midiFilepath(): string; set midiFilepath(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum LyricStatus { NONE = 0, KRC = 1, LRC = 2, KRC_AND_LRC = 3 } export declare enum AudioQuality { AUDIO_QUALITY_LOW = 0, AUDIO_QUALITY_MEDIUM = 1, AUDIO_QUALITY_HIGH = 2, AUDIO_QUALITY_ULTRA_HIGH = 3 } export declare enum MixedStreamLayoutRegionType { MIXED_STREAM_LAYOUT_REGION_TYPE_VIDEO_STREAM = 0, MIXED_STREAM_LAYOUT_REGION_TYPE_IMAGE = 1, ByteRTCMixedStreamLayoutRegionTypeImage = 2 } export declare enum AudioFrameCallbackMethod { AUDIO_FRAME_CALLBACK_RECORD = 0, AUDIO_FRAME_CALLBACK_PLAYBACK = 1, AUDIO_FRAME_CALLBACK_MIXED = 2, AUDIO_FRAME_CALLBACK_REMOTE_USER = 3 } export declare enum LogoutReason { /** {en} * @platform android * @brief Users choose to logout by calling the `logout` to log out or terminating the engine to log out. * */ LOGOUT_REASON_LOGOUT = 0, /** {en} * @platform android * @brief The current user is kicked out as another user logs in with the same UserId. * */ LOGOUT_REASON_DUPLICATE_LOGIN = 1, ByteRTCLogoutReasonLogout = 2, ByteRTCLogoutReasonDuplicateLogin = 3 } export declare enum VideoRenderMirrorType { ON = 0, OFF = 1 } export declare class LiveTranscoding { /** {en} * @platform android * @type api * @brief Gets the default configurations for live transcoding. * @return The default configurations for live transcoding. See {@link LiveTranscoding LiveTranscoding}. * */ static android_getDefualtLiveTranscode(): $p_a.LiveTranscoding; /** {en} * @platform ios * @brief Gets the default configurations for pushing streams to CDN. * @return Configurations for pushing streams to CDN. See {@link ByteRTCLiveTranscoding ByteRTCLiveTranscoding}. */ static ios_defaultTranscoding(): LiveTranscoding; protected _instance: any; /** {en} * @platform ios * @brief Stream mixing type. See {@link ByteRTCStreamMixingType ByteRTCStreamMixingType}.
* This parameter cannot be updated while pushing stream to the CDN. */ get ios_expectedMixingType(): $p_i.ByteRTCStreamMixingType; set ios_expectedMixingType(value: $p_i.ByteRTCStreamMixingType); /** {en} * @platform ios * @brief The overall video layout of the mixed stream.
* After starting to push streams to CDN, you can set the layout of each video stream, the background information of the mixed stream, etc. See {@link ByteRTCVideoCompositingLayout ByteRTCVideoCompositingLayout} for details. */ get ios_layout(): $p_i.ByteRTCVideoCompositingLayout; set ios_layout(value: $p_i.ByteRTCVideoCompositingLayout); /** {en} * @platform ios * @brief Video transcoding related configurations. See {@link ByteRTCTranscodingVideoConfig ByteRTCTranscodingVideoConfig}. */ get ios_video(): $p_i.ByteRTCTranscodingVideoConfig; set ios_video(value: $p_i.ByteRTCTranscodingVideoConfig); /** {en} * @platform ios * @brief Audio transcoding related configurations. See {@link ByteRTCTranscodingAudioConfig ByteRTCTranscodingAudioConfig} for data types. */ get ios_audio(): $p_i.ByteRTCTranscodingAudioConfig; set ios_audio(value: $p_i.ByteRTCTranscodingAudioConfig); /** {en} * @platform ios * @brief client mix param. See {@link ByteRTCTranscodingClientMixParam ByteRTCTranscodingClientMixParam} for detail. */ get ios_clientMixParam(): $p_i.ByteRTCTranscodingClientMixParam; set ios_clientMixParam(value: $p_i.ByteRTCTranscodingClientMixParam); /** {en} * @platform ios * @hidden(macOS) * @brief The spatial audio config when pushing to CDN. See {@link ByteRTCTranscodingSpatialConfig ByteRTCTranscodingSpatialConfig} for detail. */ get ios_spatialConfig(): $p_i.ByteRTCTranscodingSpatialConfig; set ios_spatialConfig(value: $p_i.ByteRTCTranscodingSpatialConfig); /** {en} * @platform ios * @brief The URL for live transcoding. Only supports live transcoding via RTMP. The URL should match the regular expression `/^rtmps?:\\/\\//`.
* This parameter cannot be updated while pushing stream to the CDN. */ get ios_url(): $p_i.NSString; set ios_url(value: $p_i.NSString); /** {en} * @platform ios * @brief The room ID for live transcoding. The sum length of `roomId` and `userId` should not exceed 126 bytes.
* This parameter cannot be updated while pushing stream to the CDN. */ get ios_roomId(): $p_i.NSString; set ios_roomId(value: $p_i.NSString); /** {en} * @platform ios * @brief The user ID for live transcoding. The sum length of `roomId` and `userId` should not exceed 126 bytes.
 * This parameter cannot be updated while pushing stream to the CDN. */ get ios_userId(): $p_i.NSString; set ios_userId(value: $p_i.NSString); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @brief Reads the B-frame field of the video transcoding config — presumably whether B-frame encoding is enabled; not documented in source, confirm against the SDK reference. * */ android_getFieldVideoConfigBFrame(): boolean; /** {en} * @platform android * @detail api * @brief Sets the URL for live transcoding. Only supports live transcoding via RTMP. The URL should match the regular expression `/^rtmps?:\\/\\//`.
* This parameter cannot be updated while pushing stream to the CDN. * @param url the URL for live transcoding. * */ android_setUrl(url: string): void; /** {en} * @platform android * @detail api * @brief Sets the room ID for live transcoding. The sum length of `roomId` and `userId` should not exceed 126 bytes.
 * This parameter cannot be updated while pushing stream to the CDN. * @param roomId Room ID. * */ android_setRoomId(roomId: string): void; /** {en} * @platform android * @detail api * @brief Sets the user ID for live transcoding. The sum length of `roomId` and `userId` should not exceed 126 bytes.
* This parameter cannot be updated while pushing stream to the CDN. * @param userId User ID。 * */ android_setUserId(userId: string): void; /** {en} * @platform android * @detail api * @brief Sets the video transcoding configurations. * @param video The configurations to be set. See {@link VideoConfig VideoConfig}. * */ android_setVideo(video: $p_a.VideoConfig): void; /** {en} * @platform android * @detail api * @brief Sets audio transcoding configurations. * @param audio The audio configurations to be set. See {@link AudioConfig AudioConfig}. * */ android_setAudio(audio: $p_a.AudioConfig): void; /** {en} * @platform android * @detail api * @brief Sets the spatial audio configuration when pushing streams to CDN. * @param spatialConfig The configurations to be set. See {@link SpatialConfig SpatialConfig}. * */ android_setSpatialConfig(spatialConfig: $p_a.SpatialConfig): void; /** {en} * @platform android * @detail api * @brief Sets client mix configurations. * @param param The configurations to be set. See {@link ClientMixParam ClientMixParam}. * */ android_setClientMixParam(param: $p_a.ClientMixParam): void; /** {en} * @platform android * @detail api * @brief Sets the overall video layout of the mixed stream. * @param layout The layout to be set. See {@link Layout Layout}. * */ android_setLayout(layout: $p_a.Layout): void; /** {en} * @platform android * @detail api * @region Push to CDN * @brief Gets the stream mixing type. * @return Stream mixing type. See {@link ByteRTCStreamMixingType ByteRTCStreamMixingType}. * */ android_getMixType(): $p_a.ByteRTCStreamMixingType; /** {en} * @platform android * @detail api * @region Push to CDN * @brief Sets the stream mixing type. * @param mixType Stream mixing type. See {@link ByteRTCStreamMixingType ByteRTCStreamMixingType}. * @note This parameter cannot be updated while pushing stream to the CDN. 
* */ android_setMixType(mixType: $p_a.ByteRTCStreamMixingType): void; /** {en} * @platform android * @detail api * @brief Gets the CDN url where the mixed stream is pushed * @return The CDN url * */ android_getUrl(): string; /** {en} * @platform android * @detail api * @brief Gets the video transcoding configurations. * @return Current video transcoding configurations. See {@link VideoConfig VideoConfig}. * */ android_getVideo(): $p_a.VideoConfig; /** {en} * @platform android * @detail api * @brief Gets the audio transcoding configurations. * @return Current audio transcoding configurations. See {@link AudioConfig AudioConfig}. * */ android_getAudio(): $p_a.AudioConfig; /** {en} * @platform android * @detail api * @brief Gets the spatial audio configurations. * @return Current spatial audio configurations. See {@link SpatialConfig SpatialConfig}. * */ android_getSpatialConfig(): $p_a.SpatialConfig; /** {en} * @platform android * @detail api * @brief Gets the client mix configurations. * @return Current client mix configurations. See {@link ClientMixParam ClientMixParam}. * */ android_getClientMixParam(): $p_a.ClientMixParam; /** {en} * @platform android * @detail api * @brief Gets the overall video layout of the mixed stream. * @return Current video layout. See {@link Layout Layout}. 
* */ android_getLayout(): $p_a.Layout; } export declare enum VideoDenoiseMode { VIDEO_DENOISE_MODE_OFF = 0, VIDEO_DENOISE_MODE_AUTO = 1 } export declare enum MixedStreamSEIContentMode { MIXED_STREAM_SEI_CONTENT_MODE_DEFAULT = 0, MIXED_STREAM_SEI_CONTENT_MODE_ENABLE_VOLUME_INDICATION = 1 } export declare enum AudioMixingError { AUDIO_MIXING_ERROR_OK = 0, AUDIO_MIXING_ERROR_PRELOAD_FAILED = 1, AUDIO_MIXING_ERROR_START_FAILED = 2, AUDIO_MIXING_ERROR_SET_POSITION_FAILED = 3, AUDIO_MIXING_ERROR_LOAD_CONFLICT = 4, AUDIO_MIXING_ERROR_ID_NOT_FOUND = 5, AUDIO_MIXING_ERROR_INVALID_VOLUME = 6, AUDIO_MIXING_ERROR_ID_TYPE_NOT_MATCH = 7, AUDIO_MIXING_ERROR_ID_TYPE_INVALID_PITCH = 8, AUDIO_MIXING_ERROR_INVALID_AUDIO_TRACK = 9, AUDIO_MIXING_ERROR_IS_STARTING = 10, AUDIO_MIXING_ERROR_INVALID_PLAYBACK_SPEED = 11, ByteRTCAudioMixingErrorIdNotFound = 12, ByteRTCAudioMixingErrorInValidVolume = 13, ByteRTCAudioMixingErrorIdTypeNotMatch = 14, ByteRTCAudioMixingErrorInValidPitch = 15, ByteRTCAudioMixingErrorInValidAudioTrack = 16, ByteRTCAudioMixingErrorIsStarting = 17, ByteRTCAudioMixingErrorInValidPlaybackSpeed = 18 } export declare enum AACProfile { AAC_PROFILE_LC = 0, AAC_PROFILE_HEV1 = 1, AAC_PROFILE_HEV2 = 2 } export declare enum AudioSelectionPriority { AUDIO_SELECTION_PRIORITY_NORMAL = 0, AUDIO_SELECTION_PRIORITY_HIGH = 1 } export declare enum DataMessageSourceType { DATA_MESSAGE_SOURCE_TYPE_DEFAULT = 0, DATA_MESSAGE_SOURCE_TYPE_SYSTEM = 1 } export declare class FaceDetectionResult { protected _instance: any; /** {en} * @brief Face Detection Result
* - 0: Success * - !0: Failure, see [Error Code Table](https://docs.byteplus.com/en/effects/docs/error-code-table) * */ get detectResult(): number; set detectResult(value: number); /** {en} * @brief Width of the original image (px) * */ get imageWidth(): number; set imageWidth(value: number); /** {en} * @brief Height of the original image (px) * */ get imageHeight(): number; set imageHeight(value: number); /** {en} * @brief The face recognition rectangles. The length of the array is the same as the number of detected faces. See {@link Rectangle Rectangle}. * */ get faces(): Array; set faces(value: Array); /** {en} * @platform android * @brief The time stamp of the video frame using face detection. * */ get android_frameTimestampUs(): $p_a.long; set android_frameTimestampUs(value: $p_a.long); /** {en} * @platform ios * @brief The time stamp of the video frame using face detection. */ get ios_frameTimestamp(): $p_i.CMTime; set ios_frameTimestamp(value: $p_i.CMTime); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class VideoFrame { constructor(); constructor(); protected _instance: any; /** {en} * @platform ios * @brief Video frame format, see {@link ByteRTCVideoPixelFormat ByteRTCVideoPixelFormat} */ get ios_format(): $p_i.int; set ios_format(value: $p_i.int); /** {en} * @platform ios * @brief Gets video content type, see {@link ByteRTCVideoContentType ByteRTCVideoContentType}. 
*/ get ios_contentType(): $p_i.ByteRTCVideoContentType; set ios_contentType(value: $p_i.ByteRTCVideoContentType); /** {en} * @platform ios * @brief The timestamp information of the current frame */ get ios_time(): $p_i.CMTime; set ios_time(value: $p_i.CMTime); /** {en} * @platform ios * @brief Number of bytes per pixel line */ get ios_strideInPixels(): $p_i.int; set ios_strideInPixels(value: $p_i.int); /** {en} * @platform ios * @brief Video frame width */ get ios_width(): $p_i.int; set ios_width(value: $p_i.int); /** {en} * @platform ios * @brief Video frame height */ get ios_height(): $p_i.int; set ios_height(value: $p_i.int); /** {en} * @platform ios * @brief Data of CVPixelBufferRef type, valid when format is kPixelFormatCVPixelBuffer */ get ios_textureBuf(): $p_i.CVPixelBufferRef; set ios_textureBuf(value: $p_i.CVPixelBufferRef); /** {en} * @platform ios * @brief The first address in video frame memory */ get ios_dataBuf(): $p_i.NSData; set ios_dataBuf(value: $p_i.NSData); /** {en} * @platform ios * @brief Video frame rotation angle, the value must be 0, 90, 180, or 270 */ get ios_rotation(): $p_i.ByteRTCVideoRotation; set ios_rotation(value: $p_i.ByteRTCVideoRotation); /** {en} * @platform ios * @brief Video frame colorspace */ get ios_colorSpace(): $p_i.ByteRTCColorSpace; set ios_colorSpace(value: $p_i.ByteRTCColorSpace); /** {en} * @platform ios * @brief the cameraId of the video frame, see {@link ByteRTCCameraID ByteRTCCameraID} */ get ios_cameraId(): $p_i.ByteRTCCameraID; set ios_cameraId(value: $p_i.ByteRTCCameraID); /** {en} * @platform ios * @brief Data attached to the video frame */ get ios_extendedData(): $p_i.NSData; set ios_extendedData(value: $p_i.NSData); /** {en} * @platform ios * @brief Video frame roi data */ get ios_supplementaryInfo(): $p_i.NSData; set ios_supplementaryInfo(value: $p_i.NSData); /** {en} * @platform ios * @brief Video frame color plane number.
* YUV formats are categorized into planar format and packed format.
* In a planar format, the Y, U, and V components are stored separately as three planes, while in a packed format, the Y, U, and V components are stored in a single array.
* When textureBuf has a value, the value is meaningless */ get ios_numberOfPlanes(): $p_i.int; set ios_numberOfPlanes(value: $p_i.int); /** {en} * @platform ios * @brief Gets plane data pointer, the length of array equals numberOfPlanes, and the type of element is NSData* * @param plane_index Plane data index; When textureBuf has a value, the value is meaningless */ get ios_planeDatas(): $p_i.NSArray; set ios_planeDatas(value: $p_i.NSArray); /** {en} * @platform ios * @brief Gets the length of the data line in the plane * @param plane_index Plane data index, the length of array equals numberOfPlanes, and the type of element is NSNumber*; When textureBuf has a value, the value is meaningless */ get ios_planeStrides(): $p_i.NSArray; set ios_planeStrides(value: $p_i.NSArray); /** {en} * @platform ios * @deprecated since 3.45 and will be deleted in 3.51, use strideInPixels instead. */ get ios_stride(): $p_i.int; set ios_stride(value: $p_i.int); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @brief Sets the callback of the video frame release. The callback will be triggered when the reference count of the video frame turns to 0. * @param releaseCallback Callback of the video frame release * */ android_setReleaseCallback(releaseCallback: $p_a.Runnable): void; /** {en} * @platform android * @detail api * @brief Checks if the current video frame has a releaseCallback set * @return * - true:Yes * - false:No * */ android_hasReleaseCallback(): boolean; /** {en} * @platform android * @detail api * @brief Gets video frame type, see {@link VideoFrameType VideoFrameType} * */ android_getFrameType(): $p_a.VideoFrameType; /** {en} * @platform android * @detail api * @brief Gets video frame format, see {@link VideoPixelFormat VideoPixelFormat} * */ android_getPixelFormat(): $p_a.VideoPixelFormat; /** {en} * @platform android * @detail api * @brief Gets video content type. 
* @return See {@link VideoContentType VideoContentType}. * */ android_getContentType(): $p_a.VideoContentType; /** {en} * @platform android * @detail api * @brief Gets video frame timestamp in microseconds * */ android_getTimeStampUs(): number; /** {en} * @platform android * @detail api * @brief Gets video frame width in px * */ android_getWidth(): number; /** {en} * @platform android * @detail api * @brief Gets video frame height in px * */ android_getHeight(): number; /** {en} * @platform android * @detail api * @brief Gets video frame rotation angle * */ android_getRotation(): $p_a.VideoRotation; /** {en} * @platform android * @detail api * @brief Gets the color space of video frame, see {@link ColorSpace ColorSpace} * */ android_getColorSpace(): $p_a.ColorSpace; /** {en} * @platform android * @detail api * @brief Get cameraId of the frame, see {@link CameraId CameraId} * */ android_getCameraId(): $p_a.CameraId; /** {en} * @platform android * @detail api * @brief Gets Video frame color plane number * @note YUV formats are categorized into planar format and packed format.
* In a planar format, the Y, U, and V components are stored separately as three planes, while in a packed format, the Y, U, and V components are stored in a single array. * */ android_getNumberOfPlanes(): number; /** {en} * @platform android * @detail api * @brief Gets plane data pointer * @param planeIndex Plane data index * */ android_getPlaneData(planeIndex: number): $p_a.ByteBuffer; /** {en} * @platform android * @detail api * @brief Gets the length of the data line in the specified plane * @param planeIndex Plane data index * */ android_getPlaneStride(planeIndex: number): number; /** {en} * @platform android * @detail api * @brief Gets SEI data * */ android_getExternalDataInfo(): $p_a.ByteBuffer; /** {en} * @platform android * @detail api * @brief Gets supplementary data * */ android_getSupplementaryInfo(): $p_a.ByteBuffer; /** {en} * @platform android * @detail api * @brief Gets texture ID * */ android_getTextureID(): number; android_getGLSync(): number; /** {en} * @platform android * @detail api * @brief Gets texture matrix * */ android_getTextureMatrix(): Array<$p_a.float>; /** {en} * @platform android * @detail api * @brief Gets the EGLContext used by the texture * */ android_getEGLContext(): $p_a.EGLContext; /** {en} * @platform android * @detail api * @brief Count once for the number of the current video frame reference * */ android_retain(): void; /** {en} * @platform android * @detail api * @brief Decrease one count for the number of the current video frame reference * */ android_release(): void; } export declare class RemoteStreamKey { constructor(roomId: string, userId: string, streamIndex: StreamIndex); constructor(); protected _instance: any; /** {en} * @brief The ID of the room from which the stream was published. * */ get roomId(): string; set roomId(value: string); /** {en} * @brief The ID of the user who published the stream. 
* */ get userId(): string; set userId(value: string); /** {en} * @brief Stream type, see [StreamIndex](70083#StreamIndex-2). * */ get streamIndex(): StreamIndex; set streamIndex(value: StreamIndex); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @brief Gets the room ID * */ android_getRoomId(): string; /** {en} * @platform android * @brief Gets the user ID * */ android_getUserId(): string; /** {en} * @platform android * @brief Gets stream type, see [StreamIndex](#StreamIndex-2) * */ android_getStreamIndex(): $p_a.StreamIndex; /** {en} * @platform android * @brief Check the class has null property * */ android_hasNullProperty(): boolean; } export declare enum VoiceReverbType { VOICE_REVERB_ORIGINAL = 0, VOICE_REVERB_ECHO = 1, VOICE_REVERB_CONCERT = 2, VOICE_REVERB_ETHEREAL = 3, VOICE_REVERB_KTV = 4, VOICE_REVERB_STUDIO = 5, VOICE_REVERB_VIRTUAL_STEREO = 6, VOICE_REVERB_SPACIOUS = 7, VOICE_REVERB_3D = 8 } export declare enum MediaPlayerCustomSourceMode { PUSH = 0, PULL = 1 } export declare enum KTVPlayerErrorCode { OK = 0, FILE_NOT_EXIST = 1, FILE_ERROR = 2, NOT_JOIN_ROOM = 3, PARAM = 4, START_ERROR = 5, MIX_ID_ERROR = 6, POSITION_ERROR = 7, AUDIO_VOLUME_ERROR = 8, TYPE_ERROR = 9, PITCH_ERROR = 10, AUDIO_TRACK_ERROR = 11, STARTING_ERROR = 12, ByteRTCKTVPlayerErrorCodeOK = 13, ByteRTCKTVPlayerErrorCodeFileNotExist = 14, ByteRTCKTVPlayerErrorCodeFileError = 15, ByteRTCKTVPlayerErrorCodeNotJoinRoom = 16, ByteRTCKTVPlayerErrorCodeParam = 17, ByteRTCKTVPlayerErrorCodeStartError = 18, ByteRTCKTVPlayerErrorCodeMixIdError = 19, ByteRTCKTVPlayerErrorCodePositionError = 20, ByteRTCKTVPlayerErrorCodeAudioVolumeError = 21, ByteRTCKTVPlayerErrorCodeTypeError = 22, ByteRTCKTVPlayerErrorCodePitchError = 23, ByteRTCKTVPlayerErrorCodeAudioTrackError = 24, ByteRTCKTVPlayerErrorCodeStartingError = 25 } export declare enum AudioChannel { AUDIO_CHANNEL_AUTO = 0, AUDIO_CHANNEL_MONO = 1, AUDIO_CHANNEL_STEREO = 2 } 
export declare enum RemoteVideoStateChangeReason { REMOTE_VIDEO_STATE_CHANGE_REASON_NETWORK_CONGESTION = 0, REMOTE_VIDEO_STATE_CHANGE_REASON_NETWORK_RECOVERY = 1, REMOTE_VIDEO_STATE_CHANGE_REASON_LOCAL_MUTED = 2, REMOTE_VIDEO_STATE_CHANGE_REASON_LOCAL_UNMUTED = 3, REMOTE_VIDEO_STATE_CHANGE_REASON_REMOTE_MUTED = 4, REMOTE_VIDEO_STATE_CHANGE_REASON_REMOTE_UNMUTED = 5, REMOTE_VIDEO_STATE_CHANGE_REASON_REMOTE_OFFLINE = 6, REMOTE_VIDEO_STATE_CHANGE_REASON_INTERNAL = 7, ByteRTCRemoteVideoStateChangeReasonInternal = 8 } export declare enum PerformanceAlarmReason { BANDWIDTH_RESUMED = 0, BANDWIDTH_FALLBACKED = 1, PERFORMANCE_FALLBACKED = 2, PERFORMANCE_RESUMED = 3, ByteRTCPerformanceAlarmReasonBandwidthFallback = 4, ByteRTCPerformanceAlarmReasonFallback = 5, ByteRTCPerformanceAlarmReasonResumed = 6 } export declare enum VideoDeviceType { VIDEO_DEVICE_TYPE_UNKNOWN = 0, VIDEO_DEVICE_TYPE_RENDER_DEVICE = 1, VIDEO_DEVICE_TYPE_CAPTURE_DEVICE = 2, VIDEO_DEVICE_TYPE_SCREEN_CAPTURE_DEVICE = 3 } export declare class HotMusicInfo { constructor(hotType: MusicHotType, hotName: string, musicInfos: Array); constructor(); protected _instance: any; /** {en} * @brief Hot music type. See {@link MusicHotType MusicHotType}. Multiple hot music types can be combined by the bitwise-or operator. * */ get hotType(): MusicHotType; set hotType(value: MusicHotType); /** {en} * @brief Hot list name. * */ get hotName(): string; set hotName(value: string); /** {en} * @platform android * @brief Music information. See {@link MusicInfo MusicInfo}. * */ get android_musicInfos(): $p_a.MusicInfo[]; set android_musicInfos(value: $p_a.MusicInfo[]); /** {en} * @platform ios * @brief Music information. See {@link ByteRTCMusicInfo ByteRTCMusicInfo}. 
*/ get ios_musics(): $p_i.NSArray<$p_i.ByteRTCMusicInfo>; set ios_musics(value: $p_i.NSArray<$p_i.ByteRTCMusicInfo>); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class MixedStreamServerControlConfig { protected _instance: any; /** {en} * @platform ios * @valid since 3.56 * @brief Sets whether to enable the function of separately sending sound indication SEI:
* - True: Enable. * - False: Disable. Default setting. * After setting this parameter to "true", you can choose whether to send sound indication SEI only through `ByteRTCMixedStreamServerControlConfig.seiContentMode`. */ get ios_enableVolumeIndication(): $p_i.BOOL; set ios_enableVolumeIndication(value: $p_i.BOOL); /** {en} * @platform ios * @valid since 3.56 * @brief Sound indication interval in s. The range is [0.3,+∞). The default value is 2.
* If a decimal with two or more decimal places is passed in, it will be rounded to the value of the first decimal place. For example, if you pass in 0.36, it will be automatically adjusted to 0.4. */ get ios_volumeIndicationInterval(): $p_i.CGFloat; set ios_volumeIndicationInterval(value: $p_i.CGFloat); /** {en} * @platform ios * @valid since 3.56 * @brief Effective volume within the range of [0, 255]. The default value is 0.
* If the value is set beyond the range, it will be automatically adjusted to the default value 0. */ get ios_talkVolume(): $p_i.NSInteger; set ios_talkVolume(value: $p_i.NSInteger); /** {en} * @platform ios * @valid since 3.56 * @brief Sets whether volume value is contained in the sound indication SEI:
* - True: Yes. * - False: No. Default setting. */ get ios_isAddVolumeValue(): $p_i.BOOL; set ios_isAddVolumeValue(value: $p_i.BOOL); /** {en} * @platform ios * @valid since 3.56 * @brief Sets SEI content. See {@link ByteRTCMixedStreamSEIContentMode ByteRTCMixedStreamSEIContentMode}. */ get ios_seiContentMode(): $p_i.ByteRTCMixedStreamSEIContentMode; set ios_seiContentMode(value: $p_i.ByteRTCMixedStreamSEIContentMode); /** {en} * @platform ios * @valid since 3.56 * @brief SEI payload type.
 * The default value is `100`, and the supported values are `5` and `100`.
* During the process of pushing streams to CDN, you cannot change the parameter. */ get ios_seiPayloadType(): $p_i.NSInteger; set ios_seiPayloadType(value: $p_i.NSInteger); /** {en} * @platform ios * @valid since 3.56 * @brief SEI payload UUID. * @note When PayloadType is `5`, you must set PayloadUUID, or you will receive a callback indicating parameter error. The error code is 1091.
* When PayloadType is not `5`, it is not required to set PayloadUUID. If filled, it will be ignored by the backend.
 * The length of PayloadUUID should be 32 characters (a UUID in hexadecimal form with the hyphens removed), or you will receive an error code of 1091.
* Each character of the parameter should be within the range of [0, 9] [a, f] [A, F].
* The PayloadUUID should not contain `-`. If the automatically generated UUID contains `-`, you should delete it.
* During the process of pushing streams to CDN, you cannot change the parameter. */ get ios_seiPayloadUUID(): $p_i.NSString; set ios_seiPayloadUUID(value: $p_i.NSString); /** {en} * @platform ios * @valid since 3.57 * @brief Sets the type of media stream pushed to CDN after being mixed. {@link ByteRTCMixedStreamMediaType ByteRTCMixedStreamMediaType}.
 * The default value is 0, which means pushing both audio and video. */ get ios_mediaType(): $p_i.ByteRTCMixedStreamMediaType; set ios_mediaType(value: $p_i.ByteRTCMixedStreamMediaType); /** {en} * @platform ios * @valid since 3.57 * @brief Sets whether to initiate a stream mixing task in the absence of any users publishing streams. See {@link ByteRTCMixedStreamPushMode ByteRTCMixedStreamPushMode}.
* Once the stream mixing task is initiated, this parameter can not be updated any more. */ get ios_pushStreamMode(): $p_i.ByteRTCMixedStreamPushMode; set ios_pushStreamMode(value: $p_i.ByteRTCMixedStreamPushMode); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @valid since 3.56 * @detail api * @author liujingchao * @brief Set SEI payload type. * @param seiPayloadType The default value is `100`, and the value supported is `5` and `100`. * @return MixedStreamServerControlConfig, See MixedStreamServerControlConfig{@link# MixedStreamServerControlConfig}. * @note During the process of pushing streams to CDN, you cannot change the parameter. * */ android_setSeiPayloadType(seiPayloadType: number): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @author liujingchao * @brief Get SEI payload type. * */ android_getSeiPayloadType(): number; /** {en} * @platform android * @valid since 3.56 * @detail api * @author liujingchao * @brief Set SEI Payload UUID. * @param seiPayloadUuid The length of PayloadUUID should be 32 bits, or you will receive an error code of 1091. Each character of the parameter should be within the range of [0, 9] [a, f] [A, F].
* The PayloadUUID should not contain `-`. If the automatically generated UUID contains `-`, you should delete it. * @return MixedStreamServerControlConfig, See MixedStreamServerControlConfig{@link# MixedStreamServerControlConfig}. * @note When PayloadType is `5`, you must set PayloadUUID, or you will receive a callback indicating parameter error. The error code is 1091.
* When PayloadType is not `5`, it is not required to set PayloadUUID. If filled, it will be ignored by the backend.
* During the process of pushing streams to CDN, you cannot change the parameter. * */ android_setSeiPayloadUuid(seiPayloadUuid: string): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @author liujingchao * @brief Get SEI Payload UUID. * */ android_getSeiPayloadUuid(): string; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Sets the sound indication interval. * @param volumeIndicationInterval Sound indication interval in s. The range is [0.3,+∞). The default value is 2.
* If a decimal with two or more decimal places is passed in, it will be rounded to the value of the first decimal place. For example, if you pass in 0.36, it will be automatically adjusted to 0.4. * */ android_setVolumeIndicationInterval(volumeIndicationInterval: number): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Gets the sound indication interval. * */ android_getVolumeIndicationInterval(): number; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Sets whether to enable the function of separately sending sound indication SEI. * @param enableVolumeIndication Whether to enable the function of separately sending sound indication SEI:
* - true: Enable. * - false: Disable. Default setting. * @note After setting this method to "true", you can choose whether to send sound indication SEI only through {@link setSeiContentMode setSeiContentMode}. * */ android_setEnableVolumeIndication(enableVolumeIndication: boolean): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Gets the setting on whether to enable the function of separately sending sound indication SEI. * */ android_getEnableVolumeIndication(): boolean; /** {en} * @platform android * @valid since 3.56 * @brief Sets whether volume value is contained in the sound indication SEI. * @param addVolumeValue Whether volume value is contained in the sound indication SEI:
* - true: Yes. * - false: No. Default setting. * */ android_setIsAddVolumeValue(addVolumeValue: boolean): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Gets the setting on whether volume value is contained in the sound indication SEI. * */ android_getIsAddVolumeValue(): boolean; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Sets SEI content. * @param seiContentMode SEI content. See {@link MixedStreamSEIContentMode MixedStreamSEIContentMode}. * */ android_setSeiContentMode(seiContentMode: $p_a.MixedStreamSEIContentMode): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Gets the setting on SEI content. * */ android_getSeiContentMode(): $p_a.MixedStreamSEIContentMode; /** {en} * @platform android * @detail api * @brief Sets the effective volume. * @param talkVolume Effective volume. The range is [0, 255]. The default value is 0.
* If the value is set beyond the range, it will be automatically adjusted to the default value 0. * */ android_setTalkVolume(talkVolume: number): this; /** {en} * @platform android * @valid since 3.56 * @detail api * @brief Gets the value of the effective volume. * */ android_getTalkVolume(): number; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets whether to initiate a stream mixing task in the absence of any users publishing streams. * @param pushStreamMode The initiation mode. See {@link MixedStreamPushMode MixedStreamPushMode}.
* Once the stream mixing task is initiated, this parameter can not be updated any more. * */ android_setPushStreamMode(pushStreamMode: $p_a.MixedStreamPushMode): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the setting on stream mixing initiation mode. * */ android_getPushStreamMode(): $p_a.MixedStreamPushMode; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Sets the type of media stream pushed to CDN after being mixed. * @param mediaType See {@link MixedStreamMediaType MixedStreamMediaType}.
* The default value is 0,which means pushing both audio and video. Pushing video only is not supported currently. * */ android_setMediaType(mediaType: $p_a.MixedStreamMediaType): this; /** {en} * @platform android * @valid since 3.57 * @detail api * @brief Gets the setting on the type of media stream pushed to CDN after being mixed. * */ android_getMediaType(): $p_a.MixedStreamMediaType; } export declare enum AudioMixingDualMonoMode { AUDIO_MIXING_DUAL_MONO_MODE_AUTO = 0, AUDIO_MIXING_DUAL_MONO_MODE_L = 1, AUDIO_MIXING_DUAL_MONO_MODE_R = 2, AUDIO_MIXING_DUAL_MONO_MODE_MIX = 3 } export declare class LocalAudioStats { constructor(); constructor(); protected _instance: any; /** {en} * @brief Audio packet loss rate. The audio uplink packet loss rate in this reference period. The value range is [0,1]. * */ get audioLossRate(): number; set audioLossRate(value: number); /** {en} * @platform android * @brief Send rate. The audio transmission rate in the reference period is kbps. * */ get android_sendKBitrate(): $p_a.float; set android_sendKBitrate(value: $p_a.float); /** {en} * @brief Acquisition sampling rate. Audio sampling rate information collected in the reference period, in units of Hz. * */ get recordSampleRate(): number; set recordSampleRate(value: number); /** {en} * @brief Statistical interval. The interval of this reference period is in ms.
* This field is used to set the reference period for the callback. The default setting is 2s. * */ get statsInterval(): number; set statsInterval(value: number); /** {en} * @brief Round-trip time. The unit is ms. * */ get rtt(): number; set rtt(value: number); /** {en} * @brief Number of audio channels. * */ get numChannels(): number; set numChannels(value: number); /** {en} * @brief Audio transmission sampling rate. Audio transmission sampling rate information in the reference period, in Hz. * */ get sentSampleRate(): number; set sentSampleRate(value: number); /** {en} * @brief Audio uplink network jitter in ms. * */ get jitter(): number; set jitter(value: number); /** {en} * @platform ios * @brief The bit rate of transmission. The audio transmission rate in the reference period is kbps. */ get ios_sentKBitrate(): $p_i.float; set ios_sentKBitrate(value: $p_i.float); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class MixedStreamVideoConfig { protected _instance: any; /** {en} * @platform ios * @brief The video codec. See {@link ByteRTCMixedStreamVideoCodecType ByteRTCMixedStreamVideoCodecType}. The default value is `0`. It's recommended to be set.
* These parameters cannot be updated while pushing stream to the CDN. */ get ios_videoCodec(): $p_i.ByteRTCMixedStreamVideoCodecType; set ios_videoCodec(value: $p_i.ByteRTCMixedStreamVideoCodecType); /** {en} * @platform ios * @brief The width (pixels) to be set. The range is [2, 1920], and must be an even number. The default value is 640 pixels. It's recommended to be set.
* If an odd number is set, the width will be adjusted to the next larger even number. */ get ios_width(): $p_i.NSInteger; set ios_width(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The height (pixels) to be set. The range is [2, 1920], and must be an even number. The default value is 360 pixels. It's recommended to be set.
* If an odd number is set, the height will be adjusted to the next larger even number. */ get ios_height(): $p_i.NSInteger; set ios_height(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The frame rate (FPS) in range of [1, 60]. The default value is 15 FPS. It's recommended to be set. */ get ios_fps(): $p_i.NSInteger; set ios_fps(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The time interval between I-frames (second) in range of [1, 5]. The default value is 2 seconds. It's recommended to be set.
* These parameters cannot be updated while pushing stream to the CDN. */ get ios_gop(): $p_i.NSInteger; set ios_gop(value: $p_i.NSInteger); /** {en} * @platform ios * @brief The bitrate (Kbps) in range of [1, 10000]. The default value is self-adaptive. It's recommended to be set. */ get ios_bitrate(): $p_i.NSInteger; set ios_bitrate(value: $p_i.NSInteger); /** {en} * @platform ios * @brief Whether to push streams with B frame, only support by server mix. */ get ios_enableBFrame(): $p_i.BOOL; set ios_enableBFrame(value: $p_i.BOOL); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; /** {en} * @platform android * @detail api * @brief Sets the frame rate of the mixed video stream. You are recommended to set the value. * @param fps The frame rate (FPS) in range of [1, 60]. The default value is 15 FPS. * */ android_setFps(fps: number): this; /** {en} * @platform android * @detail api * @brief Gets the frame rate of the mixed video stream. * */ android_getFps(): number; /** {en} * @platform android * @detail api * @brief Sets the time interval between I-frames. You are recommended to set the value.
* This parameter cannot be updated while pushing stream to the CDN. * @param gop The time interval between I-frames (second) in range of [1, 5]. The default value is 2 seconds. * */ android_setGop(gop: number): this; /** {en} * @platform android * @detail api * @brief Gets the time interval between I-frames. * */ android_getGop(): number; /** {en} * @platform android * @detail api * @brief Sets the bitrate of the mixed video stream. You are recommended to set the value. * @param bitrate The bitrate (Kbps) in range of [1, 10000]. The default value is self-adaptive. * */ android_setBitrate(bitrate: number): this; /** {en} * @platform android * @detail api * @brief Gets the bitrate of the mixed video stream. * */ android_getBitrate(): number; /** {en} * @platform android * @detail api * @brief Sets the width of the mixed video stream. You are recommended to set the value. * @param width The width (pixels) to be set. The range is [2, 1920], and must be an even number. The default value is 640 pixels.
* If an odd number is set, the width will be adjusted to the next larger even number. * */ android_setWidth(width: number): this; /** {en} * @platform android * @detail api * @brief Gets the width of the mixed video stream. * */ android_getWidth(): number; /** {en} * @platform android * @detail api * @brief Sets the height of the mixed video stream. You are recommended to set the value. * @param height The height (pixels) to be set. The range is [2, 1920], and must be an even number. The default value is 360 pixels.
* If an odd number is set, the height will be adjusted to the next larger even number. * */ android_setHeight(height: number): this; /** {en} * @platform android * @detail api * @brief Gets the height of the mixed video stream. * */ android_getHeight(): number; /** {en} * @platform android * @detail api * @brief Sets whether to push streams with B frame. Only supported when mixing streams on the server side。 * @param enableBframe Whether to push streams with B frame:
* - true: Yes * - false: No * */ android_setEnableBframe(enableBframe: boolean): this; /** {en} * @platform android * @detail api * @brief Gets setting of whether to push streams with B frame. * */ android_getEnableBframe(): boolean; /** {en} * @platform android * @detail api * @brief Sets the video codec type. You are recommended to set the value.
* This parameter cannot be updated while pushing stream to the CDN. * @param videoCodec The video codec. See {@link MixedStreamVideoCodecType MixedStreamVideoCodecType}. The default value is `MIXED_STREAM_VIDEO_CODEC_TYPE_H264("H264")`. * */ android_setVideoCodec(videoCodec: $p_a.MixedStreamVideoCodecType): this; /** {en} * @platform android * @detail api * @brief Gets the video codec type. * */ android_getVideoCodec(): $p_a.MixedStreamVideoCodecType; } export declare class MediaPlayerCustomSource { constructor(provider: IMediaPlayerCustomSourceProvider, mode: MediaPlayerCustomSourceMode, type: MediaPlayerCustomSourceStreamType); constructor(); constructor(); protected _instance: any; /** {en} * @detail keytype * @brief See {@link MediaPlayerCustomSourceMode MediaPlayerCustomSourceMode}. * */ get mode(): MediaPlayerCustomSourceMode; set mode(value: MediaPlayerCustomSourceMode); /** {en} * @detail keytype * @brief See {@link MediaPlayerCustomSourceStreamType MediaPlayerCustomSourceStreamType}. * */ get type(): MediaPlayerCustomSourceStreamType; set type(value: MediaPlayerCustomSourceStreamType); /** {en} * @platform ios * @detail keytype * @brief Specify the {@link ByteRTCMediaPlayerCustomSourceProvider ByteRTCMediaPlayerCustomSourceProvider} instance when you want to play the audio data in memory. 
*/ get ios_provider(): $p_i.id<$p_i.ByteRTCMediaPlayerCustomSourceProvider>; set ios_provider(value: $p_i.id<$p_i.ByteRTCMediaPlayerCustomSourceProvider>); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum MusicFilterType { NONE = 0, WITHOUT_LYRIC = 1, UNSUPPORTED_SCORE = 2, UNSUPPORTED_ACCOMPANY = 3, UNSUPPORTED_CLIMAX = 4, ByteRTCMusicFilterTypeUnsupportedAccopmay = 5, ByteRTCMusicFilterTypeUnsupportedClimx = 6 } export declare class RemoteAudioPropertiesInfo { constructor(streamKey: RemoteStreamKey, audioPropertiesInfo: AudioPropertiesInfo); constructor(); protected _instance: any; /** {en} * @detail keytype * @brief Remote stream information. See {@link RemoteStreamKey RemoteStreamKey}. * */ get streamKey(): RemoteStreamKey; set streamKey(value: RemoteStreamKey); /** {en} * @detail keytype * @brief Information of audio properties. See {@link AudioPropertiesInfo AudioPropertiesInfo}. * */ get audioPropertiesInfo(): AudioPropertiesInfo; set audioPropertiesInfo(value: AudioPropertiesInfo); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum LocalVideoRenderPosition { AFTER_CAPTURE = 0, AFTER_PREPROCESS = 1 } export declare class RTCLogConfig { constructor(logLevel: LocalLogLevel, logPath: string, logFileSize: number, logFilenamePrefix: string); constructor(); protected _instance: any; /** {en} * @brief (Required) Local log directory. * */ get logPath(): string; set logPath(value: string); /** {en} * @brief (Optional) The limits for total log file size in MB. The range is 1 to 100 MB, and the default value is 10 MB.
* If `logFileSize` < 1, it will be set to 1 MB. If `logFileSize` > 100, it will be set to 100 MB.
* The maximum size for a single log file is 2 MB.
 * If 1 ≤ `logFileSize` ≤ 2, one log file will be generated. If `logFileSize` > 2, the first `⌊logFileSize/2⌋` files will be filled with 2 MB each, and the `⌊logFileSize/2⌋+1`-th file will be filled with the remaining `logFileSize mod 2` MB.
* If the size exceeds the remaining space, the oldest file will be deleted. * */ get logFileSize(): number; set logFileSize(value: number); /** {en} * @brief (Optional) The logging level. See {@link LocalLogLevel LocalLogLevel}. The default is warning level. * */ get logLevel(): LocalLogLevel; set logLevel(value: LocalLogLevel); /** {en} * @brief (Optional) Local log file name prefix. It should follow the regular expression pattern of `[a-zA-Z0-9_\@-.]{1,128}`.
* The final file name will be the prefix followed by "_" and the file creation time, and "_rtclog.log" at the end. For example, `logPrefix_2023-05-25_172324_rtclog.log`. * */ get logFilenamePrefix(): string; set logFilenamePrefix(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum AudioReportMode { AUDIO_REPORT_MODE_NORMAL = 0, AUDIO_REPORT_MODE_DISCONNECT = 1, AUDIO_REPORT_MODE_RESET = 2 } export declare class AudioFormat { constructor(sampleRate: AudioSampleRate, channel: AudioChannel, samplesPerCall: number); constructor(sampleRate: AudioSampleRate, channel: AudioChannel); constructor(); protected _instance: any; /** {en} * @brief Audio sample rate. See {@link AudioSampleRate AudioSampleRate}. * */ get sampleRate(): AudioSampleRate; set sampleRate(value: AudioSampleRate); /** {en} * @brief Audio channel. See {@link AudioChannel AudioChannel}. * */ get channel(): AudioChannel; set channel(value: AudioChannel); /** {en} * @brief Samples per audio frame returned by callback. `0` by default. The default samples per callback is the minimum value.
* The minimum value is `sampleRate * channel * 0.01s`, the value when the callback interval is 0.01s.
* The maximum value is `2048`. If the value is invalid, the samples per callback uses the default value.
* This parameter only takes effect when setting the read-write callback. It does not take effect when calling {@link enableAudioFrameCallback enableAudioFrameCallback} to enable read-only callback. * */ get samplesPerCall(): number; set samplesPerCall(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class MediaTypeEnhancementConfig { constructor(enhanceSignaling: boolean, enhanceAudio: boolean, enhanceVideo: boolean, enhanceScreenAudio: boolean, enhanceScreenVideo: boolean); constructor(); constructor(); protected _instance: any; /** {en} * @brief Apply to signaling or not. Not by default. * */ get enhanceSignaling(): boolean; set enhanceSignaling(value: boolean); /** {en} * @brief Apply to audio stream (Screen-sharing audio not included) or not. Not by default. * */ get enhanceAudio(): boolean; set enhanceAudio(value: boolean); /** {en} * @brief Apply to video (Screen-sharing video not included) or not. Not by default. * */ get enhanceVideo(): boolean; set enhanceVideo(value: boolean); /** {en} * @brief Apply to screen sharing audio or not. Not by default. * */ get enhanceScreenAudio(): boolean; set enhanceScreenAudio(value: boolean); /** {en} * @brief Apply to screen sharing video or not. Not by default. 
* */ get enhanceScreenVideo(): boolean; set enhanceScreenVideo(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum MirrorType { MIRROR_TYPE_NONE = 0, MIRROR_TYPE_RENDER = 1, MIRROR_TYPE_RENDER_AND_ENCODER = 2 } export declare class CloudProxyInfo { constructor(cloudProxyIp: string, cloudProxyPort: number); constructor(); protected _instance: any; /** {en} * @detail keytype * @brief Cloud proxy IP * */ get cloudProxyIp(): string; set cloudProxyIp(value: string); /** {en} * @detail keytype * @brief Cloud proxy Port * */ get cloudProxyPort(): number; set cloudProxyPort(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class VideoEncoderConfig { constructor(width: number, height: number, frameRate: number, maxBitrate: number, minBitrate: number); constructor(width: number, height: number, frameRate: number, maxBitrate: number, minBitrate: number, codecName: number, codecMode: number, encodePrefer: number); constructor(); constructor(); protected _instance: any; /** {en} * @brief Width of the video frame in px * */ get width(): number; set width(value: number); /** {en} * @brief Height of the video frame in px * */ get height(): number; set height(value: number); /** {en} * @brief Video frame rate in fps * */ get frameRate(): number; set frameRate(value: number); /** {en} * @brief Maximum bit rate in kbps. Optional for internal capturing while mandatory for custom capturing.
* If you set this value to -1, SDK will automatically calculate the applicable bit rate based on the input resolution and frame rate.
* No stream will be encoded and published if you set this value to 0.
* In 3.44.1 and later versions, the default value is `-1` for internal capturing, but no default value in versions earlier than 3.44.1. * */ get maxBitrate(): number; set maxBitrate(value: number); /** {en} * @brief Minimum video encoding bitrate in kbps. The encoding bitrate will not be lower than the `minBitrate`.
* It defaults to `0`.
 * It ranges within [0, maxBitrate). When `maxBitrate` < `minBitrate`, the bitrate is self-adapted.
 * In the following circumstances, the assignment to this variable has no effect:
* - When `maxBitrate` = `0`, the video encoding is disabled. * - When `maxBitrate` < `0`, the bitrate is self-adapted. * */ get minBitrate(): number; set minBitrate(value: number); /** {en} * @platform android * @brief Encoding preference. The default value is MaintainFramerate(1). See {@link EncoderPreference EncoderPreference}. * */ get android_encodePreference(): $p_a.EncoderPreference; set android_encodePreference(value: $p_a.EncoderPreference); get android_maxQp(): $p_a.int; set android_maxQp(value: $p_a.int); get android_minQp(): $p_a.int; set android_minQp(value: $p_a.int); /** {en} * @platform ios * @brief Encoding preference. The default value is ByteRTCVideoEncoderPreferenceMaintainFramerate(1). See {@link ByteRTCVideoEncoderPreference ByteRTCVideoEncoderPreference}. */ get ios_encoderPreference(): $p_i.ByteRTCVideoEncoderPreference; set ios_encoderPreference(value: $p_i.ByteRTCVideoEncoderPreference); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum FirstFramePlayState { FIRST_FRAME_PLAY_STATE_PLAYING = 0, FIRST_FRAME_PLAY_STATE_END = 1, FIRST_FRAME_PLAY_STATE_PLAYED = 2, ByteRTCFirstFramePlayStatePlay = 3 } export declare class RemoteVideoSinkConfig { protected _instance: any; /** {en} * @brief The position of the remote video frame. See {@link RemoteVideoRenderPosition RemoteVideoRenderPosition}. The default position is after processing. * */ get position(): RemoteVideoRenderPosition; set position(value: RemoteVideoRenderPosition); /** {en} * @platform android * @brief The pixel format of the remote video frame. See {@link VideoPixelFormat VideoPixelFormat}. The default value is 0. * */ get android_pixelFormat(): $p_a.int; set android_pixelFormat(value: $p_a.int); /** {en} * @brief Whether the video frame needs to be automatically rotate. See {@link VideoApplyRotation VideoApplyRotation}. The default value is no rotate. 
* */ get applyRotation(): VideoApplyRotation; set applyRotation(value: VideoApplyRotation); /** {en} * @brief Whether the video frame needs to be mirrored. See {@link VideoRenderMirrorType VideoRenderMirrorType}. The default value is no mirroring effect. * */ get mirrorType(): VideoRenderMirrorType; set mirrorType(value: VideoRenderMirrorType); /** {en} * @platform ios * @brief The pixel format of the remote video frame. See {@link ByteRTCVideoSinkPixelFormat ByteRTCVideoSinkPixelFormat}. The default value is 0. */ get ios_requiredPixelFormat(): $p_i.ByteRTCVideoSinkPixelFormat; set ios_requiredPixelFormat(value: $p_i.ByteRTCVideoSinkPixelFormat); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoSuperResolutionModeChangedReason { VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_API_OFF = 0, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_API_ON = 1, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_RESOLUTION_EXCEED = 2, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_OVER_USE = 3, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_DEVICE_NOT_SUPPORT = 4, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_DYNAMIC_CLOSE = 5, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_OTHER_SETTING_DISABLED = 6, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_OTHER_SETTING_ENABLED = 7, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_NO_COMPONENT = 8, VIDEO_SUPER_RESOLUTION_MODE_CHANGED_REASON_STREAM_NOT_EXIST = 9 } export declare enum AnsMode { ANS_MODE_DISABLE = 0, ANS_MODE_LOW = 1, ANS_MODE_MEDIUM = 2, ANS_MODE_HIGH = 3, ANS_MODE_AUTOMATIC = 4 } export declare enum LocalAudioStreamState { LOCAL_AUDIO_STREAM_STATE_STOPPED = 0, LOCAL_AUDIO_STREAM_STATE_RECORDING = 1, LOCAL_AUDIO_STREAM_STATE_ENCODING = 2, LOCAL_AUDIO_STREAM_STATE_FAILED = 3 } export declare class VideoFrameInfo { constructor(width: number, height: number, rotation: number); constructor(); protected _instance: any; /** {en} * @brief Width (pixels) * */ get width(): number; set width(value: number); /** 
{en} * @brief High (pixels) * */ get height(): number; set height(value: number); /** {en} * @brief Video frame clockwise rotation angle. See {@link VideoRotation VideoRotation}. * */ get rotation(): VideoRotation; set rotation(value: VideoRotation); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class VideoCanvas { constructor(surface: Surface, renderMode: number); constructor(surface: Surface, renderMode: number, backgroundColor: number); constructor(); constructor(); protected _instance: any; /** {en} * @platform android * @brief Local view handler.
* When you render the `View` object, use the field, and set `renderSurface` to `null`. * */ get android_renderView(): $p_a.View; set android_renderView(value: $p_a.View); /** {en} * @platform android * @brief Local surface handler.
* When you render the Surface object, use the field, and set `renderView` to `null`. * */ get android_renderSurface(): $p_a.Surface; set android_renderSurface(value: $p_a.Surface); /** {en} * @brief Rendering mode, the value must be `RENDER_MODE_HIDDEN(1)`, `RENDER_MODE_FIT(2)`, or `RENDER_MODE_FILL(3)`. The default value is `RENDER_MODE_HIDDEN(1)`. * */ get renderMode(): RenderMode; set renderMode(value: RenderMode); /** {en} * @brief Set the background color of the canvas which is not filled with video frame. The range is `[0x00000000, 0xFFFFFFFF]`. The default is `0x00000000`. The Alpha index is ignored. * */ get backgroundColor(): number; set backgroundColor(value: number); /** {en} * @brief Video frame rotation angle. See {@link VideoRotation VideoRotation}. The default value is 0, which means no rotation is applied.
* This parameter only applies to remote video and does not affect local video settings. * */ get renderRotation(): VideoRotation; set renderRotation(value: VideoRotation); /** {en} * @platform ios * @brief Local view handle */ get ios_view(): $p_i.ByteRTCView; set ios_view(value: $p_i.ByteRTCView); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class RemoteVideoStats { constructor(); constructor(); protected _instance: any; /** {en} * @brief Remote Video Stream Width * */ get width(): number; set width(value: number); /** {en} * @brief Remote Video Stream Height * */ get height(): number; set height(value: number); /** {en} * @brief Video packet loss rate. The video downlink packet loss rate in the reference period. The value range is [0,1]. * */ get videoLossRate(): number; set videoLossRate(value: number); /** {en} * @brief Receiving bit rate. Video reception rate within the reference period, in kbps. * */ get receivedKBitrate(): number; set receivedKBitrate(value: number); /** {en} * @brief The decoder outputs the frame rate. Video decoder output frame rate within the reference period, in fps. * */ get decoderOutputFrameRate(): number; set decoderOutputFrameRate(value: number); /** {en} * @platform android * @brief Render frame rate. The video rendering frame rate in the reference period, in fps. * */ get android_rendererOutputFrameRate(): $p_a.int; set android_rendererOutputFrameRate(value: $p_a.int); /** {en} * @brief Number of cards. Number of cards in the reference period. * */ get stallCount(): number; set stallCount(value: number); /** {en} * @brief Catton duration. The total duration of the video card in the reference period. Unit ms. 
* */ get stallDuration(): number; set stallDuration(value: number); /** {en} * @brief User experience-level end-to-end delay, in milliseconds, from the time when the sender captures the encoding to the time when the receiver decodes the rendering * */ get e2eDelay(): number; set e2eDelay(value: number); /** {en} * @brief Whether the media stream belongs to the user is a screen stream. You can know whether the current statistics come from mainstream or screen stream. * */ get isScreen(): boolean; set isScreen(value: boolean); /** {en} * @brief Statistical interval, the interval of this reference period, in ms.
* This field is used to set the reference period for the callback, currently set to 2s. * */ get statsInterval(): number; set statsInterval(value: number); /** {en} * @brief Round-trip time in ms. * */ get rtt(): number; set rtt(value: number); /** {en} * @brief The cumulative duration of the video card of the remote user accounts for the percentage (\%) of the total effective duration of the video after entering the room. The effective duration of the video refers to the duration of the video other than stopping sending the video stream and disabling the video module after the remote user enters the room to publish the video stream. * */ get frozenRate(): number; set frozenRate(value: number); /** {en} * @brief For the encoding type of the video, please refer to {@link VideoCodecType VideoCodecType}. * */ get codecType(): VideoCodecType; set codecType(value: VideoCodecType); /** {en} * @brief For subscripts for streams with multiple resolutions. * */ get videoIndex(): number; set videoIndex(value: number); /** {en} * @brief Video downlink network jitter in ms. * */ get jitter(): number; set jitter(value: number); /** {en} * @brief Super-resolution mode. See {@link VideoSuperResolutionMode VideoSuperResolutionMode}. * */ get superResolutionMode(): VideoSuperResolutionMode; set superResolutionMode(value: VideoSuperResolutionMode); /** {en} * @platform ios * @brief Remote video receiving frame rate. */ get ios_receivedFrameRate(): $p_i.NSInteger; set ios_receivedFrameRate(value: $p_i.NSInteger); /** {en} * @platform ios * @brief Remote video rendering output frame rate. 
*/ get ios_renderOutputFrameRate(): $p_i.NSInteger; set ios_renderOutputFrameRate(value: $p_i.NSInteger); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoApplyRotation { DEFAULT = 0, DEGREE_0 = 1, ByteRTCVideoApplyRotation0 = 2 } export declare enum VideoRotation { VIDEO_ROTATION_0 = 0, VIDEO_ROTATION_90 = 1, VIDEO_ROTATION_180 = 2, VIDEO_ROTATION_270 = 3 } export declare class LocalVideoSinkConfig { protected _instance: any; /** {en} * @brief The position of the local video frame. See {@link LocalVideoRenderPosition LocalVideoRenderPosition}. * */ get position(): LocalVideoRenderPosition; set position(value: LocalVideoRenderPosition); /** {en} * @platform android * @brief The pixel format of the local video frame. See {@link VideoPixelFormat VideoPixelFormat}. The default value is 0. * */ get android_pixelFormat(): $p_a.int; set android_pixelFormat(value: $p_a.int); /** {en} * @platform ios * @brief The pixel format of the local video frame. See {@link ByteRTCVideoSinkPixelFormat ByteRTCVideoSinkPixelFormat}. The default value is 0. 
*/ get ios_requiredPixelFormat(): $p_i.ByteRTCVideoSinkPixelFormat; set ios_requiredPixelFormat(value: $p_i.ByteRTCVideoSinkPixelFormat); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class RecordingProgress { constructor(duration: number, fileSize: number); constructor(); protected _instance: any; /** {en} * @brief The cumulative recording time of the current file, in milliseconds * */ get duration(): number; set duration(value: number); /** {en} * @brief The size of the currently recorded file in bytes * */ get fileSize(): number; set fileSize(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum RemoteUserPriority { REMOTE_USER_PRIORITY_LOW = 0, REMOTE_USER_PRIORITY_MEDIUM = 1, REMOTE_USER_PRIORITY_HIGH = 2 } export declare enum LocalVideoStreamState { LOCAL_VIDEO_STREAM_STATE_STOPPED = 0, LOCAL_VIDEO_STREAM_STATE_RECORDING = 1, LOCAL_VIDEO_STREAM_STATE_ENCODING = 2, LOCAL_VIDEO_STREAM_STATE_FAILED = 3 } export declare enum RemoteAudioState { REMOTE_AUDIO_STATE_STOPPED = 0, REMOTE_AUDIO_STATE_STARTING = 1, REMOTE_AUDIO_STATE_DECODING = 2, REMOTE_AUDIO_STATE_FROZEN = 3 } export declare class AudioPropertiesConfig { constructor(interval: number, enableSpectrum: boolean, enableVad: boolean); constructor(interval: number, enableSpectrum: boolean, enableVad: boolean, localMainReportMode: AudioReportMode, smooth: number, audioReportMode: AudioPropertiesMode); constructor(interval: number, enableSpectrum: boolean, enableVad: boolean, localMainReportMode: AudioReportMode); constructor(interval: number, enableSpectrum: boolean, enableVad: boolean, localMainReportMode: AudioReportMode, smooth: number); constructor(interval: number); constructor(); protected _instance: any; /** {en} * @brief Prompt interval in ms
* - `<= 0`: Turn off prompt * - `(0,100]`: Invalid interval value, and will be automatically reset to 100ms. * - `> 100`: the actual value of interval * */ get interval(): number; set interval(value: number); /** {en} * @brief Whether to enable audio spectrum detection * */ get enableSpectrum(): boolean; set enableSpectrum(value: boolean); /** {en} * @brief Whether to enable Voice Activity Detection * */ get enableVad(): boolean; set enableVad(value: boolean); /** {en} * @brief The volume callback modes. See {@link AudioReportMode AudioReportMode}. * */ get localMainReportMode(): AudioReportMode; set localMainReportMode(value: AudioReportMode); /** {en} * @brief The audio info included in {@link onLocalAudioPropertiesReport onLocalAudioPropertiesReport}. See {@link AudioPropertiesMode AudioPropertiesMode}.
* Locally captured microphone audio info and locally captured screen audio info are included by default. * */ get audioReportMode(): AudioPropertiesMode; set audioReportMode(value: AudioPropertiesMode); /** {en} * @brief The smoothing coefficient for audio attribute information prompt. The range is `(0.0, 1.0]`.
* The default value is `1.0`, which means the smoothing effect is off by default. Smaller the value, smoother the audio volume prompt. If you want to enable the smooth effect, the recommended value is `0.3`. * */ get smooth(): number; set smooth(value: number); /** {en} * @brief Sets whether to return the vocal pitch of the local user. * */ get enableVoicePitch(): boolean; set enableVoicePitch(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoPixelFormat { UNKNOWN = 0, I420 = 1, NV21 = 2, RGBA = 3, NV12 = 4, TEXTURE_2D = 5, TEXTURE_OES = 6, ByteRTCVideoPixelFormatBGRA = 7, ByteRTCVideoPixelFormatIMC2 = 8, ByteRTCVideoPixelFormatARGB = 9, ByteRTCVideoPixelFormatGLTexture2D = 10, ByteRTCVideoPixelFormatGLTextureOES = 11, ByteRTCVideoPixelFormatCVPixelBuffer = 12 } export declare enum LocalProxyError { OK = 0, SOCKS5_VERSION_ERROR = 1, SOCKS5_FORMAT_ERROR = 2, SOCKS5_INVALID_VALUE = 3, SOCKS5_USER_PASS_NOT_GIVEN = 4, SOCKS5_TCP_CLOSED = 5, HTTP_TUNNEL_FAILED = 6 } export declare enum MediaStreamType { RTC_MEDIA_STREAM_TYPE_AUDIO = 1, RTC_MEDIA_STREAM_TYPE_VIDEO = 2, RTC_MEDIA_STREAM_TYPE_BOTH = 3 } export declare enum MediaDeviceState { /** {en} * @platform android * @brief Device started capturing * */ MEDIA_DEVICE_STATE_STARTED = 0, /** {en} * @platform android * @brief Device stopped capturing * */ MEDIA_DEVICE_STATE_STOPPED = 1, /** {en} * @platform android * @brief Runtime error
* For example, when the media device is expected to be working but no data is received. * */ MEDIA_DEVICE_STATE_RUNTIMEERROR = 2, /** {en} * @platform android * @brief Device is added. * */ MEDIA_DEVICE_STATE_ADDED = 3, /** {en} * @platform android * @brief Device is removed. * */ MEDIA_DEVICE_STATE_REMOVED = 4, /** {en} * @platform android * @brief A Phone call interrupted the RTC call. RTC call will resume once the call ends. * */ MEDIA_DEVICE_STATE_INTERRUPTION_BEGAN = 5, /** {en} * @platform android * @brief RTC call resumed from the interruption caused by a phone call. * */ MEDIA_DEVICE_STATE_INTERRUPTION_ENDED = 6, /** {en} * @platform ios * @brief On * */ ByteRTCMediaDeviceStateStarted = 7, /** {en} * @platform ios * @brief Off * */ ByteRTCMediaDeviceStateStopped = 8, /** {en} * @platform ios * @brief Runtime error
* For example, when the media device is expected to be working but no data is received. * */ ByteRTCMediaDeviceStateRuntimeError = 9, /** {en} * @platform ios * @brief Added
* Call enumerate-device api to update the device list when you get this notification. * */ ByteRTCMediaDeviceStateAdded = 10, /** {en} * @platform ios * @brief Removed
* Call enumerate-device api to update the device list when you get this notification. * */ ByteRTCMediaDeviceStateRemoved = 11, /** {en} * @platform ios * @brief Phone calls, locking screen or the other Apps interrupted the RTC call. RTC call will resume once the call ends or the other Apps release the media devices. * */ ByteRTCMediaDeviceStateInterruptionBegan = 12, /** {en} * @platform ios * @brief RTC call resumed from the interruption caused by the phone calls or the other Apps. * */ ByteRTCMediaDeviceStateInterruptionEnded = 13 } export declare enum LocalProxyType { SOCKS5 = 0, HTTP_TUNNEL = 1 } export declare enum PlayState { PLAYING = 0, PAUSED = 1, FAILED = 2, FINISHED = 3, STOPPED = 4, ByteRTCPlayStateStoped = 5 } export declare class VoiceReverbConfig { constructor(roomSize: number, decayTime: number, damping: number, wetGain: number, dryGain: number, preDelay: number); constructor(); protected _instance: any; /** {en} * @brief The room size for reverb simulation. The range is `[0.0, 100.0]`. The default value is `50.0f`. The larger the room, the stronger the reverberation. * */ get roomSize(): number; set roomSize(value: number); /** {en} * @brief The decay time of the reverb effect. The range is `[0.0, 100.0]`. The default value is `50.0f`. * */ get decayTime(): number; set decayTime(value: number); /** {en} * @brief The damping index of the reverb effect. The range is `[0.0, 100.0]`. The default value is `50.0f`. * */ get damping(): number; set damping(value: number); /** {en} * @brief The Intensity of the wet signal in dB. The range is `[-20.0, 10.0]`. The default value is `0.0f`. * */ get wetGain(): number; set wetGain(value: number); /** {en} * @brief The Intensity of the dry signal in dB. The range is `[-20.0, 10.0]`. The default value is `0.0f`. * */ get dryGain(): number; set dryGain(value: number); /** {en} * @brief The delay of the wet signal in ms. The range is `[0.0, 200.0]`. The default value is `0.0f`. 
* */ get preDelay(): number; set preDelay(value: number); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VideoSuperResolutionMode { VIDEO_SUPER_RESOLUTION_MODE_OFF = 0, VIDEO_SUPER_RESOLUTION_MODE_ON = 1 } export declare class PushSingleStreamParam { constructor(rid: string, uid: string, url: string, isScreen: boolean); constructor(); constructor(); protected _instance: any; /** {en} * @brief The room ID of the media stream * */ get roomId(): string; set roomId(value: string); /** {en} * @brief The user ID of the media stream * */ get userId(): string; set userId(value: string); /** {en} * @brief The URL for live transcoding. Only supports live transcoding via RTMP. The URL should match the regular expression `/^rtmps?:\\/\\//`.
* This parameter cannot be updated while pushing stream to the CDN. * */ get url(): string; set url(value: string); /** {en} * @brief Whether the media stream is a screen-sharing stream. * */ get isScreen(): boolean; set isScreen(value: boolean); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum SubtitleState { SUBTITLE_STATE_STARTED = 0, SUBTITLE_STATE_STOPED = 1, SUBTITLE_STATE_ERROR = 2 } export declare enum RecordingFileType { AAC = 0, MP4 = 1 } export declare enum ScreenMediaType { SCREEN_MEDIA_TYPE_VIDEO_ONLY = 0, SCREEN_MEDIA_TYPE_AUDIO_ONLY = 1, SCREEN_MEDIA_TYPE_VIDEO_AND_AUDIO = 2 } export declare enum MixedStreamRenderMode { MIXED_STREAM_RENDER_MODE_HIDDEN = 0, MIXED_STREAM_RENDER_MODE_FIT = 1, MIXED_STREAM_RENDER_MODE_ADAPTIVE = 2 } export declare class SubtitleConfig { constructor(mode: SubtitleMode, targetLanguage: string); constructor(); protected _instance: any; /** {en} * @brief Subtitle mode. You can choose between the recognition and translation modes based on your needs. In the recognition mode, the speech of the speaker in the room will be recognized and converted into captions. In the translation mode, the transcribed text will be translated. Refer to {@link SubtitleMode SubtitleMode} for more details. * */ get mode(): SubtitleMode; set mode(value: SubtitleMode); /** {en} * @brief Target language. * */ get targetLanguage(): string; set targetLanguage(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum VirtualBackgroundSourceType { COLOR = 0, IMAGE = 1, ByteRTCVirtualBackgroundSourceTypeImage = 2 } export declare enum AudioRenderType { AUDIO_RENDER_TYPE_EXTERNAL = 0, AUDIO_RENDER_TYPE_INTERNAL = 1, ByteRTCAudioRenderTypeInternal = 2 } export declare class MixedStreamAudioConfig { protected _instance: any; /** {en} * @platform ios * @brief The audio codec. * @param codec The audio codec. 
See {@link ByteRTCMixedStreamAudioCodecType ByteRTCMixedStreamAudioCodecType}. Defaults to `0`. It's recommended to be set. */
get ios_audioCodec(): $p_i.ByteRTCMixedStreamAudioCodecType;
set ios_audioCodec(value: $p_i.ByteRTCMixedStreamAudioCodecType);
/** {en}
 * @platform ios
 * @brief The sample rate (Hz). Supported sample rates: 32,000 Hz, 44,100 Hz, 48,000 Hz. Defaults to 48,000 Hz. It's recommended to be set. */
get ios_sampleRate(): $p_i.NSInteger;
set ios_sampleRate(value: $p_i.NSInteger);
/** {en}
 * @platform ios
 * @brief The number of channels. Supported channels: 1 (single channel), 2 (dual channel). Defaults to 2. It's recommended to be set. */
get ios_channels(): $p_i.NSInteger;
set ios_channels(value: $p_i.NSInteger);
/** {en}
 * @platform ios
 * @brief The bitrate (Kbps) in range of [32, 192]. Defaults to 64 Kbps. It's recommended to be set. */
get ios_bitrate(): $p_i.NSInteger;
set ios_bitrate(value: $p_i.NSInteger);
/** {en}
 * @platform ios
 * @brief AAC profile. See {@link ByteRTCMixedStreamAudioProfile ByteRTCMixedStreamAudioProfile}. Defaults to `0`. It's recommended to be set. */
get ios_audioProfile(): $p_i.ByteRTCMixedStreamAudioProfile;
set ios_audioProfile(value: $p_i.ByteRTCMixedStreamAudioProfile);
protected __init(...args: any[]): void;
protected __new_instance(...args: any[]): any;
/** {en}
 * @platform android
 * @detail api
 * @brief Sets the number of channels. You are recommended to set the value.
 * @param channels The number of channels. Supported channels: 1 (single channel), 2 (dual channel). Defaults to 2.
 * */
android_setChannels(channels: number): this;
/** {en}
 * @platform android
 * @detail api
 * @brief Gets the number of channels.
 * */
android_getChannels(): number;
/** {en}
 * @platform android
 * @detail api
 * @brief Sets the sample rate. You are recommended to set the value.
 * @param sampleRate The sample rate (Hz). Supported sample rates: 32,000 Hz, 44,100 Hz, 48,000 Hz. Defaults to 48,000 Hz.
 * */
android_setSampleRate(sampleRate: number): this;
/** {en}
 * @platform android
 * @detail api
 * @brief Gets the sample rate.
 * */
android_getSampleRate(): number;
/** {en}
 * @platform android
 * @detail api
 * @brief Sets the bitrate. You are recommended to set the value.
 * @param bitrate The bitrate (Kbps) in range of [32, 192]. Defaults to 64 Kbps.
 * */
android_setBitrate(bitrate: number): this;
/** {en}
 * @platform android
 * @detail api
 * @brief Gets the bitrate.
 * */
android_getBitrate(): number;
/** {en}
 * @platform android
 * @detail api
 * @brief Sets the audio codec. You are recommended to set the value.
 * @param audioCodec The audio codec. See {@link MixedStreamAudioCodecType MixedStreamAudioCodecType}. Defaults to `MIXED_STREAM_AUDIO_CODEC_TYPE_AAC("AAC")`.
 * This parameter cannot be updated while pushing stream to the CDN.
 * */
android_setAudioCodec(audioCodec: $p_a.MixedStreamAudioCodecType): this;
/** {en}
 * @platform android
 * @detail api
 * @brief Gets the audio codec.
 * */
android_getAudioCodec(): $p_a.MixedStreamAudioCodecType;
/** {en}
 * @platform android
 * @detail api
 * @brief Sets the AAC profile. You are recommended to set the value.
 * @param audioProfile AAC profile. See {@link MixedStreamAudioProfile MixedStreamAudioProfile}. Defaults to `MIXED_STREAM_AUDIO_PROFILE_LC("LC")`.
 * */
android_setAudioProfile(audioProfile: $p_a.MixedStreamAudioProfile): this;
/** {en}
 * @platform android
 * @detail api
 * @brief Gets the AAC profile.
 * */
android_getAudioProfile(): $p_a.MixedStreamAudioProfile;
}
/** {en}
 * @brief Camera pan/zoom actions (move left/right/up/down, zoom in/out, reset); the ByteRTC* members mirror the iOS-style names.
 */
export declare enum ZoomDirectionType {
    CAMERA_MOVE_LEFT = 0,
    CAMERA_MOVE_RIGHT = 1,
    CAMERA_MOVE_UP = 2,
    CAMERA_MOVE_DOWN = 3,
    CAMERA_ZOOM_OUT = 4,
    CAMERA_ZOOM_IN = 5,
    CAMERA_RESET = 6,
    ByteRTCZoomDirectionTypeMoveLeft = 7,
    ByteRTCZoomDirectionTypeMoveRight = 8,
    ByteRTCZoomDirectionTypeMoveUp = 9,
    ByteRTCZoomDirectionTypeMoveDown = 10,
    ByteRTCZoomDirectionTypeZoomOut = 11,
    ByteRTCZoomDirectionTypeZoomIn = 12,
    ByteRTCZoomDirectionTypeReset = 13
}
/** {en}
 * @brief A pitch reference segment: start time, duration (both ms) and pitch value.
 */
export declare class StandardPitchInfo {
    /**
     * @platform android
     */
    static android_create(startTime: number, duration: number, pitch: number): $p_a.StandardPitchInfo;
    constructor(startTime: number, duration: number, pitch: number);
    constructor();
    protected _instance: any;
    /** {en}
     * @brief Starting time, unit: ms. */
    get startTime(): number;
    set startTime(value: number);
    /** {en}
     * @brief Duration, unit: ms. */
    get duration(): number;
    set duration(value: number);
    /** {en}
     * @brief pitch. 
*/
get pitch(): number;
set pitch(value: number);
protected __init(...args: any[]): void;
protected __new_instance(...args: any[]): any;
}
/** {en}
 * @brief Where mixed audio is applied: local playout, publish, or both.
 */
export declare enum AudioMixingType {
    AUDIO_MIXING_TYPE_PLAYOUT = 0,
    AUDIO_MIXING_TYPE_PUBLISH = 1,
    AUDIO_MIXING_TYPE_PLAYOUT_AND_PUBLISH = 2
}
/** {en}
 * @brief Volume attenuation curves: none, linear, or exponential.
 */
export declare enum AttenuationType {
    ATTENUATION_TYPE_NONE = 0,
    ATTENUATION_TYPE_LINEAR = 1,
    ATTENUATION_TYPE_EXPONENTIAL = 2
}
/** {en}
 * @brief Stream type fed by a custom media-player source: raw or encoded.
 */
export declare enum MediaPlayerCustomSourceStreamType {
    RAW = 0,
    ENCODED = 1
}
/** {en}
 * @brief Alpha-channel layout options.
 */
export declare enum AlphaLayout {
    TOP = 0
}
/** {en}
 * @brief Full configuration for pushing a mixed (transcoded) stream to CDN:
 * iOS-style properties plus Android-style chainable setters/getters.
 */
export declare class MixedStreamConfig {
    /** {en}
     * @type api
     * @brief Gets the default configurations for live transcoding.
     * @return The default configurations for live transcoding. See {@link MixedStreamConfig MixedStreamConfig}.
     * */
    static defaultMixedStreamConfig(): MixedStreamConfig;
    protected _instance: any;
    /** {en}
     * @platform ios
     * @brief Stream mixing type. See {@link ByteRTCMixedStreamType ByteRTCMixedStreamType}. It's recommended to be set. */
    get ios_expectedMixingType(): $p_i.ByteRTCMixedStreamType;
    set ios_expectedMixingType(value: $p_i.ByteRTCMixedStreamType);
    /** {en}
     * @platform ios
     * @brief The overall video layout of the mixed stream.
     * After starting to push streams to CDN, you can set the layout of each video stream, the background information of the mixed stream, etc. See {@link ByteRTCMixedStreamLayoutConfig ByteRTCMixedStreamLayoutConfig} for details. */
    get ios_layoutConfig(): $p_i.ByteRTCMixedStreamLayoutConfig;
    set ios_layoutConfig(value: $p_i.ByteRTCMixedStreamLayoutConfig);
    /** {en}
     * @platform ios
     * @brief Video transcoding related configurations. See {@link ByteRTCMixedStreamVideoConfig ByteRTCMixedStreamVideoConfig}. It's recommended to be set. */
    get ios_videoConfig(): $p_i.ByteRTCMixedStreamVideoConfig;
    set ios_videoConfig(value: $p_i.ByteRTCMixedStreamVideoConfig);
    /** {en}
     * @platform ios
     * @brief Audio mixed related configurations. See {@link ByteRTCMixedStreamAudioConfig ByteRTCMixedStreamAudioConfig} for data types. It's recommended to be set. */
    get ios_audioConfig(): $p_i.ByteRTCMixedStreamAudioConfig;
    set ios_audioConfig(value: $p_i.ByteRTCMixedStreamAudioConfig);
    /** {en}
     * @platform ios
     * @brief client mix param. See {@link ByteRTCMixedStreamClientMixConfig ByteRTCMixedStreamClientMixConfig} for detail. It's recommended to be set. */
    get ios_clientMixConfig(): $p_i.ByteRTCMixedStreamClientMixConfig;
    set ios_clientMixConfig(value: $p_i.ByteRTCMixedStreamClientMixConfig);
    /** {en}
     * @platform ios
     * @hidden(macOS)
     * @brief The spatial audio config when pushing to CDN. See {@link ByteRTCMixedStreamSpatialAudioConfig ByteRTCMixedStreamSpatialAudioConfig} for detail. */
    get ios_spatialAudioConfig(): $p_i.ByteRTCMixedStreamSpatialAudioConfig;
    set ios_spatialAudioConfig(value: $p_i.ByteRTCMixedStreamSpatialAudioConfig);
    /** {en}
     * @platform ios
     * @brief Server mix Control config. See {@link ByteRTCMixedStreamServerControlConfig ByteRTCMixedStreamServerControlConfig} for detail. */
    get ios_serverControlConfig(): $p_i.ByteRTCMixedStreamServerControlConfig;
    set ios_serverControlConfig(value: $p_i.ByteRTCMixedStreamServerControlConfig);
    /** {en}
     * @platform ios
     * @brief The URL for live transcoding. Only supports live transcoding via RTMP. The URL should match the regular expression `/^rtmps?:\\/\\//`. It's recommended to be set.
     * This parameter cannot be updated while pushing stream to the CDN. */
    get ios_pushURL(): $p_i.NSString;
    set ios_pushURL(value: $p_i.NSString);
    /** {en}
     * @platform ios
     * @brief The room ID for live transcoding. The sum length of `roomID` and `userID` should not exceed 126 bytes. It's recommended to be set.
     * This parameter cannot be updated while pushing stream to the CDN. */
    get ios_roomID(): $p_i.NSString;
    set ios_roomID(value: $p_i.NSString);
    /** {en}
     * @platform ios
     * @brief The user ID for live transcoding. The sum length of `roomID` and `userID` should not exceed 126 bytes. It's recommended to be set.
     * This parameter cannot be updated while pushing stream to the CDN. */
    get ios_userID(): $p_i.NSString;
    set ios_userID(value: $p_i.NSString);
    protected __init(...args: any[]): void;
    protected __new_instance(...args: any[]): any;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets the user ID of the mixed stream pusher. This parameter cannot be updated while pushing stream to the CDN. You are recommended to set the value.
     * @param userID User ID of the mixed stream pusher. The sum length of `roomId` and `userId` should not exceed 126 bytes.
     * */
    android_setUserID(userID: string): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets the user ID of the mixed stream pusher.
     * */
    android_getUserID(): string;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets the room ID where the mixed stream pusher is in. This parameter cannot be updated while pushing stream to the CDN. You are recommended to set the value.
     * @param roomID The room ID. The sum length of `roomId` and `userId` should not exceed 126 bytes.
     * */
    android_setRoomID(roomID: string): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets the room ID where the mixed stream pusher is in.
     * */
    android_getRoomID(): string;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets the URL for the mixed stream to be pushed to. Only supports live transcoding via RTMP. The URL should match the regular expression `/^rtmps?:\\/\\//`. You are recommended to set the value.
     * This parameter cannot be updated while pushing stream to the CDN.
     * @param pushURL URL for the mixed stream to be pushed to.
     * */
    android_setPushURL(pushURL: string): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets the URL for the mixed stream to be pushed to
     * */
    android_getPushURL(): string;
    /** {en}
     * @platform android
     * @detail api
     * @region Push to CDN
     * @brief Sets the stream mixing type. You are recommended to set the value.
     * @param expectedMixingType Stream mixing type. See {@link ByteRTCStreamMixingType ByteRTCStreamMixingType}.
     * @note This parameter cannot be updated while pushing stream to the CDN.
     * */
    android_setExpectedMixingType(expectedMixingType: $p_a.ByteRTCStreamMixingType): this;
    /** {en}
     * @platform android
     * @detail api
     * @region Push to CDN
     * @brief Gets the stream mixing type.
     * */
    android_getExpectedMixingType(): $p_a.ByteRTCStreamMixingType;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets audio transcoding configurations. You are recommended to set the value.
     * @param audioConfig The audio configurations to be set. See {@link MixedStreamAudioConfig MixedStreamAudioConfig}.
     * */
    android_setAudioConfig(audioConfig: $p_a.MixedStreamAudioConfig): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets audio transcoding configurations.
     * */
    android_getAudioConfig(): $p_a.MixedStreamAudioConfig;
    /** {en}
     * @platform android
     * @valid since 3.56
     * @detail api
     * @brief Sets the configurations during mixing streams on the server side.
     * @param serverControlConfig The configurations to be set. See {@link MixedStreamServerControlConfig MixedStreamServerControlConfig}.
     * */
    android_setServerControlConfig(serverControlConfig: $p_a.MixedStreamServerControlConfig): this;
    /** {en}
     * @platform android
     * @valid since 3.56
     * @detail api
     * @brief Gets the configurations during mixing streams on the server side.
     * */
    android_getServerControlConfig(): $p_a.MixedStreamServerControlConfig;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets the video transcoding configurations. You are recommended to set the value.
     * @param videoConfig The configurations to be set. See {@link MixedStreamVideoConfig MixedStreamVideoConfig}.
     * */
    android_setVideoConfig(videoConfig: $p_a.MixedStreamVideoConfig): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets the video transcoding configurations.
     * */
    android_getVideoConfig(): $p_a.MixedStreamVideoConfig;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets client mix configurations. You are recommended to set the value.
     * @param clientMixConfig The configurations to be set. See {@link MixedStreamClientMixConfig MixedStreamClientMixConfig}.
     * */
    android_setClientMixConfig(clientMixConfig: $p_a.MixedStreamClientMixConfig): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets the spatial audio configuration when pushing streams to CDN.
     * @param spatialConfig The configurations to be set. See {@link MixedStreamSpatialConfig MixedStreamSpatialConfig}.
     * */
    android_setSpatialConfig(spatialConfig: $p_a.MixedStreamSpatialConfig): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets the spatial audio configuration when pushing streams to CDN.
     * */
    android_getSpatialConfig(): $p_a.MixedStreamSpatialConfig;
    /** {en}
     * @platform android
     * @detail api
     * @brief Sets the overall video layout of the mixed stream. You are recommended to set the value.
     * @param layout The layout to be set. See {@link MixedStreamLayoutConfig MixedStreamLayoutConfig}.
     * */
    android_setLayout(layout: $p_a.MixedStreamLayoutConfig): this;
    /** {en}
     * @platform android
     * @detail api
     * @brief Gets the overall video layout of the mixed stream. 
* */ android_getLayout(): $p_a.MixedStreamLayoutConfig; /** * @platform android */ /** * @platform android */ android_getMixedStreamVideoConfigBFrame(): boolean; } export declare enum SEICountPerFrame { SEI_COUNT_PER_FRAME_SINGLE = 0, SEI_COUNT_PER_FRAME_MULTI = 1 } export declare class RTCUser { constructor(); constructor(); protected _instance: any; /** {en} * @brief User id * */ get userId(): string; set userId(value: string); /** {en} * @brief Metadata * */ get metaData(): string; set metaData(value: string); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare enum SEIStreamUpdateEventType { STREAM_ADD = 0, STREAM_REMOVE = 1, ByteRTCSEIStreamEventTypeStreamAdd = 2, ByteRTCSEIStreamEventTypeStreamRemove = 3 } export declare class RemoteStreamSwitchEvent { constructor(); constructor(); protected _instance: any; /** {en} * @brief User ID of the publisher of the subscribed media stream * */ get uid(): string; set uid(value: string); /** {en} * @brief Whether it is a screen-sharing stream * */ get isScreen(): boolean; set isScreen(value: boolean); /** {en} * @brief The quality index of the subscribed stream before the stream switching * */ get beforeVideoIndex(): number; set beforeVideoIndex(value: number); /** {en} * @brief The quality index of the subscribed stream after the stream switching * */ get afterVideoIndex(): number; set afterVideoIndex(value: number); /** {en} * @platform android * @brief Whether a video stream before the stream switching * */ get android_beforeEnable(): boolean; set android_beforeEnable(value: boolean); /** {en} * @platform android * @brief Whether a video stream after the stream switching * */ get android_afterEnable(): boolean; set android_afterEnable(value: boolean); /** {en} * @brief Refer to {@link FallbackOrRecoverReason FallbackOrRecoverReason} for the reason of the Fallback or reverting from the Fallback of the subscribed stream. 
* */ get reason(): FallbackOrRecoverReason; set reason(value: FallbackOrRecoverReason); /** {en} * @platform ios * @brief Whether a video stream before the stream switching */ get ios_beforeVideoEnabled(): $p_i.BOOL; set ios_beforeVideoEnabled(value: $p_i.BOOL); /** {en} * @platform ios * @brief Whether a video stream after the stream switching */ get ios_afterVideoEnabled(): $p_i.BOOL; set ios_afterVideoEnabled(value: $p_i.BOOL); protected __init(...args: any[]): void; protected __new_instance(...args: any[]): any; } export declare class t_AudioTrackType { static ts_to_android(value: AudioTrackType): $p_a.AudioTrackType; static android_to_ts(value: $p_a.AudioTrackType): AudioTrackType; static ts_to_ios(value: AudioTrackType): $p_i.ByteRTCAudioTrackType; static ios_to_ts(value: $p_i.ByteRTCAudioTrackType): AudioTrackType; } export declare class t_AudioPlayType { static ts_to_android(value: AudioPlayType): $p_a.AudioPlayType; static android_to_ts(value: $p_a.AudioPlayType): AudioPlayType; static ts_to_ios(value: AudioPlayType): $p_i.ByteRTCAudioPlayType; static ios_to_ts(value: $p_i.ByteRTCAudioPlayType): AudioPlayType; } export declare class t_AudioScenarioType { static ts_to_android(value: AudioScenarioType): $p_a.AudioScenarioType; static android_to_ts(value: $p_a.AudioScenarioType): AudioScenarioType; static ts_to_ios(value: AudioScenarioType): $p_i.ByteRTCAudioScenarioType; static ios_to_ts(value: $p_i.ByteRTCAudioScenarioType): AudioScenarioType; } export declare class t_AudioProfileType { static ts_to_android(value: AudioProfileType): $p_a.AudioProfileType; static android_to_ts(value: $p_a.AudioProfileType): AudioProfileType; static ts_to_ios(value: AudioProfileType): $p_i.ByteRTCAudioProfileType; static ios_to_ts(value: $p_i.ByteRTCAudioProfileType): AudioProfileType; } export declare class t_AnsMode { static ts_to_android(value: AnsMode): $p_a.AnsMode; static android_to_ts(value: $p_a.AnsMode): AnsMode; static ts_to_ios(value: AnsMode): $p_i.ByteRTCAnsMode; 
static ios_to_ts(value: $p_i.ByteRTCAnsMode): AnsMode;
}
/*
 * Conversion helpers. Each generated `t_<Name>` class maps a cross-platform
 * TS enum/type to its Android ($p_a.*) and iOS ($p_i.*) native counterparts,
 * with the four static methods ts_to_android / android_to_ts / ts_to_ios /
 * ios_to_ts. They carry no instance state.
 */
export declare class t_VoiceChangerType {
    static ts_to_android(value: VoiceChangerType): $p_a.VoiceChangerType;
    static android_to_ts(value: $p_a.VoiceChangerType): VoiceChangerType;
    static ts_to_ios(value: VoiceChangerType): $p_i.ByteRTCVoiceChangerType;
    static ios_to_ts(value: $p_i.ByteRTCVoiceChangerType): VoiceChangerType;
}
export declare class t_VoiceReverbType {
    static ts_to_android(value: VoiceReverbType): $p_a.VoiceReverbType;
    static android_to_ts(value: $p_a.VoiceReverbType): VoiceReverbType;
    static ts_to_ios(value: VoiceReverbType): $p_i.ByteRTCVoiceReverbType;
    static ios_to_ts(value: $p_i.ByteRTCVoiceReverbType): VoiceReverbType;
}
export declare class t_StreamIndex {
    static ts_to_android(value: StreamIndex): $p_a.StreamIndex;
    static android_to_ts(value: $p_a.StreamIndex): StreamIndex;
    static ts_to_ios(value: StreamIndex): $p_i.ByteRTCStreamIndex;
    static ios_to_ts(value: $p_i.ByteRTCStreamIndex): StreamIndex;
}
export declare class t_RenderMode {
    static ts_to_android(value: RenderMode): $p_a.RenderMode;
    static android_to_ts(value: $p_a.RenderMode): RenderMode;
    static ts_to_ios(value: RenderMode): $p_i.ByteRTCRenderMode;
    static ios_to_ts(value: $p_i.ByteRTCRenderMode): RenderMode;
}
export declare class t_VideoSuperResolutionMode {
    static ts_to_android(value: VideoSuperResolutionMode): $p_a.VideoSuperResolutionMode;
    static android_to_ts(value: $p_a.VideoSuperResolutionMode): VideoSuperResolutionMode;
    static ts_to_ios(value: VideoSuperResolutionMode): $p_i.ByteRTCVideoSuperResolutionMode;
    static ios_to_ts(value: $p_i.ByteRTCVideoSuperResolutionMode): VideoSuperResolutionMode;
}
export declare class t_MirrorType {
    static ts_to_android(value: MirrorType): $p_a.MirrorType;
    static android_to_ts(value: $p_a.MirrorType): MirrorType;
    static ts_to_ios(value: MirrorType): $p_i.ByteRTCMirrorType;
    static ios_to_ts(value: $p_i.ByteRTCMirrorType): MirrorType;
}
export declare class t_RemoteMirrorType {
    static ts_to_android(value: RemoteMirrorType): $p_a.RemoteMirrorType;
    static android_to_ts(value: $p_a.RemoteMirrorType): RemoteMirrorType;
    static ts_to_ios(value: RemoteMirrorType): $p_i.ByteRTCRemoteMirrorType;
    static ios_to_ts(value: $p_i.ByteRTCRemoteMirrorType): RemoteMirrorType;
}
export declare class t_VideoRotationMode {
    static ts_to_android(value: VideoRotationMode): $p_a.VideoRotationMode;
    static android_to_ts(value: $p_a.VideoRotationMode): VideoRotationMode;
    static ts_to_ios(value: VideoRotationMode): $p_i.ByteRTCVideoRotationMode;
    static ios_to_ts(value: $p_i.ByteRTCVideoRotationMode): VideoRotationMode;
}
export declare class t_CameraId {
    static ts_to_android(value: CameraId): $p_a.CameraId;
    static android_to_ts(value: $p_a.CameraId): CameraId;
    static ts_to_ios(value: CameraId): $p_i.ByteRTCCameraID;
    static ios_to_ts(value: $p_i.ByteRTCCameraID): CameraId;
}
export declare class t_AudioRoute {
    static ts_to_android(value: AudioRoute): $p_a.AudioRoute;
    static android_to_ts(value: $p_a.AudioRoute): AudioRoute;
    static ts_to_ios(value: AudioRoute): $p_i.ByteRTCAudioRoute;
    static ios_to_ts(value: $p_i.ByteRTCAudioRoute): AudioRoute;
}
export declare class t_MessageConfig {
    static ts_to_android(value: MessageConfig): $p_a.MessageConfig;
    static android_to_ts(value: $p_a.MessageConfig): MessageConfig;
    static ts_to_ios(value: MessageConfig): $p_i.ByteRTCMessageConfig;
    static ios_to_ts(value: $p_i.ByteRTCMessageConfig): MessageConfig;
}
export declare class t_AudioFrameCallbackMethod {
    static ts_to_android(value: AudioFrameCallbackMethod): $p_a.AudioFrameCallbackMethod;
    static android_to_ts(value: $p_a.AudioFrameCallbackMethod): AudioFrameCallbackMethod;
    static ts_to_ios(value: AudioFrameCallbackMethod): $p_i.ByteRTCAudioFrameCallbackMethod;
    static ios_to_ts(value: $p_i.ByteRTCAudioFrameCallbackMethod): AudioFrameCallbackMethod;
}
export declare class t_ZoomConfigType {
    static ts_to_android(value: ZoomConfigType): $p_a.ZoomConfigType;
    static android_to_ts(value: $p_a.ZoomConfigType): ZoomConfigType;
    static ts_to_ios(value: ZoomConfigType): $p_i.ByteRTCZoomConfigType;
    static ios_to_ts(value: $p_i.ByteRTCZoomConfigType): ZoomConfigType;
}
export declare class t_ZoomDirectionType {
    static ts_to_android(value: ZoomDirectionType): $p_a.ZoomDirectionType;
    static android_to_ts(value: $p_a.ZoomDirectionType): ZoomDirectionType;
    static ts_to_ios(value: ZoomDirectionType): $p_i.ByteRTCZoomDirectionType;
    static ios_to_ts(value: $p_i.ByteRTCZoomDirectionType): ZoomDirectionType;
}
export declare class t_VideoSourceType {
    static ts_to_android(value: VideoSourceType): $p_a.VideoSourceType;
    static android_to_ts(value: $p_a.VideoSourceType): VideoSourceType;
    static ts_to_ios(value: VideoSourceType): $p_i.ByteRTCVideoSourceType;
    static ios_to_ts(value: $p_i.ByteRTCVideoSourceType): VideoSourceType;
}
export declare class t_AlphaLayout {
    static ts_to_android(value: AlphaLayout): $p_a.AlphaLayout;
    static android_to_ts(value: $p_a.AlphaLayout): AlphaLayout;
    static ts_to_ios(value: AlphaLayout): $p_i.ByteRTCAlphaLayout;
    static ios_to_ts(value: $p_i.ByteRTCAlphaLayout): AlphaLayout;
}
export declare class t_AudioSourceType {
    static ts_to_android(value: AudioSourceType): $p_a.AudioSourceType;
    static android_to_ts(value: $p_a.AudioSourceType): AudioSourceType;
    static ts_to_ios(value: AudioSourceType): $p_i.ByteRTCAudioSourceType;
    static ios_to_ts(value: $p_i.ByteRTCAudioSourceType): AudioSourceType;
}
export declare class t_AudioRenderType {
    static ts_to_android(value: AudioRenderType): $p_a.AudioRenderType;
    static android_to_ts(value: $p_a.AudioRenderType): AudioRenderType;
    static ts_to_ios(value: AudioRenderType): $p_i.ByteRTCAudioRenderType;
    static ios_to_ts(value: $p_i.ByteRTCAudioRenderType): AudioRenderType;
}
export declare class t_PublishFallbackOption {
    static ts_to_android(value: PublishFallbackOption): $p_a.PublishFallbackOption;
    static android_to_ts(value: $p_a.PublishFallbackOption): PublishFallbackOption;
    static ts_to_ios(value: PublishFallbackOption): $p_i.ByteRTCPublishFallbackOption;
    static ios_to_ts(value: $p_i.ByteRTCPublishFallbackOption): PublishFallbackOption;
}
export declare class t_RemoteUserPriority {
    static ts_to_android(value: RemoteUserPriority): $p_a.RemoteUserPriority;
    static android_to_ts(value: $p_a.RemoteUserPriority): RemoteUserPriority;
    static ts_to_ios(value: RemoteUserPriority): $p_i.ByteRTCRemoteUserPriority;
    static ios_to_ts(value: $p_i.ByteRTCRemoteUserPriority): RemoteUserPriority;
}
export declare class t_SEICountPerFrame {
    static ts_to_android(value: SEICountPerFrame): $p_a.SEICountPerFrame;
    static android_to_ts(value: $p_a.SEICountPerFrame): SEICountPerFrame;
    static ts_to_ios(value: SEICountPerFrame): $p_i.ByteRTCSEICountPerFrame;
    static ios_to_ts(value: $p_i.ByteRTCSEICountPerFrame): SEICountPerFrame;
}
export declare class t_EffectBeautyMode {
    static ts_to_android(value: EffectBeautyMode): $p_a.EffectBeautyMode;
    static android_to_ts(value: $p_a.EffectBeautyMode): EffectBeautyMode;
    static ts_to_ios(value: EffectBeautyMode): $p_i.ByteRTCEffectBeautyMode;
    static ios_to_ts(value: $p_i.ByteRTCEffectBeautyMode): EffectBeautyMode;
}
export declare class t_VideoOrientation {
    static ts_to_android(value: VideoOrientation): $p_a.VideoOrientation;
    static android_to_ts(value: $p_a.VideoOrientation): VideoOrientation;
    static ts_to_ios(value: VideoOrientation): $p_i.ByteRTCVideoOrientation;
    static ios_to_ts(value: $p_i.ByteRTCVideoOrientation): VideoOrientation;
}
export declare class t_RecordingType {
    static ts_to_android(value: RecordingType): $p_a.RecordingType;
    static android_to_ts(value: $p_a.RecordingType): RecordingType;
    static ts_to_ios(value: RecordingType): $p_i.ByteRTCRecordingType;
    static ios_to_ts(value: $p_i.ByteRTCRecordingType): RecordingType;
}
export declare class t_ScreenMediaType {
    static ts_to_android(value: ScreenMediaType): $p_a.ScreenMediaType;
    static android_to_ts(value: $p_a.ScreenMediaType): ScreenMediaType;
    static ts_to_ios(value: ScreenMediaType): $p_i.ByteRTCScreenMediaType;
    static ios_to_ts(value: $p_i.ByteRTCScreenMediaType): ScreenMediaType;
}
export declare class t_VideoDecoderConfig {
    static ts_to_android(value: VideoDecoderConfig): $p_a.VideoDecoderConfig;
    static android_to_ts(value: $p_a.VideoDecoderConfig): VideoDecoderConfig;
    static ts_to_ios(value: VideoDecoderConfig): $p_i.ByteRTCVideoDecoderConfig;
    static ios_to_ts(value: $p_i.ByteRTCVideoDecoderConfig): VideoDecoderConfig;
}
export declare class t_EarMonitorMode {
    static ts_to_android(value: EarMonitorMode): $p_a.EarMonitorMode;
    static android_to_ts(value: $p_a.EarMonitorMode): EarMonitorMode;
    static ts_to_ios(value: EarMonitorMode): $p_i.ByteRTCEarMonitorMode;
    static ios_to_ts(value: $p_i.ByteRTCEarMonitorMode): EarMonitorMode;
}
export declare class t_TorchState {
    static ts_to_android(value: TorchState): $p_a.TorchState;
    static android_to_ts(value: $p_a.TorchState): TorchState;
    static ts_to_ios(value: TorchState): $p_i.ByteRTCTorchState;
    static ios_to_ts(value: $p_i.ByteRTCTorchState): TorchState;
}
export declare class t_VideoRotation {
    static ts_to_android(value: VideoRotation): $p_a.VideoRotation;
    static android_to_ts(value: $p_a.VideoRotation): VideoRotation;
    static ts_to_ios(value: VideoRotation): $p_i.ByteRTCVideoRotation;
    static ios_to_ts(value: $p_i.ByteRTCVideoRotation): VideoRotation;
}
export declare class t_MediaStreamType {
    static ts_to_android(value: MediaStreamType): $p_a.MediaStreamType;
    static android_to_ts(value: $p_a.MediaStreamType): MediaStreamType;
    static ts_to_ios(value: MediaStreamType): $p_i.ByteRTCMediaStreamType;
    static ios_to_ts(value: $p_i.ByteRTCMediaStreamType): MediaStreamType;
}
/* NOTE(review): the iOS-side name below reads "PauseResum…" (missing "e");
 * that is the native SDK's spelling and must not be "corrected" here. */
export declare class t_PauseResumeControlMediaType {
    static ts_to_android(value: PauseResumeControlMediaType): $p_a.PauseResumeControlMediaType;
    static android_to_ts(value: $p_a.PauseResumeControlMediaType): PauseResumeControlMediaType;
    static ts_to_ios(value: PauseResumeControlMediaType): $p_i.ByteRTCPauseResumControlMediaType;
    static ios_to_ts(value: $p_i.ByteRTCPauseResumControlMediaType): PauseResumeControlMediaType;
}
export declare class t_AudioSelectionPriority {
    static ts_to_android(value: AudioSelectionPriority): $p_a.AudioSelectionPriority;
    static android_to_ts(value: $p_a.AudioSelectionPriority): AudioSelectionPriority;
    static ts_to_ios(value: AudioSelectionPriority): $p_i.ByteRTCAudioSelectionPriority;
    static ios_to_ts(value: $p_i.ByteRTCAudioSelectionPriority): AudioSelectionPriority;
}
export declare class t_DownloadLyricType {
    static ts_to_android(value: DownloadLyricType): $p_a.DownloadLyricType;
    static android_to_ts(value: $p_a.DownloadLyricType): DownloadLyricType;
    static ts_to_ios(value: DownloadLyricType): $p_i.ByteRTCDownloadLyricType;
    static ios_to_ts(value: $p_i.ByteRTCDownloadLyricType): DownloadLyricType;
}
export declare class t_AttenuationType {
    static ts_to_android(value: AttenuationType): $p_a.AttenuationType;
    static android_to_ts(value: $p_a.AttenuationType): AttenuationType;
    static ts_to_ios(value: AttenuationType): $p_i.ByteRTCAttenuationType;
    static ios_to_ts(value: $p_i.ByteRTCAttenuationType): AttenuationType;
}
export declare class t_AudioMixingType {
    static ts_to_android(value: AudioMixingType): $p_a.AudioMixingType;
    static android_to_ts(value: $p_a.AudioMixingType): AudioMixingType;
    static ts_to_ios(value: AudioMixingType): $p_i.ByteRTCAudioMixingType;
    static ios_to_ts(value: $p_i.ByteRTCAudioMixingType): AudioMixingType;
}
export declare class t_AudioMixingDualMonoMode {
    static ts_to_android(value: AudioMixingDualMonoMode): $p_a.AudioMixingDualMonoMode;
    static android_to_ts(value: $p_a.AudioMixingDualMonoMode): AudioMixingDualMonoMode;
    static ts_to_ios(value: AudioMixingDualMonoMode): $p_i.ByteRTCAudioMixingDualMonoMode;
    static ios_to_ts(value: $p_i.ByteRTCAudioMixingDualMonoMode): AudioMixingDualMonoMode;
}
/* Android side uses NetworkType (no RTC prefix) for this mapping. */
export declare class t_RTCNetworkType {
    static ts_to_android(value: RTCNetworkType): $p_a.NetworkType;
    static android_to_ts(value: $p_a.NetworkType): RTCNetworkType;
    static ts_to_ios(value: RTCNetworkType): $p_i.ByteRTCNetworkType;
    static ios_to_ts(value: $p_i.ByteRTCNetworkType): RTCNetworkType;
}
export declare class t_SubscribeStateChangeReason {
    static ts_to_android(value: SubscribeStateChangeReason): $p_a.SubscribeStateChangeReason;
    static android_to_ts(value: $p_a.SubscribeStateChangeReason): SubscribeStateChangeReason;
    static ts_to_ios(value: SubscribeStateChangeReason): $p_i.ByteRTCSubscribeStateChangeReason;
    static ios_to_ts(value: $p_i.ByteRTCSubscribeStateChangeReason): SubscribeStateChangeReason;
}
export declare class t_PublishStateChangeReason {
    static ts_to_android(value: PublishStateChangeReason): $p_a.PublishStateChangeReason;
    static android_to_ts(value: $p_a.PublishStateChangeReason): PublishStateChangeReason;
    static ts_to_ios(value: PublishStateChangeReason): $p_i.ByteRTCPublishStateChangeReason;
    static ios_to_ts(value: $p_i.ByteRTCPublishStateChangeReason): PublishStateChangeReason;
}
export declare class t_PublishState {
    static ts_to_android(value: PublishState): $p_a.PublishState;
    static android_to_ts(value: $p_a.PublishState): PublishState;
    static ts_to_ios(value: PublishState): $p_i.ByteRTCPublishState;
    static ios_to_ts(value: $p_i.ByteRTCPublishState): PublishState;
}
export declare class t_RTCUserOfflineReason {
    static ts_to_android(value: RTCUserOfflineReason): $p_a.UserOfflineReason;
    static android_to_ts(value: $p_a.UserOfflineReason): RTCUserOfflineReason;
    static ts_to_ios(value: RTCUserOfflineReason): $p_i.ByteRTCUserOfflineReason;
    static ios_to_ts(value: $p_i.ByteRTCUserOfflineReason): RTCUserOfflineReason;
}
export declare class t_RTCStreamSinglePushEvent {
    static ts_to_android(value: RTCStreamSinglePushEvent): $p_a.ByteRTCStreamSinglePushEvent;
    static android_to_ts(value: $p_a.ByteRTCStreamSinglePushEvent): RTCStreamSinglePushEvent;
    static ts_to_ios(value: RTCStreamSinglePushEvent): $p_i.ByteRTCSingleStreamPushEvent;
    static ios_to_ts(value: $p_i.ByteRTCSingleStreamPushEvent): RTCStreamSinglePushEvent;
}
/* Android side uses ByteRTCTranscoderErrorCode for this mapping. */
export declare class t_RTCStreamMixingErrorCode {
    static ts_to_android(value: RTCStreamMixingErrorCode): $p_a.ByteRTCTranscoderErrorCode;
    static android_to_ts(value: $p_a.ByteRTCTranscoderErrorCode): RTCStreamMixingErrorCode;
    static ts_to_ios(value: RTCStreamMixingErrorCode): $p_i.ByteRTCStreamMixingErrorCode;
    static ios_to_ts(value: $p_i.ByteRTCStreamMixingErrorCode): RTCStreamMixingErrorCode;
}
/* Android collapses both screen and camera encoder preferences into EncoderPreference. */
export declare class t_RTCScreenVideoEncoderPreference {
    static ts_to_android(value: RTCScreenVideoEncoderPreference): $p_a.EncoderPreference;
    static android_to_ts(value: $p_a.EncoderPreference): RTCScreenVideoEncoderPreference;
    static ts_to_ios(value: RTCScreenVideoEncoderPreference): $p_i.ByteRTCScreenVideoEncoderPreference;
    static ios_to_ts(value: $p_i.ByteRTCScreenVideoEncoderPreference): RTCScreenVideoEncoderPreference;
}
export declare class t_RTCVideoEncoderPreference {
    static ts_to_android(value: RTCVideoEncoderPreference): $p_a.EncoderPreference;
    static android_to_ts(value: $p_a.EncoderPreference): RTCVideoEncoderPreference;
    static ts_to_ios(value: RTCVideoEncoderPreference): $p_i.ByteRTCVideoEncoderPreference;
    static ios_to_ts(value: $p_i.ByteRTCVideoEncoderPreference): RTCVideoEncoderPreference;
}
export declare class t_MixedStreamAlternateImageFillMode {
    static ts_to_android(value: MixedStreamAlternateImageFillMode): $p_a.MixedStreamAlternateImageFillMode;
    static android_to_ts(value: $p_a.MixedStreamAlternateImageFillMode): MixedStreamAlternateImageFillMode;
    static ts_to_ios(value: MixedStreamAlternateImageFillMode): $p_i.ByteRTCMixedStreamAlternateImageFillMode;
    static ios_to_ts(value: $p_i.ByteRTCMixedStreamAlternateImageFillMode): MixedStreamAlternateImageFillMode;
}
export declare class t_MediaDeviceType {
    static ts_to_android(value: MediaDeviceType): $p_a.MediaDeviceType;
    static android_to_ts(value: $p_a.MediaDeviceType): MediaDeviceType;
    static ts_to_ios(value: 
MediaDeviceType): $p_i.ByteRTCMediaDeviceType; static ios_to_ts(value: $p_i.ByteRTCMediaDeviceType): MediaDeviceType; } export declare class t_ForwardStreamEvent { static ts_to_android(value: ForwardStreamEvent): $p_a.ForwardStreamEvent; static android_to_ts(value: $p_a.ForwardStreamEvent): ForwardStreamEvent; static ts_to_ios(value: ForwardStreamEvent): $p_i.ByteRTCForwardStreamEvent; static ios_to_ts(value: $p_i.ByteRTCForwardStreamEvent): ForwardStreamEvent; } export declare class t_SubscribeMediaType { static ts_to_android(value: SubscribeMediaType): $p_a.SubscribeMediaType; static android_to_ts(value: $p_a.SubscribeMediaType): SubscribeMediaType; static ts_to_ios(value: SubscribeMediaType): $p_i.ByteRTCSubscribeMediaType; static ios_to_ts(value: $p_i.ByteRTCSubscribeMediaType): SubscribeMediaType; } export declare class t_ConnectionState { static ts_to_android(value: ConnectionState): $p_a.ConnectionState; static android_to_ts(value: $p_a.ConnectionState): ConnectionState; static ts_to_ios(value: ConnectionState): $p_i.ByteRTCConnectionState; static ios_to_ts(value: $p_i.ByteRTCConnectionState): ConnectionState; } export declare class t_BandFrequency { static ts_to_android(value: BandFrequency): $p_a.VoiceEqualizationBandFrequency; static android_to_ts(value: $p_a.VoiceEqualizationBandFrequency): BandFrequency; static ts_to_ios(value: BandFrequency): $p_i.ByteRTCBandFrequency; static ios_to_ts(value: $p_i.ByteRTCBandFrequency): BandFrequency; } export declare class t_MixedStreamPushMode { static ts_to_android(value: MixedStreamPushMode): $p_a.MixedStreamPushMode; static android_to_ts(value: $p_a.MixedStreamPushMode): MixedStreamPushMode; static ts_to_ios(value: MixedStreamPushMode): $p_i.ByteRTCMixedStreamPushMode; static ios_to_ts(value: $p_i.ByteRTCMixedStreamPushMode): MixedStreamPushMode; } export declare class t_FirstFrameSendState { static ts_to_android(value: FirstFrameSendState): $p_a.FirstFrameSendState; static android_to_ts(value: 
$p_a.FirstFrameSendState): FirstFrameSendState; static ts_to_ios(value: FirstFrameSendState): $p_i.ByteRTCFirstFrameSendState; static ios_to_ts(value: $p_i.ByteRTCFirstFrameSendState): FirstFrameSendState; } export declare class t_MixedStreamMediaType { static ts_to_android(value: MixedStreamMediaType): $p_a.MixedStreamMediaType; static android_to_ts(value: $p_a.MixedStreamMediaType): MixedStreamMediaType; static ts_to_ios(value: MixedStreamMediaType): $p_i.ByteRTCMixedStreamMediaType; static ios_to_ts(value: $p_i.ByteRTCMixedStreamMediaType): MixedStreamMediaType; } export declare class t_SubtitleMode { static ts_to_android(value: SubtitleMode): $p_a.SubtitleMode; static android_to_ts(value: $p_a.SubtitleMode): SubtitleMode; static ts_to_ios(value: SubtitleMode): $p_i.ByteRTCSubtitleMode; static ios_to_ts(value: $p_i.ByteRTCSubtitleMode): SubtitleMode; } export declare class t_MusicHotType { static ts_to_android(value: MusicHotType): $p_a.MusicHotType; static android_to_ts(value: $p_a.MusicHotType): MusicHotType; static ts_to_ios(value: MusicHotType): $p_i.ByteRTCMusicHotType; static ios_to_ts(value: $p_i.ByteRTCMusicHotType): MusicHotType; } export declare class t_MixedStreamAudioCodecType { static ts_to_android(value: MixedStreamAudioCodecType): $p_a.MixedStreamAudioCodecType; static android_to_ts(value: $p_a.MixedStreamAudioCodecType): MixedStreamAudioCodecType; static ts_to_ios(value: MixedStreamAudioCodecType): $p_i.ByteRTCMixedStreamAudioCodecType; static ios_to_ts(value: $p_i.ByteRTCMixedStreamAudioCodecType): MixedStreamAudioCodecType; } export declare class t_LocalLogLevel { static ts_to_android(value: LocalLogLevel): $p_a.LocalLogLevel; static android_to_ts(value: $p_a.LocalLogLevel): LocalLogLevel; static ts_to_ios(value: LocalLogLevel): $p_i.ByteRTCLocalLogLevel; static ios_to_ts(value: $p_i.ByteRTCLocalLogLevel): LocalLogLevel; } export declare class t_ForwardStreamState { static ts_to_android(value: ForwardStreamState): $p_a.ForwardStreamState; 
static android_to_ts(value: $p_a.ForwardStreamState): ForwardStreamState; static ts_to_ios(value: ForwardStreamState): $p_i.ByteRTCForwardStreamState; static ios_to_ts(value: $p_i.ByteRTCForwardStreamState): ForwardStreamState; } export declare class t_VideoCodecType { static ts_to_android(value: VideoCodecType): $p_a.VideoConfig$VideoCodecType; static android_to_ts(value: $p_a.VideoConfig$VideoCodecType): VideoCodecType; static ts_to_ios(value: VideoCodecType): $p_i.ByteRTCVideoCodecType; static ios_to_ts(value: $p_i.ByteRTCVideoCodecType): VideoCodecType; } export declare class t_ASRAuthorizationType { static ts_to_android(value: ASRAuthorizationType): $p_a.ASRAuthorizationType; static android_to_ts(value: $p_a.ASRAuthorizationType): ASRAuthorizationType; static ts_to_ios(value: ASRAuthorizationType): $p_i.ByteRTCASRAuthorizationType; static ios_to_ts(value: $p_i.ByteRTCASRAuthorizationType): ASRAuthorizationType; } export declare class t_RTCASRErrorCode { static ts_to_android(value: RTCASRErrorCode): $p_a.RTCASRErrorCode; static android_to_ts(value: $p_a.RTCASRErrorCode): RTCASRErrorCode; static ts_to_ios(value: RTCASRErrorCode): $p_i.ByteRTCASRErrorCode; static ios_to_ts(value: $p_i.ByteRTCASRErrorCode): RTCASRErrorCode; } export declare class t_RecordingFileType { static ts_to_android(value: RecordingFileType): $p_a.RecordingFileType; static android_to_ts(value: $p_a.RecordingFileType): RecordingFileType; static ts_to_ios(value: RecordingFileType): $p_i.ByteRTCRecordingFileType; static ios_to_ts(value: $p_i.ByteRTCRecordingFileType): RecordingFileType; } export declare class t_ByteRTCStreamMixingEvent { static ts_to_android(value: ByteRTCStreamMixingEvent): $p_a.ByteRTCStreamMixingEvent; static android_to_ts(value: $p_a.ByteRTCStreamMixingEvent): ByteRTCStreamMixingEvent; static ts_to_ios(value: ByteRTCStreamMixingEvent): $p_i.ByteRTCStreamMixingEvent; static ios_to_ts(value: $p_i.ByteRTCStreamMixingEvent): ByteRTCStreamMixingEvent; } export declare class 
t_AudioAlignmentMode { static ts_to_android(value: AudioAlignmentMode): $p_a.AudioAlignmentMode; static android_to_ts(value: $p_a.AudioAlignmentMode): AudioAlignmentMode; static ts_to_ios(value: AudioAlignmentMode): $p_i.ByteRTCAudioAlignmentMode; static ios_to_ts(value: $p_i.ByteRTCAudioAlignmentMode): AudioAlignmentMode; } export declare class t_FallbackOrRecoverReason { static ts_to_android(value: FallbackOrRecoverReason): $p_a.FallbackOrRecoverReason; static android_to_ts(value: $p_a.FallbackOrRecoverReason): FallbackOrRecoverReason; static ts_to_ios(value: FallbackOrRecoverReason): $p_i.ByteRTCFallbackOrRecoverReason; static ios_to_ts(value: $p_i.ByteRTCFallbackOrRecoverReason): FallbackOrRecoverReason; } export declare class t_NetworkQuality { static ts_to_android(value: NetworkQuality): $p_a.NetworkQuality; static android_to_ts(value: $p_a.NetworkQuality): NetworkQuality; static ts_to_ios(value: NetworkQuality): $p_i.ByteRTCNetworkQuality; static ios_to_ts(value: $p_i.ByteRTCNetworkQuality): NetworkQuality; } export declare class t_MediaDeviceWarning { static ts_to_android(value: MediaDeviceWarning): $p_a.MediaDeviceWarning; static android_to_ts(value: $p_a.MediaDeviceWarning): MediaDeviceWarning; static ts_to_ios(value: MediaDeviceWarning): $p_i.ByteRTCMediaDeviceWarning; static ios_to_ts(value: $p_i.ByteRTCMediaDeviceWarning): MediaDeviceWarning; } export declare class t_RemoteVideoRenderPosition { static ts_to_android(value: RemoteVideoRenderPosition): $p_a.RemoteVideoRenderPosition; static android_to_ts(value: $p_a.RemoteVideoRenderPosition): RemoteVideoRenderPosition; static ts_to_ios(value: RemoteVideoRenderPosition): $p_i.ByteRTCRemoteVideoRenderPosition; static ios_to_ts(value: $p_i.ByteRTCRemoteVideoRenderPosition): RemoteVideoRenderPosition; } export declare class t_TranscoderLayoutRegionType { static ts_to_android(value: TranscoderLayoutRegionType): $p_a.TranscoderLayoutRegionType; static android_to_ts(value: $p_a.TranscoderLayoutRegionType): 
TranscoderLayoutRegionType; static ts_to_ios(value: TranscoderLayoutRegionType): $p_i.ByteRTCTranscoderLayoutRegionType; static ios_to_ts(value: $p_i.ByteRTCTranscoderLayoutRegionType): TranscoderLayoutRegionType; } export declare class t_LocalVideoStreamError { static ts_to_android(value: LocalVideoStreamError): $p_a.LocalVideoStreamError; static android_to_ts(value: $p_a.LocalVideoStreamError): LocalVideoStreamError; static ts_to_ios(value: LocalVideoStreamError): $p_i.ByteRTCLocalVideoStreamError; static ios_to_ts(value: $p_i.ByteRTCLocalVideoStreamError): LocalVideoStreamError; } export declare class t_UserVisibilityChangeError { static ts_to_android(value: UserVisibilityChangeError): $p_a.UserVisibilityChangeError; static android_to_ts(value: $p_a.UserVisibilityChangeError): UserVisibilityChangeError; static ts_to_ios(value: UserVisibilityChangeError): $p_i.ByteRTCUserVisibilityChangeError; static ios_to_ts(value: $p_i.ByteRTCUserVisibilityChangeError): UserVisibilityChangeError; } export declare class t_LocalAudioStreamError { static ts_to_android(value: LocalAudioStreamError): $p_a.LocalAudioStreamError; static android_to_ts(value: $p_a.LocalAudioStreamError): LocalAudioStreamError; static ts_to_ios(value: LocalAudioStreamError): $p_i.ByteRTCLocalAudioStreamError; static ios_to_ts(value: $p_i.ByteRTCLocalAudioStreamError): LocalAudioStreamError; } export declare class t_SyncInfoStreamType { static ts_to_android(value: SyncInfoStreamType): $p_a.SyncInfoStreamType; static android_to_ts(value: $p_a.SyncInfoStreamType): SyncInfoStreamType; static ts_to_ios(value: SyncInfoStreamType): $p_i.ByteRTCSyncInfoStreamType; static ios_to_ts(value: $p_i.ByteRTCSyncInfoStreamType): SyncInfoStreamType; } export declare class t_MixedStreamAudioProfile { static ts_to_android(value: MixedStreamAudioProfile): $p_a.MixedStreamAudioProfile; static android_to_ts(value: $p_a.MixedStreamAudioProfile): MixedStreamAudioProfile; static ts_to_ios(value: MixedStreamAudioProfile): 
$p_i.ByteRTCMixedStreamAudioProfile; static ios_to_ts(value: $p_i.ByteRTCMixedStreamAudioProfile): MixedStreamAudioProfile; } export declare class t_PerformanceAlarmMode { static ts_to_android(value: PerformanceAlarmMode): $p_a.PerformanceAlarmMode; static android_to_ts(value: $p_a.PerformanceAlarmMode): PerformanceAlarmMode; static ts_to_ios(value: PerformanceAlarmMode): $p_i.ByteRTCPerformanceAlarmMode; static ios_to_ts(value: $p_i.ByteRTCPerformanceAlarmMode): PerformanceAlarmMode; } export declare class t_RecordingErrorCode { static ts_to_android(value: RecordingErrorCode): $p_a.RecordingErrorCode; static android_to_ts(value: $p_a.RecordingErrorCode): RecordingErrorCode; static ts_to_ios(value: RecordingErrorCode): $p_i.ByteRTCRecordingErrorCode; static ios_to_ts(value: $p_i.ByteRTCRecordingErrorCode): RecordingErrorCode; } export declare class t_EchoTestResult { static ts_to_android(value: EchoTestResult): $p_a.EchoTestResult; static android_to_ts(value: $p_a.EchoTestResult): EchoTestResult; static ts_to_ios(value: EchoTestResult): $p_i.ByteRTCEchoTestResult; static ios_to_ts(value: $p_i.ByteRTCEchoTestResult): EchoTestResult; } export declare class t_SubscribeState { static ts_to_android(value: SubscribeState): $p_a.SubscribeState; static android_to_ts(value: $p_a.SubscribeState): SubscribeState; static ts_to_ios(value: SubscribeState): $p_i.ByteRTCSubscribeState; static ios_to_ts(value: $p_i.ByteRTCSubscribeState): SubscribeState; } export declare class t_VirtualBackgroundSourceType { static ts_to_android(value: VirtualBackgroundSourceType): $p_a.VirtualBackgroundSourceType; static android_to_ts(value: $p_a.VirtualBackgroundSourceType): VirtualBackgroundSourceType; static ts_to_ios(value: VirtualBackgroundSourceType): $p_i.ByteRTCVirtualBackgroundSourceType; static ios_to_ts(value: $p_i.ByteRTCVirtualBackgroundSourceType): VirtualBackgroundSourceType; } export declare class t_RemoteAudioStateChangeReason { static ts_to_android(value: 
RemoteAudioStateChangeReason): $p_a.RemoteAudioStateChangeReason; static android_to_ts(value: $p_a.RemoteAudioStateChangeReason): RemoteAudioStateChangeReason; static ts_to_ios(value: RemoteAudioStateChangeReason): $p_i.ByteRTCRemoteAudioStateChangeReason; static ios_to_ts(value: $p_i.ByteRTCRemoteAudioStateChangeReason): RemoteAudioStateChangeReason; } export declare class t_PlayerError { static ts_to_android(value: PlayerError): $p_a.PlayerError; static android_to_ts(value: $p_a.PlayerError): PlayerError; static ts_to_ios(value: PlayerError): $p_i.ByteRTCPlayerError; static ios_to_ts(value: $p_i.ByteRTCPlayerError): PlayerError; } export declare class t_ReturnStatus { static ts_to_android(value: ReturnStatus): $p_a.ReturnStatus; static android_to_ts(value: $p_a.ReturnStatus): ReturnStatus; static ts_to_ios(value: ReturnStatus): $p_i.ByteRTCReturnStatus; static ios_to_ts(value: $p_i.ByteRTCReturnStatus): ReturnStatus; } export declare class t_VideoPictureType { static ts_to_android(value: VideoPictureType): $p_a.VideoPictureType; static android_to_ts(value: $p_a.VideoPictureType): VideoPictureType; static ts_to_ios(value: VideoPictureType): $p_i.ByteRTCVideoPictureType; static ios_to_ts(value: $p_i.ByteRTCVideoPictureType): VideoPictureType; } export declare class t_ColorSpace { static ts_to_android(value: ColorSpace): $p_a.ColorSpace; static android_to_ts(value: $p_a.ColorSpace): ColorSpace; static ts_to_ios(value: ColorSpace): $p_i.ByteRTCColorSpace; static ios_to_ts(value: $p_i.ByteRTCColorSpace): ColorSpace; } export declare class t_RemoteVideoState { static ts_to_android(value: RemoteVideoState): $p_a.RemoteVideoState; static android_to_ts(value: $p_a.RemoteVideoState): RemoteVideoState; static ts_to_ios(value: RemoteVideoState): $p_i.ByteRTCRemoteVideoState; static ios_to_ts(value: $p_i.ByteRTCRemoteVideoState): RemoteVideoState; } export declare class t_HardwareEchoDetectionResult { static ts_to_android(value: HardwareEchoDetectionResult): 
$p_a.HardwareEchoDetectionResult; static android_to_ts(value: $p_a.HardwareEchoDetectionResult): HardwareEchoDetectionResult; static ts_to_ios(value: HardwareEchoDetectionResult): $p_i.ByteRTCHardwareEchoDetectionResult; static ios_to_ts(value: $p_i.ByteRTCHardwareEchoDetectionResult): HardwareEchoDetectionResult; } export declare class t_RecordingState { static ts_to_android(value: RecordingState): $p_a.RecordingState; static android_to_ts(value: $p_a.RecordingState): RecordingState; static ts_to_ios(value: RecordingState): $p_i.ByteRTCRecordingState; static ios_to_ts(value: $p_i.ByteRTCRecordingState): RecordingState; } export declare class t_NetworkDetectionLinkType { static ts_to_android(value: NetworkDetectionLinkType): $p_a.NetworkDetectionLinkType; static android_to_ts(value: $p_a.NetworkDetectionLinkType): NetworkDetectionLinkType; static ts_to_ios(value: NetworkDetectionLinkType): $p_i.ByteRTCNetworkDetectionLinkType; static ios_to_ts(value: $p_i.ByteRTCNetworkDetectionLinkType): NetworkDetectionLinkType; } export declare class t_VideoDenoiseModeChangedReason { static ts_to_android(value: VideoDenoiseModeChangedReason): $p_a.VideoDenoiseModeChangedReason; static android_to_ts(value: $p_a.VideoDenoiseModeChangedReason): VideoDenoiseModeChangedReason; static ts_to_ios(value: VideoDenoiseModeChangedReason): $p_i.ByteRTCVideoDenoiseModeChangedReason; static ios_to_ts(value: $p_i.ByteRTCVideoDenoiseModeChangedReason): VideoDenoiseModeChangedReason; } export declare class t_AudioDeviceType { static ts_to_android(value: AudioDeviceType): $p_a.AudioDeviceType; static android_to_ts(value: $p_a.AudioDeviceType): AudioDeviceType; static ts_to_ios(value: AudioDeviceType): $p_i.ByteRTCAudioDeviceType; static ios_to_ts(value: $p_i.ByteRTCAudioDeviceType): AudioDeviceType; } export declare class t_SetRoomExtraInfoResult { static ts_to_android(value: SetRoomExtraInfoResult): $p_a.SetRoomExtraInfoResult; static android_to_ts(value: $p_a.SetRoomExtraInfoResult): 
SetRoomExtraInfoResult; static ts_to_ios(value: SetRoomExtraInfoResult): $p_i.ByteRTCSetRoomExtraInfoResult; static ios_to_ts(value: $p_i.ByteRTCSetRoomExtraInfoResult): SetRoomExtraInfoResult; } export declare class t_LocalProxyState { static ts_to_android(value: LocalProxyState): $p_a.LocalProxyState; static android_to_ts(value: $p_a.LocalProxyState): LocalProxyState; static ts_to_ios(value: LocalProxyState): $p_i.ByteRTCLocalProxyState; static ios_to_ts(value: $p_i.ByteRTCLocalProxyState): LocalProxyState; } export declare class t_AVSyncState { static ts_to_android(value: AVSyncState): $p_a.AVSyncState; static android_to_ts(value: $p_a.AVSyncState): AVSyncState; static ts_to_ios(value: AVSyncState): $p_i.ByteRTCAVSyncState; static ios_to_ts(value: $p_i.ByteRTCAVSyncState): AVSyncState; } export declare class t_ChannelProfile { static ts_to_android(value: ChannelProfile): $p_a.ChannelProfile; static android_to_ts(value: $p_a.ChannelProfile): ChannelProfile; static ts_to_ios(value: ChannelProfile): $p_i.ByteRTCRoomProfile; static ios_to_ts(value: $p_i.ByteRTCRoomProfile): ChannelProfile; } export declare class t_MixedStreamClientMixVideoFormat { static ts_to_android(value: MixedStreamClientMixVideoFormat): $p_a.MixedStreamClientMixVideoFormat; static android_to_ts(value: $p_a.MixedStreamClientMixVideoFormat): MixedStreamClientMixVideoFormat; static ts_to_ios(value: MixedStreamClientMixVideoFormat): $p_i.ByteRTCMixedStreamClientMixVideoFormat; static ios_to_ts(value: $p_i.ByteRTCMixedStreamClientMixVideoFormat): MixedStreamClientMixVideoFormat; } export declare class t_PlayerState { static ts_to_android(value: PlayerState): $p_a.PlayerState; static android_to_ts(value: $p_a.PlayerState): PlayerState; static ts_to_ios(value: PlayerState): $p_i.ByteRTCPlayerState; static ios_to_ts(value: $p_i.ByteRTCPlayerState): PlayerState; } export declare class t_MixedStreamVideoType { static ts_to_android(value: MixedStreamVideoType): $p_a.MixedStreamVideoType; static 
android_to_ts(value: $p_a.MixedStreamVideoType): MixedStreamVideoType; static ts_to_ios(value: MixedStreamVideoType): $p_i.ByteRTCMixedStreamVideoType; static ios_to_ts(value: $p_i.ByteRTCMixedStreamVideoType): MixedStreamVideoType; } export declare class t_AudioPropertiesMode { static ts_to_android(value: AudioPropertiesMode): $p_a.AudioPropertiesMode; static android_to_ts(value: $p_a.AudioPropertiesMode): AudioPropertiesMode; static ts_to_ios(value: AudioPropertiesMode): $p_i.ByteRTCAudioPropertiesMode; static ios_to_ts(value: $p_i.ByteRTCAudioPropertiesMode): AudioPropertiesMode; } export declare class t_VideoContentType { static ts_to_android(value: VideoContentType): $p_a.VideoContentType; static android_to_ts(value: $p_a.VideoContentType): VideoContentType; static ts_to_ios(value: VideoContentType): $p_i.ByteRTCVideoContentType; static ios_to_ts(value: $p_i.ByteRTCVideoContentType): VideoContentType; } export declare class t_AudioSampleRate { static ts_to_android(value: AudioSampleRate): $p_a.AudioSampleRate; static android_to_ts(value: $p_a.AudioSampleRate): AudioSampleRate; static ts_to_ios(value: AudioSampleRate): $p_i.ByteRTCAudioSampleRate; static ios_to_ts(value: $p_i.ByteRTCAudioSampleRate): AudioSampleRate; } export declare class t_LyricStatus { static ts_to_android(value: LyricStatus): $p_a.LyricStatus; static android_to_ts(value: $p_a.LyricStatus): LyricStatus; static ts_to_ios(value: LyricStatus): $p_i.ByteRTCLyricStatus; static ios_to_ts(value: $p_i.ByteRTCLyricStatus): LyricStatus; } export declare class t_MixedStreamVideoCodecType { static ts_to_android(value: MixedStreamVideoCodecType): $p_a.MixedStreamVideoCodecType; static android_to_ts(value: $p_a.MixedStreamVideoCodecType): MixedStreamVideoCodecType; static ts_to_ios(value: MixedStreamVideoCodecType): $p_i.ByteRTCMixedStreamVideoCodecType; static ios_to_ts(value: $p_i.ByteRTCMixedStreamVideoCodecType): MixedStreamVideoCodecType; } export declare class t_MuteState { static 
ts_to_android(value: MuteState): $p_a.MuteState; static android_to_ts(value: $p_a.MuteState): MuteState; static ts_to_ios(value: MuteState): $p_i.ByteRTCMuteState; static ios_to_ts(value: $p_i.ByteRTCMuteState): MuteState; } export declare class t_ByteRTCStreamMixingType { static ts_to_android(value: ByteRTCStreamMixingType): $p_a.ByteRTCStreamMixingType; static android_to_ts(value: $p_a.ByteRTCStreamMixingType): ByteRTCStreamMixingType; static ts_to_ios(value: ByteRTCStreamMixingType): $p_i.ByteRTCStreamMixingType; static ios_to_ts(value: $p_i.ByteRTCStreamMixingType): ByteRTCStreamMixingType; } export declare class t_MediaDeviceError { static ts_to_android(value: MediaDeviceError): $p_a.MediaDeviceError; static android_to_ts(value: $p_a.MediaDeviceError): MediaDeviceError; static ts_to_ios(value: MediaDeviceError): $p_i.ByteRTCMediaDeviceError; static ios_to_ts(value: $p_i.ByteRTCMediaDeviceError): MediaDeviceError; } export declare class t_AudioFrameSource { static ts_to_android(value: AudioFrameSource): $p_a.AudioFrameSource; static android_to_ts(value: $p_a.AudioFrameSource): AudioFrameSource; static ts_to_ios(value: AudioFrameSource): $p_i.ByteRTCAudioFrameSource; static ios_to_ts(value: $p_i.ByteRTCAudioFrameSource): AudioFrameSource; } export declare class t_StreamRemoveReason { static ts_to_android(value: StreamRemoveReason): $p_a.StreamRemoveReason; static android_to_ts(value: $p_a.StreamRemoveReason): StreamRemoveReason; static ts_to_ios(value: StreamRemoveReason): $p_i.ByteRTCStreamRemoveReason; static ios_to_ts(value: $p_i.ByteRTCStreamRemoveReason): StreamRemoveReason; } export declare class t_ForwardStreamError { static ts_to_android(value: ForwardStreamError): $p_a.ForwardStreamError; static android_to_ts(value: $p_a.ForwardStreamError): ForwardStreamError; static ts_to_ios(value: ForwardStreamError): $p_i.ByteRTCForwardStreamError; static ios_to_ts(value: $p_i.ByteRTCForwardStreamError): ForwardStreamError; } export declare class 
t_NetworkDetectionStopReason { static ts_to_android(value: NetworkDetectionStopReason): $p_a.NetworkDetectionStopReason; static android_to_ts(value: $p_a.NetworkDetectionStopReason): NetworkDetectionStopReason; static ts_to_ios(value: NetworkDetectionStopReason): $p_i.ByteRTCNetworkDetectionStopReason; static ios_to_ts(value: $p_i.ByteRTCNetworkDetectionStopReason): NetworkDetectionStopReason; } export declare class t_VideoDenoiseMode { static ts_to_android(value: VideoDenoiseMode): $p_a.VideoDenoiseMode; static android_to_ts(value: $p_a.VideoDenoiseMode): VideoDenoiseMode; static ts_to_ios(value: VideoDenoiseMode): $p_i.ByteRTCVideoDenoiseMode; static ios_to_ts(value: $p_i.ByteRTCVideoDenoiseMode): VideoDenoiseMode; } export declare class t_AudioRecordingState { static ts_to_android(value: AudioRecordingState): $p_a.AudioRecordingState; static android_to_ts(value: $p_a.AudioRecordingState): AudioRecordingState; static ts_to_ios(value: AudioRecordingState): $p_i.ByteRTCAudioRecordingState; static ios_to_ts(value: $p_i.ByteRTCAudioRecordingState): AudioRecordingState; } export declare class t_MulDimSingScoringMode { static ts_to_android(value: MulDimSingScoringMode): $p_a.MulDimSingScoringMode; static android_to_ts(value: $p_a.MulDimSingScoringMode): MulDimSingScoringMode; static ts_to_ios(value: MulDimSingScoringMode): $p_i.ByteRTCMulDimSingScoringMode; static ios_to_ts(value: $p_i.ByteRTCMulDimSingScoringMode): MulDimSingScoringMode; } export declare class t_AudioPlaybackDevice { static ts_to_android(value: AudioPlaybackDevice): $p_a.AudioPlaybackDevice; static android_to_ts(value: $p_a.AudioPlaybackDevice): AudioPlaybackDevice; static ts_to_ios(value: AudioPlaybackDevice): $p_i.ByteRTCAudioPlaybackDevice; static ios_to_ts(value: $p_i.ByteRTCAudioPlaybackDevice): AudioPlaybackDevice; } export declare class t_VideoPixelFormat { static ts_to_android(value: VideoPixelFormat): $p_a.VideoPixelFormat; static android_to_ts(value: $p_a.VideoPixelFormat): VideoPixelFormat; 
static ts_to_ios(value: VideoPixelFormat): $p_i.ByteRTCVideoPixelFormat; static ios_to_ts(value: $p_i.ByteRTCVideoPixelFormat): VideoPixelFormat; } export declare class t_AudioChannel { static ts_to_android(value: AudioChannel): $p_a.AudioChannel; static android_to_ts(value: $p_a.AudioChannel): AudioChannel; static ts_to_ios(value: AudioChannel): $p_i.ByteRTCAudioChannel; static ios_to_ts(value: $p_i.ByteRTCAudioChannel): AudioChannel; } export declare class t_DownloadFileType { static ts_to_android(value: DownloadFileType): $p_a.DownloadFileType; static android_to_ts(value: $p_a.DownloadFileType): DownloadFileType; static ts_to_ios(value: DownloadFileType): $p_i.ByteRTCDownloadFileType; static ios_to_ts(value: $p_i.ByteRTCDownloadFileType): DownloadFileType; } export declare class t_ProblemFeedbackOption { static ts_to_android(value: ProblemFeedbackOption): $p_a.ProblemFeedbackOption; static android_to_ts(value: $p_a.ProblemFeedbackOption): ProblemFeedbackOption; static ts_to_ios(value: ProblemFeedbackOption): $p_i.ByteRTCProblemFeedbackOption; static ios_to_ts(value: $p_i.ByteRTCProblemFeedbackOption): ProblemFeedbackOption; } export declare class t_UserOnlineStatus { static ts_to_android(value: UserOnlineStatus): $p_a.UserOnlineStatus; static android_to_ts(value: $p_a.UserOnlineStatus): UserOnlineStatus; static ts_to_ios(value: UserOnlineStatus): $p_i.ByteRTCUserOnlineStatus; static ios_to_ts(value: $p_i.ByteRTCUserOnlineStatus): UserOnlineStatus; } export declare class t_VideoFrameType { static ts_to_android(value: VideoFrameType): $p_a.VideoFrameType; static android_to_ts(value: $p_a.VideoFrameType): VideoFrameType; static ts_to_ios(value: VideoFrameType): $p_i.ByteRTCVideoFrameType; static ios_to_ts(value: $p_i.ByteRTCVideoFrameType): VideoFrameType; } export declare class t_TranscoderContentControlType { static ts_to_android(value: TranscoderContentControlType): $p_a.TranscoderContentControlType; static android_to_ts(value: 
$p_a.TranscoderContentControlType): TranscoderContentControlType; static ts_to_ios(value: TranscoderContentControlType): $p_i.ByteRTCTranscoderContentControlType; static ios_to_ts(value: $p_i.ByteRTCTranscoderContentControlType): TranscoderContentControlType; } export declare class t_AudioMixingState { static ts_to_android(value: AudioMixingState): $p_a.AudioMixingState; static android_to_ts(value: $p_a.AudioMixingState): AudioMixingState; static ts_to_ios(value: AudioMixingState): $p_i.ByteRTCAudioMixingState; static ios_to_ts(value: $p_i.ByteRTCAudioMixingState): AudioMixingState; } export declare class t_MixedStreamType { static ts_to_android(value: MixedStreamType): $p_a.MixedStreamType; static android_to_ts(value: $p_a.MixedStreamType): MixedStreamType; static ts_to_ios(value: MixedStreamType): $p_i.ByteRTCMixedStreamType; static ios_to_ts(value: $p_i.ByteRTCMixedStreamType): MixedStreamType; } export declare class t_AudioQuality { static ts_to_android(value: AudioQuality): $p_a.AudioQuality; static android_to_ts(value: $p_a.AudioQuality): AudioQuality; static ts_to_ios(value: AudioQuality): $p_i.ByteRTCAudioQuality; static ios_to_ts(value: $p_i.ByteRTCAudioQuality): AudioQuality; } export declare class t_MediaPlayerCustomSourceSeekWhence { static ts_to_android(value: MediaPlayerCustomSourceSeekWhence): $p_a.MediaPlayerCustomSourceSeekWhence; static android_to_ts(value: $p_a.MediaPlayerCustomSourceSeekWhence): MediaPlayerCustomSourceSeekWhence; static ts_to_ios(value: MediaPlayerCustomSourceSeekWhence): $p_i.ByteRTCMediaPlayerCustomSourceSeekWhence; static ios_to_ts(value: $p_i.ByteRTCMediaPlayerCustomSourceSeekWhence): MediaPlayerCustomSourceSeekWhence; } export declare class t_MixedStreamLayoutRegionType { static ts_to_android(value: MixedStreamLayoutRegionType): $p_a.MixedStreamLayoutRegionType; static android_to_ts(value: $p_a.MixedStreamLayoutRegionType): MixedStreamLayoutRegionType; static ts_to_ios(value: MixedStreamLayoutRegionType): 
/*
 * Enum bridge declarations (generated).
 *
 * Each `t_<Enum>` class below declares four static converters that map the
 * cross-platform TS enum `<Enum>` to and from its native counterparts:
 *   - ts_to_android / android_to_ts : TS enum ⇄ Android enum (`$p_a.*`)
 *   - ts_to_ios     / ios_to_ts     : TS enum ⇄ iOS enum     (`$p_i.Byte RTC*`-style)
 * These are ambient declarations only — the implementations live elsewhere.
 */

// NOTE(review): tail of `t_MixedStreamLayoutRegionType` — the class header opens
// before this chunk, so only its last two members are visible here.
$p_i.ByteRTCMixedStreamLayoutRegionType;
    static ios_to_ts(value: $p_i.ByteRTCMixedStreamLayoutRegionType): MixedStreamLayoutRegionType;
}
/** TS ⇄ Android/iOS converters for {@link LogoutReason}. */
export declare class t_LogoutReason {
    static ts_to_android(value: LogoutReason): $p_a.LogoutReason;
    static android_to_ts(value: $p_a.LogoutReason): LogoutReason;
    static ts_to_ios(value: LogoutReason): $p_i.ByteRTCLogoutReason;
    static ios_to_ts(value: $p_i.ByteRTCLogoutReason): LogoutReason;
}
/** TS ⇄ Android/iOS converters for {@link VideoRenderMirrorType}. */
export declare class t_VideoRenderMirrorType {
    static ts_to_android(value: VideoRenderMirrorType): $p_a.VideoRenderMirrorType;
    static android_to_ts(value: $p_a.VideoRenderMirrorType): VideoRenderMirrorType;
    static ts_to_ios(value: VideoRenderMirrorType): $p_i.ByteRTCVideoRenderMirrorType;
    static ios_to_ts(value: $p_i.ByteRTCVideoRenderMirrorType): VideoRenderMirrorType;
}
/** TS ⇄ Android/iOS converters for {@link MixedStreamSEIContentMode}. */
export declare class t_MixedStreamSEIContentMode {
    static ts_to_android(value: MixedStreamSEIContentMode): $p_a.MixedStreamSEIContentMode;
    static android_to_ts(value: $p_a.MixedStreamSEIContentMode): MixedStreamSEIContentMode;
    static ts_to_ios(value: MixedStreamSEIContentMode): $p_i.ByteRTCMixedStreamSEIContentMode;
    static ios_to_ts(value: $p_i.ByteRTCMixedStreamSEIContentMode): MixedStreamSEIContentMode;
}
/** TS ⇄ Android/iOS converters for {@link AudioMixingError}. */
export declare class t_AudioMixingError {
    static ts_to_android(value: AudioMixingError): $p_a.AudioMixingError;
    static android_to_ts(value: $p_a.AudioMixingError): AudioMixingError;
    static ts_to_ios(value: AudioMixingError): $p_i.ByteRTCAudioMixingError;
    static ios_to_ts(value: $p_i.ByteRTCAudioMixingError): AudioMixingError;
}
/** TS ⇄ Android/iOS converters for {@link AACProfile}. */
export declare class t_AACProfile {
    static ts_to_android(value: AACProfile): $p_a.AACProfile;
    static android_to_ts(value: $p_a.AACProfile): AACProfile;
    static ts_to_ios(value: AACProfile): $p_i.ByteRTCAACProfile;
    static ios_to_ts(value: $p_i.ByteRTCAACProfile): AACProfile;
}
/** TS ⇄ Android/iOS converters for {@link DataMessageSourceType}. */
export declare class t_DataMessageSourceType {
    static ts_to_android(value: DataMessageSourceType): $p_a.DataMessageSourceType;
    static android_to_ts(value: $p_a.DataMessageSourceType): DataMessageSourceType;
    static ts_to_ios(value: DataMessageSourceType): $p_i.ByteRTCDataMessageSourceType;
    static ios_to_ts(value: $p_i.ByteRTCDataMessageSourceType): DataMessageSourceType;
}
/** TS ⇄ Android/iOS converters for {@link MediaPlayerCustomSourceMode}. */
export declare class t_MediaPlayerCustomSourceMode {
    static ts_to_android(value: MediaPlayerCustomSourceMode): $p_a.MediaPlayerCustomSourceMode;
    static android_to_ts(value: $p_a.MediaPlayerCustomSourceMode): MediaPlayerCustomSourceMode;
    static ts_to_ios(value: MediaPlayerCustomSourceMode): $p_i.ByteRTCMediaPlayerCustomSourceMode;
    static ios_to_ts(value: $p_i.ByteRTCMediaPlayerCustomSourceMode): MediaPlayerCustomSourceMode;
}
/** TS ⇄ Android/iOS converters for {@link KTVPlayerErrorCode}. */
export declare class t_KTVPlayerErrorCode {
    static ts_to_android(value: KTVPlayerErrorCode): $p_a.KTVPlayerErrorCode;
    static android_to_ts(value: $p_a.KTVPlayerErrorCode): KTVPlayerErrorCode;
    static ts_to_ios(value: KTVPlayerErrorCode): $p_i.ByteRTCKTVPlayerErrorCode;
    static ios_to_ts(value: $p_i.ByteRTCKTVPlayerErrorCode): KTVPlayerErrorCode;
}
/** TS ⇄ Android/iOS converters for {@link RemoteVideoStateChangeReason}. */
export declare class t_RemoteVideoStateChangeReason {
    static ts_to_android(value: RemoteVideoStateChangeReason): $p_a.RemoteVideoStateChangeReason;
    static android_to_ts(value: $p_a.RemoteVideoStateChangeReason): RemoteVideoStateChangeReason;
    static ts_to_ios(value: RemoteVideoStateChangeReason): $p_i.ByteRTCRemoteVideoStateChangeReason;
    static ios_to_ts(value: $p_i.ByteRTCRemoteVideoStateChangeReason): RemoteVideoStateChangeReason;
}
/** TS ⇄ Android/iOS converters for {@link PerformanceAlarmReason}. */
export declare class t_PerformanceAlarmReason {
    static ts_to_android(value: PerformanceAlarmReason): $p_a.PerformanceAlarmReason;
    static android_to_ts(value: $p_a.PerformanceAlarmReason): PerformanceAlarmReason;
    static ts_to_ios(value: PerformanceAlarmReason): $p_i.ByteRTCPerformanceAlarmReason;
    static ios_to_ts(value: $p_i.ByteRTCPerformanceAlarmReason): PerformanceAlarmReason;
}
/** TS ⇄ Android/iOS converters for {@link VideoDeviceType}. */
export declare class t_VideoDeviceType {
    static ts_to_android(value: VideoDeviceType): $p_a.VideoDeviceType;
    static android_to_ts(value: $p_a.VideoDeviceType): VideoDeviceType;
    static ts_to_ios(value: VideoDeviceType): $p_i.ByteRTCVideoDeviceType;
    static ios_to_ts(value: $p_i.ByteRTCVideoDeviceType): VideoDeviceType;
}
/** TS ⇄ Android/iOS converters for {@link MediaPlayerCustomSourceStreamType}. */
export declare class t_MediaPlayerCustomSourceStreamType {
    static ts_to_android(value: MediaPlayerCustomSourceStreamType): $p_a.MediaPlayerCustomSourceStreamType;
    static android_to_ts(value: $p_a.MediaPlayerCustomSourceStreamType): MediaPlayerCustomSourceStreamType;
    static ts_to_ios(value: MediaPlayerCustomSourceStreamType): $p_i.ByteRTCMediaPlayerCustomSourceStreamType;
    static ios_to_ts(value: $p_i.ByteRTCMediaPlayerCustomSourceStreamType): MediaPlayerCustomSourceStreamType;
}
/** TS ⇄ Android/iOS converters for {@link MusicFilterType}. */
export declare class t_MusicFilterType {
    static ts_to_android(value: MusicFilterType): $p_a.MusicFilterType;
    static android_to_ts(value: $p_a.MusicFilterType): MusicFilterType;
    static ts_to_ios(value: MusicFilterType): $p_i.ByteRTCMusicFilterType;
    static ios_to_ts(value: $p_i.ByteRTCMusicFilterType): MusicFilterType;
}
/** TS ⇄ Android/iOS converters for {@link LocalVideoRenderPosition}. */
export declare class t_LocalVideoRenderPosition {
    static ts_to_android(value: LocalVideoRenderPosition): $p_a.LocalVideoRenderPosition;
    static android_to_ts(value: $p_a.LocalVideoRenderPosition): LocalVideoRenderPosition;
    static ts_to_ios(value: LocalVideoRenderPosition): $p_i.ByteRTCLocalVideoRenderPosition;
    static ios_to_ts(value: $p_i.ByteRTCLocalVideoRenderPosition): LocalVideoRenderPosition;
}
/** TS ⇄ Android/iOS converters for {@link AudioReportMode}. */
export declare class t_AudioReportMode {
    static ts_to_android(value: AudioReportMode): $p_a.AudioReportMode;
    static android_to_ts(value: $p_a.AudioReportMode): AudioReportMode;
    static ts_to_ios(value: AudioReportMode): $p_i.ByteRTCAudioReportMode;
    static ios_to_ts(value: $p_i.ByteRTCAudioReportMode): AudioReportMode;
}
/** TS ⇄ Android/iOS converters for {@link FirstFramePlayState}. */
export declare class t_FirstFramePlayState {
    static ts_to_android(value: FirstFramePlayState): $p_a.FirstFramePlayState;
    static android_to_ts(value: $p_a.FirstFramePlayState): FirstFramePlayState;
    static ts_to_ios(value: FirstFramePlayState): $p_i.ByteRTCFirstFramePlayState;
    static ios_to_ts(value: $p_i.ByteRTCFirstFramePlayState): FirstFramePlayState;
}
/** TS ⇄ Android/iOS converters for {@link VideoApplyRotation}. */
export declare class t_VideoApplyRotation {
    static ts_to_android(value: VideoApplyRotation): $p_a.VideoApplyRotation;
    static android_to_ts(value: $p_a.VideoApplyRotation): VideoApplyRotation;
    static ts_to_ios(value: VideoApplyRotation): $p_i.ByteRTCVideoApplyRotation;
    static ios_to_ts(value: $p_i.ByteRTCVideoApplyRotation): VideoApplyRotation;
}
/** TS ⇄ Android/iOS converters for {@link VideoSuperResolutionModeChangedReason}. */
export declare class t_VideoSuperResolutionModeChangedReason {
    static ts_to_android(value: VideoSuperResolutionModeChangedReason): $p_a.VideoSuperResolutionModeChangedReason;
    static android_to_ts(value: $p_a.VideoSuperResolutionModeChangedReason): VideoSuperResolutionModeChangedReason;
    static ts_to_ios(value: VideoSuperResolutionModeChangedReason): $p_i.ByteRTCVideoSuperResolutionModeChangedReason;
    static ios_to_ts(value: $p_i.ByteRTCVideoSuperResolutionModeChangedReason): VideoSuperResolutionModeChangedReason;
}
/** TS ⇄ Android/iOS converters for {@link LocalAudioStreamState}. */
export declare class t_LocalAudioStreamState {
    static ts_to_android(value: LocalAudioStreamState): $p_a.LocalAudioStreamState;
    static android_to_ts(value: $p_a.LocalAudioStreamState): LocalAudioStreamState;
    static ts_to_ios(value: LocalAudioStreamState): $p_i.ByteRTCLocalAudioStreamState;
    static ios_to_ts(value: $p_i.ByteRTCLocalAudioStreamState): LocalAudioStreamState;
}
/** TS ⇄ Android/iOS converters for {@link LocalVideoStreamState}. */
export declare class t_LocalVideoStreamState {
    static ts_to_android(value: LocalVideoStreamState): $p_a.LocalVideoStreamState;
    static android_to_ts(value: $p_a.LocalVideoStreamState): LocalVideoStreamState;
    static ts_to_ios(value: LocalVideoStreamState): $p_i.ByteRTCLocalVideoStreamState;
    static ios_to_ts(value: $p_i.ByteRTCLocalVideoStreamState): LocalVideoStreamState;
}
/** TS ⇄ Android/iOS converters for {@link RemoteAudioState}. */
export declare class t_RemoteAudioState {
    static ts_to_android(value: RemoteAudioState): $p_a.RemoteAudioState;
    static android_to_ts(value: $p_a.RemoteAudioState): RemoteAudioState;
    static ts_to_ios(value: RemoteAudioState): $p_i.ByteRTCRemoteAudioState;
    static ios_to_ts(value: $p_i.ByteRTCRemoteAudioState): RemoteAudioState;
}
/** TS ⇄ Android/iOS converters for {@link LocalProxyError}. */
export declare class t_LocalProxyError {
    static ts_to_android(value: LocalProxyError): $p_a.LocalProxyError;
    static android_to_ts(value: $p_a.LocalProxyError): LocalProxyError;
    static ts_to_ios(value: LocalProxyError): $p_i.ByteRTCLocalProxyError;
    static ios_to_ts(value: $p_i.ByteRTCLocalProxyError): LocalProxyError;
}
/** TS ⇄ Android/iOS converters for {@link MediaDeviceState}. */
export declare class t_MediaDeviceState {
    static ts_to_android(value: MediaDeviceState): $p_a.MediaDeviceState;
    static android_to_ts(value: $p_a.MediaDeviceState): MediaDeviceState;
    static ts_to_ios(value: MediaDeviceState): $p_i.ByteRTCMediaDeviceState;
    static ios_to_ts(value: $p_i.ByteRTCMediaDeviceState): MediaDeviceState;
}
/** TS ⇄ Android/iOS converters for {@link LocalProxyType}. */
export declare class t_LocalProxyType {
    static ts_to_android(value: LocalProxyType): $p_a.LocalProxyType;
    static android_to_ts(value: $p_a.LocalProxyType): LocalProxyType;
    static ts_to_ios(value: LocalProxyType): $p_i.ByteRTCLocalProxyType;
    static ios_to_ts(value: $p_i.ByteRTCLocalProxyType): LocalProxyType;
}
/** TS ⇄ Android/iOS converters for {@link PlayState}. */
export declare class t_PlayState {
    static ts_to_android(value: PlayState): $p_a.PlayState;
    static android_to_ts(value: $p_a.PlayState): PlayState;
    static ts_to_ios(value: PlayState): $p_i.ByteRTCPlayState;
    static ios_to_ts(value: $p_i.ByteRTCPlayState): PlayState;
}
/** TS ⇄ Android/iOS converters for {@link SubtitleState}. */
export declare class t_SubtitleState {
    static ts_to_android(value: SubtitleState): $p_a.SubtitleState;
    static android_to_ts(value: $p_a.SubtitleState): SubtitleState;
    static ts_to_ios(value: SubtitleState): $p_i.ByteRTCSubtitleState;
    static ios_to_ts(value: $p_i.ByteRTCSubtitleState): SubtitleState;
}
/** TS ⇄ Android/iOS converters for {@link MixedStreamRenderMode}. */
export declare class t_MixedStreamRenderMode {
    static ts_to_android(value: MixedStreamRenderMode): $p_a.MixedStreamRenderMode;
    static android_to_ts(value: $p_a.MixedStreamRenderMode): MixedStreamRenderMode;
    static ts_to_ios(value: MixedStreamRenderMode): $p_i.ByteRTCMixedStreamRenderMode;
    static ios_to_ts(value: $p_i.ByteRTCMixedStreamRenderMode): MixedStreamRenderMode;
}
/**
 * TS ⇄ Android/iOS converters for {@link SEIStreamUpdateEventType}.
 *
 * NOTE(review): unlike every other converter in this section, the native type
 * names do not mirror the TS name — Android uses `SEIStreamUpdateEvent` and
 * iOS uses `ByteRTCSEIStreamEventType`. Presumably intentional (the native
 * SDKs name this enum differently) — verify against the generator's mapping.
 */
export declare class t_SEIStreamUpdateEventType {
    static ts_to_android(value: SEIStreamUpdateEventType): $p_a.SEIStreamUpdateEvent;
    static android_to_ts(value: $p_a.SEIStreamUpdateEvent): SEIStreamUpdateEventType;
    static ts_to_ios(value: SEIStreamUpdateEventType): $p_i.ByteRTCSEIStreamEventType;
    static ios_to_ts(value: $p_i.ByteRTCSEIStreamEventType): SEIStreamUpdateEventType;
}