// Copyright © 2022 BytePlusRTC All rights reserved. // SPDX-License-Identifier: MIT import { NSString, int, int64_t, float, id, BOOL, NSArray, NSUInteger, NSInteger, NSData, NSDictionary, bool, CGPoint, CVPixelBufferRef, CMTime } from './types'; import { ByteRTCMediaPlayerConfig, ByteRTCMediaPlayerCustomSource, ByteRTCAudioMixingType, ByteRTCAudioMixingDualMonoMode, ByteRTCAudioFrame, ByteRTCAudioEffectPlayerConfig, ByteRTCPositionInfo, ByteRTCPosition, ByteRTCHumanOrientation, ByteRTCSingScoringConfig, ByteRTCStandardPitchInfo, ByteRTCMusicFilterType, ByteRTCMusicHotType, ByteRTCDownloadLyricType, ByteRTCVirtualBackgroundSource, ByteRTCUserInfo, ByteRTCRoomConfig, ByteRTCRemoteVideoConfig, ByteRTCMediaStreamType, ByteRTCPauseResumControlMediaType, ByteRTCMessageConfig, ByteRTCForwardStreamConfiguration, ByteRTCAudioSelectionPriority, ByteRTCSubtitleConfig, ByteRTCStreamIndex, ByteRTCSubscribeMediaType, ByteRTCSubscribeVideoConfig, ByteRTCLogConfig, ByteRTCAudioSourceType, ByteRTCAudioRenderType, ByteRTCAudioScenarioType, ByteRTCAudioProfileType, ByteRTCAnsMode, ByteRTCVoiceChangerType, ByteRTCVoiceReverbType, ByteRTCVoiceEqualizationConfig, ByteRTCVoiceReverbConfig, ByteRTCAudioPropertiesConfig, ByteRTCEarMonitorMode, ByteRTCBluetoothMode, ByteRTCVideoSinkDelegate, ByteRTCLocalVideoSinkConfig, ByteRTCRemoteStreamKey, ByteRTCRemoteVideoSinkConfig, ByteRTCVideoSuperResolutionMode, ByteRTCVideoRotation, ByteRTCVideoEncoderConfig, ByteRTCScreenVideoEncoderConfig, ByteRTCAlphaLayout, ByteRTCVideoCaptureConfig, ByteRTCVideoCanvas, ByteRTCRenderMode, ByteRTCRemoteVideoRenderConfig, ByteRTCMirrorType, ByteRTCRemoteMirrorType, ByteRTCVideoRotationMode, ByteRTCVideoOrientation, ByteRTCCameraID, ByteRTCEffectBeautyMode, ByteRTCTorchState, ByteRTCSEICountPerFrame, ByteRTCZoomConfigType, ByteRTCZoomDirectionType, ByteRTCVideoPreprocessorConfig, ByteRTCAudioRoute, ByteRTCMixedStreamConfig, ByteRTCPushSingleStreamParam, ByteRTCPublicStreaming, ByteRTCVideoSinkPixelFormat, 
ByteRTCVideoFrame, ByteRTCAudioFrameCallbackMethod, ByteRTCAudioFormat, ByteRTCAudioFrameMethod,
ByteRTCProblemFeedbackOption, ByteRTCProblemFeedbackInfo, ByteRTCPublishFallbackOption,
ByteRTCSubscribeFallbackOption, ByteRTCRemoteUserPriority, ByteRTCEncryptType,
ByteRTCScreenMediaType, ByteRTCASRConfig, ByteRTCRecordingConfig, ByteRTCRecordingType,
ByteRTCAudioRecordingConfig, ByteRTCVideoSourceType, ByteRTCEncodedVideoFrame,
ByteRTCVideoDecoderConfig, ByteRTCStreamSycnInfoConfig, ByteRTCEchoTestConfig,
ByteRTCVideoWatermarkConfig, ByteRTCCloudProxyInfo, ByteRTCNetworkTimeInfo,
ByteRTCMediaTypeEnhancementConfig, ByteRTCLocalProxyInfo, ByteRTCVideoSolution,
ByteRTCAudioPlaybackDevice, ByteRTCLiveTranscoding, ByteRTCMuteState, ByteRTCAudioMixingConfig,
ByteRTCAudioTrackType, ByteRTCAudioPlayType, ByteRTCReceiveRange,
ByteRTCAttenuationType } from './keytype';
// Delegate / observer protocol types; passed to the classes below as loosely-typed `id` parameters.
import { ByteRTCMediaPlayerAudioFrameObserver, ByteRTCMediaPlayerEventHandler,
ByteRTCAudioEffectPlayerEventHandler, ByteRTCSingScoringDelegate, ByteRTCKTVManagerDelegate,
ByteRTCFaceDetectionObserver, ByteRTCRoomDelegate, ByteRTCVideoDelegate,
ByteRTCVideoProcessorDelegate, ByteRTCLocalEncodedVideoFrameObserver, ByteRTCMixedStreamObserver,
ByteRTCPushSingleStreamToCDNObserver, ByteRTCAudioFrameObserver, ByteRTCAudioFrameProcessor,
ByteRTCEncryptHandler, ByteRTCASREngineEventHandler, ByteRTCExternalVideoEncoderEventHandler,
ByteRTCRemoteEncodedVideoFrameObserver, ByteRTCVideoSnapshotCallbackDelegate,
LiveTranscodingDelegate, ByteRTCAudioFileFrameObserver,
ByteRTCKTVPlayerDelegate } from './callback';

/**
 * Media-file player. TypeScript declaration mirroring the BytePlus RTC
 * Objective-C `ByteRTCMediaPlayer` interface. Methods declared as `int`
 * return the SDK status code (0 = success, < 0 = failure) unless noted.
 */
export declare class ByteRTCMediaPlayer {
  // ---- open / playback control ----
  /** Opens a local media file for playback with the given config. */
  open(filePath: NSString, config: ByteRTCMediaPlayerConfig): int;
  start(): int;
  /** Opens a caller-supplied custom media source instead of a file path. */
  openWithCustomSource(source: ByteRTCMediaPlayerCustomSource, config: ByteRTCMediaPlayerConfig): int;
  stop(): int;
  pause(): int;
  resume(): int;
  // ---- volume / progress / audio shaping ----
  setVolume(volume: int, type: ByteRTCAudioMixingType): int;
  getVolume(type: ByteRTCAudioMixingType): int;
  // NOTE(review): durations/positions are plain ints — presumably milliseconds; confirm against SDK docs.
  getTotalDuration(): int;
  getPlaybackDuration(): int;
  getPosition(): int;
  setAudioPitch(pitch: int): int;
  setPosition(position: int): int;
  setAudioDualMonoMode(mode: ByteRTCAudioMixingDualMonoMode): int;
  getAudioTrackCount(): int;
  selectAudioTrack(index: int): int;
  setPlaybackSpeed(speed: int): int;
  setProgressInterval(interval: int64_t): int;
  setLoudness(loudness: float): int;
  // ---- callbacks / external frames ----
  /** `observer` is expected to be a ByteRTCMediaPlayerAudioFrameObserver. */
  registerAudioFrameObserver(observer: id): int;
  pushExternalAudioFrame(audioFrame: ByteRTCAudioFrame): int;
  /** `handler` is expected to be a ByteRTCMediaPlayerEventHandler. */
  setEventHandler(handler: id): int;
  init(): this;
}

/**
 * Pool of short sound-effect players; every operation is addressed by a
 * caller-chosen `effectId`. `*All` variants apply to every loaded effect.
 */
export declare class ByteRTCAudioEffectPlayer {
  start(effectId: int, filePath: NSString, config: ByteRTCAudioEffectPlayerConfig): int;
  stop(effectId: int): int;
  stopAll(): int;
  /** Loads the file for `effectId` ahead of time; release with unload(). */
  preload(effectId: int, filePath: NSString): int;
  unload(effectId: int): int;
  unloadAll(): int;
  pause(effectId: int): int;
  pauseAll(): int;
  resume(effectId: int): int;
  resumeAll(): int;
  setPosition(effectId: int, position: int): int;
  getPosition(effectId: int): int;
  setVolume(effectId: int, volume: int): int;
  setVolumeAll(volume: int): int;
  getVolume(effectId: int): int;
  getDuration(effectId: int): int;
  /** `handler` is expected to be a ByteRTCAudioEffectPlayerEventHandler. */
  setEventHandler(handler: id): int;
  init(): this;
}

/**
 * Spatial-audio controls: positions and orientations for the local user,
 * remote users, and the listener.
 */
export declare class ByteRTCSpatialAudio {
  enableSpatialAudio(enable: BOOL): void;
  disableRemoteOrientation(): void;
  updateSelfPosition(positionInfo: ByteRTCPositionInfo): int;
  updateRemotePosition(uid: NSString, positionInfo: ByteRTCPositionInfo): int;
  removeRemotePosition(uid: NSString): int;
  removeAllRemotePosition(): int;
  updatePosition(pos: ByteRTCPosition): int;
  updateSelfOrientation(orientation: ByteRTCHumanOrientation): int;
  updateListenerPosition(pos: ByteRTCPosition): int;
  updateListenerOrientation(orientation: ByteRTCHumanOrientation): int;
  init(): this;
}

/**
 * Karaoke sing-scoring manager (declaration continues on the next source line).
 */
export declare class ByteRTCSingScoringManager {
  /** `delegate` is expected to be a ByteRTCSingScoringDelegate. */
  initSingScoring(singScoringAppkey: NSString, singScoringToken: NSString, delegate: id): int;
  setSingScoringConfig(config: ByteRTCSingScoringConfig): int;
  getStandardPitchInfo(midiFilepath: NSString): ByteRTCStandardPitchInfo;
  startSingScoring(position:
int, scoringInfoInterval: int): int;
  stopSingScoring(): int;
  getLastSentenceScore(): int;
  getTotalScore(): int;
  getAverageScore(): int;
}

/**
 * KTV (karaoke) resource manager: music-library browsing and downloads.
 * Query methods return void; results arrive asynchronously on `delegate`
 * (expected: ByteRTCKTVManagerDelegate).
 */
export declare class ByteRTCKTVManager {
  delegate: id;
  getMusicList(pageNumber: int, pageSize: int, filterType: ByteRTCMusicFilterType): void;
  searchMusic(keyWord: NSString, pageNumber: int, pageSize: int, filterType: ByteRTCMusicFilterType): void;
  getHotMusic(hotType: ByteRTCMusicHotType, filterType: ByteRTCMusicFilterType): void;
  getMusicDetail(musicId: NSString): void;
  // NOTE(review): the int returned by the download* methods is presumably the
  // downloadId accepted by cancelDownload() — confirm against the SDK docs.
  downloadMusic(musicId: NSString): int;
  downloadLyric(musicId: NSString, lyricType: ByteRTCDownloadLyricType): int;
  downloadMidi(musicId: NSString): int;
  cancelDownload(downloadId: int): void;
  clearCache(): void;
  setMaxCacheSize(maxCacheSizeMB: int): void;
  getKTVPlayer(): ByteRTCKTVPlayer;
  init(): this;
}

/**
 * Video-effect (CV) interface: effect nodes, color filters, virtual
 * background, and face detection.
 */
export declare class ByteRTCVideoEffect {
  /** Initializes the CV resources from a license file and model directory. */
  initCVResource(licenseFile: NSString, algoModelDir: NSString): int;
  enableVideoEffect(): int;
  disableVideoEffect(): int;
  setEffectNodes(effectNodes: NSArray): int;
  updateEffectNode(node: NSString, key: NSString, value: float): int;
  setColorFilter(filterRes: NSString): int;
  setColorFilterIntensity(intensity: float): int;
  enableVirtualBackground(backgroundStickerPath: NSString, source: ByteRTCVirtualBackgroundSource): int;
  disableVirtualBackground(): int;
  /** `observer` is expected to be a ByteRTCFaceDetectionObserver. */
  enableFaceDetection(observer: id, interval: NSUInteger, path: NSString): int;
  disableFaceDetection(): int;
  registerFaceDetectionObserver(observer: id, interval: NSInteger): int;
}

/**
 * A single RTC room: join/leave, publish/subscribe, in-room messaging, and
 * cross-room stream forwarding. Events are delivered to `delegate`
 * (expected: ByteRTCRoomDelegate). Declaration continues on the following
 * source lines.
 */
export declare class ByteRTCRoom {
  delegate: id;
  destroy(): void;
  getRoomId(): NSString;
  setRTCRoomDelegate(roomDelegate: id): int;
  joinRoom(token: NSString, userInfo: ByteRTCUserInfo, roomConfig: ByteRTCRoomConfig): int;
  setUserVisibility(enable: BOOL): int;
  setMultiDeviceAVSync(audioUserId: NSString): int;
  leaveRoom(): int;
  updateToken(token: NSString): int;
  setRemoteVideoConfig(userId: NSString, remoteVideoConfig: ByteRTCRemoteVideoConfig): int;
  /** {en}
   * @detail api
   * @valid since 3.60.
* @brief Starts or stops publishing the video stream captured by the camera
 * in the current room.
 * @param publish Whether to publish the media stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Not needed if auto-publish was chosen in joinRoom:userInfo:roomConfig:{@link #ByteRTCRoom#joinRoom:userInfo:roomConfig}.
 * - An invisible user (see setUserVisibility) cannot publish media streams.
 * - Use publishScreenVideo:/publishScreenAudio: for screen sharing and
 *   publishStreamAudio: for the microphone stream; use
 *   startForwardStreamToRooms: to forward published streams to other rooms.
 * - Remote users receive rtcRoom:onUserPublishStreamVideo:uid:isPublish:;
 *   subscribers also receive rtcEngine:onFirstRemoteVideoFrameDecoded:withFrameInfo:.
 */
publishStreamVideo(publish: BOOL): int;
/** {en}
 * @detail api
 * @valid since 3.60.
 * @brief Starts or stops publishing the audio stream captured by the local
 * microphone in the current room.
 * @param publish Whether to publish the media stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Not needed if auto-publish was chosen in joinRoom:userInfo:roomConfig:.
 * - An invisible user (see setUserVisibility) cannot publish media streams.
 * - Remote users receive rtcRoom:onUserPublishStreamAudio:uid:isPublish:;
 *   subscribers also receive rtcEngine:onFirstRemoteAudioFrame:.
 */
publishStreamAudio(publish: BOOL): int;
/** {en}
 * @detail api
 * @valid since 3.60.
 * @brief Starts or stops sharing the local screen video in the room. The same
 * uid may join several rooms and publish a different screen-sharing stream
 * in each.
 * @param publish Whether to publish the video stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Must be called explicitly even when auto-publish is enabled in joinRoom.
 * - An invisible user (see setUserVisibility) cannot publish media streams.
 * - Use publishScreenAudio: (not supported on Linux) for computer audio.
 * - Remote users receive rtcRoom:onUserPublishScreenVideo:uid:isPublish:; the
 *   local user receives rtcEngine:onScreenVideoFrameSendStateChanged:rtcUser:state:.
 * - See [Sharing Screen in PC](https://docs.byteplus.com/byteplus-rtc/docs/70144).
 */
publishScreenVideo(publish: BOOL): int;
/** {en}
 * @hidden(Linux)
 * @detail api
 * @valid since 3.60.
 * @brief Manually starts or stops publishing the local screen-sharing audio
 * in the current room.
 * @param publish Whether to publish the audio stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Must be called explicitly even when auto-publish is enabled in joinRoom.
 * - An invisible user (see setUserVisibility) cannot publish media streams.
 * - Remote users receive rtcRoom:onUserPublishScreenAudio:uid:isPublish:;
 *   subscribers also receive rtcEngine:onFirstRemoteAudioFrame:.
 */
publishScreenAudio(publish: BOOL): int;
/** {en}
 * @detail api
 * @valid since 3.60.
 * @brief Subscribes to (or unsubscribes from) a remote user's camera video
 * stream, or updates the subscribe options of an already-subscribed user.
 * @param userId ID of the remote user who published the target video stream.
 * @param subscribe Whether to subscribe to the stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Learn about remote streams via rtcRoom:onUserPublishStreamVideo:uid:isPublish: first.
 * - Results and any exceptions are reported via onVideoSubscribeStateChanged.
 * - A subscription persists until leaving the room or unsubscribing.
 */
subscribeStreamVideo(userId: NSString, subscribe: BOOL): int;
/** {en}
 * @detail api
 * @valid since 3.60.
 * @brief Subscribes to (or unsubscribes from) a remote user's microphone
 * audio stream, or updates the subscribe options of an already-subscribed user.
 * @param userId ID of the remote user who published the target audio stream.
 * @param subscribe Whether to subscribe to the stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Learn about remote streams via rtcRoom:onUserPublishStreamAudio:uid:isPublish: first.
 * - Results and any exceptions are reported via onAudioSubscribeStateChanged.
 * - A subscription persists until leaving the room or unsubscribing.
 */
subscribeStreamAudio(userId: NSString, subscribe: BOOL): int;
/** {en}
 * @detail api
 * @valid since 3.60.
 * @brief Subscribes to (or unsubscribes from) a remote user's screen-sharing
 * video stream, or updates the subscribe options of an already-subscribed user.
 * @param userId ID of the remote user who published the target screen video stream.
 * @param subscribe Whether to subscribe to the stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Learn about remote streams via rtcRoom:onUserPublishScreenVideo:uid:isPublish: first.
 * - Results and any exceptions are reported via onScreenVideoSubscribeStateChanged.
 * - A subscription persists until leaving the room or unsubscribing.
 */
subscribeScreenVideo(userId: NSString, subscribe: BOOL): int;
/** {en}
 * @detail api
 * @valid since 3.60.
 * @brief Subscribes to (or unsubscribes from) a remote user's screen-sharing
 * audio stream, or updates the subscribe options of an already-subscribed user.
 * @param userId ID of the remote user who published the target screen audio stream.
 * @param subscribe Whether to subscribe to the stream.
 * @return 0: success; < 0: failure, see ByteRTCReturnStatus{@link #ByteRTCReturnStatus}.
 * @note
 * - Learn about remote streams via rtcRoom:onUserPublishScreenAudio:uid:isPublish: first.
 * - Results and any exceptions are reported via onScreenAudioSubscribeStateChanged.
 * - A subscription persists until leaving the room or unsubscribing.
 */
subscribeScreenAudio(userId: NSString, subscribe: BOOL): int;
// ---- batch subscribe control ----
subscribeAllStreamsWithMediaStreamType(mediaStreamType: ByteRTCMediaStreamType): int;
unsubscribeAllStreamsWithMediaStreamType(mediaStreamType: ByteRTCMediaStreamType): int;
pauseAllSubscribedStream(mediaType: ByteRTCPauseResumControlMediaType): int;
resumeAllSubscribedStream(mediaType: ByteRTCPauseResumControlMediaType): int;
// ---- in-room messaging ----
// NOTE(review): the NSInteger result is presumably a message id usable to
// match delivery callbacks — confirm against the SDK docs.
sendUserMessage(userId: NSString, message: NSString, config: ByteRTCMessageConfig): NSInteger;
sendUserBinaryMessage(uid: NSString, message: NSData, config: ByteRTCMessageConfig): NSInteger;
sendRoomMessage(message: NSString): NSInteger;
sendRoomBinaryMessage(message: NSData): NSInteger;
// ---- cross-room stream forwarding ----
/** `configurations`: array of ByteRTCForwardStreamConfiguration. */
startForwardStreamToRooms(configurations: NSArray): int;
updateForwardStreamToRooms(configurations: NSArray): int;
stopForwardStreamToRooms(): int;
pauseForwardStreamToAllRooms(): int;
resumeForwardStreamToAllRooms(): int;
// ---- sub-interfaces and room-level tuning ----
getRangeAudio(): ByteRTCRangeAudio;
getSpatialAudio(): ByteRTCSpatialAudio;
setRemoteRoomAudioPlaybackVolume(volume: NSInteger): int;
setAudioSelectionConfig(audioSelectionPriority: ByteRTCAudioSelectionPriority): int;
setRoomExtraInfo(key: NSString, value: NSString): NSInteger;
startSubtitle(subtitleConfig: ByteRTCSubtitleConfig): int;
stopSubtitle(): int;
/** Combined subscribe entry point covering stream index, media type and video config. */
subscribeUserStream(userId: NSString, streamType: ByteRTCStreamIndex, mediaType: ByteRTCSubscribeMediaType, videoConfig: ByteRTCSubscribeVideoConfig): int;
init(): this;
}

/**
 * Top-level RTC engine. TypeScript declaration mirroring the Objective-C
 * `ByteRTCVideo` interface; the declaration continues on the following
 * source lines. Events are delivered to `delegate` (expected:
 * ByteRTCVideoDelegate).
 */
export declare class ByteRTCVideo {
  // ---- engine lifecycle / static ----
  static createRTCVideo(appId: NSString, delegate: id, parameters: NSDictionary): ByteRTCVideo;
  static destroyRTCVideo(): void;
  static getSDKVersion(): NSString;
  static setLogConfig(logConfig: ByteRTCLogConfig): int;
  static getErrorDescription(code: NSInteger): NSString;
  delegate: id;
  // ---- audio capture / render ----
  setAudioSourceType(type: ByteRTCAudioSourceType): int;
  setAudioRenderType(type: ByteRTCAudioRenderType): int;
  startAudioCapture(): int;
  stopAudioCapture(): int;
  setAudioScenario(audioScenario: ByteRTCAudioScenarioType): int;
// ---- audio profile / voice effects ----
  setAudioProfile(audioProfile: ByteRTCAudioProfileType): int;
  setAnsMode(ansMode: ByteRTCAnsMode): int;
  setVoiceChangerType(voiceChanger: ByteRTCVoiceChangerType): int;
  setVoiceReverbType(voiceReverb: ByteRTCVoiceReverbType): int;
  setLocalVoiceEqualization(config: ByteRTCVoiceEqualizationConfig): int;
  setLocalVoiceReverbParam(param: ByteRTCVoiceReverbConfig): int;
  enableLocalVoiceReverb(enable: bool): int;
  muteAudioCapture(index: ByteRTCStreamIndex, mute: bool): int;
  setCaptureVolume(index: ByteRTCStreamIndex, volume: int): int;
  setPlaybackVolume(volume: NSInteger): int;
  enableAudioPropertiesReport(config: ByteRTCAudioPropertiesConfig): int;
  setRemoteAudioPlaybackVolume(streamKey: ByteRTCRemoteStreamKey, volume: int): int;
  setEarMonitorMode(mode: ByteRTCEarMonitorMode): int;
  setEarMonitorVolume(volume: NSInteger): int;
  setBluetoothMode(mode: ByteRTCBluetoothMode): int;
  setLocalVoicePitch(pitch: NSInteger): int;
  enableVocalInstrumentBalance(enable: BOOL): int;
  enablePlaybackDucking(enable: BOOL): int;
  // ---- video render sinks ----
  /** `videoSink` is expected to be a ByteRTCVideoSinkDelegate. */
  setLocalVideoRender(index: ByteRTCStreamIndex, videoSink: id, config: ByteRTCLocalVideoSinkConfig): int;
  setRemoteVideoRender(streamKey: ByteRTCRemoteStreamKey, videoSink: id, config: ByteRTCRemoteVideoSinkConfig): int;
  setRemoteVideoSuperResolution(streamKey: ByteRTCRemoteStreamKey, mode: ByteRTCVideoSuperResolutionMode): int;
  // ---- video capture / encoding ----
  setVideoCaptureRotation(rotation: ByteRTCVideoRotation): int;
  enableSimulcastMode(enabled: BOOL): int;
  setMaxVideoEncoderConfig(encoderConfig: ByteRTCVideoEncoderConfig): int;
  /** `encoderConfigs`: array of ByteRTCVideoEncoderConfig. */
  setVideoEncoderConfig(encoderConfigs: NSArray): int;
  setScreenVideoEncoderConfig(encoderConfig: ByteRTCScreenVideoEncoderConfig): int;
  enableAlphaChannelVideoEncode(streamIndex: ByteRTCStreamIndex, alphaLayout: ByteRTCAlphaLayout): int;
  disableAlphaChannelVideoEncode(streamIndex: ByteRTCStreamIndex): int;
  setVideoCaptureConfig(captureConfig: ByteRTCVideoCaptureConfig): int;
  // ---- canvases / mirroring / rotation ----
  setLocalVideoCanvas(streamIndex: ByteRTCStreamIndex, canvas: ByteRTCVideoCanvas): int;
  updateLocalVideoCanvas(streamIndex: ByteRTCStreamIndex, renderMode: ByteRTCRenderMode, backgroundColor: NSUInteger): int;
  setRemoteVideoCanvas(key: ByteRTCRemoteStreamKey, canvas: ByteRTCVideoCanvas): int;
  updateRemoteStreamVideoCanvas(key: ByteRTCRemoteStreamKey, remoteVideoRenderConfig: ByteRTCRemoteVideoRenderConfig): int;
  startVideoCapture(): int;
  stopVideoCapture(): int;
  setLocalVideoMirrorType(mirrorType: ByteRTCMirrorType): int;
  setRemoteVideoMirrorType(key: ByteRTCRemoteStreamKey, mirrorType: ByteRTCRemoteMirrorType): int;
  setVideoRotationMode(rotationMode: ByteRTCVideoRotationMode): int;
  setVideoOrientation(orientation: ByteRTCVideoOrientation): int;
  // ---- camera control / beauty ----
  switchCamera(cameraId: ByteRTCCameraID): int;
  getVideoEffectInterface(): ByteRTCVideoEffect;
  enableEffectBeauty(enable: BOOL): int;
  setBeautyIntensity(beautyMode: ByteRTCEffectBeautyMode, intensity: float): int;
  setCameraZoomRatio(zoomRatio: float): int;
  getCameraZoomMaxRatio(): float;
  isCameraZoomSupported(): bool;
  isCameraTorchSupported(): bool;
  setCameraTorch(torchState: ByteRTCTorchState): int;
  isCameraFocusPositionSupported(): bool;
  setCameraFocusPosition(position: CGPoint): int;
  isCameraExposurePositionSupported(): bool;
  setCameraExposurePosition(position: CGPoint): int;
  setCameraExposureCompensation(val: float): int;
  enableCameraAutoExposureFaceMode(enable: bool): int;
  setCameraAdaptiveMinimumFrameRate(framerate: int): int;
  sendSEIMessage(streamIndex: ByteRTCStreamIndex, message: NSData, repeatCount: int, mode: ByteRTCSEICountPerFrame): int;
  setVideoDigitalZoomConfig(type: ByteRTCZoomConfigType, size: float): int;
  setVideoDigitalZoomControl(direction: ByteRTCZoomDirectionType): int;
  startVideoDigitalZoomControl(direction: ByteRTCZoomDirectionType): int;
  stopVideoDigitalZoomControl(): int;
  /** `processor` is expected to be a ByteRTCVideoProcessorDelegate. */
  registerLocalVideoProcessor(processor: id, config: ByteRTCVideoPreprocessorConfig): int;
  /** `frameObserver` is expected to be a ByteRTCLocalEncodedVideoFrameObserver. */
  registerLocalEncodedVideoFrameObserver(frameObserver: id): int;
  // ---- audio routing ----
  setAudioRoute(audioRoute: ByteRTCAudioRoute): int;
  getAudioRoute(): ByteRTCAudioRoute;
  enableExternalSoundCard(enable: bool): int;
  setDefaultAudioRoute(audioRoute: ByteRTCAudioRoute): int;
  // ---- CDN push / public streams ----
  /** `observer` is expected to be a ByteRTCMixedStreamObserver. */
  startPushMixedStreamToCDN(taskID: NSString, config: ByteRTCMixedStreamConfig, observer: id): int;
  updatePushMixedStreamToCDN(taskID: NSString, config: ByteRTCMixedStreamConfig): int;
  /** `observer` is expected to be a ByteRTCPushSingleStreamToCDNObserver. */
  startPushSingleStreamToCDN(taskID: NSString, singleStream: ByteRTCPushSingleStreamParam, observer: id): int;
  stopPushStreamToCDN(taskID: NSString): int;
  startPushPublicStream(publicStreamId: NSString, publicStream: ByteRTCPublicStreaming): int;
  stopPushPublicStream(publicStreamId: NSString): int;
  updatePublicStreamParam(publicStreamId: NSString, publicStream: ByteRTCPublicStreaming): int;
  startPlayPublicStream(publicStreamId: NSString): int;
  stopPlayPublicStream(publicStreamId: NSString): int;
  setPublicStreamVideoCanvas(publicStreamId: NSString, canvas: ByteRTCVideoCanvas): int;
  setPublicStreamVideoSink(publicStreamId: NSString, videoSink: id, requiredFormat: ByteRTCVideoSinkPixelFormat): int;
  setPublicStreamAudioPlaybackVolume(publicStreamId: NSString, volume: NSInteger): int;
  // ---- external / processed frames ----
  pushExternalVideoFrame(frame: ByteRTCVideoFrame): int;
  enableAudioFrameCallback(method: ByteRTCAudioFrameCallbackMethod, format: ByteRTCAudioFormat): int;
  disableAudioFrameCallback(method: ByteRTCAudioFrameCallbackMethod): int;
  /** `audioFrameObserver` is expected to be a ByteRTCAudioFrameObserver. */
  registerAudioFrameObserver(audioFrameObserver: id): int;
  /** `processor` is expected to be a ByteRTCAudioFrameProcessor. */
  registerAudioProcessor(processor: id): int;
  enableAudioProcessor(method: ByteRTCAudioFrameMethod, format: ByteRTCAudioFormat): int;
  disableAudioProcessor(method: ByteRTCAudioFrameMethod): int;
  pushExternalAudioFrame(audioFrame: ByteRTCAudioFrame): int;
  pullExternalAudioFrame(audioFrame: ByteRTCAudioFrame): int;
  // ---- misc engine-level configuration ----
  setBusinessId(businessId: NSString): int;
  feedback(types: ByteRTCProblemFeedbackOption, info: ByteRTCProblemFeedbackInfo): int;
  getNativeHandle(): void;
  setPublishFallbackOption(option: ByteRTCPublishFallbackOption): int;
  setSubscribeFallbackOption(option: ByteRTCSubscribeFallbackOption): int;
  setRemoteUserPriority(priority: ByteRTCRemoteUserPriority, roomId: NSString, uid: NSString): int;
  setEncryptInfo(encrypt_type: ByteRTCEncryptType, key: NSString): int;
  /** `handler` is expected to be a ByteRTCEncryptHandler. */
  setCustomizeEncryptHandler(handler: id): int;
  createRTCRoom(roomId: NSString): ByteRTCRoom;
  // ---- screen capture ----
  pushScreenVideoFrame(frame: CVPixelBufferRef, pts: CMTime, rotation: int): int;
  setExtensionConfig(groupId: NSString): int;
  startScreenCapture(type: ByteRTCScreenMediaType, bundleId: NSString): int;
  updateScreenCapture(type: ByteRTCScreenMediaType): int;
  stopScreenCapture(): int;
  // NOTE: the `messsage` parameter name (triple "s") is carried over verbatim
  // from the generated binding.
  sendScreenCaptureExtensionMessage(messsage: NSData): int;
  setRuntimeParameters(parameters: NSDictionary): int;
  // ---- ASR / recording ----
  /** `handler` is expected to be a ByteRTCASREngineEventHandler. */
  startASR(asrConfig: ByteRTCASRConfig, handler: id): int;
  stopASR(): int;
  startFileRecording(streamIndex: ByteRTCStreamIndex, recordingConfig: ByteRTCRecordingConfig, recordingType: ByteRTCRecordingType): int;
  stopFileRecording(streamIndex: ByteRTCStreamIndex): int;
  startAudioRecording(recordingConfig: ByteRTCAudioRecordingConfig): int;
  stopAudioRecording(): int;
  getAudioEffectPlayer(): ByteRTCAudioEffectPlayer;
  getMediaPlayer(playerId: int): ByteRTCMediaPlayer;
  // ---- real-time messaging outside rooms ----
  login(token: NSString, uid: NSString): int;
  logout(): int;
  updateLoginToken(token: NSString): int;
  setServerParams(signature: NSString, url: NSString): int;
  getPeerOnlineStatus(peerUserId: NSString): int;
  sendUserMessageOutsideRoom(userId: NSString, messageStr: NSString, config: ByteRTCMessageConfig): NSInteger;
  sendUserBinaryMessageOutsideRoom(userId: NSString, messageStr: NSData, config: ByteRTCMessageConfig): NSInteger;
  sendServerMessage(messageStr: NSString): NSInteger;
  sendServerBinaryMessage(messageStr: NSData): NSInteger;
  // ---- network probing ----
  startNetworkDetection(isTestUplink: bool, expectedUplinkBitrate: int, isTestDownlink: bool, expectedDownlinkBitrate: int): int;
  stopNetworkDetection(): int;
  // ---- screen audio ----
  setScreenAudioSourceType(sourceType: ByteRTCAudioSourceType): int;
  setScreenAudioStreamIndex(index: ByteRTCStreamIndex): int;
  pushScreenAudioFrame(audioFrame: ByteRTCAudioFrame): int;
  // ---- custom video source / codec ----
  setVideoSourceType(type: ByteRTCVideoSourceType, streamIndex: ByteRTCStreamIndex): int;
  /** `handler` is expected to be a ByteRTCExternalVideoEncoderEventHandler. */
  setExternalVideoEncoderEventHandler(handler: id): int;
  pushExternalEncodedVideoFrame(streamIndex: ByteRTCStreamIndex, videoIndex: NSInteger, videoFrame: ByteRTCEncodedVideoFrame): int;
  setVideoDecoderConfig(key: ByteRTCRemoteStreamKey, config: ByteRTCVideoDecoderConfig): int;
  requestRemoteVideoKeyFrame(key: ByteRTCRemoteStreamKey): int;
  /** `observer` is expected to be a ByteRTCRemoteEncodedVideoFrameObserver. */
  registerRemoteEncodedVideoFrameObserver(observer: id): int;
  // NOTE: ByteRTCStreamSycnInfoConfig ("Sycn") is the imported type's actual
  // (misspelled) name; kept as-is for binary compatibility with './keytype'.
  sendStreamSyncInfo(data: NSData, config: ByteRTCStreamSycnInfoConfig): int;
  // ---- diagnostics / utilities ----
  startEchoTest(echoConfig: ByteRTCEchoTestConfig, delayTime: NSInteger): int;
  stopEchoTest(): int;
  setVideoWatermark(streamIndex: ByteRTCStreamIndex, imagePath: NSString, rtcWatermarkConfig: ByteRTCVideoWatermarkConfig): int;
  clearVideoWatermark(streamIndex: ByteRTCStreamIndex): int;
  /** `callback` is expected to be a ByteRTCVideoSnapshotCallbackDelegate. */
  takeLocalSnapshot(streamIndex: ByteRTCStreamIndex, callback: id): NSInteger;
  takeRemoteSnapshot(streamKey: ByteRTCRemoteStreamKey, callback: id): NSInteger;
  /** `cloudProxiesInfo`: array of ByteRTCCloudProxyInfo. */
  startCloudProxy(cloudProxiesInfo: NSArray): int;
  stopCloudProxy(): int;
  getSingScoringManager(): ByteRTCSingScoringManager;
  setDummyCaptureImagePath(filePath: NSString): int;
  getNetworkTimeInfo(): ByteRTCNetworkTimeInfo;
  getKTVManager(): ByteRTCKTVManager;
  startHardwareEchoDetection(testAudioFilePath: NSString): int;
  stopHardwareEchoDetection(): int;
  setCellularEnhancement(config: ByteRTCMediaTypeEnhancementConfig): int;
  /** `configurations`: array of ByteRTCLocalProxyInfo. */
  setLocalProxy(configurations: NSArray): int;
  setLocalVideoSink(index: ByteRTCStreamIndex, videoSink: id, requiredFormat: ByteRTCVideoSinkPixelFormat): int;
  setRemoteVideoSink(streamKey: ByteRTCRemoteStreamKey, videoSink: id, requiredFormat: ByteRTCVideoSinkPixelFormat): int;
  // ---- deprecated / selector-style overloads ----
  // `$`-separated names mirror multi-part Objective-C selectors.
  setVideoEncoderConfig$config$(streamIndex: ByteRTCStreamIndex, videoSolutions: NSArray): int;
  updateRemoteStreamVideoCanvas$withRenderMode$withBackgroundColor$(key: ByteRTCRemoteStreamKey, renderMode: ByteRTCRenderMode, backgroundColor: NSUInteger): int;
  checkVideoEffectLicense(licenseFile:
NSString): int; setVideoEffectAlgoModelPath(modelPath: NSString): void; enableVideoEffect(enabled: BOOL): int; setVideoEffectNodes(effectNodePaths: NSArray): int; updateVideoEffectNode(nodePath: NSString, nodeKey: NSString, nodeValue: float): int; setVideoEffectColorFilter(resPath: NSString): int; setVideoEffectColorFilterIntensity(intensity: float): int; setBackgroundSticker(modelPath: NSString, source: ByteRTCVirtualBackgroundSource): int; registerFaceDetectionObserver(faceDetectionObserver: id, interval: NSInteger): int; sendSEIMessage$andMessage$andRepeatCount$(streamIndex: ByteRTCStreamIndex, message: NSData, repeatCount: int): int; setAudioPlaybackDevice(audioPlaybackDevice: ByteRTCAudioPlaybackDevice): int; startLiveTranscoding(taskID: NSString, transcoding: ByteRTCLiveTranscoding, observer: id): int; stopLiveTranscoding(taskID: NSString): int; updateLiveTranscoding(taskID: NSString, transcoding: ByteRTCLiveTranscoding): int; getAudioMixingManager(): ByteRTCAudioMixingManager; muteAudioPlayback(muteState: ByteRTCMuteState): int; init(): this; } export declare class ByteRTCVideoDeviceManager { getVideoCaptureDevice(deviceID: NSString): int; setVideoCaptureDevice(deviceID: NSString): int; init(): this; } export declare class ByteRTCAudioMixingManager { startAudioMixing(mixId: int, filePath: NSString, config: ByteRTCAudioMixingConfig): void; stopAudioMixing(mixId: int): void; stopAllAudioMixing(): void; pauseAudioMixing(mixId: int): void; pauseAllAudioMixing(): void; resumeAudioMixing(mixId: int): void; resumeAllAudioMixing(): void; preloadAudioMixing(mixId: int, filePath: NSString): void; unloadAudioMixing(mixId: int): void; setAllAudioMixingVolume(volume: int, type: ByteRTCAudioMixingType): void; setAudioMixingVolume(mixId: int, volume: int, type: ByteRTCAudioMixingType): void; getAudioMixingDuration(mixId: int): int; getAudioMixingCurrentPosition(mixId: int): int; setAudioMixingPosition(mixId: int, position: int): void; setAudioMixingDualMonoMode(mixId: int, 
mode: ByteRTCAudioMixingDualMonoMode): void; setAudioMixingPitch(mixId: int, pitch: int): void; setAudioMixingPlaybackSpeed(mixId: int, speed: int): int; setAudioMixingLoudness(mixId: int, loudness: float): void; setAudioMixingProgressInterval(mixId: int, interval: int64_t): void; enableAudioMixingFrame(mixId: int, type: ByteRTCAudioMixingType): void; disableAudioMixingFrame(mixId: int): void; pushAudioMixingFrame(mixId: int, audioFrame: ByteRTCAudioFrame): int; getAudioTrackCount(mixId: int): int; selectAudioTrack(mixId: int, audioTrackIndex: int): void; registerAudioFileFrameObserver(observer: id): void; getAudioMixingPlaybackDuration(mixId: int): int; init(): this; } export declare class ByteRTCKTVPlayer { delegate: id; playMusic(musicId: NSString, trackType: ByteRTCAudioTrackType, playType: ByteRTCAudioPlayType): void; pauseMusic(musicId: NSString): void; resumeMusic(musicId: NSString): void; stopMusic(musicId: NSString): void; seekMusic(musicId: NSString, position: int): void; setMusicVolume(musicId: NSString, volume: int): void; switchAudioTrackType(musicId: NSString): void; setMusicPitch(musicId: NSString, pitch: int): void; init(): this; } export declare class ByteRTCRangeAudio { enableRangeAudio(enable: BOOL): void; updateReceiveRange(range: ByteRTCReceiveRange): int; updatePosition(pos: ByteRTCPosition): int; setAttenuationModel(type: ByteRTCAttenuationType, coefficient: float): int; setNoAttenuationFlags(flags: NSArray): void; init(): this; }