// NOTE(review): this file appears to have been flattened by an extraction
// step that stripped JSX element tags and most generic type arguments
// (e.g. `StyleProp<...>`, `React.useRef<...>` survive only as bare names).
// Code tokens are preserved byte-for-byte below; only comments were
// added/translated. Recover the original markup from version control
// before making behavioral changes.
import React, { RefObject, useEffect, useMemo } from 'react';
import { I18nManager, StyleProp, StyleSheet, Text, TextStyle, View, ViewStyle } from 'react-native';
import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration';
import {
  isLocalVoiceRecordingAttachment,
  isVoiceRecordingAttachment,
  LocalMessage,
  AudioAttachment as StreamAudioAttachment,
  VoiceRecordingAttachment as StreamVoiceRecordingAttachment,
} from 'stream-chat';

import { PlayPauseButton } from './PlayPauseButton';
import { useTheme } from '../../../contexts';
import { useStableCallback } from '../../../hooks';
import { useStateStore } from '../../../hooks';
import { useAudioPlayer } from '../../../hooks/useAudioPlayer';
import {
  NativeHandlers,
  SoundReturnType,
  VideoPayloadData,
  VideoProgressData,
  VideoSeekResponse,
} from '../../../native';
import { AudioPlayerState } from '../../../state-store/audio-player';
import { primitives } from '../../../theme';
import { AudioConfig } from '../../../types/types';
import { ProgressControl } from '../../ProgressControl/ProgressControl';
import { StableDurationLabel } from '../../ProgressControl/StableDurationLabel';
import { WaveProgressBar } from '../../ProgressControl/WaveProgressBar';
import { SpeedSettingsButton } from '../../ui/SpeedSettingsButton';

// Millisecond conversion constants: used both for formatting duration labels
// and for translating the native player's second-based callbacks into the
// millisecond positions the audio player store expects.
const ONE_HOUR_IN_MILLISECONDS = 3600 * 1000;
const ONE_SECOND_IN_MILLISECONDS = 1000;

// Register the dayjs duration plugin so `dayjs.duration()` is available below.
dayjs.extend(duration);

/**
 * Formats a duration in milliseconds as a playback clock label.
 *
 * Returns '00:00' for falsy input (0, NaN); uses 'HH:mm:ss' once the
 * duration reaches one hour, 'mm:ss' otherwise.
 */
const getAudioDurationLabel = (durationInMilliseconds: number) => {
  if (!durationInMilliseconds) {
    return '00:00';
  }
  return durationInMilliseconds / ONE_HOUR_IN_MILLISECONDS >= 1
    ? dayjs.duration(durationInMilliseconds, 'milliseconds').format('HH:mm:ss')
    : dayjs.duration(durationInMilliseconds, 'milliseconds').format('mm:ss');
};

// Attachment shape accepted by this component: audio config plus the subset
// of stream-chat attachment fields the UI reads, plus a local id/type tag.
export type AudioAttachmentType = AudioConfig &
  Pick<
    StreamAudioAttachment | StreamVoiceRecordingAttachment,
    'waveform_data' | 'asset_url' | 'title' | 'mime_type'
  > & {
    id: string;
    type: 'audio' | 'voiceRecording';
  };

export type AudioAttachmentProps = {
  item: AudioAttachmentType;
  message?: LocalMessage;
  hideProgressBar?: boolean;
  showTitle?: boolean;
  /**
   * If true, the speed settings button will be shown.
   */
  showSpeedSettings?: boolean;
  testID?: string;
  /**
   * If true, the audio attachment is in preview mode in the message input.
   */
  isPreview?: boolean;
  // NOTE(review): style generic arguments (e.g. StyleProp<ViewStyle>) were
  // stripped by the flattening — restore from VCS.
  containerStyle?: StyleProp;
  indicator?: React.ReactNode;
  styles?: {
    container?: StyleProp;
    playPauseButton?: StyleProp;
    speedSettingsButton?: StyleProp;
    durationText?: StyleProp;
  };
};

// Narrow selector over the audio-player state store: the component only
// re-renders when one of these five fields changes.
const audioPlayerSelector = (state: AudioPlayerState) => ({
  currentPlaybackRate: state.currentPlaybackRate,
  duration: state.duration,
  isPlaying: state.isPlaying,
  position: state.position,
  progress: state.progress,
});

/**
 * AudioAttachment
 * UI Component to preview the audio files
 */
export const AudioAttachment = (props: AudioAttachmentProps) => {
  // Ref handed to the native sound player (Native CLI apps path).
  const soundRef = React.useRef(null);
  const styles = useStyles();
  const {
    hideProgressBar = false,
    item,
    message,
    showSpeedSettings = false,
    showTitle = true,
    testID,
    isPreview = false,
    containerStyle,
    styles: stylesProps,
    indicator,
  } = props;

  const isVoiceRecording = isVoiceRecordingAttachment(item);

  // The requester key scopes this player instance: previews share the fixed
  // 'preview' key; message attachments combine the thread parent id (when
  // present) with the message id.
  const audioPlayer = useAudioPlayer({
    duration: item.duration ?? 0,
    mimeType: item.mime_type ?? '',
    requester: isPreview
      ? 'preview'
      : message?.id && `${message?.parent_id ?? message?.id}${message?.id}`,
    type: isVoiceRecording ? 'voiceRecording' : 'audio',
    uri: item.asset_url ?? '',
  });

  const { duration, isPlaying, position, progress, currentPlaybackRate } = useStateStore(
    audioPlayer.state,
    audioPlayerSelector,
  );

  // Initialize the player for Native CLI apps once the sound ref is attached.
  useEffect(() => {
    if (soundRef.current) {
      audioPlayer.initPlayer({ playerRef: soundRef.current });
    }
  }, [audioPlayer]);

  // When an audio attachment in preview is removed, remove its player from
  // the pool (cleanup-only effect).
  useEffect(
    () => () => {
      if (isPreview) {
        audioPlayer.onRemove();
      }
    },
    [audioPlayer, isPreview],
  );

  /** This is for Native CLI Apps */
  const handleLoad = (payload: VideoPayloadData) => {
    // Voice recordings already carry the canonical duration in the attachment payload.
    if (isVoiceRecording) {
      return;
    }
    // Native player reports seconds; the store tracks milliseconds.
    audioPlayer.duration = payload.duration * ONE_SECOND_IN_MILLISECONDS;
  };

  /** This is for Native CLI Apps */
  const handleProgress = (data: VideoProgressData) => {
    const { currentTime } = data;
    audioPlayer.position = currentTime * ONE_SECOND_IN_MILLISECONDS;
  };

  /** This is for Native CLI Apps */
  const onSeek = (seekResponse: VideoSeekResponse) => {
    audioPlayer.position = seekResponse.currentTime * ONE_SECOND_IN_MILLISECONDS;
  };

  const handlePlayPause = () => {
    audioPlayer.toggle();
  };

  const handleEnd = async () => {
    await audioPlayer.stop();
  };

  // Scrubbing: pause while dragging, then seek to the released position.
  const dragStart = useStableCallback(() => {
    audioPlayer.pause();
  });
  const dragEnd = useStableCallback(async (currentProgress: number) => {
    // currentProgress is a 0..1 fraction of the total duration; seek() takes seconds.
    const positionInSeconds = (currentProgress * duration) / ONE_SECOND_IN_MILLISECONDS;
    await audioPlayer.seek(positionInSeconds);
  });

  const onSpeedChangeHandler = async () => {
    await audioPlayer.changePlaybackRate();
  };

  const {
    theme: {
      audioAttachment: {
        container,
        centerContainer,
        audioInfo,
        leftContainer,
        progressControlContainer,
        progressDurationText,
        rightContainer,
      },
      semantics,
      messageComposer: {
        fileAttachmentUploadPreview: { filenameText },
      },
    },
  } = useTheme();

  const maxDurationLabel = useMemo(() => getAudioDurationLabel(duration), [duration]);
  // Displayed label counts DOWN: remaining time, falling back to the full
  // duration when remaining hits zero.
  const remainingDuration = useMemo(() => Math.max(duration - position, 0), [duration, position]);
  const progressDuration = useMemo(
    () => getAudioDurationLabel(remainingDuration || duration),
    [duration, remainingDuration],
  );

  // NOTE(review): the JSX below had its element tags stripped by the
  // flattening — only the embedded expressions survive, and it is NOT valid
  // as written. Preserved verbatim; restore the markup from version control.
  return ( {showTitle ? ( {isVoiceRecordingAttachment(item) || isLocalVoiceRecordingAttachment(item) ? 'Voice Message' : item.title} ) : null} {indicator ? ( indicator ) : ( {!hideProgressBar && ( {item.waveform_data ? ( ) : ( )} )} )} {NativeHandlers.Sound?.Player && ( } testID='sound-player' uri={item.asset_url} /> )} {showSpeedSettings && !indicator ? ( ) : null} );
};

/**
 * Memoized StyleSheet for the attachment; rebuilt only when the theme's
 * semantic tokens change.
 */
const useStyles = () => {
  const {
    theme: { semantics },
  } = useTheme();

  return useMemo(() => {
    return StyleSheet.create({
      container: {
        alignItems: 'center',
        flexDirection: 'row',
        padding: primitives.spacingSm,
        gap: primitives.spacingXs,
        minWidth: 256, // TODO: Fix this
        borderColor: semantics.borderCoreDefault,
        borderWidth: 1,
      },
      audioInfo: {
        alignItems: 'center',
        flexDirection: 'row',
        gap: primitives.spacingXxs,
      },
      centerContainer: {
        gap: primitives.spacingXxs,
      },
      filenameText: {
        color: semantics.textPrimary,
        fontSize: primitives.typographyFontSizeSm,
        fontWeight: primitives.typographyFontWeightSemiBold,
        lineHeight: primitives.typographyLineHeightTight,
      },
      leftContainer: {
        padding: primitives.spacingXxs,
      },
      progressControlContainer: {
        flex: 1,
      },
      progressDurationText: {
        color: semantics.textPrimary,
        // Tabular numerals keep the countdown label from jittering in width.
        fontVariant: ['tabular-nums'],
        fontSize: primitives.typographyFontSizeXs,
        fontWeight: primitives.typographyFontWeightRegular,
        lineHeight: primitives.typographyLineHeightTight,
      },
      rightContainer: {},
    });
  }, [semantics]);
};

// NOTE(review): the displayName looks like it doubles as a theme lookup path —
// confirm against the theming system before renaming.
AudioAttachment.displayName = 'AudioAttachment{messageComposer{audioAttachment}}';