From 78887b0c71fa294c9f1d34217117a9240a4e46d2 Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 26 Mar 2026 08:11:33 +0100 Subject: [PATCH] fix: prevent audio recorder player UI from overflowing the parent boundaries --- .../__tests__/WaveProgressBar.test.tsx | 6 ++-- src/components/AudioPlayback/AudioPlayer.ts | 6 ++-- .../__tests__/AudioPlayer.test.ts | 18 ++++++++++ .../__tests__/WithAudioPlayback.test.tsx | 5 +-- .../__tests__/AudioRecorder.test.tsx | 36 +++++++++++++++++++ .../AudioRecorder/styling/AudioRecorder.scss | 11 +++--- .../MessageComposer/MessageComposer.tsx | 2 +- .../TextareaComposer/TextareaComposer.tsx | 7 ---- src/context/ChannelListContext.tsx | 6 +--- 9 files changed, 70 insertions(+), 27 deletions(-) diff --git a/src/components/Attachment/__tests__/WaveProgressBar.test.tsx b/src/components/Attachment/__tests__/WaveProgressBar.test.tsx index ba9f585fd3..447dc3f0d5 100644 --- a/src/components/Attachment/__tests__/WaveProgressBar.test.tsx +++ b/src/components/Attachment/__tests__/WaveProgressBar.test.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { act, render, screen } from '@testing-library/react'; +import { act, render, screen, waitFor } from '@testing-library/react'; import { fromPartial } from '@total-typescript/shoehorn'; import { WaveProgressBar } from '../../AudioPlayback'; import { ResizeObserverMock } from '../../../mock-builders/browser'; @@ -95,7 +95,9 @@ describe('WaveProgressBar', () => { '2px', ), ).toBeTruthy(); - expect(screen.getAllByTestId(AMPLITUDE_BAR_TEST_ID)).toHaveLength(7); + await waitFor(() => { + expect(screen.getAllByTestId(AMPLITUDE_BAR_TEST_ID)).toHaveLength(7); + }); }); it('does not recalculate the number of bars on root resize if ResizeObserver is unsupported', () => { diff --git a/src/components/AudioPlayback/AudioPlayer.ts b/src/components/AudioPlayback/AudioPlayer.ts index d1388c5371..8e29b675bc 100644 --- a/src/components/AudioPlayback/AudioPlayer.ts +++ 
b/src/components/AudioPlayback/AudioPlayer.ts @@ -526,6 +526,7 @@ export class AudioPlayer { stop = () => { this.pause(); + this.state.partialNext({ isPlaying: false }); this.setSecondsElapsed(0); if (this.elementRef) this.elementRef.currentTime = 0; }; @@ -628,10 +629,7 @@ export class AudioPlayer { if (audioElement) { this.updateDurationFromElement(audioElement); } - this.state.partialNext({ - isPlaying: false, - secondsElapsed: audioElement?.duration ?? this.durationSeconds ?? 0, - }); + this.stop(); }; const handleError = (e: HTMLMediaElementEventMap['error']) => { diff --git a/src/components/AudioPlayback/__tests__/AudioPlayer.test.ts b/src/components/AudioPlayback/__tests__/AudioPlayer.test.ts index cc183c9d7f..4015de44f4 100644 --- a/src/components/AudioPlayback/__tests__/AudioPlayer.test.ts +++ b/src/components/AudioPlayback/__tests__/AudioPlayer.test.ts @@ -269,6 +269,24 @@ describe('AudioPlayer', () => { expect(player.elementRef.currentTime).toBe(0); }); + it('ended event resets playback state to initial position', () => { + const player = makePlayer(); + + player.play(); + player.state.partialNext({ isPlaying: true }); + player.setSecondsElapsed(50); + + expect(player.state.getLatestValue().secondsElapsed).toBe(50); + + player.elementRef.dispatchEvent(new Event('ended')); + + const st = player.state.getLatestValue(); + expect(st.isPlaying).toBe(false); + expect(st.secondsElapsed).toBe(0); + expect(st.progressPercent).toBe(0); + expect(player.elementRef.currentTime).toBe(0); + }); + it('togglePlay delegates to play() / pause()', async () => { const p = makePlayer(); diff --git a/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.tsx b/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.tsx index 28959312a7..589716bffa 100644 --- a/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.tsx +++ b/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.tsx @@ -185,7 +185,7 @@ describe('WithAudioPlayback + useAudioPlayer', () 
=> { expect(st.progressPercent).toBeCloseTo(25, 5); }); - it('subscriptions: sets isPlaying=false and secondsElapsed to duration on Event "ended"', () => { + it('subscriptions: resets playback state on Event "ended"', () => { let player; renderWithProvider({ allowConcurrentPlayback, @@ -216,7 +216,8 @@ describe('WithAudioPlayback + useAudioPlayer', () => { const st = player.state.getLatestValue(); expect(st.isPlaying).toBe(false); - expect(st.secondsElapsed).toBe(200); + expect(st.secondsElapsed).toBe(0); + expect(st.progressPercent).toBe(0); }); it('subscriptions: error with MediaError.code=4 logs and sets canPlayRecord=false', () => { diff --git a/src/components/MediaRecorder/AudioRecorder/__tests__/AudioRecorder.test.tsx b/src/components/MediaRecorder/AudioRecorder/__tests__/AudioRecorder.test.tsx index ef828b6b16..f78cdad94b 100644 --- a/src/components/MediaRecorder/AudioRecorder/__tests__/AudioRecorder.test.tsx +++ b/src/components/MediaRecorder/AudioRecorder/__tests__/AudioRecorder.test.tsx @@ -34,6 +34,10 @@ import { AudioRecorder } from '../AudioRecorder'; import { MediaRecordingState } from '../../classes'; import { WithAudioPlayback } from '../../../AudioPlayback'; import { ChatViewContext } from '../../../ChatView/ChatView'; +import type { + AppSettingsAPIResponse, + SendFileAPIResponse, +} from 'stream-chat'; const chatViewContextValue = { activeChatView: 'channels', @@ -376,6 +380,38 @@ describe('MessageInput', () => { }); expect(sendMessageSpy).not.toHaveBeenCalled(); }); + + it('renders voice preview slot after stopping recording when multiple async messages are enabled', async () => { + MediaRecorderMock.autoEmitDataOnStop = true; + const { + channels: [channel], + client, + } = await initClientWithChannels({ + channelsData: [{ channel: { own_capabilities: ['upload-file'] } }], + }); + + vi.spyOn(client, 'getAppSettings').mockResolvedValue({} as AppSettingsAPIResponse); + vi.spyOn(channel, 'sendFile').mockResolvedValue({ 
file: fileObjectURL, + } as SendFileAPIResponse); + + await renderComponent({ + channelStateCtx: { channel }, + chatCtx: { client }, + props: { asyncMessagesMultiSendEnabled: true }, + }); + + fireEvent.click(screen.getByTestId(START_RECORDING_AUDIO_BUTTON_TEST_ID)); + await waitFor(() => { + expect(screen.getByTestId(AUDIO_RECORDER_TEST_ID)).toBeInTheDocument(); + }); + + fireEvent.click(screen.getByTestId(AUDIO_RECORDER_STOP_BTN_TEST_ID)); + + await waitFor(() => { + expect(screen.getByTestId('voice-preview-slot')).toBeInTheDocument(); + }); + }); }); const recorderMock = {}; diff --git a/src/components/MediaRecorder/AudioRecorder/styling/AudioRecorder.scss b/src/components/MediaRecorder/AudioRecorder/styling/AudioRecorder.scss index 97ea579de6..3dd7ff73e6 100644 --- a/src/components/MediaRecorder/AudioRecorder/styling/AudioRecorder.scss +++ b/src/components/MediaRecorder/AudioRecorder/styling/AudioRecorder.scss @@ -14,6 +14,7 @@ .str-chat__audio-recorder__recording-playback, .str-chat__audio-recorder__recording-preview { flex: 1; + min-width: 0; display: flex; align-items: center; gap: var(--spacing-md); @@ -101,13 +102,12 @@ .str-chat__wave-progress-bar__track-container, .str-chat__waveform-box-container { - //flex: 1; + flex: 1 1 auto; display: flex; align-items: center; - width: 100%; + width: auto; min-width: 0; //overflow-x: hidden; - flex-shrink: 1; } .str-chat__wave-progress-bar__track-container { @@ -120,7 +120,7 @@ .str-chat__wave-progress-bar__track { display: flex; min-width: 0; - width: unset; + width: 100%; align-items: center; flex-wrap: nowrap; height: 2rem; @@ -139,8 +139,7 @@ padding-inline: var(--spacing-xs); .str-chat__wave-progress-bar__track { - flex: unset; - //width: max-content; + flex: 1 1 auto; } } diff --git a/src/components/MessageComposer/MessageComposer.tsx b/src/components/MessageComposer/MessageComposer.tsx index 026a922bb1..70888f629a 100644 --- a/src/components/MessageComposer/MessageComposer.tsx +++ 
b/src/components/MessageComposer/MessageComposer.tsx @@ -95,7 +95,7 @@ const MessageComposerProvider = (props: PropsWithChildren) useEffect( () => () => { - messageComposer.createDraft(); + messageComposer.createDraft().finally(() => messageComposer.clear()); }, [messageComposer], ); diff --git a/src/components/TextareaComposer/TextareaComposer.tsx b/src/components/TextareaComposer/TextareaComposer.tsx index d0357cc117..f3fc9e7568 100644 --- a/src/components/TextareaComposer/TextareaComposer.tsx +++ b/src/components/TextareaComposer/TextareaComposer.tsx @@ -267,13 +267,6 @@ export const TextareaComposer = ({ textareaRef.current.focus(); }, [attachments, focus, quotedMessage, textareaRef]); - useEffect( - () => () => { - messageComposer.clear(); - }, - [messageComposer], - ); - useLayoutEffect(() => { /** * It is important to perform set text and after that the range diff --git a/src/context/ChannelListContext.tsx b/src/context/ChannelListContext.tsx index 014f7ab5d9..8cba1706fe 100644 --- a/src/context/ChannelListContext.tsx +++ b/src/context/ChannelListContext.tsx @@ -41,14 +41,10 @@ export const ChannelListContextProvider = ({ ); -export const useChannelListContext = (componentName?: string) => { +export const useChannelListContext = () => { const contextValue = useContext(ChannelListContext); if (!contextValue) { - console.warn( - `The useChannelListContext hook was called outside of the ChannelListContext provider. Make sure this hook is called within the ChannelList component. The errored call is located in the ${componentName} component.`, - ); - return {} as ChannelListContextValue; }