-
-
Notifications
You must be signed in to change notification settings - Fork 49
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
1a37f4f
commit 51d4129
Showing
18 changed files
with
367 additions
and
127 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,97 @@ | ||
import { ActionIcon, ActionIconProps, Tag } from '@lobehub/ui'; | ||
import { Slider } from 'antd'; | ||
import { Pause, Play, StopCircle } from 'lucide-react'; | ||
import React, { memo, useMemo } from 'react'; | ||
import { Flexbox } from 'react-layout-kit'; | ||
|
||
import { AudioProps } from '@/hooks/useStreamAudioPlayer'; | ||
import { secondsToMinutesAndSeconds } from '@/utils/secondsToMinutesAndSeconds'; | ||
|
||
/**
 * Props for the streaming audio player UI.
 */
export interface StreamAudioPlayerProps {
  /** When true, show a pause button while playing instead of a stop button. */
  allowPause?: boolean;
  /** Playback state and transport controls (see `AudioProps`), typically from `useStreamAudioPlayer`. */
  audio: AudioProps;
  /** Size forwarded to the play/pause/stop ActionIcon. */
  buttonSize?: ActionIconProps['size'];
  className?: string;
  /** Render the seek slider (component default: true). */
  showSlider?: boolean;
  /** Render the time readout (component default: true). */
  showTime?: boolean;
  style?: React.CSSProperties;
  /** Render the time readout as a Tag chip or plain text (component default: 'text'). */
  timeRender?: 'tag' | 'text';
  /** Extra style applied to the time readout element. */
  timeStyle?: React.CSSProperties;
  /**
   * Which time to display: remaining ('left', component default),
   * elapsed ('current'), or "elapsed / total" ('combine').
   */
  timeType?: 'left' | 'current' | 'combine';
}
|
||
/**
 * Playback controls for a streaming audio source: a play/pause-or-stop
 * button, an optional seek slider, and an optional time readout.
 *
 * Purely presentational — all playback state and transport actions come in
 * via the `audio` prop (see `AudioProps`).
 */
const StreamAudioPlayer = memo<StreamAudioPlayerProps>(
  ({
    style,
    timeStyle,
    buttonSize,
    className,
    audio,
    allowPause,
    timeType = 'left',
    showTime = true,
    showSlider = true,
    timeRender = 'text',
  }) => {
    const { isPlaying, play, stop, pause, duration, setTime, currentTime } = audio;

    // Pre-format all three readouts; only the one matching `timeType` renders.
    const formattedLeftTime = secondsToMinutesAndSeconds(duration - currentTime);
    const formattedCurrentTime = secondsToMinutesAndSeconds(currentTime);
    const formattedDuration = secondsToMinutesAndSeconds(duration);

    // Readout element: a Tag chip or a plain <time>. The wrapper is typed
    // explicitly (the previous version used an untyped `(props: any)`).
    const Time = useMemo(
      () =>
        timeRender === 'tag'
          ? Tag
          : (props: React.ComponentPropsWithoutRef<'time'>) => <time {...props} />,
      [timeRender],
    );

    return (
      <Flexbox
        align={'center'}
        className={className}
        gap={8}
        horizontal
        style={{ paddingRight: 8, width: '100%', ...style }}
      >
        {allowPause ? (
          <ActionIcon
            icon={isPlaying ? Pause : Play}
            onClick={isPlaying ? pause : play}
            size={buttonSize}
            style={{ flex: 'none' }}
          />
        ) : (
          <ActionIcon
            icon={isPlaying ? StopCircle : Play}
            onClick={isPlaying ? stop : play}
            size={buttonSize}
            style={{ flex: 'none' }}
          />
        )}
        {showSlider && (
          <Slider
            max={duration}
            min={0}
            onChange={setTime}
            style={{ flex: 1 }}
            // Guard the formatter's optional argument instead of silencing
            // the type mismatch with `as any`.
            tooltip={{ formatter: (value) => secondsToMinutesAndSeconds(value ?? 0) }}
            value={currentTime}
          />
        )}
        {showTime && (
          <Time style={{ flex: 'none', ...timeStyle }}>
            {timeType === 'left' && formattedLeftTime}
            {timeType === 'current' && formattedCurrentTime}
            {timeType === 'combine' && (
              <span>
                {formattedCurrentTime}
                <span style={{ opacity: 0.66 }}>{` / ${formattedDuration}`}</span>
              </span>
            )}
          </Time>
        )}
      </Flexbox>
    );
  },
);

export default StreamAudioPlayer;
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,156 @@ | ||
import { useCallback, useEffect, useRef, useState } from 'react'; | ||
|
||
// One decoded audio segment plus its absolute position (in seconds) within
// the overall stream.
type BufferQueueItem = {
  // Decoded PCM data for this segment.
  audioBuffer: AudioBuffer;
  // Absolute offset (seconds) at which this segment ends in the stream.
  endOffset: number;
  // Absolute offset (seconds) at which this segment starts in the stream.
  startOffset: number;
};
|
||
/**
 * Playback state and transport controls consumed by the player UI.
 */
export interface AudioProps {
  /** Current playback position in seconds. */
  currentTime: number;
  /** Total loaded duration in seconds. */
  duration: number;
  /** True while audio is actively playing. */
  isPlaying: boolean;
  /** Pause playback. */
  pause: () => void;
  /** Start (or resume) playback. */
  play: () => void;
  /** Seek to an absolute position in seconds. */
  setTime: (time: number) => void;
  /** Stop playback and rewind to the beginning. */
  stop: () => void;
}
/**
 * Full return value of `useStreamAudioPlayer`: the `AudioProps` transport
 * surface plus the streaming-side controls.
 */
export interface StreamAudioPlayerHook extends AudioProps {
  /** Append a decoded segment to the playback queue, extending the duration. */
  load: (audioBuffer: AudioBuffer) => void;
  /** Stop playback and discard all loaded segments. */
  reset: () => void;
}
|
||
export const useStreamAudioPlayer = (): StreamAudioPlayerHook => { | ||
const audioContextRef = useRef<AudioContext | null>(null); | ||
const [bufferQueue, setBufferQueue] = useState<BufferQueueItem[]>([]); | ||
const [duration, setDuration] = useState(0); | ||
const [isPlaying, setIsPlaying] = useState(false); | ||
const [currentTime, setCurrentTime] = useState(0); | ||
const sourceNodeRef = useRef<AudioBufferSourceNode | null>(null); | ||
const startTimeRef = useRef(0); | ||
const startOffsetRef = useRef(0); | ||
|
||
const initAudioContext = () => { | ||
if (!audioContextRef.current) { | ||
audioContextRef.current = new AudioContext(); | ||
} | ||
}; | ||
|
||
const addAudioBuffer = useCallback( | ||
(audioBuffer: AudioBuffer) => { | ||
initAudioContext(); | ||
const context = audioContextRef.current; | ||
if (context) { | ||
const newItem: BufferQueueItem = { | ||
audioBuffer, | ||
endOffset: duration + audioBuffer.duration, | ||
startOffset: duration, | ||
}; | ||
setBufferQueue((prevQueue) => [...prevQueue, newItem]); | ||
setDuration(newItem.endOffset); | ||
} | ||
}, | ||
[duration], | ||
); | ||
|
||
const playAudio = useCallback(() => { | ||
if (!audioContextRef.current || isPlaying) return; | ||
|
||
const context = audioContextRef.current; | ||
const sourceNode = context.createBufferSource(); | ||
sourceNodeRef.current = sourceNode; | ||
|
||
const nextBufferItem = bufferQueue.find((item) => item.startOffset >= currentTime); | ||
if (nextBufferItem) { | ||
sourceNode.buffer = nextBufferItem.audioBuffer; | ||
const playOffset = currentTime - nextBufferItem.startOffset; | ||
sourceNode.connect(context.destination); | ||
sourceNode.start(0, playOffset); | ||
startTimeRef.current = context.currentTime - playOffset; | ||
startOffsetRef.current = playOffset; | ||
|
||
setIsPlaying(true); | ||
|
||
sourceNode.addEventListener('ended', () => { | ||
// 检查是否是队列中的最后一段音频 | ||
setIsPlaying(false); | ||
if (nextBufferItem === bufferQueue.at(-1)) { | ||
setCurrentTime(0); // 回到开头 | ||
} else { | ||
setCurrentTime(nextBufferItem.endOffset); | ||
} | ||
sourceNodeRef.current = null; | ||
}); | ||
} | ||
}, [bufferQueue, currentTime, isPlaying]); | ||
|
||
const pauseAudio = useCallback(() => { | ||
if (!audioContextRef.current || !isPlaying) return; | ||
|
||
sourceNodeRef.current?.stop(); | ||
setIsPlaying(false); | ||
}, [isPlaying]); | ||
|
||
const seekAudio = useCallback( | ||
(time: number) => { | ||
if (time < 0 || time > duration) return; | ||
|
||
const wasPlaying = isPlaying; | ||
pauseAudio(); | ||
setCurrentTime(time); | ||
|
||
if (wasPlaying) { | ||
playAudio(); | ||
} | ||
}, | ||
[duration, isPlaying, pauseAudio, playAudio], | ||
); | ||
|
||
// Update currentTime while playing | ||
useEffect(() => { | ||
let intervalId: any; | ||
|
||
if (isPlaying) { | ||
intervalId = setInterval(() => { | ||
if (!audioContextRef.current) return; | ||
const elapsed = audioContextRef.current.currentTime - startTimeRef.current; | ||
setCurrentTime(startOffsetRef.current + elapsed); | ||
}, 100); | ||
} | ||
|
||
return () => { | ||
clearInterval(intervalId); | ||
}; | ||
}, [isPlaying]); | ||
|
||
// Clean up the audio context on unmount | ||
useEffect(() => { | ||
return () => { | ||
audioContextRef.current?.close(); | ||
}; | ||
}, []); | ||
|
||
const stopAudio = useCallback(() => { | ||
pauseAudio(); | ||
seekAudio(0); | ||
}, [pauseAudio, seekAudio]); | ||
|
||
const resetAudio = useCallback(() => { | ||
pauseAudio(); | ||
seekAudio(0); | ||
setBufferQueue([]); | ||
setDuration(0); | ||
}, [pauseAudio, seekAudio]); | ||
|
||
return { | ||
currentTime, | ||
duration, | ||
isPlaying, | ||
load: addAudioBuffer, | ||
pause: pauseAudio, | ||
play: playAudio, | ||
reset: resetAudio, | ||
setTime: seekAudio, | ||
stop: stopAudio, | ||
}; | ||
}; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.