Commit b6497dc

perf: use shallow-ref for web audio api class (#623)
* perf: use shallow-ref for web audio api class
* Update packages/stage-ui/src/stores/audio.ts
1 parent c3e37a9 commit b6497dc
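
Why shallowRef helps here: Vue's ref() deeply converts object values into reactive proxies, so wrapping an AudioContext (or any Web Audio node) in ref() means every property access goes through a proxy and is tracked, even though Vue has no reason to observe the internals of a native audio object. shallowRef() stores the raw instance and only reacts when .value itself is reassigned. A minimal sketch of the difference, not code from this repo:

import { isReactive, ref, shallowRef } from 'vue'

// ref() deeply converts object values: .value becomes a reactive proxy
// of the AudioContext, and every property read through it is tracked.
const deep = ref(new AudioContext())
console.log(isReactive(deep.value)) // true

// shallowRef() keeps the raw instance: only reassigning .value triggers
// reactivity, so Web Audio calls hit the native object directly.
const shallow = shallowRef(new AudioContext())
console.log(isReactive(shallow.value)) // false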

File tree

5 files changed (+18 additions, -21 deletions)


apps/realtime-audio/src/pages/index.vue

Lines changed: 4 additions & 6 deletions
@@ -11,7 +11,7 @@ import type {
 
 import { useLocalStorage, useWebSocket } from '@vueuse/core'
 import { streamText } from '@xsai/stream-text'
-import { computed, ref, toRaw, watch } from 'vue'
+import { computed, ref, shallowRef, toRaw, watch } from 'vue'
 
 import { useQueue } from '../composables/queue'
 
@@ -21,7 +21,7 @@ const model = useLocalStorage('settings/llm/model', 'openai/gpt-4o-mini')
 const sendingMessage = ref('')
 const messages = ref<Message[]>([])
 const streamingMessage = ref<AssistantMessage>({ role: 'assistant', content: '' })
-const audioContext = ref<AudioContext>()
+const audioContext = shallowRef<AudioContext>()
 
 const voiceId = useLocalStorage('settings/voiceId', 'lNxY9WuCBCZCISASyJ55')
 const voiceApiKey = useLocalStorage('settings/voiceApiKey', '')
@@ -46,16 +46,14 @@ async function handleChatSendMessage() {
   messages.value.push({ role: 'user', content: sendingMessage.value })
   messages.value.push(streamingMessage.value)
 
-  const response = await streamText({
+  const response = streamText({
     baseURL: baseUrl.value,
     apiKey: apiKey.value,
     model: model.value,
     messages: messages.value.slice(0, messages.value.length - 1).map(msg => toRaw(msg)),
   })
 
-  for await (const chunk of response.chunkStream) {
-    const text = chunk.choices[0].delta.content || ''
-
+  for await (const text of response.textStream) {
     if (text !== '') {
       sendPayload({
         'xi-api-key': voiceApiKey.value,
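
Two things change in the last hunk: streamText() is no longer awaited, suggesting the helper hands back its stream handles synchronously, and the loop iterates response.textStream instead of pulling deltas out of chunkStream by hand. The consumer-side shape of that loop, lifted into a standalone sketch (sendText below is a hypothetical stand-in for the sendPayload call):

// Sketch only: textStream matches the async iterable used in the diff above;
// sendText is a hypothetical placeholder for sendPayload(...).
async function pipeTextToVoice(
  textStream: AsyncIterable<string>,
  sendText: (text: string) => void,
): Promise<void> {
  for await (const text of textStream) {
    if (text !== '')
      sendText(text)
  }
}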

packages/stage-ui/src/components/gadgets/AudioSpectrum.story.vue

Lines changed: 5 additions & 5 deletions
@@ -1,14 +1,14 @@
 <script setup lang="ts">
 import { FieldRange, Radio } from '@proj-airi/ui'
-import { onBeforeUnmount, ref, watch } from 'vue'
+import { onBeforeUnmount, ref, shallowRef, watch } from 'vue'
 
 import AudioSpectrum from './AudioSpectrum.vue'
 import AudioSpectrumVisualizer from './AudioSpectrumVisualizer.vue'
 
 // Create a mock oscillator to generate audio for demonstration
-const audioContext = ref<AudioContext>()
-const oscillator = ref<OscillatorNode>()
-const mediaStream = ref<MediaStream>()
+const audioContext = shallowRef<AudioContext>()
+const oscillator = shallowRef<OscillatorNode>()
+const mediaStream = shallowRef<MediaStream>()
 const isPlaying = ref(false)
 const frequency = ref(440) // A4 note
 const waveform = ref<OscillatorType>('sine')
@@ -19,7 +19,7 @@ const waveforms: OscillatorType[] = ['sine', 'square', 'sawtooth', 'triangle']
 function createMockAudioStream() {
   try {
     // Create audio context
-    audioContext.value = new (window.AudioContext || (window as any).webkitAudioContext)()
+    audioContext.value = new AudioContext()
 
     // Create oscillator
     oscillator.value = audioContext.value.createOscillator()
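
The rest of createMockAudioStream() sits outside this hunk. For orientation, a typical way to expose an oscillator as a MediaStream for a spectrum demo, using only standard Web Audio calls and not the story's actual code, looks like this:

// Hedged sketch: standard Web Audio wiring for a demo stream.
const ctx = new AudioContext()
const osc = ctx.createOscillator()
osc.type = 'sine'
osc.frequency.value = 440 // A4

// A MediaStreamAudioDestinationNode exposes the oscillator as a MediaStream.
const destination = ctx.createMediaStreamDestination()
osc.connect(destination)
osc.start()

const stream: MediaStream = destination.stream

Dropping the webkitAudioContext fallback is also safe in current major browsers, where the unprefixed AudioContext constructor is standard.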

packages/stage-ui/src/components/scenarios/providers/TranscriptionPlayground.vue

Lines changed: 2 additions & 2 deletions
@@ -3,7 +3,7 @@ import type { GenerateTranscriptionResult } from '@xsai/generate-transcription'
 
 import { FieldRange, FieldSelect } from '@proj-airi/ui'
 import { until } from '@vueuse/core'
-import { computed, onUnmounted, ref, watch } from 'vue'
+import { computed, onUnmounted, ref, shallowRef, watch } from 'vue'
 import { useI18n } from 'vue-i18n'
 
 import { useAudioAnalyzer } from '../../../composables/audio/audio-analyzer'
@@ -30,7 +30,7 @@ const isSpeaking = ref(false)
 
 const errorMessage = ref<string>('')
 
-const audioContext = ref<AudioContext>()
+const audioContext = shallowRef<AudioContext>()
 const dataArray = ref<Uint8Array<ArrayBuffer>>()
 const animationFrame = ref<number>()
 
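
The dataArray and animationFrame refs above belong to the usual analyser polling loop, which lives outside this hunk. A hedged sketch of that shape, with illustrative names rather than the component's real code:

// Hedged sketch: poll an AnalyserNode once per animation frame.
function startLevelMeter(analyser: AnalyserNode, onFrame: (data: Uint8Array) => void): () => void {
  const data = new Uint8Array(analyser.frequencyBinCount)
  let handle = 0

  const tick = () => {
    analyser.getByteFrequencyData(data) // fill the buffer with the current spectrum
    onFrame(data)
    handle = requestAnimationFrame(tick)
  }

  handle = requestAnimationFrame(tick)
  // The returned disposer mirrors the animationFrame ref + cancelAnimationFrame pattern.
  return () => cancelAnimationFrame(handle)
}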

packages/stage-ui/src/composables/queues.ts

Lines changed: 4 additions & 4 deletions
@@ -4,7 +4,7 @@ import type { UseQueueReturn } from '../utils/queue'
 import { sleep } from '@moeru/std'
 import { invoke } from '@vueuse/core'
 import { defineStore } from 'pinia'
-import { ref } from 'vue'
+import { ref, shallowRef } from 'vue'
 
 import { EMOTION_VALUES } from '../constants/emotions'
 import { createQueue } from '../utils/queue'
@@ -116,10 +116,10 @@ export const usePipelineCharacterSpeechPlaybackQueueStore = defineStore('pipelin
     onPlaybackFinishedHooks.value.push(hook)
   }
 
-  const currentAudioSource = ref<AudioBufferSourceNode>()
+  const currentAudioSource = shallowRef<AudioBufferSourceNode>()
 
-  const audioContext = ref<AudioContext>()
-  const audioAnalyser = ref<AnalyserNode>()
+  const audioContext = shallowRef<AudioContext>()
+  const audioAnalyser = shallowRef<AnalyserNode>()
 
   function connectAudioContext(context: AudioContext) {
     audioContext.value = context
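
The refs in this store point at a playback chain where an external AudioContext is injected through connectAudioContext, an AnalyserNode taps the signal, and each queued AudioBufferSourceNode is routed through it. A hedged sketch of that wiring, using standard Web Audio calls rather than the store's actual implementation:

// Hedged sketch: route one decoded buffer through an analyser to the speakers.
function playThroughAnalyser(ctx: AudioContext, analyser: AnalyserNode, buffer: AudioBuffer): AudioBufferSourceNode {
  const source = ctx.createBufferSource()
  source.buffer = buffer
  source.connect(analyser)
  analyser.connect(ctx.destination)
  source.start()
  // Keeping the source around (cf. currentAudioSource) allows playback to be stopped early.
  return source
}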

packages/stage-ui/src/stores/audio.ts

Lines changed: 3 additions & 4 deletions
@@ -1,6 +1,6 @@
 import { useDevicesList, useUserMedia } from '@vueuse/core'
 import { defineStore } from 'pinia'
-import { computed, nextTick, onMounted, onUnmounted, ref, watch } from 'vue'
+import { computed, nextTick, onMounted, onUnmounted, ref, shallowRef, watch } from 'vue'
 
 function calculateVolumeWithLinearNormalize(analyser: AnalyserNode) {
   const dataBuffer = new Uint8Array(analyser.frequencyBinCount)
@@ -66,13 +66,12 @@ function calculateVolume(analyser: AnalyserNode, mode: 'linear' | 'minmax' = 'li
 }
 
 export const useAudioContext = defineStore('audio-context', () => {
-  const audioContext = ref<AudioContext>(new AudioContext())
+  const audioContext = shallowRef<AudioContext>(new AudioContext())
 
   onUnmounted(async () => {
     // Close audio context
-    if (audioContext) {
+    if (audioContext)
       await audioContext.value.suspend()
-    }
   })
 
   return {
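
calculateVolumeWithLinearNormalize itself is mostly outside this diff; the technique its name describes, averaging byte-frequency data and normalizing to the 0..1 range, looks roughly like the following hedged sketch, which is not necessarily the store's exact math:

// Hedged sketch of a linear-normalized volume reading from an AnalyserNode.
function approximateVolume(analyser: AnalyserNode): number {
  const dataBuffer = new Uint8Array(analyser.frequencyBinCount)
  analyser.getByteFrequencyData(dataBuffer)

  let sum = 0
  for (const value of dataBuffer)
    sum += value

  // Byte values run 0..255, so dividing the mean by 255 yields 0..1.
  return sum / dataBuffer.length / 255
}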

0 commit comments
