Merged
58 changes: 33 additions & 25 deletions evi/evi-react-native/App.tsx
@@ -8,7 +8,6 @@ import {
  SafeAreaView,
  LayoutAnimation,
} from "react-native";
-import { useEvent } from 'expo'

// We use Hume's low-level typescript SDK for this example.
// The React SDK (@humeai/voice-react) does not support React Native.
@@ -22,6 +21,7 @@ import { HumeClient, type Hume } from "hume";
// The provided native module is a good starting place, but you should
// modify it to fit the audio recording needs of your specific app.
import NativeAudio, { AudioEventPayload } from "./modules/audio";
import VoiceIsolationModePrompt from "./VoiceIsolationModePrompt";

// Represents a chat message in the chat display.
interface ChatEntry {
@@ -55,6 +55,8 @@ const App = () => {
  const [isConnected, setIsConnected] = useState(false);
  const [isMuted, setIsMuted] = useState(false);
  const [chatEntries, setChatEntries] = useState<ChatEntry[]>([]);
  const [showVoiceIsolationPrompt, setShowVoiceIsolationPrompt] = useState(false);
  const [currentMicMode, setCurrentMicMode] = useState("Standard");
  const humeRef = useRef<HumeClient | null>(null);
  const addChatEntry = (entry: ChatEntry) => {
    setChatEntries((prev) => [...prev, entry]);
@@ -95,6 +97,14 @@ const App = () => {
      return;
    }

    const micMode = await NativeAudio.getMicrophoneMode();
    setCurrentMicMode(micMode);

    if (micMode !== "N/A" && micMode !== "Voice Isolation") {
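      // Microphone modes are supported on this device but Voice Isolation
      // isn't active, so prompt the user before connecting.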
      setShowVoiceIsolationPrompt(true);
      return
    }

    const chatSocket = hume.empathicVoice.chat.connect({
      configId: process.env.EXPO_PUBLIC_HUME_CONFIG_ID,
    });
@@ -142,50 +152,42 @@ const App = () => {
  };

  const handleDisconnect = async () => {
+    if (chatSocketRef.current) {
+      chatSocketRef.current.close();
+      chatSocketRef.current = null;
+    }
    try {
      await NativeAudio.stopRecording();
+      await NativeAudio.stopPlayback();
    } catch (error) {
      console.error("Error while stopping recording", error);
    }
-    if (chatSocketRef.current) {
-      chatSocketRef.current.close();
-    }
-
-    await NativeAudio.stopPlayback();
  };

  useEffect(() => {
    if (isConnected) {
-      handleConnect().catch((error) => {
-        console.error("Error while connecting:", error);
-      });
+      handleConnect()
    } else {
-      handleDisconnect().catch((error) => {
-        console.error("Error while disconnecting:", error);
-      });
+      handleDisconnect()
    }
    const onUnmount = () => {
-      NativeAudio.stopRecording().catch((error: any) => {
-        console.error("Error while stopping recording", error);
-      });
-      if (
-        chatSocketRef.current &&
-        chatSocketRef.current.readyState === WebSocket.OPEN
-      ) {
-        chatSocketRef.current?.close();
+      if (chatSocketRef.current) {
+        chatSocketRef.current.close();
+        chatSocketRef.current = null;
      }
+
+      NativeAudio.stopRecording();
+      NativeAudio.stopPlayback();
    };
    return onUnmount;
  }, [isConnected]);

  useEffect(() => {
    if (isMuted) {
-      NativeAudio.mute().catch((error) => {
-        console.error("Error while muting", error);
-      });
+      NativeAudio.mute();
    } else {
-      NativeAudio.unmute().catch((error) => {
-        console.error("Error while unmuting", error);
-      });
+      NativeAudio.unmute();
    }
  }, [isMuted]);
@@ -290,6 +292,12 @@ const App = () => {
          />
        </View>
      </SafeAreaView>

      <VoiceIsolationModePrompt
        isVisible={showVoiceIsolationPrompt}
        currentMode={currentMicMode}
        onDismiss={() => setShowVoiceIsolationPrompt(false)}
      />
    </View>
  );
};
73 changes: 73 additions & 0 deletions evi/evi-react-native/VoiceIsolationModePrompt.tsx
@@ -0,0 +1,73 @@
import React from 'react';
import {
  View,
  Text,
  Button,
  Linking,
  Platform,
  Modal,
} from 'react-native';
import NativeAudio from './modules/audio';

interface VoiceIsolationModePromptProps {
  isVisible: boolean;
  currentMode: string;
  onDismiss: () => void;
}

const VoiceIsolationModePrompt: React.FC<VoiceIsolationModePromptProps> = ({
  isVisible,
  currentMode,
  onDismiss,
}) => {
  const handleOpenSettings = async () => {
    if (Platform.OS === 'ios') {
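      // Open the system microphone-mode picker (Control Center UI) via the native module.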
      try {
        await NativeAudio.showMicrophoneModes();
      } catch (error) {
        // Fall back to the app's general settings if the API is unavailable
        Linking.openSettings();
      }
    } else {
      Linking.openSettings();
    }
    onDismiss();
  };

  const handleShowMeHow = () => {
    const supportUrl = 'https://support.apple.com/en-us/101993';
    Linking.openURL(supportUrl);
  };

  return (
    <Modal
      visible={isVisible}
      transparent={true}
      animationType="slide"
      onRequestClose={onDismiss}
    >
      <View style={{
        flex: 1,
        justifyContent: 'center',
        alignItems: 'center',
        backgroundColor: 'rgba(0, 0, 0, 0.5)'
      }}>
        <View style={{ backgroundColor: 'white', padding: 20, borderRadius: 10, width: '90%' }}>
          <Text>Enable voice isolation for the best experience</Text>

          <Text>
            Your device is currently using the {currentMode} microphone mode.
            Enabling Voice Isolation will provide the best audio experience
            in a noisy setting.
          </Text>

          <Button title="Open settings" onPress={handleOpenSettings} />
          <Button title="Show me how" onPress={handleShowMeHow} />
          <Button title="I'll do this later" onPress={onDismiss} />
        </View>
      </View>
    </Modal>
  );
};

export default VoiceIsolationModePrompt;
41 changes: 41 additions & 0 deletions evi/evi-react-native/modules/audio/ios/AudioModule.swift
@@ -99,6 +99,47 @@ public class AudioModule: Module {
AsyncFunction("stopPlayback") {
await _soundPlayer?.clearQueue()
}

AsyncFunction("showMicrophoneModes") {
if #available(iOS 15.0, *) {
let wasRecording = await self.audioHub.isRecording

        if !wasRecording {
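          // The system mic-mode picker is only shown while the app is actively
          // capturing audio, so start the microphone temporarily if needed.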
          try await self.prepare()
          try await self.audioHub.startMicrophone(handler: { _, _ in })
        }

        AVCaptureDevice.showSystemUserInterface(.microphoneModes)

        if !wasRecording {
          await self.audioHub.stopMicrophone()
        }
      } else {
        throw NSError(
          domain: "AudioModule", code: 3,
          userInfo: [NSLocalizedDescriptionKey: "Microphone modes are only available on iOS 15+"])
      }
    }

    AsyncFunction("getMicrophoneMode") { () -> String in
      if #available(iOS 15.0, *) {
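        // preferredMicrophoneMode is read-only and reflects the user's selection
        // in Control Center; the app can detect the mode but cannot change it.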
        let mode = AVCaptureDevice.preferredMicrophoneMode
        switch mode {
        case .standard:
          return "Standard"
        case .voiceIsolation:
          return "Voice Isolation"
        case .wideSpectrum:
          return "Wide Spectrum"
        default:
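          // Future iOS releases may add modes this switch doesn't know about.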
          throw NSError(
            domain: "AudioModule", code: 4,
            userInfo: [NSLocalizedDescriptionKey: "Unknown microphone mode encountered"])
        }
      } else {
        return "N/A"
      }
    }
  }

  private func getPermissions() async throws -> Bool {
4 changes: 3 additions & 1 deletion evi/evi-react-native/modules/audio/src/AudioModule.ts
@@ -1,6 +1,6 @@
import { NativeModule, requireNativeModule } from 'expo';

-import { AudioModuleEvents } from './AudioModule.types';
+import { AudioModuleEvents, MicrophoneMode } from './AudioModule.types';

declare class AudioModule extends NativeModule<AudioModuleEvents> {
  getPermissions(): Promise<boolean>;
@@ -9,6 +9,8 @@ declare class AudioModule extends NativeModule<AudioModuleEvents> {
  stopPlayback(): Promise<void>;
  mute(): Promise<void>;
  unmute(): Promise<void>;
  showMicrophoneModes(): Promise<void>;
  getMicrophoneMode(): Promise<MicrophoneMode>;
}

// This call loads the native module object from the JSI.
4 changes: 3 additions & 1 deletion evi/evi-react-native/modules/audio/src/AudioModule.types.ts
@@ -1,6 +1,8 @@
export type MicrophoneMode = "N/A" | "Standard" | "Voice Isolation" | "Wide Spectrum";

export type AudioModuleEvents = {
  onAudioInput: (params: AudioEventPayload) => void;
-  onError: (params: { error: string }) => void;
+  onError: (params: { message: string }) => void;
};

export type AudioEventPayload = {
11 changes: 10 additions & 1 deletion evi/evi-react-native/modules/audio/src/AudioModule.web.ts
@@ -1,7 +1,7 @@
import { EventEmitter } from 'expo-modules-core';
import { convertBlobToBase64, getAudioStream, ensureSingleValidAudioTrack, getBrowserSupportedMimeType, MimeType } from 'hume';
import { EVIWebAudioPlayer } from "hume";
-import { AudioModuleEvents } from './AudioModule.types';
+import { AudioModuleEvents, MicrophoneMode } from './AudioModule.types';

const emitter = new EventEmitter<AudioModuleEvents>();

@@ -84,5 +84,14 @@ export default {
  async addListener(eventName: keyof AudioModuleEvents, f: AudioModuleEvents[typeof eventName]): Promise<void> {
    emitter.addListener(eventName, f);
    return
  },

  async showMicrophoneModes(): Promise<void> {
    console.log('Microphone modes are only available on iOS');
    return;
  },

  async getMicrophoneMode(): Promise<MicrophoneMode> {
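    // Mic modes are an iOS-only concept; report "N/A" so callers skip the voice isolation prompt.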
    return 'N/A';
  }
};