title | description | sourceCodeUrl | packageName | iconUrl | platforms
--- | --- | --- | --- | --- | ---
Audio | A library that provides an API to implement audio playback and recording in apps. | | expo-av | /static/images/packages/expo-av.png | |
import { APIInstallSection } from '/components/plugins/InstallSection';
import { SnackInline } from '/ui/components/Snippet';
import APISection from '/components/plugins/APISection';
import { PlatformTags } from '/ui/components/Tag';
`Audio` from `expo-av` allows you to implement audio playback and recording in your app.
Info Audio recording APIs are not available on tvOS (Apple TV).
Note that audio automatically stops if headphones/bluetooth audio devices are disconnected.
Try the playlist example app (source code is on GitHub) to see an example usage of the media playback API, and the recording example app (source code is on GitHub) to see an example usage of the recording API.
<SnackInline label='Playing sounds' dependencies={['expo-av', 'expo-asset']} files={{ 'assets/Hello.mp3': 'https://snack-code-uploads.s3.us-west-1.amazonaws.com/~asset/c9c43b458d6daa9771a7287cae9f5b47' }}
import { useEffect, useState } from 'react';
import { View, StyleSheet, Button } from 'react-native';
import { Audio } from 'expo-av';
// Demo: load a bundled MP3 with expo-av and play it when the button is pressed.
export default function App() {
  // Holds the currently loaded Sound instance (undefined until first play).
  const [sound, setSound] = useState();

  // Loads the asset into a new Sound and starts playback immediately.
  async function playSound() {
    console.log('Loading Sound');
    /* @info */ const { sound } = await Audio.Sound.createAsync(
      /* @end */ require('./assets/Hello.mp3')
    );
    setSound(sound);

    console.log('Playing Sound');
    await /* @info */ sound.playAsync(); /* @end */
  }

  // Cleanup: unload the previous Sound whenever it is replaced,
  // and the last one when the component unmounts.
  useEffect(() => {
    return sound
      ? () => {
          console.log('Unloading Sound');
          /* @info Always unload the Sound after using it to prevent memory leaks.*/ sound.unloadAsync(); /* @end */
        }
      : undefined;
  }, [sound]);

  return (
    <View style={styles.container}>
      <Button title="Play Sound" onPress={playSound} />
    </View>
  );
}
// Simple vertically-centered layout for the demo screen.
const styles = StyleSheet.create({
  container: {
    padding: 10,
    backgroundColor: '#ecf0f1',
    justifyContent: 'center',
    flex: 1,
  },
});
<SnackInline label='Recording sounds' dependencies={['expo-av', 'expo-asset']}>
import { useState } from 'react';
import { View, StyleSheet, Button } from 'react-native';
import { Audio } from 'expo-av';
export default function App() {
const [recording, setRecording] = useState();
const [permissionResponse, requestPermission] = Audio.usePermissions();
async function startRecording() {
try {
/* @info */ if (permissionResponse.status !== 'granted') {
console.log('Requesting permission..');
await requestPermission();
}
await Audio.setAudioModeAsync({
allowsRecordingIOS: true,
playsInSilentModeIOS: true,
}); /* @end */
console.log('Starting recording..');
/* @info */ const { recording } = await Audio.Recording.createAsync(
/* @end */ Audio.RecordingOptionsPresets.HIGH_QUALITY
);
setRecording(recording);
console.log('Recording started');
} catch (err) {
console.error('Failed to start recording', err);
}
}
async function stopRecording() {
console.log('Stopping recording..');
setRecording(undefined);
/* @info */ await recording.stopAndUnloadAsync(); /* @end */
/* @info iOS may reroute audio playback to the phone earpiece when recording is allowed, so disable once finished. */ await Audio.setAudioModeAsync(
{
allowsRecordingIOS: false,
}
); /* @end */
/* @info */ const uri = recording.getURI(); /* @end */
console.log('Recording stopped and stored at', uri);
}
return (
<View style={styles.container}>
<Button
title={recording ? 'Stop Recording' : 'Start Recording'}
onPress={recording ? stopRecording : startRecording}
/>
</View>
);
}
// Layout for the recording demo: a single centered button.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    padding: 10,
    justifyContent: 'center',
    backgroundColor: '#ecf0f1',
  },
});
On iOS, audio playback and recording in the background is only available in standalone apps, and it requires some extra configuration.
On iOS, each background feature requires a special key in the `UIBackgroundModes`
array in your **Info.plist** file.
In standalone apps this array is empty by default, so to use background features you will need to add the appropriate keys to your **app.json** configuration.
See an example of **app.json** that enables audio playback in the background:
{
"expo": {
...
"ios": {
...
"infoPlist": {
...
"UIBackgroundModes": [
"audio"
]
}
}
}
}
- A MediaRecorder issue on Chrome produces WebM files missing the duration metadata. See the open Chromium issue.
- MediaRecorder encoding options and other configurations are inconsistent across browsers. Using a polyfill such as kbumsik/opus-media-recorder or ai/audio-recorder-polyfill in your application will improve your experience. Any options passed to `prepareToRecordAsync` will be passed directly to the MediaRecorder API (and therefore to the polyfill, if one is used).
- Web browsers require sites to be served securely for them to listen to a microphone. See MediaDevices `getUserMedia()` security for more details.
import { Audio } from 'expo-av';
The rest of the API on `Audio.Sound` is the same as the API for the `Video` component `ref`. See the AV documentation for more information.