Permalink
Switch branches/tags
Nothing to show
Find file Copy path
Fetching contributors…
Cannot retrieve contributors at this time
8274 lines (7164 sloc) 259 KB
/************************************************************************/
/*! \class RtAudio
\brief Realtime audio i/o C++ classes.
RtAudio provides a common API (Application Programming Interface)
for realtime audio input/output across Linux (native ALSA, Jack,
and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
(DirectSound and ASIO) operating systems.
RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/
RtAudio: a realtime audio i/o C++ class
Copyright (c) 2001-2004 Gary P. Scavone
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
Any person wishing to distribute modifications to the Software is
requested to send the modifications to the original developer so that
they can be incorporated into the canonical version.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/************************************************************************/
// RtAudio: Version 3.0.1, 22 March 2004
#include "rtaudio.h"
#include <stdio.h>
// Static variable definitions.
// Table of standard sample rates that the device-probing code tries in
// turn.  MAX_SAMPLE_RATES must equal the number of entries in the array.
const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
const unsigned int RtApi::SAMPLE_RATES[] = {
4000, 5512, 8000, 9600, 11025, 16000, 22050,
32000, 44100, 48000, 88200, 96000, 176400, 192000
};
// Platform mutex abstraction: Win32 critical sections when building the
// DirectSound/ASIO backends without pthreads, POSIX pthread mutexes
// otherwise.  Note: none of these macros embed a trailing semicolon, so
// every call site supplies its own (the original MUTEX_DESTROY included
// one, producing a stray empty statement at each use).
#if ( defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) ) && !defined(__WINDOWS_PTHREAD__)
#define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
#define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
#define MUTEX_LOCK(A)       EnterCriticalSection(A)
#define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#else // pthread API
#define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
#define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
#define MUTEX_LOCK(A)       pthread_mutex_lock(A)
#define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#endif
// *************************************************** //
//
// Public common (OS-independent) methods.
//
// *************************************************** //
// Default constructor: instantiate the compiled API backend matching
// 'api', or search for the "best" available backend when api == 0.
// May throw RtError (propagated from initialize()).
RtAudio :: RtAudio( RtAudioApi api )
{
initialize( api );
}
// Convenience constructor: instantiate an API backend and immediately
// open a stream with the given parameters.  If openStream() throws, the
// backend instance is released before the exception is propagated, so no
// memory is leaked.
RtAudio :: RtAudio( int outputDevice, int outputChannels,
                    int inputDevice, int inputChannels,
                    RtAudioFormat format, int sampleRate,
                    int *bufferSize, int numberOfBuffers, RtAudioApi api )
{
  initialize( api );

  try {
    rtapi_->openStream( outputDevice, outputChannels,
                        inputDevice, inputChannels,
                        format, sampleRate,
                        bufferSize, numberOfBuffers );
  }
  catch ( RtError & ) {
    // Deallocate the RtApi instance, then rethrow the original exception
    // in flight.  A bare "throw;" preserves the exception's dynamic type,
    // whereas the previous "throw exception;" rethrew a (possibly sliced)
    // copy.
    delete rtapi_;
    throw;
  }
}
// Destructor: release the API backend (rtapi_ owns any open stream and
// closes it in its own destructor).
RtAudio :: ~RtAudio()
{
delete rtapi_;
}
// Open a stream with the given parameters by forwarding to the selected
// API backend.  See RtApi::openStream for parameter semantics; errors are
// reported via RtError exceptions thrown by the backend.
void RtAudio :: openStream( int outputDevice, int outputChannels,
int inputDevice, int inputChannels,
RtAudioFormat format, int sampleRate,
int *bufferSize, int numberOfBuffers )
{
rtapi_->openStream( outputDevice, outputChannels, inputDevice,
inputChannels, format, sampleRate,
bufferSize, numberOfBuffers );
}
// Instantiate the requested compiled API backend, or — when api == 0 —
// search the compiled backends in order of preference (JACK/ASIO/AL/Core,
// then ALSA/DirectSound, then OSS).
//
// Throws RtError::INVALID_PARAMETER when a specific api value has no
// compiled support, and RtError::NO_DEVICES_FOUND when no backend can be
// created.  (Changes vs. original: unused catch variable removed; the
// redundant "if (rtapi_ == 0)" wrappers that followed an unconditional
// "if (rtapi_) return;" were dropped — behavior is identical.)
void RtAudio::initialize( RtAudioApi api )
{
  rtapi_ = 0;

  // First look for a compiled match to a specified API value.  If one of
  // these constructors throws an error, it will be passed up the
  // inheritance chain.
#if defined(__LINUX_JACK__)
  if ( api == LINUX_JACK )
    rtapi_ = new RtApiJack();
#endif
#if defined(__LINUX_ALSA__)
  if ( api == LINUX_ALSA )
    rtapi_ = new RtApiAlsa();
#endif
#if defined(__LINUX_OSS__)
  if ( api == LINUX_OSS )
    rtapi_ = new RtApiOss();
#endif
#if defined(__WINDOWS_ASIO__)
  if ( api == WINDOWS_ASIO )
    rtapi_ = new RtApiAsio();
#endif
#if defined(__WINDOWS_DS__)
  if ( api == WINDOWS_DS )
    rtapi_ = new RtApiDs();
#endif
#if defined(__IRIX_AL__)
  if ( api == IRIX_AL )
    rtapi_ = new RtApiAl();
#endif
#if defined(__MACOSX_CORE__)
  if ( api == MACOSX_CORE )
    rtapi_ = new RtApiCore();
#endif
  if ( rtapi_ ) return;

  if ( api > 0 ) {
    // No compiled support for specified API value.
    throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER );
  }

  // No specified API ... search for "best" option.
  try {
#if defined(__LINUX_JACK__)
    rtapi_ = new RtApiJack();
#elif defined(__WINDOWS_ASIO__)
    rtapi_ = new RtApiAsio();
#elif defined(__IRIX_AL__)
    rtapi_ = new RtApiAl();
#elif defined(__MACOSX_CORE__)
    rtapi_ = new RtApiCore();
#else
    ;
#endif
  }
  catch ( RtError & ) {
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n");
#endif
    rtapi_ = 0;
  }
  if ( rtapi_ ) return;

  // Try second API support.
  try {
#if defined(__LINUX_ALSA__)
    rtapi_ = new RtApiAlsa();
#elif defined(__WINDOWS_DS__)
    rtapi_ = new RtApiDs();
#else
    ;
#endif
  }
  catch ( RtError & ) {
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n");
#endif
    rtapi_ = 0;
  }
  if ( rtapi_ ) return;

  // Try third API support.
#if defined(__LINUX_OSS__)
  try {
    rtapi_ = new RtApiOss();
  }
  catch ( RtError & ) {
    rtapi_ = 0;
  }
#endif

  if ( rtapi_ == 0 ) {
    // No devices found.
    throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND );
  }
}
// Base-class constructor: mark the stream slot as unused and create the
// mutex that serializes stream start/stop operations.
RtApi :: RtApi()
{
stream_.mode = UNINITIALIZED;
stream_.apiHandle = 0;
MUTEX_INITIALIZE(&stream_.mutex);
}
// Base-class destructor: destroy the stream mutex.  Subclass destructors
// are responsible for closing any open stream before this runs.
RtApi :: ~RtApi()
{
MUTEX_DESTROY(&stream_.mutex);
}
void RtApi :: openStream( int outputDevice, int outputChannels,
int inputDevice, int inputChannels,
RtAudioFormat format, int sampleRate,
int *bufferSize, int numberOfBuffers )
{
if ( stream_.mode != UNINITIALIZED ) {
sprintf(message_, "RtApi: only one open stream allowed per class instance.");
error(RtError::INVALID_STREAM);
}
if (outputChannels < 1 && inputChannels < 1) {
sprintf(message_,"RtApi: one or both 'channel' parameters must be greater than zero.");
error(RtError::INVALID_PARAMETER);
}
if ( formatBytes(format) == 0 ) {
sprintf(message_,"RtApi: 'format' parameter value is undefined.");
error(RtError::INVALID_PARAMETER);
}
if ( outputChannels > 0 ) {
if (outputDevice > nDevices_ || outputDevice < 0) {
sprintf(message_,"RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice);
error(RtError::INVALID_PARAMETER);
}
}
if ( inputChannels > 0 ) {
if (inputDevice > nDevices_ || inputDevice < 0) {
sprintf(message_,"RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice);
error(RtError::INVALID_PARAMETER);
}
}
clearStreamInfo();
bool result = FAILURE;
int device, defaultDevice = 0;
StreamMode mode;
int channels;
if ( outputChannels > 0 ) {
mode = OUTPUT;
channels = outputChannels;
if ( outputDevice == 0 ) { // Try default device first.
defaultDevice = getDefaultOutputDevice();
device = defaultDevice;
}
else
device = outputDevice - 1;
for ( int i=-1; i<nDevices_; i++ ) {
if ( i >= 0 ) {
if ( i == defaultDevice ) continue;
device = i;
}
if (devices_[device].probed == false) {
// If the device wasn't successfully probed before, try it
// (again) now.
clearDeviceInfo(&devices_[device]);
probeDeviceInfo(&devices_[device]);
}
if ( devices_[device].probed )
result = probeDeviceOpen(device, mode, channels, sampleRate,
format, bufferSize, numberOfBuffers);
if ( result == SUCCESS ) break;
if ( outputDevice > 0 ) break;
clearStreamInfo();
}
}
if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) {
mode = INPUT;
channels = inputChannels;
if ( inputDevice == 0 ) { // Try default device first.
defaultDevice = getDefaultInputDevice();
device = defaultDevice;
}
else
device = inputDevice - 1;
for (int i=-1; i<nDevices_; i++) {
if (i >= 0 ) {
if ( i == defaultDevice ) continue;
device = i;
}
if (devices_[device].probed == false) {
// If the device wasn't successfully probed before, try it
// (again) now.
clearDeviceInfo(&devices_[device]);
probeDeviceInfo(&devices_[device]);
}
if ( devices_[device].probed )
result = probeDeviceOpen(device, mode, channels, sampleRate,
format, bufferSize, numberOfBuffers);
if (result == SUCCESS) break;
if ( outputDevice > 0 ) break;
}
}
if ( result == SUCCESS )
return;
// If we get here, all attempted probes failed. Close any opened
// devices and clear the stream structure.
if ( stream_.mode != UNINITIALIZED ) closeStream();
clearStreamInfo();
if ( ( outputDevice == 0 && outputChannels > 0 )
|| ( inputDevice == 0 && inputChannels > 0 ) )
sprintf(message_,"RtApi: no devices found for given stream parameters.");
else
sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters.");
error(RtError::INVALID_PARAMETER);
return;
}
// Return the number of audio devices discovered by this API instance.
int RtApi :: getDeviceCount(void)
{
  return static_cast<int>( devices_.size() );
}
// Return a public-facing snapshot of device 'device' (1-based specifier).
// The device is (re)probed if it has not been successfully probed yet;
// invalid specifiers are reported via error(RtError::INVALID_DEVICE).
RtAudioDeviceInfo RtApi :: getDeviceInfo( int device )
{
  if ( device < 1 || device > (int) devices_.size() ) {
    sprintf(message_, "RtApi: invalid device specifier (%d)!", device);
    error(RtError::INVALID_DEVICE);
  }

  const int index = device - 1;
  RtApiDevice &dev = devices_[index];

  // If the device wasn't successfully probed before, try it now (or again).
  if ( dev.probed == false ) {
    clearDeviceInfo( &dev );
    probeDeviceInfo( &dev );
  }

  RtAudioDeviceInfo info;
  info.name.append( dev.name );
  info.probed = dev.probed;
  if ( info.probed == true ) {
    info.outputChannels = dev.maxOutputChannels;
    info.inputChannels = dev.maxInputChannels;
    info.duplexChannels = dev.maxDuplexChannels;
    for ( unsigned int i=0; i<dev.sampleRates.size(); i++ )
      info.sampleRates.push_back( dev.sampleRates[i] );
    info.nativeFormats = dev.nativeFormats;
    if ( index == getDefaultOutputDevice() || index == getDefaultInputDevice() )
      info.isDefault = true;
  }

  return info;
}
// Return a pointer to the stream's user buffer (throws via verifyStream()
// if no stream is open).  NOTE(review): the top-level const on the return
// type has no effect on callers; kept to match the header declaration.
char * const RtApi :: getStreamBuffer(void)
{
verifyStream();
return stream_.userBuffer;
}
// Return the 0-based index of the default input device.  The base class
// always reports device 0; subclasses override where the API exposes a
// real default.
int RtApi :: getDefaultInputDevice(void)
{
// Should be implemented in subclasses if appropriate.
return 0;
}
// Return the 0-based index of the default output device.  The base class
// always reports device 0; subclasses override where the API exposes a
// real default.
int RtApi :: getDefaultOutputDevice(void)
{
// Should be implemented in subclasses if appropriate.
return 0;
}
// Close the open stream.  This base implementation is a no-op placeholder;
// each API subclass supplies the real teardown.
void RtApi :: closeStream(void)
{
// MUST be implemented in subclasses!
}
// Fill in the capabilities of 'info'.  This base implementation is a
// no-op placeholder; each API subclass supplies the real probe.
void RtApi :: probeDeviceInfo( RtApiDevice *info )
{
// MUST be implemented in subclasses!
}
// Attempt to open 'device' for the given mode/format; returns SUCCESS or
// FAILURE.  This base implementation always fails; each API subclass
// supplies the real open logic.
bool RtApi :: probeDeviceOpen( int device, StreamMode mode, int channels,
int sampleRate, RtAudioFormat format,
int *bufferSize, int numberOfBuffers )
{
// MUST be implemented in subclasses!
return FAILURE;
}
// *************************************************** //
//
// OS/API-specific methods.
//
// *************************************************** //
#if defined(__LINUX_OSS__)
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/soundcard.h>
#include <errno.h>
#include <math.h>
#define DAC_NAME "/dev/dsp"
#define MAX_DEVICES 16
#define MAX_CHANNELS 16
extern "C" void *ossCallbackHandler(void * ptr);
// OSS backend constructor: enumerate /dev/dsp* devices; throws
// RtError::NO_DEVICES_FOUND when none are present.
RtApiOss :: RtApiOss()
{
this->initialize();
if (nDevices_ <= 0) {
sprintf(message_, "RtApiOss: no Linux OSS audio devices found!");
error(RtError::NO_DEVICES_FOUND);
}
}
// OSS backend destructor: close any stream still open so device file
// descriptors and buffers are released.
RtApiOss :: ~RtApiOss()
{
if ( stream_.mode != UNINITIALIZED )
closeStream();
}
// Enumerate OSS audio devices by brute force: resolve the /dev/dsp
// symlink (if any) to avoid double-counting its target, then try to open
// /dev/dsp and /dev/dsp0../dev/dsp15 for playback or capture.  Each
// device that exists (even if busy) is appended to devices_.
//
// Bug fix vs. original: readlink() was called with the full buffer size,
// so a maximal-length link target left no room for the NUL terminator
// appended afterwards, writing one byte past device_name.
void RtApiOss :: initialize(void)
{
  // Count cards and devices
  nDevices_ = 0;

  // We check /dev/dsp before probing devices.  /dev/dsp is supposed to
  // be a link to the "default" audio device, of the form /dev/dsp0,
  // /dev/dsp1, etc...  However, I've seen many cases where /dev/dsp was a
  // real device, so we need to check for that.  Also, sometimes the
  // link is to /dev/dspx and other times just dspx.  I'm not sure how
  // the latter works, but it does.
  char device_name[16];
  struct stat dspstat;
  int dsplink = -1;
  int i = 0;
  if (lstat(DAC_NAME, &dspstat) == 0) {
    if (S_ISLNK(dspstat.st_mode)) {
      // readlink() does not NUL-terminate; reserve one byte for the
      // terminator we append below.
      i = readlink(DAC_NAME, device_name, sizeof(device_name) - 1);
      if (i > 0) {
        device_name[i] = '\0';
        if (i > 8) { // check for "/dev/dspx"
          if (!strncmp(DAC_NAME, device_name, 8))
            dsplink = atoi(&device_name[8]);
        }
        else if (i > 3) { // check for "dspx"
          if (!strncmp("dsp", device_name, 3))
            dsplink = atoi(&device_name[3]);
        }
      }
      else {
        sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
        error(RtError::SYSTEM_ERROR);
      }
    }
  }
  else {
    sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
    error(RtError::SYSTEM_ERROR);
  }

  // The OSS API doesn't provide a routine for determining the number
  // of devices.  Thus, we'll just pursue a brute force method.  The
  // idea is to start with /dev/dsp(0) and continue with higher device
  // numbers until we reach MAX_DSP_DEVICES.  This should tell us how
  // many devices we have ... it is not a fullproof scheme, but hopefully
  // it will work most of the time.
  int fd = 0;
  RtApiDevice device;
  for (i=-1; i<MAX_DEVICES; i++) {

    // Probe /dev/dsp first, since it is supposed to be the default device.
    if (i == -1)
      sprintf(device_name, "%s", DAC_NAME);
    else if (i == dsplink)
      continue;  // We've aready probed this device via /dev/dsp link ... try next device.
    else
      sprintf(device_name, "%s%d", DAC_NAME, i);

    // First try to open the device for playback, then record mode.
    fd = open(device_name, O_WRONLY | O_NONBLOCK);
    if (fd == -1) {
      // Open device for playback failed ... either busy or doesn't exist.
      if (errno != EBUSY && errno != EAGAIN) {
        // Try to open for capture
        fd = open(device_name, O_RDONLY | O_NONBLOCK);
        if (fd == -1) {
          // Open device for record failed.
          if (errno != EBUSY && errno != EAGAIN)
            continue;  // device doesn't exist ... try the next number
          else {
            sprintf(message_, "RtApiOss: OSS record device (%s) is busy.", device_name);
            error(RtError::WARNING);
            // still count it for now
          }
        }
      }
      else {
        sprintf(message_, "RtApiOss: OSS playback device (%s) is busy.", device_name);
        error(RtError::WARNING);
        // still count it for now
      }
    }

    if (fd >= 0) close(fd);
    device.name.erase();
    device.name.append( (const char *)device_name, strlen(device_name)+1);
    devices_.push_back(device);
    nDevices_++;
  }
}
// Probe an OSS device's capabilities by brute force: open it in each
// direction (playback, capture, then read/write for duplex) and bisect
// the supported channel counts with SNDCTL_DSP_CHANNELS, then query the
// supported sample formats (SNDCTL_DSP_GETFMTS) and sample rates
// (SNDCTL_DSP_SPEED).  On success sets info->probed = true; on any
// failure it returns early with info->probed left false and a
// DEBUG_WARNING message.
void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
{
int i, fd, channels, mask;
// The OSS API doesn't provide a means for probing the capabilities
// of devices. Thus, we'll just pursue a brute force method.
// First try for playback
fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
if (fd == -1) {
// Open device failed ... either busy or doesn't exist
if (errno == EBUSY || errno == EAGAIN)
sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
info->name.c_str());
else
sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
error(RtError::DEBUG_WARNING);
// Playback failed, but the device may still support capture.
goto capture_probe;
}
// We have an open device ... see how many channels it can handle
// (count down from MAX_CHANNELS; the first accepted value is the max).
for (i=MAX_CHANNELS; i>0; i--) {
channels = i;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
// This would normally indicate some sort of hardware error, but under ALSA's
// OSS emulation, it sometimes indicates an invalid channel value. Further,
// the returned channel value is not changed. So, we'll ignore the possible
// hardware error.
continue; // try next channel number
}
// Check to see whether the device supports the requested number of channels
if (channels != i ) continue; // try next channel number
// If here, we found the largest working channel value
break;
}
info->maxOutputChannels = i;
// Now find the minimum number of channels it can handle
// (count up; the first accepted value is the min).
for (i=1; i<=info->maxOutputChannels; i++) {
channels = i;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
continue; // try next channel number
// If here, we found the smallest working channel value
break;
}
info->minOutputChannels = i;
close(fd);
capture_probe:
// Now try for capture
fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
if (fd == -1) {
// Open device for capture failed ... either busy or doesn't exist
if (errno == EBUSY || errno == EAGAIN)
sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
info->name.c_str());
else
sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
error(RtError::DEBUG_WARNING);
if (info->maxOutputChannels == 0)
// didn't open for playback either ... device invalid
return;
// Playback-only device: skip straight to format/rate probing.
goto probe_parameters;
}
// We have the device open for capture ... see how many channels it can handle
for (i=MAX_CHANNELS; i>0; i--) {
channels = i;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
continue; // as above
}
// If here, we found a working channel value
break;
}
info->maxInputChannels = i;
// Now find the minimum number of channels it can handle
for (i=1; i<=info->maxInputChannels; i++) {
channels = i;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
continue; // try next channel number
// If here, we found the smallest working channel value
break;
}
info->minInputChannels = i;
close(fd);
if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// If device opens for both playback and capture, we determine the channels.
if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
goto probe_parameters;
fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
if (fd == -1)
goto probe_parameters;
ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
// NOTE(review): the return value of SNDCTL_DSP_GETCAPS is not checked,
// so on failure 'mask' is read uninitialized below — verify against the
// OSS spec before relying on hasDuplexSupport here.
ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
if (mask & DSP_CAP_DUPLEX) {
info->hasDuplexSupport = true;
// We have the device open for duplex ... see how many channels it can handle
for (i=MAX_CHANNELS; i>0; i--) {
channels = i;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
continue; // as above
// If here, we found a working channel value
break;
}
info->maxDuplexChannels = i;
// Now find the minimum number of channels it can handle
for (i=1; i<=info->maxDuplexChannels; i++) {
channels = i;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
continue; // try next channel number
// If here, we found the smallest working channel value
break;
}
info->minDuplexChannels = i;
}
close(fd);
probe_parameters:
// At this point, we need to figure out the supported data formats
// and sample rates. We'll proceed by openning the device in the
// direction with the maximum number of channels, or playback if
// they are equal. This might limit our sample rate options, but so
// be it.
if (info->maxOutputChannels >= info->maxInputChannels) {
fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
channels = info->maxOutputChannels;
}
else {
fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
channels = info->maxInputChannels;
}
if (fd == -1) {
// We've got some sort of conflict ... abort
sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// We have an open device ... set to maximum channels.
i = channels;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
// We've got some sort of conflict ... abort
close(fd);
sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
close(fd);
sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// Probe the supported data formats ... we don't care about endian-ness just yet.
int format;
info->nativeFormats = 0;
#if defined (AFMT_S32_BE)
// This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
if (mask & AFMT_S32_BE) {
format = AFMT_S32_BE;
info->nativeFormats |= RTAUDIO_SINT32;
}
#endif
#if defined (AFMT_S32_LE)
/* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
if (mask & AFMT_S32_LE) {
format = AFMT_S32_LE;
info->nativeFormats |= RTAUDIO_SINT32;
}
#endif
if (mask & AFMT_S8) {
format = AFMT_S8;
info->nativeFormats |= RTAUDIO_SINT8;
}
if (mask & AFMT_S16_BE) {
format = AFMT_S16_BE;
info->nativeFormats |= RTAUDIO_SINT16;
}
if (mask & AFMT_S16_LE) {
format = AFMT_S16_LE;
info->nativeFormats |= RTAUDIO_SINT16;
}
// Check that we have at least one supported format
if (info->nativeFormats == 0) {
close(fd);
sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// Set the format (the last one latched above) before probing rates.
i = format;
if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
close(fd);
sprintf(message_, "RtApiOss: device (%s) error setting data format.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// Probe the supported sample rates: a rate counts as supported only if
// SNDCTL_DSP_SPEED accepts it without adjusting the value.
info->sampleRates.clear();
for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
int speed = SAMPLE_RATES[k];
if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
info->sampleRates.push_back(speed);
}
if (info->sampleRates.size() == 0) {
close(fd);
sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// That's all ... close the device and return
close(fd);
info->probed = true;
return;
}
// Attempt to open OSS device 'device' for the given mode and configure
// format, channels, fragment size and sample rate.  Returns SUCCESS or
// FAILURE (errors are reported as RtError::WARNING via error() after the
// "error:" cleanup label).  Duplex is handled OSS-style: when the input
// device equals the already-opened output device, the playback descriptor
// is closed and the device reopened O_RDWR, sharing one fd in handle[0].
bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
int sampleRate, RtAudioFormat format,
int *bufferSize, int numberOfBuffers)
{
int buffers, buffer_bytes, device_channels, device_format;
int srate, temp, fd;
// handle is a 2-element array of fds: [0] = playback/duplex, [1] = capture.
int *handle = (int *) stream_.apiHandle;
const char *name = devices_[device].name.c_str();
if (mode == OUTPUT)
fd = open(name, O_WRONLY | O_NONBLOCK);
else { // mode == INPUT
if (stream_.mode == OUTPUT && stream_.device[0] == device) {
// We just set the same device for playback ... close and reopen for duplex (OSS only).
close(handle[0]);
handle[0] = 0;
// First check that the number previously set channels is the same.
if (stream_.nUserChannels[0] != channels) {
sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
goto error;
}
fd = open(name, O_RDWR | O_NONBLOCK);
}
else
fd = open(name, O_RDONLY | O_NONBLOCK);
}
if (fd == -1) {
if (errno == EBUSY || errno == EAGAIN)
sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
name);
else
sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
goto error;
}
// Now reopen in blocking mode (the O_NONBLOCK open above was only a
// cheap availability check).
close(fd);
if (mode == OUTPUT)
fd = open(name, O_WRONLY | O_SYNC);
else { // mode == INPUT
if (stream_.mode == OUTPUT && stream_.device[0] == device)
fd = open(name, O_RDWR | O_SYNC);
else
fd = open(name, O_RDONLY | O_SYNC);
}
if (fd == -1) {
sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
goto error;
}
// Get the sample format mask
int mask;
if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
close(fd);
sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
name);
goto error;
}
// Determine how to set the device format: prefer the user's requested
// format in native byte order; fall back to the opposite-endian variant
// with byte swapping enabled.
stream_.userFormat = format;
device_format = -1;
stream_.doByteSwap[mode] = false;
if (format == RTAUDIO_SINT8) {
if (mask & AFMT_S8) {
device_format = AFMT_S8;
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
}
}
else if (format == RTAUDIO_SINT16) {
if (mask & AFMT_S16_NE) {
device_format = AFMT_S16_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
}
#if BYTE_ORDER == LITTLE_ENDIAN
else if (mask & AFMT_S16_BE) {
device_format = AFMT_S16_BE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
}
#else
else if (mask & AFMT_S16_LE) {
device_format = AFMT_S16_LE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
}
#endif
}
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
else if (format == RTAUDIO_SINT32) {
if (mask & AFMT_S32_NE) {
device_format = AFMT_S32_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
}
#if BYTE_ORDER == LITTLE_ENDIAN
else if (mask & AFMT_S32_BE) {
device_format = AFMT_S32_BE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
}
#else
else if (mask & AFMT_S32_LE) {
device_format = AFMT_S32_LE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
}
#endif
}
#endif
if (device_format == -1) {
// The user requested format is not natively supported by the device.
// Fall back to 16-bit, then 32-bit, then 8-bit, converting in software.
if (mask & AFMT_S16_NE) {
device_format = AFMT_S16_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
}
#if BYTE_ORDER == LITTLE_ENDIAN
else if (mask & AFMT_S16_BE) {
device_format = AFMT_S16_BE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
}
#else
else if (mask & AFMT_S16_LE) {
device_format = AFMT_S16_LE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
}
#endif
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
else if (mask & AFMT_S32_NE) {
device_format = AFMT_S32_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
}
#if BYTE_ORDER == LITTLE_ENDIAN
else if (mask & AFMT_S32_BE) {
device_format = AFMT_S32_BE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
}
#else
else if (mask & AFMT_S32_LE) {
device_format = AFMT_S32_LE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
}
#endif
#endif
else if (mask & AFMT_S8) {
device_format = AFMT_S8;
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
}
}
if (stream_.deviceFormat[mode] == 0) {
// This really shouldn't happen ...
close(fd);
sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
name);
goto error;
}
// Determine the number of channels for this device. Note that the
// channel value requested by the user might be < min_X_Channels.
stream_.nUserChannels[mode] = channels;
device_channels = channels;
if (mode == OUTPUT) {
if (channels < devices_[device].minOutputChannels)
device_channels = devices_[device].minOutputChannels;
}
else { // mode == INPUT
if (stream_.mode == OUTPUT && stream_.device[0] == device) {
// We're doing duplex setup here.
if (channels < devices_[device].minDuplexChannels)
device_channels = devices_[device].minDuplexChannels;
}
else {
if (channels < devices_[device].minInputChannels)
device_channels = devices_[device].minInputChannels;
}
}
stream_.nDeviceChannels[mode] = device_channels;
// Attempt to set the buffer size. According to OSS, the minimum
// number of buffers is two. The supposed minimum buffer size is 16
// bytes, so that will be our lower bound. The argument to this
// call is in the form 0xMMMMSSSS (hex), where the buffer size (in
// bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
// We'll check the actual value used near the end of the setup
// procedure.
buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
if (buffer_bytes < 16) buffer_bytes = 16;
buffers = numberOfBuffers;
if (buffers < 2) buffers = 2;
// log10(x)/log10(2) computes log2(buffer_bytes) for the SSSS field.
temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
close(fd);
sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
name);
goto error;
}
stream_.nBuffers = buffers;
// Set the data format.
temp = device_format;
if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
close(fd);
sprintf(message_, "RtApiOss: error setting data format for device (%s).",
name);
goto error;
}
// Set the number of channels.
temp = device_channels;
if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
close(fd);
sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
temp, name);
goto error;
}
// Set the sample rate.
srate = sampleRate;
temp = srate;
if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
close(fd);
sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
temp, name);
goto error;
}
// Verify the sample rate setup worked: allow the driver to deviate by
// up to 100 Hz from the requested rate.
if (abs(srate - temp) > 100) {
close(fd);
sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
name, temp);
goto error;
}
stream_.sampleRate = sampleRate;
if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
close(fd);
sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
name);
goto error;
}
// Save buffer size (in sample frames).
*bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
stream_.bufferSize = *bufferSize;
if (mode == INPUT && stream_.mode == OUTPUT &&
stream_.device[0] == device) {
// We're doing duplex setup here: both directions now share the same
// device format and channel count.
stream_.deviceFormat[0] = stream_.deviceFormat[1];
stream_.nDeviceChannels[0] = device_channels;
}
// Allocate the stream handles if necessary and then save.
if ( stream_.apiHandle == 0 ) {
handle = (int *) calloc(2, sizeof(int));
stream_.apiHandle = (void *) handle;
handle[0] = 0;
handle[1] = 0;
}
else {
handle = (int *) stream_.apiHandle;
}
handle[mode] = fd;
// Set flags for buffer conversion: needed when the user format or
// channel count differs from what the device accepted.
stream_.doConvertBuffer[mode] = false;
if (stream_.userFormat != stream_.deviceFormat[mode])
stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
stream_.doConvertBuffer[mode] = true;
// Allocate necessary internal buffers
if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
long buffer_bytes;
if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
buffer_bytes = stream_.nUserChannels[0];
else
buffer_bytes = stream_.nUserChannels[1];
buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
if (stream_.userBuffer) free(stream_.userBuffer);
stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
if (stream_.userBuffer == NULL) {
close(fd);
sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
name);
goto error;
}
}
if ( stream_.doConvertBuffer[mode] ) {
long buffer_bytes;
bool makeBuffer = true;
if ( mode == OUTPUT )
buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
else { // mode == INPUT
buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
// Reuse the existing (larger) output conversion buffer.
long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
if ( buffer_bytes < bytes_out ) makeBuffer = false;
}
}
if ( makeBuffer ) {
buffer_bytes *= *bufferSize;
if (stream_.deviceBuffer) free(stream_.deviceBuffer);
stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
if (stream_.deviceBuffer == NULL) {
close(fd);
sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
name);
goto error;
}
}
}
stream_.device[mode] = device;
stream_.state = STREAM_STOPPED;
if ( stream_.mode == OUTPUT && mode == INPUT ) {
stream_.mode = DUPLEX;
if (stream_.device[0] == device)
handle[0] = fd;  // same-device duplex: one fd serves both directions
}
else
stream_.mode = mode;
stream_.sub_mode = mode;
return SUCCESS;
error:
// Cleanup path: release any stream handles and the user buffer before
// reporting the failure as a warning.
if (handle) {
if (handle[0])
close(handle[0]);
free(handle);
stream_.apiHandle = 0;
}
if (stream_.userBuffer) {
free(stream_.userBuffer);
stream_.userBuffer = 0;
}
error(RtError::WARNING);
return FAILURE;
}
// Close the open stream: stop the device(s), shut down the callback
// thread (if any), close the file descriptors, and free all internal
// buffers.  Reports a WARNING instead of throwing when no stream is open,
// because this is also called from the destructor.
void RtApiOss :: closeStream()
{
// We don't want an exception to be thrown here because this
// function is called by our class destructor. So, do our own
// stream check.
if ( stream_.mode == UNINITIALIZED ) {
sprintf(message_, "RtApiOss::closeStream(): no open stream to close!");
error(RtError::WARNING);
return;
}
int *handle = (int *) stream_.apiHandle;
if (stream_.state == STREAM_RUNNING) {
// Reset whichever descriptor is active: handle[0] for output/duplex,
// handle[1] for input-only streams.
if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
ioctl(handle[0], SNDCTL_DSP_RESET, 0);
else
ioctl(handle[1], SNDCTL_DSP_RESET, 0);
stream_.state = STREAM_STOPPED;
}
if (stream_.callbackInfo.usingCallback) {
// Clearing the flag signals the callback thread to exit; wait for it.
stream_.callbackInfo.usingCallback = false;
pthread_join(stream_.callbackInfo.thread, NULL);
}
if (handle) {
if (handle[0]) close(handle[0]);
if (handle[1]) close(handle[1]);
free(handle);
stream_.apiHandle = 0;
}
if (stream_.userBuffer) {
free(stream_.userBuffer);
stream_.userBuffer = 0;
}
if (stream_.deviceBuffer) {
free(stream_.deviceBuffer);
stream_.deviceBuffer = 0;
}
stream_.mode = UNINITIALIZED;
stream_.sub_mode = UNINITIALIZED;
}
void RtApiOss :: startStream()
{
verifyStream();
if (stream_.state == STREAM_RUNNING) return;
MUTEX_LOCK(&stream_.mutex);
stream_.state = STREAM_RUNNING;
// No need to do anything else here ... OSS automatically starts
// when fed samples.
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiOss :: stopStream()
{
verifyStream();
if (stream_.state == STREAM_STOPPED) return;
// Change the state before the lock to improve shutdown response
// when using a callback.
stream_.state = STREAM_STOPPED;
MUTEX_LOCK(&stream_.mutex);
int err;
int *handle = (int *) stream_.apiHandle;
if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
err = ioctl(handle[0], SNDCTL_DSP_POST, 0);
//err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
if (err < -1) {
sprintf(message_, "RtApiOss: error stopping device (%s).",
devices_[stream_.device[0]].name.c_str());
error(RtError::DRIVER_ERROR);
}
}
else {
err = ioctl(handle[1], SNDCTL_DSP_POST, 0);
//err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
if (err < -1) {
sprintf(message_, "RtApiOss: error stopping device (%s).",
devices_[stream_.device[1]].name.c_str());
error(RtError::DRIVER_ERROR);
}
}
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiOss :: abortStream()
{
stopStream();
}
int RtApiOss :: streamWillBlock()
{
verifyStream();
if (stream_.state == STREAM_STOPPED) return 0;
MUTEX_LOCK(&stream_.mutex);
int bytes = 0, channels = 0, frames = 0;
audio_buf_info info;
int *handle = (int *) stream_.apiHandle;
if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
ioctl(handle[0], SNDCTL_DSP_GETOSPACE, &info);
bytes = info.bytes;
channels = stream_.nDeviceChannels[0];
}
if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
ioctl(handle[1], SNDCTL_DSP_GETISPACE, &info);
if (stream_.mode == DUPLEX ) {
bytes = (bytes < info.bytes) ? bytes : info.bytes;
channels = stream_.nDeviceChannels[0];
}
else {
bytes = info.bytes;
channels = stream_.nDeviceChannels[1];
}
}
frames = (int) (bytes / (channels * formatBytes(stream_.deviceFormat[0])));
frames -= stream_.bufferSize;
if (frames < 0) frames = 0;
MUTEX_UNLOCK(&stream_.mutex);
return frames;
}
void RtApiOss :: tickStream()
{
verifyStream();
int stopStream = 0;
if (stream_.state == STREAM_STOPPED) {
if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
return;
}
else if (stream_.callbackInfo.usingCallback) {
RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
}
MUTEX_LOCK(&stream_.mutex);
// The state might change while waiting on a mutex.
if (stream_.state == STREAM_STOPPED)
goto unlock;
int result, *handle;
char *buffer;
int samples;
RtAudioFormat format;
handle = (int *) stream_.apiHandle;
if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
// Setup parameters and do buffer conversion if necessary.
if (stream_.doConvertBuffer[0]) {
convertStreamBuffer(OUTPUT);
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[0];
format = stream_.deviceFormat[0];
}
else {
buffer = stream_.userBuffer;
samples = stream_.bufferSize * stream_.nUserChannels[0];
format = stream_.userFormat;
}
// Do byte swapping if necessary.
if (stream_.doByteSwap[0])
byteSwapBuffer(buffer, samples, format);
// Write samples to device.
result = write(handle[0], buffer, samples * formatBytes(format));
if (result == -1) {
// This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
sprintf(message_, "RtApiOss: audio write error for device (%s).",
devices_[stream_.device[0]].name.c_str());
error(RtError::DRIVER_ERROR);
}
}
if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
// Setup parameters.
if (stream_.doConvertBuffer[1]) {
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[1];
format = stream_.deviceFormat[1];
}
else {
buffer = stream_.userBuffer;
samples = stream_.bufferSize * stream_.nUserChannels[1];
format = stream_.userFormat;
}
// Read samples from device.
result = read(handle[1], buffer, samples * formatBytes(format));
if (result == -1) {
// This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
sprintf(message_, "RtApiOss: audio read error for device (%s).",
devices_[stream_.device[1]].name.c_str());
error(RtError::DRIVER_ERROR);
}
// Do byte swapping if necessary.
if (stream_.doByteSwap[1])
byteSwapBuffer(buffer, samples, format);
// Do buffer conversion if necessary.
if (stream_.doConvertBuffer[1])
convertStreamBuffer(INPUT);
}
unlock:
MUTEX_UNLOCK(&stream_.mutex);
if (stream_.callbackInfo.usingCallback && stopStream)
this->stopStream();
}
void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
{
verifyStream();
CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
if ( info->usingCallback ) {
sprintf(message_, "RtApiOss: A callback is already set for this stream!");
error(RtError::WARNING);
return;
}
info->callback = (void *) callback;
info->userData = userData;
info->usingCallback = true;
info->object = (void *) this;
// Set the thread attributes for joinable and realtime scheduling
// priority. The higher priority will only take affect if the
// program is run as root or suid.
pthread_attr_t attr;
pthread_attr_init(&attr);
// pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
// pthread_attr_setschedpolicy(&attr, SCHED_RR);
int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
pthread_attr_destroy(&attr);
if (err) {
info->usingCallback = false;
sprintf(message_, "RtApiOss: error starting callback thread!");
error(RtError::THREAD_ERROR);
}
}
void RtApiOss :: cancelStreamCallback()
{
verifyStream();
if (stream_.callbackInfo.usingCallback) {
if (stream_.state == STREAM_RUNNING)
stopStream();
MUTEX_LOCK(&stream_.mutex);
stream_.callbackInfo.usingCallback = false;
pthread_join(stream_.callbackInfo.thread, NULL);
stream_.callbackInfo.thread = 0;
stream_.callbackInfo.callback = NULL;
stream_.callbackInfo.userData = NULL;
MUTEX_UNLOCK(&stream_.mutex);
}
}
extern "C" void *ossCallbackHandler(void *ptr)
{
CallbackInfo *info = (CallbackInfo *) ptr;
RtApiOss *object = (RtApiOss *) info->object;
bool *usingCallback = &info->usingCallback;
while ( *usingCallback ) {
pthread_testcancel();
try {
object->tickStream();
}
catch (RtError &exception) {
fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
exception.getMessageString());
break;
}
}
return 0;
}
//******************** End of __LINUX_OSS__ *********************//
#endif
#if defined(__MACOSX_CORE__)
// The OS X CoreAudio API is designed to use a separate callback
// procedure for each of its audio devices. A single RtAudio duplex
// stream using two different devices is supported here, though it
// cannot be guaranteed to always behave correctly because we cannot
// synchronize these two callbacks. This same functionality can be
// achieved with better synchrony by opening two separate streams for
// the devices and using RtAudio blocking calls (i.e. tickStream()).
//
// A property listener is installed for over/underrun information.
// However, no functionality is currently provided to allow property
// listeners to trigger user handlers because it is unclear what could
// be done if a critical stream parameter (buffer size, sample rate,
// device disconnect) notification arrived. The listeners entail
// quite a bit of extra code and most likely, a user program wouldn't
// be prepared for the result anyway.
// A structure to hold various information related to the CoreAuio API
// implementation.
struct CoreHandle {
UInt32 index[2];
bool stopStream;
bool xrun;
char *deviceBuffer;
pthread_cond_t condition;
CoreHandle()
:stopStream(false), xrun(false), deviceBuffer(0) {}
};
RtApiCore :: RtApiCore()
{
this->initialize();
if (nDevices_ <= 0) {
sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
error(RtError::NO_DEVICES_FOUND);
}
}
RtApiCore :: ~RtApiCore()
{
// The subclass destructor gets called before the base class
// destructor, so close an existing stream before deallocating
// apiDeviceId memory.
if ( stream_.mode != UNINITIALIZED ) closeStream();
// Free our allocated apiDeviceId memory.
AudioDeviceID *id;
for ( unsigned int i=0; i<devices_.size(); i++ ) {
id = (AudioDeviceID *) devices_[i].apiDeviceId;
if (id) free(id);
}
}
void RtApiCore :: initialize(void)
{
OSStatus err = noErr;
UInt32 dataSize;
AudioDeviceID *deviceList = NULL;
nDevices_ = 0;
// Find out how many audio devices there are, if any.
err = AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &dataSize, NULL);
if (err != noErr) {
sprintf(message_, "RtApiCore: OS-X error getting device info!");
error(RtError::SYSTEM_ERROR);
}
nDevices_ = dataSize / sizeof(AudioDeviceID);
if (nDevices_ == 0) return;
// Make space for the devices we are about to get.
deviceList = (AudioDeviceID *) malloc( dataSize );
if (deviceList == NULL) {
sprintf(message_, "RtApiCore: memory allocation error during initialization!");
error(RtError::MEMORY_ERROR);
}
// Get the array of AudioDeviceIDs.
err = AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &dataSize, (void *) deviceList);
if (err != noErr) {
free(deviceList);
sprintf(message_, "RtApiCore: OS-X error getting device properties!");
error(RtError::SYSTEM_ERROR);
}
// Create list of device structures and write device identifiers.
RtApiDevice device;
AudioDeviceID *id;
for (int i=0; i<nDevices_; i++) {
devices_.push_back(device);
id = (AudioDeviceID *) malloc( sizeof(AudioDeviceID) );
*id = deviceList[i];
devices_[i].apiDeviceId = (void *) id;
}
free(deviceList);
}
int RtApiCore :: getDefaultInputDevice(void)
{
AudioDeviceID id, *deviceId;
UInt32 dataSize = sizeof( AudioDeviceID );
OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
&dataSize, &id );
if (result != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting default input device." );
error(RtError::WARNING);
return 0;
}
for ( int i=0; i<nDevices_; i++ ) {
deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
if ( id == *deviceId ) return i;
}
return 0;
}
int RtApiCore :: getDefaultOutputDevice(void)
{
AudioDeviceID id, *deviceId;
UInt32 dataSize = sizeof( AudioDeviceID );
OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
&dataSize, &id );
if (result != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting default output device." );
error(RtError::WARNING);
return 0;
}
for ( int i=0; i<nDevices_; i++ ) {
deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
if ( id == *deviceId ) return i;
}
return 0;
}
static bool deviceSupportsFormat( AudioDeviceID id, bool isInput,
AudioStreamBasicDescription *desc, bool isDuplex )
{
OSStatus result = noErr;
UInt32 dataSize = sizeof( AudioStreamBasicDescription );
result = AudioDeviceGetProperty( id, 0, isInput,
kAudioDevicePropertyStreamFormatSupported,
&dataSize, desc );
if (result == kAudioHardwareNoError) {
if ( isDuplex ) {
result = AudioDeviceGetProperty( id, 0, true,
kAudioDevicePropertyStreamFormatSupported,
&dataSize, desc );
if (result != kAudioHardwareNoError)
return false;
}
return true;
}
return false;
}
void RtApiCore :: probeDeviceInfo( RtApiDevice *info )
{
OSStatus err = noErr;
// Get the device manufacturer and name.
char name[256];
char fullname[512];
UInt32 dataSize = 256;
AudioDeviceID *id = (AudioDeviceID *) info->apiDeviceId;
err = AudioDeviceGetProperty( *id, 0, false,
kAudioDevicePropertyDeviceManufacturer,
&dataSize, name );
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting device manufacturer." );
error(RtError::DEBUG_WARNING);
return;
}
strncpy(fullname, name, 256);
strcat(fullname, ": " );
dataSize = 256;
err = AudioDeviceGetProperty( *id, 0, false,
kAudioDevicePropertyDeviceName,
&dataSize, name );
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting device name." );
error(RtError::DEBUG_WARNING);
return;
}
strncat(fullname, name, 254);
info->name.erase();
info->name.append( (const char *)fullname, strlen(fullname)+1);
// Get output channel information.
unsigned int i, minChannels = 0, maxChannels = 0, nStreams = 0;
AudioBufferList *bufferList = nil;
err = AudioDeviceGetPropertyInfo( *id, 0, false,
kAudioDevicePropertyStreamConfiguration,
&dataSize, NULL );
if (err == noErr && dataSize > 0) {
bufferList = (AudioBufferList *) malloc( dataSize );
if (bufferList == NULL) {
sprintf(message_, "RtApiCore: memory allocation error!");
error(RtError::DEBUG_WARNING);
return;
}
err = AudioDeviceGetProperty( *id, 0, false,
kAudioDevicePropertyStreamConfiguration,
&dataSize, bufferList );
if (err == noErr) {
maxChannels = 0;
minChannels = 1000;
nStreams = bufferList->mNumberBuffers;
for ( i=0; i<nStreams; i++ ) {
maxChannels += bufferList->mBuffers[i].mNumberChannels;
if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
minChannels = bufferList->mBuffers[i].mNumberChannels;
}
}
}
free (bufferList);
if (err != noErr || dataSize <= 0) {
sprintf( message_, "RtApiCore: OS-X error getting output channels for device (%s).",
info->name.c_str() );
error(RtError::DEBUG_WARNING);
return;
}
if ( nStreams ) {
if ( maxChannels > 0 )
info->maxOutputChannels = maxChannels;
if ( minChannels > 0 )
info->minOutputChannels = minChannels;
}
// Get input channel information.
bufferList = nil;
err = AudioDeviceGetPropertyInfo( *id, 0, true,
kAudioDevicePropertyStreamConfiguration,
&dataSize, NULL );
if (err == noErr && dataSize > 0) {
bufferList = (AudioBufferList *) malloc( dataSize );
if (bufferList == NULL) {
sprintf(message_, "RtApiCore: memory allocation error!");
error(RtError::DEBUG_WARNING);
return;
}
err = AudioDeviceGetProperty( *id, 0, true,
kAudioDevicePropertyStreamConfiguration,
&dataSize, bufferList );
if (err == noErr) {
maxChannels = 0;
minChannels = 1000;
nStreams = bufferList->mNumberBuffers;
for ( i=0; i<nStreams; i++ ) {
if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
minChannels = bufferList->mBuffers[i].mNumberChannels;
maxChannels += bufferList->mBuffers[i].mNumberChannels;
}
}
}
free (bufferList);
if (err != noErr || dataSize <= 0) {
sprintf( message_, "RtApiCore: OS-X error getting input channels for device (%s).",
info->name.c_str() );
error(RtError::DEBUG_WARNING);
return;
}
if ( nStreams ) {
if ( maxChannels > 0 )
info->maxInputChannels = maxChannels;
if ( minChannels > 0 )
info->minInputChannels = minChannels;
}
// If device opens for both playback and capture, we determine the channels.
if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
info->hasDuplexSupport = true;
info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
info->maxInputChannels : info->maxOutputChannels;
info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
info->minInputChannels : info->minOutputChannels;
}
// Probe the device sample rate and data format parameters. The
// core audio query mechanism is performed on a "stream"
// description, which can have a variable number of channels and
// apply to input or output only.
// Create a stream description structure.
AudioStreamBasicDescription description;
dataSize = sizeof( AudioStreamBasicDescription );
memset(&description, 0, sizeof(AudioStreamBasicDescription));
bool isInput = false;
if ( info->maxOutputChannels == 0 ) isInput = true;
bool isDuplex = false;
if ( info->maxDuplexChannels > 0 ) isDuplex = true;
// Determine the supported sample rates.
info->sampleRates.clear();
for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
description.mSampleRate = (double) SAMPLE_RATES[k];
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->sampleRates.push_back( SAMPLE_RATES[k] );
}
if (info->sampleRates.size() == 0) {
sprintf( message_, "RtApiCore: No supported sample rates found for OS-X device (%s).",
info->name.c_str() );
error(RtError::DEBUG_WARNING);
return;
}
// Determine the supported data formats.
info->nativeFormats = 0;
description.mFormatID = kAudioFormatLinearPCM;
description.mBitsPerChannel = 8;
description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT8;
else {
description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT8;
}
description.mBitsPerChannel = 16;
description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT16;
else {
description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT16;
}
description.mBitsPerChannel = 32;
description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT32;
else {
description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT32;
}
description.mBitsPerChannel = 24;
description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsAlignedHigh | kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT24;
else {
description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_SINT24;
}
description.mBitsPerChannel = 32;
description.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_FLOAT32;
else {
description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_FLOAT32;
}
description.mBitsPerChannel = 64;
description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_FLOAT64;
else {
description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
info->nativeFormats |= RTAUDIO_FLOAT64;
}
// Check that we have at least one supported format.
if (info->nativeFormats == 0) {
sprintf(message_, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
info->probed = true;
}
OSStatus callbackHandler(AudioDeviceID inDevice,
const AudioTimeStamp* inNow,
const AudioBufferList* inInputData,
const AudioTimeStamp* inInputTime,
AudioBufferList* outOutputData,
const AudioTimeStamp* inOutputTime,
void* infoPointer)
{
CallbackInfo *info = (CallbackInfo *) infoPointer;
RtApiCore *object = (RtApiCore *) info->object;
try {
object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData );
}
catch (RtError &exception) {
fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString());
return kAudioHardwareUnspecifiedError;
}
return kAudioHardwareNoError;
}
OSStatus deviceListener(AudioDeviceID inDevice,
UInt32 channel,
Boolean isInput,
AudioDevicePropertyID propertyID,
void* handlePointer)
{
CoreHandle *handle = (CoreHandle *) handlePointer;
if ( propertyID == kAudioDeviceProcessorOverload ) {
#ifdef __CHUCK_DEBUG__
if ( isInput )
fprintf(stderr, "[chuck](via rtaudio): RtApiCore: OS-X audio input overrun detected!\n");
else
fprintf(stderr, "[chuck](via rtaudio): RtApiCore: OS-X audio output underrun detected!\n");
#endif
handle->xrun = true;
}
return kAudioHardwareNoError;
}
bool RtApiCore :: probeDeviceOpen( int device, StreamMode mode, int channels,
int sampleRate, RtAudioFormat format,
int *bufferSize, int numberOfBuffers )
{
// Setup for stream mode.
bool isInput = false;
AudioDeviceID id = *((AudioDeviceID *) devices_[device].apiDeviceId);
if ( mode == INPUT ) isInput = true;
// Search for a stream which contains the desired number of channels.
OSStatus err = noErr;
UInt32 dataSize;
unsigned int deviceChannels, nStreams = 0;
UInt32 iChannel = 0, iStream = 0;
AudioBufferList *bufferList = nil;
err = AudioDeviceGetPropertyInfo( id, 0, isInput,
kAudioDevicePropertyStreamConfiguration,
&dataSize, NULL );
if (err == noErr && dataSize > 0) {
bufferList = (AudioBufferList *) malloc( dataSize );
if (bufferList == NULL) {
sprintf(message_, "RtApiCore: memory allocation error in probeDeviceOpen()!");
error(RtError::DEBUG_WARNING);
return FAILURE;
}
err = AudioDeviceGetProperty( id, 0, isInput,
kAudioDevicePropertyStreamConfiguration,
&dataSize, bufferList );
if (err == noErr) {
stream_.deInterleave[mode] = false;
nStreams = bufferList->mNumberBuffers;
for ( iStream=0; iStream<nStreams; iStream++ ) {
if ( bufferList->mBuffers[iStream].mNumberChannels >= (unsigned int) channels ) break;
iChannel += bufferList->mBuffers[iStream].mNumberChannels;
}
// If we didn't find a single stream above, see if we can meet
// the channel specification in mono mode (i.e. using separate
// non-interleaved buffers). This can only work if there are N
// consecutive one-channel streams, where N is the number of
// desired channels.
iChannel = 0;
if ( iStream >= nStreams && nStreams >= (unsigned int) channels ) {
int counter = 0;
for ( iStream=0; iStream<nStreams; iStream++ ) {
if ( bufferList->mBuffers[iStream].mNumberChannels == 1 )
counter++;
else
counter = 0;
if ( counter == channels ) {
iStream -= channels - 1;
iChannel -= channels - 1;
stream_.deInterleave[mode] = true;
break;
}
iChannel += bufferList->mBuffers[iStream].mNumberChannels;
}
}
}
}
if (err != noErr || dataSize <= 0) {
if ( bufferList ) free( bufferList );
sprintf( message_, "RtApiCore: OS-X error getting channels for device (%s).",
devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
if (iStream >= nStreams) {
free (bufferList);
sprintf( message_, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
devices_[device].name.c_str(), channels );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
// This is ok even for mono mode ... it gets updated later.
deviceChannels = bufferList->mBuffers[iStream].mNumberChannels;
free (bufferList);
// Determine the buffer size.
AudioValueRange bufferRange;
dataSize = sizeof(AudioValueRange);
err = AudioDeviceGetProperty( id, 0, isInput,
kAudioDevicePropertyBufferSizeRange,
&dataSize, &bufferRange);
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting buffer size range for device (%s).",
devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
long bufferBytes = *bufferSize * deviceChannels * formatBytes(RTAUDIO_FLOAT32);
if (bufferRange.mMinimum > bufferBytes) bufferBytes = (int) bufferRange.mMinimum;
else if (bufferRange.mMaximum < bufferBytes) bufferBytes = (int) bufferRange.mMaximum;
// Set the buffer size. For mono mode, I'm assuming we only need to
// make this setting for the first channel.
UInt32 theSize = (UInt32) bufferBytes;
dataSize = sizeof( UInt32);
err = AudioDeviceSetProperty(id, NULL, 0, isInput,
kAudioDevicePropertyBufferSize,
dataSize, &theSize);
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).",
devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
// If attempting to setup a duplex stream, the bufferSize parameter
// MUST be the same in both directions!
*bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) );
if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
stream_.bufferSize = *bufferSize;
stream_.nBuffers = 1;
// Set the stream format description. Do for each channel in mono mode.
AudioStreamBasicDescription description;
dataSize = sizeof( AudioStreamBasicDescription );
if ( stream_.deInterleave[mode] ) nStreams = channels;
else nStreams = 1;
for ( unsigned int i=0; i<nStreams; i++, iChannel++ ) {
err = AudioDeviceGetProperty( id, iChannel, isInput,
kAudioDevicePropertyStreamFormat,
&dataSize, &description );
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).",
devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
// Set the sample rate and data format id.
description.mSampleRate = (double) sampleRate;
description.mFormatID = kAudioFormatLinearPCM;
err = AudioDeviceSetProperty( id, NULL, iChannel, isInput,
kAudioDevicePropertyStreamFormat,
dataSize, &description );
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
}
// Check whether we need byte-swapping (assuming OS-X host is big-endian).
iChannel -= nStreams;
err = AudioDeviceGetProperty( id, iChannel, isInput,
kAudioDevicePropertyStreamFormat,
&dataSize, &description );
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).", devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
stream_.doByteSwap[mode] = false;
if ( !description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian )
stream_.doByteSwap[mode] = true;
// From the CoreAudio documentation, PCM data must be supplied as
// 32-bit floats.
stream_.userFormat = format;
stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
if ( stream_.deInterleave[mode] ) // mono mode
stream_.nDeviceChannels[mode] = channels;
else
stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
stream_.nUserChannels[mode] = channels;
// Set flags for buffer conversion.
stream_.doConvertBuffer[mode] = false;
if (stream_.userFormat != stream_.deviceFormat[mode])
stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
stream_.doConvertBuffer[mode] = true;
// Allocate our CoreHandle structure for the stream.
CoreHandle *handle;
if ( stream_.apiHandle == 0 ) {
handle = (CoreHandle *) calloc(1, sizeof(CoreHandle));
if ( handle == NULL ) {
sprintf(message_, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
devices_[device].name.c_str());
goto error;
}
handle->index[0] = 0;
handle->index[1] = 0;
if ( pthread_cond_init(&handle->condition, NULL) ) {
sprintf(message_, "RtApiCore: error initializing pthread condition variable (%s).",
devices_[device].name.c_str());
goto error;
}
stream_.apiHandle = (void *) handle;
}
else
handle = (CoreHandle *) stream_.apiHandle;
handle->index[mode] = iStream;
// Allocate necessary internal buffers.
if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
long buffer_bytes;
if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
buffer_bytes = stream_.nUserChannels[0];
else
buffer_bytes = stream_.nUserChannels[1];
buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
if (stream_.userBuffer) free(stream_.userBuffer);
stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
if (stream_.userBuffer == NULL) {
sprintf(message_, "RtApiCore: OS-X error allocating user buffer memory (%s).",
devices_[device].name.c_str());
goto error;
}
}
if ( stream_.deInterleave[mode] ) {
long buffer_bytes;
bool makeBuffer = true;
if ( mode == OUTPUT )
buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
else { // mode == INPUT
buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
if ( buffer_bytes < bytes_out ) makeBuffer = false;
}
}
if ( makeBuffer ) {
buffer_bytes *= *bufferSize;
if (stream_.deviceBuffer) free(stream_.deviceBuffer);
stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
if (stream_.deviceBuffer == NULL) {
sprintf(message_, "RtApiCore: error allocating device buffer memory (%s).",
devices_[device].name.c_str());
goto error;
}
// If not de-interleaving, we point stream_.deviceBuffer to the
// OS X supplied device buffer before doing any necessary data
// conversions. This presents a problem if we have a duplex
// stream using one device which needs de-interleaving and
// another device which doesn't. So, save a pointer to our own
// device buffer in the CallbackInfo structure.
handle->deviceBuffer = stream_.deviceBuffer;
}
}
stream_.sampleRate = sampleRate;
stream_.device[mode] = device;
stream_.state = STREAM_STOPPED;
stream_.callbackInfo.object = (void *) this;
if ( stream_.mode == OUTPUT && mode == INPUT && stream_.device[0] == device )
// Only one callback procedure per device.
stream_.mode = DUPLEX;
else {
err = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
if (err != noErr) {
sprintf( message_, "RtApiCore: OS-X error setting callback for device (%s).", devices_[device].name.c_str() );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
if ( stream_.mode == OUTPUT && mode == INPUT )
stream_.mode = DUPLEX;
else
stream_.mode = mode;
}
stream_.sub_mode = mode;
// Setup the device property listener for over/underload.
err = AudioDeviceAddPropertyListener( id, iChannel, isInput,
kAudioDeviceProcessorOverload,
deviceListener, (void *) handle );
return SUCCESS;
error:
if ( handle ) {
pthread_cond_destroy(&handle->condition);
free(handle);
stream_.apiHandle = 0;
}
if (stream_.userBuffer) {
free(stream_.userBuffer);
stream_.userBuffer = 0;
}
error(RtError::WARNING);
return FAILURE;
}
void RtApiCore :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // stream check.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiCore::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  // Stop and remove the output-side IOProc.  Bug fix: dereference the
  // device id only for the direction actually in use -- for an
  // INPUT-only stream, stream_.device[0] was never set and its
  // apiDeviceId may be invalid (and vice versa).
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
    if (stream_.state == STREAM_RUNNING)
      AudioDeviceStop( id, callbackHandler );
    AudioDeviceRemoveIOProc( id, callbackHandler );
  }

  // Stop and remove the input-side IOProc (only when it is a separate
  // device; single-device duplex shares one IOProc).
  if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
    AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
    if (stream_.state == STREAM_RUNNING)
      AudioDeviceStop( id, callbackHandler );
    AudioDeviceRemoveIOProc( id, callbackHandler );
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  // The scratch device buffer was only allocated when de-interleaving
  // was required for either direction.
  if ( stream_.deInterleave[0] || stream_.deInterleave[1] ) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;

  // Destroy pthread condition variable and free the CoreHandle structure.
  if ( handle ) {
    pthread_cond_destroy(&handle->condition);
    free( handle );
    stream_.apiHandle = 0;
  }

  stream_.mode = UNINITIALIZED;
  stream_.sub_mode = UNINITIALIZED;
}
void RtApiCore :: startStream()
{
  // Start the CoreAudio IOProc(s) for the open stream.  Throws
  // RtError::DRIVER_ERROR (after releasing the mutex) on failure.
  verifyStream();
  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  OSStatus err;
  AudioDeviceID id;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
    err = AudioDeviceStart(id, callbackHandler);
    if (err != noErr) {
      sprintf(message_, "RtApiCore: OS-X error starting callback procedure on device (%s).",
              devices_[stream_.device[0]].name.c_str());
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  // Separate input device (INPUT-only, or duplex on two devices).
  if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
    id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
    err = AudioDeviceStart(id, callbackHandler);
    if (err != noErr) {
      // Bug fix: report the *input* device name (was device[0]).
      sprintf(message_, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
              devices_[stream_.device[1]].name.c_str());
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  handle->stopStream = false;
  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiCore :: stopStream()
{
  // Stop the CoreAudio IOProc(s).  Throws RtError::DRIVER_ERROR
  // (after releasing the mutex) on failure.
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;

  MUTEX_LOCK(&stream_.mutex);

  OSStatus err;
  AudioDeviceID id;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
    err = AudioDeviceStop(id, callbackHandler);
    if (err != noErr) {
      sprintf(message_, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
              devices_[stream_.device[0]].name.c_str());
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  // Separate input device (INPUT-only, or duplex on two devices).
  if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
    id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
    err = AudioDeviceStop(id, callbackHandler);
    if (err != noErr) {
      // Bug fix: report the *input* device name (was device[0]).
      sprintf(message_, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
              devices_[stream_.device[1]].name.c_str());
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiCore :: abortStream()
{
stopStream();
}
void RtApiCore :: tickStream()
{
verifyStream();
if (stream_.state == STREAM_STOPPED) return;
if (stream_.callbackInfo.usingCallback) {
sprintf(message_, "RtApiCore: tickStream() should not be used when a callback function is set!");
error(RtError::WARNING);
return;
}
CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
MUTEX_LOCK(&stream_.mutex);
pthread_cond_wait(&handle->condition, &stream_.mutex);
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiCore :: callbackEvent( AudioDeviceID deviceId, void *inData, void *outData )
{
  // Per-buffer CoreAudio IOProc handler.  Runs the user callback (or
  // wakes a blocked tickStream()), performs any required format
  // conversion / byte swapping / de-interleaving, and copies data
  // between the user buffer and the CoreAudio buffer lists.
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  AudioBufferList *inBufferList = (AudioBufferList *) inData;
  AudioBufferList *outBufferList = (AudioBufferList *) outData;

  if ( info->usingCallback && handle->stopStream ) {
    // Check if the stream should be stopped (via the previous user
    // callback return value). We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
    this->stopStream();
    return;
  }

  MUTEX_LOCK(&stream_.mutex);

  // Invoke user callback first, to get fresh output data. Don't
  // invoke the user callback if duplex mode AND the input/output devices
  // are different AND this function is called for the input device.
  AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
  if ( info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) {
    RtAudioCallback callback = (RtAudioCallback) info->callback;
    handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
    if ( handle->xrun == true ) {
      // An over/underload was flagged by the property listener: skip
      // this cycle's buffer shuffling rather than process a torn buffer.
      handle->xrun = false;
      MUTEX_UNLOCK(&stream_.mutex);
      return;
    }
  }
  else if( info->usingCallback && (stream_.mode == DUPLEX && deviceId != id) )
  {
    // Duplex on two distinct devices, invoked for the secondary
    // device: just shuttle raw data between the user buffer and the
    // secondary device; the callback already ran for the primary.
    if( stream_.sub_mode == INPUT )
      memcpy(stream_.userBuffer,
             inBufferList->mBuffers[handle->index[1]].mData,
             inBufferList->mBuffers[handle->index[1]].mDataByteSize);
    else
      memcpy(outBufferList->mBuffers[handle->index[0]].mData,
             stream_.userBuffer,
             outBufferList->mBuffers[handle->index[0]].mDataByteSize);
  }

  // ----- Output direction -----
  if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
    if (stream_.doConvertBuffer[0]) {
      // Conversion needed: convert into the device buffer (or straight
      // into the CoreAudio buffer when no de-interleave is required),
      // byte swap if necessary, then split channels if de-interleaving.
      if ( !stream_.deInterleave[0] )
        stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->index[0]].mData;
      else
        stream_.deviceBuffer = handle->deviceBuffer;

      convertStreamBuffer(OUTPUT);
      if ( stream_.doByteSwap[0] )
        byteSwapBuffer(stream_.deviceBuffer,
                       stream_.bufferSize * stream_.nDeviceChannels[0],
                       stream_.deviceFormat[0]);

      if ( stream_.deInterleave[0] ) {
        // Copy each channel slice into its own CoreAudio buffer.
        int bufferBytes = outBufferList->mBuffers[handle->index[0]].mDataByteSize;
        for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
          memcpy(outBufferList->mBuffers[handle->index[0]+i].mData,
                 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
        }
      }
    }
    else {
      // No conversion: byte swap the user buffer in place if needed,
      // then copy it straight to the device.
      if (stream_.doByteSwap[0])
        byteSwapBuffer(stream_.userBuffer,
                       stream_.bufferSize * stream_.nUserChannels[0],
                       stream_.userFormat);

      memcpy(outBufferList->mBuffers[handle->index[0]].mData,
             stream_.userBuffer,
             outBufferList->mBuffers[handle->index[0]].mDataByteSize );
    }
  }

  // ----- Input direction -----
  if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
    if (stream_.doConvertBuffer[1]) {
      if ( stream_.deInterleave[1] ) {
        // Gather the per-channel CoreAudio buffers into the scratch
        // device buffer before conversion.
        stream_.deviceBuffer = (char *) handle->deviceBuffer;
        int bufferBytes = inBufferList->mBuffers[handle->index[1]].mDataByteSize;
        for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
          memcpy(&stream_.deviceBuffer[i*bufferBytes],
                 inBufferList->mBuffers[handle->index[1]+i].mData, bufferBytes );
        }
      }
      else
        stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->index[1]].mData;

      if ( stream_.doByteSwap[1] )
        byteSwapBuffer(stream_.deviceBuffer,
                       stream_.bufferSize * stream_.nDeviceChannels[1],
                       stream_.deviceFormat[1]);
      convertStreamBuffer(INPUT);
    }
    else {
      // No conversion: copy device data to the user buffer, then byte
      // swap in place if needed.
      memcpy(stream_.userBuffer,
             inBufferList->mBuffers[handle->index[1]].mData,
             inBufferList->mBuffers[handle->index[1]].mDataByteSize );

      if (stream_.doByteSwap[1])
        byteSwapBuffer(stream_.userBuffer,
                       stream_.bufferSize * stream_.nUserChannels[1],
                       stream_.userFormat);
    }
  }

  // Blocking mode: wake the thread waiting in tickStream().  In duplex
  // mode only the primary device's invocation signals, once per cycle.
  if ( !info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) )
    pthread_cond_signal(&handle->condition);

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiCore :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  // Register a user callback for this stream.  Only one callback may
  // be active at a time; a second registration is ignored with a warning.
  verifyStream();

  if ( stream_.callbackInfo.usingCallback ) {
    sprintf(message_, "RtApiCore: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  CallbackInfo *info = &stream_.callbackInfo;
  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
}
void RtApiCore :: cancelStreamCallback()
{
  // Deregister the user callback, stopping the stream first if it is
  // currently running.  A no-op when no callback is registered.
  verifyStream();

  if ( !stream_.callbackInfo.usingCallback )
    return;

  if (stream_.state == STREAM_RUNNING)
    stopStream();

  MUTEX_LOCK(&stream_.mutex);
  stream_.callbackInfo.usingCallback = false;
  stream_.callbackInfo.userData = NULL;
  stream_.state = STREAM_STOPPED;
  stream_.callbackInfo.callback = NULL;
  MUTEX_UNLOCK(&stream_.mutex);
}
//******************** End of __MACOSX_CORE__ *********************//
#endif
#if defined(__LINUX_JACK__)
// JACK is a low-latency audio server, written primarily for the
// GNU/Linux operating system. It can connect a number of different
// applications to an audio device, as well as allowing them to share
// audio between themselves.
//
// The JACK server must be running before RtApiJack can be instantiated.
// RtAudio will report just a single "device", which is the JACK audio
// server. The JACK server is typically started in a terminal as follows:
//
// jackd -d alsa -d hw:0
//
// Many of the parameters normally set for a stream are fixed by the
// JACK server and can be specified when the JACK server is started.
// In particular,
//
// jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
//
// specifies a sample rate of 44100 Hz, a buffer size of 512 sample
// frames, and number of buffers = 4. Once the server is running, it
// is not possible to override these values. If the values are not
// specified in the command-line, the JACK server uses default values.
#include <jack/jack.h>
#include <unistd.h>
// A structure to hold various information related to the Jack API
// implementation.
struct JackHandle {
  jack_client_t *client;     // our connection to the JACK server
  jack_port_t **ports[2];    // per-channel port arrays: [0] = output, [1] = input
  bool clientOpen;           // true while `client` is connected; cleared by jackShutdown()
  bool stopStream;           // set from the user callback's return value to request a stop
  pthread_cond_t condition;  // signaled by callbackEvent() to unblock tickStream()
  JackHandle()
    // `condition` is initialized separately via pthread_cond_init().
    :client(0), clientOpen(false), stopStream(false) {}
};
// Most recent JACK error text, saved for reporting via the normal
// RtAudio error path (see RtApiJack constructor / probe functions).
std::string jackmsg;

static void jackerror (const char *desc)
{
  // Bug fix: the previous code appended strlen(desc)+1 bytes, which
  // embedded the terminating NUL character inside the std::string.
  // Plain assignment copies exactly the message text.  Also guard
  // against a NULL description pointer.
  jackmsg = desc ? desc : "";
}
RtApiJack :: RtApiJack()
{
  // Probe for a running JACK server; construction fails with
  // NO_DEVICES_FOUND when none can be reached.
  this->initialize();

  if ( nDevices_ <= 0 ) {
    sprintf(message_, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
            jackmsg.c_str());
    error(RtError::NO_DEVICES_FOUND);
  }
}
RtApiJack :: ~RtApiJack()
{
  // Release any stream still open before the object goes away.
  if ( stream_.mode != UNINITIALIZED )
    closeStream();
}
void RtApiJack :: initialize(void)
{
  nDevices_ = 0;

  // Tell the jack server to call jackerror() when it experiences an
  // error.  This function saves the error message for subsequent
  // reporting via the normal RtAudio error function.
  jack_set_error_function( jackerror );

  // Try to become a client of a running JACK server.  If that works,
  // JACK is available and is exposed as a single RtAudio "device".
  jack_client_t *client = jack_client_new( "RtApiJack" );
  if ( client == 0 )
    return;

  RtApiDevice device;
  device.name = "Jack Server";
  devices_.push_back( device );
  nDevices_++;

  jack_client_close( client );
}
void RtApiJack :: probeDeviceInfo(RtApiDevice *info)
{
  // Fill in the RtApiDevice fields (sample rate, channel counts,
  // native format) for the single "Jack Server" device by opening a
  // temporary JACK client connection.

  // Look for jack server and try to become a client.
  jack_client_t *client;
  if ( (client = jack_client_new( "RtApiJack" )) == 0) {
    sprintf(message_, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
            jackmsg.c_str());
    error(RtError::WARNING);
    return;
  }

  // Get the current jack server sample rate (the only rate supported;
  // it is fixed when the server is started).
  info->sampleRates.clear();
  info->sampleRates.push_back( jack_get_sample_rate(client) );

  // Count the available ports as device channels. Jack "input ports"
  // equal RtAudio output channels.  The returned port list is
  // NULL-terminated.
  const char **ports;
  char *port;
  unsigned int nChannels = 0;
  ports = jack_get_ports( client, NULL, NULL, JackPortIsInput );
  if ( ports ) {
    port = (char *) ports[nChannels];
    while ( port )
      port = (char *) ports[++nChannels];
    free( ports );
    info->maxOutputChannels = nChannels;
    info->minOutputChannels = 1;
  }

  // Jack "output ports" equal RtAudio input channels.
  nChannels = 0;
  ports = jack_get_ports( client, NULL, NULL, JackPortIsOutput );
  if ( ports ) {
    port = (char *) ports[nChannels];
    while ( port )
      port = (char *) ports[++nChannels];
    free( ports );
    info->maxInputChannels = nChannels;
    info->minInputChannels = 1;
  }

  if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
    jack_client_close(client);
    sprintf(message_, "RtApiJack: error determining jack input/output channels!");
    error(RtError::WARNING);
    return;
  }

  // Duplex is available whenever both directions have ports; the
  // duplex channel limits are the smaller of the two directions.
  if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
    info->hasDuplexSupport = true;
    info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
      info->maxInputChannels : info->maxOutputChannels;
    info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
      info->minInputChannels : info->minOutputChannels;
  }

  // Get the jack data format type. There isn't much documentation
  // regarding supported data formats in jack. I'm assuming here that
  // the default type will always be a floating-point type, of length
  // equal to either 4 or 8 bytes.
  int sample_size = sizeof( jack_default_audio_sample_t );
  if ( sample_size == 4 )
    info->nativeFormats = RTAUDIO_FLOAT32;
  else if ( sample_size == 8 )
    info->nativeFormats = RTAUDIO_FLOAT64;

  // Check that we have a supported format.
  if (info->nativeFormats == 0) {
    jack_client_close(client);
    sprintf(message_, "RtApiJack: error determining jack server data format!");
    error(RtError::WARNING);
    return;
  }

  jack_client_close(client);
  info->probed = true;
}
int jackCallbackHandler(jack_nframes_t nframes, void *infoPointer)
{
  // Trampoline from the JACK process thread into the RtApiJack object.
  // Exceptions must not propagate into the JACK server, so they are
  // caught and reported on stderr here; zero is always returned.
  CallbackInfo *info = (CallbackInfo *) infoPointer;
  RtApiJack *object = (RtApiJack *) info->object;

  try {
    object->callbackEvent( (unsigned long) nframes );
  }
  catch (RtError &exception) {
    fprintf(stderr, "\nRtApiJack: callback handler error (%s)!\n\n", exception.getMessageString());
  }

  return 0;
}
void jackShutdown(void *infoPointer)
{
  // Invoked by JACK when the server itself shuts down.  The client
  // connection is already severed, so mark it closed before calling
  // closeStream() (which would otherwise try to close the client).
  CallbackInfo *info = (CallbackInfo *) infoPointer;
  JackHandle *handle = (JackHandle *) info->apiInfo;
  handle->clientOpen = false;
  RtApiJack *object = (RtApiJack *) info->object;
  try {
    object->closeStream();
  }
  catch (RtError &exception) {
    // Don't let an exception escape into the JACK thread.
    fprintf(stderr, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception.getMessageString());
    return;
  }
  fprintf(stderr, "\nRtApiJack: the Jack server is shutting down ... stream stopped and closed!!!\n\n");
}
int jackXrun( void * )
{
  // JACK xrun callback: report the overrun/underrun on stderr.
  // Returning zero lets processing continue.
  fprintf(stderr, "\nRtApiJack: audio overrun/underrun reported!\n");
  return 0;
}
bool RtApiJack :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                  int sampleRate, RtAudioFormat format,
                                  int *bufferSize, int numberOfBuffers)
{
  // Set up one direction (OUTPUT or INPUT) of a JACK stream: check the
  // request against the server's fixed parameters, allocate the
  // JackHandle, conversion buffers and port-id array, and hook up the
  // JACK callbacks on the first pass.  Called once per direction for
  // duplex streams.  Returns SUCCESS or FAILURE.
  // Note: numberOfBuffers is unused -- the JACK server fixes it.

  // Compare the jack server channels to the requested number of channels.
  if ( (mode == OUTPUT && devices_[device].maxOutputChannels < channels ) ||
       (mode == INPUT && devices_[device].maxInputChannels < channels ) ) {
    sprintf(message_, "RtApiJack: the Jack server does not support requested channels!");
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  JackHandle *handle = (JackHandle *) stream_.apiHandle;

  // Look for jack server and try to become a client (only do once per stream).
  char label[32];
  jack_client_t *client = 0;
  if ( mode == OUTPUT || (mode == INPUT && stream_.mode != OUTPUT) ) {
    snprintf(label, 32, "RtApiJack");
    if ( (client = jack_client_new( (const char *) label )) == 0) {
      sprintf(message_, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
              jackmsg.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }
  }
  else {
    // The handle must have been created on an earlier pass.
    client = handle->client;
  }

  // First, check the jack server sample rate (fixed by the server).
  int jack_rate;
  jack_rate = (int) jack_get_sample_rate(client);
  if ( sampleRate != jack_rate ) {
    // NOTE(review): on a second (INPUT) pass of a duplex stream this
    // closes the client created by the earlier OUTPUT pass while the
    // handle still references it -- confirm intended.
    jack_client_close(client);
    sprintf( message_, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
             sampleRate, jack_rate );
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }
  stream_.sampleRate = jack_rate;

  // The jack server seems to support just a single floating-point
  // data type.  Since we already checked it before, just use what we
  // found then.
  stream_.deviceFormat[mode] = devices_[device].nativeFormats;
  stream_.userFormat = format;

  // Jack always uses non-interleaved buffers.  We'll need to
  // de-interleave if we have more than one channel.
  stream_.deInterleave[mode] = false;
  if ( channels > 1 )
    stream_.deInterleave[mode] = true;

  // Jack always provides host byte-ordered data.
  stream_.doByteSwap[mode] = false;

  // Get the buffer size.  The buffer size and number of buffers
  // (periods) is set when the jack server is started.
  stream_.bufferSize = (int) jack_get_buffer_size(client);
  *bufferSize = stream_.bufferSize;

  stream_.nDeviceChannels[mode] = channels;
  stream_.nUserChannels[mode] = channels;

  // A conversion pass is needed on format mismatch or when
  // de-interleaving (more than one channel).
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.deInterleave[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate our JackHandle structure for the stream.
  if ( handle == 0 ) {
    handle = (JackHandle *) calloc(1, sizeof(JackHandle));
    if ( handle == NULL ) {
      sprintf(message_, "RtApiJack: error allocating JackHandle memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
    handle->ports[0] = 0;
    handle->ports[1] = 0;
    if ( pthread_cond_init(&handle->condition, NULL) ) {
      sprintf(message_, "RtApiJack: error initializing pthread condition variable!");
      goto error;
    }
    stream_.apiHandle = (void *) handle;
    handle->client = client;
    handle->clientOpen = true;
  }

  // Allocate necessary internal buffers.  The user buffer is sized for
  // the larger channel count so one buffer serves both directions.
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiJack: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {
    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        // Reuse the output pass's device buffer if it is big enough.
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiJack: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());
        goto error;
      }
    }
  }

  // Allocate memory for the Jack ports (channels) identifiers.
  handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels);
  if ( handle->ports[mode] == NULL ) {
    sprintf(message_, "RtApiJack: error allocating port handle memory (%s).",
            devices_[device].name.c_str());
    goto error;
  }

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  stream_.callbackInfo.usingCallback = false;
  stream_.callbackInfo.object = (void *) this;
  stream_.callbackInfo.apiInfo = (void *) handle;

  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up the stream for output.
    stream_.mode = DUPLEX;
  else {
    // First pass: record the mode and hook up the JACK callbacks.
    stream_.mode = mode;
    jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
    jack_set_xrun_callback( handle->client, jackXrun, NULL );
    jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
  }

  return SUCCESS;

  // Centralized cleanup: release everything allocated in this call.
 error:
  if ( handle ) {
    pthread_cond_destroy(&handle->condition);
    if ( handle->clientOpen == true )
      jack_client_close(handle->client);

    if ( handle->ports[0] ) free(handle->ports[0]);
    if ( handle->ports[1] ) free(handle->ports[1]);

    free( handle );
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  error(RtError::WARNING);
  return FAILURE;
}
void RtApiJack :: closeStream()
{
// We don't want an exception to be thrown here because this
// function is called by our class destructor. So, do our own
// stream check.
if ( stream_.mode == UNINITIALIZED ) {
sprintf(message_, "RtApiJack::closeStream(): no open stream to close!");
error(RtError::WARNING);
return;
}
JackHandle *handle = (JackHandle *) stream_.apiHandle;
if ( handle && handle->clientOpen == true ) {
if (stream_.state == STREAM_RUNNING)
jack_deactivate(handle->client);
jack_client_close(handle->client);
}
if ( handle ) {
if ( handle->ports[0] ) free(handle->ports[0]);
if ( handle->ports[1] ) free(handle->ports[1]);
pthread_cond_destroy(&handle->condition);
free( handle );
stream_.apiHandle = 0;
}
if (stream_.userBuffer) {
free(stream_.userBuffer);
stream_.userBuffer = 0;
}
if (stream_.deviceBuffer) {
free(stream_.deviceBuffer);
stream_.deviceBuffer = 0;
}
stream_.mode = UNINITIALIZED;
}
void RtApiJack :: startStream()
{
verifyStream();
if (stream_.state == STREAM_RUNNING) return;
MUTEX_LOCK(&stream_.mutex);
char label[64];
JackHandle *handle = (JackHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
snprintf(label, 64, "outport %d", i);
handle->ports[0][i] = jack_port_register(handle->client, (const char *)label,
JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0);
}
}
if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
snprintf(label, 64, "inport %d", i);
handle->ports[1][i] = jack_port_register(handle->client, (const char *)label,
JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0);
}
}
if (jack_activate(handle->client)) {
sprintf(message_, "RtApiJack: unable to activate JACK client!");
error(RtError::SYSTEM_ERROR);
}
const char **ports;
int result;
// Get the list of available ports.
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
ports = jack_get_ports(handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsInput);
if ( ports == NULL) {
sprintf(message_, "RtApiJack: error determining available jack input ports!");
error(RtError::SYSTEM_ERROR);
}
// Now make the port connections. Since RtAudio wasn't designed to
// allow the user to select particular channels of a device, we'll
// just open the first "nChannels" ports.
for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
result = 1;
if ( ports[i] )
result = jack_connect( handle->client, jack_port_name(handle->ports[0][i]), ports[i] );
if ( result ) {
free(ports);
sprintf(message_, "RtApiJack: error connecting output ports!");
error(RtError::SYSTEM_ERROR);
}
}
free(ports);
}
if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
ports = jack_get_ports( handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsOutput );
if ( ports == NULL) {
sprintf(message_, "RtApiJack: error determining available jack output ports!");
error(RtError::SYSTEM_ERROR);
}
// Now make the port connections. See note above.
for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
result = 1;
if ( ports[i] )
result = jack_connect( handle->client, ports[i], jack_port_name(handle->ports[1][i]) );
if ( result ) {
free(ports);
sprintf(message_, "RtApiJack: error connecting input ports!");
error(RtError::SYSTEM_ERROR);
}
}
free(ports);
}
handle->stopStream = false;
stream_.state = STREAM_RUNNING;
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiJack :: stopStream()
{
verifyStream();
if (stream_.state == STREAM_STOPPED) return;
// Change the state before the lock to improve shutdown response
// when using a callback.
stream_.state = STREAM_STOPPED;
MUTEX_LOCK(&stream_.mutex);
JackHandle *handle = (JackHandle *) stream_.apiHandle;
jack_deactivate(handle->client);
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiJack :: abortStream()
{
stopStream();
}
void RtApiJack :: tickStream()
{
verifyStream();
if (stream_.state == STREAM_STOPPED) return;
if (stream_.callbackInfo.usingCallback) {
sprintf(message_, "RtApiJack: tickStream() should not be used when a callback function is set!");
error(RtError::WARNING);
return;
}
JackHandle *handle = (JackHandle *) stream_.apiHandle;
MUTEX_LOCK(&stream_.mutex);
pthread_cond_wait(&handle->condition, &stream_.mutex);
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiJack :: callbackEvent( unsigned long nframes )
{
  // Per-buffer JACK process handler: run the user callback (or wake a
  // blocked tickStream()), convert formats when required, and copy
  // data between the user buffer and the per-channel JACK port buffers.
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  JackHandle *handle = (JackHandle *) stream_.apiHandle;

  if ( info->usingCallback && handle->stopStream ) {
    // Check if the stream should be stopped (via the previous user
    // callback return value). We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
    this->stopStream();
    return;
  }

  MUTEX_LOCK(&stream_.mutex);

  // Invoke user callback first, to get fresh output data.
  if ( info->usingCallback ) {
    RtAudioCallback callback = (RtAudioCallback) info->callback;
    handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
  }

  jack_default_audio_sample_t *jackbuffer;
  long bufferBytes = nframes * sizeof (jack_default_audio_sample_t);  // bytes per channel
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    if (stream_.doConvertBuffer[0]) {
      // Convert the user buffer, then copy each (non-interleaved)
      // channel slice into its JACK port buffer.
      convertStreamBuffer(OUTPUT);

      for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i],
                                                                          (jack_nframes_t) nframes);
        memcpy(jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
      }
    }
    else { // single channel only
      jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][0],
                                                                        (jack_nframes_t) nframes);
      memcpy(jackbuffer, stream_.userBuffer, bufferBytes );
    }
  }

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
    if (stream_.doConvertBuffer[1]) {
      // Gather the per-channel JACK buffers into the device buffer,
      // then convert into the user format.
      for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i],
                                                                          (jack_nframes_t) nframes);
        memcpy(&stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
      }
      convertStreamBuffer(INPUT);
    }
    else { // single channel only
      jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][0],
                                                                        (jack_nframes_t) nframes);
      memcpy(stream_.userBuffer, jackbuffer, bufferBytes );
    }
  }

  // Blocking mode: wake the thread waiting in tickStream().
  if ( !info->usingCallback )
    pthread_cond_signal(&handle->condition);

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiJack :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  // Register a user callback for this stream.  Only one callback may
  // be active at a time; a second registration is ignored with a warning.
  verifyStream();

  if ( stream_.callbackInfo.usingCallback ) {
    sprintf(message_, "RtApiJack: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  CallbackInfo *info = &stream_.callbackInfo;
  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
}
void RtApiJack :: cancelStreamCallback()
{
  // Deregister the user callback, stopping the stream first if it is
  // currently running.  A no-op when no callback is registered.
  verifyStream();

  if ( !stream_.callbackInfo.usingCallback )
    return;

  if (stream_.state == STREAM_RUNNING)
    stopStream();

  MUTEX_LOCK(&stream_.mutex);
  stream_.callbackInfo.usingCallback = false;
  stream_.callbackInfo.userData = NULL;
  stream_.state = STREAM_STOPPED;
  stream_.callbackInfo.callback = NULL;
  MUTEX_UNLOCK(&stream_.mutex);
}
#endif
#if defined(__LINUX_ALSA__)
#include <alsa/asoundlib.h>
#include <unistd.h>
#include <ctype.h>
extern "C" void *alsaCallbackHandler(void * ptr);
RtApiAlsa :: RtApiAlsa()
{
  // Enumerate the ALSA PCM devices; constructing with none available
  // is a fatal error.
  this->initialize();

  if ( nDevices_ <= 0 ) {
    sprintf(message_, "RtApiAlsa: no Linux ALSA audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}
RtApiAlsa :: ~RtApiAlsa()
{
  // Release any stream still open when the object is destroyed.
  if ( stream_.mode != UNINITIALIZED )
    closeStream();
}
void RtApiAlsa :: initialize(void)
{
  // Enumerate every PCM subdevice on every ALSA card and record each
  // one as an RtAudio device named "hw:card,subdevice" (or
  // "hw:cardId,subdevice" when the card id is non-numeric).
  int card, subdevice, result;
  char name[64];
  const char *cardId;
  snd_ctl_t *handle;
  snd_ctl_card_info_t *info;
  snd_ctl_card_info_alloca(&info);
  RtApiDevice device;

  // Count cards and devices
  nDevices_ = 0;
  card = -1;
  snd_card_next(&card);
  while ( card >= 0 ) {
    sprintf(name, "hw:%d", card);
    result = snd_ctl_open(&handle, name, 0);
    if (result < 0) {
      sprintf(message_, "RtApiAlsa: control open (%i): %s.", card, snd_strerror(result));
      error(RtError::DEBUG_WARNING);
      // Bug fix: the previous `goto next_card` called snd_ctl_close()
      // on a handle that was never opened.  Skip directly to the next
      // card instead.
      snd_card_next(&card);
      continue;
    }
    result = snd_ctl_card_info(handle, info);
    if (result < 0) {
      sprintf(message_, "RtApiAlsa: control hardware info (%i): %s.", card, snd_strerror(result));
      error(RtError::DEBUG_WARNING);
      goto next_card;
    }
    cardId = snd_ctl_card_info_get_id(info);
    subdevice = -1;
    while (1) {
      // Walk this card's PCM devices; a negative subdevice means done.
      result = snd_ctl_pcm_next_device(handle, &subdevice);
      if (result < 0) {
        sprintf(message_, "RtApiAlsa: control next device (%i): %s.", card, snd_strerror(result));
        error(RtError::DEBUG_WARNING);
        break;
      }
      if (subdevice < 0)
        break;
      sprintf( name, "hw:%d,%d", card, subdevice );

      // If a cardId exists and it contains at least one non-numeric
      // character, use it to identify the device.  This avoids a bug
      // in ALSA such that a numeric string is interpreted as a device
      // number.
      for ( unsigned int i=0; i<strlen(cardId); i++ ) {
        if ( !isdigit( cardId[i] ) ) {
          sprintf( name, "hw:%s,%d", cardId, subdevice );
          break;
        }
      }
      device.name.erase();
      device.name.append( (const char *)name, strlen(name)+1 );
      devices_.push_back(device);
      nDevices_++;
    }
  next_card:
    snd_ctl_close(handle);
    snd_card_next(&card);
  }
}
void RtApiAlsa :: probeDeviceInfo(RtApiDevice *info)
{
int err;
int open_mode = SND_PCM_ASYNC;
snd_pcm_t *handle;
snd_ctl_t *chandle;
snd_pcm_stream_t stream;
snd_pcm_info_t *pcminfo;
snd_pcm_info_alloca(&pcminfo);
snd_pcm_hw_params_t *params;
snd_pcm_hw_params_alloca(&params);
char name[64];
char *card;
// Open the control interface for this card.
strncpy( name, info->name.c_str(), 64 );
card = strtok(name, ",");
err = snd_ctl_open(&chandle, card, SND_CTL_NONBLOCK);
if (err < 0) {
sprintf(message_, "RtApiAlsa: control open (%s): %s.", card, snd_strerror(err));
error(RtError::DEBUG_WARNING);
return;
}
unsigned int dev = (unsigned int) atoi( strtok(NULL, ",") );
// First try for playback
stream = SND_PCM_STREAM_PLAYBACK;
snd_pcm_info_set_device(pcminfo, dev);
snd_pcm_info_set_subdevice(pcminfo, 0);
snd_pcm_info_set_stream(pcminfo, stream);
if ((err = snd_ctl_pcm_info(chandle, pcminfo)) < 0) {
if (err == -ENOENT) {
sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle output!", info->name.c_str());
error(RtError::DEBUG_WARNING);
}
else {
sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
info->name.c_str(), snd_strerror(err));
error(RtError::DEBUG_WARNING);
}
goto capture_probe;
}
err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK );
if (err < 0) {
if ( err == EBUSY )
sprintf(message_, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
info->name.c_str(), snd_strerror(err));
else
sprintf(message_, "RtApiAlsa: pcm playback open (%s) error: %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::DEBUG_WARNING);
goto capture_probe;
}
// We have an open device ... allocate the parameter structure.
err = snd_pcm_hw_params_any(handle, params);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
goto capture_probe;
}
// Get output channel information.
unsigned int value;
err = snd_pcm_hw_params_get_channels_min(params, &value);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
goto capture_probe;
}
info->minOutputChannels = value;
err = snd_pcm_hw_params_get_channels_max(params, &value);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
goto capture_probe;
}
info->maxOutputChannels = value;
snd_pcm_close(handle);
capture_probe:
// Now try for capture
stream = SND_PCM_STREAM_CAPTURE;
snd_pcm_info_set_stream(pcminfo, stream);
err = snd_ctl_pcm_info(chandle, pcminfo);
snd_ctl_close(chandle);
if ( err < 0 ) {
if (err == -ENOENT) {
sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle input!", info->name.c_str());
error(RtError::DEBUG_WARNING);
}
else {
sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
info->name.c_str(), snd_strerror(err));
error(RtError::DEBUG_WARNING);
}
if (info->maxOutputChannels == 0)
// didn't open for playback either ... device invalid
return;
goto probe_parameters;
}
err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK);
if (err < 0) {
if ( err == EBUSY )
sprintf(message_, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
info->name.c_str(), snd_strerror(err));
else
sprintf(message_, "RtApiAlsa: pcm capture open (%s) error: %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::DEBUG_WARNING);
if (info->maxOutputChannels == 0)
// didn't open for playback either ... device invalid
return;
goto probe_parameters;
}
// We have an open capture device ... allocate the parameter structure.
err = snd_pcm_hw_params_any(handle, params);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
if (info->maxOutputChannels > 0)
goto probe_parameters;
else
return;
}
// Get input channel information.
err = snd_pcm_hw_params_get_channels_min(params, &value);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
if (info->maxOutputChannels > 0)
goto probe_parameters;
else
return;
}
info->minInputChannels = value;
err = snd_pcm_hw_params_get_channels_max(params, &value);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
if (info->maxOutputChannels > 0)
goto probe_parameters;
else
return;
}
info->maxInputChannels = value;
snd_pcm_close(handle);
// If device opens for both playback and capture, we determine the channels.
if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
goto probe_parameters;
info->hasDuplexSupport = true;
info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
info->maxInputChannels : info->maxOutputChannels;
info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
info->minInputChannels : info->minOutputChannels;
probe_parameters:
// At this point, we just need to figure out the supported data
// formats and sample rates. We'll proceed by opening the device in
// the direction with the maximum number of channels, or playback if
// they are equal. This might limit our sample rate options, but so
// be it.
if (info->maxOutputChannels >= info->maxInputChannels)
stream = SND_PCM_STREAM_PLAYBACK;
else
stream = SND_PCM_STREAM_CAPTURE;
err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode);
if (err < 0) {
sprintf(message_, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
return;
}
// We have an open device ... allocate the parameter structure.
err = snd_pcm_hw_params_any(handle, params);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: hardware reopen probe error (%s): %s.",
info->name.c_str(), snd_strerror(err));
error(RtError::WARNING);
return;
}
// Test our discrete set of sample rate values.
int dir = 0;
info->sampleRates.clear();
for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
if (snd_pcm_hw_params_test_rate(handle, params, SAMPLE_RATES[i], dir) == 0)
info->sampleRates.push_back(SAMPLE_RATES[i]);
}
if (info->sampleRates.size() == 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: no supported sample rates found for device (%s).",
info->name.c_str());
error(RtError::DEBUG_WARNING);
return;
}
// Probe the supported data formats ... we don't care about endian-ness just yet
snd_pcm_format_t format;
info->nativeFormats = 0;
format = SND_PCM_FORMAT_S8;
if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
info->nativeFormats |= RTAUDIO_SINT8;
format = SND_PCM_FORMAT_S16;
if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
info->nativeFormats |= RTAUDIO_SINT16;
format = SND_PCM_FORMAT_S24;
if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
info->nativeFormats |= RTAUDIO_SINT24;
format = SND_PCM_FORMAT_S32;
if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
info->nativeFormats |= RTAUDIO_SINT32;
format = SND_PCM_FORMAT_FLOAT;
if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
info->nativeFormats |= RTAUDIO_FLOAT32;
format = SND_PCM_FORMAT_FLOAT64;
if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
info->nativeFormats |= RTAUDIO_FLOAT64;
// Check that we have at least one supported format
if (info->nativeFormats == 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
info->name.c_str());
error(RtError::WARNING);
return;
}
// That's all ... close the device and return
snd_pcm_close(handle);
info->probed = true;
return;
}
bool RtApiAlsa :: probeDeviceOpen( int device, StreamMode mode, int channels,
int sampleRate, RtAudioFormat format,
int *bufferSize, int numberOfBuffers )
{
#if defined(__RTAUDIO_DEBUG__)
snd_output_t *out;
snd_output_stdio_attach(&out, stderr, 0);
#endif
// I'm not using the "plug" interface ... too much inconsistent behavior.
const char *name = devices_[device].name.c_str();
snd_pcm_stream_t alsa_stream;
if (mode == OUTPUT)
alsa_stream = SND_PCM_STREAM_PLAYBACK;
else
alsa_stream = SND_PCM_STREAM_CAPTURE;
int err;
snd_pcm_t *handle;
int alsa_open_mode = SND_PCM_ASYNC;
err = snd_pcm_open(&handle, name, alsa_stream, alsa_open_mode);
if (err < 0) {
sprintf(message_,"RtApiAlsa: pcm device (%s) won't open: %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// Fill the parameter structure.
snd_pcm_hw_params_t *hw_params;
snd_pcm_hw_params_alloca(&hw_params);
err = snd_pcm_hw_params_any(handle, hw_params);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error getting parameter handle (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
#if defined(__RTAUDIO_DEBUG__)
fprintf(stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
snd_pcm_hw_params_dump(hw_params, out);
#endif
// Set access ... try interleaved access first, then non-interleaved
if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED) ) {
err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED);
}
else if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED) ) {
err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED);
stream_.deInterleave[mode] = true;
}
else {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: device (%s) access not supported by RtAudio.", name);
error(RtError::WARNING);
return FAILURE;
}
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error setting access ( (%s): %s.", name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// Determine how to set the device format.
stream_.userFormat = format;
snd_pcm_format_t device_format = SND_PCM_FORMAT_UNKNOWN;
if (format == RTAUDIO_SINT8)
device_format = SND_PCM_FORMAT_S8;
else if (format == RTAUDIO_SINT16)
device_format = SND_PCM_FORMAT_S16;
else if (format == RTAUDIO_SINT24)
device_format = SND_PCM_FORMAT_S24;
else if (format == RTAUDIO_SINT32)
device_format = SND_PCM_FORMAT_S32;
else if (format == RTAUDIO_FLOAT32)
device_format = SND_PCM_FORMAT_FLOAT;
else if (format == RTAUDIO_FLOAT64)
device_format = SND_PCM_FORMAT_FLOAT64;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = format;
goto set_format;
}
// The user requested format is not natively supported by the device.
device_format = SND_PCM_FORMAT_FLOAT64;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
goto set_format;
}
device_format = SND_PCM_FORMAT_FLOAT;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
goto set_format;
}
device_format = SND_PCM_FORMAT_S32;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
goto set_format;
}
device_format = SND_PCM_FORMAT_S24;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
goto set_format;
}
device_format = SND_PCM_FORMAT_S16;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
goto set_format;
}
device_format = SND_PCM_FORMAT_S8;
if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
goto set_format;
}
// If we get here, no supported format was found.
sprintf(message_,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name);
snd_pcm_close(handle);
error(RtError::WARNING);
return FAILURE;
set_format:
err = snd_pcm_hw_params_set_format(handle, hw_params, device_format);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error setting format (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// Determine whether byte-swaping is necessary.
stream_.doByteSwap[mode] = false;
if (device_format != SND_PCM_FORMAT_S8) {
err = snd_pcm_format_cpu_endian(device_format);
if (err == 0)
stream_.doByteSwap[mode] = true;
else if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error getting format endian-ness (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
}
// Set the sample rate.
err = snd_pcm_hw_params_set_rate(handle, hw_params, (unsigned int)sampleRate, 0);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
sampleRate, name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// Determine the number of channels for this device. We support a possible
// minimum device channel number > than the value requested by the user.
stream_.nUserChannels[mode] = channels;
unsigned int value;
err = snd_pcm_hw_params_get_channels_max(hw_params, &value);
int device_channels = value;
if (err < 0 || device_channels < channels) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: channels (%d) not supported by device (%s).",
channels, name);
error(RtError::WARNING);
return FAILURE;
}
err = snd_pcm_hw_params_get_channels_min(hw_params, &value);
if (err < 0 ) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error getting min channels count on device (%s).", name);
error(RtError::WARNING);
return FAILURE;
}
device_channels = value;
if (device_channels < channels) device_channels = channels;
stream_.nDeviceChannels[mode] = device_channels;
// Set the device channels.
err = snd_pcm_hw_params_set_channels(handle, hw_params, device_channels);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
device_channels, name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// Set the buffer number, which in ALSA is referred to as the "period".
int dir;
unsigned int periods = numberOfBuffers;
// Even though the hardware might allow 1 buffer, it won't work reliably.
if (periods < 2) periods = 2;
err = snd_pcm_hw_params_get_periods_min(hw_params, &value, &dir);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error getting min periods on device (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
if (value > periods) periods = value;
err = snd_pcm_hw_params_get_periods_max(hw_params, &value, &dir);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error getting max periods on device (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
if (value < periods) periods = value;
err = snd_pcm_hw_params_set_periods(handle, hw_params, periods, 0);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error setting periods (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// Set the buffer (or period) size.
snd_pcm_uframes_t period_size;
err = snd_pcm_hw_params_get_period_size_min(hw_params, &period_size, &dir);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error getting period size (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
if (*bufferSize < (int) period_size) *bufferSize = (int) period_size;
err = snd_pcm_hw_params_set_period_size(handle, hw_params, *bufferSize, 0);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error setting period size (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
// If attempting to setup a duplex stream, the bufferSize parameter
// MUST be the same in both directions!
if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
sprintf( message_, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
name );
error(RtError::DEBUG_WARNING);
return FAILURE;
}
stream_.bufferSize = *bufferSize;
// Install the hardware configuration
err = snd_pcm_hw_params(handle, hw_params);
if (err < 0) {
snd_pcm_close(handle);
sprintf(message_, "RtApiAlsa: error installing hardware configuration (%s): %s.",
name, snd_strerror(err));
error(RtError::WARNING);
return FAILURE;
}
#if defined(__RTAUDIO_DEBUG__)
fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
snd_pcm_hw_params_dump(hw_params, out);
#endif
// Allocate the stream handle if necessary and then save.
snd_pcm_t **handles;
if ( stream_.apiHandle == 0 ) {
handles = (snd_pcm_t **) calloc(2, sizeof(snd_pcm_t *));
if ( handle == NULL ) {
sprintf(message_, "RtApiAlsa: error allocating handle memory (%s).",
devices_[device].name.c_str());
goto error;
}
stream_.apiHandle = (void *) handles;
handles[0] = 0;
handles[1] = 0;
}
else {
handles = (snd_pcm_t **) stream_.apiHandle;
}
handles[mode] = handle;
// Set flags for buffer conversion
stream_.doConvertBuffer[mode] = false;
if (stream_.userFormat != stream_.deviceFormat[mode])
stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
stream_.doConvertBuffer[mode] = true;
// Allocate necessary internal buffers
if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
long buffer_bytes;
if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
buffer_bytes = stream_.nUserChannels[0];
else
buffer_bytes = stream_.nUserChannels[1];
buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
if (stream_.userBuffer) free(stream_.userBuffer);
stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
if (stream_.userBuffer == NULL) {
sprintf(message_, "RtApiAlsa: error allocating user buffer memory (%s).",
devices_[device].name.c_str());
goto error;
}
}
if ( stream_.doConvertBuffer[mode] ) {
long buffer_bytes;
bool makeBuffer = true;
if ( mode == OUTPUT )
buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
else { // mode == INPUT
buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
if ( buffer_bytes < bytes_out ) makeBuffer = false;
}
}
if ( makeBuffer ) {
buffer_bytes *= *bufferSize;
if (stream_.deviceBuffer) free(stream_.deviceBuffer);
stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
if (stream_.deviceBuffer == NULL) {
sprintf(message_, "RtApiAlsa: error allocating device buffer memory (%s).",
devices_[device].name.c_str());
goto error;
}
}
}
stream_.device[mode] = device;
stream_.state = STREAM_STOPPED;
if ( stream_.mode == OUTPUT && mode == INPUT )
// We had already set up an output stream.
stream_.mode = DUPLEX;
else
stream_.mode = mode;
stream_.nBuffers = periods;
stream_.sampleRate = sampleRate;
return SUCCESS;
error:
if (handles) {
if (handles[0])
snd_pcm_close(handles[0]);
if (handles[1])
snd_pcm_close(handles[1]);
free(handles);
stream_.apiHandle = 0;
}
if (stream_.userBuffer) {
free(stream_.userBuffer);
stream_.userBuffer = 0;
}
error(RtError::WARNING);
return FAILURE;
}
void RtApiAlsa :: closeStream()
{
  // Stops (drops) any running pcm streams, joins the callback thread if one
  // is active, closes the pcm handles, and frees all stream buffers.
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // stream check.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.state == STREAM_RUNNING) {
    // snd_pcm_drop() aborts immediately, discarding any pending frames.
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
      snd_pcm_drop(handle[0]);
    if (stream_.mode == INPUT || stream_.mode == DUPLEX)
      snd_pcm_drop(handle[1]);
    stream_.state = STREAM_STOPPED;
  }

  if (stream_.callbackInfo.usingCallback) {
    // Signal the callback thread to exit, then wait for it.
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
  }

  if (handle) {
    if (handle[0]) snd_pcm_close(handle[0]);
    if (handle[1]) snd_pcm_close(handle[1]);
    free(handle);
    // FIX: previously only the local copy was zeroed ('handle = 0;'),
    // leaving stream_.apiHandle dangling at freed memory.  probeDeviceOpen()
    // tests stream_.apiHandle == 0 to decide whether to allocate, so a
    // subsequent stream open would have reused the freed array.
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
}
void RtApiAlsa :: startStream()
{
// This method calls snd_pcm_prepare if the device isn't already in that state.
verifyStream();
if (stream_.state == STREAM_RUNNING) return;
MUTEX_LOCK(&stream_.mutex);
int err;
snd_pcm_state_t state;
snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
state = snd_pcm_state(handle[0]);
if (state != SND_PCM_STATE_PREPARED) {
err = snd_pcm_prepare(handle[0]);
if (err < 0) {
sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
MUTEX_UNLOCK(&stream_.mutex);
error(RtError::DRIVER_ERROR);
}
}
}
if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
state = snd_pcm_state(handle[1]);
if (state != SND_PCM_STATE_PREPARED) {
err = snd_pcm_prepare(handle[1]);
if (err < 0) {
sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
MUTEX_UNLOCK(&stream_.mutex);
error(RtError::DRIVER_ERROR);
}
}
}
stream_.state = STREAM_RUNNING;
MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiAlsa :: stopStream()
{
verifyStream();
if (stream_.state == STREAM_STOPPED) return;