2 changes: 0 additions & 2 deletions sdk/objc/native/src/audio/audio_source_sink.mm
@@ -13,7 +13,6 @@
                               UInt32 bus_number,
                               UInt32 num_frames,
                               AudioBufferList* io_data) {
-  RTC_LOG(LS_VERBOSE) << __FUNCTION__;
   [sink_ onLocalAudioFrameWithFlags:flags timeStamp:time_stamp busNumber:bus_number numFrames:num_frames ioData:io_data];
 }
 
@@ -22,7 +21,6 @@
                               UInt32 bus_number,
                               UInt32 num_frames,
                               AudioBufferList* io_data) {
-  RTC_LOG(LS_VERBOSE) << __FUNCTION__;
   [sink_ onRemoteAudioFrameWithFlags:flags timeStamp:time_stamp busNumber:bus_number numFrames:num_frames ioData:io_data];
 }
 }
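
Both removed RTC_LOG(LS_VERBOSE) statements fired on every audio render callback, i.e. on the real-time I/O thread roughly once per 10 ms buffer, so dropping them removes per-frame logging overhead from the hot path. For context, below is a sketch of the kind of sink-side handler these callbacks forward into; the parameter types and the idea that the handler must stay allocation- and log-free are assumptions based on the selector used in this file, not part of this change.

// Hypothetical sink-side handler (assumed shape, mirroring the selector above).
// It runs on the real-time audio I/O thread, so it should avoid locks, heap
// allocation and logging, which is also why the verbose logs were removed.
- (void)onRemoteAudioFrameWithFlags:(AudioUnitRenderActionFlags *)flags
                          timeStamp:(const AudioTimeStamp *)timeStamp
                          busNumber:(UInt32)busNumber
                          numFrames:(UInt32)numFrames
                             ioData:(AudioBufferList *)ioData {
  for (UInt32 i = 0; i < ioData->mNumberBuffers; ++i) {
    const AudioBuffer buffer = ioData->mBuffers[i];
    // e.g. copy `numFrames` frames of PCM into a preallocated ring buffer.
    (void)buffer;
  }
}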
110 changes: 60 additions & 50 deletions sdk/objc/native/src/audio/voice_processing_audio_unit.mm
@@ -111,17 +111,20 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
     return false;
   }
 
-  // Enable input on the input scope of the input element.
-  UInt32 enable_input = 1;
-  result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
-                                kAudioUnitScope_Input, kInputBus, &enable_input,
-                                sizeof(enable_input));
-  if (result != noErr) {
-    DisposeAudioUnit();
-    RTCLogError(@"Failed to enable input on input scope of input element. "
-                 "Error=%ld.",
-                (long)result);
-    return false;
+  RTCAudioSessionConfiguration* webRTCConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+  if (webRTCConfiguration.category != AVAudioSessionCategoryPlayback) {
+    // Enable input on the input scope of the input element.
+    UInt32 enable_input = 1;
+    result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+                                  kAudioUnitScope_Input, kInputBus, &enable_input,
+                                  sizeof(enable_input));
+    if (result != noErr) {
+      DisposeAudioUnit();
+      RTCLogError(@"Failed to enable input on input scope of input element. "
+                  "Error=%ld.",
+                  (long)result);
+      return false;
+    }
   }
 
   // Enable output on the output scope of the output element.
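
The new guard reads the shared WebRTC audio-session configuration and skips enabling the input bus entirely when the app has configured a playback-only category. A minimal sketch of a sanity check for that path follows; it is not part of the PR and only uses the standard AudioUnitGetProperty call plus the input element (1, the same value as kInputBus in this file).

#import <AudioToolbox/AudioToolbox.h>

// Sketch only (not part of this PR): verify that the input bus stays disabled
// on a VoiceProcessingIO unit when the playback-only branch skipped EnableIO.
static bool InputBusIsEnabled(AudioUnit vpio_unit) {
  UInt32 enabled = 0;
  UInt32 size = sizeof(enabled);
  const AudioUnitElement input_bus = 1;  // same element as kInputBus above
  OSStatus result = AudioUnitGetProperty(vpio_unit, kAudioOutputUnitProperty_EnableIO,
                                         kAudioUnitScope_Input, input_bus,
                                         &enabled, &size);
  return result == noErr && enabled != 0;
}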
@@ -153,36 +156,40 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
     return false;
   }
 
-  // Disable AU buffer allocation for the recorder, we allocate our own.
-  // TODO(henrika): not sure that it actually saves resource to make this call.
-  UInt32 flag = 0;
-  result = AudioUnitSetProperty(
-      vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
-      kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
-  if (result != noErr) {
-    DisposeAudioUnit();
-    RTCLogError(@"Failed to disable buffer allocation on the input bus. "
-                 "Error=%ld.",
-                (long)result);
-    return false;
+  if (webRTCConfiguration.category != AVAudioSessionCategoryPlayback) {
+    // Disable AU buffer allocation for the recorder, we allocate our own.
+    // TODO(henrika): not sure that it actually saves resource to make this call.
+    UInt32 flag = 0;
+    result = AudioUnitSetProperty(
+        vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
+        kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
+    if (result != noErr) {
+      DisposeAudioUnit();
+      RTCLogError(@"Failed to disable buffer allocation on the input bus. "
+                  "Error=%ld.",
+                  (long)result);
+      return false;
+    }
   }
 
-  // Specify the callback to be called by the I/O thread to us when input audio
-  // is available. The recorded samples can then be obtained by calling the
-  // AudioUnitRender() method.
-  AURenderCallbackStruct input_callback;
-  input_callback.inputProc = OnDeliverRecordedData;
-  input_callback.inputProcRefCon = this;
-  result = AudioUnitSetProperty(vpio_unit_,
-                                kAudioOutputUnitProperty_SetInputCallback,
-                                kAudioUnitScope_Global, kInputBus,
-                                &input_callback, sizeof(input_callback));
-  if (result != noErr) {
-    DisposeAudioUnit();
-    RTCLogError(@"Failed to specify the input callback on the input bus. "
-                 "Error=%ld.",
-                (long)result);
-    return false;
+  if (webRTCConfiguration.category != AVAudioSessionCategoryPlayback) {
+    // Specify the callback to be called by the I/O thread to us when input audio
+    // is available. The recorded samples can then be obtained by calling the
+    // AudioUnitRender() method.
+    AURenderCallbackStruct input_callback;
+    input_callback.inputProc = OnDeliverRecordedData;
+    input_callback.inputProcRefCon = this;
+    result = AudioUnitSetProperty(vpio_unit_,
+                                  kAudioOutputUnitProperty_SetInputCallback,
+                                  kAudioUnitScope_Global, kInputBus,
+                                  &input_callback, sizeof(input_callback));
+    if (result != noErr) {
+      DisposeAudioUnit();
+      RTCLogError(@"Failed to specify the input callback on the input bus. "
+                  "Error=%ld.",
+                  (long)result);
+      return false;
+    }
   }
 
   state_ = kUninitialized;
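
With a playback-only category, no input render callback is registered, so the recording path never runs. For reference, the registered proc follows the standard AURenderCallback shape and, as the comment above says, pulls the recorded samples itself via AudioUnitRender() on the input bus. The sketch below is illustrative only; it is not the actual OnDeliverRecordedData implementation, and the buffer handling is deliberately simplified.

#import <AudioToolbox/AudioToolbox.h>

// Illustrative input callback (assumed, simplified): pull recorded samples
// from the input bus into a caller-owned buffer, since AU buffer allocation
// was disabled above.
static OSStatus ExampleInputCallback(void* in_ref_con,
                                     AudioUnitRenderActionFlags* flags,
                                     const AudioTimeStamp* time_stamp,
                                     UInt32 bus_number,
                                     UInt32 num_frames,
                                     AudioBufferList* io_data) {
  AudioUnit vpio_unit = static_cast<AudioUnit>(in_ref_con);  // hypothetical refCon
  static int16_t scratch[4096];                              // preallocated mono PCM
  if (num_frames > 4096) {
    return kAudioUnitErr_TooManyFramesToProcess;
  }
  AudioBufferList buffers;
  buffers.mNumberBuffers = 1;
  buffers.mBuffers[0].mNumberChannels = 1;
  buffers.mBuffers[0].mDataByteSize = static_cast<UInt32>(num_frames * sizeof(int16_t));
  buffers.mBuffers[0].mData = scratch;
  return AudioUnitRender(vpio_unit, flags, time_stamp, bus_number, num_frames,
                         &buffers);
}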
@@ -204,24 +211,27 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
   LogStreamDescription(format);
 #endif
 
-  // Set the format on the output scope of the input element/bus.
-  result =
-      AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
-                           kAudioUnitScope_Output, kInputBus, &format, size);
-  if (result != noErr) {
-    RTCLogError(@"Failed to set format on output scope of input bus. "
-                 "Error=%ld.",
-                (long)result);
-    return false;
+  RTCAudioSessionConfiguration* webRTCConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+  if (webRTCConfiguration.category != AVAudioSessionCategoryPlayback) {
+    // Set the format on the output scope of the input element/bus.
+    result =
+        AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+                             kAudioUnitScope_Output, kInputBus, &format, size);
+    if (result != noErr) {
+      RTCLogError(@"Failed to set format on output scope of input bus. "
+                  "Error=%ld.",
+                  (long)result);
+      return false;
+    }
   }
 
   // Set the format on the input scope of the output element/bus.
   result =
       AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
-                           kAudioUnitScope_Input, kOutputBus, &format, size);
+                          kAudioUnitScope_Input, kOutputBus, &format, size);
   if (result != noErr) {
     RTCLogError(@"Failed to set format on input scope of output bus. "
-                 "Error=%ld.",
+                "Error=%ld.",
                 (long)result);
     return false;
   }
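
All of the new guards compare against AVAudioSessionCategoryPlayback, so they only take effect when the application overrides the default WebRTC audio-session configuration, which normally uses AVAudioSessionCategoryPlayAndRecord. A minimal sketch of such an override is shown below; the header import path, the helper name, and the exact place to call it before the audio unit is created are assumptions, not part of this PR.

#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCAudioSessionConfiguration.h>  // header path assumed

// Switch the shared WebRTC audio-session configuration to playback-only
// before the voice-processing audio unit is built, so the guarded input
// setup above is skipped and no microphone access is requested.
static void UsePlaybackOnlyAudio(void) {
  RTCAudioSessionConfiguration *config =
      [RTCAudioSessionConfiguration webRTCConfiguration];
  config.category = AVAudioSessionCategoryPlayback;
  [RTCAudioSessionConfiguration setWebRTCConfiguration:config];
}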