Skip to content

Commit

Permalink
Allowed time-stretching filters by keeping per-channel timeStamps.
Browse files Browse the repository at this point in the history
  • Loading branch information
psobot committed Aug 13, 2013
1 parent 25371b5 commit 3bcb6f1
Showing 1 changed file with 12 additions and 4 deletions.
16 changes: 12 additions & 4 deletions TheAmazingAudioEngine/AEAudioController.m
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,10 @@ + (NSError*)audioControllerErrorWithMessage:(NSString*)message OSStatus:(OSStatu
BOOL muted;
AudioStreamBasicDescription audioDescription;
callback_table_t callbacks;
AudioTimeStamp timeStamp;

BOOL setRenderNotification;

AEAudioController *audioController;
ABOutputPort *audiobusOutputPort;
AEFloatConverter *audiobusFloatConverter;
Expand Down Expand Up @@ -437,7 +440,8 @@ static OSStatus channelAudioProducer(void *userInfo, AudioBufferList *audio, UIn
memset(audio->mBuffers[i].mData, 0, audio->mBuffers[i].mDataByteSize);
}

status = callback(channelObj, channel->audioController, &arg->inTimeStamp, *frames, audio);
status = callback(channelObj, channel->audioController, &channel->timeStamp, *frames, audio);
channel->timeStamp.mSampleTime += *frames;

} else if ( channel->type == kChannelTypeGroup ) {
AEChannelGroupRef group = (AEChannelGroupRef)channel->ptr;
Expand All @@ -449,10 +453,10 @@ static OSStatus channelAudioProducer(void *userInfo, AudioBufferList *audio, UIn
if ( group->level_monitor_data.monitoringEnabled ) {
performLevelMonitoring(&group->level_monitor_data, audio, *frames);
}
}

// Advance the sample time, to make sure we continue to render if we're called again with the same arguments
arg->inTimeStamp.mSampleTime += *frames;
// Advance the sample time, to make sure we continue to render if we're called again with the same arguments
arg->inTimeStamp.mSampleTime += *frames;
}

return status;
}
Expand All @@ -466,6 +470,9 @@ static OSStatus renderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioAct
}

AudioTimeStamp timestamp = *inTimeStamp;
if (channel->timeStamp.mFlags == 0) {
channel->timeStamp = *inTimeStamp;
}

if ( channel->audiobusOutputPort && ABOutputPortGetConnectedPortAttributes(channel->audiobusOutputPort) & ABInputPortAttributePlaysLiveAudio ) {
// We're sending via the output port, and the receiver plays live - offset the timestamp by the reported latency
Expand Down Expand Up @@ -974,6 +981,7 @@ - (void)addChannels:(NSArray*)channels toChannelGroup:(AEChannelGroupRef)group {
channelElement->pan = [channel respondsToSelector:@selector(pan)] ? channel.pan : 0.0;
channelElement->muted = [channel respondsToSelector:@selector(channelIsMuted)] ? channel.channelIsMuted : NO;
channelElement->audioDescription = [channel respondsToSelector:@selector(audioDescription)] && channel.audioDescription.mSampleRate ? channel.audioDescription : _audioDescription;
memset(&channelElement->timeStamp, 0, sizeof(channelElement->timeStamp));
channelElement->audioController = self;

group->channels[group->channelCount++] = channelElement;
Expand Down

0 comments on commit 3bcb6f1

Please sign in to comment.