fix(client): add fade in/out in audio stream start/end; play al source on initial state
Apehum committed Jun 8, 2024
1 parent 748d527 commit 3fe1c4c
Showing 7 changed files with 76 additions and 16 deletions.
@@ -164,6 +164,13 @@ public interface AlSource extends DeviceSource {
*/
int getFormat();

/**
* Gets the number of channels.
*
* @return The number of channels.
*/
int getChannels();

enum State {
INITIAL(4113),
PLAYING(4114),
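
This hunk adds a channel-count accessor next to getFormat(), and the surrounding State enum shows that its raw values are the standard OpenAL source-state constants. A minimal sketch of that correspondence, assuming lwjgl-openal on the classpath (illustration only, not part of this commit):

import org.lwjgl.openal.AL11;

final class SourceStateConstants {
    public static void main(String[] args) {
        // LWJGL exposes the same raw values the enum wraps.
        System.out.println(AL11.AL_INITIAL); // 4113, matches State.INITIAL above
        System.out.println(AL11.AL_PLAYING); // 4114, matches State.PLAYING above
    }
}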
@@ -85,7 +85,7 @@ interface ClientConfig {

val jitterPacketDelay: IntConfigEntry

val alNumberBuffers: IntConfigEntry
val alPlaybackBuffers: IntConfigEntry
}

interface Overlay {
32 changes: 32 additions & 0 deletions api/common/src/main/java/su/plo/voice/api/util/AudioUtil.java
@@ -364,6 +364,38 @@ public static float gainCoefficient(int sampleRate, float time) {
return (float) Math.exp(-1.0f / (sampleRate * time));
}

public static short[] fadeIn(short[] samples, int channels) {
int fadeInDuration = samples.length;

short[] processed = new short[samples.length];

for (int index = 0; index < samples.length; index += channels) {
float fade = Math.min(index / (float) fadeInDuration, 1.0f);

for (int channel = 0; channel < channels; channel++) {
processed[index + channel] = (short) (samples[index + channel] * fade);
}
}

return processed;
}

public static short[] fadeOut(short[] samples, int channels) {
int fadeOutDuration = samples.length;

short[] processed = new short[samples.length];

for (int index = 0; index < samples.length; index += channels) {
float fade = Math.max((fadeOutDuration - index) / (float) fadeOutDuration, 0.0f);

for (int channel = 0; channel < channels; channel++) {
processed[index + channel] = (short) (samples[index + channel] * fade);
}
}

return processed;
}

private AudioUtil() {
}
}
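
Both helpers apply a linear ramp across the whole buffer, stepping by the channel count so every sample of an interleaved frame is scaled by the same gain. A small usage sketch with a 4-sample mono buffer (illustration only; the helpers are the ones added above):

import su.plo.voice.api.util.AudioUtil;

final class FadeExample {
    public static void main(String[] args) {
        short[] mono = {1000, 1000, 1000, 1000};

        // Gains are index / length: 0.0, 0.25, 0.5, 0.75
        short[] fadedIn = AudioUtil.fadeIn(mono, 1);   // {0, 250, 500, 750}

        // Gains are (length - index) / length: 1.0, 0.75, 0.5, 0.25
        short[] fadedOut = AudioUtil.fadeOut(mono, 1); // {1000, 750, 500, 250}

        System.out.println(java.util.Arrays.toString(fadedIn));
        System.out.println(java.util.Arrays.toString(fadedOut));
    }
}

For stereo buffers the gain still advances by sample index rather than by frame, but both channels of a frame share one gain value, so the ramp stays smooth and channel-balanced.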
@@ -165,6 +165,15 @@ public int getFormat() {
return format;
}

@Override
public int getChannels() {
if (format == AL11.AL_FORMAT_STEREO16) {
return 2;
} else {
return 1;
}
}

private boolean callParamEvent(int param, Object value) {
AlSourceUpdateParamEvent event = new AlSourceUpdateParamEvent(this, param, value);
return client.getEventBus().fire(event);
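
The implementation only needs to distinguish AL_FORMAT_STEREO16; any other format is reported as mono. For reference, a broader mapping over the four core OpenAL buffer formats could look like the sketch below (an illustration, not a proposed change; the constants are LWJGL's standard AL10/AL11 ones):

import org.lwjgl.openal.AL11;

final class ChannelCount {
    static int channelsOf(int format) {
        if (format == AL11.AL_FORMAT_STEREO8 || format == AL11.AL_FORMAT_STEREO16) {
            return 2; // interleaved stereo
        }
        return 1; // AL_FORMAT_MONO8, AL_FORMAT_MONO16, or anything unknown
    }
}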
@@ -404,7 +404,7 @@ public static class Advanced implements ClientConfig.Advanced {
private IntConfigEntry jitterPacketDelay = new IntConfigEntry(3, 0, 16);

@ConfigField
private IntConfigEntry alNumberBuffers = new IntConfigEntry(4, 1, 32);
private IntConfigEntry alPlaybackBuffers = new IntConfigEntry(5, 1, 32);
}

@Config
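
The renamed entry also bumps the default from 4 to 5 playback buffers (allowed range 1–32). As a rough back-of-envelope, the queue depth in milliseconds is the buffer count times the frame length; assuming 20 ms frames (a common Opus choice, not something stated in this diff), the default allows roughly 100 ms of queued audio:

final class PlaybackQueueDepth {
    public static void main(String[] args) {
        int playbackBuffers = 5;   // default of alPlaybackBuffers in this commit
        int frameDurationMs = 20;  // assumed frame length, see note above
        System.out.println(playbackBuffers * frameDurationMs + " ms of queued audio");
    }
}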
@@ -47,8 +47,8 @@ class StreamAlSource private constructor(
private var lastBufferTime: Long = 0

init {
this.numBuffers = if (numBuffers == 0) client.config.advanced.alNumberBuffers.value() else numBuffers
emptyBuffer = ByteArray(device.frameSize)
this.numBuffers = if (numBuffers == 0) client.config.advanced.alPlaybackBuffers.value() else numBuffers
this.emptyBuffer = ByteArray(device.frameSize)
}

override fun play() {
@@ -237,6 +237,7 @@ class StreamAlSource private constructor(
break
}
}

val state = state
if (state == AlSource.State.STOPPED && queueSize == 0 && !emptyFilled.get()) {
removeProcessedBuffers()
@@ -246,12 +247,15 @@
fillQueue()

client.eventBus.fire(AlStreamSourceStoppedEvent(alSource))
play()
continue
} else if (state != AlSource.State.PLAYING && state != AlSource.State.PAUSED && queueSize > 0) {
AL11.alSourcePlay(pointer)
AlUtil.checkErrors("Source play")
} else if (state != AlSource.State.PLAYING && state != AlSource.State.PAUSED && queueSize > 0) {
continue
} else if (state == AlSource.State.INITIAL) {
AL11.alSourcePlay(pointer)
AlUtil.checkErrors("Source play")
continue
}

if (closeTimeoutMs > 0L && timeSupplier.currentTimeMillis - lastBufferTime > closeTimeoutMs) {
@@ -265,9 +269,6 @@
}
}

// todo: this creates playback delay
// if I understand this correctly, AL plays only last buffer in the queue when new buffer is pushed
// so it probably better to use one buffer to decrease delays?
private fun queueWithEmptyBuffers() {
for (i in 0 until numBuffers) {
write(emptyBuffer)
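
In line with the commit title, the reworked branching starts playback when OpenAL still reports the source in its INITIAL state, rather than only reacting to the not-playing/not-paused case. A standalone sketch of that check, assuming a raw LWJGL source handle and an active AL context (not the project's StreamAlSource code):

import org.lwjgl.openal.AL11;

final class PlayOnInitial {
    static void playIfInitial(int sourcePointer) {
        int state = AL11.alGetSourcei(sourcePointer, AL11.AL_SOURCE_STATE);
        if (state == AL11.AL_INITIAL) { // 4113, AlSource.State.INITIAL
            AL11.alSourcePlay(sourcePointer);
            // the real code follows this with AlUtil.checkErrors("Source play")
        }
    }
}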
@@ -70,6 +70,7 @@ abstract class BaseClientAudioSource<T>(
private var decoder: AudioDecoder? = null

private var endRequest: Job? = null
private var endSequenceNumber: Long = -1L

override var closeTimeoutMs: Long = 500
set(value) {
@@ -148,6 +149,7 @@
decoder = createDecoder(sourceInfo, voiceInfo, it)
}
lastSequenceNumber = -1L
endSequenceNumber = -1L
BaseVoice.DEBUG_LOGGER.log("Update decoder for {}", sourceInfo)
}

@@ -180,6 +182,7 @@

buffer.offer(packet)
endRequest?.cancel()
endSequenceNumber = packet.sequenceNumber

// because SourceAudioEndPacket can be received BEFORE the end of the stream,
// we need to wait for some time to actually end the stream
@@ -248,7 +251,7 @@
while (isActive) {
val wrappedPacket = buffer.poll()
if (wrappedPacket == null) {
delay(5L)
delay(0L)
continue
}

@@ -345,15 +348,17 @@
if (packetsToCompensate in 1..4) {
BaseVoice.DEBUG_LOGGER.warn("Compensate {} lost packets", packetsToCompensate)
for (i in 0 until packetsToCompensate) {
val compensatedSequenceNumber = lastSequenceNumber + i + 1

if (decoder != null && decoder is AudioDecoderPlc && !sourceInfo.isStereo) {
try {
write((decoder as AudioDecoderPlc).decodePLC())
write((decoder as AudioDecoderPlc).decodePLC(), compensatedSequenceNumber)
} catch (e: CodecException) {
LOGGER.warn("Failed to decode source audio", e)
return
}
} else {
write(ShortArray(0))
write(ShortArray(0), compensatedSequenceNumber)
}
}
}
@@ -365,9 +370,9 @@
val decoded = decoder?.decode(decrypted) ?: AudioUtil.bytesToShorts(decrypted)

if (sourceInfo.isStereo && config.advanced.stereoSourcesToMono.value()) {
write(AudioUtil.convertToMonoShorts(decoded))
write(AudioUtil.convertToMonoShorts(decoded), packet.sequenceNumber)
} else {
write(decoded)
write(decoded, packet.sequenceNumber)
}
} catch (e: EncryptionException) {
BaseVoice.DEBUG_LOGGER.warn("Failed to decrypt source audio", e)
@@ -457,8 +462,14 @@
)
}

private fun write(samples: ShortArray) {
source.write(samples)
private fun write(samples: ShortArray, sequenceNumber: Long) {
if (!activated.get()) {
source.write(AudioUtil.fadeIn(samples, source.channels))
} else if (sequenceNumber + 1 == endSequenceNumber) {
source.write(AudioUtil.fadeOut(samples, source.channels))
} else {
source.write(samples)
}
}

private suspend fun updateSource(volume: Float, position: Vec3) {
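
The new write(samples, sequenceNumber) overload is where the AudioUtil fades are applied: buffers written before the source is activated get a fade-in, and the buffer whose sequence number immediately precedes the recorded end packet gets a fade-out. A Java rendering of that decision with hypothetical parameter names (the actual logic lives in the Kotlin class above):

import su.plo.voice.api.util.AudioUtil;

final class EdgeFades {
    static short[] apply(short[] samples,
                         int channels,
                         boolean activated,
                         long sequenceNumber,
                         long endSequenceNumber) {
        if (!activated) {
            return AudioUtil.fadeIn(samples, channels);  // stream is just starting
        }
        if (sequenceNumber + 1 == endSequenceNumber) {
            return AudioUtil.fadeOut(samples, channels); // last buffer before the end packet
        }
        return samples;
    }
}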
