@devoxin
Last active December 29, 2023 21:05
Lavaplayer cross-fading, using ShortBuffers with a revised player implementation.
// DecodableAudioFrame.kt
import com.sedmelluq.discord.lavaplayer.format.AudioDataFormat
import com.sedmelluq.discord.lavaplayer.format.StandardAudioDataFormats
import com.sedmelluq.discord.lavaplayer.format.transcoder.AudioChunkDecoder
import com.sedmelluq.discord.lavaplayer.track.playback.MutableAudioFrame
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.ShortBuffer

class DecodableAudioFrame : MutableAudioFrame() {
    // Scratch buffer for decoded PCM, sized for a full chunk of Discord-compatible signed 16-bit PCM.
    private val decodedBuffer = ByteBuffer.allocateDirect(StandardAudioDataFormats.DISCORD_PCM_S16_BE.maximumChunkSize())
        .order(ByteOrder.nativeOrder())

    // Backing buffer for the encoded frame data written into this frame by AudioPlayer.provide().
    val buffer: ByteBuffer = ByteBuffer.allocate(StandardAudioDataFormats.DISCORD_OPUS.maximumChunkSize())

    private var decoderFormat: AudioDataFormat? = null
    private var decoder: AudioChunkDecoder? = null

    init {
        setBuffer(buffer)
    }

    fun destroyDecoder() {
        decoder?.close()
        decoder = null
    }

    fun decode(): ByteBuffer {
        // (Re)create the decoder if the frame's format has changed since the last decode.
        if (decoder == null || decoderFormat != format) {
            destroyDecoder()
            decoderFormat = format
            decoder = format.createDecoder()
        }

        decoder!!.decode(data, decodedBuffer.asShortBuffer())
        return decodedBuffer
    }

    /**
     * Convenience method to decode this frame into PCM samples.
     * @return The number of bytes in the decoded buffer, and the ShortBuffer containing the samples.
     */
    fun decodeToShortBuffer(): Pair<Int, ShortBuffer> {
        val decoded = decode()
        return decoded.limit() to decoded.asShortBuffer()
    }
}
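A brief usage sketch (not part of the original gist), assuming `player` is an existing lavaplayer AudioPlayer whose output format matches the decoder path above:

// Hypothetical example: provide() fills the frame's encoded buffer; decodeToShortBuffer() then
// exposes the PCM samples for mixing, as CrossfadingAudioPlayer.fadeFrames() does below.
val frame = DecodableAudioFrame()
if (player.provide(frame)) {
    val (byteCount, samples) = frame.decodeToShortBuffer()
    // `samples` can now be scaled or mixed before re-encoding.
}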
// CrossfadingAudioPlayer.kt
import com.sedmelluq.discord.lavaplayer.format.StandardAudioDataFormats
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer
import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager
import com.sedmelluq.discord.lavaplayer.player.event.AudioEventAdapter
import com.sedmelluq.discord.lavaplayer.tools.FriendlyException
import com.sedmelluq.discord.lavaplayer.track.*
import net.dv8tion.jda.api.audio.AudioSendHandler
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.util.concurrent.TimeUnit
import kotlin.math.min

class CrossfadingAudioPlayer(
    apm: AudioPlayerManager,
    private val p1: AudioPlayer,
    private val p2: AudioPlayer
) : AudioSendHandler, AudioEventAdapter() {
    companion object {
        // Cross-fading begins this long before the end of the track (i.e. at DURATION - CROSSFADE_BEGIN).
        val CROSSFADE_BEGIN = TimeUnit.SECONDS.toMillis(6)
        // Preloading (buffering) of the next track begins this long before the end of the track.
        val CROSSFADE_PRELOAD = CROSSFADE_BEGIN + TimeUnit.SECONDS.toMillis(2)
    }
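    // Timeline example: for a 3:00 (180 000 ms) track with the values above, the next track is
    // buffered (started paused) at 2:52 (180 000 - 8 000 ms), and the fade itself runs from
    // 2:54 (180 000 - 6 000 ms) until the track ends.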
    val queue = mutableListOf<AudioTrack>()

    private val primaryFrame = DecodableAudioFrame()
    private val secondaryFrame = DecodableAudioFrame()

    // TODO: Perhaps we should use apm.configuration.outputFormat here.
    private val encoder = StandardAudioDataFormats.DISCORD_OPUS.createEncoder(apm.configuration)

    var crossfading = false
        private set

    private var swapped = false

    // This should always return the player whose track is considered the primary one.
    // For example, when cross-fading begins, the player providing frames for the new track is
    // considered primary, as the old track is effectively finished.
    val primaryPlayer: AudioPlayer
        get() = when {
            crossfading && !swapped -> p2
            crossfading && swapped -> p1
            !crossfading && !swapped -> p1
            /* !crossfading && swapped */ else -> p2
        }

    private val secondaryPlayer: AudioPlayer
        get() = if (p1 == primaryPlayer) p2 else p1 // Simply whichever player isn't primary.
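    // For reference, primaryPlayer resolves as follows:
    //   swapped = false: p1 normally, p2 while cross-fading
    //   swapped = true:  p2 normally, p1 while cross-fading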
    private var endPoint = 0L

    init {
        p1.addListener(this)
        p2.addListener(this)
    }

    @Synchronized
    fun enqueue(track: AudioTrack) {
        queue.add(track)

        primaryPlayer.let {
            if (it.playingTrack == null) {
                it.startTrack(queue.removeFirst(), true)
            }
        }
    }
    private fun setupAndGetBufferHandlerForTrack(track: AudioTrack, playBeginTimecode: Long): TrackMarkerHandler {
        val currentPlayer = primaryPlayer

        val endMarker = TrackMarkerHandler {
            if (it == TrackMarkerHandler.MarkerState.REACHED && track.duration - track.position > 1000) {
                currentPlayer.stopTrack()
            }
        }

        val playMarker = TrackMarkerHandler {
            if (it == TrackMarkerHandler.MarkerState.REACHED && secondaryPlayer.playingTrack != null) {
                secondaryPlayer.isPaused = false
                crossfading = true
                track.setMarker(TrackMarker(playBeginTimecode + CROSSFADE_BEGIN, endMarker))
            }
        }

        return TrackMarkerHandler {
            if (it == TrackMarkerHandler.MarkerState.REACHED && queue.isNotEmpty()) {
                val prepareTrack = queue.removeFirst()
                secondaryPlayer.playTrack(prepareTrack)
                track.setMarker(TrackMarker(playBeginTimecode, playMarker))
            }
        }
    }
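    // The handlers above chain three markers on the current track:
    //   1. The returned "buffer" marker (set at DURATION - CROSSFADE_PRELOAD by setupCrossfading) starts
    //      the next queued track on the secondary player, which is paused at that point.
    //   2. The "play" marker (at playBeginTimecode) unpauses the secondary player and flags crossfading,
    //      so provide20MsAudio() begins mixing the two players' frames.
    //   3. The "end" marker (at playBeginTimecode + CROSSFADE_BEGIN) stops the old track, unless it is
    //      already within a second of ending naturally.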
    private fun setupCrossfading(track: AudioTrack) { // perhaps allow a trigger point variable here?
        println("[crossfade] initialising for track ${track.info.title}")
        secondaryPlayer.isPaused = true
        endPoint = track.duration

        val bufferHandler = setupAndGetBufferHandlerForTrack(track, track.duration - CROSSFADE_BEGIN)
        track.setMarker(TrackMarker(track.duration - CROSSFADE_PRELOAD, bufferHandler))
    }

    // Method to begin crossfading immediately. Only to be called when needed, i.e. when skipping.
    fun startCrossfading() {
        if (crossfading) {
            return
        }

        val loadTime = 20 // ms
        val currentTrack = primaryPlayer.playingTrack
        val bufferTime = CROSSFADE_PRELOAD - CROSSFADE_BEGIN

        endPoint = currentTrack.position + CROSSFADE_PRELOAD + loadTime
        val bufferHandler = setupAndGetBufferHandlerForTrack(currentTrack, currentTrack.position + bufferTime + loadTime)
        currentTrack.setMarker(TrackMarker(currentTrack.position + loadTime, bufferHandler))
    }
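    // Example (hypothetical): a skip command could call startCrossfading() instead of stopping the
    // track outright, so the current track fades into the next one rather than cutting off, e.g.
    //   fun skip() = startCrossfading()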
    override fun onTrackStart(player: AudioPlayer, track: AudioTrack) {
        if (player == primaryPlayer) {
            // Triggers for the first track in the queue, or whenever a track starts on the primary
            // player without a crossfade.
            setupCrossfading(track)
        }
    }

    override fun onTrackException(player: AudioPlayer, track: AudioTrack, exception: FriendlyException) = exception.printStackTrace()

    override fun onTrackStuck(player: AudioPlayer, track: AudioTrack, thresholdMs: Long) {
        println("${track.info.title} stuck for ${thresholdMs / 1000} seconds. Skipping...")
        player.stopTrack()
    }

    override fun onTrackEnd(player: AudioPlayer, track: AudioTrack, endReason: AudioTrackEndReason) {
        if (!crossfading) {
            // We're not crossfading, so we can just tell the same player to play again.
            // In this situation, [player] should always be primaryPlayer.
            queue.removeFirstOrNull()?.let(player::playTrack)
            return
        }

        // When crossfading = true, the players are swapped so that primary becomes secondary, and
        // secondary becomes primary. This means the player responsible for the track that's stopping
        // *should* always be secondaryPlayer.
        if (player == secondaryPlayer) {
            swapped = !swapped
            crossfading = false
        }

        // primaryPlayer should previously have been secondaryPlayer, but as we've swapped them (see above),
        // our secondaryPlayer is now our primaryPlayer as it's playing the new track.
        // This new track won't have had crossfade applied to it yet, so we do that here.
        primaryPlayer.playingTrack?.let(::setupCrossfading)
    }
    private var primaryProvided = false
    private var secondaryProvided = false

    override fun canProvide(): Boolean {
        // Called by JDA roughly every 20 ms; pull a frame from each player up front so that
        // provide20MsAudio() has both frames available when cross-fading.
        primaryProvided = primaryPlayer.provide(primaryFrame)
        secondaryProvided = secondaryPlayer.takeIf { crossfading }?.provide(secondaryFrame) == true
        return primaryProvided
    }

    override fun provide20MsAudio(): ByteBuffer {
        return if (crossfading && secondaryProvided) fadeFrames(secondaryFrame, primaryFrame) else primaryFrame.buffer.flip()
    }
    /**
     * Merges the frames from two audio players.
     *
     * @param endingTrackFrame The audio frame from the player that's playing the old track.
     *                         This frame will receive a 'fade out' effect.
     * @param newTrackFrame The audio frame from the player that's playing the new track.
     *                      This frame will receive a 'fade in' effect.
     * @return The merged, faded, Opus-encoded samples.
     */
    private fun fadeFrames(endingTrackFrame: DecodableAudioFrame, newTrackFrame: DecodableAudioFrame): ByteBuffer {
        val track = secondaryPlayer.playingTrack
        // Remaining fade time as a fraction of the fade duration: 1.0 when the fade starts, 0.0 at the end point.
        val dist = ((endPoint - track.position) / CROSSFADE_BEGIN.toDouble()).coerceIn(0.0, 1.0)

        val (outByteCount, outShorts) = endingTrackFrame.decodeToShortBuffer()
        val (inByteCount, inShorts) = newTrackFrame.decodeToShortBuffer()

        val shorts = ByteBuffer.allocateDirect(min(outByteCount, inByteCount))
            .order(ByteOrder.nativeOrder())
            .asShortBuffer()

        for (i in 0 until shorts.limit()) {
            val inSample = (inShorts[i] * (1.0 - dist))
            val outSample = (outShorts[i] * dist)
            // Sum the weighted samples and clamp to the signed 16-bit range.
            val modifiedSample = (inSample + outSample).toInt().coerceIn(-32768, 32767).toShort()
            shorts.put(i, modifiedSample)
        }

        return ByteBuffer.wrap(encoder.encode(shorts))
    }
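    // Worked example of the gain curve above: with CROSSFADE_BEGIN = 6000 ms, when 3000 ms remain
    // before endPoint, dist = 3000 / 6000 = 0.5, so the ending track plays at 50% amplitude and the
    // new track at 50%. As dist falls to 0, the old track is silenced and the new track reaches full volume.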
    override fun isOpus() = true
}
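A minimal wiring sketch (not part of the original gist) showing how the handler might be attached to JDA. The helper name setUpCrossfader, the trackUrl parameter, and the Guild instance are placeholders; voice-channel connection handling is omitted.

// Hypothetical setup: one AudioPlayerManager, two players, and the crossfader registered as the
// guild's send handler.
import com.sedmelluq.discord.lavaplayer.player.AudioLoadResultHandler
import com.sedmelluq.discord.lavaplayer.player.DefaultAudioPlayerManager
import com.sedmelluq.discord.lavaplayer.source.AudioSourceManagers
import com.sedmelluq.discord.lavaplayer.tools.FriendlyException
import com.sedmelluq.discord.lavaplayer.track.AudioPlaylist
import com.sedmelluq.discord.lavaplayer.track.AudioTrack
import net.dv8tion.jda.api.entities.Guild

fun setUpCrossfader(guild: Guild, trackUrl: String) {
    val apm = DefaultAudioPlayerManager().also { AudioSourceManagers.registerRemoteSources(it) }
    val crossfader = CrossfadingAudioPlayer(apm, apm.createPlayer(), apm.createPlayer())
    guild.audioManager.sendingHandler = crossfader

    apm.loadItem(trackUrl, object : AudioLoadResultHandler {
        override fun trackLoaded(track: AudioTrack) = crossfader.enqueue(track)
        override fun playlistLoaded(playlist: AudioPlaylist) = playlist.tracks.forEach(crossfader::enqueue)
        override fun noMatches() = println("No matches for $trackUrl")
        override fun loadFailed(exception: FriendlyException) = exception.printStackTrace()
    })
}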