Mirror of https://github.com/moonlight-stream/moonlight-embedded.git (synced 2026-04-05 23:46:17 +00:00).
Commit: "Replace Javax Sound with direct ALSA access".
This commit is contained in:
@@ -3,7 +3,7 @@ package com.limelight.binding;
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import com.limelight.binding.audio.JavaxAudioRenderer;
|
||||
import com.limelight.binding.audio.AlsaAudioRenderer;
|
||||
import com.limelight.binding.video.OmxDecoderRenderer;
|
||||
import com.limelight.nvstream.av.audio.AudioRenderer;
|
||||
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
|
||||
@@ -40,6 +40,6 @@ public class PlatformBinding {
|
||||
* @return an audio decoder and renderer
|
||||
*/
|
||||
public static AudioRenderer getAudioRenderer() {
|
||||
return new JavaxAudioRenderer();
|
||||
return new AlsaAudioRenderer();
|
||||
}
|
||||
}
|
||||
|
||||
17
src/com/limelight/binding/audio/AlsaAudio.java
Normal file
17
src/com/limelight/binding/audio/AlsaAudio.java
Normal file
@@ -0,0 +1,17 @@
|
||||
package com.limelight.binding.audio;
|
||||
|
||||
/**
|
||||
* JNI Alsa bindings
|
||||
* @author Iwan Timmer
|
||||
*/
|
||||
public class AlsaAudio {

	// Load the native library (libnv_alsa) that provides the implementations
	// of the native methods declared below. Runs once, on first class use;
	// throws UnsatisfiedLinkError if the library cannot be found.
	static {
		System.loadLibrary("nv_alsa");
	}

	/**
	 * Opens the ALSA playback device for the given stream format.
	 * @param channelCount number of audio channels in the stream
	 * @param sampleRate sample rate of the stream in Hz
	 * @return a native-defined status code — presumably 0 on success,
	 *         an ALSA error code otherwise; confirm in the nv_alsa sources
	 */
	public static native int init(int channelCount, int sampleRate);

	/**
	 * Closes the ALSA playback device opened by {@link #init(int, int)}.
	 */
	public static native void close();

	/**
	 * Writes decoded PCM data to the ALSA playback device.
	 * @param indata array containing the PCM samples
	 * @param inoff offset into {@code indata} at which the samples start
	 * @param inlen number of bytes to write
	 * @return a native-defined status code — TODO confirm semantics in nv_alsa
	 */
	public static native int play(byte[] indata, int inoff, int inlen);

}
|
||||
26
src/com/limelight/binding/audio/AlsaAudioRenderer.java
Normal file
26
src/com/limelight/binding/audio/AlsaAudioRenderer.java
Normal file
@@ -0,0 +1,26 @@
|
||||
package com.limelight.binding.audio;
|
||||
|
||||
import com.limelight.nvstream.av.audio.AudioRenderer;
|
||||
|
||||
/**
|
||||
* Audio renderer implementation
|
||||
* @author Iwan Timmer
|
||||
*/
|
||||
public class AlsaAudioRenderer implements AudioRenderer {
|
||||
|
||||
@Override
|
||||
public void streamInitialized(int channelCount, int sampleRate) {
|
||||
AlsaAudio.init(channelCount, sampleRate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void playDecodedAudio(byte[] bytes, int offset, int length) {
|
||||
AlsaAudio.play(bytes, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void streamClosing() {
|
||||
AlsaAudio.close();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,87 +0,0 @@
|
||||
package com.limelight.binding.audio;
|
||||
|
||||
import javax.sound.sampled.AudioFormat;
|
||||
import javax.sound.sampled.AudioSystem;
|
||||
import javax.sound.sampled.DataLine;
|
||||
import javax.sound.sampled.LineUnavailableException;
|
||||
import javax.sound.sampled.SourceDataLine;
|
||||
|
||||
import com.limelight.nvstream.av.audio.AudioRenderer;
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
/**
|
||||
* Audio renderer implementation
|
||||
* @author Cameron Gutman<br>
|
||||
* Iwan Timmer
|
||||
*/
|
||||
public class JavaxAudioRenderer implements AudioRenderer {
|
||||
|
||||
private SourceDataLine soundLine;
|
||||
private int channelCount;
|
||||
private int sampleRate;
|
||||
|
||||
public static final int DEFAULT_BUFFER_SIZE = 4096;
|
||||
|
||||
/**
|
||||
* Takes some audio data and writes it out to the renderer.
|
||||
* @param pcmData the array that contains the audio data
|
||||
* @param offset the offset at which the data starts in the array
|
||||
* @param length the length of data to be rendered
|
||||
*/
|
||||
@Override
|
||||
public void playDecodedAudio(byte[] pcmData, int offset, int length) {
|
||||
soundLine.write(pcmData, offset, length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback for when the stream session is closing and the audio renderer should stop.
|
||||
*/
|
||||
@Override
|
||||
public void streamClosing() {
|
||||
if (soundLine != null) {
|
||||
soundLine.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void createSoundLine(int bufferSize) {
|
||||
AudioFormat audioFormat = new AudioFormat(sampleRate, 16, channelCount, true, ByteOrder.nativeOrder()==ByteOrder.BIG_ENDIAN);
|
||||
|
||||
DataLine.Info info;
|
||||
|
||||
if (bufferSize == DEFAULT_BUFFER_SIZE) {
|
||||
info = new DataLine.Info(SourceDataLine.class, audioFormat);
|
||||
}
|
||||
else {
|
||||
info = new DataLine.Info(SourceDataLine.class, audioFormat, bufferSize);
|
||||
}
|
||||
|
||||
try {
|
||||
soundLine = (SourceDataLine) AudioSystem.getLine(info);
|
||||
|
||||
if (bufferSize == DEFAULT_BUFFER_SIZE) {
|
||||
soundLine.open(audioFormat);
|
||||
}
|
||||
else {
|
||||
soundLine.open(audioFormat, bufferSize);
|
||||
}
|
||||
|
||||
soundLine.start();
|
||||
} catch (LineUnavailableException e) {
|
||||
soundLine = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The callback for the audio stream being initialized and starting to receive.
|
||||
* @param channelCount the number of channels in the audio
|
||||
* @param sampleRate the sample rate for the audio.
|
||||
*/
|
||||
@Override
|
||||
public void streamInitialized(int channelCount, int sampleRate) {
|
||||
this.channelCount = channelCount;
|
||||
this.sampleRate = sampleRate;
|
||||
|
||||
createSoundLine(DEFAULT_BUFFER_SIZE);
|
||||
}
|
||||
|
||||
}
|
||||
Reference in New Issue
Block a user