mirror of
https://github.com/moonlight-stream/moonlight-embedded.git
synced 2026-02-16 10:30:47 +00:00
Replace Javax Sound with direct alsa access
This commit is contained in:
@@ -9,6 +9,7 @@
|
||||
|
||||
<property name="libs.opus.dir" location="${basedir}/jni/nv_opus_dec/"/>
|
||||
<property name="libs.omx.dir" location="${basedir}/jni/nv_omx_dec/"/>
|
||||
<property name="libs.alsa.dir" location="${basedir}/jni/nv_alsa/"/>
|
||||
|
||||
<property name="lib.dir" location="${basedir}/lib"/>
|
||||
<property name="build.dir" location="${basedir}/build"/>
|
||||
@@ -38,6 +39,7 @@
|
||||
<target name="compile-native">
    <!-- Build the native JNI libraries. failonerror="true" aborts the Ant build
         when a native build fails; Ant's <exec> defaults to failonerror="false",
         which would silently package a jar with missing/stale .so files. -->
    <exec executable="./build.sh" dir="${libs.omx.dir}" failonerror="true"/>
    <exec executable="./build.sh" dir="${libs.opus.dir}" failonerror="true"/>
    <exec executable="./build.sh" dir="${libs.alsa.dir}" failonerror="true"/>
</target>
|
||||
|
||||
<target name="compile-java" depends="init">
|
||||
@@ -81,6 +83,7 @@
|
||||
<binlib>
|
||||
<fileset dir="${libs.opus.dir}" includes="*.so"/>
|
||||
<fileset dir="${libs.omx.dir}" includes="*.so"/>
|
||||
<fileset dir="${libs.alsa.dir}" includes="*.so"/>
|
||||
</binlib>
|
||||
</one-jar>
|
||||
</target>
|
||||
|
||||
4
jni/nv_alsa/build.sh
Normal file
4
jni/nv_alsa/build.sh
Normal file
@@ -0,0 +1,4 @@
|
||||
#!/bin/sh
# Build libnv_alsa.so, the JNI wrapper around ALSA playback.
# set -e: abort on the first failing command instead of silently linking
# against stale or missing object files.
set -e

# Remove stale artifacts; -f so a clean tree does not abort the script.
rm -f *.o libnv_alsa.so

# Compile every C source to position-independent objects against the JDK
# and Raspberry Pi VideoCore headers.
gcc -I $JAVA_HOME/include -I $JAVA_HOME/include/linux -I /opt/vc/include -I/opt/vc/include/interface/vcos/pthreads -I/opt/vc/include/interface/vmcs_host/linux -I ./inc -fPIC -L. -c *.c

# Link the shared library; --no-undefined catches missing symbols at link time.
gcc -shared -Wl,-soname,libnv_alsa.so -Wl,--no-undefined -o libnv_alsa.so *.o -L. -lasound

rm -f *.o
|
||||
54
jni/nv_alsa/nv_alsa.c
Normal file
54
jni/nv_alsa/nv_alsa.c
Normal file
@@ -0,0 +1,54 @@
|
||||
/* Use the newer ALSA API */
|
||||
#define ALSA_PCM_NEW_HW_PARAMS_API
|
||||
|
||||
/* All of the ALSA library API is defined
|
||||
* in this header */
|
||||
#include <alsa/asoundlib.h>
|
||||
|
||||
snd_pcm_t *handle;
|
||||
|
||||
int nv_alsa_init(unsigned int channelCount, unsigned int sampleRate) {
|
||||
int rc;
|
||||
snd_pcm_hw_params_t *params;
|
||||
int dir;
|
||||
|
||||
/* Open PCM device for playback. */
|
||||
if ((rc = snd_pcm_open(&handle, "default", SND_PCM_STREAM_PLAYBACK, 0)) != 0)
|
||||
return rc;
|
||||
|
||||
snd_pcm_hw_params_alloca(¶ms);
|
||||
snd_pcm_hw_params_any(handle, params);
|
||||
|
||||
snd_pcm_hw_params_set_access(handle, params, SND_PCM_ACCESS_RW_INTERLEAVED);
|
||||
snd_pcm_hw_params_set_format(handle, params, SND_PCM_FORMAT_S16_LE);
|
||||
snd_pcm_hw_params_set_channels(handle, params, channelCount);
|
||||
snd_pcm_hw_params_set_rate_near(handle, params, &sampleRate, &dir);
|
||||
|
||||
snd_pcm_uframes_t frames = 32;
|
||||
snd_pcm_hw_params_set_period_size_near(handle, params, &frames, &dir);
|
||||
|
||||
if ((rc = snd_pcm_hw_params(handle, params)) != 0)
|
||||
return rc;
|
||||
}
|
||||
|
||||
int nv_alsa_play(const unsigned char* indata, int data_len) {
|
||||
int frames = data_len/4; /* 2 bytes/sample, 2 channels */
|
||||
int rc = snd_pcm_writei(handle, indata, frames);
|
||||
if (rc == -EPIPE) {
|
||||
/* EPIPE means underrun */
|
||||
fprintf(stderr, "underrun occurred\n");
|
||||
snd_pcm_prepare(handle);
|
||||
} else if (rc < 0) {
|
||||
fprintf(stderr,
|
||||
"error from writei: %s\n",
|
||||
snd_strerror(rc));
|
||||
} else if (rc != (int) frames) {
|
||||
fprintf(stderr,
|
||||
"short write, write %d frames\n", rc);
|
||||
}
|
||||
}
|
||||
|
||||
int nv_alsa_close(void) {
|
||||
snd_pcm_drain(handle);
|
||||
snd_pcm_close(handle);
|
||||
}
|
||||
5
jni/nv_alsa/nv_alsa.h
Normal file
5
jni/nv_alsa/nv_alsa.h
Normal file
@@ -0,0 +1,5 @@
|
||||
#ifndef NV_ALSA_H
#define NV_ALSA_H

#include <jni.h>

/* Public interface of the ALSA playback backend (nv_alsa.c).
 * All functions return 0 on success or a negative ALSA error code. */

int nv_alsa_init(unsigned int channelCount, unsigned int sampleRate);
/* BUG FIX: const-qualified to match the definition in nv_alsa.c
 * (the non-const declaration was a conflicting type). */
int nv_alsa_play(const unsigned char* indata, int inlen);
/* BUG FIX: returns int to match the definition in nv_alsa.c
 * (the header previously declared void, a conflicting type). */
int nv_alsa_close(void);

#endif /* NV_ALSA_H */
||||
36
jni/nv_alsa/nv_alsa_jni.c
Normal file
36
jni/nv_alsa/nv_alsa_jni.c
Normal file
@@ -0,0 +1,36 @@
|
||||
#include "nv_alsa.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <jni.h>
|
||||
|
||||
// This function must be called before
|
||||
// any other decoding functions
|
||||
JNIEXPORT jint JNICALL
|
||||
Java_com_limelight_binding_audio_AlsaAudio_init(JNIEnv *env, jobject this, jint channelCount, jint sampleRate)
|
||||
{
|
||||
return nv_alsa_init(channelCount, sampleRate);
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL
|
||||
Java_com_limelight_binding_audio_AlsaAudio_close(JNIEnv *env, jobject this)
|
||||
{
|
||||
nv_alsa_close();
|
||||
}
|
||||
|
||||
JNIEXPORT jint JNICALL
|
||||
Java_com_limelight_binding_audio_AlsaAudio_play(
|
||||
JNIEnv *env, jobject this, // JNI parameters
|
||||
jbyteArray indata, jint inoff, jint inlen)
|
||||
{
|
||||
jint ret;
|
||||
jbyte* jni_input_data;
|
||||
|
||||
jni_input_data = (*env)->GetByteArrayElements(env, indata, 0);
|
||||
|
||||
ret = nv_alsa_play(&jni_input_data[inoff], inlen);
|
||||
|
||||
// The input data isn't changed so it can be safely aborted
|
||||
(*env)->ReleaseByteArrayElements(env, indata, jni_input_data, JNI_ABORT);
|
||||
|
||||
return ret;
|
||||
}
|
||||
@@ -3,7 +3,7 @@ package com.limelight.binding;
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import com.limelight.binding.audio.JavaxAudioRenderer;
|
||||
import com.limelight.binding.audio.AlsaAudioRenderer;
|
||||
import com.limelight.binding.video.OmxDecoderRenderer;
|
||||
import com.limelight.nvstream.av.audio.AudioRenderer;
|
||||
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
|
||||
@@ -40,6 +40,6 @@ public class PlatformBinding {
|
||||
* @return an audio decoder and renderer
|
||||
*/
|
||||
public static AudioRenderer getAudioRenderer() {
|
||||
return new JavaxAudioRenderer();
|
||||
return new AlsaAudioRenderer();
|
||||
}
|
||||
}
|
||||
|
||||
17
src/com/limelight/binding/audio/AlsaAudio.java
Normal file
17
src/com/limelight/binding/audio/AlsaAudio.java
Normal file
@@ -0,0 +1,17 @@
|
||||
package com.limelight.binding.audio;
|
||||
|
||||
/**
 * JNI bindings for the native ALSA playback library (libnv_alsa.so).
 * @author Iwan Timmer
 */
public class AlsaAudio {

	static {
		// Loads libnv_alsa.so from java.library.path (bundled via one-jar binlib).
		System.loadLibrary("nv_alsa");
	}

	/**
	 * Opens and configures the default ALSA playback device.
	 * @param channelCount number of interleaved channels
	 * @param sampleRate sample rate in Hz
	 * @return 0 on success, a negative ALSA error code otherwise
	 */
	public static native int init(int channelCount, int sampleRate);

	/** Drains and closes the ALSA device opened by {@link #init}. */
	public static native void close();

	/**
	 * Plays interleaved 16-bit PCM samples.
	 * @param indata buffer containing the samples
	 * @param inoff offset of the first byte to play
	 * @param inlen number of bytes to play
	 * @return frames written, or a negative ALSA error code
	 */
	public static native int play(byte[] indata, int inoff, int inlen);

}
|
||||
26
src/com/limelight/binding/audio/AlsaAudioRenderer.java
Normal file
26
src/com/limelight/binding/audio/AlsaAudioRenderer.java
Normal file
@@ -0,0 +1,26 @@
|
||||
package com.limelight.binding.audio;
|
||||
|
||||
import com.limelight.nvstream.av.audio.AudioRenderer;
|
||||
|
||||
/**
|
||||
* Audio renderer implementation
|
||||
* @author Iwan Timmer
|
||||
*/
|
||||
public class AlsaAudioRenderer implements AudioRenderer {
|
||||
|
||||
@Override
|
||||
public void streamInitialized(int channelCount, int sampleRate) {
|
||||
AlsaAudio.init(channelCount, sampleRate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void playDecodedAudio(byte[] bytes, int offset, int length) {
|
||||
AlsaAudio.play(bytes, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void streamClosing() {
|
||||
AlsaAudio.close();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,87 +0,0 @@
|
||||
package com.limelight.binding.audio;
|
||||
|
||||
import javax.sound.sampled.AudioFormat;
|
||||
import javax.sound.sampled.AudioSystem;
|
||||
import javax.sound.sampled.DataLine;
|
||||
import javax.sound.sampled.LineUnavailableException;
|
||||
import javax.sound.sampled.SourceDataLine;
|
||||
|
||||
import com.limelight.nvstream.av.audio.AudioRenderer;
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
/**
|
||||
* Audio renderer implementation
|
||||
* @author Cameron Gutman<br>
|
||||
* Iwan Timmer
|
||||
*/
|
||||
public class JavaxAudioRenderer implements AudioRenderer {
|
||||
|
||||
private SourceDataLine soundLine;
|
||||
private int channelCount;
|
||||
private int sampleRate;
|
||||
|
||||
public static final int DEFAULT_BUFFER_SIZE = 4096;
|
||||
|
||||
/**
|
||||
* Takes some audio data and writes it out to the renderer.
|
||||
* @param pcmData the array that contains the audio data
|
||||
* @param offset the offset at which the data starts in the array
|
||||
* @param length the length of data to be rendered
|
||||
*/
|
||||
@Override
|
||||
public void playDecodedAudio(byte[] pcmData, int offset, int length) {
|
||||
soundLine.write(pcmData, offset, length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback for when the stream session is closing and the audio renderer should stop.
|
||||
*/
|
||||
@Override
|
||||
public void streamClosing() {
|
||||
if (soundLine != null) {
|
||||
soundLine.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void createSoundLine(int bufferSize) {
|
||||
AudioFormat audioFormat = new AudioFormat(sampleRate, 16, channelCount, true, ByteOrder.nativeOrder()==ByteOrder.BIG_ENDIAN);
|
||||
|
||||
DataLine.Info info;
|
||||
|
||||
if (bufferSize == DEFAULT_BUFFER_SIZE) {
|
||||
info = new DataLine.Info(SourceDataLine.class, audioFormat);
|
||||
}
|
||||
else {
|
||||
info = new DataLine.Info(SourceDataLine.class, audioFormat, bufferSize);
|
||||
}
|
||||
|
||||
try {
|
||||
soundLine = (SourceDataLine) AudioSystem.getLine(info);
|
||||
|
||||
if (bufferSize == DEFAULT_BUFFER_SIZE) {
|
||||
soundLine.open(audioFormat);
|
||||
}
|
||||
else {
|
||||
soundLine.open(audioFormat, bufferSize);
|
||||
}
|
||||
|
||||
soundLine.start();
|
||||
} catch (LineUnavailableException e) {
|
||||
soundLine = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The callback for the audio stream being initialized and starting to receive.
|
||||
* @param channelCount the number of channels in the audio
|
||||
* @param sampleRate the sample rate for the audio.
|
||||
*/
|
||||
@Override
|
||||
public void streamInitialized(int channelCount, int sampleRate) {
|
||||
this.channelCount = channelCount;
|
||||
this.sampleRate = sampleRate;
|
||||
|
||||
createSoundLine(DEFAULT_BUFFER_SIZE);
|
||||
}
|
||||
|
||||
}
|
||||
Reference in New Issue
Block a user