Rework interfaces for JNI bridge

Cameron Gutman 2017-05-14 17:12:30 -07:00
parent 04e77e557b
commit a3d5e955aa
7 changed files with 125 additions and 95 deletions

com/limelight/nvstream/ConnectionContext.java

@@ -5,7 +5,6 @@ import java.net.InetAddress;
import javax.crypto.SecretKey;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
-import com.limelight.nvstream.av.video.VideoDecoderRenderer.VideoFormat;
public class ConnectionContext {
    // Gen 3 servers are 2.1.1 - 2.2.1
@@ -35,7 +34,7 @@ public class ConnectionContext {
    // This is the version quad from the appversion tag of /serverinfo
    public int[] serverAppVersion;
-    public VideoFormat negotiatedVideoFormat;
+    public int negotiatedVideoFormat;
    public int negotiatedWidth, negotiatedHeight;
    public int negotiatedFps;
}
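
For context, code that previously switched on the VideoDecoderRenderer.VideoFormat enum now compares this field against the integer constants added in MoonBridge later in this commit (NvConnection seeds it to -1 until negotiation completes). A minimal sketch; the class and method below are hypothetical, only the field and the MoonBridge constants come from the commit:

// Hypothetical helper: maps the new int-based format field to a display string.
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.jni.MoonBridge;

class FormatProbe {
    static String describeFormat(ConnectionContext context) {
        if (context.negotiatedVideoFormat == MoonBridge.VIDEO_FORMAT_H265) {
            return "HEVC";
        }
        else if (context.negotiatedVideoFormat == MoonBridge.VIDEO_FORMAT_H264) {
            return "H.264";
        }
        else {
            return "Unknown"; // still -1 before negotiation completes
        }
    }
}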

com/limelight/nvstream/NvConnection.java

@@ -14,7 +14,6 @@ import org.xmlpull.v1.XmlPullParserException;
import com.limelight.LimeLog;
import com.limelight.nvstream.av.audio.AudioRenderer;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
-import com.limelight.nvstream.av.video.VideoDecoderRenderer.VideoFormat;
import com.limelight.nvstream.http.GfeHttpResponseException;
import com.limelight.nvstream.http.LimelightCryptoProvider;
import com.limelight.nvstream.http.NvApp;
@@ -47,7 +46,7 @@ public class NvConnection {
        this.context.riKeyId = generateRiKeyId();
-        this.context.negotiatedVideoFormat = VideoFormat.Unknown;
+        this.context.negotiatedVideoFormat = -1;
    }
    private static SecretKey generateRiAesKey() throws NoSuchAlgorithmException {
@@ -249,7 +248,7 @@ public class NvConnection {
            } catch (Exception e) {
                e.printStackTrace();
                context.connListener.displayMessage(e.getMessage());
-                context.connListener.stageFailed(appName);
+                context.connListener.stageFailed(appName, 0);
                return;
            }
        }
@@ -263,7 +262,7 @@ public class NvConnection {
        try {
            context.serverAddress = InetAddress.getByName(host);
        } catch (UnknownHostException e) {
-            context.connListener.connectionTerminated(e);
+            context.connListener.connectionTerminated(-1);
            return;
        }
@@ -312,7 +311,7 @@ public class NvConnection {
    }
-    public VideoFormat getActiveVideoFormat() {
+    public int getActiveVideoFormat() {
        return context.negotiatedVideoFormat;
    }
}

com/limelight/nvstream/NvConnectionListener.java

@@ -3,10 +3,10 @@ package com.limelight.nvstream;
public interface NvConnectionListener {
    void stageStarting(String stage);
    void stageComplete(String stage);
-    void stageFailed(String stage);
+    void stageFailed(String stage, long errorCode);
    void connectionStarted();
-    void connectionTerminated(Exception e);
+    void connectionTerminated(long errorCode);
    void displayMessage(String message);
    void displayTransientMessage(String message);
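
For illustration, a listener built against the reworked interface might look like the sketch below, assuming the interface contains only the methods visible in this hunk. The class name and logging are invented; the signatures (a String stage plus a long error code, and a numeric code in place of the old Exception) come from this commit:

// Sketch only: a logging NvConnectionListener under the new signatures.
import com.limelight.nvstream.NvConnectionListener;

public class LoggingConnectionListener implements NvConnectionListener {
    @Override
    public void stageStarting(String stage) {
        System.out.println("Starting " + stage);
    }

    @Override
    public void stageComplete(String stage) {
        System.out.println("Completed " + stage);
    }

    @Override
    public void stageFailed(String stage, long errorCode) {
        // Failures now carry a numeric code instead of an Exception
        System.err.println(stage + " failed with error " + errorCode);
    }

    @Override
    public void connectionStarted() {
        System.out.println("Connection started");
    }

    @Override
    public void connectionTerminated(long errorCode) {
        System.err.println("Connection terminated with error " + errorCode);
    }

    @Override
    public void displayMessage(String message) {
        System.out.println(message);
    }

    @Override
    public void displayTransientMessage(String message) {
        System.out.println(message);
    }
}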

com/limelight/nvstream/av/DecodeUnit.java

@@ -1,50 +0,0 @@
-package com.limelight.nvstream.av;
-public class DecodeUnit {
-    public static final int DU_FLAG_CODEC_CONFIG = 0x1;
-    public static final int DU_FLAG_SYNC_FRAME = 0x2;
-    private ByteBufferDescriptor bufferHead;
-    private int dataLength;
-    private int frameNumber;
-    private long receiveTimestamp;
-    private int flags;
-    public DecodeUnit() {
-    }
-    public void initialize(ByteBufferDescriptor bufferHead, int dataLength,
-            int frameNumber, long receiveTimestamp, int flags)
-    {
-        this.bufferHead = bufferHead;
-        this.dataLength = dataLength;
-        this.frameNumber = frameNumber;
-        this.receiveTimestamp = receiveTimestamp;
-        this.flags = flags;
-    }
-    public long getReceiveTimestamp()
-    {
-        return receiveTimestamp;
-    }
-    public ByteBufferDescriptor getBufferHead()
-    {
-        return bufferHead;
-    }
-    public int getDataLength()
-    {
-        return dataLength;
-    }
-    public int getFrameNumber()
-    {
-        return frameNumber;
-    }
-    public int getFlags()
-    {
-        return flags;
-    }
-}

com/limelight/nvstream/av/audio/AudioRenderer.java

@@ -1,14 +1,11 @@
package com.limelight.nvstream.av.audio;
public interface AudioRenderer {
-    // playDecodedAudio() is lightweight, so don't use an extra thread for playback
-    public static final int CAPABILITY_DIRECT_SUBMIT = 0x1;
-    public int getCapabilities();
-    public boolean streamInitialized(int channelCount, int channelMask, int samplesPerFrame, int sampleRate);
-    public void playDecodedAudio(byte[] audioData, int offset, int length);
-    public void streamClosing();
+    int getCapabilities();
+    void setup(int audioConfiguration);
+    void playDecodedAudio(byte[] audioData);
+    void cleanup();
}
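
A possible implementation of the slimmed-down audio interface, shown only as a sketch: the four method signatures and the MoonBridge.AUDIO_CONFIGURATION_* constants come from this commit, while the class name, field, and the elided device handling are assumptions:

// Sketch of an AudioRenderer against the new interface; buffering/device code is stubbed.
import com.limelight.nvstream.av.audio.AudioRenderer;
import com.limelight.nvstream.jni.MoonBridge;

public class SimpleAudioRenderer implements AudioRenderer {
    private int channelCount;

    @Override
    public int getCapabilities() {
        // No special capabilities advertised in this sketch
        return 0;
    }

    @Override
    public void setup(int audioConfiguration) {
        // The configuration is now a single int chosen from the MoonBridge constants
        channelCount = (audioConfiguration == MoonBridge.AUDIO_CONFIGURATION_51_SURROUND) ? 6 : 2;
        // ... open the platform audio device for channelCount channels here ...
    }

    @Override
    public void playDecodedAudio(byte[] audioData) {
        // The whole decoded frame arrives in one array; no offset/length pair anymore
        // ... write audioData to the audio device here ...
    }

    @Override
    public void cleanup() {
        // ... release the audio device here ...
    }
}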

com/limelight/nvstream/av/video/VideoDecoderRenderer.java

@@ -1,38 +1,13 @@
package com.limelight.nvstream.av.video;
-import com.limelight.nvstream.av.DecodeUnit;
public abstract class VideoDecoderRenderer {
-    public enum VideoFormat {
-        Unknown,
-        H264,
-        H265
-    };
-    public static final int FLAG_PREFER_QUALITY = 0x1;
-    public static final int FLAG_FORCE_HARDWARE_DECODING = 0x2;
-    public static final int FLAG_FORCE_SOFTWARE_DECODING = 0x4;
-    public static final int FLAG_FILL_SCREEN = 0x8;
-    // Allows the resolution to dynamically change mid-stream
-    public static final int CAPABILITY_ADAPTIVE_RESOLUTION = 0x1;
-    // Allows decode units to be submitted directly from the receive thread
-    public static final int CAPABILITY_DIRECT_SUBMIT = 0x2;
-    // !!! EXPERIMENTAL !!!
-    // Allows reference frame invalidation to be use to recover from packet loss
-    public static final int CAPABILITY_REFERENCE_FRAME_INVALIDATION = 0x4;
    public int getCapabilities() {
        return 0;
    }
-    public abstract boolean setup(VideoFormat format, int width, int height, int redrawRate);
+    public abstract boolean setup(int format, int width, int height, int redrawRate);
+    public abstract int submitDecodeUnit(byte[] frameData);
-    public abstract boolean start();
-    public abstract void stop();
-    public abstract void release();
+    public abstract void cleanup();
}
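
Similarly, a hypothetical decoder built on the reworked abstract class illustrates the reduced lifecycle (setup, submitDecodeUnit, cleanup). Only the overridden signatures and the MoonBridge constants are from this commit; the class name and stubbed bodies are assumptions:

// Sketch of a VideoDecoderRenderer under the new abstract class; decoder internals are stubbed.
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.jni.MoonBridge;

public class StubVideoRenderer extends VideoDecoderRenderer {
    @Override
    public int getCapabilities() {
        // Advertise direct submission of decode units (mirrors the old CAPABILITY_DIRECT_SUBMIT)
        return MoonBridge.CAPABILITY_DIRECT_SUBMIT;
    }

    @Override
    public boolean setup(int format, int width, int height, int redrawRate) {
        // format is now one of MoonBridge.VIDEO_FORMAT_H264 / VIDEO_FORMAT_H265
        return format == MoonBridge.VIDEO_FORMAT_H264 || format == MoonBridge.VIDEO_FORMAT_H265;
    }

    @Override
    public int submitDecodeUnit(byte[] frameData) {
        // An encoded frame now arrives as a plain byte array (the DecodeUnit type is gone);
        // DR_NEED_IDR could presumably be returned to request a recovery keyframe.
        return MoonBridge.DR_OK;
    }

    @Override
    public void cleanup() {
        // Release decoder resources here
    }
}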

com/limelight/nvstream/jni/MoonBridge.java

@@ -0,0 +1,110 @@
+package com.limelight.nvstream.jni;
+import com.limelight.nvstream.NvConnectionListener;
+import com.limelight.nvstream.av.audio.AudioRenderer;
+import com.limelight.nvstream.av.video.VideoDecoderRenderer;
+public class MoonBridge {
+    public static final int AUDIO_CONFIGURATION_STEREO = 0;
+    public static final int AUDIO_CONFIGURATION_51_SURROUND = 1;
+    public static final int VIDEO_FORMAT_H264 = 1;
+    public static final int VIDEO_FORMAT_H265 = 2;
+    public static final int CAPABILITY_DIRECT_SUBMIT = 1;
+    public static final int CAPABILITY_REFERENCE_FRAME_INVALIDATION = 2;
+    public static final int DR_OK = 0;
+    public static final int DR_NEED_IDR = -1;
+    private static AudioRenderer audioRenderer;
+    private static VideoDecoderRenderer videoRenderer;
+    private static NvConnectionListener connectionListener;
+    public static int CAPABILITY_SLICES_PER_FRAME(byte slices) {
+        return slices << 24;
+    }
+    public static void bridgeDrSetup(int videoFormat, int width, int height, int redrawRate) {
+        if (videoRenderer != null) {
+            videoRenderer.setup(videoFormat, width, height, redrawRate);
+        }
+    }
+    public static void bridgeDrCleanup() {
+        if (videoRenderer != null) {
+            videoRenderer.cleanup();
+        }
+    }
+    public static int bridgeDrSubmitDecodeUnit(byte[] frameData) {
+        if (videoRenderer != null) {
+            return videoRenderer.submitDecodeUnit(frameData);
+        }
+        else {
+            return DR_OK;
+        }
+    }
+    public static void bridgeArInit(int audioConfiguration) {
+        if (audioRenderer != null) {
+            audioRenderer.setup(audioConfiguration);
+        }
+    }
+    public static void bridgeArCleanup() {
+        if (audioRenderer != null) {
+            audioRenderer.cleanup();
+        }
+    }
+    public static void bridgeArPlaySample(byte[] pcmData) {
+        if (audioRenderer != null) {
+            audioRenderer.playDecodedAudio(pcmData);
+        }
+    }
+    public static void bridgeClStageStarting(int stage) {
+        if (connectionListener != null) {
+            connectionListener.stageStarting(getStageName(stage));
+        }
+    }
+    public static void bridgeClStageComplete(int stage) {
+        if (connectionListener != null) {
+            connectionListener.stageComplete(getStageName(stage));
+        }
+    }
+    public static void bridgeClStageFailed(int stage, long errorCode) {
+        if (connectionListener != null) {
+            connectionListener.stageFailed(getStageName(stage), errorCode);
+        }
+    }
+    public static void bridgeClConnectionStarted() {
+        if (connectionListener != null) {
+            connectionListener.connectionStarted();
+        }
+    }
+    public static void bridgeClConnectionTerminated(long errorCode) {
+        if (connectionListener != null) {
+            connectionListener.connectionTerminated(errorCode);
+        }
+    }
+    public static void bridgeClDisplayMessage(String message) {
+        if (connectionListener != null) {
+            connectionListener.displayMessage(message);
+        }
+    }
+    public static void bridgeClDisplayTransientMessage(String message) {
+        if (connectionListener != null) {
+            connectionListener.displayTransientMessage(message);
+        }
+    }
+    public static native String getStageName(int stage);
+}
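
As a usage note, CAPABILITY_SLICES_PER_FRAME packs a slice count into the high byte (slices << 24), so it can be combined with the flat capability constants via bitwise OR. A sketch of how a renderer's getCapabilities() value might be composed (class and values illustrative only):

// Illustrative only: composing MoonBridge capability flags.
import com.limelight.nvstream.jni.MoonBridge;

public class CapabilityExample {
    public static int exampleCapabilities() {
        // Direct submit plus a hint of 4 slices per frame (0x04000000 in the high byte)
        return MoonBridge.CAPABILITY_DIRECT_SUBMIT
                | MoonBridge.CAPABILITY_SLICES_PER_FRAME((byte) 4);
    }
}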