Purge the majority of the streaming core

Cameron Gutman 2017-05-12 18:46:01 -07:00
parent 92b86674b9
commit 23ebc4d927
35 changed files with 47 additions and 4608 deletions

File: com/limelight/nvstream/NvConnection.java

@ -12,19 +12,14 @@ import javax.crypto.SecretKey;
 import org.xmlpull.v1.XmlPullParserException;
 import com.limelight.LimeLog;
-import com.limelight.nvstream.av.audio.AudioStream;
 import com.limelight.nvstream.av.audio.AudioRenderer;
 import com.limelight.nvstream.av.video.VideoDecoderRenderer;
 import com.limelight.nvstream.av.video.VideoDecoderRenderer.VideoFormat;
-import com.limelight.nvstream.av.video.VideoStream;
-import com.limelight.nvstream.control.ControlStream;
 import com.limelight.nvstream.http.GfeHttpResponseException;
 import com.limelight.nvstream.http.LimelightCryptoProvider;
 import com.limelight.nvstream.http.NvApp;
 import com.limelight.nvstream.http.NvHTTP;
 import com.limelight.nvstream.http.PairingManager;
-import com.limelight.nvstream.input.ControllerStream;
-import com.limelight.nvstream.rtsp.RtspConnection;
 
 public class NvConnection {
     // Context parameters
@ -33,17 +28,6 @@ public class NvConnection {
     private String uniqueId;
     private ConnectionContext context;
 
-    // Stream objects
-    private ControlStream controlStream;
-    private ControllerStream inputStream;
-    private VideoStream videoStream;
-    private AudioStream audioStream;
-
-    // Start parameters
-    private int drFlags;
-    private Object videoRenderTarget;
-    private AudioRenderer audioRenderer;
-
     public NvConnection(String host, String uniqueId, NvConnectionListener listener, StreamConfiguration config, LimelightCryptoProvider cryptoProvider)
     {
         this.host = host;
@ -81,22 +65,7 @@ public class NvConnection {
     public void stop()
     {
-        if (inputStream != null) {
-            inputStream.abort();
-            inputStream = null;
-        }
-
-        if (audioStream != null) {
-            audioStream.abort();
-        }
-
-        if (videoStream != null) {
-            videoStream.abort();
-        }
-
-        if (controlStream != null) {
-            controlStream.abort();
-        }
     }
 
     private boolean startApp() throws XmlPullParserException, IOException
@ -269,118 +238,24 @@ public class NvConnection {
         return true;
     }
 
-    private boolean doRtspHandshake() throws IOException
-    {
-        RtspConnection r = new RtspConnection(context);
-        r.doRtspHandshake();
-        return true;
-    }
-
-    private boolean startControlStream() throws IOException
-    {
-        controlStream = new ControlStream(context);
-        controlStream.initialize();
-        controlStream.start();
-        return true;
-    }
-
-    private boolean startVideoStream() throws IOException
-    {
-        videoStream = new VideoStream(context, controlStream);
-        return videoStream.startVideoStream(videoRenderTarget, drFlags);
-    }
-
-    private boolean startAudioStream() throws IOException
-    {
-        audioStream = new AudioStream(context, audioRenderer);
-        return audioStream.startAudioStream();
-    }
-
-    private boolean startInputConnection() throws IOException
-    {
-        // Because input events can be delivered at any time, we must only assign
-        // it to the instance variable once the object is properly initialized.
-        // This avoids the race where inputStream != null but inputStream.initialize()
-        // has not returned yet.
-        ControllerStream tempController = new ControllerStream(context);
-        tempController.initialize(controlStream);
-        tempController.start();
-        inputStream = tempController;
-        return true;
-    }
-
     private void establishConnection() {
-        for (NvConnectionListener.Stage currentStage : NvConnectionListener.Stage.values())
-        {
-            boolean success = false;
-
-            if (currentStage == NvConnectionListener.Stage.LAUNCH_APP) {
-                // Display the app name instead of the stage name
-                currentStage.setName(context.streamConfig.getApp().getAppName());
-            }
-
-            context.connListener.stageStarting(currentStage);
-            try {
-                switch (currentStage)
-                {
-                case LAUNCH_APP:
-                    success = startApp();
-                    break;
-
-                case RTSP_HANDSHAKE:
-                    success = doRtspHandshake();
-                    break;
-
-                case CONTROL_START:
-                    success = startControlStream();
-                    break;
-
-                case VIDEO_START:
-                    success = startVideoStream();
-                    break;
-
-                case AUDIO_START:
-                    success = startAudioStream();
-                    break;
-
-                case INPUT_START:
-                    success = startInputConnection();
-                    break;
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-                context.connListener.displayMessage(e.getMessage());
-                success = false;
-            }
-
-            if (success) {
-                context.connListener.stageComplete(currentStage);
-            }
-            else {
-                context.connListener.stageFailed(currentStage);
-                return;
-            }
-        }
-
-        // Move the mouse cursor very slightly to wake the screen up for
-        // gamepad-only scenarios
-        sendMouseMove((short) 1, (short) 1);
-        try {
-            Thread.sleep(10);
-        } catch (InterruptedException e) {}
-        sendMouseMove((short) -1, (short) -1);
-        try {
-            Thread.sleep(10);
-        } catch (InterruptedException e) {}
-
-        context.connListener.connectionStarted();
+        String appName = context.streamConfig.getApp().getAppName();
+
+        context.connListener.stageStarting(appName);
+
+        try {
+            startApp();
+            context.connListener.stageComplete(appName);
+        } catch (Exception e) {
+            e.printStackTrace();
+            context.connListener.displayMessage(e.getMessage());
+            context.connListener.stageFailed(appName);
+            return;
+        }
     }
 
-    public void start(String localDeviceName, Object videoRenderTarget, int drFlags, AudioRenderer audioRenderer, VideoDecoderRenderer videoDecoderRenderer)
+    public void start(AudioRenderer audioRenderer, VideoDecoderRenderer videoDecoderRenderer)
     {
-        this.drFlags = drFlags;
-        this.audioRenderer = audioRenderer;
-        this.videoRenderTarget = videoRenderTarget;
         this.context.videoDecoderRenderer = videoDecoderRenderer;
 
         new Thread(new Runnable() {
@ -399,26 +274,17 @@ public class NvConnection {
     public void sendMouseMove(final short deltaX, final short deltaY)
     {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendMouseMove(deltaX, deltaY);
     }
 
     public void sendMouseButtonDown(final byte mouseButton)
     {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendMouseButtonDown(mouseButton);
     }
 
     public void sendMouseButtonUp(final byte mouseButton)
     {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendMouseButtonUp(mouseButton);
     }
 
     public void sendControllerInput(final short controllerNumber,
@ -427,13 +293,7 @@ public class NvConnection {
         final short leftStickX, final short leftStickY,
         final short rightStickX, final short rightStickY)
     {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendControllerInput(controllerNumber, activeGamepadMask,
-            buttonFlags, leftTrigger,
-            rightTrigger, leftStickX, leftStickY,
-            rightStickX, rightStickY);
     }
 
     public void sendControllerInput(final short buttonFlags,
@ -441,26 +301,15 @@ public class NvConnection {
         final short leftStickX, final short leftStickY,
         final short rightStickX, final short rightStickY)
     {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendControllerInput(buttonFlags, leftTrigger,
-            rightTrigger, leftStickX, leftStickY,
-            rightStickX, rightStickY);
     }
 
     public void sendKeyboardInput(final short keyMap, final byte keyDirection, final byte modifier) {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendKeyboardInput(keyMap, keyDirection, modifier);
     }
 
     public void sendMouseScroll(final byte scrollClicks) {
-        if (inputStream == null)
-            return;
-
-        inputStream.sendMouseScroll(scrollClicks);
     }
 
     public VideoFormat getActiveVideoFormat() {
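Note: with the Java stream objects gone, NvConnection's public surface shrinks to construction, start(), stop(), and the now-stubbed input senders. A minimal caller sketch against the new start() signature; MyAudioRenderer and MyVideoDecoderRenderer are hypothetical stand-ins for platform-specific implementations, not part of this diff:

```java
// Sketch only: renderer classes are placeholders for real implementations.
NvConnection conn = new NvConnection(host, uniqueId, listener, config, cryptoProvider);
conn.start(new MyAudioRenderer(), new MyVideoDecoderRenderer());
// ... stream runs ...
conn.stop();
```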

File: com/limelight/nvstream/NvConnectionListener.java

@ -1,36 +1,13 @@
 package com.limelight.nvstream;
 
 public interface NvConnectionListener {
-    public enum Stage {
-        LAUNCH_APP("app"),
-        RTSP_HANDSHAKE("RTSP handshake"),
-        CONTROL_START("control connection"),
-        VIDEO_START("video stream"),
-        AUDIO_START("audio stream"),
-        INPUT_START("input connection");
-
-        private String name;
-        private Stage(String name) {
-            this.name = name;
-        }
-
-        void setName(String name) {
-            this.name = name;
-        }
-
-        public String getName() {
-            return name;
-        }
-    };
-
-    public void stageStarting(Stage stage);
-    public void stageComplete(Stage stage);
-    public void stageFailed(Stage stage);
-
-    public void connectionStarted();
-    public void connectionTerminated(Exception e);
-
-    public void displayMessage(String message);
-    public void displayTransientMessage(String message);
+    void stageStarting(String stage);
+    void stageComplete(String stage);
+    void stageFailed(String stage);
+
+    void connectionStarted();
+    void connectionTerminated(Exception e);
+
+    void displayMessage(String message);
+    void displayTransientMessage(String message);
 }
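The stage callbacks now receive a plain String (the launched app's name) instead of the removed Stage enum. A minimal listener sketch, assuming nothing beyond the interface shown above; the logging bodies are illustrative:

```java
public class LoggingConnectionListener implements NvConnectionListener {
    @Override public void stageStarting(String stage) { System.out.println("Starting: " + stage); }
    @Override public void stageComplete(String stage) { System.out.println("Completed: " + stage); }
    @Override public void stageFailed(String stage) { System.err.println("Failed: " + stage); }
    @Override public void connectionStarted() { System.out.println("Connection started"); }
    @Override public void connectionTerminated(Exception e) { e.printStackTrace(); }
    @Override public void displayMessage(String message) { System.out.println(message); }
    @Override public void displayTransientMessage(String message) { System.out.println(message); }
}
```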

File: com/limelight/nvstream/av/ConnectionStatusListener.java (deleted)

@ -1,13 +0,0 @@
package com.limelight.nvstream.av;
public interface ConnectionStatusListener {
public void connectionDetectedFrameLoss(int firstLostFrame, int nextSuccessfulFrame);
public void connectionSinkTooSlow(int firstLostFrame, int nextSuccessfulFrame);
public void connectionReceivedCompleteFrame(int frameIndex);
public void connectionSawFrame(int frameIndex);
public void connectionLostPackets(int lastReceivedPacket, int nextReceivedPacket);
}

File: com/limelight/nvstream/av/DecodeUnit.java

@ -1,7 +1,5 @@
 package com.limelight.nvstream.av;
 
-import com.limelight.nvstream.av.video.VideoPacket;
-
 public class DecodeUnit {
     public static final int DU_FLAG_CODEC_CONFIG = 0x1;
     public static final int DU_FLAG_SYNC_FRAME = 0x2;
@ -11,20 +9,18 @@ public class DecodeUnit {
     private int frameNumber;
     private long receiveTimestamp;
     private int flags;
-    private VideoPacket backingPacketHead;
 
     public DecodeUnit() {
     }
 
     public void initialize(ByteBufferDescriptor bufferHead, int dataLength,
-                           int frameNumber, long receiveTimestamp, int flags, VideoPacket backingPacketHead)
+                           int frameNumber, long receiveTimestamp, int flags)
     {
         this.bufferHead = bufferHead;
         this.dataLength = dataLength;
         this.frameNumber = frameNumber;
         this.receiveTimestamp = receiveTimestamp;
         this.flags = flags;
-        this.backingPacketHead = backingPacketHead;
     }
 
     public long getReceiveTimestamp()
@ -51,13 +47,4 @@ public class DecodeUnit {
     {
         return flags;
     }
-
-    // Internal use only
-    public VideoPacket removeBackingPacketHead() {
-        VideoPacket pkt = backingPacketHead;
-        if (pkt != null) {
-            backingPacketHead = pkt.nextPacket;
-        }
-        return pkt;
-    }
 }

File: com/limelight/nvstream/av/RtpPacket.java (deleted)

@ -1,88 +0,0 @@
package com.limelight.nvstream.av;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class RtpPacket implements RtpPacketFields {
private byte packetType;
private short seqNum;
private int headerSize;
private ByteBufferDescriptor buffer;
private ByteBuffer bb;
public static final int FLAG_EXTENSION = 0x10;
public static final int FIXED_HEADER_SIZE = 12;
public static final int MAX_HEADER_SIZE = 16;
public RtpPacket(byte[] buffer)
{
this.buffer = new ByteBufferDescriptor(buffer, 0, buffer.length);
this.bb = ByteBuffer.wrap(buffer).order(ByteOrder.BIG_ENDIAN);
}
public void initializeWithLength(int length)
{
// Rewind to start
bb.rewind();
// Read the RTP header byte
byte header = bb.get();
// Get the packet type
packetType = bb.get();
// Get the sequence number
seqNum = bb.getShort();
// If an extension is present, read the fields
headerSize = FIXED_HEADER_SIZE;
if ((header & FLAG_EXTENSION) != 0) {
headerSize += 4; // 2 additional fields
}
// Update descriptor length
buffer.length = length;
}
public byte getPacketType()
{
return packetType;
}
public short getRtpSequenceNumber()
{
return seqNum;
}
public byte[] getBuffer()
{
return buffer.data;
}
public void initializePayloadDescriptor(ByteBufferDescriptor bb)
{
bb.reinitialize(buffer.data, buffer.offset+headerSize, buffer.length-headerSize);
}
@Override
public int referencePacket() {
// There's no circular buffer for audio packets so this is a no-op
return 0;
}
@Override
public int dereferencePacket() {
// There's no circular buffer for audio packets so this is a no-op
return 0;
}
@Override
public int getRefCount() {
// There's no circular buffer for audio packets so this is a no-op
return 0;
}
}
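For context: the deleted RtpPacket parsed only the fields the streams actually used: the type byte, the big-endian sequence number, and the X (extension) bit, which adds 4 bytes on top of the 12-byte fixed header. A standalone restatement of that header logic; the packet bytes below are a fabricated example (type 97 is the audio payload type checked in AudioStream):

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class RtpHeaderDemo {
    public static void main(String[] args) {
        // 0x90 = RTP version 2 with the extension bit (0x10) set
        byte[] pkt = new byte[] {(byte) 0x90, 97, 0x00, 0x2A,
                                 /* timestamp */ 0, 0, 0, 0, /* SSRC */ 0, 0, 0, 0,
                                 /* extension header */ 0, 0, 0, 0, /* payload */ 1, 2, 3};
        ByteBuffer bb = ByteBuffer.wrap(pkt).order(ByteOrder.BIG_ENDIAN);
        byte header = bb.get();       // flags byte
        byte packetType = bb.get();   // read whole, as the deleted class did
        short seqNum = bb.getShort(); // big-endian sequence number
        int headerSize = 12 + (((header & 0x10) != 0) ? 4 : 0);
        System.out.println("type=" + packetType + " seq=" + seqNum
                + " payload starts at offset " + headerSize);
    }
}
```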

File: com/limelight/nvstream/av/RtpPacketFields.java (deleted)

@ -1,13 +0,0 @@
package com.limelight.nvstream.av;
public interface RtpPacketFields {
public byte getPacketType();
public short getRtpSequenceNumber();
public int referencePacket();
public int dereferencePacket();
public int getRefCount();
}

File: com/limelight/nvstream/av/RtpReorderQueue.java (deleted)

@ -1,239 +0,0 @@
package com.limelight.nvstream.av;
import java.util.Iterator;
import java.util.LinkedList;
import com.limelight.LimeLog;
import com.limelight.utils.TimeHelper;
public class RtpReorderQueue {
private final int maxSize;
private final int maxQueueTime;
private final LinkedList<RtpQueueEntry> queue;
private short nextRtpSequenceNumber;
private long oldestQueuedTime;
public enum RtpQueueStatus {
HANDLE_IMMEDIATELY,
QUEUED_NOTHING_READY,
QUEUED_PACKETS_READY,
REJECTED
};
public RtpReorderQueue() {
this.maxSize = 16;
this.maxQueueTime = 40;
this.queue = new LinkedList<RtpQueueEntry>();
this.oldestQueuedTime = Long.MAX_VALUE;
this.nextRtpSequenceNumber = Short.MAX_VALUE;
}
public RtpReorderQueue(int maxSize, int maxQueueTime) {
this.maxSize = maxSize;
this.maxQueueTime = maxQueueTime;
this.queue = new LinkedList<RtpQueueEntry>();
this.oldestQueuedTime = Long.MAX_VALUE;
this.nextRtpSequenceNumber = Short.MAX_VALUE;
}
private boolean queuePacket(boolean head, RtpPacketFields packet) {
short seq = packet.getRtpSequenceNumber();
if (nextRtpSequenceNumber != Short.MAX_VALUE) {
// Don't queue packets we're already ahead of
if (SequenceHelper.isBeforeSigned(seq, nextRtpSequenceNumber, false)) {
return false;
}
// Don't queue duplicates either
for (RtpQueueEntry existingEntry : queue) {
if (existingEntry.sequenceNumber == seq) {
return false;
}
}
}
RtpQueueEntry entry = new RtpQueueEntry();
entry.packet = packet;
entry.queueTime = TimeHelper.getMonotonicMillis();
entry.sequenceNumber = seq;
if (oldestQueuedTime == Long.MAX_VALUE) {
oldestQueuedTime = entry.queueTime;
}
// Add a reference to the packet while it's in the queue
packet.referencePacket();
if (head) {
queue.addFirst(entry);
}
else {
queue.addLast(entry);
}
return true;
}
private void updateOldestQueued() {
oldestQueuedTime = Long.MAX_VALUE;
for (RtpQueueEntry entry : queue) {
if (entry.queueTime < oldestQueuedTime) {
oldestQueuedTime = entry.queueTime;
}
}
}
private RtpQueueEntry getEntryByLowestSeq() {
if (queue.isEmpty()) {
return null;
}
RtpQueueEntry lowestSeqEntry = queue.getFirst();
short nextSeq = lowestSeqEntry.sequenceNumber;
for (RtpQueueEntry entry : queue) {
if (SequenceHelper.isBeforeSigned(entry.sequenceNumber, nextSeq, true)) {
lowestSeqEntry = entry;
nextSeq = entry.sequenceNumber;
}
}
if (nextSeq != Short.MAX_VALUE) {
nextRtpSequenceNumber = nextSeq;
}
return lowestSeqEntry;
}
private RtpQueueEntry validateQueueConstraints() {
if (queue.isEmpty()) {
return null;
}
boolean dequeuePacket = false;
// Check that the queue's time constraint is satisfied
if (TimeHelper.getMonotonicMillis() - oldestQueuedTime > maxQueueTime) {
LimeLog.info("Returning RTP packet queued for too long: "+(TimeHelper.getMonotonicMillis() - oldestQueuedTime));
dequeuePacket = true;
}
// Check that the queue's size constraint is satisfied. We subtract one
// because this is validating that the queue will meet constraints _after_
// the current packet is enqueued.
if (!dequeuePacket && queue.size() == maxSize - 1) {
LimeLog.info("Returning RTP packet after queue overgrowth");
dequeuePacket = true;
}
if (dequeuePacket) {
// Return the lowest seq queued
return getEntryByLowestSeq();
}
else {
return null;
}
}
public RtpQueueStatus addPacket(RtpPacketFields packet) {
if (nextRtpSequenceNumber != Short.MAX_VALUE &&
SequenceHelper.isBeforeSigned(packet.getRtpSequenceNumber(), nextRtpSequenceNumber, false)) {
// Reject packets behind our current sequence number
return RtpQueueStatus.REJECTED;
}
if (queue.isEmpty()) {
// Return immediately for an exact match with an empty queue
if (nextRtpSequenceNumber == Short.MAX_VALUE ||
packet.getRtpSequenceNumber() == nextRtpSequenceNumber) {
nextRtpSequenceNumber = (short) (packet.getRtpSequenceNumber() + 1);
return RtpQueueStatus.HANDLE_IMMEDIATELY;
}
else {
// Queue is empty currently so we'll put this packet on there
if (queuePacket(false, packet)) {
return RtpQueueStatus.QUEUED_NOTHING_READY;
}
else {
return RtpQueueStatus.REJECTED;
}
}
}
else {
// Validate that the queue remains within our constraints
RtpQueueEntry lowestEntry = validateQueueConstraints();
// If the queue is now empty after validating queue constraints,
// this packet can be returned immediately
if (lowestEntry == null && queue.isEmpty()) {
nextRtpSequenceNumber = (short) (packet.getRtpSequenceNumber() + 1);
return RtpQueueStatus.HANDLE_IMMEDIATELY;
}
// Queue has data inside, so we need to see where this packet fits
if (packet.getRtpSequenceNumber() == nextRtpSequenceNumber) {
// It fits in a hole where we need a packet, now we have some ready
if (queuePacket(true, packet)) {
return RtpQueueStatus.QUEUED_PACKETS_READY;
}
else {
return RtpQueueStatus.REJECTED;
}
}
else {
if (queuePacket(false, packet)) {
// Constraint validation may have changed the oldest packet to one that
// matches the next sequence number
return (lowestEntry != null) ? RtpQueueStatus.QUEUED_PACKETS_READY :
RtpQueueStatus.QUEUED_NOTHING_READY;
}
else {
return RtpQueueStatus.REJECTED;
}
}
}
}
// This function returns a referenced packet. The caller must dereference
// the packet when it is finished.
public RtpPacketFields getQueuedPacket() {
RtpQueueEntry queuedEntry = null;
// Find the matching entry
Iterator<RtpQueueEntry> i = queue.iterator();
while (i.hasNext()) {
RtpQueueEntry entry = i.next();
if (entry.sequenceNumber == nextRtpSequenceNumber) {
nextRtpSequenceNumber++;
queuedEntry = entry;
i.remove();
break;
}
}
// Bail if we found nothing
if (queuedEntry == null) {
// Update the oldest queued packet time
updateOldestQueued();
return null;
}
// We don't update the oldest queued entry here, because we know
// the caller will call again until it receives null
return queuedEntry.packet;
}
private class RtpQueueEntry {
public RtpPacketFields packet;
public short sequenceNumber;
public long queueTime;
}
}
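For context: the three queue statuses drive the receive loops in both AudioStream and VideoStream. This fragment sketches that consumption pattern (it is the loop body, not a standalone program; `depacketizer` stands in for AudioDepacketizer or VideoDepacketizer):

```java
RtpReorderQueue.RtpQueueStatus status = rtpQueue.addPacket(rtpPacket);
if (status == RtpReorderQueue.RtpQueueStatus.HANDLE_IMMEDIATELY) {
    // In-order packet: hand it straight to the depacketizer
    depacketizer.decodeInputData(rtpPacket);
}
else {
    if (status != RtpReorderQueue.RtpQueueStatus.REJECTED) {
        // QUEUED_*: the queue now owns this buffer, so allocate a fresh one
    }
    if (status == RtpReorderQueue.RtpQueueStatus.QUEUED_PACKETS_READY) {
        // A hole was just filled: drain everything now in order.
        // getQueuedPacket() returns referenced packets; dereference after use.
        RtpPacketFields queued;
        while ((queued = rtpQueue.getQueuedPacket()) != null) {
            depacketizer.decodeInputData((RtpPacket) queued);
            queued.dereferencePacket();
        }
    }
}
```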

File: com/limelight/nvstream/av/SequenceHelper.java (deleted)

@ -1,20 +0,0 @@
package com.limelight.nvstream.av;
public class SequenceHelper {
public static boolean isBeforeSigned(int numA, int numB, boolean ambiguousCase) {
// This should be the common case for most callers
if (numA == numB) {
return false;
}
// If numA and numB have the same signs,
// we can just do a regular comparison.
if ((numA < 0 && numB < 0) || (numA >= 0 && numB >= 0)) {
return numA < numB;
}
else {
// The sign switch is ambiguous
return ambiguousCase;
}
}
}
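For context: callers pass 16-bit sequence numbers as signed shorts, so a wrap from 65535 to 0 shows up as a sign change. When the signs differ the ordering is genuinely ambiguous, and isBeforeSigned lets the caller choose the bias. A small worked example under that reading:

```java
short nearWrap = (short) 65535;  // -1 as a signed short
short justWrapped = (short) 2;   // a low value just past the wrap
// Signs differ, so the result is just the ambiguousCase argument.
// The reorder queue passes false when rejecting "old" packets, which
// treats 2 as coming after 65535 across the wrap rather than before it.
boolean isOld = SequenceHelper.isBeforeSigned(justWrapped, nearWrap, false);
System.out.println(isOld); // false: the freshly wrapped packet is accepted
```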

File: com/limelight/nvstream/av/audio/AudioDepacketizer.java (deleted)

@ -1,116 +0,0 @@
package com.limelight.nvstream.av.audio;
import com.limelight.LimeLog;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.RtpPacket;
import com.limelight.nvstream.av.SequenceHelper;
import com.limelight.nvstream.av.buffer.AbstractPopulatedBufferList;
import com.limelight.nvstream.av.buffer.AtomicPopulatedBufferList;
public class AudioDepacketizer {
private static final int DU_LIMIT = 30;
private AbstractPopulatedBufferList<ByteBufferDescriptor> decodedUnits;
// Direct submit state
private AudioRenderer directSubmitRenderer;
private byte[] directSubmitData;
// Cached objects
private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0);
// Sequencing state
private short lastSequenceNumber;
public AudioDepacketizer(AudioRenderer directSubmitRenderer, final int bufferSizeShorts)
{
this.directSubmitRenderer = directSubmitRenderer;
if (directSubmitRenderer != null) {
this.directSubmitData = new byte[bufferSizeShorts*2];
}
else {
decodedUnits = new AtomicPopulatedBufferList<ByteBufferDescriptor>(DU_LIMIT, new AbstractPopulatedBufferList.BufferFactory() {
public Object createFreeBuffer() {
return new ByteBufferDescriptor(new byte[bufferSizeShorts*2], 0, bufferSizeShorts*2);
}
public void cleanupObject(Object o) {
// Nothing to do
}
});
}
}
private void decodeData(byte[] data, int off, int len)
{
// Submit this data to the decoder
int decodeLen;
ByteBufferDescriptor bb;
if (directSubmitData != null) {
bb = null;
decodeLen = OpusDecoder.decode(data, off, len, directSubmitData);
}
else {
bb = decodedUnits.pollFreeObject();
if (bb == null) {
LimeLog.warning("Audio player too slow! Forced to drop decoded samples");
decodedUnits.clearPopulatedObjects();
bb = decodedUnits.pollFreeObject();
if (bb == null) {
LimeLog.severe("Audio player is leaking buffers!");
return;
}
}
decodeLen = OpusDecoder.decode(data, off, len, bb.data);
}
if (decodeLen > 0) {
if (directSubmitRenderer != null) {
directSubmitRenderer.playDecodedAudio(directSubmitData, 0, decodeLen);
}
else {
bb.length = decodeLen;
decodedUnits.addPopulatedObject(bb);
}
}
else if (directSubmitRenderer == null) {
decodedUnits.freePopulatedObject(bb);
}
}
public void decodeInputData(RtpPacket packet)
{
short seq = packet.getRtpSequenceNumber();
// Toss out the current NAL if we receive a packet that is
// out of sequence
if (lastSequenceNumber != 0 &&
(short)(lastSequenceNumber + 1) != seq)
{
LimeLog.warning("Received OOS audio data (expected "+(lastSequenceNumber + 1)+", got "+seq+")");
// Only tell the decoder if we got packets ahead of what we expected
// If the packet is behind the current sequence number, drop it
if (!SequenceHelper.isBeforeSigned(seq, (short)(lastSequenceNumber + 1), false)) {
decodeData(null, 0, 0);
}
else {
return;
}
}
lastSequenceNumber = seq;
// This is all the depacketizing we need to do
packet.initializePayloadDescriptor(cachedDesc);
decodeData(cachedDesc.data, cachedDesc.offset, cachedDesc.length);
}
public ByteBufferDescriptor getNextDecodedData() throws InterruptedException {
return decodedUnits.takePopulatedObject();
}
public void freeDecodedData(ByteBufferDescriptor data) {
decodedUnits.freePopulatedObject(data);
}
}

File: com/limelight/nvstream/av/audio/AudioStream.java (deleted)

@ -1,274 +0,0 @@
package com.limelight.nvstream.av.audio;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.util.LinkedList;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.RtpPacket;
import com.limelight.nvstream.av.RtpReorderQueue;
public class AudioStream {
private static final int RTP_PORT = 48000;
private static final int SAMPLE_RATE = 48000;
private static final int SHORTS_PER_CHANNEL = 240;
private static final int RTP_RECV_BUFFER = 64 * 1024;
private static final int MAX_PACKET_SIZE = 250;
private DatagramSocket rtp;
private AudioDepacketizer depacketizer;
private LinkedList<Thread> threads = new LinkedList<Thread>();
private boolean aborting = false;
private ConnectionContext context;
private AudioRenderer streamListener;
public AudioStream(ConnectionContext context, AudioRenderer streamListener)
{
this.context = context;
this.streamListener = streamListener;
}
public void abort()
{
if (aborting) {
return;
}
aborting = true;
for (Thread t : threads) {
t.interrupt();
}
// Close the socket to interrupt the receive thread
if (rtp != null) {
rtp.close();
}
// Wait for threads to terminate
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException e) { }
}
streamListener.streamClosing();
threads.clear();
}
public boolean startAudioStream() throws SocketException
{
setupRtpSession();
if (!setupAudio()) {
abort();
return false;
}
startReceiveThread();
if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) {
startDecoderThread();
}
startUdpPingThread();
return true;
}
private void setupRtpSession() throws SocketException
{
rtp = new DatagramSocket();
rtp.setReceiveBufferSize(RTP_RECV_BUFFER);
}
private static final int[] STREAMS_2 = new int[] {1, 1};
private static final int[] STREAMS_5_1 = new int[] {4, 2};
private static final byte[] MAPPING_2 = new byte[] {0, 1};
private static final byte[] MAPPING_5_1 = new byte[] {0, 4, 1, 5, 2, 3};
private boolean setupAudio()
{
int err;
int channels = context.streamConfig.getAudioChannelCount();
byte[] mapping;
int[] streams;
if (channels == 2) {
mapping = MAPPING_2;
streams = STREAMS_2;
}
else if (channels == 6) {
mapping = MAPPING_5_1;
streams = STREAMS_5_1;
}
else {
throw new IllegalStateException("Unsupported surround configuration");
}
err = OpusDecoder.init(SAMPLE_RATE, SHORTS_PER_CHANNEL, channels,
streams[0], streams[1], mapping);
if (err != 0) {
throw new IllegalStateException("Opus decoder failed to initialize: "+err);
}
if (!streamListener.streamInitialized(context.streamConfig.getAudioChannelCount(),
context.streamConfig.getAudioChannelMask(),
context.streamConfig.getAudioChannelCount()*SHORTS_PER_CHANNEL,
SAMPLE_RATE)) {
return false;
}
if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) {
depacketizer = new AudioDepacketizer(streamListener, context.streamConfig.getAudioChannelCount()*SHORTS_PER_CHANNEL);
}
else {
depacketizer = new AudioDepacketizer(null, context.streamConfig.getAudioChannelCount()*SHORTS_PER_CHANNEL);
}
return true;
}
private void startDecoderThread()
{
// Decoder thread
Thread t = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
ByteBufferDescriptor samples;
try {
samples = depacketizer.getNextDecodedData();
} catch (InterruptedException e) {
context.connListener.connectionTerminated(e);
return;
}
streamListener.playDecodedAudio(samples.data, samples.offset, samples.length);
depacketizer.freeDecodedData(samples);
}
}
};
threads.add(t);
t.setName("Audio - Player");
t.setPriority(Thread.NORM_PRIORITY + 2);
t.start();
}
private void startReceiveThread()
{
// Receive thread
Thread t = new Thread() {
@Override
public void run() {
byte[] buffer = new byte[MAX_PACKET_SIZE];
DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
RtpPacket queuedPacket, rtpPacket = new RtpPacket(buffer);
RtpReorderQueue rtpQueue = new RtpReorderQueue();
RtpReorderQueue.RtpQueueStatus queueStatus;
while (!isInterrupted())
{
try {
rtp.receive(packet);
// DecodeInputData() doesn't hold onto the buffer so we are free to reuse it
rtpPacket.initializeWithLength(packet.getLength());
// Throw away non-audio packets before queuing
if (rtpPacket.getPacketType() != 97) {
// Only type 97 is audio
packet.setLength(MAX_PACKET_SIZE);
continue;
}
queueStatus = rtpQueue.addPacket(rtpPacket);
if (queueStatus == RtpReorderQueue.RtpQueueStatus.HANDLE_IMMEDIATELY) {
// Send directly to the depacketizer
depacketizer.decodeInputData(rtpPacket);
packet.setLength(MAX_PACKET_SIZE);
}
else {
if (queueStatus != RtpReorderQueue.RtpQueueStatus.REJECTED) {
// The queue consumed our packet, so we must allocate a new one
buffer = new byte[MAX_PACKET_SIZE];
packet = new DatagramPacket(buffer, buffer.length);
rtpPacket = new RtpPacket(buffer);
}
else {
packet.setLength(MAX_PACKET_SIZE);
}
// If packets are ready, pull them and send them to the depacketizer
if (queueStatus == RtpReorderQueue.RtpQueueStatus.QUEUED_PACKETS_READY) {
while ((queuedPacket = (RtpPacket) rtpQueue.getQueuedPacket()) != null) {
depacketizer.decodeInputData(queuedPacket);
queuedPacket.dereferencePacket();
}
}
}
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
};
threads.add(t);
t.setName("Audio - Receive");
t.setPriority(Thread.NORM_PRIORITY + 1);
t.start();
}
private void startUdpPingThread()
{
// Ping thread
Thread t = new Thread() {
@Override
public void run() {
// PING in ASCII
final byte[] pingPacketData = new byte[] {0x50, 0x49, 0x4E, 0x47};
DatagramPacket pingPacket = new DatagramPacket(pingPacketData, pingPacketData.length);
pingPacket.setSocketAddress(new InetSocketAddress(context.serverAddress, RTP_PORT));
// Send PING every 500 ms
while (!isInterrupted())
{
try {
rtp.send(pingPacket);
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
try {
Thread.sleep(500);
} catch (InterruptedException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
};
threads.add(t);
t.setPriority(Thread.MIN_PRIORITY);
t.setName("Audio - Ping");
t.start();
}
}
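For context: setupAudio() above picks between the depacketizer's two modes based on the renderer's CAPABILITY_DIRECT_SUBMIT flag. A condensed restatement of that decision (`renderer` is the AudioRenderer handed to AudioStream; 240 shorts per channel and stereo are the values this file uses):

```java
AudioDepacketizer depacketizer;
int bufferSizeShorts = 2 * 240; // channel count * SHORTS_PER_CHANNEL
if ((renderer.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) {
    // Direct submit: decoded audio is played inline on the receive thread
    depacketizer = new AudioDepacketizer(renderer, bufferSizeShorts);
}
else {
    // Queued: the dedicated "Audio - Player" thread drains decoded buffers
    depacketizer = new AudioDepacketizer(null, bufferSizeShorts);
}
```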

File: com/limelight/nvstream/av/audio/OpusDecoder.java (deleted)

@ -1,11 +0,0 @@
package com.limelight.nvstream.av.audio;
public class OpusDecoder {
static {
System.loadLibrary("nv_opus_dec");
}
public static native int init(int sampleRate, int samplesPerChannel, int channelCount, int streams, int coupledStreams, byte[] mapping);
public static native void destroy();
public static native int decode(byte[] indata, int inoff, int inlen, byte[] outpcmdata);
}

File: com/limelight/nvstream/av/buffer/AbstractPopulatedBufferList.java (deleted)

@ -1,41 +0,0 @@
package com.limelight.nvstream.av.buffer;
public abstract class AbstractPopulatedBufferList<T> {
protected final int maxQueueSize;
protected final BufferFactory factory;
public AbstractPopulatedBufferList(int maxQueueSize, BufferFactory factory) {
this.factory = factory;
this.maxQueueSize = maxQueueSize;
}
public abstract int getPopulatedCount();
public abstract int getFreeCount();
public abstract T pollFreeObject();
public abstract void addPopulatedObject(T object);
public abstract void freePopulatedObject(T object);
public void clearPopulatedObjects() {
T object;
while ((object = pollPopulatedObject()) != null) {
freePopulatedObject(object);
}
}
public abstract T pollPopulatedObject();
public abstract T peekPopulatedObject();
public T takePopulatedObject() throws InterruptedException {
throw new UnsupportedOperationException("Blocking is unsupported on this buffer list");
}
public static interface BufferFactory {
public Object createFreeBuffer();
public void cleanupObject(Object o);
}
}
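For context: these lists implement a fixed-size buffer pool with a free side and a populated side, which is how AudioDepacketizer and VideoDepacketizer avoid per-packet allocation. A fragment sketching the round trip, using the AtomicPopulatedBufferList subclass shown next (buffer size and payload are illustrative; takePopulatedObject() can throw InterruptedException, omitted here):

```java
AbstractPopulatedBufferList<ByteBufferDescriptor> pool =
        new AtomicPopulatedBufferList<ByteBufferDescriptor>(15,
                new AbstractPopulatedBufferList.BufferFactory() {
            public Object createFreeBuffer() {
                return new ByteBufferDescriptor(new byte[480], 0, 480);
            }
            public void cleanupObject(Object o) {
                // Nothing to release for plain byte arrays
            }
        });

// Producer: take a free buffer, fill it, publish it
ByteBufferDescriptor buf = pool.pollFreeObject();
if (buf != null) {
    buf.length = 480; // pretend 480 bytes were decoded into buf.data
    pool.addPopulatedObject(buf);
}

// Consumer: block for the next populated buffer, then recycle it
ByteBufferDescriptor ready = pool.takePopulatedObject();
pool.freePopulatedObject(ready);
```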

File: com/limelight/nvstream/av/buffer/AtomicPopulatedBufferList.java (deleted)

@ -1,61 +0,0 @@
package com.limelight.nvstream.av.buffer;
import java.util.concurrent.ArrayBlockingQueue;
public class AtomicPopulatedBufferList<T> extends AbstractPopulatedBufferList<T> {
private final ArrayBlockingQueue<T> populatedList;
private final ArrayBlockingQueue<T> freeList;
@SuppressWarnings("unchecked")
public AtomicPopulatedBufferList(int maxQueueSize, BufferFactory factory) {
super(maxQueueSize, factory);
this.populatedList = new ArrayBlockingQueue<T>(maxQueueSize, false);
this.freeList = new ArrayBlockingQueue<T>(maxQueueSize, false);
for (int i = 0; i < maxQueueSize; i++) {
freeList.add((T) factory.createFreeBuffer());
}
}
@Override
public int getPopulatedCount() {
return populatedList.size();
}
@Override
public int getFreeCount() {
return freeList.size();
}
@Override
public T pollFreeObject() {
return freeList.poll();
}
@Override
public void addPopulatedObject(T object) {
populatedList.add(object);
}
@Override
public void freePopulatedObject(T object) {
factory.cleanupObject(object);
freeList.add(object);
}
@Override
public T pollPopulatedObject() {
return populatedList.poll();
}
@Override
public T peekPopulatedObject() {
return populatedList.peek();
}
@Override
public T takePopulatedObject() throws InterruptedException {
return populatedList.take();
}
}

File: com/limelight/nvstream/av/buffer/UnsynchronizedPopulatedBufferList.java (deleted)

@ -1,68 +0,0 @@
package com.limelight.nvstream.av.buffer;
import java.util.ArrayList;
public class UnsynchronizedPopulatedBufferList<T> extends AbstractPopulatedBufferList<T> {
private final ArrayList<T> populatedList;
private final ArrayList<T> freeList;
@SuppressWarnings("unchecked")
public UnsynchronizedPopulatedBufferList(int maxQueueSize, BufferFactory factory) {
super(maxQueueSize, factory);
this.populatedList = new ArrayList<T>(maxQueueSize);
this.freeList = new ArrayList<T>(maxQueueSize);
for (int i = 0; i < maxQueueSize; i++) {
freeList.add((T) factory.createFreeBuffer());
}
}
@Override
public int getPopulatedCount() {
return populatedList.size();
}
@Override
public int getFreeCount() {
return freeList.size();
}
@Override
public T pollFreeObject() {
if (freeList.isEmpty()) {
return null;
}
return freeList.remove(0);
}
@Override
public void addPopulatedObject(T object) {
populatedList.add(object);
}
@Override
public void freePopulatedObject(T object) {
factory.cleanupObject(object);
freeList.add(object);
}
@Override
public T pollPopulatedObject() {
if (populatedList.isEmpty()) {
return null;
}
return populatedList.remove(0);
}
@Override
public T peekPopulatedObject() {
if (populatedList.isEmpty()) {
return null;
}
return populatedList.get(0);
}
}

File: com/limelight/nvstream/av/video/VideoDecoderRenderer.java

@ -28,21 +28,9 @@ public abstract class VideoDecoderRenderer {
         return 0;
     }
 
-    public int getAverageEndToEndLatency() {
-        return 0;
-    }
-
-    public int getAverageDecoderLatency() {
-        return 0;
-    }
-
-    public void directSubmitDecodeUnit(DecodeUnit du) {
-        throw new UnsupportedOperationException("CAPABILITY_DIRECT_SUBMIT requires overriding directSubmitDecodeUnit()");
-    }
-
-    public abstract boolean setup(VideoFormat format, int width, int height, int redrawRate, Object renderTarget, int drFlags);
+    public abstract boolean setup(VideoFormat format, int width, int height, int redrawRate);
 
-    public abstract boolean start(VideoDepacketizer depacketizer);
+    public abstract boolean start();
 
     public abstract void stop();
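A minimal sketch of an implementation against the slimmed-down abstract API. Only setup()/start()/stop() are visible in this hunk; the real class may declare further members (e.g. release(), which the old VideoStream called), so this is illustrative rather than guaranteed to compile against the full class:

```java
public class StubVideoRenderer extends VideoDecoderRenderer {
    @Override
    public boolean setup(VideoFormat format, int width, int height, int redrawRate) {
        // A real renderer would configure its decoder for the negotiated mode here
        return true;
    }

    @Override
    public boolean start() {
        // No depacketizer parameter anymore: frame delivery has moved out of Java
        return true;
    }

    @Override
    public void stop() {
    }

    public void release() {
        // Assumed member of the base class; free decoder resources here
    }
}
```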

File: com/limelight/nvstream/av/video/VideoDepacketizer.java (deleted)

@ -1,656 +0,0 @@
package com.limelight.nvstream.av.video;
import com.limelight.LimeLog;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.ConnectionStatusListener;
import com.limelight.nvstream.av.SequenceHelper;
import com.limelight.nvstream.av.buffer.AbstractPopulatedBufferList;
import com.limelight.nvstream.av.buffer.AtomicPopulatedBufferList;
import com.limelight.nvstream.av.buffer.UnsynchronizedPopulatedBufferList;
import com.limelight.utils.TimeHelper;
public class VideoDepacketizer {
// Current frame state
private int frameDataLength = 0;
private ByteBufferDescriptor frameDataChainHead;
private ByteBufferDescriptor frameDataChainTail;
private VideoPacket backingPacketHead;
private VideoPacket backingPacketTail;
// Sequencing state
private int lastPacketInStream = -1;
private int nextFrameNumber = 1;
private int startFrameNumber = 0;
private boolean waitingForNextSuccessfulFrame;
private boolean waitingForIdrFrame = true;
private long frameStartTime;
private boolean decodingFrame;
private boolean strictIdrFrameWait;
// Cached objects
private ByteBufferDescriptor cachedReassemblyDesc = new ByteBufferDescriptor(null, 0, 0);
private ByteBufferDescriptor cachedSpecialDesc = new ByteBufferDescriptor(null, 0, 0);
private ConnectionStatusListener controlListener;
private final int nominalPacketDataLength;
private static final int CONSECUTIVE_DROP_LIMIT = 120;
private int consecutiveFrameDrops = 0;
private static final int DU_LIMIT = 15;
private AbstractPopulatedBufferList<DecodeUnit> decodedUnits;
private final int frameHeaderOffset;
public VideoDepacketizer(ConnectionContext context, ConnectionStatusListener controlListener, int nominalPacketSize)
{
this.controlListener = controlListener;
this.nominalPacketDataLength = nominalPacketSize - VideoPacket.HEADER_SIZE;
if ((context.serverAppVersion[0] > 7) ||
(context.serverAppVersion[0] == 7 && context.serverAppVersion[1] > 1) ||
(context.serverAppVersion[0] == 7 && context.serverAppVersion[1] == 1 && context.serverAppVersion[2] >= 350)) {
// >= 7.1.350 should use the 8 byte header again
frameHeaderOffset = 8;
}
else if ((context.serverAppVersion[0] > 7) ||
(context.serverAppVersion[0] == 7 && context.serverAppVersion[1] > 1) ||
(context.serverAppVersion[0] == 7 && context.serverAppVersion[1] == 1 && context.serverAppVersion[2] >= 320)) {
// [7.1.320, 7.1.350) should use the 12 byte frame header
frameHeaderOffset = 12;
}
else if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
// [5.x, 7.1.320) should use the 8 byte header
frameHeaderOffset = 8;
}
else {
frameHeaderOffset = 0;
}
boolean unsynchronized;
if (context.videoDecoderRenderer != null) {
int videoCaps = context.videoDecoderRenderer.getCapabilities();
this.strictIdrFrameWait = (videoCaps & VideoDecoderRenderer.CAPABILITY_REFERENCE_FRAME_INVALIDATION) == 0;
unsynchronized = (videoCaps & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0;
}
else {
// If there's no renderer, it doesn't matter if we synchronize or wait for IDRs
this.strictIdrFrameWait = false;
unsynchronized = true;
}
AbstractPopulatedBufferList.BufferFactory factory = new AbstractPopulatedBufferList.BufferFactory() {
public Object createFreeBuffer() {
return new DecodeUnit();
}
public void cleanupObject(Object o) {
DecodeUnit du = (DecodeUnit) o;
// Disassociate video packets from this DU
VideoPacket pkt;
while ((pkt = du.removeBackingPacketHead()) != null) {
pkt.dereferencePacket();
}
}
};
if (unsynchronized) {
decodedUnits = new UnsynchronizedPopulatedBufferList<DecodeUnit>(DU_LIMIT, factory);
}
else {
decodedUnits = new AtomicPopulatedBufferList<DecodeUnit>(DU_LIMIT, factory);
}
}
private void dropFrameState()
{
// We'll need an IDR frame now if we're in strict mode
if (strictIdrFrameWait) {
waitingForIdrFrame = true;
}
// Count the number of consecutive frames dropped
consecutiveFrameDrops++;
// If we reach our limit, immediately request an IDR frame
// and reset
if (consecutiveFrameDrops == CONSECUTIVE_DROP_LIMIT) {
LimeLog.warning("Reached consecutive drop limit");
// Restart the count
consecutiveFrameDrops = 0;
// Request an IDR frame (0 tuple always generates an IDR frame)
controlListener.connectionDetectedFrameLoss(0, 0);
}
cleanupFrameState();
}
private void cleanupFrameState()
{
backingPacketTail = null;
while (backingPacketHead != null) {
backingPacketHead.dereferencePacket();
backingPacketHead = backingPacketHead.nextPacket;
}
frameDataChainHead = frameDataChainTail = null;
frameDataLength = 0;
}
private static boolean isReferencePictureNalu(byte nalType) {
switch (nalType) {
case 0x20:
case 0x22:
case 0x24:
case 0x26:
case 0x28:
case 0x2A:
// H265
return true;
case 0x65:
// H264
return true;
default:
return false;
}
}
private void reassembleFrame(int frameNumber)
{
// This is the start of a new frame
if (frameDataChainHead != null) {
ByteBufferDescriptor firstBuffer = frameDataChainHead;
int flags = 0;
if (NAL.getSpecialSequenceDescriptor(firstBuffer, cachedSpecialDesc) && NAL.isAnnexBFrameStart(cachedSpecialDesc)) {
switch (cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length]) {
// H265
case 0x40: // VPS
case 0x42: // SPS
case 0x44: // PPS
flags |= DecodeUnit.DU_FLAG_CODEC_CONFIG;
break;
// H264
case 0x67: // SPS
case 0x68: // PPS
flags |= DecodeUnit.DU_FLAG_CODEC_CONFIG;
break;
}
if (isReferencePictureNalu(cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length])) {
flags |= DecodeUnit.DU_FLAG_SYNC_FRAME;
}
}
// Construct the video decode unit
DecodeUnit du = decodedUnits.pollFreeObject();
if (du == null) {
LimeLog.warning("Video decoder is too slow! Forced to drop decode units");
// Invalidate all frames from the start of the DU queue
// (0 tuple always generates an IDR frame)
controlListener.connectionSinkTooSlow(0, 0);
waitingForIdrFrame = true;
// Remove existing frames
decodedUnits.clearPopulatedObjects();
// Clear frame state and wait for an IDR
dropFrameState();
return;
}
// Initialize the free DU
du.initialize(frameDataChainHead, frameDataLength, frameNumber,
frameStartTime, flags, backingPacketHead);
// Packets now owned by the DU
backingPacketTail = backingPacketHead = null;
controlListener.connectionReceivedCompleteFrame(frameNumber);
// Submit the DU to the consumer
decodedUnits.addPopulatedObject(du);
// Clear old state
cleanupFrameState();
// Clear frame drops
consecutiveFrameDrops = 0;
}
}
private void chainBufferToCurrentFrame(ByteBufferDescriptor desc) {
desc.nextDescriptor = null;
// Chain the packet
if (frameDataChainTail != null) {
frameDataChainTail.nextDescriptor = desc;
frameDataChainTail = desc;
}
else {
frameDataChainHead = frameDataChainTail = desc;
}
frameDataLength += desc.length;
}
private void chainPacketToCurrentFrame(VideoPacket packet) {
packet.referencePacket();
packet.nextPacket = null;
// Chain the packet
if (backingPacketTail != null) {
backingPacketTail.nextPacket = packet;
backingPacketTail = packet;
}
else {
backingPacketHead = backingPacketTail = packet;
}
}
private void addInputDataSlow(VideoPacket packet, ByteBufferDescriptor location)
{
boolean isDecodingVideoData = false;
while (location.length != 0)
{
// Remember the start of the NAL data in this packet
int start = location.offset;
// Check for a special sequence
if (NAL.getSpecialSequenceDescriptor(location, cachedSpecialDesc))
{
if (NAL.isAnnexBStartSequence(cachedSpecialDesc))
{
// We're decoding video data now
isDecodingVideoData = true;
// Check if it's the end of the last frame
if (NAL.isAnnexBFrameStart(cachedSpecialDesc))
{
// Update the global state that we're decoding a new frame
this.decodingFrame = true;
// Reassemble any pending NAL
reassembleFrame(packet.getFrameIndex());
// Reload cachedSpecialDesc after reassembleFrame overwrote it
NAL.getSpecialSequenceDescriptor(location, cachedSpecialDesc);
if (isReferencePictureNalu(cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length])) {
// This is the NALU code for I-frame data
waitingForIdrFrame = false;
// Cancel any pending IDR frame request
waitingForNextSuccessfulFrame = false;
}
}
// Skip the start sequence
location.length -= cachedSpecialDesc.length;
location.offset += cachedSpecialDesc.length;
}
else
{
// Check if this is padding after a full video frame
if (isDecodingVideoData && NAL.isPadding(cachedSpecialDesc)) {
// The decode unit is complete
reassembleFrame(packet.getFrameIndex());
}
// Not decoding video
isDecodingVideoData = false;
// Just skip this byte
location.length--;
location.offset++;
}
}
// Move to the next special sequence
while (location.length != 0)
{
// Catch the easy case first where byte 0 != 0x00
if (location.data[location.offset] == 0x00)
{
// Check if this should end the current NAL
if (NAL.getSpecialSequenceDescriptor(location, cachedSpecialDesc))
{
// Only stop if we're decoding something or this
// isn't padding
if (isDecodingVideoData || !NAL.isPadding(cachedSpecialDesc))
{
break;
}
}
}
// This byte is part of the NAL data
location.offset++;
location.length--;
}
if (isDecodingVideoData && decodingFrame)
{
// The slow path may result in multiple decode units per packet.
// The VideoPacket objects only support being in 1 DU list, so we'll
// copy this data into a new array rather than reference the packet, if
// this NALU ends before the end of the frame. Only copying if this doesn't
// go to the end of the frame means we'll be only copying the SPS and PPS which
// are quite small, while the actual I-frame data is referenced via the packet.
if (location.length != 0) {
// Copy the packet data into a new array
byte[] dataCopy = new byte[location.offset-start];
System.arraycopy(location.data, start, dataCopy, 0, dataCopy.length);
// Chain a descriptor referencing the copied data
chainBufferToCurrentFrame(new ByteBufferDescriptor(dataCopy, 0, dataCopy.length));
}
else {
// Chain this packet to the current frame
chainPacketToCurrentFrame(packet);
// Add a buffer descriptor describing the NAL data in this packet
chainBufferToCurrentFrame(new ByteBufferDescriptor(location.data, start, location.offset-start));
}
}
}
}
private void addInputDataFast(VideoPacket packet, ByteBufferDescriptor location, boolean firstPacket)
{
if (firstPacket) {
// Setup state for the new frame
frameStartTime = TimeHelper.getMonotonicMillis();
}
// Add the payload data to the chain
chainBufferToCurrentFrame(new ByteBufferDescriptor(location));
// The receive thread can't use this until we're done with it
chainPacketToCurrentFrame(packet);
}
private static boolean isFirstPacket(int flags) {
// Clear the picture data flag
flags &= ~VideoPacket.FLAG_CONTAINS_PIC_DATA;
// Check if it's just the start or both start and end of a frame
return (flags == (VideoPacket.FLAG_SOF | VideoPacket.FLAG_EOF) ||
flags == VideoPacket.FLAG_SOF);
}
public void addInputData(VideoPacket packet)
{
// Load our reassembly descriptor
packet.initializePayloadDescriptor(cachedReassemblyDesc);
int flags = packet.getFlags();
int frameIndex = packet.getFrameIndex();
boolean firstPacket = isFirstPacket(flags);
// Drop duplicates or re-ordered packets
int streamPacketIndex = packet.getStreamPacketIndex();
if (SequenceHelper.isBeforeSigned((short)streamPacketIndex, (short)(lastPacketInStream + 1), false)) {
return;
}
// Drop packets from a previously completed frame
if (SequenceHelper.isBeforeSigned(frameIndex, nextFrameNumber, false)) {
return;
}
// Notify the listener of the latest frame we've seen from the PC
controlListener.connectionSawFrame(frameIndex);
// Look for a frame start before receiving a frame end
if (firstPacket && decodingFrame)
{
LimeLog.warning("Network dropped end of a frame");
nextFrameNumber = frameIndex;
// Unexpected start of next frame before terminating the last
waitingForNextSuccessfulFrame = true;
// Clear the old state and wait for an IDR
dropFrameState();
}
// Look for a non-frame start before a frame start
else if (!firstPacket && !decodingFrame) {
// Check if this looks like a real frame
if (flags == VideoPacket.FLAG_CONTAINS_PIC_DATA ||
flags == VideoPacket.FLAG_EOF ||
cachedReassemblyDesc.length < nominalPacketDataLength)
{
LimeLog.warning("Network dropped beginning of a frame");
nextFrameNumber = frameIndex + 1;
waitingForNextSuccessfulFrame = true;
dropFrameState();
decodingFrame = false;
return;
}
else {
// FEC data
return;
}
}
// Check sequencing of this frame to ensure we didn't
// miss one in between
else if (firstPacket) {
// Make sure this is the next consecutive frame
if (SequenceHelper.isBeforeSigned(nextFrameNumber, frameIndex, true)) {
LimeLog.warning("Network dropped an entire frame");
nextFrameNumber = frameIndex;
// Wait until an IDR frame comes
waitingForNextSuccessfulFrame = true;
dropFrameState();
}
else if (nextFrameNumber != frameIndex) {
// Duplicate packet or FEC dup
decodingFrame = false;
return;
}
// We're now decoding a frame
decodingFrame = true;
}
// If it's not the first packet of a frame
// we need to drop it if the stream packet index
// doesn't match
if (!firstPacket && decodingFrame) {
if (streamPacketIndex != (int)(lastPacketInStream + 1)) {
LimeLog.warning("Network dropped middle of a frame");
nextFrameNumber = frameIndex + 1;
waitingForNextSuccessfulFrame = true;
dropFrameState();
decodingFrame = false;
return;
}
}
// Notify the server of any packet losses
if (streamPacketIndex != (int)(lastPacketInStream + 1)) {
// Packets were lost so report this to the server
controlListener.connectionLostPackets(lastPacketInStream, streamPacketIndex);
}
lastPacketInStream = streamPacketIndex;
// If this is the first packet, skip the frame header (if one exists)
if (firstPacket) {
cachedReassemblyDesc.offset += frameHeaderOffset;
cachedReassemblyDesc.length -= frameHeaderOffset;
}
if (firstPacket && isIdrFrameStart(cachedReassemblyDesc))
{
// The slow path doesn't update the frame start time by itself
frameStartTime = TimeHelper.getMonotonicMillis();
// SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
addInputDataSlow(packet, cachedReassemblyDesc);
}
else
{
// Everything else can take the fast path
addInputDataFast(packet, cachedReassemblyDesc, firstPacket);
}
if ((flags & VideoPacket.FLAG_EOF) != 0) {
// Move on to the next frame
decodingFrame = false;
nextFrameNumber = frameIndex + 1;
// If waiting for next successful frame and we got here
// with an end flag, we can send a message to the server
if (waitingForNextSuccessfulFrame) {
// This is the next successful frame after a loss event
controlListener.connectionDetectedFrameLoss(startFrameNumber, nextFrameNumber - 1);
waitingForNextSuccessfulFrame = false;
}
// If we need an IDR frame first, then drop this frame
if (waitingForIdrFrame) {
LimeLog.warning("Waiting for IDR frame");
dropFrameState();
return;
}
reassembleFrame(frameIndex);
startFrameNumber = nextFrameNumber;
}
}
private boolean isIdrFrameStart(ByteBufferDescriptor desc) {
return NAL.getSpecialSequenceDescriptor(desc, cachedSpecialDesc) &&
NAL.isAnnexBFrameStart(cachedSpecialDesc) &&
(cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length] == 0x67 || // H264 SPS
cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length] == 0x40); // H265 VPS
}
public DecodeUnit takeNextDecodeUnit() throws InterruptedException
{
return decodedUnits.takePopulatedObject();
}
public DecodeUnit pollNextDecodeUnit()
{
return decodedUnits.pollPopulatedObject();
}
public void freeDecodeUnit(DecodeUnit du)
{
decodedUnits.freePopulatedObject(du);
}
}
class NAL {
// This assumes that the buffer passed in is already a special sequence
public static boolean isAnnexBStartSequence(ByteBufferDescriptor specialSeq)
{
// The start sequence is 00 00 01 or 00 00 00 01
return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x01);
}
// This assumes that the buffer passed in is already a special sequence
public static boolean isAnnexBFrameStart(ByteBufferDescriptor specialSeq)
{
if (specialSeq.length != 4)
return false;
// The frame start sequence is 00 00 00 01
return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x01);
}
// This assumes that the buffer passed in is already a special sequence
public static boolean isPadding(ByteBufferDescriptor specialSeq)
{
// The padding sequence is 00 00 00
return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x00);
}
// Returns a buffer descriptor describing the start sequence
public static boolean getSpecialSequenceDescriptor(ByteBufferDescriptor buffer, ByteBufferDescriptor outputDesc)
{
// NAL start sequence is 00 00 00 01 or 00 00 01
if (buffer.length < 3)
return false;
// 00 00 is magic
if (buffer.data[buffer.offset] == 0x00 &&
buffer.data[buffer.offset+1] == 0x00)
{
// Another 00 could be the end of the special sequence
// 00 00 00 or the middle of 00 00 00 01
if (buffer.data[buffer.offset+2] == 0x00)
{
if (buffer.length >= 4 &&
buffer.data[buffer.offset+3] == 0x01)
{
// It's the Annex B start sequence 00 00 00 01
outputDesc.reinitialize(buffer.data, buffer.offset, 4);
}
else
{
// It's 00 00 00
outputDesc.reinitialize(buffer.data, buffer.offset, 3);
}
return true;
}
else if (buffer.data[buffer.offset+2] == 0x01 ||
buffer.data[buffer.offset+2] == 0x02)
{
// These are easy: 00 00 01 or 00 00 02
outputDesc.reinitialize(buffer.data, buffer.offset, 3);
return true;
}
else if (buffer.data[buffer.offset+2] == 0x03)
{
// 00 00 03 is special because it's a subsequence of the
// NAL wrapping substitute for 00 00 00, 00 00 01, 00 00 02,
// or 00 00 03 in the RBSP sequence. We need to check the next
// byte to see whether it's 00, 01, 02, or 03 (a valid RBSP substitution)
// or whether it's something else
if (buffer.length < 4)
return false;
if (buffer.data[buffer.offset+3] >= 0x00 &&
buffer.data[buffer.offset+3] <= 0x03)
{
// It's not really a special sequence after all
return false;
}
else
{
// It's not a standard replacement so it's a special sequence
outputDesc.reinitialize(buffer.data, buffer.offset, 3);
return true;
}
}
}
return false;
}
}
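For context: the NAL helper distinguishes three "special sequences": a 3-byte start code (00 00 01), a 4-byte frame-start code (00 00 00 01), and padding (00 00 00). A quick check of those rules against hand-made byte strings; NAL is package-private, so this assumes same-package code, and the data is illustrative:

```java
ByteBufferDescriptor out = new ByteBufferDescriptor(null, 0, 0);

byte[] frameStart = {0x00, 0x00, 0x00, 0x01, 0x67}; // 4-byte start code + H264 SPS
ByteBufferDescriptor d1 = new ByteBufferDescriptor(frameStart, 0, frameStart.length);
System.out.println(NAL.getSpecialSequenceDescriptor(d1, out)); // true
System.out.println(NAL.isAnnexBFrameStart(out));               // true: length 4

byte[] nalStart = {0x00, 0x00, 0x01, 0x65};                    // 3-byte start code
ByteBufferDescriptor d2 = new ByteBufferDescriptor(nalStart, 0, nalStart.length);
NAL.getSpecialSequenceDescriptor(d2, out);
System.out.println(NAL.isAnnexBStartSequence(out));            // true
System.out.println(NAL.isAnnexBFrameStart(out));               // false: length 3
```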

File: com/limelight/nvstream/av/video/VideoPacket.java (deleted)

@ -1,144 +0,0 @@
package com.limelight.nvstream.av.video;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.atomic.AtomicInteger;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.RtpPacket;
import com.limelight.nvstream.av.RtpPacketFields;
public class VideoPacket implements RtpPacketFields {
private final ByteBufferDescriptor buffer;
private final ByteBuffer byteBuffer;
private final boolean useAtomicRefCount;
private int dataOffset;
private int frameIndex;
private int flags;
private int streamPacketIndex;
private short rtpSequenceNumber;
private AtomicInteger duAtomicRefCount = new AtomicInteger();
private int duRefCount;
// Only for use in DecodeUnit for packet queuing
public VideoPacket nextPacket;
public static final int FLAG_CONTAINS_PIC_DATA = 0x1;
public static final int FLAG_EOF = 0x2;
public static final int FLAG_SOF = 0x4;
public static final int HEADER_SIZE = 16;
public VideoPacket(byte[] buffer, boolean useAtomicRefCount)
{
this.buffer = new ByteBufferDescriptor(buffer, 0, buffer.length);
this.byteBuffer = ByteBuffer.wrap(buffer).order(ByteOrder.LITTLE_ENDIAN);
this.useAtomicRefCount = useAtomicRefCount;
}
public void initializeWithLengthNoRtpHeader(int length)
{
// Back to beginning
byteBuffer.rewind();
// No sequence number field is present in these packets
// Read the video header fields
streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
frameIndex = byteBuffer.getInt();
flags = byteBuffer.getInt() & 0xFF;
// Data offset without the RTP header
dataOffset = HEADER_SIZE;
// Update descriptor length
buffer.length = length;
}
public void initializeWithLength(int length)
{
// Read the RTP sequence number field (big endian)
byteBuffer.position(2);
rtpSequenceNumber = byteBuffer.getShort();
rtpSequenceNumber = (short)(((rtpSequenceNumber << 8) & 0xFF00) | (((rtpSequenceNumber >> 8) & 0x00FF)));
// Skip the rest of the RTP header
byteBuffer.position(RtpPacket.MAX_HEADER_SIZE);
// Read the video header fields
streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
frameIndex = byteBuffer.getInt();
flags = byteBuffer.getInt() & 0xFF;
// Data offset includes the RTP header
dataOffset = RtpPacket.MAX_HEADER_SIZE + HEADER_SIZE;
// Update descriptor length
buffer.length = length;
}
public int getFlags()
{
return flags;
}
public int getFrameIndex()
{
return frameIndex;
}
public int getStreamPacketIndex()
{
return streamPacketIndex;
}
public byte[] getBuffer()
{
return buffer.data;
}
public void initializePayloadDescriptor(ByteBufferDescriptor bb)
{
bb.reinitialize(buffer.data, buffer.offset+dataOffset, buffer.length-dataOffset);
}
public byte getPacketType() {
// No consumers use this field so we don't look it up
return -1;
}
public short getRtpSequenceNumber() {
return rtpSequenceNumber;
}
public int referencePacket() {
if (useAtomicRefCount) {
return duAtomicRefCount.incrementAndGet();
}
else {
return ++duRefCount;
}
}
public int dereferencePacket() {
if (useAtomicRefCount) {
return duAtomicRefCount.decrementAndGet();
}
else {
return --duRefCount;
}
}
public int getRefCount() {
if (useAtomicRefCount) {
return duAtomicRefCount.get();
}
else {
return duRefCount;
}
}
}
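For context: initializeWithLength() reads the big-endian RTP sequence field through a ByteBuffer that was wrapped little-endian for the video header, then swaps the bytes by hand. That shift-and-mask swap is equivalent to Short.reverseBytes(), as a quick check with an arbitrary value shows:

```java
short raw = (short) 0x3412; // sequence field as read little-endian
short swapped = (short) (((raw << 8) & 0xFF00) | ((raw >> 8) & 0x00FF));
System.out.println(swapped == Short.reverseBytes(raw));     // true
System.out.println(Integer.toHexString(swapped & 0xFFFF));  // 1234
```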

File: com/limelight/nvstream/av/video/VideoStream.java (deleted)

@ -1,300 +0,0 @@
package com.limelight.nvstream.av.video;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.util.LinkedList;
import com.limelight.LimeLog;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.av.ConnectionStatusListener;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.RtpPacket;
import com.limelight.nvstream.av.RtpReorderQueue;
public class VideoStream {
private static final int RTP_PORT = 47998;
private static final int FIRST_FRAME_PORT = 47996;
private static final int FIRST_FRAME_TIMEOUT = 5000;
private static final int RTP_RECV_BUFFER = 256 * 1024;
// We can't request an IDR frame until the depacketizer knows
// that a packet was lost. This timeout bounds the time that
// the RTP queue will wait for missing/reordered packets.
private static final int MAX_RTP_QUEUE_DELAY_MS = 10;
// The ring size MUST be greater than or equal to
// the maximum number of packets in a fully
// presentable frame
private static final int VIDEO_RING_SIZE = 384;
private DatagramSocket rtp;
private Socket firstFrameSocket;
private LinkedList<Thread> threads = new LinkedList<Thread>();
private ConnectionContext context;
private ConnectionStatusListener avConnListener;
private VideoDepacketizer depacketizer;
private VideoDecoderRenderer decRend;
private boolean startedRendering;
private boolean aborting = false;
public VideoStream(ConnectionContext context, ConnectionStatusListener avConnListener)
{
this.context = context;
this.avConnListener = avConnListener;
}
public void abort()
{
if (aborting) {
return;
}
aborting = true;
// Interrupt threads
for (Thread t : threads) {
t.interrupt();
}
// Close the socket to interrupt the receive thread
if (rtp != null) {
rtp.close();
}
if (firstFrameSocket != null) {
try {
firstFrameSocket.close();
} catch (IOException e) {}
}
// Wait for threads to terminate
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException e) { }
}
if (decRend != null) {
if (startedRendering) {
decRend.stop();
}
decRend.release();
}
threads.clear();
}
private void connectFirstFrame() throws IOException
{
firstFrameSocket = new Socket();
firstFrameSocket.setSoTimeout(FIRST_FRAME_TIMEOUT);
firstFrameSocket.connect(new InetSocketAddress(context.serverAddress, FIRST_FRAME_PORT), FIRST_FRAME_TIMEOUT);
}
private void readFirstFrame() throws IOException
{
// We can actually ignore this data. It's the act of gracefully closing the socket
// that matters.
firstFrameSocket.close();
firstFrameSocket = null;
}
public void setupRtpSession() throws SocketException
{
rtp = new DatagramSocket();
rtp.setReceiveBufferSize(RTP_RECV_BUFFER);
}
public boolean setupDecoderRenderer(VideoDecoderRenderer decRend, Object renderTarget, int drFlags) {
this.decRend = decRend;
depacketizer = new VideoDepacketizer(context, avConnListener, context.streamConfig.getMaxPacketSize());
if (decRend != null) {
try {
if (!decRend.setup(context.negotiatedVideoFormat, context.negotiatedWidth,
context.negotiatedHeight, context.negotiatedFps,
renderTarget, drFlags)) {
return false;
}
if (!decRend.start(depacketizer)) {
abort();
return false;
}
startedRendering = true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
return true;
}
public boolean startVideoStream(Object renderTarget, int drFlags) throws IOException
{
// Setup the decoder and renderer
if (!setupDecoderRenderer(context.videoDecoderRenderer, renderTarget, drFlags)) {
// Nothing to cleanup here
throw new IOException("Video decoder failed to initialize. Your device may not support the selected resolution.");
}
// Open RTP sockets and start session
setupRtpSession();
if (this.decRend != null) {
// Start the receive thread early to avoid missing
// early packets that are part of the IDR frame
startReceiveThread();
}
// Open the first frame port connection on Gen 3 servers
if (context.serverGeneration == ConnectionContext.SERVER_GENERATION_3) {
connectFirstFrame();
}
// Start pinging before reading the first frame
// so GFE knows where to send UDP data
startUdpPingThread();
// Read the first frame on Gen 3 servers
if (context.serverGeneration == ConnectionContext.SERVER_GENERATION_3) {
readFirstFrame();
}
return true;
}
private void startReceiveThread()
{
// Receive thread
Thread t = new Thread() {
@Override
public void run() {
VideoPacket ring[] = new VideoPacket[VIDEO_RING_SIZE];
VideoPacket queuedPacket;
int ringIndex = 0;
RtpReorderQueue rtpQueue = new RtpReorderQueue(16, MAX_RTP_QUEUE_DELAY_MS);
RtpReorderQueue.RtpQueueStatus queueStatus;
boolean directSubmit = (decRend != null && (decRend.getCapabilities() &
VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0);
// Preinitialize the ring buffer
int requiredBufferSize = context.streamConfig.getMaxPacketSize() + RtpPacket.MAX_HEADER_SIZE;
for (int i = 0; i < VIDEO_RING_SIZE; i++) {
ring[i] = new VideoPacket(new byte[requiredBufferSize], !directSubmit);
}
byte[] buffer;
DatagramPacket packet = new DatagramPacket(new byte[1], 1); // Placeholder array
int iterationStart;
while (!isInterrupted())
{
try {
// Pull the next buffer in the ring and reset it
buffer = ring[ringIndex].getBuffer();
// Read the video data off the network
packet.setData(buffer, 0, buffer.length);
rtp.receive(packet);
// Initialize the video packet
ring[ringIndex].initializeWithLength(packet.getLength());
queueStatus = rtpQueue.addPacket(ring[ringIndex]);
if (queueStatus == RtpReorderQueue.RtpQueueStatus.HANDLE_IMMEDIATELY) {
// Submit immediately because the packet is in order
depacketizer.addInputData(ring[ringIndex]);
}
else if (queueStatus == RtpReorderQueue.RtpQueueStatus.QUEUED_PACKETS_READY) {
// The packet queue now has packets ready
while ((queuedPacket = (VideoPacket) rtpQueue.getQueuedPacket()) != null) {
depacketizer.addInputData(queuedPacket);
queuedPacket.dereferencePacket();
}
}
// If the DR supports direct submission, call the direct submit callback
if (directSubmit) {
DecodeUnit du;
while ((du = depacketizer.pollNextDecodeUnit()) != null) {
decRend.directSubmitDecodeUnit(du);
}
}
// Go to the next free element in the ring
iterationStart = ringIndex;
do {
ringIndex = (ringIndex + 1) % VIDEO_RING_SIZE;
if (ringIndex == iterationStart) {
// Reinitialize the video ring since all of its packets are still referenced
LimeLog.warning("Packet ring wrapped around!");
for (int i = 0; i < VIDEO_RING_SIZE; i++) {
ring[i] = new VideoPacket(new byte[requiredBufferSize], !directSubmit);
}
break;
}
} while (ring[ringIndex].getRefCount() != 0);
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
};
threads.add(t);
t.setName("Video - Receive");
t.setPriority(Thread.MAX_PRIORITY - 1);
t.start();
}
private void startUdpPingThread()
{
// Ping thread
Thread t = new Thread() {
@Override
public void run() {
// PING in ASCII
final byte[] pingPacketData = new byte[] {0x50, 0x49, 0x4E, 0x47};
DatagramPacket pingPacket = new DatagramPacket(pingPacketData, pingPacketData.length);
pingPacket.setSocketAddress(new InetSocketAddress(context.serverAddress, RTP_PORT));
// Send PING every 500 ms
while (!isInterrupted())
{
try {
rtp.send(pingPacket);
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
try {
Thread.sleep(500);
} catch (InterruptedException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
};
threads.add(t);
t.setName("Video - Ping");
t.setPriority(Thread.MIN_PRIORITY);
t.start();
}
}
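
The ring advance in the receive thread above is easy to misread: it walks forward until it finds an unreferenced packet and only reallocates when it has walked all the way around. A toy restatement of that rule (a hypothetical refCount array stands in for the ring's packets):

public class RingAdvanceDemo {
    public static void main(String[] args) {
        int ringSize = 8;
        int[] refCount = new int[ringSize]; // 0 means the slot is free
        refCount[1] = 1; // pretend slot 1 is still held by the depacketizer

        int ringIndex = 0;
        int iterationStart = ringIndex;
        do {
            ringIndex = (ringIndex + 1) % ringSize;
            if (ringIndex == iterationStart) {
                // Every slot is referenced; the real code reallocates the ring here
                System.out.println("Packet ring wrapped around!");
                break;
            }
        } while (refCount[ringIndex] != 0);
        System.out.println("Next free slot: " + ringIndex); // skips held slot 1, prints 2
    }
}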


@@ -1,724 +0,0 @@
package com.limelight.nvstream.control;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.LimeLog;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.av.ConnectionStatusListener;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.enet.EnetConnection;
import com.limelight.utils.TimeHelper;
public class ControlStream implements ConnectionStatusListener, InputPacketSender {
private static final int TCP_PORT = 47995;
private static final int UDP_PORT = 47999;
private static final int CONTROL_TIMEOUT = 10000;
private static final int IDX_START_A = 0;
private static final int IDX_REQUEST_IDR_FRAME = 0;
private static final int IDX_START_B = 1;
private static final int IDX_INVALIDATE_REF_FRAMES = 2;
private static final int IDX_LOSS_STATS = 3;
private static final int IDX_INPUT_DATA = 5;
private static final short packetTypesGen3[] = {
0x1407, // Request IDR frame
0x1410, // Start B
0x1404, // Invalidate reference frames
0x140c, // Loss Stats
0x1417, // Frame Stats (unused)
-1, // Input data (unused)
};
private static final short packetTypesGen4[] = {
0x0606, // Request IDR frame
0x0609, // Start B
0x0604, // Invalidate reference frames
0x060a, // Loss Stats
0x0611, // Frame Stats (unused)
-1, // Input data (unused)
};
private static final short packetTypesGen5[] = {
0x0305, // Start A
0x0307, // Start B
0x0301, // Invalidate reference frames
0x0201, // Loss Stats
0x0204, // Frame Stats (unused)
0x0207, // Input data
};
private static final short packetTypesGen7[] = {
0x0305, // Start A
0x0307, // Start B
0x0301, // Invalidate reference frames
0x0201, // Loss Stats
0x0204, // Frame Stats (unused)
0x0206, // Input data
};
private static final short payloadLengthsGen3[] = {
-1, // Request IDR frame
16, // Start B
24, // Invalidate reference frames
32, // Loss Stats
64, // Frame Stats
-1, // Input Data
};
private static final short payloadLengthsGen4[] = {
-1, // Request IDR frame
-1, // Start B
24, // Invalidate reference frames
32, // Loss Stats
64, // Frame Stats
-1, // Input Data
};
private static final short payloadLengthsGen5[] = {
-1, // Start A
16, // Start B
24, // Invalidate reference frames
32, // Loss Stats
80, // Frame Stats
-1, // Input Data
};
private static final short payloadLengthsGen7[] = {
-1, // Start A
16, // Start B
24, // Invalidate reference frames
32, // Loss Stats
80, // Frame Stats
-1, // Input Data
};
private static final byte[] precontructedPayloadsGen3[] = {
new byte[]{0, 0}, // Request IDR frame
null, // Start B
null, // Invalidate reference frames
null, // Loss Stats
null, // Frame Stats
null, // Input Data
};
private static final byte[] precontructedPayloadsGen4[] = {
new byte[]{0, 0}, // Request IDR frame
new byte[]{0}, // Start B
null, // Invalidate reference frames
null, // Loss Stats
null, // Frame Stats
null, // Input Data
};
private static final byte[] precontructedPayloadsGen5[] = {
new byte[]{0, 0}, // Start A
null, // Start B
null, // Invalidate reference frames
null, // Loss Stats
null, // Frame Stats
null, // Input Data
};
private static final byte[] precontructedPayloadsGen7[] = {
new byte[]{0, 0}, // Start A
null, // Start B
null, // Invalidate reference frames
null, // Loss Stats
null, // Frame Stats
null, // Input Data
};
public static final int LOSS_REPORT_INTERVAL_MS = 50;
private int lastGoodFrame;
private int lastSeenFrame;
private int lossCountSinceLastReport;
private ConnectionContext context;
// If we drop at least 10 frames in a 15 second (or less) window
// more than 5 times in 60 seconds, we'll display a warning
public static final int LOSS_PERIOD_MS = 15000;
public static final int LOSS_EVENT_TIME_THRESHOLD_MS = 60000;
public static final int MAX_LOSS_COUNT_IN_PERIOD = 10;
public static final int LOSS_EVENTS_TO_WARN = 5;
public static final int MAX_SLOW_SINK_COUNT = 2;
public static final int MESSAGE_DELAY_FACTOR = 3;
private long lossTimestamp;
private long lossEventTimestamp;
private int lossCount;
private int lossEventCount;
private int slowSinkCount;
// Used on Gen 5 servers and above
private EnetConnection enetConnection;
// Used on Gen 4 servers and below
private Socket s;
private InputStream in;
private OutputStream out;
private Thread lossStatsThread;
private Thread resyncThread;
private LinkedBlockingQueue<int[]> invalidReferenceFrameTuples = new LinkedBlockingQueue<int[]>();
private boolean aborting = false;
private boolean forceIdrRequest;
private final short[] packetTypes;
private final short[] payloadLengths;
private final byte[][] preconstructedPayloads;
public ControlStream(ConnectionContext context)
{
this.context = context;
switch (context.serverGeneration)
{
case ConnectionContext.SERVER_GENERATION_3:
packetTypes = packetTypesGen3;
payloadLengths = payloadLengthsGen3;
preconstructedPayloads = precontructedPayloadsGen3;
break;
case ConnectionContext.SERVER_GENERATION_4:
packetTypes = packetTypesGen4;
payloadLengths = payloadLengthsGen4;
preconstructedPayloads = precontructedPayloadsGen4;
break;
case ConnectionContext.SERVER_GENERATION_5:
packetTypes = packetTypesGen5;
payloadLengths = payloadLengthsGen5;
preconstructedPayloads = precontructedPayloadsGen5;
break;
case ConnectionContext.SERVER_GENERATION_7:
default:
packetTypes = packetTypesGen7;
payloadLengths = payloadLengthsGen7;
preconstructedPayloads = precontructedPayloadsGen7;
break;
}
if (context.videoDecoderRenderer != null) {
forceIdrRequest = (context.videoDecoderRenderer.getCapabilities() &
VideoDecoderRenderer.CAPABILITY_REFERENCE_FRAME_INVALIDATION) == 0;
}
}
public void initialize() throws IOException
{
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
enetConnection = EnetConnection.connect(context.serverAddress.getHostAddress(), UDP_PORT, CONTROL_TIMEOUT);
}
else {
s = new Socket();
s.setTcpNoDelay(true);
s.connect(new InetSocketAddress(context.serverAddress, TCP_PORT), CONTROL_TIMEOUT);
in = s.getInputStream();
out = s.getOutputStream();
}
}
private void sendPacket(NvCtlPacket packet) throws IOException
{
// Prevent multiple clients from writing to the stream at the same time
synchronized (this) {
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
enetConnection.pumpSocket();
packet.write(enetConnection);
}
else {
packet.write(out);
out.flush();
}
}
}
private void sendAndDiscardReply(NvCtlPacket packet) throws IOException
{
synchronized (this) {
sendPacket(packet);
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
enetConnection.readPacket(0, CONTROL_TIMEOUT);
}
else {
new NvCtlResponse(in);
}
}
}
private void sendLossStats(ByteBuffer bb) throws IOException
{
bb.rewind();
bb.putInt(lossCountSinceLastReport); // Packet loss count
bb.putInt(LOSS_REPORT_INTERVAL_MS); // Time since last report in milliseconds
bb.putInt(1000);
bb.putLong(lastGoodFrame); // Last successfully received frame
bb.putInt(0);
bb.putInt(0);
bb.putInt(0x14);
sendPacket(new NvCtlPacket(packetTypes[IDX_LOSS_STATS],
payloadLengths[IDX_LOSS_STATS], bb.array()));
}
public void sendInputPacket(byte[] data, short length) throws IOException {
sendPacket(new NvCtlPacket(packetTypes[IDX_INPUT_DATA], length, data));
}
public void abort()
{
if (aborting) {
return;
}
aborting = true;
if (s != null) {
try {
s.close();
} catch (IOException e) {}
}
if (lossStatsThread != null) {
lossStatsThread.interrupt();
try {
lossStatsThread.join();
} catch (InterruptedException e) {}
}
if (resyncThread != null) {
resyncThread.interrupt();
try {
resyncThread.join();
} catch (InterruptedException e) {}
}
if (enetConnection != null) {
try {
enetConnection.close();
} catch (IOException e) {}
}
}
public void start() throws IOException
{
// Use a finite timeout during the handshake process
if (s != null) {
s.setSoTimeout(CONTROL_TIMEOUT);
}
doStartA();
doStartB();
// Return to an infinite read timeout after the initial control handshake
if (s != null) {
s.setSoTimeout(0);
}
lossStatsThread = new Thread() {
@Override
public void run() {
ByteBuffer bb = ByteBuffer.allocate(payloadLengths[IDX_LOSS_STATS]).order(ByteOrder.LITTLE_ENDIAN);
while (!isInterrupted())
{
try {
sendLossStats(bb);
lossCountSinceLastReport = 0;
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
try {
Thread.sleep(LOSS_REPORT_INTERVAL_MS);
} catch (InterruptedException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
};
lossStatsThread.setPriority(Thread.MIN_PRIORITY + 1);
lossStatsThread.setName("Control - Loss Stats Thread");
lossStatsThread.start();
resyncThread = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
int[] tuple;
boolean idrFrameRequired = false;
// Wait for a tuple
try {
tuple = invalidReferenceFrameTuples.take();
} catch (InterruptedException e) {
context.connListener.connectionTerminated(e);
return;
}
// Check for the magic IDR frame tuple
int[] lastTuple = null;
if (tuple[0] != 0 || tuple[1] != 0) {
// Aggregate all lost frames into one range
for (;;) {
int[] nextTuple = lastTuple = invalidReferenceFrameTuples.poll();
if (nextTuple == null) {
break;
}
// Check if this tuple has IDR frame magic values
if (nextTuple[0] == 0 && nextTuple[1] == 0) {
// We will need an IDR frame now, but we won't break out
// of the loop because we want to dequeue all pending requests
idrFrameRequired = true;
}
lastTuple = nextTuple;
}
}
else {
// We must require an IDR frame
idrFrameRequired = true;
}
try {
if (forceIdrRequest || idrFrameRequired) {
requestIdrFrame();
}
else {
// Update the end of the range to the latest tuple
if (lastTuple != null) {
tuple[1] = lastTuple[1];
}
invalidateReferenceFrames(tuple[0], tuple[1]);
}
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
};
resyncThread.setName("Control - Resync Thread");
resyncThread.setPriority(Thread.MAX_PRIORITY - 1);
resyncThread.start();
}
private void doStartA() throws IOException
{
sendAndDiscardReply(new NvCtlPacket(packetTypes[IDX_START_A],
(short) preconstructedPayloads[IDX_START_A].length,
preconstructedPayloads[IDX_START_A]));
}
private void doStartB() throws IOException
{
// Gen 3 and 5 both use a packet of this form
if (context.serverGeneration != ConnectionContext.SERVER_GENERATION_4) {
ByteBuffer payload = ByteBuffer.wrap(new byte[payloadLengths[IDX_START_B]]).order(ByteOrder.LITTLE_ENDIAN);
payload.putInt(0);
payload.putInt(0);
payload.putInt(0);
payload.putInt(0xa);
sendAndDiscardReply(new NvCtlPacket(packetTypes[IDX_START_B],
payloadLengths[IDX_START_B], payload.array()));
}
else {
sendAndDiscardReply(new NvCtlPacket(packetTypes[IDX_START_B],
(short) preconstructedPayloads[IDX_START_B].length,
preconstructedPayloads[IDX_START_B]));
}
}
private void requestIdrFrame() throws IOException {
// On Gen 3, we use the invalidate reference frames trick.
// On Gen 4+, we use the known IDR frame request packet.
// On Gen 5, we're currently using the invalidate reference frames trick again.
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
ByteBuffer conf = ByteBuffer.wrap(new byte[payloadLengths[IDX_INVALIDATE_REF_FRAMES]]).order(ByteOrder.LITTLE_ENDIAN);
//conf.putLong(firstLostFrame);
//conf.putLong(nextSuccessfulFrame);
// Early on, we'll use a special IDR sequence. Otherwise,
// we'll just say we lost the last 32 frames. This is larger
// than the number of buffered frames in the encoder (16) so
// it should trigger an IDR frame.
if (lastSeenFrame < 0x20) {
conf.putLong(0);
conf.putLong(0x20);
}
else {
conf.putLong(lastSeenFrame - 0x20);
conf.putLong(lastSeenFrame);
}
conf.putLong(0);
sendAndDiscardReply(new NvCtlPacket(packetTypes[IDX_INVALIDATE_REF_FRAMES],
payloadLengths[IDX_INVALIDATE_REF_FRAMES], conf.array()));
}
else {
sendAndDiscardReply(new NvCtlPacket(packetTypes[IDX_REQUEST_IDR_FRAME],
(short) preconstructedPayloads[IDX_REQUEST_IDR_FRAME].length,
preconstructedPayloads[IDX_REQUEST_IDR_FRAME]));
}
LimeLog.warning("IDR frame request sent");
}
private void invalidateReferenceFrames(int firstLostFrame, int nextSuccessfulFrame) throws IOException {
LimeLog.warning("Invalidating reference frames from "+firstLostFrame+" to "+nextSuccessfulFrame);
ByteBuffer conf = ByteBuffer.wrap(new byte[payloadLengths[IDX_INVALIDATE_REF_FRAMES]]).order(ByteOrder.LITTLE_ENDIAN);
conf.putLong(firstLostFrame);
conf.putLong(nextSuccessfulFrame);
conf.putLong(0);
sendAndDiscardReply(new NvCtlPacket(packetTypes[IDX_INVALIDATE_REF_FRAMES],
payloadLengths[IDX_INVALIDATE_REF_FRAMES], conf.array()));
LimeLog.warning("Reference frame invalidation sent");
}
static class NvCtlPacket {
public short type;
public short paylen;
public byte[] payload;
private static final ByteBuffer headerBuffer = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
private static final ByteBuffer serializationBuffer = ByteBuffer.allocate(256).order(ByteOrder.LITTLE_ENDIAN);
public NvCtlPacket(InputStream in) throws IOException
{
// Use the class's static header buffer for parsing the header
synchronized (headerBuffer) {
int offset = 0;
byte[] header = headerBuffer.array();
do
{
int bytesRead = in.read(header, offset, header.length - offset);
if (bytesRead < 0) {
break;
}
offset += bytesRead;
} while (offset != header.length);
if (offset != header.length) {
throw new IOException("Socket closed prematurely");
}
headerBuffer.rewind();
type = headerBuffer.getShort();
paylen = headerBuffer.getShort();
}
if (paylen != 0)
{
payload = new byte[paylen];
int offset = 0;
do
{
int bytesRead = in.read(payload, offset, payload.length - offset);
if (bytesRead < 0) {
break;
}
offset += bytesRead;
} while (offset != payload.length);
if (offset != payload.length) {
throw new IOException("Socket closed prematurely");
}
}
}
public NvCtlPacket(byte[] packet)
{
synchronized (headerBuffer) {
headerBuffer.rewind();
headerBuffer.put(packet, 0, 4);
headerBuffer.rewind();
type = headerBuffer.getShort();
paylen = headerBuffer.getShort();
}
if (paylen != 0)
{
payload = new byte[paylen];
System.arraycopy(packet, 4, payload, 0, paylen);
}
}
public NvCtlPacket(short type, short paylen)
{
this.type = type;
this.paylen = paylen;
}
public NvCtlPacket(short type, short paylen, byte[] payload)
{
this.type = type;
this.paylen = paylen;
this.payload = payload;
}
public short getType()
{
return type;
}
public short getPaylen()
{
return paylen;
}
public void setType(short type)
{
this.type = type;
}
public void setPaylen(short paylen)
{
this.paylen = paylen;
}
public void write(OutputStream out) throws IOException
{
// Use the class's serialization buffer to construct the wireform to send
synchronized (serializationBuffer) {
serializationBuffer.rewind();
serializationBuffer.limit(serializationBuffer.capacity());
serializationBuffer.putShort(type);
serializationBuffer.putShort(paylen);
serializationBuffer.put(payload, 0, paylen);
out.write(serializationBuffer.array(), 0, serializationBuffer.position());
}
}
public void write(EnetConnection conn) throws IOException
{
// Use the class's serialization buffer to construct the wireform to send
synchronized (serializationBuffer) {
serializationBuffer.rewind();
serializationBuffer.limit(serializationBuffer.capacity());
serializationBuffer.putShort(type);
serializationBuffer.put(payload, 0, paylen);
serializationBuffer.limit(serializationBuffer.position());
conn.writePacket(serializationBuffer);
}
}
}
class NvCtlResponse extends NvCtlPacket {
public short status;
public NvCtlResponse(InputStream in) throws IOException {
super(in);
}
public NvCtlResponse(short type, short paylen) {
super(type, paylen);
}
public NvCtlResponse(short type, short paylen, byte[] payload) {
super(type, paylen, payload);
}
public NvCtlResponse(byte[] payload) {
super(payload);
}
public void setStatusCode(short status)
{
this.status = status;
}
public short getStatusCode()
{
return status;
}
}
private void resyncConnection(int firstLostFrame, int nextSuccessfulFrame) {
invalidReferenceFrameTuples.add(new int[]{firstLostFrame, nextSuccessfulFrame});
}
public void connectionDetectedFrameLoss(int firstLostFrame, int nextSuccessfulFrame) {
resyncConnection(firstLostFrame, nextSuccessfulFrame);
// Suppress connection warnings for the first 150 frames to allow the connection
// to stabilize
if (lastGoodFrame < 150) {
return;
}
// Reset the loss count if it's been too long
if (TimeHelper.getMonotonicMillis() > LOSS_PERIOD_MS + lossTimestamp) {
lossCount = 0;
lossTimestamp = TimeHelper.getMonotonicMillis();
}
// Count this loss event
if (++lossCount == MAX_LOSS_COUNT_IN_PERIOD) {
// Reset the loss event count if it's been too long
if (TimeHelper.getMonotonicMillis() > LOSS_EVENT_TIME_THRESHOLD_MS + lossEventTimestamp) {
lossEventCount = 0;
lossEventTimestamp = TimeHelper.getMonotonicMillis();
}
if (++lossEventCount == LOSS_EVENTS_TO_WARN) {
context.connListener.displayTransientMessage("Poor network connection");
lossEventCount = 0;
lossEventTimestamp = 0;
}
lossCount = 0;
lossTimestamp = 0;
}
}
public void connectionSinkTooSlow(int firstLostFrame, int nextSuccessfulFrame) {
resyncConnection(firstLostFrame, nextSuccessfulFrame);
// Suppress connection warnings for the first 150 frames to allow the connection
// to stabilize
if (lastGoodFrame < 150) {
return;
}
if (++slowSinkCount == MAX_SLOW_SINK_COUNT) {
context.connListener.displayTransientMessage("Your device is processing the A/V data too slowly. Try lowering stream resolution and/or frame rate.");
slowSinkCount = -MAX_SLOW_SINK_COUNT * MESSAGE_DELAY_FACTOR;
}
}
public void connectionReceivedCompleteFrame(int frameIndex) {
lastGoodFrame = frameIndex;
}
public void connectionSawFrame(int frameIndex) {
lastSeenFrame = frameIndex;
}
public void connectionLostPackets(int lastReceivedPacket, int nextReceivedPacket) {
// Update the loss count for the next loss report
lossCountSinceLastReport += (nextReceivedPacket - lastReceivedPacket) - 1;
}
}
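
On the TCP path, NvCtlPacket.write above frames each message as [type:u16][paylen:u16][payload], all little-endian; note that the 32-byte loss stats payload is exactly three ints, one long, and three more ints. A minimal framing sketch (0x0201 is the Gen 5/7 loss stats type from the tables above):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class NvCtlFramingDemo {
    static byte[] frame(short type, byte[] payload) {
        ByteBuffer bb = ByteBuffer.allocate(4 + payload.length).order(ByteOrder.LITTLE_ENDIAN);
        bb.putShort(type);                   // packet type
        bb.putShort((short) payload.length); // payload length
        bb.put(payload);                     // raw payload bytes
        return bb.array();
    }

    public static void main(String[] args) {
        byte[] wire = frame((short) 0x0201, new byte[32]);
        System.out.println(wire.length); // 36: 4-byte header + 32-byte loss stats payload
    }
}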


@@ -1,7 +0,0 @@
package com.limelight.nvstream.control;
import java.io.IOException;
public interface InputPacketSender {
void sendInputPacket(byte[] data, short length) throws IOException;
}


@@ -1,112 +0,0 @@
package com.limelight.nvstream.enet;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
public class EnetConnection implements Closeable {
private long enetPeer;
private long enetClient;
private static final int ENET_PACKET_FLAG_RELIABLE = 1;
static {
System.loadLibrary("jnienet");
initializeEnet();
}
private EnetConnection() {}
public static EnetConnection connect(String host, int port, int timeout) throws IOException {
EnetConnection conn = new EnetConnection();
conn.enetClient = createClient(host);
if (conn.enetClient == 0) {
throw new IOException("Unable to create ENet client");
}
conn.enetPeer = connectToPeer(conn.enetClient, host, port, timeout);
if (conn.enetPeer == 0) {
try {
conn.close();
} catch (IOException e) {}
throw new IOException("Unable to connect to UDP port "+port);
}
return conn;
}
public void pumpSocket() throws IOException {
int ret;
while ((ret = readPacket(enetClient, null, 0, 0)) > 0);
if (ret < 0) {
throw new IOException("ENet connection failed");
}
}
public ByteBuffer readPacket(int maxSize, int timeout) throws IOException {
ByteBuffer buffer;
byte[] array;
int length;
if (maxSize != 0) {
buffer = ByteBuffer.allocate(maxSize);
array = buffer.array();
length = buffer.limit();
}
else {
// The caller doesn't want the packet back
buffer = null;
array = null;
length = 0;
}
int readLength = readPacket(enetClient, array, length, timeout);
if (readLength > length && length != 0) {
// This is a packet that was unexpectedly large compared to
// what the caller expected.
throw new IOException("Received ENet packet too large: "+readLength);
}
else if (readLength <= 0) {
// We either got nothing or a socket error
throw new IOException("Failed to receive ENet packet");
}
else if (length == 0) {
// We received a packet but the caller didn't want it back
return null;
}
else {
// A packet was received which matched the caller's expectations
buffer.limit(readLength);
return buffer;
}
}
public void writePacket(ByteBuffer buffer) throws IOException {
if (!writePacket(enetClient, enetPeer, buffer.array(), buffer.limit(), ENET_PACKET_FLAG_RELIABLE)) {
throw new IOException("Failed to send ENet packet");
}
}
@Override
public void close() throws IOException {
if (enetPeer != 0) {
disconnectPeer(enetPeer);
enetPeer = 0;
}
if (enetClient != 0) {
destroyClient(enetClient);
enetClient = 0;
}
}
private static native int initializeEnet();
private static native long createClient(String address);
private static native long connectToPeer(long client, String host, int port, int timeout);
private static native int readPacket(long client, byte[] data, int length, int timeout);
private static native boolean writePacket(long client, long peer, byte[] data, int length, int packetFlags);
private static native void destroyClient(long client);
private static native void disconnectPeer(long peer);
}
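
A hypothetical usage sketch for the wrapper above, assuming the jnienet native library is loadable; the port and timeout mirror UDP_PORT and CONTROL_TIMEOUT from ControlStream, and the address is made up:

import java.nio.ByteBuffer;

import com.limelight.nvstream.enet.EnetConnection;

public class EnetUsageDemo {
    public static void main(String[] args) throws Exception {
        EnetConnection conn = EnetConnection.connect("192.168.1.100", 47999, 10000);
        try {
            // Reliable send of a 2-byte payload, then wait for a reply packet
            conn.writePacket(ByteBuffer.wrap(new byte[] {0x00, 0x02}));
            ByteBuffer reply = conn.readPacket(128, 10000);
            System.out.println("Got " + reply.limit() + " bytes");
        } finally {
            conn.close();
        }
    }
}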


@@ -1,97 +0,0 @@
package com.limelight.nvstream.input;
public class ControllerBatchingBlock {
private byte[] axisDirs = new byte[6];
private short buttonFlags;
private byte leftTrigger;
private byte rightTrigger;
private short leftStickX;
private short leftStickY;
private short rightStickX;
private short rightStickY;
private short controllerNumber;
private short activeGamepadMask;
public ControllerBatchingBlock(MultiControllerPacket initialPacket) {
this.controllerNumber = initialPacket.controllerNumber;
this.activeGamepadMask = initialPacket.activeGamepadMask;
this.buttonFlags = initialPacket.buttonFlags;
this.leftTrigger = initialPacket.leftTrigger;
this.rightTrigger = initialPacket.rightTrigger;
this.leftStickX = initialPacket.leftStickX;
this.leftStickY = initialPacket.leftStickY;
this.rightStickX = initialPacket.rightStickX;
this.rightStickY = initialPacket.rightStickY;
}
private boolean checkDirs(short currentVal, short newVal, int dirIndex) {
if (currentVal == newVal) {
return true;
}
// We want to send a packet if we've now zeroed an axis
if (newVal == 0) {
return false;
}
if (axisDirs[dirIndex] == 0) {
if (newVal < currentVal) {
axisDirs[dirIndex] = -1;
}
else {
axisDirs[dirIndex] = 1;
}
}
else if (axisDirs[dirIndex] == -1) {
return newVal < currentVal;
}
else if (newVal < currentVal) {
return false;
}
return true;
}
// Controller packet batching is far more restricted than mouse move batching.
// We have several restrictions that will cause batching to break up the controller packets.
// 1) Button flags must be the same for all packets in the batch
// 2) The movement direction of all axes must remain the same or be neutral
public boolean submitNewPacket(MultiControllerPacket packet) {
if (buttonFlags != packet.buttonFlags ||
controllerNumber != packet.controllerNumber ||
activeGamepadMask != packet.activeGamepadMask ||
!checkDirs(leftTrigger, packet.leftTrigger, 0) ||
!checkDirs(rightTrigger, packet.rightTrigger, 1) ||
!checkDirs(leftStickX, packet.leftStickX, 2) ||
!checkDirs(leftStickY, packet.leftStickY, 3) ||
!checkDirs(rightStickX, packet.rightStickX, 4) ||
!checkDirs(rightStickY, packet.rightStickY, 5))
{
return false;
}
this.controllerNumber = packet.controllerNumber;
this.activeGamepadMask = packet.activeGamepadMask;
this.leftTrigger = packet.leftTrigger;
this.rightTrigger = packet.rightTrigger;
this.leftStickX = packet.leftStickX;
this.leftStickY = packet.leftStickY;
this.rightStickX = packet.rightStickX;
this.rightStickY = packet.rightStickY;
return true;
}
public void reinitializePacket(MultiControllerPacket packet) {
packet.controllerNumber = controllerNumber;
packet.activeGamepadMask = activeGamepadMask;
packet.buttonFlags = buttonFlags;
packet.leftTrigger = leftTrigger;
packet.rightTrigger = rightTrigger;
packet.leftStickX = leftStickX;
packet.leftStickY = leftStickY;
packet.rightStickX = rightStickX;
packet.rightStickY = rightStickY;
}
}
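
The direction rule in checkDirs above is the subtle part of the batching: an axis may keep absorbing updates only while it is unchanged or still moving the way it started, and zeroing an axis always breaks the batch. A standalone restatement of that rule with simplified names:

public class AxisBatchRuleDemo {
    static byte dir = 0; // 0 = undecided, -1 = decreasing, 1 = increasing

    static boolean canBatch(short current, short next) {
        if (current == next) return true; // unchanged axes always batch
        if (next == 0) return false;      // zeroing an axis must be sent promptly
        if (dir == 0) {
            dir = (byte) (next < current ? -1 : 1); // lock in the initial direction
            return true;
        }
        // Once locked, only same-direction movement batches
        return dir == -1 ? next < current : next > current;
    }

    public static void main(String[] args) {
        System.out.println(canBatch((short) 0, (short) 100));   // true, locks "increasing"
        System.out.println(canBatch((short) 100, (short) 200)); // true, still increasing
        System.out.println(canBatch((short) 200, (short) 150)); // false, direction reversed
    }
}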


@@ -1,85 +1,19 @@
package com.limelight.nvstream.input;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-public class ControllerPacket extends MultiControllerPacket {
-private static final byte[] HEADER =
-{
-0x0A,
-0x00,
-0x00,
-0x00,
-0x00,
-0x14
-};
-private static final byte[] TAIL =
-{
-(byte)0x9C,
-0x00,
-0x00,
-0x00,
-0x55,
-0x00
-};
-private static final int PACKET_TYPE = 0x18;
+public class ControllerPacket {
public static final short A_FLAG = 0x1000;
public static final short B_FLAG = 0x2000;
public static final short X_FLAG = 0x4000;
public static final short Y_FLAG = (short)0x8000;
public static final short UP_FLAG = 0x0001;
public static final short DOWN_FLAG = 0x0002;
public static final short LEFT_FLAG = 0x0004;
public static final short RIGHT_FLAG = 0x0008;
public static final short LB_FLAG = 0x0100;
public static final short RB_FLAG = 0x0200;
public static final short PLAY_FLAG = 0x0010;
public static final short BACK_FLAG = 0x0020;
public static final short LS_CLK_FLAG = 0x0040;
public static final short RS_CLK_FLAG = 0x0080;
public static final short SPECIAL_BUTTON_FLAG = 0x0400;
-private static final short PAYLOAD_LENGTH = 24;
-private static final short PACKET_LENGTH = PAYLOAD_LENGTH +
-InputPacket.HEADER_LENGTH;
-public ControllerPacket(short buttonFlags, byte leftTrigger, byte rightTrigger,
-short leftStickX, short leftStickY,
-short rightStickX, short rightStickY)
-{
-super(PACKET_TYPE, (short) 0, (short) 0, buttonFlags, leftTrigger, rightTrigger, leftStickX,
-leftStickY, rightStickX, rightStickY);
-this.buttonFlags = buttonFlags;
-this.leftTrigger = leftTrigger;
-this.rightTrigger = rightTrigger;
-this.leftStickX = leftStickX;
-this.leftStickY = leftStickY;
-this.rightStickX = rightStickX;
-this.rightStickY = rightStickY;
-}
-@Override
-public void toWirePayload(ByteBuffer bb) {
-bb.order(ByteOrder.LITTLE_ENDIAN);
-bb.put(HEADER);
-bb.putShort(buttonFlags);
-bb.put(leftTrigger);
-bb.put(rightTrigger);
-bb.putShort(leftStickX);
-bb.putShort(leftStickY);
-bb.putShort(rightStickX);
-bb.putShort(rightStickY);
-bb.put(TAIL);
-}
-@Override
-public int getPacketLength() {
-return PACKET_LENGTH;
-}
}


@@ -1,432 +0,0 @@
package com.limelight.nvstream.input;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.concurrent.LinkedBlockingQueue;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.ShortBufferException;
import javax.crypto.spec.IvParameterSpec;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.control.InputPacketSender;
public class ControllerStream {
private final static int PORT = 35043;
private final static int CONTROLLER_TIMEOUT = 10000;
private ConnectionContext context;
// Only used on Gen 4 or below servers
private Socket s;
private OutputStream out;
// Used on Gen 5+ servers
private InputPacketSender controlSender;
private InputCipher cipher;
private Thread inputThread;
private LinkedBlockingQueue<InputPacket> inputQueue = new LinkedBlockingQueue<InputPacket>();
private ByteBuffer stagingBuffer = ByteBuffer.allocate(128);
private ByteBuffer sendBuffer = ByteBuffer.allocate(128).order(ByteOrder.BIG_ENDIAN);
public ControllerStream(ConnectionContext context)
{
this.context = context;
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_7) {
// Newer GFE versions use AES GCM
cipher = new AesGcmCipher();
}
else {
// Older versions used AES CBC
cipher = new AesCbcCipher();
}
ByteBuffer bb = ByteBuffer.allocate(16);
bb.putInt(context.riKeyId);
cipher.initialize(context.riKey, bb.array());
}
public void initialize(InputPacketSender controlSender) throws IOException
{
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
// Gen 5 sends input over the control stream
this.controlSender = controlSender;
}
else {
// Gen 4 and below uses a separate TCP connection for input
s = new Socket();
s.connect(new InetSocketAddress(context.serverAddress, PORT), CONTROLLER_TIMEOUT);
s.setTcpNoDelay(true);
out = s.getOutputStream();
}
}
public void start()
{
inputThread = new Thread() {
@Override
public void run() {
while (!isInterrupted()) {
InputPacket packet;
try {
packet = inputQueue.take();
} catch (InterruptedException e) {
context.connListener.connectionTerminated(e);
return;
}
// Try to batch mouse move packets
if (!inputQueue.isEmpty() && packet instanceof MouseMovePacket) {
MouseMovePacket initialMouseMove = (MouseMovePacket) packet;
int totalDeltaX = initialMouseMove.deltaX;
int totalDeltaY = initialMouseMove.deltaY;
// Combine the deltas with other mouse move packets in the queue
synchronized (inputQueue) {
Iterator<InputPacket> i = inputQueue.iterator();
while (i.hasNext()) {
InputPacket queuedPacket = i.next();
if (queuedPacket instanceof MouseMovePacket) {
MouseMovePacket queuedMouseMove = (MouseMovePacket) queuedPacket;
// Add this packet's deltas to the running total
totalDeltaX += queuedMouseMove.deltaX;
totalDeltaY += queuedMouseMove.deltaY;
// Remove this packet from the queue
i.remove();
}
}
}
// Total deltas could overflow the short so we must split them if required
do {
short partialDeltaX = (short)(totalDeltaX < 0 ?
Math.max(Short.MIN_VALUE, totalDeltaX) :
Math.min(Short.MAX_VALUE, totalDeltaX));
short partialDeltaY = (short)(totalDeltaY < 0 ?
Math.max(Short.MIN_VALUE, totalDeltaY) :
Math.min(Short.MAX_VALUE, totalDeltaY));
initialMouseMove.deltaX = partialDeltaX;
initialMouseMove.deltaY = partialDeltaY;
try {
sendPacket(initialMouseMove);
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
totalDeltaX -= partialDeltaX;
totalDeltaY -= partialDeltaY;
} while (totalDeltaX != 0 && totalDeltaY != 0);
}
// Try to batch axis changes on controller packets too
else if (!inputQueue.isEmpty() && packet instanceof MultiControllerPacket) {
MultiControllerPacket initialControllerPacket = (MultiControllerPacket) packet;
ControllerBatchingBlock batchingBlock = null;
synchronized (inputQueue) {
Iterator<InputPacket> i = inputQueue.iterator();
while (i.hasNext()) {
InputPacket queuedPacket = i.next();
if (queuedPacket instanceof MultiControllerPacket) {
// Only initialize the batching block if we got here
if (batchingBlock == null) {
batchingBlock = new ControllerBatchingBlock(initialControllerPacket);
}
if (batchingBlock.submitNewPacket((MultiControllerPacket) queuedPacket))
{
// Batching was successful, so remove this packet
i.remove();
}
else
{
// Unable to batch so we must stop
break;
}
}
}
}
if (batchingBlock != null) {
// Reinitialize the initial packet with the new values
batchingBlock.reinitializePacket(initialControllerPacket);
}
try {
sendPacket(packet);
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
}
else {
// Send any other packet as-is
try {
sendPacket(packet);
} catch (IOException e) {
context.connListener.connectionTerminated(e);
return;
}
}
}
}
};
inputThread.setName("Input - Queue");
inputThread.setPriority(Thread.NORM_PRIORITY + 1);
inputThread.start();
}
public void abort()
{
if (inputThread != null) {
inputThread.interrupt();
try {
inputThread.join();
} catch (InterruptedException e) {}
}
if (s != null) {
try {
s.close();
} catch (IOException e) {}
}
}
private void sendPacket(InputPacket packet) throws IOException {
// Store the packet in wire form in the byte buffer
packet.toWire(stagingBuffer);
int packetLen = packet.getPacketLength();
// Get final encrypted size of this block
int paddedLength = cipher.getEncryptedSize(packetLen);
// Allocate a byte buffer to represent the final packet
sendBuffer.rewind();
sendBuffer.putInt(paddedLength);
try {
cipher.encrypt(stagingBuffer.array(), packetLen, sendBuffer.array(), 4);
} catch (Exception e) {
// Should never happen
e.printStackTrace();
return;
}
// Send the packet over the control stream on Gen 5+
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
controlSender.sendInputPacket(sendBuffer.array(), (short) (paddedLength + 4));
// For reasons that I can't understand, NVIDIA decides to use the last 16
// bytes of ciphertext in the most recent game controller packet as the IV for
// future encryption. I think it may be a buffer overrun on their end but we'll have
// to mimic it to work correctly.
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_7 && paddedLength >= 32) {
cipher.initialize(context.riKey,
Arrays.copyOfRange(sendBuffer.array(), 4 + paddedLength - 16, 4 + paddedLength));
}
}
else {
// Send the packet over the TCP connection on Gen 4 and below
out.write(sendBuffer.array(), 0, paddedLength + 4);
out.flush();
}
}
private void queuePacket(InputPacket packet) {
synchronized (inputQueue) {
inputQueue.add(packet);
}
}
public void sendControllerInput(short buttonFlags, byte leftTrigger, byte rightTrigger,
short leftStickX, short leftStickY, short rightStickX, short rightStickY)
{
if (context.serverGeneration == ConnectionContext.SERVER_GENERATION_3) {
// Use legacy controller packets for generation 3
queuePacket(new ControllerPacket(buttonFlags, leftTrigger,
rightTrigger, leftStickX, leftStickY,
rightStickX, rightStickY));
}
else {
// Use multi-controller packets for generation 4 and above
queuePacket(new MultiControllerPacket(context,
(short) 0, (short) 0x1,
buttonFlags, leftTrigger,
rightTrigger, leftStickX, leftStickY,
rightStickX, rightStickY));
}
}
public void sendControllerInput(short controllerNumber, short activeGamepadMask,
short buttonFlags, byte leftTrigger, byte rightTrigger,
short leftStickX, short leftStickY, short rightStickX, short rightStickY)
{
if (context.serverGeneration == ConnectionContext.SERVER_GENERATION_3) {
// Use legacy controller packets for generation 3
queuePacket(new ControllerPacket(buttonFlags, leftTrigger,
rightTrigger, leftStickX, leftStickY,
rightStickX, rightStickY));
}
else {
// Use multi-controller packets for generation 4 and above
queuePacket(new MultiControllerPacket(context,
controllerNumber, activeGamepadMask,
buttonFlags, leftTrigger,
rightTrigger, leftStickX, leftStickY,
rightStickX, rightStickY));
}
}
public void sendMouseButtonDown(byte mouseButton)
{
queuePacket(new MouseButtonPacket(context, true, mouseButton));
}
public void sendMouseButtonUp(byte mouseButton)
{
queuePacket(new MouseButtonPacket(context, false, mouseButton));
}
public void sendMouseMove(short deltaX, short deltaY)
{
queuePacket(new MouseMovePacket(context, deltaX, deltaY));
}
public void sendKeyboardInput(short keyMap, byte keyDirection, byte modifier)
{
queuePacket(new KeyboardPacket(keyMap, keyDirection, modifier));
}
public void sendMouseScroll(byte scrollClicks)
{
queuePacket(new MouseScrollPacket(context, scrollClicks));
}
private static interface InputCipher {
public void initialize(SecretKey key, byte[] iv);
public int getEncryptedSize(int plaintextSize);
public void encrypt(byte[] inputData, int inputLength, byte[] outputData, int outputOffset);
}
private static class AesCbcCipher implements InputCipher {
private Cipher cipher;
public void initialize(SecretKey key, byte[] iv) {
try {
cipher = Cipher.getInstance("AES/CBC/NoPadding");
cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
} catch (NoSuchPaddingException e) {
e.printStackTrace();
} catch (InvalidKeyException e) {
e.printStackTrace();
} catch (InvalidAlgorithmParameterException e) {
e.printStackTrace();
}
}
public int getEncryptedSize(int plaintextSize) {
// CBC requires padding to the next multiple of 16
return ((plaintextSize + 15) / 16) * 16;
}
private int inPlacePadData(byte[] data, int length) {
// This implements the PKCS7 padding algorithm
if ((length % 16) == 0) {
// Already a multiple of 16
return length;
}
int paddedLength = getEncryptedSize(length);
byte paddingByte = (byte)(16 - (length % 16));
for (int i = length; i < paddedLength; i++) {
data[i] = paddingByte;
}
return paddedLength;
}
public void encrypt(byte[] inputData, int inputLength, byte[] outputData, int outputOffset) {
int encryptedLength = inPlacePadData(inputData, inputLength);
try {
cipher.update(inputData, 0, encryptedLength, outputData, outputOffset);
} catch (ShortBufferException e) {
e.printStackTrace();
}
}
}
private static class AesGcmCipher implements InputCipher {
private SecretKey key;
private byte[] iv;
public int getEncryptedSize(int plaintextSize) {
// GCM uses no padding + 16 bytes tag for message authentication
return plaintextSize + 16;
}
@Override
public void initialize(SecretKey key, byte[] iv) {
this.key = key;
this.iv = iv;
}
@Override
public void encrypt(byte[] inputData, int inputLength, byte[] outputData, int outputOffset) {
// Reconstructing the cipher on every invocation really sucks but we have to do it
// because of the way NVIDIA is using GCM where each message is tagged. Java doesn't
// have an easy way that I know of to get a tag out mid-stream.
Cipher cipher;
try {
cipher = Cipher.getInstance("AES/GCM/NoPadding");
cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
// This is also non-ideal. Java gives us <ciphertext><tag> but we want to send <tag><ciphertext>
// so we'll take the output and arraycopy it into the right spot in the output buffer
byte[] rawCipherOut = cipher.doFinal(inputData, 0, inputLength);
System.arraycopy(rawCipherOut, inputLength, outputData, outputOffset, 16);
System.arraycopy(rawCipherOut, 0, outputData, outputOffset + 16, inputLength);
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
} catch (NoSuchPaddingException e) {
e.printStackTrace();
} catch (InvalidKeyException e) {
e.printStackTrace();
} catch (InvalidAlgorithmParameterException e) {
e.printStackTrace();
} catch (IllegalBlockSizeException e) {
e.printStackTrace();
} catch (BadPaddingException e) {
e.printStackTrace();
}
}
}
}
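
The tag-reordering dance in AesGcmCipher.encrypt above can be shown in isolation. A runnable sketch under stated assumptions (the all-zero key and IV are purely illustrative; the real key comes from pairing and the IV from riKeyId or the previous packet's ciphertext):

import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;

public class GcmTagReorderDemo {
    public static void main(String[] args) throws Exception {
        byte[] key = new byte[16];       // hypothetical all-zero key
        byte[] iv = new byte[16];        // hypothetical IV
        byte[] plaintext = new byte[32]; // stand-in for a serialized input packet

        Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
        cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(key, "AES"),
                new IvParameterSpec(iv));

        // Java returns <ciphertext><16-byte tag>; the wire wants <tag><ciphertext>
        byte[] raw = cipher.doFinal(plaintext);
        byte[] wire = new byte[raw.length];
        System.arraycopy(raw, plaintext.length, wire, 0, 16); // tag first
        System.arraycopy(raw, 0, wire, 16, plaintext.length); // then ciphertext
        System.out.println(wire.length); // 48 = 32 ciphertext bytes + 16 tag bytes
    }
}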


@@ -1,32 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public abstract class InputPacket {
public static final int HEADER_LENGTH = 0x4;
protected int packetType;
public InputPacket(int packetType)
{
this.packetType = packetType;
}
public abstract void toWirePayload(ByteBuffer bb);
public abstract int getPacketLength();
public void toWireHeader(ByteBuffer bb)
{
bb.order(ByteOrder.BIG_ENDIAN);
bb.putInt(packetType);
}
public void toWire(ByteBuffer bb)
{
bb.rewind();
toWireHeader(bb);
toWirePayload(bb);
}
}


@@ -1,44 +1,10 @@
package com.limelight.nvstream.input;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-public class KeyboardPacket extends InputPacket {
-private static final int PACKET_TYPE = 0x0A;
-private static final int PACKET_LENGTH = 14;
+public class KeyboardPacket {
public static final byte KEY_DOWN = 0x03;
public static final byte KEY_UP = 0x04;
public static final byte MODIFIER_SHIFT = 0x01;
public static final byte MODIFIER_CTRL = 0x02;
public static final byte MODIFIER_ALT = 0x04;
-private short keyCode;
-private byte keyDirection;
-private byte modifier;
-public KeyboardPacket(short keyCode, byte keyDirection, byte modifier) {
-super(PACKET_TYPE);
-this.keyCode = keyCode;
-this.keyDirection = keyDirection;
-this.modifier = modifier;
-}
-@Override
-public void toWirePayload(ByteBuffer bb) {
-bb.order(ByteOrder.LITTLE_ENDIAN);
-bb.put(keyDirection);
-bb.putShort((short)0);
-bb.putShort((short)0);
-bb.putShort(keyCode);
-bb.put(modifier);
-bb.put((byte)0);
-bb.put((byte)0);
-}
-@Override
-public int getPacketLength() {
-return PACKET_LENGTH;
-}
}
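
As a size check on the removed code: toWirePayload wrote 1 + 2 + 2 + 2 + 1 + 1 + 1 = 10 payload bytes, which with the 4-byte type header gives PACKET_LENGTH = 14. A sketch with a hypothetical 'A' key press:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class KeyboardWireDemo {
    public static void main(String[] args) {
        ByteBuffer bb = ByteBuffer.allocate(14);
        bb.order(ByteOrder.BIG_ENDIAN).putInt(0x0A); // packet type header
        bb.order(ByteOrder.LITTLE_ENDIAN);
        bb.put((byte) 0x03);       // keyDirection = KEY_DOWN
        bb.putShort((short) 0);
        bb.putShort((short) 0);
        bb.putShort((short) 0x41); // hypothetical key code
        bb.put((byte) 0x01);       // modifier = MODIFIER_SHIFT
        bb.put((byte) 0);
        bb.put((byte) 0);
        System.out.println(bb.position()); // 14 == PACKET_LENGTH
    }
}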


@@ -1,20 +0,0 @@
package com.limelight.nvstream.input;
import com.limelight.nvstream.NvConnection;
public abstract class KeycodeTranslator {
public abstract short translate(int keycode);
protected NvConnection conn;
public KeycodeTranslator(NvConnection conn) {
this.conn = conn;
}
public void sendKeyDown(short keyMap, byte modifier) {
conn.sendKeyboardInput(keyMap, KeyboardPacket.KEY_DOWN, modifier);
}
public void sendKeyUp(short keyMap, byte modifier) {
conn.sendKeyboardInput(keyMap, KeyboardPacket.KEY_UP, modifier);
}
}


@@ -1,51 +1,10 @@
package com.limelight.nvstream.input;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import com.limelight.nvstream.ConnectionContext;
-public class MouseButtonPacket extends InputPacket {
-byte buttonEventType;
-byte mouseButton;
-private static final int PACKET_TYPE = 0x5;
-private static final int PAYLOAD_LENGTH = 5;
-private static final int PACKET_LENGTH = PAYLOAD_LENGTH +
-InputPacket.HEADER_LENGTH;
+public class MouseButtonPacket {
private static final byte PRESS_EVENT = 0x07;
private static final byte RELEASE_EVENT = 0x08;
public static final byte BUTTON_LEFT = 0x01;
public static final byte BUTTON_MIDDLE = 0x02;
public static final byte BUTTON_RIGHT = 0x03;
-public MouseButtonPacket(ConnectionContext context, boolean buttonDown, byte mouseButton)
-{
-super(PACKET_TYPE);
-this.mouseButton = mouseButton;
-buttonEventType = buttonDown ?
-PRESS_EVENT : RELEASE_EVENT;
-// On Gen 5 servers, the button event codes are incremented by one
-if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
-buttonEventType++;
-}
-}
-@Override
-public void toWirePayload(ByteBuffer bb) {
-bb.order(ByteOrder.BIG_ENDIAN);
-bb.put(buttonEventType);
-bb.putInt(mouseButton);
-}
-@Override
-public int getPacketLength() {
-return PACKET_LENGTH;
-}
}


@@ -1,48 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.ConnectionContext;
public class MouseMovePacket extends InputPacket {
private static final int HEADER_CODE = 0x06;
private static final int PACKET_TYPE = 0x8;
private static final int PAYLOAD_LENGTH = 8;
private static final int PACKET_LENGTH = PAYLOAD_LENGTH +
InputPacket.HEADER_LENGTH;
private int headerCode;
// Accessed in ControllerStream for batching
short deltaX;
short deltaY;
public MouseMovePacket(ConnectionContext context, short deltaX, short deltaY)
{
super(PACKET_TYPE);
this.headerCode = HEADER_CODE;
// On Gen 5 servers, the header code is incremented by one
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
headerCode++;
}
this.deltaX = deltaX;
this.deltaY = deltaY;
}
@Override
public void toWirePayload(ByteBuffer bb) {
bb.order(ByteOrder.LITTLE_ENDIAN).putInt(headerCode);
bb.order(ByteOrder.BIG_ENDIAN);
bb.putShort(deltaX);
bb.putShort(deltaY);
}
@Override
public int getPacketLength() {
return PACKET_LENGTH;
}
}
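
deltaX and deltaY are shorts, but the batching loop in ControllerStream above can aggregate far more than a short's range before sending, so the total is clamped and split across packets. A sketch of that split with a hypothetical aggregate delta:

public class MouseDeltaSplitDemo {
    public static void main(String[] args) {
        int totalDeltaX = 70000; // hypothetical sum of many queued moves
        while (totalDeltaX != 0) {
            // Clamp to the short range, send, then subtract what was sent
            short partial = (short) (totalDeltaX < 0
                    ? Math.max(Short.MIN_VALUE, totalDeltaX)
                    : Math.min(Short.MAX_VALUE, totalDeltaX));
            System.out.println("send deltaX=" + partial);
            totalDeltaX -= partial;
        }
        // Prints 32767, 32767, 4466
    }
}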


@@ -1,49 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.ConnectionContext;
public class MouseScrollPacket extends InputPacket {
private static final int HEADER_CODE = 0x09;
private static final int PACKET_TYPE = 0xa;
private static final int PAYLOAD_LENGTH = 10;
private static final int PACKET_LENGTH = PAYLOAD_LENGTH +
InputPacket.HEADER_LENGTH;
private int headerCode;
private short scroll;
public MouseScrollPacket(ConnectionContext context, byte scrollClicks)
{
super(PACKET_TYPE);
this.headerCode = HEADER_CODE;
// On Gen 5 servers, the header code is incremented by one
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
headerCode++;
}
this.scroll = (short)(scrollClicks * 120);
}
@Override
public void toWirePayload(ByteBuffer bb) {
bb.order(ByteOrder.LITTLE_ENDIAN).putInt(headerCode);
bb.order(ByteOrder.BIG_ENDIAN);
bb.putShort(scroll);
bb.putShort(scroll);
bb.putShort((short) 0);
}
@Override
public int getPacketLength() {
return PACKET_LENGTH;
}
}
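
The scrollClicks * 120 scaling above matches the conventional Windows WHEEL_DELTA unit, so one click becomes 120 wheel units, and the value is written twice. A sketch of the resulting 10-byte payload for a hypothetical single upward click:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class MouseScrollDemo {
    public static void main(String[] args) {
        byte scrollClicks = 1;
        short scroll = (short) (scrollClicks * 120); // one click = 120 wheel units

        ByteBuffer bb = ByteBuffer.allocate(10);
        bb.order(ByteOrder.LITTLE_ENDIAN).putInt(0x09); // header code (0x0A on Gen 5+)
        bb.order(ByteOrder.BIG_ENDIAN);                 // remaining fields are big-endian
        bb.putShort(scroll);
        bb.putShort(scroll);
        bb.putShort((short) 0);
        System.out.println(bb.position()); // 10 == PAYLOAD_LENGTH
    }
}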


@@ -1,112 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.ConnectionContext;
public class MultiControllerPacket extends InputPacket {
private static final byte[] TAIL =
{
(byte)0x9C,
0x00,
0x00,
0x00,
0x55,
0x00
};
private static final int HEADER_CODE = 0x0d;
private static final int PACKET_TYPE = 0x1e;
private static final short PAYLOAD_LENGTH = 30;
private static final short PACKET_LENGTH = PAYLOAD_LENGTH +
InputPacket.HEADER_LENGTH;
short controllerNumber;
short activeGamepadMask;
short buttonFlags;
byte leftTrigger;
byte rightTrigger;
short leftStickX;
short leftStickY;
short rightStickX;
short rightStickY;
private int headerCode;
public MultiControllerPacket(ConnectionContext context,
short controllerNumber, short activeGamepadMask,
short buttonFlags, byte leftTrigger, byte rightTrigger,
short leftStickX, short leftStickY,
short rightStickX, short rightStickY)
{
super(PACKET_TYPE);
this.headerCode = HEADER_CODE;
// On Gen 5 servers, the header code is decremented by one
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
headerCode--;
}
this.controllerNumber = controllerNumber;
this.activeGamepadMask = activeGamepadMask;
this.buttonFlags = buttonFlags;
this.leftTrigger = leftTrigger;
this.rightTrigger = rightTrigger;
this.leftStickX = leftStickX;
this.leftStickY = leftStickY;
this.rightStickX = rightStickX;
this.rightStickY = rightStickY;
}
public MultiControllerPacket(int packetType,
short controllerNumber, short activeGamepadMask,
short buttonFlags,
byte leftTrigger, byte rightTrigger,
short leftStickX, short leftStickY,
short rightStickX, short rightStickY)
{
super(packetType);
this.controllerNumber = controllerNumber;
this.activeGamepadMask = activeGamepadMask;
this.buttonFlags = buttonFlags;
this.leftTrigger = leftTrigger;
this.rightTrigger = rightTrigger;
this.leftStickX = leftStickX;
this.leftStickY = leftStickY;
this.rightStickX = rightStickX;
this.rightStickY = rightStickY;
}
@Override
public void toWirePayload(ByteBuffer bb) {
bb.order(ByteOrder.LITTLE_ENDIAN);
bb.putInt(headerCode);
bb.putShort((short) 0x1a);
bb.putShort(controllerNumber);
bb.putShort(activeGamepadMask);
bb.putShort((short) 0x14);
bb.putShort(buttonFlags);
bb.put(leftTrigger);
bb.put(rightTrigger);
bb.putShort(leftStickX);
bb.putShort(leftStickY);
bb.putShort(rightStickX);
bb.putShort(rightStickY);
bb.put(TAIL);
}
@Override
public int getPacketLength() {
return PACKET_LENGTH;
}
}
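
As a sanity check on PAYLOAD_LENGTH above: toWirePayload writes 4 (header code) + 10 (five shorts) + 2 (two trigger bytes) + 8 (four stick shorts) + 6 (tail) = 30 bytes. A sketch that serializes one zeroed packet and confirms the count:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class MultiControllerSizeDemo {
    public static void main(String[] args) {
        ByteBuffer bb = ByteBuffer.allocate(30).order(ByteOrder.LITTLE_ENDIAN);
        bb.putInt(0x0c);          // headerCode (0x0d, minus one on Gen 5+)
        bb.putShort((short) 0x1a);
        bb.putShort((short) 0);   // controllerNumber
        bb.putShort((short) 0x1); // activeGamepadMask
        bb.putShort((short) 0x14);
        bb.putShort((short) 0);   // buttonFlags
        bb.put((byte) 0);         // leftTrigger
        bb.put((byte) 0);         // rightTrigger
        bb.putShort((short) 0);   // leftStickX
        bb.putShort((short) 0);   // leftStickY
        bb.putShort((short) 0);   // rightStickX
        bb.putShort((short) 0);   // rightStickY
        bb.put(new byte[] {(byte) 0x9C, 0x00, 0x00, 0x00, 0x55, 0x00}); // TAIL
        System.out.println(bb.position()); // 30 == PAYLOAD_LENGTH
    }
}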


@@ -1,282 +0,0 @@
package com.limelight.nvstream.rtsp;
import java.io.IOException;
import java.net.Inet6Address;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.util.HashMap;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.av.video.VideoDecoderRenderer.VideoFormat;
import com.limelight.nvstream.enet.EnetConnection;
import com.tinyrtsp.rtsp.message.RtspMessage;
import com.tinyrtsp.rtsp.message.RtspRequest;
import com.tinyrtsp.rtsp.message.RtspResponse;
import com.tinyrtsp.rtsp.parser.RtspParser;
import com.tinyrtsp.rtsp.parser.RtspStream;
public class RtspConnection {
public static final int PORT = 48010;
public static final int RTSP_TIMEOUT = 10000;
private int sequenceNumber = 1;
private int sessionId = 0;
private EnetConnection enetConnection;
private ConnectionContext context;
private String hostStr;
public RtspConnection(ConnectionContext context) {
this.context = context;
if (context.serverAddress instanceof Inet6Address) {
// RFC2732-formatted IPv6 address for use in URL
this.hostStr = "["+context.serverAddress.getHostAddress()+"]";
}
else {
this.hostStr = context.serverAddress.getHostAddress();
}
}
private String getRtspVideoStreamName() {
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
return "video/0/0";
}
else {
return "video";
}
}
private String getRtspAudioStreamName() {
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
return "audio/0/0";
}
else {
return "audio";
}
}
public static int getRtspVersionFromContext(ConnectionContext context) {
switch (context.serverGeneration)
{
case ConnectionContext.SERVER_GENERATION_3:
return 10;
case ConnectionContext.SERVER_GENERATION_4:
return 11;
case ConnectionContext.SERVER_GENERATION_5:
return 12;
case ConnectionContext.SERVER_GENERATION_6:
// Gen 6 has never been seen in the wild
return 13;
case ConnectionContext.SERVER_GENERATION_7:
default:
return 14;
}
}
private RtspRequest createRtspRequest(String command, String target) {
RtspRequest m = new RtspRequest(command, target, "RTSP/1.0",
sequenceNumber++, new HashMap<String, String>(), null);
m.setOption("X-GS-ClientVersion", ""+getRtspVersionFromContext(context));
return m;
}
private String byteBufferToString(byte[] bytes, int length) {
StringBuilder message = new StringBuilder();
for (int i = 0; i < length; i++) {
message.append((char) bytes[i]);
}
return message.toString();
}
private RtspResponse transactRtspMessageEnet(RtspMessage m) throws IOException {
byte[] header, payload;
header = m.toWireNoPayload();
payload = m.toWirePayloadOnly();
// Send the RTSP header
enetConnection.writePacket(ByteBuffer.wrap(header));
// Send payload in a separate packet if there's payload on this
if (payload != null) {
enetConnection.writePacket(ByteBuffer.wrap(payload));
}
// Wait for a response
ByteBuffer responseHeader = enetConnection.readPacket(2048, RTSP_TIMEOUT);
// Parse the response and determine whether it has a payload
RtspResponse message = (RtspResponse) RtspParser.parseMessageNoPayload(byteBufferToString(responseHeader.array(), responseHeader.limit()));
if (message.getOption("Content-Length") != null) {
// The payload comes in a second packet
ByteBuffer responsePayload = enetConnection.readPacket(65536, RTSP_TIMEOUT);
message.setPayload(byteBufferToString(responsePayload.array(), responsePayload.limit()));
}
return message;
}
private RtspResponse transactRtspMessageTcp(RtspMessage m) throws IOException {
Socket s = new Socket();
try {
s.setTcpNoDelay(true);
s.connect(new InetSocketAddress(context.serverAddress, PORT), RTSP_TIMEOUT);
s.setSoTimeout(RTSP_TIMEOUT);
RtspStream rtspStream = new RtspStream(s.getInputStream(), s.getOutputStream());
try {
rtspStream.write(m);
return (RtspResponse) rtspStream.read();
} finally {
rtspStream.close();
}
} finally {
s.close();
}
}
private RtspResponse transactRtspMessage(RtspMessage m) throws IOException {
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
return transactRtspMessageEnet(m);
}
else {
return transactRtspMessageTcp(m);
}
}
private RtspResponse requestOptions() throws IOException {
RtspRequest m = createRtspRequest("OPTIONS", "rtsp://"+hostStr);
return transactRtspMessage(m);
}
private RtspResponse requestDescribe() throws IOException {
RtspRequest m = createRtspRequest("DESCRIBE", "rtsp://"+hostStr);
m.setOption("Accept", "application/sdp");
m.setOption("If-Modified-Since", "Thu, 01 Jan 1970 00:00:00 GMT");
return transactRtspMessage(m);
}
private RtspResponse setupStream(String streamName) throws IOException {
RtspRequest m = createRtspRequest("SETUP", "streamid="+streamName);
if (sessionId != 0) {
m.setOption("Session", ""+sessionId);
}
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_6) {
// It looks like GFE doesn't care what we say our port is, but
// we need to give it some port to successfully complete the handshake.
m.setOption("Transport", "unicast;X-GS-ClientPort=50000-50001");
}
else {
m.setOption("Transport", " ");
}
m.setOption("If-Modified-Since", "Thu, 01 Jan 1970 00:00:00 GMT");
return transactRtspMessage(m);
}
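// Illustrative wire form of a Gen 6+ SETUP request built above, assuming
// tinyrtsp serializes the sequence number as a CSeq header (values hypothetical):
//   SETUP streamid=audio/0/0 RTSP/1.0
//   CSeq: 3
//   X-GS-ClientVersion: 14
//   Transport: unicast;X-GS-ClientPort=50000-50001
//   If-Modified-Since: Thu, 01 Jan 1970 00:00:00 GMT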
private RtspResponse playStream(String streamName) throws IOException {
RtspRequest m = createRtspRequest("PLAY", "streamid="+streamName);
m.setOption("Session", ""+sessionId);
return transactRtspMessage(m);
}
private RtspResponse sendVideoAnnounce() throws IOException {
RtspRequest m = createRtspRequest("ANNOUNCE", "streamid=video");
m.setOption("Session", ""+sessionId);
m.setOption("Content-type", "application/sdp");
m.setPayload(SdpGenerator.generateSdpFromContext(context));
m.setOption("Content-length", ""+m.getPayload().length());
return transactRtspMessage(m);
}
private void processDescribeResponse(RtspResponse r) {
// The RTSP DESCRIBE reply will contain a collection of SDP media attributes that
// describe the various supported video stream formats and include the SPS, PPS,
// and VPS (if applicable). We will use this information to determine whether the
// server can support HEVC. For some reason, the server still sets the MIME type of
// the HEVC format to H264, so we can't just look for the HEVC MIME type. What we'll
// do instead is look for the Base64-encoded VPS NALU prefix that is unique to the HEVC bitstream.
String describeSdpContent = r.getPayload();
if (context.streamConfig.getHevcSupported() &&
describeSdpContent.contains("sprop-parameter-sets=AAAAAU")) {
context.negotiatedVideoFormat = VideoFormat.H265;
}
else {
context.negotiatedVideoFormat = VideoFormat.H264;
}
}
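// Illustrative sketch (not part of the original class) of why the "AAAAAU"
// prefix identifies HEVC: the Annex B start code plus VPS NALU header is
// 00 00 00 01 40 xx, and Base64 of any buffer beginning with those bytes
// starts with "AAAAAU", while an H.264 SPS (typically 00 00 00 01 67)
// encodes to "AAAAAWc..." instead.
private static String encodeHevcVpsPrefixExample() {
byte[] annexBVpsPrefix = new byte[] { 0x00, 0x00, 0x00, 0x01, 0x40, 0x01 };
// Produces "AAAAAUAB"; the first six chars match the detector above
return java.util.Base64.getEncoder().encodeToString(annexBVpsPrefix);
}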
private void processRtspSetupAudio(RtspResponse r) throws IOException {
try {
sessionId = Integer.parseInt(r.getOption("Session"));
} catch (NumberFormatException e) {
throw new IOException("RTSP SETUP response was malformed");
}
}
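// The handshake below follows the order GFE expects:
// OPTIONS -> DESCRIBE -> SETUP audio -> SETUP video
// (-> SETUP control/1/0 on Gen 5+) -> ANNOUNCE video -> PLAY video -> PLAY audio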
public void doRtspHandshake() throws IOException {
RtspResponse r;
// Gen 5+ servers do RTSP over ENet instead of TCP
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
enetConnection = EnetConnection.connect(context.serverAddress.getHostAddress(), PORT, RTSP_TIMEOUT);
}
try {
r = requestOptions();
if (r.getStatusCode() != 200) {
throw new IOException("RTSP OPTIONS request failed: "+r.getStatusCode());
}
r = requestDescribe();
if (r.getStatusCode() != 200) {
throw new IOException("RTSP DESCRIBE request failed: "+r.getStatusCode());
}
// Process the RTSP DESCRIBE response
processDescribeResponse(r);
r = setupStream(getRtspAudioStreamName());
if (r.getStatusCode() != 200) {
throw new IOException("RTSP SETUP request failed: "+r.getStatusCode());
}
// Process the RTSP SETUP streamid=audio response
processRtspSetupAudio(r);
r = setupStream(getRtspVideoStreamName());
if (r.getStatusCode() != 200) {
throw new IOException("RTSP SETUP request failed: "+r.getStatusCode());
}
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
r = setupStream("control/1/0");
if (r.getStatusCode() != 200) {
throw new IOException("RTSP SETUP request failed: "+r.getStatusCode());
}
}
r = sendVideoAnnounce();
if (r.getStatusCode() != 200) {
throw new IOException("RTSP ANNOUNCE request failed: "+r.getStatusCode());
}
r = playStream("video");
if (r.getStatusCode() != 200) {
throw new IOException("RTSP PLAY request failed: "+r.getStatusCode());
}
r = playStream("audio");
if (r.getStatusCode() != 200) {
throw new IOException("RTSP PLAY request failed: "+r.getStatusCode());
}
} finally {
if (enetConnection != null) {
enetConnection.close();
enetConnection = null;
}
}
}
}

View File

@@ -1,208 +0,0 @@
package com.limelight.nvstream.rtsp;
import java.net.Inet6Address;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.ConnectionContext;
import com.limelight.nvstream.av.video.VideoDecoderRenderer.VideoFormat;
public class SdpGenerator {
private static void addSessionAttribute(StringBuilder config, String attribute, String value) {
config.append("a="+attribute+":"+value+" \r\n");
}
private static void addSessionAttributeBytes(StringBuilder config, String attribute, byte[] value) {
char[] str = new char[value.length];
for (int i = 0; i < value.length; i++) {
str[i] = (char)value[i];
}
addSessionAttribute(config, attribute, new String(str));
}
private static void addSessionAttributeInt(StringBuilder config, String attribute, int value) {
ByteBuffer b = ByteBuffer.allocate(4).order(ByteOrder.BIG_ENDIAN);
b.putInt(value);
addSessionAttributeBytes(config, attribute, b.array());
}
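// Worked example (illustrative): the Gen 3 "int" attributes below are four
// ASCII bytes packed big-endian, so 0x41514141 becomes bytes 41 51 41 41,
// i.e. the string "AQAA", and the emitted line reads
// "a=x-nv-video[0].transferProtocol:AQAA \r\n".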
private static void addGen3Attributes(StringBuilder config, ConnectionContext context) {
addSessionAttribute(config, "x-nv-general.serverAddress", context.serverAddress.getHostAddress());
addSessionAttributeInt(config, "x-nv-general.featureFlags", 0x42774141);
addSessionAttributeInt(config, "x-nv-video[0].transferProtocol", 0x41514141);
addSessionAttributeInt(config, "x-nv-video[1].transferProtocol", 0x41514141);
addSessionAttributeInt(config, "x-nv-video[2].transferProtocol", 0x41514141);
addSessionAttributeInt(config, "x-nv-video[3].transferProtocol", 0x41514141);
addSessionAttributeInt(config, "x-nv-video[0].rateControlMode", 0x42414141);
addSessionAttributeInt(config, "x-nv-video[1].rateControlMode", 0x42514141);
addSessionAttributeInt(config, "x-nv-video[2].rateControlMode", 0x42514141);
addSessionAttributeInt(config, "x-nv-video[3].rateControlMode", 0x42514141);
addSessionAttribute(config, "x-nv-vqos[0].bw.flags", "14083");
addSessionAttribute(config, "x-nv-vqos[0].videoQosMaxConsecutiveDrops", "0");
addSessionAttribute(config, "x-nv-vqos[1].videoQosMaxConsecutiveDrops", "0");
addSessionAttribute(config, "x-nv-vqos[2].videoQosMaxConsecutiveDrops", "0");
addSessionAttribute(config, "x-nv-vqos[3].videoQosMaxConsecutiveDrops", "0");
}
private static void addGen4Attributes(StringBuilder config, ConnectionContext context) {
addSessionAttribute(config, "x-nv-general.serverAddress", "rtsp://"+context.serverAddress.getHostAddress()+":48010");
addSessionAttribute(config, "x-nv-video[0].rateControlMode", "4");
}
private static void addGen5Attributes(StringBuilder config, ConnectionContext context) {
// We want to use the new ENet connections for control and input
addSessionAttribute(config, "x-nv-general.useReliableUdp", "1");
addSessionAttribute(config, "x-nv-ri.useControlChannel", "1");
// Disable dynamic resolution switching
addSessionAttribute(config, "x-nv-vqos[0].drc.enable", "0");
}
public static String generateSdpFromContext(ConnectionContext context) {
// By now, we must have decided on a format
if (context.negotiatedVideoFormat == VideoFormat.Unknown) {
throw new IllegalStateException("Video format negotiation must be completed before generating SDP response");
}
// Also, resolution and frame rate must be set
if (context.negotiatedWidth == 0 || context.negotiatedHeight == 0 || context.negotiatedFps == 0) {
throw new IllegalStateException("Video resolution/FPS negotiation must be completed before generating SDP response");
}
StringBuilder config = new StringBuilder();
config.append("v=0").append("\r\n"); // SDP Version 0
config.append("o=android 0 "+RtspConnection.getRtspVersionFromContext(context)+" IN ");
if (context.serverAddress instanceof Inet6Address) {
config.append("IPv6 ");
}
else {
config.append("IPv4 ");
}
config.append(context.serverAddress.getHostAddress());
config.append("\r\n");
config.append("s=NVIDIA Streaming Client").append("\r\n");
addSessionAttribute(config, "x-nv-video[0].clientViewportWd", ""+context.negotiatedWidth);
addSessionAttribute(config, "x-nv-video[0].clientViewportHt", ""+context.negotiatedHeight);
addSessionAttribute(config, "x-nv-video[0].maxFPS", ""+context.negotiatedFps);
addSessionAttribute(config, "x-nv-video[0].packetSize", ""+context.streamConfig.getMaxPacketSize());
addSessionAttribute(config, "x-nv-video[0].timeoutLengthMs", "7000");
addSessionAttribute(config, "x-nv-video[0].framesWithInvalidRefThreshold", "0");
// H.265 can encode much more efficiently, but we have a problem since not all
// users will be using H.265 and we don't have an independent bitrate setting
// for H.265. We'll use the selected bitrate * 0.75 when H.265 is in use.
int bitrate;
if (context.negotiatedVideoFormat == VideoFormat.H265) {
bitrate = (int)(context.streamConfig.getBitrate()*0.75);
}
else {
bitrate = context.streamConfig.getBitrate();
}
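// e.g. (illustrative) a selected bitrate of 10000 Kbps is sent as 7500 Kbps
// when H.265 was negotiated above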
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_5) {
addSessionAttribute(config, "x-nv-vqos[0].bw.minimumBitrateKbps", ""+bitrate);
addSessionAttribute(config, "x-nv-vqos[0].bw.maximumBitrateKbps", ""+bitrate);
}
else {
if (context.streamConfig.getRemote()) {
addSessionAttribute(config, "x-nv-video[0].averageBitrate", "4");
addSessionAttribute(config, "x-nv-video[0].peakBitrate", "4");
}
// We don't support dynamic bitrate scaling properly (it tends to bounce between min and max and never
// settle on the optimal bitrate if it's somewhere in the middle), so we'll just latch the bitrate
// to the requested value.
addSessionAttribute(config, "x-nv-vqos[0].bw.minimumBitrate", ""+bitrate);
addSessionAttribute(config, "x-nv-vqos[0].bw.maximumBitrate", ""+bitrate);
}
// Using FEC turns padding on, which makes us take the slow path in the
// depacketizer, not to mention exposing some ambiguous cases in
// distinguishing padding from valid sequences. Since we can only execute
// an FEC recovery on a one-packet frame, we'll just turn it off completely.
addSessionAttribute(config, "x-nv-vqos[0].fec.enable", "0");
addSessionAttribute(config, "x-nv-vqos[0].videoQualityScoreUpdateTime", "5000");
if (context.streamConfig.getRemote()) {
addSessionAttribute(config, "x-nv-vqos[0].qosTrafficType", "0");
}
else {
addSessionAttribute(config, "x-nv-vqos[0].qosTrafficType", "5");
}
if (context.streamConfig.getRemote()) {
addSessionAttribute(config, "x-nv-aqos.qosTrafficType", "0");
}
else {
addSessionAttribute(config, "x-nv-aqos.qosTrafficType", "4");
}
// Add generation-specific attributes
switch (context.serverGeneration) {
case ConnectionContext.SERVER_GENERATION_3:
addGen3Attributes(config, context);
break;
case ConnectionContext.SERVER_GENERATION_4:
addGen4Attributes(config, context);
break;
case ConnectionContext.SERVER_GENERATION_5:
default:
addGen5Attributes(config, context);
break;
}
// Gen 4+ supports H.265 and surround sound
if (context.serverGeneration >= ConnectionContext.SERVER_GENERATION_4) {
// If client and server are able, request HEVC
if (context.negotiatedVideoFormat == VideoFormat.H265) {
addSessionAttribute(config, "x-nv-clientSupportHevc", "1");
addSessionAttribute(config, "x-nv-vqos[0].bitStreamFormat", "1");
// Disable slicing on HEVC
addSessionAttribute(config, "x-nv-video[0].videoEncoderSlicesPerFrame", "1");
}
else {
// Otherwise, use AVC
addSessionAttribute(config, "x-nv-clientSupportHevc", "0");
addSessionAttribute(config, "x-nv-vqos[0].bitStreamFormat", "0");
// Use slicing for increased performance on some decoders
addSessionAttribute(config, "x-nv-video[0].videoEncoderSlicesPerFrame", "4");
}
// Enable surround sound if configured for it
addSessionAttribute(config, "x-nv-audio.surround.numChannels", ""+context.streamConfig.getAudioChannelCount());
addSessionAttribute(config, "x-nv-audio.surround.channelMask", ""+context.streamConfig.getAudioChannelMask());
if (context.streamConfig.getAudioChannelCount() > 2) {
addSessionAttribute(config, "x-nv-audio.surround.enable", "1");
}
else {
addSessionAttribute(config, "x-nv-audio.surround.enable", "0");
}
}
config.append("t=0 0").append("\r\n");
if (context.serverGeneration == ConnectionContext.SERVER_GENERATION_3) {
config.append("m=video 47996 ").append("\r\n");
}
else {
config.append("m=video 47998 ").append("\r\n");
}
return config.toString();
}
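// For illustration only (hypothetical session values), a Gen 5 H.264
// 1080p60 session yields SDP shaped roughly like:
//   v=0
//   o=android 0 12 IN IPv4 192.168.1.2
//   s=NVIDIA Streaming Client
//   a=x-nv-video[0].clientViewportWd:1920
//   a=x-nv-video[0].maxFPS:60
//   ... (remaining a= attributes) ...
//   t=0 0
//   m=video 47998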
}

View File

@@ -1,7 +0,0 @@
package com.limelight.utils;
public class TimeHelper {
public static long getMonotonicMillis() {
return System.nanoTime() / 1000000L;
}
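// Usage sketch (illustrative): System.nanoTime() is monotonic, unlike
// System.currentTimeMillis(), so intervals measured this way are immune to
// wall-clock adjustments:
//   long start = TimeHelper.getMonotonicMillis();
//   doWork(); // hypothetical workload
//   long elapsedMs = TimeHelper.getMonotonicMillis() - start;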
}

View File

@@ -1,47 +0,0 @@
package com.limelight.utils;
public class Vector2d {
private float x;
private float y;
private double magnitude;
public static final Vector2d ZERO = new Vector2d();
public Vector2d() {
initialize(0, 0);
}
public void initialize(float x, float y) {
this.x = x;
this.y = y;
this.magnitude = Math.sqrt(Math.pow(x, 2) + Math.pow(y, 2));
}
public double getMagnitude() {
return magnitude;
}
public void getNormalized(Vector2d vector) {
vector.initialize((float)(x / magnitude), (float)(y / magnitude));
}
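// Usage sketch (illustrative): normalize into a separate output vector, and
// check the magnitude first since a zero vector cannot be normalized:
//   Vector2d dir = new Vector2d();
//   if (stick.getMagnitude() > 0) {
//       stick.getNormalized(dir);
//   }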
public void scalarMultiply(double factor) {
initialize((float)(x * factor), (float)(y * factor));
}
public void setX(float x) {
initialize(x, this.y);
}
public void setY(float y) {
initialize(this.x, y);
}
public float getX() {
return x;
}
public float getY() {
return y;
}
}