Rebase of limelight-pc off RenderScript-Renderer and Limelight-common

Cameron Gutman
2013-12-05 18:12:22 -05:00
parent 2c0fb8100b
commit 751d601112
31 changed files with 214 additions and 2717 deletions

.gitignore vendored

@@ -8,6 +8,5 @@
# Package Files #
*.jar
*.war
*.ear


@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="lib" path="libs/limelight-common.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>


@@ -2,10 +2,12 @@ package com.limelight;
import javax.swing.JOptionPane;
import com.limelight.binding.PlatformBinding;
import com.limelight.gui.MainFrame;
import com.limelight.gui.StreamFrame;
import com.limelight.nvstream.NvConnection;
import com.limelight.nvstream.NvConnectionListener;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
public class Limelight implements NvConnectionListener {
public static final double VERSION = 1.0;
@@ -21,8 +23,11 @@ public class Limelight implements NvConnectionListener {
private void startUp() {
streamFrame = new StreamFrame();
streamFrame.build();
conn = new NvConnection(host, streamFrame, this);
conn.start();
conn = new NvConnection(host, this);
conn.start(PlatformBinding.getDeviceName(), streamFrame,
VideoDecoderRenderer.FLAG_PREFER_QUALITY,
PlatformBinding.getAudioRenderer(),
PlatformBinding.getVideoDecoderRenderer());
}
public static void createInstance(String host) {
@@ -38,7 +43,6 @@ public class Limelight implements NvConnectionListener {
@Override
public void stageStarting(Stage stage) {
System.out.println("Starting "+stage.getName());
}
@Override
@@ -49,7 +53,6 @@ public class Limelight implements NvConnectionListener {
public void stageFailed(Stage stage) {
JOptionPane.showMessageDialog(streamFrame, "Starting "+stage.getName()+" failed", "Connection Error", JOptionPane.ERROR_MESSAGE);
conn.stop();
}
@Override
@@ -62,5 +65,10 @@ public class Limelight implements NvConnectionListener {
JOptionPane.showMessageDialog(streamFrame, "The connection failed unexpectedly", "Connection Terminated", JOptionPane.ERROR_MESSAGE);
conn.stop();
}
@Override
public void displayMessage(String message) {
JOptionPane.showMessageDialog(streamFrame, message, "Limelight", JOptionPane.INFORMATION_MESSAGE);
}
}


@@ -0,0 +1,20 @@
package com.limelight.binding;
import com.limelight.binding.audio.JavaxAudioRenderer;
import com.limelight.binding.video.SwingCpuDecoderRenderer;
import com.limelight.nvstream.av.audio.AudioRenderer;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
public class PlatformBinding {
public static VideoDecoderRenderer getVideoDecoderRenderer() {
return new SwingCpuDecoderRenderer();
}
public static String getDeviceName() {
return "foobar";
}
public static AudioRenderer getAudioRenderer() {
return new JavaxAudioRenderer();
}
}
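
The new PlatformBinding class is a set of static factories that the reworked NvConnection.start() call in Limelight.startUp() (see the hunk above) consumes. As a quick sanity check, a minimal sketch, not part of this commit, that only exercises the factories shown in this file:

import com.limelight.binding.PlatformBinding;
import com.limelight.nvstream.av.audio.AudioRenderer;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;

public class BindingSketch {
    public static void main(String[] args) {
        // Construct each binding once and report what the platform provides
        AudioRenderer audio = PlatformBinding.getAudioRenderer();
        VideoDecoderRenderer video = PlatformBinding.getVideoDecoderRenderer();
        System.out.println(PlatformBinding.getDeviceName() + ": "
                + audio.getClass().getSimpleName() + ", "
                + video.getClass().getSimpleName());
    }
}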


@@ -0,0 +1,46 @@
package com.limelight.binding.audio;
import java.nio.ByteBuffer;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import com.limelight.nvstream.av.audio.AudioRenderer;
public class JavaxAudioRenderer implements AudioRenderer {
private SourceDataLine soundLine;
@Override
public void playDecodedAudio(short[] pcmData, int offset, int length) {
if (soundLine != null) {
byte[] pcmDataBytes = new byte[length * 2];
ByteBuffer.wrap(pcmDataBytes).asShortBuffer().put(pcmData, offset, length);
soundLine.write(pcmDataBytes, 0, pcmDataBytes.length);
}
}
@Override
public void streamClosing() {
if (soundLine != null) {
soundLine.close();
}
}
@Override
public void streamInitialized(int channelCount, int sampleRate) {
AudioFormat audioFormat = new AudioFormat(sampleRate, 16, channelCount, true, true);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, 1);
try {
soundLine = (SourceDataLine) AudioSystem.getLine(info);
soundLine.open(audioFormat);
soundLine.start();
} catch (LineUnavailableException e) {
soundLine = null;
}
}
}
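
The renderer above opens a signed 16-bit big-endian line and playDecodedAudio() repacks the decoded shorts to match it. A minimal sketch, not part of this commit, that drives the class on its own; the 48 kHz stereo format is an assumption here, in practice the values arrive via the streamInitialized() callback:

import com.limelight.binding.audio.JavaxAudioRenderer;

public class AudioRendererSketch {
    public static void main(String[] args) {
        JavaxAudioRenderer renderer = new JavaxAudioRenderer();
        renderer.streamInitialized(2, 48000); // stereo, 48 kHz (assumed values)

        // Feed one second of silence in 20 ms chunks, roughly how the
        // depacketizer would hand over decoded audio frames
        short[] silence = new short[960 * 2];
        for (int i = 0; i < 50; i++) {
            renderer.playDecodedAudio(silence, 0, silence.length);
        }
        renderer.streamClosing();
    }
}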


@@ -0,0 +1,135 @@
package com.limelight.binding.video;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import javax.swing.JFrame;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.av.video.cpu.AvcDecoder;
public class SwingCpuDecoderRenderer implements VideoDecoderRenderer {
private Thread rendererThread;
private int targetFps;
private int width, height;
private byte[] imageData;
private int[] imageBuffer;
private Graphics graphics;
private BufferedImage image;
private static final int DECODER_BUFFER_SIZE = 92*1024;
private ByteBuffer decoderBuffer;
// Only sleep if the difference is above this value
private static final int WAIT_CEILING_MS = 8;
@Override
public void setup(int width, int height, Object renderTarget, int drFlags) {
this.targetFps = 30;
this.width = width;
this.height = height;
// Single threaded low latency decode is ideal
int avcFlags = AvcDecoder.LOW_LATENCY_DECODE;
int threadCount = 1;
int err = AvcDecoder.init(width, height, avcFlags, threadCount);
if (err != 0) {
throw new IllegalStateException("AVC decoder initialization failure: "+err);
}
graphics = ((JFrame)renderTarget).getGraphics();
image = new BufferedImage(width, height,
BufferedImage.TYPE_INT_ARGB);
imageData = new byte[width * height * 4]; // 4 bytes per pixel (ARGB)
imageBuffer = new int[width * height];
decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + AvcDecoder.getInputPaddingSize());
System.out.println("Using software decoding");
}
@Override
public void start() {
rendererThread = new Thread() {
@Override
public void run() {
long nextFrameTime = System.currentTimeMillis();
while (!isInterrupted())
{
long diff = nextFrameTime - System.currentTimeMillis();
if (diff > WAIT_CEILING_MS) {
try {
Thread.sleep(diff);
} catch (InterruptedException e) {
return;
}
}
nextFrameTime = computePresentationTimeMs(targetFps);
if (AvcDecoder.getRgbFrame(imageData, imageData.length)) {
ByteBuffer.wrap(imageData).asIntBuffer().get(imageBuffer);
image.setRGB(0, 0, width, height, imageBuffer, 0, width);
graphics.drawImage(image, 0, 0, width, height, null);
}
}
}
};
rendererThread.setName("Video - Renderer (CPU)");
rendererThread.start();
}
private long computePresentationTimeMs(int frameRate) {
return System.currentTimeMillis() + (1000 / frameRate);
}
@Override
public void stop() {
rendererThread.interrupt();
try {
rendererThread.join();
} catch (InterruptedException e) { }
}
@Override
public void release() {
AvcDecoder.destroy();
}
@Override
public boolean submitDecodeUnit(DecodeUnit decodeUnit) {
byte[] data;
// Use the reserved decoder buffer if this decode unit will fit
if (decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE) {
decoderBuffer.clear();
for (ByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
}
data = decoderBuffer.array();
}
else {
data = new byte[decodeUnit.getDataLength()+AvcDecoder.getInputPaddingSize()];
int offset = 0;
for (ByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
System.arraycopy(bbd.data, bbd.offset, data, offset, bbd.length);
offset += bbd.length;
}
}
return (AvcDecoder.decode(data, 0, decodeUnit.getDataLength()) == 0);
}
}
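
The renderer thread above paces itself by computing the next presentation time as now + 1000/targetFps and sleeping only when the remaining wait exceeds WAIT_CEILING_MS. A self-contained sketch of just that pacing loop, not part of this commit, with the decode-and-draw step replaced by a counter:

public class PacingSketch {
    // Same threshold as the renderer: only sleep for waits above 8 ms
    private static final int WAIT_CEILING_MS = 8;

    public static void main(String[] args) throws InterruptedException {
        int targetFps = 30;
        long nextFrameTime = System.currentTimeMillis();
        int framesRendered = 0;

        while (framesRendered < 90) { // about three seconds at 30 FPS
            long diff = nextFrameTime - System.currentTimeMillis();
            if (diff > WAIT_CEILING_MS) {
                Thread.sleep(diff);
            }
            nextFrameTime = System.currentTimeMillis() + (1000 / targetFps);
            framesRendered++; // stand-in for AvcDecoder.getRgbFrame() + drawImage()
        }
        System.out.println("Rendered " + framesRendered + " frames");
    }
}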


@@ -1,31 +0,0 @@
package com.limelight.nvstream;
public class NvApp {
private String appName;
private int appId;
private boolean isRunning;
public void setAppName(String appName) {
this.appName = appName;
}
public void setAppId(String appId) {
this.appId = Integer.parseInt(appId);
}
public void setIsRunning(String isRunning) {
this.isRunning = isRunning.equals("1");
}
public String getAppName() {
return this.appName;
}
public int getAppId() {
return this.appId;
}
public boolean getIsRunning() {
return this.isRunning;
}
}


@@ -1,250 +0,0 @@
package com.limelight.nvstream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.SourceDataLine;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.AvShortBufferDescriptor;
import com.limelight.nvstream.av.audio.AvAudioDepacketizer;
import com.limelight.nvstream.av.audio.OpusDecoder;
public class NvAudioStream {
public static final int RTP_PORT = 48000;
public static final int RTCP_PORT = 47999;
private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>(100);
private SourceDataLine track;
private DatagramSocket rtp;
private AvAudioDepacketizer depacketizer = new AvAudioDepacketizer();
private LinkedList<Thread> threads = new LinkedList<Thread>();
private boolean aborting = false;
private InetAddress host;
private NvConnectionListener listener;
public NvAudioStream(InetAddress host, NvConnectionListener listener)
{
this.host = host;
this.listener = listener;
}
public void abort()
{
if (aborting) {
return;
}
aborting = true;
for (Thread t : threads) {
t.interrupt();
}
// Close the socket to interrupt the receive thread
if (rtp != null) {
rtp.close();
}
// Wait for threads to terminate
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException e) { }
}
if (track != null) {
track.close();
}
threads.clear();
}
public void startAudioStream() throws SocketException
{
setupRtpSession();
setupAudio();
startReceiveThread();
startDepacketizerThread();
startDecoderThread();
startUdpPingThread();
}
private void setupRtpSession() throws SocketException
{
rtp = new DatagramSocket(RTP_PORT);
}
private void setupAudio()
{
int channelConfig;
int err;
err = OpusDecoder.init();
if (err != 0) {
throw new IllegalStateException("Opus decoder failed to initialize");
}
switch (OpusDecoder.getChannelCount())
{
case 1:
channelConfig = 1;
break;
case 2:
channelConfig = 2;
break;
default:
throw new IllegalStateException("Opus decoder returned unhandled channel count");
}
/*
track = new AudioTrack(AudioManager.STREAM_MUSIC,
OpusDecoder.getSampleRate(),
channelConfig,
AudioFormat.ENCODING_PCM_16BIT,
1024, // 1KB buffer
AudioTrack.MODE_STREAM);
track.play();*/
}
private void startDepacketizerThread()
{
// This thread lessens the work on the receive thread
// so it can spend more time waiting for data
Thread t = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
AvRtpPacket packet;
try {
packet = packets.take();
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
depacketizer.decodeInputData(packet);
}
}
};
threads.add(t);
t.setName("Audio - Depacketizer");
t.start();
}
private void startDecoderThread()
{
// Decoder thread
Thread t = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
AvShortBufferDescriptor samples;
try {
samples = depacketizer.getNextDecodedData();
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
//track.write(samples.data, samples.offset, samples.length);
}
}
};
threads.add(t);
t.setName("Audio - Player");
t.start();
}
private void startReceiveThread()
{
// Receive thread
Thread t = new Thread() {
@Override
public void run() {
AvByteBufferDescriptor desc = new AvByteBufferDescriptor(new byte[1500], 0, 1500);
DatagramPacket packet = new DatagramPacket(desc.data, desc.length);
while (!isInterrupted())
{
try {
rtp.receive(packet);
} catch (IOException e) {
listener.connectionTerminated(e);
return;
}
// Give the packet to the depacketizer thread
desc.length = packet.getLength();
if (packets.offer(new AvRtpPacket(desc))) {
desc.reinitialize(new byte[1500], 0, 1500);
packet.setData(desc.data, desc.offset, desc.length);
}
}
}
};
threads.add(t);
t.setName("Audio - Receive");
t.start();
}
private void startUdpPingThread()
{
// Ping thread
Thread t = new Thread() {
@Override
public void run() {
// PING in ASCII
final byte[] pingPacketData = new byte[] {0x50, 0x49, 0x4E, 0x47};
DatagramPacket pingPacket = new DatagramPacket(pingPacketData, pingPacketData.length);
pingPacket.setSocketAddress(new InetSocketAddress(host, RTP_PORT));
// Send PING every 100 ms
while (!isInterrupted())
{
try {
rtp.send(pingPacket);
} catch (IOException e) {
listener.connectionTerminated(e);
return;
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
}
}
};
threads.add(t);
t.setPriority(Thread.MIN_PRIORITY);
t.setName("Audio - Ping");
t.start();
}
}


@@ -1,245 +0,0 @@
package com.limelight.nvstream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.concurrent.ThreadPoolExecutor;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import javax.xml.stream.XMLStreamException;
import com.limelight.nvstream.input.NvController;
public class NvConnection {
private String host;
private JFrame parent;
private NvConnectionListener listener;
private int drFlags;
private InetAddress hostAddr;
private NvControl controlStream;
private NvController inputStream;
private NvVideoStream videoStream;
private NvAudioStream audioStream;
private ThreadPoolExecutor threadPool;
public NvConnection(String host, JFrame parent, NvConnectionListener listener) {
this.host = host;
this.parent = parent;
this.listener = listener;
}
public static String getMacAddressString() throws SocketException {
Enumeration<NetworkInterface> ifaceList;
NetworkInterface selectedIface = null;
// First look for a WLAN interface (since those generally aren't removable)
ifaceList = NetworkInterface.getNetworkInterfaces();
while (selectedIface == null && ifaceList.hasMoreElements()) {
NetworkInterface iface = ifaceList.nextElement();
if (iface.getName().startsWith("wlan") &&
iface.getHardwareAddress() != null) {
selectedIface = iface;
}
}
// If we didn't find that, look for an Ethernet interface
ifaceList = NetworkInterface.getNetworkInterfaces();
while (selectedIface == null && ifaceList.hasMoreElements()) {
NetworkInterface iface = ifaceList.nextElement();
if (iface.getName().startsWith("eth") &&
iface.getHardwareAddress() != null) {
selectedIface = iface;
}
}
// Now just find something with a MAC address
ifaceList = NetworkInterface.getNetworkInterfaces();
while (selectedIface == null && ifaceList.hasMoreElements()) {
NetworkInterface iface = ifaceList.nextElement();
if (iface.getHardwareAddress() != null) {
selectedIface = ifaceList.nextElement();
break;
}
}
if (selectedIface == null) {
return null;
}
byte[] macAddress = selectedIface.getHardwareAddress();
if (macAddress != null) {
StringBuilder addrStr = new StringBuilder();
for (int i = 0; i < macAddress.length; i++) {
addrStr.append(String.format("%02x", macAddress[i]));
if (i != macAddress.length - 1) {
addrStr.append(':');
}
}
return addrStr.toString();
}
return null;
}
public void start() {
new Thread(new Runnable() {
@Override
public void run() {
try {
hostAddr = InetAddress.getByName(host);
} catch (UnknownHostException e) {
e.printStackTrace();
displayMessage(e.getMessage());
listener.connectionTerminated(e);
return;
}
establishConnection();
}
}).start();
}
private void establishConnection() {
for (NvConnectionListener.Stage currentStage : NvConnectionListener.Stage.values())
{
boolean success = false;
listener.stageStarting(currentStage);
try {
switch (currentStage)
{
case LAUNCH_APP:
success = startSteamBigPicture();
break;
case HANDSHAKE:
success = NvHandshake.performHandshake(hostAddr);
break;
case CONTROL_START:
success = startControlStream();
break;
case VIDEO_START:
success = startVideoStream();
break;
case AUDIO_START:
success = startAudioStream();
break;
case CONTROL_START2:
controlStream.startJitterPackets();
success = true;
break;
case INPUT_START:
success = startInputConnection();
break;
}
} catch (Exception e) {
e.printStackTrace();
success = false;
}
if (success) {
listener.stageComplete(currentStage);
}
else {
listener.stageFailed(currentStage);
return;
}
}
listener.connectionStarted();
}
private boolean startSteamBigPicture() throws XMLStreamException, IOException
{
System.out.println(hostAddr.toString() + "\t" + getMacAddressString());
NvHTTP h = new NvHTTP(hostAddr.toString(), "");//getMacAddressString());
if (!h.getPairState()) {
displayMessage("Device not paired with computer");
return false;
}
int sessionId = h.getSessionId();
int appId = h.getSteamAppId(sessionId);
h.launchApp(sessionId, appId);
return true;
}
private boolean startControlStream() throws IOException
{
controlStream = new NvControl(hostAddr, listener);
controlStream.initialize();
controlStream.start();
return true;
}
private boolean startVideoStream() throws IOException
{
videoStream = new NvVideoStream(hostAddr, listener, controlStream);
//videoStream.startVideoStream(video, drFlags);
return true;
}
private boolean startAudioStream() throws IOException
{
audioStream = new NvAudioStream(hostAddr, listener);
audioStream.startAudioStream();
return true;
}
private boolean startInputConnection() throws IOException
{
inputStream = new NvController(hostAddr);
inputStream.initialize();
return true;
}
public void stop()
{
threadPool.shutdownNow();
if (videoStream != null) {
videoStream.abort();
}
if (audioStream != null) {
audioStream.abort();
}
if (controlStream != null) {
controlStream.abort();
}
if (inputStream != null) {
inputStream.close();
inputStream = null;
}
}
private void displayMessage(final String text) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
JOptionPane.showMessageDialog(parent, text);
}
});
}
}


@@ -1,30 +0,0 @@
package com.limelight.nvstream;
public interface NvConnectionListener {
public enum Stage {
LAUNCH_APP("app"),
HANDSHAKE("handshake"),
CONTROL_START("control connection"),
VIDEO_START("video stream"),
AUDIO_START("audio stream"),
CONTROL_START2("control connection"),
INPUT_START("input connection");
private String name;
private Stage(String name) {
this.name = name;
}
public String getName() {
return name;
}
};
public void stageStarting(Stage stage);
public void stageComplete(Stage stage);
public void stageFailed(Stage stage);
public void connectionStarted();
public void connectionTerminated(Exception e);
}


@@ -1,483 +0,0 @@
package com.limelight.nvstream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.av.ConnectionStatusListener;
public class NvControl implements ConnectionStatusListener {
public static final int PORT = 47995;
public static final int CONTROL_TIMEOUT = 5000;
public static final short PTYPE_HELLO = 0x1204;
public static final short PPAYLEN_HELLO = 0x0004;
public static final byte[] PPAYLOAD_HELLO =
{
(byte)0x00,
(byte)0x05,
(byte)0x00,
(byte)0x00
};
public static final short PTYPE_KEEPALIVE = 0x13ff;
public static final short PPAYLEN_KEEPALIVE = 0x0000;
public static final short PTYPE_HEARTBEAT = 0x1401;
public static final short PPAYLEN_HEARTBEAT = 0x0000;
public static final short PTYPE_1405 = 0x1405;
public static final short PPAYLEN_1405 = 0x0000;
public static final short PTYPE_RESYNC = 0x1404;
public static final short PPAYLEN_RESYNC = 16;
public static final short PTYPE_CONFIG = 0x1205;
public static final short PPAYLEN_CONFIG = 0x0004;
public static final int[] PPAYLOAD_CONFIG =
{
720,
266758,
1,
266762,
30,
70151,
68291329,
1280,
68291584,
1280,
68291840,
15360,
68292096,
25600,
68292352,
2048,
68292608,
1024,
68289024,
262144,
17957632,
302055424,
134217729,
16777490,
70153,
68293120,
768000,
17961216,
303235072,
335609857,
838861842,
352321536,
1006634002,
369098752,
335545362,
385875968,
1042,
402653184,
134218770,
419430400,
167773202,
436207616,
855638290,
266779,
7000,
266780,
2000,
266781,
50,
266782,
3000,
266783,
2,
266794,
5000,
266795,
500,
266784,
75,
266785,
25,
266786,
10,
266787,
60,
266788,
30,
266789,
3,
266790,
1000,
266791,
5000,
266792,
5000,
266793,
5000,
70190,
68301063,
10240,
68301312,
6400,
68301568,
768000,
68299776,
768,
68300032,
2560,
68300544,
0,
34746368,
(int)0xFE000000
};
public static final short PTYPE_JITTER = 0x140c;
public static final short PPAYLEN_JITTER = 0x10;
private int seqNum;
private NvConnectionListener listener;
private InetAddress host;
private Socket s;
private InputStream in;
private OutputStream out;
private Thread heartbeatThread;
private Thread jitterThread;
private boolean aborting = false;
public NvControl(InetAddress host, NvConnectionListener listener)
{
this.listener = listener;
this.host = host;
}
public void initialize() throws IOException
{
s = new Socket();
s.setSoTimeout(CONTROL_TIMEOUT);
s.connect(new InetSocketAddress(host, PORT), CONTROL_TIMEOUT);
in = s.getInputStream();
out = s.getOutputStream();
}
private void sendPacket(NvCtlPacket packet) throws IOException
{
out.write(packet.toWire());
out.flush();
}
private NvControl.NvCtlResponse sendAndGetReply(NvCtlPacket packet) throws IOException
{
sendPacket(packet);
return new NvCtlResponse(in);
}
private void sendJitter() throws IOException
{
ByteBuffer bb = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
bb.putInt(0);
bb.putInt(77);
bb.putInt(888);
bb.putInt(seqNum += 2);
sendPacket(new NvCtlPacket(PTYPE_JITTER, PPAYLEN_JITTER, bb.array()));
}
public void abort()
{
if (aborting) {
return;
}
aborting = true;
if (jitterThread != null) {
jitterThread.interrupt();
}
if (heartbeatThread != null) {
heartbeatThread.interrupt();
}
try {
s.close();
} catch (IOException e) {}
}
public void requestResync() throws IOException
{
System.out.println("CTL: Requesting IDR frame");
sendResync();
}
public void start() throws IOException
{
sendHello();
sendConfig();
pingPong();
send1405AndGetResponse();
heartbeatThread = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
try {
sendHeartbeat();
} catch (IOException e) {
listener.connectionTerminated(e);
return;
}
try {
Thread.sleep(3000);
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
}
}
};
heartbeatThread.start();
}
public void startJitterPackets()
{
jitterThread = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
try {
sendJitter();
} catch (IOException e) {
listener.connectionTerminated(e);
return;
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
}
}
};
jitterThread.start();
}
private NvControl.NvCtlResponse send1405AndGetResponse() throws IOException
{
return sendAndGetReply(new NvCtlPacket(PTYPE_1405, PPAYLEN_1405));
}
private void sendHello() throws IOException
{
sendPacket(new NvCtlPacket(PTYPE_HELLO, PPAYLEN_HELLO, PPAYLOAD_HELLO));
}
private void sendResync() throws IOException
{
ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLEN_RESYNC]).order(ByteOrder.LITTLE_ENDIAN);
conf.putLong(0);
conf.putLong(0xFFFF);
sendAndGetReply(new NvCtlPacket(PTYPE_RESYNC, PPAYLEN_RESYNC, conf.array()));
}
private void sendConfig() throws IOException
{
ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLOAD_CONFIG.length * 4 + 3]).order(ByteOrder.LITTLE_ENDIAN);
for (int i : PPAYLOAD_CONFIG)
conf.putInt(i);
conf.putShort((short)0x0013);
conf.put((byte) 0x00);
sendPacket(new NvCtlPacket(PTYPE_CONFIG, PPAYLEN_CONFIG, conf.array()));
}
private void sendHeartbeat() throws IOException
{
sendPacket(new NvCtlPacket(PTYPE_HEARTBEAT, PPAYLEN_HEARTBEAT));
}
private NvControl.NvCtlResponse pingPong() throws IOException
{
sendPacket(new NvCtlPacket(PTYPE_KEEPALIVE, PPAYLEN_KEEPALIVE));
return new NvControl.NvCtlResponse(in);
}
class NvCtlPacket {
public short type;
public short paylen;
public byte[] payload;
public NvCtlPacket(InputStream in) throws IOException
{
byte[] header = new byte[4];
int offset = 0;
do
{
int bytesRead = in.read(header, offset, header.length - offset);
if (bytesRead < 0) {
break;
}
offset += bytesRead;
} while (offset != header.length);
if (offset != header.length) {
throw new IOException("Socket closed prematurely");
}
ByteBuffer bb = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);
type = bb.getShort();
paylen = bb.getShort();
if (paylen != 0)
{
payload = new byte[paylen];
offset = 0;
do
{
int bytesRead = in.read(payload, offset, payload.length - offset);
if (bytesRead < 0) {
break;
}
offset += bytesRead;
} while (offset != payload.length);
if (offset != payload.length) {
throw new IOException("Socket closed prematurely");
}
}
}
public NvCtlPacket(byte[] payload)
{
ByteBuffer bb = ByteBuffer.wrap(payload).order(ByteOrder.LITTLE_ENDIAN);
type = bb.getShort();
paylen = bb.getShort();
if (bb.hasRemaining())
{
payload = new byte[bb.remaining()];
bb.get(payload);
}
}
public NvCtlPacket(short type, short paylen)
{
this.type = type;
this.paylen = paylen;
}
public NvCtlPacket(short type, short paylen, byte[] payload)
{
this.type = type;
this.paylen = paylen;
this.payload = payload;
}
public short getType()
{
return type;
}
public short getPaylen()
{
return paylen;
}
public void setType(short type)
{
this.type = type;
}
public void setPaylen(short paylen)
{
this.paylen = paylen;
}
public byte[] toWire()
{
ByteBuffer bb = ByteBuffer.allocate(4 + (payload != null ? payload.length : 0)).order(ByteOrder.LITTLE_ENDIAN);
bb.putShort(type);
bb.putShort(paylen);
if (payload != null)
bb.put(payload);
return bb.array();
}
}
class NvCtlResponse extends NvCtlPacket {
public short status;
public NvCtlResponse(InputStream in) throws IOException {
super(in);
}
public NvCtlResponse(short type, short paylen) {
super(type, paylen);
}
public NvCtlResponse(short type, short paylen, byte[] payload) {
super(type, paylen, payload);
}
public NvCtlResponse(byte[] payload) {
super(payload);
}
public void setStatusCode(short status)
{
this.status = status;
}
public short getStatusCode()
{
return status;
}
}
@Override
public void connectionTerminated() {
abort();
}
@Override
public void connectionNeedsResync() {
new Thread(new Runnable() {
@Override
public void run() {
try {
requestResync();
} catch (IOException e1) {
abort();
return;
}
}
}).start();
}
}
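
For reference, NvCtlPacket.toWire() above frames every control message as a little-endian short type, a little-endian short payload length, and then the raw payload bytes. A standalone sketch, not part of this commit, building the hello packet from the constants in this file:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class CtlPacketSketch {
    public static void main(String[] args) {
        short type = 0x1204;                       // PTYPE_HELLO
        byte[] payload = {0x00, 0x05, 0x00, 0x00}; // PPAYLOAD_HELLO
        ByteBuffer bb = ByteBuffer.allocate(4 + payload.length)
                .order(ByteOrder.LITTLE_ENDIAN);
        bb.putShort(type);                   // packet type
        bb.putShort((short) payload.length); // payload length
        bb.put(payload);                     // payload bytes
        System.out.printf("hello packet is %d bytes on the wire%n", bb.array().length);
    }
}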


@@ -1,144 +0,0 @@
package com.limelight.nvstream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.LinkedList;
import java.util.Stack;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
public class NvHTTP {
private String macAddress;
public static final int PORT = 47989;
public String baseUrl;
public NvHTTP(String host, String macAddress) {
this.macAddress = macAddress;
this.baseUrl = "http://" + host + ":" + PORT;
}
private String getXmlString(InputStream in, String tagname)
throws IOException, XMLStreamException {
XMLInputFactory factory = XMLInputFactory.newFactory();
XMLStreamReader xReader = factory.createXMLStreamReader(in);
int eventType = xReader.getEventType();
Stack<String> currentTag = new Stack<String>();
while (eventType != XMLStreamReader.END_DOCUMENT) {
switch (eventType) {
case (XMLStreamReader.START_ELEMENT):
currentTag.push(xReader.getElementText());
break;
case (XMLStreamReader.END_ELEMENT):
currentTag.pop();
break;
case (XMLStreamReader.CHARACTERS):
if (currentTag.peek().equals(tagname)) {
return xReader.getElementText();
}
break;
}
eventType = xReader.next();
}
return null;
}
private InputStream openHttpConnection(String url) throws IOException {
return new URL(url).openConnection().getInputStream();
}
public String getAppVersion() throws XMLStreamException, IOException {
InputStream in = openHttpConnection(baseUrl + "/appversion");
return getXmlString(in, "appversion");
}
public boolean getPairState() throws IOException, XMLStreamException {
InputStream in = openHttpConnection(baseUrl + "/pairstate?mac=" + macAddress);
String paired = getXmlString(in, "paired");
return Integer.valueOf(paired) != 0;
}
public int getSessionId() throws IOException, XMLStreamException {
/* Pass the model (minus spaces) as the device name */
String deviceName = "Unknown";
try
{
InetAddress addr;
addr = InetAddress.getLocalHost();
deviceName = addr.getHostName();
}
catch (UnknownHostException ex)
{
System.out.println("Hostname can not be resolved");
}
InputStream in = openHttpConnection(baseUrl + "/pair?mac=" + macAddress
+ "&devicename=" + deviceName);
String sessionId = getXmlString(in, "sessionid");
return Integer.parseInt(sessionId);
}
public int getSteamAppId(int sessionId) throws IOException,
XMLStreamException {
LinkedList<NvApp> appList = getAppList(sessionId);
for (NvApp app : appList) {
if (app.getAppName().equals("Steam")) {
return app.getAppId();
}
}
return 0;
}
public LinkedList<NvApp> getAppList(int sessionId) throws IOException, XMLStreamException {
InputStream in = openHttpConnection(baseUrl + "/applist?session=" + sessionId);
XMLInputFactory factory = XMLInputFactory.newFactory();
XMLStreamReader xReader = factory.createXMLStreamReader(in);
int eventType = xReader.getEventType();
LinkedList<NvApp> appList = new LinkedList<NvApp>();
Stack<String> currentTag = new Stack<String>();
while (eventType != XMLStreamReader.END_DOCUMENT) {
switch (eventType) {
case (XMLStreamReader.START_ELEMENT):
currentTag.push(xReader.getName().toString());
if (xReader.getName().toString().equals("App")) {
appList.addLast(new NvApp());
}
break;
case (XMLStreamReader.END_DOCUMENT):
currentTag.pop();
break;
case (XMLStreamReader.CHARACTERS):
NvApp app = appList.getLast();
if (currentTag.peek().equals("AppTitle")) {
app.setAppName(xReader.getText());
} else if (currentTag.peek().equals("ID")) {
app.setAppId(xReader.getText());
} else if (currentTag.peek().equals("IsRunning")) {
app.setIsRunning(xReader.getText());
}
break;
}
eventType = xReader.next();
}
return appList;
}
// Returns gameSession XML attribute
public int launchApp(int sessionId, int appId) throws IOException,
XMLStreamException {
InputStream in = openHttpConnection(baseUrl + "/launch?session="
+ sessionId + "&appid=" + appId);
String gameSession = getXmlString(in, "gamesession");
return Integer.parseInt(gameSession);
}
}


@@ -1,133 +0,0 @@
package com.limelight.nvstream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
public class NvHandshake {
public static final int PORT = 47991;
public static final int HANDSHAKE_TIMEOUT = 5000;
public static final byte[] PLATFORM_HELLO =
{
(byte)0x07,
(byte)0x00,
(byte)0x00,
(byte)0x00,
// android in ASCII
(byte)0x61,
(byte)0x6e,
(byte)0x64,
(byte)0x72,
(byte)0x6f,
(byte)0x69,
(byte)0x64,
(byte)0x03,
(byte)0x01,
(byte)0x00,
(byte)0x00
};
public static final byte[] PACKET_2 =
{
(byte)0x01,
(byte)0x03,
(byte)0x02,
(byte)0x00,
(byte)0x08,
(byte)0x00
};
public static final byte[] PACKET_3 =
{
(byte)0x04,
(byte)0x01,
(byte)0x00,
(byte)0x00,
(byte)0x00,
(byte)0x00,
(byte)0x00,
(byte)0x00
};
public static final byte[] PACKET_4 =
{
(byte)0x01,
(byte)0x01,
(byte)0x00,
(byte)0x00
};
private static boolean waitAndDiscardResponse(InputStream in)
{
// Wait for response and discard response
try {
in.read();
// Wait for the full response to come in
Thread.sleep(250);
for (int i = 0; i < in.available(); i++)
in.read();
} catch (IOException e1) {
return false;
} catch (InterruptedException e) {
return false;
}
return true;
}
public static boolean performHandshake(InetAddress host) throws IOException
{
Socket s = new Socket();
s.connect(new InetSocketAddress(host, PORT), HANDSHAKE_TIMEOUT);
s.setSoTimeout(HANDSHAKE_TIMEOUT);
OutputStream out = s.getOutputStream();
InputStream in = s.getInputStream();
// First packet
out.write(PLATFORM_HELLO);
out.flush();
if (!waitAndDiscardResponse(in)) {
s.close();
return false;
}
// Second packet
out.write(PACKET_2);
out.flush();
if (!waitAndDiscardResponse(in)) {
s.close();
return false;
}
// Third packet
out.write(PACKET_3);
out.flush();
if (!waitAndDiscardResponse(in)) {
s.close();
return false;
}
// Fourth packet
out.write(PACKET_4);
out.flush();
// Done
s.close();
return true;
}
}


@@ -1,307 +0,0 @@
package com.limelight.nvstream;
import java.io.IOException;
import java.io.InputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.ConnectionStatusListener;
import com.limelight.nvstream.av.video.AvVideoDepacketizer;
import com.limelight.nvstream.av.video.AvVideoPacket;
import com.limelight.nvstream.av.video.CpuDecoderRenderer;
import com.limelight.nvstream.av.video.DecoderRenderer;
public class NvVideoStream {
public static final int RTP_PORT = 47998;
public static final int RTCP_PORT = 47999;
public static final int FIRST_FRAME_PORT = 47996;
public static final int FIRST_FRAME_TIMEOUT = 5000;
private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>(100);
private InetAddress host;
private DatagramSocket rtp;
private Socket firstFrameSocket;
private LinkedList<Thread> threads = new LinkedList<Thread>();
private NvConnectionListener listener;
private AvVideoDepacketizer depacketizer;
private DecoderRenderer decrend;
private boolean startedRendering;
private boolean aborting = false;
public NvVideoStream(InetAddress host, NvConnectionListener listener, ConnectionStatusListener avConnListener)
{
this.host = host;
this.listener = listener;
this.depacketizer = new AvVideoDepacketizer(avConnListener);
}
public void abort()
{
if (aborting) {
return;
}
aborting = true;
// Interrupt threads
for (Thread t : threads) {
t.interrupt();
}
// Close the socket to interrupt the receive thread
if (rtp != null) {
rtp.close();
}
if (firstFrameSocket != null) {
try {
firstFrameSocket.close();
} catch (IOException e) {}
}
// Wait for threads to terminate
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException e) { }
}
if (startedRendering) {
decrend.stop();
}
if (decrend != null) {
decrend.release();
}
threads.clear();
}
private void readFirstFrame() throws IOException
{
byte[] firstFrame = new byte[1500];
firstFrameSocket = new Socket();
firstFrameSocket.setSoTimeout(FIRST_FRAME_TIMEOUT);
try {
firstFrameSocket.connect(new InetSocketAddress(host, FIRST_FRAME_PORT), FIRST_FRAME_TIMEOUT);
InputStream firstFrameStream = firstFrameSocket.getInputStream();
int offset = 0;
for (;;)
{
int bytesRead = firstFrameStream.read(firstFrame, offset, firstFrame.length-offset);
if (bytesRead == -1)
break;
offset += bytesRead;
}
depacketizer.addInputData(new AvVideoPacket(new AvByteBufferDescriptor(firstFrame, 0, offset)));
} finally {
firstFrameSocket.close();
firstFrameSocket = null;
}
}
public void setupRtpSession() throws SocketException
{
rtp = new DatagramSocket(RTP_PORT);
}
/*
public void setupDecoderRenderer(SurfaceHolder renderTarget, int drFlags) {
if (Build.HARDWARE.equals("goldfish")) {
// Emulator - don't render video (it's slow!)
decrend = null;
}
else if (MediaCodecDecoderRenderer.findSafeDecoder() != null) {
// Hardware decoding
decrend = new MediaCodecDecoderRenderer();
}
else {
// Software decoding
decrend = new CpuDecoderRenderer();
}
if (decrend != null) {
decrend.setup(1280, 720, renderTarget, drFlags);
}
}*/
/*
public void startVideoStream(final SurfaceHolder surface, int drFlags) throws IOException
{
// Setup the decoder and renderer
setupDecoderRenderer(surface, drFlags);
// Open RTP sockets and start session
setupRtpSession();
// Start pinging before reading the first frame
// so Shield Proxy knows we're here and sends us
// the reference frame
startUdpPingThread();
// Read the first frame to start the UDP video stream
// This MUST be called before the normal UDP receive thread
// starts in order to avoid state corruption caused by two
// threads simultaneously adding input data.
readFirstFrame();
if (decrend != null) {
// Start the receive thread early to avoid missing
// early packets
startReceiveThread();
// Start the depacketizer thread to deal with the RTP data
startDepacketizerThread();
// Start decoding the data we're receiving
startDecoderThread();
// Start the renderer
decrend.start();
startedRendering = true;
}
}
*/
private void startDecoderThread()
{
Thread t = new Thread() {
@Override
public void run() {
// Read the decode units generated from the RTP stream
while (!isInterrupted())
{
AvDecodeUnit du;
try {
du = depacketizer.getNextDecodeUnit();
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
decrend.submitDecodeUnit(du);
}
}
};
threads.add(t);
t.setName("Video - Decoder");
t.setPriority(Thread.MAX_PRIORITY);
t.start();
}
private void startDepacketizerThread()
{
// This thread lessens the work on the receive thread
// so it can spend more time waiting for data
Thread t = new Thread() {
@Override
public void run() {
while (!isInterrupted())
{
AvRtpPacket packet;
try {
packet = packets.take();
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
// !!! We no longer own the data buffer at this point !!!
depacketizer.addInputData(packet);
}
}
};
threads.add(t);
t.setName("Video - Depacketizer");
t.start();
}
private void startReceiveThread()
{
// Receive thread
Thread t = new Thread() {
@Override
public void run() {
AvByteBufferDescriptor desc = new AvByteBufferDescriptor(new byte[1500], 0, 1500);
DatagramPacket packet = new DatagramPacket(desc.data, desc.length);
while (!isInterrupted())
{
try {
rtp.receive(packet);
} catch (IOException e) {
listener.connectionTerminated(e);
return;
}
// Give the packet to the depacketizer thread
desc.length = packet.getLength();
if (packets.offer(new AvRtpPacket(desc))) {
desc.reinitialize(new byte[1500], 0, 1500);
packet.setData(desc.data, desc.offset, desc.length);
}
}
}
};
threads.add(t);
t.setName("Video - Receive");
t.start();
}
private void startUdpPingThread()
{
// Ping thread
Thread t = new Thread() {
@Override
public void run() {
// PING in ASCII
final byte[] pingPacketData = new byte[] {0x50, 0x49, 0x4E, 0x47};
DatagramPacket pingPacket = new DatagramPacket(pingPacketData, pingPacketData.length);
pingPacket.setSocketAddress(new InetSocketAddress(host, RTP_PORT));
// Send PING every 100 ms
while (!isInterrupted())
{
try {
rtp.send(pingPacket);
} catch (IOException e) {
listener.connectionTerminated(e);
return;
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
listener.connectionTerminated(e);
return;
}
}
}
};
threads.add(t);
t.setName("Video - Ping");
t.setPriority(Thread.MIN_PRIORITY);
t.start();
}
}


@@ -1,46 +0,0 @@
package com.limelight.nvstream.av;
public class AvByteBufferDescriptor {
public byte[] data;
public int offset;
public int length;
public AvByteBufferDescriptor(byte[] data, int offset, int length)
{
this.data = data;
this.offset = offset;
this.length = length;
}
public AvByteBufferDescriptor(AvByteBufferDescriptor desc)
{
this.data = desc.data;
this.offset = desc.offset;
this.length = desc.length;
}
public void reinitialize(byte[] data, int offset, int length)
{
this.data = data;
this.offset = offset;
this.length = length;
}
public void print()
{
print(offset, length);
}
public void print(int length)
{
print(this.offset, length);
}
public void print(int offset, int length)
{
for (int i = offset; i < offset+length; i++) {
System.out.printf("%d: %02x \n", i, data[i]);
}
System.out.println();
}
}


@@ -1,42 +0,0 @@
package com.limelight.nvstream.av;
import java.util.List;
public class AvDecodeUnit {
public static final int TYPE_UNKNOWN = 0;
public static final int TYPE_H264 = 1;
public static final int TYPE_OPUS = 2;
private int type;
private List<AvByteBufferDescriptor> bufferList;
private int dataLength;
private int flags;
public AvDecodeUnit(int type, List<AvByteBufferDescriptor> bufferList, int dataLength, int flags)
{
this.type = type;
this.bufferList = bufferList;
this.dataLength = dataLength;
this.flags = flags;
}
public int getType()
{
return type;
}
public int getFlags()
{
return flags;
}
public List<AvByteBufferDescriptor> getBufferList()
{
return bufferList;
}
public int getDataLength()
{
return dataLength;
}
}


@@ -1,46 +0,0 @@
package com.limelight.nvstream.av;
import java.nio.ByteBuffer;
public class AvRtpPacket {
private byte packetType;
private short seqNum;
private AvByteBufferDescriptor buffer;
public AvRtpPacket(AvByteBufferDescriptor buffer)
{
this.buffer = new AvByteBufferDescriptor(buffer);
ByteBuffer bb = ByteBuffer.wrap(buffer.data, buffer.offset, buffer.length);
// Discard the first byte
bb.position(bb.position()+1);
// Get the packet type
packetType = bb.get();
// Get the sequence number
seqNum = bb.getShort();
}
public byte getPacketType()
{
return packetType;
}
public short getSequenceNumber()
{
return seqNum;
}
public byte[] getBackingBuffer()
{
return buffer.data;
}
public AvByteBufferDescriptor getNewPayloadDescriptor()
{
return new AvByteBufferDescriptor(buffer.data, buffer.offset+12, buffer.length-12);
}
}


@@ -1,28 +0,0 @@
package com.limelight.nvstream.av;
public class AvShortBufferDescriptor {
public short[] data;
public int offset;
public int length;
public AvShortBufferDescriptor(short[] data, int offset, int length)
{
this.data = data;
this.offset = offset;
this.length = length;
}
public AvShortBufferDescriptor(AvShortBufferDescriptor desc)
{
this.data = desc.data;
this.offset = desc.offset;
this.length = desc.length;
}
public void reinitialize(short[] data, int offset, int length)
{
this.data = data;
this.offset = offset;
this.length = length;
}
}


@@ -1,7 +0,0 @@
package com.limelight.nvstream.av;
public interface ConnectionStatusListener {
public void connectionTerminated();
public void connectionNeedsResync();
}


@@ -1,65 +0,0 @@
package com.limelight.nvstream.av.audio;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.AvShortBufferDescriptor;
public class AvAudioDepacketizer {
private static final int DU_LIMIT = 15;
private LinkedBlockingQueue<AvShortBufferDescriptor> decodedUnits =
new LinkedBlockingQueue<AvShortBufferDescriptor>(DU_LIMIT);
// Sequencing state
private short lastSequenceNumber;
private void decodeData(byte[] data, int off, int len)
{
// Submit this data to the decoder
short[] pcmData = new short[OpusDecoder.getMaxOutputShorts()];
int decodeLen = OpusDecoder.decode(data, off, len, pcmData);
if (decodeLen > 0) {
// Return value of decode is frames decoded per channel
decodeLen *= OpusDecoder.getChannelCount();
// Put it on the decoded queue
if (!decodedUnits.offer(new AvShortBufferDescriptor(pcmData, 0, decodeLen))) {
// Clear out the queue
decodedUnits.clear();
}
}
}
public void decodeInputData(AvRtpPacket packet)
{
short seq = packet.getSequenceNumber();
if (packet.getPacketType() != 97) {
// Only type 97 is audio
return;
}
// Toss out the current NAL if we receive a packet that is
// out of sequence
if (lastSequenceNumber != 0 &&
(short)(lastSequenceNumber + 1) != seq)
{
System.out.println("Received OOS audio data (expected "+(lastSequenceNumber + 1)+", got "+seq+")");
decodeData(null, 0, 0);
}
lastSequenceNumber = seq;
// This is all the depacketizing we need to do
AvByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
decodeData(rtpPayload.data, rtpPayload.offset, rtpPayload.length);
}
public AvShortBufferDescriptor getNextDecodedData() throws InterruptedException
{
return decodedUnits.take();
}
}


@@ -1,14 +0,0 @@
package com.limelight.nvstream.av.audio;
public class OpusDecoder {
static {
System.loadLibrary("nv_opus_dec");
}
public static native int init();
public static native void destroy();
public static native int getChannelCount();
public static native int getMaxOutputShorts();
public static native int getSampleRate();
public static native int decode(byte[] indata, int inoff, int inlen, short[] outpcmdata);
}


@@ -1,313 +0,0 @@
package com.limelight.nvstream.av.video;
import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.ConnectionStatusListener;
public class AvVideoDepacketizer {
// Current NAL state
private LinkedList<AvByteBufferDescriptor> avcNalDataChain = null;
private int avcNalDataLength = 0;
private int currentlyDecoding;
// Cached buffer descriptor to save on allocations
// Only safe to use in decode thread!!!!
private AvByteBufferDescriptor cachedDesc;
// Sequencing state
private short lastSequenceNumber;
private ConnectionStatusListener controlListener;
private static final int DU_LIMIT = 15;
private LinkedBlockingQueue<AvDecodeUnit> decodedUnits = new LinkedBlockingQueue<AvDecodeUnit>(DU_LIMIT);
public AvVideoDepacketizer(ConnectionStatusListener controlListener)
{
this.controlListener = controlListener;
this.cachedDesc = new AvByteBufferDescriptor(null, 0, 0);
}
private void clearAvcNalState()
{
avcNalDataChain = null;
avcNalDataLength = 0;
}
private void reassembleAvcNal()
{
// This is the start of a new NAL
if (avcNalDataChain != null && avcNalDataLength != 0)
{
int flags = 0;
// Check if this is a special NAL unit
AvByteBufferDescriptor header = avcNalDataChain.getFirst();
if (NAL.getSpecialSequenceDescriptor(header, cachedDesc))
{
// The next byte after the special sequence is the NAL header
byte nalHeader = cachedDesc.data[cachedDesc.offset+cachedDesc.length];
switch (nalHeader)
{
// SPS and PPS
case 0x67:
case 0x68:
System.out.println("Codec config");
//flags |= MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
break;
// IDR
case 0x65:
System.out.println("Reference frame");
//flags |= MediaCodec.BUFFER_FLAG_SYNC_FRAME;
break;
// non-IDR frame
case 0x61:
break;
// Unknown type
default:
System.out.printf("Unknown NAL header: %02x %02x %02x %02x %02x\n",
header.data[header.offset], header.data[header.offset+1],
header.data[header.offset+2], header.data[header.offset+3],
header.data[header.offset+4]);
break;
}
}
else
{
System.out.printf("Invalid NAL: %02x %02x %02x %02x %02x\n",
header.data[header.offset], header.data[header.offset+1],
header.data[header.offset+2], header.data[header.offset+3],
header.data[header.offset+4]);
}
// Construct the H264 decode unit
AvDecodeUnit du = new AvDecodeUnit(AvDecodeUnit.TYPE_H264, avcNalDataChain, avcNalDataLength, flags);
if (!decodedUnits.offer(du)) {
// We need a new IDR frame since we're discarding data now
decodedUnits.clear();
controlListener.connectionNeedsResync();
}
// Clear old state
avcNalDataChain = null;
avcNalDataLength = 0;
}
}
public void addInputData(AvVideoPacket packet)
{
AvByteBufferDescriptor location = packet.getNewPayloadDescriptor();
while (location.length != 0)
{
// Remember the start of the NAL data in this packet
int start = location.offset;
// Check for a special sequence
if (NAL.getSpecialSequenceDescriptor(location, cachedDesc))
{
if (NAL.isAvcStartSequence(cachedDesc))
{
// We're decoding H264 now
currentlyDecoding = AvDecodeUnit.TYPE_H264;
// Check if it's the end of the last frame
if (NAL.isAvcFrameStart(cachedDesc))
{
// Reassemble any pending AVC NAL
reassembleAvcNal();
// Setup state for the new NAL
avcNalDataChain = new LinkedList<AvByteBufferDescriptor>();
avcNalDataLength = 0;
}
// Skip the start sequence
location.length -= cachedDesc.length;
location.offset += cachedDesc.length;
}
else
{
// Check if this is padding after a full AVC frame
if (currentlyDecoding == AvDecodeUnit.TYPE_H264 &&
NAL.isPadding(cachedDesc)) {
// The decode unit is complete
reassembleAvcNal();
}
// Not decoding AVC
currentlyDecoding = AvDecodeUnit.TYPE_UNKNOWN;
// Just skip this byte
location.length--;
location.offset++;
}
}
// Move to the next special sequence
while (location.length != 0)
{
// Catch the easy case first where byte 0 != 0x00
if (location.data[location.offset] == 0x00)
{
// Check if this should end the current NAL
if (NAL.getSpecialSequenceDescriptor(location, cachedDesc))
{
// Only stop if we're decoding something or this
// isn't padding
if (currentlyDecoding != AvDecodeUnit.TYPE_UNKNOWN ||
!NAL.isPadding(cachedDesc))
{
break;
}
}
}
// This byte is part of the NAL data
location.offset++;
location.length--;
}
if (currentlyDecoding == AvDecodeUnit.TYPE_H264 && avcNalDataChain != null)
{
AvByteBufferDescriptor data = new AvByteBufferDescriptor(location.data, start, location.offset-start);
// Add a buffer descriptor describing the NAL data in this packet
avcNalDataChain.add(data);
avcNalDataLength += location.offset-start;
}
}
}
public void addInputData(AvRtpPacket packet)
{
short seq = packet.getSequenceNumber();
// Toss out the current NAL if we receive a packet that is
// out of sequence
if (lastSequenceNumber != 0 &&
(short)(lastSequenceNumber + 1) != seq)
{
System.out.println("Received OOS video data (expected "+(lastSequenceNumber + 1)+", got "+seq+")");
// Reset the depacketizer state
currentlyDecoding = AvDecodeUnit.TYPE_UNKNOWN;
clearAvcNalState();
// Request an IDR frame
controlListener.connectionNeedsResync();
}
lastSequenceNumber = seq;
// Pass the payload to the non-sequencing parser
AvByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
addInputData(new AvVideoPacket(rtpPayload));
}
public AvDecodeUnit getNextDecodeUnit() throws InterruptedException
{
return decodedUnits.take();
}
}
class NAL {
// This assumes that the buffer passed in is already a special sequence
public static boolean isAvcStartSequence(AvByteBufferDescriptor specialSeq)
{
// The start sequence is 00 00 01 or 00 00 00 01
return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x01);
}
// This assumes that the buffer passed in is already a special sequence
public static boolean isPadding(AvByteBufferDescriptor specialSeq)
{
// The padding sequence is 00 00 00
return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x00);
}
// This assumes that the buffer passed in is already a special sequence
public static boolean isAvcFrameStart(AvByteBufferDescriptor specialSeq)
{
if (specialSeq.length != 4)
return false;
// The frame start sequence is 00 00 00 01
return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x01);
}
// Returns a buffer descriptor describing the start sequence
public static boolean getSpecialSequenceDescriptor(AvByteBufferDescriptor buffer, AvByteBufferDescriptor outputDesc)
{
// NAL start sequence is 00 00 00 01 or 00 00 01
if (buffer.length < 3)
return false;
// 00 00 is magic
if (buffer.data[buffer.offset] == 0x00 &&
buffer.data[buffer.offset+1] == 0x00)
{
// Another 00 could be the end of the special sequence
// 00 00 00 or the middle of 00 00 00 01
if (buffer.data[buffer.offset+2] == 0x00)
{
if (buffer.length >= 4 &&
buffer.data[buffer.offset+3] == 0x01)
{
// It's the AVC start sequence 00 00 00 01
outputDesc.reinitialize(buffer.data, buffer.offset, 4);
}
else
{
// It's 00 00 00
outputDesc.reinitialize(buffer.data, buffer.offset, 3);
}
return true;
}
else if (buffer.data[buffer.offset+2] == 0x01 ||
buffer.data[buffer.offset+2] == 0x02)
{
// These are easy: 00 00 01 or 00 00 02
outputDesc.reinitialize(buffer.data, buffer.offset, 3);
return true;
}
else if (buffer.data[buffer.offset+2] == 0x03)
{
// 00 00 03 is special because it's a subsequence of the
// NAL wrapping substitute for 00 00 00, 00 00 01, 00 00 02,
// or 00 00 03 in the RBSP sequence. We need to check the next
// byte to see whether it's 00, 01, 02, or 03 (a valid RBSP substitution)
// or whether it's something else
if (buffer.length < 4)
return false;
if (buffer.data[buffer.offset+3] >= 0x00 &&
buffer.data[buffer.offset+3] <= 0x03)
{
// It's not really a special sequence after all
return false;
}
else
{
// It's not a standard replacement so it's a special sequence
outputDesc.reinitialize(buffer.data, buffer.offset, 3);
return true;
}
}
}
return false;
}
}


@@ -1,17 +0,0 @@
package com.limelight.nvstream.av.video;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
public class AvVideoPacket {
private AvByteBufferDescriptor buffer;
public AvVideoPacket(AvByteBufferDescriptor rtpPayload)
{
buffer = new AvByteBufferDescriptor(rtpPayload);
}
public AvByteBufferDescriptor getNewPayloadDescriptor()
{
return new AvByteBufferDescriptor(buffer.data, buffer.offset+56, buffer.length-56);
}
}


@@ -1,33 +0,0 @@
package com.limelight.nvstream.av.video;
public class AvcDecoder {
static {
// FFMPEG dependencies
System.loadLibrary("avutil-52");
System.loadLibrary("swresample-0");
System.loadLibrary("swscale-2");
System.loadLibrary("avcodec-55");
System.loadLibrary("avformat-55");
System.loadLibrary("avfilter-3");
System.loadLibrary("nv_avc_dec");
}
/** Disables the deblocking filter at the cost of image quality */
public static final int DISABLE_LOOP_FILTER = 0x1;
/** Uses the low latency decode flag (disables multithreading) */
public static final int LOW_LATENCY_DECODE = 0x2;
/** Threads process each slice, rather than each frame */
public static final int SLICE_THREADING = 0x4;
/** Uses nonstandard speedup tricks */
public static final int FAST_DECODE = 0x8;
/** Uses bilinear filtering instead of bicubic */
public static final int BILINEAR_FILTERING = 0x10;
/** Uses a faster bilinear filtering with lower image quality */
public static final int FAST_BILINEAR_FILTERING = 0x20;
public static native int init(int width, int height, int perflvl, int threadcount);
public static native void destroy();
//public static native void redraw(Surface surface);
public static native int decode(byte[] indata, int inoff, int inlen);
}


@@ -1,203 +0,0 @@
package com.limelight.nvstream.av.video;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.ByteBuffer;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
public class CpuDecoderRenderer/* implements DecoderRenderer */{
private ByteBuffer decoderBuffer;
private Thread rendererThread;
private int targetFps;
// Only sleep if the difference is above this value
private static final int WAIT_CEILING_MS = 8;
private static final int LOW_PERF = 1;
private static final int MED_PERF = 2;
private static final int HIGH_PERF = 3;
private int cpuCount = Runtime.getRuntime().availableProcessors();
private int findOptimalPerformanceLevel() {
StringBuilder cpuInfo = new StringBuilder();
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(new File("/proc/cpuinfo")));
for (;;) {
int ch = br.read();
if (ch == -1)
break;
cpuInfo.append((char)ch);
}
// Here we're doing very simple heuristics based on CPU model
String cpuInfoStr = cpuInfo.toString();
// We order them from greatest to least for proper detection
// of devices with multiple sets of cores (like Exynos 5 Octa)
// TODO Make this better
if (cpuInfoStr.contains("0xc0f")) {
// Cortex-A15
return MED_PERF;
}
else if (cpuInfoStr.contains("0xc09")) {
// Cortex-A9
return LOW_PERF;
}
else if (cpuInfoStr.contains("0xc07")) {
// Cortex-A7
return LOW_PERF;
}
else {
// Didn't have anything we're looking for
return MED_PERF;
}
} catch (IOException e) {
} finally {
if (br != null) {
try {
br.close();
} catch (IOException e) {}
}
}
// Couldn't read cpuinfo, so assume medium
return MED_PERF;
}
/*@Override
public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
this.renderTarget = renderTarget.getSurface();
this.targetFps = 30;
int perfLevel = findOptimalPerformanceLevel();
int threadCount;
int avcFlags = 0;
switch (perfLevel) {
case HIGH_PERF:
// Single threaded low latency decode is ideal but hard to achieve
avcFlags = AvcDecoder.LOW_LATENCY_DECODE;
threadCount = 1;
break;
case LOW_PERF:
// Disable the loop filter for performance reasons
avcFlags = AvcDecoder.DISABLE_LOOP_FILTER |
AvcDecoder.FAST_BILINEAR_FILTERING |
AvcDecoder.FAST_DECODE;
// Use plenty of threads to try to utilize the CPU as best we can
threadCount = cpuCount - 1;
break;
default:
case MED_PERF:
avcFlags = AvcDecoder.BILINEAR_FILTERING |
AvcDecoder.FAST_DECODE;
// Only use 2 threads to minimize frame processing latency
threadCount = 2;
break;
}
// If the user wants quality, we'll remove the low IQ flags
if ((drFlags & DecoderRenderer.FLAG_PREFER_QUALITY) != 0) {
// Make sure the loop filter is enabled
avcFlags &= ~AvcDecoder.DISABLE_LOOP_FILTER;
// Disable the non-compliant speed optimizations
avcFlags &= ~AvcDecoder.FAST_DECODE;
System.out.println("Using high quality decoding");
}
int err = AvcDecoder.init(width, height, avcFlags, threadCount);
if (err != 0) {
throw new IllegalStateException("AVC decoder initialization failure: "+err);
}
decoderBuffer = ByteBuffer.allocate(92*1024);
System.out.println("Using software decoding (performance level: "+perfLevel+")");
}
*/
//@Override
public void start() {
rendererThread = new Thread() {
@Override
public void run() {
long nextFrameTime = System.currentTimeMillis();
while (!isInterrupted())
{
long diff = nextFrameTime - System.currentTimeMillis();
if (diff > WAIT_CEILING_MS) {
try {
Thread.sleep(diff);
} catch (InterruptedException e) {
return;
}
}
nextFrameTime = computePresentationTimeMs(targetFps);
// AvcDecoder.redraw(renderTarget);
}
}
};
rendererThread.setName("Video - Renderer (CPU)");
rendererThread.start();
}
private long computePresentationTimeMs(int frameRate) {
return System.currentTimeMillis() + (1000 / frameRate);
}
//@Override
public void stop() {
rendererThread.interrupt();
try {
rendererThread.join();
} catch (InterruptedException e) { }
}
//@Override
public void release() {
AvcDecoder.destroy();
}
//@Override
public boolean submitDecodeUnit(AvDecodeUnit decodeUnit) {
byte[] data;
// Use the reserved decoder buffer if this decode unit will fit
if (decodeUnit.getDataLength() <= decoderBuffer.limit()) {
decoderBuffer.clear();
for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
}
data = decoderBuffer.array();
}
else {
data = new byte[decodeUnit.getDataLength()];
int offset = 0;
for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
System.arraycopy(bbd.data, bbd.offset, data, offset, bbd.length);
offset += bbd.length;
}
}
return (AvcDecoder.decode(data, 0, decodeUnit.getDataLength()) == 0);
}
}
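
The renderer thread above paces redraws by recomputing the next presentation time each iteration and sleeping only when the remaining gap exceeds WAIT_CEILING_MS. As a quick check of the arithmetic, using the 30 fps target from the commented-out setup():

int targetFps = 30;
long frameIntervalMs = 1000 / targetFps; // 33 ms between scheduled redraws
// With WAIT_CEILING_MS = 8, the thread sleeps only when more than 8 ms remain,
// so wakeups close to the deadline fall straight through to the redraw call.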

View File

@@ -1,17 +0,0 @@
package com.limelight.nvstream.av.video;
import com.limelight.nvstream.av.AvDecodeUnit;
public interface DecoderRenderer {
public static int FLAG_PREFER_QUALITY = 0x1;
public void setup(int width, int height, int drFlags);
public void start();
public void stop();
public void release();
public boolean submitDecodeUnit(AvDecodeUnit decodeUnit);
}
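
The interface captures the renderer lifecycle the streaming core drives: setup(), then start(), repeated submitDecodeUnit() calls, and finally stop() and release(). A minimal logging implementation is sketched below; the class name and log output are assumptions for illustration only.

package com.limelight.nvstream.av.video;

import com.limelight.nvstream.av.AvDecodeUnit;

public class LoggingDecoderRenderer implements DecoderRenderer {
    @Override
    public void setup(int width, int height, int drFlags) {
        System.out.println("Renderer setup: " + width + "x" + height +
                " flags=0x" + Integer.toHexString(drFlags));
    }

    @Override
    public void start() { }

    @Override
    public void stop() { }

    @Override
    public void release() { }

    @Override
    public boolean submitDecodeUnit(AvDecodeUnit decodeUnit) {
        // Returning true reports the unit as consumed successfully
        System.out.println("Decode unit received: " + decodeUnit.getDataLength() + " bytes");
        return true;
    }
}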

View File

@@ -1,65 +0,0 @@
package com.limelight.nvstream.input;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
public class NvController {
public final static int PORT = 35043;
public final static int CONTROLLER_TIMEOUT = 3000;
private InetAddress host;
private Socket s;
private OutputStream out;
public NvController(InetAddress host)
{
this.host = host;
}
public void initialize() throws IOException
{
s = new Socket();
s.connect(new InetSocketAddress(host, PORT), CONTROLLER_TIMEOUT);
s.setTcpNoDelay(true);
out = s.getOutputStream();
}
public void close()
{
try {
s.close();
} catch (IOException e) {}
}
public void sendControllerInput(short buttonFlags, byte leftTrigger, byte rightTrigger,
short leftStickX, short leftStickY, short rightStickX, short rightStickY) throws IOException
{
out.write(new NvControllerPacket(buttonFlags, leftTrigger,
rightTrigger, leftStickX, leftStickY,
rightStickX, rightStickY).toWire());
out.flush();
}
public void sendMouseButtonDown() throws IOException
{
out.write(new NvMouseButtonPacket(true).toWire());
out.flush();
}
public void sendMouseButtonUp() throws IOException
{
out.write(new NvMouseButtonPacket(false).toWire());
out.flush();
}
public void sendMouseMove(short deltaX, short deltaY) throws IOException
{
out.write(new NvMouseMovePacket(deltaX, deltaY).toWire());
out.flush();
}
}
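
NvController opens a TCP connection to port 35043 with TCP_NODELAY set and writes fixed-format input packets to it. A hedged usage sketch follows; the host address and the chosen button are placeholders, and error handling is reduced to a single throws clause.

package com.limelight.nvstream.input;

import java.io.IOException;
import java.net.InetAddress;

public class NvControllerUsageSketch {
    public static void main(String[] args) throws IOException {
        InetAddress host = InetAddress.getByName("192.168.1.100"); // placeholder address
        NvController controller = new NvController(host);
        controller.initialize();
        try {
            // Press, then release, the A button with triggers and sticks centered
            controller.sendControllerInput(NvControllerPacket.A_FLAG,
                    (byte) 0, (byte) 0,
                    (short) 0, (short) 0, (short) 0, (short) 0);
            controller.sendControllerInput((short) 0,
                    (byte) 0, (byte) 0,
                    (short) 0, (short) 0, (short) 0, (short) 0);
            // Nudge the mouse pointer a few pixels
            controller.sendMouseMove((short) 5, (short) -3);
        } finally {
            controller.close();
        }
    }
}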

View File

@@ -1,89 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class NvControllerPacket extends NvInputPacket {
public static final byte[] HEADER =
{
0x0A,
0x00,
0x00,
0x00,
0x00,
0x14
};
public static final byte[] TAIL =
{
(byte)0x9C,
0x00,
0x00,
0x00,
0x55,
0x00
};
public static final int PACKET_TYPE = 0x18;
public static final short A_FLAG = 0x1000;
public static final short B_FLAG = 0x2000;
public static final short X_FLAG = 0x4000;
public static final short Y_FLAG = (short)0x8000;
public static final short UP_FLAG = 0x0001;
public static final short DOWN_FLAG = 0x0002;
public static final short LEFT_FLAG = 0x0004;
public static final short RIGHT_FLAG = 0x0008;
public static final short LB_FLAG = 0x0100;
public static final short RB_FLAG = 0x0200;
public static final short PLAY_FLAG = 0x0010;
public static final short BACK_FLAG = 0x0020;
public static final short LS_CLK_FLAG = 0x0040;
public static final short RS_CLK_FLAG = 0x0080;
public static final short SPECIAL_BUTTON_FLAG = 0x0400;
public static final short PAYLOAD_LENGTH = 24;
public static final short PACKET_LENGTH = PAYLOAD_LENGTH +
NvInputPacket.HEADER_LENGTH;
private short buttonFlags;
private byte leftTrigger;
private byte rightTrigger;
private short leftStickX;
private short leftStickY;
private short rightStickX;
private short rightStickY;
public NvControllerPacket(short buttonFlags, byte leftTrigger, byte rightTrigger,
short leftStickX, short leftStickY,
short rightStickX, short rightStickY)
{
super(PACKET_TYPE);
this.buttonFlags = buttonFlags;
this.leftTrigger = leftTrigger;
this.rightTrigger = rightTrigger;
this.leftStickX = leftStickX;
this.leftStickY = leftStickY;
this.rightStickX = rightStickX;
this.rightStickY = rightStickY;
}
public byte[] toWire()
{
ByteBuffer bb = ByteBuffer.allocate(PACKET_LENGTH).order(ByteOrder.LITTLE_ENDIAN);
bb.put(toWireHeader());
bb.put(HEADER);
bb.putShort(buttonFlags);
bb.put(leftTrigger);
bb.put(rightTrigger);
bb.putShort(leftStickX);
bb.putShort(leftStickY);
bb.putShort(rightStickX);
bb.putShort(rightStickY);
bb.put(TAIL);
return bb.array();
}
}
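
A controller packet is a fixed 28-byte message: the 4-byte big-endian type word (0x18), the 6-byte HEADER, the little-endian button/trigger/stick payload, and the 6-byte TAIL. The worked example below is derived only from the constants above; the A-button press is an arbitrary choice for illustration.

byte[] wire = new NvControllerPacket(NvControllerPacket.A_FLAG, (byte) 0, (byte) 0,
        (short) 0, (short) 0, (short) 0, (short) 0).toWire();
for (byte b : wire) {
    System.out.printf("%02x ", b);
}
// Expected 28 bytes:
//   00 00 00 18             packet type 0x18, big-endian
//   0a 00 00 00 00 14       HEADER
//   00 10                   buttonFlags = A_FLAG (0x1000), little-endian
//   00 00                   leftTrigger, rightTrigger
//   00 00 00 00             leftStickX, leftStickY (little-endian)
//   00 00 00 00             rightStickX, rightStickY (little-endian)
//   9c 00 00 00 55 00       TAIL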

View File

@@ -1,26 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public abstract class NvInputPacket {
public static final int HEADER_LENGTH = 0x4;
protected int packetType;
public NvInputPacket(int packetType)
{
this.packetType = packetType;
}
public abstract byte[] toWire();
public byte[] toWireHeader()
{
ByteBuffer bb = ByteBuffer.allocate(4).order(ByteOrder.BIG_ENDIAN);
bb.putInt(packetType);
return bb.array();
}
}

View File

@@ -1,36 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class NvMouseButtonPacket extends NvInputPacket {
private byte buttonEventType;
public static final int PACKET_TYPE = 0x5;
public static final int PAYLOAD_LENGTH = 5;
public static final int PACKET_LENGTH = PAYLOAD_LENGTH +
NvInputPacket.HEADER_LENGTH;
public static final byte PRESS_EVENT = 0x07;
public static final byte RELEASE_EVENT = 0x08;
public NvMouseButtonPacket(boolean leftButtonDown)
{
super(PACKET_TYPE);
buttonEventType = leftButtonDown ?
PRESS_EVENT : RELEASE_EVENT;
}
@Override
public byte[] toWire() {
ByteBuffer bb = ByteBuffer.allocate(PACKET_LENGTH).order(ByteOrder.BIG_ENDIAN);
bb.put(toWireHeader());
bb.put(buttonEventType);
bb.putInt(1); // FIXME: button index?
return bb.array();
}
}

View File

@@ -1,42 +0,0 @@
package com.limelight.nvstream.input;
import java.nio.ByteBuffer;
public class NvMouseMovePacket extends NvInputPacket {
private static final byte[] HEADER =
{
0x06,
0x00,
0x00,
0x00
};
public static final int PACKET_TYPE = 0x8;
public static final int PAYLOAD_LENGTH = 8;
public static final int PACKET_LENGTH = PAYLOAD_LENGTH +
NvInputPacket.HEADER_LENGTH;
private short deltaX;
private short deltaY;
public NvMouseMovePacket(short deltaX, short deltaY)
{
super(PACKET_TYPE);
this.deltaX = deltaX;
this.deltaY = deltaY;
}
@Override
public byte[] toWire() {
ByteBuffer bb = ByteBuffer.allocate(PACKET_LENGTH);
bb.put(toWireHeader());
bb.put(HEADER);
bb.putShort(deltaX);
bb.putShort(deltaY);
return bb.array();
}
}