Rewrite video support so that it actually works. Add a janky NAL parser dissector.

Cameron Gutman 2013-10-29 20:18:22 -04:00
parent 1abdefdc15
commit e6af9df142
8 changed files with 350 additions and 114 deletions

AndroidManifest.xml

@ -27,6 +27,7 @@
</activity>
<activity
android:name="com.limelight.Game"
android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|screenSize"
android:label="@string/title_activity_game"
android:parentActivityName="com.limelight.Connection"

LuaScripts/NALParser.lua (new file, 62 lines)

@ -0,0 +1,62 @@
-- H264 NAL Parser
-- Version: 1.0
-- Cameron Gutman

-- NAL header fields
local nal_start = ProtoField.bytes("nal.start", "H264 NAL Start Sequence") -- 4 byte start sequence
local nal_type = ProtoField.uint8("nal.type", "H264 NAL Type") -- 1 byte NAL type
local nal_data = ProtoField.bytes("nal.data", "H264 NAL Data") -- variable length NAL data

p_h264raw = Proto("h264raw", "H264 Raw NAL Parser")
p_h264raw.fields = {
    nal_start,
    nal_type,
    nal_data
}

function p_h264raw.dissector(buf, pkt, root)
    pkt.cols.protocol = p_h264raw.name
    local subtree = root:add(p_h264raw, buf(0))
    local i = 0
    local data_start = -1
    while i < buf:len() do
        -- Make sure we have a potential start sequence and type
        if buf:len() - i < 5 then
            -- We need more data (desegmentation is only honored on stream
            -- transports, so over RTP this simply ends the parse here)
            pkt.desegment_len = DESEGMENT_ONE_MORE_SEGMENT
            pkt.desegment_offset = 0
            return
        end

        -- Check for the 00 00 00 01 start sequence
        local start = buf(i, 4):uint()
        if start == 1 then
            if data_start ~= -1 then
                -- End the last NAL
                subtree:add(nal_data, buf(data_start, i - data_start))
            end

            -- This is the start of a NAL
            subtree:add(nal_start, buf(i, 4))
            i = i + 4

            -- Next byte is the NAL type
            subtree:add(nal_type, buf(i, 1))
            i = i + 1

            -- Data begins here
            data_start = i
        else
            -- This must be a data byte
            i = i + 1
        end
    end
end

function p_h264raw.init()
end

-- Register for RTP payload type 96
local udp_dissector_table = DissectorTable.get("rtp.pt")
udp_dissector_table:add(96, p_h264raw)
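
The dissector registers on RTP payload type 96, so Wireshark hands it every payload of the video stream. Its core job is a byte-wise scan for the Annex B start sequence 00 00 00 01, the same scan the new AvParser below performs on the receiving end. A minimal standalone sketch of that scan in Java (the class and method names are illustrative, not part of this commit):

import java.util.ArrayList;
import java.util.List;

class AnnexBScanner {
    // Sketch: return the offset of every 00 00 00 01 start sequence in a window
    static List<Integer> findStartCodes(byte[] buf, int offset, int length) {
        List<Integer> starts = new ArrayList<Integer>();
        for (int i = offset; i + 4 <= offset + length; i++) {
            if (buf[i] == 0x00 && buf[i+1] == 0x00 &&
                buf[i+2] == 0x00 && buf[i+3] == 0x01) {
                starts.add(i);
                i += 3; // jump past the rest of this start sequence
            }
        }
        return starts;
    }
}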

src/com/limelight/Connection.java

@ -21,19 +21,9 @@ public class Connection extends Activity {
private TextView hostText;
private SharedPreferences prefs;
private static final String DEFAULT_HOST = "35.0.113.120";
private static final String DEFAULT_HOST = "192.168.1.240";
public static final String HOST_KEY = "hostText";
@Override
public void onResume() {
super.onResume();
}
@Override
public void onPause() {
SharedPreferences.Editor editor = prefs.edit();

src/com/limelight/nvstream/NvVideoStream.java

@ -8,12 +8,18 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.av.AvBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvPacket;
import com.limelight.nvstream.av.AvParser;
import jlibrtp.DataFrame;
import jlibrtp.Participant;
import jlibrtp.RTPAppIntf;
@ -24,16 +30,16 @@ import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.view.Surface;
public class NvVideoStream implements RTPAppIntf {
public class NvVideoStream {
public static final int RTP_PORT = 47998;
public static final int RTCP_PORT = 47999;
public static final int FIRST_FRAME_PORT = 47996;
private static final int FRAME_RATE = 60;
private ByteBuffer[] decoderInputBuffers = null;
private MediaCodec decoder;
private ByteBuffer[] videoDecoderInputBuffers = null;
private MediaCodec videoDecoder;
private int frameIndex = 0;
private AvParser parser = new AvParser();
private InputStream getFirstFrame(String host) throws UnknownHostException, IOException
{
@ -79,31 +85,16 @@ public class NvVideoStream implements RTPAppIntf {
return;
}
decoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
videoDecoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
decoder.configure(mediaFormat, surface, null, 0);
decoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
decoder.start();
decoderInputBuffers = decoder.getInputBuffers();
int inputIndex = decoder.dequeueInputBuffer(-1);
if (inputIndex >= 0)
{
ByteBuffer buf = decoderInputBuffers[inputIndex];
buf.clear();
buf.put(firstFrame);
decoder.queueInputBuffer(inputIndex,
0, firstFrame.length,
0, 0);
frameIndex++;
}
videoDecoder.configure(videoFormat, surface, null, 0);
videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
videoDecoder.start();
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
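
Two things the setup above relies on: the decoder renders directly to the passed Surface, and the 1280x720 format carries no codec-specific data, so the SPS/PPS parameter sets must arrive in-band through the parsed NAL stream. If one wanted to prime the decoder at configure time instead, MediaFormat accepts parameter sets as "csd-0"/"csd-1" buffers; a hedged sketch, where sps and pps are hypothetical Annex B byte arrays this commit does not provide:

// Sketch: supplying AVC parameter sets out-of-band at configure time
MediaFormat fmt = MediaFormat.createVideoFormat("video/avc", 1280, 720);
fmt.setByteBuffer("csd-0", ByteBuffer.wrap(sps)); // hypothetical SPS NAL (with start code)
fmt.setByteBuffer("csd-1", ByteBuffer.wrap(pps)); // hypothetical PPS NAL (with start code)
videoDecoder.configure(fmt, surface, null, 0);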
final RTPSession session = new RTPSession(rtp, rtcp);
session.addParticipant(new Participant(host, RTP_PORT, RTCP_PORT));
//session.RTPSessionRegister(NvVideoStream.this, null, null);
// Ping thread
new Thread(new Runnable() {
@ -129,64 +120,103 @@ public class NvVideoStream implements RTPAppIntf {
}
}).start();
// Receive thread
// Decoder thread
new Thread(new Runnable() {
@Override
public void run() {
byte[] packet = new byte[1500];
// Send PING every 100 ms
// Read the decode units generated from the RTP stream
for (;;)
{
DatagramPacket dp = new DatagramPacket(packet, 0, packet.length);
AvDecodeUnit du;
try {
rtp.receive(dp);
} catch (IOException e) {
du = parser.getNextDecodeUnit();
} catch (InterruptedException e) {
e.printStackTrace();
break;
return;
}
System.out.println("in receiveData");
int inputIndex = decoder.dequeueInputBuffer(-1);
switch (du.getType())
{
case AvDecodeUnit.TYPE_H264:
{
int inputIndex = videoDecoder.dequeueInputBuffer(-1);
if (inputIndex >= 0)
{
ByteBuffer buf = decoderInputBuffers[inputIndex];
NvVideoPacket nvVideo = new NvVideoPacket(dp.getData());
ByteBuffer buf = videoDecoderInputBuffers[inputIndex];
// Clear old input data
buf.clear();
buf.put(nvVideo.data);
System.out.println(nvVideo);
if (nvVideo.length == 0xc803) {
decoder.queueInputBuffer(inputIndex,
0, nvVideo.length,
0, 0);
frameIndex++;
} else {
decoder.queueInputBuffer(inputIndex,
0, 0,
// Copy data from our buffer list into the input buffer
for (AvBufferDescriptor desc : du.getBufferList())
{
buf.put(desc.data, desc.offset, desc.length);
}
videoDecoder.queueInputBuffer(inputIndex,
0, du.getDataLength(),
0, 0);
}
}
break;
default:
{
System.out.println("Unknown decode unit type");
}
break;
}
}
}
}).start();
// Receive thread
new Thread(new Runnable() {
@Override
public void run() {
byte[] buffer = new byte[1500];
AvBufferDescriptor desc = new AvBufferDescriptor(null, 0, 0);
for (;;)
{
DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
try {
rtp.receive(packet);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
desc.length = packet.getLength();
desc.offset = packet.getOffset();
desc.data = packet.getData();
// Skip the RTP header
desc.offset += 12;
desc.length -= 12;
// Give the data to the AV parser
parser.addInputData(new AvPacket(desc));
}
}
}).start();
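
A caveat on the fixed 12-byte skip in the receive thread: 12 bytes is only the minimum RTP header size (RFC 3550). It is correct solely when the packet carries no CSRC entries and no header extension, which this code assumes of the sender. A general version would compute the header length from the first byte; a sketch:

// Sketch: derive the real RTP header length per RFC 3550, or -1 if malformed
static int rtpHeaderLength(byte[] buf, int offset, int length) {
    if (length < 12)
        return -1;
    int csrcCount = buf[offset] & 0x0F;               // CC field
    boolean hasExtension = (buf[offset] & 0x10) != 0; // X bit
    int headerLen = 12 + 4 * csrcCount;
    if (hasExtension) {
        if (length < headerLen + 4)
            return -1;
        // Extension header: 16-bit profile id, then 16-bit length in 32-bit words
        int extWords = ((buf[offset + headerLen + 2] & 0xFF) << 8)
                | (buf[offset + headerLen + 3] & 0xFF);
        headerLen += 4 + 4 * extWords;
    }
    return (length >= headerLen) ? headerLen : -1;
}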
for (;;)
{
BufferInfo info = new BufferInfo();
System.out.println("dequeuing outputbuffer");
int outIndex = decoder.dequeueOutputBuffer(info, -1);
System.out.println("done dequeuing output buffer");
int outIndex = videoDecoder.dequeueOutputBuffer(info, -1);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
System.out.println("Output buffers changed");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
System.out.println("Output format changed");
//decoderOutputFormat = decoder.getOutputFormat();
System.out.println("New output Format: " + decoder.getOutputFormat());
System.out.println("New output Format: " + videoDecoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
System.out.println("Try again later");
@ -195,9 +225,7 @@ public class NvVideoStream implements RTPAppIntf {
break;
}
if (outIndex >= 0) {
System.out.println("releasing output buffer");
decoder.releaseOutputBuffer(outIndex, true);
System.out.println("output buffer released");
videoDecoder.releaseOutputBuffer(outIndex, true);
}
}
@ -205,52 +233,10 @@ public class NvVideoStream implements RTPAppIntf {
}).start();
}
@Override
public void receiveData(DataFrame frame, Participant participant) {
}
@Override
public void userEvent(int type, Participant[] participant) {
}
@Override
public int frameSize(int payloadType) {
return 1;
}
/**
* Generates the presentation time for frame N, in microseconds.
*/
private static long computePresentationTime(int frameIndex) {
return 132 + frameIndex * 1000000 / FRAME_RATE;
}
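
As a worked example, frame 60 yields 132 + 60 * 1000000 / 60 = 1,000,132 microseconds, one second plus the fixed 132 us offset. Two hedged observations: after this rewrite nothing appears to call the method (queueInputBuffer now passes 0 for the timestamp), and frameIndex * 1000000 is evaluated in int before widening, which overflows once frameIndex passes 2147 (about 36 seconds at 60 fps). A long literal keeps it safe:

// Sketch: same formula with long arithmetic to avoid int overflow
private static long computePresentationTime(int frameIndex) {
    return 132 + frameIndex * 1000000L / FRAME_RATE;
}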
class NvVideoPacket {
byte[] preamble;
short length;
byte[] extra;
byte[] data;
public NvVideoPacket(byte[] payload)
{
ByteBuffer bb = ByteBuffer.wrap(payload).order(ByteOrder.LITTLE_ENDIAN);
preamble = new byte[12+16];
extra = new byte[38];
bb.get(preamble);
length = bb.getShort();
bb.get(extra);
data = new byte[length];
if (bb.remaining() + length <= payload.length)
bb.get(data);
}
public String toString()
{
return "";//String.format("Length: %d | %02x %02x %02x %02x %02x %02x %02x %02x",
//length, data[0], data[1], data[2], data[3], data[4], data[5], data[6], data[7]);
}
}
}

src/com/limelight/nvstream/av/AvBufferDescriptor.java (new file)

@ -0,0 +1,14 @@
package com.limelight.nvstream.av;

public class AvBufferDescriptor {
    public byte[] data;
    public int offset;
    public int length;

    public AvBufferDescriptor(byte[] data, int offset, int length)
    {
        this.data = data;
        this.offset = offset;
        this.length = length;
    }
}
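
AvBufferDescriptor is a plain (data, offset, length) window over an existing array, letting packet payloads be described, sliced, and chained without copying. A small usage sketch (the values are hypothetical):

// Sketch: view the payload of a received datagram without copying it
byte[] datagram = new byte[1500];
int received = 980; // hypothetical datagram length
AvBufferDescriptor view = new AvBufferDescriptor(datagram, 12, received - 12);
// view.data is the same array as datagram; only the window differs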

src/com/limelight/nvstream/av/AvDecodeUnit.java (new file)

@ -0,0 +1,34 @@
package com.limelight.nvstream.av;

import java.util.List;

public class AvDecodeUnit {
    public static final int TYPE_UNKNOWN = 0;
    public static final int TYPE_H264 = 1;

    private int type;
    private List<AvBufferDescriptor> bufferList;
    private int dataLength;

    public AvDecodeUnit(int type, List<AvBufferDescriptor> bufferList, int dataLength)
    {
        this.type = type;
        this.bufferList = bufferList;
        this.dataLength = dataLength;
    }

    public int getType()
    {
        return type;
    }

    public List<AvBufferDescriptor> getBufferList()
    {
        return bufferList;
    }

    public int getDataLength()
    {
        return dataLength;
    }
}
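
A decode unit is one reassembled NAL: a type tag, the chain of descriptors holding its bytes, and the precomputed total length. A consumer that needs the NAL contiguous walks the chain, which is exactly what the decoder thread in NvVideoStream does into a MediaCodec input buffer; a hedged helper sketch of the same walk:

// Sketch: flatten a decode unit's buffer chain into one contiguous array
static byte[] flatten(AvDecodeUnit du) {
    byte[] out = new byte[du.getDataLength()];
    int pos = 0;
    for (AvBufferDescriptor desc : du.getBufferList()) {
        System.arraycopy(desc.data, desc.offset, out, pos, desc.length);
        pos += desc.length;
    }
    return out;
}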

src/com/limelight/nvstream/av/AvPacket.java (new file)

@ -0,0 +1,18 @@
package com.limelight.nvstream.av;

public class AvPacket {
    private AvBufferDescriptor buffer;

    public AvPacket(AvBufferDescriptor rtpPayload)
    {
        // Copy the RTP payload so the caller can reuse its receive buffer
        byte[] data = new byte[rtpPayload.length];
        System.arraycopy(rtpPayload.data, rtpPayload.offset, data, 0, rtpPayload.length);
        buffer = new AvBufferDescriptor(data, 0, data.length);
    }

    public AvBufferDescriptor getPayload()
    {
        // Skip the 56-byte video packet header that precedes the NAL data
        int payloadOffset = buffer.offset+56;
        return new AvBufferDescriptor(buffer.data, payloadOffset, buffer.length-payloadOffset);
    }
}
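
The +56 encodes a format assumption: every RTP payload is expected to open with a 56-byte video packet header, and only the bytes after it are NAL data. Read together with AvParser's magic number below, the two constants suggest fixed 1024-byte RTP payloads: 1024 - 56 = 968, exactly the fast-path length the parser tests for. That is an inference from the constants, not something the commit states.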

src/com/limelight/nvstream/av/AvParser.java (new file)

@ -0,0 +1,131 @@
package com.limelight.nvstream.av;

import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;

public class AvParser {
    // Current NAL state
    private LinkedList<AvBufferDescriptor> nalDataChain;
    private int nalDataLength;

    private LinkedBlockingQueue<AvDecodeUnit> decodedUnits = new LinkedBlockingQueue<AvDecodeUnit>();

    private void reassembleNal()
    {
        // This is the start of a new NAL
        if (nalDataChain != null && nalDataLength != 0)
        {
            // Construct the H264 decode unit
            AvDecodeUnit du = new AvDecodeUnit(AvDecodeUnit.TYPE_H264, nalDataChain, nalDataLength);
            decodedUnits.add(du);

            // Clear old state
            nalDataChain = null;
            nalDataLength = 0;
        }
    }

    public void addInputData(AvPacket packet)
    {
        AvBufferDescriptor payload = packet.getPayload();
        AvBufferDescriptor location = new AvBufferDescriptor(payload.data, payload.offset, payload.length);

        while (location.length != 0)
        {
            // Remember where this packet's contribution to the NAL begins.
            // If a start sequence is found at this offset, the descriptor added
            // below deliberately includes the start code and type byte, so the
            // decoder receives full Annex B NALs.
            int start = location.offset;

            // Check for the start sequence
            if (H264NAL.hasStartSequence(location))
            {
                // Reassemble any pending NAL
                reassembleNal();

                // Setup state for the new NAL
                nalDataChain = new LinkedList<AvBufferDescriptor>();
                nalDataLength = 0;

                // Skip the start sequence and the type byte
                location.length -= 5;
                location.offset += 5;
            }

            // If there's a NAL assembly in progress, add the current data
            if (nalDataChain != null)
            {
                // FIXME: This is a hack to make parsing full packets
                // take less time. We assume if they don't start with
                // a NAL start sequence, they're full of NAL data
                if (payload.length == 968)
                {
                    location.offset += location.length;
                    location.length = 0;
                }
                else
                {
                    System.out.println("Using slow parsing case");
                    while (location.length != 0)
                    {
                        // Check if this should end the current NAL
                        if (H264NAL.hasStartSequence(location))
                        {
                            break;
                        }
                        else
                        {
                            // This byte is part of the NAL data
                            location.offset++;
                            location.length--;
                        }
                    }
                }

                // Add a buffer descriptor describing the NAL data in this packet
                nalDataChain.add(new AvBufferDescriptor(location.data, start, location.offset-start));
                nalDataLength += location.offset-start;
            }
            else
            {
                // Otherwise, skip the data
                location.offset++;
                location.length--;
            }
        }
    }

    public AvDecodeUnit getNextDecodeUnit() throws InterruptedException
    {
        return decodedUnits.take();
    }
}

class H264NAL {
    // Returns true if the buffer begins with three zero bytes, the common
    // prefix of a start sequence
    public static boolean shouldTerminateNal(AvBufferDescriptor buffer)
    {
        if (buffer.length < 4)
            return false;

        if (buffer.data[buffer.offset] != 0x00 ||
            buffer.data[buffer.offset+1] != 0x00 ||
            buffer.data[buffer.offset+2] != 0x00)
        {
            return false;
        }

        return true;
    }

    public static boolean hasStartSequence(AvBufferDescriptor buffer)
    {
        // NAL start sequence is 00 00 00 01
        if (!shouldTerminateNal(buffer))
            return false;

        if (buffer.data[buffer.offset+3] != 0x01)
            return false;

        return true;
    }
}
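
Putting the pieces together: the receive thread strips the RTP header and feeds each payload to addInputData(), the parser splits and reassembles NALs across packet boundaries, and the decoder thread blocks in getNextDecodeUnit(). A minimal driver sketch (the payload bytes are hypothetical, and note that a NAL is only emitted once a later packet's start sequence terminates it):

import com.limelight.nvstream.av.*;

class ParserDemo {
    public static void main(String[] args) throws InterruptedException {
        AvParser parser = new AvParser();

        // Hypothetical RTP payload with its 12-byte RTP header already
        // stripped; begins with the 56-byte video packet header that
        // AvPacket.getPayload() skips
        byte[] payload = new byte[1024];
        parser.addInputData(new AvPacket(new AvBufferDescriptor(payload, 0, payload.length)));

        // Blocks until a complete decode unit has been reassembled
        AvDecodeUnit du = parser.getNextDecodeUnit();
        System.out.println("NAL type " + du.getType() + ", " + du.getDataLength() + " bytes");
    }
}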