Stop allocating RtpPacket and VideoPacket objects in the reassembly path

This commit is contained in:
Cameron Gutman 2014-06-22 13:52:40 -07:00
parent 6c5ec3d2e9
commit 86e2657613
4 changed files with 84 additions and 51 deletions

View File

@@ -8,6 +8,8 @@ public class RtpPacket {
private short seqNum;
private ByteBufferDescriptor buffer;
public static final int HEADER_SIZE = 12;
public RtpPacket(ByteBufferDescriptor buffer)
{
this.buffer = new ByteBufferDescriptor(buffer);
@@ -41,6 +43,6 @@ public class RtpPacket {
public ByteBufferDescriptor getNewPayloadDescriptor()
{
return new ByteBufferDescriptor(buffer.data, buffer.offset+12, buffer.length-12);
return new ByteBufferDescriptor(buffer.data, buffer.offset+HEADER_SIZE, buffer.length-HEADER_SIZE);
}
}

View File

@@ -6,7 +6,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.LimeLog;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.RtpPacket;
import com.limelight.nvstream.av.ConnectionStatusListener;
public class VideoDepacketizer {
@@ -26,7 +25,8 @@ public class VideoDepacketizer {
private long frameStartTime;
// Cached objects
private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0);
private ByteBufferDescriptor cachedReassemblyDesc = new ByteBufferDescriptor(null, 0, 0);
private ByteBufferDescriptor cachedSpecialDesc = new ByteBufferDescriptor(null, 0, 0);
private ConnectionStatusListener controlListener;
@@ -78,15 +78,15 @@ public class VideoDepacketizer {
int start = location.offset;
// Check for a special sequence
if (NAL.getSpecialSequenceDescriptor(location, cachedDesc))
if (NAL.getSpecialSequenceDescriptor(location, cachedSpecialDesc))
{
if (NAL.isAvcStartSequence(cachedDesc))
if (NAL.isAvcStartSequence(cachedSpecialDesc))
{
// We're decoding H264 now
currentlyDecoding = DecodeUnit.TYPE_H264;
// Check if it's the end of the last frame
if (NAL.isAvcFrameStart(cachedDesc))
if (NAL.isAvcFrameStart(cachedSpecialDesc))
{
// Reassemble any pending AVC NAL
reassembleAvcFrame(packet.getFrameIndex());
@@ -97,14 +97,14 @@ public class VideoDepacketizer {
}
// Skip the start sequence
location.length -= cachedDesc.length;
location.offset += cachedDesc.length;
location.length -= cachedSpecialDesc.length;
location.offset += cachedSpecialDesc.length;
}
else
{
// Check if this is padding after a full AVC frame
if (currentlyDecoding == DecodeUnit.TYPE_H264 &&
NAL.isPadding(cachedDesc)) {
NAL.isPadding(cachedSpecialDesc)) {
// The decode unit is complete
reassembleAvcFrame(packet.getFrameIndex());
}
@@ -125,12 +125,12 @@ public class VideoDepacketizer {
if (location.data[location.offset] == 0x00)
{
// Check if this should end the current NAL
if (NAL.getSpecialSequenceDescriptor(location, cachedDesc))
if (NAL.getSpecialSequenceDescriptor(location, cachedSpecialDesc))
{
// Only stop if we're decoding something or this
// isn't padding
if (currentlyDecoding != DecodeUnit.TYPE_UNKNOWN ||
!NAL.isPadding(cachedDesc))
!NAL.isPadding(cachedSpecialDesc))
{
break;
}
@@ -163,19 +163,20 @@ public class VideoDepacketizer {
}
// Add the payload data to the chain
avcFrameDataChain.add(location);
avcFrameDataChain.add(new ByteBufferDescriptor(location));
avcFrameDataLength += location.length;
}
public void addInputData(VideoPacket packet)
{
ByteBufferDescriptor location = packet.getNewPayloadDescriptor();
// Load our reassembly descriptor
packet.initializePayloadDescriptor(cachedReassemblyDesc);
// Runt packets get decoded using the slow path
// These packets stand alone so there's no need to verify
// sequencing before submitting
if (location.length < 968) {
addInputDataSlow(packet, location);
if (cachedReassemblyDesc.length < 968) {
addInputDataSlow(packet, cachedReassemblyDesc);
return;
}
@@ -277,21 +278,22 @@ public class VideoDepacketizer {
nextPacketNumber++;
// Remove extra padding
location.length = packet.getPayloadLength();
cachedReassemblyDesc.length = packet.getPayloadLength();
if (firstPacket)
{
if (NAL.getSpecialSequenceDescriptor(location, cachedDesc) && NAL.isAvcFrameStart(cachedDesc)
&& cachedDesc.data[cachedDesc.offset+cachedDesc.length] == 0x67)
if (NAL.getSpecialSequenceDescriptor(cachedReassemblyDesc, cachedSpecialDesc)
&& NAL.isAvcFrameStart(cachedSpecialDesc)
&& cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length] == 0x67)
{
// SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
clearAvcFrameState();
addInputDataSlow(packet, location);
addInputDataSlow(packet, cachedReassemblyDesc);
return;
}
}
addInputDataFast(packet, location, firstPacket);
addInputDataFast(packet, cachedReassemblyDesc, firstPacket);
// We can't use the EOF flag here because real frames can be split across
// multiple "frames" when packetized to fit under the bandwidth ceiling
@@ -313,12 +315,6 @@ public class VideoDepacketizer {
}
}
public void addInputData(RtpPacket packet)
{
ByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
addInputData(new VideoPacket(rtpPayload));
}
public DecodeUnit takeNextDecodeUnit() throws InterruptedException
{
return decodedUnits.take();

View File

@@ -4,9 +4,13 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.RtpPacket;
public class VideoPacket {
private ByteBufferDescriptor buffer;
private ByteBuffer byteBuffer;
private int dataOffset;
private int frameIndex;
private int packetIndex;
@@ -18,19 +22,47 @@ public class VideoPacket {
public static final int FLAG_EOF = 0x2;
public static final int FLAG_SOF = 0x4;
public VideoPacket(ByteBufferDescriptor rtpPayload)
public VideoPacket(byte[] buffer)
{
buffer = new ByteBufferDescriptor(rtpPayload);
this.buffer = new ByteBufferDescriptor(buffer, 0, buffer.length);
this.byteBuffer = ByteBuffer.wrap(buffer).order(ByteOrder.LITTLE_ENDIAN);
}
ByteBuffer bb = ByteBuffer.wrap(buffer.data).order(ByteOrder.LITTLE_ENDIAN);
bb.position(buffer.offset);
public void initializeWithLengthNoRtpHeader(int length)
{
// Read the video header fields
frameIndex = byteBuffer.getInt();
packetIndex = byteBuffer.getInt();
totalPackets = byteBuffer.getInt();
flags = byteBuffer.getInt();
payloadLength = byteBuffer.getInt();
streamPacketIndex = byteBuffer.getInt();
frameIndex = bb.getInt();
packetIndex = bb.getInt();
totalPackets = bb.getInt();
flags = bb.getInt();
payloadLength = bb.getInt();
streamPacketIndex = bb.getInt();
// Data offset without the RTP header
dataOffset = 56;
// Update descriptor length
buffer.length = length;
}
public void initializeWithLength(int length)
{
// Skip the RTP header
byteBuffer.position(RtpPacket.HEADER_SIZE);
// Read the video header fields
frameIndex = byteBuffer.getInt();
packetIndex = byteBuffer.getInt();
totalPackets = byteBuffer.getInt();
flags = byteBuffer.getInt();
payloadLength = byteBuffer.getInt();
streamPacketIndex = byteBuffer.getInt();
// Data offset includes the RTP header
dataOffset = RtpPacket.HEADER_SIZE + 56;
// Update descriptor length
buffer.length = length;
}
public int getFlags()
@@ -63,8 +95,13 @@ public class VideoPacket {
return streamPacketIndex;
}
public ByteBufferDescriptor getNewPayloadDescriptor()
public byte[] getBuffer()
{
return new ByteBufferDescriptor(buffer.data, buffer.offset+56, buffer.length-56);
return buffer.data;
}
public void initializePayloadDescriptor(ByteBufferDescriptor bb)
{
bb.reinitialize(buffer.data, buffer.offset+dataOffset, buffer.length-dataOffset);
}
}

View File

@@ -12,8 +12,6 @@ import java.util.LinkedList;
import com.limelight.nvstream.NvConnectionListener;
import com.limelight.nvstream.StreamConfiguration;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.RtpPacket;
import com.limelight.nvstream.av.ConnectionStatusListener;
public class VideoStream {
@@ -118,7 +116,9 @@ public class VideoStream {
offset += bytesRead;
}
depacketizer.addInputData(new VideoPacket(new ByteBufferDescriptor(firstFrame, 0, offset)));
VideoPacket packet = new VideoPacket(firstFrame);
packet.initializeWithLengthNoRtpHeader(offset);
depacketizer.addInputData(packet);
} finally {
firstFrameSocket.close();
firstFrameSocket = null;
@@ -179,31 +179,29 @@ public class VideoStream {
Thread t = new Thread() {
@Override
public void run() {
ByteBufferDescriptor ring[] = new ByteBufferDescriptor[VIDEO_RING_SIZE];
VideoPacket ring[] = new VideoPacket[VIDEO_RING_SIZE];
int ringIndex = 0;
// Preinitialize the ring buffer
for (int i = 0; i < VIDEO_RING_SIZE; i++) {
ring[i] = new ByteBufferDescriptor(new byte[MAX_PACKET_SIZE], 0, MAX_PACKET_SIZE);
ring[i] = new VideoPacket(new byte[MAX_PACKET_SIZE]);
}
ByteBufferDescriptor desc;
byte[] buffer;
DatagramPacket packet = new DatagramPacket(new byte[1], 1); // Placeholder array
while (!isInterrupted())
{
try {
// Pull the next buffer in the ring and reset it
desc = ring[ringIndex];
desc.length = MAX_PACKET_SIZE;
desc.offset = 0;
buffer = ring[ringIndex].getBuffer();
// Read the video data off the network
packet.setData(desc.data, desc.offset, desc.length);
packet.setData(buffer, 0, buffer.length);
rtp.receive(packet);
// Submit video data to the depacketizer
desc.length = packet.getLength();
depacketizer.addInputData(new RtpPacket(desc));
ring[ringIndex].initializeWithLength(packet.getLength());
depacketizer.addInputData(ring[ringIndex]);
ringIndex = (ringIndex + 1) % VIDEO_RING_SIZE;
} catch (IOException e) {
listener.connectionTerminated(e);