Fix video buffer pool usage.

Cameron Gutman 2013-11-10 03:29:44 -05:00
parent d5665ac318
commit 54e365a304
5 changed files with 73 additions and 13 deletions
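In short: the 1500-byte buffer pool moves out of NvVideoStream and into AvVideoDepacketizer, and AvVideoPacket gains a reference count so that a packet's backing buffer is returned to the pool only after every descriptor that points into it has been released. Below is a minimal, self-contained sketch of that ownership pattern; the classes and names are simplified stand-ins for illustration, not the code in this commit.

// Illustrative sketch only: simplified stand-ins for a byte buffer pool and a
// reference-counted packet, showing a buffer being freed back to the pool only
// when the last reference to the owning packet is released.
import java.util.LinkedList;

class BufferPool {
    private final LinkedList<byte[]> buffers = new LinkedList<byte[]>();
    private final int bufferSize;

    BufferPool(int bufferSize) {
        this.bufferSize = bufferSize;
    }

    synchronized byte[] allocate() {
        // Reuse a returned buffer if one is available, otherwise allocate a new one
        return buffers.isEmpty() ? new byte[bufferSize] : buffers.removeFirst();
    }

    synchronized void free(byte[] buffer) {
        buffers.addFirst(buffer);
    }
}

class RefCountedPacket {
    private final byte[] data;
    private int refCount;

    RefCountedPacket(byte[] data) {
        this.data = data;
    }

    byte[] data() {
        return data;
    }

    synchronized int addRef() {
        return ++refCount;
    }

    synchronized int release() {
        return --refCount;
    }
}

public class BufferOwnershipSketch {
    public static void main(String[] args) {
        BufferPool pool = new BufferPool(1500);

        // The receiver takes an initial reference when the packet arrives
        RefCountedPacket packet = new RefCountedPacket(pool.allocate());
        packet.addRef();

        // A NAL descriptor that points into the packet adds another reference
        packet.addRef();

        // The receiver finishes parsing; the descriptor still holds the buffer
        if (packet.release() == 0) {
            pool.free(packet.data());
        }

        // The decoder consumes the descriptor; the last release returns the buffer
        if (packet.release() == 0) {
            pool.free(packet.data());
            System.out.println("Buffer returned to pool");
        }
    }
}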

NvVideoStream.java

@@ -12,7 +12,6 @@ import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvByteBufferPool;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.video.AvVideoDepacketizer;
@@ -40,9 +39,7 @@ public class NvVideoStream {
private DatagramSocket rtp;
private LinkedList<Thread> threads = new LinkedList<Thread>();
private AvByteBufferPool pool = new AvByteBufferPool(1500);
private AvVideoDepacketizer depacketizer = new AvVideoDepacketizer();
private boolean aborting = false;
@@ -84,7 +81,7 @@ public class NvVideoStream {
private void readFirstFrame(String host) throws IOException
{
byte[] firstFrame = pool.allocate();
byte[] firstFrame = depacketizer.allocatePacketBuffer();
System.out.println("VID: Waiting for first frame");
InputStream firstFrameStream = openFirstFrameInputStream(host);
@@ -124,6 +121,7 @@
public void setupDecoders(Surface surface)
{
videoDecoder = MediaCodec.createDecoderByType("video/avc");
//videoDecoder = MediaCodec.createByCodecName("OMX.google.h264.decoder");
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
videoDecoder.configure(videoFormat, surface, null, 0);
@@ -218,10 +216,9 @@ public class NvVideoStream {
for (AvByteBufferDescriptor desc : du.getBufferList())
{
buf.put(desc.data, desc.offset, desc.length);
// Release the buffer back to the buffer pool
pool.free(desc.data);
}
depacketizer.releaseDecodeUnit(du);
videoDecoder.queueInputBuffer(inputIndex,
0, du.getDataLength(),
@@ -280,7 +277,7 @@ public class NvVideoStream {
Thread t = new Thread() {
@Override
public void run() {
DatagramPacket packet = new DatagramPacket(pool.allocate(), 1500);
DatagramPacket packet = new DatagramPacket(depacketizer.allocatePacketBuffer(), 1500);
AvByteBufferDescriptor desc = new AvByteBufferDescriptor(null, 0, 0);
while (!isInterrupted())
@@ -300,7 +297,7 @@ public class NvVideoStream {
packets.add(new AvRtpPacket(desc));
// Get a new buffer from the buffer pool
packet.setData(pool.allocate(), 0, 1500);
packet.setData(depacketizer.allocatePacketBuffer(), 0, 1500);
}
}
};

AvByteBufferDescriptor.java

@@ -4,6 +4,7 @@ public class AvByteBufferDescriptor {
public byte[] data;
public int offset;
public int length;
public Object context;
public AvByteBufferDescriptor(byte[] data, int offset, int length)
{

AvByteBufferPool.java

@@ -25,6 +25,6 @@ public class AvByteBufferPool {
public synchronized void free(byte[] buffer)
{
//bufferList.addFirst(buffer);
bufferList.addFirst(buffer);
}
}

AvVideoDepacketizer.java

@@ -4,6 +4,7 @@ import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvByteBufferPool;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvRtpPacket;
@@ -21,6 +22,44 @@ public class AvVideoDepacketizer {
private LinkedBlockingQueue<AvDecodeUnit> decodedUnits = new LinkedBlockingQueue<AvDecodeUnit>();
private AvByteBufferPool pool = new AvByteBufferPool(1500);
public byte[] allocatePacketBuffer()
{
return pool.allocate();
}
private void clearAvcNalState()
{
if (avcNalDataChain != null)
{
for (AvByteBufferDescriptor avbb : avcNalDataChain)
{
AvVideoPacket packet = (AvVideoPacket) avbb.context;
if (packet.release() == 0) {
pool.free(avbb.data);
}
}
}
avcNalDataChain = null;
avcNalDataLength = 0;
}
public void releaseDecodeUnit(AvDecodeUnit decodeUnit)
{
// Remove the reference from each AvVideoPacket (freeing if okay)
for (AvByteBufferDescriptor buff : decodeUnit.getBufferList())
{
AvVideoPacket packet = (AvVideoPacket) buff.context;
if (packet.release() == 0) {
pool.free(buff.data);
}
}
}
private void reassembleAvcNal()
{
// This is the start of a new NAL
@@ -87,6 +126,9 @@ public class AvVideoDepacketizer {
{
AvByteBufferDescriptor location = packet.getNewPayloadDescriptor();
// Add an initial reference
packet.addRef();
while (location.length != 0)
{
// Remember the start of the NAL data in this packet
@@ -146,13 +188,23 @@ public class AvVideoDepacketizer {
}
AvByteBufferDescriptor data = new AvByteBufferDescriptor(location.data, start, location.offset-start);
if (currentlyDecoding == AvDecodeUnit.TYPE_H264 && avcNalDataChain != null)
{
// Attach the current packet as the buffer context and increment the refcount
data.context = packet;
packet.addRef();
// Add a buffer descriptor describing the NAL data in this packet
avcNalDataChain.add(data);
avcNalDataLength += location.offset-start;
}
}
// If nothing useful came out of this, release the packet now
if (packet.release() == 0) {
pool.free(location.data);
}
}
public void addInputData(AvRtpPacket packet)
@@ -168,8 +220,7 @@ public class AvVideoDepacketizer {
// Reset the depacketizer state
currentlyDecoding = AvDecodeUnit.TYPE_UNKNOWN;
avcNalDataChain = null;
avcNalDataLength = 0;
clearAvcNalState();
}
lastSequenceNumber = seq;

AvVideoPacket.java

@@ -4,6 +4,7 @@ import com.limelight.nvstream.av.AvByteBufferDescriptor;
public class AvVideoPacket {
private AvByteBufferDescriptor buffer;
private int refCount;
public AvVideoPacket(AvByteBufferDescriptor rtpPayload)
{
@@ -14,4 +15,14 @@ public class AvVideoPacket {
{
return new AvByteBufferDescriptor(buffer.data, buffer.offset+56, buffer.length-56);
}
public int addRef()
{
return ++refCount;
}
public int release()
{
return --refCount;
}
}