Refactor AV code

Cameron Gutman 2013-11-09 19:20:49 -05:00
parent 9b91543c2d
commit d3b9387c37
6 changed files with 121 additions and 191 deletions

View File

@@ -5,14 +5,102 @@ import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;
import jlibrtp.Participant;
import jlibrtp.RTPSession;
import com.limelight.nvstream.av.AvBufferDescriptor;
import com.limelight.nvstream.av.AvBufferPool;
import com.limelight.nvstream.av.AvRtpPacket;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.net.rtp.AudioGroup;
import android.net.rtp.AudioStream;
import android.view.Surface;
public class NvAudioStream {
private AudioGroup group;
private AudioStream stream;
public static final int RTP_PORT = 48000;
public static final int RTCP_PORT = 47999;
public static final int PORT = 48000;
private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>();
private RTPSession session;
private DatagramSocket rtp;
private AvBufferPool pool = new AvBufferPool(1500);
public void setupRtpSession(String host) throws SocketException
{
DatagramSocket rtcp;
rtp = new DatagramSocket(RTP_PORT);
rtcp = new DatagramSocket(RTCP_PORT);
session = new RTPSession(rtp, rtcp);
session.addParticipant(new Participant(host, RTP_PORT, RTCP_PORT));
}
private void startReceiveThread()
{
// Receive thread
new Thread(new Runnable() {
@Override
public void run() {
DatagramPacket packet = new DatagramPacket(pool.allocate(), 1500);
AvBufferDescriptor desc = new AvBufferDescriptor(null, 0, 0);
for (;;)
{
try {
rtp.receive(packet);
} catch (IOException e) {
e.printStackTrace();
return;
}
desc.length = packet.getLength();
desc.offset = packet.getOffset();
desc.data = packet.getData();
// Give the packet to the depacketizer thread
packets.add(new AvRtpPacket(desc));
// Get a new buffer from the buffer pool
packet.setData(pool.allocate(), 0, 1500);
}
}
}).start();
}
private void startUdpPingThread()
{
// Ping thread
new Thread(new Runnable() {
@Override
public void run() {
// PING in ASCII
final byte[] pingPacket = new byte[] {0x50, 0x49, 0x4E, 0x47};
// RTP payload type is 127 (dynamic)
session.payloadType(127);
// Send PING every 100 ms
for (;;)
{
session.sendData(pingPacket);
try {
Thread.sleep(100);
} catch (InterruptedException e) {
break;
}
}
}
}).start();
}
/*public void startStream(String host) throws SocketException, UnknownHostException
{
@@ -33,63 +121,4 @@ public class NvAudioStream {
System.out.println("Joined");
}*/
public void start(final String host)
{
new Thread(new Runnable() {
@Override
public void run() {
final DatagramSocket ds;
try {
ds = new DatagramSocket(PORT);
} catch (SocketException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
return;
}
new Thread(new Runnable() {
@Override
public void run() {
byte[] ping = new byte[]{0x50, 0x49, 0x4e, 0x47};
for (;;)
{
DatagramPacket dgp = new DatagramPacket(ping, 0, ping.length);
dgp.setSocketAddress(new InetSocketAddress(host, PORT));
try {
ds.send(dgp);
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
break;
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
break;
}
}
}
}).start();
for (;;)
{
DatagramPacket dp = new DatagramPacket(new byte[1500], 1500);
try {
ds.receive(dp);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
break;
}
//System.out.println("Got UDP 48000: "+dp.getLength());
}
}
}).start();
}
}
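
The keepalive loop in start() above is small enough to exercise on its own. Below is a minimal standalone sketch of the same idea, sending the 4-byte ASCII "PING" datagram to UDP port 48000 on a fixed interval; the class name, placeholder host, and 1-second period are illustrative and not part of this commit.

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.SocketException;

// Hypothetical helper, not from this commit: keeps the audio UDP port alive
// by sending "PING" to the host until an error or interruption occurs.
public class UdpKeepalive {
    public static void main(String[] args) throws SocketException {
        String host = args.length > 0 ? args[0] : "192.168.1.2"; // placeholder host
        byte[] ping = new byte[] {0x50, 0x49, 0x4E, 0x47}; // "PING" in ASCII
        DatagramSocket socket = new DatagramSocket(48000);

        for (;;) {
            DatagramPacket dgp = new DatagramPacket(ping, 0, ping.length);
            dgp.setSocketAddress(new InetSocketAddress(host, 48000));
            try {
                socket.send(dgp);
                Thread.sleep(1000);
            } catch (IOException | InterruptedException e) {
                break; // stop pinging on error or interruption
            }
        }
        socket.close();
    }
}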

View File

@@ -13,9 +13,8 @@ import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvBufferDescriptor;
import com.limelight.nvstream.av.AvBufferPool;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvPacket;
import com.limelight.nvstream.av.AvDepacketizer;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.video.AvVideoDepacketizer;
import jlibrtp.Participant;
import jlibrtp.RTPSession;
@@ -30,8 +29,8 @@ public class NvVideoStream {
public static final int RTCP_PORT = 47999;
public static final int FIRST_FRAME_PORT = 47996;
private ByteBuffer[] videoDecoderInputBuffers, audioDecoderInputBuffers;
private MediaCodec videoDecoder, audioDecoder;
private ByteBuffer[] videoDecoderInputBuffers;
private MediaCodec videoDecoder;
private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>();
@@ -40,7 +39,7 @@ public class NvVideoStream {
private AvBufferPool pool = new AvBufferPool(1500);
private AvDepacketizer depacketizer = new AvDepacketizer();
private AvVideoDepacketizer depacketizer = new AvVideoDepacketizer();
private InputStream openFirstFrameInputStream(String host) throws UnknownHostException, IOException
{
@@ -94,19 +93,13 @@ public class NvVideoStream {
videoDecoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
audioDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
MediaFormat audioFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", 48000, 2);
videoDecoder.configure(videoFormat, surface, null, 0);
audioDecoder.configure(audioFormat, null, null, 0);
videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
videoDecoder.start();
audioDecoder.start();
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
audioDecoderInputBuffers = audioDecoder.getInputBuffers();
}
public void startVideoStream(final String host, final Surface surface)
@@ -139,9 +132,6 @@ public class NvVideoStream {
// Start decoding the data we're receiving
startDecoderThread();
// Start playing back audio data
startAudioPlaybackThread();
// Read the first frame to start the UDP video stream
try {
readFirstFrame(host);
@@ -201,32 +191,6 @@ public class NvVideoStream {
}
break;
case AvDecodeUnit.TYPE_AAC:
{
int inputIndex = audioDecoder.dequeueInputBuffer(0);
if (inputIndex >= 0)
{
ByteBuffer buf = audioDecoderInputBuffers[inputIndex];
// Clear old input data
buf.clear();
// Copy data from our buffer list into the input buffer
for (AvBufferDescriptor desc : du.getBufferList())
{
buf.put(desc.data, desc.offset, desc.length);
// Release the buffer back to the buffer pool
pool.free(desc.data);
}
audioDecoder.queueInputBuffer(inputIndex,
0, du.getDataLength(),
0, du.getFlags());
}
}
break;
default:
{
System.out.println("Unknown decode unit type");
@@ -322,40 +286,6 @@ public class NvVideoStream {
}).start();
}
private void startAudioPlaybackThread()
{
new Thread(new Runnable() {
@Override
public void run() {
for (;;)
{
BufferInfo info = new BufferInfo();
System.out.println("Waiting for audio");
int outIndex = audioDecoder.dequeueOutputBuffer(info, -1);
System.out.println("Got audio");
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
System.out.println("Output buffers changed");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
System.out.println("Output format changed");
System.out.println("New output Format: " + videoDecoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
System.out.println("Try again later");
break;
default:
break;
}
if (outIndex >= 0) {
audioDecoder.releaseOutputBuffer(outIndex, true);
}
}
}
}).start();
}
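
The removed AAC case above fed the audio decoder with the usual MediaCodec input-submit pattern: dequeue a codec input buffer, copy each AvBufferDescriptor of the decode unit into it, then queue it with the unit's length and flags. A sketch of that pattern in isolation, assuming the pre-API-21 getInputBuffers() style used throughout this class; the helper name is illustrative only and not part of this commit.

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import com.limelight.nvstream.av.AvBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;

// Illustrative helper, not from this commit: submits one decode unit to a
// running MediaCodec using the buffer array returned by getInputBuffers().
class DecodeUnitSubmitter {
    static void submit(MediaCodec decoder, ByteBuffer[] inputBuffers, AvDecodeUnit du) {
        int inputIndex = decoder.dequeueInputBuffer(0); // don't block
        if (inputIndex >= 0) {
            ByteBuffer buf = inputBuffers[inputIndex];
            // Clear old input data
            buf.clear();
            // Copy data from the decode unit's buffer list into the input buffer
            for (AvBufferDescriptor desc : du.getBufferList()) {
                buf.put(desc.data, desc.offset, desc.length);
            }
            decoder.queueInputBuffer(inputIndex, 0, du.getDataLength(), 0, du.getFlags());
        }
    }
}
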
private void outputDisplayLoop()
{
for (;;)

View File

@@ -5,7 +5,7 @@ import java.util.List;
public class AvDecodeUnit {
public static final int TYPE_UNKNOWN = 0;
public static final int TYPE_H264 = 1;
public static final int TYPE_AAC = 2;
public static final int TYPE_OPUS = 2;
private int type;
private List<AvBufferDescriptor> bufferList;

View File

@@ -4,6 +4,7 @@ import java.nio.ByteBuffer;
public class AvRtpPacket {
private byte packetType;
private short seqNum;
private AvBufferDescriptor buffer;
@@ -13,13 +14,21 @@ public class AvRtpPacket {
ByteBuffer bb = ByteBuffer.wrap(buffer.data, buffer.offset, buffer.length);
// Discard the first couple of bytes
bb.getShort();
// Discard the first byte
bb.position(bb.position()+1);
// Get the packet type
packetType = bb.get();
// Get the sequence number
seqNum = bb.getShort();
}
public byte getPacketType()
{
return packetType;
}
public short getSequenceNumber()
{
return seqNum;
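
The constructor above now pulls its fields straight out of the fixed RTP header: it skips the first byte (the version/padding/extension/CSRC-count flags), reads the second byte as the packet type (marker bit plus payload type), and reads the big-endian 16-bit sequence number that follows. A standalone sketch of the same parse using only the JDK; the sample header bytes are fabricated for illustration.

import java.nio.ByteBuffer;

// Illustrative only: parses the same header fields AvRtpPacket now extracts.
public class RtpHeaderParseExample {
    public static void main(String[] args) {
        // Fabricated packet: flags byte 0x80 (RTP version 2), packet type 0x7F
        // (payload type 127, as the audio ping thread uses), sequence number 258,
        // then two payload bytes.
        byte[] data = new byte[] {(byte) 0x80, 0x7F, 0x01, 0x02, (byte) 0xDE, (byte) 0xAD};

        ByteBuffer bb = ByteBuffer.wrap(data); // big-endian by default

        bb.position(bb.position() + 1); // discard the first byte
        byte packetType = bb.get();     // get the packet type
        short seqNum = bb.getShort();   // get the sequence number

        System.out.printf("type=0x%02x seq=%d payload starts at offset %d%n",
                packetType, seqNum, bb.position());
    }
}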

View File

@@ -1,17 +1,19 @@
package com.limelight.nvstream.av;
package com.limelight.nvstream.av.video;
import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.nvstream.av.AvBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvRtpPacket;
import android.media.MediaCodec;
public class AvDepacketizer {
public class AvVideoDepacketizer {
// Current NAL state
private LinkedList<AvBufferDescriptor> avcNalDataChain = null;
private int avcNalDataLength = 0;
private LinkedList<AvBufferDescriptor> aacNalDataChain = null;
private int aacNalDataLength = 0;
private int currentlyDecoding;
// Sequencing state
@@ -19,28 +21,6 @@ public class AvDepacketizer {
private LinkedBlockingQueue<AvDecodeUnit> decodedUnits = new LinkedBlockingQueue<AvDecodeUnit>();
private void reassembleAacNal()
{
// This is the start of a new AAC NAL
if (aacNalDataChain != null && aacNalDataLength != 0)
{
System.out.println("Assembling AAC NAL: "+aacNalDataLength);
/*AvBufferDescriptor header = aacNalDataChain.getFirst();
for (int i = 0; i < header.length; i++)
System.out.printf("%02x ", header.data[header.offset+i]);
System.out.println();*/
// Construct the AAC decode unit
AvDecodeUnit du = new AvDecodeUnit(AvDecodeUnit.TYPE_AAC, aacNalDataChain, aacNalDataLength, 0);
decodedUnits.add(du);
// Clear old state
aacNalDataChain = null;
aacNalDataLength = 0;
}
}
private void reassembleAvcNal()
{
// This is the start of a new NAL
@@ -103,7 +83,7 @@ public class AvDepacketizer {
}
}
public void addInputData(AvPacket packet)
public void addInputData(AvVideoPacket packet)
{
AvBufferDescriptor location = packet.getNewPayloadDescriptor();
@@ -132,18 +112,6 @@ public class AvDepacketizer {
avcNalDataLength = 0;
}
}
else if (NAL.isAacStartSequence(specialSeq))
{
// We're decoding AAC now
currentlyDecoding = AvDecodeUnit.TYPE_AAC;
// Reassemble any pending AAC NAL
reassembleAacNal();
// Setup state for the new NAL
aacNalDataChain = new LinkedList<AvBufferDescriptor>();
aacNalDataLength = 0;
}
else
{
// Not either sequence we want
@@ -181,12 +149,6 @@ public class AvDepacketizer {
avcNalDataChain.add(data);
avcNalDataLength += location.offset-start;
}
else if (currentlyDecoding == AvDecodeUnit.TYPE_AAC && aacNalDataChain != null)
{
// Add a buffer descriptor describing the NAL data in this packet
aacNalDataChain.add(data);
aacNalDataLength += location.offset-start;
}
}
}
@@ -205,15 +167,13 @@ public class AvDepacketizer {
currentlyDecoding = AvDecodeUnit.TYPE_UNKNOWN;
avcNalDataChain = null;
avcNalDataLength = 0;
aacNalDataChain = null;
aacNalDataLength = 0;
}
lastSequenceNumber = seq;
// Pass the payload to the non-sequencing parser
AvBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
addInputData(new AvPacket(rtpPayload));
addInputData(new AvVideoPacket(rtpPayload));
}
public AvDecodeUnit getNextDecodeUnit() throws InterruptedException
@@ -235,7 +195,7 @@ class NAL {
}
// This assumes that the buffer passed in is already a special sequence
public static boolean isAacStartSequence(AvBufferDescriptor specialSeq)
public static boolean isUnknownStartSequence(AvBufferDescriptor specialSeq)
{
if (specialSeq.length != 3)
return false;
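
The depacketizer above scans for short "special sequences" to delimit the NAL data chains it assembles. As general background (this is not the NAL helper's exact logic), H.264 Annex B marks NAL unit boundaries with a 00 00 01 or 00 00 00 01 start code, which can be located like this:

// Generic Annex B start-code scan, for illustration only; the NAL helper in
// this file performs its own checks on AvBufferDescriptor slices.
public class AnnexBExample {
    // Returns the start-code length (3 or 4) at offset i, or 0 if none.
    static int startCodeLength(byte[] data, int i) {
        if (i + 3 <= data.length && data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
            return 3;
        }
        if (i + 4 <= data.length && data[i] == 0 && data[i + 1] == 0
                && data[i + 2] == 0 && data[i + 3] == 1) {
            return 4;
        }
        return 0;
    }

    public static void main(String[] args) {
        // Fabricated stream: a 4-byte start code before an SPS header byte (0x67)
        // and a 3-byte start code before a PPS header byte (0x68).
        byte[] stream = new byte[] {0, 0, 0, 1, 0x67, 0x42, 0, 0, 1, 0x68};
        int i = 0;
        while (i < stream.length) {
            int len = startCodeLength(stream, i);
            if (len != 0) {
                System.out.println("NAL unit starts at offset " + (i + len));
                i += len;
            } else {
                i++;
            }
        }
    }
}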

View File

@@ -1,9 +1,11 @@
package com.limelight.nvstream.av;
package com.limelight.nvstream.av.video;
public class AvPacket {
import com.limelight.nvstream.av.AvBufferDescriptor;
public class AvVideoPacket {
private AvBufferDescriptor buffer;
public AvPacket(AvBufferDescriptor rtpPayload)
public AvVideoPacket(AvBufferDescriptor rtpPayload)
{
buffer = new AvBufferDescriptor(rtpPayload);
}