Mirror of https://github.com/moonlight-stream/moonlight-android.git (synced 2025-07-21 03:52:48 +00:00)
Video stream updates for GFE 2.1.1

parent ae8cb18f63
commit aa799342e5
StreamConfiguration.java

@@ -5,7 +5,6 @@ public class StreamConfiguration {
     private int width, height;
     private int refreshRate;
     private int bitrate;
-    private int maxPacketSize;
     private boolean sops;
 
     public StreamConfiguration(String app, int width, int height, int refreshRate, int bitrate) {
@@ -14,17 +13,15 @@ public class StreamConfiguration {
         this.height = height;
         this.refreshRate = refreshRate;
         this.bitrate = bitrate;
-        this.maxPacketSize = 1024;
         this.sops = true;
     }
 
-    public StreamConfiguration(String app, int width, int height, int refreshRate, int bitrate, int maxPacketSize, boolean sops) {
+    public StreamConfiguration(String app, int width, int height, int refreshRate, int bitrate, boolean sops) {
         this.app = app;
         this.width = width;
         this.height = height;
         this.refreshRate = refreshRate;
         this.bitrate = bitrate;
-        this.maxPacketSize = maxPacketSize;
         this.sops = sops;
     }
 
@@ -45,7 +42,7 @@ public class StreamConfiguration {
     }
 
     public int getMaxPacketSize() {
-        return maxPacketSize;
+        return 1024;
     }
 
     public String getApp() {
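Note (not part of the commit): a brief usage sketch of the two constructors after this change, with hypothetical example values. The explicit max packet size argument is gone; getMaxPacketSize() now always reports 1024.

// Usage sketch only; the app name, resolution, refresh rate, and bitrate values are made up.
StreamConfiguration defaults = new StreamConfiguration("Steam", 1280, 720, 60, 10000);
StreamConfiguration noSops   = new StreamConfiguration("Steam", 1920, 1080, 30, 20000, false);
int maxPacket = defaults.getMaxPacketSize(); // always 1024 after this commit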
RtpPacket.java

@@ -7,11 +7,16 @@ public class RtpPacket {
 
     private byte packetType;
     private short seqNum;
+    private int headerSize;
 
     private ByteBufferDescriptor buffer;
     private ByteBuffer bb;
 
-    public static final int HEADER_SIZE = 12;
+    public static final int FLAG_EXTENSION = 0x10;
+
+    public static final int FIXED_HEADER_SIZE = 12;
+    public static final int MAX_HEADER_SIZE = 16;
 
     public RtpPacket(byte[] buffer)
     {
@@ -21,8 +26,11 @@ public class RtpPacket {
 
     public void initializeWithLength(int length)
     {
-        // Discard the first byte
-        bb.position(1);
+        // Rewind to start
+        bb.rewind();
 
+        // Read the RTP header byte
+        byte header = bb.get();
+
         // Get the packet type
         packetType = bb.get();
@@ -30,6 +38,12 @@ public class RtpPacket {
         // Get the sequence number
         seqNum = bb.getShort();
 
+        // If an extension is present, read the fields
+        headerSize = FIXED_HEADER_SIZE;
+        if ((header & FLAG_EXTENSION) != 0) {
+            headerSize += 4; // 2 additional fields
+        }
+
         // Update descriptor length
         buffer.length = length;
     }
@@ -51,6 +65,6 @@ public class RtpPacket {
 
     public void initializePayloadDescriptor(ByteBufferDescriptor bb)
     {
-        bb.reinitialize(buffer.data, buffer.offset+HEADER_SIZE, buffer.length-HEADER_SIZE);
+        bb.reinitialize(buffer.data, buffer.offset+headerSize, buffer.length-headerSize);
     }
 }
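Note (not part of the commit): a standalone sketch of the header-size logic introduced above, assuming only what the diff shows — bit 0x10 of the first RTP byte marks an extension, which grows the header from FIXED_HEADER_SIZE (12) to MAX_HEADER_SIZE (16), and the payload descriptor then starts at the variable headerSize. The class and method names below are illustrative, not part of the codebase.

import java.nio.ByteBuffer;

public class RtpHeaderSizeSketch {
    static final int FLAG_EXTENSION = 0x10;
    static final int FIXED_HEADER_SIZE = 12;

    // Mirrors the new initializeWithLength(): read the first RTP byte, then size the header.
    static int headerSize(byte[] packet) {
        ByteBuffer bb = ByteBuffer.wrap(packet);
        byte header = bb.get();
        int size = FIXED_HEADER_SIZE;
        if ((header & FLAG_EXTENSION) != 0) {
            size += 4; // the diff's comment: "2 additional fields"
        }
        return size;
    }

    public static void main(String[] args) {
        byte[] noExtension   = { (byte) 0x80, 0x60, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 };
        byte[] withExtension = { (byte) 0x90, 0x60, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
        System.out.println(headerSize(noExtension));   // 12
        System.out.println(headerSize(withExtension)); // 16: payload now begins at offset headerSize
    }
}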
VideoDepacketizer.java

@@ -13,23 +13,21 @@ public class VideoDepacketizer {
     // Current frame state
     private LinkedList<ByteBufferDescriptor> avcFrameDataChain = null;
     private int avcFrameDataLength = 0;
-    private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN;
 
     // Sequencing state
     private int lastPacketInStream = 0;
     private int nextFrameNumber = 1;
-    private int nextPacketNumber;
     private int startFrameNumber = 1;
     private boolean waitingForNextSuccessfulFrame;
-    private boolean gotNextFrameStart;
     private long frameStartTime;
+    private boolean decodingFrame;
 
     // Cached objects
     private ByteBufferDescriptor cachedReassemblyDesc = new ByteBufferDescriptor(null, 0, 0);
     private ByteBufferDescriptor cachedSpecialDesc = new ByteBufferDescriptor(null, 0, 0);
 
     private ConnectionStatusListener controlListener;
-    private int nominalPacketSize;
+    private final int nominalPacketDataLength;
 
     private static final int DU_LIMIT = 30;
     private PopulatedBufferList<DecodeUnit> decodedUnits;
@@ -37,7 +35,7 @@ public class VideoDepacketizer {
     public VideoDepacketizer(ConnectionStatusListener controlListener, int nominalPacketSize)
     {
         this.controlListener = controlListener;
-        this.nominalPacketSize = nominalPacketSize;
+        this.nominalPacketDataLength = nominalPacketSize - VideoPacket.HEADER_SIZE;
 
         decodedUnits = new PopulatedBufferList<DecodeUnit>(DU_LIMIT, new PopulatedBufferList.BufferFactory() {
             public Object createFreeBuffer() {
@@ -106,6 +104,8 @@ public class VideoDepacketizer {
 
     private void addInputDataSlow(VideoPacket packet, ByteBufferDescriptor location)
     {
+        boolean isDecodingH264 = false;
+
         while (location.length != 0)
         {
             // Remember the start of the NAL data in this packet
@@ -117,11 +117,14 @@ public class VideoDepacketizer {
             if (NAL.isAvcStartSequence(cachedSpecialDesc))
             {
                 // We're decoding H264 now
-                currentlyDecoding = DecodeUnit.TYPE_H264;
+                isDecodingH264 = true;
 
                 // Check if it's the end of the last frame
                 if (NAL.isAvcFrameStart(cachedSpecialDesc))
                 {
+                    // Update the global state that we're decoding a new frame
+                    this.decodingFrame = true;
+
                     // Reassemble any pending AVC NAL
                     reassembleAvcFrame(packet.getFrameIndex());
 
@@ -137,14 +140,13 @@ public class VideoDepacketizer {
             else
             {
                 // Check if this is padding after a full AVC frame
-                if (currentlyDecoding == DecodeUnit.TYPE_H264 &&
-                    NAL.isPadding(cachedSpecialDesc)) {
+                if (isDecodingH264 && NAL.isPadding(cachedSpecialDesc)) {
                     // The decode unit is complete
                     reassembleAvcFrame(packet.getFrameIndex());
                 }
 
                 // Not decoding AVC
-                currentlyDecoding = DecodeUnit.TYPE_UNKNOWN;
+                isDecodingH264 = false;
 
                 // Just skip this byte
                 location.length--;
@@ -163,8 +165,7 @@ public class VideoDepacketizer {
             {
                 // Only stop if we're decoding something or this
                 // isn't padding
-                if (currentlyDecoding != DecodeUnit.TYPE_UNKNOWN ||
-                    !NAL.isPadding(cachedSpecialDesc))
+                if (isDecodingH264 || !NAL.isPadding(cachedSpecialDesc))
                 {
                     break;
                 }
@@ -176,7 +177,7 @@ public class VideoDepacketizer {
             location.length--;
         }
 
-        if (currentlyDecoding == DecodeUnit.TYPE_H264 && avcFrameDataChain != null)
+        if (isDecodingH264 && avcFrameDataChain != null)
        {
             ByteBufferDescriptor data = new ByteBufferDescriptor(location.data, start, location.offset-start);
 
@@ -201,106 +202,80 @@ public class VideoDepacketizer {
         avcFrameDataLength += location.length;
     }
 
+    private static boolean isFirstPacket(int flags) {
+        // Clear the picture data flag
+        flags &= ~VideoPacket.FLAG_CONTAINS_PIC_DATA;
+
+        // Check if it's just the start or both start and end of a frame
+        return (flags == (VideoPacket.FLAG_SOF | VideoPacket.FLAG_EOF) ||
+                flags == VideoPacket.FLAG_SOF);
+    }
+
     public void addInputData(VideoPacket packet)
     {
         // Load our reassembly descriptor
         packet.initializePayloadDescriptor(cachedReassemblyDesc);
 
-        // Runt packets get decoded using the slow path
-        // These packets stand alone so there's no need to verify
-        // sequencing before submitting
-        if (cachedReassemblyDesc.length < nominalPacketSize - VideoPacket.HEADER_SIZE) {
-            addInputDataSlow(packet, cachedReassemblyDesc);
-            return;
-        }
+        int flags = packet.getFlags();
 
         int frameIndex = packet.getFrameIndex();
-        int packetIndex = packet.getPacketIndex();
-        int packetsInFrame = packet.getTotalPackets();
+        boolean firstPacket = isFirstPacket(flags);
 
-        // We can use FEC to correct single packet errors
-        // on single packet frames because we just get a
-        // duplicate of the original packet
-        if (packetsInFrame == 1 && packetIndex == 1 &&
-            nextPacketNumber == 0 && frameIndex == nextFrameNumber) {
-            LimeLog.info("Using FEC for error correction");
-            nextPacketNumber = 1;
+        // Look for a frame start before receiving a frame end
+        if (firstPacket && decodingFrame)
+        {
+            LimeLog.warning("Network dropped end of a frame");
+            nextFrameNumber = frameIndex + 1;
+
+            // Unexpected start of next frame before terminating the last
+            waitingForNextSuccessfulFrame = true;
+
+            // Clear the old state and decode this frame
+            clearAvcFrameState();
         }
-        // Discard the rest of the FEC data until we know how to use it
-        else if (packetIndex >= packetsInFrame) {
-            return;
-        }
-
-        // Check that this is the next frame
-        boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0;
-        if (frameIndex > nextFrameNumber) {
-            // Nope, but we can still work with it if it's
-            // the start of the next frame
-            if (firstPacket) {
-                LimeLog.warning("Got start of frame "+frameIndex+
-                        " when expecting packet "+nextPacketNumber+
-                        " of frame "+nextFrameNumber);
-                nextFrameNumber = frameIndex;
-                nextPacketNumber = 0;
-                clearAvcFrameState();
-
-                // Tell the encoder when we're done decoding this frame
-                // that we lost some previous frames
-                waitingForNextSuccessfulFrame = true;
-                gotNextFrameStart = false;
-            }
-            else {
-                LimeLog.warning("Got packet "+packetIndex+" of frame "+frameIndex+
-                        " when expecting packet "+nextPacketNumber+
-                        " of frame "+nextFrameNumber);
-                // We dropped the start of this frame too
-                waitingForNextSuccessfulFrame = true;
-                gotNextFrameStart = false;
-
-                // Try to pickup on the next frame
-                nextFrameNumber = frameIndex + 1;
-                nextPacketNumber = 0;
-                clearAvcFrameState();
-                return;
-            }
-        }
-        else if (frameIndex < nextFrameNumber) {
-            LimeLog.info("Frame "+frameIndex+" is behind our current frame number "+nextFrameNumber);
-            // Discard the frame silently if it's behind our current sequence number
-            return;
-        }
-
-        // We know it's the right frame, now check the packet number
-        if (packetIndex != nextPacketNumber) {
-            LimeLog.warning("Frame "+frameIndex+": expected packet "+nextPacketNumber+" but got "+packetIndex);
-            // At this point, we're guaranteed that it's not FEC data that we lost
-            waitingForNextSuccessfulFrame = true;
-            gotNextFrameStart = false;
-
-            // Skip this frame
-            nextFrameNumber++;
-            nextPacketNumber = 0;
-            clearAvcFrameState();
-            return;
-        }
-
-        if (waitingForNextSuccessfulFrame) {
-            if (!gotNextFrameStart) {
-                if (!firstPacket) {
-                    // We're waiting for the next frame, but this one is a fragment of a frame
-                    // so we must discard it and wait for the next one
-                    LimeLog.warning("Expected start of frame "+frameIndex);
-
-                    nextFrameNumber = frameIndex + 1;
-                    nextPacketNumber = 0;
-                    clearAvcFrameState();
-                    return;
-                }
-                else {
-                    gotNextFrameStart = true;
-                }
-            }
-        }
+        // Look for a non-frame start before a frame start
+        else if (!firstPacket && !decodingFrame) {
+            // Check if this looks like a real frame
+            if (flags == VideoPacket.FLAG_CONTAINS_PIC_DATA ||
+                flags == VideoPacket.FLAG_EOF ||
+                cachedReassemblyDesc.length < nominalPacketDataLength)
+            {
+                LimeLog.warning("Network dropped beginning of a frame");
+                nextFrameNumber = frameIndex + 1;
+                waitingForNextSuccessfulFrame = true;
+                clearAvcFrameState();
+                return;
+            }
+            else {
+                // FEC data
+                return;
+            }
+        }
+        // Check sequencing of this frame to ensure we didn't
+        // miss one in between
+        else if (firstPacket) {
+            // Make sure this is the next consecutive frame
+            if (nextFrameNumber < frameIndex) {
+                LimeLog.warning("Network dropped an entire frame");
+                nextFrameNumber = frameIndex + 1;
+
+                // Decode this one and hope for the best
+                waitingForNextSuccessfulFrame = true;
+                clearAvcFrameState();
+            }
+            else if (nextFrameNumber > frameIndex){
+                // Duplicate packet or FEC dup
+                return;
+            }
+            else {
+                // This will be the next expected frame
+                nextFrameNumber = frameIndex + 1;
+            }
+
+            // We're now decoding a frame
+            decodingFrame = true;
+        }
 
         int streamPacketIndex = packet.getStreamPacketIndex();
         if (streamPacketIndex != (int)(lastPacketInStream + 1)) {
@@ -309,35 +284,23 @@ public class VideoDepacketizer {
         }
         lastPacketInStream = streamPacketIndex;
 
-        nextPacketNumber++;
-
-        // Remove extra padding
-        cachedReassemblyDesc.length = packet.getPayloadLength();
-
-        if (firstPacket)
-        {
-            if (NAL.getSpecialSequenceDescriptor(cachedReassemblyDesc, cachedSpecialDesc)
-                && NAL.isAvcFrameStart(cachedSpecialDesc)
-                && cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length] == 0x67)
-            {
-                // SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
-                clearAvcFrameState();
-                addInputDataSlow(packet, cachedReassemblyDesc);
-                return;
-            }
-        }
-
-        addInputDataFast(packet, cachedReassemblyDesc, firstPacket);
-
-        // We can't use the EOF flag here because real frames can be split across
-        // multiple "frames" when packetized to fit under the bandwidth ceiling
-        if (packetIndex + 1 >= packetsInFrame) {
-            nextFrameNumber++;
-            nextPacketNumber = 0;
-        }
-
-        if ((packet.getFlags() & VideoPacket.FLAG_EOF) != 0) {
+        if (firstPacket
+            && NAL.getSpecialSequenceDescriptor(cachedReassemblyDesc, cachedSpecialDesc)
+            && NAL.isAvcFrameStart(cachedSpecialDesc)
+            && cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length] == 0x67)
+        {
+            // SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
+            addInputDataSlow(packet, cachedReassemblyDesc);
+        }
+        else
+        {
+            // Everything else can take the fast path
+            addInputDataFast(packet, cachedReassemblyDesc, firstPacket);
+        }
 
+        if ((flags & VideoPacket.FLAG_EOF) != 0) {
             reassembleAvcFrame(packet.getFrameIndex());
+            decodingFrame = false;
 
             if (waitingForNextSuccessfulFrame) {
                 // This is the next successful frame after a loss event
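Note (not part of the commit): the rework above drops the per-frame packetIndex/totalPackets counters and instead derives frame boundaries from the SOF/EOF flag bits plus a decodingFrame boolean. Below is a standalone sketch of the new isFirstPacket() test, using the flag values from the VideoPacket changes further down; the class name is illustrative only.

public class FrameBoundarySketch {
    static final int FLAG_CONTAINS_PIC_DATA = 0x1;
    static final int FLAG_EOF = 0x2;
    static final int FLAG_SOF = 0x4;

    // A packet starts a frame if, once the picture-data bit is masked off,
    // only SOF (or SOF together with EOF, for single-packet frames) remains.
    static boolean isFirstPacket(int flags) {
        flags &= ~FLAG_CONTAINS_PIC_DATA;
        return flags == (FLAG_SOF | FLAG_EOF) || flags == FLAG_SOF;
    }

    public static void main(String[] args) {
        System.out.println(isFirstPacket(FLAG_SOF | FLAG_CONTAINS_PIC_DATA));            // true: frame start
        System.out.println(isFirstPacket(FLAG_SOF | FLAG_EOF | FLAG_CONTAINS_PIC_DATA)); // true: single-packet frame
        System.out.println(isFirstPacket(FLAG_EOF | FLAG_CONTAINS_PIC_DATA));            // false: frame end
        System.out.println(isFirstPacket(FLAG_CONTAINS_PIC_DATA));                       // false: middle of a frame
    }
}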
VideoPacket.java

@@ -13,16 +13,14 @@ public class VideoPacket {
     private int dataOffset;
 
     private int frameIndex;
-    private int packetIndex;
-    private int totalPackets;
-    private int payloadLength;
     private int flags;
     private int streamPacketIndex;
 
+    public static final int FLAG_CONTAINS_PIC_DATA = 0x1;
     public static final int FLAG_EOF = 0x2;
     public static final int FLAG_SOF = 0x4;
 
-    public static final int HEADER_SIZE = 56;
+    public static final int HEADER_SIZE = 16;
 
     public VideoPacket(byte[] buffer)
     {
@@ -36,12 +34,9 @@ public class VideoPacket {
         byteBuffer.rewind();
 
         // Read the video header fields
+        streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
         frameIndex = byteBuffer.getInt();
-        packetIndex = byteBuffer.getInt();
-        totalPackets = byteBuffer.getInt();
-        flags = byteBuffer.getInt();
-        payloadLength = byteBuffer.getInt();
-        streamPacketIndex = byteBuffer.getInt();
+        flags = byteBuffer.getInt() & 0xFF;
 
         // Data offset without the RTP header
         dataOffset = HEADER_SIZE;
@@ -53,18 +48,15 @@ public class VideoPacket {
     public void initializeWithLength(int length)
     {
         // Skip the RTP header
-        byteBuffer.position(RtpPacket.HEADER_SIZE);
+        byteBuffer.position(RtpPacket.MAX_HEADER_SIZE);
 
         // Read the video header fields
+        streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
         frameIndex = byteBuffer.getInt();
-        packetIndex = byteBuffer.getInt();
-        totalPackets = byteBuffer.getInt();
-        flags = byteBuffer.getInt();
-        payloadLength = byteBuffer.getInt();
-        streamPacketIndex = byteBuffer.getInt();
+        flags = byteBuffer.getInt() & 0xFF;
 
         // Data offset includes the RTP header
-        dataOffset = RtpPacket.HEADER_SIZE + HEADER_SIZE;
+        dataOffset = RtpPacket.MAX_HEADER_SIZE + HEADER_SIZE;
 
         // Update descriptor length
         buffer.length = length;
@@ -80,21 +72,6 @@ public class VideoPacket {
         return frameIndex;
     }
 
-    public int getPacketIndex()
-    {
-        return packetIndex;
-    }
-
-    public int getPayloadLength()
-    {
-        return payloadLength;
-    }
-
-    public int getTotalPackets()
-    {
-        return totalPackets;
-    }
-
     public int getStreamPacketIndex()
     {
         return streamPacketIndex;
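Note (not part of the commit): a standalone sketch of the new 16-byte video header reads shown above. The field layout (24-bit stream packet index in the upper bytes of the first word, then the frame index, then flags in the low byte of the third word) is taken from the diff; the byte order here is Java's ByteBuffer default and is only meant to keep the example self-consistent, since the real class reads with whatever order its byteBuffer is configured to use. All values are made up.

import java.nio.ByteBuffer;

public class VideoHeaderSketch {
    public static void main(String[] args) {
        // Build a fake 16-byte header: streamPacketIndex=42, frameIndex=7, flags=SOF|CONTAINS_PIC_DATA (0x5).
        ByteBuffer byteBuffer = ByteBuffer.allocate(16);
        byteBuffer.putInt(42 << 8);  // stream packet index occupies the upper 24 bits
        byteBuffer.putInt(7);        // frame index
        byteBuffer.putInt(0x5);      // flags live in the low byte
        byteBuffer.putInt(0);        // remaining header bytes, not read by the parser
        byteBuffer.rewind();

        // The reads below mirror the new constructor/initializeWithLength() logic.
        int streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
        int frameIndex = byteBuffer.getInt();
        int flags = byteBuffer.getInt() & 0xFF;

        System.out.println(streamPacketIndex + " " + frameIndex + " " + flags); // 42 7 5
    }
}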
VideoStream.java

@@ -194,7 +194,7 @@ public class VideoStream {
     int ringIndex = 0;
 
     // Preinitialize the ring buffer
-    int requiredBufferSize = streamConfig.getMaxPacketSize() + RtpPacket.HEADER_SIZE;
+    int requiredBufferSize = streamConfig.getMaxPacketSize() + RtpPacket.MAX_HEADER_SIZE;
     for (int i = 0; i < VIDEO_RING_SIZE; i++) {
         ring[i] = new VideoPacket(new byte[requiredBufferSize]);
     }
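Note (not part of the commit): with getMaxPacketSize() pinned at 1024 and the RTP header reserved at its 16-byte maximum, each ring entry works out to 1040 bytes. A short worked example using only constants visible in this commit; the class name is illustrative.

public class RingSizingSketch {
    public static void main(String[] args) {
        int maxPacketSize = 1024;  // StreamConfiguration.getMaxPacketSize()
        int maxRtpHeader = 16;     // RtpPacket.MAX_HEADER_SIZE
        System.out.println(maxPacketSize + maxRtpHeader); // 1040 bytes per VideoPacket ring buffer entry
    }
}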