mirror of
https://github.com/moonlight-stream/moonlight-android.git
synced 2026-04-06 07:56:07 +00:00
Fix ffmpeg library loading. Create a DecoderRenderer interface and use it to move the MediaCodec code into for Qualcomm devices, the MediaCodec code now lives behind that interface and is selected only for Qualcomm devices.
This commit is contained in:
@@ -6,7 +6,6 @@ import java.net.DatagramPacket;
|
||||
import java.net.DatagramSocket;
|
||||
import java.net.Socket;
|
||||
import java.net.SocketException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.LinkedList;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
|
||||
@@ -15,13 +14,13 @@ import com.limelight.nvstream.av.AvDecodeUnit;
|
||||
import com.limelight.nvstream.av.AvRtpPacket;
|
||||
import com.limelight.nvstream.av.video.AvVideoDepacketizer;
|
||||
import com.limelight.nvstream.av.video.AvVideoPacket;
|
||||
import com.limelight.nvstream.av.video.CpuDecoderRenderer;
|
||||
import com.limelight.nvstream.av.video.DecoderRenderer;
|
||||
import com.limelight.nvstream.av.video.MediaCodecDecoderRenderer;
|
||||
|
||||
import jlibrtp.Participant;
|
||||
import jlibrtp.RTPSession;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodec.BufferInfo;
|
||||
import android.media.MediaFormat;
|
||||
import android.view.Surface;
|
||||
|
||||
public class NvVideoStream {
|
||||
@@ -29,20 +28,18 @@ public class NvVideoStream {
|
||||
public static final int RTCP_PORT = 47999;
|
||||
public static final int FIRST_FRAME_PORT = 47996;
|
||||
|
||||
private ByteBuffer[] videoDecoderInputBuffers;
|
||||
private MediaCodec videoDecoder;
|
||||
|
||||
private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>();
|
||||
|
||||
private RTPSession session;
|
||||
private DatagramSocket rtp, rtcp;
|
||||
private Socket firstFrameSocket;
|
||||
|
||||
|
||||
private LinkedList<Thread> threads = new LinkedList<Thread>();
|
||||
|
||||
private AvVideoDepacketizer depacketizer = new AvVideoDepacketizer();
|
||||
|
||||
private DecoderRenderer decrend;
|
||||
|
||||
private boolean aborting = false;
|
||||
|
||||
public void abort()
|
||||
@@ -81,8 +78,8 @@ public class NvVideoStream {
|
||||
if (session != null) {
|
||||
//session.endSession();
|
||||
}
|
||||
if (videoDecoder != null) {
|
||||
videoDecoder.release();
|
||||
if (decrend != null) {
|
||||
decrend.release();
|
||||
}
|
||||
|
||||
threads.clear();
|
||||
@@ -135,18 +132,23 @@ public class NvVideoStream {
|
||||
session.addParticipant(new Participant(host, RTP_PORT, RTCP_PORT));
|
||||
}
|
||||
|
||||
public void setupDecoders(Surface surface)
|
||||
{
|
||||
videoDecoder = MediaCodec.createDecoderByType("video/avc");
|
||||
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
|
||||
|
||||
videoDecoder.configure(videoFormat, surface, null, 0);
|
||||
|
||||
videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
|
||||
public void setupDecoderRenderer(Surface renderTarget) {
|
||||
boolean requiresCpuDecoding = true;
|
||||
|
||||
videoDecoder.start();
|
||||
|
||||
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
|
||||
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
|
||||
if (MediaCodecDecoderRenderer.hasWhitelistedDecoder()) {
|
||||
requiresCpuDecoding = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (requiresCpuDecoding) {
|
||||
decrend = new CpuDecoderRenderer();
|
||||
}
|
||||
else {
|
||||
decrend = new MediaCodecDecoderRenderer();
|
||||
}
|
||||
|
||||
decrend.setup(1280, 720, renderTarget);
|
||||
}
|
||||
|
||||
public void startVideoStream(final String host, final Surface surface)
|
||||
@@ -155,8 +157,8 @@ public class NvVideoStream {
|
||||
Thread t = new Thread() {
|
||||
@Override
|
||||
public void run() {
|
||||
// Setup the decoder context
|
||||
setupDecoders(surface);
|
||||
// Setup the decoder and renderer
|
||||
setupDecoderRenderer(surface);
|
||||
|
||||
// Open RTP sockets and start session
|
||||
try {
|
||||
@@ -189,8 +191,7 @@ public class NvVideoStream {
|
||||
// Start decoding the data we're receiving
|
||||
startDecoderThread();
|
||||
|
||||
// Render the frames that are coming out of the decoder
|
||||
outputDisplayLoop(this);
|
||||
decrend.start();
|
||||
}
|
||||
};
|
||||
threads.add(t);
|
||||
@@ -199,7 +200,6 @@ public class NvVideoStream {
|
||||
|
||||
private void startDecoderThread()
|
||||
{
|
||||
// Decoder thread
|
||||
Thread t = new Thread() {
|
||||
@Override
|
||||
public void run() {
|
||||
@@ -207,6 +207,7 @@ public class NvVideoStream {
|
||||
while (!isInterrupted())
|
||||
{
|
||||
AvDecodeUnit du;
|
||||
|
||||
try {
|
||||
du = depacketizer.getNextDecodeUnit();
|
||||
} catch (InterruptedException e) {
|
||||
@@ -214,46 +215,9 @@ public class NvVideoStream {
|
||||
return;
|
||||
}
|
||||
|
||||
switch (du.getType())
|
||||
{
|
||||
case AvDecodeUnit.TYPE_H264:
|
||||
{
|
||||
// Wait for an input buffer or thread termination
|
||||
while (!isInterrupted())
|
||||
{
|
||||
int inputIndex = videoDecoder.dequeueInputBuffer(100);
|
||||
if (inputIndex >= 0)
|
||||
{
|
||||
ByteBuffer buf = videoDecoderInputBuffers[inputIndex];
|
||||
|
||||
// Clear old input data
|
||||
buf.clear();
|
||||
|
||||
// Copy data from our buffer list into the input buffer
|
||||
for (AvByteBufferDescriptor desc : du.getBufferList())
|
||||
{
|
||||
buf.put(desc.data, desc.offset, desc.length);
|
||||
}
|
||||
|
||||
depacketizer.releaseDecodeUnit(du);
|
||||
|
||||
videoDecoder.queueInputBuffer(inputIndex,
|
||||
0, du.getDataLength(),
|
||||
0, du.getFlags());
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
decrend.submitDecodeUnit(du);
|
||||
|
||||
default:
|
||||
{
|
||||
System.err.println("Unknown decode unit type");
|
||||
abort();
|
||||
return;
|
||||
}
|
||||
}
|
||||
depacketizer.releaseDecodeUnit(du);
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -351,43 +315,4 @@ public class NvVideoStream {
|
||||
threads.add(t);
|
||||
t.start();
|
||||
}
|
||||
|
||||
private void outputDisplayLoop(Thread t)
|
||||
{
|
||||
long nextFrameTimeUs = 0;
|
||||
while (!t.isInterrupted())
|
||||
{
|
||||
BufferInfo info = new BufferInfo();
|
||||
int outIndex = videoDecoder.dequeueOutputBuffer(info, 100);
|
||||
switch (outIndex) {
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
System.out.println("Output buffers changed");
|
||||
break;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
System.out.println("Output format changed");
|
||||
System.out.println("New output Format: " + videoDecoder.getOutputFormat());
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (outIndex >= 0) {
|
||||
boolean render = false;
|
||||
|
||||
if (currentTimeUs() >= nextFrameTimeUs) {
|
||||
render = true;
|
||||
nextFrameTimeUs = computePresentationTime(60);
|
||||
}
|
||||
|
||||
videoDecoder.releaseOutputBuffer(outIndex, render);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static long currentTimeUs() {
|
||||
return System.nanoTime() / 1000;
|
||||
}
|
||||
|
||||
private long computePresentationTime(int frameRate) {
|
||||
return currentTimeUs() + (1000000 / frameRate);
|
||||
}
|
||||
}
|
||||
|
||||
51
src/com/limelight/nvstream/av/video/CpuDecoderRenderer.java
Normal file
51
src/com/limelight/nvstream/av/video/CpuDecoderRenderer.java
Normal file
@@ -0,0 +1,51 @@
|
||||
package com.limelight.nvstream.av.video;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
import android.view.Surface;
|
||||
|
||||
import com.limelight.nvstream.av.AvByteBufferDescriptor;
|
||||
import com.limelight.nvstream.av.AvDecodeUnit;
|
||||
|
||||
public class CpuDecoderRenderer implements DecoderRenderer {
|
||||
|
||||
private Surface renderTarget;
|
||||
private ByteBuffer decoderBuffer;
|
||||
private Thread rendererThread;
|
||||
|
||||
@Override
|
||||
public void setup(int width, int height, Surface renderTarget) {
|
||||
this.renderTarget = renderTarget;
|
||||
|
||||
int err = AvcDecoder.init(width, height);
|
||||
if (err != 0) {
|
||||
//throw new IllegalStateException("AVC decoder initialization failure: "+err);
|
||||
}
|
||||
|
||||
decoderBuffer = ByteBuffer.allocate(128*1024);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
AvcDecoder.destroy();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void submitDecodeUnit(AvDecodeUnit decodeUnit) {
|
||||
decoderBuffer.clear();
|
||||
|
||||
for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
|
||||
decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
|
||||
}
|
||||
|
||||
//AvcDecoder.decode(decoderBuffer.array(), 0, decodeUnit.getDataLength());
|
||||
}
|
||||
}
|
||||
17
src/com/limelight/nvstream/av/video/DecoderRenderer.java
Normal file
17
src/com/limelight/nvstream/av/video/DecoderRenderer.java
Normal file
@@ -0,0 +1,17 @@
|
||||
package com.limelight.nvstream.av.video;
|
||||
|
||||
import com.limelight.nvstream.av.AvDecodeUnit;
|
||||
|
||||
import android.view.Surface;
|
||||
|
||||
/**
 * Common contract for a combined video decoder and renderer.
 *
 * Lifecycle: {@link #setup} is called once before any other method,
 * then {@link #start}; decode units are fed via {@link #submitDecodeUnit};
 * {@link #stop} halts rendering and {@link #release} frees resources.
 */
public interface DecoderRenderer {
	/**
	 * Initializes the decoder for the given video dimensions and
	 * output surface. Must be called before any other method.
	 */
	public void setup(int width, int height, Surface renderTarget);

	/** Begins rendering decoded frames to the surface given in setup(). */
	public void start();

	/** Stops rendering. The instance may not be restarted after this. */
	public void stop();

	/** Releases all decoder resources. Call after stop(). */
	public void release();

	/**
	 * Submits one decode unit for decoding. Implementations may block
	 * until the decoder can accept the data.
	 */
	public void submitDecodeUnit(AvDecodeUnit decodeUnit);
}
|
||||
@@ -0,0 +1,143 @@
|
||||
package com.limelight.nvstream.av.video;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
import com.limelight.nvstream.av.AvByteBufferDescriptor;
|
||||
import com.limelight.nvstream.av.AvDecodeUnit;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaFormat;
|
||||
import android.media.MediaCodec.BufferInfo;
|
||||
import android.os.Build;
|
||||
import android.view.Surface;
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
|
||||
public class MediaCodecDecoderRenderer implements DecoderRenderer {
|
||||
|
||||
private ByteBuffer[] videoDecoderInputBuffers;
|
||||
private MediaCodec videoDecoder;
|
||||
private Thread rendererThread;
|
||||
|
||||
public static boolean hasWhitelistedDecoder() {
|
||||
for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
|
||||
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
|
||||
|
||||
// Skip encoders
|
||||
if (codecInfo.isEncoder()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (codecInfo.getName().equalsIgnoreCase("omx.qcom.video.decoder.avc")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup(int width, int height, Surface renderTarget) {
|
||||
videoDecoder = MediaCodec.createDecoderByType("video/avc");
|
||||
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
|
||||
|
||||
videoDecoder.configure(videoFormat, renderTarget, null, 0);
|
||||
|
||||
videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
|
||||
|
||||
videoDecoder.start();
|
||||
|
||||
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
|
||||
}
|
||||
|
||||
private void startRendererThread()
|
||||
{
|
||||
rendererThread = new Thread() {
|
||||
@Override
|
||||
public void run() {
|
||||
long nextFrameTimeUs = 0;
|
||||
while (!isInterrupted())
|
||||
{
|
||||
BufferInfo info = new BufferInfo();
|
||||
int outIndex = videoDecoder.dequeueOutputBuffer(info, 100);
|
||||
switch (outIndex) {
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
System.out.println("Output buffers changed");
|
||||
break;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
System.out.println("Output format changed");
|
||||
System.out.println("New output Format: " + videoDecoder.getOutputFormat());
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (outIndex >= 0) {
|
||||
boolean render = false;
|
||||
|
||||
if (currentTimeUs() >= nextFrameTimeUs) {
|
||||
render = true;
|
||||
nextFrameTimeUs = computePresentationTime(60);
|
||||
}
|
||||
|
||||
videoDecoder.releaseOutputBuffer(outIndex, render);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
rendererThread.start();
|
||||
}
|
||||
|
||||
private static long currentTimeUs() {
|
||||
return System.nanoTime() / 1000;
|
||||
}
|
||||
|
||||
private long computePresentationTime(int frameRate) {
|
||||
return currentTimeUs() + (1000000 / frameRate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start() {
|
||||
startRendererThread();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
rendererThread.interrupt();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
if (videoDecoder != null) {
|
||||
videoDecoder.release();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void submitDecodeUnit(AvDecodeUnit decodeUnit) {
|
||||
if (decodeUnit.getType() != AvDecodeUnit.TYPE_H264) {
|
||||
System.err.println("Unknown decode unit type");
|
||||
return;
|
||||
}
|
||||
|
||||
int inputIndex = videoDecoder.dequeueInputBuffer(-1);
|
||||
if (inputIndex >= 0)
|
||||
{
|
||||
ByteBuffer buf = videoDecoderInputBuffers[inputIndex];
|
||||
|
||||
// Clear old input data
|
||||
buf.clear();
|
||||
|
||||
// Copy data from our buffer list into the input buffer
|
||||
for (AvByteBufferDescriptor desc : decodeUnit.getBufferList())
|
||||
{
|
||||
buf.put(desc.data, desc.offset, desc.length);
|
||||
}
|
||||
|
||||
videoDecoder.queueInputBuffer(inputIndex,
|
||||
0, decodeUnit.getDataLength(),
|
||||
0, decodeUnit.getFlags());
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user