Fix ffmpeg library loading. Create a DecoderRenderer interface and use it to move the MediaCodec code into a dedicated implementation for Qualcomm devices.

This commit is contained in:
Cameron Gutman 2013-11-19 02:49:33 -05:00
parent 2c2e713166
commit 0504bed5e9
49 changed files with 246 additions and 114 deletions

View File

@ -46,8 +46,6 @@ or to a theme attribute in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>na
}
public static final class string {
public static final int app_name=0x7f060000;
public static final int dummy_button=0x7f060002;
public static final int dummy_content=0x7f060003;
public static final int title_activity_game=0x7f060001;
}
public static final class style {

View File

@ -2,36 +2,36 @@ LOCAL_PATH:= $(call my-dir)
# Prebuilt ffmpeg shared libraries, one module per library.
# NOTE(review): each module carries two LOCAL_SRC_FILES assignments (the
# unversioned and the versioned .so); in make the later assignment wins,
# so the versioned prebuilts (-55, -2, -52, -3, -0) are what get packaged.
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavcodec.so
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavcodec-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavformat.so
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavformat-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libswscale.so
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libswscale-2.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavutil.so
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavutil-52.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavfilter.so
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavfilter-3.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
# NOTE(review): module name "libwsresample" looks like a typo for
# "libswresample" (the source file below is libswresample). Renaming would
# break any LOCAL_SHARED_LIBRARIES reference elsewhere — verify before fixing.
LOCAL_MODULE:= libwsresample
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libswresample.so
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libswresample-0.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -3,7 +3,5 @@
<string name="app_name">Limelight</string>
<string name="title_activity_game">Game</string>
<string name="dummy_button">Dummy Button</string>
<string name="dummy_content">DUMMY\nCONTENT</string>
</resources>

View File

@ -6,7 +6,6 @@ import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.Socket;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.concurrent.LinkedBlockingQueue;
@ -15,13 +14,13 @@ import com.limelight.nvstream.av.AvDecodeUnit;
import com.limelight.nvstream.av.AvRtpPacket;
import com.limelight.nvstream.av.video.AvVideoDepacketizer;
import com.limelight.nvstream.av.video.AvVideoPacket;
import com.limelight.nvstream.av.video.CpuDecoderRenderer;
import com.limelight.nvstream.av.video.DecoderRenderer;
import com.limelight.nvstream.av.video.MediaCodecDecoderRenderer;
import jlibrtp.Participant;
import jlibrtp.RTPSession;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.view.Surface;
public class NvVideoStream {
@ -29,20 +28,18 @@ public class NvVideoStream {
public static final int RTCP_PORT = 47999;
public static final int FIRST_FRAME_PORT = 47996;
private ByteBuffer[] videoDecoderInputBuffers;
private MediaCodec videoDecoder;
private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>();
private RTPSession session;
private DatagramSocket rtp, rtcp;
private Socket firstFrameSocket;
private LinkedList<Thread> threads = new LinkedList<Thread>();
private AvVideoDepacketizer depacketizer = new AvVideoDepacketizer();
private DecoderRenderer decrend;
private boolean aborting = false;
public void abort()
@ -81,8 +78,8 @@ public class NvVideoStream {
if (session != null) {
//session.endSession();
}
if (videoDecoder != null) {
videoDecoder.release();
if (decrend != null) {
decrend.release();
}
threads.clear();
@ -135,18 +132,23 @@ public class NvVideoStream {
session.addParticipant(new Participant(host, RTP_PORT, RTCP_PORT));
}
public void setupDecoders(Surface surface)
{
videoDecoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
public void setupDecoderRenderer(Surface renderTarget) {
boolean requiresCpuDecoding = true;
videoDecoder.configure(videoFormat, surface, null, 0);
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
if (MediaCodecDecoderRenderer.hasWhitelistedDecoder()) {
requiresCpuDecoding = false;
}
}
videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
if (requiresCpuDecoding) {
decrend = new CpuDecoderRenderer();
}
else {
decrend = new MediaCodecDecoderRenderer();
}
videoDecoder.start();
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
decrend.setup(1280, 720, renderTarget);
}
public void startVideoStream(final String host, final Surface surface)
@ -155,8 +157,8 @@ public class NvVideoStream {
Thread t = new Thread() {
@Override
public void run() {
// Setup the decoder context
setupDecoders(surface);
// Setup the decoder and renderer
setupDecoderRenderer(surface);
// Open RTP sockets and start session
try {
@ -189,8 +191,7 @@ public class NvVideoStream {
// Start decoding the data we're receiving
startDecoderThread();
// Render the frames that are coming out of the decoder
outputDisplayLoop(this);
decrend.start();
}
};
threads.add(t);
@ -199,7 +200,6 @@ public class NvVideoStream {
private void startDecoderThread()
{
// Decoder thread
Thread t = new Thread() {
@Override
public void run() {
@ -207,6 +207,7 @@ public class NvVideoStream {
while (!isInterrupted())
{
AvDecodeUnit du;
try {
du = depacketizer.getNextDecodeUnit();
} catch (InterruptedException e) {
@ -214,46 +215,9 @@ public class NvVideoStream {
return;
}
switch (du.getType())
{
case AvDecodeUnit.TYPE_H264:
{
// Wait for an input buffer or thread termination
while (!isInterrupted())
{
int inputIndex = videoDecoder.dequeueInputBuffer(100);
if (inputIndex >= 0)
{
ByteBuffer buf = videoDecoderInputBuffers[inputIndex];
decrend.submitDecodeUnit(du);
// Clear old input data
buf.clear();
// Copy data from our buffer list into the input buffer
for (AvByteBufferDescriptor desc : du.getBufferList())
{
buf.put(desc.data, desc.offset, desc.length);
}
depacketizer.releaseDecodeUnit(du);
videoDecoder.queueInputBuffer(inputIndex,
0, du.getDataLength(),
0, du.getFlags());
break;
}
}
}
break;
default:
{
System.err.println("Unknown decode unit type");
abort();
return;
}
}
depacketizer.releaseDecodeUnit(du);
}
}
};
@ -351,43 +315,4 @@ public class NvVideoStream {
threads.add(t);
t.start();
}
private void outputDisplayLoop(Thread t)
{
long nextFrameTimeUs = 0;
while (!t.isInterrupted())
{
BufferInfo info = new BufferInfo();
int outIndex = videoDecoder.dequeueOutputBuffer(info, 100);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
System.out.println("Output buffers changed");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
System.out.println("Output format changed");
System.out.println("New output Format: " + videoDecoder.getOutputFormat());
break;
default:
break;
}
if (outIndex >= 0) {
boolean render = false;
if (currentTimeUs() >= nextFrameTimeUs) {
render = true;
nextFrameTimeUs = computePresentationTime(60);
}
videoDecoder.releaseOutputBuffer(outIndex, render);
}
}
}
private static long currentTimeUs() {
return System.nanoTime() / 1000;
}
private long computePresentationTime(int frameRate) {
return currentTimeUs() + (1000000 / frameRate);
}
}

View File

@ -0,0 +1,51 @@
package com.limelight.nvstream.av.video;
import java.nio.ByteBuffer;
import android.view.Surface;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
/**
 * DecoderRenderer backed by the native software (CPU) AVC decoder
 * ({@link AvcDecoder}). Selected when no whitelisted hardware decoder is
 * available on the device.
 *
 * NOTE(review): this implementation is work-in-progress — the actual decode
 * call and the render loop are not wired up yet (see commented-out code below).
 */
public class CpuDecoderRenderer implements DecoderRenderer {

	// Target surface for decoded frames; stored but not yet drawn to here.
	private Surface renderTarget;
	// Staging buffer used to assemble a decode unit into contiguous memory.
	private ByteBuffer decoderBuffer;
	// Declared but never started in this class — presumably reserved for a
	// future render loop; TODO confirm.
	private Thread rendererThread;

	/**
	 * Initializes the native AVC decoder for the given dimensions and
	 * allocates the staging buffer.
	 */
	@Override
	public void setup(int width, int height, Surface renderTarget) {
		this.renderTarget = renderTarget;

		int err = AvcDecoder.init(width, height);
		if (err != 0) {
			// NOTE(review): initialization failure is deliberately swallowed
			// for now — the throw is commented out, so a failed native init
			// only surfaces later. Consider re-enabling once decoding works.
			//throw new IllegalStateException("AVC decoder initialization failure: "+err);
		}

		// 128 KB staging buffer for incoming H.264 data.
		decoderBuffer = ByteBuffer.allocate(128*1024);
	}

	/** No-op: the CPU render loop is not implemented yet. */
	@Override
	public void start() {
	}

	/** No-op: nothing is started, so there is nothing to stop. */
	@Override
	public void stop() {
	}

	/** Tears down the native decoder. */
	@Override
	public void release() {
		AvcDecoder.destroy();
	}

	/**
	 * Copies the decode unit's buffer list into the contiguous staging buffer.
	 * The actual native decode call is still disabled.
	 */
	@Override
	public void submitDecodeUnit(AvDecodeUnit decodeUnit) {
		decoderBuffer.clear();

		for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
			decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
		}

		//AvcDecoder.decode(decoderBuffer.array(), 0, decodeUnit.getDataLength());
	}
}

View File

@ -0,0 +1,17 @@
package com.limelight.nvstream.av.video;
import com.limelight.nvstream.av.AvDecodeUnit;
import android.view.Surface;
/**
 * Contract for a component that decodes incoming video decode units and
 * renders the resulting frames to a target surface.
 */
public interface DecoderRenderer {

	/** Initializes the decoder for the given video dimensions and render target. */
	void setup(int width, int height, Surface renderTarget);

	/** Begins rendering decoded output. */
	void start();

	/** Halts rendering. */
	void stop();

	/** Frees decoder resources. */
	void release();

	/** Submits one unit of compressed video data for decoding. */
	void submitDecodeUnit(AvDecodeUnit decodeUnit);
}

View File

@ -0,0 +1,143 @@
package com.limelight.nvstream.av.video;
import java.nio.ByteBuffer;
import com.limelight.nvstream.av.AvByteBufferDescriptor;
import com.limelight.nvstream.av.AvDecodeUnit;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaCodec.BufferInfo;
import android.os.Build;
import android.view.Surface;
/**
 * DecoderRenderer backed by Android's hardware {@link MediaCodec} AVC decoder,
 * rendering directly into the supplied {@link Surface}. Only used on devices
 * whose decoder passes {@link #hasWhitelistedDecoder()}.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public class MediaCodecDecoderRenderer implements DecoderRenderer {

	private ByteBuffer[] videoDecoderInputBuffers;
	private MediaCodec videoDecoder;
	private Thread rendererThread;

	/**
	 * Returns true if this device exposes a hardware AVC decoder known to work
	 * with this renderer. Currently only the Qualcomm decoder
	 * ("OMX.qcom.video.decoder.avc") is whitelisted.
	 */
	public static boolean hasWhitelistedDecoder() {
		for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
			MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

			// Skip encoders; we only care about decoders
			if (codecInfo.isEncoder()) {
				continue;
			}

			if (codecInfo.getName().equalsIgnoreCase("omx.qcom.video.decoder.avc")) {
				return true;
			}
		}

		return false;
	}

	/**
	 * Creates, configures, and starts an AVC decoder that outputs to the given
	 * surface, then caches the decoder's input buffer array for later
	 * submission via {@link #submitDecodeUnit(AvDecodeUnit)}.
	 */
	@Override
	public void setup(int width, int height, Surface renderTarget) {
		videoDecoder = MediaCodec.createDecoderByType("video/avc");
		MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);

		videoDecoder.configure(videoFormat, renderTarget, null, 0);
		videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
		videoDecoder.start();

		videoDecoderInputBuffers = videoDecoder.getInputBuffers();
	}

	// Spawns the thread that drains decoder output and releases frames to the
	// surface, pacing rendered frames to roughly 60 FPS.
	private void startRendererThread()
	{
		rendererThread = new Thread() {
			@Override
			public void run() {
				long nextFrameTimeUs = 0;
				// Reuse a single BufferInfo instead of allocating one per
				// iteration; dequeueOutputBuffer fills it in each call.
				BufferInfo info = new BufferInfo();
				while (!isInterrupted())
				{
					int outIndex = videoDecoder.dequeueOutputBuffer(info, 100);
					switch (outIndex) {
					case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
						System.out.println("Output buffers changed");
						break;
					case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
						System.out.println("Output format changed");
						System.out.println("New output Format: " + videoDecoder.getOutputFormat());
						break;
					default:
						break;
					}

					if (outIndex >= 0) {
						// Drop (render=false) frames that arrive before the
						// next scheduled frame time to cap the render rate.
						boolean render = false;

						if (currentTimeUs() >= nextFrameTimeUs) {
							render = true;
							nextFrameTimeUs = computePresentationTime(60);
						}

						videoDecoder.releaseOutputBuffer(outIndex, render);
					}
				}
			}
		};
		// Name the thread so it is identifiable in debugger/ANR traces
		rendererThread.setName("MediaCodec Output Renderer");
		rendererThread.start();
	}

	// Monotonic clock in microseconds.
	private static long currentTimeUs() {
		return System.nanoTime() / 1000;
	}

	// Time at which the next frame should be rendered, for the given rate.
	private static long computePresentationTime(int frameRate) {
		return currentTimeUs() + (1000000 / frameRate);
	}

	/** Starts the output renderer thread. */
	@Override
	public void start() {
		startRendererThread();
	}

	/** Signals the renderer thread to exit. Safe to call before start(). */
	@Override
	public void stop() {
		// Guard against stop() being called before start() spawned the thread
		if (rendererThread != null) {
			rendererThread.interrupt();
		}
	}

	/** Releases the underlying MediaCodec. Call after stopping the renderer. */
	@Override
	public void release() {
		if (videoDecoder != null) {
			videoDecoder.release();
		}
	}

	/**
	 * Copies one H.264 decode unit into a decoder input buffer and queues it.
	 * Non-H.264 units are rejected with an error message.
	 *
	 * NOTE: dequeueInputBuffer(-1) blocks indefinitely until an input buffer
	 * is available, so this method can stall the calling (decoder) thread.
	 */
	@Override
	public void submitDecodeUnit(AvDecodeUnit decodeUnit) {
		if (decodeUnit.getType() != AvDecodeUnit.TYPE_H264) {
			System.err.println("Unknown decode unit type");
			return;
		}

		int inputIndex = videoDecoder.dequeueInputBuffer(-1);
		if (inputIndex >= 0)
		{
			ByteBuffer buf = videoDecoderInputBuffers[inputIndex];

			// Clear old input data
			buf.clear();

			// Copy data from our buffer list into the input buffer
			for (AvByteBufferDescriptor desc : decodeUnit.getBufferList())
			{
				buf.put(desc.data, desc.offset, desc.length);
			}

			videoDecoder.queueInputBuffer(inputIndex,
					0, decodeUnit.getDataLength(),
					0, decodeUnit.getFlags());
		}
	}
}