Further optimize the JNI code for faster H264 decoding. Add an experimental RenderScript renderer.

Cameron Gutman
2013-11-29 21:06:35 -06:00
parent f4cf83012e
commit b0bb8b685c
12 changed files with 347 additions and 141 deletions

NvConnection.java

@@ -150,7 +150,7 @@ public class NvConnection {
     private boolean startVideoStream() throws IOException
     {
         videoStream = new NvVideoStream(hostAddr, listener, controlStream);
-        videoStream.startVideoStream(video, drFlags);
+        videoStream.startVideoStream(activity, video, drFlags);
         return true;
     }

NvVideoStream.java

@@ -17,10 +17,11 @@ import com.limelight.nvstream.av.AvRtpPacket;
 import com.limelight.nvstream.av.ConnectionStatusListener;
 import com.limelight.nvstream.av.video.AvVideoDepacketizer;
 import com.limelight.nvstream.av.video.AvVideoPacket;
-import com.limelight.nvstream.av.video.CpuDecoderRenderer;
 import com.limelight.nvstream.av.video.DecoderRenderer;
 import com.limelight.nvstream.av.video.MediaCodecDecoderRenderer;
+import com.limelight.nvstream.av.video.cpu.CpuDecoderRenderer;
 
+import android.content.Context;
 import android.os.Build;
 import android.view.SurfaceHolder;
@@ -129,7 +130,7 @@ public class NvVideoStream {
         rtp = new DatagramSocket(RTP_PORT);
     }
 
-    public void setupDecoderRenderer(SurfaceHolder renderTarget, int drFlags) {
+    public void setupDecoderRenderer(Context context, SurfaceHolder renderTarget, int drFlags) {
         if (Build.HARDWARE.equals("goldfish")) {
             // Emulator - don't render video (it's slow!)
             decrend = null;
@@ -144,14 +145,14 @@ public class NvVideoStream {
         }
 
         if (decrend != null) {
-            decrend.setup(1280, 720, renderTarget, drFlags);
+            decrend.setup(context, 1280, 720, renderTarget, drFlags);
         }
     }
 
-    public void startVideoStream(final SurfaceHolder surface, int drFlags) throws IOException
+    public void startVideoStream(Context context, SurfaceHolder surface, int drFlags) throws IOException
     {
         // Setup the decoder and renderer
-        setupDecoderRenderer(surface, drFlags);
+        setupDecoderRenderer(context, surface, drFlags);
 
         // Open RTP sockets and start session
         setupRtpSession();
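The net effect of these signature changes is that an Android Context now travels from the UI layer down to DecoderRenderer.setup(), which the CPU path needs in order to create a RenderScript context. A minimal sketch of the resulting call chain, with variable names taken from the diff and the surrounding setup assumed:

    // Hedged sketch: 'activity' is the Activity (an android.content.Context)
    // held by NvConnection; 'video' is the SurfaceHolder to render into.
    NvVideoStream videoStream = new NvVideoStream(hostAddr, listener, controlStream);
    videoStream.startVideoStream(activity, video, drFlags);
    // ...which internally runs:
    //   setupDecoderRenderer(context, surface, drFlags);
    //   decrend.setup(context, 1280, 720, renderTarget, drFlags);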

DecoderRenderer.java

@@ -2,12 +2,13 @@ package com.limelight.nvstream.av.video;
 
 import com.limelight.nvstream.av.AvDecodeUnit;
 
+import android.content.Context;
 import android.view.SurfaceHolder;
 
 public interface DecoderRenderer {
     public static int FLAG_PREFER_QUALITY = 0x1;
 
-    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags);
+    public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags);
 
     public void start();

MediaCodecDecoderRenderer.java

@@ -8,6 +8,7 @@ import com.limelight.nvstream.av.AvByteBufferDescriptor;
 import com.limelight.nvstream.av.AvDecodeUnit;
 
 import android.annotation.TargetApi;
+import android.content.Context;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
@@ -73,7 +74,7 @@ public class MediaCodecDecoderRenderer implements DecoderRenderer {
     }
 
     @Override
-    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
+    public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags) {
         videoDecoder = MediaCodec.createByCodecName(findSafeDecoder().getName());
         MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);

AvcDecoder.java

@@ -1,4 +1,4 @@
-package com.limelight.nvstream.av.video;
+package com.limelight.nvstream.av.video.cpu;
 
 import android.view.Surface;
@@ -27,9 +27,20 @@ public class AvcDecoder {
     public static final int BILINEAR_FILTERING = 0x10;
     /** Uses a faster bilinear filtering with lower image quality */
     public static final int FAST_BILINEAR_FILTERING = 0x20;
+    /** Disables color conversion (output is NV21) */
+    public static final int NO_COLOR_CONVERSION = 0x40;
 
     public static native int init(int width, int height, int perflvl, int threadcount);
     public static native void destroy();
-    public static native void redraw(Surface surface);
+
+    // Rendering API when NO_COLOR_CONVERSION == 0
+    public static native boolean setRenderTarget(Surface surface);
+    public static native boolean getRgbFrame(byte[] rgbFrame, int bufferSize);
+    public static native boolean redraw();
+
+    // Rendering API when NO_COLOR_CONVERSION == 1
+    public static native boolean getRawFrame(byte[] yuvFrame, int bufferSize);
+
+    public static native int getInputPaddingSize();
+
     public static native int decode(byte[] indata, int inoff, int inlen);
 }
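The single push-style redraw(Surface) entry point is replaced by two pull-style paths, selected at init() time by the NO_COLOR_CONVERSION flag. A hedged usage sketch; the RGB sizing of 4 bytes per pixel matches the frameBuffer allocation in CpuDecoderRenderer below, while the NV21 sizing (12 bits per pixel) is standard for that format:

    // Converted path (NO_COLOR_CONVERSION not set): JNI hands back an RGB frame.
    byte[] rgbFrame = new byte[width * height * 4];
    if (AvcDecoder.getRgbFrame(rgbFrame, rgbFrame.length)) {
        // pass the frame to a renderer, e.g. the new RsRenderer below
    }

    // Raw path (NO_COLOR_CONVERSION passed among init()'s flags): NV21 comes back.
    byte[] yuvFrame = new byte[width * height * 3 / 2]; // NV21 = 12 bpp
    if (AvcDecoder.getRawFrame(yuvFrame, yuvFrame.length)) {
        // caller performs its own color conversion
    }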

CpuDecoderRenderer.java

@@ -1,4 +1,4 @@
-package com.limelight.nvstream.av.video;
+package com.limelight.nvstream.av.video.cpu;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -6,19 +6,24 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import android.view.Surface;
+import android.content.Context;
 import android.view.SurfaceHolder;
 
 import com.limelight.nvstream.av.AvByteBufferDescriptor;
 import com.limelight.nvstream.av.AvDecodeUnit;
+import com.limelight.nvstream.av.video.DecoderRenderer;
 
 public class CpuDecoderRenderer implements DecoderRenderer {
-    private Surface renderTarget;
-    private ByteBuffer decoderBuffer;
     private Thread rendererThread;
    private int targetFps;
 
+    private static final int DECODER_BUFFER_SIZE = 92*1024;
+    private ByteBuffer decoderBuffer;
+    private RsRenderer rsRenderer;
+    private byte[] frameBuffer;
+
     // Only sleep if the difference is above this value
     private static final int WAIT_CEILING_MS = 8;
@@ -76,8 +81,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
     }
 
     @Override
-    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
-        this.renderTarget = renderTarget.getSurface();
+    public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags) {
         this.targetFps = 30;
 
         int perfLevel = findOptimalPerformanceLevel();
@@ -111,6 +115,12 @@ public class CpuDecoderRenderer implements DecoderRenderer {
             break;
         }
 
+        // Create and initialize the RenderScript intrinsic we'll be using
+        rsRenderer = new RsRenderer(context, width, height, renderTarget.getSurface());
+
+        // Allocate the frame buffer that the RGBA frame will be copied into
+        frameBuffer = new byte[width*height*4];
+
         // If the user wants quality, we'll remove the low IQ flags
         if ((drFlags & DecoderRenderer.FLAG_PREFER_QUALITY) != 0) {
             // Make sure the loop filter is enabled
@@ -127,7 +137,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
             throw new IllegalStateException("AVC decoder initialization failure: "+err);
         }
 
-        decoderBuffer = ByteBuffer.allocate(92*1024);
+        decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + AvcDecoder.getInputPaddingSize());
 
         System.out.println("Using software decoding (performance level: "+perfLevel+")");
     }
@@ -152,7 +162,9 @@ public class CpuDecoderRenderer implements DecoderRenderer {
                 }
 
                 nextFrameTime = computePresentationTimeMs(targetFps);
-                AvcDecoder.redraw(renderTarget);
+                if (AvcDecoder.getRgbFrame(frameBuffer, frameBuffer.length)) {
+                    rsRenderer.render(frameBuffer);
+                }
             }
         }
     };
@@ -175,6 +187,10 @@ public class CpuDecoderRenderer implements DecoderRenderer {
     @Override
     public void release() {
+        if (rsRenderer != null) {
+            rsRenderer.release();
+        }
+
         AvcDecoder.destroy();
     }
@@ -183,7 +199,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
         byte[] data;
 
         // Use the reserved decoder buffer if this decode unit will fit
-        if (decodeUnit.getDataLength() <= decoderBuffer.limit()) {
+        if (decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE) {
             decoderBuffer.clear();
 
             for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
@@ -193,7 +209,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
             data = decoderBuffer.array();
         }
         else {
-            data = new byte[decodeUnit.getDataLength()];
+            data = new byte[decodeUnit.getDataLength()+AvcDecoder.getInputPaddingSize()];
 
             int offset = 0;
             for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
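The buffer-size changes are the JNI-optimization half of the commit: each decode unit is flattened into one array that is handed to the native decoder, and that array is now allocated with AvcDecoder.getInputPaddingSize() spare bytes at the end. This matches the contract of software decoders such as ffmpeg's libavcodec, which require zero-initialized tail padding (FF_INPUT_BUFFER_PADDING_SIZE) on input buffers so their optimized bitstream readers can overread safely; whether that is the exact constant behind getInputPaddingSize() is an assumption here. The invariant in sketch form:

    // Hedged sketch: the fit test compares against DECODER_BUFFER_SIZE rather
    // than the buffer's capacity, so the tail padding stays reserved for the
    // native decoder and is never written from Java. byte[] and
    // ByteBuffer.allocate() are zero-filled, satisfying a zeroed-padding
    // requirement automatically.
    int padding = AvcDecoder.getInputPaddingSize();
    ByteBuffer decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + padding);
    boolean fits = decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE;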

RsRenderer.java

@@ -0,0 +1,36 @@
+package com.limelight.nvstream.av.video.cpu;
+
+import android.content.Context;
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.RenderScript;
+import android.renderscript.Type;
+import android.view.Surface;
+
+public class RsRenderer {
+    private RenderScript rs;
+    private Allocation renderBuffer;
+
+    public RsRenderer(Context context, int width, int height, Surface renderTarget) {
+        rs = RenderScript.create(context);
+
+        Type.Builder tb = new Type.Builder(rs, Element.RGBA_8888(rs));
+        tb.setX(width);
+        tb.setY(height);
+        Type bufferType = tb.create();
+
+        renderBuffer = Allocation.createTyped(rs, bufferType, Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT);
+        renderBuffer.setSurface(renderTarget);
+    }
+
+    public void release() {
+        renderBuffer.setSurface(null);
+        renderBuffer.destroy();
+        rs.destroy();
+    }
+
+    public void render(byte[] rgbData) {
+        renderBuffer.copyFrom(rgbData);
+        renderBuffer.ioSend();
+    }
+}
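The experimental renderer is deliberately small: the USAGE_IO_OUTPUT Allocation is bound to the output Surface once in the constructor, and presenting a frame is just copyFrom() followed by ioSend(). A standalone usage sketch; in this commit the renderer is driven by CpuDecoderRenderer's render thread, so the dimensions and surrounding variables here are illustrative assumptions:

    RsRenderer renderer = new RsRenderer(context, 1280, 720, surfaceHolder.getSurface());
    byte[] rgbaFrame = new byte[1280 * 720 * 4]; // one RGBA_8888 frame
    // ... fill rgbaFrame, e.g. from AvcDecoder.getRgbFrame() ...
    renderer.render(rgbaFrame);  // copies into the Allocation and presents via ioSend()
    renderer.release();          // detaches the Surface and tears down the RS context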