Further optimize the JNI code for faster H264 decoding. Add an experimental RenderScript renderer.

Cameron Gutman 2013-11-29 21:06:35 -06:00
parent f4cf83012e
commit b0bb8b685c
12 changed files with 347 additions and 141 deletions
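For orientation, the reworked CPU path now separates decoding from rendering: the Java side pads its input buffer, feeds packets to decode(), and then either pulls a converted RGBA frame for the new RenderScript renderer or a raw NV21 frame. Below is a minimal sketch of the intended call sequence, mirroring what CpuDecoderRenderer does in this commit; receiveNalUnit(), width, height, and rsRenderer are illustrative stand-ins, not part of the change:

    // Hypothetical driver loop for the reworked CPU decoder (sketch only)
    byte[] nalUnit = receiveNalUnit(); // assumed H.264 data source
    int padding = AvcDecoder.getInputPaddingSize();

    // The native decoder now reads directly from the caller's buffer,
    // so the buffer must include FFmpeg's required input padding
    byte[] padded = new byte[nalUnit.length + padding];
    System.arraycopy(nalUnit, 0, padded, 0, nalUnit.length);
    AvcDecoder.decode(padded, 0, nalUnit.length);

    // Pull the converted RGBA frame and hand it to the RenderScript renderer
    byte[] frameBuffer = new byte[width * height * 4];
    if (AvcDecoder.getRgbFrame(frameBuffer, frameBuffer.length)) {
        rsRenderer.render(frameBuffer);
    }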


@@ -8,14 +8,20 @@
 #include <jni.h>
 #include <android/native_window_jni.h>
 
+// General decoder and renderer state
+AVPacket pkt;
 AVCodec* decoder;
 AVCodecContext* decoder_ctx;
 AVFrame* yuv_frame;
-AVFrame* rgb_frame;
 AVFrame* dec_frame;
 pthread_mutex_t mutex;
+
+// Color conversion and rendering
+AVFrame* rgb_frame;
 char* rgb_frame_buf;
 struct SwsContext* scaler_ctx;
+ANativeWindow* window;
 
 #define RENDER_PIX_FMT AV_PIX_FMT_RGBA
 #define BYTES_PER_PIXEL 4
@@ -32,6 +38,8 @@ struct SwsContext* scaler_ctx;
 #define BILINEAR_FILTERING 0x10
 // Uses a faster bilinear filtering with lower image quality
 #define FAST_BILINEAR_FILTERING 0x20
+// Disables color conversion (output is NV21)
+#define NO_COLOR_CONVERSION 0x40
 
 // This function must be called before
 // any other decoding functions
@@ -45,6 +53,8 @@ int nv_avc_init(int width, int height, int perf_lvl, int thread_count) {
     av_log_set_level(AV_LOG_QUIET);
     avcodec_register_all();
 
+    av_init_packet(&pkt);
+
     decoder = avcodec_find_decoder(AV_CODEC_ID_H264);
     if (decoder == NULL) {
         __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
@@ -99,6 +109,7 @@ int nv_avc_init(int width, int height, int perf_lvl, int thread_count) {
         return -1;
     }
 
+    if (!(perf_lvl & NO_COLOR_CONVERSION)) {
     rgb_frame = av_frame_alloc();
     if (rgb_frame == NULL) {
         __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
@@ -147,6 +158,7 @@ int nv_avc_init(int width, int height, int perf_lvl, int thread_count) {
             "Couldn't get scaler context");
         return -1;
     }
+    }
 
     return 0;
 }
@@ -179,14 +191,15 @@ void nv_avc_destroy(void) {
         av_free(rgb_frame_buf);
         rgb_frame_buf = NULL;
     }
+    if (window) {
+        ANativeWindow_release(window);
+        window = NULL;
+    }
     pthread_mutex_destroy(&mutex);
 }
 
-void nv_avc_redraw(JNIEnv *env, jobject surface) {
-    ANativeWindow* window;
-    ANativeWindow_Buffer buffer;
-    AVFrame *our_yuv_frame;
-    int err;
+static AVFrame* dequeue_new_frame(void) {
+    AVFrame *our_yuv_frame = NULL;
 
     pthread_mutex_lock(&mutex);
@@ -196,10 +209,22 @@ void nv_avc_redraw(JNIEnv *env, jobject surface) {
         // responsible for freeing it when we're done
        our_yuv_frame = yuv_frame;
        yuv_frame = NULL;
+    }
 
+    // The remaining processing can be done without the mutex
     pthread_mutex_unlock(&mutex);
 
+    return our_yuv_frame;
+}
+
+static int update_rgb_frame(void) {
+    AVFrame *our_yuv_frame;
+    int err;
+
+    our_yuv_frame = dequeue_new_frame();
+    if (our_yuv_frame == NULL) {
+        return 0;
+    }
+
     // Convert the YUV image to RGB
     err = sws_scale(scaler_ctx,
         our_yuv_frame->data,
@@ -208,65 +233,125 @@ void nv_avc_redraw(JNIEnv *env, jobject surface) {
         decoder_ctx->height,
         rgb_frame->data,
         rgb_frame->linesize);
+
+    av_frame_free(&our_yuv_frame);
+
     if (err != decoder_ctx->height) {
         __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
             "Scaling failed");
-        goto free_frame_and_return;
+        return 0;
     }
 
-    window = ANativeWindow_fromSurface(env, surface);
-    if (window == NULL) {
-        __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-            "Failed to get window from surface");
-        goto free_frame_and_return;
-    }
+    return 1;
+}
 
-    // Lock down a render buffer
-    if (ANativeWindow_lock(window, &buffer, NULL) >= 0) {
-        // Draw the frame to the buffer
-        err = avpicture_layout((AVPicture*)rgb_frame,
-            RENDER_PIX_FMT,
-            decoder_ctx->width,
-            decoder_ctx->height,
-            buffer.bits,
-            decoder_ctx->width *
-            decoder_ctx->height *
-            BYTES_PER_PIXEL);
-        if (err < 0) {
-            __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-                "Picture fill failed");
-        }
-
-        // Draw the frame to the surface
-        ANativeWindow_unlockAndPost(window);
-    }
-
-    ANativeWindow_release(window);
-
-free_frame_and_return:
-    av_frame_free(&our_yuv_frame);
-    }
-    else {
-        pthread_mutex_unlock(&mutex);
-    }
-}
+static int render_rgb_to_buffer(char* buffer, int size) {
+    int err;
+
+    // Draw the frame to the buffer
+    err = avpicture_layout((AVPicture*)rgb_frame,
+        RENDER_PIX_FMT,
+        decoder_ctx->width,
+        decoder_ctx->height,
+        buffer,
+        size);
+    if (err < 0) {
+        __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+            "Picture fill failed");
+        return 0;
+    }
+
+    return 1;
+}
+
+int nv_avc_get_raw_frame(char* buffer, int size) {
+    AVFrame *our_yuv_frame;
+    int err;
+
+    our_yuv_frame = dequeue_new_frame();
+    if (our_yuv_frame == NULL) {
+        return 0;
+    }
+
+    err = avpicture_layout((AVPicture*)our_yuv_frame,
+        decoder_ctx->pix_fmt,
+        decoder_ctx->width,
+        decoder_ctx->height,
+        buffer,
+        size);
+
+    av_frame_free(&our_yuv_frame);
+
+    return (err >= 0);
+}
+
+int nv_avc_get_rgb_frame(char* buffer, int size) {
+    return (update_rgb_frame() && render_rgb_to_buffer(buffer, size));
+}
+
+int nv_avc_set_render_target(JNIEnv *env, jobject surface) {
+    // Release the old window
+    if (window) {
+        ANativeWindow_release(window);
+        window = NULL;
+    }
+
+    // If no new surface was supplied, we're done
+    if (surface == NULL) {
+        return 1;
+    }
+
+    // Get a window from the surface
+    window = ANativeWindow_fromSurface(env, surface);
+    if (window == NULL) {
+        __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+            "Failed to get window from surface");
+        return 0;
+    }
+
+    return 1;
+}
+
+int nv_avc_redraw(void) {
+    ANativeWindow_Buffer buffer;
+    int ret = 0;
+
+    // Check if there's a new frame
+    if (update_rgb_frame()) {
+        // Lock down a render buffer
+        if (ANativeWindow_lock(window, &buffer, NULL) >= 0) {
+            // Draw the frame to the buffer
+            if (render_rgb_to_buffer(buffer.bits,
+                decoder_ctx->width *
+                decoder_ctx->height *
+                BYTES_PER_PIXEL)) {
+                // A new frame will be drawn
+                ret = 1;
+            }
+
+            // Draw the frame to the surface
+            ANativeWindow_unlockAndPost(window);
+        }
+    }
+
+    return ret;
+}
+
+int nv_avc_get_input_padding_size(void) {
+    return FF_INPUT_BUFFER_PADDING_SIZE;
+}
 
 // packets must be decoded in order
+// indata must be inlen + FF_INPUT_BUFFER_PADDING_SIZE in length
 int nv_avc_decode(unsigned char* indata, int inlen) {
     int err;
-    AVPacket pkt;
-    int got_pic;
+    int got_pic = 0;
 
-    err = av_new_packet(&pkt, inlen);
-    if (err < 0) {
-        __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-            "Failed to allocate packet");
-        return err;
-    }
-
-    memcpy(pkt.data, indata, inlen);
+    pkt.data = indata;
+    pkt.size = inlen;
 
     while (pkt.size > 0) {
-        got_pic = 0;
         err = avcodec_decode_video2(
             decoder_ctx,
             dec_frame,
@@ -275,10 +360,15 @@ int nv_avc_decode(unsigned char* indata, int inlen) {
         if (err < 0) {
             __android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
                 "Decode failed");
-            pthread_mutex_unlock(&mutex);
+            got_pic = 0;
             break;
         }
 
+        pkt.size -= err;
+        pkt.data += err;
+    }
+
+    // Only copy the picture at the end of decoding the packet
     if (got_pic) {
         pthread_mutex_lock(&mutex);
@@ -293,11 +383,5 @@ int nv_avc_decode(unsigned char* indata, int inlen) {
         pthread_mutex_unlock(&mutex);
     }
 
-        pkt.size -= err;
-        pkt.data += err;
-    }
-
-    av_free_packet(&pkt);
-
     return err < 0 ? err : 0;
 }


@@ -2,5 +2,12 @@
 int nv_avc_init(int width, int height, int perf_lvl, int thread_count);
 void nv_avc_destroy(void);
 
-void nv_avc_redraw(JNIEnv *env, jobject surface);
+int nv_avc_get_raw_frame(char* buffer, int size);
+int nv_avc_get_rgb_frame(char* buffer, int size);
+int nv_avc_set_render_target(JNIEnv *env, jobject surface);
+int nv_avc_redraw(void);
+int nv_avc_get_input_padding_size(void);
 
 int nv_avc_decode(unsigned char* indata, int inlen);


@@ -6,7 +6,7 @@
 // This function must be called before
 // any other decoding functions
 JNIEXPORT jint JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_init(JNIEnv *env, jobject this, jint width,
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_init(JNIEnv *env, jobject this, jint width,
     jint height, jint perflvl, jint threadcount)
 {
     return nv_avc_init(width, height, perflvl, threadcount);
@@ -15,20 +15,69 @@ Java_com_limelight_nvstream_av_video_AvcDecoder_init(JNIEnv *env, jobject this,
 // This function must be called after
 // decoding is finished
 JNIEXPORT void JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_destroy(JNIEnv *env, jobject this) {
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_destroy(JNIEnv *env, jobject this) {
     nv_avc_destroy();
 }
 
+// fills the output buffer with a raw YUV frame
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_getRawFrame(
+    JNIEnv *env, jobject this, // JNI parameters
+    jbyteArray outdata, jint outlen) // Output data
+{
+    jint ret;
+    jbyte* jni_output_data;
+
+    jni_output_data = (*env)->GetByteArrayElements(env, outdata, 0);
+
+    ret = nv_avc_get_raw_frame(jni_output_data, outlen);
+
+    (*env)->ReleaseByteArrayElements(env, outdata, jni_output_data, 0);
+
+    return ret != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
+// fills the output buffer with an RGB frame
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_getRgbFrame(
+    JNIEnv *env, jobject this, // JNI parameters
+    jbyteArray outdata, jint outlen) // Output data
+{
+    jint ret;
+    jbyte* jni_output_data;
+
+    jni_output_data = (*env)->GetByteArrayElements(env, outdata, 0);
+
+    ret = nv_avc_get_rgb_frame(jni_output_data, outlen);
+
+    (*env)->ReleaseByteArrayElements(env, outdata, jni_output_data, 0);
+
+    return ret != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
+// This function sets the rendering target for redraw
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_setRenderTarget(JNIEnv *env, jobject this, jobject surface) {
+    return nv_avc_set_render_target(env, surface) != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
 // This function redraws the surface
-JNIEXPORT void JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_redraw(JNIEnv *env, jobject this, jobject surface) {
-    nv_avc_redraw(env, surface);
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_redraw(JNIEnv *env, jobject this) {
+    return nv_avc_redraw() != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
+// This function returns the required input buffer padding
+JNIEXPORT jint JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_getInputPaddingSize(JNIEnv *env, jobject this) {
+    return nv_avc_get_input_padding_size();
 }
 
 // packets must be decoded in order
+// the input buffer must have proper padding
 // returns 0 on success, < 0 on error
 JNIEXPORT jint JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_decode(
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_decode(
     JNIEnv *env, jobject this, // JNI parameters
     jbyteArray indata, jint inoff, jint inlen)
 {
Binary file not shown.

Binary file not shown.


@@ -150,7 +150,7 @@ public class NvConnection {
     private boolean startVideoStream() throws IOException
     {
         videoStream = new NvVideoStream(hostAddr, listener, controlStream);
-        videoStream.startVideoStream(video, drFlags);
+        videoStream.startVideoStream(activity, video, drFlags);
         return true;
     }


@@ -17,10 +17,11 @@ import com.limelight.nvstream.av.AvRtpPacket;
 import com.limelight.nvstream.av.ConnectionStatusListener;
 import com.limelight.nvstream.av.video.AvVideoDepacketizer;
 import com.limelight.nvstream.av.video.AvVideoPacket;
-import com.limelight.nvstream.av.video.CpuDecoderRenderer;
 import com.limelight.nvstream.av.video.DecoderRenderer;
 import com.limelight.nvstream.av.video.MediaCodecDecoderRenderer;
+import com.limelight.nvstream.av.video.cpu.CpuDecoderRenderer;
 
+import android.content.Context;
 import android.os.Build;
 import android.view.SurfaceHolder;
 
@@ -129,7 +130,7 @@ public class NvVideoStream {
         rtp = new DatagramSocket(RTP_PORT);
     }
 
-    public void setupDecoderRenderer(SurfaceHolder renderTarget, int drFlags) {
+    public void setupDecoderRenderer(Context context, SurfaceHolder renderTarget, int drFlags) {
         if (Build.HARDWARE.equals("goldfish")) {
             // Emulator - don't render video (it's slow!)
             decrend = null;
@@ -144,14 +145,14 @@ public class NvVideoStream {
         }
 
         if (decrend != null) {
-            decrend.setup(1280, 720, renderTarget, drFlags);
+            decrend.setup(context, 1280, 720, renderTarget, drFlags);
         }
     }
 
-    public void startVideoStream(final SurfaceHolder surface, int drFlags) throws IOException
+    public void startVideoStream(Context context, SurfaceHolder surface, int drFlags) throws IOException
     {
         // Setup the decoder and renderer
-        setupDecoderRenderer(surface, drFlags);
+        setupDecoderRenderer(context, surface, drFlags);
 
         // Open RTP sockets and start session
         setupRtpSession();


@@ -2,12 +2,13 @@ package com.limelight.nvstream.av.video;
 
 import com.limelight.nvstream.av.AvDecodeUnit;
 
+import android.content.Context;
 import android.view.SurfaceHolder;
 
 public interface DecoderRenderer {
     public static int FLAG_PREFER_QUALITY = 0x1;
 
-    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags);
+    public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags);
 
     public void start();


@@ -8,6 +8,7 @@ import com.limelight.nvstream.av.AvByteBufferDescriptor;
 import com.limelight.nvstream.av.AvDecodeUnit;
 
 import android.annotation.TargetApi;
+import android.content.Context;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
@@ -73,7 +74,7 @@ public class MediaCodecDecoderRenderer implements DecoderRenderer {
     }
 
     @Override
-    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
+    public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags) {
         videoDecoder = MediaCodec.createByCodecName(findSafeDecoder().getName());
         MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);


@@ -1,4 +1,4 @@
-package com.limelight.nvstream.av.video;
+package com.limelight.nvstream.av.video.cpu;
 
 import android.view.Surface;
 
@@ -27,9 +27,20 @@ public class AvcDecoder {
     public static final int BILINEAR_FILTERING = 0x10;
     /** Uses a faster bilinear filtering with lower image quality */
     public static final int FAST_BILINEAR_FILTERING = 0x20;
+    /** Disables color conversion (output is NV21) */
+    public static final int NO_COLOR_CONVERSION = 0x40;
 
     public static native int init(int width, int height, int perflvl, int threadcount);
     public static native void destroy();
-    public static native void redraw(Surface surface);
+
+    // Rendering API when NO_COLOR_CONVERSION == 0
+    public static native boolean setRenderTarget(Surface surface);
+    public static native boolean getRgbFrame(byte[] rgbFrame, int bufferSize);
+    public static native boolean redraw();
+
+    // Rendering API when NO_COLOR_CONVERSION == 1
+    public static native boolean getRawFrame(byte[] yuvFrame, int bufferSize);
+
+    public static native int getInputPaddingSize();
     public static native int decode(byte[] indata, int inoff, int inlen);
 }
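The comments in AvcDecoder split the native interface into two mutually exclusive paths. A sketch of how a caller might select between them, assuming it remembers the perfLevel flags it passed to init(); usesNv21, width, height, and surface are illustrative stand-ins:

    boolean usesNv21 = (perfLevel & AvcDecoder.NO_COLOR_CONVERSION) != 0;
    if (usesNv21) {
        // Native side skips swscale; the caller receives the decoder's NV21 output
        byte[] yuv = new byte[width * height * 3 / 2]; // NV21 is 12 bits per pixel
        AvcDecoder.getRawFrame(yuv, yuv.length);
    }
    else {
        // Native side converts to RGBA; either pull the frame into a buffer...
        byte[] rgba = new byte[width * height * 4];
        AvcDecoder.getRgbFrame(rgba, rgba.length);
        // ...or (alternatively) point the native renderer at a Surface and let it draw
        // AvcDecoder.setRenderTarget(surface);
        // AvcDecoder.redraw();
    }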


@@ -1,4 +1,4 @@
-package com.limelight.nvstream.av.video;
+package com.limelight.nvstream.av.video.cpu;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -6,19 +6,24 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import android.view.Surface;
+import android.content.Context;
 import android.view.SurfaceHolder;
 
 import com.limelight.nvstream.av.AvByteBufferDescriptor;
 import com.limelight.nvstream.av.AvDecodeUnit;
+import com.limelight.nvstream.av.video.DecoderRenderer;
 
 public class CpuDecoderRenderer implements DecoderRenderer {
-    private Surface renderTarget;
-    private ByteBuffer decoderBuffer;
     private Thread rendererThread;
    private int targetFps;
+
+    private static final int DECODER_BUFFER_SIZE = 92*1024;
+    private ByteBuffer decoderBuffer;
+
+    private RsRenderer rsRenderer;
+    private byte[] frameBuffer;
 
     // Only sleep if the difference is above this value
     private static final int WAIT_CEILING_MS = 8;
@@ -76,8 +81,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
     }
 
     @Override
-    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
-        this.renderTarget = renderTarget.getSurface();
+    public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags) {
         this.targetFps = 30;
 
         int perfLevel = findOptimalPerformanceLevel();
@@ -111,6 +115,12 @@ public class CpuDecoderRenderer implements DecoderRenderer {
             break;
         }
 
+        // Create and initialize the RenderScript intrinsic we'll be using
+        rsRenderer = new RsRenderer(context, width, height, renderTarget.getSurface());
+
+        // Allocate the frame buffer that the RGBA frame will be copied into
+        frameBuffer = new byte[width*height*4];
+
         // If the user wants quality, we'll remove the low IQ flags
         if ((drFlags & DecoderRenderer.FLAG_PREFER_QUALITY) != 0) {
             // Make sure the loop filter is enabled
@@ -127,7 +137,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
             throw new IllegalStateException("AVC decoder initialization failure: "+err);
         }
 
-        decoderBuffer = ByteBuffer.allocate(92*1024);
+        decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + AvcDecoder.getInputPaddingSize());
 
         System.out.println("Using software decoding (performance level: "+perfLevel+")");
     }
@@ -152,7 +162,9 @@ public class CpuDecoderRenderer implements DecoderRenderer {
                 }
 
                 nextFrameTime = computePresentationTimeMs(targetFps);
-                AvcDecoder.redraw(renderTarget);
+                if (AvcDecoder.getRgbFrame(frameBuffer, frameBuffer.length)) {
+                    rsRenderer.render(frameBuffer);
+                }
             }
         }
     };
@@ -175,6 +187,10 @@ public class CpuDecoderRenderer implements DecoderRenderer {
     @Override
     public void release() {
+        if (rsRenderer != null) {
+            rsRenderer.release();
+        }
+
         AvcDecoder.destroy();
     }
 
@@ -183,7 +199,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
         byte[] data;
 
         // Use the reserved decoder buffer if this decode unit will fit
-        if (decodeUnit.getDataLength() <= decoderBuffer.limit()) {
+        if (decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE) {
             decoderBuffer.clear();
 
             for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
@@ -193,7 +209,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
             data = decoderBuffer.array();
         }
         else {
-            data = new byte[decodeUnit.getDataLength()];
+            data = new byte[decodeUnit.getDataLength()+AvcDecoder.getInputPaddingSize()];
             int offset = 0;
 
             for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {


@@ -0,0 +1,36 @@
+package com.limelight.nvstream.av.video.cpu;
+
+import android.content.Context;
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.RenderScript;
+import android.renderscript.Type;
+import android.view.Surface;
+
+public class RsRenderer {
+    private RenderScript rs;
+    private Allocation renderBuffer;
+
+    public RsRenderer(Context context, int width, int height, Surface renderTarget) {
+        rs = RenderScript.create(context);
+
+        Type.Builder tb = new Type.Builder(rs, Element.RGBA_8888(rs));
+        tb.setX(width);
+        tb.setY(height);
+        Type bufferType = tb.create();
+
+        renderBuffer = Allocation.createTyped(rs, bufferType, Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT);
+        renderBuffer.setSurface(renderTarget);
+    }
+
+    public void release() {
+        renderBuffer.setSurface(null);
+        renderBuffer.destroy();
+        rs.destroy();
+    }
+
+    public void render(byte[] rgbData) {
+        renderBuffer.copyFrom(rgbData);
+        renderBuffer.ioSend();
+    }
+}
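A short usage sketch of the new renderer, assuming a valid Context and an output Surface obtained from a SurfaceHolder (as CpuDecoderRenderer wires it up): create it once, feed it packed RGBA_8888 frames, and release it when done. Allocation.ioSend() is what posts the buffer to the Surface, which is why the allocation must be created with USAGE_IO_OUTPUT.

    RsRenderer renderer = new RsRenderer(context, 1280, 720, holder.getSurface());
    byte[] rgbaFrame = new byte[1280 * 720 * 4]; // one packed RGBA_8888 frame
    renderer.render(rgbaFrame); // copyFrom() uploads, ioSend() posts to the Surface
    renderer.release(); // detach the Surface and tear down the RenderScript context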