mirror of https://github.com/moonlight-stream/moonlight-android.git
synced 2025-07-20 03:23:07 +00:00

Further optimize the JNI code for faster H264 decoding. Add an experimental RenderScript renderer.

This commit is contained in:
parent f4cf83012e
commit b0bb8b685c
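In outline, the commit replaces the native blit path (nv_avc_redraw drawing into an ANativeWindow via avpicture_layout) with a split API: the JNI layer copies the converted RGBA frame into a Java byte[] through getRgbFrame, and a new RsRenderer pushes that buffer to the Surface through a RenderScript Allocation. A minimal sketch of the resulting render loop, assuming the class names from the diff below (frame pacing and error handling omitted; CpuRenderPathSketch itself is hypothetical):

import android.content.Context;
import android.view.Surface;

import com.limelight.nvstream.av.video.cpu.AvcDecoder;
import com.limelight.nvstream.av.video.cpu.RsRenderer;

public class CpuRenderPathSketch {
	public static void renderLoop(Context context, Surface surface, int width, int height) {
		// RGBA output is 4 bytes per pixel (BYTES_PER_PIXEL in the native code)
		byte[] frameBuffer = new byte[width * height * 4];
		RsRenderer rsRenderer = new RsRenderer(context, width, height, surface);

		while (!Thread.currentThread().isInterrupted()) {
			// Copy the latest decoded frame, converted to RGBA, out of the JNI layer
			if (AvcDecoder.getRgbFrame(frameBuffer, frameBuffer.length)) {
				// Push it to the Surface through the RenderScript Allocation
				rsRenderer.render(frameBuffer);
			}
		}

		rsRenderer.release();
	}
}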
@@ -8,14 +8,20 @@
 #include <jni.h>
 #include <android/native_window_jni.h>
 
+// General decoder and renderer state
+AVPacket pkt;
 AVCodec* decoder;
 AVCodecContext* decoder_ctx;
 AVFrame* yuv_frame;
-AVFrame* rgb_frame;
 AVFrame* dec_frame;
 pthread_mutex_t mutex;
 
+// Color conversion and rendering
+AVFrame* rgb_frame;
 char* rgb_frame_buf;
 struct SwsContext* scaler_ctx;
+ANativeWindow* window;
+
 #define RENDER_PIX_FMT AV_PIX_FMT_RGBA
 #define BYTES_PER_PIXEL 4
@@ -32,6 +38,8 @@ struct SwsContext* scaler_ctx;
 #define BILINEAR_FILTERING 0x10
 // Uses a faster bilinear filtering with lower image quality
 #define FAST_BILINEAR_FILTERING 0x20
+// Disables color conversion (output is NV21)
+#define NO_COLOR_CONVERSION 0x40
 
 // This function must be called before
 // any other decoding functions
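The new NO_COLOR_CONVERSION flag skips the swscale stage entirely and hands back raw NV21 frames. A hedged Java-side sketch of initializing that path (AvcDecoder mirrors these defines in the diff further down; NV21 is a 12-bits-per-pixel format, so one frame needs width * height * 3 / 2 bytes; RawFramePathSketch itself is hypothetical):

import com.limelight.nvstream.av.video.cpu.AvcDecoder;

public class RawFramePathSketch {
	public static byte[] grabRawFrame(int width, int height, int threads) {
		// Skip the swscale RGB stage entirely; frames come back as NV21
		int perfFlags = AvcDecoder.NO_COLOR_CONVERSION;
		if (AvcDecoder.init(width, height, perfFlags, threads) != 0) {
			throw new IllegalStateException("AVC decoder initialization failed");
		}
		// NV21 packs a full-resolution Y plane plus an interleaved VU plane
		// at quarter resolution: 12 bits per pixel, or width * height * 3 / 2 bytes
		byte[] yuvFrame = new byte[width * height * 3 / 2];
		AvcDecoder.getRawFrame(yuvFrame, yuvFrame.length);
		return yuvFrame;
	}
}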
@@ -45,6 +53,8 @@ int nv_avc_init(int width, int height, int perf_lvl, int thread_count) {
 	av_log_set_level(AV_LOG_QUIET);
 	avcodec_register_all();
 
+	av_init_packet(&pkt);
+
 	decoder = avcodec_find_decoder(AV_CODEC_ID_H264);
 	if (decoder == NULL) {
 		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
@@ -99,53 +109,55 @@ int nv_avc_init(int width, int height, int perf_lvl, int thread_count) {
 		return -1;
 	}
 
-	rgb_frame = av_frame_alloc();
-	if (rgb_frame == NULL) {
-		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-			"Couldn't allocate frame");
-		return -1;
-	}
+	if (!(perf_lvl & NO_COLOR_CONVERSION)) {
+		rgb_frame = av_frame_alloc();
+		if (rgb_frame == NULL) {
+			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+				"Couldn't allocate frame");
+			return -1;
+		}
 
-	rgb_frame_buf = (char*)av_malloc(width * height * BYTES_PER_PIXEL);
-	if (rgb_frame_buf == NULL) {
-		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-			"Couldn't allocate picture");
-		return -1;
-	}
+		rgb_frame_buf = (char*)av_malloc(width * height * BYTES_PER_PIXEL);
+		if (rgb_frame_buf == NULL) {
+			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+				"Couldn't allocate picture");
+			return -1;
+		}
 
-	err = avpicture_fill((AVPicture*)rgb_frame,
-		rgb_frame_buf,
-		RENDER_PIX_FMT,
-		decoder_ctx->width,
-		decoder_ctx->height);
-	if (err < 0) {
-		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-			"Couldn't fill picture");
-		return err;
-	}
+		err = avpicture_fill((AVPicture*)rgb_frame,
+			rgb_frame_buf,
+			RENDER_PIX_FMT,
+			decoder_ctx->width,
+			decoder_ctx->height);
+		if (err < 0) {
+			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+				"Couldn't fill picture");
+			return err;
+		}
 
-	if (perf_lvl & FAST_BILINEAR_FILTERING) {
-		filtering = SWS_FAST_BILINEAR;
-	}
-	else if (perf_lvl & BILINEAR_FILTERING) {
-		filtering = SWS_BILINEAR;
-	}
-	else {
-		filtering = SWS_BICUBIC;
-	}
+		if (perf_lvl & FAST_BILINEAR_FILTERING) {
+			filtering = SWS_FAST_BILINEAR;
+		}
+		else if (perf_lvl & BILINEAR_FILTERING) {
+			filtering = SWS_BILINEAR;
+		}
+		else {
+			filtering = SWS_BICUBIC;
+		}
 
-	scaler_ctx = sws_getContext(decoder_ctx->width,
-		decoder_ctx->height,
-		decoder_ctx->pix_fmt,
-		decoder_ctx->width,
-		decoder_ctx->height,
-		RENDER_PIX_FMT,
-		filtering,
-		NULL, NULL, NULL);
-	if (scaler_ctx == NULL) {
-		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-			"Couldn't get scaler context");
-		return -1;
-	}
+		scaler_ctx = sws_getContext(decoder_ctx->width,
+			decoder_ctx->height,
+			decoder_ctx->pix_fmt,
+			decoder_ctx->width,
+			decoder_ctx->height,
+			RENDER_PIX_FMT,
+			filtering,
+			NULL, NULL, NULL);
+		if (scaler_ctx == NULL) {
+			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+				"Couldn't get scaler context");
+			return -1;
+		}
+	}
 
 	return 0;
@@ -179,14 +191,15 @@ void nv_avc_destroy(void) {
 		av_free(rgb_frame_buf);
 		rgb_frame_buf = NULL;
 	}
+	if (window) {
+		ANativeWindow_release(window);
+		window = NULL;
+	}
 	pthread_mutex_destroy(&mutex);
 }
 
-void nv_avc_redraw(JNIEnv *env, jobject surface) {
-	ANativeWindow* window;
-	ANativeWindow_Buffer buffer;
-	AVFrame *our_yuv_frame;
-	int err;
+static AVFrame* dequeue_new_frame(void) {
+	AVFrame *our_yuv_frame = NULL;
 
 	pthread_mutex_lock(&mutex);
 
@@ -196,77 +209,149 @@ void nv_avc_redraw(JNIEnv *env, jobject surface) {
 		// responsible for freeing it when we're done
 		our_yuv_frame = yuv_frame;
 		yuv_frame = NULL;
 	}
 
-		// The remaining processing can be done without the mutex
-		pthread_mutex_unlock(&mutex);
+	pthread_mutex_unlock(&mutex);
 
-		// Convert the YUV image to RGB
-		err = sws_scale(scaler_ctx,
-			our_yuv_frame->data,
-			our_yuv_frame->linesize,
-			0,
-			decoder_ctx->height,
-			rgb_frame->data,
-			rgb_frame->linesize);
-		if (err != decoder_ctx->height) {
-			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-				"Scaling failed");
-			goto free_frame_and_return;
-		}
+	return our_yuv_frame;
+}
 
-		window = ANativeWindow_fromSurface(env, surface);
-		if (window == NULL) {
-			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-				"Failed to get window from surface");
-			goto free_frame_and_return;
-		}
+static int update_rgb_frame(void) {
+	AVFrame *our_yuv_frame;
+	int err;
+
+	our_yuv_frame = dequeue_new_frame();
+	if (our_yuv_frame == NULL) {
+		return 0;
+	}
+
+	// Convert the YUV image to RGB
+	err = sws_scale(scaler_ctx,
+		our_yuv_frame->data,
+		our_yuv_frame->linesize,
+		0,
+		decoder_ctx->height,
+		rgb_frame->data,
+		rgb_frame->linesize);
+
+	av_frame_free(&our_yuv_frame);
+
+	if (err != decoder_ctx->height) {
+		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+			"Scaling failed");
+		return 0;
+	}
+
+	return 1;
+}
+
+static int render_rgb_to_buffer(char* buffer, int size) {
+	int err;
+
+	// Draw the frame to the buffer
+	err = avpicture_layout((AVPicture*)rgb_frame,
+		RENDER_PIX_FMT,
+		decoder_ctx->width,
+		decoder_ctx->height,
+		buffer,
+		size);
+	if (err < 0) {
+		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+			"Picture fill failed");
+		return 0;
+	}
+
+	return 1;
+}
+
+int nv_avc_get_raw_frame(char* buffer, int size) {
+	AVFrame *our_yuv_frame;
+	int err;
+
+	our_yuv_frame = dequeue_new_frame();
+	if (our_yuv_frame == NULL) {
+		return 0;
+	}
+
+	err = avpicture_layout((AVPicture*)our_yuv_frame,
+		decoder_ctx->pix_fmt,
+		decoder_ctx->width,
+		decoder_ctx->height,
+		buffer,
+		size);
+
+	av_frame_free(&our_yuv_frame);
+
+	return (err >= 0);
+}
+
+int nv_avc_get_rgb_frame(char* buffer, int size) {
+	return (update_rgb_frame() && render_rgb_to_buffer(buffer, size));
+}
+
+int nv_avc_set_render_target(JNIEnv *env, jobject surface) {
+	// Release the old window
+	if (window) {
+		ANativeWindow_release(window);
+		window = NULL;
+	}
+
+	// If no new surface was supplied, we're done
+	if (surface == NULL) {
+		return 1;
+	}
+
+	// Get a window from the surface
+	window = ANativeWindow_fromSurface(env, surface);
+	if (window == NULL) {
+		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
+			"Failed to get window from surface");
+		return 0;
+	}
+
+	return 1;
+}
+
+int nv_avc_redraw(void) {
+	ANativeWindow_Buffer buffer;
+	int ret = 0;
 
+	// Check if there's a new frame
+	if (update_rgb_frame()) {
 		// Lock down a render buffer
 		if (ANativeWindow_lock(window, &buffer, NULL) >= 0) {
 			// Draw the frame to the buffer
-			err = avpicture_layout((AVPicture*)rgb_frame,
-				RENDER_PIX_FMT,
-				decoder_ctx->width,
-				decoder_ctx->height,
-				buffer.bits,
+			if (render_rgb_to_buffer(buffer.bits,
 				decoder_ctx->width *
 				decoder_ctx->height *
-				BYTES_PER_PIXEL);
-			if (err < 0) {
-				__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-					"Picture fill failed");
+				BYTES_PER_PIXEL)) {
+				// A new frame will be drawn
+				ret = 1;
 			}
 
 			// Draw the frame to the surface
 			ANativeWindow_unlockAndPost(window);
 		}
-
-		ANativeWindow_release(window);
-
-free_frame_and_return:
-		av_frame_free(&our_yuv_frame);
-	}
-	else {
-		pthread_mutex_unlock(&mutex);
 	}
+
+	return ret;
 }
 
 int nv_avc_get_input_padding_size(void) {
 	return FF_INPUT_BUFFER_PADDING_SIZE;
 }
 
 // packets must be decoded in order
 // indata must be inlen + FF_INPUT_BUFFER_PADDING_SIZE in length
 int nv_avc_decode(unsigned char* indata, int inlen) {
 	int err;
-	AVPacket pkt;
-	int got_pic;
+	int got_pic = 0;
 
-	err = av_new_packet(&pkt, inlen);
-	if (err < 0) {
-		__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
-			"Failed to allocate packet");
-		return err;
-	}
-
-	memcpy(pkt.data, indata, inlen);
+	pkt.data = indata;
+	pkt.size = inlen;
 
 	while (pkt.size > 0) {
-		got_pic = 0;
 		err = avcodec_decode_video2(
 			decoder_ctx,
 			dec_frame,
@@ -275,29 +360,28 @@ int nv_avc_decode(unsigned char* indata, int inlen) {
 		if (err < 0) {
 			__android_log_write(ANDROID_LOG_ERROR, "NVAVCDEC",
 				"Decode failed");
-			pthread_mutex_unlock(&mutex);
+			got_pic = 0;
 			break;
 		}
 
-		if (got_pic) {
-			pthread_mutex_lock(&mutex);
-
-			// Only clone this frame if the last frame was taken.
-			// This saves on extra copies for frames that don't get
-			// rendered.
-			if (yuv_frame == NULL) {
-				// Clone a new frame
-				yuv_frame = av_frame_clone(dec_frame);
-			}
-
-			pthread_mutex_unlock(&mutex);
-		}
-
 		pkt.size -= err;
 		pkt.data += err;
 	}
 
-	av_free_packet(&pkt);
+	// Only copy the picture at the end of decoding the packet
+	if (got_pic) {
+		pthread_mutex_lock(&mutex);
+
+		// Only clone this frame if the last frame was taken.
+		// This saves on extra copies for frames that don't get
+		// rendered.
+		if (yuv_frame == NULL) {
+			// Clone a new frame
+			yuv_frame = av_frame_clone(dec_frame);
+		}
+
+		pthread_mutex_unlock(&mutex);
+	}
 
 	return err < 0 ? err : 0;
 }
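The clone-only-if-NULL logic above is a single-slot, latest-frame-wins handoff between the decoder thread and the renderer: a frame is cloned only when the renderer has already taken the previous one, so frames that are never rendered are never copied. The same pattern in Java, as an illustrative sketch (FrameMailbox is hypothetical, not part of this commit):

import java.util.concurrent.atomic.AtomicReference;

// Single-slot mailbox: the producer only publishes when the previous
// frame has been consumed, so unrendered frames are never copied twice
public class FrameMailbox<T> {
	private final AtomicReference<T> slot = new AtomicReference<>();

	// Decoder thread: publish a frame only if the slot is empty,
	// mirroring "if (yuv_frame == NULL) yuv_frame = av_frame_clone(...)"
	public boolean offerIfEmpty(T frame) {
		return slot.compareAndSet(null, frame);
	}

	// Renderer thread: take ownership of the latest frame and leave the
	// slot empty, mirroring dequeue_new_frame()
	public T take() {
		return slot.getAndSet(null);
	}
}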
@@ -2,5 +2,12 @@
 int nv_avc_init(int width, int height, int perf_lvl, int thread_count);
 void nv_avc_destroy(void);
-void nv_avc_redraw(JNIEnv *env, jobject surface);
+
+int nv_avc_get_raw_frame(char* buffer, int size);
+
+int nv_avc_get_rgb_frame(char* buffer, int size);
+int nv_avc_set_render_target(JNIEnv *env, jobject surface);
+int nv_avc_redraw(void);
+
 int nv_avc_get_input_padding_size(void);
 int nv_avc_decode(unsigned char* indata, int inlen);
@@ -6,7 +6,7 @@
 // This function must be called before
 // any other decoding functions
 JNIEXPORT jint JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_init(JNIEnv *env, jobject this, jint width,
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_init(JNIEnv *env, jobject this, jint width,
 	jint height, jint perflvl, jint threadcount)
 {
 	return nv_avc_init(width, height, perflvl, threadcount);
@@ -15,20 +15,69 @@ Java_com_limelight_nvstream_av_video_AvcDecoder_init(JNIEnv *env, jobject this,
 // This function must be called after
 // decoding is finished
 JNIEXPORT void JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_destroy(JNIEnv *env, jobject this) {
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_destroy(JNIEnv *env, jobject this) {
 	nv_avc_destroy();
 }
 
+// fills the output buffer with a raw YUV frame
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_getRawFrame(
+	JNIEnv *env, jobject this, // JNI parameters
+	jbyteArray outdata, jint outlen) // Output data
+{
+	jint ret;
+	jbyte* jni_output_data;
+
+	jni_output_data = (*env)->GetByteArrayElements(env, outdata, 0);
+
+	ret = nv_avc_get_raw_frame(jni_output_data, outlen);
+
+	(*env)->ReleaseByteArrayElements(env, outdata, jni_output_data, 0);
+
+	return ret != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
+// fills the output buffer with an RGB frame
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_getRgbFrame(
+	JNIEnv *env, jobject this, // JNI parameters
+	jbyteArray outdata, jint outlen) // Output data
+{
+	jint ret;
+	jbyte* jni_output_data;
+
+	jni_output_data = (*env)->GetByteArrayElements(env, outdata, 0);
+
+	ret = nv_avc_get_rgb_frame(jni_output_data, outlen);
+
+	(*env)->ReleaseByteArrayElements(env, outdata, jni_output_data, 0);
+
+	return ret != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
+// This function sets the rendering target for redraw
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_setRenderTarget(JNIEnv *env, jobject this, jobject surface) {
+	return nv_avc_set_render_target(env, surface) != 0 ? JNI_TRUE : JNI_FALSE;
+}
+
 // This function redraws the surface
-JNIEXPORT void JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_redraw(JNIEnv *env, jobject this, jobject surface) {
-	nv_avc_redraw(env, surface);
+JNIEXPORT jboolean JNICALL
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_redraw(JNIEnv *env, jobject this) {
+	return nv_avc_redraw() != 0 ? JNI_TRUE : JNI_FALSE;
 }
 
 // This function returns the required input buffer padding
 JNIEXPORT jint JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_getInputPaddingSize(JNIEnv *env, jobject this) {
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_getInputPaddingSize(JNIEnv *env, jobject this) {
 	return nv_avc_get_input_padding_size();
 }
 
 // packets must be decoded in order
 // the input buffer must have proper padding
 // returns 0 on success, < 0 on error
 JNIEXPORT jint JNICALL
-Java_com_limelight_nvstream_av_video_AvcDecoder_decode(
+Java_com_limelight_nvstream_av_video_cpu_AvcDecoder_decode(
 	JNIEnv *env, jobject this, // JNI parameters
 	jbyteArray indata, jint inoff, jint inlen)
 {
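Since nv_avc_decode now points pkt.data directly at the caller's buffer instead of copying into an av_new_packet allocation, the caller must supply the trailing padding that libavcodec expects to be able to read past the end of the bitstream. A hedged sketch of a correct call from Java (DecodeCallSketch is hypothetical; the sizing matches what the CpuDecoderRenderer changes below do):

import com.limelight.nvstream.av.video.cpu.AvcDecoder;

public class DecodeCallSketch {
	public static int decodeNal(byte[] nalData, int nalLength) {
		// The decoder may read slightly past the end of the bitstream, so
		// always allocate inlen + getInputPaddingSize() bytes
		byte[] padded = new byte[nalLength + AvcDecoder.getInputPaddingSize()];
		System.arraycopy(nalData, 0, padded, 0, nalLength);
		// Trailing padding bytes stay zeroed, as libavcodec expects
		return AvcDecoder.decode(padded, 0, nalLength);
	}
}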
Binary file not shown.

Binary file not shown.
@@ -150,7 +150,7 @@ public class NvConnection {
 	private boolean startVideoStream() throws IOException
 	{
 		videoStream = new NvVideoStream(hostAddr, listener, controlStream);
-		videoStream.startVideoStream(video, drFlags);
+		videoStream.startVideoStream(activity, video, drFlags);
 		return true;
 	}
 
@@ -17,10 +17,11 @@ import com.limelight.nvstream.av.AvRtpPacket;
 import com.limelight.nvstream.av.ConnectionStatusListener;
 import com.limelight.nvstream.av.video.AvVideoDepacketizer;
 import com.limelight.nvstream.av.video.AvVideoPacket;
-import com.limelight.nvstream.av.video.CpuDecoderRenderer;
 import com.limelight.nvstream.av.video.DecoderRenderer;
 import com.limelight.nvstream.av.video.MediaCodecDecoderRenderer;
+import com.limelight.nvstream.av.video.cpu.CpuDecoderRenderer;
 
+import android.content.Context;
 import android.os.Build;
 import android.view.SurfaceHolder;
@@ -129,7 +130,7 @@ public class NvVideoStream {
 		rtp = new DatagramSocket(RTP_PORT);
 	}
 
-	public void setupDecoderRenderer(SurfaceHolder renderTarget, int drFlags) {
+	public void setupDecoderRenderer(Context context, SurfaceHolder renderTarget, int drFlags) {
 		if (Build.HARDWARE.equals("goldfish")) {
 			// Emulator - don't render video (it's slow!)
 			decrend = null;
@@ -144,14 +145,14 @@ public class NvVideoStream {
 		}
 
 		if (decrend != null) {
-			decrend.setup(1280, 720, renderTarget, drFlags);
+			decrend.setup(context, 1280, 720, renderTarget, drFlags);
 		}
 	}
 
-	public void startVideoStream(final SurfaceHolder surface, int drFlags) throws IOException
+	public void startVideoStream(Context context, SurfaceHolder surface, int drFlags) throws IOException
 	{
 		// Setup the decoder and renderer
-		setupDecoderRenderer(surface, drFlags);
+		setupDecoderRenderer(context, surface, drFlags);
 
 		// Open RTP sockets and start session
 		setupRtpSession();
@@ -2,12 +2,13 @@ package com.limelight.nvstream.av.video;
 
 import com.limelight.nvstream.av.AvDecodeUnit;
 
+import android.content.Context;
 import android.view.SurfaceHolder;
 
 public interface DecoderRenderer {
 	public static int FLAG_PREFER_QUALITY = 0x1;
 
-	public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags);
+	public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags);
 
 	public void start();
 
@@ -8,6 +8,7 @@ import com.limelight.nvstream.av.AvByteBufferDescriptor;
 import com.limelight.nvstream.av.AvDecodeUnit;
 
 import android.annotation.TargetApi;
+import android.content.Context;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
@@ -73,7 +74,7 @@ public class MediaCodecDecoderRenderer implements DecoderRenderer {
 	}
 
 	@Override
-	public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
+	public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags) {
 		videoDecoder = MediaCodec.createByCodecName(findSafeDecoder().getName());
 		MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
 
@@ -1,4 +1,4 @@
-package com.limelight.nvstream.av.video;
+package com.limelight.nvstream.av.video.cpu;
 
 import android.view.Surface;
 
@@ -27,9 +27,20 @@ public class AvcDecoder {
 	public static final int BILINEAR_FILTERING = 0x10;
 	/** Uses a faster bilinear filtering with lower image quality */
 	public static final int FAST_BILINEAR_FILTERING = 0x20;
+	/** Disables color conversion (output is NV21) */
+	public static final int NO_COLOR_CONVERSION = 0x40;
 
 	public static native int init(int width, int height, int perflvl, int threadcount);
 	public static native void destroy();
-	public static native void redraw(Surface surface);
+
+	// Rendering API when NO_COLOR_CONVERSION == 0
+	public static native boolean setRenderTarget(Surface surface);
+	public static native boolean getRgbFrame(byte[] rgbFrame, int bufferSize);
+	public static native boolean redraw();
+
+	// Rendering API when NO_COLOR_CONVERSION == 1
+	public static native boolean getRawFrame(byte[] yuvFrame, int bufferSize);
+
 	public static native int getInputPaddingSize();
 	public static native int decode(byte[] indata, int inoff, int inlen);
 }
@@ -1,4 +1,4 @@
-package com.limelight.nvstream.av.video;
+package com.limelight.nvstream.av.video.cpu;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -6,19 +6,24 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import android.view.Surface;
+import android.content.Context;
 import android.view.SurfaceHolder;
 
 import com.limelight.nvstream.av.AvByteBufferDescriptor;
 import com.limelight.nvstream.av.AvDecodeUnit;
+import com.limelight.nvstream.av.video.DecoderRenderer;
 
 public class CpuDecoderRenderer implements DecoderRenderer {
 
-	private Surface renderTarget;
-	private ByteBuffer decoderBuffer;
 	private Thread rendererThread;
 	private int targetFps;
 
+	private static final int DECODER_BUFFER_SIZE = 92*1024;
+	private ByteBuffer decoderBuffer;
+
+	private RsRenderer rsRenderer;
+	private byte[] frameBuffer;
+
 	// Only sleep if the difference is above this value
 	private static final int WAIT_CEILING_MS = 8;
@@ -76,8 +81,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 	}
 
 	@Override
-	public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
-		this.renderTarget = renderTarget.getSurface();
+	public void setup(Context context, int width, int height, SurfaceHolder renderTarget, int drFlags) {
 		this.targetFps = 30;
 
 		int perfLevel = findOptimalPerformanceLevel();
@@ -111,6 +115,12 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 			break;
 		}
 
+		// Create and initialize the RenderScript intrinsic we'll be using
+		rsRenderer = new RsRenderer(context, width, height, renderTarget.getSurface());
+
+		// Allocate the frame buffer that the RGBA frame will be copied into
+		frameBuffer = new byte[width*height*4];
+
 		// If the user wants quality, we'll remove the low IQ flags
 		if ((drFlags & DecoderRenderer.FLAG_PREFER_QUALITY) != 0) {
 			// Make sure the loop filter is enabled
@@ -127,7 +137,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 			throw new IllegalStateException("AVC decoder initialization failure: "+err);
 		}
 
-		decoderBuffer = ByteBuffer.allocate(92*1024);
+		decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + AvcDecoder.getInputPaddingSize());
 
 		System.out.println("Using software decoding (performance level: "+perfLevel+")");
 	}
@@ -152,7 +162,9 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 				}
 
 				nextFrameTime = computePresentationTimeMs(targetFps);
-				AvcDecoder.redraw(renderTarget);
+				if (AvcDecoder.getRgbFrame(frameBuffer, frameBuffer.length)) {
+					rsRenderer.render(frameBuffer);
+				}
 			}
 		}
 	};
@@ -175,6 +187,10 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 
 	@Override
 	public void release() {
+		if (rsRenderer != null) {
+			rsRenderer.release();
+		}
+
 		AvcDecoder.destroy();
 	}
 
@@ -183,7 +199,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 		byte[] data;
 
 		// Use the reserved decoder buffer if this decode unit will fit
-		if (decodeUnit.getDataLength() <= decoderBuffer.limit()) {
+		if (decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE) {
 			decoderBuffer.clear();
 
 			for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
@@ -193,7 +209,7 @@ public class CpuDecoderRenderer implements DecoderRenderer {
 			data = decoderBuffer.array();
 		}
 		else {
-			data = new byte[decodeUnit.getDataLength()];
+			data = new byte[decodeUnit.getDataLength()+AvcDecoder.getInputPaddingSize()];
 
 			int offset = 0;
 			for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
src/com/limelight/nvstream/av/video/cpu/RsRenderer.java (new file, 36 lines)
@@ -0,0 +1,36 @@
+package com.limelight.nvstream.av.video.cpu;
+
+import android.content.Context;
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.RenderScript;
+import android.renderscript.Type;
+import android.view.Surface;
+
+public class RsRenderer {
+	private RenderScript rs;
+	private Allocation renderBuffer;
+
+	public RsRenderer(Context context, int width, int height, Surface renderTarget) {
+		rs = RenderScript.create(context);
+
+		Type.Builder tb = new Type.Builder(rs, Element.RGBA_8888(rs));
+		tb.setX(width);
+		tb.setY(height);
+		Type bufferType = tb.create();
+
+		renderBuffer = Allocation.createTyped(rs, bufferType, Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT);
+		renderBuffer.setSurface(renderTarget);
+	}
+
+	public void release() {
+		renderBuffer.setSurface(null);
+		renderBuffer.destroy();
+		rs.destroy();
+	}
+
+	public void render(byte[] rgbData) {
+		renderBuffer.copyFrom(rgbData);
+		renderBuffer.ioSend();
+	}
+}
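RsRenderer works by binding a RenderScript Allocation to the output Surface with USAGE_IO_OUTPUT: copyFrom() uploads the RGBA bytes into the Allocation and ioSend() posts that buffer to the Surface, replacing the per-frame ANativeWindow locking the old native path used. A standalone usage sketch under those assumptions (the SurfaceHolder source and RsRendererUsageSketch are hypothetical):

import android.content.Context;
import android.view.SurfaceHolder;

import com.limelight.nvstream.av.video.cpu.RsRenderer;

public class RsRendererUsageSketch {
	public static void drawOneFrame(Context context, SurfaceHolder holder,
									byte[] rgbaFrame, int width, int height) {
		RsRenderer renderer = new RsRenderer(context, width, height, holder.getSurface());
		try {
			// rgbaFrame must hold width * height * 4 bytes of RGBA_8888 data
			renderer.render(rgbaFrame);
		} finally {
			renderer.release();
		}
	}
}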