Remove software decoder

This commit is contained in:
Iwan Timmer
2014-01-07 01:43:17 +01:00
parent 3f438c8e68
commit d69a384a5c
84 changed files with 0 additions and 22127 deletions

View File

@@ -1,179 +0,0 @@
package com.limelight.binding.video;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.nio.ByteBuffer;
import javax.swing.JFrame;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.av.video.cpu.AvcDecoder;
/**
 * Software (CPU) implementation of a video decoder and renderer.
 * Frames are decoded by the native {@code AvcDecoder} library, converted to RGB
 * into a {@link BufferedImage}, and drawn letterboxed onto a Swing {@link JFrame}.
 * @author Cameron Gutman
 */
public class SwingCpuDecoderRenderer implements VideoDecoderRenderer {

    private Thread rendererThread;
    private int targetFps;
    private int width, height;

    // Graphics context of the render target, captured once in setup()
    private Graphics graphics;
    private JFrame frame;

    // Backing image whose int[] raster receives decoded RGB pixels each frame
    private BufferedImage image;

    // Reserved buffer reused for decode units that fit, avoiding a per-frame allocation
    private static final int DECODER_BUFFER_SIZE = 92*1024;
    private ByteBuffer decoderBuffer;

    // Only sleep if the difference is above this value
    private static final int WAIT_CEILING_MS = 8;

    /**
     * Sets up the decoder and renderer to render video at the specified dimensions
     * @param width the width of the video to render
     * @param height the height of the video to render
     * @param redrawRate the target redraw rate in frames per second
     * @param renderTarget what to render the video onto (must be a {@link JFrame})
     * @param drFlags flags for the decoder and renderer
     * @throws IllegalStateException if the native AVC decoder fails to initialize
     */
    @Override
    public void setup(int width, int height, int redrawRate, Object renderTarget, int drFlags) {
        this.targetFps = redrawRate;
        this.width = width;
        this.height = height;

        // Single threaded low latency decode is ideal
        int avcFlags = AvcDecoder.LOW_LATENCY_DECODE;
        int threadCount = 1;

        int err = AvcDecoder.init(width, height, avcFlags, threadCount);
        if (err != 0) {
            throw new IllegalStateException("AVC decoder initialization failure: "+err);
        }

        frame = (JFrame)renderTarget;
        graphics = frame.getGraphics();

        image = new BufferedImage(width, height,
                BufferedImage.TYPE_INT_BGR);

        // Pad the reserved buffer so the native decoder can read past the end safely
        decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + AvcDecoder.getInputPaddingSize());

        System.out.println("Using software decoding");
    }

    /**
     * Starts the decoding and rendering of the video stream on a new thread
     */
    @Override
    public void start() {
        rendererThread = new Thread() {
            @Override
            public void run() {
                long nextFrameTime = System.currentTimeMillis();

                // Direct reference to the image's pixel data; the native decoder
                // writes RGB ints straight into this array
                int[] imageBuffer = ((DataBufferInt)image.getRaster().getDataBuffer()).getData();

                while (!isInterrupted())
                {
                    long diff = nextFrameTime - System.currentTimeMillis();

                    if (diff < WAIT_CEILING_MS) {
                        // We must call Thread.sleep in order to be interruptable
                        diff = 0;
                    }

                    try {
                        Thread.sleep(diff);
                    } catch (InterruptedException e) {
                        // stop() interrupted us; exit the render loop
                        return;
                    }

                    nextFrameTime = computePresentationTimeMs(targetFps);

                    // Scale to fit the frame while preserving aspect ratio
                    double widthScale = (double)frame.getWidth() / width;
                    double heightScale = (double)frame.getHeight() / height;
                    double lowerScale = Math.min(widthScale, heightScale);
                    int newWidth = (int)(width * lowerScale);
                    int newHeight = (int)(height * lowerScale);

                    // Center (letterbox/pillarbox) the scaled image within the frame
                    int dx1 = 0;
                    int dy1 = 0;
                    if (frame.getWidth() > newWidth) {
                        dx1 = (frame.getWidth()-newWidth)/2;
                    }
                    if (frame.getHeight() > newHeight) {
                        dy1 = (frame.getHeight()-newHeight)/2;
                    }

                    // Only draw if the decoder produced a new RGB frame
                    if (AvcDecoder.getRgbFrameInt(imageBuffer, imageBuffer.length)) {
                        graphics.drawImage(image, dx1, dy1, dx1+newWidth, dy1+newHeight, 0, 0, width, height, null);
                    }
                }
            }
        };
        rendererThread.setName("Video - Renderer (CPU)");
        rendererThread.start();
    }

    /*
     * Computes the deadline (in wall-clock millis) by which the next frame
     * should be displayed, given the target frame rate
     */
    private long computePresentationTimeMs(int frameRate) {
        return System.currentTimeMillis() + (1000 / frameRate);
    }

    /**
     * Stops the decoding and rendering of the video stream.
     * Blocks until the renderer thread has exited.
     */
    @Override
    public void stop() {
        rendererThread.interrupt();

        try {
            rendererThread.join();
        } catch (InterruptedException e) {
            // Preserve the caller's interrupt status rather than swallowing it
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Releases resources held by the decoder.
     */
    @Override
    public void release() {
        AvcDecoder.destroy();
    }

    /**
     * Give a unit to be decoded to the decoder.
     * @param decodeUnit the unit to be decoded
     * @return true if the unit was decoded successfully, false otherwise
     */
    @Override
    public boolean submitDecodeUnit(DecodeUnit decodeUnit) {
        byte[] data;

        // Use the reserved decoder buffer if this decode unit will fit
        if (decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE) {
            decoderBuffer.clear();

            for (ByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
                decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
            }

            data = decoderBuffer.array();
        }
        else {
            // Too large for the reserved buffer; allocate a padded one-off buffer
            data = new byte[decodeUnit.getDataLength()+AvcDecoder.getInputPaddingSize()];
            int offset = 0;
            for (ByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
                System.arraycopy(bbd.data, bbd.offset, data, offset, bbd.length);
                offset += bbd.length;
            }
        }

        return (AvcDecoder.decode(data, 0, decodeUnit.getDataLength()) == 0);
    }
}

View File

@@ -1,51 +0,0 @@
package com.limelight.nvstream.av.video.cpu;
import com.limelight.binding.LibraryHelper;
/**
 * JNI bindings for the native software AVC (H.264) decoder ({@code nv_avc_dec}).
 * All functionality is exposed through static native methods; this class is a
 * non-instantiable utility holder.
 */
public class AvcDecoder {
    static {
        // Windows uses runtime linking for ffmpeg libraries.
        // NOTE: load order matters here — each DLL must be loaded after the
        // libraries it depends on, so do not reorder these calls.
        if (System.getProperty("os.name").contains("Windows")) {
            LibraryHelper.loadNativeLibrary("avutil-52");
            LibraryHelper.loadNativeLibrary("postproc-52");
            LibraryHelper.loadNativeLibrary("pthreadVC2");
            LibraryHelper.loadNativeLibrary("swresample-0");
            LibraryHelper.loadNativeLibrary("swscale-2");
            LibraryHelper.loadNativeLibrary("avcodec-55");
            LibraryHelper.loadNativeLibrary("avformat-55");
            LibraryHelper.loadNativeLibrary("avfilter-3");
        }

        LibraryHelper.loadNativeLibrary("nv_avc_dec");
    }

    // Utility class: prevent instantiation
    private AvcDecoder() { }

    /** Disables the deblocking filter at the cost of image quality */
    public static final int DISABLE_LOOP_FILTER = 0x1;
    /** Uses the low latency decode flag (disables multithreading) */
    public static final int LOW_LATENCY_DECODE = 0x2;
    /** Threads process each slice, rather than each frame */
    public static final int SLICE_THREADING = 0x4;
    /** Uses nonstandard speedup tricks */
    public static final int FAST_DECODE = 0x8;
    /** Uses bilinear filtering instead of bicubic */
    public static final int BILINEAR_FILTERING = 0x10;
    /** Uses a faster bilinear filtering with lower image quality */
    public static final int FAST_BILINEAR_FILTERING = 0x20;
    /** Disables color conversion (output is NV21) */
    public static final int NO_COLOR_CONVERSION = 0x40;

    /**
     * Initializes the native decoder.
     * @param perflvl a bitwise OR of the flag constants above
     * @return 0 on success; a nonzero native error code otherwise
     */
    public static native int init(int width, int height, int perflvl, int threadcount);

    /** Tears down the native decoder and frees its resources. */
    public static native void destroy();

    // Rendering API when NO_COLOR_CONVERSION == 0
    public static native boolean setRenderTarget(Object androidSurface);
    public static native boolean getRgbFrameInt(int[] rgbFrame, int bufferSize);
    public static native boolean getRgbFrame(byte[] rgbFrame, int bufferSize);
    public static native boolean redraw();

    // Rendering API when NO_COLOR_CONVERSION == 1
    public static native boolean getRawFrame(byte[] yuvFrame, int bufferSize);

    /**
     * @return the number of padding bytes the native decoder requires past the
     *         end of any input buffer passed to {@link #decode}
     */
    public static native int getInputPaddingSize();

    /**
     * Decodes one unit of H.264 data.
     * NOTE(review): callers treat a return of 0 as success — confirm against the
     * native implementation, whose source is not visible here.
     */
    public static native int decode(byte[] indata, int inoff, int inlen);
}