diff --git a/limelight-pc/.classpath b/limelight-pc/.classpath
new file mode 100644
index 0000000..18d70f0
--- /dev/null
+++ b/limelight-pc/.classpath
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/limelight-pc/.project b/limelight-pc/.project
new file mode 100644
index 0000000..93d324d
--- /dev/null
+++ b/limelight-pc/.project
@@ -0,0 +1,17 @@
+
+
+ limelight-pc
+
+
+
+
+
+ org.eclipse.jdt.core.javabuilder
+
+
+
+
+
+ org.eclipse.jdt.core.javanature
+
+
diff --git a/limelight-pc/.settings/org.eclipse.jdt.core.prefs b/limelight-pc/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/limelight-pc/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/inc/opus.h b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus.h
new file mode 100644
index 0000000..ce86038
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus.h
@@ -0,0 +1,906 @@
+/* Copyright (c) 2010-2011 Xiph.Org Foundation, Skype Limited
+ Written by Jean-Marc Valin and Koen Vos */
+/*
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ - Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ - Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/**
+ * @file opus.h
+ * @brief Opus reference implementation API
+ */
+
+#ifndef OPUS_H
+#define OPUS_H
+
+#include "opus_types.h"
+#include "opus_defines.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @mainpage Opus
+ *
+ * The Opus codec is designed for interactive speech and audio transmission over the Internet.
+ * It is designed by the IETF Codec Working Group and incorporates technology from
+ * Skype's SILK codec and Xiph.Org's CELT codec.
+ *
+ * The Opus codec is designed to handle a wide range of interactive audio applications,
+ * including Voice over IP, videoconferencing, in-game chat, and even remote live music
+ * performances. It can scale from low bit-rate narrowband speech to very high quality
+ * stereo music. Its main features are:
+
+ * @li Sampling rates from 8 to 48 kHz
+ * @li Bit-rates from 6 kb/s to 510 kb/s
+ * @li Support for both constant bit-rate (CBR) and variable bit-rate (VBR)
+ * @li Audio bandwidth from narrowband to full-band
+ * @li Support for speech and music
+ * @li Support for mono and stereo
+ * @li Support for multichannel (up to 255 channels)
+ * @li Frame sizes from 2.5 ms to 60 ms
+ * @li Good loss robustness and packet loss concealment (PLC)
+ * @li Floating point and fixed-point implementation
+ *
+ * Documentation sections:
+ * @li @ref opus_encoder
+ * @li @ref opus_decoder
+ * @li @ref opus_repacketizer
+ * @li @ref opus_multistream
+ * @li @ref opus_libinfo
+ * @li @ref opus_custom
+ */
+
+/** @defgroup opus_encoder Opus Encoder
+ * @{
+ *
+ * @brief This page describes the process and functions used to encode Opus.
+ *
+ * Since Opus is a stateful codec, the encoding process starts with creating an encoder
+ * state. This can be done with:
+ *
+ * @code
+ * int error;
+ * OpusEncoder *enc;
+ * enc = opus_encoder_create(Fs, channels, application, &error);
+ * @endcode
+ *
+ * From this point, @c enc can be used for encoding an audio stream. An encoder state
+ * @b must @b not be used for more than one stream at the same time. Similarly, the encoder
+ * state @b must @b not be re-initialized for each frame.
+ *
+ * While opus_encoder_create() allocates memory for the state, it's also possible
+ * to initialize pre-allocated memory:
+ *
+ * @code
+ * int size;
+ * int error;
+ * OpusEncoder *enc;
+ * size = opus_encoder_get_size(channels);
+ * enc = malloc(size);
+ * error = opus_encoder_init(enc, Fs, channels, application);
+ * @endcode
+ *
+ * where opus_encoder_get_size() returns the required size for the encoder state. Note that
+ * future versions of this code may change the size, so no assumptions should be made about it.
+ *
+ * The encoder state is always continuous in memory and only a shallow copy is sufficient
+ * to copy it (e.g. memcpy())
+ *
+ * It is possible to change some of the encoder's settings using the opus_encoder_ctl()
+ * interface. All these settings already default to the recommended value, so they should
+ * only be changed when necessary. The most common settings one may want to change are:
+ *
+ * @code
+ * opus_encoder_ctl(enc, OPUS_SET_BITRATE(bitrate));
+ * opus_encoder_ctl(enc, OPUS_SET_COMPLEXITY(complexity));
+ * opus_encoder_ctl(enc, OPUS_SET_SIGNAL(signal_type));
+ * @endcode
+ *
+ * where
+ *
+ * @arg bitrate is in bits per second (b/s)
+ * @arg complexity is a value from 1 to 10, where 1 is the lowest complexity and 10 is the highest
+ * @arg signal_type is either OPUS_AUTO (default), OPUS_SIGNAL_VOICE, or OPUS_SIGNAL_MUSIC
+ *
+ * See @ref opus_encoderctls and @ref opus_genericctls for a complete list of parameters that can be set or queried. Most parameters can be set or changed at any time during a stream.
+ *
+ * To encode a frame, opus_encode() or opus_encode_float() must be called with exactly one frame (2.5, 5, 10, 20, 40 or 60 ms) of audio data:
+ * @code
+ * len = opus_encode(enc, audio_frame, frame_size, packet, max_packet);
+ * @endcode
+ *
+ * where
+ *
+ * - audio_frame is the audio data in opus_int16 (or float for opus_encode_float())
+ * - frame_size is the duration of the frame in samples (per channel)
+ * - packet is the byte array to which the compressed data is written
+ * - max_packet is the maximum number of bytes that can be written in the packet (4000 bytes is recommended).
+ * Do not use max_packet to control VBR target bitrate, instead use the #OPUS_SET_BITRATE CTL.
+ *
+ *
+ * opus_encode() and opus_encode_float() return the number of bytes actually written to the packet.
+ * The return value can be negative, which indicates that an error has occurred. If the return value
+ * is 1 byte, then the packet does not need to be transmitted (DTX).
+ *
+ * Once the encoder state is no longer needed, it can be destroyed with
+ *
+ * @code
+ * opus_encoder_destroy(enc);
+ * @endcode
+ *
+ * If the encoder was created with opus_encoder_init() rather than opus_encoder_create(),
+ * then no action is required aside from potentially freeing the memory that was manually
+ * allocated for it (calling free(enc) for the example above)
+ *
+ */
+
+/** Opus encoder state.
+ * This contains the complete state of an Opus encoder.
+ * It is position independent and can be freely copied.
+ * @see opus_encoder_create,opus_encoder_init
+ */
+typedef struct OpusEncoder OpusEncoder;
+
+/** Gets the size of an OpusEncoder structure.
+ * @param[in] channels int: Number of channels.
+ * This must be 1 or 2.
+ * @returns The size in bytes.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_encoder_get_size(int channels);
+
+/**
+ */
+
+/** Allocates and initializes an encoder state.
+ * There are three coding modes:
+ *
+ * @ref OPUS_APPLICATION_VOIP gives best quality at a given bitrate for voice
+ * signals. It enhances the input signal by high-pass filtering and
+ * emphasizing formants and harmonics. Optionally it includes in-band
+ * forward error correction to protect against packet loss. Use this
+ * mode for typical VoIP applications. Because of the enhancement,
+ * even at high bitrates the output may sound different from the input.
+ *
+ * @ref OPUS_APPLICATION_AUDIO gives best quality at a given bitrate for most
+ * non-voice signals like music. Use this mode for music and mixed
+ * (music/voice) content, broadcast, and applications requiring less
+ * than 15 ms of coding delay.
+ *
+ * @ref OPUS_APPLICATION_RESTRICTED_LOWDELAY configures low-delay mode that
+ * disables the speech-optimized mode in exchange for slightly reduced delay.
+ * This mode can only be set on a newly initialized or freshly reset encoder
+ * because it changes the codec delay.
+ *
+ * This is useful when the caller knows that the speech-optimized modes will not be needed (use with caution).
+ * @param [in] Fs opus_int32: Sampling rate of input signal (Hz)
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param [in] channels int: Number of channels (1 or 2) in input signal
+ * @param [in] application int: Coding mode (@ref OPUS_APPLICATION_VOIP/@ref OPUS_APPLICATION_AUDIO/@ref OPUS_APPLICATION_RESTRICTED_LOWDELAY)
+ * @param [out] error int*: @ref opus_errorcodes
+ * @note Regardless of the sampling rate and number of channels selected, the Opus encoder
+ * can switch to a lower audio bandwidth or number of channels if the bitrate
+ * selected is too low. This also means that it is safe to always use 48 kHz stereo input
+ * and let the encoder optimize the encoding.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT OpusEncoder *opus_encoder_create(
+ opus_int32 Fs,
+ int channels,
+ int application,
+ int *error
+);
+
+/** Initializes a previously allocated encoder state
+ * The memory pointed to by st must be at least the size returned by opus_encoder_get_size().
+ * This is intended for applications which use their own allocator instead of malloc.
+ * @see opus_encoder_create(),opus_encoder_get_size()
+ * To reset a previously initialized state, use the #OPUS_RESET_STATE CTL.
+ * @param [in] st OpusEncoder*: Encoder state
+ * @param [in] Fs opus_int32: Sampling rate of input signal (Hz)
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param [in] channels int: Number of channels (1 or 2) in input signal
+ * @param [in] application int: Coding mode (OPUS_APPLICATION_VOIP/OPUS_APPLICATION_AUDIO/OPUS_APPLICATION_RESTRICTED_LOWDELAY)
+ * @retval #OPUS_OK Success or @ref opus_errorcodes
+ */
+OPUS_EXPORT int opus_encoder_init(
+ OpusEncoder *st,
+ opus_int32 Fs,
+ int channels,
+ int application
+) OPUS_ARG_NONNULL(1);
+
+/** Encodes an Opus frame.
+ * @param [in] st OpusEncoder*: Encoder state
+ * @param [in] pcm opus_int16*: Input signal (interleaved if 2 channels). length is frame_size*channels*sizeof(opus_int16)
+ * @param [in] frame_size int: Number of samples per channel in the
+ * input signal.
+ * This must be an Opus frame size for
+ * the encoder's sampling rate.
+ * For example, at 48 kHz the permitted
+ * values are 120, 240, 480, 960, 1920,
+ * and 2880.
+ * Passing in a duration of less than
+ * 10 ms (480 samples at 48 kHz) will
+ * prevent the encoder from using the LPC
+ * or hybrid modes.
+ * @param [out] data unsigned char*: Output payload.
+ * This must contain storage for at
+ * least \a max_data_bytes.
+ * @param [in] max_data_bytes opus_int32: Size of the allocated
+ * memory for the output
+ * payload. This may be
+ * used to impose an upper limit on
+ * the instant bitrate, but should
+ * not be used as the only bitrate
+ * control. Use #OPUS_SET_BITRATE to
+ * control the bitrate.
+ * @returns The length of the encoded packet (in bytes) on success or a
+ * negative error code (see @ref opus_errorcodes) on failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_encode(
+ OpusEncoder *st,
+ const opus_int16 *pcm,
+ int frame_size,
+ unsigned char *data,
+ opus_int32 max_data_bytes
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2) OPUS_ARG_NONNULL(4);
+
+/** Encodes an Opus frame from floating point input.
+ * @param [in] st OpusEncoder*: Encoder state
+ * @param [in] pcm float*: Input in float format (interleaved if 2 channels), with a normal range of +/-1.0.
+ * Samples with a range beyond +/-1.0 are supported but will
+ * be clipped by decoders using the integer API and should
+ * only be used if it is known that the far end supports
+ * extended dynamic range.
+ * length is frame_size*channels*sizeof(float)
+ * @param [in] frame_size int: Number of samples per channel in the
+ * input signal.
+ * This must be an Opus frame size for
+ * the encoder's sampling rate.
+ * For example, at 48 kHz the permitted
+ * values are 120, 240, 480, 960, 1920,
+ * and 2880.
+ * Passing in a duration of less than
+ * 10 ms (480 samples at 48 kHz) will
+ * prevent the encoder from using the LPC
+ * or hybrid modes.
+ * @param [out] data unsigned char*: Output payload.
+ * This must contain storage for at
+ * least \a max_data_bytes.
+ * @param [in] max_data_bytes opus_int32: Size of the allocated
+ * memory for the output
+ * payload. This may be
+ * used to impose an upper limit on
+ * the instant bitrate, but should
+ * not be used as the only bitrate
+ * control. Use #OPUS_SET_BITRATE to
+ * control the bitrate.
+ * @returns The length of the encoded packet (in bytes) on success or a
+ * negative error code (see @ref opus_errorcodes) on failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_encode_float(
+ OpusEncoder *st,
+ const float *pcm,
+ int frame_size,
+ unsigned char *data,
+ opus_int32 max_data_bytes
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2) OPUS_ARG_NONNULL(4);
+
+/** Frees an OpusEncoder allocated by opus_encoder_create().
+ * @param[in] st OpusEncoder*: State to be freed.
+ */
+OPUS_EXPORT void opus_encoder_destroy(OpusEncoder *st);
+
+/** Perform a CTL function on an Opus encoder.
+ *
+ * Generally the request and subsequent arguments are generated
+ * by a convenience macro.
+ * @param st OpusEncoder*: Encoder state.
+ * @param request This and all remaining parameters should be replaced by one
+ * of the convenience macros in @ref opus_genericctls or
+ * @ref opus_encoderctls.
+ * @see opus_genericctls
+ * @see opus_encoderctls
+ */
+OPUS_EXPORT int opus_encoder_ctl(OpusEncoder *st, int request, ...) OPUS_ARG_NONNULL(1);
+/**@}*/
+
+/** @defgroup opus_decoder Opus Decoder
+ * @{
+ *
+ * @brief This page describes the process and functions used to decode Opus.
+ *
+ * The decoding process also starts with creating a decoder
+ * state. This can be done with:
+ * @code
+ * int error;
+ * OpusDecoder *dec;
+ * dec = opus_decoder_create(Fs, channels, &error);
+ * @endcode
+ * where
+ * @li Fs is the sampling rate and must be 8000, 12000, 16000, 24000, or 48000
+ * @li channels is the number of channels (1 or 2)
+ * @li error will hold the error code in case of failure (or #OPUS_OK on success)
+ * @li the return value is a newly created decoder state to be used for decoding
+ *
+ * While opus_decoder_create() allocates memory for the state, it's also possible
+ * to initialize pre-allocated memory:
+ * @code
+ * int size;
+ * int error;
+ * OpusDecoder *dec;
+ * size = opus_decoder_get_size(channels);
+ * dec = malloc(size);
+ * error = opus_decoder_init(dec, Fs, channels);
+ * @endcode
+ * where opus_decoder_get_size() returns the required size for the decoder state. Note that
+ * future versions of this code may change the size, so no assumptions should be made about it.
+ *
+ * The decoder state is always continuous in memory and only a shallow copy is sufficient
+ * to copy it (e.g. memcpy())
+ *
+ * To decode a frame, opus_decode() or opus_decode_float() must be called with a packet of compressed audio data:
+ * @code
+ * frame_size = opus_decode(dec, packet, len, decoded, max_size, 0);
+ * @endcode
+ * where
+ *
+ * @li packet is the byte array containing the compressed data
+ * @li len is the exact number of bytes contained in the packet
+ * @li decoded is the decoded audio data in opus_int16 (or float for opus_decode_float())
+ * @li max_size is the max duration of the frame in samples (per channel) that can fit into the decoded_frame array
+ *
+ * opus_decode() and opus_decode_float() return the number of samples (per channel) decoded from the packet.
+ * If that value is negative, then an error has occurred. This can occur if the packet is corrupted or if the audio
+ * buffer is too small to hold the decoded audio.
+ *
+ * Opus is a stateful codec with overlapping blocks and as a result Opus
+ * packets are not coded independently of each other. Packets must be
+ * passed into the decoder serially and in the correct order for a correct
+ * decode. Lost packets can be replaced with loss concealment by calling
+ * the decoder with a null pointer and zero length for the missing packet.
+ *
+ * A single codec state may only be accessed from a single thread at
+ * a time and any required locking must be performed by the caller. Separate
+ * streams must be decoded with separate decoder states and can be decoded
+ * in parallel unless the library was compiled with NONTHREADSAFE_PSEUDOSTACK
+ * defined.
+ *
+ */
+
+/** Opus decoder state.
+ * This contains the complete state of an Opus decoder.
+ * It is position independent and can be freely copied.
+ * @see opus_decoder_create,opus_decoder_init
+ */
+typedef struct OpusDecoder OpusDecoder;
+
+/** Gets the size of an OpusDecoder structure.
+ * @param [in] channels int: Number of channels.
+ * This must be 1 or 2.
+ * @returns The size in bytes.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_decoder_get_size(int channels);
+
+/** Allocates and initializes a decoder state.
+ * @param [in] Fs opus_int32: Sample rate to decode at (Hz).
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param [in] channels int: Number of channels (1 or 2) to decode
+ * @param [out] error int*: #OPUS_OK Success or @ref opus_errorcodes
+ *
+ * Internally Opus stores data at 48000 Hz, so that should be the default
+ * value for Fs. However, the decoder can efficiently decode to buffers
+ * at 8, 12, 16, and 24 kHz so if for some reason the caller cannot use
+ * data at the full sample rate, or knows the compressed data doesn't
+ * use the full frequency range, it can request decoding at a reduced
+ * rate. Likewise, the decoder is capable of filling in either mono or
+ * interleaved stereo pcm buffers, at the caller's request.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT OpusDecoder *opus_decoder_create(
+ opus_int32 Fs,
+ int channels,
+ int *error
+);
+
+/** Initializes a previously allocated decoder state.
+ * The state must be at least the size returned by opus_decoder_get_size().
+ * This is intended for applications which use their own allocator instead of malloc. @see opus_decoder_create,opus_decoder_get_size
+ * To reset a previously initialized state, use the #OPUS_RESET_STATE CTL.
+ * @param [in] st OpusDecoder*: Decoder state.
+ * @param [in] Fs opus_int32: Sampling rate to decode to (Hz).
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param [in] channels int: Number of channels (1 or 2) to decode
+ * @retval #OPUS_OK Success or @ref opus_errorcodes
+ */
+OPUS_EXPORT int opus_decoder_init(
+ OpusDecoder *st,
+ opus_int32 Fs,
+ int channels
+) OPUS_ARG_NONNULL(1);
+
+/** Decode an Opus packet.
+ * @param [in] st OpusDecoder*: Decoder state
+ * @param [in] data char*: Input payload. Use a NULL pointer to indicate packet loss
+ * @param [in] len opus_int32: Number of bytes in payload
+ * @param [out] pcm opus_int16*: Output signal (interleaved if 2 channels). length
+ * is frame_size*channels*sizeof(opus_int16)
+ * @param [in] frame_size Number of samples per channel of available space in \a pcm.
+ * If this is less than the maximum packet duration (120ms; 5760 for 48kHz), this function will
+ * not be capable of decoding some packets. In the case of PLC (data==NULL) or FEC (decode_fec=1),
+ * then frame_size needs to be exactly the duration of audio that is missing, otherwise the
+ * decoder will not be in the optimal state to decode the next incoming packet. For the PLC and
+ * FEC cases, frame_size must be a multiple of 2.5 ms.
+ * @param [in] decode_fec int: Flag (0 or 1) to request that any in-band forward error correction data be
+ * decoded. If no such data is available, the frame is decoded as if it were lost.
+ * @returns Number of decoded samples or @ref opus_errorcodes
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_decode(
+ OpusDecoder *st,
+ const unsigned char *data,
+ opus_int32 len,
+ opus_int16 *pcm,
+ int frame_size,
+ int decode_fec
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Decode an Opus packet with floating point output.
+ * @param [in] st OpusDecoder*: Decoder state
+ * @param [in] data char*: Input payload. Use a NULL pointer to indicate packet loss
+ * @param [in] len opus_int32: Number of bytes in payload
+ * @param [out] pcm float*: Output signal (interleaved if 2 channels). length
+ * is frame_size*channels*sizeof(float)
+ * @param [in] frame_size Number of samples per channel of available space in \a pcm.
+ * If this is less than the maximum packet duration (120ms; 5760 for 48kHz), this function will
+ * not be capable of decoding some packets. In the case of PLC (data==NULL) or FEC (decode_fec=1),
+ * then frame_size needs to be exactly the duration of audio that is missing, otherwise the
+ * decoder will not be in the optimal state to decode the next incoming packet. For the PLC and
+ * FEC cases, frame_size must be a multiple of 2.5 ms.
+ * @param [in] decode_fec int: Flag (0 or 1) to request that any in-band forward error correction data be
+ * decoded. If no such data is available the frame is decoded as if it were lost.
+ * @returns Number of decoded samples or @ref opus_errorcodes
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_decode_float(
+ OpusDecoder *st,
+ const unsigned char *data,
+ opus_int32 len,
+ float *pcm,
+ int frame_size,
+ int decode_fec
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Perform a CTL function on an Opus decoder.
+ *
+ * Generally the request and subsequent arguments are generated
+ * by a convenience macro.
+ * @param st OpusDecoder*: Decoder state.
+ * @param request This and all remaining parameters should be replaced by one
+ * of the convenience macros in @ref opus_genericctls or
+ * @ref opus_decoderctls.
+ * @see opus_genericctls
+ * @see opus_decoderctls
+ */
+OPUS_EXPORT int opus_decoder_ctl(OpusDecoder *st, int request, ...) OPUS_ARG_NONNULL(1);
+
+/** Frees an OpusDecoder allocated by opus_decoder_create().
+ * @param[in] st OpusDecoder*: State to be freed.
+ */
+OPUS_EXPORT void opus_decoder_destroy(OpusDecoder *st);
+
+/** Parse an opus packet into one or more frames.
+ * Opus_decode will perform this operation internally so most applications do
+ * not need to use this function.
+ * This function does not copy the frames, the returned pointers are pointers into
+ * the input packet.
+ * @param [in] data char*: Opus packet to be parsed
+ * @param [in] len opus_int32: size of data
+ * @param [out] out_toc char*: TOC pointer
+ * @param [out] frames char*[48] encapsulated frames
+ * @param [out] size opus_int16[48] sizes of the encapsulated frames
+ * @param [out] payload_offset int*: returns the position of the payload within the packet (in bytes)
+ * @returns number of frames
+ */
+OPUS_EXPORT int opus_packet_parse(
+ const unsigned char *data,
+ opus_int32 len,
+ unsigned char *out_toc,
+ const unsigned char *frames[48],
+ opus_int16 size[48],
+ int *payload_offset
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Gets the bandwidth of an Opus packet.
+ * @param [in] data char*: Opus packet
+ * @retval OPUS_BANDWIDTH_NARROWBAND Narrowband (4kHz bandpass)
+ * @retval OPUS_BANDWIDTH_MEDIUMBAND Mediumband (6kHz bandpass)
+ * @retval OPUS_BANDWIDTH_WIDEBAND Wideband (8kHz bandpass)
+ * @retval OPUS_BANDWIDTH_SUPERWIDEBAND Superwideband (12kHz bandpass)
+ * @retval OPUS_BANDWIDTH_FULLBAND Fullband (20kHz bandpass)
+ * @retval OPUS_INVALID_PACKET The compressed data passed is corrupted or of an unsupported type
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_packet_get_bandwidth(const unsigned char *data) OPUS_ARG_NONNULL(1);
+
+/** Gets the number of samples per frame from an Opus packet.
+ * @param [in] data char*: Opus packet.
+ * This must contain at least one byte of
+ * data.
+ * @param [in] Fs opus_int32: Sampling rate in Hz.
+ * This must be a multiple of 400, or
+ * inaccurate results will be returned.
+ * @returns Number of samples per frame.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_packet_get_samples_per_frame(const unsigned char *data, opus_int32 Fs) OPUS_ARG_NONNULL(1);
+
+/** Gets the number of channels from an Opus packet.
+ * @param [in] data char*: Opus packet
+ * @returns Number of channels
+ * @retval OPUS_INVALID_PACKET The compressed data passed is corrupted or of an unsupported type
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_packet_get_nb_channels(const unsigned char *data) OPUS_ARG_NONNULL(1);
+
+/** Gets the number of frames in an Opus packet.
+ * @param [in] packet char*: Opus packet
+ * @param [in] len opus_int32: Length of packet
+ * @returns Number of frames
+ * @retval OPUS_BAD_ARG Insufficient data was passed to the function
+ * @retval OPUS_INVALID_PACKET The compressed data passed is corrupted or of an unsupported type
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_packet_get_nb_frames(const unsigned char packet[], opus_int32 len) OPUS_ARG_NONNULL(1);
+
+/** Gets the number of samples of an Opus packet.
+ * @param [in] packet char*: Opus packet
+ * @param [in] len opus_int32: Length of packet
+ * @param [in] Fs opus_int32: Sampling rate in Hz.
+ * This must be a multiple of 400, or
+ * inaccurate results will be returned.
+ * @returns Number of samples
+ * @retval OPUS_BAD_ARG Insufficient data was passed to the function
+ * @retval OPUS_INVALID_PACKET The compressed data passed is corrupted or of an unsupported type
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_packet_get_nb_samples(const unsigned char packet[], opus_int32 len, opus_int32 Fs) OPUS_ARG_NONNULL(1);
+
+/** Gets the number of samples of an Opus packet.
+ * @param [in] dec OpusDecoder*: Decoder state
+ * @param [in] packet char*: Opus packet
+ * @param [in] len opus_int32: Length of packet
+ * @returns Number of samples
+ * @retval OPUS_BAD_ARG Insufficient data was passed to the function
+ * @retval OPUS_INVALID_PACKET The compressed data passed is corrupted or of an unsupported type
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_decoder_get_nb_samples(const OpusDecoder *dec, const unsigned char packet[], opus_int32 len) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2);
+/**@}*/
+
+/** @defgroup opus_repacketizer Repacketizer
+ * @{
+ *
+ * The repacketizer can be used to merge multiple Opus packets into a single
+ * packet or alternatively to split Opus packets that have previously been
+ * merged. Splitting valid Opus packets is always guaranteed to succeed,
+ * whereas merging valid packets only succeeds if all frames have the same
+ * mode, bandwidth, and frame size, and when the total duration of the merged
+ * packet is no more than 120 ms.
+ * The repacketizer currently only operates on elementary Opus
+ * streams. It will not manipulate multistream packets successfully, except in
+ * the degenerate case where they consist of data from a single stream.
+ *
+ * The repacketizing process starts with creating a repacketizer state, either
+ * by calling opus_repacketizer_create() or by allocating the memory yourself,
+ * e.g.,
+ * @code
+ * OpusRepacketizer *rp;
+ * rp = (OpusRepacketizer*)malloc(opus_repacketizer_get_size());
+ * if (rp != NULL)
+ * opus_repacketizer_init(rp);
+ * @endcode
+ *
+ * Then the application should submit packets with opus_repacketizer_cat(),
+ * extract new packets with opus_repacketizer_out() or
+ * opus_repacketizer_out_range(), and then reset the state for the next set of
+ * input packets via opus_repacketizer_init().
+ *
+ * For example, to split a sequence of packets into individual frames:
+ * @code
+ * unsigned char *data;
+ * int len;
+ * while (get_next_packet(&data, &len))
+ * {
+ * unsigned char out[1276];
+ * opus_int32 out_len;
+ * int nb_frames;
+ * int err;
+ * int i;
+ * err = opus_repacketizer_cat(rp, data, len);
+ * if (err != OPUS_OK)
+ * {
+ * release_packet(data);
+ * return err;
+ * }
+ * nb_frames = opus_repacketizer_get_nb_frames(rp);
+ * for (i = 0; i < nb_frames; i++)
+ * {
+ * out_len = opus_repacketizer_out_range(rp, i, i+1, out, sizeof(out));
+ * if (out_len < 0)
+ * {
+ * release_packet(data);
+ * return (int)out_len;
+ * }
+ * output_next_packet(out, out_len);
+ * }
+ * opus_repacketizer_init(rp);
+ * release_packet(data);
+ * }
+ * @endcode
+ *
+ * Alternatively, to combine a sequence of frames into packets that each
+ * contain up to TARGET_DURATION_MS milliseconds of data:
+ * @code
+ * // The maximum number of packets with duration TARGET_DURATION_MS occurs
+ * // when the frame size is 2.5 ms, for a total of (TARGET_DURATION_MS*2/5)
+ * // packets.
+ * unsigned char *data[(TARGET_DURATION_MS*2/5)+1];
+ * opus_int32 len[(TARGET_DURATION_MS*2/5)+1];
+ * int nb_packets;
+ * unsigned char out[1277*(TARGET_DURATION_MS*2/2)];
+ * opus_int32 out_len;
+ * int prev_toc;
+ * nb_packets = 0;
+ * while (get_next_packet(data+nb_packets, len+nb_packets))
+ * {
+ * int nb_frames;
+ * int err;
+ * nb_frames = opus_packet_get_nb_frames(data[nb_packets], len[nb_packets]);
+ * if (nb_frames < 1)
+ * {
+ * release_packets(data, nb_packets+1);
+ * return nb_frames;
+ * }
+ * nb_frames += opus_repacketizer_get_nb_frames(rp);
+ * // If adding the next packet would exceed our target, or it has an
+ * // incompatible TOC sequence, output the packets we already have before
+ * // submitting it.
+ * // N.B., The nb_packets > 0 check ensures we've submitted at least one
+ * // packet since the last call to opus_repacketizer_init(). Otherwise a
+ * // single packet longer than TARGET_DURATION_MS would cause us to try to
+ * // output an (invalid) empty packet. It also ensures that prev_toc has
+ * // been set to a valid value. Additionally, len[nb_packets] > 0 is
+ * // guaranteed by the call to opus_packet_get_nb_frames() above, so the
+ * // reference to data[nb_packets][0] should be valid.
+ * if (nb_packets > 0 && (
+ * ((prev_toc & 0xFC) != (data[nb_packets][0] & 0xFC)) ||
+ * opus_packet_get_samples_per_frame(data[nb_packets], 48000)*nb_frames >
+ * TARGET_DURATION_MS*48))
+ * {
+ * out_len = opus_repacketizer_out(rp, out, sizeof(out));
+ * if (out_len < 0)
+ * {
+ * release_packets(data, nb_packets+1);
+ * return (int)out_len;
+ * }
+ * output_next_packet(out, out_len);
+ * opus_repacketizer_init(rp);
+ * release_packets(data, nb_packets);
+ * data[0] = data[nb_packets];
+ * len[0] = len[nb_packets];
+ * nb_packets = 0;
+ * }
+ * err = opus_repacketizer_cat(rp, data[nb_packets], len[nb_packets]);
+ * if (err != OPUS_OK)
+ * {
+ * release_packets(data, nb_packets+1);
+ * return err;
+ * }
+ * prev_toc = data[nb_packets][0];
+ * nb_packets++;
+ * }
+ * // Output the final, partial packet.
+ * if (nb_packets > 0)
+ * {
+ * out_len = opus_repacketizer_out(rp, out, sizeof(out));
+ * release_packets(data, nb_packets);
+ * if (out_len < 0)
+ * return (int)out_len;
+ * output_next_packet(out, out_len);
+ * }
+ * @endcode
+ *
+ * An alternate way of merging packets is to simply call opus_repacketizer_cat()
+ * unconditionally until it fails. At that point, the merged packet can be
+ * obtained with opus_repacketizer_out() and the input packet on which
+ * opus_repacketizer_cat() failed needs to be re-added to a newly
+ * reinitialized repacketizer state.
+ */
+
+typedef struct OpusRepacketizer OpusRepacketizer;
+
+/** Gets the size of an OpusRepacketizer structure.
+ * @returns The size in bytes.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_repacketizer_get_size(void);
+
+/** (Re)initializes a previously allocated repacketizer state.
+ * The state must be at least the size returned by opus_repacketizer_get_size().
+ * This can be used for applications which use their own allocator instead of
+ * malloc().
+ * It must also be called to reset the queue of packets waiting to be
+ * repacketized, which is necessary if the maximum packet duration of 120 ms
+ * is reached or if you wish to submit packets with a different Opus
+ * configuration (coding mode, audio bandwidth, frame size, or channel count).
+ * Failure to do so will prevent a new packet from being added with
+ * opus_repacketizer_cat().
+ * @see opus_repacketizer_create
+ * @see opus_repacketizer_get_size
+ * @see opus_repacketizer_cat
+ * @param rp OpusRepacketizer*: The repacketizer state to
+ * (re)initialize.
+ * @returns A pointer to the same repacketizer state that was passed in.
+ */
+OPUS_EXPORT OpusRepacketizer *opus_repacketizer_init(OpusRepacketizer *rp) OPUS_ARG_NONNULL(1);
+
+/** Allocates memory and initializes the new repacketizer with
+ * opus_repacketizer_init().
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT OpusRepacketizer *opus_repacketizer_create(void);
+
+/** Frees an OpusRepacketizer allocated by
+ * opus_repacketizer_create().
+ * @param[in] rp OpusRepacketizer*: State to be freed.
+ */
+OPUS_EXPORT void opus_repacketizer_destroy(OpusRepacketizer *rp);
+
+/** Add a packet to the current repacketizer state.
+ * This packet must match the configuration of any packets already submitted
+ * for repacketization since the last call to opus_repacketizer_init().
+ * This means that it must have the same coding mode, audio bandwidth, frame
+ * size, and channel count.
+ * This can be checked in advance by examining the top 6 bits of the first
+ * byte of the packet, and ensuring they match the top 6 bits of the first
+ * byte of any previously submitted packet.
+ * The total duration of audio in the repacketizer state also must not exceed
+ * 120 ms, the maximum duration of a single packet, after adding this packet.
+ *
+ * The contents of the current repacketizer state can be extracted into new
+ * packets using opus_repacketizer_out() or opus_repacketizer_out_range().
+ *
+ * In order to add a packet with a different configuration or to add more
+ * audio beyond 120 ms, you must clear the repacketizer state by calling
+ * opus_repacketizer_init().
+ * If a packet is too large to add to the current repacketizer state, no part
+ * of it is added, even if it contains multiple frames, some of which might
+ * fit.
+ * If you wish to be able to add parts of such packets, you should first use
+ * another repacketizer to split the packet into pieces and add them
+ * individually.
+ * @see opus_repacketizer_out_range
+ * @see opus_repacketizer_out
+ * @see opus_repacketizer_init
+ * @param rp OpusRepacketizer*: The repacketizer state to which to
+ * add the packet.
+ * @param[in] data const unsigned char*: The packet data.
+ * The application must ensure
+ * this pointer remains valid
+ * until the next call to
+ * opus_repacketizer_init() or
+ * opus_repacketizer_destroy().
+ * @param len opus_int32: The number of bytes in the packet data.
+ * @returns An error code indicating whether or not the operation succeeded.
+ * @retval #OPUS_OK The packet's contents have been added to the repacketizer
+ * state.
+ * @retval #OPUS_INVALID_PACKET The packet did not have a valid TOC sequence,
+ * the packet's TOC sequence was not compatible
+ * with previously submitted packets (because
+ * the coding mode, audio bandwidth, frame size,
+ * or channel count did not match), or adding
+ * this packet would increase the total amount of
+ * audio stored in the repacketizer state to more
+ * than 120 ms.
+ */
+OPUS_EXPORT int opus_repacketizer_cat(OpusRepacketizer *rp, const unsigned char *data, opus_int32 len) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2);
+
+
+/** Construct a new packet from data previously submitted to the repacketizer
+ * state via opus_repacketizer_cat().
+ * @param rp OpusRepacketizer*: The repacketizer state from which to
+ * construct the new packet.
+ * @param begin int: The index of the first frame in the current
+ * repacketizer state to include in the output.
+ * @param end int: One past the index of the last frame in the
+ * current repacketizer state to include in the
+ * output.
+ * @param[out] data const unsigned char*: The buffer in which to
+ * store the output packet.
+ * @param maxlen opus_int32: The maximum number of bytes to store in
+ * the output buffer. In order to guarantee
+ * success, this should be at least
+ * 1276 for a single frame,
+ * or for multiple frames,
+ * 1277*(end-begin).
+ * However, 1*(end-begin) plus
+ * the size of all packet data submitted to
+ * the repacketizer since the last call to
+ * opus_repacketizer_init() or
+ * opus_repacketizer_create() is also
+ * sufficient, and possibly much smaller.
+ * @returns The total size of the output packet on success, or an error code
+ * on failure.
+ * @retval #OPUS_BAD_ARG [begin,end) was an invalid range of
+ * frames (begin < 0, begin >= end, or end >
+ * opus_repacketizer_get_nb_frames()).
+ * @retval #OPUS_BUFFER_TOO_SMALL \a maxlen was insufficient to contain the
+ * complete output packet.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_repacketizer_out_range(OpusRepacketizer *rp, int begin, int end, unsigned char *data, opus_int32 maxlen) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Return the total number of frames contained in packet data submitted to
+ * the repacketizer state so far via opus_repacketizer_cat() since the last
+ * call to opus_repacketizer_init() or opus_repacketizer_create().
+ * This defines the valid range of packets that can be extracted with
+ * opus_repacketizer_out_range() or opus_repacketizer_out().
+ * @param rp OpusRepacketizer*: The repacketizer state containing the
+ * frames.
+ * @returns The total number of frames contained in the packet data submitted
+ * to the repacketizer state.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_repacketizer_get_nb_frames(OpusRepacketizer *rp) OPUS_ARG_NONNULL(1);
+
+/** Construct a new packet from data previously submitted to the repacketizer
+ * state via opus_repacketizer_cat().
+ * This is a convenience routine that returns all the data submitted so far
+ * in a single packet.
+ * It is equivalent to calling
+ * @code
+ * opus_repacketizer_out_range(rp, 0, opus_repacketizer_get_nb_frames(rp),
+ * data, maxlen)
+ * @endcode
+ * @param rp OpusRepacketizer*: The repacketizer state from which to
+ * construct the new packet.
+ * @param[out] data const unsigned char*: The buffer in which to
+ * store the output packet.
+ * @param maxlen opus_int32: The maximum number of bytes to store in
+ * the output buffer. In order to guarantee
+ * success, this should be at least
+ * 1277*opus_repacketizer_get_nb_frames(rp).
+ * However,
+ * 1*opus_repacketizer_get_nb_frames(rp)
+ * plus the size of all packet data
+ * submitted to the repacketizer since the
+ * last call to opus_repacketizer_init() or
+ * opus_repacketizer_create() is also
+ * sufficient, and possibly much smaller.
+ * @returns The total size of the output packet on success, or an error code
+ * on failure.
+ * @retval #OPUS_BUFFER_TOO_SMALL \a maxlen was insufficient to contain the
+ * complete output packet.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_repacketizer_out(OpusRepacketizer *rp, unsigned char *data, opus_int32 maxlen) OPUS_ARG_NONNULL(1);
+
+/**@}*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* OPUS_H */
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_custom.h b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_custom.h
new file mode 100644
index 0000000..e7861d6
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_custom.h
@@ -0,0 +1,329 @@
+/* Copyright (c) 2007-2008 CSIRO
+ Copyright (c) 2007-2009 Xiph.Org Foundation
+ Copyright (c) 2008-2012 Gregory Maxwell
+ Written by Jean-Marc Valin and Gregory Maxwell */
+/*
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ - Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ - Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/**
+ @file opus_custom.h
+ @brief Opus-Custom reference implementation API
+ */
+
+#ifndef OPUS_CUSTOM_H
+#define OPUS_CUSTOM_H
+
+#include "opus_defines.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifdef CUSTOM_MODES
+#define OPUS_CUSTOM_EXPORT OPUS_EXPORT
+#define OPUS_CUSTOM_EXPORT_STATIC OPUS_EXPORT
+#else
+#define OPUS_CUSTOM_EXPORT
+#ifdef CELT_C
+#define OPUS_CUSTOM_EXPORT_STATIC static inline
+#else
+#define OPUS_CUSTOM_EXPORT_STATIC
+#endif
+#endif
+
+/** @defgroup opus_custom Opus Custom
+ * @{
+ * Opus Custom is an optional part of the Opus specification and
+ * reference implementation which uses a distinct API from the regular
+ * API and supports frame sizes that are not normally supported.\ Use
+ * of Opus Custom is discouraged for all but very special applications
+ * for which a frame size different from 2.5, 5, 10, or 20 ms is needed
+ * (for either complexity or latency reasons) and where interoperability
+ * is less important.
+ *
+ * In addition to the interoperability limitations the use of Opus custom
+ * disables a substantial chunk of the codec and generally lowers the
+ * quality available at a given bitrate. Normally when an application needs
+ * a different frame size from the codec it should buffer to match the
+ * sizes but this adds a small amount of delay which may be important
+ * in some very low latency applications. Some transports (especially
+ * constant rate RF transports) may also work best with frames of
+ * particular durations.
+ *
+ * Libopus only supports custom modes if they are enabled at compile time.
+ *
+ * The Opus Custom API is similar to the regular API but the
+ * @ref opus_encoder_create and @ref opus_decoder_create calls take
+ * an additional mode parameter which is a structure produced by
+ * a call to @ref opus_custom_mode_create. Both the encoder and decoder
+ * must create a mode using the same sample rate (fs) and frame size
+ * (frame size) so these parameters must either be signaled out of band
+ * or fixed in a particular implementation.
+ *
+ * Similar to regular Opus the custom modes support on the fly frame size
+ * switching, but the sizes available depend on the particular frame size in
+ * use. For some initial frame sizes only a single on the fly size is available.
+ */
+
+/** Contains the state of an encoder. One encoder state is needed
+ for each stream. It is initialized once at the beginning of the
+ stream. Do *not* re-initialize the state for every frame.
+ @brief Encoder state
+ */
+typedef struct OpusCustomEncoder OpusCustomEncoder;
+
+/** State of the decoder. One decoder state is needed for each stream.
+ It is initialized once at the beginning of the stream. Do *not*
+ re-initialize the state for every frame.
+ @brief Decoder state
+ */
+typedef struct OpusCustomDecoder OpusCustomDecoder;
+
+/** The mode contains all the information necessary to create an
+ encoder. Both the encoder and decoder need to be initialized
+ with exactly the same mode, otherwise the output will be
+ corrupted.
+ @brief Mode configuration
+ */
+typedef struct OpusCustomMode OpusCustomMode;
+
+/** Creates a new mode struct. This will be passed to an encoder or
+ * decoder. The mode MUST NOT BE DESTROYED until the encoders and
+ * decoders that use it are destroyed as well.
+ * @param [in] Fs int: Sampling rate (8000 to 96000 Hz)
+ * @param [in] frame_size int: Number of samples (per channel) to encode in each
+ * packet (64 - 1024, prime factorization must contain zero or more 2s, 3s, or 5s and no other primes)
+ * @param [out] error int*: Returned error code (if NULL, no error will be returned)
+ * @return A newly created mode
+ */
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT OpusCustomMode *opus_custom_mode_create(opus_int32 Fs, int frame_size, int *error);
+
+/** Destroys a mode struct. Only call this after all encoders and
+ * decoders using this mode are destroyed as well.
+ * @param [in] mode OpusCustomMode*: Mode to be freed.
+ */
+OPUS_CUSTOM_EXPORT void opus_custom_mode_destroy(OpusCustomMode *mode);
+
+/* Encoder */
+/** Gets the size of an OpusCustomEncoder structure.
+ * @param [in] mode OpusCustomMode *: Mode configuration
+ * @param [in] channels int: Number of channels
+ * @returns size
+ */
+OPUS_CUSTOM_EXPORT_STATIC OPUS_WARN_UNUSED_RESULT int opus_custom_encoder_get_size(
+ const OpusCustomMode *mode,
+ int channels
+) OPUS_ARG_NONNULL(1);
+
+/** Creates a new encoder state. Each stream needs its own encoder
+ * state (can't be shared across simultaneous streams).
+ * @param [in] mode OpusCustomMode*: Contains all the information about the characteristics of
+ * the stream (must be the same characteristics as used for the
+ * decoder)
+ * @param [in] channels int: Number of channels
+ * @param [out] error int*: Returns an error code
+ * @return Newly created encoder state.
+*/
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT OpusCustomEncoder *opus_custom_encoder_create(
+ const OpusCustomMode *mode,
+ int channels,
+ int *error
+) OPUS_ARG_NONNULL(1);
+
+/** Initializes a previously allocated encoder state
+ * The memory pointed to by st must be the size returned by opus_custom_encoder_get_size.
+ * This is intended for applications which use their own allocator instead of malloc.
+ * @see opus_custom_encoder_create(),opus_custom_encoder_get_size()
+ * To reset a previously initialized state use the OPUS_RESET_STATE CTL.
+ * @param [in] st OpusCustomEncoder*: Encoder state
+ * @param [in] mode OpusCustomMode *: Contains all the information about the characteristics of
+ * the stream (must be the same characteristics as used for the
+ * decoder)
+ * @param [in] channels int: Number of channels
+ * @return OPUS_OK Success or @ref opus_errorcodes
+ */
+OPUS_CUSTOM_EXPORT_STATIC int opus_custom_encoder_init(
+ OpusCustomEncoder *st,
+ const OpusCustomMode *mode,
+ int channels
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2);
+
+/** Destroys an encoder state.
+ * @param[in] st OpusCustomEncoder*: State to be freed.
+ */
+OPUS_CUSTOM_EXPORT void opus_custom_encoder_destroy(OpusCustomEncoder *st);
+
+/** Encodes a frame of audio.
+ * @param [in] st OpusCustomEncoder*: Encoder state
+ * @param [in] pcm float*: PCM audio in float format, with a normal range of +/-1.0.
+ * Samples with a range beyond +/-1.0 are supported but will
+ * be clipped by decoders using the integer API and should
+ * only be used if it is known that the far end supports
+ * extended dynamic range. There must be exactly
+ * frame_size samples per channel.
+ * @param [in] frame_size int: Number of samples per frame of input signal
+ * @param [out] compressed char *: The compressed data is written here. This may not alias pcm and must be at least maxCompressedBytes long.
+ * @param [in] maxCompressedBytes int: Maximum number of bytes to use for compressing the frame
+ * (can change from one frame to another)
+ * @return Number of bytes written to "compressed".
+ * If negative, an error has occurred (see error codes). It is IMPORTANT that
+ * the length returned be somehow transmitted to the decoder. Otherwise, no
+ * decoding is possible.
+ */
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT int opus_custom_encode_float(
+ OpusCustomEncoder *st,
+ const float *pcm,
+ int frame_size,
+ unsigned char *compressed,
+ int maxCompressedBytes
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2) OPUS_ARG_NONNULL(4);
+
+/** Encodes a frame of audio.
+ * @param [in] st OpusCustomEncoder*: Encoder state
+ * @param [in] pcm opus_int16*: PCM audio in signed 16-bit format (native endian).
+ * There must be exactly frame_size samples per channel.
+ * @param [in] frame_size int: Number of samples per frame of input signal
+ * @param [out] compressed char *: The compressed data is written here. This may not alias pcm and must be at least maxCompressedBytes long.
+ * @param [in] maxCompressedBytes int: Maximum number of bytes to use for compressing the frame
+ * (can change from one frame to another)
+ * @return Number of bytes written to "compressed".
+ * If negative, an error has occurred (see error codes). It is IMPORTANT that
+ * the length returned be somehow transmitted to the decoder. Otherwise, no
+ * decoding is possible.
+ */
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT int opus_custom_encode(
+ OpusCustomEncoder *st,
+ const opus_int16 *pcm,
+ int frame_size,
+ unsigned char *compressed,
+ int maxCompressedBytes
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2) OPUS_ARG_NONNULL(4);
+
+/** Perform a CTL function on an Opus custom encoder.
+ *
+ * Generally the request and subsequent arguments are generated
+ * by a convenience macro.
+ * @see opus_encoderctls
+ */
+OPUS_CUSTOM_EXPORT int opus_custom_encoder_ctl(OpusCustomEncoder * OPUS_RESTRICT st, int request, ...) OPUS_ARG_NONNULL(1);
+
+/* Decoder */
+
+/** Gets the size of an OpusCustomDecoder structure.
+ * @param [in] mode OpusCustomMode *: Mode configuration
+ * @param [in] channels int: Number of channels
+ * @returns size
+ */
+OPUS_CUSTOM_EXPORT_STATIC OPUS_WARN_UNUSED_RESULT int opus_custom_decoder_get_size(
+ const OpusCustomMode *mode,
+ int channels
+) OPUS_ARG_NONNULL(1);
+
+/** Creates a new decoder state. Each stream needs its own decoder state (can't
+ * be shared across simultaneous streams).
+ * @param [in] mode OpusCustomMode: Contains all the information about the characteristics of the
+ * stream (must be the same characteristics as used for the encoder)
+ * @param [in] channels int: Number of channels
+ * @param [out] error int*: Returns an error code
+ * @return Newly created decoder state.
+ */
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT OpusCustomDecoder *opus_custom_decoder_create(
+ const OpusCustomMode *mode,
+ int channels,
+ int *error
+) OPUS_ARG_NONNULL(1);
+
+/** Initializes a previously allocated decoder state
+ * The memory pointed to by st must be the size returned by opus_custom_decoder_get_size.
+ * This is intended for applications which use their own allocator instead of malloc.
+ * @see opus_custom_decoder_create(),opus_custom_decoder_get_size()
+ * To reset a previously initialized state use the OPUS_RESET_STATE CTL.
+ * @param [in] st OpusCustomDecoder*: Decoder state
+ * @param [in] mode OpusCustomMode *: Contains all the information about the characteristics of
+ * the stream (must be the same characteristics as used for the
+ * encoder)
+ * @param [in] channels int: Number of channels
+ * @return OPUS_OK Success or @ref opus_errorcodes
+ */
+OPUS_CUSTOM_EXPORT_STATIC int opus_custom_decoder_init(
+ OpusCustomDecoder *st,
+ const OpusCustomMode *mode,
+ int channels
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2);
+
+/** Destroys a decoder state.
+ * @param[in] st OpusCustomDecoder*: State to be freed.
+ */
+OPUS_CUSTOM_EXPORT void opus_custom_decoder_destroy(OpusCustomDecoder *st);
+
+/** Decode an opus custom frame with floating point output
+ * @param [in] st OpusCustomDecoder*: Decoder state
+ * @param [in] data char*: Input payload. Use a NULL pointer to indicate packet loss
+ * @param [in] len int: Number of bytes in payload
+ * @param [out] pcm float*: Output signal (interleaved if 2 channels). length
+ * is frame_size*channels*sizeof(float)
+ * @param [in] frame_size Number of samples per channel of available space in *pcm.
+ * @returns Number of decoded samples or @ref opus_errorcodes
+ */
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT int opus_custom_decode_float(
+ OpusCustomDecoder *st,
+ const unsigned char *data,
+ int len,
+ float *pcm,
+ int frame_size
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Decode an opus custom frame
+ * @param [in] st OpusCustomDecoder*: Decoder state
+ * @param [in] data char*: Input payload. Use a NULL pointer to indicate packet loss
+ * @param [in] len int: Number of bytes in payload
+ * @param [out] pcm opus_int16*: Output signal (interleaved if 2 channels). length
+ * is frame_size*channels*sizeof(opus_int16)
+ * @param [in] frame_size Number of samples per channel of available space in *pcm.
+ * @returns Number of decoded samples or @ref opus_errorcodes
+ */
+OPUS_CUSTOM_EXPORT OPUS_WARN_UNUSED_RESULT int opus_custom_decode(
+ OpusCustomDecoder *st,
+ const unsigned char *data,
+ int len,
+ opus_int16 *pcm,
+ int frame_size
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Perform a CTL function on an Opus custom decoder.
+ *
+ * Generally the request and subsequent arguments are generated
+ * by a convenience macro.
+ * @see opus_genericctls
+ */
+OPUS_CUSTOM_EXPORT int opus_custom_decoder_ctl(OpusCustomDecoder * OPUS_RESTRICT st, int request, ...) OPUS_ARG_NONNULL(1);
+
+/**@}*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* OPUS_CUSTOM_H */
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_defines.h b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_defines.h
new file mode 100644
index 0000000..9fa3ccb
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_defines.h
@@ -0,0 +1,655 @@
+/* Copyright (c) 2010-2011 Xiph.Org Foundation, Skype Limited
+ Written by Jean-Marc Valin and Koen Vos */
+/*
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ - Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ - Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/**
+ * @file opus_defines.h
+ * @brief Opus reference implementation constants
+ */
+
+#ifndef OPUS_DEFINES_H
+#define OPUS_DEFINES_H
+
+#include "opus_types.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** @defgroup opus_errorcodes Error codes
+ * @{
+ */
+/** No error @hideinitializer*/
+#define OPUS_OK 0
+/** One or more invalid/out of range arguments @hideinitializer*/
+#define OPUS_BAD_ARG -1
+/** Not enough bytes allocated in the buffer @hideinitializer*/
+#define OPUS_BUFFER_TOO_SMALL -2
+/** An internal error was detected @hideinitializer*/
+#define OPUS_INTERNAL_ERROR -3
+/** The compressed data passed is corrupted @hideinitializer*/
+#define OPUS_INVALID_PACKET -4
+/** Invalid/unsupported request number @hideinitializer*/
+#define OPUS_UNIMPLEMENTED -5
+/** An encoder or decoder structure is invalid or already freed @hideinitializer*/
+#define OPUS_INVALID_STATE -6
+/** Memory allocation has failed @hideinitializer*/
+#define OPUS_ALLOC_FAIL -7
+/**@}*/
+
+/** @cond OPUS_INTERNAL_DOC */
+/**Export control for opus functions */
+
+#ifndef OPUS_EXPORT
+# if defined(WIN32)
+# ifdef OPUS_BUILD
+# define OPUS_EXPORT __declspec(dllexport)
+# else
+# define OPUS_EXPORT
+# endif
+# elif defined(__GNUC__) && defined(OPUS_BUILD)
+# define OPUS_EXPORT __attribute__ ((visibility ("default")))
+# else
+# define OPUS_EXPORT
+# endif
+#endif
+
+# if !defined(OPUS_GNUC_PREREQ)
+# if defined(__GNUC__)&&defined(__GNUC_MINOR__)
+# define OPUS_GNUC_PREREQ(_maj,_min) \
+ ((__GNUC__<<16)+__GNUC_MINOR__>=((_maj)<<16)+(_min))
+# else
+# define OPUS_GNUC_PREREQ(_maj,_min) 0
+# endif
+# endif
+
+#if (!defined(__STDC_VERSION__) || (__STDC_VERSION__ < 199901L) )
+# if OPUS_GNUC_PREREQ(3,0)
+# define OPUS_RESTRICT __restrict__
+# elif (defined(_MSC_VER) && _MSC_VER >= 1400)
+# define OPUS_RESTRICT __restrict
+# else
+# define OPUS_RESTRICT
+# endif
+#else
+# define OPUS_RESTRICT restrict
+#endif
+
+/**Warning attributes for opus functions
+ * NONNULL is not used in OPUS_BUILD to avoid the compiler optimizing out
+ * some paranoid null checks. */
+#if defined(__GNUC__) && OPUS_GNUC_PREREQ(3, 4)
+# define OPUS_WARN_UNUSED_RESULT __attribute__ ((__warn_unused_result__))
+#else
+# define OPUS_WARN_UNUSED_RESULT
+#endif
+#if !defined(OPUS_BUILD) && defined(__GNUC__) && OPUS_GNUC_PREREQ(3, 4)
+# define OPUS_ARG_NONNULL(_x) __attribute__ ((__nonnull__(_x)))
+#else
+# define OPUS_ARG_NONNULL(_x)
+#endif
+
+/** These are the actual Encoder CTL ID numbers.
+ * They should not be used directly by applications.
+ * In general, SETs should be even and GETs should be odd.*/
+#define OPUS_SET_APPLICATION_REQUEST 4000
+#define OPUS_GET_APPLICATION_REQUEST 4001
+#define OPUS_SET_BITRATE_REQUEST 4002
+#define OPUS_GET_BITRATE_REQUEST 4003
+#define OPUS_SET_MAX_BANDWIDTH_REQUEST 4004
+#define OPUS_GET_MAX_BANDWIDTH_REQUEST 4005
+#define OPUS_SET_VBR_REQUEST 4006
+#define OPUS_GET_VBR_REQUEST 4007
+#define OPUS_SET_BANDWIDTH_REQUEST 4008
+#define OPUS_GET_BANDWIDTH_REQUEST 4009
+#define OPUS_SET_COMPLEXITY_REQUEST 4010
+#define OPUS_GET_COMPLEXITY_REQUEST 4011
+#define OPUS_SET_INBAND_FEC_REQUEST 4012
+#define OPUS_GET_INBAND_FEC_REQUEST 4013
+#define OPUS_SET_PACKET_LOSS_PERC_REQUEST 4014
+#define OPUS_GET_PACKET_LOSS_PERC_REQUEST 4015
+#define OPUS_SET_DTX_REQUEST 4016
+#define OPUS_GET_DTX_REQUEST 4017
+#define OPUS_SET_VBR_CONSTRAINT_REQUEST 4020
+#define OPUS_GET_VBR_CONSTRAINT_REQUEST 4021
+#define OPUS_SET_FORCE_CHANNELS_REQUEST 4022
+#define OPUS_GET_FORCE_CHANNELS_REQUEST 4023
+#define OPUS_SET_SIGNAL_REQUEST 4024
+#define OPUS_GET_SIGNAL_REQUEST 4025
+#define OPUS_GET_LOOKAHEAD_REQUEST 4027
+/* #define OPUS_RESET_STATE 4028 */
+#define OPUS_GET_SAMPLE_RATE_REQUEST 4029
+#define OPUS_GET_FINAL_RANGE_REQUEST 4031
+#define OPUS_GET_PITCH_REQUEST 4033
+#define OPUS_SET_GAIN_REQUEST 4034
+#define OPUS_GET_GAIN_REQUEST 4045 /* Should have been 4035 */
+#define OPUS_SET_LSB_DEPTH_REQUEST 4036
+#define OPUS_GET_LSB_DEPTH_REQUEST 4037
+
+#define OPUS_GET_LAST_PACKET_DURATION_REQUEST 4039
+
+/* Don't use 4045, it's already taken by OPUS_GET_GAIN_REQUEST */
+
+/* Macros to trigger compilation errors when the wrong types are provided to a CTL */
+#define __opus_check_int(x) (((void)((x) == (opus_int32)0)), (opus_int32)(x))
+#define __opus_check_int_ptr(ptr) ((ptr) + ((ptr) - (opus_int32*)(ptr)))
+#define __opus_check_uint_ptr(ptr) ((ptr) + ((ptr) - (opus_uint32*)(ptr)))
+/** @endcond */
+
+/** @defgroup opus_ctlvalues Pre-defined values for CTL interface
+ * @see opus_genericctls, opus_encoderctls
+ * @{
+ */
+/* Values for the various encoder CTLs */
+#define OPUS_AUTO -1000 /**<Auto/default setting @hideinitializer*/
+/** Configures the encoder's computational complexity.
+ * @param[in] x opus_int32: Allowed values: 0-10, inclusive.
+ *
+ * @hideinitializer */
+#define OPUS_SET_COMPLEXITY(x) OPUS_SET_COMPLEXITY_REQUEST, __opus_check_int(x)
+/** Gets the encoder's complexity configuration.
+ * @see OPUS_SET_COMPLEXITY
+ * @param[out] x opus_int32 *: Returns a value in the range 0-10,
+ * inclusive.
+ * @hideinitializer */
+#define OPUS_GET_COMPLEXITY(x) OPUS_GET_COMPLEXITY_REQUEST, __opus_check_int_ptr(x)
+
+/** Configures the bitrate in the encoder.
+ * Rates from 500 to 512000 bits per second are meaningful, as well as the
+ * special values #OPUS_AUTO and #OPUS_BITRATE_MAX.
+ * The value #OPUS_BITRATE_MAX can be used to cause the codec to use as much
+ * rate as it can, which is useful for controlling the rate by adjusting the
+ * output buffer size.
+ * @see OPUS_GET_BITRATE
+ * @param[in] x opus_int32: Bitrate in bits per second. The default
+ * is determined based on the number of
+ * channels and the input sampling rate.
+ * @hideinitializer */
+#define OPUS_SET_BITRATE(x) OPUS_SET_BITRATE_REQUEST, __opus_check_int(x)
+/** Gets the encoder's bitrate configuration.
+ * @see OPUS_SET_BITRATE
+ * @param[out] x opus_int32 *: Returns the bitrate in bits per second.
+ * The default is determined based on the
+ * number of channels and the input
+ * sampling rate.
+ * @hideinitializer */
+#define OPUS_GET_BITRATE(x) OPUS_GET_BITRATE_REQUEST, __opus_check_int_ptr(x)
+
+/** Enables or disables variable bitrate (VBR) in the encoder.
+ * The configured bitrate may not be met exactly because frames must
+ * be an integer number of bytes in length.
+ * @warning Only the MDCT mode of Opus can provide hard CBR behavior.
+ * @see OPUS_GET_VBR
+ * @see OPUS_SET_VBR_CONSTRAINT
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - 0: Hard CBR. For LPC/hybrid modes at very low bit-rate, this can
+ *      cause noticeable quality degradation.
+ * - 1: VBR (default). The exact type of VBR is controlled by
+ *      #OPUS_SET_VBR_CONSTRAINT.
+ *
+ * @hideinitializer */
+#define OPUS_SET_VBR(x) OPUS_SET_VBR_REQUEST, __opus_check_int(x)
+/** Determine if variable bitrate (VBR) is enabled in the encoder.
+ * @see OPUS_SET_VBR
+ * @see OPUS_GET_VBR_CONSTRAINT
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - 0: Hard CBR.
+ * - 1: VBR (default). The exact type of VBR may be retrieved via
+ *      #OPUS_GET_VBR_CONSTRAINT.
+ *
+ * @hideinitializer */
+#define OPUS_GET_VBR(x) OPUS_GET_VBR_REQUEST, __opus_check_int_ptr(x)
+
+/** Enables or disables constrained VBR in the encoder.
+ * This setting is ignored when the encoder is in CBR mode.
+ * @warning Only the MDCT mode of Opus currently heeds the constraint.
+ * Speech mode ignores it completely, hybrid mode may fail to obey it
+ * if the LPC layer uses more bitrate than the constraint would have
+ * permitted.
+ * @see OPUS_GET_VBR_CONSTRAINT
+ * @see OPUS_SET_VBR
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - 0: Unconstrained VBR.
+ * - 1: Constrained VBR (default). This creates a maximum of one
+ *      frame of buffering delay assuming a transport with a
+ *      serialization speed of the nominal bitrate.
+ *
+ * @hideinitializer */
+#define OPUS_SET_VBR_CONSTRAINT(x) OPUS_SET_VBR_CONSTRAINT_REQUEST, __opus_check_int(x)
+/** Determine if constrained VBR is enabled in the encoder.
+ * @see OPUS_SET_VBR_CONSTRAINT
+ * @see OPUS_GET_VBR
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - 0
+ * - Unconstrained VBR.
+ * - 1
+ * - Constrained VBR (default).
+ *
+ * @hideinitializer */
+#define OPUS_GET_VBR_CONSTRAINT(x) OPUS_GET_VBR_CONSTRAINT_REQUEST, __opus_check_int_ptr(x)
+
+/** Configures mono/stereo forcing in the encoder.
+ * This can force the encoder to produce packets encoded as either mono or
+ * stereo, regardless of the format of the input audio. This is useful when
+ * the caller knows that the input signal is currently a mono source embedded
+ * in a stereo stream.
+ * @see OPUS_GET_FORCE_CHANNELS
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - #OPUS_AUTO
+ * - Not forced (default)
+ * - 1
+ * - Forced mono
+ * - 2
+ * - Forced stereo
+ *
+ * @hideinitializer */
+#define OPUS_SET_FORCE_CHANNELS(x) OPUS_SET_FORCE_CHANNELS_REQUEST, __opus_check_int(x)
+/** Gets the encoder's forced channel configuration.
+ * @see OPUS_SET_FORCE_CHANNELS
+ * @param[out] x opus_int32 *:
+ *
+ * - #OPUS_AUTO
+ * - Not forced (default)
+ * - 1
+ * - Forced mono
+ * - 2
+ * - Forced stereo
+ *
+ * @hideinitializer */
+#define OPUS_GET_FORCE_CHANNELS(x) OPUS_GET_FORCE_CHANNELS_REQUEST, __opus_check_int_ptr(x)
+
+/** Configures the maximum bandpass that the encoder will select automatically.
+ * Applications should normally use this instead of #OPUS_SET_BANDWIDTH
+ * (leaving that set to the default, #OPUS_AUTO). This allows the
+ * application to set an upper bound based on the type of input it is
+ * providing, but still gives the encoder the freedom to reduce the bandpass
+ * when the bitrate becomes too low, for better overall quality.
+ * @see OPUS_GET_MAX_BANDWIDTH
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - OPUS_BANDWIDTH_NARROWBAND
+ * - 4 kHz passband
+ * - OPUS_BANDWIDTH_MEDIUMBAND
+ * - 6 kHz passband
+ * - OPUS_BANDWIDTH_WIDEBAND
+ * - 8 kHz passband
+ * - OPUS_BANDWIDTH_SUPERWIDEBAND
+ * - 12 kHz passband
+ * - OPUS_BANDWIDTH_FULLBAND
+ * - 20 kHz passband (default)
+ *
+ * @hideinitializer */
+#define OPUS_SET_MAX_BANDWIDTH(x) OPUS_SET_MAX_BANDWIDTH_REQUEST, __opus_check_int(x)
+
+/** Gets the encoder's configured maximum allowed bandpass.
+ * @see OPUS_SET_MAX_BANDWIDTH
+ * @param[out] x opus_int32 *: Allowed values:
+ *
+ * - #OPUS_BANDWIDTH_NARROWBAND
+ * - 4 kHz passband
+ * - #OPUS_BANDWIDTH_MEDIUMBAND
+ * - 6 kHz passband
+ * - #OPUS_BANDWIDTH_WIDEBAND
+ * - 8 kHz passband
+ * - #OPUS_BANDWIDTH_SUPERWIDEBAND
+ * - 12 kHz passband
+ * - #OPUS_BANDWIDTH_FULLBAND
+ * - 20 kHz passband (default)
+ *
+ * @hideinitializer */
+#define OPUS_GET_MAX_BANDWIDTH(x) OPUS_GET_MAX_BANDWIDTH_REQUEST, __opus_check_int_ptr(x)
+
+/** Sets the encoder's bandpass to a specific value.
+ * This prevents the encoder from automatically selecting the bandpass based
+ * on the available bitrate. If an application knows the bandpass of the input
+ * audio it is providing, it should normally use #OPUS_SET_MAX_BANDWIDTH
+ * instead, which still gives the encoder the freedom to reduce the bandpass
+ * when the bitrate becomes too low, for better overall quality.
+ * @see OPUS_GET_BANDWIDTH
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - #OPUS_AUTO
+ * - (default)
+ * - #OPUS_BANDWIDTH_NARROWBAND
+ * - 4 kHz passband
+ * - #OPUS_BANDWIDTH_MEDIUMBAND
+ * - 6 kHz passband
+ * - #OPUS_BANDWIDTH_WIDEBAND
+ * - 8 kHz passband
+ * - #OPUS_BANDWIDTH_SUPERWIDEBAND
+ * - 12 kHz passband
+ * - #OPUS_BANDWIDTH_FULLBAND
+ * - 20 kHz passband
+ *
+ * @hideinitializer */
+#define OPUS_SET_BANDWIDTH(x) OPUS_SET_BANDWIDTH_REQUEST, __opus_check_int(x)
+
+/** Configures the type of signal being encoded.
+ * This is a hint which helps the encoder's mode selection.
+ * @see OPUS_GET_SIGNAL
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - #OPUS_AUTO
+ * - (default)
+ * - #OPUS_SIGNAL_VOICE
+ * - Bias thresholds towards choosing LPC or Hybrid modes.
+ * - #OPUS_SIGNAL_MUSIC
+ * - Bias thresholds towards choosing MDCT modes.
+ *
+ * @hideinitializer */
+#define OPUS_SET_SIGNAL(x) OPUS_SET_SIGNAL_REQUEST, __opus_check_int(x)
+/** Gets the encoder's configured signal type.
+ * @see OPUS_SET_SIGNAL
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - #OPUS_AUTO
+ * - (default)
+ * - #OPUS_SIGNAL_VOICE
+ * - Bias thresholds towards choosing LPC or Hybrid modes.
+ * - #OPUS_SIGNAL_MUSIC
+ * - Bias thresholds towards choosing MDCT modes.
+ *
+ * @hideinitializer */
+#define OPUS_GET_SIGNAL(x) OPUS_GET_SIGNAL_REQUEST, __opus_check_int_ptr(x)
+
+
+/** Configures the encoder's intended application.
+ * The initial value is a mandatory argument to the encoder_create function.
+ * @see OPUS_GET_APPLICATION
+ * @param[in] x opus_int32: Returns one of the following values:
+ *
+ * - #OPUS_APPLICATION_VOIP
+ * - Process signal for improved speech intelligibility.
+ * - #OPUS_APPLICATION_AUDIO
+ * - Favor faithfulness to the original input.
+ * - #OPUS_APPLICATION_RESTRICTED_LOWDELAY
+ * - Configure the minimum possible coding delay by disabling certain modes
+ * of operation.
+ *
+ * @hideinitializer */
+#define OPUS_SET_APPLICATION(x) OPUS_SET_APPLICATION_REQUEST, __opus_check_int(x)
+/** Gets the encoder's configured application.
+ * @see OPUS_SET_APPLICATION
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - #OPUS_APPLICATION_VOIP
+ * - Process signal for improved speech intelligibility.
+ * - #OPUS_APPLICATION_AUDIO
+ * - Favor faithfulness to the original input.
+ * - #OPUS_APPLICATION_RESTRICTED_LOWDELAY
+ * - Configure the minimum possible coding delay by disabling certain modes
+ * of operation.
+ *
+ * @hideinitializer */
+#define OPUS_GET_APPLICATION(x) OPUS_GET_APPLICATION_REQUEST, __opus_check_int_ptr(x)
+
+/** Gets the sampling rate the encoder or decoder was initialized with.
+ * This simply returns the Fs value passed to opus_encoder_init()
+ * or opus_decoder_init().
+ * @param[out] x opus_int32 *: Sampling rate of encoder or decoder.
+ * @hideinitializer
+ */
+#define OPUS_GET_SAMPLE_RATE(x) OPUS_GET_SAMPLE_RATE_REQUEST, __opus_check_int_ptr(x)
+
+/** Gets the total samples of delay added by the entire codec.
+ * This can be queried by the encoder and then the provided number of samples can be
+ * skipped on from the start of the decoder's output to provide time aligned input
+ * and output. From the perspective of a decoding application the real data begins this many
+ * samples late.
+ *
+ * The decoder contribution to this delay is identical for all decoders, but the
+ * encoder portion of the delay may vary from implementation to implementation,
+ * version to version, or even depend on the encoder's initial configuration.
+ * Applications needing delay compensation should call this CTL rather than
+ * hard-coding a value.
+ * @param[out] x opus_int32 *: Number of lookahead samples
+ * @hideinitializer */
+#define OPUS_GET_LOOKAHEAD(x) OPUS_GET_LOOKAHEAD_REQUEST, __opus_check_int_ptr(x)
+
+/** Configures the encoder's use of inband forward error correction (FEC).
+ * @note This is only applicable to the LPC layer
+ * @see OPUS_GET_INBAND_FEC
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - 0
+ * - Disable inband FEC (default).
+ * - 1
+ * - Enable inband FEC.
+ *
+ * @hideinitializer */
+#define OPUS_SET_INBAND_FEC(x) OPUS_SET_INBAND_FEC_REQUEST, __opus_check_int(x)
+/** Gets encoder's configured use of inband forward error correction.
+ * @see OPUS_SET_INBAND_FEC
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - 0
+ * - Inband FEC disabled (default).
+ * - 1
+ * - Inband FEC enabled.
+ *
+ * @hideinitializer */
+#define OPUS_GET_INBAND_FEC(x) OPUS_GET_INBAND_FEC_REQUEST, __opus_check_int_ptr(x)
+
+/** Configures the encoder's expected packet loss percentage.
+ * Higher values trigger progressively more loss resistant behavior in the encoder
+ * at the expense of quality at a given bitrate in the lossless case, but greater quality
+ * under loss.
+ * @see OPUS_GET_PACKET_LOSS_PERC
+ * @param[in] x opus_int32: Loss percentage in the range 0-100, inclusive (default: 0).
+ * @hideinitializer */
+#define OPUS_SET_PACKET_LOSS_PERC(x) OPUS_SET_PACKET_LOSS_PERC_REQUEST, __opus_check_int(x)
+/** Gets the encoder's configured packet loss percentage.
+ * @see OPUS_SET_PACKET_LOSS_PERC
+ * @param[out] x opus_int32 *: Returns the configured loss percentage
+ * in the range 0-100, inclusive (default: 0).
+ * @hideinitializer */
+#define OPUS_GET_PACKET_LOSS_PERC(x) OPUS_GET_PACKET_LOSS_PERC_REQUEST, __opus_check_int_ptr(x)
+
+/** Configures the encoder's use of discontinuous transmission (DTX).
+ * @note This is only applicable to the LPC layer
+ * @see OPUS_GET_DTX
+ * @param[in] x opus_int32: Allowed values:
+ *
+ * - 0
+ * - Disable DTX (default).
+ * - 1
+ * - Enable DTX.
+ *
+ * @hideinitializer */
+#define OPUS_SET_DTX(x) OPUS_SET_DTX_REQUEST, __opus_check_int(x)
+/** Gets encoder's configured use of discontinuous transmission.
+ * @see OPUS_SET_DTX
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - 0
+ * - DTX disabled (default).
+ * - 1
+ * - DTX enabled.
+ *
+ * @hideinitializer */
+#define OPUS_GET_DTX(x) OPUS_GET_DTX_REQUEST, __opus_check_int_ptr(x)
+/** Configures the depth of signal being encoded.
+ * This is a hint which helps the encoder identify silence and near-silence.
+ * @see OPUS_GET_LSB_DEPTH
+ * @param[in] x opus_int32: Input precision in bits, between 8 and 24
+ * (default: 24).
+ * @hideinitializer */
+#define OPUS_SET_LSB_DEPTH(x) OPUS_SET_LSB_DEPTH_REQUEST, __opus_check_int(x)
+/** Gets the encoder's configured signal depth.
+ * @see OPUS_SET_LSB_DEPTH
+ * @param[out] x opus_int32 *: Input precision in bits, between 8 and
+ * 24 (default: 24).
+ * @hideinitializer */
+#define OPUS_GET_LSB_DEPTH(x) OPUS_GET_LSB_DEPTH_REQUEST, __opus_check_int_ptr(x)
+
+/** Gets the duration (in samples) of the last packet successfully decoded or concealed.
+ * @param[out] x opus_int32 *: Number of samples (at current sampling rate).
+ * @hideinitializer */
+#define OPUS_GET_LAST_PACKET_DURATION(x) OPUS_GET_LAST_PACKET_DURATION_REQUEST, __opus_check_int_ptr(x)
+/**@}*/
+
+/** @defgroup opus_genericctls Generic CTLs
+ *
+ * These macros are used with the \c opus_decoder_ctl and
+ * \c opus_encoder_ctl calls to generate a particular
+ * request.
+ *
+ * When called on an \c OpusDecoder they apply to that
+ * particular decoder instance. When called on an
+ * \c OpusEncoder they apply to the corresponding setting
+ * on that encoder instance, if present.
+ *
+ * Some usage examples:
+ *
+ * @code
+ * int ret;
+ * opus_int32 pitch;
+ * ret = opus_decoder_ctl(dec_ctx, OPUS_GET_PITCH(&pitch));
+ * if (ret != OPUS_OK) return ret;
+ *
+ * opus_encoder_ctl(enc_ctx, OPUS_RESET_STATE);
+ * opus_decoder_ctl(dec_ctx, OPUS_RESET_STATE);
+ *
+ * opus_int32 enc_bw, dec_bw;
+ * opus_encoder_ctl(enc_ctx, OPUS_GET_BANDWIDTH(&enc_bw));
+ * opus_decoder_ctl(dec_ctx, OPUS_GET_BANDWIDTH(&dec_bw));
+ * if (enc_bw != dec_bw) {
+ * printf("packet bandwidth mismatch!\n");
+ * }
+ * @endcode
+ *
+ * @see opus_encoder, opus_decoder_ctl, opus_encoder_ctl, opus_decoderctls, opus_encoderctls
+ * @{
+ */
+
+/** Resets the codec state to be equivalent to a freshly initialized state.
+ * This should be called when switching streams in order to prevent
+ * the back to back decoding from giving different results from
+ * one at a time decoding.
+ * @hideinitializer */
+#define OPUS_RESET_STATE 4028
+
+/** Gets the final state of the codec's entropy coder.
+ * This is used for testing purposes,
+ * The encoder and decoder state should be identical after coding a payload
+ * (assuming no data corruption or software bugs)
+ *
+ * @param[out] x opus_uint32 *: Entropy coder state
+ *
+ * @hideinitializer */
+#define OPUS_GET_FINAL_RANGE(x) OPUS_GET_FINAL_RANGE_REQUEST, __opus_check_uint_ptr(x)
+
+/** Gets the pitch of the last decoded frame, if available.
+ * This can be used for any post-processing algorithm requiring the use of pitch,
+ * e.g. time stretching/shortening. If the last frame was not voiced, or if the
+ * pitch was not coded in the frame, then zero is returned.
+ *
+ * This CTL is only implemented for decoder instances.
+ *
+ * @param[out] x opus_int32 *: pitch period at 48 kHz (or 0 if not available)
+ *
+ * @hideinitializer */
+#define OPUS_GET_PITCH(x) OPUS_GET_PITCH_REQUEST, __opus_check_int_ptr(x)
+
+/** Gets the encoder's configured bandpass or the decoder's last bandpass.
+ * @see OPUS_SET_BANDWIDTH
+ * @param[out] x opus_int32 *: Returns one of the following values:
+ *
+ * - #OPUS_AUTO
+ * - (default)
+ * - #OPUS_BANDWIDTH_NARROWBAND
+ * - 4 kHz passband
+ * - #OPUS_BANDWIDTH_MEDIUMBAND
+ * - 6 kHz passband
+ * - #OPUS_BANDWIDTH_WIDEBAND
+ * - 8 kHz passband
+ * - #OPUS_BANDWIDTH_SUPERWIDEBAND
+ * - 12 kHz passband
+ * - #OPUS_BANDWIDTH_FULLBAND
+ * - 20 kHz passband
+ *
+ * @hideinitializer */
+#define OPUS_GET_BANDWIDTH(x) OPUS_GET_BANDWIDTH_REQUEST, __opus_check_int_ptr(x)
+
+/**@}*/
+
+/** @defgroup opus_decoderctls Decoder related CTLs
+ * @see opus_genericctls, opus_encoderctls, opus_decoder
+ * @{
+ */
+
+/** Configures decoder gain adjustment.
+ * Scales the decoded output by a factor specified in Q8 dB units.
+ * This has a maximum range of -32768 to 32767 inclusive, and returns
+ * OPUS_BAD_ARG otherwise. The default is zero indicating no adjustment.
+ * This setting survives decoder reset.
+ *
+ * gain = pow(10, x/(20.0*256))
+ *
+ * @param[in] x opus_int32: Amount to scale PCM signal by in Q8 dB units.
+ * @hideinitializer */
+#define OPUS_SET_GAIN(x) OPUS_SET_GAIN_REQUEST, __opus_check_int(x)
+/** Gets the decoder's configured gain adjustment. @see OPUS_SET_GAIN
+ *
+ * @param[out] x opus_int32 *: Amount to scale PCM signal by in Q8 dB units.
+ * @hideinitializer */
+#define OPUS_GET_GAIN(x) OPUS_GET_GAIN_REQUEST, __opus_check_int_ptr(x)
+
+/**@}*/
+
+/** @defgroup opus_libinfo Opus library information functions
+ * @{
+ */
+
+/** Converts an opus error code into a human readable string.
+ *
+ * @param[in] error int: Error number
+ * @returns Error string
+ */
+OPUS_EXPORT const char *opus_strerror(int error);
+
+/** Gets the libopus version string.
+ *
+ * @returns Version string
+ */
+OPUS_EXPORT const char *opus_get_version_string(void);
+/**@}*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* OPUS_DEFINES_H */
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_multistream.h b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_multistream.h
new file mode 100644
index 0000000..ae59979
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_multistream.h
@@ -0,0 +1,660 @@
+/* Copyright (c) 2011 Xiph.Org Foundation
+ Written by Jean-Marc Valin */
+/*
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ - Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ - Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/**
+ * @file opus_multistream.h
+ * @brief Opus reference implementation multistream API
+ */
+
+#ifndef OPUS_MULTISTREAM_H
+#define OPUS_MULTISTREAM_H
+
+#include "opus.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** @cond OPUS_INTERNAL_DOC */
+
+/** Macros to trigger compilation errors when the wrong types are provided to a
+ * CTL. */
+/**@{*/
+#define __opus_check_encstate_ptr(ptr) ((ptr) + ((ptr) - (OpusEncoder**)(ptr)))
+#define __opus_check_decstate_ptr(ptr) ((ptr) + ((ptr) - (OpusDecoder**)(ptr)))
+/**@}*/
+
+/** These are the actual encoder and decoder CTL ID numbers.
+ * They should not be used directly by applications.
+ * In general, SETs should be even and GETs should be odd.*/
+/**@{*/
+#define OPUS_MULTISTREAM_GET_ENCODER_STATE_REQUEST 5120
+#define OPUS_MULTISTREAM_GET_DECODER_STATE_REQUEST 5122
+/**@}*/
+
+/** @endcond */
+
+/** @defgroup opus_multistream_ctls Multistream specific encoder and decoder CTLs
+ *
+ * These are convenience macros that are specific to the
+ * opus_multistream_encoder_ctl() and opus_multistream_decoder_ctl()
+ * interface.
+ * The CTLs from @ref opus_genericctls, @ref opus_encoderctls, and
+ * @ref opus_decoderctls may be applied to a multistream encoder or decoder as
+ * well.
+ * In addition, you may retrieve the encoder or decoder state for a specific
+ * stream via #OPUS_MULTISTREAM_GET_ENCODER_STATE or
+ * #OPUS_MULTISTREAM_GET_DECODER_STATE and apply CTLs to it individually.
+ */
+/**@{*/
+
+/** Gets the encoder state for an individual stream of a multistream encoder.
+ * @param[in] x opus_int32: The index of the stream whose encoder you
+ * wish to retrieve.
+ * This must be non-negative and less than
+ * the streams parameter used
+ * to initialize the encoder.
+ * @param[out] y OpusEncoder**: Returns a pointer to the given
+ * encoder state.
+ * @retval OPUS_BAD_ARG The index of the requested stream was out of range.
+ * @hideinitializer
+ */
+#define OPUS_MULTISTREAM_GET_ENCODER_STATE(x,y) OPUS_MULTISTREAM_GET_ENCODER_STATE_REQUEST, __opus_check_int(x), __opus_check_encstate_ptr(y)
+
+/** Gets the decoder state for an individual stream of a multistream decoder.
+ * @param[in] x opus_int32: The index of the stream whose decoder you
+ * wish to retrieve.
+ * This must be non-negative and less than
+ * the streams parameter used
+ * to initialize the decoder.
+ * @param[out] y OpusDecoder**: Returns a pointer to the given
+ * decoder state.
+ * @retval OPUS_BAD_ARG The index of the requested stream was out of range.
+ * @hideinitializer
+ */
+#define OPUS_MULTISTREAM_GET_DECODER_STATE(x,y) OPUS_MULTISTREAM_GET_DECODER_STATE_REQUEST, __opus_check_int(x), __opus_check_decstate_ptr(y)
+
+/**@}*/
+
+/** @defgroup opus_multistream Opus Multistream API
+ * @{
+ *
+ * The multistream API allows individual Opus streams to be combined into a
+ * single packet, enabling support for up to 255 channels. Unlike an
+ * elementary Opus stream, the encoder and decoder must negotiate the channel
+ * configuration before the decoder can successfully interpret the data in the
+ * packets produced by the encoder. Some basic information, such as packet
+ * duration, can be computed without any special negotiation.
+ *
+ * The format for multistream Opus packets is defined in the
+ * Ogg
+ * encapsulation specification and is based on the self-delimited Opus
+ * framing described in Appendix B of RFC 6716.
+ * Normal Opus packets are just a degenerate case of multistream Opus packets,
+ * and can be encoded or decoded with the multistream API by setting
+ * streams to 1 when initializing the encoder or
+ * decoder.
+ *
+ * Multistream Opus streams can contain up to 255 elementary Opus streams.
+ * These may be either "uncoupled" or "coupled", indicating that the decoder
+ * is configured to decode them to either 1 or 2 channels, respectively.
+ * The streams are ordered so that all coupled streams appear at the
+ * beginning.
+ *
+ * A mapping table defines which decoded channel i
+ * should be used for each input/output (I/O) channel j. This table is
+ * typically provided as an unsigned char array.
+ * Let i = mapping[j] be the index for I/O channel j.
+ * If i < 2*coupled_streams, then I/O channel j is
+ * encoded as the left channel of stream (i/2) if i
+ * is even, or as the right channel of stream (i/2) if
+ * i is odd. Otherwise, I/O channel j is encoded as
+ * mono in stream (i - coupled_streams), unless it has the special
+ * value 255, in which case it is omitted from the encoding entirely (the
+ * decoder will reproduce it as silence). Each value i must either
+ * be the special value 255 or be less than streams + coupled_streams.
+ *
+ * The output channels specified by the encoder
+ * should use the
+ * Vorbis
+ * channel ordering. A decoder may wish to apply an additional permutation
+ * to the mapping the encoder used to achieve a different output channel
+ * order (e.g. for outputting in WAV order).
+ *
+ * Each multistream packet contains an Opus packet for each stream, and all of
+ * the Opus packets in a single multistream packet must have the same
+ * duration. Therefore the duration of a multistream packet can be extracted
+ * from the TOC sequence of the first stream, which is located at the
+ * beginning of the packet, just like an elementary Opus stream:
+ *
+ * @code
+ * int nb_samples;
+ * int nb_frames;
+ * nb_frames = opus_packet_get_nb_frames(data, len);
+ * if (nb_frames < 1)
+ * return nb_frames;
+ * nb_samples = opus_packet_get_samples_per_frame(data, 48000) * nb_frames;
+ * @endcode
+ *
+ * The general encoding and decoding process proceeds exactly the same as in
+ * the normal @ref opus_encoder and @ref opus_decoder APIs.
+ * See their documentation for an overview of how to use the corresponding
+ * multistream functions.
+ */
+
+/** Opus multistream encoder state.
+ * This contains the complete state of a multistream Opus encoder.
+ * It is position independent and can be freely copied.
+ * @see opus_multistream_encoder_create
+ * @see opus_multistream_encoder_init
+ */
+typedef struct OpusMSEncoder OpusMSEncoder;
+
+/** Opus multistream decoder state.
+ * This contains the complete state of a multistream Opus decoder.
+ * It is position independent and can be freely copied.
+ * @see opus_multistream_decoder_create
+ * @see opus_multistream_decoder_init
+ */
+typedef struct OpusMSDecoder OpusMSDecoder;
+
+/**\name Multistream encoder functions */
+/**@{*/
+
+/** Gets the size of an OpusMSEncoder structure.
+ * @param streams int: The total number of streams to encode from the
+ * input.
+ * This must be no more than 255.
+ * @param coupled_streams int: Number of coupled (2 channel) streams
+ * to encode.
+ * This must be no larger than the total
+ * number of streams.
+ * Additionally, the total number of
+ * encoded channels (streams +
+ * coupled_streams) must be no
+ * more than 255.
+ * @returns The size in bytes on success, or a negative error code
+ * (see @ref opus_errorcodes) on error.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_multistream_encoder_get_size(
+ int streams,
+ int coupled_streams
+);
+
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_multistream_surround_encoder_get_size(
+ int channels,
+ int mapping_family
+);
+
+
+/** Allocates and initializes a multistream encoder state.
+ * Call opus_multistream_encoder_destroy() to release
+ * this object when finished.
+ * @param Fs opus_int32: Sampling rate of the input signal (in Hz).
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param channels int: Number of channels in the input signal.
+ * This must be at most 255.
+ * It may be greater than the number of
+ * coded channels (streams +
+ * coupled_streams).
+ * @param streams int: The total number of streams to encode from the
+ * input.
+ * This must be no more than the number of channels.
+ * @param coupled_streams int: Number of coupled (2 channel) streams
+ * to encode.
+ * This must be no larger than the total
+ * number of streams.
+ * Additionally, the total number of
+ * encoded channels (streams +
+ * coupled_streams) must be no
+ * more than the number of input channels.
+ * @param[in] mapping const unsigned char[channels]: Mapping from
+ * encoded channels to input channels, as described in
+ * @ref opus_multistream. As an extra constraint, the
+ * multistream encoder does not allow encoding coupled
+ * streams for which one channel is unused since this
+ * is never a good idea.
+ * @param application int: The target encoder application.
+ * This must be one of the following:
+ *
+ * - #OPUS_APPLICATION_VOIP
+ * - Process signal for improved speech intelligibility.
+ * - #OPUS_APPLICATION_AUDIO
+ * - Favor faithfulness to the original input.
+ * - #OPUS_APPLICATION_RESTRICTED_LOWDELAY
+ * - Configure the minimum possible coding delay by disabling certain modes
+ * of operation.
+ *
+ * @param[out] error int *: Returns #OPUS_OK on success, or an error
+ * code (see @ref opus_errorcodes) on
+ * failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT OpusMSEncoder *opus_multistream_encoder_create(
+ opus_int32 Fs,
+ int channels,
+ int streams,
+ int coupled_streams,
+ const unsigned char *mapping,
+ int application,
+ int *error
+) OPUS_ARG_NONNULL(5);
+
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT OpusMSEncoder *opus_multistream_surround_encoder_create(
+ opus_int32 Fs,
+ int channels,
+ int mapping_family,
+ int *streams,
+ int *coupled_streams,
+ unsigned char *mapping,
+ int application,
+ int *error
+) OPUS_ARG_NONNULL(5);
+
+/** Initialize a previously allocated multistream encoder state.
+ * The memory pointed to by \a st must be at least the size returned by
+ * opus_multistream_encoder_get_size().
+ * This is intended for applications which use their own allocator instead of
+ * malloc.
+ * To reset a previously initialized state, use the #OPUS_RESET_STATE CTL.
+ * @see opus_multistream_encoder_create
+ * @see opus_multistream_encoder_get_size
+ * @param st OpusMSEncoder*: Multistream encoder state to initialize.
+ * @param Fs opus_int32: Sampling rate of the input signal (in Hz).
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param channels int: Number of channels in the input signal.
+ * This must be at most 255.
+ * It may be greater than the number of
+ * coded channels (streams +
+ * coupled_streams).
+ * @param streams int: The total number of streams to encode from the
+ * input.
+ * This must be no more than the number of channels.
+ * @param coupled_streams int: Number of coupled (2 channel) streams
+ * to encode.
+ * This must be no larger than the total
+ * number of streams.
+ * Additionally, the total number of
+ * encoded channels (streams +
+ * coupled_streams) must be no
+ * more than the number of input channels.
+ * @param[in] mapping const unsigned char[channels]: Mapping from
+ * encoded channels to input channels, as described in
+ * @ref opus_multistream. As an extra constraint, the
+ * multistream encoder does not allow encoding coupled
+ * streams for which one channel is unused since this
+ * is never a good idea.
+ * @param application int: The target encoder application.
+ * This must be one of the following:
+ *
+ * - #OPUS_APPLICATION_VOIP
+ * - Process signal for improved speech intelligibility.
+ * - #OPUS_APPLICATION_AUDIO
+ * - Favor faithfulness to the original input.
+ * - #OPUS_APPLICATION_RESTRICTED_LOWDELAY
+ * - Configure the minimum possible coding delay by disabling certain modes
+ * of operation.
+ *
+ * @returns #OPUS_OK on success, or an error code (see @ref opus_errorcodes)
+ * on failure.
+ */
+OPUS_EXPORT int opus_multistream_encoder_init(
+ OpusMSEncoder *st,
+ opus_int32 Fs,
+ int channels,
+ int streams,
+ int coupled_streams,
+ const unsigned char *mapping,
+ int application
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(6);
+
+OPUS_EXPORT int opus_multistream_surround_encoder_init(
+ OpusMSEncoder *st,
+ opus_int32 Fs,
+ int channels,
+ int mapping_family,
+ int *streams,
+ int *coupled_streams,
+ unsigned char *mapping,
+ int application
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(6);
+
+/** Encodes a multistream Opus frame.
+ * @param st OpusMSEncoder*: Multistream encoder state.
+ * @param[in] pcm const opus_int16*: The input signal as interleaved
+ * samples.
+ * This must contain
+ * frame_size*channels
+ * samples.
+ * @param frame_size int: Number of samples per channel in the input
+ * signal.
+ * This must be an Opus frame size for the
+ * encoder's sampling rate.
+ * For example, at 48 kHz the permitted values
+ * are 120, 240, 480, 960, 1920, and 2880.
+ * Passing in a duration of less than 10 ms
+ * (480 samples at 48 kHz) will prevent the
+ * encoder from using the LPC or hybrid modes.
+ * @param[out] data unsigned char*: Output payload.
+ * This must contain storage for at
+ * least \a max_data_bytes.
+ * @param [in] max_data_bytes opus_int32: Size of the allocated
+ * memory for the output
+ * payload. This may be
+ * used to impose an upper limit on
+ * the instant bitrate, but should
+ * not be used as the only bitrate
+ * control. Use #OPUS_SET_BITRATE to
+ * control the bitrate.
+ * @returns The length of the encoded packet (in bytes) on success or a
+ * negative error code (see @ref opus_errorcodes) on failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_multistream_encode(
+ OpusMSEncoder *st,
+ const opus_int16 *pcm,
+ int frame_size,
+ unsigned char *data,
+ opus_int32 max_data_bytes
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2) OPUS_ARG_NONNULL(4);
+
+/** Encodes a multistream Opus frame from floating point input.
+ * @param st OpusMSEncoder*: Multistream encoder state.
+ * @param[in] pcm const float*: The input signal as interleaved
+ * samples with a normal range of
+ * +/-1.0.
+ * Samples with a range beyond +/-1.0
+ * are supported but will be clipped by
+ * decoders using the integer API and
+ * should only be used if it is known
+ * that the far end supports extended
+ * dynamic range.
+ * This must contain
+ * frame_size*channels
+ * samples.
+ * @param frame_size int: Number of samples per channel in the input
+ * signal.
+ * This must be an Opus frame size for the
+ * encoder's sampling rate.
+ * For example, at 48 kHz the permitted values
+ * are 120, 240, 480, 960, 1920, and 2880.
+ * Passing in a duration of less than 10 ms
+ * (480 samples at 48 kHz) will prevent the
+ * encoder from using the LPC or hybrid modes.
+ * @param[out] data unsigned char*: Output payload.
+ * This must contain storage for at
+ * least \a max_data_bytes.
+ * @param [in] max_data_bytes opus_int32: Size of the allocated
+ * memory for the output
+ * payload. This may be
+ * used to impose an upper limit on
+ * the instant bitrate, but should
+ * not be used as the only bitrate
+ * control. Use #OPUS_SET_BITRATE to
+ * control the bitrate.
+ * @returns The length of the encoded packet (in bytes) on success or a
+ * negative error code (see @ref opus_errorcodes) on failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_multistream_encode_float(
+ OpusMSEncoder *st,
+ const float *pcm,
+ int frame_size,
+ unsigned char *data,
+ opus_int32 max_data_bytes
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(2) OPUS_ARG_NONNULL(4);
+
+/** Frees an OpusMSEncoder allocated by
+ * opus_multistream_encoder_create().
+ * @param st OpusMSEncoder*: Multistream encoder state to be freed.
+ */
+OPUS_EXPORT void opus_multistream_encoder_destroy(OpusMSEncoder *st);
+
+/** Perform a CTL function on a multistream Opus encoder.
+ *
+ * Generally the request and subsequent arguments are generated by a
+ * convenience macro.
+ * @param st OpusMSEncoder*: Multistream encoder state.
+ * @param request This and all remaining parameters should be replaced by one
+ * of the convenience macros in @ref opus_genericctls,
+ * @ref opus_encoderctls, or @ref opus_multistream_ctls.
+ * @see opus_genericctls
+ * @see opus_encoderctls
+ * @see opus_multistream_ctls
+ */
+OPUS_EXPORT int opus_multistream_encoder_ctl(OpusMSEncoder *st, int request, ...) OPUS_ARG_NONNULL(1);
+
+/**@}*/
+
+/**\name Multistream decoder functions */
+/**@{*/
+
+/** Gets the size of an OpusMSDecoder structure.
+ * @param streams int: The total number of streams coded in the
+ * input.
+ * This must be no more than 255.
+ * @param coupled_streams int: Number of streams to decode as coupled
+ * (2 channel) streams.
+ * This must be no larger than the total
+ * number of streams.
+ * Additionally, The total number of
+ * coded channels (streams +
+ * coupled_streams) must be no
+ * more than 255.
+ * @returns The size in bytes on success, or a negative error code
+ * (see @ref opus_errorcodes) on error.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT opus_int32 opus_multistream_decoder_get_size(
+ int streams,
+ int coupled_streams
+);
+
+/** Allocates and initializes a multistream decoder state.
+ * Call opus_multistream_decoder_destroy() to release
+ * this object when finished.
+ * @param Fs opus_int32: Sampling rate to decode at (in Hz).
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param channels int: Number of channels to output.
+ * This must be at most 255.
+ * It may be different from the number of coded
+ * channels (streams +
+ * coupled_streams).
+ * @param streams int: The total number of streams coded in the
+ * input.
+ * This must be no more than 255.
+ * @param coupled_streams int: Number of streams to decode as coupled
+ * (2 channel) streams.
+ * This must be no larger than the total
+ * number of streams.
+ * Additionally, The total number of
+ * coded channels (streams +
+ * coupled_streams) must be no
+ * more than 255.
+ * @param[in] mapping const unsigned char[channels]: Mapping from
+ * coded channels to output channels, as described in
+ * @ref opus_multistream.
+ * @param[out] error int *: Returns #OPUS_OK on success, or an error
+ * code (see @ref opus_errorcodes) on
+ * failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT OpusMSDecoder *opus_multistream_decoder_create(
+ opus_int32 Fs,
+ int channels,
+ int streams,
+ int coupled_streams,
+ const unsigned char *mapping,
+ int *error
+) OPUS_ARG_NONNULL(5);
+
+/** Initialize a previously allocated decoder state object.
+ * The memory pointed to by \a st must be at least the size returned by
+ * opus_multistream_decoder_get_size().
+ * This is intended for applications which use their own allocator instead of
+ * malloc.
+ * To reset a previously initialized state, use the #OPUS_RESET_STATE CTL.
+ * @see opus_multistream_decoder_create
+ * @see opus_multistream_decoder_get_size
+ * @param st OpusMSDecoder*: Multistream decoder state to initialize.
+ * @param Fs opus_int32: Sampling rate to decode at (in Hz).
+ * This must be one of 8000, 12000, 16000,
+ * 24000, or 48000.
+ * @param channels int: Number of channels to output.
+ * This must be at most 255.
+ * It may be different from the number of coded
+ * channels (streams +
+ * coupled_streams).
+ * @param streams int: The total number of streams coded in the
+ * input.
+ * This must be no more than 255.
+ * @param coupled_streams int: Number of streams to decode as coupled
+ * (2 channel) streams.
+ * This must be no larger than the total
+ * number of streams.
+ * Additionally, The total number of
+ * coded channels (streams +
+ * coupled_streams) must be no
+ * more than 255.
+ * @param[in] mapping const unsigned char[channels]: Mapping from
+ * coded channels to output channels, as described in
+ * @ref opus_multistream.
+ * @returns #OPUS_OK on success, or an error code (see @ref opus_errorcodes)
+ * on failure.
+ */
+OPUS_EXPORT int opus_multistream_decoder_init(
+ OpusMSDecoder *st,
+ opus_int32 Fs,
+ int channels,
+ int streams,
+ int coupled_streams,
+ const unsigned char *mapping
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(6);
+
+/** Decode a multistream Opus packet.
+ * @param st OpusMSDecoder*: Multistream decoder state.
+ * @param[in] data const unsigned char*: Input payload.
+ * Use a NULL
+ * pointer to indicate packet
+ * loss.
+ * @param len opus_int32: Number of bytes in payload.
+ * @param[out] pcm opus_int16*: Output signal, with interleaved
+ * samples.
+ * This must contain room for
+ * frame_size*channels
+ * samples.
+ * @param frame_size int: The number of samples per channel of
+ * available space in \a pcm.
+ * If this is less than the maximum packet duration
+ * (120 ms; 5760 for 48kHz), this function will not be capable
+ * of decoding some packets. In the case of PLC (data==NULL)
+ * or FEC (decode_fec=1), then frame_size needs to be exactly
+ * the duration of audio that is missing, otherwise the
+ * decoder will not be in the optimal state to decode the
+ * next incoming packet. For the PLC and FEC cases, frame_size
+ * must be a multiple of 2.5 ms.
+ * @param decode_fec int: Flag (0 or 1) to request that any in-band
+ * forward error correction data be decoded.
+ * If no such data is available, the frame is
+ * decoded as if it were lost.
+ * @returns Number of samples decoded on success or a negative error code
+ * (see @ref opus_errorcodes) on failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_multistream_decode(
+ OpusMSDecoder *st,
+ const unsigned char *data,
+ opus_int32 len,
+ opus_int16 *pcm,
+ int frame_size,
+ int decode_fec
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Decode a multistream Opus packet with floating point output.
+ * @param st OpusMSDecoder*: Multistream decoder state.
+ * @param[in] data const unsigned char*: Input payload.
+ * Use a NULL
+ * pointer to indicate packet
+ * loss.
+ * @param len opus_int32: Number of bytes in payload.
+ * @param[out] pcm float*: Output signal, with interleaved
+ * samples.
+ * This must contain room for
+ * frame_size*channels
+ * samples.
+ * @param frame_size int: The number of samples per channel of
+ * available space in \a pcm.
+ * If this is less than the maximum packet duration
+ * (120 ms; 5760 for 48kHz), this function will not be capable
+ * of decoding some packets. In the case of PLC (data==NULL)
+ * or FEC (decode_fec=1), then frame_size needs to be exactly
+ * the duration of audio that is missing, otherwise the
+ * decoder will not be in the optimal state to decode the
+ * next incoming packet. For the PLC and FEC cases, frame_size
+ * must be a multiple of 2.5 ms.
+ * @param decode_fec int: Flag (0 or 1) to request that any in-band
+ * forward error correction data be decoded.
+ * If no such data is available, the frame is
+ * decoded as if it were lost.
+ * @returns Number of samples decoded on success or a negative error code
+ * (see @ref opus_errorcodes) on failure.
+ */
+OPUS_EXPORT OPUS_WARN_UNUSED_RESULT int opus_multistream_decode_float(
+ OpusMSDecoder *st,
+ const unsigned char *data,
+ opus_int32 len,
+ float *pcm,
+ int frame_size,
+ int decode_fec
+) OPUS_ARG_NONNULL(1) OPUS_ARG_NONNULL(4);
+
+/** Perform a CTL function on a multistream Opus decoder.
+ *
+ * Generally the request and subsequent arguments are generated by a
+ * convenience macro.
+ * @param st OpusMSDecoder*: Multistream decoder state.
+ * @param request This and all remaining parameters should be replaced by one
+ * of the convenience macros in @ref opus_genericctls,
+ * @ref opus_decoderctls, or @ref opus_multistream_ctls.
+ * @see opus_genericctls
+ * @see opus_decoderctls
+ * @see opus_multistream_ctls
+ */
+OPUS_EXPORT int opus_multistream_decoder_ctl(OpusMSDecoder *st, int request, ...) OPUS_ARG_NONNULL(1);
+
+/** Frees an OpusMSDecoder allocated by
+ * opus_multistream_decoder_create().
+ * @param st OpusMSDecoder*: Multistream decoder state to be freed.
+ */
+OPUS_EXPORT void opus_multistream_decoder_destroy(OpusMSDecoder *st);
+
+/**@}*/
+
+/**@}*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* OPUS_MULTISTREAM_H */
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_types.h b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_types.h
new file mode 100644
index 0000000..b28e03a
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/inc/opus_types.h
@@ -0,0 +1,159 @@
+/* (C) COPYRIGHT 1994-2002 Xiph.Org Foundation */
+/* Modified by Jean-Marc Valin */
+/*
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ - Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ - Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+/* opus_types.h based on ogg_types.h from libogg */
+
+/**
+ @file opus_types.h
+ @brief Opus reference implementation types
+*/
+#ifndef OPUS_TYPES_H
+#define OPUS_TYPES_H
+
+/* Use the real stdint.h if it's there (taken from Paul Hsieh's pstdint.h) */
+#if (defined(__STDC__) && __STDC__ && __STDC_VERSION__ >= 199901L) || (defined(__GNUC__) && (defined(_STDINT_H) || defined(_STDINT_H_)) || defined (HAVE_STDINT_H))
+#include <stdint.h>
+
+ typedef int16_t opus_int16;
+ typedef uint16_t opus_uint16;
+ typedef int32_t opus_int32;
+ typedef uint32_t opus_uint32;
+#elif defined(_WIN32)
+
+# if defined(__CYGWIN__)
+# include <_G_config.h>
+ typedef _G_int32_t opus_int32;
+ typedef _G_uint32_t opus_uint32;
+ typedef _G_int16 opus_int16;
+ typedef _G_uint16 opus_uint16;
+# elif defined(__MINGW32__)
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef int opus_int32;
+ typedef unsigned int opus_uint32;
+# elif defined(__MWERKS__)
+ typedef int opus_int32;
+ typedef unsigned int opus_uint32;
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+# else
+ /* MSVC/Borland */
+ typedef __int32 opus_int32;
+ typedef unsigned __int32 opus_uint32;
+ typedef __int16 opus_int16;
+ typedef unsigned __int16 opus_uint16;
+# endif
+
+#elif defined(__MACOS__)
+
+# include <sys/types.h>
+ typedef SInt16 opus_int16;
+ typedef UInt16 opus_uint16;
+ typedef SInt32 opus_int32;
+ typedef UInt32 opus_uint32;
+
+#elif (defined(__APPLE__) && defined(__MACH__)) /* MacOS X Framework build */
+
+# include <sys/types.h>
+ typedef int16_t opus_int16;
+ typedef u_int16_t opus_uint16;
+ typedef int32_t opus_int32;
+ typedef u_int32_t opus_uint32;
+
+#elif defined(__BEOS__)
+
+ /* Be */
+# include <inttypes.h>
+ typedef int16 opus_int16;
+ typedef u_int16 opus_uint16;
+ typedef int32_t opus_int32;
+ typedef u_int32_t opus_uint32;
+
+#elif defined (__EMX__)
+
+ /* OS/2 GCC */
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef int opus_int32;
+ typedef unsigned int opus_uint32;
+
+#elif defined (DJGPP)
+
+ /* DJGPP */
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef int opus_int32;
+ typedef unsigned int opus_uint32;
+
+#elif defined(R5900)
+
+ /* PS2 EE */
+ typedef int opus_int32;
+ typedef unsigned opus_uint32;
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+
+#elif defined(__SYMBIAN32__)
+
+ /* Symbian GCC */
+ typedef signed short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef signed int opus_int32;
+ typedef unsigned int opus_uint32;
+
+#elif defined(CONFIG_TI_C54X) || defined (CONFIG_TI_C55X)
+
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef long opus_int32;
+ typedef unsigned long opus_uint32;
+
+#elif defined(CONFIG_TI_C6X)
+
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef int opus_int32;
+ typedef unsigned int opus_uint32;
+
+#else
+
+ /* Give up, take a reasonable guess */
+ typedef short opus_int16;
+ typedef unsigned short opus_uint16;
+ typedef int opus_int32;
+ typedef unsigned int opus_uint32;
+
+#endif
+
+#define opus_int int /* used for counters etc; at least 16 bits */
+#define opus_int64 long long
+#define opus_int8 signed char
+
+#define opus_uint unsigned int /* used for counters etc; at least 16 bits */
+#define opus_uint64 unsigned long long
+#define opus_uint8 unsigned char
+
+#endif /* OPUS_TYPES_H */
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec.c b/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec.c
new file mode 100644
index 0000000..c3514a8
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec.c
@@ -0,0 +1,54 @@
+#include <stdlib.h>
+#include <opus.h>
+#include "nv_opus_dec.h"
+
+OpusDecoder* decoder;
+
+// This function must be called before
+// any other decoding functions
+int nv_opus_init(void) {
+ int err;
+ decoder = opus_decoder_create(
+ nv_opus_get_sample_rate(),
+ nv_opus_get_channel_count(),
+ &err);
+ return err;
+}
+
+// This function must be called after
+// decoding is finished
+void nv_opus_destroy(void) {
+ if (decoder != NULL) {
+ opus_decoder_destroy(decoder);
+ }
+}
+
+// The Opus stream is stereo
+int nv_opus_get_channel_count(void) {
+ return 2;
+}
+
+// This number assumes 2 channels at 48 KHz
+int nv_opus_get_max_out_shorts(void) {
+ return 512*nv_opus_get_channel_count();
+}
+
+// The Opus stream is 48 KHz
+int nv_opus_get_sample_rate(void) {
+ return 48000;
+}
+
+// outpcmdata must be at least 512*2 shorts in length (see nv_opus_get_max_out_shorts)
+// packets must be decoded in order
+// a packet loss must call this function with NULL indata and 0 inlen
+// returns the number of decoded samples
+int nv_opus_decode(unsigned char* indata, int inlen, short* outpcmdata) {
+ int err;
+
+ // Decoding to 16-bit PCM with FEC off
+	// Decode up to 512 samples per channel (matches nv_opus_get_max_out_shorts)
+ err = opus_decode(decoder, indata, inlen,
+ outpcmdata, 512, 0);
+
+ return err;
+}
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec.h b/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec.h
new file mode 100644
index 0000000..c5eb7f5
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec.h
@@ -0,0 +1,6 @@
+int nv_opus_init(void);
+void nv_opus_destroy(void);
+int nv_opus_get_channel_count(void);
+int nv_opus_get_max_out_shorts(void);
+int nv_opus_get_sample_rate(void);
+int nv_opus_decode(unsigned char* indata, int inlen, short* outpcmdata);
diff --git a/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec_jni.c b/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec_jni.c
new file mode 100644
index 0000000..1df6988
--- /dev/null
+++ b/limelight-pc/jni/nv_opus_dec/libopus/nv_opus_dec_jni.c
@@ -0,0 +1,68 @@
+#include "nv_opus_dec.h"
+
+#include <stdlib.h>
+#include <jni.h>
+
+// This function must be called before
+// any other decoding functions
+JNIEXPORT jint JNICALL
+Java_com_limelight_nvstream_av_audio_OpusDecoder_init(JNIEnv *env, jobject this) {
+ return nv_opus_init();
+}
+
+// This function must be called after
+// decoding is finished
+JNIEXPORT void JNICALL
+Java_com_limelight_nvstream_av_audio_OpusDecoder_destroy(JNIEnv *env, jobject this) {
+ nv_opus_destroy();
+}
+
+// The Opus stream is stereo
+JNIEXPORT jint JNICALL
+Java_com_limelight_nvstream_av_audio_OpusDecoder_getChannelCount(JNIEnv *env, jobject this) {
+ return nv_opus_get_channel_count();
+}
+
+// This number assumes 2 channels at 48 KHz
+JNIEXPORT jint JNICALL
+Java_com_limelight_nvstream_av_audio_OpusDecoder_getMaxOutputShorts(JNIEnv *env, jobject this) {
+ return nv_opus_get_max_out_shorts();
+}
+
+// The Opus stream is 48 KHz
+JNIEXPORT jint JNICALL
+Java_com_limelight_nvstream_av_audio_OpusDecoder_getSampleRate(JNIEnv *env, jobject this) {
+ return nv_opus_get_sample_rate();
+}
+
+// outpcmdata must be at least 512*2 shorts in length (see nv_opus_get_max_out_shorts)
+// packets must be decoded in order
+// a packet loss must call this function with NULL indata and 0 inlen
+// returns the number of decoded samples
+JNIEXPORT jint JNICALL
+Java_com_limelight_nvstream_av_audio_OpusDecoder_decode(
+ JNIEnv *env, jobject this, // JNI parameters
+ jbyteArray indata, jint inoff, jint inlen, // Input parameters
+ jshortArray outpcmdata) // Output parameter
+{
+ jint ret;
+ jbyte* jni_input_data;
+ jshort* jni_pcm_data;
+
+ jni_pcm_data = (*env)->GetShortArrayElements(env, outpcmdata, 0);
+ if (indata != NULL) {
+ jni_input_data = (*env)->GetByteArrayElements(env, indata, 0);
+
+ ret = nv_opus_decode(&jni_input_data[inoff], inlen, jni_pcm_data);
+
+ // The input data isn't changed so it can be safely aborted
+ (*env)->ReleaseByteArrayElements(env, indata, jni_input_data, JNI_ABORT);
+ }
+ else {
+ ret = nv_opus_decode(NULL, 0, jni_pcm_data);
+ }
+
+ (*env)->ReleaseShortArrayElements(env, outpcmdata, jni_pcm_data, 0);
+
+ return ret;
+}
diff --git a/limelight-pc/src/com/limelight/#Limelight.java# b/limelight-pc/src/com/limelight/#Limelight.java#
new file mode 100644
index 0000000..16aa6e8
--- /dev/null
+++ b/limelight-pc/src/com/limelight/#Limelight.java#
@@ -0,0 +1,30 @@
+package com.limelight;
+
+import com.limelight.gui.MainFrame;
+import com.limelight.gui.StreamFrame;
+
+public class Limelight {
+ public static final double VERSION = 1.0;
+
+ private final String HOST;
+
+
+ public Limelight(String host) {
+ this.HOST = host;
+ }
+
+ private void startUp() {
+ StreamFrame streamFrame = new StreamFrame();
+ streamFrame.build();
+ }
+
+ public static void createInstance(String host) {
+ Limelight limelight = new Limelight(host);
+ limelight.startUp();
+ }
+
+ public static void main(String args[]) {
+ MainFrame limeFrame = new MainFrame();
+ limeFrame.build();
+ }
+}
diff --git a/limelight-pc/src/com/limelight/Limelight.java b/limelight-pc/src/com/limelight/Limelight.java
new file mode 100644
index 0000000..5db0bc1
--- /dev/null
+++ b/limelight-pc/src/com/limelight/Limelight.java
@@ -0,0 +1,66 @@
+package com.limelight;
+
+import javax.swing.JOptionPane;
+
+import com.limelight.gui.MainFrame;
+import com.limelight.gui.StreamFrame;
+import com.limelight.nvstream.NvConnection;
+import com.limelight.nvstream.NvConnectionListener;
+
+public class Limelight implements NvConnectionListener {
+ public static final double VERSION = 1.0;
+
+ private String host;
+ private StreamFrame streamFrame;
+ private NvConnection conn;
+
+ public Limelight(String host) {
+ this.host = host;
+ }
+
+ private void startUp() {
+ streamFrame = new StreamFrame();
+ streamFrame.build();
+ conn = new NvConnection(host, streamFrame, this);
+ conn.start();
+ }
+
+ public static void createInstance(String host) {
+ Limelight limelight = new Limelight(host);
+ limelight.startUp();
+ }
+
+ public static void main(String args[]) {
+ MainFrame limeFrame = new MainFrame();
+ limeFrame.build();
+ }
+
+ @Override
+ public void stageStarting(Stage stage) {
+ System.out.println("Starting "+stage.getName());
+
+ }
+
+ @Override
+ public void stageComplete(Stage stage) {
+ }
+
+ @Override
+ public void stageFailed(Stage stage) {
+ JOptionPane.showMessageDialog(streamFrame, "Starting "+stage.getName()+" failed", "Connection Error", JOptionPane.ERROR_MESSAGE);
+ conn.stop();
+
+ }
+
+ @Override
+ public void connectionStarted() {
+ }
+
+ @Override
+ public void connectionTerminated(Exception e) {
+ e.printStackTrace();
+ JOptionPane.showMessageDialog(streamFrame, "The connection failed unexpectedly", "Connection Terminated", JOptionPane.ERROR_MESSAGE);
+ conn.stop();
+ }
+}
+
diff --git a/limelight-pc/src/com/limelight/gui/MainFrame.java b/limelight-pc/src/com/limelight/gui/MainFrame.java
new file mode 100644
index 0000000..38e0c06
--- /dev/null
+++ b/limelight-pc/src/com/limelight/gui/MainFrame.java
@@ -0,0 +1,90 @@
+package com.limelight.gui;
+
+import java.awt.BorderLayout;
+import java.awt.Container;
+import java.awt.Dimension;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+
+import javax.swing.Box;
+import javax.swing.BoxLayout;
+import javax.swing.JButton;
+import javax.swing.JFrame;
+import javax.swing.JPanel;
+import javax.swing.JTextField;
+
+import com.limelight.Limelight;
+
+public class MainFrame {
+ private JTextField hostField;
+ private JButton pair;
+ private JButton stream;
+
+ public MainFrame() {
+ }
+
+ public void build() {
+ JFrame limeFrame = new JFrame("Limelight V" + Limelight.VERSION);
+ limeFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ Container mainPane = limeFrame.getContentPane();
+
+ mainPane.setLayout(new BorderLayout());
+
+ JPanel centerPane = new JPanel();
+ centerPane.setLayout(new BoxLayout(centerPane, BoxLayout.Y_AXIS));
+
+ hostField = new JTextField();
+ hostField.setMaximumSize(new Dimension(Integer.MAX_VALUE, 24));
+ hostField.setToolTipText("Enter host name or IP address");
+ hostField.setText("GeForce PC host");
+
+ stream = new JButton("Start Streaming");
+ stream.addActionListener(createStreamButtonListener());
+ stream.setToolTipText("Start the GeForce stream");
+
+ pair = new JButton("Pair");
+ pair.addActionListener(createPairButtonListener());
+ pair.setToolTipText("Send pair request to GeForce PC");
+
+
+ Box streamBox = Box.createHorizontalBox();
+ streamBox.add(Box.createHorizontalGlue());
+ streamBox.add(stream);
+ streamBox.add(Box.createHorizontalGlue());
+
+ Box pairBox = Box.createHorizontalBox();
+ pairBox.add(Box.createHorizontalGlue());
+ pairBox.add(pair);
+ pairBox.add(Box.createHorizontalGlue());
+
+ Box contentBox = Box.createVerticalBox();
+ contentBox.add(Box.createVerticalStrut(20));
+ contentBox.add(hostField);
+ contentBox.add(Box.createVerticalStrut(10));
+ contentBox.add(streamBox);
+ contentBox.add(Box.createVerticalStrut(10));
+ contentBox.add(pairBox);
+ contentBox.add(Box.createVerticalGlue());
+
+
+ centerPane.add(contentBox);
+ mainPane.add(centerPane, "Center");
+
+ limeFrame.setSize(1000, 800);
+ limeFrame.setVisible(true);
+
+ }
+
+ private ActionListener createStreamButtonListener() {
+ return new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ Limelight.createInstance(hostField.getText());
+ }
+ };
+ }
+
+ private ActionListener createPairButtonListener() {
+ return null;
+ }
+}
diff --git a/limelight-pc/src/com/limelight/gui/StreamFrame.java b/limelight-pc/src/com/limelight/gui/StreamFrame.java
new file mode 100644
index 0000000..a03468b
--- /dev/null
+++ b/limelight-pc/src/com/limelight/gui/StreamFrame.java
@@ -0,0 +1,11 @@
+package com.limelight.gui;
+
+import javax.swing.JFrame;
+
+public class StreamFrame extends JFrame {
+ private static final long serialVersionUID = 1L;
+
+ public void build() {
+
+ }
+}
diff --git a/limelight-pc/src/com/limelight/input/KeyboardHandler.java b/limelight-pc/src/com/limelight/input/KeyboardHandler.java
new file mode 100644
index 0000000..e0f5da5
--- /dev/null
+++ b/limelight-pc/src/com/limelight/input/KeyboardHandler.java
@@ -0,0 +1,23 @@
+package com.limelight.input;
+
+import java.awt.event.KeyEvent;
+import java.awt.event.KeyListener;
+
+public class KeyboardHandler implements KeyListener {
+
+ @Override
+ public void keyPressed(KeyEvent event) {
+
+ }
+
+ @Override
+ public void keyReleased(KeyEvent event) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void keyTyped(KeyEvent event) {
+ }
+
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvApp.java b/limelight-pc/src/com/limelight/nvstream/NvApp.java
new file mode 100644
index 0000000..4ae0cc2
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvApp.java
@@ -0,0 +1,31 @@
+package com.limelight.nvstream;
+
+public class NvApp {
+ private String appName;
+ private int appId;
+ private boolean isRunning;
+
+ public void setAppName(String appName) {
+ this.appName = appName;
+ }
+
+ public void setAppId(String appId) {
+ this.appId = Integer.parseInt(appId);
+ }
+
+ public void setIsRunning(String isRunning) {
+ this.isRunning = isRunning.equals("1");
+ }
+
+ public String getAppName() {
+ return this.appName;
+ }
+
+ public int getAppId() {
+ return this.appId;
+ }
+
+ public boolean getIsRunning() {
+ return this.isRunning;
+ }
+}
\ No newline at end of file
diff --git a/limelight-pc/src/com/limelight/nvstream/NvAudioStream.java b/limelight-pc/src/com/limelight/nvstream/NvAudioStream.java
new file mode 100644
index 0000000..94d2fc4
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvAudioStream.java
@@ -0,0 +1,250 @@
+package com.limelight.nvstream;
+
+import java.io.IOException;
+import java.net.DatagramPacket;
+import java.net.DatagramSocket;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.SocketException;
+import java.util.LinkedList;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.SourceDataLine;
+
+import com.limelight.nvstream.av.AvByteBufferDescriptor;
+import com.limelight.nvstream.av.AvRtpPacket;
+import com.limelight.nvstream.av.AvShortBufferDescriptor;
+import com.limelight.nvstream.av.audio.AvAudioDepacketizer;
+import com.limelight.nvstream.av.audio.OpusDecoder;
+
+public class NvAudioStream {
+ public static final int RTP_PORT = 48000;
+ public static final int RTCP_PORT = 47999;
+
+	private LinkedBlockingQueue<AvRtpPacket> packets = new LinkedBlockingQueue<AvRtpPacket>(100);
+
+ private SourceDataLine track;
+
+ private DatagramSocket rtp;
+
+ private AvAudioDepacketizer depacketizer = new AvAudioDepacketizer();
+
+	private LinkedList<Thread> threads = new LinkedList<Thread>();
+
+ private boolean aborting = false;
+
+ private InetAddress host;
+ private NvConnectionListener listener;
+
+ public NvAudioStream(InetAddress host, NvConnectionListener listener)
+ {
+ this.host = host;
+ this.listener = listener;
+ }
+
+ public void abort()
+ {
+ if (aborting) {
+ return;
+ }
+
+ aborting = true;
+
+ for (Thread t : threads) {
+ t.interrupt();
+ }
+
+ // Close the socket to interrupt the receive thread
+ if (rtp != null) {
+ rtp.close();
+ }
+
+ // Wait for threads to terminate
+ for (Thread t : threads) {
+ try {
+ t.join();
+ } catch (InterruptedException e) { }
+ }
+
+ if (track != null) {
+ track.close();
+ }
+
+ threads.clear();
+ }
+
+ public void startAudioStream() throws SocketException
+ {
+ setupRtpSession();
+
+ setupAudio();
+
+ startReceiveThread();
+
+ startDepacketizerThread();
+
+ startDecoderThread();
+
+ startUdpPingThread();
+ }
+
+ private void setupRtpSession() throws SocketException
+ {
+ rtp = new DatagramSocket(RTP_PORT);
+ }
+
+ private void setupAudio()
+ {
+ int channelConfig;
+ int err;
+
+ err = OpusDecoder.init();
+ if (err != 0) {
+ throw new IllegalStateException("Opus decoder failed to initialize");
+ }
+
+ switch (OpusDecoder.getChannelCount())
+ {
+ case 1:
+ channelConfig = 1;
+ break;
+ case 2:
+ channelConfig = 2;
+ break;
+ default:
+ throw new IllegalStateException("Opus decoder returned unhandled channel count");
+ }
+
+ /*
+ track = new AudioTrack(AudioManager.STREAM_MUSIC,
+ OpusDecoder.getSampleRate(),
+ channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT,
+ 1024, // 1KB buffer
+ AudioTrack.MODE_STREAM);
+
+ track.play();*/
+ }
+
+ private void startDepacketizerThread()
+ {
+ // This thread lessens the work on the receive thread
+ // so it can spend more time waiting for data
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ while (!isInterrupted())
+ {
+ AvRtpPacket packet;
+
+ try {
+ packet = packets.take();
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ depacketizer.decodeInputData(packet);
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Audio - Depacketizer");
+ t.start();
+ }
+
+ private void startDecoderThread()
+ {
+ // Decoder thread
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ while (!isInterrupted())
+ {
+ AvShortBufferDescriptor samples;
+
+ try {
+ samples = depacketizer.getNextDecodedData();
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ //track.write(samples.data, samples.offset, samples.length);
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Audio - Player");
+ t.start();
+ }
+
+ private void startReceiveThread()
+ {
+ // Receive thread
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ AvByteBufferDescriptor desc = new AvByteBufferDescriptor(new byte[1500], 0, 1500);
+ DatagramPacket packet = new DatagramPacket(desc.data, desc.length);
+
+ while (!isInterrupted())
+ {
+ try {
+ rtp.receive(packet);
+ } catch (IOException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ // Give the packet to the depacketizer thread
+ desc.length = packet.getLength();
+ if (packets.offer(new AvRtpPacket(desc))) {
+ desc.reinitialize(new byte[1500], 0, 1500);
+ packet.setData(desc.data, desc.offset, desc.length);
+ }
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Audio - Receive");
+ t.start();
+ }
+
+ private void startUdpPingThread()
+ {
+ // Ping thread
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ // PING in ASCII
+ final byte[] pingPacketData = new byte[] {0x50, 0x49, 0x4E, 0x47};
+ DatagramPacket pingPacket = new DatagramPacket(pingPacketData, pingPacketData.length);
+ pingPacket.setSocketAddress(new InetSocketAddress(host, RTP_PORT));
+
+ // Send PING every 100 ms
+ while (!isInterrupted())
+ {
+ try {
+ rtp.send(pingPacket);
+ } catch (IOException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+ }
+ }
+ };
+ threads.add(t);
+ t.setPriority(Thread.MIN_PRIORITY);
+ t.setName("Audio - Ping");
+ t.start();
+ }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvConnection.java b/limelight-pc/src/com/limelight/nvstream/NvConnection.java
new file mode 100644
index 0000000..8dcc220
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvConnection.java
@@ -0,0 +1,245 @@
+package com.limelight.nvstream;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.net.UnknownHostException;
+import java.util.Enumeration;
+import java.util.concurrent.ThreadPoolExecutor;
+
+import javax.swing.JFrame;
+import javax.swing.JOptionPane;
+import javax.swing.SwingUtilities;
+import javax.xml.stream.XMLStreamException;
+
+import com.limelight.nvstream.input.NvController;
+
+public class NvConnection {
+ private String host;
+ private JFrame parent;
+ private NvConnectionListener listener;
+ private int drFlags;
+
+ private InetAddress hostAddr;
+ private NvControl controlStream;
+ private NvController inputStream;
+ private NvVideoStream videoStream;
+ private NvAudioStream audioStream;
+
+ private ThreadPoolExecutor threadPool;
+
+ public NvConnection(String host, JFrame parent, NvConnectionListener listener) {
+ this.host = host;
+ this.parent = parent;
+ this.listener = listener;
+ }
+
+ public static String getMacAddressString() throws SocketException {
+ Enumeration ifaceList;
+ NetworkInterface selectedIface = null;
+
+ // First look for a WLAN interface (since those generally aren't removable)
+ ifaceList = NetworkInterface.getNetworkInterfaces();
+ while (selectedIface == null && ifaceList.hasMoreElements()) {
+ NetworkInterface iface = ifaceList.nextElement();
+
+ if (iface.getName().startsWith("wlan") &&
+ iface.getHardwareAddress() != null) {
+ selectedIface = iface;
+ }
+ }
+
+ // If we didn't find that, look for an Ethernet interface
+ ifaceList = NetworkInterface.getNetworkInterfaces();
+ while (selectedIface == null && ifaceList.hasMoreElements()) {
+ NetworkInterface iface = ifaceList.nextElement();
+
+ if (iface.getName().startsWith("eth") &&
+ iface.getHardwareAddress() != null) {
+ selectedIface = iface;
+ }
+ }
+
+ // Now just find something with a MAC address
+ ifaceList = NetworkInterface.getNetworkInterfaces();
+ while (selectedIface == null && ifaceList.hasMoreElements()) {
+ NetworkInterface iface = ifaceList.nextElement();
+
+ if (iface.getHardwareAddress() != null) {
+ selectedIface = ifaceList.nextElement();
+ break;
+ }
+ }
+
+ if (selectedIface == null) {
+ return null;
+ }
+
+ byte[] macAddress = selectedIface.getHardwareAddress();
+ if (macAddress != null) {
+ StringBuilder addrStr = new StringBuilder();
+ for (int i = 0; i < macAddress.length; i++) {
+ addrStr.append(String.format("%02x", macAddress[i]));
+ if (i != macAddress.length - 1) {
+ addrStr.append(':');
+ }
+ }
+ return addrStr.toString();
+ }
+
+ return null;
+ }
+
+ public void start() {
+ new Thread(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ hostAddr = InetAddress.getByName(host);
+ } catch (UnknownHostException e) {
+ e.printStackTrace();
+ displayMessage(e.getMessage());
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ establishConnection();
+
+ }
+ }).start();
+ }
+
+
+ private void establishConnection() {
+ for (NvConnectionListener.Stage currentStage : NvConnectionListener.Stage.values())
+ {
+ boolean success = false;
+
+ listener.stageStarting(currentStage);
+ try {
+ switch (currentStage)
+ {
+ case LAUNCH_APP:
+ success = startSteamBigPicture();
+ break;
+
+ case HANDSHAKE:
+ success = NvHandshake.performHandshake(hostAddr);
+ break;
+
+ case CONTROL_START:
+ success = startControlStream();
+ break;
+
+ case VIDEO_START:
+ success = startVideoStream();
+ break;
+
+ case AUDIO_START:
+ success = startAudioStream();
+ break;
+
+ case CONTROL_START2:
+ controlStream.startJitterPackets();
+ success = true;
+ break;
+
+ case INPUT_START:
+ success = startInputConnection();
+ break;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ success = false;
+ }
+
+ if (success) {
+ listener.stageComplete(currentStage);
+ }
+ else {
+ listener.stageFailed(currentStage);
+ return;
+ }
+ }
+
+ listener.connectionStarted();
+ }
+
+ private boolean startSteamBigPicture() throws XMLStreamException, IOException
+ {
+ System.out.println(hostAddr.toString() + "\t" + getMacAddressString());
+ NvHTTP h = new NvHTTP(hostAddr.toString(), "");//getMacAddressString());
+
+ if (!h.getPairState()) {
+ displayMessage("Device not paired with computer");
+ return false;
+ }
+
+ int sessionId = h.getSessionId();
+ int appId = h.getSteamAppId(sessionId);
+
+ h.launchApp(sessionId, appId);
+
+ return true;
+ }
+
+ private boolean startControlStream() throws IOException
+ {
+ controlStream = new NvControl(hostAddr, listener);
+ controlStream.initialize();
+ controlStream.start();
+ return true;
+ }
+
+ private boolean startVideoStream() throws IOException
+ {
+ videoStream = new NvVideoStream(hostAddr, listener, controlStream);
+ //videoStream.startVideoStream(video, drFlags);
+ return true;
+ }
+
+ private boolean startAudioStream() throws IOException
+ {
+ audioStream = new NvAudioStream(hostAddr, listener);
+ audioStream.startAudioStream();
+ return true;
+ }
+
+ private boolean startInputConnection() throws IOException
+ {
+ inputStream = new NvController(hostAddr);
+ inputStream.initialize();
+ return true;
+ }
+
+ public void stop()
+ {
+ threadPool.shutdownNow();
+
+ if (videoStream != null) {
+ videoStream.abort();
+ }
+ if (audioStream != null) {
+ audioStream.abort();
+ }
+
+ if (controlStream != null) {
+ controlStream.abort();
+ }
+
+ if (inputStream != null) {
+ inputStream.close();
+ inputStream = null;
+ }
+ }
+
+ private void displayMessage(final String text) {
+ SwingUtilities.invokeLater(new Runnable() {
+ @Override
+ public void run() {
+ JOptionPane.showMessageDialog(parent, text);
+ }
+ });
+ }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvConnectionListener.java b/limelight-pc/src/com/limelight/nvstream/NvConnectionListener.java
new file mode 100644
index 0000000..c929423
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvConnectionListener.java
@@ -0,0 +1,30 @@
+package com.limelight.nvstream;
+
/**
 * Callback interface for connection lifecycle events. Stage callbacks fire
 * in Stage declaration order while the connection is being established;
 * connectionStarted/connectionTerminated report the overall outcome.
 */
public interface NvConnectionListener {

	/** Connection-establishment stages, in the order they are performed. */
	public enum Stage {
		LAUNCH_APP("app"),
		HANDSHAKE("handshake"),
		CONTROL_START("control connection"),
		VIDEO_START("video stream"),
		AUDIO_START("audio stream"),
		CONTROL_START2("control connection"),
		INPUT_START("input connection");

		// Human-readable name for progress/error UI
		private final String name;

		private Stage(String name) {
			this.name = name;
		}

		/** Returns the human-readable name of this stage. */
		public String getName() {
			return name;
		}
	}

	/** Called when the given stage begins. */
	public void stageStarting(Stage stage);

	/** Called when the given stage finishes successfully. */
	public void stageComplete(Stage stage);

	/** Called when the given stage fails; no further stages will run. */
	public void stageFailed(Stage stage);

	/** Called once all stages have completed and streaming is live. */
	public void connectionStarted();

	/** Called when an established connection is lost. */
	public void connectionTerminated(Exception e);
}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvControl.java b/limelight-pc/src/com/limelight/nvstream/NvControl.java
new file mode 100644
index 0000000..4c08ea8
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvControl.java
@@ -0,0 +1,483 @@
+package com.limelight.nvstream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import com.limelight.nvstream.av.ConnectionStatusListener;
+
+public class NvControl implements ConnectionStatusListener {
+
+ public static final int PORT = 47995;
+
+ public static final int CONTROL_TIMEOUT = 5000;
+
+ public static final short PTYPE_HELLO = 0x1204;
+ public static final short PPAYLEN_HELLO = 0x0004;
+ public static final byte[] PPAYLOAD_HELLO =
+ {
+ (byte)0x00,
+ (byte)0x05,
+ (byte)0x00,
+ (byte)0x00
+ };
+
+ public static final short PTYPE_KEEPALIVE = 0x13ff;
+ public static final short PPAYLEN_KEEPALIVE = 0x0000;
+
+ public static final short PTYPE_HEARTBEAT = 0x1401;
+ public static final short PPAYLEN_HEARTBEAT = 0x0000;
+
+ public static final short PTYPE_1405 = 0x1405;
+ public static final short PPAYLEN_1405 = 0x0000;
+
+ public static final short PTYPE_RESYNC = 0x1404;
+ public static final short PPAYLEN_RESYNC = 16;
+
+ public static final short PTYPE_CONFIG = 0x1205;
+ public static final short PPAYLEN_CONFIG = 0x0004;
+ public static final int[] PPAYLOAD_CONFIG =
+ {
+ 720,
+ 266758,
+ 1,
+ 266762,
+ 30,
+ 70151,
+ 68291329,
+ 1280,
+ 68291584,
+ 1280,
+ 68291840,
+ 15360,
+ 68292096,
+ 25600,
+ 68292352,
+ 2048,
+ 68292608,
+ 1024,
+ 68289024,
+ 262144,
+ 17957632,
+ 302055424,
+ 134217729,
+ 16777490,
+ 70153,
+ 68293120,
+ 768000,
+ 17961216,
+ 303235072,
+ 335609857,
+ 838861842,
+ 352321536,
+ 1006634002,
+ 369098752,
+ 335545362,
+ 385875968,
+ 1042,
+ 402653184,
+ 134218770,
+ 419430400,
+ 167773202,
+ 436207616,
+ 855638290,
+ 266779,
+ 7000,
+ 266780,
+ 2000,
+ 266781,
+ 50,
+ 266782,
+ 3000,
+ 266783,
+ 2,
+ 266794,
+ 5000,
+ 266795,
+ 500,
+ 266784,
+ 75,
+ 266785,
+ 25,
+ 266786,
+ 10,
+ 266787,
+ 60,
+ 266788,
+ 30,
+ 266789,
+ 3,
+ 266790,
+ 1000,
+ 266791,
+ 5000,
+ 266792,
+ 5000,
+ 266793,
+ 5000,
+ 70190,
+ 68301063,
+ 10240,
+ 68301312,
+ 6400,
+ 68301568,
+ 768000,
+ 68299776,
+ 768,
+ 68300032,
+ 2560,
+ 68300544,
+ 0,
+ 34746368,
+ (int)0xFE000000
+ };
+
+
+ public static final short PTYPE_JITTER = 0x140c;
+ public static final short PPAYLEN_JITTER = 0x10;
+
+ private int seqNum;
+
+ private NvConnectionListener listener;
+ private InetAddress host;
+
+ private Socket s;
+ private InputStream in;
+ private OutputStream out;
+
+ private Thread heartbeatThread;
+ private Thread jitterThread;
+ private boolean aborting = false;
+
+ public NvControl(InetAddress host, NvConnectionListener listener)
+ {
+ this.listener = listener;
+ this.host = host;
+ }
+
+ public void initialize() throws IOException
+ {
+ s = new Socket();
+ s.setSoTimeout(CONTROL_TIMEOUT);
+ s.connect(new InetSocketAddress(host, PORT), CONTROL_TIMEOUT);
+ in = s.getInputStream();
+ out = s.getOutputStream();
+ }
+
+ private void sendPacket(NvCtlPacket packet) throws IOException
+ {
+ out.write(packet.toWire());
+ out.flush();
+ }
+
+ private NvControl.NvCtlResponse sendAndGetReply(NvCtlPacket packet) throws IOException
+ {
+ sendPacket(packet);
+ return new NvCtlResponse(in);
+ }
+
+ private void sendJitter() throws IOException
+ {
+ ByteBuffer bb = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
+
+ bb.putInt(0);
+ bb.putInt(77);
+ bb.putInt(888);
+ bb.putInt(seqNum += 2);
+
+ sendPacket(new NvCtlPacket(PTYPE_JITTER, PPAYLEN_JITTER, bb.array()));
+ }
+
+ public void abort()
+ {
+ if (aborting) {
+ return;
+ }
+
+ aborting = true;
+
+ if (jitterThread != null) {
+ jitterThread.interrupt();
+ }
+
+ if (heartbeatThread != null) {
+ heartbeatThread.interrupt();
+ }
+
+ try {
+ s.close();
+ } catch (IOException e) {}
+ }
+
+ public void requestResync() throws IOException
+ {
+ System.out.println("CTL: Requesting IDR frame");
+ sendResync();
+ }
+
+ public void start() throws IOException
+ {
+ sendHello();
+ sendConfig();
+ pingPong();
+ send1405AndGetResponse();
+
+ heartbeatThread = new Thread() {
+ @Override
+ public void run() {
+ while (!isInterrupted())
+ {
+ try {
+ sendHeartbeat();
+ } catch (IOException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+
+ try {
+ Thread.sleep(3000);
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+ }
+ }
+ };
+ heartbeatThread.start();
+ }
+
+ public void startJitterPackets()
+ {
+ jitterThread = new Thread() {
+ @Override
+ public void run() {
+ while (!isInterrupted())
+ {
+ try {
+ sendJitter();
+ } catch (IOException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+ }
+ }
+ };
+ jitterThread.start();
+ }
+
+ private NvControl.NvCtlResponse send1405AndGetResponse() throws IOException
+ {
+ return sendAndGetReply(new NvCtlPacket(PTYPE_1405, PPAYLEN_1405));
+ }
+
+ private void sendHello() throws IOException
+ {
+ sendPacket(new NvCtlPacket(PTYPE_HELLO, PPAYLEN_HELLO, PPAYLOAD_HELLO));
+ }
+
+ private void sendResync() throws IOException
+ {
+ ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLEN_RESYNC]).order(ByteOrder.LITTLE_ENDIAN);
+
+ conf.putLong(0);
+ conf.putLong(0xFFFF);
+
+ sendAndGetReply(new NvCtlPacket(PTYPE_RESYNC, PPAYLEN_RESYNC, conf.array()));
+ }
+
+ private void sendConfig() throws IOException
+ {
+ ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLOAD_CONFIG.length * 4 + 3]).order(ByteOrder.LITTLE_ENDIAN);
+
+ for (int i : PPAYLOAD_CONFIG)
+ conf.putInt(i);
+
+ conf.putShort((short)0x0013);
+ conf.put((byte) 0x00);
+
+ sendPacket(new NvCtlPacket(PTYPE_CONFIG, PPAYLEN_CONFIG, conf.array()));
+ }
+
+ private void sendHeartbeat() throws IOException
+ {
+ sendPacket(new NvCtlPacket(PTYPE_HEARTBEAT, PPAYLEN_HEARTBEAT));
+ }
+
+ private NvControl.NvCtlResponse pingPong() throws IOException
+ {
+ sendPacket(new NvCtlPacket(PTYPE_KEEPALIVE, PPAYLEN_KEEPALIVE));
+ return new NvControl.NvCtlResponse(in);
+ }
+
+ class NvCtlPacket {
+ public short type;
+ public short paylen;
+ public byte[] payload;
+
+ public NvCtlPacket(InputStream in) throws IOException
+ {
+ byte[] header = new byte[4];
+
+ int offset = 0;
+ do
+ {
+ int bytesRead = in.read(header, offset, header.length - offset);
+ if (bytesRead < 0) {
+ break;
+ }
+ offset += bytesRead;
+ } while (offset != header.length);
+
+ if (offset != header.length) {
+ throw new IOException("Socket closed prematurely");
+ }
+
+ ByteBuffer bb = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);
+
+ type = bb.getShort();
+ paylen = bb.getShort();
+
+ if (paylen != 0)
+ {
+ payload = new byte[paylen];
+
+ offset = 0;
+ do
+ {
+ int bytesRead = in.read(payload, offset, payload.length - offset);
+ if (bytesRead < 0) {
+ break;
+ }
+ offset += bytesRead;
+ } while (offset != payload.length);
+
+ if (offset != payload.length) {
+ throw new IOException("Socket closed prematurely");
+ }
+ }
+ }
+
+ public NvCtlPacket(byte[] payload)
+ {
+ ByteBuffer bb = ByteBuffer.wrap(payload).order(ByteOrder.LITTLE_ENDIAN);
+
+ type = bb.getShort();
+ paylen = bb.getShort();
+
+ if (bb.hasRemaining())
+ {
+ payload = new byte[bb.remaining()];
+ bb.get(payload);
+ }
+ }
+
+ public NvCtlPacket(short type, short paylen)
+ {
+ this.type = type;
+ this.paylen = paylen;
+ }
+
+ public NvCtlPacket(short type, short paylen, byte[] payload)
+ {
+ this.type = type;
+ this.paylen = paylen;
+ this.payload = payload;
+ }
+
+ public short getType()
+ {
+ return type;
+ }
+
+ public short getPaylen()
+ {
+ return paylen;
+ }
+
+ public void setType(short type)
+ {
+ this.type = type;
+ }
+
+ public void setPaylen(short paylen)
+ {
+ this.paylen = paylen;
+ }
+
+ public byte[] toWire()
+ {
+ ByteBuffer bb = ByteBuffer.allocate(4 + (payload != null ? payload.length : 0)).order(ByteOrder.LITTLE_ENDIAN);
+
+ bb.putShort(type);
+ bb.putShort(paylen);
+
+ if (payload != null)
+ bb.put(payload);
+
+ return bb.array();
+ }
+ }
+
+ class NvCtlResponse extends NvCtlPacket {
+ public short status;
+
+ public NvCtlResponse(InputStream in) throws IOException {
+ super(in);
+ }
+
+ public NvCtlResponse(short type, short paylen) {
+ super(type, paylen);
+ }
+
+ public NvCtlResponse(short type, short paylen, byte[] payload) {
+ super(type, paylen, payload);
+ }
+
+ public NvCtlResponse(byte[] payload) {
+ super(payload);
+ }
+
+ public void setStatusCode(short status)
+ {
+ this.status = status;
+ }
+
+ public short getStatusCode()
+ {
+ return status;
+ }
+ }
+
+ @Override
+ public void connectionTerminated() {
+ abort();
+ }
+
+ @Override
+ public void connectionNeedsResync() {
+ new Thread(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ requestResync();
+ } catch (IOException e1) {
+ abort();
+ return;
+ }
+ }
+ }).start();
+ }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvHTTP.java b/limelight-pc/src/com/limelight/nvstream/NvHTTP.java
new file mode 100644
index 0000000..a303999
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvHTTP.java
@@ -0,0 +1,144 @@
+package com.limelight.nvstream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.URL;
+import java.net.UnknownHostException;
+import java.util.LinkedList;
+import java.util.Stack;
+
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamReader;
+
+public class NvHTTP {
+ private String macAddress;
+
+ public static final int PORT = 47989;
+ public String baseUrl;
+
+ public NvHTTP(String host, String macAddress) {
+ this.macAddress = macAddress;
+ this.baseUrl = "http://" + host + ":" + PORT;
+ }
+
+ private String getXmlString(InputStream in, String tagname)
+ throws IOException, XMLStreamException {
+ XMLInputFactory factory = XMLInputFactory.newFactory();
+ XMLStreamReader xReader = factory.createXMLStreamReader(in);
+
+ int eventType = xReader.getEventType();
+ Stack currentTag = new Stack();
+
+ while (eventType != XMLStreamReader.END_DOCUMENT) {
+ switch (eventType) {
+ case (XMLStreamReader.START_ELEMENT):
+ currentTag.push(xReader.getElementText());
+ break;
+ case (XMLStreamReader.END_ELEMENT):
+ currentTag.pop();
+ break;
+ case (XMLStreamReader.CHARACTERS):
+ if (currentTag.peek().equals(tagname)) {
+ return xReader.getElementText();
+ }
+ break;
+ }
+ eventType = xReader.next();
+ }
+
+ return null;
+ }
+
+ private InputStream openHttpConnection(String url) throws IOException {
+ return new URL(url).openConnection().getInputStream();
+ }
+
+ public String getAppVersion() throws XMLStreamException, IOException {
+ InputStream in = openHttpConnection(baseUrl + "/appversion");
+ return getXmlString(in, "appversion");
+ }
+
+ public boolean getPairState() throws IOException, XMLStreamException {
+ InputStream in = openHttpConnection(baseUrl + "/pairstate?mac=" + macAddress);
+ String paired = getXmlString(in, "paired");
+ return Integer.valueOf(paired) != 0;
+ }
+
+ public int getSessionId() throws IOException, XMLStreamException {
+ /* Pass the model (minus spaces) as the device name */
+ String deviceName = "Unknown";
+
+ try
+ {
+ InetAddress addr;
+ addr = InetAddress.getLocalHost();
+ deviceName = addr.getHostName();
+ }
+ catch (UnknownHostException ex)
+ {
+ System.out.println("Hostname can not be resolved");
+ }
+ InputStream in = openHttpConnection(baseUrl + "/pair?mac=" + macAddress
+ + "&devicename=" + deviceName);
+ String sessionId = getXmlString(in, "sessionid");
+ return Integer.parseInt(sessionId);
+ }
+
+ public int getSteamAppId(int sessionId) throws IOException,
+ XMLStreamException {
+ LinkedList appList = getAppList(sessionId);
+ for (NvApp app : appList) {
+ if (app.getAppName().equals("Steam")) {
+ return app.getAppId();
+ }
+ }
+ return 0;
+ }
+
+ public LinkedList getAppList(int sessionId) throws IOException, XMLStreamException {
+ InputStream in = openHttpConnection(baseUrl + "/applist?session=" + sessionId);
+ XMLInputFactory factory = XMLInputFactory.newFactory();
+ XMLStreamReader xReader = factory.createXMLStreamReader(in);
+
+ int eventType = xReader.getEventType();
+ LinkedList appList = new LinkedList();
+ Stack currentTag = new Stack();
+
+ while (eventType != XMLStreamReader.END_DOCUMENT) {
+ switch (eventType) {
+ case (XMLStreamReader.START_ELEMENT):
+ currentTag.push(xReader.getName().toString());
+ if (xReader.getName().toString().equals("App")) {
+ appList.addLast(new NvApp());
+ }
+ break;
+ case (XMLStreamReader.END_DOCUMENT):
+ currentTag.pop();
+ break;
+ case (XMLStreamReader.CHARACTERS):
+ NvApp app = appList.getLast();
+ if (currentTag.peek().equals("AppTitle")) {
+ app.setAppName(xReader.getText());
+ } else if (currentTag.peek().equals("ID")) {
+ app.setAppId(xReader.getText());
+ } else if (currentTag.peek().equals("IsRunning")) {
+ app.setIsRunning(xReader.getText());
+ }
+ break;
+ }
+ eventType = xReader.next();
+ }
+ return appList;
+ }
+
+ // Returns gameSession XML attribute
+ public int launchApp(int sessionId, int appId) throws IOException,
+ XMLStreamException {
+ InputStream in = openHttpConnection(baseUrl + "/launch?session="
+ + sessionId + "&appid=" + appId);
+ String gameSession = getXmlString(in, "gamesession");
+ return Integer.parseInt(gameSession);
+ }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvHandshake.java b/limelight-pc/src/com/limelight/nvstream/NvHandshake.java
new file mode 100644
index 0000000..db111d1
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvHandshake.java
@@ -0,0 +1,133 @@
+package com.limelight.nvstream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+
/**
 * Performs the fixed four-packet TCP handshake with the server prior to
 * starting the AV streams. The packet contents are opaque byte sequences
 * captured from a reference client and sent verbatim.
 */
public class NvHandshake {
	public static final int PORT = 47991;

	public static final int HANDSHAKE_TIMEOUT = 5000;

	// First packet: length/version header followed by the platform string
	public static final byte[] PLATFORM_HELLO =
	{
		(byte)0x07, (byte)0x00, (byte)0x00, (byte)0x00,

		// android in ASCII
		(byte)0x61, (byte)0x6e, (byte)0x64, (byte)0x72,
		(byte)0x6f, (byte)0x69, (byte)0x64,

		(byte)0x03, (byte)0x01, (byte)0x00, (byte)0x00
	};

	public static final byte[] PACKET_2 =
	{
		(byte)0x01, (byte)0x03, (byte)0x02, (byte)0x00,
		(byte)0x08, (byte)0x00
	};

	public static final byte[] PACKET_3 =
	{
		(byte)0x04, (byte)0x01, (byte)0x00, (byte)0x00,
		(byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00
	};

	public static final byte[] PACKET_4 =
	{
		(byte)0x01, (byte)0x01, (byte)0x00, (byte)0x00
	};

	/**
	 * Blocks for the first response byte, then drains whatever else arrives
	 * within ~250 ms. The response content is irrelevant to the handshake.
	 * Returns false on I/O error or interruption.
	 */
	private static boolean waitAndDiscardResponse(InputStream in)
	{
		try {
			// Wait for the response to start
			in.read();

			// Wait for the full response to come in
			Thread.sleep(250);

			// Drain everything buffered. (The original counting for-loop
			// compared a rising index against a falling available() count
			// and so discarded only about half of the buffered bytes,
			// leaving stale data for the next response read.)
			while (in.available() > 0) {
				in.read();
			}
		} catch (IOException e1) {
			return false;
		} catch (InterruptedException e) {
			// Preserve the interrupt status for our caller
			Thread.currentThread().interrupt();
			return false;
		}

		return true;
	}

	/**
	 * Runs the complete handshake against the given host. Returns true on
	 * success. The socket is always closed, including on exception (the
	 * original leaked it if a write threw).
	 *
	 * @throws IOException if connecting or writing fails
	 */
	public static boolean performHandshake(InetAddress host) throws IOException
	{
		Socket s = new Socket();
		try {
			s.connect(new InetSocketAddress(host, PORT), HANDSHAKE_TIMEOUT);
			s.setSoTimeout(HANDSHAKE_TIMEOUT);
			OutputStream out = s.getOutputStream();
			InputStream in = s.getInputStream();

			// First packet
			out.write(PLATFORM_HELLO);
			out.flush();

			if (!waitAndDiscardResponse(in)) {
				return false;
			}

			// Second packet
			out.write(PACKET_2);
			out.flush();

			if (!waitAndDiscardResponse(in)) {
				return false;
			}

			// Third packet
			out.write(PACKET_3);
			out.flush();

			if (!waitAndDiscardResponse(in)) {
				return false;
			}

			// Fourth packet
			out.write(PACKET_4);
			out.flush();

			// Done
			return true;
		} finally {
			s.close();
		}
	}
}
diff --git a/limelight-pc/src/com/limelight/nvstream/NvVideoStream.java b/limelight-pc/src/com/limelight/nvstream/NvVideoStream.java
new file mode 100644
index 0000000..a3f99a5
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/NvVideoStream.java
@@ -0,0 +1,307 @@
+package com.limelight.nvstream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.DatagramPacket;
+import java.net.DatagramSocket;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.net.SocketException;
+import java.util.LinkedList;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import com.limelight.nvstream.av.AvByteBufferDescriptor;
+import com.limelight.nvstream.av.AvDecodeUnit;
+import com.limelight.nvstream.av.AvRtpPacket;
+import com.limelight.nvstream.av.ConnectionStatusListener;
+import com.limelight.nvstream.av.video.AvVideoDepacketizer;
+import com.limelight.nvstream.av.video.AvVideoPacket;
+import com.limelight.nvstream.av.video.CpuDecoderRenderer;
+import com.limelight.nvstream.av.video.DecoderRenderer;
+
+public class NvVideoStream {
+ public static final int RTP_PORT = 47998;
+ public static final int RTCP_PORT = 47999;
+ public static final int FIRST_FRAME_PORT = 47996;
+
+ public static final int FIRST_FRAME_TIMEOUT = 5000;
+
+ private LinkedBlockingQueue packets = new LinkedBlockingQueue(100);
+
+ private InetAddress host;
+ private DatagramSocket rtp;
+ private Socket firstFrameSocket;
+
+ private LinkedList threads = new LinkedList();
+
+ private NvConnectionListener listener;
+ private AvVideoDepacketizer depacketizer;
+
+ private DecoderRenderer decrend;
+ private boolean startedRendering;
+
+ private boolean aborting = false;
+
+ public NvVideoStream(InetAddress host, NvConnectionListener listener, ConnectionStatusListener avConnListener)
+ {
+ this.host = host;
+ this.listener = listener;
+ this.depacketizer = new AvVideoDepacketizer(avConnListener);
+ }
+
+ public void abort()
+ {
+ if (aborting) {
+ return;
+ }
+
+ aborting = true;
+
+ // Interrupt threads
+ for (Thread t : threads) {
+ t.interrupt();
+ }
+
+ // Close the socket to interrupt the receive thread
+ if (rtp != null) {
+ rtp.close();
+ }
+ if (firstFrameSocket != null) {
+ try {
+ firstFrameSocket.close();
+ } catch (IOException e) {}
+ }
+
+ // Wait for threads to terminate
+ for (Thread t : threads) {
+ try {
+ t.join();
+ } catch (InterruptedException e) { }
+ }
+
+ if (startedRendering) {
+ decrend.stop();
+ }
+
+ if (decrend != null) {
+ decrend.release();
+ }
+
+ threads.clear();
+ }
+
+ private void readFirstFrame() throws IOException
+ {
+ byte[] firstFrame = new byte[1500];
+
+ firstFrameSocket = new Socket();
+ firstFrameSocket.setSoTimeout(FIRST_FRAME_TIMEOUT);
+
+ try {
+ firstFrameSocket.connect(new InetSocketAddress(host, FIRST_FRAME_PORT), FIRST_FRAME_TIMEOUT);
+ InputStream firstFrameStream = firstFrameSocket.getInputStream();
+
+ int offset = 0;
+ for (;;)
+ {
+ int bytesRead = firstFrameStream.read(firstFrame, offset, firstFrame.length-offset);
+
+ if (bytesRead == -1)
+ break;
+
+ offset += bytesRead;
+ }
+
+ depacketizer.addInputData(new AvVideoPacket(new AvByteBufferDescriptor(firstFrame, 0, offset)));
+ } finally {
+ firstFrameSocket.close();
+ firstFrameSocket = null;
+ }
+ }
+
+ public void setupRtpSession() throws SocketException
+ {
+ rtp = new DatagramSocket(RTP_PORT);
+ }
+
+ /*
+ public void setupDecoderRenderer(SurfaceHolder renderTarget, int drFlags) {
+ if (Build.HARDWARE.equals("goldfish")) {
+ // Emulator - don't render video (it's slow!)
+ decrend = null;
+ }
+ else if (MediaCodecDecoderRenderer.findSafeDecoder() != null) {
+ // Hardware decoding
+ decrend = new MediaCodecDecoderRenderer();
+ }
+ else {
+ // Software decoding
+ decrend = new CpuDecoderRenderer();
+ }
+
+ if (decrend != null) {
+ decrend.setup(1280, 720, renderTarget, drFlags);
+ }
+ }*/
+
+ /*
+ public void startVideoStream(final SurfaceHolder surface, int drFlags) throws IOException
+ {
+ // Setup the decoder and renderer
+ setupDecoderRenderer(surface, drFlags);
+
+ // Open RTP sockets and start session
+ setupRtpSession();
+
+ // Start pinging before reading the first frame
+ // so Shield Proxy knows we're here and sends us
+ // the reference frame
+ startUdpPingThread();
+
+ // Read the first frame to start the UDP video stream
+ // This MUST be called before the normal UDP receive thread
+ // starts in order to avoid state corruption caused by two
+ // threads simultaneously adding input data.
+ readFirstFrame();
+
+ if (decrend != null) {
+ // Start the receive thread early to avoid missing
+ // early packets
+ startReceiveThread();
+
+ // Start the depacketizer thread to deal with the RTP data
+ startDepacketizerThread();
+
+ // Start decoding the data we're receiving
+ startDecoderThread();
+
+ // Start the renderer
+ decrend.start();
+ startedRendering = true;
+ }
+ }
+ */
+ private void startDecoderThread()
+ {
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ // Read the decode units generated from the RTP stream
+ while (!isInterrupted())
+ {
+ AvDecodeUnit du;
+
+ try {
+ du = depacketizer.getNextDecodeUnit();
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ decrend.submitDecodeUnit(du);
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Video - Decoder");
+ t.setPriority(Thread.MAX_PRIORITY);
+ t.start();
+ }
+
+ private void startDepacketizerThread()
+ {
+ // This thread lessens the work on the receive thread
+ // so it can spend more time waiting for data
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ while (!isInterrupted())
+ {
+ AvRtpPacket packet;
+
+ try {
+ packet = packets.take();
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ // !!! We no longer own the data buffer at this point !!!
+ depacketizer.addInputData(packet);
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Video - Depacketizer");
+ t.start();
+ }
+
+ private void startReceiveThread()
+ {
+ // Receive thread
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ AvByteBufferDescriptor desc = new AvByteBufferDescriptor(new byte[1500], 0, 1500);
+ DatagramPacket packet = new DatagramPacket(desc.data, desc.length);
+
+ while (!isInterrupted())
+ {
+ try {
+ rtp.receive(packet);
+ } catch (IOException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ // Give the packet to the depacketizer thread
+ desc.length = packet.getLength();
+ if (packets.offer(new AvRtpPacket(desc))) {
+ desc.reinitialize(new byte[1500], 0, 1500);
+ packet.setData(desc.data, desc.offset, desc.length);
+ }
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Video - Receive");
+ t.start();
+ }
+
+ private void startUdpPingThread()
+ {
+ // Ping thread
+ Thread t = new Thread() {
+ @Override
+ public void run() {
+ // PING in ASCII
+ final byte[] pingPacketData = new byte[] {0x50, 0x49, 0x4E, 0x47};
+ DatagramPacket pingPacket = new DatagramPacket(pingPacketData, pingPacketData.length);
+ pingPacket.setSocketAddress(new InetSocketAddress(host, RTP_PORT));
+
+ // Send PING every 100 ms
+ while (!isInterrupted())
+ {
+ try {
+ rtp.send(pingPacket);
+ } catch (IOException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ listener.connectionTerminated(e);
+ return;
+ }
+ }
+ }
+ };
+ threads.add(t);
+ t.setName("Video - Ping");
+ t.setPriority(Thread.MIN_PRIORITY);
+ t.start();
+ }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/AvByteBufferDescriptor.java b/limelight-pc/src/com/limelight/nvstream/av/AvByteBufferDescriptor.java
new file mode 100644
index 0000000..8f11a95
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/AvByteBufferDescriptor.java
@@ -0,0 +1,46 @@
+package com.limelight.nvstream.av;
+
/**
 * A lightweight (data, offset, length) view over a byte array. Instances
 * are mutable and freely share the underlying array — no copies are made.
 */
public class AvByteBufferDescriptor {
	public byte[] data;
	public int offset;
	public int length;

	/** Wraps 'length' bytes of 'data' starting at 'offset'. */
	public AvByteBufferDescriptor(byte[] data, int offset, int length)
	{
		reinitialize(data, offset, length);
	}

	/** Shallow copy: shares the source descriptor's backing array. */
	public AvByteBufferDescriptor(AvByteBufferDescriptor desc)
	{
		reinitialize(desc.data, desc.offset, desc.length);
	}

	/** Re-points this descriptor at a different array region. */
	public void reinitialize(byte[] data, int offset, int length)
	{
		this.data = data;
		this.offset = offset;
		this.length = length;
	}

	/** Dumps the whole described region to stdout for debugging. */
	public void print()
	{
		print(offset, length);
	}

	/** Dumps 'length' bytes from this descriptor's offset to stdout. */
	public void print(int length)
	{
		print(this.offset, length);
	}

	/** Dumps the given region to stdout, one indexed hex byte per line. */
	public void print(int offset, int length)
	{
		int end = offset + length;
		for (int i = offset; i < end; i++) {
			System.out.printf("%d: %02x \n", i, data[i]);
		}
		System.out.println();
	}
}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/AvDecodeUnit.java b/limelight-pc/src/com/limelight/nvstream/av/AvDecodeUnit.java
new file mode 100644
index 0000000..69300b8
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/AvDecodeUnit.java
@@ -0,0 +1,42 @@
+package com.limelight.nvstream.av;
+
+import java.util.List;
+
+/**
+ * One complete unit of decodable data (e.g. an H264 NAL chain), carried as a
+ * list of buffer descriptors plus the total byte length and decoder flags.
+ */
+public class AvDecodeUnit {
+    public static final int TYPE_UNKNOWN = 0;
+    public static final int TYPE_H264 = 1;
+    public static final int TYPE_OPUS = 2;
+
+    // Raw List retained for source compatibility with existing callers;
+    // elements are buffer descriptors making up the unit's payload.
+    private int type;
+    private List bufferList;
+    private int dataLength;
+    private int flags;
+
+    /**
+     * @param type one of the TYPE_* constants
+     * @param bufferList chain of buffers making up this unit
+     * @param dataLength total number of payload bytes across the chain
+     * @param flags decoder-specific flags for this unit
+     */
+    public AvDecodeUnit(int type, List bufferList, int dataLength, int flags)
+    {
+        this.flags = flags;
+        this.dataLength = dataLength;
+        this.bufferList = bufferList;
+        this.type = type;
+    }
+
+    /** Returns the TYPE_* constant describing this unit. */
+    public int getType()
+    {
+        return type;
+    }
+
+    /** Returns the decoder flags for this unit. */
+    public int getFlags()
+    {
+        return flags;
+    }
+
+    /** Returns the buffer chain backing this unit. */
+    public List getBufferList()
+    {
+        return bufferList;
+    }
+
+    /** Returns the total payload length in bytes. */
+    public int getDataLength()
+    {
+        return dataLength;
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/AvRtpPacket.java b/limelight-pc/src/com/limelight/nvstream/av/AvRtpPacket.java
new file mode 100644
index 0000000..8e4250e
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/AvRtpPacket.java
@@ -0,0 +1,46 @@
+package com.limelight.nvstream.av;
+
+import java.nio.ByteBuffer;
+
+public class AvRtpPacket {
+
+ private byte packetType;
+ private short seqNum;
+ private AvByteBufferDescriptor buffer;
+
+ public AvRtpPacket(AvByteBufferDescriptor buffer)
+ {
+ this.buffer = new AvByteBufferDescriptor(buffer);
+
+ ByteBuffer bb = ByteBuffer.wrap(buffer.data, buffer.offset, buffer.length);
+
+ // Discard the first byte
+ bb.position(bb.position()+1);
+
+ // Get the packet type
+ packetType = bb.get();
+
+ // Get the sequence number
+ seqNum = bb.getShort();
+ }
+
+ public byte getPacketType()
+ {
+ return packetType;
+ }
+
+ public short getSequenceNumber()
+ {
+ return seqNum;
+ }
+
+ public byte[] getBackingBuffer()
+ {
+ return buffer.data;
+ }
+
+ public AvByteBufferDescriptor getNewPayloadDescriptor()
+ {
+ return new AvByteBufferDescriptor(buffer.data, buffer.offset+12, buffer.length-12);
+ }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/AvShortBufferDescriptor.java b/limelight-pc/src/com/limelight/nvstream/av/AvShortBufferDescriptor.java
new file mode 100644
index 0000000..901783f
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/AvShortBufferDescriptor.java
@@ -0,0 +1,28 @@
+package com.limelight.nvstream.av;
+
+/**
+ * Lightweight view over a region of a short array (data + offset + length),
+ * mirroring AvByteBufferDescriptor for 16-bit PCM data.
+ */
+public class AvShortBufferDescriptor {
+    public short[] data;
+    public int offset;
+    public int length;
+
+    /** Creates a descriptor covering {@code length} shorts starting at {@code offset}. */
+    public AvShortBufferDescriptor(short[] data, int offset, int length)
+    {
+        reinitialize(data, offset, length);
+    }
+
+    /** Shallow copy: the new descriptor shares the backing array of {@code desc}. */
+    public AvShortBufferDescriptor(AvShortBufferDescriptor desc)
+    {
+        reinitialize(desc.data, desc.offset, desc.length);
+    }
+
+    /** Repoints this descriptor at a new region, reusing the object. */
+    public void reinitialize(short[] data, int offset, int length)
+    {
+        this.data = data;
+        this.offset = offset;
+        this.length = length;
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/ConnectionStatusListener.java b/limelight-pc/src/com/limelight/nvstream/av/ConnectionStatusListener.java
new file mode 100644
index 0000000..35262dd
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/ConnectionStatusListener.java
@@ -0,0 +1,7 @@
+package com.limelight.nvstream.av;
+
+/**
+ * Callback interface through which the AV depacketizers report stream
+ * state changes to the connection layer.
+ */
+public interface ConnectionStatusListener {
+    /** Called when the connection has been lost and streaming must stop. */
+    public void connectionTerminated();
+
+    /**
+     * Called when received data was lost or dropped and the stream needs a
+     * resync (the callers use this to request a new IDR frame).
+     */
+    public void connectionNeedsResync();
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/audio/AvAudioDepacketizer.java b/limelight-pc/src/com/limelight/nvstream/av/audio/AvAudioDepacketizer.java
new file mode 100644
index 0000000..a13cf3e
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/audio/AvAudioDepacketizer.java
@@ -0,0 +1,65 @@
+package com.limelight.nvstream.av.audio;
+
+import java.util.concurrent.LinkedBlockingQueue;
+
+import com.limelight.nvstream.av.AvByteBufferDescriptor;
+import com.limelight.nvstream.av.AvRtpPacket;
+import com.limelight.nvstream.av.AvShortBufferDescriptor;
+
+/**
+ * Depacketizes audio RTP packets and feeds their Opus payloads to the
+ * native decoder, queueing the resulting PCM data for the audio renderer.
+ */
+public class AvAudioDepacketizer {
+
+    // Cap on queued decoded units; when the consumer falls this far behind,
+    // the whole queue is dropped to bound latency
+    private static final int DU_LIMIT = 15;
+    // NOTE(review): raw type; ideally LinkedBlockingQueue<AvShortBufferDescriptor>
+    private LinkedBlockingQueue decodedUnits =
+        new LinkedBlockingQueue(DU_LIMIT);
+
+    // Sequencing state: sequence number of the last RTP packet seen
+    private short lastSequenceNumber;
+
+    // Decodes one Opus packet to PCM and queues the result. A null data
+    // pointer is forwarded to the native decoder as-is on packet loss
+    // (presumably to trigger loss concealment -- TODO confirm against the
+    // nv_opus_dec native implementation).
+    private void decodeData(byte[] data, int off, int len)
+    {
+        // Submit this data to the decoder
+        short[] pcmData = new short[OpusDecoder.getMaxOutputShorts()];
+        int decodeLen = OpusDecoder.decode(data, off, len, pcmData);
+
+        if (decodeLen > 0) {
+            // Return value of decode is frames decoded per channel
+            decodeLen *= OpusDecoder.getChannelCount();
+
+            // Put it on the decoded queue
+            if (!decodedUnits.offer(new AvShortBufferDescriptor(pcmData, 0, decodeLen))) {
+                // Clear out the queue; note this also discards the unit we
+                // just decoded
+                decodedUnits.clear();
+            }
+        }
+    }
+
+    /**
+     * Handles one audio RTP packet: filters on the RTP packet type, detects
+     * out-of-sequence data, and decodes the Opus payload.
+     */
+    public void decodeInputData(AvRtpPacket packet)
+    {
+        short seq = packet.getSequenceNumber();
+
+        if (packet.getPacketType() != 97) {
+            // Only type 97 is audio
+            return;
+        }
+
+        // Toss out the current NAL if we receive a packet that is
+        // out of sequence
+        // NOTE(review): a legitimate sequence number of 0 also disables this
+        // check for the following packet -- confirm this is acceptable
+        if (lastSequenceNumber != 0 &&
+            (short)(lastSequenceNumber + 1) != seq)
+        {
+            System.out.println("Received OOS audio data (expected "+(lastSequenceNumber + 1)+", got "+seq+")");
+            decodeData(null, 0, 0);
+        }
+
+        lastSequenceNumber = seq;
+
+        // This is all the depacketizing we need to do
+        AvByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
+        decodeData(rtpPayload.data, rtpPayload.offset, rtpPayload.length);
+    }
+
+    /** Blocks until the next decoded PCM buffer is available. */
+    public AvShortBufferDescriptor getNextDecodedData() throws InterruptedException
+    {
+        return decodedUnits.take();
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/audio/OpusDecoder.java b/limelight-pc/src/com/limelight/nvstream/av/audio/OpusDecoder.java
new file mode 100644
index 0000000..c01f7fa
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/audio/OpusDecoder.java
@@ -0,0 +1,14 @@
+package com.limelight.nvstream.av.audio;
+
+/**
+ * JNI wrapper around the nv_opus_dec native Opus decoder library.
+ */
+public class OpusDecoder {
+    static {
+        System.loadLibrary("nv_opus_dec");
+    }
+
+    // NOTE(review): return conventions below are inferred from call sites in
+    // this patch; confirm against the native nv_opus_dec sources.
+    public static native int init();
+    public static native void destroy();
+    /** Number of audio channels the decoder outputs. */
+    public static native int getChannelCount();
+    /** Size (in shorts) a caller must allocate for one decode() output. */
+    public static native int getMaxOutputShorts();
+    public static native int getSampleRate();
+    /**
+     * Decodes one Opus packet into outpcmdata; returns frames decoded per
+     * channel (per the usage in AvAudioDepacketizer), <= 0 on failure.
+     */
+    public static native int decode(byte[] indata, int inoff, int inlen, short[] outpcmdata);
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/video/AvVideoDepacketizer.java b/limelight-pc/src/com/limelight/nvstream/av/video/AvVideoDepacketizer.java
new file mode 100644
index 0000000..17336c0
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/video/AvVideoDepacketizer.java
@@ -0,0 +1,313 @@
+package com.limelight.nvstream.av.video;
+
+import java.util.LinkedList;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import com.limelight.nvstream.av.AvByteBufferDescriptor;
+import com.limelight.nvstream.av.AvDecodeUnit;
+import com.limelight.nvstream.av.AvRtpPacket;
+import com.limelight.nvstream.av.ConnectionStatusListener;
+
+/**
+ * Depacketizer for the video stream: reassembles H264 NAL units from RTP
+ * packet payloads and queues them as AvDecodeUnits for the decoder thread.
+ * Detects out-of-sequence packets and asks the control listener for a
+ * resync (new IDR frame) when data has been lost or dropped.
+ */
+public class AvVideoDepacketizer {
+
+    // Current NAL state: chain of buffer descriptors making up the NAL
+    // being reassembled, and the total byte count accumulated so far
+    private LinkedList avcNalDataChain = null;
+    private int avcNalDataLength = 0;
+    private int currentlyDecoding;
+
+    // Cached buffer descriptor to save on allocations
+    // Only safe to use in decode thread!!!!
+    private AvByteBufferDescriptor cachedDesc;
+
+    // Sequencing state: sequence number of the last RTP packet seen
+    private short lastSequenceNumber;
+
+    // Notified on data loss or decode-unit queue overflow
+    private ConnectionStatusListener controlListener;
+
+    // Cap on queued decode units; overflow flushes the queue and resyncs
+    private static final int DU_LIMIT = 15;
+    private LinkedBlockingQueue decodedUnits = new LinkedBlockingQueue(DU_LIMIT);
+
+    public AvVideoDepacketizer(ConnectionStatusListener controlListener)
+    {
+        this.controlListener = controlListener;
+        this.cachedDesc = new AvByteBufferDescriptor(null, 0, 0);
+    }
+
+    // Drops any partially-reassembled NAL state
+    private void clearAvcNalState()
+    {
+        avcNalDataChain = null;
+        avcNalDataLength = 0;
+    }
+
+    // Finalizes the NAL currently being reassembled (if any) into an
+    // AvDecodeUnit and queues it for the decoder. On queue overflow the
+    // queue is flushed and an IDR frame is requested via the listener.
+    private void reassembleAvcNal()
+    {
+        // This is the start of a new NAL
+        if (avcNalDataChain != null && avcNalDataLength != 0)
+        {
+            int flags = 0;
+
+            // Check if this is a special NAL unit
+            AvByteBufferDescriptor header = avcNalDataChain.getFirst();
+
+            if (NAL.getSpecialSequenceDescriptor(header, cachedDesc))
+            {
+                // The next byte after the special sequence is the NAL header
+                byte nalHeader = cachedDesc.data[cachedDesc.offset+cachedDesc.length];
+
+                switch (nalHeader)
+                {
+                // SPS and PPS
+                case 0x67:
+                case 0x68:
+                    System.out.println("Codec config");
+                    //flags |= MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
+                    break;
+
+                // IDR
+                case 0x65:
+                    System.out.println("Reference frame");
+                    //flags |= MediaCodec.BUFFER_FLAG_SYNC_FRAME;
+                    break;
+
+                // non-IDR frame
+                case 0x61:
+                    break;
+
+                // Unknown type
+                default:
+                    System.out.printf("Unknown NAL header: %02x %02x %02x %02x %02x\n",
+                        header.data[header.offset], header.data[header.offset+1],
+                        header.data[header.offset+2], header.data[header.offset+3],
+                        header.data[header.offset+4]);
+                    break;
+                }
+            }
+            else
+            {
+                System.out.printf("Invalid NAL: %02x %02x %02x %02x %02x\n",
+                    header.data[header.offset], header.data[header.offset+1],
+                    header.data[header.offset+2], header.data[header.offset+3],
+                    header.data[header.offset+4]);
+            }
+
+            // Construct the H264 decode unit
+            AvDecodeUnit du = new AvDecodeUnit(AvDecodeUnit.TYPE_H264, avcNalDataChain, avcNalDataLength, flags);
+            if (!decodedUnits.offer(du)) {
+                // We need a new IDR frame since we're discarding data now
+                decodedUnits.clear();
+                controlListener.connectionNeedsResync();
+            }
+
+            // Clear old state
+            avcNalDataChain = null;
+            avcNalDataLength = 0;
+        }
+    }
+
+    /**
+     * Parses one video packet payload: walks the byte stream looking for
+     * special sequences, accumulating NAL data into the current chain and
+     * finalizing a decode unit when a frame boundary or padding is found.
+     */
+    public void addInputData(AvVideoPacket packet)
+    {
+        AvByteBufferDescriptor location = packet.getNewPayloadDescriptor();
+
+        while (location.length != 0)
+        {
+            // Remember the start of the NAL data in this packet
+            int start = location.offset;
+
+            // Check for a special sequence
+            if (NAL.getSpecialSequenceDescriptor(location, cachedDesc))
+            {
+                if (NAL.isAvcStartSequence(cachedDesc))
+                {
+                    // We're decoding H264 now
+                    currentlyDecoding = AvDecodeUnit.TYPE_H264;
+
+                    // Check if it's the end of the last frame
+                    if (NAL.isAvcFrameStart(cachedDesc))
+                    {
+                        // Reassemble any pending AVC NAL
+                        reassembleAvcNal();
+
+                        // Setup state for the new NAL
+                        avcNalDataChain = new LinkedList();
+                        avcNalDataLength = 0;
+                    }
+
+                    // Skip the start sequence
+                    location.length -= cachedDesc.length;
+                    location.offset += cachedDesc.length;
+                }
+                else
+                {
+                    // Check if this is padding after a full AVC frame
+                    if (currentlyDecoding == AvDecodeUnit.TYPE_H264 &&
+                        NAL.isPadding(cachedDesc)) {
+                        // The decode unit is complete
+                        reassembleAvcNal();
+                    }
+
+                    // Not decoding AVC
+                    currentlyDecoding = AvDecodeUnit.TYPE_UNKNOWN;
+
+                    // Just skip this byte
+                    location.length--;
+                    location.offset++;
+                }
+            }
+
+            // Move to the next special sequence
+            while (location.length != 0)
+            {
+                // Catch the easy case first where byte 0 != 0x00
+                if (location.data[location.offset] == 0x00)
+                {
+                    // Check if this should end the current NAL
+                    if (NAL.getSpecialSequenceDescriptor(location, cachedDesc))
+                    {
+                        // Only stop if we're decoding something or this
+                        // isn't padding
+                        if (currentlyDecoding != AvDecodeUnit.TYPE_UNKNOWN ||
+                            !NAL.isPadding(cachedDesc))
+                        {
+                            break;
+                        }
+                    }
+                }
+
+                // This byte is part of the NAL data
+                location.offset++;
+                location.length--;
+            }
+
+            if (currentlyDecoding == AvDecodeUnit.TYPE_H264 && avcNalDataChain != null)
+            {
+                AvByteBufferDescriptor data = new AvByteBufferDescriptor(location.data, start, location.offset-start);
+
+                // Add a buffer descriptor describing the NAL data in this packet
+                avcNalDataChain.add(data);
+                avcNalDataLength += location.offset-start;
+            }
+        }
+    }
+
+    /**
+     * Handles one video RTP packet: drops NAL state and requests an IDR
+     * frame on out-of-sequence data, then forwards the payload to the
+     * non-sequencing parser above.
+     */
+    public void addInputData(AvRtpPacket packet)
+    {
+        short seq = packet.getSequenceNumber();
+
+        // Toss out the current NAL if we receive a packet that is
+        // out of sequence
+        // NOTE(review): a legitimate sequence number of 0 also disables this
+        // check for the following packet -- confirm this is acceptable
+        if (lastSequenceNumber != 0 &&
+            (short)(lastSequenceNumber + 1) != seq)
+        {
+            System.out.println("Received OOS video data (expected "+(lastSequenceNumber + 1)+", got "+seq+")");
+
+            // Reset the depacketizer state
+            currentlyDecoding = AvDecodeUnit.TYPE_UNKNOWN;
+            clearAvcNalState();
+
+            // Request an IDR frame
+            controlListener.connectionNeedsResync();
+        }
+
+        lastSequenceNumber = seq;
+
+        // Pass the payload to the non-sequencing parser
+        AvByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
+        addInputData(new AvVideoPacket(rtpPayload));
+    }
+
+    /** Blocks until the next decode unit is available. */
+    public AvDecodeUnit getNextDecodeUnit() throws InterruptedException
+    {
+        return decodedUnits.take();
+    }
+}
+
+/**
+ * Static helpers for recognizing H264 Annex B "special sequences":
+ * start codes (00 00 01 / 00 00 00 01), padding (00 00 00), and
+ * emulation-prevention sequences (00 00 03 xx).
+ */
+class NAL {
+
+    // This assumes that the buffer passed in is already a special sequence
+    public static boolean isAvcStartSequence(AvByteBufferDescriptor specialSeq)
+    {
+        // The start sequence is 00 00 01 or 00 00 00 01
+        return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x01);
+    }
+
+    // This assumes that the buffer passed in is already a special sequence
+    public static boolean isPadding(AvByteBufferDescriptor specialSeq)
+    {
+        // The padding sequence is 00 00 00
+        return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x00);
+    }
+
+    // This assumes that the buffer passed in is already a special sequence
+    public static boolean isAvcFrameStart(AvByteBufferDescriptor specialSeq)
+    {
+        // Only the 4-byte form (00 00 00 01) marks a frame start
+        if (specialSeq.length != 4)
+            return false;
+
+        // The frame start sequence is 00 00 00 01
+        return (specialSeq.data[specialSeq.offset+specialSeq.length-1] == 0x01);
+    }
+
+    // Returns a buffer descriptor describing the start sequence.
+    // Writes the sequence bounds into outputDesc and returns true if the
+    // buffer begins with a special sequence; returns false otherwise
+    // (including when the buffer is too short to decide).
+    public static boolean getSpecialSequenceDescriptor(AvByteBufferDescriptor buffer, AvByteBufferDescriptor outputDesc)
+    {
+        // NAL start sequence is 00 00 00 01 or 00 00 01
+        if (buffer.length < 3)
+            return false;
+
+        // 00 00 is magic
+        if (buffer.data[buffer.offset] == 0x00 &&
+            buffer.data[buffer.offset+1] == 0x00)
+        {
+            // Another 00 could be the end of the special sequence
+            // 00 00 00 or the middle of 00 00 00 01
+            if (buffer.data[buffer.offset+2] == 0x00)
+            {
+                if (buffer.length >= 4 &&
+                    buffer.data[buffer.offset+3] == 0x01)
+                {
+                    // It's the AVC start sequence 00 00 00 01
+                    outputDesc.reinitialize(buffer.data, buffer.offset, 4);
+                }
+                else
+                {
+                    // It's 00 00 00
+                    outputDesc.reinitialize(buffer.data, buffer.offset, 3);
+                }
+                return true;
+            }
+            else if (buffer.data[buffer.offset+2] == 0x01 ||
+                     buffer.data[buffer.offset+2] == 0x02)
+            {
+                // These are easy: 00 00 01 or 00 00 02
+                outputDesc.reinitialize(buffer.data, buffer.offset, 3);
+                return true;
+            }
+            else if (buffer.data[buffer.offset+2] == 0x03)
+            {
+                // 00 00 03 is special because it's a subsequence of the
+                // NAL wrapping substitute for 00 00 00, 00 00 01, 00 00 02,
+                // or 00 00 03 in the RBSP sequence. We need to check the next
+                // byte to see whether it's 00, 01, 02, or 03 (a valid RBSP substitution)
+                // or whether it's something else
+
+                if (buffer.length < 4)
+                    return false;
+
+                if (buffer.data[buffer.offset+3] >= 0x00 &&
+                    buffer.data[buffer.offset+3] <= 0x03)
+                {
+                    // It's not really a special sequence after all
+                    return false;
+                }
+                else
+                {
+                    // It's not a standard replacement so it's a special sequence
+                    outputDesc.reinitialize(buffer.data, buffer.offset, 3);
+                    return true;
+                }
+            }
+        }
+
+        return false;
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/video/AvVideoPacket.java b/limelight-pc/src/com/limelight/nvstream/av/video/AvVideoPacket.java
new file mode 100644
index 0000000..d2bd844
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/video/AvVideoPacket.java
@@ -0,0 +1,17 @@
+package com.limelight.nvstream.av.video;
+
+import com.limelight.nvstream.av.AvByteBufferDescriptor;
+
+/**
+ * Wraps the payload of one video RTP packet and exposes the NAL data that
+ * follows the 56-byte video framing header.
+ */
+public class AvVideoPacket {
+    // Size of the proprietary video header preceding the NAL data
+    private static final int VIDEO_HEADER_SIZE = 56;
+
+    private AvByteBufferDescriptor buffer;
+
+    public AvVideoPacket(AvByteBufferDescriptor rtpPayload)
+    {
+        this.buffer = new AvByteBufferDescriptor(rtpPayload);
+    }
+
+    /** Returns a fresh descriptor covering the NAL data past the video header. */
+    public AvByteBufferDescriptor getNewPayloadDescriptor()
+    {
+        return new AvByteBufferDescriptor(buffer.data,
+                buffer.offset + VIDEO_HEADER_SIZE,
+                buffer.length - VIDEO_HEADER_SIZE);
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/video/AvcDecoder.java b/limelight-pc/src/com/limelight/nvstream/av/video/AvcDecoder.java
new file mode 100644
index 0000000..249f4ac
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/video/AvcDecoder.java
@@ -0,0 +1,33 @@
+package com.limelight.nvstream.av.video;
+
+/**
+ * JNI wrapper around the nv_avc_dec native H264 decoder (backed by FFMPEG).
+ * The FFMPEG shared libraries must be loaded before the decoder itself.
+ */
+public class AvcDecoder {
+    static {
+        // FFMPEG dependencies
+        System.loadLibrary("avutil-52");
+        System.loadLibrary("swresample-0");
+        System.loadLibrary("swscale-2");
+        System.loadLibrary("avcodec-55");
+        System.loadLibrary("avformat-55");
+        System.loadLibrary("avfilter-3");
+
+        System.loadLibrary("nv_avc_dec");
+    }
+
+    /** Disables the deblocking filter at the cost of image quality */
+    public static final int DISABLE_LOOP_FILTER = 0x1;
+    /** Uses the low latency decode flag (disables multithreading) */
+    public static final int LOW_LATENCY_DECODE = 0x2;
+    /** Threads process each slice, rather than each frame */
+    public static final int SLICE_THREADING = 0x4;
+    /** Uses nonstandard speedup tricks */
+    public static final int FAST_DECODE = 0x8;
+    /** Uses bilinear filtering instead of bicubic */
+    public static final int BILINEAR_FILTERING = 0x10;
+    /** Uses a faster bilinear filtering with lower image quality */
+    public static final int FAST_BILINEAR_FILTERING = 0x20;
+
+    /**
+     * Initializes the native decoder with the given flags (see constants
+     * above) and thread count; returns nonzero on failure (per the usage in
+     * CpuDecoderRenderer).
+     */
+    public static native int init(int width, int height, int perflvl, int threadcount);
+    public static native void destroy();
+    //public static native void redraw(Surface surface);
+    /** Decodes one unit of H264 data; returns 0 on success. */
+    public static native int decode(byte[] indata, int inoff, int inlen);
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/video/CpuDecoderRenderer.java b/limelight-pc/src/com/limelight/nvstream/av/video/CpuDecoderRenderer.java
new file mode 100644
index 0000000..dea3910
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/video/CpuDecoderRenderer.java
@@ -0,0 +1,203 @@
+package com.limelight.nvstream.av.video;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import com.limelight.nvstream.av.AvByteBufferDescriptor;
+import com.limelight.nvstream.av.AvDecodeUnit;
+
+/**
+ * Software (CPU) H264 decoder/renderer backed by the native AvcDecoder.
+ * Picks decode flags based on a crude CPU heuristic and paces rendering at
+ * a fixed target frame rate.
+ *
+ * NOTE(review): targetFps and decoderBuffer are only assigned inside the
+ * commented-out setup() below. Until a working setup() exists, start()
+ * divides by zero in computePresentationTimeMs() and submitDecodeUnit()
+ * throws NPE on decoderBuffer. stop() also NPEs if start() was never
+ * called. Confirm before wiring this class in.
+ */
+public class CpuDecoderRenderer/* implements DecoderRenderer */{
+
+    // Scratch buffer reused for small decode units to avoid reallocation
+    private ByteBuffer decoderBuffer;
+    private Thread rendererThread;
+    private int targetFps;
+
+    // Only sleep if the difference is above this value
+    private static final int WAIT_CEILING_MS = 8;
+
+    private static final int LOW_PERF = 1;
+    private static final int MED_PERF = 2;
+    private static final int HIGH_PERF = 3;
+
+    private int cpuCount = Runtime.getRuntime().availableProcessors();
+
+    // Guesses a performance tier by scanning /proc/cpuinfo for known ARM
+    // core IDs. NOTE(review): Linux-only and ARM-specific heuristics --
+    // likely inappropriate for this PC port; falls back to MED_PERF.
+    private int findOptimalPerformanceLevel() {
+        StringBuilder cpuInfo = new StringBuilder();
+        BufferedReader br = null;
+        try {
+            br = new BufferedReader(new FileReader(new File("/proc/cpuinfo")));
+            for (;;) {
+                int ch = br.read();
+                if (ch == -1)
+                    break;
+                cpuInfo.append((char)ch);
+            }
+
+            // Here we're doing very simple heuristics based on CPU model
+            String cpuInfoStr = cpuInfo.toString();
+
+            // We order them from greatest to least for proper detection
+            // of devices with multiple sets of cores (like Exynos 5 Octa)
+            // TODO Make this better
+            if (cpuInfoStr.contains("0xc0f")) {
+                // Cortex-A15
+                return MED_PERF;
+            }
+            else if (cpuInfoStr.contains("0xc09")) {
+                // Cortex-A9
+                return LOW_PERF;
+            }
+            else if (cpuInfoStr.contains("0xc07")) {
+                // Cortex-A7
+                return LOW_PERF;
+            }
+            else {
+                // Didn't have anything we're looking for
+                return MED_PERF;
+            }
+        } catch (IOException e) {
+        } finally {
+            if (br != null) {
+                try {
+                    br.close();
+                } catch (IOException e) {}
+            }
+        }
+
+        // Couldn't read cpuinfo, so assume medium
+        return MED_PERF;
+    }
+
+    /*@Override
+    public void setup(int width, int height, SurfaceHolder renderTarget, int drFlags) {
+        this.renderTarget = renderTarget.getSurface();
+        this.targetFps = 30;
+
+        int perfLevel = findOptimalPerformanceLevel();
+        int threadCount;
+
+        int avcFlags = 0;
+        switch (perfLevel) {
+        case HIGH_PERF:
+            // Single threaded low latency decode is ideal but hard to acheive
+            avcFlags = AvcDecoder.LOW_LATENCY_DECODE;
+            threadCount = 1;
+            break;
+
+        case LOW_PERF:
+            // Disable the loop filter for performance reasons
+            avcFlags = AvcDecoder.DISABLE_LOOP_FILTER |
+                AvcDecoder.FAST_BILINEAR_FILTERING |
+                AvcDecoder.FAST_DECODE;
+
+            // Use plenty of threads to try to utilize the CPU as best we can
+            threadCount = cpuCount - 1;
+            break;
+
+        default:
+        case MED_PERF:
+            avcFlags = AvcDecoder.BILINEAR_FILTERING |
+                AvcDecoder.FAST_DECODE;
+
+            // Only use 2 threads to minimize frame processing latency
+            threadCount = 2;
+            break;
+        }
+
+        // If the user wants quality, we'll remove the low IQ flags
+        if ((drFlags & DecoderRenderer.FLAG_PREFER_QUALITY) != 0) {
+            // Make sure the loop filter is enabled
+            avcFlags &= ~AvcDecoder.DISABLE_LOOP_FILTER;
+
+            // Disable the non-compliant speed optimizations
+            avcFlags &= ~AvcDecoder.FAST_DECODE;
+
+            System.out.println("Using high quality decoding");
+        }
+
+        int err = AvcDecoder.init(width, height, avcFlags, threadCount);
+        if (err != 0) {
+            throw new IllegalStateException("AVC decoder initialization failure: "+err);
+        }
+
+        decoderBuffer = ByteBuffer.allocate(92*1024);
+
+        System.out.println("Using software decoding (performance level: "+perfLevel+")");
+    }
+*/
+    //@Override
+    // Spawns the render-pacing thread, which sleeps until each frame's
+    // presentation time and then triggers a redraw (currently stubbed out).
+    public void start() {
+        rendererThread = new Thread() {
+            @Override
+            public void run() {
+                long nextFrameTime = System.currentTimeMillis();
+
+                while (!isInterrupted())
+                {
+                    long diff = nextFrameTime - System.currentTimeMillis();
+
+                    // Only bother sleeping for gaps larger than the ceiling;
+                    // smaller gaps aren't worth the scheduler imprecision
+                    if (diff > WAIT_CEILING_MS) {
+                        try {
+                            Thread.sleep(diff);
+                        } catch (InterruptedException e) {
+                            return;
+                        }
+                    }
+
+                    nextFrameTime = computePresentationTimeMs(targetFps);
+                    // AvcDecoder.redraw(renderTarget);
+                }
+            }
+        };
+        rendererThread.setName("Video - Renderer (CPU)");
+        rendererThread.start();
+    }
+
+    // Returns the wall-clock time at which the next frame should be shown.
+    // NOTE(review): divides by zero if frameRate is 0 (targetFps is never
+    // assigned outside the commented-out setup()).
+    private long computePresentationTimeMs(int frameRate) {
+        return System.currentTimeMillis() + (1000 / frameRate);
+    }
+
+    //@Override
+    // Interrupts the renderer thread and waits for it to exit.
+    public void stop() {
+        rendererThread.interrupt();
+
+        try {
+            rendererThread.join();
+        } catch (InterruptedException e) { }
+    }
+
+    //@Override
+    // Tears down the native decoder.
+    public void release() {
+        AvcDecoder.destroy();
+    }
+
+    //@Override
+    /**
+     * Flattens the decode unit's buffer chain into a single contiguous
+     * array (reusing decoderBuffer when it fits) and submits it to the
+     * native decoder. Returns true if the native decode succeeded.
+     */
+    public boolean submitDecodeUnit(AvDecodeUnit decodeUnit) {
+        byte[] data;
+
+        // Use the reserved decoder buffer if this decode unit will fit
+        if (decodeUnit.getDataLength() <= decoderBuffer.limit()) {
+            decoderBuffer.clear();
+
+            for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
+                decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
+            }
+
+            data = decoderBuffer.array();
+        }
+        else {
+            data = new byte[decodeUnit.getDataLength()];
+
+            int offset = 0;
+            for (AvByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
+                System.arraycopy(bbd.data, bbd.offset, data, offset, bbd.length);
+                offset += bbd.length;
+            }
+        }
+
+        return (AvcDecoder.decode(data, 0, decodeUnit.getDataLength()) == 0);
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/av/video/DecoderRenderer.java b/limelight-pc/src/com/limelight/nvstream/av/video/DecoderRenderer.java
new file mode 100644
index 0000000..43555f8
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/av/video/DecoderRenderer.java
@@ -0,0 +1,17 @@
+package com.limelight.nvstream.av.video;
+
+import com.limelight.nvstream.av.AvDecodeUnit;
+
+/**
+ * Interface implemented by video decoder/renderer backends
+ * (e.g. CpuDecoderRenderer).
+ */
+public interface DecoderRenderer {
+    /** Hint flag: prefer image quality over decode speed. */
+    public static int FLAG_PREFER_QUALITY = 0x1;
+
+    /** Initializes the decoder for the given stream dimensions and FLAG_* hints. */
+    public void setup(int width, int height, int drFlags);
+
+    /** Starts decoding/rendering (implementations spawn their render thread here). */
+    public void start();
+
+    /** Stops decoding/rendering. */
+    public void stop();
+
+    /** Releases decoder resources; called after stop(). */
+    public void release();
+
+    /**
+     * Submits one decode unit; returns false on decode failure
+     * (per the CpuDecoderRenderer implementation -- confirm for others).
+     */
+    public boolean submitDecodeUnit(AvDecodeUnit decodeUnit);
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/input/NvController.java b/limelight-pc/src/com/limelight/nvstream/input/NvController.java
new file mode 100644
index 0000000..b1b273e
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/input/NvController.java
@@ -0,0 +1,65 @@
+package com.limelight.nvstream.input;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+
+/**
+ * TCP connection for sending input (controller and mouse) packets to the
+ * streaming host. Call initialize() before any of the send methods.
+ */
+public class NvController {
+
+    public final static int PORT = 35043;
+
+    public final static int CONTROLLER_TIMEOUT = 3000;
+
+    private InetAddress host;
+    private Socket s;
+    private OutputStream out;
+
+    public NvController(InetAddress host)
+    {
+        this.host = host;
+    }
+
+    /**
+     * Connects to the input port on the host.
+     * @throws IOException if the connection fails or times out
+     */
+    public void initialize() throws IOException
+    {
+        s = new Socket();
+        try {
+            s.connect(new InetSocketAddress(host, PORT), CONTROLLER_TIMEOUT);
+            // Input packets are tiny and latency-sensitive, so disable Nagle
+            s.setTcpNoDelay(true);
+            out = s.getOutputStream();
+        } catch (IOException e) {
+            // Don't leak the socket if setup fails partway through
+            try {
+                s.close();
+            } catch (IOException ignored) {}
+            s = null;
+            throw e;
+        }
+    }
+
+    /** Closes the connection; safe to call even if initialize() failed or was never called. */
+    public void close()
+    {
+        if (s == null) {
+            return;
+        }
+        try {
+            s.close();
+        } catch (IOException e) {}
+    }
+
+    /** Sends one full gamepad state snapshot. */
+    public void sendControllerInput(short buttonFlags, byte leftTrigger, byte rightTrigger,
+        short leftStickX, short leftStickY, short rightStickX, short rightStickY) throws IOException
+    {
+        out.write(new NvControllerPacket(buttonFlags, leftTrigger,
+            rightTrigger, leftStickX, leftStickY,
+            rightStickX, rightStickY).toWire());
+        out.flush();
+    }
+
+    /** Sends a left mouse button press event. */
+    public void sendMouseButtonDown() throws IOException
+    {
+        out.write(new NvMouseButtonPacket(true).toWire());
+        out.flush();
+    }
+
+    /** Sends a left mouse button release event. */
+    public void sendMouseButtonUp() throws IOException
+    {
+        out.write(new NvMouseButtonPacket(false).toWire());
+        out.flush();
+    }
+
+    /** Sends a relative mouse movement event. */
+    public void sendMouseMove(short deltaX, short deltaY) throws IOException
+    {
+        out.write(new NvMouseMovePacket(deltaX, deltaY).toWire());
+        out.flush();
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/input/NvControllerPacket.java b/limelight-pc/src/com/limelight/nvstream/input/NvControllerPacket.java
new file mode 100644
index 0000000..06ab006
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/input/NvControllerPacket.java
@@ -0,0 +1,89 @@
+package com.limelight.nvstream.input;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Input packet carrying a full gamepad state snapshot (buttons, triggers,
+ * and both analog sticks).
+ *
+ * Wire layout: 4-byte big-endian type header (from NvInputPacket), then a
+ * fixed HEADER, the little-endian payload fields, and a fixed TAIL.
+ */
+public class NvControllerPacket extends NvInputPacket {
+    // Fixed bytes preceding the payload fields
+    public static final byte[] HEADER =
+    {
+        0x0A,
+        0x00,
+        0x00,
+        0x00,
+        0x00,
+        0x14
+    };
+
+    // Fixed bytes following the payload fields
+    public static final byte[] TAIL =
+    {
+        (byte)0x9C,
+        0x00,
+        0x00,
+        0x00,
+        0x55,
+        0x00
+    };
+
+    public static final int PACKET_TYPE = 0x18;
+
+    // Button bitmask values for buttonFlags
+    public static final short A_FLAG = 0x1000;
+    public static final short B_FLAG = 0x2000;
+    public static final short X_FLAG = 0x4000;
+    public static final short Y_FLAG = (short)0x8000;
+    public static final short UP_FLAG = 0x0001;
+    public static final short DOWN_FLAG = 0x0002;
+    public static final short LEFT_FLAG = 0x0004;
+    public static final short RIGHT_FLAG = 0x0008;
+    public static final short LB_FLAG = 0x0100;
+    public static final short RB_FLAG = 0x0200;
+    public static final short PLAY_FLAG = 0x0010;
+    public static final short BACK_FLAG = 0x0020;
+    public static final short LS_CLK_FLAG = 0x0040;
+    public static final short RS_CLK_FLAG = 0x0080;
+    public static final short SPECIAL_BUTTON_FLAG = 0x0400;
+
+    public static final short PAYLOAD_LENGTH = 24;
+    public static final short PACKET_LENGTH = PAYLOAD_LENGTH +
+        NvInputPacket.HEADER_LENGTH;
+
+    private short buttonFlags;
+    private byte leftTrigger;
+    private byte rightTrigger;
+    private short leftStickX;
+    private short leftStickY;
+    private short rightStickX;
+    private short rightStickY;
+
+    public NvControllerPacket(short buttonFlags, byte leftTrigger, byte rightTrigger,
+        short leftStickX, short leftStickY,
+        short rightStickX, short rightStickY)
+    {
+        super(PACKET_TYPE);
+
+        this.buttonFlags = buttonFlags;
+        this.leftTrigger = leftTrigger;
+        this.rightTrigger = rightTrigger;
+        this.leftStickX = leftStickX;
+        this.leftStickY = leftStickY;
+        this.rightStickX = rightStickX;
+        this.rightStickY = rightStickY;
+    }
+
+    /**
+     * Serializes the packet for the wire. Note the type header produced by
+     * toWireHeader() is big-endian while this buffer's payload fields are
+     * little-endian; the mix appears intentional (protocol-defined).
+     */
+    public byte[] toWire()
+    {
+        ByteBuffer bb = ByteBuffer.allocate(PACKET_LENGTH).order(ByteOrder.LITTLE_ENDIAN);
+
+        bb.put(toWireHeader());
+        bb.put(HEADER);
+        bb.putShort(buttonFlags);
+        bb.put(leftTrigger);
+        bb.put(rightTrigger);
+        bb.putShort(leftStickX);
+        bb.putShort(leftStickY);
+        bb.putShort(rightStickX);
+        bb.putShort(rightStickY);
+        bb.put(TAIL);
+
+        return bb.array();
+    }
+}
\ No newline at end of file
diff --git a/limelight-pc/src/com/limelight/nvstream/input/NvInputPacket.java b/limelight-pc/src/com/limelight/nvstream/input/NvInputPacket.java
new file mode 100644
index 0000000..ec98b2d
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/input/NvInputPacket.java
@@ -0,0 +1,26 @@
+package com.limelight.nvstream.input;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Base class for all input packets; provides the common 4-byte big-endian
+ * packet type header prepended to every packet.
+ */
+public abstract class NvInputPacket {
+    public static final int HEADER_LENGTH = 0x4;
+
+    protected int packetType;
+
+    public NvInputPacket(int packetType)
+    {
+        this.packetType = packetType;
+    }
+
+    /** Serializes the full packet (header + payload) for the wire. */
+    public abstract byte[] toWire();
+
+    /** Returns the 4-byte packet type header in big-endian byte order. */
+    public byte[] toWireHeader()
+    {
+        return new byte[] {
+            (byte) (packetType >> 24),
+            (byte) (packetType >> 16),
+            (byte) (packetType >> 8),
+            (byte) (packetType)
+        };
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/input/NvMouseButtonPacket.java b/limelight-pc/src/com/limelight/nvstream/input/NvMouseButtonPacket.java
new file mode 100644
index 0000000..8cb87ac
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/input/NvMouseButtonPacket.java
@@ -0,0 +1,36 @@
+package com.limelight.nvstream.input;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Input packet signaling a left mouse button press or release.
+ */
+public class NvMouseButtonPacket extends NvInputPacket {
+
+    private byte buttonEventType;
+
+    public static final int PACKET_TYPE = 0x5;
+    public static final int PAYLOAD_LENGTH = 5;
+    public static final int PACKET_LENGTH = PAYLOAD_LENGTH +
+        NvInputPacket.HEADER_LENGTH;
+
+    public static final byte PRESS_EVENT = 0x07;
+    public static final byte RELEASE_EVENT = 0x08;
+
+    /** @param leftButtonDown true for a press event, false for a release */
+    public NvMouseButtonPacket(boolean leftButtonDown)
+    {
+        super(PACKET_TYPE);
+
+        buttonEventType = leftButtonDown ?
+            PRESS_EVENT : RELEASE_EVENT;
+    }
+
+    /**
+     * Serializes as: 4-byte big-endian type header, one event byte, then a
+     * 4-byte big-endian integer (possibly a button index -- see FIXME).
+     */
+    @Override
+    public byte[] toWire() {
+        ByteBuffer bb = ByteBuffer.allocate(PACKET_LENGTH).order(ByteOrder.BIG_ENDIAN);
+
+        bb.put(toWireHeader());
+        bb.put(buttonEventType);
+        bb.putInt(1); // FIXME: button index?
+
+        return bb.array();
+    }
+}
diff --git a/limelight-pc/src/com/limelight/nvstream/input/NvMouseMovePacket.java b/limelight-pc/src/com/limelight/nvstream/input/NvMouseMovePacket.java
new file mode 100644
index 0000000..edafa9c
--- /dev/null
+++ b/limelight-pc/src/com/limelight/nvstream/input/NvMouseMovePacket.java
@@ -0,0 +1,42 @@
+package com.limelight.nvstream.input;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Input packet carrying a relative mouse movement (deltaX/deltaY).
+ */
+public class NvMouseMovePacket extends NvInputPacket {
+
+    // Fixed bytes preceding the movement deltas
+    private static final byte[] HEADER =
+    {
+        0x06,
+        0x00,
+        0x00,
+        0x00
+    };
+
+    public static final int PACKET_TYPE = 0x8;
+    public static final int PAYLOAD_LENGTH = 8;
+    public static final int PACKET_LENGTH = PAYLOAD_LENGTH +
+        NvInputPacket.HEADER_LENGTH;
+
+    private short deltaX;
+    private short deltaY;
+
+    public NvMouseMovePacket(short deltaX, short deltaY)
+    {
+        super(PACKET_TYPE);
+
+        this.deltaX = deltaX;
+        this.deltaY = deltaY;
+    }
+
+    /**
+     * Serializes as: 4-byte type header, fixed HEADER, then big-endian deltas.
+     * NOTE(review): this relies on ByteBuffer's default BIG_ENDIAN order;
+     * sibling packets set the order explicitly -- consider doing so here for
+     * consistency (behavior is identical).
+     */
+    @Override
+    public byte[] toWire() {
+        ByteBuffer bb = ByteBuffer.allocate(PACKET_LENGTH);
+
+        bb.put(toWireHeader());
+        bb.put(HEADER);
+        bb.putShort(deltaX);
+        bb.putShort(deltaY);
+
+        return bb.array();
+    }
+}