Faruq Rasid 2015-12-31 10:14:30 +08:00
parent 1d6b5a35bd
commit f8ab7b8e13
450 changed files with 6859 additions and 101095 deletions

View File: README.md

@ -1,14 +1,14 @@
#Limelight
#Moonlight
Limelight is an open source implementation of NVIDIA's GameStream, as used by the NVIDIA Shield.
[Moonlight](http://moonlight-stream.com) is an open source implementation of NVIDIA's GameStream, as used by the NVIDIA Shield.
We reverse engineered the Shield streaming software and created a version that can be run on any Android device.
Limelight will allow you to stream your full collection of games from your Windows PC to your Android device,
Moonlight will allow you to stream your full collection of games from your Windows PC to your Android device,
whether in your own home or over the internet.
[Limelight-pc](https://github.com/limelight-stream/limelight-pc) is also currently in development for Windows, OS X and Linux. Versions for [iOS](https://github.com/limelight-stream/limelight-ios) and [Windows and Windows Phone](https://github.com/limelight-stream/limelight-windows) are also in development.
[Moonlight-pc](https://github.com/moonlight-stream/moonlight-pc) is also currently in development for Windows, OS X and Linux. Versions for [iOS](https://github.com/moonlight-stream/moonlight-ios) and [Windows and Windows Phone](https://github.com/moonlight-stream/moonlight-windows) are also in development.
Check our [wiki](https://github.com/limelight-stream/limelight-android/wiki) for more detailed information or a troubleshooting guide.
Check our [wiki](https://github.com/moonlight-stream/moonlight-android/wiki) for more detailed information or a troubleshooting guide.
##Features
@ -18,8 +18,8 @@ Check our [wiki](https://github.com/limelight-stream/limelight-android/wiki) for
##Installation
* Download and install Limelight for Android from
[Google Play](https://play.google.com/store/apps/details?id=com.limelight)
* Download and install Moonlight for Android from
[Google Play](https://play.google.com/store/apps/details?id=com.limelight), [Amazon App Store](http://www.amazon.com/gp/product/B00JK4MFN2), or directly from the [releases page](https://github.com/moonlight-stream/moonlight-android/releases)
* Download [GeForce Experience](http://www.geforce.com/geforce-experience) and install on your Windows PC
##Requirements
@ -33,7 +33,7 @@ Check our [wiki](https://github.com/limelight-stream/limelight-android/wiki) for
* Turn on GameStream in the GFE settings
* If you are connecting from outside the same network, turn on internet
streaming
* When on the same network as your PC, open Limelight and tap on your PC in the list
* When on the same network as your PC, open Moonlight and tap on your PC in the list
* Accept the pairing confirmation on your PC
* Tap your PC again to view the list of apps to stream
* Play games!
@ -46,8 +46,6 @@ This project is being actively developed at [XDA Developers](http://forum.xda-de
2. Write code
3. Send Pull Requests
Check out our [website](http://limelight-stream.com) for project links and information.
##Authors
* [Cameron Gutman](https://github.com/cgutman)
@ -55,5 +53,5 @@ Check out our [website](http://limelight-stream.com) for project links and infor
* [Aaron Neyer](https://github.com/Aaronneyer)
* [Andrew Hennessy](https://github.com/yetanothername)
Limelight is the work of students at [Case Western](http://case.edu) and was
Moonlight is the work of students at [Case Western](http://case.edu) and was
started as a project at [MHacks](http://mhacks.org).

View File: app module .iml (IDE configuration)

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$/.." external.system.id="GRADLE" type="JAVA_MODULE" version="4">
<module external.linked.project.id=":app" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$/.." external.system.id="GRADLE" external.system.module.group="limelight-android" external.system.module.version="unspecified" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="android-gradle" name="Android-Gradle">
<configuration>
@ -9,11 +9,15 @@
<facet type="android" name="Android">
<configuration>
<option name="SELECTED_BUILD_VARIANT" value="nonRootDebug" />
<option name="SELECTED_TEST_ARTIFACT" value="_android_test_" />
<option name="ASSEMBLE_TASK_NAME" value="assembleNonRootDebug" />
<option name="COMPILE_JAVA_TASK_NAME" value="compileNonRootDebugSources" />
<option name="ASSEMBLE_TEST_TASK_NAME" value="assembleNonRootDebugTest" />
<option name="SOURCE_GEN_TASK_NAME" value="generateNonRootDebugSources" />
<option name="TEST_SOURCE_GEN_TASK_NAME" value="generateNonRootDebugTestSources" />
<option name="ASSEMBLE_TEST_TASK_NAME" value="assembleNonRootDebugAndroidTest" />
<option name="COMPILE_JAVA_TEST_TASK_NAME" value="compileNonRootDebugAndroidTestSources" />
<afterSyncTasks>
<task>generateNonRootDebugAndroidTestSources</task>
<task>generateNonRootDebugSources</task>
</afterSyncTasks>
<option name="ALLOW_USER_CONFIGURATION" value="false" />
<option name="MANIFEST_FILE_RELATIVE_PATH" value="/src/main/AndroidManifest.xml" />
<option name="RES_FOLDER_RELATIVE_PATH" value="/src/main/res" />
@ -22,8 +26,9 @@
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="false">
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_7" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/build/intermediates/classes/nonRoot/debug" />
<output-test url="file://$MODULE_DIR$/build/intermediates/classes/androidTest/nonRoot/debug" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/r/nonRoot/debug" isTestSource="false" generated="true" />
@ -31,7 +36,7 @@
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/nonRoot/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/nonRoot/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/nonRoot/debug" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/generated/nonRoot/debug" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/nonRoot/debug" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRootDebug/res" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRootDebug/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRootDebug/assets" type="java-resource" />
@ -39,12 +44,12 @@
<sourceFolder url="file://$MODULE_DIR$/src/nonRootDebug/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRootDebug/jni" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRootDebug/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/r/test/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/aidl/test/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/test/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/test/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/test/nonRoot/debug" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/generated/test/nonRoot/debug" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/r/androidTest/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/aidl/androidTest/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/androidTest/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/androidTest/nonRoot/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/androidTest/nonRoot/debug" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/androidTest/nonRoot/debug" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRoot/res" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRoot/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/nonRoot/assets" type="java-resource" />
@ -101,20 +106,15 @@
<excludeFolder url="file://$MODULE_DIR$/build/outputs" />
<excludeFolder url="file://$MODULE_DIR$/build/tmp" />
</content>
<orderEntry type="jdk" jdkName="Android API 21 Platform" jdkType="Android SDK" />
<orderEntry type="jdk" jdkName="Android API 23 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" exported="" name="bcprov-jdk15on-1.51" level="project" />
<orderEntry type="library" exported="" name="androidasync-2.0.5" level="project" />
<orderEntry type="library" exported="" name="gson-2.3.1" level="project" />
<orderEntry type="library" exported="" name="support-v4-r7" level="project" />
<orderEntry type="library" exported="" name="jmdns-fixed" level="project" />
<orderEntry type="library" exported="" name="bcpkix-jdk15on-1.51" level="project" />
<orderEntry type="library" exported="" name="okhttp-2.4.0" level="project" />
<orderEntry type="library" exported="" name="bcprov-jdk15on-1.52" level="project" />
<orderEntry type="library" exported="" name="jmdns-3.4.2" level="project" />
<orderEntry type="library" exported="" name="okio-1.5.0" level="project" />
<orderEntry type="library" exported="" name="jcodec-0.1.9-patched" level="project" />
<orderEntry type="library" exported="" name="tinyrtsp" level="project" />
<orderEntry type="library" exported="" name="limelight-common" level="project" />
<orderEntry type="library" exported="" name="ion-2.0.5" level="project" />
<orderEntry type="library" exported="" name="okhttp-2.2.0" level="project" />
<orderEntry type="library" exported="" name="jcodec-0.1.9" level="project" />
<orderEntry type="library" exported="" name="okio-1.2.0" level="project" />
<orderEntry type="library" exported="" name="bcpkix-jdk15on-1.52" level="project" />
</component>
</module>

View File: app/build.gradle

@ -4,15 +4,15 @@ import org.apache.tools.ant.taskdefs.condition.Os
apply plugin: 'com.android.application'
android {
compileSdkVersion 21
buildToolsVersion "21.1.2"
compileSdkVersion 23
buildToolsVersion "23.0.2"
defaultConfig {
minSdkVersion 16
targetSdkVersion 21
targetSdkVersion 23
versionName "3.1-beta2"
versionCode = 51
versionName "4.0.2"
versionCode = 79
}
productFlavors {
@ -62,19 +62,14 @@ android {
}
dependencies {
compile group: 'org.jcodec', name: 'jcodec', version: '+'
compile group: 'org.bouncycastle', name: 'bcprov-jdk15on', version: '1.52'
compile group: 'org.bouncycastle', name: 'bcpkix-jdk15on', version: '1.52'
compile group: 'org.bouncycastle', name: 'bcprov-jdk15on', version: '+'
compile group: 'org.bouncycastle', name: 'bcpkix-jdk15on', version: '+'
compile group: 'com.squareup.okhttp', name: 'okhttp', version:'2.4.0'
compile group: 'com.squareup.okio', name:'okio', version:'1.5.0'
compile group: 'com.google.android', name: 'support-v4', version:'+'
compile group: 'com.koushikdutta.ion', name: 'ion', version:'+'
compile group: 'com.google.code.gson', name: 'gson', version:'+'
compile group: 'com.squareup.okhttp', name: 'okhttp', version:'+'
compile group: 'com.squareup.okio', name:'okio', version:'+'
compile files('libs/jmdns-fixed.jar')
compile files('libs/jmdns-3.4.2.jar')
compile files('libs/limelight-common.jar')
compile files('libs/tinyrtsp.jar')
compile files('libs/jcodec-0.1.9-patched.jar')
}

Binary file not shown.

BIN app/libs/jmdns-3.4.2.jar (new file)

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File: app/src/main/AndroidManifest.xml

@ -11,12 +11,17 @@
<uses-feature android:name="android.hardware.touchscreen" android:required="false" />
<uses-feature android:name="android.hardware.wifi" android:required="false" />
<uses-feature android:name="android.hardware.gamepad" android:required="false" />
<uses-feature android:name="android.hardware.usb.host" android:required="false" />
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:theme="@style/AppTheme" >
<!-- Samsung multi-window support -->
<uses-library android:name="com.sec.android.app.multiwindow" android:required="false" />
<meta-data android:name="com.sec.android.support.multiwindow" android:value="true" />
<!-- Launcher for traditional devices -->
<activity
android:name=".PcView"
@ -24,6 +29,7 @@
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
<category android:name="android.intent.category.MULTIWINDOW_LAUNCHER" />
<category android:name="tv.ouya.intent.category.APP" />
</intent-filter>
</activity>
@ -88,6 +94,9 @@
<service
android:name=".computers.ComputerManagerService"
android:label="Computer Management Service" />
<service
android:name=".binding.input.driver.UsbDriverService"
android:label="Usb Driver Service" />
</application>
</manifest>

View File: AppView.java

@ -1,25 +1,14 @@
package com.limelight;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
import org.xmlpull.v1.XmlPullParserException;
import com.limelight.binding.PlatformBinding;
import com.limelight.binding.crypto.AndroidCryptoProvider;
import com.limelight.computers.ComputerManagerListener;
import com.limelight.computers.ComputerManagerService;
import com.limelight.grid.AppGridAdapter;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.GfeHttpResponseException;
import com.limelight.nvstream.http.NvApp;
import com.limelight.nvstream.http.NvHTTP;
import com.limelight.preferences.PreferenceConfiguration;
@ -27,14 +16,13 @@ import com.limelight.ui.AdapterFragment;
import com.limelight.ui.AdapterFragmentCallbacks;
import com.limelight.utils.CacheHelper;
import com.limelight.utils.Dialog;
import com.limelight.utils.ServerHelper;
import com.limelight.utils.SpinnerDialog;
import com.limelight.utils.UiHelper;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Service;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.res.Configuration;
@ -48,7 +36,6 @@ import android.view.ContextMenu.ContextMenuInfo;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.AdapterView.AdapterContextMenuInfo;
@ -61,9 +48,8 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
private ComputerManagerService.ApplistPoller poller;
private SpinnerDialog blockingLoadSpinner;
private String lastRawApplist;
private int consecutiveAppListFailures = 0;
private final static int CONSECUTIVE_FAILURE_LIMIT = 3;
private int lastRunningAppId;
private boolean suspendGridUpdates;
private final static int START_OR_RESUME_ID = 1;
private final static int QUIT_ID = 2;
@ -74,7 +60,7 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
public final static String UUID_EXTRA = "UUID";
private ComputerManagerService.ComputerManagerBinder managerBinder;
private ServiceConnection serviceConnection = new ServiceConnection() {
private final ServiceConnection serviceConnection = new ServiceConnection() {
public void onServiceConnected(ComponentName className, IBinder binder) {
final ComputerManagerService.ComputerManagerBinder localBinder =
((ComputerManagerService.ComputerManagerBinder)binder);
@ -121,11 +107,6 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
}
};
private InetAddress getAddress() {
return computer.reachability == ComputerDetails.Reachability.LOCAL ?
computer.localIp : computer.remoteIp;
}
private void startComputerUpdates() {
if (managerBinder == null) {
return;
@ -134,15 +115,17 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
managerBinder.startPolling(new ComputerManagerListener() {
@Override
public void notifyComputerUpdated(ComputerDetails details) {
// Don't care about other computers
if (details != computer) {
// Do nothing if updates are suspended
if (suspendGridUpdates) {
return;
}
if (details.state != ComputerDetails.State.ONLINE) {
consecutiveAppListFailures++;
// Don't care about other computers
if (!details.uuid.toString().equalsIgnoreCase(uuidString)) {
return;
}
if (consecutiveAppListFailures >= CONSECUTIVE_FAILURE_LIMIT) {
if (details.state == ComputerDetails.State.OFFLINE) {
// The PC is unreachable now
AppView.this.runOnUiThread(new Runnable() {
@Override
@ -152,27 +135,34 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
finish();
}
});
}
return;
}
consecutiveAppListFailures = 0;
// App list is the same or empty; nothing to do
// App list is the same or empty
if (details.rawAppList == null || details.rawAppList.equals(lastRawApplist)) {
// Let's check if the running app ID changed
if (details.runningGameId != lastRunningAppId) {
// Update the currently running game using the app ID
lastRunningAppId = details.runningGameId;
updateUiWithServerinfo(details);
}
return;
}
lastRunningAppId = details.runningGameId;
lastRawApplist = details.rawAppList;
try {
lastRawApplist = details.rawAppList;
updateUiWithAppList(NvHTTP.getAppListByReader(new StringReader(details.rawAppList)));
if (blockingLoadSpinner != null) {
blockingLoadSpinner.dismiss();
blockingLoadSpinner = null;
}
} catch (Exception e) {}
} catch (Exception ignored) {}
}
});
@ -190,6 +180,10 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
if (managerBinder != null) {
managerBinder.stopPolling();
}
if (appGridAdapter != null) {
appGridAdapter.cancelQueuedOperations();
}
}
@Override
@ -272,10 +266,6 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
int runningAppId = -1;
for (int i = 0; i < appGridAdapter.getCount(); i++) {
AppObject app = (AppObject) appGridAdapter.getItem(i);
if (app.app == null) {
continue;
}
if (app.app.getIsRunning()) {
runningAppId = app.app.getAppId();
break;
@ -290,10 +280,6 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
AdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo;
AppObject selectedApp = (AppObject) appGridAdapter.getItem(info.position);
if (selectedApp == null || selectedApp.app == null) {
return;
}
int runningAppId = getRunningAppId();
if (runningAppId != -1) {
if (runningAppId == selectedApp.app.getAppId()) {
@ -311,33 +297,6 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
public void onContextMenuClosed(Menu menu) {
}
private void displayQuitConfirmationDialog(final Runnable onYes, final Runnable onNo) {
DialogInterface.OnClickListener dialogClickListener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which){
case DialogInterface.BUTTON_POSITIVE:
if (onYes != null) {
onYes.run();
}
break;
case DialogInterface.BUTTON_NEGATIVE:
if (onNo != null) {
onNo.run();
}
break;
}
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage(getResources().getString(R.string.applist_quit_confirmation))
.setPositiveButton(getResources().getString(R.string.yes), dialogClickListener)
.setNegativeButton(getResources().getString(R.string.no), dialogClickListener)
.show();
}
@Override
public boolean onContextItemSelected(MenuItem item) {
AdapterContextMenuInfo info = (AdapterContextMenuInfo) item.getMenuInfo();
@ -345,25 +304,37 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
switch (item.getItemId()) {
case START_WTIH_QUIT:
// Display a confirmation dialog first
displayQuitConfirmationDialog(new Runnable() {
UiHelper.displayQuitConfirmationDialog(this, new Runnable() {
@Override
public void run() {
doStart(app.app);
ServerHelper.doStart(AppView.this, app.app, computer, managerBinder);
}
}, null);
return true;
case START_OR_RESUME_ID:
// Resume is the same as start for us
doStart(app.app);
ServerHelper.doStart(AppView.this, app.app, computer, managerBinder);
return true;
case QUIT_ID:
// Display a confirmation dialog first
displayQuitConfirmationDialog(new Runnable() {
UiHelper.displayQuitConfirmationDialog(this, new Runnable() {
@Override
public void run() {
doQuit(app.app);
suspendGridUpdates = true;
ServerHelper.doQuit(AppView.this,
ServerHelper.getCurrentAddressFromComputer(computer),
app.app, managerBinder, new Runnable() {
@Override
public void run() {
// Trigger a poll immediately
suspendGridUpdates = false;
if (poller != null) {
poller.pollNow();
}
}
});
}
}, null);
return true;
@ -376,26 +347,61 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
}
}
private void updateUiWithServerinfo(final ComputerDetails details) {
AppView.this.runOnUiThread(new Runnable() {
@Override
public void run() {
boolean updated = false;
// Look through our current app list to tag the running app
for (int i = 0; i < appGridAdapter.getCount(); i++) {
AppObject existingApp = (AppObject) appGridAdapter.getItem(i);
// There can only be one or zero apps running.
if (existingApp.app.getIsRunning() &&
existingApp.app.getAppId() == details.runningGameId) {
// This app was running and still is, so we're done now
return;
}
else if (existingApp.app.getAppId() == details.runningGameId) {
// This app wasn't running but now is
existingApp.app.setIsRunning(true);
updated = true;
}
else if (existingApp.app.getIsRunning()) {
// This app was running but now isn't
existingApp.app.setIsRunning(false);
updated = true;
}
else {
// This app wasn't running and still isn't
}
}
if (updated) {
appGridAdapter.notifyDataSetChanged();
}
}
});
}
private void updateUiWithAppList(final List<NvApp> appList) {
AppView.this.runOnUiThread(new Runnable() {
@Override
public void run() {
boolean updated = false;
// First handle app updates and additions
for (NvApp app : appList) {
boolean foundExistingApp = false;
// Try to update an existing app in the list first
for (int i = 0; i < appGridAdapter.getCount(); i++) {
AppObject existingApp = (AppObject) appGridAdapter.getItem(i);
if (existingApp.app == null) {
continue;
}
if (existingApp.app.getAppId() == app.getAppId()) {
// Found the app; update its properties
if (existingApp.app.getIsRunning() != app.getIsRunning()) {
existingApp.app.setIsRunningBoolean(app.getIsRunning());
existingApp.app.setIsRunning(app.getIsRunning());
updated = true;
}
if (!existingApp.app.getAppName().equals(app.getAppName())) {
@ -415,6 +421,34 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
}
}
// Next handle app removals
int i = 0;
while (i < appGridAdapter.getCount()) {
boolean foundExistingApp = false;
AppObject existingApp = (AppObject) appGridAdapter.getItem(i);
// Check if this app is in the latest list
for (NvApp app : appList) {
if (existingApp.app.getAppId() == app.getAppId()) {
foundExistingApp = true;
break;
}
}
// This app was removed in the latest app list
if (!foundExistingApp) {
appGridAdapter.removeApp(existingApp);
updated = true;
// Check this same index again because the item at i+1 is now at i after
// the removal
continue;
}
// Move on to the next item
i++;
}
if (updated) {
appGridAdapter.notifyDataSetChanged();
}
@ -422,58 +456,6 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
});
}
private void doStart(NvApp app) {
Intent intent = new Intent(this, Game.class);
intent.putExtra(Game.EXTRA_HOST,
computer.reachability == ComputerDetails.Reachability.LOCAL ?
computer.localIp.getHostAddress() : computer.remoteIp.getHostAddress());
intent.putExtra(Game.EXTRA_APP, app.getAppName());
intent.putExtra(Game.EXTRA_UNIQUEID, managerBinder.getUniqueId());
intent.putExtra(Game.EXTRA_STREAMING_REMOTE,
computer.reachability != ComputerDetails.Reachability.LOCAL);
startActivity(intent);
}
private void doQuit(final NvApp app) {
Toast.makeText(AppView.this, getResources().getString(R.string.applist_quit_app)+" "+app.getAppName()+"...", Toast.LENGTH_SHORT).show();
new Thread(new Runnable() {
@Override
public void run() {
NvHTTP httpConn;
String message;
try {
httpConn = new NvHTTP(getAddress(),
managerBinder.getUniqueId(), null, PlatformBinding.getCryptoProvider(AppView.this));
if (httpConn.quitApp()) {
message = getResources().getString(R.string.applist_quit_success)+" "+app.getAppName();
}
else {
message = getResources().getString(R.string.applist_quit_fail)+" "+app.getAppName();
}
} catch (UnknownHostException e) {
message = getResources().getString(R.string.error_unknown_host);
} catch (FileNotFoundException e) {
message = getResources().getString(R.string.error_404);
} catch (Exception e) {
message = e.getMessage();
} finally {
// Trigger a poll immediately
if (poller != null) {
poller.pollNow();
}
}
final String toastMessage = message;
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(AppView.this, toastMessage, Toast.LENGTH_LONG).show();
}
});
}
}).start();
}
@Override
public int getAdapterFragmentLayoutId() {
return PreferenceConfiguration.readPreferences(this).listMode ?
@ -489,25 +471,26 @@ public class AppView extends Activity implements AdapterFragmentCallbacks {
public void onItemClick(AdapterView<?> arg0, View arg1, int pos,
long id) {
AppObject app = (AppObject) appGridAdapter.getItem(pos);
if (app == null || app.app == null) {
return;
}
// Only open the context menu if something is running, otherwise start it
if (getRunningAppId() != -1) {
openContextMenu(arg1);
} else {
doStart(app.app);
ServerHelper.doStart(AppView.this, app.app, computer, managerBinder);
}
}
});
registerForContextMenu(listView);
listView.requestFocus();
}
public class AppObject {
public NvApp app;
public final NvApp app;
public AppObject(NvApp app) {
if (app == null) {
throw new IllegalArgumentException("app must not be null");
}
this.app = app;
}
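
The ServerHelper class used above is not part of this diff. Below is a minimal sketch of what ServerHelper.getCurrentAddressFromComputer() and ServerHelper.doStart() might look like, assuming they simply hoist the removed AppView.getAddress() and AppView.doStart() logic into a shared helper; the class and method names come from the call sites, everything else is an assumption rather than the project's actual implementation.

```java
package com.limelight.utils;

import java.net.InetAddress;

import android.app.Activity;
import android.content.Intent;

import com.limelight.Game;
import com.limelight.computers.ComputerManagerService;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.NvApp;

public class ServerHelper {
    // Mirrors the removed AppView.getAddress(): pick the local or remote IP
    // based on the last known reachability of the PC.
    public static InetAddress getCurrentAddressFromComputer(ComputerDetails computer) {
        return computer.reachability == ComputerDetails.Reachability.LOCAL ?
                computer.localIp : computer.remoteIp;
    }

    // Mirrors the removed AppView.doStart(): build the Game intent from the
    // computer details and launch the streaming activity.
    public static void doStart(Activity parent, NvApp app, ComputerDetails computer,
                               ComputerManagerService.ComputerManagerBinder managerBinder) {
        Intent intent = new Intent(parent, Game.class);
        intent.putExtra(Game.EXTRA_HOST,
                getCurrentAddressFromComputer(computer).getHostAddress());
        intent.putExtra(Game.EXTRA_APP_NAME, app.getAppName());
        intent.putExtra(Game.EXTRA_APP_ID, app.getAppId());
        intent.putExtra(Game.EXTRA_UNIQUEID, managerBinder.getUniqueId());
        intent.putExtra(Game.EXTRA_STREAMING_REMOTE,
                computer.reachability != ComputerDetails.Reachability.LOCAL);
        parent.startActivity(intent);
    }
}
```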

View File: Game.java

@ -1,19 +1,22 @@
package com.limelight;
import com.limelight.LimelightBuildProps;
import com.limelight.binding.PlatformBinding;
import com.limelight.binding.input.ControllerHandler;
import com.limelight.binding.input.KeyboardTranslator;
import com.limelight.binding.input.TouchContext;
import com.limelight.binding.input.driver.UsbDriverService;
import com.limelight.binding.input.evdev.EvdevHandler;
import com.limelight.binding.input.evdev.EvdevListener;
import com.limelight.binding.input.evdev.EvdevWatcher;
import com.limelight.binding.input.virtual_controller.VirtualController;
import com.limelight.binding.video.ConfigurableDecoderRenderer;
import com.limelight.binding.video.EnhancedDecoderRenderer;
import com.limelight.binding.video.MediaCodecDecoderRenderer;
import com.limelight.binding.video.MediaCodecHelper;
import com.limelight.nvstream.NvConnection;
import com.limelight.nvstream.NvConnectionListener;
import com.limelight.nvstream.StreamConfiguration;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.http.NvApp;
import com.limelight.nvstream.input.KeyboardPacket;
import com.limelight.nvstream.input.MouseButtonPacket;
import com.limelight.preferences.PreferenceConfiguration;
@ -23,7 +26,11 @@ import com.limelight.utils.SpinnerDialog;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.res.Configuration;
import android.graphics.Point;
import android.hardware.input.InputManager;
@ -32,6 +39,7 @@ import android.net.ConnectivityManager;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.SystemClock;
import android.view.Display;
import android.view.InputDevice;
@ -62,9 +70,12 @@ public class Game extends Activity implements SurfaceHolder.Callback,
private int lastButtonState = 0;
// Only 2 touches are supported
private TouchContext[] touchContextMap = new TouchContext[2];
private final TouchContext[] touchContextMap = new TouchContext[2];
private long threeFingerDownTime = 0;
private static final double REFERENCE_HORIZ_RES = 1280;
private static final double REFERENCE_VERT_RES = 720;
private static final int THREE_FINGER_TAP_THRESHOLD = 300;
private ControllerHandler controllerHandler;
@ -72,27 +83,44 @@ public class Game extends Activity implements SurfaceHolder.Callback,
private KeyboardTranslator keybTranslator;
private PreferenceConfiguration prefConfig;
private Point screenSize = new Point(0, 0);
private final Point screenSize = new Point(0, 0);
private NvConnection conn;
private SpinnerDialog spinner;
private boolean displayedFailureDialog = false;
private boolean connecting = false;
private boolean connected = false;
private boolean deferredSurfaceResize = false;
private EvdevWatcher evdevWatcher;
private EvdevHandler evdevHandler;
private int modifierFlags = 0;
private boolean grabbedInput = true;
private boolean grabComboDown = false;
private ConfigurableDecoderRenderer decoderRenderer;
private EnhancedDecoderRenderer decoderRenderer;
private WifiManager.WifiLock wifiLock;
private int drFlags = 0;
private boolean connectedToUsbDriverService = false;
private ServiceConnection usbDriverServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
UsbDriverService.UsbDriverBinder binder = (UsbDriverService.UsbDriverBinder) iBinder;
binder.setListener(controllerHandler);
connectedToUsbDriverService = true;
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
connectedToUsbDriverService = false;
}
};
public static final String EXTRA_HOST = "Host";
public static final String EXTRA_APP = "App";
public static final String EXTRA_APP_NAME = "AppName";
public static final String EXTRA_APP_ID = "AppId";
public static final String EXTRA_UNIQUEID = "UniqueId";
public static final String EXTRA_STREAMING_REMOTE = "Remote";
@ -141,16 +169,6 @@ public class Game extends Activity implements SurfaceHolder.Callback,
// Read the stream preferences
prefConfig = PreferenceConfiguration.readPreferences(this);
switch (prefConfig.decoder) {
case PreferenceConfiguration.FORCE_SOFTWARE_DECODER:
drFlags |= VideoDecoderRenderer.FLAG_FORCE_SOFTWARE_DECODING;
break;
case PreferenceConfiguration.AUTOSELECT_DECODER:
break;
case PreferenceConfiguration.FORCE_HARDWARE_DECODER:
drFlags |= VideoDecoderRenderer.FLAG_FORCE_HARDWARE_DECODING;
break;
}
if (prefConfig.stretchVideo) {
drFlags |= VideoDecoderRenderer.FLAG_FILL_SCREEN;
@ -174,17 +192,42 @@ public class Game extends Activity implements SurfaceHolder.Callback,
wifiLock.acquire();
String host = Game.this.getIntent().getStringExtra(EXTRA_HOST);
String app = Game.this.getIntent().getStringExtra(EXTRA_APP);
String appName = Game.this.getIntent().getStringExtra(EXTRA_APP_NAME);
int appId = Game.this.getIntent().getIntExtra(EXTRA_APP_ID, StreamConfiguration.INVALID_APP_ID);
String uniqueId = Game.this.getIntent().getStringExtra(EXTRA_UNIQUEID);
boolean remote = Game.this.getIntent().getBooleanExtra(EXTRA_STREAMING_REMOTE, false);
decoderRenderer = new ConfigurableDecoderRenderer();
decoderRenderer.initializeWithFlags(drFlags);
if (appId == StreamConfiguration.INVALID_APP_ID) {
finish();
return;
}
// Initialize the MediaCodec helper before creating the decoder
MediaCodecHelper.initializeWithContext(this);
decoderRenderer = new MediaCodecDecoderRenderer(prefConfig.videoFormat);
// Display a message to the user if H.265 was forced on but we still didn't find a decoder
if (prefConfig.videoFormat == PreferenceConfiguration.FORCE_H265_ON && !decoderRenderer.isHevcSupported()) {
Toast.makeText(this, "No H.265 decoder found.\nFalling back to H.264.", Toast.LENGTH_LONG).show();
}
if (!decoderRenderer.isAvcSupported()) {
if (spinner != null) {
spinner.dismiss();
spinner = null;
}
// If we can't find an AVC decoder, we can't proceed
Dialog.displayDialog(this, getResources().getString(R.string.conn_error_title),
"This device or ROM doesn't support hardware accelerated H.264 playback.", true);
return;
}
StreamConfiguration config = new StreamConfiguration.Builder()
.setResolution(prefConfig.width, prefConfig.height)
.setRefreshRate(prefConfig.fps)
.setApp(app)
.setApp(new NvApp(appName, appId))
.setBitrate(prefConfig.bitrate * 1000)
.setEnableSops(prefConfig.enableSops)
.enableAdaptiveResolution((decoderRenderer.getCapabilities() &
@ -192,6 +235,10 @@ public class Game extends Activity implements SurfaceHolder.Callback,
.enableLocalAudioPlayback(prefConfig.playHostAudio)
.setMaxPacketSize(remote ? 1024 : 1292)
.setRemote(remote)
.setHevcSupported(decoderRenderer.isHevcSupported())
.setAudioConfiguration(prefConfig.enable51Surround ?
StreamConfiguration.AUDIO_CONFIGURATION_5_1 :
StreamConfiguration.AUDIO_CONFIGURATION_STEREO)
.build();
// Initialize the connection
@ -202,30 +249,53 @@ public class Game extends Activity implements SurfaceHolder.Callback,
InputManager inputManager = (InputManager) getSystemService(Context.INPUT_SERVICE);
inputManager.registerInputDeviceListener(controllerHandler, null);
boolean aspectRatioMatch = false;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT) {
// On KitKat and later (where we can use the whole screen via immersive mode), we'll
// calculate whether we need to scale by aspect ratio or not. If not, we'll use
// setFixedSize so we can handle 4K properly. The only known devices that have
// >= 4K screens have exactly 4K screens, so we'll be able to hit this good path
// on these devices. On Marshmallow, we can start changing to 4K manually but no
// 4K devices run 6.0 at the moment.
double screenAspectRatio = ((double)screenSize.y) / screenSize.x;
double streamAspectRatio = ((double)prefConfig.height) / prefConfig.width;
if (Math.abs(screenAspectRatio - streamAspectRatio) < 0.001) {
LimeLog.info("Stream has compatible aspect ratio with output display");
aspectRatioMatch = true;
}
}
SurfaceHolder sh = sv.getHolder();
if (prefConfig.stretchVideo || !decoderRenderer.isHardwareAccelerated()) {
if (prefConfig.stretchVideo || aspectRatioMatch) {
// Set the surface to the size of the video
sh.setFixedSize(prefConfig.width, prefConfig.height);
}
else {
deferredSurfaceResize = true;
}
// Initialize touch contexts
for (int i = 0; i < touchContextMap.length; i++) {
touchContextMap[i] = new TouchContext(conn, i,
((double)prefConfig.width / (double)screenSize.x),
((double)prefConfig.height / (double)screenSize.y));
(REFERENCE_HORIZ_RES / (double)screenSize.x),
(REFERENCE_VERT_RES / (double)screenSize.y));
}
if (LimelightBuildProps.ROOT_BUILD) {
// Start watching for raw input
evdevWatcher = new EvdevWatcher(this);
evdevWatcher.start();
evdevHandler = new EvdevHandler(this, this);
evdevHandler.start();
}
if (prefConfig.virtualController_enable)
{
FrameLayout frameLayout = (FrameLayout) findViewById(R.id.frameLayout);
virtualController = new VirtualController(conn, (FrameLayout) findViewById(R.id.surfaceView).getParent(), getApplicationContext());
}
virtualController = new VirtualController(conn, frameLayout, getApplicationContext());
if (prefConfig.usbDriver) {
// Start the USB driver
bindService(new Intent(this, UsbDriverService.class),
usbDriverServiceConnection, Service.BIND_AUTO_CREATE);
}
// The connection will be started when the surface gets created
@ -256,7 +326,7 @@ public class Game extends Activity implements SurfaceHolder.Callback,
}
@SuppressLint("InlinedApi")
private Runnable hideSystemUi = new Runnable() {
private final Runnable hideSystemUi = new Runnable() {
@Override
public void run() {
// Use immersive mode on 4.4+ or standard low profile on previous builds
@ -295,6 +365,15 @@ public class Game extends Activity implements SurfaceHolder.Callback,
InputManager inputManager = (InputManager) getSystemService(Context.INPUT_SERVICE);
inputManager.unregisterInputDeviceListener(controllerHandler);
wifiLock.release();
if (connectedToUsbDriverService) {
// Unbind from the USB driver service
unbindService(usbDriverServiceConnection);
}
VideoDecoderRenderer.VideoFormat videoFormat = conn.getActiveVideoFormat();
displayedFailureDialog = true;
stopConnection();
@ -311,6 +390,16 @@ public class Game extends Activity implements SurfaceHolder.Callback,
message = getResources().getString(R.string.conn_hardware_latency)+" "+averageDecoderLat+" ms";
}
// Add the video codec to the post-stream toast
if (message != null && videoFormat != VideoDecoderRenderer.VideoFormat.Unknown) {
if (videoFormat == VideoDecoderRenderer.VideoFormat.H265) {
message += " [H.265]";
}
else {
message += " [H.264]";
}
}
if (message != null) {
Toast.makeText(this, message, Toast.LENGTH_LONG).show();
}
@ -318,23 +407,15 @@ public class Game extends Activity implements SurfaceHolder.Callback,
finish();
}
@Override
protected void onDestroy() {
super.onDestroy();
wifiLock.release();
}
private Runnable toggleGrab = new Runnable() {
private final Runnable toggleGrab = new Runnable() {
@Override
public void run() {
if (evdevWatcher != null) {
if (evdevHandler != null) {
if (grabbedInput) {
evdevWatcher.ungrabAll();
evdevHandler.ungrabAll();
}
else {
evdevWatcher.regrabAll();
evdevHandler.regrabAll();
}
}
@ -522,9 +603,56 @@ public class Game extends Activity implements SurfaceHolder.Callback,
}
else if ((event.getSource() & InputDevice.SOURCE_CLASS_POINTER) != 0)
{
// This case is for mice
if (event.getSource() == InputDevice.SOURCE_MOUSE)
{
int changedButtons = event.getButtonState() ^ lastButtonState;
if (event.getActionMasked() == MotionEvent.ACTION_SCROLL) {
// Send the vertical scroll packet
byte vScrollClicks = (byte) event.getAxisValue(MotionEvent.AXIS_VSCROLL);
conn.sendMouseScroll(vScrollClicks);
}
if ((changedButtons & MotionEvent.BUTTON_PRIMARY) != 0) {
if ((event.getButtonState() & MotionEvent.BUTTON_PRIMARY) != 0) {
conn.sendMouseButtonDown(MouseButtonPacket.BUTTON_LEFT);
}
else {
conn.sendMouseButtonUp(MouseButtonPacket.BUTTON_LEFT);
}
}
if ((changedButtons & MotionEvent.BUTTON_SECONDARY) != 0) {
if ((event.getButtonState() & MotionEvent.BUTTON_SECONDARY) != 0) {
conn.sendMouseButtonDown(MouseButtonPacket.BUTTON_RIGHT);
}
else {
conn.sendMouseButtonUp(MouseButtonPacket.BUTTON_RIGHT);
}
}
if ((changedButtons & MotionEvent.BUTTON_TERTIARY) != 0) {
if ((event.getButtonState() & MotionEvent.BUTTON_TERTIARY) != 0) {
conn.sendMouseButtonDown(MouseButtonPacket.BUTTON_MIDDLE);
}
else {
conn.sendMouseButtonUp(MouseButtonPacket.BUTTON_MIDDLE);
}
}
// First process the history
for (int i = 0; i < event.getHistorySize(); i++) {
updateMousePosition((int)event.getHistoricalX(i), (int)event.getHistoricalY(i));
}
// Now process the current values
updateMousePosition((int)event.getX(), (int)event.getY());
lastButtonState = event.getButtonState();
}
// This case is for touch-based input devices
if (event.getSource() == InputDevice.SOURCE_TOUCHSCREEN ||
event.getSource() == InputDevice.SOURCE_STYLUS)
else
{
int actionIndex = event.getActionIndex();
@ -603,59 +731,6 @@ public class Game extends Activity implements SurfaceHolder.Callback,
return false;
}
}
// This case is for mice
else if (event.getSource() == InputDevice.SOURCE_MOUSE)
{
int changedButtons = event.getButtonState() ^ lastButtonState;
if (event.getActionMasked() == MotionEvent.ACTION_SCROLL) {
// Send the vertical scroll packet
byte vScrollClicks = (byte) event.getAxisValue(MotionEvent.AXIS_VSCROLL);
conn.sendMouseScroll(vScrollClicks);
}
if ((changedButtons & MotionEvent.BUTTON_PRIMARY) != 0) {
if ((event.getButtonState() & MotionEvent.BUTTON_PRIMARY) != 0) {
conn.sendMouseButtonDown(MouseButtonPacket.BUTTON_LEFT);
}
else {
conn.sendMouseButtonUp(MouseButtonPacket.BUTTON_LEFT);
}
}
if ((changedButtons & MotionEvent.BUTTON_SECONDARY) != 0) {
if ((event.getButtonState() & MotionEvent.BUTTON_SECONDARY) != 0) {
conn.sendMouseButtonDown(MouseButtonPacket.BUTTON_RIGHT);
}
else {
conn.sendMouseButtonUp(MouseButtonPacket.BUTTON_RIGHT);
}
}
if ((changedButtons & MotionEvent.BUTTON_TERTIARY) != 0) {
if ((event.getButtonState() & MotionEvent.BUTTON_TERTIARY) != 0) {
conn.sendMouseButtonDown(MouseButtonPacket.BUTTON_MIDDLE);
}
else {
conn.sendMouseButtonUp(MouseButtonPacket.BUTTON_MIDDLE);
}
}
// First process the history
for (int i = 0; i < event.getHistorySize(); i++) {
updateMousePosition((int)event.getHistoricalX(i), (int)event.getHistoricalY(i));
}
// Now process the current values
updateMousePosition((int)event.getX(), (int)event.getY());
lastButtonState = event.getButtonState();
}
else
{
// Unknown source
return false;
}
// Handled a known source
return true;
@ -689,8 +764,8 @@ public class Game extends Activity implements SurfaceHolder.Callback,
// Scale the deltas if the device resolution is different
// than the stream resolution
deltaX = (int)Math.round((double)deltaX * ((double)prefConfig.width / (double)screenSize.x));
deltaY = (int)Math.round((double)deltaY * ((double)prefConfig.height / (double)screenSize.y));
deltaX = (int)Math.round((double)deltaX * (REFERENCE_HORIZ_RES / (double)screenSize.x));
deltaY = (int)Math.round((double)deltaY * (REFERENCE_VERT_RES / (double)screenSize.y));
conn.sendMouseMove((short)deltaX, (short)deltaY);
}
@ -728,10 +803,10 @@ public class Game extends Activity implements SurfaceHolder.Callback,
conn.stop();
}
// Close the Evdev watcher to allow use of captured input devices
if (evdevWatcher != null) {
evdevWatcher.shutdown();
evdevWatcher = null;
// Close the Evdev reader to allow use of captured input devices
if (evdevHandler != null) {
evdevHandler.stop();
evdevHandler = null;
}
}
@ -808,7 +883,7 @@ public class Game extends Activity implements SurfaceHolder.Callback,
// Resize the surface to match the aspect ratio of the video
// This must be done after the surface is created.
if (!prefConfig.stretchVideo && decoderRenderer.isHardwareAccelerated()) {
if (deferredSurfaceResize) {
resizeSurfaceWithAspectRatio((SurfaceView) findViewById(R.id.surfaceView),
prefConfig.width, prefConfig.height);
}
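
resizeSurfaceWithAspectRatio() itself is not shown in this diff. The sketch below illustrates the usual letterboxing technique, assuming the method shrinks one dimension of the SurfaceView's layout so the view matches the stream's aspect ratio; only the method name and arguments are taken from the call site above, the body is an assumption.

```java
// Hypothetical sketch of a Game method: fit a width x height stream inside the
// SurfaceView's current bounds while preserving the stream's aspect ratio.
private void resizeSurfaceWithAspectRatio(SurfaceView sv, double width, double height) {
    double videoAspect = width / height;
    double viewAspect = (double) sv.getWidth() / (double) sv.getHeight();

    android.view.ViewGroup.LayoutParams lp = sv.getLayoutParams();
    if (videoAspect > viewAspect) {
        // Stream is wider than the view: keep the width, shrink the height
        lp.width = sv.getWidth();
        lp.height = (int) (sv.getWidth() / videoAspect);
    } else {
        // Stream is taller (or equal): keep the height, shrink the width
        lp.height = sv.getHeight();
        lp.width = (int) (sv.getHeight() * videoAspect);
    }
    sv.setLayoutParams(lp);
}
```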

View File: PcView.java

@ -12,6 +12,7 @@ import com.limelight.computers.ComputerManagerListener;
import com.limelight.computers.ComputerManagerService;
import com.limelight.grid.PcGridAdapter;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.NvApp;
import com.limelight.nvstream.http.NvHTTP;
import com.limelight.nvstream.http.PairingManager;
import com.limelight.nvstream.http.PairingManager.PairState;
@ -22,10 +23,10 @@ import com.limelight.preferences.StreamSettings;
import com.limelight.ui.AdapterFragment;
import com.limelight.ui.AdapterFragmentCallbacks;
import com.limelight.utils.Dialog;
import com.limelight.utils.ServerHelper;
import com.limelight.utils.UiHelper;
import android.app.Activity;
import android.app.FragmentTransaction;
import android.app.Service;
import android.content.ComponentName;
import android.content.Intent;
@ -52,8 +53,8 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
private RelativeLayout noPcFoundLayout;
private PcGridAdapter pcGridAdapter;
private ComputerManagerService.ComputerManagerBinder managerBinder;
private boolean freezeUpdates, runningPolling;
private ServiceConnection serviceConnection = new ServiceConnection() {
private boolean freezeUpdates, runningPolling, hasResumed;
private final ServiceConnection serviceConnection = new ServiceConnection() {
public void onServiceConnected(ComponentName className, IBinder binder) {
final ComputerManagerService.ComputerManagerBinder localBinder =
((ComputerManagerService.ComputerManagerBinder)binder);
@ -94,6 +95,8 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
private final static int UNPAIR_ID = 3;
private final static int WOL_ID = 4;
private final static int DELETE_ID = 5;
private final static int RESUME_ID = 6;
private final static int QUIT_ID = 7;
private void initializeViews() {
setContentView(R.layout.activity_pc_view);
@ -212,6 +215,7 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
protected void onResume() {
super.onResume();
hasResumed = true;
startComputerUpdates();
}
@ -219,6 +223,7 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
protected void onPause() {
super.onPause();
hasResumed = false;
stopComputerUpdates(false);
}
@ -238,14 +243,10 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
AdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo;
ComputerObject computer = (ComputerObject) pcGridAdapter.getItem(info.position);
if (computer == null || computer.details == null ||
computer.details.reachability == ComputerDetails.Reachability.UNKNOWN) {
startComputerUpdates();
return;
}
// Inflate the context menu
if (computer.details.reachability == ComputerDetails.Reachability.OFFLINE) {
if (computer.details.reachability == ComputerDetails.Reachability.OFFLINE ||
computer.details.reachability == ComputerDetails.Reachability.UNKNOWN) {
menu.add(Menu.NONE, WOL_ID, 1, getResources().getString(R.string.pcview_menu_send_wol));
menu.add(Menu.NONE, DELETE_ID, 2, getResources().getString(R.string.pcview_menu_delete_pc));
}
@ -254,15 +255,27 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
menu.add(Menu.NONE, DELETE_ID, 2, getResources().getString(R.string.pcview_menu_delete_pc));
}
else {
menu.add(Menu.NONE, APP_LIST_ID, 1, getResources().getString(R.string.pcview_menu_app_list));
menu.add(Menu.NONE, UNPAIR_ID, 2, getResources().getString(R.string.pcview_menu_unpair_pc));
if (computer.details.runningGameId != 0) {
menu.add(Menu.NONE, RESUME_ID, 1, getResources().getString(R.string.applist_menu_resume));
menu.add(Menu.NONE, QUIT_ID, 2, getResources().getString(R.string.applist_menu_quit));
}
menu.add(Menu.NONE, APP_LIST_ID, 3, getResources().getString(R.string.pcview_menu_app_list));
// FIXME: We used to be able to unpair here but it's been broken since GFE 2.1.x, so I've replaced
// it with delete, which actually works
menu.add(Menu.NONE, DELETE_ID, 4, getResources().getString(R.string.pcview_menu_delete_pc));
}
}
@Override
public void onContextMenuClosed(Menu menu) {
// For some reason, this gets called again _after_ onPause() is called on this activity.
// We don't want to start computer updates again, so we need to keep track of whether we're paused.
if (hasResumed) {
startComputerUpdates();
}
}
private void doPair(final ComputerDetails computer) {
if (computer.reachability == ComputerDetails.Reachability.OFFLINE) {
@ -296,6 +309,10 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
else if (computer.reachability == ComputerDetails.Reachability.REMOTE) {
addr = computer.remoteIp;
}
else {
LimeLog.warning("Unknown reachability - using local IP");
addr = computer.localIp;
}
httpConn = new NvHTTP(addr,
managerBinder.getUniqueId(),
@ -364,7 +381,7 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
}
private void doWakeOnLan(final ComputerDetails computer) {
if (computer.reachability != ComputerDetails.Reachability.OFFLINE) {
if (computer.state == ComputerDetails.State.ONLINE) {
Toast.makeText(PcView.this, getResources().getString(R.string.wol_pc_online), Toast.LENGTH_SHORT).show();
return;
}
@ -421,6 +438,10 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
else if (computer.reachability == ComputerDetails.Reachability.REMOTE) {
addr = computer.remoteIp;
}
else {
LimeLog.warning("Unknown reachability - using local IP");
addr = computer.localIp;
}
httpConn = new NvHTTP(addr,
managerBinder.getUniqueId(),
@ -476,9 +497,8 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
@Override
public boolean onContextItemSelected(MenuItem item) {
AdapterContextMenuInfo info = (AdapterContextMenuInfo) item.getMenuInfo();
ComputerObject computer = (ComputerObject) pcGridAdapter.getItem(info.position);
switch (item.getItemId())
{
final ComputerObject computer = (ComputerObject) pcGridAdapter.getItem(info.position);
switch (item.getItemId()) {
case PAIR_ID:
doPair(computer.details);
return true;
@ -504,6 +524,32 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
doAppList(computer.details);
return true;
case RESUME_ID:
if (managerBinder == null) {
Toast.makeText(PcView.this, getResources().getString(R.string.error_manager_not_running), Toast.LENGTH_LONG).show();
return true;
}
ServerHelper.doStart(this, new NvApp("app", computer.details.runningGameId), computer.details, managerBinder);
return true;
case QUIT_ID:
if (managerBinder == null) {
Toast.makeText(PcView.this, getResources().getString(R.string.error_manager_not_running), Toast.LENGTH_LONG).show();
return true;
}
// Display a confirmation dialog first
UiHelper.displayQuitConfirmationDialog(this, new Runnable() {
@Override
public void run() {
ServerHelper.doQuit(PcView.this,
ServerHelper.getCurrentAddressFromComputer(computer.details),
new NvApp("app", 0), managerBinder, null);
}
}, null);
return true;
default:
return super.onContextItemSelected(item);
}
@ -516,6 +562,12 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
if (details.equals(computer.details)) {
pcGridAdapter.removeComputer(computer);
pcGridAdapter.notifyDataSetChanged();
if (pcGridAdapter.getCount() == 0) {
// Show the "Discovery in progress" view
noPcFoundLayout.setVisibility(View.VISIBLE);
}
break;
}
}
@ -528,7 +580,7 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
ComputerObject computer = (ComputerObject) pcGridAdapter.getItem(i);
// Check if this is the same computer
if (details.equals(computer.details)) {
if (details.uuid.equals(computer.details.uuid)) {
existingEntry = computer;
break;
}
@ -565,10 +617,9 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
public void onItemClick(AdapterView<?> arg0, View arg1, int pos,
long id) {
ComputerObject computer = (ComputerObject) pcGridAdapter.getItem(pos);
if (computer.details.reachability == ComputerDetails.Reachability.UNKNOWN) {
// Do nothing
} else if (computer.details.reachability == ComputerDetails.Reachability.OFFLINE) {
// Open the context menu if a PC is offline
if (computer.details.reachability == ComputerDetails.Reachability.UNKNOWN ||
computer.details.reachability == ComputerDetails.Reachability.OFFLINE) {
// Open the context menu if a PC is offline or refreshing
openContextMenu(arg1);
} else if (computer.details.pairState != PairState.PAIRED) {
// Pair an unpaired machine by default
@ -585,6 +636,9 @@ public class PcView extends Activity implements AdapterFragmentCallbacks {
public ComputerDetails details;
public ComputerObject(ComputerDetails details) {
if (details == null) {
throw new IllegalArgumentException("details must not be null");
}
this.details = details;
}
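
UiHelper.displayQuitConfirmationDialog() is referenced here and in AppView but is not part of this diff. A minimal sketch follows, assuming it is the removed AppView.displayQuitConfirmationDialog() moved into the helper with an explicit Activity parameter; only the signature is confirmed by the call sites.

```java
package com.limelight.utils;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;

import com.limelight.R;

public class UiHelper {
    public static void displayQuitConfirmationDialog(final Activity parent,
                                                     final Runnable onYes, final Runnable onNo) {
        // Same yes/no handling as the removed in-activity implementation
        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                switch (which) {
                    case DialogInterface.BUTTON_POSITIVE:
                        if (onYes != null) {
                            onYes.run();
                        }
                        break;
                    case DialogInterface.BUTTON_NEGATIVE:
                        if (onNo != null) {
                            onNo.run();
                        }
                        break;
                }
            }
        };

        new AlertDialog.Builder(parent)
                .setMessage(parent.getResources().getString(R.string.applist_quit_confirmation))
                .setPositiveButton(parent.getResources().getString(R.string.yes), listener)
                .setNegativeButton(parent.getResources().getString(R.string.no), listener)
                .show();
    }
}
```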

View File: AndroidAudioRenderer.java

@ -9,14 +9,13 @@ import com.limelight.nvstream.av.audio.AudioRenderer;
public class AndroidAudioRenderer implements AudioRenderer {
public static final int FRAME_SIZE = 960;
private AudioTrack track;
@Override
public boolean streamInitialized(int channelCount, int sampleRate) {
public boolean streamInitialized(int channelCount, int channelMask, int samplesPerFrame, int sampleRate) {
int channelConfig;
int bufferSize;
int bytesPerFrame = (samplesPerFrame * 2);
switch (channelCount)
{
@ -26,6 +25,12 @@ public class AndroidAudioRenderer implements AudioRenderer {
case 2:
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
break;
case 4:
channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
break;
case 6:
channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
break;
default:
LimeLog.severe("Decoder returned unhandled channel count");
return false;
@ -38,7 +43,7 @@ public class AndroidAudioRenderer implements AudioRenderer {
// use the recommended larger buffer size.
try {
// Buffer two frames of audio if possible
bufferSize = FRAME_SIZE * 2;
bufferSize = bytesPerFrame * 2;
track = new AudioTrack(AudioManager.STREAM_MUSIC,
sampleRate,
@ -59,10 +64,10 @@ public class AndroidAudioRenderer implements AudioRenderer {
bufferSize = Math.max(AudioTrack.getMinBufferSize(sampleRate,
channelConfig,
AudioFormat.ENCODING_PCM_16BIT),
FRAME_SIZE * 2);
bytesPerFrame * 2);
// Round to next frame
bufferSize = (((bufferSize + (FRAME_SIZE - 1)) / FRAME_SIZE) * FRAME_SIZE);
bufferSize = (((bufferSize + (bytesPerFrame - 1)) / bytesPerFrame) * bytesPerFrame);
track = new AudioTrack(AudioManager.STREAM_MUSIC,
sampleRate,
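
As a worked illustration of the round-up arithmetic above (the numbers are purely illustrative, not taken from any particular device): if a frame is 480 bytes and AudioTrack.getMinBufferSize() reports 3528 bytes, the expression rounds the buffer up to the next whole frame.

```java
public class BufferRounding {
    public static void main(String[] args) {
        int bytesPerFrame = 480;  // illustrative frame size in bytes
        int bufferSize = 3528;    // illustrative AudioTrack.getMinBufferSize() result

        // Same integer arithmetic as above: round up to a whole number of frames
        bufferSize = (((bufferSize + (bytesPerFrame - 1)) / bytesPerFrame) * bytesPerFrame);

        System.out.println(bufferSize);  // prints 3840, i.e. 8 frames of 480 bytes
    }
}
```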

View File: AndroidCryptoProvider.java

@ -45,8 +45,8 @@ import com.limelight.nvstream.http.LimelightCryptoProvider;
public class AndroidCryptoProvider implements LimelightCryptoProvider {
private File certFile;
private File keyFile;
private final File certFile;
private final File keyFile;
private X509Certificate cert;
private RSAPrivateKey key;

View File: ControllerHandler.java

@ -1,22 +1,20 @@
package com.limelight.binding.input;
import java.util.HashMap;
import java.util.Map;
import android.content.Context;
import android.hardware.input.InputManager;
import android.os.SystemClock;
import android.util.SparseArray;
import android.view.InputDevice;
import android.view.KeyEvent;
import android.view.MotionEvent;
import com.limelight.LimeLog;
import com.limelight.binding.input.driver.UsbDriverListener;
import com.limelight.nvstream.NvConnection;
import com.limelight.nvstream.input.ControllerPacket;
import com.limelight.ui.GameGestures;
import com.limelight.utils.Vector2d;
public class ControllerHandler implements InputManager.InputDeviceListener {
public class ControllerHandler implements InputManager.InputDeviceListener, UsbDriverListener {
private static final int MAXIMUM_BUMPER_UP_DELAY_MS = 100;
@ -30,17 +28,18 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
private static final int EMULATED_SPECIAL_UP_DELAY_MS = 100;
private static final int EMULATED_SELECT_UP_DELAY_MS = 30;
private Vector2d inputVector = new Vector2d();
private final Vector2d inputVector = new Vector2d();
private HashMap<String, ControllerContext> contexts = new HashMap<String, ControllerContext>();
private final SparseArray<InputDeviceContext> inputDeviceContexts = new SparseArray<>();
private final SparseArray<UsbDeviceContext> usbDeviceContexts = new SparseArray<>();
private NvConnection conn;
private double stickDeadzone;
private final ControllerContext defaultContext = new ControllerContext();
private GameGestures gestures;
private final NvConnection conn;
private final double stickDeadzone;
private final InputDeviceContext defaultContext = new InputDeviceContext();
private final GameGestures gestures;
private boolean hasGameController;
private boolean multiControllerEnabled;
private final boolean multiControllerEnabled;
private short currentControllers;
public ControllerHandler(NvConnection conn, GameGestures gestures, boolean multiControllerEnabled, int deadzonePercentage) {
@ -53,8 +52,8 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
deadzonePercentage = 10;
int[] ids = InputDevice.getDeviceIds();
for (int i = 0; i < ids.length; i++) {
InputDevice dev = InputDevice.getDevice(ids[i]);
for (int id : ids) {
InputDevice dev = InputDevice.getDevice(id);
if ((dev.getSources() & InputDevice.SOURCE_JOYSTICK) != 0 ||
(dev.getSources() & InputDevice.SOURCE_GAMEPAD) != 0) {
// This looks like a gamepad, but we'll check X and Y to be sure
@ -98,18 +97,6 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
return range;
}
private short assignNewControllerNumber() {
for (short i = 0; i < 4; i++) {
if ((currentControllers & (1 << i)) == 0) {
// Found an unused controller value
currentControllers |= (1 << i);
return i;
}
}
return 0;
}
@Override
public void onInputDeviceAdded(int deviceId) {
// Nothing happening here yet
@ -117,13 +104,11 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
@Override
public void onInputDeviceRemoved(int deviceId) {
for (Map.Entry<String, ControllerContext> device : contexts.entrySet()) {
if (device.getValue().id == deviceId) {
LimeLog.info("Removed controller: "+device.getValue().name);
releaseControllerNumber(device.getValue().controllerNumber);
contexts.remove(device.getKey());
return;
}
InputDeviceContext context = inputDeviceContexts.get(deviceId);
if (context != null) {
LimeLog.info("Removed controller: "+context.name+" ("+deviceId+")");
releaseControllerNumber(context);
inputDeviceContexts.remove(deviceId);
}
}
@@ -134,13 +119,96 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
onInputDeviceAdded(deviceId);
}
private void releaseControllerNumber(int controllerNumber) {
LimeLog.info("Controller number "+controllerNumber+" is now available");
currentControllers &= ~(1 << controllerNumber);
private void releaseControllerNumber(GenericControllerContext context) {
// If this device sent data as a gamepad, zero the values before removing
if (context.assignedControllerNumber) {
conn.sendControllerInput(context.controllerNumber, (short) 0,
(byte) 0, (byte) 0,
(short) 0, (short) 0,
(short) 0, (short) 0);
}
private ControllerContext createContextForDevice(InputDevice dev) {
ControllerContext context = new ControllerContext();
// If we reserved a controller number, remove that reservation
if (context.reservedControllerNumber) {
LimeLog.info("Controller number "+context.controllerNumber+" is now available");
currentControllers &= ~(1 << context.controllerNumber);
}
}
// Called before sending input but after we've determined that this
// is definitely a controller (not a keyboard, mouse, or something else)
private void assignControllerNumberIfNeeded(GenericControllerContext context) {
if (context.assignedControllerNumber) {
return;
}
if (context instanceof InputDeviceContext) {
InputDeviceContext devContext = (InputDeviceContext) context;
LimeLog.info(devContext.name+" ("+context.id+") needs a controller number assigned");
if (devContext.name != null && devContext.name.contains("gpio-keys")) {
// This is the back button on Shield portable consoles
LimeLog.info("Built-in buttons hardcoded as controller 0");
context.controllerNumber = 0;
}
else if (multiControllerEnabled && devContext.hasJoystickAxes) {
context.controllerNumber = 0;
LimeLog.info("Reserving the next available controller number");
for (short i = 0; i < 4; i++) {
if ((currentControllers & (1 << i)) == 0) {
// Found an unused controller value
currentControllers |= (1 << i);
context.controllerNumber = i;
context.reservedControllerNumber = true;
break;
}
}
}
else {
LimeLog.info("Not reserving a controller number");
context.controllerNumber = 0;
}
}
else {
if (multiControllerEnabled) {
context.controllerNumber = 0;
LimeLog.info("Reserving the next available controller number");
for (short i = 0; i < 4; i++) {
if ((currentControllers & (1 << i)) == 0) {
// Found an unused controller value
currentControllers |= (1 << i);
context.controllerNumber = i;
context.reservedControllerNumber = true;
break;
}
}
}
else {
LimeLog.info("Not reserving a controller number");
context.controllerNumber = 0;
}
}
LimeLog.info("Assigned as controller "+context.controllerNumber);
context.assignedControllerNumber = true;
}
private UsbDeviceContext createUsbDeviceContextForDevice(int deviceId) {
UsbDeviceContext context = new UsbDeviceContext();
context.id = deviceId;
context.leftStickDeadzoneRadius = (float) stickDeadzone;
context.rightStickDeadzoneRadius = (float) stickDeadzone;
context.triggerDeadzone = 0.13f;
return context;
}
private InputDeviceContext createInputDeviceContextForDevice(InputDevice dev) {
InputDeviceContext context = new InputDeviceContext();
String devName = dev.getName();
LimeLog.info("Creating controller context for device: "+devName);
@@ -252,6 +320,15 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
}
}
// Ignore the back button if the controller has both back and select buttons
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT) {
boolean[] hasSelectKey = dev.hasKeys(KeyEvent.KEYCODE_BUTTON_SELECT, KeyEvent.KEYCODE_BACK, 0);
if (hasSelectKey[0] && hasSelectKey[1]) {
LimeLog.info("Ignoring back button because select is present");
context.ignoreBack = true;
}
}
if (devName != null) {
// For the Nexus Player (and probably other ATV devices), we should
// use the back button as start since it doesn't have a start/menu button
@@ -263,6 +340,7 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
boolean[] hasStartKey = dev.hasKeys(KeyEvent.KEYCODE_BUTTON_START, KeyEvent.KEYCODE_MENU, 0);
if (!hasStartKey[0] && !hasStartKey[1]) {
context.backIsStart = true;
context.modeIsSelect = true;
}
}
@@ -274,56 +352,54 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
else if (devName.contains("Fire TV Remote") || devName.contains("Nexus Remote")) {
// It's only a remote if it doesn't have any sticks
if (!context.hasJoystickAxes) {
context.isRemote = true;
context.ignoreBack = true;
}
}
// NYKO Playpad has a fake hat that mimics the left stick for some reason
else if (devName.contains("NYKO PLAYPAD")) {
context.hatXAxis = -1;
context.hatYAxis = -1;
// SHIELD controllers will use small stick deadzones
else if (devName.contains("SHIELD")) {
context.leftStickDeadzoneRadius = 0.07f;
context.rightStickDeadzoneRadius = 0.07f;
}
// Samsung's face buttons appear as a non-virtual button so we'll explicitly ignore
// back presses on this device
else if (devName.equals("sec_touchscreen")) {
context.ignoreBack = true;
}
// The Serval has a couple of unknown buttons that are start and select. It also has
// a back button which we want to ignore since there's already a select button.
else if (devName.contains("Razer Serval")) {
context.isServal = true;
context.ignoreBack = true;
}
}
LimeLog.info("Analog stick deadzone: "+context.leftStickDeadzoneRadius+" "+context.rightStickDeadzoneRadius);
LimeLog.info("Trigger deadzone: "+context.triggerDeadzone);
if (devName != null && devName.equals("gpio-keys")) {
// This is the back button on Shield portable consoles
context.controllerNumber = 0;
}
else if (multiControllerEnabled) {
context.controllerNumber = assignNewControllerNumber();
}
else {
context.controllerNumber = 0;
}
LimeLog.info("Assigned as controller "+context.controllerNumber);
return context;
}
private ControllerContext getContextForDevice(InputDevice dev) {
private InputDeviceContext getContextForDevice(InputDevice dev) {
// Unknown devices use the default context
if (dev == null) {
return defaultContext;
}
String descriptor = dev.getDescriptor();
// Return the existing context if it exists
ControllerContext context = contexts.get(descriptor);
InputDeviceContext context = inputDeviceContexts.get(dev.getId());
if (context != null) {
return context;
}
// Otherwise create a new context
context = createContextForDevice(dev);
contexts.put(descriptor, context);
context = createInputDeviceContextForDevice(dev);
inputDeviceContexts.put(dev.getId(), context);
return context;
}
private void sendControllerInputPacket(ControllerContext context) {
private void sendControllerInputPacket(GenericControllerContext context) {
assignControllerNumberIfNeeded(context);
conn.sendControllerInput(context.controllerNumber, context.inputMap,
context.leftTrigger, context.rightTrigger,
context.leftStickX, context.leftStickY,
@@ -332,9 +408,9 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
// Return a valid keycode, 0 to consume, or -1 to not consume the event
// Device MAY BE NULL
private int handleRemapping(ControllerContext context, KeyEvent event) {
// For remotes, don't capture the back button
if (context.isRemote) {
private int handleRemapping(InputDeviceContext context, KeyEvent event) {
// Don't capture the back button if configured
if (context.ignoreBack) {
if (event.getKeyCode() == KeyEvent.KEYCODE_BACK) {
return -1;
}
@@ -378,6 +454,16 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
return 0;
}
}
// If this is a Serval controller sending an unknown key code, it's probably
// the start and select buttons
else if (context.isServal && event.getKeyCode() == KeyEvent.KEYCODE_UNKNOWN) {
switch (event.getScanCode()) {
case 314:
return KeyEvent.KEYCODE_BUTTON_SELECT;
case 315:
return KeyEvent.KEYCODE_BUTTON_START;
}
}
if (context.hatXAxis != -1 && context.hatYAxis != -1) {
switch (event.getKeyCode()) {
@@ -427,10 +513,18 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
// Ensure that we never use back as start if we have a real start
context.backIsStart = false;
}
else if (keyCode == KeyEvent.KEYCODE_BUTTON_SELECT) {
// Don't use mode as select if we have a select
context.modeIsSelect = false;
}
else if (context.backIsStart && keyCode == KeyEvent.KEYCODE_BACK) {
// Emulate the start button with back
return KeyEvent.KEYCODE_BUTTON_START;
}
else if (context.modeIsSelect && keyCode == KeyEvent.KEYCODE_BUTTON_MODE) {
// Emulate the select button with mode
return KeyEvent.KEYCODE_BUTTON_SELECT;
}
return keyCode;
}
@@ -452,7 +546,7 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
// evaluates the deadzone.
}
private void handleAxisSet(ControllerContext context, float lsX, float lsY, float rsX,
private void handleAxisSet(InputDeviceContext context, float lsX, float lsY, float rsX,
float rsY, float lt, float rt, float hatX, float hatY) {
if (context.leftStickXAxis != -1 && context.leftStickYAxis != -1) {
@@ -512,7 +606,7 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
}
public boolean handleMotionEvent(MotionEvent event) {
ControllerContext context = getContextForDevice(event.getDevice());
InputDeviceContext context = getContextForDevice(event.getDevice());
float lsX = 0, lsY = 0, rsX = 0, rsY = 0, rt = 0, lt = 0, hatX = 0, hatY = 0;
// We purposefully ignore the historical values in the motion event as it makes
@@ -544,7 +638,7 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
}
public boolean handleButtonUp(KeyEvent event) {
ControllerContext context = getContextForDevice(event.getDevice());
InputDeviceContext context = getContextForDevice(event.getDevice());
int keyCode = handleRemapping(context, event);
if (keyCode == 0) {
@@ -570,7 +664,9 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
case KeyEvent.KEYCODE_BUTTON_START:
case KeyEvent.KEYCODE_MENU:
if (SystemClock.uptimeMillis() - context.startDownTime > ControllerHandler.START_DOWN_TIME_KEYB_MS) {
gestures.showKeyboard();
// FIXME: The stock keyboard doesn't have controller focus, so it isn't usable. I'm not enabling this shortcut
// until we have a custom keyboard or some other fix
//gestures.showKeyboard();
}
context.inputMap &= ~ControllerPacket.PLAY_FLAG;
break;
@@ -667,7 +763,7 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
}
public boolean handleButtonDown(KeyEvent event) {
ControllerContext context = getContextForDevice(event.getDevice());
InputDeviceContext context = getContextForDevice(event.getDevice());
int keyCode = handleRemapping(context, event);
if (keyCode == 0) {
@@ -767,32 +863,67 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
return true;
}
class ControllerContext {
public String name;
@Override
public void reportControllerState(int controllerId, short buttonFlags,
float leftStickX, float leftStickY,
float rightStickX, float rightStickY,
float leftTrigger, float rightTrigger) {
UsbDeviceContext context = usbDeviceContexts.get(controllerId);
Vector2d leftStickVector = populateCachedVector(leftStickX, leftStickY);
handleDeadZone(leftStickVector, context.leftStickDeadzoneRadius);
context.leftStickX = (short) (leftStickVector.getX() * 0x7FFE);
context.leftStickY = (short) (-leftStickVector.getY() * 0x7FFE);
Vector2d rightStickVector = populateCachedVector(rightStickX, rightStickY);
handleDeadZone(rightStickVector, context.rightStickDeadzoneRadius);
context.rightStickX = (short) (rightStickVector.getX() * 0x7FFE);
context.rightStickY = (short) (-rightStickVector.getY() * 0x7FFE);
if (leftTrigger <= context.triggerDeadzone) {
leftTrigger = 0;
}
if (rightTrigger <= context.triggerDeadzone) {
rightTrigger = 0;
}
context.leftTrigger = (byte)(leftTrigger * 0xFF);
context.rightTrigger = (byte)(rightTrigger * 0xFF);
context.inputMap = buttonFlags;
sendControllerInputPacket(context);
}
@Override
public void deviceRemoved(int controllerId) {
UsbDeviceContext context = usbDeviceContexts.get(controllerId);
if (context != null) {
LimeLog.info("Removed controller: "+controllerId);
releaseControllerNumber(context);
usbDeviceContexts.remove(controllerId);
}
}
@Override
public void deviceAdded(int controllerId) {
UsbDeviceContext context = createUsbDeviceContextForDevice(controllerId);
usbDeviceContexts.put(controllerId, context);
}
class GenericControllerContext {
public int id;
public int leftStickXAxis = -1;
public int leftStickYAxis = -1;
public float leftStickDeadzoneRadius;
public int rightStickXAxis = -1;
public int rightStickYAxis = -1;
public float rightStickDeadzoneRadius;
public int leftTriggerAxis = -1;
public int rightTriggerAxis = -1;
public boolean triggersIdleNegative;
public float triggerDeadzone;
public int hatXAxis = -1;
public int hatYAxis = -1;
public boolean isDualShock4;
public boolean isXboxController;
public boolean backIsStart;
public boolean isRemote;
public boolean hasJoystickAxes;
public boolean assignedControllerNumber;
public boolean reservedControllerNumber;
public short controllerNumber;
public short inputMap = 0x0000;
@@ -802,6 +933,32 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
public short rightStickY = 0x0000;
public short leftStickX = 0x0000;
public short leftStickY = 0x0000;
}
class InputDeviceContext extends GenericControllerContext {
public String name;
public int leftStickXAxis = -1;
public int leftStickYAxis = -1;
public int rightStickXAxis = -1;
public int rightStickYAxis = -1;
public int leftTriggerAxis = -1;
public int rightTriggerAxis = -1;
public boolean triggersIdleNegative;
public int hatXAxis = -1;
public int hatYAxis = -1;
public boolean isDualShock4;
public boolean isXboxController;
public boolean isServal;
public boolean backIsStart;
public boolean modeIsSelect;
public boolean ignoreBack;
public boolean hasJoystickAxes;
public int emulatingButtonFlags = 0;
// Used for OUYA bumper state tracking since they force all buttons
@@ -814,4 +971,6 @@ public class ControllerHandler implements InputManager.InputDeviceListener {
public long startDownTime = 0;
}
class UsbDeviceContext extends GenericControllerContext {}
}
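As a side note on the reservation scheme above: controller numbers 0-3 are tracked as bits of the currentControllers short, set when a context gets a number and cleared when the device goes away. A minimal standalone sketch of the same bookkeeping follows; the class and method names are illustrative and not part of this change.
// Illustrative sketch of the 4-slot reservation bitmask used by ControllerHandler.
// Only the bit manipulation mirrors the code above; everything else is hypothetical.
public class ControllerSlots {
    private short currentControllers;

    // Returns the lowest free slot (0-3) and marks it used, or 0 if all four are taken.
    public synchronized short reserve() {
        for (short i = 0; i < 4; i++) {
            if ((currentControllers & (1 << i)) == 0) {
                currentControllers |= (1 << i);
                return i;
            }
        }
        return 0; // all four slots in use; fall back to player 1
    }

    // Frees a previously reserved slot so a new controller can claim it.
    public synchronized void release(short slot) {
        currentControllers &= ~(1 << slot);
    }
}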

View File

@@ -15,7 +15,7 @@ public class KeyboardTranslator extends KeycodeTranslator {
/**
* GFE's prefix for every key code
*/
public static final short KEY_PREFIX = (short) 0x80;
private static final short KEY_PREFIX = (short) 0x80;
public static final int VK_0 = 48;
public static final int VK_9 = 57;

View File

@@ -3,6 +3,9 @@ package com.limelight.binding.input;
import com.limelight.nvstream.NvConnection;
import com.limelight.nvstream.input.MouseButtonPacket;
import java.util.Timer;
import java.util.TimerTask;
public class TouchContext {
private int lastTouchX = 0;
private int lastTouchY = 0;
@@ -10,13 +13,20 @@ public class TouchContext {
private int originalTouchY = 0;
private long originalTouchTime = 0;
private boolean cancelled;
private boolean confirmedMove;
private boolean confirmedDrag;
private Timer dragTimer;
private double distanceMoved;
private NvConnection conn;
private int actionIndex;
private double xFactor, yFactor;
private final NvConnection conn;
private final int actionIndex;
private final double xFactor;
private final double yFactor;
private static final int TAP_MOVEMENT_THRESHOLD = 10;
private static final int TAP_MOVEMENT_THRESHOLD = 20;
private static final int TAP_DISTANCE_THRESHOLD = 25;
private static final int TAP_TIME_THRESHOLD = 250;
private static final int DRAG_TIME_THRESHOLD = 650;
public TouchContext(NvConnection conn, int actionIndex, double xFactor, double yFactor)
{
@@ -31,15 +41,19 @@ public class TouchContext {
return actionIndex;
}
private boolean isWithinTapBounds(int touchX, int touchY)
{
int xDelta = Math.abs(touchX - originalTouchX);
int yDelta = Math.abs(touchY - originalTouchY);
return xDelta <= TAP_MOVEMENT_THRESHOLD &&
yDelta <= TAP_MOVEMENT_THRESHOLD;
}
private boolean isTap()
{
int xDelta = Math.abs(lastTouchX - originalTouchX);
int yDelta = Math.abs(lastTouchY - originalTouchY);
long timeDelta = System.currentTimeMillis() - originalTouchTime;
return xDelta <= TAP_MOVEMENT_THRESHOLD &&
yDelta <= TAP_MOVEMENT_THRESHOLD &&
timeDelta <= TAP_TIME_THRESHOLD;
return isWithinTapBounds(lastTouchX, lastTouchY) && timeDelta <= TAP_TIME_THRESHOLD;
}
private byte getMouseButtonIndex()
@@ -57,7 +71,13 @@ public class TouchContext {
originalTouchX = lastTouchX = eventX;
originalTouchY = lastTouchY = eventY;
originalTouchTime = System.currentTimeMillis();
cancelled = false;
cancelled = confirmedDrag = confirmedMove = false;
distanceMoved = 0;
if (actionIndex == 0) {
// Start the timer for engaging a drag
startDragTimer();
}
return true;
}
@@ -68,10 +88,17 @@ public class TouchContext {
return;
}
if (isTap())
{
// Cancel the drag timer
cancelDragTimer();
byte buttonIndex = getMouseButtonIndex();
if (confirmedDrag) {
// Raise the button after a drag
conn.sendMouseButtonUp(buttonIndex);
}
else if (isTap())
{
// Lower the mouse button
conn.sendMouseButtonDown(buttonIndex);
@@ -86,31 +113,116 @@ public class TouchContext {
}
}
private synchronized void startDragTimer() {
dragTimer = new Timer(true);
dragTimer.schedule(new TimerTask() {
@Override
public void run() {
synchronized (TouchContext.this) {
// Check if someone already set move
if (confirmedMove) {
return;
}
// Check if someone cancelled us
if (dragTimer == null) {
return;
}
// Uncancellable now
dragTimer = null;
// We haven't been cancelled before the timer expired so begin dragging
confirmedDrag = true;
conn.sendMouseButtonDown(getMouseButtonIndex());
}
}
}, DRAG_TIME_THRESHOLD);
}
private synchronized void cancelDragTimer() {
if (dragTimer != null) {
dragTimer.cancel();
dragTimer = null;
}
}
private synchronized void checkForConfirmedMove(int eventX, int eventY) {
// If we've already confirmed something, get out now
if (confirmedMove || confirmedDrag) {
return;
}
// If it leaves the tap bounds before the drag time expires, it's a move.
if (!isWithinTapBounds(eventX, eventY)) {
confirmedMove = true;
cancelDragTimer();
return;
}
// Check if we've exceeded the maximum distance moved
distanceMoved += Math.sqrt(Math.pow(eventX - lastTouchX, 2) + Math.pow(eventY - lastTouchY, 2));
if (distanceMoved >= TAP_DISTANCE_THRESHOLD) {
confirmedMove = true;
cancelDragTimer();
return;
}
}
public boolean touchMoveEvent(int eventX, int eventY)
{
if (eventX != lastTouchX || eventY != lastTouchY)
{
// We only send moves for the primary touch point
// We only send moves and drags for the primary touch point
if (actionIndex == 0) {
checkForConfirmedMove(eventX, eventY);
int deltaX = eventX - lastTouchX;
int deltaY = eventY - lastTouchY;
// Scale the deltas based on the factors passed to our constructor
deltaX = (int)Math.round((double)deltaX * xFactor);
deltaY = (int)Math.round((double)deltaY * yFactor);
deltaX = (int)Math.round((double)Math.abs(deltaX) * xFactor);
deltaY = (int)Math.round((double)Math.abs(deltaY) * yFactor);
// Fix up the signs
if (eventX < lastTouchX) {
deltaX = -deltaX;
}
if (eventY < lastTouchY) {
deltaY = -deltaY;
}
// If the scaling factor ended up rounding deltas to zero, wait until they are
// non-zero to update lastTouch, so that devices that often report small touch events
// will still work correctly
if (deltaX != 0) {
lastTouchX = eventX;
}
if (deltaY != 0) {
lastTouchY = eventY;
}
conn.sendMouseMove((short)deltaX, (short)deltaY);
}
else {
lastTouchX = eventX;
lastTouchY = eventY;
}
}
return true;
}
public void cancelTouch() {
cancelled = true;
// Cancel the drag timer
cancelDragTimer();
// If it was a confirmed drag, we'll need to raise the button now
if (confirmedDrag) {
conn.sendMouseButtonUp(getMouseButtonIndex());
}
}
public boolean isCancelled() {

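To summarize the gesture thresholds introduced above: a primary touch that stays within 20 px of its origin, accumulates less than 25 px of travel, and lifts within 250 ms is a tap (click); leaving those bounds makes it a plain cursor move, and holding still for 650 ms arms a drag that keeps the mouse button down until release. A self-contained sketch of that decision, using hypothetical names and the constants from this file:
// Hypothetical restatement of the tap/move/drag thresholds above; not part of the commit.
public final class TouchClassifier {
    public enum Kind { TAP, MOVE, DRAG, NONE }

    static final int TAP_MOVEMENT_THRESHOLD = 20;  // px from the original touch point
    static final int TAP_DISTANCE_THRESHOLD = 25;  // px of accumulated travel
    static final int TAP_TIME_THRESHOLD = 250;     // ms
    static final int DRAG_TIME_THRESHOLD = 650;    // ms

    // heldMs: time since touch down; maxOffset: largest distance from the down point;
    // travelled: total path length of the contact.
    public static Kind classify(long heldMs, int maxOffset, double travelled) {
        boolean leftTapBounds = maxOffset > TAP_MOVEMENT_THRESHOLD
                || travelled >= TAP_DISTANCE_THRESHOLD;
        if (leftTapBounds) {
            return Kind.MOVE;                      // confirmed move; the drag timer is cancelled
        }
        if (heldMs >= DRAG_TIME_THRESHOLD) {
            return Kind.DRAG;                      // drag timer fired while still inside the tap box
        }
        return heldMs <= TAP_TIME_THRESHOLD ? Kind.TAP : Kind.NONE;
    }
}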
View File

@@ -0,0 +1,11 @@
package com.limelight.binding.input.driver;
public interface UsbDriverListener {
void reportControllerState(int controllerId, short buttonFlags,
float leftStickX, float leftStickY,
float rightStickX, float rightStickY,
float leftTrigger, float rightTrigger);
void deviceRemoved(int controllerId);
void deviceAdded(int controllerId);
}
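For reference, a trivial implementation of this listener that only logs the callbacks; a real client (such as ControllerHandler above) forwards the state into the stream connection. This class is illustrative and not part of the commit.
import com.limelight.binding.input.driver.UsbDriverListener;

// Hypothetical logging implementation of UsbDriverListener, for illustration only.
public class LoggingUsbDriverListener implements UsbDriverListener {
    @Override
    public void reportControllerState(int controllerId, short buttonFlags,
                                      float leftStickX, float leftStickY,
                                      float rightStickX, float rightStickY,
                                      float leftTrigger, float rightTrigger) {
        System.out.println("Controller " + controllerId + " buttons=0x" +
                Integer.toHexString(buttonFlags & 0xFFFF));
    }

    @Override
    public void deviceRemoved(int controllerId) {
        System.out.println("Controller removed: " + controllerId);
    }

    @Override
    public void deviceAdded(int controllerId) {
        System.out.println("Controller added: " + controllerId);
    }
}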

View File

@@ -0,0 +1,170 @@
package com.limelight.binding.input.driver;
import android.app.PendingIntent;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbManager;
import android.os.Binder;
import android.os.IBinder;
import com.limelight.LimeLog;
import java.util.ArrayList;
public class UsbDriverService extends Service implements UsbDriverListener {
private static final String ACTION_USB_PERMISSION =
"com.limelight.USB_PERMISSION";
private UsbManager usbManager;
private final UsbEventReceiver receiver = new UsbEventReceiver();
private final UsbDriverBinder binder = new UsbDriverBinder();
private final ArrayList<XboxOneController> controllers = new ArrayList<>();
private UsbDriverListener listener;
private static int nextDeviceId;
@Override
public void reportControllerState(int controllerId, short buttonFlags, float leftStickX, float leftStickY, float rightStickX, float rightStickY, float leftTrigger, float rightTrigger) {
// Call through to the client's listener
if (listener != null) {
listener.reportControllerState(controllerId, buttonFlags, leftStickX, leftStickY, rightStickX, rightStickY, leftTrigger, rightTrigger);
}
}
@Override
public void deviceRemoved(int controllerId) {
// Remove the controller from our list (if not removed already)
for (XboxOneController controller : controllers) {
if (controller.getControllerId() == controllerId) {
controllers.remove(controller);
break;
}
}
// Call through to the client's listener
if (listener != null) {
listener.deviceRemoved(controllerId);
}
}
@Override
public void deviceAdded(int controllerId) {
// Call through to the client's listener
if (listener != null) {
listener.deviceAdded(controllerId);
}
}
public class UsbEventReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
// Initial attachment broadcast
if (action.equals(UsbManager.ACTION_USB_DEVICE_ATTACHED)) {
UsbDevice device = (UsbDevice) intent.getParcelableExtra(UsbManager.EXTRA_DEVICE);
// Continue the state machine
handleUsbDeviceState(device);
}
// Subsequent permission dialog completion intent
else if (action.equals(ACTION_USB_PERMISSION)) {
UsbDevice device = (UsbDevice) intent.getParcelableExtra(UsbManager.EXTRA_DEVICE);
// If we got this far, we've already found we're able to handle this device
if (intent.getBooleanExtra(UsbManager.EXTRA_PERMISSION_GRANTED, false)) {
handleUsbDeviceState(device);
}
}
}
}
public class UsbDriverBinder extends Binder {
public void setListener(UsbDriverListener listener) {
UsbDriverService.this.listener = listener;
// Report all controllers that already exist
if (listener != null) {
for (XboxOneController controller : controllers) {
listener.deviceAdded(controller.getControllerId());
}
}
}
}
private void handleUsbDeviceState(UsbDevice device) {
// Are we able to operate it?
if (XboxOneController.canClaimDevice(device)) {
// Do we have permission yet?
if (!usbManager.hasPermission(device)) {
// Let's ask for permission
usbManager.requestPermission(device, PendingIntent.getBroadcast(UsbDriverService.this, 0, new Intent(ACTION_USB_PERMISSION), 0));
return;
}
// Open the device
UsbDeviceConnection connection = usbManager.openDevice(device);
if (connection == null) {
LimeLog.warning("Unable to open USB device: "+device.getDeviceName());
return;
}
// Try to initialize it
XboxOneController controller = new XboxOneController(device, connection, nextDeviceId++, this);
if (!controller.start()) {
connection.close();
return;
}
// Add this controller to the list
controllers.add(controller);
}
}
@Override
public void onCreate() {
this.usbManager = (UsbManager) getSystemService(Context.USB_SERVICE);
// Register for USB attach broadcasts and permission completions
IntentFilter filter = new IntentFilter();
filter.addAction(UsbManager.ACTION_USB_DEVICE_ATTACHED);
filter.addAction(ACTION_USB_PERMISSION);
registerReceiver(receiver, filter);
// Enumerate existing devices
for (UsbDevice dev : usbManager.getDeviceList().values()) {
if (XboxOneController.canClaimDevice(dev)) {
// Start the process of claiming this device
handleUsbDeviceState(dev);
}
}
}
@Override
public void onDestroy() {
// Stop the attachment receiver
unregisterReceiver(receiver);
// Remove listeners
listener = null;
// Stop all controllers
while (controllers.size() > 0) {
// Stop and remove the controller
controllers.remove(0).stop();
}
}
@Override
public IBinder onBind(Intent intent) {
return binder;
}
}
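A client reaches this service through the local binder above. The sketch below shows one way that wiring might look; the surrounding class and the bindService plumbing are assumptions for illustration, while UsbDriverBinder.setListener() comes from the code in this file.
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;

import com.limelight.binding.input.driver.UsbDriverListener;
import com.limelight.binding.input.driver.UsbDriverService;

// Hypothetical client-side binding sketch, not part of the commit.
public class UsbDriverClient {
    private final UsbDriverListener listener;
    private UsbDriverService.UsbDriverBinder binder;

    private final ServiceConnection connection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            binder = (UsbDriverService.UsbDriverBinder) service;
            binder.setListener(listener); // already-attached controllers are re-reported here
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            binder = null;
        }
    };

    public UsbDriverClient(UsbDriverListener listener) {
        this.listener = listener;
    }

    public void connect(Context context) {
        context.bindService(new Intent(context, UsbDriverService.class),
                connection, Context.BIND_AUTO_CREATE);
    }

    public void disconnect(Context context) {
        if (binder != null) {
            binder.setListener(null);
        }
        context.unbindService(connection);
    }
}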

View File

@@ -0,0 +1,233 @@
package com.limelight.binding.input.driver;
import android.hardware.usb.UsbConstants;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbEndpoint;
import android.hardware.usb.UsbInterface;
import com.limelight.LimeLog;
import com.limelight.binding.video.MediaCodecHelper;
import com.limelight.nvstream.input.ControllerPacket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class XboxOneController {
private final UsbDevice device;
private final UsbDeviceConnection connection;
private final int deviceId;
private Thread inputThread;
private UsbDriverListener listener;
private boolean stopped;
private short buttonFlags;
private float leftTrigger, rightTrigger;
private float rightStickX, rightStickY;
private float leftStickX, leftStickY;
private static final int MICROSOFT_VID = 0x045e;
private static final int XB1_IFACE_SUBCLASS = 71;
private static final int XB1_IFACE_PROTOCOL = 208;
// FIXME: odata_serial
private static final byte[] XB1_INIT_DATA = {0x05, 0x20, 0x00, 0x01, 0x00};
public XboxOneController(UsbDevice device, UsbDeviceConnection connection, int deviceId, UsbDriverListener listener) {
this.device = device;
this.connection = connection;
this.deviceId = deviceId;
this.listener = listener;
}
public int getControllerId() {
return this.deviceId;
}
private void setButtonFlag(int buttonFlag, int data) {
if (data != 0) {
buttonFlags |= buttonFlag;
}
else {
buttonFlags &= ~buttonFlag;
}
}
private void reportInput() {
listener.reportControllerState(deviceId, buttonFlags, leftStickX, leftStickY,
rightStickX, rightStickY, leftTrigger, rightTrigger);
}
private void processButtons(ByteBuffer buffer) {
byte b = buffer.get();
setButtonFlag(ControllerPacket.PLAY_FLAG, b & 0x04);
setButtonFlag(ControllerPacket.BACK_FLAG, b & 0x08);
setButtonFlag(ControllerPacket.A_FLAG, b & 0x10);
setButtonFlag(ControllerPacket.B_FLAG, b & 0x20);
setButtonFlag(ControllerPacket.X_FLAG, b & 0x40);
setButtonFlag(ControllerPacket.Y_FLAG, b & 0x80);
b = buffer.get();
setButtonFlag(ControllerPacket.LEFT_FLAG, b & 0x04);
setButtonFlag(ControllerPacket.RIGHT_FLAG, b & 0x08);
setButtonFlag(ControllerPacket.UP_FLAG, b & 0x01);
setButtonFlag(ControllerPacket.DOWN_FLAG, b & 0x02);
setButtonFlag(ControllerPacket.LB_FLAG, b & 0x10);
setButtonFlag(ControllerPacket.RB_FLAG, b & 0x20);
setButtonFlag(ControllerPacket.LS_CLK_FLAG, b & 0x40);
setButtonFlag(ControllerPacket.RS_CLK_FLAG, b & 0x80);
leftTrigger = buffer.getShort() / 1023.0f;
rightTrigger = buffer.getShort() / 1023.0f;
leftStickX = buffer.getShort() / 32767.0f;
leftStickY = ~buffer.getShort() / 32767.0f;
rightStickX = buffer.getShort() / 32767.0f;
rightStickY = ~buffer.getShort() / 32767.0f;
reportInput();
}
private void processPacket(ByteBuffer buffer) {
switch (buffer.get())
{
case 0x20:
buffer.position(buffer.position()+3);
processButtons(buffer);
break;
case 0x07:
buffer.position(buffer.position() + 3);
setButtonFlag(ControllerPacket.SPECIAL_BUTTON_FLAG, buffer.get() & 0x01);
reportInput();
break;
}
}
private void startInputThread(final UsbEndpoint inEndpt) {
inputThread = new Thread() {
public void run() {
while (!isInterrupted() && !stopped) {
byte[] buffer = new byte[64];
int res;
//
// There's no way, as far as I can tell, to determine if a device has failed
// or if the timeout has simply expired. We'll check how long the transfer
// took to fail and assume the device failed if it happened before the timeout
// expired.
//
do {
// Read the next input state packet
long lastMillis = MediaCodecHelper.getMonotonicMillis();
res = connection.bulkTransfer(inEndpt, buffer, buffer.length, 3000);
if (res == -1 && MediaCodecHelper.getMonotonicMillis() - lastMillis < 1000) {
LimeLog.warning("Detected device I/O error");
XboxOneController.this.stop();
break;
}
} while (res == -1 && !isInterrupted() && !stopped);
if (res == -1 || stopped) {
break;
}
processPacket(ByteBuffer.wrap(buffer, 0, res).order(ByteOrder.LITTLE_ENDIAN));
}
}
};
inputThread.setName("Xbox One Controller - Input Thread");
inputThread.start();
}
public boolean start() {
// Force claim all interfaces
for (int i = 0; i < device.getInterfaceCount(); i++) {
UsbInterface iface = device.getInterface(i);
if (!connection.claimInterface(iface, true)) {
LimeLog.warning("Failed to claim interfaces");
return false;
}
}
// Find the endpoints
UsbEndpoint outEndpt = null;
UsbEndpoint inEndpt = null;
UsbInterface iface = device.getInterface(0);
for (int i = 0; i < iface.getEndpointCount(); i++) {
UsbEndpoint endpt = iface.getEndpoint(i);
if (endpt.getDirection() == UsbConstants.USB_DIR_IN) {
if (inEndpt != null) {
LimeLog.warning("Found duplicate IN endpoint");
return false;
}
inEndpt = endpt;
}
else if (endpt.getDirection() == UsbConstants.USB_DIR_OUT) {
if (outEndpt != null) {
LimeLog.warning("Found duplicate OUT endpoint");
return false;
}
outEndpt = endpt;
}
}
// Make sure the required endpoints were present
if (inEndpt == null || outEndpt == null) {
LimeLog.warning("Missing required endpoint");
return false;
}
// Send the initialization packet
int res = connection.bulkTransfer(outEndpt, XB1_INIT_DATA, XB1_INIT_DATA.length, 3000);
if (res != XB1_INIT_DATA.length) {
LimeLog.warning("Initialization transfer failed: "+res);
return false;
}
// Start listening for controller input
startInputThread(inEndpt);
// Report this device added via the listener
listener.deviceAdded(deviceId);
return true;
}
public void stop() {
if (stopped) {
return;
}
stopped = true;
// Stop the input thread
if (inputThread != null) {
inputThread.interrupt();
inputThread = null;
}
// Report the device removed
listener.deviceRemoved(deviceId);
// Close the USB connection
connection.close();
}
public static boolean canClaimDevice(UsbDevice device) {
return device.getVendorId() == MICROSOFT_VID &&
device.getInterfaceCount() >= 1 &&
device.getInterface(0).getInterfaceClass() == UsbConstants.USB_CLASS_VENDOR_SPEC &&
device.getInterface(0).getInterfaceSubclass() == XB1_IFACE_SUBCLASS &&
device.getInterface(0).getInterfaceProtocol() == XB1_IFACE_PROTOCOL;
}
}
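The 0x20 input report consumed by processPacket()/processButtons() above has a fixed little-endian layout: a type byte, three skipped header bytes, two button bitmask bytes, then 10-bit triggers and signed 16-bit stick axes. The sketch below rebuilds such a report in memory purely to document the offsets and normalization; the sample byte values and the meaning of the skipped header bytes are assumptions, only the field order and scaling mirror the parser above.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Illustrative only: documents the 0x20 report layout decoded by XboxOneController.
public class XboxOneReportLayoutDemo {
    public static void main(String[] args) {
        ByteBuffer report = ByteBuffer.allocate(18).order(ByteOrder.LITTLE_ENDIAN);
        report.put((byte) 0x20);         // message type: input state
        report.put(new byte[3]);         // skipped header bytes (sequence counter etc.)
        report.put((byte) 0x10);         // buttons 1: menu/view/A/B/X/Y bits (0x10 = A pressed)
        report.put((byte) 0x00);         // buttons 2: d-pad, bumpers, stick clicks
        report.putShort((short) 512);    // left trigger, 0..1023
        report.putShort((short) 0);      // right trigger, 0..1023
        report.putShort((short) 16384);  // left stick X, -32768..32767
        report.putShort((short) 0);      // left stick Y
        report.putShort((short) 0);      // right stick X
        report.putShort((short) 0);      // right stick Y
        report.flip();

        report.position(4);              // skip type + 3 header bytes, as processPacket() does
        report.get();                    // buttons 1
        report.get();                    // buttons 2
        float leftTrigger = report.getShort() / 1023.0f;  // -> ~0.5
        report.getShort();                                // right trigger
        float leftStickX = report.getShort() / 32767.0f;  // -> ~0.5
        System.out.println("LT=" + leftTrigger + " LSX=" + leftStickX);
    }
}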

View File

@@ -24,14 +24,10 @@ public class EvdevEvent {
public static final short BTN_FORWARD = 0x115;
public static final short BTN_BACK = 0x116;
public static final short BTN_TASK = 0x117;
public static final short BTN_GAMEPAD = 0x130;
/* Keys */
public static final short KEY_Q = 16;
public short type;
public short code;
public int value;
public final short type;
public final short code;
public final int value;
public EvdevEvent(short type, short code, int value) {
this.type = type;

View File

@@ -1,62 +1,58 @@
package com.limelight.binding.input.evdev;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import android.content.Context;
import com.limelight.LimeLog;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class EvdevHandler {
private String absolutePath;
private EvdevListener listener;
private boolean shutdown = false;
private int fd = -1;
private final EvdevListener listener;
private final String libraryPath;
private Thread handlerThread = new Thread() {
private boolean shutdown = false;
private InputStream evdevIn;
private OutputStream evdevOut;
private Process reader;
private static final byte UNGRAB_REQUEST = 1;
private static final byte REGRAB_REQUEST = 2;
private final Thread handlerThread = new Thread() {
@Override
public void run() {
// All the finally blocks here make this code look like a mess
// but it's important that we get this right to avoid causing
// system-wide input problems.
// Open the /dev/input/eventX file
fd = EvdevReader.open(absolutePath);
if (fd == -1) {
LimeLog.warning("Unable to open "+absolutePath);
return;
}
try {
// Check if it's a mouse or keyboard, but not a gamepad
if ((!EvdevReader.isMouse(fd) && !EvdevReader.isAlphaKeyboard(fd)) ||
EvdevReader.isGamepad(fd)) {
// We only handle keyboards and mice
return;
}
// Grab it for ourselves
if (!EvdevReader.grab(fd)) {
LimeLog.warning("Unable to grab "+absolutePath);
return;
}
LimeLog.info("Grabbed device for raw keyboard/mouse input: "+absolutePath);
ByteBuffer buffer = ByteBuffer.allocate(EvdevEvent.EVDEV_MAX_EVENT_SIZE).order(ByteOrder.nativeOrder());
try {
int deltaX = 0;
int deltaY = 0;
byte deltaScroll = 0;
while (!isInterrupted() && !shutdown) {
EvdevEvent event = EvdevReader.read(fd, buffer);
if (event == null) {
// Launch the evdev reader shell
ProcessBuilder builder = new ProcessBuilder("su", "-c", libraryPath+File.separatorChar+"libevdev_reader.so");
builder.redirectErrorStream(false);
try {
reader = builder.start();
} catch (IOException e) {
e.printStackTrace();
return;
}
switch (event.type)
{
evdevIn = reader.getInputStream();
evdevOut = reader.getOutputStream();
while (!isInterrupted() && !shutdown) {
EvdevEvent event;
try {
event = EvdevReader.read(evdevIn);
} catch (IOException e) {
event = null;
}
if (event == null) {
break;
}
switch (event.type) {
case EvdevEvent.EV_SYN:
if (deltaX != 0 || deltaY != 0) {
listener.mouseMove(deltaX, deltaY);
@@ -69,8 +65,7 @@ public class EvdevHandler {
break;
case EvdevEvent.EV_REL:
switch (event.code)
{
switch (event.code) {
case EvdevEvent.REL_X:
deltaX = event.value;
break;
@@ -84,8 +79,7 @@ public class EvdevHandler {
break;
case EvdevEvent.EV_KEY:
switch (event.code)
{
switch (event.code) {
case EvdevEvent.BTN_LEFT:
listener.mouseButtonEvent(EvdevListener.BUTTON_LEFT,
event.value != 0);
@@ -125,20 +119,32 @@ public class EvdevHandler {
break;
}
}
} finally {
// Release our grab
EvdevReader.ungrab(fd);
}
} finally {
// Close the file
EvdevReader.close(fd);
}
}
};
public EvdevHandler(String absolutePath, EvdevListener listener) {
this.absolutePath = absolutePath;
public EvdevHandler(Context context, EvdevListener listener) {
this.listener = listener;
this.libraryPath = context.getApplicationInfo().nativeLibraryDir;
}
public void regrabAll() {
if (!shutdown && evdevOut != null) {
try {
evdevOut.write(REGRAB_REQUEST);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void ungrabAll() {
if (!shutdown && evdevOut != null) {
try {
evdevOut.write(UNGRAB_REQUEST);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void start() {
@@ -146,22 +152,35 @@ public class EvdevHandler {
}
public void stop() {
// Close the fd. It doesn't matter if this races
// with the handler thread. We'll close this out from
// under the thread to wake it up
if (fd != -1) {
EvdevReader.close(fd);
}
// We need to stop the process in this context, otherwise
// we could get stuck waiting on output from the process
// in order to terminate it.
shutdown = true;
handlerThread.interrupt();
if (evdevIn != null) {
try {
evdevIn.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (evdevOut != null) {
try {
evdevOut.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (reader != null) {
reader.destroy();
}
try {
handlerThread.join();
} catch (InterruptedException ignored) {}
}
public void notifyDeleted() {
stop();
}
}
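Usage of the reworked handler is just construct, start, and stop; ungrabAll() and regrabAll() write the single control bytes defined above (1 and 2) to the helper process's stdin, which the bundled libevdev_reader.so is presumably expected to interpret (an assumption about the native side, which this commit does not show). A minimal usage sketch with illustrative names:
import android.content.Context;

import com.limelight.binding.input.evdev.EvdevHandler;
import com.limelight.binding.input.evdev.EvdevListener;

// Hypothetical wrapper showing the EvdevHandler lifecycle; not part of the commit.
public class RawInputCapture {
    private final EvdevHandler handler;

    public RawInputCapture(Context context, EvdevListener listener) {
        handler = new EvdevHandler(context, listener);
    }

    // Spawns "su -c <nativeLibraryDir>/libevdev_reader.so" and starts parsing its output
    public void begin() {
        handler.start();
    }

    // Temporarily hand the input devices back to Android (e.g. while a dialog needs them)
    public void pause() {
        handler.ungrabAll();
    }

    public void resume() {
        handler.regrabAll();
    }

    public void end() {
        handler.stop();
    }
}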

View File

@@ -1,105 +1,58 @@
package com.limelight.binding.input.evdev;
import android.os.Build;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Locale;
import java.nio.ByteOrder;
import com.limelight.LimeLog;
public class EvdevReader {
static {
System.loadLibrary("evdev_reader");
private static void readAll(InputStream in, ByteBuffer bb) throws IOException {
byte[] buf = bb.array();
int ret;
int offset = 0;
while (offset < buf.length) {
ret = in.read(buf, offset, buf.length-offset);
if (ret <= 0) {
throw new IOException("Read failed: "+ret);
}
public static void patchSeLinuxPolicies() {
//
// FIXME: We REALLY shouldn't be changing permissions on the input devices like this.
// We should probably do something clever with a separate daemon and talk via a localhost
// socket. We don't return the SELinux policies back to default after we're done which I feel
// bad about, but we do chmod the input devices back so I don't think any additional attack surface
// remains opened after streaming other than listing the /dev/input directory which you wouldn't
// normally be able to do with SELinux enforcing on Lollipop.
//
// We need to modify SELinux policies to allow us to capture input devices on Lollipop and possibly other
// more restrictive ROMs. Per Chainfire's SuperSU documentation, the supolicy binary is provided on
// 4.4 and later to do live SELinux policy changes.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
EvdevShell shell = EvdevShell.getInstance();
shell.runCommand("supolicy --live \"allow untrusted_app input_device dir { getattr read search }\" " +
"\"allow untrusted_app input_device chr_file { open read write ioctl }\"");
offset += ret;
}
}
// Requires root to chmod /dev/input/eventX
public static void setPermissions(String[] files, int octalPermissions) {
EvdevShell shell = EvdevShell.getInstance();
for (String file : files) {
shell.runCommand(String.format((Locale)null, "chmod %o %s", octalPermissions, file));
}
}
// Returns the fd to be passed to other function or -1 on error
public static native int open(String fileName);
// Prevent other apps (including Android itself) from using the device while "grabbed"
public static native boolean grab(int fd);
public static native boolean ungrab(int fd);
// Used for checking device capabilities
public static native boolean hasRelAxis(int fd, short axis);
public static native boolean hasAbsAxis(int fd, short axis);
public static native boolean hasKey(int fd, short key);
public static boolean isMouse(int fd) {
// This is the same check that Android does in EventHub.cpp
return hasRelAxis(fd, EvdevEvent.REL_X) &&
hasRelAxis(fd, EvdevEvent.REL_Y) &&
hasKey(fd, EvdevEvent.BTN_LEFT);
}
public static boolean isAlphaKeyboard(int fd) {
// This is the same check that Android does in EventHub.cpp
return hasKey(fd, EvdevEvent.KEY_Q);
}
public static boolean isGamepad(int fd) {
return hasKey(fd, EvdevEvent.BTN_GAMEPAD);
}
// Returns the bytes read or -1 on error
private static native int read(int fd, byte[] buffer);
// Takes a byte buffer to use to read the output into.
// This buffer MUST be in native byte order and at least
// EVDEV_MAX_EVENT_SIZE bytes long.
public static EvdevEvent read(int fd, ByteBuffer buffer) {
int bytesRead = read(fd, buffer.array());
if (bytesRead < 0) {
LimeLog.warning("Failed to read: "+bytesRead);
return null;
}
else if (bytesRead < EvdevEvent.EVDEV_MIN_EVENT_SIZE) {
LimeLog.warning("Short read: "+bytesRead);
public static EvdevEvent read(InputStream input) throws IOException {
ByteBuffer bb;
int packetLength;
// Read the packet length
bb = ByteBuffer.allocate(4).order(ByteOrder.nativeOrder());
readAll(input, bb);
packetLength = bb.getInt();
if (packetLength < EvdevEvent.EVDEV_MIN_EVENT_SIZE) {
LimeLog.warning("Short read: "+packetLength);
return null;
}
buffer.limit(bytesRead);
buffer.rewind();
// Read the rest of the packet
bb = ByteBuffer.allocate(packetLength).order(ByteOrder.nativeOrder());
readAll(input, bb);
// Throw away the time stamp
if (bytesRead == EvdevEvent.EVDEV_MAX_EVENT_SIZE) {
buffer.getLong();
buffer.getLong();
if (packetLength == EvdevEvent.EVDEV_MAX_EVENT_SIZE) {
bb.getLong();
bb.getLong();
} else {
buffer.getInt();
buffer.getInt();
bb.getInt();
bb.getInt();
}
return new EvdevEvent(buffer.getShort(), buffer.getShort(), buffer.getInt());
return new EvdevEvent(bb.getShort(), bb.getShort(), bb.getInt());
}
// Closes the fd from open()
public static native int close(int fd);
}
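The stream parsed above is a simple length-prefixed framing of struct input_event: a 4-byte native-order length, followed by the raw event (timestamp, then type, code, and value). The writer below is a sketch of what the native reader binary is presumed to emit; the 16-byte (32-bit kernel) and 24-byte (64-bit kernel) sizes are an assumption based on EVDEV_MIN_EVENT_SIZE and EVDEV_MAX_EVENT_SIZE, not something this diff states.
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Illustrative writer for the framing consumed by EvdevReader.read(); assumes the
// 32-bit struct input_event layout (8-byte timestamp + type + code + value = 16 bytes).
public class EvdevFrameWriter {
    public static void writeEvent(OutputStream out, short type, short code, int value)
            throws IOException {
        ByteBuffer payload = ByteBuffer.allocate(16).order(ByteOrder.nativeOrder());
        payload.putInt(0);          // tv_sec  (discarded by the parser)
        payload.putInt(0);          // tv_usec (discarded by the parser)
        payload.putShort(type);
        payload.putShort(code);
        payload.putInt(value);

        ByteBuffer frame = ByteBuffer.allocate(4 + 16).order(ByteOrder.nativeOrder());
        frame.putInt(16);           // length prefix, read first by EvdevReader.read()
        frame.put(payload.array());

        out.write(frame.array());
        out.flush();
    }
}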

View File

@@ -1,116 +0,0 @@
package com.limelight.binding.input.evdev;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Scanner;
import java.util.UUID;
public class EvdevShell {
private OutputStream stdin;
private InputStream stdout;
private Process shell;
private final String uuidString = UUID.randomUUID().toString();
private static final EvdevShell globalShell = new EvdevShell();
public static EvdevShell getInstance() {
return globalShell;
}
public void startShell() {
ProcessBuilder builder = new ProcessBuilder("su");
try {
// Redirect stderr to stdout
builder.redirectErrorStream(true);
shell = builder.start();
stdin = shell.getOutputStream();
stdout = shell.getInputStream();
} catch (IOException e) {
// This is unexpected
e.printStackTrace();
// Kill the shell if it spawned
if (stdin != null) {
try {
stdin.close();
} catch (IOException e1) {
e1.printStackTrace();
} finally {
stdin = null;
}
}
if (stdout != null) {
try {
stdout.close();
} catch (IOException e1) {
e1.printStackTrace();
} finally {
stdout = null;
}
}
if (shell != null) {
shell.destroy();
shell = null;
}
}
}
public void runCommand(String command) {
if (shell == null) {
// Shell never started
return;
}
try {
// Write the command followed by an echo with our UUID
stdin.write((command+'\n').getBytes("UTF-8"));
stdin.write(("echo "+uuidString+'\n').getBytes("UTF-8"));
stdin.flush();
// This is the only command in flight so we can use a scanner
// without worrying about it eating too many characters
Scanner scanner = new Scanner(stdout);
while (scanner.hasNext()) {
if (scanner.next().contains(uuidString)) {
// Our command ran
return;
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
public void stopShell() throws InterruptedException {
boolean exitWritten = false;
if (shell == null) {
// Shell never started
return;
}
try {
stdin.write("exit\n".getBytes("UTF-8"));
exitWritten = true;
} catch (IOException e) {
// We'll destroy the process without
// waiting for it to terminate since
// we don't know whether our exit command made it
e.printStackTrace();
}
if (exitWritten) {
try {
shell.waitFor();
} finally {
shell.destroy();
}
}
else {
shell.destroy();
}
}
}

View File

@@ -4,7 +4,7 @@ import android.view.KeyEvent;
public class EvdevTranslator {
public static final short EVDEV_KEY_CODES[] = {
private static final short[] EVDEV_KEY_CODES = {
0, //KeyEvent.VK_RESERVED
KeyEvent.KEYCODE_ESCAPE,
KeyEvent.KEYCODE_1,

View File

@@ -1,188 +0,0 @@
package com.limelight.binding.input.evdev;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import com.limelight.LimeLog;
import android.os.FileObserver;
@SuppressWarnings("ALL")
public class EvdevWatcher {
private static final String PATH = "/dev/input";
private static final String REQUIRED_FILE_PREFIX = "event";
private final HashMap<String, EvdevHandler> handlers = new HashMap<String, EvdevHandler>();
private boolean shutdown = false;
private boolean init = false;
private boolean ungrabbed = false;
private EvdevListener listener;
private Thread startThread;
private static boolean patchedSeLinuxPolicies = false;
private FileObserver observer = new FileObserver(PATH, FileObserver.CREATE | FileObserver.DELETE) {
@Override
public void onEvent(int event, String fileName) {
if (fileName == null) {
return;
}
if (!fileName.startsWith(REQUIRED_FILE_PREFIX)) {
return;
}
synchronized (handlers) {
if (shutdown) {
return;
}
if ((event & FileObserver.CREATE) != 0) {
LimeLog.info("Starting evdev handler for "+fileName);
if (!init) {
// If this a real new device, update permissions again so we can read it
EvdevReader.setPermissions(new String[]{PATH + "/" + fileName}, 0666);
}
EvdevHandler handler = new EvdevHandler(PATH + "/" + fileName, listener);
// If we're ungrabbed now, don't start the handler
if (!ungrabbed) {
handler.start();
}
handlers.put(fileName, handler);
}
if ((event & FileObserver.DELETE) != 0) {
LimeLog.info("Halting evdev handler for "+fileName);
EvdevHandler handler = handlers.remove(fileName);
if (handler != null) {
handler.notifyDeleted();
}
}
}
}
};
public EvdevWatcher(EvdevListener listener) {
this.listener = listener;
}
private File[] rundownWithPermissionsChange(int newPermissions) {
// Rundown existing files
File devInputDir = new File(PATH);
File[] files = devInputDir.listFiles();
if (files == null) {
return new File[0];
}
// Set desired permissions
String[] filePaths = new String[files.length];
for (int i = 0; i < files.length; i++) {
filePaths[i] = files[i].getAbsolutePath();
}
EvdevReader.setPermissions(filePaths, newPermissions);
return files;
}
public void ungrabAll() {
synchronized (handlers) {
// Note that we're ungrabbed for now
ungrabbed = true;
// Stop all handlers
for (EvdevHandler handler : handlers.values()) {
handler.stop();
}
}
}
public void regrabAll() {
synchronized (handlers) {
// We're regrabbing everything now
ungrabbed = false;
for (Map.Entry<String, EvdevHandler> entry : handlers.entrySet()) {
// We need to recreate each entry since we can't reuse a stopped one
entry.setValue(new EvdevHandler(PATH + "/" + entry.getKey(), listener));
entry.getValue().start();
}
}
}
public void start() {
startThread = new Thread() {
@Override
public void run() {
// Initialize the root shell
EvdevShell.getInstance().startShell();
// Patch SELinux policies (if needed)
if (!patchedSeLinuxPolicies) {
EvdevReader.patchSeLinuxPolicies();
patchedSeLinuxPolicies = true;
}
// List all files and allow us access
File[] files = rundownWithPermissionsChange(0666);
init = true;
for (File f : files) {
observer.onEvent(FileObserver.CREATE, f.getName());
}
// Done with initial onEvent calls
init = false;
// Start watching for new files
observer.startWatching();
synchronized (startThread) {
// Wait to be awoken again by shutdown()
try {
startThread.wait();
} catch (InterruptedException e) {}
}
// Give up eventX permissions
rundownWithPermissionsChange(0660);
// Kill the root shell
try {
EvdevShell.getInstance().stopShell();
} catch (InterruptedException e) {}
}
};
startThread.start();
}
public void shutdown() {
// Let the start thread clean up in its own sweet time
synchronized (startThread) {
startThread.notify();
}
// Stop the observer
observer.stopWatching();
synchronized (handlers) {
// Stop creating new handlers
shutdown = true;
// If we've already ungrabbed, there's nothing else to do
if (ungrabbed) {
return;
}
// Stop all handlers
for (EvdevHandler handler : handlers.values()) {
handler.stop();
}
}
}
}

View File

@@ -1,288 +0,0 @@
package com.limelight.binding.video;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.graphics.PixelFormat;
import android.os.Build;
import android.view.SurfaceHolder;
import com.limelight.LimeLog;
import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.av.video.VideoDepacketizer;
import com.limelight.nvstream.av.video.cpu.AvcDecoder;
@SuppressWarnings("EmptyCatchBlock")
public class AndroidCpuDecoderRenderer extends EnhancedDecoderRenderer {
private Thread rendererThread, decoderThread;
private int targetFps;
private static final int DECODER_BUFFER_SIZE = 92*1024;
private ByteBuffer decoderBuffer;
// Only sleep if the difference is above this value
private static final int WAIT_CEILING_MS = 5;
private static final int LOW_PERF = 1;
private static final int MED_PERF = 2;
private static final int HIGH_PERF = 3;
private int totalFrames;
private long totalTimeMs;
private int cpuCount = Runtime.getRuntime().availableProcessors();
@SuppressWarnings("unused")
private int findOptimalPerformanceLevel() {
StringBuilder cpuInfo = new StringBuilder();
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(new File("/proc/cpuinfo")));
for (;;) {
int ch = br.read();
if (ch == -1)
break;
cpuInfo.append((char)ch);
}
// Here we're doing very simple heuristics based on CPU model
String cpuInfoStr = cpuInfo.toString();
// We order them from greatest to least for proper detection
// of devices with multiple sets of cores (like Exynos 5 Octa)
// TODO Make this better (only even kind of works on ARM)
if (Build.FINGERPRINT.contains("generic")) {
// Emulator
return LOW_PERF;
}
else if (cpuInfoStr.contains("0xc0f")) {
// Cortex-A15
return MED_PERF;
}
else if (cpuInfoStr.contains("0xc09")) {
// Cortex-A9
return LOW_PERF;
}
else if (cpuInfoStr.contains("0xc07")) {
// Cortex-A7
return LOW_PERF;
}
else {
// Didn't have anything we're looking for
return MED_PERF;
}
} catch (IOException e) {
} finally {
if (br != null) {
try {
br.close();
} catch (IOException e) {}
}
}
// Couldn't read cpuinfo, so assume medium
return MED_PERF;
}
@Override
public boolean setup(int width, int height, int redrawRate, Object renderTarget, int drFlags) {
this.targetFps = redrawRate;
int perfLevel = LOW_PERF; //findOptimalPerformanceLevel();
int threadCount;
int avcFlags = 0;
switch (perfLevel) {
case HIGH_PERF:
// Single-threaded low-latency decode is ideal but hard to achieve
avcFlags = AvcDecoder.LOW_LATENCY_DECODE;
threadCount = 1;
break;
case LOW_PERF:
// Disable the loop filter for performance reasons
avcFlags = AvcDecoder.FAST_BILINEAR_FILTERING;
// Use plenty of threads to try to utilize the CPU as best we can
threadCount = cpuCount - 1;
break;
default:
case MED_PERF:
avcFlags = AvcDecoder.BILINEAR_FILTERING;
// Only use 2 threads to minimize frame processing latency
threadCount = 2;
break;
}
// If the user wants quality, we'll remove the low-image-quality (IQ) flags
if ((drFlags & VideoDecoderRenderer.FLAG_PREFER_QUALITY) != 0) {
// Make sure the loop filter is enabled
avcFlags &= ~AvcDecoder.DISABLE_LOOP_FILTER;
// Disable the non-compliant speed optimizations
avcFlags &= ~AvcDecoder.FAST_DECODE;
LimeLog.info("Using high quality decoding");
}
SurfaceHolder sh = (SurfaceHolder)renderTarget;
sh.setFormat(PixelFormat.RGBX_8888);
int err = AvcDecoder.init(width, height, avcFlags, threadCount);
if (err != 0) {
throw new IllegalStateException("AVC decoder initialization failure: "+err);
}
if (!AvcDecoder.setRenderTarget(sh.getSurface())) {
return false;
}
decoderBuffer = ByteBuffer.allocate(DECODER_BUFFER_SIZE + AvcDecoder.getInputPaddingSize());
LimeLog.info("Using software decoding (performance level: "+perfLevel+")");
return true;
}
@Override
public boolean start(final VideoDepacketizer depacketizer) {
decoderThread = new Thread() {
@Override
public void run() {
DecodeUnit du;
while (!isInterrupted()) {
try {
du = depacketizer.takeNextDecodeUnit();
} catch (InterruptedException e) {
break;
}
submitDecodeUnit(du);
depacketizer.freeDecodeUnit(du);
}
}
};
decoderThread.setName("Video - Decoder (CPU)");
decoderThread.setPriority(Thread.MAX_PRIORITY - 1);
decoderThread.start();
rendererThread = new Thread() {
@Override
public void run() {
long nextFrameTime = System.currentTimeMillis();
DecodeUnit du;
while (!isInterrupted())
{
long diff = nextFrameTime - System.currentTimeMillis();
if (diff > WAIT_CEILING_MS) {
try {
Thread.sleep(diff - WAIT_CEILING_MS);
} catch (InterruptedException e) {
return;
}
continue;
}
nextFrameTime = computePresentationTimeMs(targetFps);
AvcDecoder.redraw();
}
}
};
rendererThread.setName("Video - Renderer (CPU)");
rendererThread.setPriority(Thread.MAX_PRIORITY);
rendererThread.start();
return true;
}
private long computePresentationTimeMs(int frameRate) {
return System.currentTimeMillis() + (1000 / frameRate);
}
@Override
public void stop() {
rendererThread.interrupt();
decoderThread.interrupt();
try {
rendererThread.join();
} catch (InterruptedException e) { }
try {
decoderThread.join();
} catch (InterruptedException e) { }
}
@Override
public void release() {
AvcDecoder.destroy();
}
private boolean submitDecodeUnit(DecodeUnit decodeUnit) {
byte[] data;
// Use the reserved decoder buffer if this decode unit will fit
if (decodeUnit.getDataLength() <= DECODER_BUFFER_SIZE) {
decoderBuffer.clear();
for (ByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
decoderBuffer.put(bbd.data, bbd.offset, bbd.length);
}
data = decoderBuffer.array();
}
else {
data = new byte[decodeUnit.getDataLength()+AvcDecoder.getInputPaddingSize()];
int offset = 0;
for (ByteBufferDescriptor bbd : decodeUnit.getBufferList()) {
System.arraycopy(bbd.data, bbd.offset, data, offset, bbd.length);
offset += bbd.length;
}
}
boolean success = (AvcDecoder.decode(data, 0, decodeUnit.getDataLength()) == 0);
if (success) {
long timeAfterDecode = System.currentTimeMillis();
// Add delta time to the totals (excluding probable outliers)
long delta = timeAfterDecode - decodeUnit.getReceiveTimestamp();
if (delta >= 0 && delta < 1000) {
totalTimeMs += delta;
totalFrames++;
}
}
return success;
}
@Override
public int getCapabilities() {
return 0;
}
@Override
public int getAverageDecoderLatency() {
return 0;
}
@Override
public int getAverageEndToEndLatency() {
if (totalFrames == 0) {
return 0;
}
return (int)(totalTimeMs / totalFrames);
}
@Override
public String getDecoderName() {
return "CPU decoding";
}
}

View File

@@ -1,87 +0,0 @@
package com.limelight.binding.video;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.av.video.VideoDepacketizer;
public class ConfigurableDecoderRenderer extends EnhancedDecoderRenderer {
private EnhancedDecoderRenderer decoderRenderer;
@Override
public void release() {
if (decoderRenderer != null) {
decoderRenderer.release();
}
}
@Override
public boolean setup(int width, int height, int redrawRate, Object renderTarget, int drFlags) {
if (decoderRenderer == null) {
throw new IllegalStateException("ConfigurableDecoderRenderer not initialized");
}
return decoderRenderer.setup(width, height, redrawRate, renderTarget, drFlags);
}
public void initializeWithFlags(int drFlags) {
if ((drFlags & VideoDecoderRenderer.FLAG_FORCE_HARDWARE_DECODING) != 0 ||
((drFlags & VideoDecoderRenderer.FLAG_FORCE_SOFTWARE_DECODING) == 0 &&
MediaCodecHelper.findProbableSafeDecoder() != null)) {
decoderRenderer = new MediaCodecDecoderRenderer();
}
else {
decoderRenderer = new AndroidCpuDecoderRenderer();
}
}
public boolean isHardwareAccelerated() {
if (decoderRenderer == null) {
throw new IllegalStateException("ConfigurableDecoderRenderer not initialized");
}
return (decoderRenderer instanceof MediaCodecDecoderRenderer);
}
@Override
public boolean start(VideoDepacketizer depacketizer) {
return decoderRenderer.start(depacketizer);
}
@Override
public void stop() {
decoderRenderer.stop();
}
@Override
public int getCapabilities() {
return decoderRenderer.getCapabilities();
}
@Override
public int getAverageDecoderLatency() {
if (decoderRenderer != null) {
return decoderRenderer.getAverageDecoderLatency();
}
else {
return 0;
}
}
@Override
public int getAverageEndToEndLatency() {
if (decoderRenderer != null) {
return decoderRenderer.getAverageEndToEndLatency();
}
else {
return 0;
}
}
@Override
public String getDecoderName() {
if (decoderRenderer != null) {
return decoderRenderer.getDecoderName();
}
else {
return null;
}
}
}

View File

@@ -2,6 +2,7 @@ package com.limelight.binding.video;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
public abstract class EnhancedDecoderRenderer implements VideoDecoderRenderer {
public abstract String getDecoderName();
public abstract class EnhancedDecoderRenderer extends VideoDecoderRenderer {
public abstract boolean isHevcSupported();
public abstract boolean isAvcSupported();
}

View File

@@ -4,6 +4,7 @@ import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.concurrent.locks.LockSupport;
import org.jcodec.codecs.h264.H264Utils;
import org.jcodec.codecs.h264.io.model.SeqParameterSet;
import org.jcodec.codecs.h264.io.model.VUIParameters;
@@ -12,8 +13,8 @@ import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.DecodeUnit;
import com.limelight.nvstream.av.video.VideoDecoderRenderer;
import com.limelight.nvstream.av.video.VideoDepacketizer;
import com.limelight.preferences.PreferenceConfiguration;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
@@ -22,16 +23,22 @@ import android.media.MediaCodec.CodecException;
import android.os.Build;
import android.view.SurfaceHolder;
@SuppressWarnings("unused")
public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
private ByteBuffer[] videoDecoderInputBuffers;
// Used on versions < 5.0
private ByteBuffer[] legacyInputBuffers;
private String avcDecoderName;
private String hevcDecoderName;
private MediaCodec videoDecoder;
private Thread rendererThread;
private boolean needsSpsBitstreamFixup, isExynos4;
private VideoDepacketizer depacketizer;
private boolean adaptivePlayback;
private boolean adaptivePlayback, directSubmit;
private boolean constrainedHighProfile;
private int initialWidth, initialHeight;
private VideoFormat videoFormat;
private boolean needsBaselineSpsHack;
private SeqParameterSet savedSps;
@ -41,64 +48,146 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
private long decoderTimeMs;
private int totalFrames;
private String decoderName;
private int numSpsIn;
private int numPpsIn;
private int numVpsIn;
private int numIframeIn;
private static final boolean ENABLE_ASYNC_RENDERER = false;
private MediaCodecInfo findAvcDecoder() {
MediaCodecInfo decoder = MediaCodecHelper.findProbableSafeDecoder("video/avc", MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
if (decoder == null) {
decoder = MediaCodecHelper.findFirstDecoder("video/avc");
}
return decoder;
}
@TargetApi(Build.VERSION_CODES.KITKAT)
public MediaCodecDecoderRenderer() {
private MediaCodecInfo findHevcDecoder(int videoFormat) {
// Don't return anything if H.265 is forced off
if (videoFormat == PreferenceConfiguration.FORCE_H265_OFF) {
return null;
}
// We don't try the first HEVC decoder. We'd rather fall back to hardware-accelerated AVC instead
//
// We need HEVC Main profile, so we could pass that constant to findProbableSafeDecoder; however,
// some decoders (at least Qualcomm's Snapdragon 805) don't properly report support
// for even required levels of HEVC.
MediaCodecInfo decoderInfo = MediaCodecHelper.findProbableSafeDecoder("video/hevc", -1);
if (decoderInfo != null) {
if (!MediaCodecHelper.decoderIsWhitelistedForHevc(decoderInfo.getName())) {
LimeLog.info("Found HEVC decoder, but it's not whitelisted - "+decoderInfo.getName());
if (videoFormat == PreferenceConfiguration.FORCE_H265_ON) {
LimeLog.info("Forcing H265 enabled despite non-whitelisted decoder");
}
else {
return null;
}
}
}
return decoderInfo;
}
public MediaCodecDecoderRenderer(int videoFormat) {
//dumpDecoders();
MediaCodecInfo decoder = MediaCodecHelper.findProbableSafeDecoder();
if (decoder == null) {
decoder = MediaCodecHelper.findFirstDecoder();
MediaCodecInfo avcDecoder = findAvcDecoder();
if (avcDecoder != null) {
avcDecoderName = avcDecoder.getName();
LimeLog.info("Selected AVC decoder: "+avcDecoderName);
}
if (decoder == null) {
// This case is handled later in setup()
return;
else {
LimeLog.warning("No AVC decoder found");
}
decoderName = decoder.getName();
// Set decoder-specific attributes
adaptivePlayback = MediaCodecHelper.decoderSupportsAdaptivePlayback(decoderName, decoder);
needsSpsBitstreamFixup = MediaCodecHelper.decoderNeedsSpsBitstreamRestrictions(decoderName, decoder);
needsBaselineSpsHack = MediaCodecHelper.decoderNeedsBaselineSpsHack(decoderName, decoder);
isExynos4 = MediaCodecHelper.isExynos4Device();
if (needsSpsBitstreamFixup) {
LimeLog.info("Decoder "+decoderName+" needs SPS bitstream restrictions fixup");
MediaCodecInfo hevcDecoder = findHevcDecoder(videoFormat);
if (hevcDecoder != null) {
hevcDecoderName = hevcDecoder.getName();
LimeLog.info("Selected HEVC decoder: "+hevcDecoderName);
}
if (needsBaselineSpsHack) {
LimeLog.info("Decoder "+decoderName+" needs baseline SPS hack");
}
if (isExynos4) {
LimeLog.info("Decoder "+decoderName+" is on Exynos 4");
else {
LimeLog.info("No HEVC decoder found");
}
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public boolean setup(int width, int height, int redrawRate, Object renderTarget, int drFlags) {
public boolean isHevcSupported() {
return hevcDecoderName != null;
}
@Override
public boolean isAvcSupported() {
return avcDecoderName != null;
}
@Override
public boolean setup(VideoDecoderRenderer.VideoFormat format, int width, int height, int redrawRate, Object renderTarget, int drFlags) {
this.initialWidth = width;
this.initialHeight = height;
this.videoFormat = format;
if (decoderName == null) {
LimeLog.severe("No available hardware decoder!");
String mimeType;
String selectedDecoderName;
if (videoFormat == VideoFormat.H264) {
mimeType = "video/avc";
selectedDecoderName = avcDecoderName;
if (avcDecoderName == null) {
LimeLog.severe("No available AVC decoder!");
return false;
}
// These fixups only apply to H264 decoders
needsSpsBitstreamFixup = MediaCodecHelper.decoderNeedsSpsBitstreamRestrictions(selectedDecoderName);
needsBaselineSpsHack = MediaCodecHelper.decoderNeedsBaselineSpsHack(selectedDecoderName);
constrainedHighProfile = MediaCodecHelper.decoderNeedsConstrainedHighProfile(selectedDecoderName);
isExynos4 = MediaCodecHelper.isExynos4Device();
if (needsSpsBitstreamFixup) {
LimeLog.info("Decoder "+selectedDecoderName+" needs SPS bitstream restrictions fixup");
}
if (needsBaselineSpsHack) {
LimeLog.info("Decoder "+selectedDecoderName+" needs baseline SPS hack");
}
if (constrainedHighProfile) {
LimeLog.info("Decoder "+selectedDecoderName+" needs constrained high profile");
}
if (isExynos4) {
LimeLog.info("Decoder "+selectedDecoderName+" is on Exynos 4");
}
}
else if (videoFormat == VideoFormat.H265) {
mimeType = "video/hevc";
selectedDecoderName = hevcDecoderName;
if (hevcDecoderName == null) {
LimeLog.severe("No available HEVC decoder!");
return false;
}
}
else {
// Unknown format
return false;
}
// Set decoder-specific attributes
directSubmit = MediaCodecHelper.decoderCanDirectSubmit(selectedDecoderName);
adaptivePlayback = MediaCodecHelper.decoderSupportsAdaptivePlayback(selectedDecoderName);
if (directSubmit) {
LimeLog.info("Decoder "+selectedDecoderName+" will use direct submit");
}
// Codecs have been known to throw all sorts of crazy runtime exceptions
// due to implementation problems
try {
videoDecoder = MediaCodec.createByCodecName(decoderName);
videoDecoder = MediaCodec.createByCodecName(selectedDecoderName);
} catch (Exception e) {
return false;
}
MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
MediaFormat videoFormat = MediaFormat.createVideoFormat(mimeType, width, height);
// Adaptive playback can also be enabled by the whitelist on pre-KitKat devices
// so we don't fill these pre-KitKat
@ -107,62 +196,15 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
videoFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
}
// On Lollipop, we use asynchronous mode to avoid having a busy looping renderer thread
if (ENABLE_ASYNC_RENDERER && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
videoDecoder.setCallback(new MediaCodec.Callback() {
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
LimeLog.info("Output format changed");
LimeLog.info("New output Format: " + format);
}
@Override
public void onOutputBufferAvailable(MediaCodec codec, int index,
BufferInfo info) {
try {
// FIXME: It looks like we can't frameskip here
codec.releaseOutputBuffer(index, true);
} catch (Exception e) {
handleDecoderException(MediaCodecDecoderRenderer.this, e, null, 0);
}
}
@Override
public void onInputBufferAvailable(MediaCodec codec, int index) {
try {
submitDecodeUnit(depacketizer.takeNextDecodeUnit(), codec.getInputBuffer(index), index);
} catch (InterruptedException e) {
// What do we do here?
e.printStackTrace();
} catch (Exception e) {
handleDecoderException(MediaCodecDecoderRenderer.this, e, null, 0);
}
}
@Override
public void onError(MediaCodec codec, CodecException e) {
if (e.isTransient()) {
LimeLog.warning(e.getDiagnosticInfo());
e.printStackTrace();
}
else {
LimeLog.severe(e.getDiagnosticInfo());
e.printStackTrace();
}
}
});
}
videoDecoder.configure(videoFormat, ((SurfaceHolder)renderTarget).getSurface(), null, 0);
videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
LimeLog.info("Using hardware decoding");
LimeLog.info("Using codec "+selectedDecoderName+" for hardware decoding "+mimeType);
return true;
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void handleDecoderException(MediaCodecDecoderRenderer dr, Exception e, ByteBuffer buf, int codecFlags) {
private void handleDecoderException(Exception e, ByteBuffer buf, int codecFlags) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
if (e instanceof CodecException) {
CodecException codecExc = (CodecException) e;
@ -177,23 +219,100 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
}
}
// Only throw if this happens at the beginning of a stream
if (totalFrames < 60) {
if (buf != null || codecFlags != 0) {
throw new RendererException(dr, e, buf, codecFlags);
throw new RendererException(this, e, buf, codecFlags);
}
else {
throw new RendererException(dr, e);
throw new RendererException(this, e);
}
}
}
private void startRendererThread()
private void startDirectSubmitRendererThread()
{
rendererThread = new Thread() {
@Override
public void run() {
BufferInfo info = new BufferInfo();
while (!isInterrupted()) {
try {
// Try to output a frame
int outIndex = videoDecoder.dequeueOutputBuffer(info, 50000);
if (outIndex >= 0) {
long presentationTimeUs = info.presentationTimeUs;
int lastIndex = outIndex;
// Get the last output buffer in the queue
while ((outIndex = videoDecoder.dequeueOutputBuffer(info, 0)) >= 0) {
videoDecoder.releaseOutputBuffer(lastIndex, false);
lastIndex = outIndex;
presentationTimeUs = info.presentationTimeUs;
}
// Render the last buffer
videoDecoder.releaseOutputBuffer(lastIndex, true);
// Add delta time to the totals (excluding probable outliers)
long delta = MediaCodecHelper.getMonotonicMillis() - (presentationTimeUs / 1000);
if (delta >= 0 && delta < 1000) {
decoderTimeMs += delta;
totalTimeMs += delta;
}
} else {
switch (outIndex) {
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
LimeLog.info("Output format changed");
LimeLog.info("New output Format: " + videoDecoder.getOutputFormat());
break;
default:
break;
}
}
} catch (Exception e) {
handleDecoderException(e, null, 0);
}
}
}
};
rendererThread.setName("Video - Renderer (MediaCodec)");
rendererThread.setPriority(Thread.NORM_PRIORITY + 2);
rendererThread.start();
}
private int dequeueInputBuffer(boolean wait, boolean infiniteWait) {
int index;
long startTime, queueTime;
startTime = MediaCodecHelper.getMonotonicMillis();
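// Note: MediaCodec.dequeueInputBuffer() takes its timeout in microseconds, so the
// 3000 below is a 3 ms wait and a negative value blocks until a buffer is free.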
index = videoDecoder.dequeueInputBuffer(wait ? (infiniteWait ? -1 : 3000) : 0);
if (index < 0) {
return index;
}
queueTime = MediaCodecHelper.getMonotonicMillis();
if (queueTime - startTime >= 20) {
LimeLog.warning("Queue input buffer ran long: " + (queueTime - startTime) + " ms");
}
return index;
}
private void startLegacyRendererThread()
{
rendererThread = new Thread() {
@SuppressWarnings("deprecation")
@Override
public void run() {
BufferInfo info = new BufferInfo();
DecodeUnit du = null;
int inputIndex = -1;
long lastDuDequeueTime = 0;
while (!isInterrupted())
{
// In order to get as much data to the decoder as early as possible,
@ -201,22 +320,26 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
if (inputIndex == -1 && du == null) {
try {
for (int i = 0; i < 5; i++) {
inputIndex = videoDecoder.dequeueInputBuffer(0);
inputIndex = dequeueInputBuffer(false, false);
du = depacketizer.pollNextDecodeUnit();
if (du != null) {
lastDuDequeueTime = MediaCodecHelper.getMonotonicMillis();
notifyDuReceived(du);
}
// Stop if we can't get a DU or input buffer
if (du == null || inputIndex == -1) {
break;
}
submitDecodeUnit(du, videoDecoderInputBuffers[inputIndex], inputIndex);
submitDecodeUnit(du, inputIndex);
du = null;
inputIndex = -1;
}
} catch (Exception e) {
inputIndex = -1;
handleDecoderException(MediaCodecDecoderRenderer.this, e, null, 0);
handleDecoderException(e, null, 0);
}
}
@ -231,22 +354,31 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
// If we've got a DU waiting to be given to the decoder,
// wait a full 3 ms for an input buffer. Otherwise
// just see if we can get one immediately.
inputIndex = videoDecoder.dequeueInputBuffer(du != null ? 3000 : 0);
inputIndex = dequeueInputBuffer(du != null, false);
} catch (Exception e) {
inputIndex = -1;
handleDecoderException(MediaCodecDecoderRenderer.this, e, null, 0);
handleDecoderException(e, null, 0);
}
}
// Grab a decode unit if we don't have one already
if (du == null) {
du = depacketizer.pollNextDecodeUnit();
if (du != null) {
lastDuDequeueTime = MediaCodecHelper.getMonotonicMillis();
notifyDuReceived(du);
}
}
// If we've got both a decode unit and an input buffer, we'll
// submit now. Otherwise, we wait until we have one.
if (du != null && inputIndex >= 0) {
submitDecodeUnit(du, videoDecoderInputBuffers[inputIndex], inputIndex);
long submissionTime = MediaCodecHelper.getMonotonicMillis();
if (submissionTime - lastDuDequeueTime >= 20) {
LimeLog.warning("Receiving an input buffer took too long: "+(submissionTime - lastDuDequeueTime)+" ms");
}
submitDecodeUnit(du, inputIndex);
// DU and input buffer have both been consumed
du = null;
@ -272,7 +404,7 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
videoDecoder.releaseOutputBuffer(lastIndex, true);
// Add delta time to the totals (excluding probable outliers)
long delta = System.currentTimeMillis()-(presentationTimeUs/1000);
long delta = MediaCodecHelper.getMonotonicMillis()-(presentationTimeUs/1000);
if (delta >= 0 && delta < 1000) {
decoderTimeMs += delta;
totalTimeMs += delta;
@ -286,9 +418,6 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
LockSupport.parkNanos(1);
}
break;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
LimeLog.info("Output buffers changed");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
LimeLog.info("Output format changed");
LimeLog.info("New output Format: " + videoDecoder.getOutputFormat());
@ -298,7 +427,7 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
}
}
} catch (Exception e) {
handleDecoderException(MediaCodecDecoderRenderer.this, e, null, 0);
handleDecoderException(e, null, 0);
}
}
}
@ -316,11 +445,17 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
// Start the decoder
videoDecoder.start();
// On devices pre-Lollipop, we'll use a rendering thread
if (!ENABLE_ASYNC_RENDERER || Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
startRendererThread();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
legacyInputBuffers = videoDecoder.getInputBuffers();
}
if (directSubmit) {
startDirectSubmitRendererThread();
}
else {
startLegacyRendererThread();
}
return true;
}
@ -353,11 +488,11 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
for (i = 0; i < 25; i++) {
try {
videoDecoder.queueInputBuffer(inputBufferIndex,
0, length,
offset, length,
timestampUs, codecFlags);
break;
} catch (Exception e) {
handleDecoderException(this, e, null, codecFlags);
handleDecoderException(e, null, codecFlags);
lastException = e;
}
}
@ -367,16 +502,44 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
}
}
@SuppressWarnings("deprecation")
private void submitDecodeUnit(DecodeUnit decodeUnit, ByteBuffer buf, int inputBufferIndex) {
long currentTime = System.currentTimeMillis();
long delta = currentTime-decodeUnit.getReceiveTimestamp();
if (delta >= 0 && delta < 1000) {
totalTimeMs += currentTime-decodeUnit.getReceiveTimestamp();
totalFrames++;
// Using the new getInputBuffer() API on Lollipop allows
// the framework to do some performance optimizations for us
private ByteBuffer getEmptyInputBuffer(int inputBufferIndex) {
ByteBuffer buf;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
buf = videoDecoder.getInputBuffer(inputBufferIndex);
}
else {
buf = legacyInputBuffers[inputBufferIndex];
// Clear old input data pre-Lollipop
buf.clear();
}
long timestampUs = currentTime * 1000;
return buf;
}
private void doProfileSpecificSpsPatching(SeqParameterSet sps) {
// Some devices benefit from setting constraint flags 4 & 5 to make this Constrained
// High Profile, which lets the decoder assume there will be no B-frames and
// reduce delay and buffering accordingly. Some devices (Marvell, Exynos 4) don't
// like it, so we only set these flags on devices confirmed to benefit from them.
if (sps.profile_idc == 100 && constrainedHighProfile) {
LimeLog.info("Setting constraint set flags for constrained high profile");
sps.constraint_set_4_flag = true;
sps.constraint_set_5_flag = true;
}
else {
// Force the constraints unset otherwise (some may be set by default)
sps.constraint_set_4_flag = false;
sps.constraint_set_5_flag = false;
}
}
@SuppressWarnings("deprecation")
private void submitDecodeUnit(DecodeUnit decodeUnit, int inputBufferIndex) {
long timestampUs = System.nanoTime() / 1000;
if (timestampUs <= lastTimestampUs) {
// We can't submit multiple buffers with the same timestamp
// so bump it up by one before queuing
@ -384,8 +547,7 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
}
lastTimestampUs = timestampUs;
// Clear old input data
buf.clear();
ByteBuffer buf = getEmptyInputBuffer(inputBufferIndex);
int codecFlags = 0;
int decodeUnitFlags = decodeUnit.getFlags();
@ -400,7 +562,9 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
boolean needsSpsReplay = false;
if ((decodeUnitFlags & DecodeUnit.DU_FLAG_CODEC_CONFIG) != 0) {
ByteBufferDescriptor header = decodeUnit.getBufferList().get(0);
ByteBufferDescriptor header = decodeUnit.getBufferHead();
// H264 SPS
if (header.data[header.offset+4] == 0x67) {
numSpsIn++;
@ -409,7 +573,9 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
// Skip to the start of the NALU data
spsBuf.position(header.offset+5);
SeqParameterSet sps = SeqParameterSet.read(spsBuf);
// The H264Utils.readSPS function safely handles
// Annex B NALUs (including NALUs with escape sequences)
SeqParameterSet sps = H264Utils.readSPS(spsBuf);
// Some decoders rely on H264 level to decide how many buffers are needed
// Since we only need one frame buffered, we'll set the level as low as we can
@ -437,19 +603,44 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
LimeLog.info("Patching num_ref_frames in SPS");
sps.num_ref_frames = 1;
// GFE 2.5.11 changed the SPS to add additional extensions
// Some devices don't like these so we remove them here.
sps.vuiParams.video_signal_type_present_flag = false;
sps.vuiParams.colour_description_present_flag = false;
sps.vuiParams.chroma_loc_info_present_flag = false;
if (needsSpsBitstreamFixup || isExynos4) {
// The SPS that comes in the current H264 bytestream doesn't set bitstream_restriction_flag
// or max_dec_frame_buffering which increases decoding latency on Tegra.
LimeLog.info("Adding bitstream restrictions");
// GFE 2.5.11 started sending bitstream restrictions
if (sps.vuiParams.bitstreamRestriction == null) {
LimeLog.info("Adding bitstream restrictions");
sps.vuiParams.bitstreamRestriction = new VUIParameters.BitstreamRestriction();
sps.vuiParams.bitstreamRestriction.motion_vectors_over_pic_boundaries_flag = true;
sps.vuiParams.bitstreamRestriction.max_bytes_per_pic_denom = 2;
sps.vuiParams.bitstreamRestriction.max_bits_per_mb_denom = 1;
sps.vuiParams.bitstreamRestriction.log2_max_mv_length_horizontal = 16;
sps.vuiParams.bitstreamRestriction.log2_max_mv_length_vertical = 16;
sps.vuiParams.bitstreamRestriction.num_reorder_frames = 0;
sps.vuiParams.bitstreamRestriction.max_dec_frame_buffering = 1;
}
else {
LimeLog.info("Patching bitstream restrictions");
}
// Some devices throw errors if max_dec_frame_buffering < num_ref_frames
sps.vuiParams.bitstreamRestriction.max_dec_frame_buffering = sps.num_ref_frames;
// These values are the defaults for these fields and are more aggressive than
// what GFE sends in 2.5.11, but they don't seem to cause picture problems.
sps.vuiParams.bitstreamRestriction.max_bytes_per_pic_denom = 2;
sps.vuiParams.bitstreamRestriction.max_bits_per_mb_denom = 1;
// log2_max_mv_length_horizontal and log2_max_mv_length_vertical are set to more
// conservative values by GFE 2.5.11. We'll let those values stand.
}
else {
// Devices that didn't/couldn't get bitstream restrictions before GFE 2.5.11
// will continue to not receive them now
sps.vuiParams.bitstreamRestriction = null;
}
// If we need to hack this SPS to say we're baseline, do so now
@ -459,11 +650,16 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
savedSps = sps;
}
// Patch the SPS constraint flags
doProfileSpecificSpsPatching(sps);
// Write the annex B header
buf.put(header.data, header.offset, 5);
// Write the modified SPS to the input buffer
sps.write(buf);
// The H264Utils.writeSPS function safely handles
// Annex B NALUs (including NALUs with escape sequences)
ByteBuffer escapedNalu = H264Utils.writeSPS(sps, header.length);
buf.put(escapedNalu);
queueInputBuffer(inputBufferIndex,
0, buf.position(),
@ -471,6 +667,8 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
depacketizer.freeDecodeUnit(decodeUnit);
return;
// H264 PPS
} else if (header.data[header.offset+4] == 0x68) {
numPpsIn++;
@ -482,11 +680,20 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
needsSpsReplay = true;
}
}
else if (header.data[header.offset+4] == 0x40) {
// HEVC VPS NAL unit (type 32)
numVpsIn++;
}
else if (header.data[header.offset+4] == 0x42) {
// HEVC SPS NAL unit (type 33)
numSpsIn++;
}
else if (header.data[header.offset+4] == 0x44) {
// HEVC PPS NAL unit (type 34)
numPpsIn++;
}
}
// Copy data from our buffer list into the input buffer
for (ByteBufferDescriptor desc : decodeUnit.getBufferList())
{
for (ByteBufferDescriptor desc = decodeUnit.getBufferHead();
desc != null; desc = desc.nextDescriptor) {
buf.put(desc.data, desc.offset, desc.length);
}
@ -502,10 +709,8 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
}
private void replaySps() {
int inputIndex = videoDecoder.dequeueInputBuffer(-1);
ByteBuffer inputBuffer = videoDecoderInputBuffers[inputIndex];
inputBuffer.clear();
int inputIndex = dequeueInputBuffer(true, true);
ByteBuffer inputBuffer = getEmptyInputBuffer(inputIndex);
// Write the Annex B header
inputBuffer.put(new byte[]{0x00, 0x00, 0x00, 0x01, 0x67});
@ -513,8 +718,13 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
// Switch the H264 profile back to high
savedSps.profile_idc = 100;
// Write the SPS data
savedSps.write(inputBuffer);
// Patch the SPS constraint flags
doProfileSpecificSpsPatching(savedSps);
// The H264Utils.writeSPS function safely handles
// Annex B NALUs (including NALUs with escape sequences)
ByteBuffer escapedNalu = H264Utils.writeSPS(savedSps, 128);
inputBuffer.put(escapedNalu);
// No need for the SPS anymore
savedSps = null;
@ -522,7 +732,7 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
// Queue the new SPS
queueInputBuffer(inputIndex,
0, inputBuffer.position(),
System.currentTimeMillis() * 1000,
System.nanoTime() / 1000,
MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
LimeLog.info("SPS replay complete");
@ -530,8 +740,15 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
@Override
public int getCapabilities() {
return adaptivePlayback ?
int caps = 0;
caps |= adaptivePlayback ?
VideoDecoderRenderer.CAPABILITY_ADAPTIVE_RESOLUTION : 0;
caps |= directSubmit ?
VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT : 0;
return caps;
}
@Override
@ -550,16 +767,40 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
return (int)(totalTimeMs / totalFrames);
}
private void notifyDuReceived(DecodeUnit du) {
long currentTime = MediaCodecHelper.getMonotonicMillis();
long delta = currentTime-du.getReceiveTimestamp();
if (delta >= 0 && delta < 1000) {
totalTimeMs += currentTime-du.getReceiveTimestamp();
totalFrames++;
}
}
@Override
public String getDecoderName() {
return decoderName;
public void directSubmitDecodeUnit(DecodeUnit du) {
int inputIndex;
notifyDuReceived(du);
for (;;) {
try {
inputIndex = dequeueInputBuffer(true, true);
break;
} catch (Exception e) {
handleDecoderException(e, null, 0);
}
}
if (inputIndex >= 0) {
submitDecodeUnit(du, inputIndex);
}
}
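For context, the CAPABILITY_DIRECT_SUBMIT flag advertised by getCapabilities() above is presumably how the streaming core decides to hand decode units straight to directSubmitDecodeUnit() on the receive thread instead of leaving them for a renderer thread. A hedged sketch of that caller-side routing follows; routeDecodeUnit and queueForRendererThread are hypothetical names, not code from this change.

void routeDecodeUnit(MediaCodecDecoderRenderer renderer, DecodeUnit du) {
    if ((renderer.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) {
        // Low-latency decoders: submit on the receive thread
        renderer.directSubmitDecodeUnit(du);
    }
    else {
        // Otherwise leave the unit for the legacy renderer thread to poll
        queueForRendererThread(du); // hypothetical placeholder
    }
}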
public class RendererException extends RuntimeException {
private static final long serialVersionUID = 8985937536997012406L;
private Exception originalException;
private MediaCodecDecoderRenderer renderer;
private final Exception originalException;
private final MediaCodecDecoderRenderer renderer;
private ByteBuffer currentBuffer;
private int currentCodecFlags;
@ -578,10 +819,14 @@ public class MediaCodecDecoderRenderer extends EnhancedDecoderRenderer {
public String toString() {
String str = "";
str += "Decoder: "+renderer.decoderName+"\n";
str += "Format: "+renderer.videoFormat+"\n";
str += "AVC Decoder: "+renderer.avcDecoderName+"\n";
str += "HEVC Decoder: "+renderer.hevcDecoderName+"\n";
str += "Initial video dimensions: "+renderer.initialWidth+"x"+renderer.initialHeight+"\n";
str += "In stats: "+renderer.numSpsIn+", "+renderer.numPpsIn+", "+renderer.numIframeIn+"\n";
str += "In stats: "+renderer.numVpsIn+", "+renderer.numSpsIn+", "+renderer.numPpsIn+", "+renderer.numIframeIn+"\n";
str += "Total frames: "+renderer.totalFrames+"\n";
str += "Average end-to-end client latency: "+getAverageEndToEndLatency()+"ms\n";
str += "Average hardware decoder latency: "+getAverageDecoderLatency()+"ms\n";
if (currentBuffer != null) {
str += "Current buffer: ";

View File

@ -10,6 +10,9 @@ import java.util.Locale;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaCodecInfo.CodecCapabilities;
@ -20,12 +23,29 @@ import com.limelight.LimeLog;
public class MediaCodecHelper {
public static final List<String> preferredDecoders;
private static final List<String> preferredDecoders;
public static final List<String> blacklistedDecoderPrefixes;
public static final List<String> spsFixupBitstreamFixupDecoderPrefixes;
public static final List<String> whitelistedAdaptiveResolutionPrefixes;
public static final List<String> baselineProfileHackPrefixes;
private static final List<String> blacklistedDecoderPrefixes;
private static final List<String> spsFixupBitstreamFixupDecoderPrefixes;
private static final List<String> whitelistedAdaptiveResolutionPrefixes;
private static final List<String> baselineProfileHackPrefixes;
private static final List<String> directSubmitPrefixes;
private static final List<String> constrainedHighProfilePrefixes;
private static final List<String> whitelistedHevcDecoders;
static {
directSubmitPrefixes = new LinkedList<String>();
// These decoders have low enough input buffer latency that they
// can be directly invoked from the receive thread
directSubmitPrefixes.add("omx.qcom");
directSubmitPrefixes.add("omx.sec");
directSubmitPrefixes.add("omx.exynos");
directSubmitPrefixes.add("omx.intel");
directSubmitPrefixes.add("omx.brcm");
directSubmitPrefixes.add("omx.TI");
directSubmitPrefixes.add("omx.arc");
}
static {
preferredDecoders = new LinkedList<String>();
@ -43,7 +63,6 @@ public class MediaCodecHelper {
spsFixupBitstreamFixupDecoderPrefixes = new LinkedList<String>();
spsFixupBitstreamFixupDecoderPrefixes.add("omx.nvidia");
spsFixupBitstreamFixupDecoderPrefixes.add("omx.qcom");
spsFixupBitstreamFixupDecoderPrefixes.add("omx.mtk");
spsFixupBitstreamFixupDecoderPrefixes.add("omx.brcm");
baselineProfileHackPrefixes = new LinkedList<String>();
@ -54,6 +73,37 @@ public class MediaCodecHelper {
whitelistedAdaptiveResolutionPrefixes.add("omx.qcom");
whitelistedAdaptiveResolutionPrefixes.add("omx.sec");
whitelistedAdaptiveResolutionPrefixes.add("omx.TI");
constrainedHighProfilePrefixes = new LinkedList<String>();
constrainedHighProfilePrefixes.add("omx.intel");
whitelistedHevcDecoders = new LinkedList<>();
whitelistedHevcDecoders.add("omx.exynos");
// whitelistedHevcDecoders.add("omx.nvidia"); TODO: This needs a similar fixup to the Tegra 3
whitelistedHevcDecoders.add("omx.mtk");
whitelistedHevcDecoders.add("omx.amlogic");
whitelistedHevcDecoders.add("omx.rk");
// omx.qcom added conditionally during initialization
}
public static void initializeWithContext(Context context) {
ActivityManager activityManager =
(ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo configInfo = activityManager.getDeviceConfigurationInfo();
if (configInfo.reqGlEsVersion != ConfigurationInfo.GL_ES_VERSION_UNDEFINED) {
// Qualcomm's early HEVC decoders break hard on our HEVC stream. The best check to
// tell the good from the bad decoders is the generation of Adreno GPU included:
// 3xx - bad
// 4xx - good
//
// Unfortunately, it's not that easy to get that information here, so I'll use an
// approximation by checking the GLES level (<= 3.0 is bad).
LimeLog.info("OpenGL ES version: "+configInfo.reqGlEsVersion);
if (configInfo.reqGlEsVersion > 0x30000) {
LimeLog.info("Added omx.qcom to supported decoders based on GLES 3.1+ support");
whitelistedHevcDecoders.add("omx.qcom");
}
}
}
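A hedged usage note: the GLES probe above only matters if it runs before any decoder selection, so a caller would presumably invoke it early, for instance from an Activity's onCreate(), before constructing a renderer. Sketch only; the activityContext variable is an assumption.

// Illustrative call order (not part of this change)
MediaCodecHelper.initializeWithContext(activityContext); // finalize the HEVC whitelist first
MediaCodecDecoderRenderer renderer =
        new MediaCodecDecoderRenderer(PreferenceConfiguration.FORCE_H265_OFF);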
private static boolean isDecoderInList(List<String> decoderList, String decoderName) {
@ -69,8 +119,12 @@ public class MediaCodecHelper {
return false;
}
public static long getMonotonicMillis() {
return System.nanoTime() / 1000000L;
}
@TargetApi(Build.VERSION_CODES.KITKAT)
public static boolean decoderSupportsAdaptivePlayback(String decoderName, MediaCodecInfo decoderInfo) {
public static boolean decoderSupportsAdaptivePlayback(String decoderName) {
/*
FIXME: Intel's decoder on Nexus Player forces the high latency path if adaptive playback is enabled
so we'll keep it off for now, since we don't know whether other devices also do the same
@ -98,14 +152,47 @@ public class MediaCodecHelper {
return false;
}
public static boolean decoderNeedsSpsBitstreamRestrictions(String decoderName, MediaCodecInfo decoderInfo) {
public static boolean decoderNeedsConstrainedHighProfile(String decoderName) {
return isDecoderInList(constrainedHighProfilePrefixes, decoderName);
}
public static boolean decoderCanDirectSubmit(String decoderName) {
return isDecoderInList(directSubmitPrefixes, decoderName) && !isExynos4Device();
}
public static boolean decoderNeedsSpsBitstreamRestrictions(String decoderName) {
return isDecoderInList(spsFixupBitstreamFixupDecoderPrefixes, decoderName);
}
public static boolean decoderNeedsBaselineSpsHack(String decoderName, MediaCodecInfo decoderInfo) {
public static boolean decoderNeedsBaselineSpsHack(String decoderName) {
return isDecoderInList(baselineProfileHackPrefixes, decoderName);
}
public static boolean decoderIsWhitelistedForHevc(String decoderName) {
// TODO: Shield Tablet K1/LTE?
//
// NVIDIA does partial HEVC acceleration on the Shield Tablet. I don't know
// whether the performance is good enough to use for streaming, but they're
// using the same omx.nvidia.h265.decode name as the Shield TV which has a
// fully accelerated HEVC pipeline. AFAIK, the only K1 device with this
// partially accelerated HEVC decoder is the Shield Tablet, so I'll
// check for it here.
//
// TODO: Temporarily disabled with NVIDIA HEVC support
/*if (Build.DEVICE.equalsIgnoreCase("shieldtablet")) {
return false;
}*/
// Google didn't have official support for HEVC (or more importantly, a CTS test) until
// Lollipop. I've seen some MediaTek devices on 4.4 crash when attempting to use HEVC,
// so I'm restricting HEVC usage to Lollipop and higher.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
return false;
}
return isDecoderInList(whitelistedHevcDecoders, decoderName);
}
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private static LinkedList<MediaCodecInfo> getMediaCodecList() {
@ -146,7 +233,7 @@ public class MediaCodecHelper {
return str;
}
public static MediaCodecInfo findPreferredDecoder() {
private static MediaCodecInfo findPreferredDecoder() {
// This is a different algorithm than the other findXXXDecoder functions,
// because we want to evaluate the decoders in our list's order
// rather than MediaCodecList's order
@ -169,7 +256,7 @@ public class MediaCodecHelper {
return null;
}
public static MediaCodecInfo findFirstDecoder() {
public static MediaCodecInfo findFirstDecoder(String mimeType) {
for (MediaCodecInfo codecInfo : getMediaCodecList()) {
// Skip encoders
if (codecInfo.isEncoder()) {
@ -182,9 +269,9 @@ public class MediaCodecHelper {
continue;
}
// Find a decoder that supports H.264
// Find a decoder that supports the specified video format
for (String mime : codecInfo.getSupportedTypes()) {
if (mime.equalsIgnoreCase("video/avc")) {
if (mime.equalsIgnoreCase(mimeType)) {
LimeLog.info("First decoder choice is "+codecInfo.getName());
return codecInfo;
}
@ -194,7 +281,7 @@ public class MediaCodecHelper {
return null;
}
public static MediaCodecInfo findProbableSafeDecoder() {
public static MediaCodecInfo findProbableSafeDecoder(String mimeType, int requiredProfile) {
// First look for a preferred decoder by name
MediaCodecInfo info = findPreferredDecoder();
if (info != null) {
@ -204,12 +291,12 @@ public class MediaCodecHelper {
// Now look for decoders we know are safe
try {
// If this function completes, it will determine if the decoder is safe
return findKnownSafeDecoder();
return findKnownSafeDecoder(mimeType, requiredProfile);
} catch (Exception e) {
// Some buggy devices seem to throw exceptions
// from getCapabilitiesForType() so we'll just assume
// they're okay and go with the first one we find
return findFirstDecoder();
return findFirstDecoder(mimeType);
}
}
@ -217,7 +304,7 @@ public class MediaCodecHelper {
// since some bad decoders can throw IllegalArgumentExceptions unexpectedly
// and we want to be sure all callers are handling this possibility
@SuppressWarnings("RedundantThrows")
public static MediaCodecInfo findKnownSafeDecoder() throws Exception {
private static MediaCodecInfo findKnownSafeDecoder(String mimeType, int requiredProfile) throws Exception {
for (MediaCodecInfo codecInfo : getMediaCodecList()) {
// Skip encoders
if (codecInfo.isEncoder()) {
@ -230,21 +317,26 @@ public class MediaCodecHelper {
continue;
}
// Find a decoder that supports H.264 high profile
// Find a decoder that supports the requested video format
for (String mime : codecInfo.getSupportedTypes()) {
if (mime.equalsIgnoreCase("video/avc")) {
if (mime.equalsIgnoreCase(mimeType)) {
LimeLog.info("Examining decoder capabilities of "+codecInfo.getName());
CodecCapabilities caps = codecInfo.getCapabilitiesForType(mime);
if (requiredProfile != -1) {
for (CodecProfileLevel profile : caps.profileLevels) {
if (profile.profile == CodecProfileLevel.AVCProfileHigh) {
LimeLog.info("Decoder "+codecInfo.getName()+" supports high profile");
LimeLog.info("Selected decoder: "+codecInfo.getName());
if (profile.profile == requiredProfile) {
LimeLog.info("Decoder " + codecInfo.getName() + " supports required profile");
return codecInfo;
}
}
LimeLog.info("Decoder "+codecInfo.getName()+" does NOT support high profile");
LimeLog.info("Decoder " + codecInfo.getName() + " does NOT support required profile");
}
else {
return codecInfo;
}
}
}
}

View File

@ -1,10 +1,11 @@
package com.limelight.computers;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringReader;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
@ -30,18 +31,22 @@ import android.os.IBinder;
import org.xmlpull.v1.XmlPullParserException;
public class ComputerManagerService extends Service {
private static final int POLLING_PERIOD_MS = 3000;
private static final int SERVERINFO_POLLING_PERIOD_MS = 1500;
private static final int APPLIST_POLLING_PERIOD_MS = 30000;
private static final int APPLIST_FAILED_POLLING_RETRY_MS = 2000;
private static final int MDNS_QUERY_PERIOD_MS = 1000;
private static final int FAST_POLL_TIMEOUT = 500;
private static final int OFFLINE_POLL_TRIES = 5;
private ComputerManagerBinder binder = new ComputerManagerBinder();
private final ComputerManagerBinder binder = new ComputerManagerBinder();
private ComputerDatabaseManager dbManager;
private AtomicInteger dbRefCount = new AtomicInteger(0);
private final AtomicInteger dbRefCount = new AtomicInteger(0);
private IdentityManager idManager;
private final LinkedList<PollingTuple> pollingTuples = new LinkedList<PollingTuple>();
private ComputerManagerListener listener = null;
private AtomicInteger activePolls = new AtomicInteger(0);
private final AtomicInteger activePolls = new AtomicInteger(0);
private boolean pollingActive = false;
private DiscoveryService.DiscoveryBinder discoveryBinder;
@ -65,10 +70,7 @@ public class ComputerManagerService extends Service {
};
// Returns true if the details object was modified
private boolean runPoll(ComputerDetails details)
{
boolean newPc = details.name.isEmpty();
private boolean runPoll(ComputerDetails details, boolean newPc, int offlineCount) throws InterruptedException {
if (!getLocalDatabaseReference()) {
return false;
}
@ -76,12 +78,23 @@ public class ComputerManagerService extends Service {
activePolls.incrementAndGet();
// Poll the machine
if (!doPollMachine(details)) {
try {
if (!pollComputer(details)) {
if (!newPc && offlineCount < OFFLINE_POLL_TRIES) {
// Return without calling the listener
releaseLocalDatabaseReference();
return false;
}
details.state = ComputerDetails.State.OFFLINE;
details.reachability = ComputerDetails.Reachability.OFFLINE;
}
} catch (InterruptedException e) {
releaseLocalDatabaseReference();
throw e;
} finally {
activePolls.decrementAndGet();
}
// If it's online, update our persistent state
if (details.state == ComputerDetails.State.ONLINE) {
@ -91,7 +104,7 @@ public class ComputerManagerService extends Service {
if (dbManager.getComputerByName(details.name) == null) {
// It's gone
releaseLocalDatabaseReference();
return true;
return false;
}
}
@ -107,24 +120,34 @@ public class ComputerManagerService extends Service {
return true;
}
private Thread createPollingThread(final ComputerDetails details) {
private Thread createPollingThread(final PollingTuple tuple) {
Thread t = new Thread() {
@Override
public void run() {
int offlineCount = 0;
while (!isInterrupted() && pollingActive) {
try {
// Only allow one request to the machine at a time
synchronized (tuple.networkLock) {
// Check if this poll has modified the details
runPoll(details);
if (!runPoll(tuple.computer, false, offlineCount)) {
LimeLog.warning(tuple.computer.name + " is offline (try " + offlineCount + ")");
offlineCount++;
} else {
offlineCount = 0;
}
}
// Wait until the next polling interval
try {
Thread.sleep(POLLING_PERIOD_MS);
Thread.sleep(SERVERINFO_POLLING_PERIOD_MS);
} catch (InterruptedException e) {
break;
}
}
}
};
t.setName("Polling thread for "+details.localIp.getHostAddress());
t.setName("Polling thread for " + tuple.computer.localIp.getHostAddress());
return t;
}
@ -146,7 +169,7 @@ public class ComputerManagerService extends Service {
// Report this computer initially
listener.notifyComputerUpdated(tuple.computer);
tuple.thread = createPollingThread(tuple.computer);
tuple.thread = createPollingThread(tuple);
tuple.thread.start();
}
}
@ -177,10 +200,6 @@ public class ComputerManagerService extends Service {
return ComputerManagerService.this.addComputerBlocking(addr);
}
public void addComputer(InetAddress addr) {
ComputerManagerService.this.addComputer(addr);
}
public void removeComputer(String name) {
ComputerManagerService.this.removeComputer(name);
}
@ -239,7 +258,7 @@ public class ComputerManagerService extends Service {
@Override
public void notifyComputerAdded(MdnsComputer computer) {
// Kick off a serverinfo poll on this machine
addComputer(computer.getAddress());
addComputerBlocking(computer.getAddress());
}
@Override
@ -255,28 +274,11 @@ public class ComputerManagerService extends Service {
};
}
public void addComputer(InetAddress addr) {
// Setup a placeholder
ComputerDetails fakeDetails = new ComputerDetails();
fakeDetails.localIp = addr;
fakeDetails.remoteIp = addr;
fakeDetails.name = "";
addTuple(fakeDetails);
}
private void addTuple(ComputerDetails details) {
synchronized (pollingTuples) {
for (PollingTuple tuple : pollingTuples) {
// Check if this is the same computer
if (tuple.computer == details ||
// If there's no name on one of these computers, compare with the local IP
((details.name.isEmpty() || tuple.computer.name.isEmpty()) &&
tuple.computer.localIp.equals(details.localIp)) ||
// If there is a name on both computers, compare with name
((!details.name.isEmpty() && !tuple.computer.name.isEmpty()) &&
tuple.computer.name.equals(details.name))) {
if (tuple.computer.uuid.equals(details.uuid)) {
// Update details anyway in case this machine has been re-added by IP
// after not being reachable by our existing information
tuple.computer.localIp = details.localIp;
@ -284,7 +286,7 @@ public class ComputerManagerService extends Service {
// Start a polling thread if polling is active
if (pollingActive && tuple.thread == null) {
tuple.thread = createPollingThread(details);
tuple.thread = createPollingThread(tuple);
tuple.thread.start();
}
@ -294,7 +296,10 @@ public class ComputerManagerService extends Service {
}
// If we got here, we didn't find an entry
PollingTuple tuple = new PollingTuple(details, pollingActive ? createPollingThread(details) : null);
PollingTuple tuple = new PollingTuple(details, null);
if (pollingActive) {
tuple.thread = createPollingThread(tuple);
}
pollingTuples.add(tuple);
if (tuple.thread != null) {
tuple.thread.start();
@ -307,13 +312,18 @@ public class ComputerManagerService extends Service {
ComputerDetails fakeDetails = new ComputerDetails();
fakeDetails.localIp = addr;
fakeDetails.remoteIp = addr;
fakeDetails.name = "";
// Block while we try to fill the details
runPoll(fakeDetails);
try {
runPoll(fakeDetails, true, 0);
} catch (InterruptedException e) {
return false;
}
// If the machine is reachable, it was successful
if (fakeDetails.state == ComputerDetails.State.ONLINE) {
LimeLog.info("New PC ("+fakeDetails.name+") is UUID "+fakeDetails.uuid);
// Start a polling thread for this machine
addTuple(fakeDetails);
return true;
@ -364,6 +374,11 @@ public class ComputerManagerService extends Service {
}
private ComputerDetails tryPollIp(ComputerDetails details, InetAddress ipAddr) {
// Fast poll this address first to determine if we can connect at the TCP layer
if (!fastPollIp(ipAddr)) {
return null;
}
try {
NvHTTP http = new NvHTTP(ipAddr, idManager.getUniqueId(),
null, PlatformBinding.getCryptoProvider(ComputerManagerService.this));
@ -384,14 +399,91 @@ public class ComputerManagerService extends Service {
}
}
private boolean pollComputer(ComputerDetails details, boolean localFirst) {
// Just try to establish a TCP connection to speculatively detect a running
// GFE server
private boolean fastPollIp(InetAddress addr) {
Socket s = new Socket();
try {
s.connect(new InetSocketAddress(addr, NvHTTP.HTTPS_PORT), FAST_POLL_TIMEOUT);
s.close();
return true;
} catch (IOException e) {
return false;
}
}
private void startFastPollThread(final InetAddress addr, final boolean[] info) {
Thread t = new Thread() {
@Override
public void run() {
boolean pollRes = fastPollIp(addr);
synchronized (info) {
info[0] = true; // Done
info[1] = pollRes; // Polling result
info.notify();
}
}
};
t.setName("Fast Poll - "+addr.getHostAddress());
t.start();
}
private ComputerDetails.Reachability fastPollPc(final InetAddress local, final InetAddress remote) throws InterruptedException {
final boolean[] remoteInfo = new boolean[2];
final boolean[] localInfo = new boolean[2];
startFastPollThread(local, localInfo);
startFastPollThread(remote, remoteInfo);
// Check local first
synchronized (localInfo) {
while (!localInfo[0]) {
localInfo.wait(500);
}
if (localInfo[1]) {
return ComputerDetails.Reachability.LOCAL;
}
}
// Now remote
synchronized (remoteInfo) {
while (!remoteInfo[0]) {
remoteInfo.wait(500);
}
if (remoteInfo[1]) {
return ComputerDetails.Reachability.REMOTE;
}
}
return ComputerDetails.Reachability.OFFLINE;
}
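A possible design alternative, shown only as a hedged sketch: the boolean[]-as-monitor handshake above could also be expressed with CountDownLatch and AtomicBoolean (java.util.concurrent imports assumed), reusing the fastPollIp() helper defined earlier. This is not code from the change, just an equivalent formulation.

private ComputerDetails.Reachability fastPollPcWithLatches(final InetAddress local, final InetAddress remote) throws InterruptedException {
    final AtomicBoolean localUp = new AtomicBoolean();
    final AtomicBoolean remoteUp = new AtomicBoolean();
    final CountDownLatch localDone = new CountDownLatch(1);
    final CountDownLatch remoteDone = new CountDownLatch(1);
    new Thread(new Runnable() {
        public void run() { localUp.set(fastPollIp(local)); localDone.countDown(); }
    }, "Fast Poll - "+local.getHostAddress()).start();
    new Thread(new Runnable() {
        public void run() { remoteUp.set(fastPollIp(remote)); remoteDone.countDown(); }
    }, "Fast Poll - "+remote.getHostAddress()).start();
    // Prefer the local address, mirroring the logic above
    localDone.await();
    if (localUp.get()) {
        return ComputerDetails.Reachability.LOCAL;
    }
    remoteDone.await();
    return remoteUp.get() ? ComputerDetails.Reachability.REMOTE : ComputerDetails.Reachability.OFFLINE;
}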
private ReachabilityTuple pollForReachability(ComputerDetails details) throws InterruptedException {
ComputerDetails polledDetails;
ComputerDetails.Reachability reachability;
// If the local address is routable across the Internet,
// always consider this PC remote, to be conservative
if (details.localIp.equals(details.remoteIp)) {
localFirst = false;
reachability = ComputerDetails.Reachability.REMOTE;
}
else {
// Do a TCP-level connection to the HTTP server to see if it's listening
LimeLog.info("Starting fast poll for "+details.name+" ("+details.localIp+", "+details.remoteIp+")");
reachability = fastPollPc(details.localIp, details.remoteIp);
LimeLog.info("Fast poll for "+details.name+" returned "+reachability.toString());
// If no connection could be established to either IP address, there's nothing we can do
if (reachability == ComputerDetails.Reachability.OFFLINE) {
return null;
}
}
boolean localFirst = (reachability == ComputerDetails.Reachability.LOCAL);
if (localFirst) {
polledDetails = tryPollIp(details, details.localIp);
@ -400,6 +492,7 @@ public class ComputerManagerService extends Service {
polledDetails = tryPollIp(details, details.remoteIp);
}
InetAddress reachableAddr = null;
if (polledDetails == null && !details.localIp.equals(details.remoteIp)) {
// Failed, so let's try the fallback
if (!localFirst) {
@ -409,42 +502,69 @@ public class ComputerManagerService extends Service {
polledDetails = tryPollIp(details, details.remoteIp);
}
// The fallback poll worked
if (polledDetails != null) {
polledDetails.reachability = !localFirst ? ComputerDetails.Reachability.LOCAL :
ComputerDetails.Reachability.REMOTE;
// The fallback poll worked
reachableAddr = !localFirst ? details.localIp : details.remoteIp;
}
}
else if (polledDetails != null) {
polledDetails.reachability = localFirst ? ComputerDetails.Reachability.LOCAL :
ComputerDetails.Reachability.REMOTE;
reachableAddr = localFirst ? details.localIp : details.remoteIp;
}
// Machine was unreachable both tries
if (polledDetails == null) {
if (reachableAddr == null) {
return null;
}
if (polledDetails.remoteIp.equals(reachableAddr)) {
polledDetails.reachability = ComputerDetails.Reachability.REMOTE;
}
else if (polledDetails.localIp.equals(reachableAddr)) {
polledDetails.reachability = ComputerDetails.Reachability.LOCAL;
}
else {
polledDetails.reachability = ComputerDetails.Reachability.UNKNOWN;
}
return new ReachabilityTuple(polledDetails, reachableAddr);
}
private boolean pollComputer(ComputerDetails details) throws InterruptedException {
ReachabilityTuple initialReachTuple = pollForReachability(details);
if (initialReachTuple == null) {
return false;
}
// If we got here, it's reachable
details.update(polledDetails);
return true;
}
private boolean doPollMachine(ComputerDetails details) {
if (details.reachability == ComputerDetails.Reachability.UNKNOWN ||
details.reachability == ComputerDetails.Reachability.OFFLINE) {
// Always try local first to avoid potential UDP issues when
// attempting to stream via the router's external IP address
// behind its NAT
return pollComputer(details, true);
if (initialReachTuple.computer.reachability == ComputerDetails.Reachability.UNKNOWN) {
// Neither IP address reported in the serverinfo response was the one we used.
// Poll again to see if we can contact this machine on either of its reported addresses.
ReachabilityTuple confirmationReachTuple = pollForReachability(initialReachTuple.computer);
if (confirmationReachTuple == null) {
// Neither of those seems to work, so we'll hold onto the address that did work
initialReachTuple.computer.localIp = initialReachTuple.reachableAddress;
initialReachTuple.computer.reachability = ComputerDetails.Reachability.LOCAL;
}
else {
// If we've already reached a machine via a particular IP address,
// always try that one first
return pollComputer(details, details.reachability == ComputerDetails.Reachability.LOCAL);
// We got it on one of the returned addresses; replace the original reach tuple
// with the new one
initialReachTuple = confirmationReachTuple;
}
}
// Save the old MAC address
String savedMacAddress = details.macAddress;
// If we got here, it's reachable
details.update(initialReachTuple.computer);
// If the new MAC address is empty, restore the old one (workaround for GFE bug)
if (details.macAddress.equals("00:00:00:00:00:00") && savedMacAddress != null) {
LimeLog.info("MAC address was empty; using existing value: "+savedMacAddress);
details.macAddress = savedMacAddress;
}
return true;
}
@Override
public void onCreate() {
// Bind to the discovery service
@ -491,8 +611,9 @@ public class ComputerManagerService extends Service {
public class ApplistPoller {
private Thread thread;
private ComputerDetails computer;
private Object pollEvent = new Object();
private final ComputerDetails computer;
private final Object pollEvent = new Object();
private boolean receivedAppList = false;
public ApplistPoller(ComputerDetails computer) {
this.computer = computer;
@ -507,7 +628,15 @@ public class ComputerManagerService extends Service {
private boolean waitPollingDelay() {
try {
synchronized (pollEvent) {
pollEvent.wait(POLLING_PERIOD_MS);
if (receivedAppList) {
// If we've already reported an app list successfully,
// wait the full polling period
pollEvent.wait(APPLIST_POLLING_PERIOD_MS);
}
else {
// If we've failed to get an app list so far, retry much earlier
pollEvent.wait(APPLIST_FAILED_POLLING_RETRY_MS);
}
}
} catch (InterruptedException e) {
return false;
@ -516,6 +645,18 @@ public class ComputerManagerService extends Service {
return thread != null && !thread.isInterrupted();
}
private PollingTuple getPollingTuple(ComputerDetails details) {
synchronized (pollingTuples) {
for (PollingTuple tuple : pollingTuples) {
if (details.uuid.equals(tuple.computer.uuid)) {
return tuple;
}
}
}
return null;
}
public void start() {
thread = new Thread() {
@Override
@ -546,18 +687,43 @@ public class ComputerManagerService extends Service {
NvHTTP http = new NvHTTP(selectedAddr, idManager.getUniqueId(),
null, PlatformBinding.getCryptoProvider(ComputerManagerService.this));
PollingTuple tuple = getPollingTuple(computer);
try {
// Query the app list from the server
String appList = http.getAppListRaw();
String appList;
if (tuple != null) {
// If we're polling this machine too, grab the network lock
// while doing the app list request to prevent other requests
// from being issued in the meantime.
synchronized (tuple.networkLock) {
appList = http.getAppListRaw();
}
}
else {
// No polling is happening now, so we just call it directly
appList = http.getAppListRaw();
}
List<NvApp> list = NvHTTP.getAppListByReader(new StringReader(appList));
if (appList != null && !appList.isEmpty() && !list.isEmpty()) {
// Open the cache file
FileOutputStream cacheOut = CacheHelper.openCacheFileForOutput(getCacheDir(), "applist", computer.uuid.toString());
OutputStream cacheOut = null;
try {
cacheOut = CacheHelper.openCacheFileForOutput(getCacheDir(), "applist", computer.uuid.toString());
CacheHelper.writeStringToOutputStream(cacheOut, appList);
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (cacheOut != null) {
cacheOut.close();
}
} catch (IOException ignored) {}
}
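// Side note (illustrative, not part of this change): with a minimum SDK of 19 or
// higher, the same close-safely pattern could be written with try-with-resources:
//   try (OutputStream out = CacheHelper.openCacheFileForOutput(
//           getCacheDir(), "applist", computer.uuid.toString())) {
//       CacheHelper.writeStringToOutputStream(out, appList);
//   } catch (IOException e) {
//       e.printStackTrace();
//   }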
// Update the computer
computer.rawAppList = appList;
receivedAppList = true;
// Notify that the app list has been updated
// and ensure that the thread is still active
@ -593,10 +759,22 @@ public class ComputerManagerService extends Service {
class PollingTuple {
public Thread thread;
public ComputerDetails computer;
public final ComputerDetails computer;
public final Object networkLock;
public PollingTuple(ComputerDetails computer, Thread thread) {
this.computer = computer;
this.thread = thread;
this.networkLock = new Object();
}
}
class ReachabilityTuple {
public final InetAddress reachableAddress;
public final ComputerDetails computer;
public ReachabilityTuple(ComputerDetails computer, InetAddress reachableAddress) {
this.computer = computer;
this.reachableAddress = reachableAddress;
}
}

View File

@ -70,7 +70,7 @@ public class DiscoveryService extends Service {
});
}
private DiscoveryBinder binder = new DiscoveryBinder();
private final DiscoveryBinder binder = new DiscoveryBinder();
@Override
public IBinder onBind(Intent intent) {

View File

@ -1,118 +1,69 @@
package com.limelight.grid;
import android.content.Context;
import android.graphics.Bitmap;
import android.app.Activity;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.koushikdutta.async.future.FutureCallback;
import com.koushikdutta.ion.ImageViewBitmapInfo;
import com.koushikdutta.ion.Ion;
import com.limelight.AppView;
import com.limelight.LimeLog;
import com.limelight.R;
import com.limelight.binding.PlatformBinding;
import com.limelight.grid.assets.CachedAppAssetLoader;
import com.limelight.grid.assets.DiskAssetLoader;
import com.limelight.grid.assets.MemoryAssetLoader;
import com.limelight.grid.assets.NetworkAssetLoader;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.LimelightCryptoProvider;
import com.limelight.utils.CacheHelper;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Future;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509KeyManager;
import javax.net.ssl.X509TrustManager;
import java.security.cert.X509Certificate;
@SuppressWarnings("unchecked")
public class AppGridAdapter extends GenericGridAdapter<AppView.AppObject> {
private static final int ART_WIDTH_PX = 300;
private static final int SMALL_WIDTH_DP = 100;
private static final int LARGE_WIDTH_DP = 150;
private ComputerDetails computer;
private String uniqueId;
private LimelightCryptoProvider cryptoProvider;
private SSLContext sslContext;
private final HashMap<ImageView, Future> pendingRequests = new HashMap<ImageView, Future>();
private final CachedAppAssetLoader loader;
public AppGridAdapter(Context context, boolean listMode, boolean small, ComputerDetails computer, String uniqueId) throws NoSuchAlgorithmException, KeyManagementException {
super(context, listMode ? R.layout.simple_row : (small ? R.layout.app_grid_item_small : R.layout.app_grid_item), R.drawable.image_loading);
public AppGridAdapter(Activity activity, boolean listMode, boolean small, ComputerDetails computer, String uniqueId) {
super(activity, listMode ? R.layout.simple_row : (small ? R.layout.app_grid_item_small : R.layout.app_grid_item), R.drawable.image_loading);
this.computer = computer;
this.uniqueId = uniqueId;
int dpi = activity.getResources().getDisplayMetrics().densityDpi;
int dp;
cryptoProvider = PlatformBinding.getCryptoProvider(context);
sslContext = SSLContext.getInstance("SSL");
sslContext.init(ourKeyman, trustAllCerts, new SecureRandom());
if (small) {
dp = SMALL_WIDTH_DP;
}
else {
dp = LARGE_WIDTH_DP;
}
TrustManager[] trustAllCerts = new TrustManager[] {
new X509TrustManager() {
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
double scalingDivisor = ART_WIDTH_PX / (dp * (dpi / 160.0));
if (scalingDivisor < 1.0) {
// We don't want to make them bigger before draw-time
scalingDivisor = 1.0;
}
public void checkClientTrusted(X509Certificate[] certs, String authType) {}
public void checkServerTrusted(X509Certificate[] certs, String authType) {}
}};
LimeLog.info("Art scaling divisor: " + scalingDivisor);
KeyManager[] ourKeyman = new KeyManager[] {
new X509KeyManager() {
public String chooseClientAlias(String[] keyTypes,
Principal[] issuers, Socket socket) {
return "Limelight-RSA";
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = (int) scalingDivisor;
this.loader = new CachedAppAssetLoader(computer, scalingDivisor,
new NetworkAssetLoader(context, uniqueId),
new MemoryAssetLoader(),
new DiskAssetLoader(context.getCacheDir()),
BitmapFactory.decodeResource(activity.getResources(),
R.drawable.image_loading, options));
}
public String chooseServerAlias(String keyType, Principal[] issuers,
Socket socket) {
return null;
public void cancelQueuedOperations() {
loader.cancelForegroundLoads();
loader.cancelBackgroundLoads();
loader.freeCacheMemory();
}
public X509Certificate[] getCertificateChain(String alias) {
return new X509Certificate[] {cryptoProvider.getClientCertificate()};
}
public String[] getClientAliases(String keyType, Principal[] issuers) {
return null;
}
public PrivateKey getPrivateKey(String alias) {
return cryptoProvider.getClientPrivateKey();
}
public String[] getServerAliases(String keyType, Principal[] issuers) {
return null;
}
}
};
// Ignore differences between given hostname and certificate hostname
HostnameVerifier hv = new HostnameVerifier() {
public boolean verify(String hostname, SSLSession session) { return true; }
};
private void sortList() {
Collections.sort(itemList, new Comparator<AppView.AppObject>() {
@Override
@ -122,62 +73,22 @@ public class AppGridAdapter extends GenericGridAdapter<AppView.AppObject> {
});
}
private InetAddress getCurrentAddress() {
if (computer.reachability == ComputerDetails.Reachability.LOCAL) {
return computer.localIp;
}
else {
return computer.remoteIp;
}
}
public void addApp(AppView.AppObject app) {
// Queue a request to fetch this bitmap into cache
loader.queueCacheLoad(app.app);
// Add the app to our sorted list
itemList.add(app);
sortList();
}
public void abortPendingRequests() {
HashMap<ImageView, Future> tempMap;
synchronized (pendingRequests) {
// Copy the pending requests under a lock
tempMap = new HashMap<ImageView, Future>(pendingRequests);
public void removeApp(AppView.AppObject app) {
itemList.remove(app);
}
for (Future f : tempMap.values()) {
if (!f.isCancelled() && !f.isDone()) {
f.cancel(true);
}
}
synchronized (pendingRequests) {
// Remove cancelled requests
for (ImageView v : tempMap.keySet()) {
pendingRequests.remove(v);
}
}
}
// TODO: Handle pruning of bitmap cache
private void populateBitmapCache(UUID uuid, int appId, Bitmap bitmap) {
try {
// PNG ignores quality setting
FileOutputStream out = CacheHelper.openCacheFileForOutput(context.getCacheDir(), "boxart", uuid.toString(), appId+".png");
bitmap.compress(Bitmap.CompressFormat.PNG, 0, out);
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public boolean populateImageView(final ImageView imgView, final AppView.AppObject obj) {
// Hide the image view while we're loading the image from disk cache
imgView.setVisibility(View.INVISIBLE);
// Check the on-disk cache
new ImageCacheRequest(imgView, obj.app.getAppId()).execute();
public boolean populateImageView(ImageView imgView, AppView.AppObject obj) {
// Let the cached asset loader handle it
loader.populateImageView(obj.app, imgView);
return true;
}
@ -201,84 +112,4 @@ public class AppGridAdapter extends GenericGridAdapter<AppView.AppObject> {
// No overlay
return false;
}
private class ImageCacheRequest extends AsyncTask<Void, Void, Bitmap> {
private ImageView view;
private int appId;
public ImageCacheRequest(ImageView view, int appId) {
this.view = view;
this.appId = appId;
}
@Override
protected Bitmap doInBackground(Void... v) {
InputStream in = null;
try {
in = CacheHelper.openCacheFileForInput(context.getCacheDir(), "boxart", computer.uuid.toString(), appId + ".png");
return BitmapFactory.decodeStream(in);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {}
}
}
return null;
}
@Override
protected void onPostExecute(Bitmap result) {
if (result != null) {
// Disk cache was read successfully
LimeLog.info("Image disk cache hit for ("+computer.uuid+", "+appId+")");
view.setImageBitmap(result);
view.setVisibility(View.VISIBLE);
}
else {
LimeLog.info("Image disk cache miss for ("+computer.uuid+", "+appId+")");
LimeLog.info("Requesting: "+"https://" + getCurrentAddress().getHostAddress() + ":47984/appasset?uniqueid=" + uniqueId + "&appid=" +
appId + "&AssetType=2&AssetIdx=0");
// Load the placeholder image
view.setImageResource(defaultImageRes);
view.setVisibility(View.VISIBLE);
// Set SSL contexts correctly to allow us to authenticate
Ion.getDefault(context).getHttpClient().getSSLSocketMiddleware().setTrustManagers(trustAllCerts);
Ion.getDefault(context).getHttpClient().getSSLSocketMiddleware().setSSLContext(sslContext);
// Kick off the deferred image load
synchronized (pendingRequests) {
Future<Bitmap> f = Ion.with(context)
.load("https://" + getCurrentAddress().getHostAddress() + ":47984/appasset?uniqueid=" + uniqueId + "&appid=" +
appId + "&AssetType=2&AssetIdx=0")
.asBitmap()
.setCallback(new FutureCallback<Bitmap>() {
@Override
public void onCompleted(Exception e, Bitmap result) {
synchronized (pendingRequests) {
pendingRequests.remove(view);
}
if (result != null) {
// Make the view visible now
view.setImageBitmap(result);
view.setVisibility(View.VISIBLE);
// Populate the disk cache if we got an image back
populateBitmapCache(computer.uuid, appId, result);
}
else {
// Leave the loading icon as is (probably should change this eventually...)
}
}
});
pendingRequests.put(view, f);
}
}
}
};
}


@ -13,11 +13,11 @@ import com.limelight.R;
import java.util.ArrayList;
public abstract class GenericGridAdapter<T> extends BaseAdapter {
protected Context context;
protected int defaultImageRes;
protected int layoutId;
protected ArrayList<T> itemList = new ArrayList<T>();
protected LayoutInflater inflater;
protected final Context context;
protected final int defaultImageRes;
protected final int layoutId;
protected final ArrayList<T> itemList = new ArrayList<T>();
protected final LayoutInflater inflater;
public GenericGridAdapter(Context context, int layoutId, int defaultImageRes) {
this.context = context;


@ -0,0 +1,337 @@
package com.limelight.grid.assets;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.widget.ImageView;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.NvApp;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
public class CachedAppAssetLoader {
private static final int MAX_CONCURRENT_DISK_LOADS = 3;
private static final int MAX_CONCURRENT_NETWORK_LOADS = 3;
private static final int MAX_CONCURRENT_CACHE_LOADS = 1;
private static final int MAX_PENDING_CACHE_LOADS = 100;
private static final int MAX_PENDING_NETWORK_LOADS = 40;
private static final int MAX_PENDING_DISK_LOADS = 40;
private final ThreadPoolExecutor cacheExecutor = new ThreadPoolExecutor(
MAX_CONCURRENT_CACHE_LOADS, MAX_CONCURRENT_CACHE_LOADS,
Long.MAX_VALUE, TimeUnit.DAYS,
new LinkedBlockingQueue<Runnable>(MAX_PENDING_CACHE_LOADS),
new ThreadPoolExecutor.DiscardOldestPolicy());
private final ThreadPoolExecutor foregroundExecutor = new ThreadPoolExecutor(
MAX_CONCURRENT_DISK_LOADS, MAX_CONCURRENT_DISK_LOADS,
Long.MAX_VALUE, TimeUnit.DAYS,
new LinkedBlockingQueue<Runnable>(MAX_PENDING_DISK_LOADS),
new ThreadPoolExecutor.DiscardOldestPolicy());
private final ThreadPoolExecutor networkExecutor = new ThreadPoolExecutor(
MAX_CONCURRENT_NETWORK_LOADS, MAX_CONCURRENT_NETWORK_LOADS,
Long.MAX_VALUE, TimeUnit.DAYS,
new LinkedBlockingQueue<Runnable>(MAX_PENDING_NETWORK_LOADS),
new ThreadPoolExecutor.DiscardOldestPolicy());
private final ComputerDetails computer;
private final double scalingDivider;
private final NetworkAssetLoader networkLoader;
private final MemoryAssetLoader memoryLoader;
private final DiskAssetLoader diskLoader;
private final Bitmap placeholderBitmap;
public CachedAppAssetLoader(ComputerDetails computer, double scalingDivider,
NetworkAssetLoader networkLoader, MemoryAssetLoader memoryLoader,
DiskAssetLoader diskLoader, Bitmap placeholderBitmap) {
this.computer = computer;
this.scalingDivider = scalingDivider;
this.networkLoader = networkLoader;
this.memoryLoader = memoryLoader;
this.diskLoader = diskLoader;
this.placeholderBitmap = placeholderBitmap;
}
public void cancelBackgroundLoads() {
Runnable r;
while ((r = cacheExecutor.getQueue().poll()) != null) {
cacheExecutor.remove(r);
}
}
public void cancelForegroundLoads() {
Runnable r;
while ((r = foregroundExecutor.getQueue().poll()) != null) {
foregroundExecutor.remove(r);
}
while ((r = networkExecutor.getQueue().poll()) != null) {
networkExecutor.remove(r);
}
}
public void freeCacheMemory() {
memoryLoader.clearCache();
}
private Bitmap doNetworkAssetLoad(LoaderTuple tuple, LoaderTask task) {
// Try 3 times
for (int i = 0; i < 3; i++) {
// Check again whether we've been cancelled or the image view is gone
if (task != null && (task.isCancelled() || task.imageViewRef.get() == null)) {
return null;
}
InputStream in = networkLoader.getBitmapStream(tuple);
if (in != null) {
// Write the stream straight to disk
diskLoader.populateCacheWithStream(tuple, in);
// Close the network input stream
try {
in.close();
} catch (IOException ignored) {}
// If there's a task associated with this load, we should return the bitmap
if (task != null) {
// If the cached bitmap is valid, return it. Otherwise, we'll try the load again
Bitmap bmp = diskLoader.loadBitmapFromCache(tuple, (int) scalingDivider);
if (bmp != null) {
return bmp;
}
}
else {
// Otherwise it's a background load and we return nothing
return null;
}
}
// Wait 1 second with a bit of fuzz
try {
Thread.sleep((int) (1000 + (Math.random() * 500)));
} catch (InterruptedException e) {
return null;
}
}
return null;
}
private class LoaderTask extends AsyncTask<LoaderTuple, Void, Bitmap> {
private final WeakReference<ImageView> imageViewRef;
private final boolean diskOnly;
private LoaderTuple tuple;
public LoaderTask(ImageView imageView, boolean diskOnly) {
this.imageViewRef = new WeakReference<ImageView>(imageView);
this.diskOnly = diskOnly;
}
@Override
protected Bitmap doInBackground(LoaderTuple... params) {
tuple = params[0];
// Check whether it has been cancelled or the image view is gone
if (isCancelled() || imageViewRef.get() == null) {
return null;
}
Bitmap bmp = diskLoader.loadBitmapFromCache(tuple, (int) scalingDivider);
if (bmp == null) {
if (!diskOnly) {
// Try to load the asset from the network
bmp = doNetworkAssetLoad(tuple, this);
} else {
// Report progress to display the placeholder and spin
// off the network-capable task
publishProgress();
}
}
// Cache the bitmap
if (bmp != null) {
memoryLoader.populateCache(tuple, bmp);
}
return bmp;
}
@Override
protected void onProgressUpdate(Void... nothing) {
// Do nothing if cancelled
if (isCancelled()) {
return;
}
// If the current loader task for this view isn't us, do nothing
final ImageView imageView = imageViewRef.get();
if (getLoaderTask(imageView) == this) {
// Set off another loader task on the network executor
LoaderTask task = new LoaderTask(imageView, false);
AsyncDrawable asyncDrawable = new AsyncDrawable(imageView.getResources(), placeholderBitmap, task);
imageView.setAlpha(1.0f);
imageView.setImageDrawable(asyncDrawable);
task.executeOnExecutor(networkExecutor, tuple);
}
}
@Override
protected void onPostExecute(Bitmap bitmap) {
// Do nothing if cancelled
if (isCancelled()) {
return;
}
final ImageView imageView = imageViewRef.get();
if (getLoaderTask(imageView) == this) {
// Set the bitmap
if (bitmap != null) {
imageView.setImageBitmap(bitmap);
}
// Show the view
imageView.setAlpha(1.0f);
}
}
}
static class AsyncDrawable extends BitmapDrawable {
private final WeakReference<LoaderTask> loaderTaskReference;
public AsyncDrawable(Resources res, Bitmap bitmap,
LoaderTask loaderTask) {
super(res, bitmap);
loaderTaskReference = new WeakReference<LoaderTask>(loaderTask);
}
public LoaderTask getLoaderTask() {
return loaderTaskReference.get();
}
}
private static LoaderTask getLoaderTask(ImageView imageView) {
if (imageView == null) {
return null;
}
final Drawable drawable = imageView.getDrawable();
// If our drawable is in play, get the loader task
if (drawable instanceof AsyncDrawable) {
final AsyncDrawable asyncDrawable = (AsyncDrawable) drawable;
return asyncDrawable.getLoaderTask();
}
return null;
}
private static boolean cancelPendingLoad(LoaderTuple tuple, ImageView imageView) {
final LoaderTask loaderTask = getLoaderTask(imageView);
// Check if any task was pending for this image view
if (loaderTask != null && !loaderTask.isCancelled()) {
final LoaderTuple taskTuple = loaderTask.tuple;
// Cancel the task if it's not already loading the same data
if (taskTuple == null || !taskTuple.equals(tuple)) {
loaderTask.cancel(true);
} else {
// It's already loading what we want
return false;
}
}
// Allow the load to proceed
return true;
}
public void queueCacheLoad(NvApp app) {
final LoaderTuple tuple = new LoaderTuple(computer, app);
if (memoryLoader.loadBitmapFromCache(tuple) != null) {
// It's in memory which means it must also be on disk
return;
}
// Queue a fetch in the cache executor
cacheExecutor.execute(new Runnable() {
@Override
public void run() {
// Check if the image is cached on disk
if (diskLoader.checkCacheExists(tuple)) {
return;
}
// Try to load the asset from the network and cache result on disk
doNetworkAssetLoad(tuple, null);
}
});
}
public void populateImageView(NvApp app, ImageView view) {
LoaderTuple tuple = new LoaderTuple(computer, app);
// If there's already a task in progress for this view,
// cancel it. If the task is already loading the same image,
// we return and let that load finish.
if (!cancelPendingLoad(tuple, view)) {
return;
}
// First, try the memory cache in the current context
Bitmap bmp = memoryLoader.loadBitmapFromCache(tuple);
if (bmp != null) {
// Show the bitmap immediately
view.setAlpha(1.0f);
view.setImageBitmap(bmp);
return;
}
// If it's not in memory, create an async task to load it. This task will be attached
// via AsyncDrawable to this view.
final LoaderTask task = new LoaderTask(view, true);
final AsyncDrawable asyncDrawable = new AsyncDrawable(view.getResources(), placeholderBitmap, task);
view.setAlpha(0.0f);
view.setImageDrawable(asyncDrawable);
// Run the task on our foreground executor
task.executeOnExecutor(foregroundExecutor, tuple);
}
public class LoaderTuple {
public final ComputerDetails computer;
public final NvApp app;
public LoaderTuple(ComputerDetails computer, NvApp app) {
this.computer = computer;
this.app = app;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof LoaderTuple)) {
return false;
}
LoaderTuple other = (LoaderTuple) o;
return computer.uuid.equals(other.computer.uuid) && app.getAppId() == other.app.getAppId();
}
@Override
public String toString() {
return "("+computer.uuid+", "+app.getAppId()+")";
}
}
}
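
CachedAppAssetLoader composes three tiers: the in-memory LruCache is consulted first, then the on-disk PNG cache, and only on a miss does a task fall through to the network loader, writing the result back to disk and memory on the way up. Below is a minimal usage sketch, not part of the commit: the BoxArtExample class, the fixed 2.0 scaling divisor, and the placeholder bitmap are illustrative assumptions, while the CachedAppAssetLoader calls match the signatures shown above.

```java
import android.content.Context;
import android.graphics.Bitmap;
import android.widget.ImageView;

import com.limelight.grid.assets.CachedAppAssetLoader;
import com.limelight.grid.assets.DiskAssetLoader;
import com.limelight.grid.assets.MemoryAssetLoader;
import com.limelight.grid.assets.NetworkAssetLoader;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.NvApp;

// Illustrative helper, not part of the commit.
class BoxArtExample {
    static CachedAppAssetLoader buildLoader(Context context, ComputerDetails computer,
                                            String uniqueId, Bitmap placeholder) {
        // CachedAppAssetLoader decides the lookup order itself:
        // memory first, then disk, then network.
        return new CachedAppAssetLoader(computer,
                2.0, // scaling divisor; AppGridAdapter derives this from screen DPI
                new NetworkAssetLoader(context, uniqueId),
                new MemoryAssetLoader(),
                new DiskAssetLoader(context.getCacheDir()),
                placeholder);
    }

    static void showBoxArt(CachedAppAssetLoader loader, NvApp app, ImageView view) {
        // Memory hits are applied synchronously; otherwise a LoaderTask checks
        // the disk cache and, on a miss, hands off to the network executor.
        loader.populateImageView(app, view);
    }

    static void prefetch(CachedAppAssetLoader loader, NvApp app) {
        // Warms the disk cache in the background without touching any view.
        loader.queueCacheLoad(app);
    }
}
```

AppGridAdapter wires the same pieces together in its constructor, deriving the scaling divisor from the screen DPI instead of hard-coding it.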


@ -0,0 +1,83 @@
package com.limelight.grid.assets;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import com.limelight.LimeLog;
import com.limelight.utils.CacheHelper;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class DiskAssetLoader {
// 5 MB
private final long MAX_ASSET_SIZE = 5 * 1024 * 1024;
private final File cacheDir;
public DiskAssetLoader(File cacheDir) {
this.cacheDir = cacheDir;
}
public boolean checkCacheExists(CachedAppAssetLoader.LoaderTuple tuple) {
return CacheHelper.cacheFileExists(cacheDir, "boxart", tuple.computer.uuid.toString(), tuple.app.getAppId() + ".png");
}
public Bitmap loadBitmapFromCache(CachedAppAssetLoader.LoaderTuple tuple, int sampleSize) {
InputStream in = null;
Bitmap bmp = null;
try {
// Make sure the cached asset doesn't exceed the maximum size
if (CacheHelper.getFileSize(cacheDir, "boxart", tuple.computer.uuid.toString(), tuple.app.getAppId() + ".png") > MAX_ASSET_SIZE) {
LimeLog.warning("Removing cached tuple exceeding size threshold: "+tuple);
CacheHelper.deleteCacheFile(cacheDir, "boxart", tuple.computer.uuid.toString(), tuple.app.getAppId() + ".png");
return null;
}
in = CacheHelper.openCacheFileForInput(cacheDir, "boxart", tuple.computer.uuid.toString(), tuple.app.getAppId() + ".png");
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = sampleSize;
options.inPreferredConfig = Bitmap.Config.RGB_565;
bmp = BitmapFactory.decodeStream(in, null, options);
} catch (IOException ignored) {
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ignored) {}
}
}
if (bmp != null) {
LimeLog.info("Disk cache hit for tuple: "+tuple);
}
return bmp;
}
public void populateCacheWithStream(CachedAppAssetLoader.LoaderTuple tuple, InputStream input) {
OutputStream out = null;
boolean success = false;
try {
out = CacheHelper.openCacheFileForOutput(cacheDir, "boxart", tuple.computer.uuid.toString(), tuple.app.getAppId() + ".png");
CacheHelper.writeInputStreamToOutputStream(input, out, MAX_ASSET_SIZE);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
if (out != null) {
try {
out.close();
} catch (IOException ignored) {}
}
if (!success) {
LimeLog.warning("Unable to populate cache with tuple: "+tuple);
CacheHelper.deleteCacheFile(cacheDir, "boxart", tuple.computer.uuid.toString(), tuple.app.getAppId() + ".png");
}
}
}
}


@ -0,0 +1,37 @@
package com.limelight.grid.assets;
import android.graphics.Bitmap;
import android.util.LruCache;
import com.limelight.LimeLog;
public class MemoryAssetLoader {
private static final int maxMemory = (int) (Runtime.getRuntime().maxMemory() / 1024);
private static final LruCache<String, Bitmap> memoryCache = new LruCache<String, Bitmap>(maxMemory / 16) {
@Override
protected int sizeOf(String key, Bitmap bitmap) {
// Sizeof returns kilobytes
return bitmap.getByteCount() / 1024;
}
};
private static String constructKey(CachedAppAssetLoader.LoaderTuple tuple) {
return tuple.computer.uuid.toString()+"-"+tuple.app.getAppId();
}
public Bitmap loadBitmapFromCache(CachedAppAssetLoader.LoaderTuple tuple) {
Bitmap bmp = memoryCache.get(constructKey(tuple));
if (bmp != null) {
LimeLog.info("Memory cache hit for tuple: "+tuple);
}
return bmp;
}
public void populateCache(CachedAppAssetLoader.LoaderTuple tuple, Bitmap bitmap) {
memoryCache.put(constructKey(tuple), bitmap);
}
public void clearCache() {
memoryCache.evictAll();
}
}


@ -0,0 +1,49 @@
package com.limelight.grid.assets;
import android.content.Context;
import com.limelight.LimeLog;
import com.limelight.binding.PlatformBinding;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.NvHTTP;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
public class NetworkAssetLoader {
private final Context context;
private final String uniqueId;
public NetworkAssetLoader(Context context, String uniqueId) {
this.context = context;
this.uniqueId = uniqueId;
}
public InputStream getBitmapStream(CachedAppAssetLoader.LoaderTuple tuple) {
NvHTTP http = new NvHTTP(getCurrentAddress(tuple.computer), uniqueId, null, PlatformBinding.getCryptoProvider(context));
InputStream in = null;
try {
in = http.getBoxArt(tuple.app);
} catch (IOException ignored) {}
if (in != null) {
LimeLog.info("Network asset load complete: " + tuple);
}
else {
LimeLog.info("Network asset load failed: " + tuple);
}
return in;
}
private static InetAddress getCurrentAddress(ComputerDetails computer) {
if (computer.reachability == ComputerDetails.Reachability.LOCAL) {
return computer.localIp;
}
else {
return computer.remoteIp;
}
}
}


@ -1,44 +0,0 @@
package com.limelight.nvstream.av.video.cpu;
public class AvcDecoder {
static {
// FFMPEG dependencies
System.loadLibrary("avutil-52");
System.loadLibrary("swresample-0");
System.loadLibrary("swscale-2");
System.loadLibrary("avcodec-55");
System.loadLibrary("avformat-55");
System.loadLibrary("nv_avc_dec");
}
/** Disables the deblocking filter at the cost of image quality */
public static final int DISABLE_LOOP_FILTER = 0x1;
/** Uses the low latency decode flag (disables multithreading) */
public static final int LOW_LATENCY_DECODE = 0x2;
/** Threads process each slice, rather than each frame */
public static final int SLICE_THREADING = 0x4;
/** Uses nonstandard speedup tricks */
public static final int FAST_DECODE = 0x8;
/** Uses bilinear filtering instead of bicubic */
public static final int BILINEAR_FILTERING = 0x10;
/** Uses a faster bilinear filtering with lower image quality */
public static final int FAST_BILINEAR_FILTERING = 0x20;
/** Disables color conversion (output is NV21) */
public static final int NO_COLOR_CONVERSION = 0x40;
public static native int init(int width, int height, int perflvl, int threadcount);
public static native void destroy();
// Rendering API when NO_COLOR_CONVERSION == 0
public static native boolean setRenderTarget(Object androidSurface);
public static native boolean getRgbFrameInt(int[] rgbFrame, int bufferSize);
public static native boolean getRgbFrame(byte[] rgbFrame, int bufferSize);
public static native boolean redraw();
// Rendering API when NO_COLOR_CONVERSION == 1
public static native boolean getRawFrame(byte[] yuvFrame, int bufferSize);
public static native int getInputPaddingSize();
public static native int decode(byte[] indata, int inoff, int inlen);
}


@ -14,6 +14,7 @@ import com.limelight.utils.UiHelper;
import android.app.Activity;
import android.app.Service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.res.Configuration;
@ -21,15 +22,16 @@ import android.os.Bundle;
import android.os.IBinder;
import android.view.KeyEvent;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.TextView;
import android.widget.Toast;
public class AddComputerManually extends Activity {
private TextView hostText;
private ComputerManagerService.ComputerManagerBinder managerBinder;
private LinkedBlockingQueue<String> computersToAdd = new LinkedBlockingQueue<String>();
private final LinkedBlockingQueue<String> computersToAdd = new LinkedBlockingQueue<String>();
private Thread addThread;
private ServiceConnection serviceConnection = new ServiceConnection() {
private final ServiceConnection serviceConnection = new ServiceConnection() {
public void onServiceConnected(ComponentName className, final IBinder binder) {
managerBinder = ((ComputerManagerService.ComputerManagerBinder)binder);
startAddThread();
@ -161,6 +163,12 @@ public class AddComputerManually extends Activity {
computersToAdd.add(hostText.getText().toString().trim());
}
else if (actionId == EditorInfo.IME_ACTION_PREVIOUS) {
// This is how the Fire TV dismisses the keyboard
InputMethodManager imm = (InputMethodManager)getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(hostText.getWindowToken(), 0);
return false;
}
return false;
}


@ -3,11 +3,11 @@ package com.limelight.preferences;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.os.Build;
import android.preference.PreferenceManager;
public class PreferenceConfiguration {
static final String RES_FPS_PREF_STRING = "list_resolution_fps";
private static final String DECODER_PREF_STRING = "list_decoders";
static final String BITRATE_PREF_STRING = "seekbar_bitrate";
private static final String STRETCH_PREF_STRING = "checkbox_stretch_video";
private static final String SOPS_PREF_STRING = "checkbox_enable_sops";
@ -18,6 +18,9 @@ public class PreferenceConfiguration {
private static final String LIST_MODE_PREF_STRING = "checkbox_list_mode";
private static final String SMALL_ICONS_PREF_STRING = "checkbox_small_icon_mode";
private static final String MULTI_CONTROLLER_PREF_STRING = "checkbox_multi_controller";
private static final String ENABLE_51_SURROUND_PREF_STRING = "checkbox_51_surround";
private static final String USB_DRIVER_PREF_STRING = "checkbox_usb_driver";
private static final String VIDEO_FORMAT_PREF_STRING = "video_format";
private static final String VIRTUAL_CONTROLLER_ENABLE = "virtual_controller_checkbox_enable";
private static final Boolean VIRTUAL_CONTROLLER_ENABLE_DEFAULT = true;
@ -26,9 +29,10 @@ public class PreferenceConfiguration {
private static final int BITRATE_DEFAULT_720_60 = 10;
private static final int BITRATE_DEFAULT_1080_30 = 10;
private static final int BITRATE_DEFAULT_1080_60 = 20;
private static final int BITRATE_DEFAULT_4K_30 = 40;
private static final int BITRATE_DEFAULT_4K_60 = 80;
private static final String DEFAULT_RES_FPS = "720p60";
private static final String DEFAULT_DECODER = "auto";
private static final int DEFAULT_BITRATE = BITRATE_DEFAULT_720_60;
private static final boolean DEFAULT_STRETCH = false;
private static final boolean DEFAULT_SOPS = true;
@ -38,18 +42,21 @@ public class PreferenceConfiguration {
public static final String DEFAULT_LANGUAGE = "default";
private static final boolean DEFAULT_LIST_MODE = false;
private static final boolean DEFAULT_MULTI_CONTROLLER = true;
private static final boolean DEFAULT_ENABLE_51_SURROUND = false;
private static final boolean DEFAULT_USB_DRIVER = true;
private static final String DEFAULT_VIDEO_FORMAT = "auto";
public static final int FORCE_HARDWARE_DECODER = -1;
public static final int AUTOSELECT_DECODER = 0;
public static final int FORCE_SOFTWARE_DECODER = 1;
public static final int FORCE_H265_ON = -1;
public static final int AUTOSELECT_H265 = 0;
public static final int FORCE_H265_OFF = 1;
public int width, height, fps;
public int bitrate;
public int decoder;
public int videoFormat;
public int deadzonePercentage;
public boolean stretchVideo, enableSops, playHostAudio, disableWarnings;
public String language;
public boolean listMode, smallIconMode, multiController;
public boolean listMode, smallIconMode, multiController, enable51Surround, usbDriver;
public boolean virtualController_enable;
@ -66,6 +73,12 @@ public class PreferenceConfiguration {
else if (resFpsString.equals("1080p60")) {
return BITRATE_DEFAULT_1080_60;
}
else if (resFpsString.equals("4K30")) {
return BITRATE_DEFAULT_4K_30;
}
else if (resFpsString.equals("4K60")) {
return BITRATE_DEFAULT_4K_60;
}
else {
// Should never get here
return DEFAULT_BITRATE;
@ -74,53 +87,45 @@ public class PreferenceConfiguration {
public static boolean getDefaultSmallMode(Context context) {
PackageManager manager = context.getPackageManager();
if (manager != null && manager.hasSystemFeature(PackageManager.FEATURE_TELEVISION)) {
if (manager != null) {
// TVs shouldn't use small mode by default
if (manager.hasSystemFeature(PackageManager.FEATURE_TELEVISION)) {
return false;
}
// API 21 uses LEANBACK instead of TELEVISION
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP_MR1) {
if (manager.hasSystemFeature(PackageManager.FEATURE_LEANBACK)) {
return false;
}
}
}
// Use small mode on anything smaller than a 7" tablet
return context.getResources().getConfiguration().smallestScreenWidthDp < 600;
}
public static int getDefaultBitrate(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
String str = prefs.getString(RES_FPS_PREF_STRING, DEFAULT_RES_FPS);
if (str.equals("720p30")) {
return BITRATE_DEFAULT_720_30;
}
else if (str.equals("720p60")) {
return BITRATE_DEFAULT_720_60;
}
else if (str.equals("1080p30")) {
return BITRATE_DEFAULT_1080_30;
}
else if (str.equals("1080p60")) {
return BITRATE_DEFAULT_1080_60;
}
else {
// Should never get here
return DEFAULT_BITRATE;
}
return getDefaultBitrate(prefs.getString(RES_FPS_PREF_STRING, DEFAULT_RES_FPS));
}
private static int getDecoderValue(Context context) {
private static int getVideoFormatValue(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
String str = prefs.getString(DECODER_PREF_STRING, DEFAULT_DECODER);
String str = prefs.getString(VIDEO_FORMAT_PREF_STRING, DEFAULT_VIDEO_FORMAT);
if (str.equals("auto")) {
return AUTOSELECT_DECODER;
return AUTOSELECT_H265;
}
else if (str.equals("software")) {
return FORCE_SOFTWARE_DECODER;
else if (str.equals("forceh265")) {
return FORCE_H265_ON;
}
else if (str.equals("hardware")) {
return FORCE_HARDWARE_DECODER;
else if (str.equals("neverh265")) {
return FORCE_H265_OFF;
}
else {
// Should never get here
return AUTOSELECT_DECODER;
return AUTOSELECT_H265;
}
}
@ -150,6 +155,16 @@ public class PreferenceConfiguration {
config.height = 1080;
config.fps = 60;
}
else if (str.equals("4K30")) {
config.width = 3840;
config.height = 2160;
config.fps = 30;
}
else if (str.equals("4K60")) {
config.width = 3840;
config.height = 2160;
config.fps = 60;
}
else {
// Should never get here
config.width = 1280;
@ -157,7 +172,7 @@ public class PreferenceConfiguration {
config.fps = 60;
}
config.decoder = getDecoderValue(context);
config.videoFormat = getVideoFormatValue(context);
config.deadzonePercentage = prefs.getInt(DEADZONE_PREF_STRING, DEFAULT_DEADZONE);
@ -171,6 +186,8 @@ public class PreferenceConfiguration {
config.listMode = prefs.getBoolean(LIST_MODE_PREF_STRING, DEFAULT_LIST_MODE);
config.smallIconMode = prefs.getBoolean(SMALL_ICONS_PREF_STRING, getDefaultSmallMode(context));
config.multiController = prefs.getBoolean(MULTI_CONTROLLER_PREF_STRING, DEFAULT_MULTI_CONTROLLER);
config.enable51Surround = prefs.getBoolean(ENABLE_51_SURROUND_PREF_STRING, DEFAULT_ENABLE_51_SURROUND);
config.usbDriver = prefs.getBoolean(USB_DRIVER_PREF_STRING, DEFAULT_USB_DRIVER);
config.virtualController_enable = prefs.getBoolean(VIRTUAL_CONTROLLER_ENABLE, VIRTUAL_CONTROLLER_ENABLE_DEFAULT);
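
This hunk swaps the old hardware/software decoder preference for an H.265 video-format preference and adds 4K resolution, 5.1 surround, and USB driver options. The sketch below is a hedged illustration of how a caller might act on the new videoFormat field; VideoFormatExample and the decoderSupportsHevc flag are hypothetical, while the PreferenceConfiguration constants and readPreferences call come from the code above.

```java
import android.content.Context;

import com.limelight.preferences.PreferenceConfiguration;

// Hypothetical consumer, not part of the commit.
class VideoFormatExample {
    static boolean shouldTryH265(Context context, boolean decoderSupportsHevc) {
        PreferenceConfiguration prefs = PreferenceConfiguration.readPreferences(context);
        switch (prefs.videoFormat) {
            case PreferenceConfiguration.FORCE_H265_ON:
                return true;                // user explicitly requested HEVC
            case PreferenceConfiguration.FORCE_H265_OFF:
                return false;               // user explicitly disabled HEVC
            case PreferenceConfiguration.AUTOSELECT_H265:
            default:
                return decoderSupportsHevc; // let capability detection decide
        }
    }
}
```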


@ -20,10 +20,14 @@ public class SeekBarPreference extends DialogPreference
private SeekBar seekBar;
private TextView valueText;
private Context context;
private final Context context;
private String dialogMessage, suffix;
private int defaultValue, maxValue, minValue, currentValue;
private final String dialogMessage;
private final String suffix;
private final int defaultValue;
private final int maxValue;
private final int minValue;
private int currentValue;
public SeekBarPreference(Context context, AttributeSet attrs) {
super(context, attrs);
@ -127,13 +131,6 @@ public class SeekBarPreference extends DialogPreference
}
}
public void setMax(int max) {
this.maxValue = max;
}
public int getMax() {
return this.maxValue;
}
public void setProgress(int progress) {
this.currentValue = progress;
if (seekBar != null) {


@ -18,14 +18,17 @@ import com.limelight.utils.UiHelper;
import java.util.Locale;
public class StreamSettings extends Activity {
private PreferenceConfiguration previousPrefs;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
String locale = PreferenceConfiguration.readPreferences(this).language;
if (!locale.equals(PreferenceConfiguration.DEFAULT_LANGUAGE)) {
previousPrefs = PreferenceConfiguration.readPreferences(this);
if (!previousPrefs.language.equals(PreferenceConfiguration.DEFAULT_LANGUAGE)) {
Configuration config = new Configuration(getResources().getConfiguration());
config.locale = new Locale(locale);
config.locale = new Locale(previousPrefs.language);
getResources().updateConfiguration(config, getResources().getDisplayMetrics());
}
@ -41,11 +44,17 @@ public class StreamSettings extends Activity {
public void onBackPressed() {
finish();
// Check for changes that require a UI reload to take effect
PreferenceConfiguration newPrefs = PreferenceConfiguration.readPreferences(this);
if (newPrefs.listMode != previousPrefs.listMode ||
newPrefs.smallIconMode != previousPrefs.smallIconMode ||
!newPrefs.language.equals(previousPrefs.language)) {
// Restart the PC view to apply UI changes
Intent intent = new Intent(this, PcView.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent, null);
}
}
public static class SettingsFragment extends PreferenceFragment {
@Override


@ -1,5 +1,7 @@
package com.limelight.utils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
@ -9,8 +11,6 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.util.Scanner;
public class CacheHelper {
private static File openPath(boolean createPath, File root, String... path) {
@ -30,12 +30,37 @@ public class CacheHelper {
return f;
}
public static FileInputStream openCacheFileForInput(File root, String... path) throws FileNotFoundException {
return new FileInputStream(openPath(false, root, path));
public static long getFileSize(File root, String... path) {
return openPath(false, root, path).length();
}
public static FileOutputStream openCacheFileForOutput(File root, String... path) throws FileNotFoundException {
return new FileOutputStream(openPath(true, root, path));
public static boolean deleteCacheFile(File root, String... path) {
return openPath(false, root, path).delete();
}
public static boolean cacheFileExists(File root, String... path) {
return openPath(false, root, path).exists();
}
public static InputStream openCacheFileForInput(File root, String... path) throws FileNotFoundException {
return new BufferedInputStream(new FileInputStream(openPath(false, root, path)));
}
public static OutputStream openCacheFileForOutput(File root, String... path) throws FileNotFoundException {
return new BufferedOutputStream(new FileOutputStream(openPath(true, root, path)));
}
public static void writeInputStreamToOutputStream(InputStream in, OutputStream out, long maxLength) throws IOException {
byte[] buf = new byte[4096];
int bytesRead;
while ((bytesRead = in.read(buf)) != -1) {
maxLength -= bytesRead;
if (maxLength <= 0) {
throw new IOException("Stream exceeded max size");
}
out.write(buf, 0, bytesRead);
}
}
public static String readInputStreamToString(InputStream in) throws IOException {
@ -48,6 +73,10 @@ public class CacheHelper {
sb.append(buf, 0, bytesRead);
}
try {
in.close();
} catch (IOException ignored) {}
return sb.toString();
}
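
The new CacheHelper methods give DiskAssetLoader buffered streams, existence and size checks, and a bounded stream copy. A small illustrative example of persisting a downloaded asset under the same 5 MB cap follows; CacheWriteExample and its parameters are placeholders, while the CacheHelper calls are exactly the ones added above.

```java
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import com.limelight.utils.CacheHelper;

// Illustrative example, not part of the commit.
class CacheWriteExample {
    // Same cap DiskAssetLoader enforces when reading the cache back.
    private static final long MAX_ASSET_SIZE = 5 * 1024 * 1024;

    static boolean writeBoxArt(File cacheDir, String uuid, int appId, InputStream in) {
        OutputStream out = null;
        try {
            out = CacheHelper.openCacheFileForOutput(cacheDir, "boxart", uuid, appId + ".png");
            // Throws IOException if the stream exceeds MAX_ASSET_SIZE, so a
            // runaway download can never fill the cache directory.
            CacheHelper.writeInputStreamToOutputStream(in, out, MAX_ASSET_SIZE);
            return true;
        } catch (IOException e) {
            // Drop any partially written file so later reads don't see garbage.
            CacheHelper.deleteCacheFile(cacheDir, "boxart", uuid, appId + ".png");
            return false;
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {}
            }
        }
    }
}
```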


@ -7,15 +7,16 @@ import android.app.AlertDialog;
import android.content.DialogInterface;
public class Dialog implements Runnable {
private String title, message;
private Activity activity;
private boolean endAfterDismiss;
private final String title;
private final String message;
private final Activity activity;
private final boolean endAfterDismiss;
private AlertDialog alert;
private static final ArrayList<Dialog> rundownDialogs = new ArrayList<Dialog>();
public Dialog(Activity activity, String title, String message, boolean endAfterDismiss)
private Dialog(Activity activity, String title, String message, boolean endAfterDismiss)
{
this.activity = activity;
this.title = title;


@ -0,0 +1,90 @@
package com.limelight.utils;
import android.app.Activity;
import android.content.Intent;
import android.widget.Toast;
import com.limelight.Game;
import com.limelight.R;
import com.limelight.binding.PlatformBinding;
import com.limelight.computers.ComputerManagerService;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.GfeHttpResponseException;
import com.limelight.nvstream.http.NvApp;
import com.limelight.nvstream.http.NvHTTP;
import java.io.FileNotFoundException;
import java.net.InetAddress;
import java.net.UnknownHostException;
public class ServerHelper {
public static InetAddress getCurrentAddressFromComputer(ComputerDetails computer) {
return computer.reachability == ComputerDetails.Reachability.LOCAL ?
computer.localIp : computer.remoteIp;
}
public static void doStart(Activity parent, NvApp app, ComputerDetails computer,
ComputerManagerService.ComputerManagerBinder managerBinder) {
Intent intent = new Intent(parent, Game.class);
intent.putExtra(Game.EXTRA_HOST,
computer.reachability == ComputerDetails.Reachability.LOCAL ?
computer.localIp.getHostAddress() : computer.remoteIp.getHostAddress());
intent.putExtra(Game.EXTRA_APP_NAME, app.getAppName());
intent.putExtra(Game.EXTRA_APP_ID, app.getAppId());
intent.putExtra(Game.EXTRA_UNIQUEID, managerBinder.getUniqueId());
intent.putExtra(Game.EXTRA_STREAMING_REMOTE,
computer.reachability != ComputerDetails.Reachability.LOCAL);
parent.startActivity(intent);
}
public static void doQuit(final Activity parent,
final InetAddress address,
final NvApp app,
final ComputerManagerService.ComputerManagerBinder managerBinder,
final Runnable onComplete) {
Toast.makeText(parent, parent.getResources().getString(R.string.applist_quit_app) + " " + app.getAppName() + "...", Toast.LENGTH_SHORT).show();
new Thread(new Runnable() {
@Override
public void run() {
NvHTTP httpConn;
String message;
try {
httpConn = new NvHTTP(address,
managerBinder.getUniqueId(), null, PlatformBinding.getCryptoProvider(parent));
if (httpConn.quitApp()) {
message = parent.getResources().getString(R.string.applist_quit_success) + " " + app.getAppName();
} else {
message = parent.getResources().getString(R.string.applist_quit_fail) + " " + app.getAppName();
}
} catch (GfeHttpResponseException e) {
if (e.getErrorCode() == 599) {
message = "This session wasn't started by this device," +
" so it cannot be quit. End streaming on the original " +
"device or the PC itself. (Error code: "+e.getErrorCode()+")";
}
else {
message = e.getMessage();
}
} catch (UnknownHostException e) {
message = parent.getResources().getString(R.string.error_unknown_host);
} catch (FileNotFoundException e) {
message = parent.getResources().getString(R.string.error_404);
} catch (Exception e) {
message = e.getMessage();
} finally {
if (onComplete != null) {
onComplete.run();
}
}
final String toastMessage = message;
parent.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(parent, toastMessage, Toast.LENGTH_LONG).show();
}
});
}
}).start();
}
}


@ -9,14 +9,15 @@ import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
public class SpinnerDialog implements Runnable,OnCancelListener {
private String title, message;
private Activity activity;
private final String title;
private final String message;
private final Activity activity;
private ProgressDialog progress;
private boolean finish;
private final boolean finish;
private static final ArrayList<SpinnerDialog> rundownDialogs = new ArrayList<SpinnerDialog>();
public SpinnerDialog(Activity activity, String title, String message, boolean finish)
private SpinnerDialog(Activity activity, String title, String message, boolean finish)
{
this.activity = activity;
this.title = title;


@ -1,11 +1,15 @@
package com.limelight.utils;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.UiModeManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.view.View;
import com.limelight.R;
public class UiHelper {
// Values from https://developer.android.com/training/tv/start/layouts.html
@ -28,4 +32,31 @@ public class UiHelper {
horizontalPaddingPixels, verticalPaddingPixels);
}
}
public static void displayQuitConfirmationDialog(Activity parent, final Runnable onYes, final Runnable onNo) {
DialogInterface.OnClickListener dialogClickListener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which){
case DialogInterface.BUTTON_POSITIVE:
if (onYes != null) {
onYes.run();
}
break;
case DialogInterface.BUTTON_NEGATIVE:
if (onNo != null) {
onNo.run();
}
break;
}
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(parent);
builder.setMessage(parent.getResources().getString(R.string.applist_quit_confirmation))
.setPositiveButton(parent.getResources().getString(R.string.yes), dialogClickListener)
.setNegativeButton(parent.getResources().getString(R.string.no), dialogClickListener)
.show();
}
}
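
displayQuitConfirmationDialog pairs naturally with ServerHelper.doQuit: confirm first, then quit over HTTP and refresh the UI. Below is a hypothetical wiring sketch; QuitFlowExample, refreshAppList, and the runningApp parameter are illustrative, while the ServerHelper and UiHelper calls match the code above.

```java
import android.app.Activity;

import com.limelight.computers.ComputerManagerService;
import com.limelight.nvstream.http.ComputerDetails;
import com.limelight.nvstream.http.NvApp;
import com.limelight.utils.ServerHelper;
import com.limelight.utils.UiHelper;

// Hypothetical wiring, not part of the commit.
class QuitFlowExample {
    static void confirmAndQuit(final Activity activity, final ComputerDetails computer,
                               final NvApp runningApp,
                               final ComputerManagerService.ComputerManagerBinder binder,
                               final Runnable refreshAppList) {
        UiHelper.displayQuitConfirmationDialog(activity, new Runnable() {
            @Override
            public void run() {
                // User confirmed: quit over HTTP, then refresh the app list.
                ServerHelper.doQuit(activity,
                        ServerHelper.getCurrentAddressFromComputer(computer),
                        runningApp, binder, refreshAppList);
            }
        }, null); // a null onNo simply dismisses the dialog
    }
}
```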


@ -3,7 +3,8 @@
# Our minimum version is Android 4.1
APP_PLATFORM := android-16
APP_ABI := armeabi-v7a arm64-v8a x86 x86_64 mips #mips64
# Support all modern ABIs
APP_ABI := armeabi-v7a arm64-v8a x86 x86_64 mips mips64
# We want an optimized build
APP_OPTIM := release


@ -10,4 +10,23 @@ LOCAL_MODULE := evdev_reader
LOCAL_SRC_FILES := evdev_reader.c
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)
# This next portion of the makefile is mostly copied from build-executable.mk but
# creates a binary with the libXXX.so form so the APK will install and drop
# the binary correctly.
LOCAL_BUILD_SCRIPT := BUILD_EXECUTABLE
LOCAL_MAKEFILE := $(local-makefile)
$(call check-defined-LOCAL_MODULE,$(LOCAL_BUILD_SCRIPT))
$(call check-LOCAL_MODULE,$(LOCAL_MAKEFILE))
$(call check-LOCAL_MODULE_FILENAME)
# we are building target objects
my := TARGET_
$(call handle-module-filename,lib,$(TARGET_SONAME_EXTENSION))
$(call handle-module-built)
LOCAL_MODULE_CLASS := EXECUTABLE
include $(BUILD_SYSTEM)/build-module.mk


@ -1,5 +1,6 @@
#include <stdlib.h>
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
@ -8,34 +9,37 @@
#include <unistd.h>
#include <poll.h>
#include <errno.h>
#include <dirent.h>
#include <pthread.h>
#include <android/log.h>
JNIEXPORT jint JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_open(JNIEnv *env, jobject this, jstring absolutePath) {
const char *path;
#define REL_X 0x00
#define REL_Y 0x01
#define KEY_Q 16
#define BTN_LEFT 0x110
#define BTN_GAMEPAD 0x130
path = (*env)->GetStringUTFChars(env, absolutePath, NULL);
struct DeviceEntry {
struct DeviceEntry *next;
pthread_t thread;
int fd;
char devName[128];
};
return open(path, O_RDWR);
}
static struct DeviceEntry *DeviceListHead;
static int grabbing = 1;
static pthread_mutex_t DeviceListLock = PTHREAD_MUTEX_INITIALIZER;
JNIEXPORT jboolean JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_grab(JNIEnv *env, jobject this, jint fd) {
return ioctl(fd, EVIOCGRAB, 1) == 0;
}
JNIEXPORT jboolean JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_ungrab(JNIEnv *env, jobject this, jint fd) {
return ioctl(fd, EVIOCGRAB, 0) == 0;
}
// has*() and friends are based on Android's EventHub.cpp
// This is a small executable that runs in a root shell. It reads input
// devices and writes the evdev output packets to stdout. This allows
// Moonlight to read input devices without having to muck with changing
// device permissions or modifying SELinux policy (which is prevented in
// Marshmallow anyway).
#define test_bit(bit, array) (array[bit/8] & (1<<(bit%8)))
JNIEXPORT jboolean JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_hasRelAxis(JNIEnv *env, jobject this, jint fd, jshort axis) {
static int hasRelAxis(int fd, short axis) {
unsigned char relBitmask[(REL_MAX + 1) / 8];
ioctl(fd, EVIOCGBIT(EV_REL, sizeof(relBitmask)), relBitmask);
@ -43,17 +47,7 @@ Java_com_limelight_binding_input_evdev_EvdevReader_hasRelAxis(JNIEnv *env, jobje
return test_bit(axis, relBitmask);
}
JNIEXPORT jboolean JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_hasAbsAxis(JNIEnv *env, jobject this, jint fd, jshort axis) {
unsigned char absBitmask[(ABS_MAX + 1) / 8];
ioctl(fd, EVIOCGBIT(EV_ABS, sizeof(absBitmask)), absBitmask);
return test_bit(axis, absBitmask);
}
JNIEXPORT jboolean JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_hasKey(JNIEnv *env, jobject this, jint fd, jshort key) {
static int hasKey(int fd, short key) {
unsigned char keyBitmask[(KEY_MAX + 1) / 8];
ioctl(fd, EVIOCGBIT(EV_KEY, sizeof(keyBitmask)), keyBitmask);
@ -61,24 +55,37 @@ Java_com_limelight_binding_input_evdev_EvdevReader_hasKey(JNIEnv *env, jobject t
return test_bit(key, keyBitmask);
}
JNIEXPORT jint JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_read(JNIEnv *env, jobject this, jint fd, jbyteArray buffer) {
jint ret;
jbyte *data;
int pollres;
struct pollfd pollinfo;
data = (*env)->GetByteArrayElements(env, buffer, NULL);
if (data == NULL) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader",
"Failed to get byte array");
return -1;
static void outputEvdevData(char *data, int dataSize) {
// We need to lock stdout before writing to prevent
// interleaving of data between threads.
flockfile(stdout);
fwrite(&dataSize, sizeof(dataSize), 1, stdout);
fwrite(data, dataSize, 1, stdout);
fflush(stdout);
funlockfile(stdout);
}
do
{
void* pollThreadFunc(void* context) {
struct DeviceEntry *device = context;
struct pollfd pollinfo;
int pollres, ret;
char data[64];
__android_log_print(ANDROID_LOG_INFO, "EvdevReader", "Polling /dev/input/%s", device->devName);
if (grabbing) {
// Exclusively grab the input device (required to make the Android cursor disappear)
if (ioctl(device->fd, EVIOCGRAB, 1) < 0) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader",
"EVIOCGRAB failed for %s: %d", device->devName, errno);
goto cleanup;
}
}
for (;;) {
do {
// Unwait every 250 ms to return to caller if the fd is closed
pollinfo.fd = fd;
pollinfo.fd = device->fd;
pollinfo.events = POLLIN;
pollinfo.revents = 0;
pollres = poll(&pollinfo, 1, 250);
@ -87,16 +94,23 @@ Java_com_limelight_binding_input_evdev_EvdevReader_read(JNIEnv *env, jobject thi
if (pollres > 0 && (pollinfo.revents & POLLIN)) {
// We'll have data available now
ret = read(fd, data, sizeof(struct input_event));
ret = read(device->fd, data, sizeof(struct input_event));
if (ret < 0) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader",
"read() failed: %d", errno);
goto cleanup;
}
else if (ret == 0) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader",
"read() graceful EOF");
goto cleanup;
}
else if (grabbing) {
// Write out the data to our client
outputEvdevData(data, ret);
}
}
else {
// There must have been a failure
ret = -1;
if (pollres < 0) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader",
"poll() failed: %d", errno);
@ -105,14 +119,207 @@ Java_com_limelight_binding_input_evdev_EvdevReader_read(JNIEnv *env, jobject thi
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader",
"Unexpected revents: %d", pollinfo.revents);
}
// Terminate this thread
goto cleanup;
}
}
(*env)->ReleaseByteArrayElements(env, buffer, data, 0);
cleanup:
__android_log_print(ANDROID_LOG_INFO, "EvdevReader", "Closing /dev/input/%s", device->devName);
// Remove the context from the linked list
{
struct DeviceEntry *lastEntry;
if (DeviceListHead == device) {
DeviceListHead = device->next;
}
else {
lastEntry = DeviceListHead;
while (lastEntry->next != NULL) {
if (lastEntry->next == device) {
lastEntry->next = device->next;
break;
}
lastEntry = lastEntry->next;
}
}
}
// Free the context
ioctl(device->fd, EVIOCGRAB, 0);
close(device->fd);
free(device);
return NULL;
}
static int precheckDeviceForPolling(int fd) {
int isMouse;
int isKeyboard;
int isGamepad;
// This is the same check that Android does in EventHub.cpp
isMouse = hasRelAxis(fd, REL_X) &&
hasRelAxis(fd, REL_Y) &&
hasKey(fd, BTN_LEFT);
// This is the same check that Android does in EventHub.cpp
isKeyboard = hasKey(fd, KEY_Q);
isGamepad = hasKey(fd, BTN_GAMEPAD);
// We only handle keyboards and mice that aren't gamepads
return (isMouse || isKeyboard) && !isGamepad;
}
static void startPollForDevice(char* deviceName) {
struct DeviceEntry *currentEntry;
char fullPath[256];
int fd;
// Lock the device list
pthread_mutex_lock(&DeviceListLock);
// Check if the device is already being polled
currentEntry = DeviceListHead;
while (currentEntry != NULL) {
if (strcmp(currentEntry->devName, deviceName) == 0) {
// Already polling this device
goto unlock;
}
currentEntry = currentEntry->next;
}
// Open the device
sprintf(fullPath, "/dev/input/%s", deviceName);
fd = open(fullPath, O_RDWR);
if (fd < 0) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader", "Couldn't open %s: %d", fullPath, errno);
goto unlock;
}
// Allocate a context
currentEntry = malloc(sizeof(*currentEntry));
if (currentEntry == NULL) {
close(fd);
goto unlock;
}
// Populate context
currentEntry->fd = fd;
strcpy(currentEntry->devName, deviceName);
// Check if we support polling this device
if (!precheckDeviceForPolling(fd)) {
// Nope, get out
free(currentEntry);
close(fd);
goto unlock;
}
// Start the polling thread
if (pthread_create(&currentEntry->thread, NULL, pollThreadFunc, currentEntry) != 0) {
free(currentEntry);
close(fd);
goto unlock;
}
// Queue this onto the device list
currentEntry->next = DeviceListHead;
DeviceListHead = currentEntry;
unlock:
// Unlock and return
pthread_mutex_unlock(&DeviceListLock);
}
static int enumerateDevices(void) {
DIR *inputDir;
struct dirent *dirEnt;
inputDir = opendir("/dev/input");
if (!inputDir) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader", "Couldn't open /dev/input: %d", errno);
return -1;
}
// Start polling each device in /dev/input
while ((dirEnt = readdir(inputDir)) != NULL) {
if (strcmp(dirEnt->d_name, ".") == 0 || strcmp(dirEnt->d_name, "..") == 0) {
// Skip these virtual directories
continue;
}
startPollForDevice(dirEnt->d_name);
}
closedir(inputDir);
return 0;
}
#define UNGRAB_REQ 1
#define REGRAB_REQ 2
int main(int argc, char* argv[]) {
int ret;
int pollres;
struct pollfd pollinfo;
// Perform initial enumeration
ret = enumerateDevices();
if (ret < 0) {
return ret;
}
JNIEXPORT jint JNICALL
Java_com_limelight_binding_input_evdev_EvdevReader_close(JNIEnv *env, jobject this, jint fd) {
return close(fd);
// Wait for requests from the client
for (;;) {
unsigned char requestId;
do {
// Every second we poll again for new devices if
// we haven't received any new events
pollinfo.fd = STDIN_FILENO;
pollinfo.events = POLLIN;
pollinfo.revents = 0;
pollres = poll(&pollinfo, 1, 1000);
if (pollres == 0) {
// Timeout, re-enumerate devices
enumerateDevices();
}
}
while (pollres == 0);
ret = fread(&requestId, sizeof(requestId), 1, stdin);
if (ret < sizeof(requestId)) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader", "Short read on input");
return errno;
}
if (requestId != UNGRAB_REQ && requestId != REGRAB_REQ) {
__android_log_print(ANDROID_LOG_ERROR, "EvdevReader", "Unknown request");
return requestId;
}
{
struct DeviceEntry *currentEntry;
pthread_mutex_lock(&DeviceListLock);
// Update state for future devices
grabbing = (requestId == REGRAB_REQ);
// Carry out the requested action on each device
currentEntry = DeviceListHead;
while (currentEntry != NULL) {
ioctl(currentEntry->fd, EVIOCGRAB, grabbing);
currentEntry = currentEntry->next;
}
pthread_mutex_unlock(&DeviceListLock);
}
}
}
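
The rewritten evdev_reader is now a standalone root-shell executable speaking a tiny pipe protocol: stdout carries length-prefixed packets (a native 32-bit size followed by that many bytes of raw struct input_event data), and stdin accepts single-byte UNGRAB_REQ/REGRAB_REQ commands. The following is a hedged Java-side sketch of a client for that protocol; EvdevStreamReaderExample and PacketHandler are placeholders, not Moonlight's actual reader class.

```java
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Hypothetical client, not part of the commit.
class EvdevStreamReaderExample {
    static final byte UNGRAB_REQ = 1;
    static final byte REGRAB_REQ = 2;

    interface PacketHandler {
        void onInputEvent(byte[] inputEventBytes);
    }

    static void readPackets(InputStream processStdout, PacketHandler handler) throws IOException {
        DataInputStream in = new DataInputStream(processStdout);
        byte[] prefix = new byte[4];
        for (;;) {
            // fwrite() in the binary emits the size in native byte order;
            // the ABIs listed in Application.mk are all little-endian on Android.
            // readFully() throws EOFException once the binary exits.
            in.readFully(prefix);
            int size = ByteBuffer.wrap(prefix).order(ByteOrder.LITTLE_ENDIAN).getInt();
            byte[] packet = new byte[size];
            in.readFully(packet); // raw struct input_event bytes
            handler.onInputEvent(packet);
        }
    }

    static void setGrabState(OutputStream processStdin, boolean grab) throws IOException {
        // A single request byte toggles grabbing on every polled device.
        processStdin.write(grab ? REGRAB_REQ : UNGRAB_REQ);
        processStdin.flush();
    }
}
```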


@ -1,17 +0,0 @@
# Android.mk for Limelight's H264 decoder
MY_LOCAL_PATH := $(call my-dir)
include $(call all-subdir-makefiles)
LOCAL_PATH := $(MY_LOCAL_PATH)
include $(CLEAR_VARS)
LOCAL_MODULE := nv_avc_dec
LOCAL_SRC_FILES := nv_avc_dec.c nv_avc_dec_jni.c
LOCAL_C_INCLUDES := $(LOCAL_PATH)/ffmpeg/$(TARGET_ARCH_ABI)/include
LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -llog -landroid
# Link to ffmpeg libraries
LOCAL_SHARED_LIBRARIES := libavcodec libavformat libswscale libavutil libwsresample
include $(BUILD_SHARED_LIBRARY)


@ -1,31 +0,0 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavcodec-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavformat-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libswscale-2.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libavutil-52.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libwsresample
LOCAL_SRC_FILES:= $(TARGET_ARCH_ABI)/lib/libswresample-0.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)


@ -1,116 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_AVFFT_H
#define AVCODEC_AVFFT_H
/**
* @file
* @ingroup lavc_fft
* FFT functions
*/
/**
* @defgroup lavc_fft FFT functions
* @ingroup lavc_misc
*
* @{
*/
typedef float FFTSample;
typedef struct FFTComplex {
FFTSample re, im;
} FFTComplex;
typedef struct FFTContext FFTContext;
/**
* Set up a complex FFT.
* @param nbits log2 of the length of the input array
* @param inverse if 0 perform the forward transform, if 1 perform the inverse
*/
FFTContext *av_fft_init(int nbits, int inverse);
/**
* Do the permutation needed BEFORE calling ff_fft_calc().
*/
void av_fft_permute(FFTContext *s, FFTComplex *z);
/**
* Do a complex FFT with the parameters defined in av_fft_init(). The
* input data must be permuted before. No 1.0/sqrt(n) normalization is done.
*/
void av_fft_calc(FFTContext *s, FFTComplex *z);
void av_fft_end(FFTContext *s);
FFTContext *av_mdct_init(int nbits, int inverse, double scale);
void av_imdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input);
void av_imdct_half(FFTContext *s, FFTSample *output, const FFTSample *input);
void av_mdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input);
void av_mdct_end(FFTContext *s);
/* Real Discrete Fourier Transform */
enum RDFTransformType {
DFT_R2C,
IDFT_C2R,
IDFT_R2C,
DFT_C2R,
};
typedef struct RDFTContext RDFTContext;
/**
* Set up a real FFT.
* @param nbits log2 of the length of the input array
* @param trans the type of transform
*/
RDFTContext *av_rdft_init(int nbits, enum RDFTransformType trans);
void av_rdft_calc(RDFTContext *s, FFTSample *data);
void av_rdft_end(RDFTContext *s);
/* Discrete Cosine Transform */
typedef struct DCTContext DCTContext;
enum DCTTransformType {
DCT_II = 0,
DCT_III,
DCT_I,
DST_I,
};
/**
* Set up DCT.
* @param nbits size of the input array:
* (1 << nbits) for DCT-II, DCT-III and DST-I
* (1 << nbits) + 1 for DCT-I
*
* @note the first element of the input of DST-I is ignored
*/
DCTContext *av_dct_init(int nbits, enum DCTTransformType type);
void av_dct_calc(DCTContext *s, FFTSample *data);
void av_dct_end (DCTContext *s);
/**
* @}
*/
#endif /* AVCODEC_AVFFT_H */


@ -1,95 +0,0 @@
/*
* DXVA2 HW acceleration
*
* copyright (c) 2009 Laurent Aimar
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DXVA_H
#define AVCODEC_DXVA_H
/**
* @file
* @ingroup lavc_codec_hwaccel_dxva2
* Public libavcodec DXVA2 header.
*/
#if defined(_WIN32_WINNT) && _WIN32_WINNT < 0x0600
#undef _WIN32_WINNT
#endif
#if !defined(_WIN32_WINNT)
#define _WIN32_WINNT 0x0600
#endif
#include <stdint.h>
#include <d3d9.h>
#include <dxva2api.h>
/**
* @defgroup lavc_codec_hwaccel_dxva2 DXVA2
* @ingroup lavc_codec_hwaccel
*
* @{
*/
#define FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG 1 ///< Work around for DXVA2 and old UVD/UVD+ ATI video cards
/**
* This structure is used to provides the necessary configurations and data
* to the DXVA2 FFmpeg HWAccel implementation.
*
* The application must make it available as AVCodecContext.hwaccel_context.
*/
struct dxva_context {
/**
* DXVA2 decoder object
*/
IDirectXVideoDecoder *decoder;
/**
* DXVA2 configuration used to create the decoder
*/
const DXVA2_ConfigPictureDecode *cfg;
/**
* The number of surfaces in the surface array
*/
unsigned surface_count;
/**
* The array of Direct3D surfaces used to create the decoder
*/
LPDIRECT3DSURFACE9 *surface;
/**
* A bit field configuring the workarounds needed for using the decoder
*/
uint64_t workaround;
/**
* Private to the FFmpeg AVHWAccel implementation
*/
unsigned report_id;
};
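/**
 * A minimal setup sketch (assumes the decoder object, picture decode
 * configuration and surface array were already created by the application
 * through the DXVA2 API, that avctx is the AVCodecContext being configured,
 * and that the dxva_context outlives decoding; error checks omitted):
 * @code
 * static struct dxva_context dxva;         // zero-initialized, owned by the application
 * dxva.decoder       = decoder;            // IDirectXVideoDecoder created by the application
 * dxva.cfg           = &config;            // DXVA2_ConfigPictureDecode used for the decoder
 * dxva.surface       = surfaces;           // LPDIRECT3DSURFACE9 array
 * dxva.surface_count = nb_surfaces;
 * avctx->hwaccel_context = &dxva;          // hand the context to libavcodec
 * @endcode
 */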
/**
* @}
*/
#endif /* AVCODEC_DXVA_H */

View File

@ -1,397 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_OLD_CODEC_IDS_H
#define AVCODEC_OLD_CODEC_IDS_H
#include "libavutil/common.h"
/*
* This header exists to prevent new codec IDs from being accidentally added to
* the deprecated list.
* Do not include it directly. It will be removed at the next major bump.
*
* Do not add new items to this list. Use the AVCodecID enum instead.
*/
CODEC_ID_NONE = AV_CODEC_ID_NONE,
/* video codecs */
CODEC_ID_MPEG1VIDEO,
CODEC_ID_MPEG2VIDEO, ///< preferred ID for MPEG-1/2 video decoding
CODEC_ID_MPEG2VIDEO_XVMC,
CODEC_ID_H261,
CODEC_ID_H263,
CODEC_ID_RV10,
CODEC_ID_RV20,
CODEC_ID_MJPEG,
CODEC_ID_MJPEGB,
CODEC_ID_LJPEG,
CODEC_ID_SP5X,
CODEC_ID_JPEGLS,
CODEC_ID_MPEG4,
CODEC_ID_RAWVIDEO,
CODEC_ID_MSMPEG4V1,
CODEC_ID_MSMPEG4V2,
CODEC_ID_MSMPEG4V3,
CODEC_ID_WMV1,
CODEC_ID_WMV2,
CODEC_ID_H263P,
CODEC_ID_H263I,
CODEC_ID_FLV1,
CODEC_ID_SVQ1,
CODEC_ID_SVQ3,
CODEC_ID_DVVIDEO,
CODEC_ID_HUFFYUV,
CODEC_ID_CYUV,
CODEC_ID_H264,
CODEC_ID_INDEO3,
CODEC_ID_VP3,
CODEC_ID_THEORA,
CODEC_ID_ASV1,
CODEC_ID_ASV2,
CODEC_ID_FFV1,
CODEC_ID_4XM,
CODEC_ID_VCR1,
CODEC_ID_CLJR,
CODEC_ID_MDEC,
CODEC_ID_ROQ,
CODEC_ID_INTERPLAY_VIDEO,
CODEC_ID_XAN_WC3,
CODEC_ID_XAN_WC4,
CODEC_ID_RPZA,
CODEC_ID_CINEPAK,
CODEC_ID_WS_VQA,
CODEC_ID_MSRLE,
CODEC_ID_MSVIDEO1,
CODEC_ID_IDCIN,
CODEC_ID_8BPS,
CODEC_ID_SMC,
CODEC_ID_FLIC,
CODEC_ID_TRUEMOTION1,
CODEC_ID_VMDVIDEO,
CODEC_ID_MSZH,
CODEC_ID_ZLIB,
CODEC_ID_QTRLE,
CODEC_ID_TSCC,
CODEC_ID_ULTI,
CODEC_ID_QDRAW,
CODEC_ID_VIXL,
CODEC_ID_QPEG,
CODEC_ID_PNG,
CODEC_ID_PPM,
CODEC_ID_PBM,
CODEC_ID_PGM,
CODEC_ID_PGMYUV,
CODEC_ID_PAM,
CODEC_ID_FFVHUFF,
CODEC_ID_RV30,
CODEC_ID_RV40,
CODEC_ID_VC1,
CODEC_ID_WMV3,
CODEC_ID_LOCO,
CODEC_ID_WNV1,
CODEC_ID_AASC,
CODEC_ID_INDEO2,
CODEC_ID_FRAPS,
CODEC_ID_TRUEMOTION2,
CODEC_ID_BMP,
CODEC_ID_CSCD,
CODEC_ID_MMVIDEO,
CODEC_ID_ZMBV,
CODEC_ID_AVS,
CODEC_ID_SMACKVIDEO,
CODEC_ID_NUV,
CODEC_ID_KMVC,
CODEC_ID_FLASHSV,
CODEC_ID_CAVS,
CODEC_ID_JPEG2000,
CODEC_ID_VMNC,
CODEC_ID_VP5,
CODEC_ID_VP6,
CODEC_ID_VP6F,
CODEC_ID_TARGA,
CODEC_ID_DSICINVIDEO,
CODEC_ID_TIERTEXSEQVIDEO,
CODEC_ID_TIFF,
CODEC_ID_GIF,
CODEC_ID_DXA,
CODEC_ID_DNXHD,
CODEC_ID_THP,
CODEC_ID_SGI,
CODEC_ID_C93,
CODEC_ID_BETHSOFTVID,
CODEC_ID_PTX,
CODEC_ID_TXD,
CODEC_ID_VP6A,
CODEC_ID_AMV,
CODEC_ID_VB,
CODEC_ID_PCX,
CODEC_ID_SUNRAST,
CODEC_ID_INDEO4,
CODEC_ID_INDEO5,
CODEC_ID_MIMIC,
CODEC_ID_RL2,
CODEC_ID_ESCAPE124,
CODEC_ID_DIRAC,
CODEC_ID_BFI,
CODEC_ID_CMV,
CODEC_ID_MOTIONPIXELS,
CODEC_ID_TGV,
CODEC_ID_TGQ,
CODEC_ID_TQI,
CODEC_ID_AURA,
CODEC_ID_AURA2,
CODEC_ID_V210X,
CODEC_ID_TMV,
CODEC_ID_V210,
CODEC_ID_DPX,
CODEC_ID_MAD,
CODEC_ID_FRWU,
CODEC_ID_FLASHSV2,
CODEC_ID_CDGRAPHICS,
CODEC_ID_R210,
CODEC_ID_ANM,
CODEC_ID_BINKVIDEO,
CODEC_ID_IFF_ILBM,
CODEC_ID_IFF_BYTERUN1,
CODEC_ID_KGV1,
CODEC_ID_YOP,
CODEC_ID_VP8,
CODEC_ID_PICTOR,
CODEC_ID_ANSI,
CODEC_ID_A64_MULTI,
CODEC_ID_A64_MULTI5,
CODEC_ID_R10K,
CODEC_ID_MXPEG,
CODEC_ID_LAGARITH,
CODEC_ID_PRORES,
CODEC_ID_JV,
CODEC_ID_DFA,
CODEC_ID_WMV3IMAGE,
CODEC_ID_VC1IMAGE,
CODEC_ID_UTVIDEO,
CODEC_ID_BMV_VIDEO,
CODEC_ID_VBLE,
CODEC_ID_DXTORY,
CODEC_ID_V410,
CODEC_ID_XWD,
CODEC_ID_CDXL,
CODEC_ID_XBM,
CODEC_ID_ZEROCODEC,
CODEC_ID_MSS1,
CODEC_ID_MSA1,
CODEC_ID_TSCC2,
CODEC_ID_MTS2,
CODEC_ID_CLLC,
CODEC_ID_Y41P = MKBETAG('Y','4','1','P'),
CODEC_ID_ESCAPE130 = MKBETAG('E','1','3','0'),
CODEC_ID_EXR = MKBETAG('0','E','X','R'),
CODEC_ID_AVRP = MKBETAG('A','V','R','P'),
CODEC_ID_G2M = MKBETAG( 0 ,'G','2','M'),
CODEC_ID_AVUI = MKBETAG('A','V','U','I'),
CODEC_ID_AYUV = MKBETAG('A','Y','U','V'),
CODEC_ID_V308 = MKBETAG('V','3','0','8'),
CODEC_ID_V408 = MKBETAG('V','4','0','8'),
CODEC_ID_YUV4 = MKBETAG('Y','U','V','4'),
CODEC_ID_SANM = MKBETAG('S','A','N','M'),
CODEC_ID_PAF_VIDEO = MKBETAG('P','A','F','V'),
CODEC_ID_SNOW = AV_CODEC_ID_SNOW,
/* various PCM "codecs" */
CODEC_ID_FIRST_AUDIO = 0x10000, ///< A dummy id pointing at the start of audio codecs
CODEC_ID_PCM_S16LE = 0x10000,
CODEC_ID_PCM_S16BE,
CODEC_ID_PCM_U16LE,
CODEC_ID_PCM_U16BE,
CODEC_ID_PCM_S8,
CODEC_ID_PCM_U8,
CODEC_ID_PCM_MULAW,
CODEC_ID_PCM_ALAW,
CODEC_ID_PCM_S32LE,
CODEC_ID_PCM_S32BE,
CODEC_ID_PCM_U32LE,
CODEC_ID_PCM_U32BE,
CODEC_ID_PCM_S24LE,
CODEC_ID_PCM_S24BE,
CODEC_ID_PCM_U24LE,
CODEC_ID_PCM_U24BE,
CODEC_ID_PCM_S24DAUD,
CODEC_ID_PCM_ZORK,
CODEC_ID_PCM_S16LE_PLANAR,
CODEC_ID_PCM_DVD,
CODEC_ID_PCM_F32BE,
CODEC_ID_PCM_F32LE,
CODEC_ID_PCM_F64BE,
CODEC_ID_PCM_F64LE,
CODEC_ID_PCM_BLURAY,
CODEC_ID_PCM_LXF,
CODEC_ID_S302M,
CODEC_ID_PCM_S8_PLANAR,
/* various ADPCM codecs */
CODEC_ID_ADPCM_IMA_QT = 0x11000,
CODEC_ID_ADPCM_IMA_WAV,
CODEC_ID_ADPCM_IMA_DK3,
CODEC_ID_ADPCM_IMA_DK4,
CODEC_ID_ADPCM_IMA_WS,
CODEC_ID_ADPCM_IMA_SMJPEG,
CODEC_ID_ADPCM_MS,
CODEC_ID_ADPCM_4XM,
CODEC_ID_ADPCM_XA,
CODEC_ID_ADPCM_ADX,
CODEC_ID_ADPCM_EA,
CODEC_ID_ADPCM_G726,
CODEC_ID_ADPCM_CT,
CODEC_ID_ADPCM_SWF,
CODEC_ID_ADPCM_YAMAHA,
CODEC_ID_ADPCM_SBPRO_4,
CODEC_ID_ADPCM_SBPRO_3,
CODEC_ID_ADPCM_SBPRO_2,
CODEC_ID_ADPCM_THP,
CODEC_ID_ADPCM_IMA_AMV,
CODEC_ID_ADPCM_EA_R1,
CODEC_ID_ADPCM_EA_R3,
CODEC_ID_ADPCM_EA_R2,
CODEC_ID_ADPCM_IMA_EA_SEAD,
CODEC_ID_ADPCM_IMA_EA_EACS,
CODEC_ID_ADPCM_EA_XAS,
CODEC_ID_ADPCM_EA_MAXIS_XA,
CODEC_ID_ADPCM_IMA_ISS,
CODEC_ID_ADPCM_G722,
CODEC_ID_ADPCM_IMA_APC,
CODEC_ID_VIMA = MKBETAG('V','I','M','A'),
/* AMR */
CODEC_ID_AMR_NB = 0x12000,
CODEC_ID_AMR_WB,
/* RealAudio codecs*/
CODEC_ID_RA_144 = 0x13000,
CODEC_ID_RA_288,
/* various DPCM codecs */
CODEC_ID_ROQ_DPCM = 0x14000,
CODEC_ID_INTERPLAY_DPCM,
CODEC_ID_XAN_DPCM,
CODEC_ID_SOL_DPCM,
/* audio codecs */
CODEC_ID_MP2 = 0x15000,
CODEC_ID_MP3, ///< preferred ID for decoding MPEG audio layer 1, 2 or 3
CODEC_ID_AAC,
CODEC_ID_AC3,
CODEC_ID_DTS,
CODEC_ID_VORBIS,
CODEC_ID_DVAUDIO,
CODEC_ID_WMAV1,
CODEC_ID_WMAV2,
CODEC_ID_MACE3,
CODEC_ID_MACE6,
CODEC_ID_VMDAUDIO,
CODEC_ID_FLAC,
CODEC_ID_MP3ADU,
CODEC_ID_MP3ON4,
CODEC_ID_SHORTEN,
CODEC_ID_ALAC,
CODEC_ID_WESTWOOD_SND1,
CODEC_ID_GSM, ///< as in Berlin toast format
CODEC_ID_QDM2,
CODEC_ID_COOK,
CODEC_ID_TRUESPEECH,
CODEC_ID_TTA,
CODEC_ID_SMACKAUDIO,
CODEC_ID_QCELP,
CODEC_ID_WAVPACK,
CODEC_ID_DSICINAUDIO,
CODEC_ID_IMC,
CODEC_ID_MUSEPACK7,
CODEC_ID_MLP,
CODEC_ID_GSM_MS, /* as found in WAV */
CODEC_ID_ATRAC3,
CODEC_ID_VOXWARE,
CODEC_ID_APE,
CODEC_ID_NELLYMOSER,
CODEC_ID_MUSEPACK8,
CODEC_ID_SPEEX,
CODEC_ID_WMAVOICE,
CODEC_ID_WMAPRO,
CODEC_ID_WMALOSSLESS,
CODEC_ID_ATRAC3P,
CODEC_ID_EAC3,
CODEC_ID_SIPR,
CODEC_ID_MP1,
CODEC_ID_TWINVQ,
CODEC_ID_TRUEHD,
CODEC_ID_MP4ALS,
CODEC_ID_ATRAC1,
CODEC_ID_BINKAUDIO_RDFT,
CODEC_ID_BINKAUDIO_DCT,
CODEC_ID_AAC_LATM,
CODEC_ID_QDMC,
CODEC_ID_CELT,
CODEC_ID_G723_1,
CODEC_ID_G729,
CODEC_ID_8SVX_EXP,
CODEC_ID_8SVX_FIB,
CODEC_ID_BMV_AUDIO,
CODEC_ID_RALF,
CODEC_ID_IAC,
CODEC_ID_ILBC,
CODEC_ID_FFWAVESYNTH = MKBETAG('F','F','W','S'),
CODEC_ID_SONIC = MKBETAG('S','O','N','C'),
CODEC_ID_SONIC_LS = MKBETAG('S','O','N','L'),
CODEC_ID_PAF_AUDIO = MKBETAG('P','A','F','A'),
CODEC_ID_OPUS = MKBETAG('O','P','U','S'),
/* subtitle codecs */
CODEC_ID_FIRST_SUBTITLE = 0x17000, ///< A dummy ID pointing at the start of subtitle codecs.
CODEC_ID_DVD_SUBTITLE = 0x17000,
CODEC_ID_DVB_SUBTITLE,
CODEC_ID_TEXT, ///< raw UTF-8 text
CODEC_ID_XSUB,
CODEC_ID_SSA,
CODEC_ID_MOV_TEXT,
CODEC_ID_HDMV_PGS_SUBTITLE,
CODEC_ID_DVB_TELETEXT,
CODEC_ID_SRT,
CODEC_ID_MICRODVD = MKBETAG('m','D','V','D'),
CODEC_ID_EIA_608 = MKBETAG('c','6','0','8'),
CODEC_ID_JACOSUB = MKBETAG('J','S','U','B'),
CODEC_ID_SAMI = MKBETAG('S','A','M','I'),
CODEC_ID_REALTEXT = MKBETAG('R','T','X','T'),
CODEC_ID_SUBVIEWER = MKBETAG('S','u','b','V'),
/* other specific kind of codecs (generally used for attachments) */
CODEC_ID_FIRST_UNKNOWN = 0x18000, ///< A dummy ID pointing at the start of various fake codecs.
CODEC_ID_TTF = 0x18000,
CODEC_ID_BINTEXT = MKBETAG('B','T','X','T'),
CODEC_ID_XBIN = MKBETAG('X','B','I','N'),
CODEC_ID_IDF = MKBETAG( 0 ,'I','D','F'),
CODEC_ID_OTF = MKBETAG( 0 ,'O','T','F'),
CODEC_ID_PROBE = 0x19000, ///< codec_id is not known (like CODEC_ID_NONE) but lavf should attempt to identify it
CODEC_ID_MPEG2TS = 0x20000, /**< _FAKE_ codec to indicate a raw MPEG-2 TS
* stream (only used by libavformat) */
CODEC_ID_MPEG4SYSTEMS = 0x20001, /**< _FAKE_ codec to indicate a MPEG-4 Systems
* stream (only used by libavformat) */
CODEC_ID_FFMETADATA = 0x21000, ///< Dummy codec for streams containing only metadata information.
#endif /* AVCODEC_OLD_CODEC_IDS_H */

View File

@ -1,173 +0,0 @@
/*
* Video Acceleration API (shared data between FFmpeg and the video player)
* HW decode acceleration for MPEG-2, MPEG-4, H.264 and VC-1
*
* Copyright (C) 2008-2009 Splitted-Desktop Systems
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_H
#define AVCODEC_VAAPI_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vaapi
* Public libavcodec VA API header.
*/
#include <stdint.h>
/**
* @defgroup lavc_codec_hwaccel_vaapi VA API Decoding
* @ingroup lavc_codec_hwaccel
* @{
*/
/**
* This structure is used to share data between the FFmpeg library and
* the client video application.
* This shall be zero-allocated and available as
* AVCodecContext.hwaccel_context. All user members can be set once
* during initialization or through each AVCodecContext.get_buffer()
* function call. In any case, they must be valid prior to calling
* decoding functions.
*/
struct vaapi_context {
/**
* Window system dependent data
*
* - encoding: unused
* - decoding: Set by user
*/
void *display;
/**
* Configuration ID
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t config_id;
/**
* Context ID (video decode pipeline)
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t context_id;
/**
* VAPictureParameterBuffer ID
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
uint32_t pic_param_buf_id;
/**
* VAIQMatrixBuffer ID
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
uint32_t iq_matrix_buf_id;
/**
* VABitPlaneBuffer ID (for VC-1 decoding)
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
uint32_t bitplane_buf_id;
/**
* Slice parameter/data buffer IDs
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
uint32_t *slice_buf_ids;
/**
* Number of effective slice buffer IDs to send to the HW
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
unsigned int n_slice_buf_ids;
/**
* Size of pre-allocated slice_buf_ids
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
unsigned int slice_buf_ids_alloc;
/**
* Pointer to VASliceParameterBuffers
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
void *slice_params;
/**
* Size of a VASliceParameterBuffer element
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
unsigned int slice_param_size;
/**
* Size of pre-allocated slice_params
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
unsigned int slice_params_alloc;
/**
* Number of slices currently filled in
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
unsigned int slice_count;
/**
* Pointer to slice data buffer base
* - encoding: unused
* - decoding: Set by libavcodec
*/
const uint8_t *slice_data;
/**
* Current size of slice data
*
* - encoding: unused
* - decoding: Set by libavcodec
*/
uint32_t slice_data_size;
};
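/**
 * A minimal setup sketch (assumes the VADisplay, config ID and context ID
 * were already obtained from libva by the application, and that avctx is
 * the AVCodecContext being configured; only the user-settable members are
 * touched, the rest is managed by libavcodec):
 * @code
 * static struct vaapi_context vactx;   // zero-allocated as required above
 * vactx.display    = va_display;       // VADisplay opened by the application
 * vactx.config_id  = va_config_id;     // e.g. from vaCreateConfig()
 * vactx.context_id = va_context_id;    // e.g. from vaCreateContext()
 * avctx->hwaccel_context = &vactx;
 * @endcode
 */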
/* @} */
#endif /* AVCODEC_VAAPI_H */

View File

@ -1,162 +0,0 @@
/*
* VDA HW acceleration
*
* copyright (c) 2011 Sebastien Zwickert
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VDA_H
#define AVCODEC_VDA_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vda
* Public libavcodec VDA header.
*/
#include <stdint.h>
// emmintrin.h is unable to compile with -std=c99 -Werror=missing-prototypes
// http://openradar.appspot.com/8026390
#undef __GNUC_STDC_INLINE__
#define Picture QuickdrawPicture
#include <VideoDecodeAcceleration/VDADecoder.h>
#undef Picture
#include "libavcodec/version.h"
/**
* @defgroup lavc_codec_hwaccel_vda VDA
* @ingroup lavc_codec_hwaccel
*
* @{
*/
/**
* This structure is used to provide the necessary configurations and data
* to the VDA FFmpeg HWAccel implementation.
*
* The application must make it available as AVCodecContext.hwaccel_context.
*/
struct vda_context {
/**
* VDA decoder object.
*
* - encoding: unused
* - decoding: Set/Unset by libavcodec.
*/
VDADecoder decoder;
/**
* The Core Video pixel buffer that contains the current image data.
*
* encoding: unused
* decoding: Set by libavcodec. Unset by user.
*/
CVPixelBufferRef cv_buffer;
/**
* Use the hardware decoder in synchronous mode.
*
* encoding: unused
* decoding: Set by user.
*/
int use_sync_decoding;
/**
* The frame width.
*
* - encoding: unused
* - decoding: Set/Unset by user.
*/
int width;
/**
* The frame height.
*
* - encoding: unused
* - decoding: Set/Unset by user.
*/
int height;
/**
* The frame format.
*
* - encoding: unused
* - decoding: Set/Unset by user.
*/
int format;
/**
* The pixel format for output image buffers.
*
* - encoding: unused
* - decoding: Set/Unset by user.
*/
OSType cv_pix_fmt_type;
/**
* The current bitstream buffer.
*
* - encoding: unused
* - decoding: Set/Unset by libavcodec.
*/
uint8_t *priv_bitstream;
/**
* The current size of the bitstream.
*
* - encoding: unused
* - decoding: Set/Unset by libavcodec.
*/
int priv_bitstream_size;
/**
* The reference size used for fast reallocation.
*
* - encoding: unused
* - decoding: Set/Unset by libavcodec.
*/
int priv_allocated_size;
/**
* Use av_buffer to manage buffer.
* When the flag is set, the CVPixelBuffers returned by the decoder will
* be released automatically, so you have to retain them if necessary.
Not setting this flag may cause a memory leak.
*
* encoding: unused
* decoding: Set by user.
*/
int use_ref_buffer;
};
/** Create the video decoder. */
int ff_vda_create_decoder(struct vda_context *vda_ctx,
uint8_t *extradata,
int extradata_size);
/** Destroy the video decoder. */
int ff_vda_destroy_decoder(struct vda_context *vda_ctx);
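/**
 * A minimal setup sketch (assumes H.264 extradata from the container, an
 * AVCodecContext avctx, and that the vda_context stays alive while
 * decoding; the format and pixel format values are example choices only):
 * @code
 * static struct vda_context vda;                         // zero-initialized
 * vda.width           = avctx->width;
 * vda.height          = avctx->height;
 * vda.format          = 'avc1';                          // H.264 bitstream
 * vda.cv_pix_fmt_type = kCVPixelFormatType_422YpCbCr8;   // desired output format
 * ff_vda_create_decoder(&vda, extradata, extradata_size);
 * avctx->hwaccel_context = &vda;
 * // ... decode ...
 * ff_vda_destroy_decoder(&vda);
 * @endcode
 */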
/**
* @}
*/
#endif /* AVCODEC_VDA_H */

View File

@ -1,195 +0,0 @@
/*
* The Video Decode and Presentation API for UNIX (VDPAU) is used for
* hardware-accelerated decoding of MPEG-1/2, H.264 and VC-1.
*
* Copyright (C) 2008 NVIDIA
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VDPAU_H
#define AVCODEC_VDPAU_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vdpau
* Public libavcodec VDPAU header.
*/
/**
* @defgroup lavc_codec_hwaccel_vdpau VDPAU Decoder and Renderer
* @ingroup lavc_codec_hwaccel
*
* VDPAU hardware acceleration has two modules
* - VDPAU decoding
* - VDPAU presentation
*
* The VDPAU decoding module parses all headers using FFmpeg
* parsing mechanisms and uses VDPAU for the actual decoding.
*
* As per the current implementation, the actual decoding
* and rendering (API calls) are done as part of the VDPAU
* presentation (vo_vdpau.c) module.
*
* @{
*/
#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>
#include "libavutil/avconfig.h"
#include "libavutil/attributes.h"
#ifndef FF_API_CAP_VDPAU
#define FF_API_CAP_VDPAU 1
#endif
#ifndef FF_API_BUFS_VDPAU
#define FF_API_BUFS_VDPAU 1
#endif
#if FF_API_BUFS_VDPAU
union AVVDPAUPictureInfo {
VdpPictureInfoH264 h264;
VdpPictureInfoMPEG1Or2 mpeg;
VdpPictureInfoVC1 vc1;
VdpPictureInfoMPEG4Part2 mpeg4;
};
#endif
struct AVCodecContext;
struct AVFrame;
typedef int (*AVVDPAU_Render2)(struct AVCodecContext *, struct AVFrame *,
const VdpPictureInfo *, uint32_t,
const VdpBitstreamBuffer *);
/**
* This structure is used to share data between the libavcodec library and
* the client video application.
* The user shall allocate the structure via the av_alloc_vdpaucontext()
* function and make it available as
* AVCodecContext.hwaccel_context. Members can be set by the user once
* during initialization or through each AVCodecContext.get_buffer()
* function call. In any case, they must be valid prior to calling
* decoding functions.
*/
typedef struct AVVDPAUContext {
/**
* VDPAU decoder handle
*
* Set by user.
*/
VdpDecoder decoder;
/**
* VDPAU decoder render callback
*
* Set by the user.
*/
VdpDecoderRender *render;
#if FF_API_BUFS_VDPAU
/**
* VDPAU picture information
*
* Set by libavcodec.
*/
attribute_deprecated
union AVVDPAUPictureInfo info;
/**
* Allocated size of the bitstream_buffers table.
*
* Set by libavcodec.
*/
attribute_deprecated
int bitstream_buffers_allocated;
/**
* Useful bitstream buffers in the bitstream buffers table.
*
* Set by libavcodec.
*/
attribute_deprecated
int bitstream_buffers_used;
/**
* Table of bitstream buffers.
* The user is responsible for freeing this buffer using av_freep().
*
* Set by libavcodec.
*/
attribute_deprecated
VdpBitstreamBuffer *bitstream_buffers;
#endif
AVVDPAU_Render2 render2;
} AVVDPAUContext;
/**
* @brief allocation function for AVVDPAUContext
*
* Allows extending the struct without breaking API/ABI
*/
AVVDPAUContext *av_alloc_vdpaucontext(void);
AVVDPAU_Render2 av_vdpau_hwaccel_get_render2(const AVVDPAUContext *);
void av_vdpau_hwaccel_set_render2(AVVDPAUContext *, AVVDPAU_Render2);
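/**
 * A minimal setup sketch (assumes a VdpDecoder and the VdpDecoderRender
 * entry point were already obtained through the VDPAU API, e.g. via
 * vdp_get_proc_address(), and that avctx is the AVCodecContext in use):
 * @code
 * AVVDPAUContext *vdpau = av_alloc_vdpaucontext();
 * vdpau->decoder = vdp_decoder;          // created with VdpDecoderCreate
 * vdpau->render  = vdp_decoder_render;   // VdpDecoderRender function pointer
 * avctx->hwaccel_context = vdpau;
 * @endcode
 */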
#if FF_API_CAP_VDPAU
/** @brief The videoSurface is used for rendering. */
#define FF_VDPAU_STATE_USED_FOR_RENDER 1
/**
* @brief The videoSurface is needed for reference/prediction.
* The codec manipulates this.
*/
#define FF_VDPAU_STATE_USED_FOR_REFERENCE 2
/**
* @brief This structure is used as a callback between the FFmpeg
* decoder (vd_) and presentation (vo_) module.
* This is used for defining a video frame containing surface,
* picture parameter, bitstream information etc which are passed
* between the FFmpeg decoder and its clients.
*/
struct vdpau_render_state {
VdpVideoSurface surface; ///< Used as rendered surface, never changed.
int state; ///< Holds FF_VDPAU_STATE_* values.
#if AV_HAVE_INCOMPATIBLE_LIBAV_ABI
/** picture parameter information for all supported codecs */
union AVVDPAUPictureInfo info;
#endif
/** Describe size/location of the compressed video data.
Set to 0 when freeing bitstream_buffers. */
int bitstream_buffers_allocated;
int bitstream_buffers_used;
/** The user is responsible for freeing this buffer using av_freep(). */
VdpBitstreamBuffer *bitstream_buffers;
#if !AV_HAVE_INCOMPATIBLE_LIBAV_ABI
/** picture parameter information for all supported codecs */
union AVVDPAUPictureInfo info;
#endif
};
#endif
/* @}*/
#endif /* AVCODEC_VDPAU_H */

View File

@ -1,104 +0,0 @@
/*
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VERSION_H
#define AVCODEC_VERSION_H
/**
* @file
* @ingroup libavc
* Libavcodec version macros.
*/
#include "libavutil/avutil.h"
#define LIBAVCODEC_VERSION_MAJOR 55
#define LIBAVCODEC_VERSION_MINOR 39
#define LIBAVCODEC_VERSION_MICRO 101
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
LIBAVCODEC_VERSION_MICRO)
#define LIBAVCODEC_VERSION AV_VERSION(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
LIBAVCODEC_VERSION_MICRO)
#define LIBAVCODEC_BUILD LIBAVCODEC_VERSION_INT
#define LIBAVCODEC_IDENT "Lavc" AV_STRINGIFY(LIBAVCODEC_VERSION)
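/**
 * The macros above allow compile-time checks against the libavcodec
 * version, for example (a sketch; the threshold is arbitrary):
 * @code
 * #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101)
 * // code relying on this release or newer
 * #endif
 * @endcode
 */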
/**
* FF_API_* defines may be placed below to indicate public API that will be
* dropped at a future version bump. The defines themselves are not part of
* the public API and may change, break or disappear at any time.
*/
#ifndef FF_API_REQUEST_CHANNELS
#define FF_API_REQUEST_CHANNELS (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_ALLOC_CONTEXT
#define FF_API_ALLOC_CONTEXT (LIBAVCODEC_VERSION_MAJOR < 55)
#endif
#ifndef FF_API_AVCODEC_OPEN
#define FF_API_AVCODEC_OPEN (LIBAVCODEC_VERSION_MAJOR < 55)
#endif
#ifndef FF_API_OLD_DECODE_AUDIO
#define FF_API_OLD_DECODE_AUDIO (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_OLD_TIMECODE
#define FF_API_OLD_TIMECODE (LIBAVCODEC_VERSION_MAJOR < 55)
#endif
#ifndef FF_API_OLD_ENCODE_AUDIO
#define FF_API_OLD_ENCODE_AUDIO (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_OLD_ENCODE_VIDEO
#define FF_API_OLD_ENCODE_VIDEO (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_CODEC_ID
#define FF_API_CODEC_ID (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_AVCODEC_RESAMPLE
#define FF_API_AVCODEC_RESAMPLE (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_DEINTERLACE
#define FF_API_DEINTERLACE (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_DESTRUCT_PACKET
#define FF_API_DESTRUCT_PACKET (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_GET_BUFFER
#define FF_API_GET_BUFFER (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_MISSING_SAMPLE
#define FF_API_MISSING_SAMPLE (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_LOWRES
#define FF_API_LOWRES (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_CAP_VDPAU
#define FF_API_CAP_VDPAU (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_BUFS_VDPAU
#define FF_API_BUFS_VDPAU (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_VOXWARE
#define FF_API_VOXWARE (LIBAVCODEC_VERSION_MAJOR < 56)
#endif
#endif /* AVCODEC_VERSION_H */

View File

@ -1,168 +0,0 @@
/*
* Copyright (C) 2003 Ivan Kalvachev
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_XVMC_H
#define AVCODEC_XVMC_H
/**
* @file
* @ingroup lavc_codec_hwaccel_xvmc
* Public libavcodec XvMC header.
*/
#include <X11/extensions/XvMC.h>
#include "avcodec.h"
/**
* @defgroup lavc_codec_hwaccel_xvmc XvMC
* @ingroup lavc_codec_hwaccel
*
* @{
*/
#define AV_XVMC_ID 0x1DC711C0 /**< special value to ensure that regular pixel routines haven't corrupted the struct;
the number is 1337 speak for the letters IDCT MCo (motion compensation) */
struct xvmc_pix_fmt {
/** The field contains the special constant value AV_XVMC_ID.
It is used as a test that the application correctly uses the API,
and that there is no corruption caused by pixel routines.
- application - set during initialization
- libavcodec - unchanged
*/
int xvmc_id;
/** Pointer to the block array allocated by XvMCCreateBlocks().
The array has to be freed by XvMCDestroyBlocks().
Each group of 64 values represents one data block of differential
pixel information (in MoCo mode) or coefficients for IDCT.
- application - set the pointer during initialization
- libavcodec - fills coefficients/pixel data into the array
*/
short* data_blocks;
/** Pointer to the macroblock description array allocated by
XvMCCreateMacroBlocks() and freed by XvMCDestroyMacroBlocks().
- application - set the pointer during initialization
- libavcodec - fills description data into the array
*/
XvMCMacroBlock* mv_blocks;
/** Number of macroblock descriptions that can be stored in the mv_blocks
array.
- application - set during initialization
- libavcodec - unchanged
*/
int allocated_mv_blocks;
/** Number of blocks that can be stored at once in the data_blocks array.
- application - set during initialization
- libavcodec - unchanged
*/
int allocated_data_blocks;
/** Indicate that the hardware would interpret data_blocks as IDCT
coefficients and perform IDCT on them.
- application - set during initialization
- libavcodec - unchanged
*/
int idct;
/** In MoCo mode it indicates that intra macroblocks are assumed to be in
unsigned format; same as the XVMC_INTRA_UNSIGNED flag.
- application - set during initialization
- libavcodec - unchanged
*/
int unsigned_intra;
/** Pointer to the surface allocated by XvMCCreateSurface().
It has to be freed by XvMCDestroySurface() on application exit.
It identifies the frame and its state on the video hardware.
- application - set during initialization
- libavcodec - unchanged
*/
XvMCSurface* p_surface;
/** Set by the decoder before calling ff_draw_horiz_band(),
needed by the XvMCRenderSurface function. */
//@{
/** Pointer to the surface used as past reference
- application - unchanged
- libavcodec - set
*/
XvMCSurface* p_past_surface;
/** Pointer to the surface used as future reference
- application - unchanged
- libavcodec - set
*/
XvMCSurface* p_future_surface;
/** top/bottom field or frame
- application - unchanged
- libavcodec - set
*/
unsigned int picture_structure;
/** XVMC_SECOND_FIELD - 1st or 2nd field in the sequence
- application - unchanged
- libavcodec - set
*/
unsigned int flags;
//@}
/** Number of macroblock descriptions in the mv_blocks array
that have already been passed to the hardware.
- application - zeroes it on get_buffer().
A successful ff_draw_horiz_band() may increment it
with filled_mv_blocks_num or zero both.
- libavcodec - unchanged
*/
int start_mv_blocks_num;
/** Number of new macroblock descriptions in the mv_blocks array (after
start_mv_blocks_num) that are filled by libavcodec and have to be
passed to the hardware.
- application - zeroes it on get_buffer() or after successful
ff_draw_horiz_band().
- libavcodec - increment with one of each stored MB
*/
int filled_mv_blocks_num;
/** Number of the next free data block; one data block consists of
64 short values in the data_blocks array.
All blocks before this one have already been claimed by placing their
position into the corresponding block description structure field,
that are part of the mv_blocks array.
- application - zeroes it on get_buffer().
A successful ff_draw_horiz_band() may zero it together
with start_mv_blocks_num.
- libavcodec - each decoded macroblock increases it by the number
of coded blocks it contains.
*/
int next_free_data_block_num;
};
/**
* @}
*/
#endif /* AVCODEC_XVMC_H */

View File

@ -1,481 +0,0 @@
/*
* copyright (c) 2001 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_AVIO_H
#define AVFORMAT_AVIO_H
/**
* @file
* @ingroup lavf_io
* Buffered I/O operations
*/
#include <stdint.h>
#include "libavutil/common.h"
#include "libavutil/dict.h"
#include "libavutil/log.h"
#include "libavformat/version.h"
#define AVIO_SEEKABLE_NORMAL 0x0001 /**< Seeking works like for a local file */
/**
* Callback for checking whether to abort blocking functions.
* AVERROR_EXIT is returned in this case by the interrupted
* function. During blocking operations, callback is called with
* opaque as parameter. If the callback returns 1, the
* blocking operation will be aborted.
*
* No members can be added to this struct without a major bump, if
* new elements have been added after this struct in AVFormatContext
* or AVIOContext.
*/
typedef struct AVIOInterruptCB {
int (*callback)(void*);
void *opaque;
} AVIOInterruptCB;
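/**
 * A minimal sketch of an interrupt callback (assumes the application keeps
 * an abort flag it can set from another thread):
 * @code
 * static volatile int abort_requested;
 *
 * static int interrupt_cb(void *opaque)
 * {
 *     return abort_requested;            // non-zero aborts the blocking call
 * }
 *
 * static const AVIOInterruptCB int_cb = { interrupt_cb, NULL };
 * // pass &int_cb to avio_open2(), declared further below
 * @endcode
 */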
/**
* Bytestream IO Context.
* New fields can be added to the end with minor version bumps.
* Removal, reordering and changes to existing fields require a major
* version bump.
* sizeof(AVIOContext) must not be used outside libav*.
*
* @note None of the function pointers in AVIOContext should be called
* directly, they should only be set by the client application
* when implementing custom I/O. Normally these are set to the
* function pointers specified in avio_alloc_context()
*/
typedef struct AVIOContext {
/**
* A class for private options.
*
* If this AVIOContext is created by avio_open2(), av_class is set and
* passes the options down to protocols.
*
* If this AVIOContext is manually allocated, then av_class may be set by
* the caller.
*
* warning -- this field can be NULL; be sure not to pass this AVIOContext
* to any av_opt_* functions in that case.
*/
const AVClass *av_class;
unsigned char *buffer; /**< Start of the buffer. */
int buffer_size; /**< Maximum buffer size */
unsigned char *buf_ptr; /**< Current position in the buffer */
unsigned char *buf_end; /**< End of the data, may be less than
buffer+buffer_size if the read function returned
less data than requested, e.g. for streams where
no more data has been received yet. */
void *opaque; /**< A private pointer, passed to the read/write/seek/...
functions. */
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size);
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size);
int64_t (*seek)(void *opaque, int64_t offset, int whence);
int64_t pos; /**< position in the file of the current buffer */
int must_flush; /**< true if the next seek should flush */
int eof_reached; /**< true if eof reached */
int write_flag; /**< true if open for writing */
int max_packet_size;
unsigned long checksum;
unsigned char *checksum_ptr;
unsigned long (*update_checksum)(unsigned long checksum, const uint8_t *buf, unsigned int size);
int error; /**< contains the error code or 0 if no error happened */
/**
* Pause or resume playback for network streaming protocols - e.g. MMS.
*/
int (*read_pause)(void *opaque, int pause);
/**
* Seek to a given timestamp in stream with the specified stream_index.
* Needed for some network streaming protocols which don't support seeking
* to byte position.
*/
int64_t (*read_seek)(void *opaque, int stream_index,
int64_t timestamp, int flags);
/**
* A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable.
*/
int seekable;
/**
* max filesize, used to limit allocations
* This field is internal to libavformat and access from outside is not allowed.
*/
int64_t maxsize;
/**
* avio_read and avio_write should if possible be satisfied directly
* instead of going through a buffer, and avio_seek will always
* call the underlying seek function directly.
*/
int direct;
/**
* Bytes read statistic
* This field is internal to libavformat and access from outside is not allowed.
*/
int64_t bytes_read;
/**
* seek statistic
* This field is internal to libavformat and access from outside is not allowed.
*/
int seek_count;
/**
* writeout statistic
* This field is internal to libavformat and access from outside is not allowed.
*/
int writeout_count;
} AVIOContext;
/* unbuffered I/O */
/**
* Return AVIO_FLAG_* access flags corresponding to the access permissions
* of the resource in url, or a negative value corresponding to an
* AVERROR code in case of failure. The returned access flags are
* masked by the value in flags.
*
* @note This function is intrinsically unsafe, in the sense that the
* checked resource may change its existence or permission status from
* one call to another. Thus you should not trust the returned value,
* unless you are sure that no other processes are accessing the
* checked resource.
*/
int avio_check(const char *url, int flags);
/**
* Allocate and initialize an AVIOContext for buffered I/O. It must be later
* freed with av_free().
*
* @param buffer Memory block for input/output operations via AVIOContext.
* The buffer must be allocated with av_malloc() and friends.
* @param buffer_size The buffer size is very important for performance.
* For protocols with fixed blocksize it should be set to this blocksize.
* For others a typical size is a cache page, e.g. 4kb.
* @param write_flag Set to 1 if the buffer should be writable, 0 otherwise.
* @param opaque An opaque pointer to user-specific data.
* @param read_packet A function for refilling the buffer, may be NULL.
* @param write_packet A function for writing the buffer contents, may be NULL.
* The function may not change the input buffer's contents.
* @param seek A function for seeking to specified byte position, may be NULL.
*
* @return Allocated AVIOContext or NULL on failure.
*/
AVIOContext *avio_alloc_context(
unsigned char *buffer,
int buffer_size,
int write_flag,
void *opaque,
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size),
int64_t (*seek)(void *opaque, int64_t offset, int whence));
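/**
 * A minimal sketch of buffered reading from a caller-owned memory block via
 * a custom read callback (mem and mem_size are assumed to be provided by
 * the caller and to outlive the context; error checks omitted):
 * @code
 * struct mem_reader { const uint8_t *data; int size, pos; };
 *
 * static int mem_read(void *opaque, uint8_t *buf, int buf_size)
 * {
 *     struct mem_reader *r = opaque;
 *     int n = FFMIN(buf_size, r->size - r->pos);
 *     memcpy(buf, r->data + r->pos, n);
 *     r->pos += n;
 *     return n;                          // 0 once the memory block is exhausted
 * }
 *
 * struct mem_reader r = { mem, mem_size, 0 };
 * unsigned char *iobuf = av_malloc(4096);           // must come from av_malloc(), see above
 * AVIOContext *pb = avio_alloc_context(iobuf, 4096, 0, &r, mem_read, NULL, NULL);
 * @endcode
 */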
void avio_w8(AVIOContext *s, int b);
void avio_write(AVIOContext *s, const unsigned char *buf, int size);
void avio_wl64(AVIOContext *s, uint64_t val);
void avio_wb64(AVIOContext *s, uint64_t val);
void avio_wl32(AVIOContext *s, unsigned int val);
void avio_wb32(AVIOContext *s, unsigned int val);
void avio_wl24(AVIOContext *s, unsigned int val);
void avio_wb24(AVIOContext *s, unsigned int val);
void avio_wl16(AVIOContext *s, unsigned int val);
void avio_wb16(AVIOContext *s, unsigned int val);
/**
* Write a NULL-terminated string.
* @return number of bytes written.
*/
int avio_put_str(AVIOContext *s, const char *str);
/**
* Convert an UTF-8 string to UTF-16LE and write it.
* @return number of bytes written.
*/
int avio_put_str16le(AVIOContext *s, const char *str);
/**
* Passing this as the "whence" parameter to a seek function causes it to
* return the filesize without seeking anywhere. Supporting this is optional.
* If it is not supported then the seek function will return <0.
*/
#define AVSEEK_SIZE 0x10000
/**
* ORing this flag into the "whence" parameter of a seek function causes it to
* seek by any means (like reopening and linear reading) or other normally unreasonable
* means that can be extremely slow.
* This may be ignored by the seek code.
*/
#define AVSEEK_FORCE 0x20000
/**
* fseek() equivalent for AVIOContext.
* @return new position or AVERROR.
*/
int64_t avio_seek(AVIOContext *s, int64_t offset, int whence);
/**
* Skip given number of bytes forward
* @return new position or AVERROR.
*/
int64_t avio_skip(AVIOContext *s, int64_t offset);
/**
* ftell() equivalent for AVIOContext.
* @return position or AVERROR.
*/
static av_always_inline int64_t avio_tell(AVIOContext *s)
{
return avio_seek(s, 0, SEEK_CUR);
}
/**
* Get the filesize.
* @return filesize or AVERROR
*/
int64_t avio_size(AVIOContext *s);
/**
* feof() equivalent for AVIOContext.
* @return non-zero if and only if end of file has been reached
*/
int url_feof(AVIOContext *s);
/** @warning currently size is limited */
int avio_printf(AVIOContext *s, const char *fmt, ...) av_printf_format(2, 3);
/**
* Force flushing of buffered data to the output s.
*
* Force the buffered data to be immediately written to the output,
* without waiting for the internal buffer to fill.
*/
void avio_flush(AVIOContext *s);
/**
* Read size bytes from AVIOContext into buf.
* @return number of bytes read or AVERROR
*/
int avio_read(AVIOContext *s, unsigned char *buf, int size);
/**
* @name Functions for reading from AVIOContext
* @{
*
* @note return 0 if EOF, so you cannot use it if EOF handling is
* necessary
*/
int avio_r8 (AVIOContext *s);
unsigned int avio_rl16(AVIOContext *s);
unsigned int avio_rl24(AVIOContext *s);
unsigned int avio_rl32(AVIOContext *s);
uint64_t avio_rl64(AVIOContext *s);
unsigned int avio_rb16(AVIOContext *s);
unsigned int avio_rb24(AVIOContext *s);
unsigned int avio_rb32(AVIOContext *s);
uint64_t avio_rb64(AVIOContext *s);
/**
* @}
*/
/**
* Read a string from pb into buf. The reading will terminate when either
* a NULL character is encountered, maxlen bytes have been read, or nothing
* more can be read from pb. The result is guaranteed to be NULL-terminated; it
* will be truncated if buf is too small.
* Note that the string is not interpreted or validated in any way; it
* might get truncated in the middle of a sequence for multi-byte encodings.
*
* @return number of bytes read (is always <= maxlen).
* If reading ends on EOF or error, the return value will be one more than
* bytes actually read.
*/
int avio_get_str(AVIOContext *pb, int maxlen, char *buf, int buflen);
/**
* Read a UTF-16 string from pb and convert it to UTF-8.
* The reading will terminate when either a null or invalid character was
* encountered or maxlen bytes have been read.
* @return number of bytes read (is always <= maxlen)
*/
int avio_get_str16le(AVIOContext *pb, int maxlen, char *buf, int buflen);
int avio_get_str16be(AVIOContext *pb, int maxlen, char *buf, int buflen);
/**
* @name URL open modes
* The flags argument to avio_open must be one of the following
* constants, optionally ORed with other flags.
* @{
*/
#define AVIO_FLAG_READ 1 /**< read-only */
#define AVIO_FLAG_WRITE 2 /**< write-only */
#define AVIO_FLAG_READ_WRITE (AVIO_FLAG_READ|AVIO_FLAG_WRITE) /**< read-write pseudo flag */
/**
* @}
*/
/**
* Use non-blocking mode.
* If this flag is set, operations on the context will return
* AVERROR(EAGAIN) if they can not be performed immediately.
* If this flag is not set, operations on the context will never return
* AVERROR(EAGAIN).
* Note that this flag does not affect the opening/connecting of the
* context. Connecting a protocol will always block if necessary (e.g. on
* network protocols) but never hang (e.g. on busy devices).
* Warning: non-blocking protocol support is a work in progress; this flag may be
* silently ignored.
*/
#define AVIO_FLAG_NONBLOCK 8
/**
* Use direct mode.
* avio_read and avio_write should if possible be satisfied directly
* instead of going through a buffer, and avio_seek will always
* call the underlying seek function directly.
*/
#define AVIO_FLAG_DIRECT 0x8000
/**
* Create and initialize a AVIOContext for accessing the
* resource indicated by url.
* @note When the resource indicated by url has been opened in
* read+write mode, the AVIOContext can be used only for writing.
*
* @param s Used to return the pointer to the created AVIOContext.
* In case of failure the pointed to value is set to NULL.
* @param flags flags which control how the resource indicated by url
* is to be opened
* @return >= 0 in case of success, a negative value corresponding to an
* AVERROR code in case of failure
*/
int avio_open(AVIOContext **s, const char *url, int flags);
/**
* Create and initialize a AVIOContext for accessing the
* resource indicated by url.
* @note When the resource indicated by url has been opened in
* read+write mode, the AVIOContext can be used only for writing.
*
* @param s Used to return the pointer to the created AVIOContext.
* In case of failure the pointed to value is set to NULL.
* @param flags flags which control how the resource indicated by url
* is to be opened
* @param int_cb an interrupt callback to be used at the protocols level
* @param options A dictionary filled with protocol-private options. On return
* this parameter will be destroyed and replaced with a dict containing options
* that were not found. May be NULL.
* @return >= 0 in case of success, a negative value corresponding to an
* AVERROR code in case of failure
*/
int avio_open2(AVIOContext **s, const char *url, int flags,
const AVIOInterruptCB *int_cb, AVDictionary **options);
/**
* Close the resource accessed by the AVIOContext s and free it.
* This function can only be used if s was opened by avio_open().
*
* The internal buffer is automatically flushed before closing the
* resource.
*
* @return 0 on success, an AVERROR < 0 on error.
* @see avio_closep
*/
int avio_close(AVIOContext *s);
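/**
 * A minimal sketch reading a whole resource with the functions above
 * (assumes a URL handled by one of the built-in protocols; error handling
 * reduced to the open check):
 * @code
 * AVIOContext *pb = NULL;
 * uint8_t chunk[4096];
 * int n;
 *
 * if (avio_open(&pb, "file:input.bin", AVIO_FLAG_READ) < 0)
 *     return;
 * while ((n = avio_read(pb, chunk, sizeof(chunk))) > 0)
 *     ;                                  // consume n bytes of chunk here
 * avio_close(pb);
 * @endcode
 */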
/**
* Close the resource accessed by the AVIOContext *s, free it
* and set the pointer pointing to it to NULL.
* This function can only be used if s was opened by avio_open().
*
* The internal buffer is automatically flushed before closing the
* resource.
*
* @return 0 on success, an AVERROR < 0 on error.
* @see avio_close
*/
int avio_closep(AVIOContext **s);
/**
* Open a write only memory stream.
*
* @param s new IO context
* @return zero if no error.
*/
int avio_open_dyn_buf(AVIOContext **s);
/**
* Return the written size and a pointer to the buffer. The buffer
* must be freed with av_free().
* Padding of FF_INPUT_BUFFER_PADDING_SIZE is added to the buffer.
*
* @param s IO context
* @param pbuffer pointer to a byte buffer
* @return the length of the byte buffer
*/
int avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer);
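/**
 * A minimal sketch of the dynamic buffer pair above (payload and
 * payload_size are assumed to be provided by the caller; the written data
 * ends up in a buffer the caller must free):
 * @code
 * AVIOContext *dyn = NULL;
 * uint8_t *out = NULL;
 * int out_size;
 *
 * avio_open_dyn_buf(&dyn);
 * avio_write(dyn, payload, payload_size);     // any avio_w* call works here
 * out_size = avio_close_dyn_buf(dyn, &out);
 * // ... use out[0 .. out_size) ...
 * av_free(out);
 * @endcode
 */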
/**
* Iterate through names of available protocols.
*
* @param opaque A private pointer representing current protocol.
* It must be a pointer to NULL on first iteration and will
* be updated by successive calls to avio_enum_protocols.
* @param output If set to 1, iterate over output protocols,
* otherwise over input protocols.
*
* @return A static string containing the name of current protocol or NULL
*/
const char *avio_enum_protocols(void **opaque, int output);
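/**
 * A minimal sketch listing the available input protocols with the function
 * above (printing is just for illustration):
 * @code
 * void *opaque = NULL;
 * const char *name;
 * while ((name = avio_enum_protocols(&opaque, 0)))
 *     printf("%s\n", name);
 * @endcode
 */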
/**
* Pause and resume playing - only meaningful if using a network streaming
* protocol (e.g. MMS).
* @param pause 1 for pause, 0 for resume
*/
int avio_pause(AVIOContext *h, int pause);
/**
* Seek to a given timestamp relative to some component stream.
* Only meaningful if using a network streaming protocol (e.g. MMS.).
* @param stream_index The stream index that the timestamp is relative to.
* If stream_index is (-1) the timestamp should be in AV_TIME_BASE
* units from the beginning of the presentation.
* If a stream_index >= 0 is used and the protocol does not support
* seeking based on component streams, the call will fail.
* @param timestamp timestamp in AVStream.time_base units
* or if there is no stream specified then in AV_TIME_BASE units.
* @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE
* and AVSEEK_FLAG_ANY. The protocol may silently ignore
* AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will
* fail if used and not supported.
* @return >= 0 on success
* @see AVInputFormat::read_seek
*/
int64_t avio_seek_time(AVIOContext *h, int stream_index,
int64_t timestamp, int flags);
#endif /* AVFORMAT_AVIO_H */

View File

@ -1,76 +0,0 @@
/*
* Version macros.
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_VERSION_H
#define AVFORMAT_VERSION_H
/**
* @file
* @ingroup libavf
* Libavformat version macros
*/
#include "libavutil/avutil.h"
#define LIBAVFORMAT_VERSION_MAJOR 55
#define LIBAVFORMAT_VERSION_MINOR 19
#define LIBAVFORMAT_VERSION_MICRO 104
#define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \
LIBAVFORMAT_VERSION_MICRO)
#define LIBAVFORMAT_VERSION AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \
LIBAVFORMAT_VERSION_MICRO)
#define LIBAVFORMAT_BUILD LIBAVFORMAT_VERSION_INT
#define LIBAVFORMAT_IDENT "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION)
/**
* FF_API_* defines may be placed below to indicate public API that will be
* dropped at a future version bump. The defines themselves are not part of
* the public API and may change, break or disappear at any time.
*/
#ifndef FF_API_ALLOC_OUTPUT_CONTEXT
#define FF_API_ALLOC_OUTPUT_CONTEXT (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_FORMAT_PARAMETERS
#define FF_API_FORMAT_PARAMETERS (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_NEW_STREAM
#define FF_API_NEW_STREAM (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_SET_PTS_INFO
#define FF_API_SET_PTS_INFO (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_CLOSE_INPUT_FILE
#define FF_API_CLOSE_INPUT_FILE (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_READ_PACKET
#define FF_API_READ_PACKET (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_ASS_SSA
#define FF_API_ASS_SSA (LIBAVFORMAT_VERSION_MAJOR < 56)
#endif
#ifndef FF_API_R_FRAME_RATE
#define FF_API_R_FRAME_RATE 1
#endif
#endif /* AVFORMAT_VERSION_H */

View File

@ -1,52 +0,0 @@
/*
* copyright (c) 2006 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_ADLER32_H
#define AVUTIL_ADLER32_H
#include <stdint.h>
#include "attributes.h"
/**
* @defgroup lavu_adler32 Adler32
* @ingroup lavu_crypto
* @{
*/
/**
* Calculate the Adler32 checksum of a buffer.
*
* Passing the return value to a subsequent av_adler32_update() call
* allows the checksum of multiple buffers to be calculated as though
* they were concatenated.
*
* @param adler initial checksum value
* @param buf pointer to input buffer
* @param len size of input buffer
* @return updated checksum
*/
unsigned long av_adler32_update(unsigned long adler, const uint8_t *buf,
unsigned int len) av_pure;
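/**
 * A minimal sketch of chaining the checksum over two caller-provided
 * buffers (Adler-32 is conventionally seeded with 1, which is assumed
 * here):
 * @code
 * unsigned long sum = 1;
 * sum = av_adler32_update(sum, part1, part1_len);
 * sum = av_adler32_update(sum, part2, part2_len);   // same result as checksumming the concatenation
 * @endcode
 */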
/**
* @}
*/
#endif /* AVUTIL_ADLER32_H */

View File

@ -1,65 +0,0 @@
/*
* copyright (c) 2007 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AES_H
#define AVUTIL_AES_H
#include <stdint.h>
#include "attributes.h"
#include "version.h"
/**
* @defgroup lavu_aes AES
* @ingroup lavu_crypto
* @{
*/
extern const int av_aes_size;
struct AVAES;
/**
* Allocate an AVAES context.
*/
struct AVAES *av_aes_alloc(void);
/**
* Initialize an AVAES context.
* @param key_bits 128, 192 or 256
* @param decrypt 0 for encryption, 1 for decryption
*/
int av_aes_init(struct AVAES *a, const uint8_t *key, int key_bits, int decrypt);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
* @param count number of 16 byte blocks
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param iv initialization vector for CBC mode, if NULL then ECB will be used
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_aes_crypt(struct AVAES *a, uint8_t *dst, const uint8_t *src, int count, uint8_t *iv, int decrypt);
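/**
 * A minimal sketch encrypting a single 16-byte block in ECB mode (assumes a
 * 128-bit key and 16-byte src/dst buffers supplied by the caller; the
 * context can be released with av_free()):
 * @code
 * struct AVAES *aes = av_aes_alloc();
 * av_aes_init(aes, key, 128, 0);             // 0 = encryption
 * av_aes_crypt(aes, dst, src, 1, NULL, 0);   // 1 block, NULL iv selects ECB
 * av_free(aes);
 * @endcode
 */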
/**
* @}
*/
#endif /* AVUTIL_AES_H */

View File

@ -1,160 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Macro definitions for various function/variable attributes
*/
#ifndef AVUTIL_ATTRIBUTES_H
#define AVUTIL_ATTRIBUTES_H
#ifdef __GNUC__
# define AV_GCC_VERSION_AT_LEAST(x,y) (__GNUC__ > x || __GNUC__ == x && __GNUC_MINOR__ >= y)
#else
# define AV_GCC_VERSION_AT_LEAST(x,y) 0
#endif
#ifndef av_always_inline
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_always_inline __attribute__((always_inline)) inline
#elif defined(_MSC_VER)
# define av_always_inline __forceinline
#else
# define av_always_inline inline
#endif
#endif
#ifndef av_extern_inline
#if defined(__ICL) && __ICL >= 1210 || defined(__GNUC_STDC_INLINE__)
# define av_extern_inline extern inline
#else
# define av_extern_inline inline
#endif
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_noinline __attribute__((noinline))
#elif defined(_MSC_VER)
# define av_noinline __declspec(noinline)
#else
# define av_noinline
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_pure __attribute__((pure))
#else
# define av_pure
#endif
#if AV_GCC_VERSION_AT_LEAST(2,6)
# define av_const __attribute__((const))
#else
# define av_const
#endif
#if AV_GCC_VERSION_AT_LEAST(4,3)
# define av_cold __attribute__((cold))
#else
# define av_cold
#endif
#if AV_GCC_VERSION_AT_LEAST(4,1)
# define av_flatten __attribute__((flatten))
#else
# define av_flatten
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define attribute_deprecated __attribute__((deprecated))
#elif defined(_MSC_VER)
# define attribute_deprecated __declspec(deprecated)
#else
# define attribute_deprecated
#endif
/**
* Disable warnings about deprecated features
* This is useful for sections of code kept for backward compatibility and
* scheduled for removal.
*/
#ifndef AV_NOWARN_DEPRECATED
#if AV_GCC_VERSION_AT_LEAST(4,6)
# define AV_NOWARN_DEPRECATED(code) \
_Pragma("GCC diagnostic push") \
_Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") \
code \
_Pragma("GCC diagnostic pop")
#elif defined(_MSC_VER)
# define AV_NOWARN_DEPRECATED(code) \
__pragma(warning(push)) \
__pragma(warning(disable : 4996)) \
code; \
__pragma(warning(pop))
#else
# define AV_NOWARN_DEPRECATED(code) code
#endif
#endif
#if defined(__GNUC__)
# define av_unused __attribute__((unused))
#else
# define av_unused
#endif
/**
* Mark a variable as used and prevent the compiler from optimizing it
* away. This is useful for variables accessed only from inline
* assembler without the compiler being aware of it.
*/
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_used __attribute__((used))
#else
# define av_used
#endif
#if AV_GCC_VERSION_AT_LEAST(3,3)
# define av_alias __attribute__((may_alias))
#else
# define av_alias
#endif
#if defined(__GNUC__) && !defined(__INTEL_COMPILER) && !defined(__clang__)
# define av_uninit(x) x=x
#else
# define av_uninit(x) x
#endif
#ifdef __GNUC__
# define av_builtin_constant_p __builtin_constant_p
# define av_printf_format(fmtpos, attrpos) __attribute__((__format__(__printf__, fmtpos, attrpos)))
#else
# define av_builtin_constant_p(x) 0
# define av_printf_format(fmtpos, attrpos)
#endif
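/**
 * A small sketch of how these macros are typically attached to declarations
 * (the functions shown are hypothetical, used only to illustrate placement):
 * @code
 * int my_clip(int v) av_const;                                          // result depends on arguments only
 * void my_log(void *ctx, const char *fmt, ...) av_printf_format(2, 3);  // fmt is argument 2
 * attribute_deprecated int my_old_api(void);                            // warns at call sites
 * @endcode
 */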
#if AV_GCC_VERSION_AT_LEAST(2,5)
# define av_noreturn __attribute__((noreturn))
#else
# define av_noreturn
#endif
#endif /* AVUTIL_ATTRIBUTES_H */

View File

@ -1,149 +0,0 @@
/*
* Audio FIFO
* Copyright (c) 2012 Justin Ruggles <justin.ruggles@gmail.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Audio FIFO Buffer
*/
#ifndef AVUTIL_AUDIO_FIFO_H
#define AVUTIL_AUDIO_FIFO_H
#include "avutil.h"
#include "fifo.h"
#include "samplefmt.h"
/**
* @addtogroup lavu_audio
* @{
*/
/**
* Context for an Audio FIFO Buffer.
*
* - Operates at the sample level rather than the byte level.
* - Supports multiple channels with either planar or packed sample format.
* - Automatic reallocation when writing to a full buffer.
*/
typedef struct AVAudioFifo AVAudioFifo;
/**
* Free an AVAudioFifo.
*
* @param af AVAudioFifo to free
*/
void av_audio_fifo_free(AVAudioFifo *af);
/**
* Allocate an AVAudioFifo.
*
* @param sample_fmt sample format
* @param channels number of channels
* @param nb_samples initial allocation size, in samples
* @return newly allocated AVAudioFifo, or NULL on error
*/
AVAudioFifo *av_audio_fifo_alloc(enum AVSampleFormat sample_fmt, int channels,
int nb_samples);
/**
* Reallocate an AVAudioFifo.
*
* @param af AVAudioFifo to reallocate
* @param nb_samples new allocation size, in samples
* @return 0 if OK, or negative AVERROR code on failure
*/
int av_audio_fifo_realloc(AVAudioFifo *af, int nb_samples);
/**
* Write data to an AVAudioFifo.
*
* The AVAudioFifo will be reallocated automatically if the available space
* is less than nb_samples.
*
* @see enum AVSampleFormat
* The documentation for AVSampleFormat describes the data layout.
*
* @param af AVAudioFifo to write to
* @param data audio data plane pointers
* @param nb_samples number of samples to write
* @return number of samples actually written, or negative AVERROR
* code on failure. If successful, the number of samples
* actually written will always be nb_samples.
*/
int av_audio_fifo_write(AVAudioFifo *af, void **data, int nb_samples);
/**
* Read data from an AVAudioFifo.
*
* @see enum AVSampleFormat
* The documentation for AVSampleFormat describes the data layout.
*
* @param af AVAudioFifo to read from
* @param data audio data plane pointers
* @param nb_samples number of samples to read
* @return number of samples actually read, or negative AVERROR code
* on failure. The number of samples actually read will not
* be greater than nb_samples, and will only be less than
* nb_samples if av_audio_fifo_size is less than nb_samples.
*/
int av_audio_fifo_read(AVAudioFifo *af, void **data, int nb_samples);
/**
* Drain data from an AVAudioFifo.
*
* Removes the data without reading it.
*
* @param af AVAudioFifo to drain
* @param nb_samples number of samples to drain
* @return 0 if OK, or negative AVERROR code on failure
*/
int av_audio_fifo_drain(AVAudioFifo *af, int nb_samples);
/**
* Reset the AVAudioFifo buffer.
*
* This empties all data in the buffer.
*
* @param af AVAudioFifo to reset
*/
void av_audio_fifo_reset(AVAudioFifo *af);
/**
* Get the current number of samples in the AVAudioFifo available for reading.
*
* @param af the AVAudioFifo to query
* @return number of samples available for reading
*/
int av_audio_fifo_size(AVAudioFifo *af);
/**
* Get the current number of samples in the AVAudioFifo available for writing.
*
* @param af the AVAudioFifo to query
* @return number of samples available for writing
*/
int av_audio_fifo_space(AVAudioFifo *af);
/**
* @}
*/
#endif /* AVUTIL_AUDIO_FIFO_H */
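
A minimal usage sketch of the AVAudioFifo API declared above, assuming packed (interleaved) 16-bit stereo samples and omitting most error handling.

#include <stdio.h>
#include <stdint.h>
#include "libavutil/audio_fifo.h"
#include "libavutil/samplefmt.h"

int main(void)
{
    /* FIFO for interleaved 16-bit stereo, with an initial capacity of 1024 samples */
    AVAudioFifo *fifo = av_audio_fifo_alloc(AV_SAMPLE_FMT_S16, 2, 1024);
    if (!fifo)
        return 1;

    int16_t samples[2 * 256] = {0};          /* 256 packed stereo frames of silence */
    void *data[1] = { samples };             /* packed formats use a single plane */

    int written = av_audio_fifo_write(fifo, data, 256);
    int avail   = av_audio_fifo_size(fifo);  /* samples currently available for reading */
    int read    = av_audio_fifo_read(fifo, data, 128);

    printf("written=%d available=%d read=%d\n", written, avail, read);
    av_audio_fifo_free(fifo);
    return 0;
}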

View File

@ -1,6 +0,0 @@
#include "version.h"
#if FF_API_AUDIOCONVERT
#include "channel_layout.h"
#endif

View File

@ -1,66 +0,0 @@
/*
* copyright (c) 2010 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* simple assert() macros that are a bit more flexible than ISO C assert().
* @author Michael Niedermayer <michaelni@gmx.at>
*/
#ifndef AVUTIL_AVASSERT_H
#define AVUTIL_AVASSERT_H
#include <stdlib.h>
#include "avutil.h"
#include "log.h"
/**
* assert() equivalent, that is always enabled.
*/
#define av_assert0(cond) do { \
if (!(cond)) { \
av_log(NULL, AV_LOG_PANIC, "Assertion %s failed at %s:%d\n", \
AV_STRINGIFY(cond), __FILE__, __LINE__); \
abort(); \
} \
} while (0)
/**
 * assert() equivalent for code that does not lie in speed-critical paths.
 * These asserts can therefore be enabled without fear of losing speed.
*/
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 0
#define av_assert1(cond) av_assert0(cond)
#else
#define av_assert1(cond) ((void)0)
#endif
/**
 * assert() equivalent for code that does lie in speed-critical paths.
*/
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 1
#define av_assert2(cond) av_assert0(cond)
#else
#define av_assert2(cond) ((void)0)
#endif
#endif /* AVUTIL_AVASSERT_H */
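
A short sketch of how the three assertion levels above are meant to be used; checked_div() is a hypothetical helper.

#include "libavutil/avassert.h"

static int checked_div(int num, int den)
{
    av_assert0(den != 0);    /* always compiled in: logs file/line and aborts on failure */
    av_assert1(num >= 0);    /* enabled only when ASSERT_LEVEL > 0 */
    av_assert2(num < 1000);  /* enabled only when ASSERT_LEVEL > 1 (speed-critical code) */
    return num / den;
}

int main(void)
{
    return checked_div(10, 2) == 5 ? 0 : 1;
}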

View File

@ -1,8 +0,0 @@
/* Generated by ffconf */
#ifndef AVUTIL_AVCONFIG_H
#define AVUTIL_AVCONFIG_H
#define AV_HAVE_BIGENDIAN 0
#define AV_HAVE_FAST_UNALIGNED 0
#define AV_HAVE_INCOMPATIBLE_LIBAV_ABI 0
#define AV_HAVE_INCOMPATIBLE_FORK_ABI 0
#endif /* AVUTIL_AVCONFIG_H */

View File

@ -1,302 +0,0 @@
/*
* Copyright (c) 2007 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AVSTRING_H
#define AVUTIL_AVSTRING_H
#include <stddef.h>
#include "attributes.h"
/**
* @addtogroup lavu_string
* @{
*/
/**
* Return non-zero if pfx is a prefix of str. If it is, *ptr is set to
* the address of the first character in str after the prefix.
*
* @param str input string
* @param pfx prefix to test
* @param ptr updated if the prefix is matched inside str
* @return non-zero if the prefix matches, zero otherwise
*/
int av_strstart(const char *str, const char *pfx, const char **ptr);
/**
* Return non-zero if pfx is a prefix of str independent of case. If
* it is, *ptr is set to the address of the first character in str
* after the prefix.
*
* @param str input string
* @param pfx prefix to test
* @param ptr updated if the prefix is matched inside str
* @return non-zero if the prefix matches, zero otherwise
*/
int av_stristart(const char *str, const char *pfx, const char **ptr);
/**
* Locate the first case-independent occurrence in the string haystack
* of the string needle. A zero-length string needle is considered to
* match at the start of haystack.
*
* This function is a case-insensitive version of the standard strstr().
*
* @param haystack string to search in
* @param needle string to search for
* @return pointer to the located match within haystack
* or a null pointer if no match
*/
char *av_stristr(const char *haystack, const char *needle);
/**
* Locate the first occurrence of the string needle in the string haystack
* where not more than hay_length characters are searched. A zero-length
* string needle is considered to match at the start of haystack.
*
* This function is a length-limited version of the standard strstr().
*
* @param haystack string to search in
* @param needle string to search for
* @param hay_length length of string to search in
* @return pointer to the located match within haystack
* or a null pointer if no match
*/
char *av_strnstr(const char *haystack, const char *needle, size_t hay_length);
/**
* Copy the string src to dst, but no more than size - 1 bytes, and
* null-terminate dst.
*
* This function is the same as BSD strlcpy().
*
* @param dst destination buffer
* @param src source string
* @param size size of destination buffer
* @return the length of src
*
* @warning since the return value is the length of src, src absolutely
* _must_ be a properly 0-terminated string, otherwise this will read beyond
* the end of the buffer and possibly crash.
*/
size_t av_strlcpy(char *dst, const char *src, size_t size);
/**
* Append the string src to the string dst, but to a total length of
* no more than size - 1 bytes, and null-terminate dst.
*
* This function is similar to BSD strlcat(), but differs when
* size <= strlen(dst).
*
* @param dst destination buffer
* @param src source string
* @param size size of destination buffer
* @return the total length of src and dst
*
 * @warning since the return value uses the lengths of src and dst, both
 * absolutely _must_ be properly 0-terminated strings, otherwise this
 * will read beyond the end of the buffer and possibly crash.
*/
size_t av_strlcat(char *dst, const char *src, size_t size);
/**
* Append output to a string, according to a format. Never write out of
* the destination buffer, and always put a terminating 0 within
* the buffer.
* @param dst destination buffer (string to which the output is
* appended)
* @param size total size of the destination buffer
* @param fmt printf-compatible format string, specifying how the
* following parameters are used
* @return the length of the string that would have been generated
* if enough space had been available
*/
size_t av_strlcatf(char *dst, size_t size, const char *fmt, ...) av_printf_format(3, 4);
/**
 * Print arguments following the specified format into a large enough,
 * automatically allocated buffer. It is similar to GNU asprintf().
* @param fmt printf-compatible format string, specifying how the
* following parameters are used.
* @return the allocated string
* @note You have to free the string yourself with av_free().
*/
char *av_asprintf(const char *fmt, ...) av_printf_format(1, 2);
/**
 * Convert a number to an av_malloc()ed string.
*/
char *av_d2str(double d);
/**
 * Unescape the given string until a non-escaped terminating char,
 * and return the token corresponding to the unescaped string.
 *
 * The normal \ and ' escaping is supported. Leading and trailing
 * whitespace is removed, unless it is escaped with '\' or is
 * enclosed between ''.
*
* @param buf the buffer to parse, buf will be updated to point to the
* terminating char
* @param term a 0-terminated list of terminating chars
* @return the malloced unescaped string, which must be av_freed by
* the user, NULL in case of allocation failure
*/
char *av_get_token(const char **buf, const char *term);
/**
* Split the string into several tokens which can be accessed by
* successive calls to av_strtok().
*
* A token is defined as a sequence of characters not belonging to the
* set specified in delim.
*
* On the first call to av_strtok(), s should point to the string to
* parse, and the value of saveptr is ignored. In subsequent calls, s
* should be NULL, and saveptr should be unchanged since the previous
* call.
*
* This function is similar to strtok_r() defined in POSIX.1.
*
* @param s the string to parse, may be NULL
* @param delim 0-terminated list of token delimiters, must be non-NULL
* @param saveptr user-provided pointer which points to stored
* information necessary for av_strtok() to continue scanning the same
* string. saveptr is updated to point to the next character after the
* first delimiter found, or to NULL if the string was terminated
* @return the found token, or NULL when no token is found
*/
char *av_strtok(char *s, const char *delim, char **saveptr);
/**
* Locale-independent conversion of ASCII isdigit.
*/
int av_isdigit(int c);
/**
* Locale-independent conversion of ASCII isgraph.
*/
int av_isgraph(int c);
/**
* Locale-independent conversion of ASCII isspace.
*/
int av_isspace(int c);
/**
* Locale-independent conversion of ASCII characters to uppercase.
*/
static inline int av_toupper(int c)
{
if (c >= 'a' && c <= 'z')
c ^= 0x20;
return c;
}
/**
* Locale-independent conversion of ASCII characters to lowercase.
*/
static inline int av_tolower(int c)
{
if (c >= 'A' && c <= 'Z')
c ^= 0x20;
return c;
}
/**
* Locale-independent conversion of ASCII isxdigit.
*/
int av_isxdigit(int c);
/**
* Locale-independent case-insensitive compare.
* @note This means only ASCII-range characters are case-insensitive
*/
int av_strcasecmp(const char *a, const char *b);
/**
* Locale-independent case-insensitive compare.
* @note This means only ASCII-range characters are case-insensitive
*/
int av_strncasecmp(const char *a, const char *b, size_t n);
/**
* Thread safe basename.
* @param path the path, on DOS both \ and / are considered separators.
* @return pointer to the basename substring.
*/
const char *av_basename(const char *path);
/**
* Thread safe dirname.
* @param path the path, on DOS both \ and / are considered separators.
* @return the path with the separator replaced by the string terminator or ".".
* @note the function may change the input string.
*/
const char *av_dirname(char *path);
enum AVEscapeMode {
AV_ESCAPE_MODE_AUTO, ///< Use auto-selected escaping mode.
AV_ESCAPE_MODE_BACKSLASH, ///< Use backslash escaping.
AV_ESCAPE_MODE_QUOTE, ///< Use single-quote escaping.
};
/**
* Consider spaces special and escape them even in the middle of the
* string.
*
* This is equivalent to adding the whitespace characters to the special
* characters lists, except it is guaranteed to use the exact same list
* of whitespace characters as the rest of libavutil.
*/
#define AV_ESCAPE_FLAG_WHITESPACE 0x01
/**
* Escape only specified special characters.
* Without this flag, escape also any characters that may be considered
* special by av_get_token(), such as the single quote.
*/
#define AV_ESCAPE_FLAG_STRICT 0x02
/**
* Escape string in src, and put the escaped string in an allocated
* string in *dst, which must be freed with av_free().
*
* @param dst pointer where an allocated string is put
* @param src string to escape, must be non-NULL
* @param special_chars string containing the special characters which
* need to be escaped, can be NULL
* @param mode escape mode to employ, see AV_ESCAPE_MODE_* macros.
* Any unknown value for mode will be considered equivalent to
* AV_ESCAPE_MODE_BACKSLASH, but this behaviour can change without
* notice.
* @param flags flags which control how to escape, see AV_ESCAPE_FLAG_ macros
* @return the length of the allocated string, or a negative error code in case of error
* @see av_bprint_escape()
*/
int av_escape(char **dst, const char *src, const char *special_chars,
enum AVEscapeMode mode, int flags);
/**
* @}
*/
#endif /* AVUTIL_AVSTRING_H */
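
A small sketch exercising a few of the string helpers declared above; the buffer sizes and input strings are arbitrary demo values.

#include <stdio.h>
#include "libavutil/avstring.h"

int main(void)
{
    char buf[16];
    av_strlcpy(buf, "hello", sizeof(buf));       /* always 0-terminated, never overflows */
    av_strlcat(buf, ", world", sizeof(buf));

    const char *rest;
    if (av_strstart(buf, "hello", &rest))
        printf("after prefix: '%s'\n", rest);    /* prints ", world" */

    char csv[] = "a,b,,c";                       /* av_strtok() modifies its input in place */
    char *save = NULL;
    for (char *tok = av_strtok(csv, ",", &save); tok; tok = av_strtok(NULL, ",", &save))
        printf("token: %s\n", tok);
    return 0;
}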

View File

@ -1,320 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AVUTIL_H
#define AVUTIL_AVUTIL_H
/**
* @file
* external API header
*/
/**
* @mainpage
*
* @section ffmpeg_intro Introduction
*
* This document describes the usage of the different libraries
* provided by FFmpeg.
*
* @li @ref libavc "libavcodec" encoding/decoding library
* @li @ref lavfi "libavfilter" graph-based frame editing library
* @li @ref libavf "libavformat" I/O and muxing/demuxing library
* @li @ref lavd "libavdevice" special devices muxing/demuxing library
* @li @ref lavu "libavutil" common utility library
* @li @ref lswr "libswresample" audio resampling, format conversion and mixing
* @li @ref lpp "libpostproc" post processing library
* @li @ref lsws "libswscale" color conversion and scaling library
*
* @section ffmpeg_versioning Versioning and compatibility
*
* Each of the FFmpeg libraries contains a version.h header, which defines a
* major, minor and micro version number with the
* <em>LIBRARYNAME_VERSION_{MAJOR,MINOR,MICRO}</em> macros. The major version
* number is incremented with backward incompatible changes - e.g. removing
* parts of the public API, reordering public struct members, etc. The minor
* version number is incremented for backward compatible API changes or major
* new features - e.g. adding a new public function or a new decoder. The micro
* version number is incremented for smaller changes that a calling program
* might still want to check for - e.g. changing behavior in a previously
* unspecified situation.
*
* FFmpeg guarantees backward API and ABI compatibility for each library as long
* as its major version number is unchanged. This means that no public symbols
* will be removed or renamed. Types and names of the public struct members and
* values of public macros and enums will remain the same (unless they were
* explicitly declared as not part of the public API). Documented behavior will
* not change.
*
* In other words, any correct program that works with a given FFmpeg snapshot
* should work just as well without any changes with any later snapshot with the
* same major versions. This applies to both rebuilding the program against new
* FFmpeg versions or to replacing the dynamic FFmpeg libraries that a program
* links against.
*
* However, new public symbols may be added and new members may be appended to
* public structs whose size is not part of public ABI (most public structs in
* FFmpeg). New macros and enum values may be added. Behavior in undocumented
* situations may change slightly (and be documented). All those are accompanied
* by an entry in doc/APIchanges and incrementing either the minor or micro
* version number.
*/
/**
* @defgroup lavu Common utility functions
*
* @brief
* libavutil contains the code shared across all the other FFmpeg
* libraries
*
* @note In order to use the functions provided by avutil you must include
* the specific header.
*
* @{
*
* @defgroup lavu_crypto Crypto and Hashing
*
* @{
* @}
*
* @defgroup lavu_math Maths
* @{
*
* @}
*
* @defgroup lavu_string String Manipulation
*
* @{
*
* @}
*
* @defgroup lavu_mem Memory Management
*
* @{
*
* @}
*
* @defgroup lavu_data Data Structures
* @{
*
* @}
*
* @defgroup lavu_audio Audio related
*
* @{
*
* @}
*
* @defgroup lavu_error Error Codes
*
* @{
*
* @}
*
* @defgroup lavu_log Logging Facility
*
* @{
*
* @}
*
* @defgroup lavu_misc Other
*
* @{
*
* @defgroup lavu_internal Internal
*
* Not exported functions, for internal usage only
*
* @{
*
* @}
*/
/**
* @addtogroup lavu_ver
* @{
*/
/**
* Return the LIBAVUTIL_VERSION_INT constant.
*/
unsigned avutil_version(void);
/**
* Return the libavutil build-time configuration.
*/
const char *avutil_configuration(void);
/**
* Return the libavutil license.
*/
const char *avutil_license(void);
/**
* @}
*/
/**
* @addtogroup lavu_media Media Type
* @brief Media Type
*/
enum AVMediaType {
AVMEDIA_TYPE_UNKNOWN = -1, ///< Usually treated as AVMEDIA_TYPE_DATA
AVMEDIA_TYPE_VIDEO,
AVMEDIA_TYPE_AUDIO,
AVMEDIA_TYPE_DATA, ///< Opaque data information usually continuous
AVMEDIA_TYPE_SUBTITLE,
AVMEDIA_TYPE_ATTACHMENT, ///< Opaque data information usually sparse
AVMEDIA_TYPE_NB
};
/**
* Return a string describing the media_type enum, NULL if media_type
* is unknown.
*/
const char *av_get_media_type_string(enum AVMediaType media_type);
/**
* @defgroup lavu_const Constants
* @{
*
* @defgroup lavu_enc Encoding specific
*
 * @note these definitions should be moved to avcodec
* @{
*/
#define FF_LAMBDA_SHIFT 7
#define FF_LAMBDA_SCALE (1<<FF_LAMBDA_SHIFT)
#define FF_QP2LAMBDA 118 ///< factor to convert from H.263 QP to lambda
#define FF_LAMBDA_MAX (256*128-1)
#define FF_QUALITY_SCALE FF_LAMBDA_SCALE //FIXME maybe remove
/**
* @}
* @defgroup lavu_time Timestamp specific
*
* FFmpeg internal timebase and timestamp definitions
*
* @{
*/
/**
* @brief Undefined timestamp value
*
 * Usually reported by demuxers that work on containers which do not provide
 * either pts or dts.
*/
#define AV_NOPTS_VALUE ((int64_t)UINT64_C(0x8000000000000000))
/**
* Internal time base represented as integer
*/
#define AV_TIME_BASE 1000000
/**
* Internal time base represented as fractional value
*/
#define AV_TIME_BASE_Q (AVRational){1, AV_TIME_BASE}
/**
* @}
* @}
* @defgroup lavu_picture Image related
*
* AVPicture types, pixel formats and basic image planes manipulation.
*
* @{
*/
enum AVPictureType {
AV_PICTURE_TYPE_NONE = 0, ///< Undefined
AV_PICTURE_TYPE_I, ///< Intra
AV_PICTURE_TYPE_P, ///< Predicted
AV_PICTURE_TYPE_B, ///< Bi-dir predicted
AV_PICTURE_TYPE_S, ///< S(GMC)-VOP MPEG4
AV_PICTURE_TYPE_SI, ///< Switching Intra
AV_PICTURE_TYPE_SP, ///< Switching Predicted
AV_PICTURE_TYPE_BI, ///< BI type
};
/**
* Return a single letter to describe the given picture type
* pict_type.
*
 * @param[in] pict_type the picture type
 * @return a single character representing the picture type,
 *         '?' if pict_type is unknown
*/
char av_get_picture_type_char(enum AVPictureType pict_type);
/**
* @}
*/
#include "common.h"
#include "error.h"
#include "version.h"
#include "mathematics.h"
#include "rational.h"
#include "intfloat_readwrite.h"
#include "log.h"
#include "pixfmt.h"
/**
 * Return the default pointer x in case p is NULL.
*/
static inline void *av_x_if_null(const void *p, const void *x)
{
return (void *)(intptr_t)(p ? p : x);
}
/**
* Compute the length of an integer list.
*
* @param elsize size in bytes of each list element (only 1, 2, 4 or 8)
* @param term list terminator (usually 0 or -1)
* @param list pointer to the list
* @return length of the list, in elements, not counting the terminator
*/
unsigned av_int_list_length_for_size(unsigned elsize,
const void *list, uint64_t term) av_pure;
/**
* Compute the length of an integer list.
*
* @param term list terminator (usually 0 or -1)
* @param list pointer to the list
* @return length of the list, in elements, not counting the terminator
*/
#define av_int_list_length(list, term) \
av_int_list_length_for_size(sizeof(*(list)), list, term)
/**
* @}
* @}
*/
#endif /* AVUTIL_AVUTIL_H */
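
A minimal sketch querying the version/configuration helpers and the small enum-to-string utilities declared above.

#include <stdio.h>
#include "libavutil/avutil.h"

int main(void)
{
    printf("libavutil version : %u\n", avutil_version());
    printf("configuration     : %s\n", avutil_configuration());
    printf("media type        : %s\n", av_get_media_type_string(AVMEDIA_TYPE_VIDEO)); /* "video" */
    printf("picture type      : %c\n", av_get_picture_type_char(AV_PICTURE_TYPE_I));  /* 'I' */
    return 0;
}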

View File

@ -1,67 +0,0 @@
/*
* Copyright (c) 2006 Ryan Martell. (rdm4@martellventures.com)
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BASE64_H
#define AVUTIL_BASE64_H
#include <stdint.h>
/**
* @defgroup lavu_base64 Base64
* @ingroup lavu_crypto
* @{
*/
/**
* Decode a base64-encoded string.
*
* @param out buffer for decoded data
* @param in null-terminated input string
* @param out_size size in bytes of the out buffer, must be at
* least 3/4 of the length of in
* @return number of bytes written, or a negative value in case of
* invalid input
*/
int av_base64_decode(uint8_t *out, const char *in, int out_size);
/**
* Encode data to base64 and null-terminate.
*
* @param out buffer for encoded data
* @param out_size size in bytes of the out buffer (including the
* null terminator), must be at least AV_BASE64_SIZE(in_size)
* @param in input buffer containing the data to encode
* @param in_size size in bytes of the in buffer
* @return out or NULL in case of error
*/
char *av_base64_encode(char *out, int out_size, const uint8_t *in, int in_size);
/**
* Calculate the output size needed to base64-encode x bytes to a
* null-terminated string.
*/
#define AV_BASE64_SIZE(x) (((x)+2) / 3 * 4 + 1)
/**
* @}
*/
#endif /* AVUTIL_BASE64_H */
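
A short round-trip sketch of the Base64 helpers above, sizing the output buffer with AV_BASE64_SIZE(); the input string is an arbitrary demo value.

#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include "libavutil/base64.h"

int main(void)
{
    const uint8_t raw[] = "GameStream";
    int in_size = (int)strlen((const char *)raw);

    char encoded[AV_BASE64_SIZE(sizeof(raw))];   /* worst-case size, including the NUL terminator */
    if (!av_base64_encode(encoded, (int)sizeof(encoded), raw, in_size))
        return 1;

    uint8_t decoded[32];                         /* comfortably at least 3/4 of strlen(encoded) */
    int n = av_base64_decode(decoded, encoded, (int)sizeof(decoded));
    printf("encoded=%s decoded_bytes=%d\n", encoded, n);
    return n == in_size ? 0 : 1;
}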

View File

@ -1,77 +0,0 @@
/*
* Blowfish algorithm
* Copyright (c) 2012 Samuel Pitoiset
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BLOWFISH_H
#define AVUTIL_BLOWFISH_H
#include <stdint.h>
/**
* @defgroup lavu_blowfish Blowfish
* @ingroup lavu_crypto
* @{
*/
#define AV_BF_ROUNDS 16
typedef struct AVBlowfish {
uint32_t p[AV_BF_ROUNDS + 2];
uint32_t s[4][256];
} AVBlowfish;
/**
* Initialize an AVBlowfish context.
*
* @param ctx an AVBlowfish context
* @param key a key
* @param key_len length of the key
*/
void av_blowfish_init(struct AVBlowfish *ctx, const uint8_t *key, int key_len);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
*
* @param ctx an AVBlowfish context
 * @param xl pointer to the left four-byte half of the input block
 * @param xr pointer to the right four-byte half of the input block
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_blowfish_crypt_ecb(struct AVBlowfish *ctx, uint32_t *xl, uint32_t *xr,
int decrypt);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
*
* @param ctx an AVBlowfish context
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param count number of 8 byte blocks
* @param iv initialization vector for CBC mode, if NULL ECB will be used
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_blowfish_crypt(struct AVBlowfish *ctx, uint8_t *dst, const uint8_t *src,
int count, uint8_t *iv, int decrypt);
/**
* @}
*/
#endif /* AVUTIL_BLOWFISH_H */
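
A minimal ECB round-trip sketch of the Blowfish API above; the key and the 8-byte block are arbitrary demo values.

#include <stdio.h>
#include <stdint.h>
#include "libavutil/blowfish.h"

int main(void)
{
    static const uint8_t key[] = "not a real key";   /* demo key only */
    AVBlowfish bf;
    av_blowfish_init(&bf, key, (int)(sizeof(key) - 1));

    uint32_t xl = 0x01234567, xr = 0x89abcdef;       /* one 8-byte block as two 32-bit halves */
    av_blowfish_crypt_ecb(&bf, &xl, &xr, 0);         /* 0 = encrypt */
    printf("ciphertext: %08x%08x\n", (unsigned)xl, (unsigned)xr);

    av_blowfish_crypt_ecb(&bf, &xl, &xr, 1);         /* 1 = decrypt */
    printf("plaintext : %08x%08x\n", (unsigned)xl, (unsigned)xr);    /* original halves again */
    return 0;
}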

View File

@ -1,216 +0,0 @@
/*
* Copyright (c) 2012 Nicolas George
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BPRINT_H
#define AVUTIL_BPRINT_H
#include <stdarg.h>
#include "attributes.h"
#include "avstring.h"
/**
* Define a structure with extra padding to a fixed size
* This helps ensuring binary compatibility with future versions.
*/
#define FF_PAD_STRUCTURE(size, ...) \
__VA_ARGS__ \
char reserved_padding[size - sizeof(struct { __VA_ARGS__ })];
/**
* Buffer to print data progressively
*
* The string buffer grows as necessary and is always 0-terminated.
* The content of the string is never accessed, and thus is
* encoding-agnostic and can even hold binary data.
*
* Small buffers are kept in the structure itself, and thus require no
* memory allocation at all (unless the contents of the buffer is needed
* after the structure goes out of scope). This is almost as lightweight as
* declaring a local "char buf[512]".
*
* The length of the string can go beyond the allocated size: the buffer is
* then truncated, but the functions still keep account of the actual total
* length.
*
 * In other words, buf->len can be greater than buf->size and records the
 * total length of what would have been written to the buffer if there had
 * been enough memory.
*
* Append operations do not need to be tested for failure: if a memory
* allocation fails, data stop being appended to the buffer, but the length
* is still updated. This situation can be tested with
* av_bprint_is_complete().
*
* The size_max field determines several possible behaviours:
*
* size_max = -1 (= UINT_MAX) or any large value will let the buffer be
* reallocated as necessary, with an amortized linear cost.
*
* size_max = 0 prevents writing anything to the buffer: only the total
* length is computed. The write operations can then possibly be repeated in
* a buffer with exactly the necessary size
* (using size_init = size_max = len + 1).
*
* size_max = 1 is automatically replaced by the exact size available in the
* structure itself, thus ensuring no dynamic memory allocation. The
* internal buffer is large enough to hold a reasonable paragraph of text,
* such as the current paragraph.
*/
typedef struct AVBPrint {
FF_PAD_STRUCTURE(1024,
char *str; /**< string so far */
unsigned len; /**< length so far */
unsigned size; /**< allocated memory */
unsigned size_max; /**< maximum allocated memory */
char reserved_internal_buffer[1];
)
} AVBPrint;
/**
* Convenience macros for special values for av_bprint_init() size_max
* parameter.
*/
#define AV_BPRINT_SIZE_UNLIMITED ((unsigned)-1)
#define AV_BPRINT_SIZE_AUTOMATIC 1
#define AV_BPRINT_SIZE_COUNT_ONLY 0
/**
* Init a print buffer.
*
* @param buf buffer to init
* @param size_init initial size (including the final 0)
* @param size_max maximum size;
* 0 means do not write anything, just count the length;
* 1 is replaced by the maximum value for automatic storage;
* any large value means that the internal buffer will be
* reallocated as needed up to that limit; -1 is converted to
* UINT_MAX, the largest limit possible.
* Check also AV_BPRINT_SIZE_* macros.
*/
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max);
/**
* Init a print buffer using a pre-existing buffer.
*
* The buffer will not be reallocated.
*
* @param buf buffer structure to init
* @param buffer byte buffer to use for the string data
* @param size size of buffer
*/
void av_bprint_init_for_buffer(AVBPrint *buf, char *buffer, unsigned size);
/**
* Append a formatted string to a print buffer.
*/
void av_bprintf(AVBPrint *buf, const char *fmt, ...) av_printf_format(2, 3);
/**
* Append a formatted string to a print buffer.
*/
void av_vbprintf(AVBPrint *buf, const char *fmt, va_list vl_arg);
/**
* Append char c n times to a print buffer.
*/
void av_bprint_chars(AVBPrint *buf, char c, unsigned n);
/**
* Append data to a print buffer.
*
 * @param buf bprint buffer to use
 * @param data pointer to data
 * @param size size of data
*/
void av_bprint_append_data(AVBPrint *buf, const char *data, unsigned size);
struct tm;
/**
* Append a formatted date and time to a print buffer.
*
 * @param buf bprint buffer to use
 * @param fmt date and time format string, see strftime()
 * @param tm broken-down time structure to translate
*
* @note due to poor design of the standard strftime function, it may
* produce poor results if the format string expands to a very long text and
* the bprint buffer is near the limit stated by the size_max option.
*/
void av_bprint_strftime(AVBPrint *buf, const char *fmt, const struct tm *tm);
/**
* Allocate bytes in the buffer for external use.
*
* @param[in] buf buffer structure
* @param[in] size required size
* @param[out] mem pointer to the memory area
* @param[out] actual_size size of the memory area after allocation;
* can be larger or smaller than size
*/
void av_bprint_get_buffer(AVBPrint *buf, unsigned size,
unsigned char **mem, unsigned *actual_size);
/**
* Reset the string to "" but keep internal allocated data.
*/
void av_bprint_clear(AVBPrint *buf);
/**
* Test if the print buffer is complete (not truncated).
*
* It may have been truncated due to a memory allocation failure
* or the size_max limit (compare size and size_max if necessary).
*/
static inline int av_bprint_is_complete(AVBPrint *buf)
{
return buf->len < buf->size;
}
/**
* Finalize a print buffer.
*
* The print buffer can no longer be used afterwards,
* but the len and size fields are still valid.
*
 * @param[out] ret_str if not NULL, used to return a permanent copy of the
* buffer contents, or NULL if memory allocation fails;
* if NULL, the buffer is discarded and freed
* @return 0 for success or error code (probably AVERROR(ENOMEM))
*/
int av_bprint_finalize(AVBPrint *buf, char **ret_str);
/**
* Escape the content in src and append it to dstbuf.
*
* @param dstbuf already inited destination bprint buffer
* @param src string containing the text to escape
* @param special_chars string containing the special characters which
* need to be escaped, can be NULL
* @param mode escape mode to employ, see AV_ESCAPE_MODE_* macros.
* Any unknown value for mode will be considered equivalent to
* AV_ESCAPE_MODE_BACKSLASH, but this behaviour can change without
* notice.
* @param flags flags which control how to escape, see AV_ESCAPE_FLAG_* macros
*/
void av_bprint_escape(AVBPrint *dstbuf, const char *src, const char *special_chars,
enum AVEscapeMode mode, int flags);
#endif /* AVUTIL_BPRINT_H */
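
A minimal sketch of the AVBPrint workflow described above: init, append, check for truncation, then finalize into a heap-allocated copy (freed with av_free(), assuming libavutil's mem.h is available).

#include <stdio.h>
#include "libavutil/bprint.h"
#include "libavutil/mem.h"     /* for av_free() */

int main(void)
{
    AVBPrint bp;
    av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);    /* grow on demand */

    av_bprintf(&bp, "resolution=%dx%d", 1920, 1080);
    av_bprint_chars(&bp, '!', 3);

    if (!av_bprint_is_complete(&bp))                     /* only possible here on allocation failure */
        printf("truncated: full length would have been %u\n", bp.len);

    char *result = NULL;
    if (av_bprint_finalize(&bp, &result) >= 0 && result) {
        printf("%s\n", result);                          /* "resolution=1920x1080!!!" */
        av_free(result);
    }
    return 0;
}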

View File

@ -1,109 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* byte swapping routines
*/
#ifndef AVUTIL_BSWAP_H
#define AVUTIL_BSWAP_H
#include <stdint.h>
#include "libavutil/avconfig.h"
#include "attributes.h"
#ifdef HAVE_AV_CONFIG_H
#include "config.h"
#if ARCH_ARM
# include "arm/bswap.h"
#elif ARCH_AVR32
# include "avr32/bswap.h"
#elif ARCH_BFIN
# include "bfin/bswap.h"
#elif ARCH_SH4
# include "sh4/bswap.h"
#elif ARCH_X86
# include "x86/bswap.h"
#endif
#endif /* HAVE_AV_CONFIG_H */
#define AV_BSWAP16C(x) (((x) << 8 & 0xff00) | ((x) >> 8 & 0x00ff))
#define AV_BSWAP32C(x) (AV_BSWAP16C(x) << 16 | AV_BSWAP16C((x) >> 16))
#define AV_BSWAP64C(x) (AV_BSWAP32C(x) << 32 | AV_BSWAP32C((x) >> 32))
#define AV_BSWAPC(s, x) AV_BSWAP##s##C(x)
#ifndef av_bswap16
static av_always_inline av_const uint16_t av_bswap16(uint16_t x)
{
x= (x>>8) | (x<<8);
return x;
}
#endif
#ifndef av_bswap32
static av_always_inline av_const uint32_t av_bswap32(uint32_t x)
{
return AV_BSWAP32C(x);
}
#endif
#ifndef av_bswap64
static inline uint64_t av_const av_bswap64(uint64_t x)
{
return (uint64_t)av_bswap32(x) << 32 | av_bswap32(x >> 32);
}
#endif
// be2ne ... big-endian to native-endian
// le2ne ... little-endian to native-endian
#if AV_HAVE_BIGENDIAN
#define av_be2ne16(x) (x)
#define av_be2ne32(x) (x)
#define av_be2ne64(x) (x)
#define av_le2ne16(x) av_bswap16(x)
#define av_le2ne32(x) av_bswap32(x)
#define av_le2ne64(x) av_bswap64(x)
#define AV_BE2NEC(s, x) (x)
#define AV_LE2NEC(s, x) AV_BSWAPC(s, x)
#else
#define av_be2ne16(x) av_bswap16(x)
#define av_be2ne32(x) av_bswap32(x)
#define av_be2ne64(x) av_bswap64(x)
#define av_le2ne16(x) (x)
#define av_le2ne32(x) (x)
#define av_le2ne64(x) (x)
#define AV_BE2NEC(s, x) AV_BSWAPC(s, x)
#define AV_LE2NEC(s, x) (x)
#endif
#define AV_BE2NE16C(x) AV_BE2NEC(16, x)
#define AV_BE2NE32C(x) AV_BE2NEC(32, x)
#define AV_BE2NE64C(x) AV_BE2NEC(64, x)
#define AV_LE2NE16C(x) AV_LE2NEC(16, x)
#define AV_LE2NE32C(x) AV_LE2NEC(32, x)
#define AV_LE2NE64C(x) AV_LE2NEC(64, x)
#endif /* AVUTIL_BSWAP_H */
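
A tiny sketch of the byte-swapping helpers above, e.g. for converting a 32-bit field read from a little-endian container to native byte order.

#include <stdio.h>
#include <stdint.h>
#include "libavutil/bswap.h"

int main(void)
{
    uint32_t le_field = 0x11223344;                                     /* value as stored in a little-endian file */
    printf("bswap32  : 0x%08x\n", (unsigned)av_bswap32(le_field));      /* 0x44332211 */
    printf("le2ne32  : 0x%08x\n", (unsigned)av_le2ne32(le_field));      /* no-op on little-endian hosts */
    printf("BSWAP32C : 0x%08x\n", (unsigned)AV_BSWAP32C(0x11223344u));  /* compile-time constant form */
    return 0;
}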

View File

@ -1,274 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* @ingroup lavu_buffer
* refcounted data buffer API
*/
#ifndef AVUTIL_BUFFER_H
#define AVUTIL_BUFFER_H
#include <stdint.h>
/**
* @defgroup lavu_buffer AVBuffer
* @ingroup lavu_data
*
* @{
* AVBuffer is an API for reference-counted data buffers.
*
* There are two core objects in this API -- AVBuffer and AVBufferRef. AVBuffer
* represents the data buffer itself; it is opaque and not meant to be accessed
* by the caller directly, but only through AVBufferRef. However, the caller may
* e.g. compare two AVBuffer pointers to check whether two different references
* are describing the same data buffer. AVBufferRef represents a single
* reference to an AVBuffer and it is the object that may be manipulated by the
* caller directly.
*
* There are two functions provided for creating a new AVBuffer with a single
* reference -- av_buffer_alloc() to just allocate a new buffer, and
* av_buffer_create() to wrap an existing array in an AVBuffer. From an existing
* reference, additional references may be created with av_buffer_ref().
* Use av_buffer_unref() to free a reference (this will automatically free the
* data once all the references are freed).
*
* The convention throughout this API and the rest of FFmpeg is such that the
* buffer is considered writable if there exists only one reference to it (and
* it has not been marked as read-only). The av_buffer_is_writable() function is
* provided to check whether this is true and av_buffer_make_writable() will
* automatically create a new writable buffer when necessary.
* Of course nothing prevents the calling code from violating this convention,
* however that is safe only when all the existing references are under its
* control.
*
* @note Referencing and unreferencing the buffers is thread-safe and thus
* may be done from multiple threads simultaneously without any need for
* additional locking.
*
* @note Two different references to the same buffer can point to different
* parts of the buffer (i.e. their AVBufferRef.data will not be equal).
*/
/**
* A reference counted buffer type. It is opaque and is meant to be used through
* references (AVBufferRef).
*/
typedef struct AVBuffer AVBuffer;
/**
* A reference to a data buffer.
*
* The size of this struct is not a part of the public ABI and it is not meant
* to be allocated directly.
*/
typedef struct AVBufferRef {
AVBuffer *buffer;
/**
* The data buffer. It is considered writable if and only if
* this is the only reference to the buffer, in which case
* av_buffer_is_writable() returns 1.
*/
uint8_t *data;
/**
* Size of data in bytes.
*/
int size;
} AVBufferRef;
/**
* Allocate an AVBuffer of the given size using av_malloc().
*
* @return an AVBufferRef of given size or NULL when out of memory
*/
AVBufferRef *av_buffer_alloc(int size);
/**
* Same as av_buffer_alloc(), except the returned buffer will be initialized
* to zero.
*/
AVBufferRef *av_buffer_allocz(int size);
/**
* Always treat the buffer as read-only, even when it has only one
* reference.
*/
#define AV_BUFFER_FLAG_READONLY (1 << 0)
/**
* Create an AVBuffer from an existing array.
*
* If this function is successful, data is owned by the AVBuffer. The caller may
* only access data through the returned AVBufferRef and references derived from
* it.
* If this function fails, data is left untouched.
* @param data data array
* @param size size of data in bytes
* @param free a callback for freeing this buffer's data
 * @param opaque an opaque value that is passed unchanged to the free callback
* @param flags a combination of AV_BUFFER_FLAG_*
*
* @return an AVBufferRef referring to data on success, NULL on failure.
*/
AVBufferRef *av_buffer_create(uint8_t *data, int size,
void (*free)(void *opaque, uint8_t *data),
void *opaque, int flags);
/**
* Default free callback, which calls av_free() on the buffer data.
* This function is meant to be passed to av_buffer_create(), not called
* directly.
*/
void av_buffer_default_free(void *opaque, uint8_t *data);
/**
* Create a new reference to an AVBuffer.
*
* @return a new AVBufferRef referring to the same AVBuffer as buf or NULL on
* failure.
*/
AVBufferRef *av_buffer_ref(AVBufferRef *buf);
/**
* Free a given reference and automatically free the buffer if there are no more
* references to it.
*
* @param buf the reference to be freed. The pointer is set to NULL on return.
*/
void av_buffer_unref(AVBufferRef **buf);
/**
* @return 1 if the caller may write to the data referred to by buf (which is
* true if and only if buf is the only reference to the underlying AVBuffer).
* Return 0 otherwise.
* A positive answer is valid until av_buffer_ref() is called on buf.
*/
int av_buffer_is_writable(const AVBufferRef *buf);
/**
* @return the opaque parameter set by av_buffer_create.
*/
void *av_buffer_get_opaque(const AVBufferRef *buf);
int av_buffer_get_ref_count(const AVBufferRef *buf);
/**
* Create a writable reference from a given buffer reference, avoiding data copy
* if possible.
*
* @param buf buffer reference to make writable. On success, buf is either left
* untouched, or it is unreferenced and a new writable AVBufferRef is
* written in its place. On failure, buf is left untouched.
* @return 0 on success, a negative AVERROR on failure.
*/
int av_buffer_make_writable(AVBufferRef **buf);
/**
* Reallocate a given buffer.
*
* @param buf a buffer reference to reallocate. On success, buf will be
* unreferenced and a new reference with the required size will be
* written in its place. On failure buf will be left untouched. *buf
* may be NULL, then a new buffer is allocated.
* @param size required new buffer size.
* @return 0 on success, a negative AVERROR on failure.
*
* @note the buffer is actually reallocated with av_realloc() only if it was
* initially allocated through av_buffer_realloc(NULL) and there is only one
* reference to it (i.e. the one passed to this function). In all other cases
* a new buffer is allocated and the data is copied.
*/
int av_buffer_realloc(AVBufferRef **buf, int size);
/**
* @}
*/
/**
* @defgroup lavu_bufferpool AVBufferPool
* @ingroup lavu_data
*
* @{
* AVBufferPool is an API for a lock-free thread-safe pool of AVBuffers.
*
* Frequently allocating and freeing large buffers may be slow. AVBufferPool is
* meant to solve this in cases when the caller needs a set of buffers of the
* same size (the most obvious use case being buffers for raw video or audio
* frames).
*
* At the beginning, the user must call av_buffer_pool_init() to create the
* buffer pool. Then whenever a buffer is needed, call av_buffer_pool_get() to
* get a reference to a new buffer, similar to av_buffer_alloc(). This new
* reference works in all aspects the same way as the one created by
* av_buffer_alloc(). However, when the last reference to this buffer is
* unreferenced, it is returned to the pool instead of being freed and will be
* reused for subsequent av_buffer_pool_get() calls.
*
* When the caller is done with the pool and no longer needs to allocate any new
* buffers, av_buffer_pool_uninit() must be called to mark the pool as freeable.
* Once all the buffers are released, it will automatically be freed.
*
* Allocating and releasing buffers with this API is thread-safe as long as
* either the default alloc callback is used, or the user-supplied one is
* thread-safe.
*/
/**
* The buffer pool. This structure is opaque and not meant to be accessed
* directly. It is allocated with av_buffer_pool_init() and freed with
* av_buffer_pool_uninit().
*/
typedef struct AVBufferPool AVBufferPool;
/**
* Allocate and initialize a buffer pool.
*
* @param size size of each buffer in this pool
* @param alloc a function that will be used to allocate new buffers when the
* pool is empty. May be NULL, then the default allocator will be used
* (av_buffer_alloc()).
* @return newly created buffer pool on success, NULL on error.
*/
AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size));
/**
* Mark the pool as being available for freeing. It will actually be freed only
* once all the allocated buffers associated with the pool are released. Thus it
* is safe to call this function while some of the allocated buffers are still
* in use.
*
* @param pool pointer to the pool to be freed. It will be set to NULL.
* @see av_buffer_pool_can_uninit()
*/
void av_buffer_pool_uninit(AVBufferPool **pool);
/**
* Allocate a new AVBuffer, reusing an old buffer from the pool when available.
* This function may be called simultaneously from multiple threads.
*
* @return a reference to the new buffer on success, NULL on error.
*/
AVBufferRef *av_buffer_pool_get(AVBufferPool *pool);
/**
* @}
*/
#endif /* AVUTIL_BUFFER_H */
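
A minimal sketch of the reference-counting conventions described above: a buffer is writable only while it has a single reference, and av_buffer_make_writable() restores that property by copying when needed.

#include <stdio.h>
#include <string.h>
#include "libavutil/buffer.h"

int main(void)
{
    AVBufferRef *a = av_buffer_alloc(64);            /* single reference: writable */
    if (!a)
        return 1;
    memcpy(a->data, "payload", 8);

    AVBufferRef *b = av_buffer_ref(a);               /* second reference: no longer writable */
    printf("writable with two refs: %d\n", av_buffer_is_writable(a));   /* 0 */

    if (av_buffer_make_writable(&b) < 0) {           /* copies the data so b becomes sole owner */
        av_buffer_unref(&a);
        return 1;
    }
    printf("b writable again: %d\n", av_buffer_is_writable(b));         /* 1 */

    av_buffer_unref(&a);                             /* unref sets the pointer to NULL */
    av_buffer_unref(&b);
    return 0;
}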

View File

@ -1,221 +0,0 @@
/*
* Copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
* Copyright (c) 2008 Peter Ross
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CHANNEL_LAYOUT_H
#define AVUTIL_CHANNEL_LAYOUT_H
#include <stdint.h>
/**
* @file
* audio channel layout utility functions
*/
/**
* @addtogroup lavu_audio
* @{
*/
/**
* @defgroup channel_masks Audio channel masks
*
 * A channel layout is a 64-bit integer with a bit set for every channel.
 * The number of bits set must be equal to the number of channels.
 * The value 0 means that the channel layout is not known.
 * @note this data structure is not powerful enough to handle channel
 * combinations that have the same channel multiple times, such as
* dual-mono.
*
* @{
*/
#define AV_CH_FRONT_LEFT 0x00000001
#define AV_CH_FRONT_RIGHT 0x00000002
#define AV_CH_FRONT_CENTER 0x00000004
#define AV_CH_LOW_FREQUENCY 0x00000008
#define AV_CH_BACK_LEFT 0x00000010
#define AV_CH_BACK_RIGHT 0x00000020
#define AV_CH_FRONT_LEFT_OF_CENTER 0x00000040
#define AV_CH_FRONT_RIGHT_OF_CENTER 0x00000080
#define AV_CH_BACK_CENTER 0x00000100
#define AV_CH_SIDE_LEFT 0x00000200
#define AV_CH_SIDE_RIGHT 0x00000400
#define AV_CH_TOP_CENTER 0x00000800
#define AV_CH_TOP_FRONT_LEFT 0x00001000
#define AV_CH_TOP_FRONT_CENTER 0x00002000
#define AV_CH_TOP_FRONT_RIGHT 0x00004000
#define AV_CH_TOP_BACK_LEFT 0x00008000
#define AV_CH_TOP_BACK_CENTER 0x00010000
#define AV_CH_TOP_BACK_RIGHT 0x00020000
#define AV_CH_STEREO_LEFT 0x20000000 ///< Stereo downmix.
#define AV_CH_STEREO_RIGHT 0x40000000 ///< See AV_CH_STEREO_LEFT.
#define AV_CH_WIDE_LEFT 0x0000000080000000ULL
#define AV_CH_WIDE_RIGHT 0x0000000100000000ULL
#define AV_CH_SURROUND_DIRECT_LEFT 0x0000000200000000ULL
#define AV_CH_SURROUND_DIRECT_RIGHT 0x0000000400000000ULL
#define AV_CH_LOW_FREQUENCY_2 0x0000000800000000ULL
/** Channel mask value used for AVCodecContext.request_channel_layout
to indicate that the user requests the channel order of the decoder output
to be the native codec channel order. */
#define AV_CH_LAYOUT_NATIVE 0x8000000000000000ULL
/**
* @}
* @defgroup channel_mask_c Audio channel convenience macros
* @{
* */
#define AV_CH_LAYOUT_MONO (AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_STEREO (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT)
#define AV_CH_LAYOUT_2POINT1 (AV_CH_LAYOUT_STEREO|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_2_1 (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_SURROUND (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_3POINT1 (AV_CH_LAYOUT_SURROUND|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_4POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_4POINT1 (AV_CH_LAYOUT_4POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_2_2 (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_QUAD (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_5POINT1 (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_5POINT0_BACK (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT1_BACK (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_6POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT0_FRONT (AV_CH_LAYOUT_2_2|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_HEXAGONAL (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1_BACK (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1_FRONT (AV_CH_LAYOUT_6POINT0_FRONT|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_7POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT0_FRONT (AV_CH_LAYOUT_5POINT0|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_7POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT1_WIDE (AV_CH_LAYOUT_5POINT1|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_7POINT1_WIDE_BACK (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_OCTAGONAL (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_STEREO_DOWNMIX (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT)
enum AVMatrixEncoding {
AV_MATRIX_ENCODING_NONE,
AV_MATRIX_ENCODING_DOLBY,
AV_MATRIX_ENCODING_DPLII,
AV_MATRIX_ENCODING_NB
};
/**
* @}
*/
/**
* Return a channel layout id that matches name, or 0 if no match is found.
*
* name can be one or several of the following notations,
* separated by '+' or '|':
 * - the name of a usual channel layout (mono, stereo, 4.0, quad, 5.0,
* 5.0(side), 5.1, 5.1(side), 7.1, 7.1(wide), downmix);
* - the name of a single channel (FL, FR, FC, LFE, BL, BR, FLC, FRC, BC,
* SL, SR, TC, TFL, TFC, TFR, TBL, TBC, TBR, DL, DR);
* - a number of channels, in decimal, optionally followed by 'c', yielding
* the default channel layout for that number of channels (@see
* av_get_default_channel_layout);
* - a channel layout mask, in hexadecimal starting with "0x" (see the
* AV_CH_* macros).
*
* @warning Starting from the next major bump the trailing character
* 'c' to specify a number of channels will be required, while a
* channel layout mask could also be specified as a decimal number
* (if and only if not followed by "c").
*
* Example: "stereo+FC" = "2c+FC" = "2c+1c" = "0x7"
*/
uint64_t av_get_channel_layout(const char *name);
/**
* Return a description of a channel layout.
* If nb_channels is <= 0, it is guessed from the channel_layout.
*
 * @param buf buffer where the channel layout string is written
* @param buf_size size in bytes of the buffer
*/
void av_get_channel_layout_string(char *buf, int buf_size, int nb_channels, uint64_t channel_layout);
struct AVBPrint;
/**
* Append a description of a channel layout to a bprint buffer.
*/
void av_bprint_channel_layout(struct AVBPrint *bp, int nb_channels, uint64_t channel_layout);
/**
* Return the number of channels in the channel layout.
*/
int av_get_channel_layout_nb_channels(uint64_t channel_layout);
/**
* Return default channel layout for a given number of channels.
*/
int64_t av_get_default_channel_layout(int nb_channels);
/**
* Get the index of a channel in channel_layout.
*
* @param channel a channel layout describing exactly one channel which must be
* present in channel_layout.
*
* @return index of channel in channel_layout on success, a negative AVERROR
* on error.
*/
int av_get_channel_layout_channel_index(uint64_t channel_layout,
uint64_t channel);
/**
* Get the channel with the given index in channel_layout.
*/
uint64_t av_channel_layout_extract_channel(uint64_t channel_layout, int index);
/**
* Get the name of a given channel.
*
* @return channel name on success, NULL on error.
*/
const char *av_get_channel_name(uint64_t channel);
/**
* Get the description of a given channel.
*
* @param channel a channel layout with a single channel
* @return channel description on success, NULL on error
*/
const char *av_get_channel_description(uint64_t channel);
/**
* Get the value and name of a standard channel layout.
*
* @param[in] index index in an internal list, starting at 0
* @param[out] layout channel layout mask
* @param[out] name name of the layout
* @return 0 if the layout exists,
* <0 if index is beyond the limits
*/
int av_get_standard_channel_layout(unsigned index, uint64_t *layout,
const char **name);
/**
* @}
*/
#endif /* AVUTIL_CHANNEL_LAYOUT_H */
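
A short sketch of the channel-layout helpers above, resolving a layout by name and querying it.

#include <stdio.h>
#include <stdint.h>
#include "libavutil/channel_layout.h"

int main(void)
{
    uint64_t layout = av_get_channel_layout("5.1");              /* parse a layout by name */
    int nb_channels = av_get_channel_layout_nb_channels(layout);

    char name[128];
    av_get_channel_layout_string(name, (int)sizeof(name), nb_channels, layout);
    printf("mask=0x%llx channels=%d name=%s\n",
           (unsigned long long)layout, nb_channels, name);

    int fc_index = av_get_channel_layout_channel_index(layout, AV_CH_FRONT_CENTER);
    printf("FC is channel #%d (%s)\n", fc_index, av_get_channel_name(AV_CH_FRONT_CENTER));
    return 0;
}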

View File

@ -1,464 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* common internal and external API header
*/
#ifndef AVUTIL_COMMON_H
#define AVUTIL_COMMON_H
#include <errno.h>
#include <inttypes.h>
#include <limits.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "attributes.h"
#include "version.h"
#include "libavutil/avconfig.h"
#if AV_HAVE_BIGENDIAN
# define AV_NE(be, le) (be)
#else
# define AV_NE(be, le) (le)
#endif
//rounded division & shift
#define RSHIFT(a,b) ((a) > 0 ? ((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b))
/* assume b>0 */
#define ROUNDED_DIV(a,b) (((a)>0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b))
/* assume a>0 and b>0 */
#define FF_CEIL_RSHIFT(a,b) (!av_builtin_constant_p(b) ? -((-(a)) >> (b)) \
: ((a) + (1<<(b)) - 1) >> (b))
#define FFUDIV(a,b) (((a)>0 ?(a):(a)-(b)+1) / (b))
#define FFUMOD(a,b) ((a)-(b)*FFUDIV(a,b))
#define FFABS(a) ((a) >= 0 ? (a) : (-(a)))
#define FFSIGN(a) ((a) > 0 ? 1 : -1)
#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
#define FFMAX3(a,b,c) FFMAX(FFMAX(a,b),c)
#define FFMIN(a,b) ((a) > (b) ? (b) : (a))
#define FFMIN3(a,b,c) FFMIN(FFMIN(a,b),c)
#define FFSWAP(type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0)
#define FF_ARRAY_ELEMS(a) (sizeof(a) / sizeof((a)[0]))
#define FFALIGN(x, a) (((x)+(a)-1)&~((a)-1))
/* misc math functions */
/**
 * Reverse the order of the bits of an 8-bit unsigned integer.
*/
#if FF_API_AV_REVERSE
extern attribute_deprecated const uint8_t av_reverse[256];
#endif
#ifdef HAVE_AV_CONFIG_H
# include "config.h"
# include "intmath.h"
#endif
/* Pull in unguarded fallback defines at the end of this file. */
#include "common.h"
#ifndef av_log2
av_const int av_log2(unsigned v);
#endif
#ifndef av_log2_16bit
av_const int av_log2_16bit(unsigned v);
#endif
/**
* Clip a signed integer value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const int av_clip_c(int a, int amin, int amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
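/*
* Illustrative values (a sketch, not from the original header), using the
* public av_clip() name that maps to av_clip_c() via the fallback defines
* at the end of this file:
*
* @code
* av_clip(300, 0, 255);   // 255
* av_clip( -7, 0, 255);   // 0
* av_clip(128, 0, 255);   // 128
* @endcode
*/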
/**
* Clip a signed 64bit integer value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const int64_t av_clip64_c(int64_t a, int64_t amin, int64_t amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/**
* Clip a signed integer value into the 0-255 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const uint8_t av_clip_uint8_c(int a)
{
if (a&(~0xFF)) return (-a)>>31;
else return a;
}
/**
* Clip a signed integer value into the -128,127 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const int8_t av_clip_int8_c(int a)
{
if ((a+0x80) & ~0xFF) return (a>>31) ^ 0x7F;
else return a;
}
/**
* Clip a signed integer value into the 0-65535 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const uint16_t av_clip_uint16_c(int a)
{
if (a&(~0xFFFF)) return (-a)>>31;
else return a;
}
/**
* Clip a signed integer value into the -32768,32767 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const int16_t av_clip_int16_c(int a)
{
if ((a+0x8000) & ~0xFFFF) return (a>>31) ^ 0x7FFF;
else return a;
}
/**
* Clip a signed 64-bit integer value into the -2147483648,2147483647 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const int32_t av_clipl_int32_c(int64_t a)
{
if ((a+0x80000000u) & ~UINT64_C(0xFFFFFFFF)) return (int32_t)((a>>63) ^ 0x7FFFFFFF);
else return (int32_t)a;
}
/**
* Clip a signed integer to an unsigned power of two range.
* @param a value to clip
* @param p bit position to clip at
* @return clipped value
*/
static av_always_inline av_const unsigned av_clip_uintp2_c(int a, int p)
{
if (a & ~((1<<p) - 1)) return -a >> 31 & ((1<<p) - 1);
else return a;
}
/**
* Add two signed 32-bit values with saturation.
*
* @param a one value
* @param b another value
* @return sum with signed saturation
*/
static av_always_inline int av_sat_add32_c(int a, int b)
{
return av_clipl_int32((int64_t)a + b);
}
/**
* Add a doubled value to another value with saturation at both stages.
*
* @param a first value
* @param b value doubled and added to a
* @return sum with signed saturation
*/
static av_always_inline int av_sat_dadd32_c(int a, int b)
{
return av_sat_add32(a, av_sat_add32(b, b));
}
/**
* Clip a float value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const float av_clipf_c(float a, float amin, float amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/**
* Clip a double value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const double av_clipd_c(double a, double amin, double amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/** Compute ceil(log2(x)).
* @param x value used to compute ceil(log2(x))
* @return computed ceiling of log2(x)
*/
static av_always_inline av_const int av_ceil_log2_c(int x)
{
return av_log2((x - 1) << 1);
}
/**
* Count number of bits set to one in x
* @param x value to count bits of
* @return the number of bits set to one in x
*/
static av_always_inline av_const int av_popcount_c(uint32_t x)
{
x -= (x >> 1) & 0x55555555;
x = (x & 0x33333333) + ((x >> 2) & 0x33333333);
x = (x + (x >> 4)) & 0x0F0F0F0F;
x += x >> 8;
return (x + (x >> 16)) & 0x3F;
}
/**
* Count number of bits set to one in x
* @param x value to count bits of
* @return the number of bits set to one in x
*/
static av_always_inline av_const int av_popcount64_c(uint64_t x)
{
return av_popcount((uint32_t)x) + av_popcount((uint32_t)(x >> 32));
}
#define MKTAG(a,b,c,d) ((a) | ((b) << 8) | ((c) << 16) | ((unsigned)(d) << 24))
#define MKBETAG(a,b,c,d) ((d) | ((c) << 8) | ((b) << 16) | ((unsigned)(a) << 24))
/**
* Convert a UTF-8 character (up to 4 bytes) to its 32-bit UCS-4 encoded form.
*
* @param val Output value, must be an lvalue of type uint32_t.
* @param GET_BYTE Expression reading one byte from the input.
* Evaluated up to 7 times (4 for the currently
* assigned Unicode range). With a memory buffer
* input, this could be *ptr++.
* @param ERROR Expression to be evaluated on invalid input,
* typically a goto statement.
*
* @warning ERROR should not contain a loop control statement which
* could interact with the internal while loop, and should force an
* exit from the macro code (e.g. through a goto or a return) in order
* to prevent undefined results.
*/
#define GET_UTF8(val, GET_BYTE, ERROR)\
val= GET_BYTE;\
{\
uint32_t top = (val & 128) >> 1;\
if ((val & 0xc0) == 0x80 || val >= 0xFE)\
ERROR\
while (val & top) {\
int tmp= GET_BYTE - 128;\
if(tmp>>6)\
ERROR\
val= (val<<6) + tmp;\
top <<= 5;\
}\
val &= (top << 1) - 1;\
}
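/*
* Illustrative sketch (not from the original header): decoding one UTF-8
* character from a byte buffer, using *(*p)++ as the GET_BYTE expression and
* a return statement as the ERROR expression. The helper name is a placeholder.
*
* @code
* static int32_t decode_one(const uint8_t **p)
* {
*     uint32_t cp;
*     GET_UTF8(cp, *(*p)++, return -1;)
*     return cp;   // e.g. 0x20AC for the input bytes E2 82 AC
* }
* @endcode
*/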
/**
* Convert a UTF-16 character (2 or 4 bytes) to its 32-bit UCS-4 encoded form.
*
* @param val Output value, must be an lvalue of type uint32_t.
* @param GET_16BIT Expression returning two bytes of UTF-16 data converted
* to native byte order. Evaluated one or two times.
* @param ERROR Expression to be evaluated on invalid input,
* typically a goto statement.
*/
#define GET_UTF16(val, GET_16BIT, ERROR)\
val = GET_16BIT;\
{\
unsigned int hi = val - 0xD800;\
if (hi < 0x800) {\
val = GET_16BIT - 0xDC00;\
if (val > 0x3FFU || hi > 0x3FFU)\
ERROR\
val += (hi<<10) + 0x10000;\
}\
}
/**
* @def PUT_UTF8(val, tmp, PUT_BYTE)
* Convert a 32-bit Unicode character to its UTF-8 encoded form (up to 4 bytes long).
* @param val is an input-only argument and should be of type uint32_t. It holds
* a UCS-4 encoded Unicode character that is to be converted to UTF-8. If
* val is given as a function it is executed only once.
* @param tmp is a temporary variable and should be of type uint8_t. It
* represents an intermediate value during conversion that is to be
* output by PUT_BYTE.
* @param PUT_BYTE writes the converted UTF-8 bytes to any proper destination.
* It could be a function or a statement, and uses tmp as the input byte.
* For example, PUT_BYTE could be "*output++ = tmp;" PUT_BYTE will be
* executed up to 4 times for values in the valid UTF-8 range and up to
* 7 times in the general case, depending on the length of the converted
* Unicode character.
*/
#define PUT_UTF8(val, tmp, PUT_BYTE)\
{\
int bytes, shift;\
uint32_t in = val;\
if (in < 0x80) {\
tmp = in;\
PUT_BYTE\
} else {\
bytes = (av_log2(in) + 4) / 5;\
shift = (bytes - 1) * 6;\
tmp = (256 - (256 >> bytes)) | (in >> shift);\
PUT_BYTE\
while (shift >= 6) {\
shift -= 6;\
tmp = 0x80 | ((in >> shift) & 0x3f);\
PUT_BYTE\
}\
}\
}
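/*
* Illustrative sketch (not from the original header): encoding U+20AC (the
* euro sign) into a small buffer, using *q++ = tmp; as the PUT_BYTE statement.
*
* @code
* uint8_t buf[4], *q = buf, tmp;
* PUT_UTF8(0x20AC, tmp, *q++ = tmp;)
* // buf now holds 0xE2 0x82 0xAC and q - buf == 3
* @endcode
*/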
/**
* @def PUT_UTF16(val, tmp, PUT_16BIT)
* Convert a 32-bit Unicode character to its UTF-16 encoded form (2 or 4 bytes).
* @param val is an input-only argument and should be of type uint32_t. It holds
* a UCS-4 encoded Unicode character that is to be converted to UTF-16. If
* val is given as a function it is executed only once.
* @param tmp is a temporary variable and should be of type uint16_t. It
* represents an intermediate value during conversion that is to be
* output by PUT_16BIT.
* @param PUT_16BIT writes the converted UTF-16 data to any proper destination
* in desired endianness. It could be a function or a statement, and uses tmp
* as the input data. For example, PUT_16BIT could be "*output++ = tmp;"
* PUT_16BIT will be executed 1 or 2 times depending on the input character.
*/
#define PUT_UTF16(val, tmp, PUT_16BIT)\
{\
uint32_t in = val;\
if (in < 0x10000) {\
tmp = in;\
PUT_16BIT\
} else {\
tmp = 0xD800 | ((in - 0x10000) >> 10);\
PUT_16BIT\
tmp = 0xDC00 | ((in - 0x10000) & 0x3FF);\
PUT_16BIT\
}\
}
#include "mem.h"
#ifdef HAVE_AV_CONFIG_H
# include "internal.h"
#endif /* HAVE_AV_CONFIG_H */
#endif /* AVUTIL_COMMON_H */
/*
* The following definitions are outside the multiple inclusion guard
* to ensure they are immediately available in intmath.h.
*/
#ifndef av_ceil_log2
# define av_ceil_log2 av_ceil_log2_c
#endif
#ifndef av_clip
# define av_clip av_clip_c
#endif
#ifndef av_clip64
# define av_clip64 av_clip64_c
#endif
#ifndef av_clip_uint8
# define av_clip_uint8 av_clip_uint8_c
#endif
#ifndef av_clip_int8
# define av_clip_int8 av_clip_int8_c
#endif
#ifndef av_clip_uint16
# define av_clip_uint16 av_clip_uint16_c
#endif
#ifndef av_clip_int16
# define av_clip_int16 av_clip_int16_c
#endif
#ifndef av_clipl_int32
# define av_clipl_int32 av_clipl_int32_c
#endif
#ifndef av_clip_uintp2
# define av_clip_uintp2 av_clip_uintp2_c
#endif
#ifndef av_sat_add32
# define av_sat_add32 av_sat_add32_c
#endif
#ifndef av_sat_dadd32
# define av_sat_dadd32 av_sat_dadd32_c
#endif
#ifndef av_clipf
# define av_clipf av_clipf_c
#endif
#ifndef av_clipd
# define av_clipd av_clipd_c
#endif
#ifndef av_popcount
# define av_popcount av_popcount_c
#endif
#ifndef av_popcount64
# define av_popcount64 av_popcount64_c
#endif

View File

@ -1,111 +0,0 @@
/*
* Copyright (c) 2000, 2001, 2002 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CPU_H
#define AVUTIL_CPU_H
#include "attributes.h"
#define AV_CPU_FLAG_FORCE 0x80000000 /* force usage of selected flags (OR) */
/* lower 16 bits - CPU features */
#define AV_CPU_FLAG_MMX 0x0001 ///< standard MMX
#define AV_CPU_FLAG_MMXEXT 0x0002 ///< SSE integer functions or AMD MMX ext
#define AV_CPU_FLAG_MMX2 0x0002 ///< SSE integer functions or AMD MMX ext
#define AV_CPU_FLAG_3DNOW 0x0004 ///< AMD 3DNOW
#define AV_CPU_FLAG_SSE 0x0008 ///< SSE functions
#define AV_CPU_FLAG_SSE2 0x0010 ///< PIV SSE2 functions
#define AV_CPU_FLAG_SSE2SLOW 0x40000000 ///< SSE2 supported, but usually not faster
///< than regular MMX/SSE (e.g. Core1)
#define AV_CPU_FLAG_3DNOWEXT 0x0020 ///< AMD 3DNowExt
#define AV_CPU_FLAG_SSE3 0x0040 ///< Prescott SSE3 functions
#define AV_CPU_FLAG_SSE3SLOW 0x20000000 ///< SSE3 supported, but usually not faster
///< than regular MMX/SSE (e.g. Core1)
#define AV_CPU_FLAG_SSSE3 0x0080 ///< Conroe SSSE3 functions
#define AV_CPU_FLAG_ATOM 0x10000000 ///< Atom processor, some SSSE3 instructions are slower
#define AV_CPU_FLAG_SSE4 0x0100 ///< Penryn SSE4.1 functions
#define AV_CPU_FLAG_SSE42 0x0200 ///< Nehalem SSE4.2 functions
#define AV_CPU_FLAG_AVX 0x4000 ///< AVX functions: requires OS support even if YMM registers aren't used
#define AV_CPU_FLAG_XOP 0x0400 ///< Bulldozer XOP functions
#define AV_CPU_FLAG_FMA4 0x0800 ///< Bulldozer FMA4 functions
// #if LIBAVUTIL_VERSION_MAJOR <52
#define AV_CPU_FLAG_CMOV 0x1001000 ///< supports cmov instruction
// #else
// #define AV_CPU_FLAG_CMOV 0x1000 ///< supports cmov instruction
// #endif
#define AV_CPU_FLAG_AVX2 0x8000 ///< AVX2 functions: requires OS support even if YMM registers aren't used
#define AV_CPU_FLAG_ALTIVEC 0x0001 ///< standard
#define AV_CPU_FLAG_ARMV5TE (1 << 0)
#define AV_CPU_FLAG_ARMV6 (1 << 1)
#define AV_CPU_FLAG_ARMV6T2 (1 << 2)
#define AV_CPU_FLAG_VFP (1 << 3)
#define AV_CPU_FLAG_VFPV3 (1 << 4)
#define AV_CPU_FLAG_NEON (1 << 5)
/**
* Return the flags which specify extensions supported by the CPU.
* The returned value is affected by av_force_cpu_flags() if that was used
* before. So av_get_cpu_flags() can easily be used in an application to
* detect the enabled CPU flags.
*/
int av_get_cpu_flags(void);
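/*
* Illustrative sketch (not from the original header): runtime dispatch based
* on the detected flags. The NEON branch is just an example; any AV_CPU_FLAG_*
* bit can be tested the same way.
*
* @code
* int flags = av_get_cpu_flags();
* if (flags & AV_CPU_FLAG_NEON) {
*     // install NEON-optimized function pointers
* }
* @endcode
*/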
/**
* Disables cpu detection and forces the specified flags.
* -1 is a special case that disables forcing of specific flags.
*/
void av_force_cpu_flags(int flags);
/**
* Set a mask on flags returned by av_get_cpu_flags().
* This function is mainly useful for testing.
* Please use av_force_cpu_flags() and av_get_cpu_flags() instead, which are more flexible.
*
* @warning this function is not thread safe.
*/
attribute_deprecated void av_set_cpu_flags_mask(int mask);
/**
* Parse CPU flags from a string.
*
* The returned flags contain the specified flags as well as related unspecified flags.
*
* This function exists only for compatibility with libav.
* Please use av_parse_cpu_caps() when possible.
* @return a combination of AV_CPU_* flags, negative on error.
*/
attribute_deprecated
int av_parse_cpu_flags(const char *s);
/**
* Parse CPU caps from a string and update the given AV_CPU_* flags based on that.
*
* @return negative on error.
*/
int av_parse_cpu_caps(unsigned *flags, const char *s);
/**
* @return the number of logical CPU cores present.
*/
int av_cpu_count(void);
#endif /* AVUTIL_CPU_H */

View File

@ -1,85 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CRC_H
#define AVUTIL_CRC_H
#include <stdint.h>
#include <stddef.h>
#include "attributes.h"
/**
* @defgroup lavu_crc32 CRC32
* @ingroup lavu_crypto
* @{
*/
typedef uint32_t AVCRC;
typedef enum {
AV_CRC_8_ATM,
AV_CRC_16_ANSI,
AV_CRC_16_CCITT,
AV_CRC_32_IEEE,
AV_CRC_32_IEEE_LE, /*< reversed bitorder version of AV_CRC_32_IEEE */
AV_CRC_24_IEEE = 12,
AV_CRC_MAX, /*< Not part of public API! Do not use outside libavutil. */
}AVCRCId;
/**
* Initialize a CRC table.
* @param ctx must be an array of size sizeof(AVCRC)*257 or sizeof(AVCRC)*1024
* @param le If 1, the lowest bit represents the coefficient for the highest
* exponent of the corresponding polynomial (both for poly and
* actual CRC).
* If 0, you must swap the CRC parameter and the result of av_crc
* if you need the standard representation (can be simplified in
* most cases to e.g. bswap16):
* av_bswap32(crc << (32-bits))
* @param bits number of bits for the CRC
* @param poly generator polynomial without the x**bits coefficient, in the
* representation as specified by le
* @param ctx_size size of ctx in bytes
* @return <0 on failure
*/
int av_crc_init(AVCRC *ctx, int le, int bits, uint32_t poly, int ctx_size);
/**
* Get an initialized standard CRC table.
* @param crc_id ID of a standard CRC
* @return a pointer to the CRC table or NULL on failure
*/
const AVCRC *av_crc_get_table(AVCRCId crc_id);
/**
* Calculate the CRC of a block.
* @param crc CRC of previous blocks if any or initial value for CRC
* @return CRC updated with the data from the given block
*
* @see av_crc_init() "le" parameter
*/
uint32_t av_crc(const AVCRC *ctx, uint32_t crc,
const uint8_t *buffer, size_t length) av_pure;
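/*
* Illustrative sketch (not from the original header): computing a CRC over a
* buffer in two chunks with one of the standard tables. The chunk names and
* lengths are placeholders; any pre/post conditioning (e.g. the ~0 initial
* value and final inversion used by zlib-style CRC-32) is left to the caller.
*
* @code
* const AVCRC *table = av_crc_get_table(AV_CRC_32_IEEE_LE);
* uint32_t crc = av_crc(table, 0, chunk1, len1);
* crc = av_crc(table, crc, chunk2, len2);   // the CRC can be updated incrementally
* @endcode
*/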
/**
* @}
*/
#endif /* AVUTIL_CRC_H */

View File

@ -1,152 +0,0 @@
/*
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Public dictionary API.
* @deprecated
* AVDictionary is provided for compatibility with libav. It is inefficient
* both in implementation and in API. It does not scale and is
* extremely slow with large dictionaries.
* It is recommended that new code uses our tree container from tree.c/h
* where applicable, which uses AVL trees to achieve O(log n) performance.
*/
#ifndef AVUTIL_DICT_H
#define AVUTIL_DICT_H
/**
* @addtogroup lavu_dict AVDictionary
* @ingroup lavu_data
*
* @brief Simple key:value store
*
* @{
* Dictionaries are used for storing key:value pairs. To create
* an AVDictionary, simply pass an address of a NULL pointer to
* av_dict_set(). NULL can be used as an empty dictionary wherever
* a pointer to an AVDictionary is required.
* Use av_dict_get() to retrieve an entry or iterate over all
* entries and finally av_dict_free() to free the dictionary
* and all its contents.
*
* @code
* AVDictionary *d = NULL; // "create" an empty dictionary
* av_dict_set(&d, "foo", "bar", 0); // add an entry
*
* char *k = av_strdup("key"); // if your strings are already allocated,
* char *v = av_strdup("value"); // you can avoid copying them like this
* av_dict_set(&d, k, v, AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
*
* AVDictionaryEntry *t = NULL;
* while (t = av_dict_get(d, "", t, AV_DICT_IGNORE_SUFFIX)) {
* <....> // iterate over all entries in d
* }
*
* av_dict_free(&d);
* @endcode
*
*/
#define AV_DICT_MATCH_CASE 1
#define AV_DICT_IGNORE_SUFFIX 2
#define AV_DICT_DONT_STRDUP_KEY 4 /**< Take ownership of a key that's been
allocated with av_malloc() and children. */
#define AV_DICT_DONT_STRDUP_VAL 8 /**< Take ownership of a value that's been
allocated with av_malloc() and children. */
#define AV_DICT_DONT_OVERWRITE 16 ///< Don't overwrite existing entries.
#define AV_DICT_APPEND 32 /**< If the entry already exists, append to it. Note that no
delimiter is added, the strings are simply concatenated. */
typedef struct AVDictionaryEntry {
char *key;
char *value;
} AVDictionaryEntry;
typedef struct AVDictionary AVDictionary;
/**
* Get a dictionary entry with matching key.
*
* @param prev Set to the previous matching element to find the next.
* If set to NULL the first matching element is returned.
* @param flags Allows case as well as suffix-insensitive comparisons.
* @return Found entry or NULL. Changing the key or value leads to undefined behavior.
*/
AVDictionaryEntry *
av_dict_get(AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags);
/**
* Get number of entries in dictionary.
*
* @param m dictionary
* @return number of entries in dictionary
*/
int av_dict_count(const AVDictionary *m);
/**
* Set the given entry in *pm, overwriting an existing entry.
*
* @param pm pointer to a pointer to a dictionary struct. If *pm is NULL
* a dictionary struct is allocated and put in *pm.
* @param key entry key to add to *pm (will be av_strduped depending on flags)
* @param value entry value to add to *pm (will be av_strduped depending on flags).
* Passing a NULL value will cause an existing entry to be deleted.
* @return >= 0 on success otherwise an error code <0
*/
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags);
/**
* Parse the key/value pairs list and add to a dictionary.
*
* @param key_val_sep a 0-terminated list of characters used to separate
* key from value
* @param pairs_sep a 0-terminated list of characters used to separate
* two pairs from each other
* @param flags flags to use when adding to dictionary.
* AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL
* are ignored since the key/value tokens will always
* be duplicated.
* @return 0 on success, negative AVERROR code on failure
*/
int av_dict_parse_string(AVDictionary **pm, const char *str,
const char *key_val_sep, const char *pairs_sep,
int flags);
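/*
* Illustrative sketch (not from the original header): parsing a "key=value"
* list separated by commas. The option names are arbitrary placeholders.
*
* @code
* AVDictionary *opts = NULL;
* if (av_dict_parse_string(&opts, "threads=4,preset=fast", "=", ",", 0) >= 0) {
*     // ... pass opts to an API that accepts an AVDictionary ...
* }
* av_dict_free(&opts);
* @endcode
*/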
/**
* Copy entries from one AVDictionary struct into another.
* @param dst pointer to a pointer to a AVDictionary struct. If *dst is NULL,
* this function will allocate a struct for you and put it in *dst
* @param src pointer to source AVDictionary struct
* @param flags flags to use when setting entries in *dst
* @note metadata is read using the AV_DICT_IGNORE_SUFFIX flag
*/
void av_dict_copy(AVDictionary **dst, AVDictionary *src, int flags);
/**
* Free all the memory allocated for an AVDictionary struct
* and all keys and values.
*/
void av_dict_free(AVDictionary **m);
/**
* @}
*/
#endif /* AVUTIL_DICT_H */

View File

@ -1,117 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* error code definitions
*/
#ifndef AVUTIL_ERROR_H
#define AVUTIL_ERROR_H
#include <errno.h>
#include <stddef.h>
/**
* @addtogroup lavu_error
*
* @{
*/
/* error handling */
#if EDOM > 0
#define AVERROR(e) (-(e)) ///< Returns a negative error code from a POSIX error code, to return from library functions.
#define AVUNERROR(e) (-(e)) ///< Returns a POSIX error code from a library function error return value.
#else
/* Some platforms have E* and errno already negated. */
#define AVERROR(e) (e)
#define AVUNERROR(e) (e)
#endif
#define FFERRTAG(a, b, c, d) (-(int)MKTAG(a, b, c, d))
#define AVERROR_BSF_NOT_FOUND FFERRTAG(0xF8,'B','S','F') ///< Bitstream filter not found
#define AVERROR_BUG FFERRTAG( 'B','U','G','!') ///< Internal bug, also see AVERROR_BUG2
#define AVERROR_BUFFER_TOO_SMALL FFERRTAG( 'B','U','F','S') ///< Buffer too small
#define AVERROR_DECODER_NOT_FOUND FFERRTAG(0xF8,'D','E','C') ///< Decoder not found
#define AVERROR_DEMUXER_NOT_FOUND FFERRTAG(0xF8,'D','E','M') ///< Demuxer not found
#define AVERROR_ENCODER_NOT_FOUND FFERRTAG(0xF8,'E','N','C') ///< Encoder not found
#define AVERROR_EOF FFERRTAG( 'E','O','F',' ') ///< End of file
#define AVERROR_EXIT FFERRTAG( 'E','X','I','T') ///< Immediate exit was requested; the called function should not be restarted
#define AVERROR_EXTERNAL FFERRTAG( 'E','X','T',' ') ///< Generic error in an external library
#define AVERROR_FILTER_NOT_FOUND FFERRTAG(0xF8,'F','I','L') ///< Filter not found
#define AVERROR_INVALIDDATA FFERRTAG( 'I','N','D','A') ///< Invalid data found when processing input
#define AVERROR_MUXER_NOT_FOUND FFERRTAG(0xF8,'M','U','X') ///< Muxer not found
#define AVERROR_OPTION_NOT_FOUND FFERRTAG(0xF8,'O','P','T') ///< Option not found
#define AVERROR_PATCHWELCOME FFERRTAG( 'P','A','W','E') ///< Not yet implemented in FFmpeg, patches welcome
#define AVERROR_PROTOCOL_NOT_FOUND FFERRTAG(0xF8,'P','R','O') ///< Protocol not found
#define AVERROR_STREAM_NOT_FOUND FFERRTAG(0xF8,'S','T','R') ///< Stream not found
/**
* This is semantically identical to AVERROR_BUG
* it has been introduced in Libav after our AVERROR_BUG and with a modified value.
*/
#define AVERROR_BUG2 FFERRTAG( 'B','U','G',' ')
#define AVERROR_UNKNOWN FFERRTAG( 'U','N','K','N') ///< Unknown error, typically from an external library
#define AVERROR_EXPERIMENTAL (-0x2bb2afa8) ///< Requested feature is flagged experimental. Set strict_std_compliance if you really want to use it.
#define AV_ERROR_MAX_STRING_SIZE 64
/**
* Put a description of the AVERROR code errnum in errbuf.
* In case of failure the global variable errno is set to indicate the
* error. Even in case of failure av_strerror() will print a generic
* error message indicating the errnum provided to errbuf.
*
* @param errnum error code to describe
* @param errbuf buffer to which description is written
* @param errbuf_size the size in bytes of errbuf
* @return 0 on success, a negative value if a description for errnum
* cannot be found
*/
int av_strerror(int errnum, char *errbuf, size_t errbuf_size);
/**
* Fill the provided buffer with a string containing an error string
* corresponding to the AVERROR code errnum.
*
* @param errbuf a buffer
* @param errbuf_size size in bytes of errbuf
* @param errnum error code to describe
* @return the buffer in input, filled with the error description
* @see av_strerror()
*/
static inline char *av_make_error_string(char *errbuf, size_t errbuf_size, int errnum)
{
av_strerror(errnum, errbuf, errbuf_size);
return errbuf;
}
/**
* Convenience macro; the return value should be used only directly in
* function arguments and never stand-alone.
*/
#define av_err2str(errnum) \
av_make_error_string((char[AV_ERROR_MAX_STRING_SIZE]){0}, AV_ERROR_MAX_STRING_SIZE, errnum)
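/*
* Illustrative sketch (not from the original header): both ways of turning an
* AVERROR code into a printable string.
*
* @code
* char msg[AV_ERROR_MAX_STRING_SIZE];
* av_strerror(AVERROR(ENOMEM), msg, sizeof(msg));
* printf("failed: %s\n", msg);                           // requires <stdio.h>
* printf("failed: %s\n", av_err2str(AVERROR(ENOMEM)));   // convenience macro, C99 compound literal
* @endcode
*/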
/**
* @}
*/
#endif /* AVUTIL_ERROR_H */

View File

@ -1,113 +0,0 @@
/*
* Copyright (c) 2002 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* simple arithmetic expression evaluator
*/
#ifndef AVUTIL_EVAL_H
#define AVUTIL_EVAL_H
#include "avutil.h"
typedef struct AVExpr AVExpr;
/**
* Parse and evaluate an expression.
* Note, this is significantly slower than av_expr_eval().
*
* @param res a pointer to a double where the result value of
* the expression is put, or NAN in case of error
* @param s expression as a zero terminated string, for example "1+2^3+5*5+sin(2/3)"
* @param const_names NULL terminated array of zero terminated strings of constant identifiers, for example {"PI", "E", 0}
* @param const_values a zero terminated array of values for the identifiers from const_names
* @param func1_names NULL terminated array of zero terminated strings of funcs1 identifiers
* @param funcs1 NULL terminated array of function pointers for functions which take 1 argument
* @param func2_names NULL terminated array of zero terminated strings of funcs2 identifiers
* @param funcs2 NULL terminated array of function pointers for functions which take 2 arguments
* @param opaque a pointer which will be passed to all functions from funcs1 and funcs2
* @param log_ctx parent logging context
* @return >= 0 in case of success, a negative value corresponding to an
* AVERROR code otherwise
*/
int av_expr_parse_and_eval(double *res, const char *s,
const char * const *const_names, const double *const_values,
const char * const *func1_names, double (* const *funcs1)(void *, double),
const char * const *func2_names, double (* const *funcs2)(void *, double, double),
void *opaque, int log_offset, void *log_ctx);
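/*
* Illustrative sketch (not from the original header): evaluating a constant
* expression with no user-supplied constants or functions.
*
* @code
* double v;
* int ret = av_expr_parse_and_eval(&v, "1+2^3+5*5+sin(2/3)",
*                                  NULL, NULL,   // const_names, const_values
*                                  NULL, NULL,   // func1_names, funcs1
*                                  NULL, NULL,   // func2_names, funcs2
*                                  NULL, 0, NULL);
* // on success (ret >= 0), v holds the numeric result
* @endcode
*/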
/**
* Parse an expression.
*
* @param expr a pointer where an AVExpr containing the parsed
* value is put in case of successful parsing, or NULL otherwise.
* The pointed to AVExpr must be freed with av_expr_free() by the user
* when it is not needed anymore.
* @param s expression as a zero terminated string, for example "1+2^3+5*5+sin(2/3)"
* @param const_names NULL terminated array of zero terminated strings of constant identifiers, for example {"PI", "E", 0}
* @param func1_names NULL terminated array of zero terminated strings of funcs1 identifiers
* @param funcs1 NULL terminated array of function pointers for functions which take 1 argument
* @param func2_names NULL terminated array of zero terminated strings of funcs2 identifiers
* @param funcs2 NULL terminated array of function pointers for functions which take 2 arguments
* @param log_ctx parent logging context
* @return >= 0 in case of success, a negative value corresponding to an
* AVERROR code otherwise
*/
int av_expr_parse(AVExpr **expr, const char *s,
const char * const *const_names,
const char * const *func1_names, double (* const *funcs1)(void *, double),
const char * const *func2_names, double (* const *funcs2)(void *, double, double),
int log_offset, void *log_ctx);
/**
* Evaluate a previously parsed expression.
*
* @param const_values a zero terminated array of values for the identifiers from av_expr_parse() const_names
* @param opaque a pointer which will be passed to all functions from funcs1 and funcs2
* @return the value of the expression
*/
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque);
/**
* Free a parsed expression previously created with av_expr_parse().
*/
void av_expr_free(AVExpr *e);
/**
* Parse the string in numstr and return its value as a double. If
* the string is empty, contains only whitespaces, or does not contain
* an initial substring that has the expected syntax for a
* floating-point number, no conversion is performed. In this case,
* returns a value of zero and the value returned in tail is the value
* of numstr.
*
* @param numstr a string representing a number, may contain one of
* the International System number postfixes, for example 'K', 'M',
* 'G'. If 'i' is appended after the postfix, powers of 2 are used
* instead of powers of 10. The 'B' postfix multiplies the value by
* 8, and can be appended after another postfix or used alone. This
* allows using for example 'KB', 'MiB', 'G' and 'B' as postfix.
* @param tail if non-NULL puts here the pointer to the char next
* after the last parsed character
*/
double av_strtod(const char *numstr, char **tail);
#endif /* AVUTIL_EVAL_H */

View File

@ -1,144 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* a very simple circular buffer FIFO implementation
*/
#ifndef AVUTIL_FIFO_H
#define AVUTIL_FIFO_H
#include <stdint.h>
#include "avutil.h"
#include "attributes.h"
typedef struct AVFifoBuffer {
uint8_t *buffer;
uint8_t *rptr, *wptr, *end;
uint32_t rndx, wndx;
} AVFifoBuffer;
/**
* Initialize an AVFifoBuffer.
* @param size size of the FIFO in bytes
* @return AVFifoBuffer or NULL in case of memory allocation failure
*/
AVFifoBuffer *av_fifo_alloc(unsigned int size);
/**
* Free an AVFifoBuffer.
* @param f AVFifoBuffer to free
*/
void av_fifo_free(AVFifoBuffer *f);
/**
* Reset the AVFifoBuffer to the state right after av_fifo_alloc; in particular, it is emptied.
* @param f AVFifoBuffer to reset
*/
void av_fifo_reset(AVFifoBuffer *f);
/**
* Return the amount of data in bytes in the AVFifoBuffer, that is the
* amount of data you can read from it.
* @param f AVFifoBuffer to read from
* @return size
*/
int av_fifo_size(AVFifoBuffer *f);
/**
* Return the amount of space in bytes in the AVFifoBuffer, that is the
* amount of data you can write into it.
* @param f AVFifoBuffer to write into
* @return size
*/
int av_fifo_space(AVFifoBuffer *f);
/**
* Feed data from an AVFifoBuffer to a user-supplied callback.
* @param f AVFifoBuffer to read from
* @param buf_size number of bytes to read
* @param func generic read function
* @param dest data destination
*/
int av_fifo_generic_read(AVFifoBuffer *f, void *dest, int buf_size, void (*func)(void*, void*, int));
/**
* Feed data from a user-supplied callback to an AVFifoBuffer.
* @param f AVFifoBuffer to write to
* @param src data source; non-const since it may be used as a
* modifiable context by the function defined in func
* @param size number of bytes to write
* @param func generic write function; the first parameter is src,
* the second is dest_buf, the third is dest_buf_size.
* func must return the number of bytes written to dest_buf, or <= 0 to
* indicate no more data available to write.
* If func is NULL, src is interpreted as a simple byte array for source data.
* @return the number of bytes written to the FIFO
*/
int av_fifo_generic_write(AVFifoBuffer *f, void *src, int size, int (*func)(void*, void*, int));
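/*
* Illustrative sketch (not from the original header): round-tripping a few
* bytes through a FIFO with NULL callbacks, i.e. treating src/dest as plain
* byte arrays.
*
* @code
* AVFifoBuffer *fifo = av_fifo_alloc(1024);
* uint8_t in[16] = { 0 }, out[16];
* if (fifo) {
*     av_fifo_generic_write(fifo, in, sizeof(in), NULL);
*     if (av_fifo_size(fifo) >= (int)sizeof(out))
*         av_fifo_generic_read(fifo, out, sizeof(out), NULL);
*     av_fifo_free(fifo);
* }
* @endcode
*/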
/**
* Resize an AVFifoBuffer.
* In case of reallocation failure, the old FIFO is kept unchanged.
*
* @param f AVFifoBuffer to resize
* @param size new AVFifoBuffer size in bytes
* @return <0 for failure, >=0 otherwise
*/
int av_fifo_realloc2(AVFifoBuffer *f, unsigned int size);
/**
* Enlarge an AVFifoBuffer.
* In case of reallocation failure, the old FIFO is kept unchanged.
* The new fifo size may be larger than the requested size.
*
* @param f AVFifoBuffer to resize
* @param additional_space the amount of space in bytes to allocate in addition to av_fifo_size()
* @return <0 for failure, >=0 otherwise
*/
int av_fifo_grow(AVFifoBuffer *f, unsigned int additional_space);
/**
* Read and discard the specified amount of data from an AVFifoBuffer.
* @param f AVFifoBuffer to read from
* @param size amount of data to read in bytes
*/
void av_fifo_drain(AVFifoBuffer *f, int size);
/**
* Return a pointer to the data stored in a FIFO buffer at a certain offset.
* The FIFO buffer is not modified.
*
* @param f AVFifoBuffer to peek at, f must be non-NULL
* @param offs an offset in bytes, its absolute value must be less
* than the used buffer size or the returned pointer will
* point outside the buffer data.
* The used buffer size can be checked with av_fifo_size().
*/
static inline uint8_t *av_fifo_peek2(const AVFifoBuffer *f, int offs)
{
uint8_t *ptr = f->rptr + offs;
if (ptr >= f->end)
ptr = f->buffer + (ptr - f->end);
else if (ptr < f->buffer)
ptr = f->end - (f->buffer - ptr);
return ptr;
}
#endif /* AVUTIL_FIFO_H */

View File

@ -1,66 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_FILE_H
#define AVUTIL_FILE_H
#include <stdint.h>
#include "avutil.h"
/**
* @file
* Misc file utilities.
*/
/**
* Read the file with name filename, and put its content in a newly
* allocated buffer or map it with mmap() when available.
* In case of success set *bufptr to the read or mmapped buffer, and
* *size to the size in bytes of the buffer in *bufptr.
* The returned buffer must be released with av_file_unmap().
*
* @param log_offset loglevel offset used for logging
* @param log_ctx context used for logging
* @return a non negative number in case of success, a negative value
* corresponding to an AVERROR error code in case of failure
*/
int av_file_map(const char *filename, uint8_t **bufptr, size_t *size,
int log_offset, void *log_ctx);
/**
* Unmap or free the buffer bufptr created by av_file_map().
*
* @param size size in bytes of bufptr, must be the same as returned
* by av_file_map()
*/
void av_file_unmap(uint8_t *bufptr, size_t size);
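/*
* Illustrative sketch (not from the original header): mapping a file, using
* its contents, then releasing the mapping. "input.bin" is a placeholder name.
*
* @code
* uint8_t *buf;
* size_t size;
* if (av_file_map("input.bin", &buf, &size, 0, NULL) >= 0) {
*     // buf[0] .. buf[size - 1] now hold the file contents
*     av_file_unmap(buf, size);
* }
* @endcode
*/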
/**
* Wrapper to work around the lack of mkstemp() on mingw.
* Also, it tries to create the file in /tmp first, if possible.
* *prefix can be a character constant; *filename will be allocated internally.
* @return file descriptor of opened file (or -1 on error)
* and opened file name in **filename.
* @note On very old libcs it is necessary to set a secure umask before
* calling this, av_tempfile() can't call umask itself as it is used in
* libraries and could interfere with the calling application.
*/
int av_tempfile(const char *prefix, char **filename, int log_offset, void *log_ctx);
#endif /* AVUTIL_FILE_H */

View File

@ -1,659 +0,0 @@
/*
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_FRAME_H
#define AVUTIL_FRAME_H
#include <stdint.h>
#include "libavcodec/version.h"
#include "avutil.h"
#include "buffer.h"
#include "dict.h"
#include "rational.h"
#include "samplefmt.h"
enum AVColorSpace{
AVCOL_SPC_RGB = 0,
AVCOL_SPC_BT709 = 1, ///< also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
AVCOL_SPC_UNSPECIFIED = 2,
AVCOL_SPC_FCC = 4,
AVCOL_SPC_BT470BG = 5, ///< also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
AVCOL_SPC_SMPTE170M = 6, ///< also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
AVCOL_SPC_SMPTE240M = 7,
AVCOL_SPC_YCOCG = 8, ///< Used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16
AVCOL_SPC_NB , ///< Not part of ABI
};
#define AVCOL_SPC_YCGCO AVCOL_SPC_YCOCG
enum AVColorRange{
AVCOL_RANGE_UNSPECIFIED = 0,
AVCOL_RANGE_MPEG = 1, ///< the normal 219*2^(n-8) "MPEG" YUV ranges
AVCOL_RANGE_JPEG = 2, ///< the normal 2^n-1 "JPEG" YUV ranges
AVCOL_RANGE_NB , ///< Not part of ABI
};
enum AVFrameSideDataType {
/**
* The data is the AVPanScan struct defined in libavcodec.
*/
AV_FRAME_DATA_PANSCAN,
};
typedef struct AVFrameSideData {
enum AVFrameSideDataType type;
uint8_t *data;
int size;
AVDictionary *metadata;
} AVFrameSideData;
/**
* This structure describes decoded (raw) audio or video data.
*
* AVFrame must be allocated using av_frame_alloc(). Note that this only
* allocates the AVFrame itself, the buffers for the data must be managed
* through other means (see below).
* AVFrame must be freed with av_frame_free().
*
* AVFrame is typically allocated once and then reused multiple times to hold
* different data (e.g. a single AVFrame to hold frames received from a
* decoder). In such a case, av_frame_unref() will free any references held by
* the frame and reset it to its original clean state before it
* is reused again.
*
* The data described by an AVFrame is usually reference counted through the
* AVBuffer API. The underlying buffer references are stored in AVFrame.buf /
* AVFrame.extended_buf. An AVFrame is considered to be reference counted if at
* least one reference is set, i.e. if AVFrame.buf[0] != NULL. In such a case,
* every single data plane must be contained in one of the buffers in
* AVFrame.buf or AVFrame.extended_buf.
* There may be a single buffer for all the data, or one separate buffer for
* each plane, or anything in between.
*
* sizeof(AVFrame) is not a part of the public ABI, so new fields may be added
* to the end with a minor bump.
* Similarly fields that are marked as to be only accessed by
* av_opt_ptr() can be reordered. This allows 2 forks to add fields
* without breaking compatibility with each other.
*/
typedef struct AVFrame {
#define AV_NUM_DATA_POINTERS 8
/**
* pointer to the picture/channel planes.
* This might be different from the first allocated byte
*
* Some decoders access areas outside 0,0 - width,height, please
* see avcodec_align_dimensions2(). Some filters and swscale can read
* up to 16 bytes beyond the planes, if these filters are to be used,
* then 16 extra bytes must be allocated.
*/
uint8_t *data[AV_NUM_DATA_POINTERS];
/**
* For video, size in bytes of each picture line.
* For audio, size in bytes of each plane.
*
* For audio, only linesize[0] may be set. For planar audio, each channel
* plane must be the same size.
*
* For video, the linesizes should be multiples of the CPU's alignment
* preference; this is 16 or 32 for modern desktop CPUs.
* Some code requires such alignment, other code can be slower without
* correct alignment, and for yet other code it makes no difference.
*
* @note The linesize may be larger than the size of usable data -- there
* may be extra padding present for performance reasons.
*/
int linesize[AV_NUM_DATA_POINTERS];
/**
* pointers to the data planes/channels.
*
* For video, this should simply point to data[].
*
* For planar audio, each channel has a separate data pointer, and
* linesize[0] contains the size of each channel buffer.
* For packed audio, there is just one data pointer, and linesize[0]
* contains the total size of the buffer for all channels.
*
* Note: Both data and extended_data should always be set in a valid frame,
* but for planar audio with more channels than can fit in data,
* extended_data must be used in order to access all channels.
*/
uint8_t **extended_data;
/**
* width and height of the video frame
*/
int width, height;
/**
* number of audio samples (per channel) described by this frame
*/
int nb_samples;
/**
* format of the frame, -1 if unknown or unset.
* Values correspond to enum AVPixelFormat for video frames,
* enum AVSampleFormat for audio.
*/
int format;
/**
* 1 -> keyframe, 0-> not
*/
int key_frame;
/**
* Picture type of the frame.
*/
enum AVPictureType pict_type;
#if FF_API_AVFRAME_LAVC
attribute_deprecated
uint8_t *base[AV_NUM_DATA_POINTERS];
#endif
/**
* Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
*/
AVRational sample_aspect_ratio;
/**
* Presentation timestamp in time_base units (time when frame should be shown to user).
*/
int64_t pts;
/**
* PTS copied from the AVPacket that was decoded to produce this frame.
*/
int64_t pkt_pts;
/**
* DTS copied from the AVPacket that triggered returning this frame (if frame threading isn't used).
* This is also the Presentation time of this AVFrame calculated from
* only AVPacket.dts values without pts values.
*/
int64_t pkt_dts;
/**
* picture number in bitstream order
*/
int coded_picture_number;
/**
* picture number in display order
*/
int display_picture_number;
/**
* quality (between 1 (good) and FF_LAMBDA_MAX (bad))
*/
int quality;
#if FF_API_AVFRAME_LAVC
attribute_deprecated
int reference;
/**
* QP table
*/
attribute_deprecated
int8_t *qscale_table;
/**
* QP store stride
*/
attribute_deprecated
int qstride;
attribute_deprecated
int qscale_type;
/**
* mbskip_table[mb]>=1 if MB didn't change
* stride= mb_width = (width+15)>>4
*/
attribute_deprecated
uint8_t *mbskip_table;
/**
* motion vector table
* @code
* example:
* int mv_sample_log2= 4 - motion_subsample_log2;
* int mb_width= (width+15)>>4;
* int mv_stride= (mb_width << mv_sample_log2) + 1;
* motion_val[direction][x + y*mv_stride][0->mv_x, 1->mv_y];
* @endcode
*/
attribute_deprecated
int16_t (*motion_val[2])[2];
/**
* macroblock type table
* mb_type_base + mb_width + 2
*/
attribute_deprecated
uint32_t *mb_type;
/**
* DCT coefficients
*/
attribute_deprecated
short *dct_coeff;
/**
* motion reference frame index
* the order in which these are stored can depend on the codec.
*/
attribute_deprecated
int8_t *ref_index[2];
#endif
/**
* for some private data of the user
*/
void *opaque;
/**
* error
*/
uint64_t error[AV_NUM_DATA_POINTERS];
#if FF_API_AVFRAME_LAVC
attribute_deprecated
int type;
#endif
/**
* When decoding, this signals how much the picture must be delayed.
* extra_delay = repeat_pict / (2*fps)
*/
int repeat_pict;
/**
* The content of the picture is interlaced.
*/
int interlaced_frame;
/**
* If the content is interlaced, is top field displayed first.
*/
int top_field_first;
/**
* Tell user application that palette has changed from previous frame.
*/
int palette_has_changed;
#if FF_API_AVFRAME_LAVC
attribute_deprecated
int buffer_hints;
/**
* Pan scan.
*/
attribute_deprecated
struct AVPanScan *pan_scan;
#endif
/**
* reordered opaque 64bit (generally an integer or a double precision float
* PTS but can be anything).
* The user sets AVCodecContext.reordered_opaque to represent the input at
* that time,
* the decoder reorders values as needed and sets AVFrame.reordered_opaque
* to exactly one of the values provided by the user through AVCodecContext.reordered_opaque
* @deprecated in favor of pkt_pts
*/
int64_t reordered_opaque;
#if FF_API_AVFRAME_LAVC
/**
* @deprecated this field is unused
*/
attribute_deprecated void *hwaccel_picture_private;
attribute_deprecated
struct AVCodecContext *owner;
attribute_deprecated
void *thread_opaque;
/**
* log2 of the size of the block which a single vector in motion_val represents:
* (4->16x16, 3->8x8, 2-> 4x4, 1-> 2x2)
*/
attribute_deprecated
uint8_t motion_subsample_log2;
#endif
/**
* Sample rate of the audio data.
*/
int sample_rate;
/**
* Channel layout of the audio data.
*/
uint64_t channel_layout;
/**
* AVBuffer references backing the data for this frame. If all elements of
* this array are NULL, then this frame is not reference counted.
*
* There may be at most one AVBuffer per data plane, so for video this array
* always contains all the references. For planar audio with more than
* AV_NUM_DATA_POINTERS channels, there may be more buffers than can fit in
* this array. Then the extra AVBufferRef pointers are stored in the
* extended_buf array.
*/
AVBufferRef *buf[AV_NUM_DATA_POINTERS];
/**
* For planar audio which requires more than AV_NUM_DATA_POINTERS
* AVBufferRef pointers, this array will hold all the references which
* cannot fit into AVFrame.buf.
*
* Note that this is different from AVFrame.extended_data, which always
* contains all the pointers. This array only contains the extra pointers,
* which cannot fit into AVFrame.buf.
*
* This array is always allocated using av_malloc() by whoever constructs
* the frame. It is freed in av_frame_unref().
*/
AVBufferRef **extended_buf;
/**
* Number of elements in extended_buf.
*/
int nb_extended_buf;
AVFrameSideData **side_data;
int nb_side_data;
/**
* frame timestamp estimated using various heuristics, in stream time base
* Code outside libavcodec should access this field using:
* av_frame_get_best_effort_timestamp(frame)
* - encoding: unused
* - decoding: set by libavcodec, read by user.
*/
int64_t best_effort_timestamp;
/**
* reordered pos from the last AVPacket that has been input into the decoder
* Code outside libavcodec should access this field using:
* av_frame_get_pkt_pos(frame)
* - encoding: unused
* - decoding: Read by user.
*/
int64_t pkt_pos;
/**
* duration of the corresponding packet, expressed in
* AVStream->time_base units, 0 if unknown.
* Code outside libavcodec should access this field using:
* av_frame_get_pkt_duration(frame)
* - encoding: unused
* - decoding: Read by user.
*/
int64_t pkt_duration;
/**
* metadata.
* Code outside libavcodec should access this field using:
* av_frame_get_metadata(frame)
* - encoding: Set by user.
* - decoding: Set by libavcodec.
*/
AVDictionary *metadata;
/**
* decode error flags of the frame, set to a combination of
* FF_DECODE_ERROR_xxx flags if the decoder produced a frame, but there
* were errors during the decoding.
* Code outside libavcodec should access this field using:
* av_frame_get_decode_error_flags(frame)
* - encoding: unused
* - decoding: set by libavcodec, read by user.
*/
int decode_error_flags;
#define FF_DECODE_ERROR_INVALID_BITSTREAM 1
#define FF_DECODE_ERROR_MISSING_REFERENCE 2
/**
* number of audio channels, only used for audio.
* Code outside libavcodec should access this field using:
* av_frame_get_channels(frame)
* - encoding: unused
* - decoding: Read by user.
*/
int channels;
/**
* size of the corresponding packet containing the compressed
* frame. It must be accessed using av_frame_get_pkt_size() and
* av_frame_set_pkt_size().
* It is set to a negative value if unknown.
* - encoding: unused
* - decoding: set by libavcodec, read by user.
*/
int pkt_size;
/**
* YUV colorspace type.
* It must be accessed using av_frame_get_colorspace() and
* av_frame_set_colorspace().
* - encoding: Set by user
* - decoding: Set by libavcodec
*/
enum AVColorSpace colorspace;
/**
* MPEG vs JPEG YUV range.
* It must be accessed using av_frame_get_color_range() and
* av_frame_set_color_range().
* - encoding: Set by user
* - decoding: Set by libavcodec
*/
enum AVColorRange color_range;
/**
* Not to be accessed directly from outside libavutil
*/
AVBufferRef *qp_table_buf;
} AVFrame;
/**
* Accessors for some AVFrame fields.
* The position of these fields in the structure is not part of the ABI,
* they should not be accessed directly outside libavcodec.
*/
int64_t av_frame_get_best_effort_timestamp(const AVFrame *frame);
void av_frame_set_best_effort_timestamp(AVFrame *frame, int64_t val);
int64_t av_frame_get_pkt_duration (const AVFrame *frame);
void av_frame_set_pkt_duration (AVFrame *frame, int64_t val);
int64_t av_frame_get_pkt_pos (const AVFrame *frame);
void av_frame_set_pkt_pos (AVFrame *frame, int64_t val);
int64_t av_frame_get_channel_layout (const AVFrame *frame);
void av_frame_set_channel_layout (AVFrame *frame, int64_t val);
int av_frame_get_channels (const AVFrame *frame);
void av_frame_set_channels (AVFrame *frame, int val);
int av_frame_get_sample_rate (const AVFrame *frame);
void av_frame_set_sample_rate (AVFrame *frame, int val);
AVDictionary *av_frame_get_metadata (const AVFrame *frame);
void av_frame_set_metadata (AVFrame *frame, AVDictionary *val);
int av_frame_get_decode_error_flags (const AVFrame *frame);
void av_frame_set_decode_error_flags (AVFrame *frame, int val);
int av_frame_get_pkt_size(const AVFrame *frame);
void av_frame_set_pkt_size(AVFrame *frame, int val);
AVDictionary **avpriv_frame_get_metadatap(AVFrame *frame);
int8_t *av_frame_get_qp_table(AVFrame *f, int *stride, int *type);
int av_frame_set_qp_table(AVFrame *f, AVBufferRef *buf, int stride, int type);
enum AVColorSpace av_frame_get_colorspace(const AVFrame *frame);
void av_frame_set_colorspace(AVFrame *frame, enum AVColorSpace val);
enum AVColorRange av_frame_get_color_range(const AVFrame *frame);
void av_frame_set_color_range(AVFrame *frame, enum AVColorRange val);
/**
* Get the name of a colorspace.
* @return a static string identifying the colorspace; can be NULL.
*/
const char *av_get_colorspace_name(enum AVColorSpace val);
/**
* Allocate an AVFrame and set its fields to default values. The resulting
* struct must be freed using av_frame_free().
*
* @return An AVFrame filled with default values or NULL on failure.
*
* @note this only allocates the AVFrame itself, not the data buffers. Those
* must be allocated through other means, e.g. with av_frame_get_buffer() or
* manually.
*/
AVFrame *av_frame_alloc(void);
/**
* Free the frame and any dynamically allocated objects in it,
* e.g. extended_data. If the frame is reference counted, it will be
* unreferenced first.
*
* @param frame frame to be freed. The pointer will be set to NULL.
*/
void av_frame_free(AVFrame **frame);
/**
* Set up a new reference to the data described by a given frame.
*
* Copy frame properties from src to dst and create a new reference for each
* AVBufferRef from src.
*
* If src is not reference counted, new buffers are allocated and the data is
* copied.
*
* @return 0 on success, a negative AVERROR on error
*/
int av_frame_ref(AVFrame *dst, AVFrame *src);
/**
* Create a new frame that references the same data as src.
*
* This is a shortcut for av_frame_alloc()+av_frame_ref().
*
* @return newly created AVFrame on success, NULL on error.
*/
AVFrame *av_frame_clone(AVFrame *src);
/**
* Unreference all the buffers referenced by frame and reset the frame fields.
*/
void av_frame_unref(AVFrame *frame);
/**
* Move everything contained in src to dst and reset src.
*/
void av_frame_move_ref(AVFrame *dst, AVFrame *src);
/**
* Allocate new buffer(s) for audio or video data.
*
* The following fields must be set on frame before calling this function:
* - format (pixel format for video, sample format for audio)
* - width and height for video
* - nb_samples and channel_layout for audio
*
* This function will fill AVFrame.data and AVFrame.buf arrays and, if
* necessary, allocate and fill AVFrame.extended_data and AVFrame.extended_buf.
* For planar formats, one buffer will be allocated for each plane.
*
* @param frame frame in which to store the new buffers.
* @param align required buffer size alignment
*
* @return 0 on success, a negative AVERROR on error.
*/
int av_frame_get_buffer(AVFrame *frame, int align);
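/*
* Illustrative sketch (not from the original header): allocating a reference
* counted video frame. AV_PIX_FMT_YUV420P comes from pixfmt.h (pulled in via
* avutil.h); the dimensions and alignment are arbitrary examples.
*
* @code
* AVFrame *frame = av_frame_alloc();
* if (frame) {
*     frame->format = AV_PIX_FMT_YUV420P;
*     frame->width  = 1280;
*     frame->height = 720;
*     if (av_frame_get_buffer(frame, 32) < 0) {
*         // allocation failed
*     }
*     av_frame_free(&frame);
* }
* @endcode
*/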
/**
* Check if the frame data is writable.
*
* @return A positive value if the frame data is writable (which is true if and
* only if each of the underlying buffers has only one reference, namely the one
* stored in this frame). Return 0 otherwise.
*
* If 1 is returned the answer is valid until av_buffer_ref() is called on any
* of the underlying AVBufferRefs (e.g. through av_frame_ref() or directly).
*
* @see av_frame_make_writable(), av_buffer_is_writable()
*/
int av_frame_is_writable(AVFrame *frame);
/**
* Ensure that the frame data is writable, avoiding data copy if possible.
*
* Do nothing if the frame is writable, allocate new buffers and copy the data
* if it is not.
*
* @return 0 on success, a negative AVERROR on error.
*
* @see av_frame_is_writable(), av_buffer_is_writable(),
* av_buffer_make_writable()
*/
int av_frame_make_writable(AVFrame *frame);
/**
* Copy only "metadata" fields from src to dst.
*
* Metadata for the purpose of this function are those fields that do not affect
* the data layout in the buffers. E.g. pts, sample rate (for audio) or sample
* aspect ratio (for video), but not width/height or channel layout.
* Side data is also copied.
*/
int av_frame_copy_props(AVFrame *dst, const AVFrame *src);
/**
* Get the buffer reference a given data plane is stored in.
*
* @param plane index of the data plane of interest in frame->extended_data.
*
* @return the buffer reference that contains the plane or NULL if the input
* frame is not valid.
*/
AVBufferRef *av_frame_get_plane_buffer(AVFrame *frame, int plane);
/**
* Add a new side data to a frame.
*
* @param frame a frame to which the side data should be added
* @param type type of the added side data
* @param size size of the side data
*
* @return newly added side data on success, NULL on error
*/
AVFrameSideData *av_frame_new_side_data(AVFrame *frame,
enum AVFrameSideDataType type,
int size);
/**
* @return a pointer to the side data of a given type on success, NULL if there
* is no side data with such type in this frame.
*/
AVFrameSideData *av_frame_get_side_data(const AVFrame *frame,
enum AVFrameSideDataType type);
#endif /* AVUTIL_FRAME_H */

View File

@ -1,99 +0,0 @@
/*
* Copyright (C) 2012 Martin Storsjo
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_HMAC_H
#define AVUTIL_HMAC_H
#include <stdint.h>
/**
* @defgroup lavu_hmac HMAC
* @ingroup lavu_crypto
* @{
*/
enum AVHMACType {
AV_HMAC_MD5,
AV_HMAC_SHA1,
AV_HMAC_SHA224 = 10,
AV_HMAC_SHA256,
AV_HMAC_SHA384,
AV_HMAC_SHA512,
};
typedef struct AVHMAC AVHMAC;
/**
* Allocate an AVHMAC context.
* @param type The hash function used for the HMAC.
*/
AVHMAC *av_hmac_alloc(enum AVHMACType type);
/**
* Free an AVHMAC context.
* @param ctx The context to free, may be NULL
*/
void av_hmac_free(AVHMAC *ctx);
/**
* Initialize an AVHMAC context with an authentication key.
* @param ctx The HMAC context
* @param key The authentication key
* @param keylen The length of the key, in bytes
*/
void av_hmac_init(AVHMAC *ctx, const uint8_t *key, unsigned int keylen);
/**
* Hash data with the HMAC.
* @param ctx The HMAC context
* @param data The data to hash
* @param len The length of the data, in bytes
*/
void av_hmac_update(AVHMAC *ctx, const uint8_t *data, unsigned int len);
/**
* Finish hashing and output the HMAC digest.
* @param ctx The HMAC context
* @param out The output buffer to write the digest into
* @param outlen The length of the out buffer, in bytes
* @return The number of bytes written to out, or a negative error code.
*/
int av_hmac_final(AVHMAC *ctx, uint8_t *out, unsigned int outlen);
/**
* Hash an array of data with a key.
* @param ctx The HMAC context
* @param data The data to hash
* @param len The length of the data, in bytes
* @param key The authentication key
* @param keylen The length of the key, in bytes
* @param out The output buffer to write the digest into
* @param outlen The length of the out buffer, in bytes
* @return The number of bytes written to out, or a negative error code.
*/
int av_hmac_calc(AVHMAC *ctx, const uint8_t *data, unsigned int len,
const uint8_t *key, unsigned int keylen,
uint8_t *out, unsigned int outlen);
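/**
 * One-shot usage sketch with HMAC-SHA256 (key and message are arbitrary
 * example values):
 *
 * @code
 * uint8_t out[32];
 * const uint8_t key[] = "secret";
 * const uint8_t msg[] = "message";
 * AVHMAC *h = av_hmac_alloc(AV_HMAC_SHA256);
 * if (!h)
 *     return AVERROR(ENOMEM);
 * int n = av_hmac_calc(h, msg, sizeof(msg) - 1,
 *                      key, sizeof(key) - 1, out, sizeof(out));
 * av_hmac_free(h);
 * // n is 32 (the SHA-256 digest size) on success, or a negative error code
 * @endcode
 */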
/**
* @}
*/
#endif /* AVUTIL_HMAC_H */

View File

@ -1,200 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_IMGUTILS_H
#define AVUTIL_IMGUTILS_H
/**
* @file
* misc image utilities
*
* @addtogroup lavu_picture
* @{
*/
#include "avutil.h"
#include "pixdesc.h"
/**
* Compute the max pixel step for each plane of an image with a
* format described by pixdesc.
*
* The pixel step is the distance in bytes between the first byte of
* the group of bytes which describe a pixel component and the first
* byte of the successive group in the same plane for the same
* component.
*
* @param max_pixsteps an array which is filled with the max pixel step
* for each plane. Since a plane may contain different pixel
* components, the computed max_pixsteps[plane] is relative to the
* component in the plane with the max pixel step.
* @param max_pixstep_comps an array which is filled with the component
* for each plane which has the max pixel step. May be NULL.
*/
void av_image_fill_max_pixsteps(int max_pixsteps[4], int max_pixstep_comps[4],
const AVPixFmtDescriptor *pixdesc);
/**
* Compute the size of an image line with format pix_fmt and width
* width for the plane plane.
*
* @return the computed size in bytes
*/
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane);
/**
* Fill plane linesizes for an image with pixel format pix_fmt and
* width width.
*
* @param linesizes array to be filled with the linesize for each plane
* @return >= 0 in case of success, a negative error code otherwise
*/
int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int width);
/**
* Fill plane data pointers for an image with pixel format pix_fmt and
* height height.
*
* @param data pointers array to be filled with the pointer for each image plane
* @param ptr the pointer to a buffer which will contain the image
* @param linesizes the array containing the linesize for each
* plane, should be filled by av_image_fill_linesizes()
* @return the size in bytes required for the image buffer, a negative
* error code in case of failure
*/
int av_image_fill_pointers(uint8_t *data[4], enum AVPixelFormat pix_fmt, int height,
uint8_t *ptr, const int linesizes[4]);
/**
* Allocate an image with size w and h and pixel format pix_fmt, and
* fill pointers and linesizes accordingly.
* The allocated image buffer has to be freed by using
* av_freep(&pointers[0]).
*
* @param align the value to use for buffer size alignment
* @return the size in bytes required for the image buffer, a negative
* error code in case of failure
*/
int av_image_alloc(uint8_t *pointers[4], int linesizes[4],
int w, int h, enum AVPixelFormat pix_fmt, int align);
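/**
 * Allocation sketch (pixel format, size and alignment are arbitrary example
 * values):
 *
 * @code
 * uint8_t *data[4];
 * int linesize[4];
 * int ret = av_image_alloc(data, linesize, 1280, 720, AV_PIX_FMT_YUV420P, 32);
 * if (ret < 0)
 *     return ret;          // negative AVERROR code
 * // ... write into data[0..2] using linesize[0..2] ...
 * av_freep(&data[0]);      // one call releases all planes
 * @endcode
 */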
/**
* Copy image plane from src to dst.
* That is, copy "height" number of lines of "bytewidth" bytes each.
* The first byte of each successive line is separated by *_linesize
* bytes.
*
* bytewidth must not exceed the absolute values of both dst_linesize
* and src_linesize, otherwise the function behavior is undefined.
*
* @param dst_linesize linesize for the image plane in dst
* @param src_linesize linesize for the image plane in src
*/
void av_image_copy_plane(uint8_t *dst, int dst_linesize,
const uint8_t *src, int src_linesize,
int bytewidth, int height);
/**
* Copy image in src_data to dst_data.
*
* @param dst_linesizes linesizes for the image in dst_data
* @param src_linesizes linesizes for the image in src_data
*/
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4],
const uint8_t *src_data[4], const int src_linesizes[4],
enum AVPixelFormat pix_fmt, int width, int height);
/**
* Setup the data pointers and linesizes based on the specified image
* parameters and the provided array.
*
* The fields of the given image are filled in by using the src
* address which points to the image data buffer. Depending on the
* specified pixel format, one or multiple image data pointers and
* line sizes will be set. If a planar format is specified, several
* pointers will be set pointing to the different picture planes and
* the line sizes of the different planes will be stored in the
* dst_linesize array. Call with src == NULL to get the required
* size for the src buffer.
*
* To allocate the buffer and fill in the dst_data and dst_linesize in
* one call, use av_image_alloc().
*
* @param dst_data data pointers to be filled in
* @param dst_linesizes linesizes for the image in dst_data to be filled in
* @param src buffer which will contain or contains the actual image data, can be NULL
* @param pix_fmt the pixel format of the image
* @param width the width of the image in pixels
* @param height the height of the image in pixels
* @param align the value used in src for linesize alignment
* @return the size in bytes required for src, a negative error code
* in case of failure
*/
int av_image_fill_arrays(uint8_t *dst_data[4], int dst_linesize[4],
const uint8_t *src,
enum AVPixelFormat pix_fmt, int width, int height, int align);
/**
* Return the size in bytes of the amount of data required to store an
* image with the given parameters.
*
* @param[in] align the assumed linesize alignment
*/
int av_image_get_buffer_size(enum AVPixelFormat pix_fmt, int width, int height, int align);
/**
* Copy image data from an image into a buffer.
*
* av_image_get_buffer_size() can be used to compute the required size
* for the buffer to fill.
*
* @param dst a buffer into which picture data will be copied
* @param dst_size the size in bytes of dst
* @param src_data pointers containing the source image data
* @param src_linesizes linesizes for the image in src_data
* @param pix_fmt the pixel format of the source image
* @param width the width of the source image in pixels
* @param height the height of the source image in pixels
* @param align the assumed linesize alignment for dst
* @return the number of bytes written to dst, or a negative value
* (error code) on error
*/
int av_image_copy_to_buffer(uint8_t *dst, int dst_size,
const uint8_t * const src_data[4], const int src_linesize[4],
enum AVPixelFormat pix_fmt, int width, int height, int align);
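/**
 * Sketch: serialize a decoded picture into one contiguous buffer, assuming
 * frame is an AVFrame in the given pixel format:
 *
 * @code
 * int size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
 *                                     frame->width, frame->height, 1);
 * if (size < 0)
 *     return size;
 * uint8_t *buf = av_malloc(size);
 * if (!buf)
 *     return AVERROR(ENOMEM);
 * av_image_copy_to_buffer(buf, size,
 *                         (const uint8_t * const *)frame->data, frame->linesize,
 *                         AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
 * // ... use buf ...
 * av_free(buf);
 * @endcode
 */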
/**
* Check if the given dimension of an image is valid, meaning that all
* bytes of the image can be addressed with a signed int.
*
* @param w the width of the picture
* @param h the height of the picture
* @param log_offset the offset to sum to the log level for logging with log_ctx
* @param log_ctx the parent logging context, it may be NULL
* @return >= 0 if valid, a negative error code otherwise
*/
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx);
int avpriv_set_systematic_pal2(uint32_t pal[256], enum AVPixelFormat pix_fmt);
/**
* @}
*/
#endif /* AVUTIL_IMGUTILS_H */

View File

@ -1,77 +0,0 @@
/*
* Copyright (c) 2011 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_INTFLOAT_H
#define AVUTIL_INTFLOAT_H
#include <stdint.h>
#include "attributes.h"
union av_intfloat32 {
uint32_t i;
float f;
};
union av_intfloat64 {
uint64_t i;
double f;
};
/**
* Reinterpret a 32-bit integer as a float.
*/
static av_always_inline float av_int2float(uint32_t i)
{
union av_intfloat32 v;
v.i = i;
return v.f;
}
/**
* Reinterpret a float as a 32-bit integer.
*/
static av_always_inline uint32_t av_float2int(float f)
{
union av_intfloat32 v;
v.f = f;
return v.i;
}
/**
* Reinterpret a 64-bit integer as a double.
*/
static av_always_inline double av_int2double(uint64_t i)
{
union av_intfloat64 v;
v.i = i;
return v.f;
}
/**
* Reinterpret a double as a 64-bit integer.
*/
static av_always_inline uint64_t av_double2int(double f)
{
union av_intfloat64 v;
v.f = f;
return v.i;
}
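/**
 * The conversions above are exact bit-pattern round trips, e.g.:
 *
 * @code
 * uint32_t bits = av_float2int(1.0f);                 // 0x3F800000 for IEEE-754 floats
 * float    f    = av_int2float(bits);                 // 1.0f again
 * double   d    = av_int2double(av_double2int(2.5));  // 2.5 again
 * @endcode
 */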
#endif /* AVUTIL_INTFLOAT_H */

View File

@ -1,40 +0,0 @@
/*
* copyright (c) 2005 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_INTFLOAT_READWRITE_H
#define AVUTIL_INTFLOAT_READWRITE_H
#include <stdint.h>
#include "attributes.h"
/* IEEE 80 bits extended float */
typedef struct AVExtFloat {
uint8_t exponent[2];
uint8_t mantissa[8];
} AVExtFloat;
attribute_deprecated double av_int2dbl(int64_t v) av_const;
attribute_deprecated float av_int2flt(int32_t v) av_const;
attribute_deprecated double av_ext2dbl(const AVExtFloat ext) av_const;
attribute_deprecated int64_t av_dbl2int(double d) av_const;
attribute_deprecated int32_t av_flt2int(float d) av_const;
attribute_deprecated AVExtFloat av_dbl2ext(double d) av_const;
#endif /* AVUTIL_INTFLOAT_READWRITE_H */

View File

@ -1,621 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_INTREADWRITE_H
#define AVUTIL_INTREADWRITE_H
#include <stdint.h>
#include "libavutil/avconfig.h"
#include "attributes.h"
#include "bswap.h"
typedef union {
uint64_t u64;
uint32_t u32[2];
uint16_t u16[4];
uint8_t u8 [8];
double f64;
float f32[2];
} av_alias av_alias64;
typedef union {
uint32_t u32;
uint16_t u16[2];
uint8_t u8 [4];
float f32;
} av_alias av_alias32;
typedef union {
uint16_t u16;
uint8_t u8 [2];
} av_alias av_alias16;
/*
* Arch-specific headers can provide any combination of
* AV_[RW][BLN](16|24|32|48|64) and AV_(COPY|SWAP|ZERO)(64|128) macros.
* Preprocessor symbols must be defined, even if these are implemented
* as inline functions.
*/
#ifdef HAVE_AV_CONFIG_H
#include "config.h"
#if ARCH_ARM
# include "arm/intreadwrite.h"
#elif ARCH_AVR32
# include "avr32/intreadwrite.h"
#elif ARCH_MIPS
# include "mips/intreadwrite.h"
#elif ARCH_PPC
# include "ppc/intreadwrite.h"
#elif ARCH_TOMI
# include "tomi/intreadwrite.h"
#elif ARCH_X86
# include "x86/intreadwrite.h"
#endif
#endif /* HAVE_AV_CONFIG_H */
/*
* Map AV_RNXX <-> AV_R[BL]XX for all variants provided by per-arch headers.
*/
#if AV_HAVE_BIGENDIAN
# if defined(AV_RN16) && !defined(AV_RB16)
# define AV_RB16(p) AV_RN16(p)
# elif !defined(AV_RN16) && defined(AV_RB16)
# define AV_RN16(p) AV_RB16(p)
# endif
# if defined(AV_WN16) && !defined(AV_WB16)
# define AV_WB16(p, v) AV_WN16(p, v)
# elif !defined(AV_WN16) && defined(AV_WB16)
# define AV_WN16(p, v) AV_WB16(p, v)
# endif
# if defined(AV_RN24) && !defined(AV_RB24)
# define AV_RB24(p) AV_RN24(p)
# elif !defined(AV_RN24) && defined(AV_RB24)
# define AV_RN24(p) AV_RB24(p)
# endif
# if defined(AV_WN24) && !defined(AV_WB24)
# define AV_WB24(p, v) AV_WN24(p, v)
# elif !defined(AV_WN24) && defined(AV_WB24)
# define AV_WN24(p, v) AV_WB24(p, v)
# endif
# if defined(AV_RN32) && !defined(AV_RB32)
# define AV_RB32(p) AV_RN32(p)
# elif !defined(AV_RN32) && defined(AV_RB32)
# define AV_RN32(p) AV_RB32(p)
# endif
# if defined(AV_WN32) && !defined(AV_WB32)
# define AV_WB32(p, v) AV_WN32(p, v)
# elif !defined(AV_WN32) && defined(AV_WB32)
# define AV_WN32(p, v) AV_WB32(p, v)
# endif
# if defined(AV_RN48) && !defined(AV_RB48)
# define AV_RB48(p) AV_RN48(p)
# elif !defined(AV_RN48) && defined(AV_RB48)
# define AV_RN48(p) AV_RB48(p)
# endif
# if defined(AV_WN48) && !defined(AV_WB48)
# define AV_WB48(p, v) AV_WN48(p, v)
# elif !defined(AV_WN48) && defined(AV_WB48)
# define AV_WN48(p, v) AV_WB48(p, v)
# endif
# if defined(AV_RN64) && !defined(AV_RB64)
# define AV_RB64(p) AV_RN64(p)
# elif !defined(AV_RN64) && defined(AV_RB64)
# define AV_RN64(p) AV_RB64(p)
# endif
# if defined(AV_WN64) && !defined(AV_WB64)
# define AV_WB64(p, v) AV_WN64(p, v)
# elif !defined(AV_WN64) && defined(AV_WB64)
# define AV_WN64(p, v) AV_WB64(p, v)
# endif
#else /* AV_HAVE_BIGENDIAN */
# if defined(AV_RN16) && !defined(AV_RL16)
# define AV_RL16(p) AV_RN16(p)
# elif !defined(AV_RN16) && defined(AV_RL16)
# define AV_RN16(p) AV_RL16(p)
# endif
# if defined(AV_WN16) && !defined(AV_WL16)
# define AV_WL16(p, v) AV_WN16(p, v)
# elif !defined(AV_WN16) && defined(AV_WL16)
# define AV_WN16(p, v) AV_WL16(p, v)
# endif
# if defined(AV_RN24) && !defined(AV_RL24)
# define AV_RL24(p) AV_RN24(p)
# elif !defined(AV_RN24) && defined(AV_RL24)
# define AV_RN24(p) AV_RL24(p)
# endif
# if defined(AV_WN24) && !defined(AV_WL24)
# define AV_WL24(p, v) AV_WN24(p, v)
# elif !defined(AV_WN24) && defined(AV_WL24)
# define AV_WN24(p, v) AV_WL24(p, v)
# endif
# if defined(AV_RN32) && !defined(AV_RL32)
# define AV_RL32(p) AV_RN32(p)
# elif !defined(AV_RN32) && defined(AV_RL32)
# define AV_RN32(p) AV_RL32(p)
# endif
# if defined(AV_WN32) && !defined(AV_WL32)
# define AV_WL32(p, v) AV_WN32(p, v)
# elif !defined(AV_WN32) && defined(AV_WL32)
# define AV_WN32(p, v) AV_WL32(p, v)
# endif
# if defined(AV_RN48) && !defined(AV_RL48)
# define AV_RL48(p) AV_RN48(p)
# elif !defined(AV_RN48) && defined(AV_RL48)
# define AV_RN48(p) AV_RL48(p)
# endif
# if defined(AV_WN48) && !defined(AV_WL48)
# define AV_WL48(p, v) AV_WN48(p, v)
# elif !defined(AV_WN48) && defined(AV_WL48)
# define AV_WN48(p, v) AV_WL48(p, v)
# endif
# if defined(AV_RN64) && !defined(AV_RL64)
# define AV_RL64(p) AV_RN64(p)
# elif !defined(AV_RN64) && defined(AV_RL64)
# define AV_RN64(p) AV_RL64(p)
# endif
# if defined(AV_WN64) && !defined(AV_WL64)
# define AV_WL64(p, v) AV_WN64(p, v)
# elif !defined(AV_WN64) && defined(AV_WL64)
# define AV_WN64(p, v) AV_WL64(p, v)
# endif
#endif /* !AV_HAVE_BIGENDIAN */
/*
* Define AV_[RW]N helper macros to simplify definitions not provided
* by per-arch headers.
*/
#if defined(__GNUC__) && !defined(__TI_COMPILER_VERSION__)
union unaligned_64 { uint64_t l; } __attribute__((packed)) av_alias;
union unaligned_32 { uint32_t l; } __attribute__((packed)) av_alias;
union unaligned_16 { uint16_t l; } __attribute__((packed)) av_alias;
# define AV_RN(s, p) (((const union unaligned_##s *) (p))->l)
# define AV_WN(s, p, v) ((((union unaligned_##s *) (p))->l) = (v))
#elif defined(__DECC)
# define AV_RN(s, p) (*((const __unaligned uint##s##_t*)(p)))
# define AV_WN(s, p, v) (*((__unaligned uint##s##_t*)(p)) = (v))
#elif AV_HAVE_FAST_UNALIGNED
# define AV_RN(s, p) (((const av_alias##s*)(p))->u##s)
# define AV_WN(s, p, v) (((av_alias##s*)(p))->u##s = (v))
#else
#ifndef AV_RB16
# define AV_RB16(x) \
((((const uint8_t*)(x))[0] << 8) | \
((const uint8_t*)(x))[1])
#endif
#ifndef AV_WB16
# define AV_WB16(p, darg) do { \
unsigned d = (darg); \
((uint8_t*)(p))[1] = (d); \
((uint8_t*)(p))[0] = (d)>>8; \
} while(0)
#endif
#ifndef AV_RL16
# define AV_RL16(x) \
((((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL16
# define AV_WL16(p, darg) do { \
unsigned d = (darg); \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
} while(0)
#endif
#ifndef AV_RB32
# define AV_RB32(x) \
(((uint32_t)((const uint8_t*)(x))[0] << 24) | \
(((const uint8_t*)(x))[1] << 16) | \
(((const uint8_t*)(x))[2] << 8) | \
((const uint8_t*)(x))[3])
#endif
#ifndef AV_WB32
# define AV_WB32(p, darg) do { \
unsigned d = (darg); \
((uint8_t*)(p))[3] = (d); \
((uint8_t*)(p))[2] = (d)>>8; \
((uint8_t*)(p))[1] = (d)>>16; \
((uint8_t*)(p))[0] = (d)>>24; \
} while(0)
#endif
#ifndef AV_RL32
# define AV_RL32(x) \
(((uint32_t)((const uint8_t*)(x))[3] << 24) | \
(((const uint8_t*)(x))[2] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL32
# define AV_WL32(p, darg) do { \
unsigned d = (darg); \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
((uint8_t*)(p))[3] = (d)>>24; \
} while(0)
#endif
#ifndef AV_RB64
# define AV_RB64(x) \
(((uint64_t)((const uint8_t*)(x))[0] << 56) | \
((uint64_t)((const uint8_t*)(x))[1] << 48) | \
((uint64_t)((const uint8_t*)(x))[2] << 40) | \
((uint64_t)((const uint8_t*)(x))[3] << 32) | \
((uint64_t)((const uint8_t*)(x))[4] << 24) | \
((uint64_t)((const uint8_t*)(x))[5] << 16) | \
((uint64_t)((const uint8_t*)(x))[6] << 8) | \
(uint64_t)((const uint8_t*)(x))[7])
#endif
#ifndef AV_WB64
# define AV_WB64(p, darg) do { \
uint64_t d = (darg); \
((uint8_t*)(p))[7] = (d); \
((uint8_t*)(p))[6] = (d)>>8; \
((uint8_t*)(p))[5] = (d)>>16; \
((uint8_t*)(p))[4] = (d)>>24; \
((uint8_t*)(p))[3] = (d)>>32; \
((uint8_t*)(p))[2] = (d)>>40; \
((uint8_t*)(p))[1] = (d)>>48; \
((uint8_t*)(p))[0] = (d)>>56; \
} while(0)
#endif
#ifndef AV_RL64
# define AV_RL64(x) \
(((uint64_t)((const uint8_t*)(x))[7] << 56) | \
((uint64_t)((const uint8_t*)(x))[6] << 48) | \
((uint64_t)((const uint8_t*)(x))[5] << 40) | \
((uint64_t)((const uint8_t*)(x))[4] << 32) | \
((uint64_t)((const uint8_t*)(x))[3] << 24) | \
((uint64_t)((const uint8_t*)(x))[2] << 16) | \
((uint64_t)((const uint8_t*)(x))[1] << 8) | \
(uint64_t)((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL64
# define AV_WL64(p, darg) do { \
uint64_t d = (darg); \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
((uint8_t*)(p))[3] = (d)>>24; \
((uint8_t*)(p))[4] = (d)>>32; \
((uint8_t*)(p))[5] = (d)>>40; \
((uint8_t*)(p))[6] = (d)>>48; \
((uint8_t*)(p))[7] = (d)>>56; \
} while(0)
#endif
#if AV_HAVE_BIGENDIAN
# define AV_RN(s, p) AV_RB##s(p)
# define AV_WN(s, p, v) AV_WB##s(p, v)
#else
# define AV_RN(s, p) AV_RL##s(p)
# define AV_WN(s, p, v) AV_WL##s(p, v)
#endif
#endif /* HAVE_FAST_UNALIGNED */
#ifndef AV_RN16
# define AV_RN16(p) AV_RN(16, p)
#endif
#ifndef AV_RN32
# define AV_RN32(p) AV_RN(32, p)
#endif
#ifndef AV_RN64
# define AV_RN64(p) AV_RN(64, p)
#endif
#ifndef AV_WN16
# define AV_WN16(p, v) AV_WN(16, p, v)
#endif
#ifndef AV_WN32
# define AV_WN32(p, v) AV_WN(32, p, v)
#endif
#ifndef AV_WN64
# define AV_WN64(p, v) AV_WN(64, p, v)
#endif
#if AV_HAVE_BIGENDIAN
# define AV_RB(s, p) AV_RN##s(p)
# define AV_WB(s, p, v) AV_WN##s(p, v)
# define AV_RL(s, p) av_bswap##s(AV_RN##s(p))
# define AV_WL(s, p, v) AV_WN##s(p, av_bswap##s(v))
#else
# define AV_RB(s, p) av_bswap##s(AV_RN##s(p))
# define AV_WB(s, p, v) AV_WN##s(p, av_bswap##s(v))
# define AV_RL(s, p) AV_RN##s(p)
# define AV_WL(s, p, v) AV_WN##s(p, v)
#endif
#define AV_RB8(x) (((const uint8_t*)(x))[0])
#define AV_WB8(p, d) do { ((uint8_t*)(p))[0] = (d); } while(0)
#define AV_RL8(x) AV_RB8(x)
#define AV_WL8(p, d) AV_WB8(p, d)
#ifndef AV_RB16
# define AV_RB16(p) AV_RB(16, p)
#endif
#ifndef AV_WB16
# define AV_WB16(p, v) AV_WB(16, p, v)
#endif
#ifndef AV_RL16
# define AV_RL16(p) AV_RL(16, p)
#endif
#ifndef AV_WL16
# define AV_WL16(p, v) AV_WL(16, p, v)
#endif
#ifndef AV_RB32
# define AV_RB32(p) AV_RB(32, p)
#endif
#ifndef AV_WB32
# define AV_WB32(p, v) AV_WB(32, p, v)
#endif
#ifndef AV_RL32
# define AV_RL32(p) AV_RL(32, p)
#endif
#ifndef AV_WL32
# define AV_WL32(p, v) AV_WL(32, p, v)
#endif
#ifndef AV_RB64
# define AV_RB64(p) AV_RB(64, p)
#endif
#ifndef AV_WB64
# define AV_WB64(p, v) AV_WB(64, p, v)
#endif
#ifndef AV_RL64
# define AV_RL64(p) AV_RL(64, p)
#endif
#ifndef AV_WL64
# define AV_WL64(p, v) AV_WL(64, p, v)
#endif
#ifndef AV_RB24
# define AV_RB24(x) \
((((const uint8_t*)(x))[0] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[2])
#endif
#ifndef AV_WB24
# define AV_WB24(p, d) do { \
((uint8_t*)(p))[2] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[0] = (d)>>16; \
} while(0)
#endif
#ifndef AV_RL24
# define AV_RL24(x) \
((((const uint8_t*)(x))[2] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL24
# define AV_WL24(p, d) do { \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
} while(0)
#endif
#ifndef AV_RB48
# define AV_RB48(x) \
(((uint64_t)((const uint8_t*)(x))[0] << 40) | \
((uint64_t)((const uint8_t*)(x))[1] << 32) | \
((uint64_t)((const uint8_t*)(x))[2] << 24) | \
((uint64_t)((const uint8_t*)(x))[3] << 16) | \
((uint64_t)((const uint8_t*)(x))[4] << 8) | \
(uint64_t)((const uint8_t*)(x))[5])
#endif
#ifndef AV_WB48
# define AV_WB48(p, darg) do { \
uint64_t d = (darg); \
((uint8_t*)(p))[5] = (d); \
((uint8_t*)(p))[4] = (d)>>8; \
((uint8_t*)(p))[3] = (d)>>16; \
((uint8_t*)(p))[2] = (d)>>24; \
((uint8_t*)(p))[1] = (d)>>32; \
((uint8_t*)(p))[0] = (d)>>40; \
} while(0)
#endif
#ifndef AV_RL48
# define AV_RL48(x) \
(((uint64_t)((const uint8_t*)(x))[5] << 40) | \
((uint64_t)((const uint8_t*)(x))[4] << 32) | \
((uint64_t)((const uint8_t*)(x))[3] << 24) | \
((uint64_t)((const uint8_t*)(x))[2] << 16) | \
((uint64_t)((const uint8_t*)(x))[1] << 8) | \
(uint64_t)((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL48
# define AV_WL48(p, darg) do { \
uint64_t d = (darg); \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
((uint8_t*)(p))[3] = (d)>>24; \
((uint8_t*)(p))[4] = (d)>>32; \
((uint8_t*)(p))[5] = (d)>>40; \
} while(0)
#endif
/*
* The AV_[RW]NA macros access naturally aligned data
* in a type-safe way.
*/
#define AV_RNA(s, p) (((const av_alias##s*)(p))->u##s)
#define AV_WNA(s, p, v) (((av_alias##s*)(p))->u##s = (v))
#ifndef AV_RN16A
# define AV_RN16A(p) AV_RNA(16, p)
#endif
#ifndef AV_RN32A
# define AV_RN32A(p) AV_RNA(32, p)
#endif
#ifndef AV_RN64A
# define AV_RN64A(p) AV_RNA(64, p)
#endif
#ifndef AV_WN16A
# define AV_WN16A(p, v) AV_WNA(16, p, v)
#endif
#ifndef AV_WN32A
# define AV_WN32A(p, v) AV_WNA(32, p, v)
#endif
#ifndef AV_WN64A
# define AV_WN64A(p, v) AV_WNA(64, p, v)
#endif
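/**
 * Usage sketch for the read/write macros (buffer contents are an arbitrary
 * example):
 *
 * @code
 * uint8_t buf[8] = { 0x12, 0x34, 0x56, 0x78 };
 * uint32_t be = AV_RB32(buf);     // 0x12345678, independent of host endianness
 * uint32_t le = AV_RL32(buf);     // 0x78563412
 * AV_WL16(buf + 4, 0xBEEF);       // stores the bytes 0xEF, 0xBE
 * uint32_t word;
 * AV_WN32A(&word, be);            // native-endian store, pointer must be aligned
 * @endcode
 */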
/*
* The AV_COPYxxU macros are suitable for copying data to/from unaligned
* memory locations.
*/
#define AV_COPYU(n, d, s) AV_WN##n(d, AV_RN##n(s));
#ifndef AV_COPY16U
# define AV_COPY16U(d, s) AV_COPYU(16, d, s)
#endif
#ifndef AV_COPY32U
# define AV_COPY32U(d, s) AV_COPYU(32, d, s)
#endif
#ifndef AV_COPY64U
# define AV_COPY64U(d, s) AV_COPYU(64, d, s)
#endif
#ifndef AV_COPY128U
# define AV_COPY128U(d, s) \
do { \
AV_COPY64U(d, s); \
AV_COPY64U((char *)(d) + 8, (const char *)(s) + 8); \
} while(0)
#endif
/* Parameters for AV_COPY*, AV_SWAP*, AV_ZERO* must be
* naturally aligned. They may be implemented using MMX,
* so emms_c() must be called before using any float code
* afterwards.
*/
#define AV_COPY(n, d, s) \
(((av_alias##n*)(d))->u##n = ((const av_alias##n*)(s))->u##n)
#ifndef AV_COPY16
# define AV_COPY16(d, s) AV_COPY(16, d, s)
#endif
#ifndef AV_COPY32
# define AV_COPY32(d, s) AV_COPY(32, d, s)
#endif
#ifndef AV_COPY64
# define AV_COPY64(d, s) AV_COPY(64, d, s)
#endif
#ifndef AV_COPY128
# define AV_COPY128(d, s) \
do { \
AV_COPY64(d, s); \
AV_COPY64((char*)(d)+8, (char*)(s)+8); \
} while(0)
#endif
#define AV_SWAP(n, a, b) FFSWAP(av_alias##n, *(av_alias##n*)(a), *(av_alias##n*)(b))
#ifndef AV_SWAP64
# define AV_SWAP64(a, b) AV_SWAP(64, a, b)
#endif
#define AV_ZERO(n, d) (((av_alias##n*)(d))->u##n = 0)
#ifndef AV_ZERO16
# define AV_ZERO16(d) AV_ZERO(16, d)
#endif
#ifndef AV_ZERO32
# define AV_ZERO32(d) AV_ZERO(32, d)
#endif
#ifndef AV_ZERO64
# define AV_ZERO64(d) AV_ZERO(64, d)
#endif
#ifndef AV_ZERO128
# define AV_ZERO128(d) \
do { \
AV_ZERO64(d); \
AV_ZERO64((char*)(d)+8); \
} while(0)
#endif
#endif /* AVUTIL_INTREADWRITE_H */

View File

@ -1,62 +0,0 @@
/*
* Lagged Fibonacci PRNG
* Copyright (c) 2008 Michael Niedermayer
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_LFG_H
#define AVUTIL_LFG_H
typedef struct AVLFG {
unsigned int state[64];
int index;
} AVLFG;
/** Initialize the generator state from a 32-bit seed. */
void av_lfg_init(AVLFG *c, unsigned int seed);
/**
* Get the next random unsigned 32-bit number using an ALFG.
*
* Please also consider a simple LCG like state = state*1664525+1013904223;
* it may be good enough and faster for your specific use case.
*/
static inline unsigned int av_lfg_get(AVLFG *c){
c->state[c->index & 63] = c->state[(c->index-24) & 63] + c->state[(c->index-55) & 63];
return c->state[c->index++ & 63];
}
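/**
 * Usage sketch (the seed is an arbitrary example value):
 *
 * @code
 * AVLFG prng;
 * av_lfg_init(&prng, 0xDEADBEEF);
 * unsigned int r = av_lfg_get(&prng);          // uniform 32-bit value
 * unsigned int d = av_lfg_get(&prng) % 6 + 1;  // e.g. a (slightly biased) die roll
 * @endcode
 */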
/**
* Get the next random unsigned 32-bit number using a MLFG.
*
* Please also consider av_lfg_get() above, it is faster.
*/
static inline unsigned int av_mlfg_get(AVLFG *c){
unsigned int a= c->state[(c->index-55) & 63];
unsigned int b= c->state[(c->index-24) & 63];
return c->state[c->index++ & 63] = 2*a*b+a+b;
}
/**
* Get the next two numbers generated by a Box-Muller Gaussian
* generator using the random numbers issued by lfg.
*
* @param out array where the two generated numbers are placed
*/
void av_bmg_get(AVLFG *lfg, double out[2]);
#endif /* AVUTIL_LFG_H */

View File

@ -1,313 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_LOG_H
#define AVUTIL_LOG_H
#include <stdarg.h>
#include "avutil.h"
#include "attributes.h"
typedef enum {
AV_CLASS_CATEGORY_NA = 0,
AV_CLASS_CATEGORY_INPUT,
AV_CLASS_CATEGORY_OUTPUT,
AV_CLASS_CATEGORY_MUXER,
AV_CLASS_CATEGORY_DEMUXER,
AV_CLASS_CATEGORY_ENCODER,
AV_CLASS_CATEGORY_DECODER,
AV_CLASS_CATEGORY_FILTER,
AV_CLASS_CATEGORY_BITSTREAM_FILTER,
AV_CLASS_CATEGORY_SWSCALER,
AV_CLASS_CATEGORY_SWRESAMPLER,
AV_CLASS_CATEGORY_NB, ///< not part of ABI/API
}AVClassCategory;
struct AVOptionRanges;
/**
* Describe the class of an AVClass context structure. That is an
* arbitrary struct of which the first field is a pointer to an
* AVClass struct (e.g. AVCodecContext, AVFormatContext etc.).
*/
typedef struct AVClass {
/**
* The name of the class; usually it is the same name as the
* context structure type to which the AVClass is associated.
*/
const char* class_name;
/**
* A pointer to a function which returns the name of a context
* instance ctx associated with the class.
*/
const char* (*item_name)(void* ctx);
/**
* a pointer to the first option specified in the class if any or NULL
*
* @see av_set_default_options()
*/
const struct AVOption *option;
/**
* LIBAVUTIL_VERSION with which this structure was created.
* This is used to allow fields to be added without requiring major
* version bumps everywhere.
*/
int version;
/**
* Offset in the structure where log_level_offset is stored.
* 0 means there is no such variable
*/
int log_level_offset_offset;
/**
* Offset in the structure where a pointer to the parent context for
* logging is stored. For example a decoder could pass its AVCodecContext
* to eval as such a parent context, which an av_log() implementation
* could then leverage to display the parent context.
* The offset can be zero, meaning no parent context is available.
*/
int parent_log_context_offset;
/**
* Return next AVOptions-enabled child or NULL
*/
void* (*child_next)(void *obj, void *prev);
/**
* Return an AVClass corresponding to the next potential
* AVOptions-enabled child.
*
* The difference between child_next and this is that
* child_next iterates over _already existing_ objects, while
* child_class_next iterates over _all possible_ children.
*/
const struct AVClass* (*child_class_next)(const struct AVClass *prev);
/**
* Category used for visualization (like color)
* This is only set if the category is equal for all objects using this class.
* available since version (51 << 16 | 56 << 8 | 100)
*/
AVClassCategory category;
/**
* Callback to return the category.
* available since version (51 << 16 | 59 << 8 | 100)
*/
AVClassCategory (*get_category)(void* ctx);
/**
* Callback to return the supported/allowed ranges.
* available since version (52.12)
*/
int (*query_ranges)(struct AVOptionRanges **, void *obj, const char *key, int flags);
} AVClass;
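/**
 * Minimal sketch of a logging-enabled context (names are illustrative):
 *
 * @code
 * typedef struct MyContext {
 *     const AVClass *av_class;   // must be the first field
 *     int some_option;
 * } MyContext;
 *
 * static const AVClass my_context_class = {
 *     .class_name = "MyContext",
 *     .item_name  = av_default_item_name,
 *     .version    = LIBAVUTIL_VERSION_INT,
 * };
 *
 * // inside a function:
 * MyContext ctx = { .av_class = &my_context_class };
 * av_log(&ctx, AV_LOG_INFO, "initialized\n");
 * @endcode
 */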
/**
* @addtogroup lavu_log
*
* @{
*
* @defgroup lavu_log_constants Logging Constants
*
* @{
*/
/**
* Print no output.
*/
#define AV_LOG_QUIET -8
/**
* Something went really wrong and we will crash now.
*/
#define AV_LOG_PANIC 0
/**
* Something went wrong and recovery is not possible.
* For example, no header was found for a format which depends
* on headers or an illegal combination of parameters is used.
*/
#define AV_LOG_FATAL 8
/**
* Something went wrong and cannot losslessly be recovered.
* However, not all future data is affected.
*/
#define AV_LOG_ERROR 16
/**
* Something somehow does not look correct. This may or may not
* lead to problems. An example would be the use of '-vstrict -2'.
*/
#define AV_LOG_WARNING 24
/**
* Standard information.
*/
#define AV_LOG_INFO 32
/**
* Detailed information.
*/
#define AV_LOG_VERBOSE 40
/**
* Stuff which is only useful for libav* developers.
*/
#define AV_LOG_DEBUG 48
#define AV_LOG_MAX_OFFSET (AV_LOG_DEBUG - AV_LOG_QUIET)
/**
* @}
*/
/**
* Send the specified message to the log if the level is less than or equal
* to the current av_log_level. By default, all logging messages are sent to
* stderr. This behavior can be altered by setting a different logging callback
* function.
* @see av_log_set_callback
*
* @param avcl A pointer to an arbitrary struct of which the first field is a
* pointer to an AVClass struct.
* @param level The importance level of the message expressed using a @ref
* lavu_log_constants "Logging Constant".
* @param fmt The format string (printf-compatible) that specifies how
* subsequent arguments are converted to output.
*/
void av_log(void *avcl, int level, const char *fmt, ...) av_printf_format(3, 4);
/**
* Send the specified message to the log if the level is less than or equal
* to the current av_log_level. By default, all logging messages are sent to
* stderr. This behavior can be altered by setting a different logging callback
* function.
* @see av_log_set_callback
*
* @param avcl A pointer to an arbitrary struct of which the first field is a
* pointer to an AVClass struct.
* @param level The importance level of the message expressed using a @ref
* lavu_log_constants "Logging Constant".
* @param fmt The format string (printf-compatible) that specifies how
* subsequent arguments are converted to output.
* @param vl The arguments referenced by the format string.
*/
void av_vlog(void *avcl, int level, const char *fmt, va_list vl);
/**
* Get the current log level
*
* @see lavu_log_constants
*
* @return Current log level
*/
int av_log_get_level(void);
/**
* Set the log level
*
* @see lavu_log_constants
*
* @param level Logging level
*/
void av_log_set_level(int level);
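/**
 * Sketch: raise the verbosity, then log with and without a context
 * (nb_frames and avctx are placeholder names):
 *
 * @code
 * av_log_set_level(AV_LOG_VERBOSE);
 * av_log(NULL, AV_LOG_VERBOSE, "decoded %d frames\n", nb_frames);
 * av_log(avctx, AV_LOG_WARNING, "something looks odd\n");  // avctx: any AVClass context
 * @endcode
 */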
/**
* Set the logging callback
*
* @note The callback must be thread safe, even if the application does not use
* threads itself as some codecs are multithreaded.
*
* @see av_log_default_callback
*
* @param callback A logging function with a compatible signature.
*/
void av_log_set_callback(void (*callback)(void*, int, const char*, va_list));
/**
* Default logging callback
*
* It prints the message to stderr, optionally colorizing it.
*
* @param avcl A pointer to an arbitrary struct of which the first field is a
* pointer to an AVClass struct.
* @param level The importance level of the message expressed using a @ref
* lavu_log_constants "Logging Constant".
* @param fmt The format string (printf-compatible) that specifies how
* subsequent arguments are converted to output.
* @param ap The arguments referenced by the format string.
*/
void av_log_default_callback(void* ptr, int level, const char* fmt, va_list vl);
/**
* Return the context name
*
* @param ctx The AVClass context
*
* @return The AVClass class_name
*/
const char* av_default_item_name(void* ctx);
AVClassCategory av_default_get_category(void *ptr);
/**
* Format a line of log the same way as the default callback.
* @param line buffer to receive the formatted line
* @param line_size size of the buffer
* @param print_prefix used to store whether the prefix must be printed;
* must point to a persistent integer initially set to 1
*/
void av_log_format_line(void *ptr, int level, const char *fmt, va_list vl,
char *line, int line_size, int *print_prefix);
/**
* av_dlog macros
* Useful to print debug messages that shouldn't get compiled in normally.
*/
#ifdef DEBUG
# define av_dlog(pctx, ...) av_log(pctx, AV_LOG_DEBUG, __VA_ARGS__)
#else
# define av_dlog(pctx, ...) do { if (0) av_log(pctx, AV_LOG_DEBUG, __VA_ARGS__); } while (0)
#endif
/**
* Skip repeated messages. This requires the user app to use av_log() instead of
* (f)printf, as the two would otherwise interfere and lead to
* "Last message repeated x times" messages below (f)printf messages with some
* bad luck.
* Also, to receive the final "last repeated" line if any, the user app must
* call av_log(NULL, AV_LOG_QUIET, "%s", ""); at the end.
*/
#define AV_LOG_SKIP_REPEATED 1
void av_log_set_flags(int arg);
/**
* @}
*/
#endif /* AVUTIL_LOG_H */

View File

@ -1,147 +0,0 @@
/*
* copyright (c) 2005-2012 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_MATHEMATICS_H
#define AVUTIL_MATHEMATICS_H
#include <stdint.h>
#include <math.h>
#include "attributes.h"
#include "rational.h"
#include "intfloat.h"
#ifndef M_E
#define M_E 2.7182818284590452354 /* e */
#endif
#ifndef M_LN2
#define M_LN2 0.69314718055994530942 /* log_e 2 */
#endif
#ifndef M_LN10
#define M_LN10 2.30258509299404568402 /* log_e 10 */
#endif
#ifndef M_LOG2_10
#define M_LOG2_10 3.32192809488736234787 /* log_2 10 */
#endif
#ifndef M_PHI
#define M_PHI 1.61803398874989484820 /* phi / golden ratio */
#endif
#ifndef M_PI
#define M_PI 3.14159265358979323846 /* pi */
#endif
#ifndef M_SQRT1_2
#define M_SQRT1_2 0.70710678118654752440 /* 1/sqrt(2) */
#endif
#ifndef M_SQRT2
#define M_SQRT2 1.41421356237309504880 /* sqrt(2) */
#endif
#ifndef NAN
#define NAN av_int2float(0x7fc00000)
#endif
#ifndef INFINITY
#define INFINITY av_int2float(0x7f800000)
#endif
/**
* @addtogroup lavu_math
* @{
*/
enum AVRounding {
AV_ROUND_ZERO = 0, ///< Round toward zero.
AV_ROUND_INF = 1, ///< Round away from zero.
AV_ROUND_DOWN = 2, ///< Round toward -infinity.
AV_ROUND_UP = 3, ///< Round toward +infinity.
AV_ROUND_NEAR_INF = 5, ///< Round to nearest and halfway cases away from zero.
AV_ROUND_PASS_MINMAX = 8192, ///< Flag to pass INT64_MIN/MAX through instead of rescaling, this avoids special cases for AV_NOPTS_VALUE
};
/**
* Return the greatest common divisor of a and b.
* If both a and b are 0 or either or both are <0 then behavior is
* undefined.
*/
int64_t av_const av_gcd(int64_t a, int64_t b);
/**
* Rescale a 64-bit integer with rounding to nearest.
* A simple a*b/c isn't possible as it can overflow.
*/
int64_t av_rescale(int64_t a, int64_t b, int64_t c) av_const;
/**
* Rescale a 64-bit integer with specified rounding.
* A simple a*b/c isn't possible as it can overflow.
*
* @return rescaled value a, or if AV_ROUND_PASS_MINMAX is set and a is
* INT64_MIN or INT64_MAX then a is passed through unchanged.
*/
int64_t av_rescale_rnd(int64_t a, int64_t b, int64_t c, enum AVRounding) av_const;
/**
* Rescale a 64-bit integer by 2 rational numbers.
*/
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq) av_const;
/**
* Rescale a 64-bit integer by 2 rational numbers with specified rounding.
*
* @return rescaled value a, or if AV_ROUND_PASS_MINMAX is set and a is
* INT64_MIN or INT64_MAX then a is passed through unchanged.
*/
int64_t av_rescale_q_rnd(int64_t a, AVRational bq, AVRational cq,
enum AVRounding) av_const;
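/**
 * Rescaling sketch: convert a timestamp from a 90 kHz timebase to
 * milliseconds, and rescale with explicit rounding:
 *
 * @code
 * AVRational tb_90k = { 1, 90000 };
 * AVRational tb_ms  = { 1, 1000 };
 * int64_t ms = av_rescale_q(180000, tb_90k, tb_ms);         // 2000
 * int64_t r  = av_rescale_rnd(7, 3, 2, AV_ROUND_NEAR_INF);  // 21/2 rounded -> 11
 * @endcode
 */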
/**
* Compare 2 timestamps each in its own timebases.
* The result of the function is undefined if one of the timestamps
* is outside the int64_t range when represented in the others timebase.
* @return -1 if ts_a is before ts_b, 1 if ts_a is after ts_b or 0 if they represent the same position
*/
int av_compare_ts(int64_t ts_a, AVRational tb_a, int64_t ts_b, AVRational tb_b);
/**
* Compare 2 integers modulo mod.
* That is we compare integers a and b for which only the least
* significant log2(mod) bits are known.
*
* @param mod must be a power of 2
* @return a negative value if a is smaller than b
* a positive value if a is greater than b
* 0 if a equals b
*/
int64_t av_compare_mod(uint64_t a, uint64_t b, uint64_t mod);
/**
* Rescale a timestamp while preserving known durations.
*
* @param in_ts Input timestamp
* @param in_tb Input timebase
* @param fs_tb Timebase of duration and *last
* @param duration Duration until the next call
* @param out_tb Output timebase
*/
int64_t av_rescale_delta(AVRational in_tb, int64_t in_ts, AVRational fs_tb, int duration, int64_t *last, AVRational out_tb);
/**
* @}
*/
#endif /* AVUTIL_MATHEMATICS_H */

View File

@ -1,81 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_MD5_H
#define AVUTIL_MD5_H
#include <stdint.h>
#include "attributes.h"
#include "version.h"
/**
* @defgroup lavu_md5 MD5
* @ingroup lavu_crypto
* @{
*/
extern const int av_md5_size;
struct AVMD5;
/**
* Allocate an AVMD5 context.
*/
struct AVMD5 *av_md5_alloc(void);
/**
* Initialize MD5 hashing.
*
* @param ctx pointer to the function context (of size av_md5_size)
*/
void av_md5_init(struct AVMD5 *ctx);
/**
* Update hash value.
*
* @param ctx hash function context
* @param src input data to update hash with
* @param len input data length
*/
void av_md5_update(struct AVMD5 *ctx, const uint8_t *src, int len);
/**
* Finish hashing and output digest value.
*
* @param ctx hash function context
* @param dst buffer where output digest value is stored
*/
void av_md5_final(struct AVMD5 *ctx, uint8_t *dst);
/**
* Hash an array of data.
*
* @param dst The output buffer to write the digest into
* @param src The data to hash
* @param len The length of the data, in bytes
*/
void av_md5_sum(uint8_t *dst, const uint8_t *src, const int len);
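/**
 * One-shot usage sketch (the input string is an arbitrary example):
 *
 * @code
 * uint8_t digest[16];
 * const uint8_t msg[] = "hello";
 * av_md5_sum(digest, msg, sizeof(msg) - 1);   // digest now holds the 16-byte MD5
 * @endcode
 */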
/**
* @}
*/
#endif /* AVUTIL_MD5_H */

View File

@ -1,342 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* memory handling functions
*/
#ifndef AVUTIL_MEM_H
#define AVUTIL_MEM_H
#include <limits.h>
#include <stdint.h>
#include "attributes.h"
#include "error.h"
#include "avutil.h"
/**
* @addtogroup lavu_mem
* @{
*/
#if defined(__INTEL_COMPILER) && __INTEL_COMPILER < 1110 || defined(__SUNPRO_C)
#define DECLARE_ALIGNED(n,t,v) t __attribute__ ((aligned (n))) v
#define DECLARE_ASM_CONST(n,t,v) const t __attribute__ ((aligned (n))) v
#elif defined(__TI_COMPILER_VERSION__)
#define DECLARE_ALIGNED(n,t,v) \
AV_PRAGMA(DATA_ALIGN(v,n)) \
t __attribute__((aligned(n))) v
#define DECLARE_ASM_CONST(n,t,v) \
AV_PRAGMA(DATA_ALIGN(v,n)) \
static const t __attribute__((aligned(n))) v
#elif defined(__GNUC__)
#define DECLARE_ALIGNED(n,t,v) t __attribute__ ((aligned (n))) v
#define DECLARE_ASM_CONST(n,t,v) static const t av_used __attribute__ ((aligned (n))) v
#elif defined(_MSC_VER)
#define DECLARE_ALIGNED(n,t,v) __declspec(align(n)) t v
#define DECLARE_ASM_CONST(n,t,v) __declspec(align(n)) static const t v
#else
#define DECLARE_ALIGNED(n,t,v) t v
#define DECLARE_ASM_CONST(n,t,v) static const t v
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1)
#define av_malloc_attrib __attribute__((__malloc__))
#else
#define av_malloc_attrib
#endif
#if AV_GCC_VERSION_AT_LEAST(4,3)
#define av_alloc_size(...) __attribute__((alloc_size(__VA_ARGS__)))
#else
#define av_alloc_size(...)
#endif
/**
* Allocate a block of size bytes with alignment suitable for all
* memory accesses (including vectors if available on the CPU).
* @param size Size in bytes for the memory block to be allocated.
* @return Pointer to the allocated block, NULL if the block cannot
* be allocated.
* @see av_mallocz()
*/
void *av_malloc(size_t size) av_malloc_attrib av_alloc_size(1);
/**
* Allocate a block of size * nmemb bytes with av_malloc().
* @param nmemb Number of elements
* @param size Size of the single element
* @return Pointer to the allocated block, NULL if the block cannot
* be allocated.
* @see av_malloc()
*/
av_alloc_size(1, 2) static inline void *av_malloc_array(size_t nmemb, size_t size)
{
if (!size || nmemb >= INT_MAX / size)
return NULL;
return av_malloc(nmemb * size);
}
/**
* Allocate or reallocate a block of memory.
* If ptr is NULL and size > 0, allocate a new block. If
* size is zero, free the memory block pointed to by ptr.
* @param ptr Pointer to a memory block already allocated with
* av_realloc() or NULL.
* @param size Size in bytes of the memory block to be allocated or
* reallocated.
* @return Pointer to a newly-reallocated block or NULL if the block
* cannot be reallocated or the function is used to free the memory block.
* @warning Pointers originating from the av_malloc() family of functions must
* not be passed to av_realloc(). The former can be implemented using
* memalign() (or other functions), and there is no guarantee that
* pointers from such functions can be passed to realloc() at all.
* The situation is undefined according to POSIX and may crash with
* some libc implementations.
* @see av_fast_realloc()
*/
void *av_realloc(void *ptr, size_t size) av_alloc_size(2);
/**
* Allocate or reallocate a block of memory.
* This function does the same thing as av_realloc, except:
* - It takes two arguments and checks the result of the multiplication for
* integer overflow.
* - It frees the input block in case of failure, thus avoiding the memory
* leak with the classic "buf = realloc(buf); if (!buf) return -1;".
*/
void *av_realloc_f(void *ptr, size_t nelem, size_t elsize);
/**
* Allocate or reallocate a block of memory.
* If *ptr is NULL and size > 0, allocate a new block. If
* size is zero, free the memory block pointed to by ptr.
* @param ptr Pointer to a pointer to a memory block already allocated
* with av_realloc(), or pointer to a pointer to NULL.
* The pointer is updated on success, or freed on failure.
* @param size Size in bytes for the memory block to be allocated or
* reallocated
* @return Zero on success, an AVERROR error code on failure.
* @warning Pointers originating from the av_malloc() family of functions must
* not be passed to av_reallocp(). The former can be implemented using
* memalign() (or other functions), and there is no guarantee that
* pointers from such functions can be passed to realloc() at all.
* The situation is undefined according to POSIX and may crash with
* some libc implementations.
*/
int av_reallocp(void *ptr, size_t size);
/**
* Allocate or reallocate an array.
* If ptr is NULL and nmemb > 0, allocate a new block. If
* nmemb is zero, free the memory block pointed to by ptr.
* @param ptr Pointer to a memory block already allocated with
* av_realloc() or NULL.
* @param nmemb Number of elements
* @param size Size of the single element
* @return Pointer to a newly-reallocated block or NULL if the block
* cannot be reallocated or the function is used to free the memory block.
* @warning Pointers originating from the av_malloc() family of functions must
* not be passed to av_realloc(). The former can be implemented using
* memalign() (or other functions), and there is no guarantee that
* pointers from such functions can be passed to realloc() at all.
* The situation is undefined according to POSIX and may crash with
* some libc implementations.
*/
av_alloc_size(2, 3) void *av_realloc_array(void *ptr, size_t nmemb, size_t size);
/**
* Allocate or reallocate an array through a pointer to a pointer.
* If *ptr is NULL and nmemb > 0, allocate a new block. If
* nmemb is zero, free the memory block pointed to by ptr.
* @param ptr Pointer to a pointer to a memory block already allocated
* with av_realloc(), or pointer to a pointer to NULL.
* The pointer is updated on success, or freed on failure.
* @param nmemb Number of elements
* @param size Size of the single element
* @return Zero on success, an AVERROR error code on failure.
* @warning Pointers originating from the av_malloc() family of functions must
* not be passed to av_realloc(). The former can be implemented using
* memalign() (or other functions), and there is no guarantee that
* pointers from such functions can be passed to realloc() at all.
* The situation is undefined according to POSIX and may crash with
* some libc implementations.
*/
av_alloc_size(2, 3) int av_reallocp_array(void *ptr, size_t nmemb, size_t size);
/**
* Free a memory block which has been allocated with av_malloc(z)() or
* av_realloc().
* @param ptr Pointer to the memory block which should be freed.
* @note ptr = NULL is explicitly allowed.
* @note It is recommended that you use av_freep() instead.
* @see av_freep()
*/
void av_free(void *ptr);
/**
* Allocate a block of size bytes with alignment suitable for all
* memory accesses (including vectors if available on the CPU) and
* zero all the bytes of the block.
* @param size Size in bytes for the memory block to be allocated.
* @return Pointer to the allocated block, NULL if it cannot be allocated.
* @see av_malloc()
*/
void *av_mallocz(size_t size) av_malloc_attrib av_alloc_size(1);
/**
* Allocate a block of nmemb * size bytes with alignment suitable for all
* memory accesses (including vectors if available on the CPU) and
* zero all the bytes of the block.
* The allocation will fail if nmemb * size is greater than or equal
* to INT_MAX.
* @param nmemb Number of elements
* @param size Size of the single element
* @return Pointer to the allocated block, NULL if it cannot be allocated.
*/
void *av_calloc(size_t nmemb, size_t size) av_malloc_attrib;
/**
* Allocate a block of size * nmemb bytes with av_mallocz().
* @param nmemb Number of elements
* @param size Size of the single element
* @return Pointer to the allocated block, NULL if the block cannot
* be allocated.
* @see av_mallocz()
* @see av_malloc_array()
*/
av_alloc_size(1, 2) static inline void *av_mallocz_array(size_t nmemb, size_t size)
{
if (!size || nmemb >= INT_MAX / size)
return NULL;
return av_mallocz(nmemb * size);
}
/**
* Duplicate the string s.
* @param s string to be duplicated
* @return Pointer to a newly-allocated string containing a
* copy of s or NULL if the string cannot be allocated.
*/
char *av_strdup(const char *s) av_malloc_attrib;
/**
* Duplicate the buffer p.
* @param p buffer to be duplicated
* @return Pointer to a newly allocated buffer containing a
* copy of p or NULL if the buffer cannot be allocated.
*/
void *av_memdup(const void *p, size_t size);
/**
* Free a memory block which has been allocated with av_malloc(z)() or
* av_realloc() and set the pointer pointing to it to NULL.
* @param ptr Pointer to the pointer to the memory block which should
* be freed.
* @see av_free()
*/
void av_freep(void *ptr);
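/**
 * Typical allocate/free sketch (sizes are arbitrary example values):
 *
 * @code
 * uint8_t *buf = av_malloc(4096);
 * int     *tab = av_mallocz_array(16, sizeof(*tab));   // zero-initialized
 * if (!buf || !tab) {
 *     av_freep(&buf);    // NULL-safe, also resets the pointer
 *     av_freep(&tab);
 *     return AVERROR(ENOMEM);
 * }
 * // ... use buf and tab ...
 * av_freep(&buf);
 * av_freep(&tab);
 * @endcode
 */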
/**
* Add an element to a dynamic array.
*
* The array to grow is supposed to be an array of pointers to
* structures, and the element to add must be a pointer to an already
* allocated structure.
*
* The array is reallocated when its size reaches powers of 2.
* Therefore, the amortized cost of adding an element is constant.
*
* In case of success, the pointer to the array is updated in order to
* point to the new grown array, and the number pointed to by nb_ptr
* is incremented.
* In case of failure, the array is freed, *tab_ptr is set to NULL and
* *nb_ptr is set to 0.
*
* @param tab_ptr pointer to the array to grow
* @param nb_ptr pointer to the number of elements in the array
* @param elem element to add
* @see av_dynarray2_add()
*/
void av_dynarray_add(void *tab_ptr, int *nb_ptr, void *elem);
/**
* Add an element of size elem_size to a dynamic array.
*
* The array is reallocated when its number of elements reaches powers of 2.
* Therefore, the amortized cost of adding an element is constant.
*
* In case of success, the pointer to the array is updated in order to
* point to the new grown array, and the number pointed to by nb_ptr
* is incremented.
* In case of failure, the array is freed, *tab_ptr is set to NULL and
* *nb_ptr is set to 0.
*
* @param tab_ptr pointer to the array to grow
* @param nb_ptr pointer to the number of elements in the array
* @param elem_size size in bytes of the elements in the array
* @param elem_data pointer to the data of the element to add. If NULL, the space of
* the new added element is not filled.
* @return pointer to the data of the element to copy into the newly allocated space.
* If NULL, the newly allocated space is left uninitialized.
* @see av_dynarray_add()
*/
void *av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size,
const uint8_t *elem_data);
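/**
 * Growth sketch for av_dynarray_add(), where MyItem is a placeholder struct
 * type and item points to an already-allocated element:
 *
 * @code
 * MyItem **items    = NULL;
 * int      nb_items = 0;
 * av_dynarray_add(&items, &nb_items, item);
 * if (!items)
 *     return AVERROR(ENOMEM);   // on failure the array was freed and nb_items reset to 0
 * // items[0] .. items[nb_items - 1] are now valid
 * av_freep(&items);
 * @endcode
 */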
/**
* Multiply two size_t values checking for overflow.
* @return 0 if success, AVERROR(EINVAL) if overflow.
*/
static inline int av_size_mult(size_t a, size_t b, size_t *r)
{
size_t t = a * b;
/* Hack inspired by glibc: only try the division if a or b is at least
* sqrt(SIZE_MAX), since the product cannot overflow otherwise. */
if ((a | b) >= ((size_t)1 << (sizeof(size_t) * 4)) && a && t / a != b)
return AVERROR(EINVAL);
*r = t;
return 0;
}
/**
* Set the maximum size that may be allocated in one block.
*/
void av_max_alloc(size_t max);
/**
* deliberately overlapping memcpy implementation
* @param dst destination buffer
* @param back how many bytes back we start (the initial size of the overlapping window), must be > 0
* @param cnt number of bytes to copy, must be >= 0
*
* cnt > back is valid; this will copy the bytes we just copied,
* thus creating a repeating pattern with a period length of back.
*/
void av_memcpy_backptr(uint8_t *dst, int back, int cnt);
/**
* @}
*/
#endif /* AVUTIL_MEM_H */

View File

@ -1,32 +0,0 @@
/*
* Copyright (C) 2013 Reimar Döffinger <Reimar.Doeffinger@gmx.de>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_MURMUR3_H
#define AVUTIL_MURMUR3_H
#include <stdint.h>
/** Allocate an AVMurMur3 hash context. */
struct AVMurMur3 *av_murmur3_alloc(void);
/** (Re)initialize a hash context with an explicit seed. */
void av_murmur3_init_seeded(struct AVMurMur3 *c, uint64_t seed);
/** (Re)initialize a hash context with the default seed. */
void av_murmur3_init(struct AVMurMur3 *c);
/** Feed len bytes from src into the hash. */
void av_murmur3_update(struct AVMurMur3 *c, const uint8_t *src, int len);
/** Finish hashing and write the 128-bit (16-byte) digest into dst. */
void av_murmur3_final(struct AVMurMur3 *c, uint8_t dst[16]);
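/**
 * Incremental hashing sketch (the input is an arbitrary example; the context
 * is released with av_free() since no dedicated free function exists):
 *
 * @code
 * uint8_t digest[16];
 * const uint8_t data[] = "some bytes";
 * struct AVMurMur3 *h = av_murmur3_alloc();
 * if (!h)
 *     return AVERROR(ENOMEM);
 * av_murmur3_init(h);
 * av_murmur3_update(h, data, sizeof(data) - 1);
 * av_murmur3_final(h, digest);
 * av_free(h);
 * @endcode
 */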
#endif /* AVUTIL_MURMUR3_H */
