Merge branch 'master' of github.com:limelight-stream/limelight-ios

# By Cameron Gutman
# Via Cameron Gutman
* 'master' of github.com:limelight-stream/limelight-ios:
  Improve touch input support
  Add some WIP touch input support
  Implement working audio support
  Video fully works now
Diego Waxemberg committed on 2014-10-20 02:59:50 -04:00
commit 42773241c1
8 changed files with 250 additions and 65 deletions

View File

@@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
984C441819F48D1D0061A500 /* StreamView.m in Sources */ = {isa = PBXBuildFile; fileRef = 984C441719F48D1D0061A500 /* StreamView.m */; };
98A03B4D19F352EB00861ACA /* liblimelight-common.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 98A03B4A19F3514B00861ACA /* liblimelight-common.a */; };
98A03B5019F3598400861ACA /* VideoDecoderRenderer.m in Sources */ = {isa = PBXBuildFile; fileRef = 98A03B4F19F3598400861ACA /* VideoDecoderRenderer.m */; };
98A03B5119F35AAC00861ACA /* libcrypto.a in Frameworks */ = {isa = PBXBuildFile; fileRef = FBCC0E9819EF9703009729EB /* libcrypto.a */; };
@@ -77,6 +78,8 @@
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
984C441619F48D1D0061A500 /* StreamView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = StreamView.h; sourceTree = "<group>"; };
984C441719F48D1D0061A500 /* StreamView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamView.m; sourceTree = "<group>"; };
98A03B4519F3514B00861ACA /* limelight-common.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = "limelight-common.xcodeproj"; path = "limelight-common-c/limelight-common.xcodeproj"; sourceTree = "<group>"; };
98A03B4E19F3598400861ACA /* VideoDecoderRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = VideoDecoderRenderer.h; path = Limelight/VideoDecoderRenderer.h; sourceTree = SOURCE_ROOT; };
98A03B4F19F3598400861ACA /* VideoDecoderRenderer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = VideoDecoderRenderer.m; path = Limelight/VideoDecoderRenderer.m; sourceTree = SOURCE_ROOT; };
@@ -322,6 +325,8 @@
FBCC0E9C19F00659009729EB /* mkcert.c */,
FBC8622B19F0BEFB0087327B /* HttpManager.h */,
FBC8622C19F0BEFB0087327B /* HttpManager.m */,
984C441619F48D1D0061A500 /* StreamView.h */,
984C441719F48D1D0061A500 /* StreamView.m */,
);
path = Limelight;
sourceTree = "<group>";
@@ -687,6 +692,7 @@
FB290D3A19B2C6E3004C83CF /* StreamFrameViewController.m in Sources */,
FB290D0019B2C406004C83CF /* main.m in Sources */,
FB290D3919B2C6E3004C83CF /* MainFrameViewController.m in Sources */,
984C441819F48D1D0061A500 /* StreamView.m in Sources */,
FB290D3719B2C6E3004C83CF /* Connection.m in Sources */,
FBCC0E9D19F00659009729EB /* mkcert.c in Sources */,
FB290D3819B2C6E3004C83CF /* ConnectionHandler.m in Sources */,

View File

@@ -28,10 +28,17 @@ static OpusDecoder *opusDecoder;
#define PCM_BUFFER_SIZE 1024
#define OUTPUT_BUS 0
static short* decodedPcmBuffer;
static int filledPcmBuffer;
struct AUDIO_BUFFER_QUEUE_ENTRY {
struct AUDIO_BUFFER_QUEUE_ENTRY *next;
int length;
int offset;
char data[0];
};
static short decodedPcmBuffer[512];
static NSLock *audioLock;
static struct AUDIO_BUFFER_QUEUE_ENTRY *audioBufferQueue;
static AudioComponentInstance audioUnit;
static bool started = false;
static VideoDecoderRenderer* renderer;
void DrSetup(int width, int height, int fps, void* context, int drFlags)
@@ -81,18 +88,13 @@ void ArInit(void)
opusDecoder = opus_decoder_create(48000, 2, &err);
decodedPcmBuffer = malloc(PCM_BUFFER_SIZE);
audioLock = [[NSLock alloc] init];
}
void ArRelease(void)
{
printf("Release audio\n");
if (decodedPcmBuffer != NULL) {
free(decodedPcmBuffer);
decodedPcmBuffer = NULL;
}
if (opusDecoder != NULL) {
opus_decoder_destroy(opusDecoder);
opusDecoder = NULL;
@@ -102,6 +104,7 @@ void ArRelease(void)
void ArStart(void)
{
printf("Start audio\n");
AudioOutputUnitStart(audioUnit);
}
void ArStop(void)
@@ -111,17 +114,31 @@ void ArStop(void)
void ArDecodeAndPlaySample(char* sampleData, int sampleLength)
{
if (!started) {
AudioOutputUnitStart(audioUnit);
started = true;
}
filledPcmBuffer = opus_decode(opusDecoder, (unsigned char*)sampleData, sampleLength, decodedPcmBuffer, PCM_BUFFER_SIZE / 2, 0);
if (filledPcmBuffer > 0) {
int decodedLength = opus_decode(opusDecoder, (unsigned char*)sampleData, sampleLength, decodedPcmBuffer, PCM_BUFFER_SIZE / 2, 0);
if (decodedLength > 0) {
// Return of opus_decode is samples per channel
filledPcmBuffer *= 4;
decodedLength *= 4;
NSLog(@"pcmBuffer: %d", filledPcmBuffer);
struct AUDIO_BUFFER_QUEUE_ENTRY *newEntry = malloc(sizeof(*newEntry) + decodedLength);
if (newEntry != NULL) {
newEntry->next = NULL;
newEntry->length = decodedLength;
newEntry->offset = 0;
memcpy(newEntry->data, decodedPcmBuffer, decodedLength);
[audioLock lock];
if (audioBufferQueue == NULL) {
audioBufferQueue = newEntry;
}
else {
struct AUDIO_BUFFER_QUEUE_ENTRY *lastEntry = audioBufferQueue;
while (lastEntry->next != NULL) {
lastEntry = lastEntry->next;
}
lastEntry->next = newEntry;
}
[audioLock unlock];
}
}
}
@@ -193,15 +210,15 @@ void ClDisplayTransientMessage(char* message)
clCallbacks.displayMessage = ClDisplayMessage;
clCallbacks.displayTransientMessage = ClDisplayTransientMessage;
//////// Don't think any of this is used /////////
// Configure the audio session for our app
NSError *audioSessionError = nil;
AVAudioSession* audioSession = [AVAudioSession sharedInstance];
[audioSession setPreferredSampleRate:48000.0 error:&audioSessionError];
[audioSession setPreferredSampleRate:48000.0 error:&audioSessionError];
[audioSession setCategory: AVAudioSessionCategoryPlayAndRecord error: &audioSessionError];
[audioSession setPreferredOutputNumberOfChannels:2 error:&audioSessionError];
[audioSession setPreferredIOBufferDuration:0.005 error:&audioSessionError];
[audioSession setActive: YES error: &audioSessionError];
//////////////////////////////////////////////////
OSStatus status;
@@ -218,26 +235,25 @@ void ClDisplayTransientMessage(char* message)
NSLog(@"Unable to instantiate new AudioComponent: %d", (int32_t)status);
}
AudioStreamBasicDescription audioFormat = {0};
audioFormat.mSampleRate = 48000;
audioFormat.mBitsPerChannel = 16;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger;
audioFormat.mFramesPerPacket = 1;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
audioFormat.mChannelsPerFrame = 2;
audioFormat.mBytesPerFrame = 960;
audioFormat.mBytesPerPacket = 960;
audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * (audioFormat.mBitsPerChannel / 8);
audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame;
audioFormat.mFramesPerPacket = audioFormat.mBytesPerPacket / audioFormat.mBytesPerFrame;
audioFormat.mReserved = 0;
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
kAudioUnitScope_Input,
OUTPUT_BUS,
&audioFormat,
sizeof(audioFormat));
if (status) {
NSLog(@"Unable to set audio unit to output: %d", (int32_t)status);
NSLog(@"Unable to set audio unit to input: %d", (int32_t)status);
}
AURenderCallbackStruct callbackStruct = {0};
@@ -246,7 +262,7 @@ void ClDisplayTransientMessage(char* message)
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Global,
kAudioUnitScope_Input,
OUTPUT_BUS,
&callbackStruct,
sizeof(callbackStruct));
@@ -272,15 +288,60 @@ static OSStatus playbackCallback(void *inRefCon,
// Fill them up as much as you can. Remember to set the size value in each buffer to match how
// much data is in the buffer.
NSLog(@"Playback callback");
for (int i = 0; i < ioData->mNumberBuffers; i++) {
ioData->mBuffers[i].mNumberChannels = 2;
int min = MIN(ioData->mBuffers[i].mDataByteSize, filledPcmBuffer);
NSLog(@"Min: %d", min);
memcpy(ioData->mBuffers[i].mData, decodedPcmBuffer, min);
ioData->mBuffers[i].mDataByteSize = min;
filledPcmBuffer -= min;
if (ioData->mBuffers[i].mDataByteSize != 0) {
int thisBufferOffset = 0;
FillBufferAgain:
// Make sure there's data to write
if (ioData->mBuffers[i].mDataByteSize - thisBufferOffset == 0) {
continue;
}
// Wait for a buffer to be available
// FIXME: This needs optimization to avoid busy waiting for buffers
struct AUDIO_BUFFER_QUEUE_ENTRY *audioEntry = NULL;
while (audioEntry == NULL)
{
[audioLock lock];
if (audioBufferQueue != NULL) {
// Dequeue this entry temporarily
audioEntry = audioBufferQueue;
audioBufferQueue = audioBufferQueue->next;
}
[audioLock unlock];
}
// Figure out how much data we can write
int min = MIN(ioData->mBuffers[i].mDataByteSize - thisBufferOffset, audioEntry->length);
// Copy data to the audio buffer
memcpy(&ioData->mBuffers[i].mData[thisBufferOffset], &audioEntry->data[audioEntry->offset], min);
thisBufferOffset += min;
if (min < audioEntry->length) {
// This entry still has unused data
audioEntry->length -= min;
audioEntry->offset += min;
// Requeue the entry
[audioLock lock];
audioEntry->next = audioBufferQueue;
audioBufferQueue = audioEntry;
[audioLock unlock];
}
else {
// This entry is fully depleted so free it
free(audioEntry);
// Try to grab another sample to fill this buffer with
goto FillBufferAgain;
}
ioData->mBuffers[i].mDataByteSize = thisBufferOffset;
}
}
return noErr;
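
For context, here is a minimal standalone sketch (all values illustrative, not taken from the commit) of the interleaved-PCM arithmetic the reworked ArDecodeAndPlaySample and AudioStreamBasicDescription rely on: opus_decode returns samples per channel, and 16-bit signed stereo occupies 4 bytes per frame, which is why the decoded length is multiplied by 4 and why mBytesPerFrame is computed as channels * (bits / 8).

#include <stdio.h>

int main(void) {
    // Illustrative numbers only: 16-bit signed stereo PCM at 48 kHz, with
    // 240 samples per channel (one 5 ms buffer, matching the preferred IO
    // buffer duration requested from AVAudioSession above).
    int channels          = 2;
    int bitsPerChannel    = 16;
    int samplesPerChannel = 240;

    int bytesPerFrame = channels * (bitsPerChannel / 8);   // 4, as in mBytesPerFrame
    int decodedBytes  = samplesPerChannel * bytesPerFrame; // 960, i.e. decodedLength *= 4

    printf("bytesPerFrame=%d decodedBytes=%d\n", bytesPerFrame, decodedBytes);
    return 0;
}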

Limelight/StreamView.h Normal file (13 lines added)
View File

@@ -0,0 +1,13 @@
//
// StreamView.h
// Limelight
//
// Created by Cameron Gutman on 10/19/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface StreamView : UIView
@end

Limelight/StreamView.m Normal file (59 lines added)
View File

@@ -0,0 +1,59 @@
//
// StreamView.m
// Limelight
//
// Created by Cameron Gutman on 10/19/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import "StreamView.h"
#include <Limelight.h>
@implementation StreamView {
CGPoint touchLocation;
BOOL touchMoved;
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
touchLocation = [touch locationInView:self];
touchMoved = false;
NSLog(@"Touch down");
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
CGPoint currentLocation = [touch locationInView:self];
if (touchLocation.x != currentLocation.x ||
touchLocation.y != currentLocation.y)
{
LiSendMouseMoveEvent(touchLocation.x - currentLocation.x,
touchLocation.y - currentLocation.y);
touchMoved = true;
touchLocation = currentLocation;
}
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
NSLog(@"Touch up");
if (!touchMoved) {
NSLog(@"Sending left mouse button press");
LiSendMouseButtonEvent(BUTTON_ACTION_PRESS, BUTTON_LEFT);
// Wait 100 ms to simulate a real button press
usleep(100 * 1000);
LiSendMouseButtonEvent(BUTTON_ACTION_RELEASE, BUTTON_LEFT);
}
}
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event {
}
@end
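
One note on the click simulation above: usleep(100 * 1000) inside touchesEnded: blocks the UI thread for 100 ms. A hedged alternative sketch, not part of this commit and using a hypothetical helper name, would schedule the release with GCD instead so the touch handler returns immediately:

#import "StreamView.h"
#include <Limelight.h>

// Alternative sketch only; the commit above sleeps on the UI thread instead.
// Sends the left-button press immediately and schedules the matching release
// 100 ms later on the main queue, so touchesEnded: returns without blocking.
static void SendSimulatedLeftClick(void) {
    LiSendMouseButtonEvent(BUTTON_ACTION_PRESS, BUTTON_LEFT);
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(100 * NSEC_PER_MSEC)),
                   dispatch_get_main_queue(), ^{
        LiSendMouseButtonEvent(BUTTON_ACTION_RELEASE, BUTTON_LEFT);
    });
}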

View File

@@ -14,6 +14,8 @@
- (id)initWithView:(UIView*)view;
- (size_t)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength;
- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length;
@end

View File

@@ -25,7 +25,7 @@
displayLayer.bounds = view.bounds;
displayLayer.backgroundColor = [UIColor greenColor].CGColor;
displayLayer.position = CGPointMake(CGRectGetMidX(view.bounds), CGRectGetMidY(view.bounds));
displayLayer.videoGravity = AVLayerVideoGravityResize;
displayLayer.videoGravity = AVLayerVideoGravityResizeAspect;
[view.layer addSublayer:displayLayer];
// We need some parameter sets before we can properly start decoding frames
@@ -36,31 +36,66 @@
return self;
}
#define ES_START_PREFIX_SIZE 4
#define FRAME_START_PREFIX_SIZE 4
#define NALU_START_PREFIX_SIZE 3
- (size_t)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength
{
OSStatus status;
size_t oldOffset = CMBlockBufferGetDataLength(existingBuffer);
status = CMBlockBufferAppendMemoryBlock(existingBuffer, NULL,
((4 + nalLength) - NALU_START_PREFIX_SIZE),
kCFAllocatorDefault, NULL, 0,
((4 + nalLength) - NALU_START_PREFIX_SIZE), 0);
if (status != noErr) {
NSLog(@"CMBlockBufferAppendMemoryBlock failed: %d", (int)status);
return 0;
}
int dataLength = nalLength - NALU_START_PREFIX_SIZE;
const uint8_t lengthBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
(uint8_t)(dataLength >> 8), (uint8_t)dataLength};
status = CMBlockBufferReplaceDataBytes(lengthBytes, existingBuffer,
oldOffset, 4);
if (status != noErr) {
NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
return 0;
}
status = CMBlockBufferReplaceDataBytes(&data[offset+NALU_START_PREFIX_SIZE], existingBuffer,
oldOffset + 4, dataLength);
if (status != noErr) {
NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
return 0;
}
return 4 + dataLength;
}
- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length
{
unsigned char nalType = data[ES_START_PREFIX_SIZE] & 0x1F;
unsigned char nalType = data[FRAME_START_PREFIX_SIZE] & 0x1F;
OSStatus status;
if (formatDesc == NULL && (nalType == 0x7 || nalType == 0x8)) {
if (waitingForSps && nalType == 0x7) {
NSLog(@"Got SPS");
spsData = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
spsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
waitingForSps = false;
}
// Nvidia's stream has 2 PPS NALUs so we'll wait for both of them
else if ((waitingForPpsA || waitingForPpsB) && nalType == 0x8) {
// Read the NALU's PPS index to figure out which PPS this is
printf("PPS BYTE: %02x", data[ES_START_PREFIX_SIZE + 1]);
if (waitingForPpsA) {
NSLog(@"Got PPS 1");
ppsDataA = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
ppsDataA = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
waitingForPpsA = false;
ppsDataAFirstByte = data[ES_START_PREFIX_SIZE + 1];
ppsDataAFirstByte = data[FRAME_START_PREFIX_SIZE + 1];
}
else if (data[ES_START_PREFIX_SIZE + 1] != ppsDataAFirstByte) {
else if (data[FRAME_START_PREFIX_SIZE + 1] != ppsDataAFirstByte) {
NSLog(@"Got PPS 2");
ppsDataA = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
ppsDataA = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
waitingForPpsB = false;
}
}
@@ -100,29 +135,39 @@
// Now we're decoding actual frame data here
CMBlockBufferRef blockBuffer;
status = CMBlockBufferCreateWithMemoryBlock(NULL, data, length, kCFAllocatorNull, NULL, 0, length, 0, &blockBuffer);
status = CMBlockBufferCreateEmpty(NULL, 0, 0, &blockBuffer);
if (status != noErr) {
NSLog(@"CMBlockBufferCreateWithMemoryBlock failed: %d", (int)status);
NSLog(@"CMBlockBufferCreateEmpty failed: %d", (int)status);
return;
}
// Compute the new length prefix to replace the 00 00 00 01
int dataLength = length - ES_START_PREFIX_SIZE;
const uint8_t lengthBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
(uint8_t)(dataLength >> 8), (uint8_t)dataLength};
status = CMBlockBufferReplaceDataBytes(lengthBytes, blockBuffer, 0, 4);
if (status != noErr) {
NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
return;
int lastOffset = -1;
for (int i = 0; i < length - FRAME_START_PREFIX_SIZE; i++) {
// Search for a NALU
if (data[i] == 0 && data[i+1] == 0 && data[i+2] == 1) {
// It's the start of a new NALU
if (lastOffset != -1) {
// We've seen a start before this so enqueue that NALU
[self updateBufferForRange:blockBuffer data:data offset:lastOffset length:i - lastOffset];
}
lastOffset = i;
}
}
if (lastOffset != -1) {
// Enqueue the remaining data
[self updateBufferForRange:blockBuffer data:data offset:lastOffset length:length - lastOffset];
}
CMSampleBufferRef sampleBuffer;
const size_t sampleSizeArray[] = {length};
status = CMSampleBufferCreate(kCFAllocatorDefault,
blockBuffer, true, NULL,
blockBuffer,
true, NULL,
NULL, formatDesc, 1, 0,
NULL, 1, sampleSizeArray,
NULL, 0, NULL,
&sampleBuffer);
if (status != noErr) {
NSLog(@"CMSampleBufferCreate failed: %d", (int)status);

View File

@@ -1,7 +1,6 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6245" systemVersion="14A386a" targetRuntime="iOS.CocoaTouch.iPad" propertyAccessControl="none" useAutolayout="YES" initialViewController="wb7-af-jn8">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6245" systemVersion="14A389" targetRuntime="iOS.CocoaTouch.iPad" propertyAccessControl="none" useAutolayout="YES" initialViewController="wb7-af-jn8">
<dependencies>
<deployment defaultVersion="1808" identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6238"/>
</dependencies>
<scenes>
@@ -73,7 +72,7 @@
<viewControllerLayoutGuide type="top" id="NG3-N1-D4k"/>
<viewControllerLayoutGuide type="bottom" id="3MH-n6-BSR"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="VPm-Ae-rc4" userLabel="RenderView">
<view key="view" contentMode="scaleToFill" id="VPm-Ae-rc4" userLabel="RenderView" customClass="StreamView">
<rect key="frame" x="0.0" y="0.0" width="1024" height="768"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<color key="backgroundColor" white="0.33333333333333331" alpha="1" colorSpace="calibratedWhite"/>

View File

@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6245" systemVersion="14A386a" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" initialViewController="dgh-JZ-Q7z">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6245" systemVersion="14A389" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" initialViewController="dgh-JZ-Q7z">
<dependencies>
<deployment defaultVersion="1808" identifier="iOS"/>
<deployment defaultVersion="2048" identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6238"/>
</dependencies>
<scenes>
@@ -88,7 +88,7 @@
<viewControllerLayoutGuide type="top" id="DRq-YB-9Rh"/>
<viewControllerLayoutGuide type="bottom" id="KH1-hM-RYW"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="eir-e9-IPE">
<view key="view" contentMode="scaleToFill" id="eir-e9-IPE" customClass="StreamView">
<rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<color key="backgroundColor" white="0.33333333333333331" alpha="1" colorSpace="calibratedWhite"/>