Merge branch 'master' of github.com:limelight-stream/limelight-ios

Cameron Gutman
2014-10-21 04:19:13 -04:00
35 changed files with 150 additions and 135 deletions


@@ -0,0 +1,25 @@
//
// Connection.h
// Limelight-iOS
//
// Created by Diego Waxemberg on 1/19/14.
// Copyright (c) 2014 Diego Waxemberg. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "VideoDecoderRenderer.h"
#import "StreamConfiguration.h"
@protocol ConTermCallback <NSObject>
- (void) connectionTerminated;
@end
@interface Connection : NSOperation <NSStreamDelegate>
-(id) initWithConfig:(StreamConfiguration*)config renderer:(VideoDecoderRenderer*)myRenderer connectionTerminatedCallback:(id<ConTermCallback>)callback;
-(void) terminate;
-(void) main;
@end
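
A minimal usage sketch (hypothetical caller, not part of this commit): Connection is an NSOperation, so it is enqueued rather than run directly, mirroring how StreamManager drives it later in this commit.

    // Assumes `config`, `renderer`, and a `self` that adopts ConTermCallback already exist.
    Connection* conn = [[Connection alloc] initWithConfig:config
                                                 renderer:renderer
                             connectionTerminatedCallback:self];
    NSOperationQueue* queue = [[NSOperationQueue alloc] init];
    [queue addOperation:conn];   // -main calls LiStartConnection on the queue's thread
    // Later, to tear the stream down:
    [conn terminate];            // asynchronously calls LiStopConnection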


@@ -0,0 +1,417 @@
//
// Connection.m
// Limelight-iOS
//
// Created by Diego Waxemberg on 1/19/14.
// Copyright (c) 2014 Diego Waxemberg. All rights reserved.
//
#import "Connection.h"
#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#include "Limelight.h"
#include "opus.h"
@implementation Connection {
    IP_ADDRESS _host;
    STREAM_CONFIGURATION _streamConfig;
    CONNECTION_LISTENER_CALLBACKS _clCallbacks;
    DECODER_RENDERER_CALLBACKS _drCallbacks;
    AUDIO_RENDERER_CALLBACKS _arCallbacks;
}

static OpusDecoder *opusDecoder;
static id<ConTermCallback> _callback;

#define PCM_BUFFER_SIZE 1024
#define OUTPUT_BUS 0

struct AUDIO_BUFFER_QUEUE_ENTRY {
    struct AUDIO_BUFFER_QUEUE_ENTRY *next;
    int length;
    int offset;
    char data[0];
};

#define MAX_QUEUE_ENTRIES 10

static short decodedPcmBuffer[512];
static NSLock *audioLock;
static struct AUDIO_BUFFER_QUEUE_ENTRY *audioBufferQueue;
static int audioBufferQueueLength;
static AudioComponentInstance audioUnit;
static VideoDecoderRenderer* renderer;

// Forward declaration so ArInit can reference the render callback defined at the bottom of this file
static OSStatus playbackCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData);
void DrSetup(int width, int height, int fps, void* context, int drFlags)
{
}

void DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit)
{
    unsigned char* data = (unsigned char*) malloc(decodeUnit->fullLength);
    if (data != NULL) {
        int offset = 0;
        PLENTRY entry = decodeUnit->bufferList;
        while (entry != NULL) {
            memcpy(&data[offset], entry->data, entry->length);
            offset += entry->length;
            entry = entry->next;
        }

        // This function will take our buffer
        [renderer submitDecodeBuffer:data length:decodeUnit->fullLength];
    }
}

void DrStart(void)
{
}

void DrStop(void)
{
}

void DrRelease(void)
{
}

void ArInit(void)
{
    int err;

    opusDecoder = opus_decoder_create(48000, 2, &err);

    audioLock = [[NSLock alloc] init];

    // Configure the audio session for our app
    NSError *audioSessionError = nil;
    AVAudioSession* audioSession = [AVAudioSession sharedInstance];
    [audioSession setPreferredSampleRate:48000.0 error:&audioSessionError];
    [audioSession setCategory: AVAudioSessionCategoryPlayback error: &audioSessionError];
    [audioSession setPreferredOutputNumberOfChannels:2 error:&audioSessionError];
    [audioSession setPreferredIOBufferDuration:0.005 error:&audioSessionError];
    [audioSession setActive: YES error: &audioSessionError];

    OSStatus status;

    AudioComponentDescription audioDesc;
    audioDesc.componentType = kAudioUnitType_Output;
    audioDesc.componentSubType = kAudioUnitSubType_RemoteIO;
    audioDesc.componentFlags = 0;
    audioDesc.componentFlagsMask = 0;
    audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;

    status = AudioComponentInstanceNew(AudioComponentFindNext(NULL, &audioDesc), &audioUnit);
    if (status) {
        NSLog(@"Unable to instantiate new AudioComponent: %d", (int32_t)status);
    }

    AudioStreamBasicDescription audioFormat = {0};
    audioFormat.mSampleRate = 48000;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
    audioFormat.mChannelsPerFrame = 2;
    audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * (audioFormat.mBitsPerChannel / 8);
    audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame;
    audioFormat.mFramesPerPacket = audioFormat.mBytesPerPacket / audioFormat.mBytesPerFrame;
    audioFormat.mReserved = 0;

    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  OUTPUT_BUS,
                                  &audioFormat,
                                  sizeof(audioFormat));
    if (status) {
        NSLog(@"Unable to set audio unit to input: %d", (int32_t)status);
    }

    AURenderCallbackStruct callbackStruct = {0};
    callbackStruct.inputProc = playbackCallback;
    callbackStruct.inputProcRefCon = NULL;

    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  OUTPUT_BUS,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    if (status) {
        NSLog(@"Unable to set audio unit callback: %d", (int32_t)status);
    }

    status = AudioUnitInitialize(audioUnit);
    if (status) {
        NSLog(@"Unable to initialize audioUnit: %d", (int32_t)status);
    }
}
void ArRelease(void)
{
    if (opusDecoder != NULL) {
        opus_decoder_destroy(opusDecoder);
        opusDecoder = NULL;
    }

    OSStatus status = AudioUnitUninitialize(audioUnit);
    if (status) {
        NSLog(@"Unable to uninitialize audioUnit: %d", (int32_t)status);
    }

    // Audio session is now inactive
    AVAudioSession* audioSession = [AVAudioSession sharedInstance];
    [audioSession setActive: NO error: nil];

    // This is safe because we're guaranteed that nobody
    // is touching this list now
    struct AUDIO_BUFFER_QUEUE_ENTRY *entry;
    while (audioBufferQueue != NULL) {
        entry = audioBufferQueue;
        audioBufferQueue = entry->next;
        audioBufferQueueLength--;
        free(entry);
    }
}
void ArStart(void)
{
    OSStatus status = AudioOutputUnitStart(audioUnit);
    if (status) {
        NSLog(@"Unable to start audioUnit: %d", (int32_t)status);
    }
}

void ArStop(void)
{
    OSStatus status = AudioOutputUnitStop(audioUnit);
    if (status) {
        NSLog(@"Unable to stop audioUnit: %d", (int32_t)status);
    }
}

void ArDecodeAndPlaySample(char* sampleData, int sampleLength)
{
    int decodedLength = opus_decode(opusDecoder, (unsigned char*)sampleData, sampleLength, decodedPcmBuffer, PCM_BUFFER_SIZE / 2, 0);
    if (decodedLength > 0) {
        // Return of opus_decode is samples per channel, so multiply
        // by 2 channels * 2 bytes per sample to get the byte count
        decodedLength *= 4;

        struct AUDIO_BUFFER_QUEUE_ENTRY *newEntry = malloc(sizeof(*newEntry) + decodedLength);
        if (newEntry != NULL) {
            newEntry->next = NULL;
            newEntry->length = decodedLength;
            newEntry->offset = 0;
            memcpy(newEntry->data, decodedPcmBuffer, decodedLength);

            [audioLock lock];
            if (audioBufferQueueLength > MAX_QUEUE_ENTRIES) {
                NSLog(@"Audio player too slow. Dropping all decoded samples!");

                // Clear all values from the buffer queue
                struct AUDIO_BUFFER_QUEUE_ENTRY *entry;
                while (audioBufferQueue != NULL) {
                    entry = audioBufferQueue;
                    audioBufferQueue = entry->next;
                    audioBufferQueueLength--;
                    free(entry);
                }
            }

            if (audioBufferQueue == NULL) {
                audioBufferQueue = newEntry;
            }
            else {
                struct AUDIO_BUFFER_QUEUE_ENTRY *lastEntry = audioBufferQueue;
                while (lastEntry->next != NULL) {
                    lastEntry = lastEntry->next;
                }
                lastEntry->next = newEntry;
            }
            audioBufferQueueLength++;
            [audioLock unlock];
        }
    }
}
void ClStageStarting(int stage)
{
}

void ClStageComplete(int stage)
{
}

void ClStageFailed(int stage, long errorCode)
{
    NSLog(@"Stage %d failed: %ld", stage, errorCode);
}

void ClConnectionStarted(void)
{
    NSLog(@"Connection started");
}

void ClConnectionTerminated(long errorCode)
{
    NSLog(@"ConnectionTerminated: %ld", errorCode);
    [_callback connectionTerminated];
}

void ClDisplayMessage(char* message)
{
    NSLog(@"DisplayMessage: %s", message);
}

void ClDisplayTransientMessage(char* message)
{
    NSLog(@"DisplayTransientMessage: %s", message);
}

-(void) terminate
{
    // We dispatch this async to get out because this can be invoked
    // on a thread inside common and we don't want to deadlock
    dispatch_async(dispatch_get_main_queue(), ^{
        // This is safe to call even before LiStartConnection
        LiStopConnection();
    });
}

-(id) initWithConfig:(StreamConfiguration*)config renderer:(VideoDecoderRenderer*)myRenderer connectionTerminatedCallback:(id<ConTermCallback>)callback
{
    self = [super init];

    _host = config.hostAddr;
    renderer = myRenderer;
    _callback = callback;

    _streamConfig.width = config.width;
    _streamConfig.height = config.height;
    _streamConfig.fps = config.frameRate;
    _streamConfig.bitrate = config.bitRate;
    _streamConfig.packetSize = 1024;

    memcpy(_streamConfig.remoteInputAesKey, [config.riKey bytes], [config.riKey length]);
    memset(_streamConfig.remoteInputAesIv, 0, 16);
    int riKeyId = htonl(config.riKeyId);
    memcpy(_streamConfig.remoteInputAesIv, &riKeyId, sizeof(riKeyId));

    _drCallbacks.setup = DrSetup;
    _drCallbacks.start = DrStart;
    _drCallbacks.stop = DrStop;
    _drCallbacks.release = DrRelease;
    _drCallbacks.submitDecodeUnit = DrSubmitDecodeUnit;

    _arCallbacks.init = ArInit;
    _arCallbacks.start = ArStart;
    _arCallbacks.stop = ArStop;
    _arCallbacks.release = ArRelease;
    _arCallbacks.decodeAndPlaySample = ArDecodeAndPlaySample;

    _clCallbacks.stageStarting = ClStageStarting;
    _clCallbacks.stageComplete = ClStageComplete;
    _clCallbacks.stageFailed = ClStageFailed;
    _clCallbacks.connectionStarted = ClConnectionStarted;
    _clCallbacks.connectionTerminated = ClConnectionTerminated;
    _clCallbacks.displayMessage = ClDisplayMessage;
    _clCallbacks.displayTransientMessage = ClDisplayTransientMessage;

    return self;
}
static OSStatus playbackCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData) {
    // Notes: ioData contains buffers (may be more than one!)
    // Fill them up as much as you can. Remember to set the size value in each buffer to match how
    // much data is in the buffer.
    bool ranOutOfData = false;
    for (int i = 0; i < ioData->mNumberBuffers; i++) {
        ioData->mBuffers[i].mNumberChannels = 2;

        if (ranOutOfData) {
            ioData->mBuffers[i].mDataByteSize = 0;
            continue;
        }

        if (ioData->mBuffers[i].mDataByteSize != 0) {
            int thisBufferOffset = 0;

        FillBufferAgain:
            // Make sure there's data to write
            if (ioData->mBuffers[i].mDataByteSize - thisBufferOffset == 0) {
                continue;
            }

            struct AUDIO_BUFFER_QUEUE_ENTRY *audioEntry = NULL;

            [audioLock lock];
            if (audioBufferQueue != NULL) {
                // Dequeue this entry temporarily
                audioEntry = audioBufferQueue;
                audioBufferQueue = audioBufferQueue->next;
                audioBufferQueueLength--;
            }
            [audioLock unlock];

            if (audioEntry == NULL) {
                // No data left
                ranOutOfData = true;
                ioData->mBuffers[i].mDataByteSize = thisBufferOffset;
                continue;
            }

            // Figure out how much data we can write
            int min = MIN(ioData->mBuffers[i].mDataByteSize - thisBufferOffset, audioEntry->length);

            // Copy data to the audio buffer
            memcpy(&ioData->mBuffers[i].mData[thisBufferOffset], &audioEntry->data[audioEntry->offset], min);
            thisBufferOffset += min;

            if (min < audioEntry->length) {
                // This entry still has unused data
                audioEntry->length -= min;
                audioEntry->offset += min;

                // Requeue the entry
                [audioLock lock];
                audioEntry->next = audioBufferQueue;
                audioBufferQueue = audioEntry;
                audioBufferQueueLength++;
                [audioLock unlock];
            }
            else {
                // This entry is fully depleted so free it
                free(audioEntry);

                // Try to grab another sample to fill this buffer with
                goto FillBufferAgain;
            }

            ioData->mBuffers[i].mDataByteSize = thisBufferOffset;
        }
    }

    return noErr;
}

-(void) main
{
    LiStartConnection(_host, &_streamConfig, &_clCallbacks, &_drCallbacks, &_arCallbacks, NULL, 0);
}
@end


@@ -0,0 +1,22 @@
//
// StreamConfiguration.h
// Limelight
//
// Created by Diego Waxemberg on 10/20/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface StreamConfiguration : NSObject
@property NSString* host;
@property int hostAddr;
@property int width;
@property int height;
@property int frameRate;
@property int bitRate;
@property int riKeyId;
@property NSData* riKey;
@end
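
A short sketch of how these properties might be filled in before streaming; the concrete values below are illustrative, and bitRate, riKey, and riKeyId are actually populated by StreamManager later in this commit.

    StreamConfiguration* config = [[StreamConfiguration alloc] init];
    config.host = @"192.168.1.100";   // illustrative address
    config.width = 1280;
    config.height = 720;
    config.frameRate = 60;
    // bitRate, riKey, and riKeyId are set by StreamManager's initializer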


@@ -0,0 +1,13 @@
//
// StreamConfiguration.m
// Limelight
//
// Created by Diego Waxemberg on 10/20/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import "StreamConfiguration.h"
@implementation StreamConfiguration
@synthesize host, hostAddr, width, height, frameRate, bitRate, riKeyId, riKey;
@end


@@ -0,0 +1,18 @@
//
// StreamManager.h
// Limelight
//
// Created by Diego Waxemberg on 10/20/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "StreamConfiguration.h"
#import "Connection.h"
@interface StreamManager : NSOperation
- (id) initWithConfig:(StreamConfiguration*)config renderView:(UIView*)view connectionTerminatedCallback:(id<ConTermCallback>)callback;
- (void) stopStream;
@end
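
A hedged sketch of driving StreamManager from a view controller (the view and callback receiver are placeholders); like Connection, it is an NSOperation and must be added to a queue, where -main generates the crypto key pair, launches or resumes the app, and then starts the Connection.

    StreamManager* streamMan = [[StreamManager alloc] initWithConfig:config
                                                          renderView:self.view
                                        connectionTerminatedCallback:self];
    NSOperationQueue* opQueue = [[NSOperationQueue alloc] init];
    [opQueue addOperation:streamMan];
    // When leaving the stream view:
    [streamMan stopStream];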


@@ -0,0 +1,81 @@
//
// StreamManager.m
// Limelight
//
// Created by Diego Waxemberg on 10/20/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import "StreamManager.h"
#import "CryptoManager.h"
#import "HttpManager.h"
#import "Utils.h"
@implementation StreamManager {
    StreamConfiguration* _config;
    UIView* _renderView;
    id<ConTermCallback> _callback;
    Connection* _connection;
}

- (id) initWithConfig:(StreamConfiguration*)config renderView:(UIView*)view connectionTerminatedCallback:(id<ConTermCallback>)callback {
    self = [super init];
    _config = config;
    _renderView = view;
    _callback = callback;
    _config.riKey = [Utils randomBytes:16];
    _config.riKeyId = arc4random();
    _config.bitRate = 10000;
    return self;
}

- (void)main {
    [CryptoManager generateKeyPairUsingSSl];
    NSString* uniqueId = [CryptoManager getUniqueID];
    NSData* cert = [CryptoManager readCertFromFile];

    HttpManager* hMan = [[HttpManager alloc] initWithHost:_config.host
                                                 uniqueId:uniqueId
                                               deviceName:@"roth"
                                                     cert:cert];

    NSData* serverInfoResp = [hMan executeRequestSynchronously:[hMan newServerInfoRequest]];
    if (![[HttpManager getStringFromXML:serverInfoResp tag:@"currentgame"] isEqualToString:@"0"]) {
        // App already running, resume it
        [self resumeApp:hMan];
    } else {
        // Start app
        [self launchApp:hMan];
    }

    VideoDecoderRenderer* renderer = [[VideoDecoderRenderer alloc] initWithView:_renderView];
    _connection = [[Connection alloc] initWithConfig:_config renderer:renderer connectionTerminatedCallback:_callback];
    NSOperationQueue* opQueue = [[NSOperationQueue alloc] init];
    [opQueue addOperation:_connection];
}

- (void) stopStream
{
    [_connection terminate];
}

- (void) launchApp:(HttpManager*)hMan {
    NSData* launchResp = [hMan executeRequestSynchronously:
                          [hMan newLaunchRequest:@"67339056"
                                           width:_config.width
                                          height:_config.height
                                     refreshRate:_config.frameRate
                                           rikey:[Utils bytesToHex:_config.riKey]
                                         rikeyid:_config.riKeyId]];
    [HttpManager getStringFromXML:launchResp tag:@"gamesession"];
}

- (void) resumeApp:(HttpManager*)hMan {
    NSData* resumeResp = [hMan executeRequestSynchronously:
                          [hMan newResumeRequestWithRiKey:[Utils bytesToHex:_config.riKey]
                                                  riKeyId:_config.riKeyId]];
    [HttpManager getStringFromXML:resumeResp tag:@"gamesession"];
}

@end


@@ -0,0 +1,21 @@
//
// VideoDecoderRenderer.h
// Limelight
//
// Created by Cameron Gutman on 10/18/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@import AVFoundation;
@interface VideoDecoderRenderer : NSObject
- (id)initWithView:(UIView*)view;
- (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength;
- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length;
@end


@@ -0,0 +1,245 @@
//
// VideoDecoderRenderer.m
// Limelight
//
// Created by Cameron Gutman on 10/18/14.
// Copyright (c) 2014 Limelight Stream. All rights reserved.
//
#import "VideoDecoderRenderer.h"
@implementation VideoDecoderRenderer {
    AVSampleBufferDisplayLayer* displayLayer;
    Boolean waitingForSps, waitingForPpsA, waitingForPpsB;

    NSData *spsData, *ppsDataA, *ppsDataB;
    unsigned char ppsDataAFirstByte;

    CMVideoFormatDescriptionRef formatDesc;
}

- (id)initWithView:(UIView*)view
{
    self = [super init];

    displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
    displayLayer.bounds = view.bounds;
    displayLayer.backgroundColor = [UIColor blackColor].CGColor;
    displayLayer.position = CGPointMake(CGRectGetMidX(view.bounds), CGRectGetMidY(view.bounds));
    displayLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [view.layer addSublayer:displayLayer];

    // We need some parameter sets before we can properly start decoding frames
    waitingForSps = true;
    waitingForPpsA = true;
    waitingForPpsB = true;

    return self;
}
#define FRAME_START_PREFIX_SIZE 4
#define NALU_START_PREFIX_SIZE 3
#define NAL_LENGTH_PREFIX_SIZE 4
- (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength
{
    OSStatus status;
    size_t oldOffset = CMBlockBufferGetDataLength(existingBuffer);

    // If we're at index 1 (first NALU in frame), enqueue this buffer to the memory block
    // so it can handle freeing it when the block buffer is destroyed
    if (offset == 1) {
        int dataLength = nalLength - NALU_START_PREFIX_SIZE;

        // Pass the real buffer pointer directly (no offset)
        // This will give it to the block buffer to free when it's released.
        // All further calls to CMBlockBufferAppendMemoryBlock will do so
        // at an offset and will not be asking the buffer to be freed.
        status = CMBlockBufferAppendMemoryBlock(existingBuffer, data,
                                                nalLength + 1, // Add 1 for the offset we decremented
                                                kCFAllocatorDefault,
                                                NULL, 0, nalLength + 1, 0);
        if (status != noErr) {
            NSLog(@"CMBlockBufferAppendMemoryBlock failed: %d", (int)status);
            return;
        }

        // Write the length prefix to existing buffer
        const uint8_t lengthBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
                                       (uint8_t)(dataLength >> 8), (uint8_t)dataLength};
        status = CMBlockBufferReplaceDataBytes(lengthBytes, existingBuffer,
                                               oldOffset, NAL_LENGTH_PREFIX_SIZE);
        if (status != noErr) {
            NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
            return;
        }
    }
    else {
        // Append a 4 byte buffer to this block for the length prefix
        status = CMBlockBufferAppendMemoryBlock(existingBuffer, NULL,
                                                NAL_LENGTH_PREFIX_SIZE,
                                                kCFAllocatorDefault, NULL, 0,
                                                NAL_LENGTH_PREFIX_SIZE, 0);
        if (status != noErr) {
            NSLog(@"CMBlockBufferAppendMemoryBlock failed: %d", (int)status);
            return;
        }

        // Write the length prefix to the new buffer
        int dataLength = nalLength - NALU_START_PREFIX_SIZE;
        const uint8_t lengthBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
                                       (uint8_t)(dataLength >> 8), (uint8_t)dataLength};
        status = CMBlockBufferReplaceDataBytes(lengthBytes, existingBuffer,
                                               oldOffset, NAL_LENGTH_PREFIX_SIZE);
        if (status != noErr) {
            NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
            return;
        }

        // Attach the buffer by reference to the block buffer
        status = CMBlockBufferAppendMemoryBlock(existingBuffer, &data[offset+NALU_START_PREFIX_SIZE],
                                                dataLength,
                                                kCFAllocatorNull, // Don't deallocate data on free
                                                NULL, 0, dataLength, 0);
        if (status != noErr) {
            NSLog(@"CMBlockBufferAppendMemoryBlock failed: %d", (int)status);
            return;
        }
    }
}
// This function must free data
- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length
{
    unsigned char nalType = data[FRAME_START_PREFIX_SIZE] & 0x1F;
    OSStatus status;

    if (formatDesc == NULL && (nalType == 0x7 || nalType == 0x8)) {
        if (waitingForSps && nalType == 0x7) {
            NSLog(@"Got SPS");
            spsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
            waitingForSps = false;
        }
        // Nvidia's stream has 2 PPS NALUs so we'll wait for both of them
        else if ((waitingForPpsA || waitingForPpsB) && nalType == 0x8) {
            // Read the NALU's PPS index to figure out which PPS this is
            if (waitingForPpsA) {
                NSLog(@"Got PPS 1");
                ppsDataA = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
                waitingForPpsA = false;
                ppsDataAFirstByte = data[FRAME_START_PREFIX_SIZE + 1];
            }
            else if (data[FRAME_START_PREFIX_SIZE + 1] != ppsDataAFirstByte) {
                NSLog(@"Got PPS 2");
                ppsDataB = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
                waitingForPpsB = false;
            }
        }

        // See if we've got all the parameter sets we need
        if (!waitingForSps && !waitingForPpsA && !waitingForPpsB) {
            const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsDataA bytes], [ppsDataB bytes] };
            const size_t parameterSetSizes[] = { [spsData length], [ppsDataA length], [ppsDataB length] };

            NSLog(@"Constructing format description");
            status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                         2, /* count of parameter sets */
                                                                         parameterSetPointers,
                                                                         parameterSetSizes,
                                                                         NAL_LENGTH_PREFIX_SIZE,
                                                                         &formatDesc);
            if (status != noErr) {
                NSLog(@"Failed to create format description: %d", (int)status);
                formatDesc = NULL;
            }
        }

        // Free the data buffer
        free(data);

        // No frame data to submit for these NALUs
        return;
    }
    if (formatDesc == NULL) {
        // Can't decode if we haven't gotten our parameter sets yet
        free(data);
        return;
    }

    if (nalType != 0x1 && nalType != 0x5) {
        // Don't submit parameter set data
        free(data);
        return;
    }

    // Now we're decoding actual frame data here
    CMBlockBufferRef blockBuffer;
    status = CMBlockBufferCreateEmpty(NULL, 0, 0, &blockBuffer);
    if (status != noErr) {
        NSLog(@"CMBlockBufferCreateEmpty failed: %d", (int)status);
        free(data);
        return;
    }

    int lastOffset = -1;
    for (int i = 0; i < length - FRAME_START_PREFIX_SIZE; i++) {
        // Search for a NALU
        if (data[i] == 0 && data[i+1] == 0 && data[i+2] == 1) {
            // It's the start of a new NALU
            if (lastOffset != -1) {
                // We've seen a start before this so enqueue that NALU
                [self updateBufferForRange:blockBuffer data:data offset:lastOffset length:i - lastOffset];
            }
            lastOffset = i;
        }
    }

    if (lastOffset != -1) {
        // Enqueue the remaining data
        [self updateBufferForRange:blockBuffer data:data offset:lastOffset length:length - lastOffset];
    }

    // From now on, CMBlockBuffer owns the data pointer and will free it when it's dereferenced
    CMSampleBufferRef sampleBuffer;
    status = CMSampleBufferCreate(kCFAllocatorDefault,
                                  blockBuffer,
                                  true, NULL,
                                  NULL, formatDesc, 1, 0,
                                  NULL, 0, NULL,
                                  &sampleBuffer);
    if (status != noErr) {
        NSLog(@"CMSampleBufferCreate failed: %d", (int)status);
        CFRelease(blockBuffer);
        return;
    }

    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
    CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);

    CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue);

    if (nalType == 1) {
        // P-frame
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue);
    }
    else {
        // I-frame
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanFalse);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanFalse);
    }

    [displayLayer enqueueSampleBuffer:sampleBuffer];

    // Dereference the buffers
    CFRelease(blockBuffer);
    CFRelease(sampleBuffer);
}
@end