diff --git a/Limelight/Connection.h b/Limelight/Connection.h
index 0d9bd73..499d8b9 100644
--- a/Limelight/Connection.h
+++ b/Limelight/Connection.h
@@ -7,10 +7,11 @@
 //
 
 #import <Foundation/Foundation.h>
+#import "VideoDecoderRenderer.h"
 
 @interface Connection : NSOperation
 
--(id) initWithHost:(int)ipaddr width:(int)width height:(int)height;
+-(id) initWithHost:(int)ipaddr width:(int)width height:(int)height renderer:(VideoDecoderRenderer*)renderer;
 
 -(void) main;
 
 @end
diff --git a/Limelight/Connection.m b/Limelight/Connection.m
index 417c1ae..2a8f0e1 100644
--- a/Limelight/Connection.m
+++ b/Limelight/Connection.m
@@ -7,6 +7,7 @@
 //
 
 #import "Connection.h"
+
 #import <Limelight.h>
 #import <opus.h>
 
@@ -29,10 +30,9 @@ static OpusDecoder *opusDecoder;
 static short* decodedPcmBuffer;
 static int filledPcmBuffer;
 
-NSLock* audioRendererBlock;
-AudioComponentInstance audioUnit;
-bool started = false;
-
+static AudioComponentInstance audioUnit;
+static bool started = false;
+static VideoDecoderRenderer* renderer;
 
 void DrSetup(int width, int height, int fps, void* context, int drFlags)
 {
@@ -52,7 +52,7 @@ void DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit)
         entry = entry->next;
     }
 
-    // FIXME: Submit data to decoder
+    [renderer submitDecodeBuffer:data length:decodeUnit->fullLength];
 
     free(data);
 }
@@ -125,7 +125,7 @@ void ArDecodeAndPlaySample(char* sampleData, int sampleLength)
     }
 }
 
--(id) initWithHost:(int)ipaddr width:(int)width height:(int)height
+-(id) initWithHost:(int)ipaddr width:(int)width height:(int)height renderer:(VideoDecoderRenderer*)renderer
 {
     self = [super init];
     host = ipaddr;
@@ -237,7 +237,6 @@ static OSStatus playbackCallback(void *inRefCon,
         filledPcmBuffer -= min;
     }
 
-    //[audioRendererBlock unlock];
     return noErr;
 }
 
diff --git a/Limelight/StreamFrameViewController.m b/Limelight/StreamFrameViewController.m
index 065e6a0..f18bb38 100644
--- a/Limelight/StreamFrameViewController.m
+++ b/Limelight/StreamFrameViewController.m
@@ -42,12 +42,14 @@
     // Repositions and resizes the view.
     CGRect contentRect = CGRectMake(0,0, self.view.frame.size.width, self.view.frame.size.height);
     streamView.bounds = contentRect;
+
+    VideoDecoderRenderer* renderer = [[VideoDecoderRenderer alloc] init];
 
-    Connection* conn = [[Connection alloc] initWithHost:inet_addr([[ConnectionHandler resolveHost:[NSString stringWithUTF8String:[MainFrameViewController getHostAddr]]] UTF8String]) width:1280 height:720];
+    Connection* conn = [[Connection alloc] initWithHost:inet_addr([[ConnectionHandler resolveHost:[NSString stringWithUTF8String:[MainFrameViewController getHostAddr]]] UTF8String]) width:1280 height:720
+                                               renderer:renderer];
 
     NSOperationQueue* opQueue = [[NSOperationQueue alloc] init];
     [opQueue addOperation:conn];
-    [opQueue addOperation:[[VideoDecoderRenderer alloc]initWithTarget:streamView]];
 }
 
 - (void)didReceiveMemoryWarning
diff --git a/Limelight/VideoDecoderRenderer.h b/Limelight/VideoDecoderRenderer.h
index 7fc7ea3..0edc093 100644
--- a/Limelight/VideoDecoderRenderer.h
+++ b/Limelight/VideoDecoderRenderer.h
@@ -8,10 +8,12 @@
 
 #import <UIKit/UIKit.h>
 
-@interface VideoDecoderRenderer : NSOperation
+@import AVFoundation;
 
-- (id)initWithTarget:(UIView *)target;
+@interface VideoDecoderRenderer : NSObject
 
-@property UIView* renderTarget;
+- (id)init;
+
+- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length;
 
 @end
diff --git a/Limelight/VideoDecoderRenderer.m b/Limelight/VideoDecoderRenderer.m
index 88a2cf3..411f1ff 100644
--- a/Limelight/VideoDecoderRenderer.m
+++ b/Limelight/VideoDecoderRenderer.m
@@ -8,20 +8,124 @@
 
 #import "VideoDecoderRenderer.h"
 
-@implementation VideoDecoderRenderer
+@implementation VideoDecoderRenderer {
+    AVSampleBufferDisplayLayer* displayLayer;
+    Boolean waitingForSps, waitingForPpsA, waitingForPpsB;
+
+    NSData *spsData, *ppsDataA, *ppsDataB;
+    CMVideoFormatDescriptionRef formatDesc;
+}
 
-- (id)initWithTarget:(UIView *)target
+- (id)init
 {
     self = [super init];
 
-    self.renderTarget = target;
+    displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
+    displayLayer.bounds = CGRectMake(0, 0, 300, 300);
+    displayLayer.backgroundColor = [UIColor blackColor].CGColor;
+    displayLayer.position = CGPointMake(500, 500);
+
+    // We need some parameter sets before we can properly start decoding frames
+    waitingForSps = true;
+    waitingForPpsA = true;
+    waitingForPpsB = true;
 
     return self;
 }
 
-- (void)main
+#define ES_START_PREFIX_SIZE 4
+#define ES_DATA_OFFSET 5
+- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length
 {
-    NSLog(@"Hi");
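+    // Each decode unit arrives as one Annex B NAL unit: a 4-byte
+    // 00 00 00 01 start prefix, then a 1-byte NAL header whose low
+    // 5 bits give the NAL unit type (0x7 = SPS, 0x8 = PPS)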
+    unsigned char nalType = data[ES_START_PREFIX_SIZE] & 0x1F;
+    OSStatus status;
+
+    if (formatDesc == NULL && (nalType == 0x7 || nalType == 0x8)) {
+        if (waitingForSps && nalType == 0x7) {
+            // The parameter set data must include the NAL header byte,
+            // so copy from just past the start prefix
+            spsData = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
+            waitingForSps = false;
+        }
+        // Nvidia's stream has 2 PPS NALUs so we'll wait for both of them
+        else if ((waitingForPpsA || waitingForPpsB) && nalType == 0x8) {
+            // Read the NALU's PPS index to figure out which PPS this is
+            if (data[ES_DATA_OFFSET] == 0) {
+                if (waitingForPpsA) {
+                    ppsDataA = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
+                    waitingForPpsA = false;
+                }
+            }
+            else if (data[ES_DATA_OFFSET] == 1) {
+                if (waitingForPpsB) {
+                    ppsDataB = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
+                    waitingForPpsB = false;
+                }
+            }
+        }
+
+        // See if we've got all the parameter sets we need
+        if (!waitingForSps && !waitingForPpsA && !waitingForPpsB) {
+            const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsDataA bytes], [ppsDataB bytes] };
+            const size_t parameterSetSizes[] = { [spsData length], [ppsDataA length], [ppsDataB length] };
+
+            status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
+                                                                         3, /* count of parameter sets */
+                                                                         parameterSetPointers,
+                                                                         parameterSetSizes,
+                                                                         4, /* size of length prefix */
+                                                                         &formatDesc);
+            if (status != noErr) {
+                NSLog(@"Failed to create format description: %d", (int)status);
+                formatDesc = NULL;
+                return;
+            }
+        }
+
+        // No frame data to submit for these NALUs
+        return;
+    }
+
+    if (formatDesc == NULL) {
+        // Can't decode if we haven't gotten our parameter sets yet
+        return;
+    }
+
+    // Now we're decoding actual frame data here. The caller frees data as
+    // soon as we return, but the sample buffer is consumed asynchronously,
+    // so copy the frame into a block buffer we own rather than wrapping the
+    // caller's memory
+    CMBlockBufferRef blockBuffer;
+    status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, length, kCFAllocatorDefault, NULL, 0, length, kCMBlockBufferAssureMemoryNowFlag, &blockBuffer);
+    if (status != noErr) {
+        NSLog(@"CMBlockBufferCreateWithMemoryBlock failed: %d", (int)status);
+        return;
+    }
+
+    status = CMBlockBufferReplaceDataBytes(data, blockBuffer, 0, length);
+    if (status != noErr) {
+        NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
+        CFRelease(blockBuffer);
+        return;
+    }
+
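+    // AVCC framing: the 4-byte length field is big-endian and counts only
+    // the bytes that follow it, matching the 4-byte NAL unit length prefix
+    // declared when formatDesc was created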
+    // Compute the new length prefix to replace the 00 00 00 01
+    const int nalLength = length - ES_START_PREFIX_SIZE;
+    const uint8_t lengthBytes[] = {(uint8_t)(nalLength >> 24), (uint8_t)(nalLength >> 16), (uint8_t)(nalLength >> 8), (uint8_t)nalLength};
+    status = CMBlockBufferReplaceDataBytes(lengthBytes, blockBuffer, 0, ES_START_PREFIX_SIZE);
+    if (status != noErr) {
+        NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
+        CFRelease(blockBuffer);
+        return;
+    }
+
+    CMSampleBufferRef sampleBuffer;
+    const size_t sampleSizeArray[] = {length};
+
+    status = CMSampleBufferCreate(kCFAllocatorDefault,
+                                  blockBuffer, true, NULL,
+                                  NULL, formatDesc, 1, 0,
+                                  NULL, 1, sampleSizeArray,
+                                  &sampleBuffer);
+    // The sample buffer holds its own reference to the block buffer
+    CFRelease(blockBuffer);
+    if (status != noErr) {
+        NSLog(@"CMSampleBufferCreate failed: %d", (int)status);
+        return;
+    }
+
+    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
+    CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
+    CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
+
+    dispatch_async(dispatch_get_main_queue(), ^{
+        [displayLayer enqueueSampleBuffer:sampleBuffer];
+        [displayLayer setNeedsDisplay];
+        CFRelease(sampleBuffer);
+    });
 }
 
 @end
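Note: this change allocates the AVSampleBufferDisplayLayer with hard-coded bounds but never attaches it to a view, and the old initWithTarget:streamView wiring is removed, so enqueued frames have nowhere to render on screen. A minimal sketch of the missing step, assuming a hypothetical attachToView: helper on VideoDecoderRenderer (not part of this diff) called from StreamFrameViewController's viewDidLoad with the existing streamView:

    // Hypothetical helper: size the layer to the target view and parent it
    // so that enqueued sample buffers actually appear on screen
    - (void)attachToView:(UIView *)view
    {
        displayLayer.bounds = view.bounds;
        displayLayer.position = CGPointMake(CGRectGetMidX(view.bounds), CGRectGetMidY(view.bounds));
        [view.layer addSublayer:displayLayer];
    }

    // In viewDidLoad, after creating the renderer:
    [renderer attachToView:streamView];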