Video is mostly working now. It's just chopped off in the view now.

This commit is contained in:
Cameron Gutman 2014-10-19 02:07:13 -04:00
parent 037df87585
commit 3a6472eb0b
8 changed files with 48 additions and 127 deletions

View File

@@ -29,7 +29,6 @@
FB290D3819B2C6E3004C83CF /* ConnectionHandler.m in Sources */ = {isa = PBXBuildFile; fileRef = FB290D2919B2C6E3004C83CF /* ConnectionHandler.m */; };
FB290D3919B2C6E3004C83CF /* MainFrameViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = FB290D2B19B2C6E3004C83CF /* MainFrameViewController.m */; };
FB290D3A19B2C6E3004C83CF /* StreamFrameViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = FB290D2D19B2C6E3004C83CF /* StreamFrameViewController.m */; };
FB290D3B19B2C6E3004C83CF /* StreamView.m in Sources */ = {isa = PBXBuildFile; fileRef = FB290D2F19B2C6E3004C83CF /* StreamView.m */; };
FB290DB719B2C870004C83CF /* libz.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = FB290DB619B2C870004C83CF /* libz.dylib */; };
FB290DB919B2C877004C83CF /* libbz2.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = FB290DB819B2C877004C83CF /* libbz2.dylib */; };
FB290DC219B2E966004C83CF /* libopus.a in Frameworks */ = {isa = PBXBuildFile; fileRef = FB290DC119B2E966004C83CF /* libopus.a */; };
@@ -106,8 +105,6 @@
FB290D2B19B2C6E3004C83CF /* MainFrameViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MainFrameViewController.m; sourceTree = "<group>"; };
FB290D2C19B2C6E3004C83CF /* StreamFrameViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = StreamFrameViewController.h; sourceTree = "<group>"; };
FB290D2D19B2C6E3004C83CF /* StreamFrameViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamFrameViewController.m; sourceTree = "<group>"; };
FB290D2E19B2C6E3004C83CF /* StreamView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = StreamView.h; sourceTree = "<group>"; };
FB290D2F19B2C6E3004C83CF /* StreamView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamView.m; sourceTree = "<group>"; };
FB290DA919B2C814004C83CF /* Limelight.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Limelight.h; sourceTree = "<group>"; };
FB290DAB19B2C814004C83CF /* liblimelight-common.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = "liblimelight-common.a"; sourceTree = "<group>"; };
FB290DB619B2C870004C83CF /* libz.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libz.dylib; path = usr/lib/libz.dylib; sourceTree = SDKROOT; };
@@ -398,8 +395,6 @@
FB290D2B19B2C6E3004C83CF /* MainFrameViewController.m */,
FB290D2C19B2C6E3004C83CF /* StreamFrameViewController.h */,
FB290D2D19B2C6E3004C83CF /* StreamFrameViewController.m */,
FB290D2E19B2C6E3004C83CF /* StreamView.h */,
FB290D2F19B2C6E3004C83CF /* StreamView.m */,
FB290D0219B2C406004C83CF /* AppDelegate.h */,
FB290D0319B2C406004C83CF /* AppDelegate.m */,
FB290E7819B37D81004C83CF /* MainFrame-iPad.storyboard */,
@@ -965,7 +960,6 @@
FB290D0019B2C406004C83CF /* main.m in Sources */,
FB290D3919B2C6E3004C83CF /* MainFrameViewController.m in Sources */,
FB290D3719B2C6E3004C83CF /* Connection.m in Sources */,
FB290D3B19B2C6E3004C83CF /* StreamView.m in Sources */,
FBCC0E9D19F00659009729EB /* mkcert.c in Sources */,
FB290D3819B2C6E3004C83CF /* ConnectionHandler.m in Sources */,
);

View File

@@ -160,10 +160,11 @@ void ClDisplayTransientMessage(char* message)
NSLog(@"DisplayTransientMessage: %s", message);
}
-(id) initWithHost:(int)ipaddr width:(int)width height:(int)height renderer:(VideoDecoderRenderer*)renderer
-(id) initWithHost:(int)ipaddr width:(int)width height:(int)height renderer:(VideoDecoderRenderer*)myRenderer
{
self = [super init];
host = ipaddr;
renderer = myRenderer;
streamConfig.width = width;
streamConfig.height = height;

View File

@@ -7,7 +7,6 @@
//
#import <UIKit/UIKit.h>
#import "StreamView.h"
@interface StreamFrameViewController : UIViewController

View File

@@ -28,22 +28,7 @@
[UIApplication sharedApplication].idleTimerDisabled = YES;
StreamView* streamView = [[StreamView alloc] initWithFrame:self.view.frame];
streamView.backgroundColor = [UIColor blackColor];
[self.view addSubview:streamView];
[streamView setNeedsDisplay];
CGAffineTransform transform = CGAffineTransformMakeTranslation((streamView.frame.size.height/2) - (streamView.frame.size.width/2), (streamView.frame.size.width/2) - (streamView.frame.size.height/2));
transform = CGAffineTransformRotate(transform, M_PI_2);
transform = CGAffineTransformScale(transform, -1, -1);
streamView.transform = transform;
// Repositions and resizes the view.
CGRect contentRect = CGRectMake(0,0, self.view.frame.size.width, self.view.frame.size.height);
streamView.bounds = contentRect;
VideoDecoderRenderer* renderer = [[VideoDecoderRenderer alloc]init];
VideoDecoderRenderer* renderer = [[VideoDecoderRenderer alloc]initWithView:self.view];
Connection* conn = [[Connection alloc] initWithHost:inet_addr([[ConnectionHandler resolveHost:[NSString stringWithUTF8String:[MainFrameViewController getHostAddr]]] UTF8String]) width:1280 height:720
renderer: renderer];

View File

@@ -1,13 +0,0 @@
//
// StreamView.h
// Limelight-iOS
//
// Created by Diego Waxemberg on 1/18/14.
// Copyright (c) 2014 Diego Waxemberg. All rights reserved.
//
#import <UIKit/UIKit.h>
// Custom UIView that blits a raw RGBA pixel buffer to the screen in
// drawRect: (backing store is populated by the video decode path).
@interface StreamView : UIView
@end

View File

@@ -1,66 +0,0 @@
//
// StreamView.m
// Limelight-iOS
//
// Created by Diego Waxemberg on 1/18/14.
// Copyright (c) 2014 Diego Waxemberg. All rights reserved.
//
#import "StreamView.h"
@implementation StreamView {
    size_t width;                 // backing-store width in pixels (fixed 1280)
    size_t height;                // backing-store height in pixels (fixed 720)
    size_t bitsPerComponent;      // bits per color component (8 = one byte each)
    size_t bytesPerRow;           // stride of one row of BGRA pixels
    CGColorSpaceRef colorSpace;   // device RGB color space, created once in init
    CGContextRef bitmapContext;   // per-draw bitmap context wrapping pixelData
    CGImageRef image;             // per-draw snapshot of the pixel buffer
    unsigned char* pixelData;     // raw 32-bit-per-pixel framebuffer (malloc'd)
}

/// Designated initializer. Allocates a fixed 1280x720, 4-bytes-per-pixel
/// framebuffer and the color space used to wrap it for drawing.
/// NOTE(review): dimensions are hard-coded to the 720p stream size rather
/// than derived from `frame` — presumably intentional for this stream config.
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {  // fix: guard against [super initWithFrame:] returning nil
        width = 1280;
        height = 720;
        bitsPerComponent = 8;
        bytesPerRow = (bitsPerComponent / 8) * width * 4;
        pixelData = malloc(width * height * 4);
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    return self;
}

// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    /*if (![VideoRenderer isRendering]) {
        return;
    }*/
    // Wrap the raw pixel buffer in a bitmap context and snapshot it as a CGImage.
    bitmapContext = CGBitmapContextCreate(pixelData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
    image = CGBitmapContextCreateImage(bitmapContext);
    // fix: release the per-draw context — previously created every frame and
    // never released, leaking one CGContext per drawRect: invocation.
    CGContextRelease(bitmapContext);
    bitmapContext = NULL;

    CGContextRef context = UIGraphicsGetCurrentContext();  // idiom: CGContextRef, not struct CGContext*
    // Copy blend + no interpolation/antialiasing: fastest path for a straight blit.
    CGContextSetBlendMode(context, kCGBlendModeCopy);
    CGContextSetInterpolationQuality(context, kCGInterpolationNone);
    CGContextSetShouldAntialias(context, false);
    // Rotate/scale the CTM so the landscape video fills the (portrait-framed) view.
    CGContextRotateCTM(context, -M_PI_2);
    CGContextScaleCTM(context, -(float)self.frame.size.width/self.frame.size.height, (float)self.frame.size.height/self.frame.size.width);
    CGContextDrawImage(context, rect, image);
    CGImageRelease(image);
    image = NULL;
    [super drawRect:rect];
}

/// fix: release resources that were previously leaked for the lifetime of the
/// process — the malloc'd framebuffer and the device RGB color space.
- (void)dealloc
{
    if (colorSpace) {
        CGColorSpaceRelease(colorSpace);
    }
    free(pixelData);
}
@end

View File

@@ -12,7 +12,7 @@
@interface VideoDecoderRenderer : NSObject
- (id)init;
- (id)initWithView:(UIView*)view;
- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length;

View File

@@ -13,17 +13,20 @@
Boolean waitingForSps, waitingForPpsA, waitingForPpsB;
NSData *spsData, *ppsDataA, *ppsDataB;
unsigned char ppsDataAFirstByte;
CMVideoFormatDescriptionRef formatDesc;
}
- (id)init
- (id)initWithView:(UIView*)view
{
self = [super init];
displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
displayLayer.bounds = CGRectMake(0, 0, 300, 300);
displayLayer.backgroundColor = [UIColor blackColor].CGColor;
displayLayer.position = CGPointMake(500, 500);
displayLayer.bounds = view.bounds;
displayLayer.backgroundColor = [UIColor greenColor].CGColor;
displayLayer.position = CGPointMake(CGRectGetMidX(view.bounds), CGRectGetMidY(view.bounds));
displayLayer.videoGravity = AVLayerVideoGravityResize;
[view.layer addSublayer:displayLayer];
// We need some parameter sets before we can properly start decoding frames
waitingForSps = true;
@@ -34,7 +37,6 @@
}
#define ES_START_PREFIX_SIZE 4
#define ES_DATA_OFFSET 5
- (void)submitDecodeBuffer:(unsigned char *)data length:(int)length
{
unsigned char nalType = data[ES_START_PREFIX_SIZE] & 0x1F;
@@ -42,33 +44,35 @@
if (formatDesc == NULL && (nalType == 0x7 || nalType == 0x8)) {
if (waitingForSps && nalType == 0x7) {
spsData = [NSData dataWithBytes:&data[ES_DATA_OFFSET] length:length - ES_DATA_OFFSET];
NSLog(@"Got SPS");
spsData = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
waitingForSps = false;
}
// Nvidia's stream has 2 PPS NALUs so we'll wait for both of them
else if ((waitingForPpsA || waitingForPpsB) && nalType == 0x8) {
// Read the NALU's PPS index to figure out which PPS this is
if (data[ES_DATA_OFFSET] == 0) {
printf("PPS BYTE: %02x", data[ES_START_PREFIX_SIZE + 1]);
if (waitingForPpsA) {
ppsDataA = [NSData dataWithBytes:&data[ES_DATA_OFFSET] length:length - ES_DATA_OFFSET];
NSLog(@"Got PPS 1");
ppsDataA = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
waitingForPpsA = false;
ppsDataAFirstByte = data[ES_START_PREFIX_SIZE + 1];
}
}
else if (data[ES_DATA_OFFSET] == 1) {
if (waitingForPpsB) {
ppsDataA = [NSData dataWithBytes:&data[ES_DATA_OFFSET] length:length - ES_DATA_OFFSET];
else if (data[ES_START_PREFIX_SIZE + 1] != ppsDataAFirstByte) {
NSLog(@"Got PPS 2");
ppsDataA = [NSData dataWithBytes:&data[ES_START_PREFIX_SIZE] length:length - ES_START_PREFIX_SIZE];
waitingForPpsB = false;
}
}
}
// See if we've got all the parameter sets we need
if (!waitingForSps && !waitingForPpsA && !waitingForPpsB) {
const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsDataA bytes], [ppsDataB bytes] };
const size_t parameterSetSizes[] = { [spsData length], [ppsDataA length], [ppsDataB length] };
NSLog(@"Constructing format description");
status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
3, /* count of parameter sets */
2, /* count of parameter sets */
parameterSetPointers,
parameterSetSizes,
4 /* size of length prefix */,
@@ -89,6 +93,11 @@
return;
}
if (nalType != 0x1 && nalType != 0x5) {
// Don't submit parameter set data
return;
}
// Now we're decoding actual frame data here
CMBlockBufferRef blockBuffer;
status = CMBlockBufferCreateWithMemoryBlock(NULL, data, length, kCFAllocatorNull, NULL, 0, length, 0, &blockBuffer);
@@ -98,7 +107,9 @@
}
// Compute the new length prefix to replace the 00 00 00 01
const uint8_t lengthBytes[] = {(uint8_t)(length >> 24), (uint8_t)(length >> 16), (uint8_t)(length >> 8), (uint8_t)length};
int dataLength = length - ES_START_PREFIX_SIZE;
const uint8_t lengthBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
(uint8_t)(dataLength >> 8), (uint8_t)dataLength};
status = CMBlockBufferReplaceDataBytes(lengthBytes, blockBuffer, 0, 4);
if (status != noErr) {
NSLog(@"CMBlockBufferReplaceDataBytes failed: %d", (int)status);
@@ -120,12 +131,22 @@
CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
dispatch_async(dispatch_get_main_queue(),^{
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue);
if (nalType == 1) {
// P-frame
CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue);
}
else {
// I-frame
CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanFalse);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanFalse);
}
[displayLayer enqueueSampleBuffer:sampleBuffer];
[displayLayer setNeedsDisplay];
});
}
@end