Merge branch 'hevc'

Cameron Gutman 2017-09-14 21:54:46 -07:00
commit 593da7ef3e
3 changed files with 132 additions and 31 deletions

View File

@@ -10,6 +10,7 @@
 #import <AudioUnit/AudioUnit.h>
 #import <AVFoundation/AVFoundation.h>
+#import <VideoToolbox/VideoToolbox.h>
 
 #include "Limelight.h"
 #include "opus.h"
@@ -45,6 +46,12 @@ static int audioBufferQueueLength;
 static AudioComponentInstance audioUnit;
 static VideoDecoderRenderer* renderer;
 
+int DrDecoderSetup(int videoFormat, int width, int height, int redrawRate, void* context, int drFlags)
+{
+    [renderer setupWithVideoFormat:videoFormat];
+    return 0;
+}
+
 int DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit)
 {
     int offset = 0;
@@ -314,6 +321,12 @@ void ClLogMessage(const char* format, ...)
     _streamConfig.fps = config.frameRate;
     _streamConfig.bitrate = config.bitRate;
     
+    // On iOS 11, we can use HEVC if the server supports encoding it
+    // and this device has hardware decode for it (A9 and later)
+    if (@available(iOS 11.0, *)) {
+        _streamConfig.supportsHevc = VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC);
+    }
+    
     // FIXME: We should use 1024 when streaming remotely
     _streamConfig.packetSize = 1292;
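
Note on the hunk above: the @available guard means supportsHevc is only assigned on iOS 11 or later, where VTIsHardwareDecodeSupported() exists; on older systems this hunk leaves the field untouched. A minimal sketch of the same probe pulled into a helper (the function name is illustrative, not part of this change; it assumes <VideoToolbox/VideoToolbox.h> is imported as above):

static BOOL deviceSupportsHevcDecode(void) {
    if (@available(iOS 11.0, *)) {
        // VTIsHardwareDecodeSupported() is only declared on iOS 11+ SDKs
        return VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC);
    }
    // No HEVC hardware decode path is advertised before iOS 11
    return NO;
}
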
@@ -323,8 +336,9 @@
     memcpy(_streamConfig.remoteInputAesIv, &riKeyId, sizeof(riKeyId));
     
     LiInitializeVideoCallbacks(&_drCallbacks);
+    _drCallbacks.setup = DrDecoderSetup;
     _drCallbacks.submitDecodeUnit = DrSubmitDecodeUnit;
-    _drCallbacks.capabilities = CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC;
+    _drCallbacks.capabilities = CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC | CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC;
     
     LiInitializeAudioCallbacks(&_arCallbacks);
     _arCallbacks.init = ArInit;

View File

@@ -14,6 +14,8 @@
 - (id)initWithView:(UIView*)view;
 
+- (void)setupWithVideoFormat:(int)videoFormat;
+
 - (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength;
 
 - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length;

View File

@@ -14,9 +14,10 @@
     UIView *_view;
     AVSampleBufferDisplayLayer* displayLayer;
-    Boolean waitingForSps, waitingForPps;
+    Boolean waitingForSps, waitingForPps, waitingForVps;
+    int videoFormat;
     
-    NSData *spsData, *ppsData;
+    NSData *spsData, *ppsData, *vpsData;
     CMVideoFormatDescriptionRef formatDesc;
 }
@@ -43,6 +44,8 @@
     spsData = nil;
     waitingForPps = true;
     ppsData = nil;
+    waitingForVps = true;
+    vpsData = nil;
     
     if (formatDesc != nil) {
         CFRelease(formatDesc);
@ -61,11 +64,65 @@
return self; return self;
} }
- (void)setupWithVideoFormat:(int)videoFormat
{
self->videoFormat = videoFormat;
}
#define FRAME_START_PREFIX_SIZE 4 #define FRAME_START_PREFIX_SIZE 4
#define NALU_START_PREFIX_SIZE 3 #define NALU_START_PREFIX_SIZE 3
#define NAL_LENGTH_PREFIX_SIZE 4 #define NAL_LENGTH_PREFIX_SIZE 4
#define NAL_TYPE_SPS 0x7 #define AVC_NAL_TYPE_SPS 0x67
#define NAL_TYPE_PPS 0x8 #define AVC_NAL_TYPE_PPS 0x68
#define HEVC_NAL_TYPE_VPS 0x40
#define HEVC_NAL_TYPE_SPS 0x42
#define HEVC_NAL_TYPE_PPS 0x44
#define IS_NAL_SPS(x) ((videoFormat == VIDEO_FORMAT_H264) ? ((x) == AVC_NAL_TYPE_SPS) : ((x) == HEVC_NAL_TYPE_SPS))
#define IS_NAL_PPS(x) ((videoFormat == VIDEO_FORMAT_H264) ? ((x) == AVC_NAL_TYPE_PPS) : ((x) == HEVC_NAL_TYPE_PPS))
#define IS_NAL_VPS(x) ((videoFormat == VIDEO_FORMAT_H265) && ((x) == HEVC_NAL_TYPE_VPS))
- (Boolean)readyForPictureData
{
if (videoFormat == VIDEO_FORMAT_H264) {
return !waitingForSps && !waitingForPps;
}
else {
// H.265 requires VPS in addition to SPS and PPS
return !waitingForVps && !waitingForSps && !waitingForPps;
}
}
- (Boolean)isNalReferencePicture:(unsigned char)nalType
{
if (videoFormat == VIDEO_FORMAT_H264) {
return nalType == 0x65;
}
else {
// HEVC has several types of reference NALU types
switch (nalType) {
case 0x20:
case 0x22:
case 0x24:
case 0x26:
case 0x28:
case 0x2A:
return true;
default:
return false;
}
}
}
- (Boolean)isNalPictureData:(unsigned char)nalType
{
if (videoFormat == VIDEO_FORMAT_H264) {
return !(nalType == AVC_NAL_TYPE_SPS || nalType == AVC_NAL_TYPE_PPS);
}
else {
return !(nalType == HEVC_NAL_TYPE_VPS || nalType == HEVC_NAL_TYPE_SPS || nalType == HEVC_NAL_TYPE_PPS);
}
}
- (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength - (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength
{ {
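
The defines added above compare the entire first byte of the NAL unit header rather than a masked type value, which is why the & 0x1F mask disappears from submitDecodeBuffer in the next hunk. An illustrative breakdown of those byte values (the two helper functions are mine, not part of this change):

// H.264 NAL header byte: forbidden_zero_bit(1) | nal_ref_idc(2) | nal_unit_type(5)
//   0x67 -> type 7 (SPS), 0x68 -> type 8 (PPS), 0x65 -> type 5 (IDR slice)
// H.265 first NAL header byte: forbidden_zero_bit(1) | nal_unit_type(6) | nuh_layer_id MSB(1),
// so with nuh_layer_id == 0 the byte is simply nal_unit_type << 1:
//   0x40 -> type 32 (VPS), 0x42 -> type 33 (SPS), 0x44 -> type 34 (PPS)
//   0x20..0x2A (even values) -> types 16..21, the IRAP (BLA/IDR/CRA) pictures
//   that -isNalReferencePicture: treats as sync frames
static unsigned char avcNalUnitType(unsigned char headerByte) {
    return headerByte & 0x1F;        // low 5 bits carry the H.264 NAL type
}
static unsigned char hevcNalUnitType(unsigned char headerByte) {
    return (headerByte >> 1) & 0x3F; // bits 1..6 carry the H.265 NAL type
}
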
@@ -136,7 +193,7 @@
 // This function must free data
 - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length
 {
-    unsigned char nalType = data[FRAME_START_PREFIX_SIZE] & 0x1F;
+    unsigned char nalType = data[FRAME_START_PREFIX_SIZE];
     OSStatus status;
     
     // Check for previous decoder errors before doing anything
@@ -150,35 +207,69 @@
         return DR_NEED_IDR;
     }
     
-    if (nalType == NAL_TYPE_SPS || nalType == NAL_TYPE_PPS) {
-        if (nalType == NAL_TYPE_SPS) {
+    if (![self isNalPictureData:nalType]) {
+        if (IS_NAL_VPS(nalType)) {
+            Log(LOG_I, @"Got VPS");
+            vpsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
+            waitingForVps = false;
+            
+            // We got a new VPS so wait for a new SPS to match it
+            waitingForSps = true;
+        }
+        else if (IS_NAL_SPS(nalType)) {
             Log(LOG_I, @"Got SPS");
             spsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
             waitingForSps = false;
             
             // We got a new SPS so wait for a new PPS to match it
             waitingForPps = true;
-        } else if (nalType == NAL_TYPE_PPS) {
+        } else if (IS_NAL_PPS(nalType)) {
             Log(LOG_I, @"Got PPS");
             ppsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
             waitingForPps = false;
         }
         
-        // See if we've got all the parameter sets we need
-        if (!waitingForSps && !waitingForPps) {
-            const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsData bytes] };
-            const size_t parameterSetSizes[] = { [spsData length], [ppsData length] };
-            
-            Log(LOG_I, @"Constructing new format description");
-            status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
-                                                                         2, /* count of parameter sets */
-                                                                         parameterSetPointers,
-                                                                         parameterSetSizes,
-                                                                         NAL_LENGTH_PREFIX_SIZE,
-                                                                         &formatDesc);
-            if (status != noErr) {
-                Log(LOG_E, @"Failed to create format description: %d", (int)status);
-                formatDesc = NULL;
+        // See if we've got all the parameter sets we need for our video format
+        if ([self readyForPictureData]) {
+            if (videoFormat == VIDEO_FORMAT_H264) {
+                const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsData bytes] };
+                const size_t parameterSetSizes[] = { [spsData length], [ppsData length] };
+                
+                Log(LOG_I, @"Constructing new H264 format description");
+                status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
+                                                                             2, /* count of parameter sets */
+                                                                             parameterSetPointers,
+                                                                             parameterSetSizes,
+                                                                             NAL_LENGTH_PREFIX_SIZE,
+                                                                             &formatDesc);
+                if (status != noErr) {
+                    Log(LOG_E, @"Failed to create H264 format description: %d", (int)status);
+                    formatDesc = NULL;
+                }
+            }
+            else {
+                const uint8_t* const parameterSetPointers[] = { [vpsData bytes], [spsData bytes], [ppsData bytes] };
+                const size_t parameterSetSizes[] = { [vpsData length], [spsData length], [ppsData length] };
+                
+                Log(LOG_I, @"Constructing new HEVC format description");
+                if (@available(iOS 11.0, *)) {
+                    status = CMVideoFormatDescriptionCreateFromHEVCParameterSets(kCFAllocatorDefault,
+                                                                                 3, /* count of parameter sets */
+                                                                                 parameterSetPointers,
+                                                                                 parameterSetSizes,
+                                                                                 NAL_LENGTH_PREFIX_SIZE,
+                                                                                 nil,
+                                                                                 &formatDesc);
+                } else {
+                    // This means Moonlight-common-c decided to give us an HEVC stream
+                    // even though we said we couldn't support it. All we can do is abort().
+                    abort();
+                }
+                
+                if (status != noErr) {
+                    Log(LOG_E, @"Failed to create HEVC format description: %d", (int)status);
+                    formatDesc = NULL;
+                }
             }
         }
     }
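
For reference on the HEVC branch above: CMVideoFormatDescriptionCreateFromHEVCParameterSets (iOS 11+) mirrors the H.264 variant but takes three parameter sets, passed here in bitstream order (VPS, SPS, PPS), plus an extra extensions-dictionary argument, which this change leaves nil. A hedged sketch of that call wrapped in a hypothetical helper (createHevcFormatDesc is mine, not part of this diff; it assumes the same 4-byte NAL length prefix as NAL_LENGTH_PREFIX_SIZE):

static CMVideoFormatDescriptionRef createHevcFormatDesc(NSData *vps, NSData *sps, NSData *pps) {
    CMVideoFormatDescriptionRef desc = NULL;
    if (@available(iOS 11.0, *)) {
        const uint8_t* const paramSets[3] = { vps.bytes, sps.bytes, pps.bytes };
        const size_t paramSizes[3]        = { vps.length, sps.length, pps.length };
        OSStatus err = CMVideoFormatDescriptionCreateFromHEVCParameterSets(
            kCFAllocatorDefault,
            3,        // count of parameter sets (VPS, SPS, PPS)
            paramSets,
            paramSizes,
            4,        // NAL length-prefix size, matching NAL_LENGTH_PREFIX_SIZE
            NULL,     // optional extensions dictionary; not needed here
            &desc);
        if (err != noErr) {
            return NULL;
        }
    }
    return desc;
}
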
@@ -195,12 +286,6 @@
         return DR_OK;
     }
     
-    if (nalType != 0x1 && nalType != 0x5) {
-        // Don't submit parameter set data
-        free(data);
-        return DR_OK;
-    }
-    
     // Now we're decoding actual frame data here
     CMBlockBufferRef blockBuffer;
@@ -252,7 +337,7 @@
     CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
     CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue);
     
-    if (nalType == 1) {
+    if (![self isNalReferencePicture:nalType]) {
         // P-frame
         CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue);
         CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue);