diff --git a/Limelight/Stream/Connection.m b/Limelight/Stream/Connection.m
index b053be9b..77e21b50 100644
--- a/Limelight/Stream/Connection.m
+++ b/Limelight/Stream/Connection.m
@@ -10,6 +10,7 @@
 #import
 #import
+#import <VideoToolbox/VideoToolbox.h>
 
 #include "Limelight.h"
 #include "opus.h"
 
@@ -45,6 +46,12 @@ static int audioBufferQueueLength;
 static AudioComponentInstance audioUnit;
 static VideoDecoderRenderer* renderer;
 
+int DrDecoderSetup(int videoFormat, int width, int height, int redrawRate, void* context, int drFlags)
+{
+    [renderer setupWithVideoFormat:videoFormat];
+    return 0;
+}
+
 int DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit)
 {
     int offset = 0;
@@ -314,6 +321,12 @@ void ClLogMessage(const char* format, ...)
     _streamConfig.fps = config.frameRate;
     _streamConfig.bitrate = config.bitRate;
     
+    // On iOS 11, we can use HEVC if the server supports encoding it
+    // and this device has hardware decode for it (A9 and later)
+    if (@available(iOS 11.0, *)) {
+        _streamConfig.supportsHevc = VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC);
+    }
+    
     // FIXME: We should use 1024 when streaming remotely
     _streamConfig.packetSize = 1292;
     
@@ -323,8 +336,9 @@
     memcpy(_streamConfig.remoteInputAesIv, &riKeyId, sizeof(riKeyId));
     
     LiInitializeVideoCallbacks(&_drCallbacks);
+    _drCallbacks.setup = DrDecoderSetup;
     _drCallbacks.submitDecodeUnit = DrSubmitDecodeUnit;
-    _drCallbacks.capabilities = CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC;
+    _drCallbacks.capabilities = CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC | CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC;
     
     LiInitializeAudioCallbacks(&_arCallbacks);
     _arCallbacks.init = ArInit;
diff --git a/Limelight/Stream/VideoDecoderRenderer.h b/Limelight/Stream/VideoDecoderRenderer.h
index e1ef307c..6c34ba3b 100644
--- a/Limelight/Stream/VideoDecoderRenderer.h
+++ b/Limelight/Stream/VideoDecoderRenderer.h
@@ -14,6 +14,8 @@
 
 - (id)initWithView:(UIView*)view;
 
+- (void)setupWithVideoFormat:(int)videoFormat;
+
 - (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength;
 
 - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length;
diff --git a/Limelight/Stream/VideoDecoderRenderer.m b/Limelight/Stream/VideoDecoderRenderer.m
index 736ace84..75453185 100644
--- a/Limelight/Stream/VideoDecoderRenderer.m
+++ b/Limelight/Stream/VideoDecoderRenderer.m
@@ -14,9 +14,10 @@
     UIView *_view;
     
     AVSampleBufferDisplayLayer* displayLayer;
-    Boolean waitingForSps, waitingForPps;
+    Boolean waitingForSps, waitingForPps, waitingForVps;
+    int videoFormat;
     
-    NSData *spsData, *ppsData;
+    NSData *spsData, *ppsData, *vpsData;
     CMVideoFormatDescriptionRef formatDesc;
 }
 
@@ -43,6 +44,8 @@
     spsData = nil;
     waitingForPps = true;
     ppsData = nil;
+    waitingForVps = true;
+    vpsData = nil;
     
     if (formatDesc != nil) {
        CFRelease(formatDesc);
@@ -61,11 +64,65 @@
     return self;
 }
 
+- (void)setupWithVideoFormat:(int)videoFormat
+{
+    self->videoFormat = videoFormat;
+}
+
 #define FRAME_START_PREFIX_SIZE 4
 #define NALU_START_PREFIX_SIZE 3
 #define NAL_LENGTH_PREFIX_SIZE 4
-#define NAL_TYPE_SPS 0x7
-#define NAL_TYPE_PPS 0x8
+#define AVC_NAL_TYPE_SPS 0x67
+#define AVC_NAL_TYPE_PPS 0x68
+#define HEVC_NAL_TYPE_VPS 0x40
+#define HEVC_NAL_TYPE_SPS 0x42
+#define HEVC_NAL_TYPE_PPS 0x44
+
+#define IS_NAL_SPS(x) ((videoFormat == VIDEO_FORMAT_H264) ? ((x) == AVC_NAL_TYPE_SPS) : ((x) == HEVC_NAL_TYPE_SPS))
+#define IS_NAL_PPS(x) ((videoFormat == VIDEO_FORMAT_H264) ? ((x) == AVC_NAL_TYPE_PPS) : ((x) == HEVC_NAL_TYPE_PPS))
+#define IS_NAL_VPS(x) ((videoFormat == VIDEO_FORMAT_H265) && ((x) == HEVC_NAL_TYPE_VPS))
+
+- (Boolean)readyForPictureData
+{
+    if (videoFormat == VIDEO_FORMAT_H264) {
+        return !waitingForSps && !waitingForPps;
+    }
+    else {
+        // H.265 requires VPS in addition to SPS and PPS
+        return !waitingForVps && !waitingForSps && !waitingForPps;
+    }
+}
+
+- (Boolean)isNalReferencePicture:(unsigned char)nalType
+{
+    if (videoFormat == VIDEO_FORMAT_H264) {
+        return nalType == 0x65;
+    }
+    else {
+        // HEVC has several types of reference NALU types
+        switch (nalType) {
+            case 0x20:
+            case 0x22:
+            case 0x24:
+            case 0x26:
+            case 0x28:
+            case 0x2A:
+                return true;
+            default:
+                return false;
+        }
+    }
+}
+
+- (Boolean)isNalPictureData:(unsigned char)nalType
+{
+    if (videoFormat == VIDEO_FORMAT_H264) {
+        return !(nalType == AVC_NAL_TYPE_SPS || nalType == AVC_NAL_TYPE_PPS);
+    }
+    else {
+        return !(nalType == HEVC_NAL_TYPE_VPS || nalType == HEVC_NAL_TYPE_SPS || nalType == HEVC_NAL_TYPE_PPS);
+    }
+}
+
 
 - (void)updateBufferForRange:(CMBlockBufferRef)existingBuffer data:(unsigned char *)data offset:(int)offset length:(int)nalLength
 {
@@ -136,7 +193,7 @@
 // This function must free data
 - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length
 {
-    unsigned char nalType = data[FRAME_START_PREFIX_SIZE] & 0x1F;
+    unsigned char nalType = data[FRAME_START_PREFIX_SIZE];
     OSStatus status;
     
     // Check for previous decoder errors before doing anything
@@ -150,35 +207,69 @@
         return DR_NEED_IDR;
     }
     
-    if (nalType == NAL_TYPE_SPS || nalType == NAL_TYPE_PPS) {
-        if (nalType == NAL_TYPE_SPS) {
+    if (![self isNalPictureData:nalType]) {
+        if (IS_NAL_VPS(nalType)) {
+            Log(LOG_I, @"Got VPS");
+            vpsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
+            waitingForVps = false;
+            
+            // We got a new VPS so wait for a new SPS to match it
+            waitingForSps = true;
+        }
+        else if (IS_NAL_SPS(nalType)) {
             Log(LOG_I, @"Got SPS");
             spsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
             waitingForSps = false;
             
             // We got a new SPS so wait for a new PPS to match it
             waitingForPps = true;
-        } else if (nalType == NAL_TYPE_PPS) {
+        } else if (IS_NAL_PPS(nalType)) {
             Log(LOG_I, @"Got PPS");
             ppsData = [NSData dataWithBytes:&data[FRAME_START_PREFIX_SIZE] length:length - FRAME_START_PREFIX_SIZE];
             waitingForPps = false;
         }
         
-        // See if we've got all the parameter sets we need
-        if (!waitingForSps && !waitingForPps) {
-            const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsData bytes] };
-            const size_t parameterSetSizes[] = { [spsData length], [ppsData length] };
-            
-            Log(LOG_I, @"Constructing new format description");
-            status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
-                                                                         2, /* count of parameter sets */
-                                                                         parameterSetPointers,
-                                                                         parameterSetSizes,
-                                                                         NAL_LENGTH_PREFIX_SIZE,
-                                                                         &formatDesc);
-            if (status != noErr) {
-                Log(LOG_E, @"Failed to create format description: %d", (int)status);
-                formatDesc = NULL;
+        // See if we've got all the parameter sets we need for our video format
+        if ([self readyForPictureData]) {
+            if (videoFormat == VIDEO_FORMAT_H264) {
+                const uint8_t* const parameterSetPointers[] = { [spsData bytes], [ppsData bytes] };
+                const size_t parameterSetSizes[] = { [spsData length], [ppsData length] };
+                
+                Log(LOG_I, @"Constructing new H264 format description");
+                status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
+                                                                             2, /* count of parameter sets */
+                                                                             parameterSetPointers,
+                                                                             parameterSetSizes,
+                                                                             NAL_LENGTH_PREFIX_SIZE,
+                                                                             &formatDesc);
+                if (status != noErr) {
+                    Log(LOG_E, @"Failed to create H264 format description: %d", (int)status);
+                    formatDesc = NULL;
+                }
+            }
+            else {
+                const uint8_t* const parameterSetPointers[] = { [vpsData bytes], [spsData bytes], [ppsData bytes] };
+                const size_t parameterSetSizes[] = { [vpsData length], [spsData length], [ppsData length] };
+                
+                Log(LOG_I, @"Constructing new HEVC format description");
+                if (@available(iOS 11.0, *)) {
+                    status = CMVideoFormatDescriptionCreateFromHEVCParameterSets(kCFAllocatorDefault,
+                                                                                 3, /* count of parameter sets */
+                                                                                 parameterSetPointers,
+                                                                                 parameterSetSizes,
+                                                                                 NAL_LENGTH_PREFIX_SIZE,
+                                                                                 nil,
+                                                                                 &formatDesc);
+                } else {
+                    // This means Moonlight-common-c decided to give us an HEVC stream
+                    // even though we said we couldn't support it. All we can do is abort().
+                    abort();
+                }
+                
+                if (status != noErr) {
+                    Log(LOG_E, @"Failed to create HEVC format description: %d", (int)status);
+                    formatDesc = NULL;
+                }
             }
         }
 
@@ -195,12 +286,6 @@
         return DR_OK;
     }
     
-    if (nalType != 0x1 && nalType != 0x5) {
-        // Don't submit parameter set data
-        free(data);
-        return DR_OK;
-    }
-    
     // Now we're decoding actual frame data here
     CMBlockBufferRef blockBuffer;
     
@@ -252,7 +337,7 @@
     CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
     CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue);
     
-    if (nalType == 1) {
+    if (![self isNalReferencePicture:nalType]) {
         // P-frame
         CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue);
         CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue);