Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Limelight/Database/TemporarySettings.h
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
@property (nonatomic) BOOL playAudioOnPC;
@property (nonatomic) BOOL optimizeGames;
@property (nonatomic) BOOL enableHdr;
@property (nonatomic) BOOL videoSuperResolution;
@property (nonatomic) BOOL btMouseSupport;
@property (nonatomic) BOOL absoluteTouchMode;
@property (nonatomic) BOOL statsOverlay;
Expand Down
2 changes: 2 additions & 0 deletions Limelight/Database/TemporarySettings.m
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ - (id) initFromSettings:(Settings*)settings {
self.useFramePacing = [[NSUserDefaults standardUserDefaults] integerForKey:@"useFramePacing"] != 0;
self.playAudioOnPC = [[NSUserDefaults standardUserDefaults] boolForKey:@"audioOnPC"];
self.enableHdr = [[NSUserDefaults standardUserDefaults] boolForKey:@"enableHdr"];
self.videoSuperResolution = [[NSUserDefaults standardUserDefaults] boolForKey:@"videoSuperResolution"];
self.optimizeGames = [[NSUserDefaults standardUserDefaults] boolForKey:@"optimizeGames"];
self.multiController = [[NSUserDefaults standardUserDefaults] boolForKey:@"multipleControllers"];
self.swapABXYButtons = [[NSUserDefaults standardUserDefaults] boolForKey:@"swapABXYButtons"];
Expand Down Expand Up @@ -78,6 +79,7 @@ - (id) initFromSettings:(Settings*)settings {
self.useFramePacing = settings.useFramePacing;
self.playAudioOnPC = settings.playAudioOnPC;
self.enableHdr = settings.enableHdr;
self.videoSuperResolution = [[NSUserDefaults standardUserDefaults] boolForKey:@"videoSuperResolution"];
self.optimizeGames = settings.optimizeGames;
self.multiController = settings.multiController;
self.swapABXYButtons = settings.swapABXYButtons;
Expand Down
5 changes: 4 additions & 1 deletion Limelight/Stream/StreamManager.m
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,7 @@ - (BOOL) resumeApp:(HttpManager*)hMan receiveSessionUrl:(NSString**)sessionUrl {

- (NSString*) getStatsOverlayText {
video_stats_t stats;
BOOL videoSuperResolutionEnabled = [[NSUserDefaults standardUserDefaults] boolForKey:@"videoSuperResolution"];

if (!_connection) {
return nil;
Expand Down Expand Up @@ -179,11 +180,13 @@ - (NSString*) getStatsOverlayText {
}

float interval = stats.endTime - stats.startTime;
return [NSString stringWithFormat:@"Video stream: %dx%d %.2f FPS (Codec: %@)\nFrames dropped by your network connection: %.2f%%\nAverage network latency: %@%@",
NSString* videoEnhancementString = videoSuperResolutionEnabled ? @"\nVideo Enhancement: MetalFX" : @"";
return [NSString stringWithFormat:@"Video stream: %dx%d %.2f FPS (Codec: %@)%@\nFrames dropped by your network connection: %.2f%%\nAverage network latency: %@%@",
_config.width,
_config.height,
stats.totalFrames / interval,
[_connection getActiveCodecName],
videoEnhancementString,
stats.networkDroppedFrames / interval,
latencyString,
hostProcessingString];
Expand Down
236 changes: 228 additions & 8 deletions Limelight/Stream/VideoDecoderRenderer.m
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@

#import "VideoDecoderRenderer.h"
#import "StreamView.h"
#import "VideoSuperResolution.h"

@import VideoToolbox;

#include <libavcodec/avcodec.h>
#include <libavcodec/cbs.h>
Expand All @@ -19,6 +22,8 @@
extern int ff_isom_write_av1c(AVIOContext *pb, const uint8_t *buf, int size,
int write_seq_header);

static const void* kVideoSuperResolutionQueueKey = &kVideoSuperResolutionQueueKey;

@implementation VideoDecoderRenderer {
StreamView* _view;
id<ConnectionCallbacks> _callbacks;
Expand All @@ -27,11 +32,19 @@ @implementation VideoDecoderRenderer {
AVSampleBufferDisplayLayer* displayLayer;
int videoFormat;
int frameRate;
int videoWidth;
int videoHeight;

NSMutableArray *parameterSetBuffers;
NSData *masteringDisplayColorVolume;
NSData *contentLightLevelInfo;
CMVideoFormatDescriptionRef formatDesc;
VideoSuperResolution* _videoSuperResolution;
BOOL _useVideoSuperResolution;
BOOL _videoSuperResolutionHdrEnabled;
dispatch_queue_t _videoSuperResolutionQueue;
VTDecompressionSessionRef _decompressionSession;
CGSize _videoSuperResolutionTargetSize;

CADisplayLink* _displayLink;
BOOL framePacing;
Expand Down Expand Up @@ -85,18 +98,199 @@ - (id)initWithView:(StreamView*)view callbacks:(id<ConnectionCallbacks>)callback
_callbacks = callbacks;
_streamAspectRatio = aspectRatio;
framePacing = useFramePacing;
_useVideoSuperResolution = [[NSUserDefaults standardUserDefaults] boolForKey:@"videoSuperResolution"];

parameterSetBuffers = [[NSMutableArray alloc] init];
if (_useVideoSuperResolution) {
_videoSuperResolution = [[VideoSuperResolution alloc] init];
// Build the long-lived GPU state once up front. Size-dependent resources are configured later.
[_videoSuperResolution initializeResources];
[_videoSuperResolution setHdrEnabled:NO];

// Keep the optional VSR decode/conversion path off the main thread.
_videoSuperResolutionQueue = dispatch_queue_create("com.moonlight.VideoSuperResolution", DISPATCH_QUEUE_SERIAL);

// Tag the queue so we can safely invalidate the VT session from either the queue itself
// or another thread without introducing a cross-thread lifetime bug.
dispatch_queue_set_specific(_videoSuperResolutionQueue, kVideoSuperResolutionQueueKey, (void*)kVideoSuperResolutionQueueKey, NULL);
}

[self reinitializeDisplayLayer];

return self;
}

// Returns the MetalFX output target size in physical pixels, derived from the display
// layer's current bounds and the screen's native scale. UIKit state is only read on
// the main thread; off-main callers block briefly while the values are captured.
- (CGSize)videoSuperResolutionTargetSize
{
    __block CGRect layerBounds = CGRectZero;
    __block CGFloat pixelScale = 1.0;

    void (^captureDisplayMetrics)(void) = ^{
        UIScreen* screen = self->_view.window.screen ?: UIScreen.mainScreen;

        // The display layer bounds define the actual presentation size after aspect-ratio fitting.
        layerBounds = self->displayLayer.bounds;
        pixelScale = screen.nativeScale > 0.0 ? screen.nativeScale : screen.scale;
    };

    if (NSThread.isMainThread) {
        captureDisplayMetrics();
    }
    else {
        dispatch_sync(dispatch_get_main_queue(), captureDisplayMetrics);
    }

    // Clamp to at least 1x1 so a not-yet-laid-out layer can't yield a degenerate size.
    return CGSizeMake(MAX(1.0, CGRectGetWidth(layerBounds) * pixelScale),
                      MAX(1.0, CGRectGetHeight(layerBounds) * pixelScale));
}

// Tears down the secondary VT decompression session. The session is owned by the VSR
// queue, so once that queue exists the release must execute on it. Safe to call before
// the queue is created, from the queue itself, or from any other thread.
- (void)invalidateVideoSuperResolutionDecoder
{
    // Single teardown path shared by every caller context below.
    void (^teardown)(void) = ^{
        if (self->_decompressionSession != NULL) {
            VTDecompressionSessionInvalidate(self->_decompressionSession);
            CFRelease(self->_decompressionSession);
            self->_decompressionSession = NULL;
        }
    };

    if (_videoSuperResolutionQueue == NULL) {
        // No owning queue yet (VSR disabled or very early teardown): release inline.
        teardown();
    }
    else if (dispatch_get_specific(kVideoSuperResolutionQueueKey) == kVideoSuperResolutionQueueKey) {
        // Already on the owning queue; a dispatch_sync here would deadlock.
        teardown();
    }
    else {
        dispatch_sync(_videoSuperResolutionQueue, teardown);
    }
}

// Creates (or recreates) the dedicated VideoToolbox decompression session used by the
// VideoSuperResolution path. Returns NO when VSR is disabled, when no stream format
// description exists yet, or when session creation fails — callers then fall back to
// the default display pipeline.
//
// NOTE(review): this is called both lazily on the VSR queue and from the decode
// submission path, while teardown asserts the session is owned by the VSR queue —
// confirm off-queue creation cannot race a queued decode against _decompressionSession.
- (BOOL)setupVideoSuperResolutionDecoder
{
    if (!_useVideoSuperResolution || formatDesc == NULL) {
        return NO;
    }

    // Rebuild the secondary decode session when the stream format or HDR mode changes.
    [self invalidateVideoSuperResolutionDecoder];

    NSDictionary* imageBufferAttributes = @{
        // Request a Metal-compatible decoder output so VideoSuperResolution can wrap it directly
        // as a CVMetalTexture without an extra CPU copy. 10-bit biplanar YUV when HDR is active,
        // 8-bit otherwise; IOSurface backing is required for zero-copy GPU access.
        (id)kCVPixelBufferPixelFormatTypeKey : @(_videoSuperResolutionHdrEnabled ? kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
        (id)kCVPixelBufferMetalCompatibilityKey : @YES,
        (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
    };

    OSStatus status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                                   formatDesc,
                                                   NULL,
                                                   (__bridge CFDictionaryRef)imageBufferAttributes,
                                                   NULL,
                                                   &_decompressionSession);
    if (status != noErr || _decompressionSession == NULL) {
        Log(LOG_E, @"Failed to create VideoSuperResolution decompression session: %d", (int)status);
        // Clean up any partially-created session state before reporting failure.
        [self invalidateVideoSuperResolutionDecoder];
        return NO;
    }

    return YES;
}

// Decodes one compressed sample on the dedicated VSR queue and, on success, enqueues the
// converted RGB frame onto the display layer from the main queue. Returns YES when the
// frame was accepted for asynchronous processing (the caller must then NOT enqueue the
// sample on the default path itself).
- (BOOL)enqueueVideoSuperResolutionSampleBuffer:(CMSampleBufferRef)sampleBuffer decodeUnit:(PDECODE_UNIT)du
{
    BOOL isIDRFrame = du->frameType == FRAME_TYPE_IDR;

    // Balanced by CFRelease on the VSR queue; the caller is free to drop its own
    // reference as soon as this method returns.
    CFRetain(sampleBuffer);

    dispatch_async(_videoSuperResolutionQueue, ^{
        @autoreleasepool {
            // Lazily create the secondary VT session on the queue that will own and use it.
            if (self->_decompressionSession == NULL && ![self setupVideoSuperResolutionDecoder]) {
                CFRelease(sampleBuffer);
                return;
            }

            __block CMSampleBufferRef rgbSampleBuffer = NULL;
            __block BOOL decodeCompleted = NO;
            VTDecodeInfoFlags infoFlags = 0;

            // Decode into a Metal-compatible pixel buffer, then hand that image buffer to the
            // VSR path for YUV->RGB conversion and optional MetalFX scaling. With no decode
            // flags set, decoding is synchronous: the handler runs before the call returns,
            // so the __block captures below are safe to read afterwards.
            OSStatus status = VTDecompressionSessionDecodeFrameWithOutputHandler(self->_decompressionSession,
                                                                                 sampleBuffer,
                                                                                 0,
                                                                                 &infoFlags,
                                                                                 ^(OSStatus decodeStatus,
                                                                                   VTDecodeInfoFlags decodeInfoFlags,
                                                                                   CVImageBufferRef _Nullable imageBuffer,
                                                                                   CMTime presentationTimeStamp,
                                                                                   CMTime presentationDuration) {
                decodeCompleted = YES;

                if (decodeStatus != noErr || imageBuffer == nil) {
                    Log(LOG_E, @"VideoSuperResolution decode failed: %d flags: %u", (int)decodeStatus, (unsigned int)decodeInfoFlags);
                    return;
                }

                // The source sample buffer is passed through so the RGB output can inherit its
                // HDR and color attachments before it is enqueued for display.
                rgbSampleBuffer = [self->_videoSuperResolution copyRGBSampleBufferFromImageBuffer:imageBuffer
                                                                               sourceSampleBuffer:sampleBuffer
                                                                            presentationTimeStamp:presentationTimeStamp
                                                                                         duration:presentationDuration];
            });

            CFRelease(sampleBuffer);

            if (status != noErr) {
                Log(LOG_E, @"VTDecompressionSessionDecodeFrame failed: %d", (int)status);

                // Fix: if the output handler already produced a retained RGB buffer before the
                // submission was reported as failed, release it here instead of leaking it.
                if (rgbSampleBuffer != NULL) {
                    CFRelease(rgbSampleBuffer);
                }

                [self invalidateVideoSuperResolutionDecoder];
                return;
            }

            if (!decodeCompleted || rgbSampleBuffer == NULL) {
                return;
            }

            dispatch_async(dispatch_get_main_queue(), ^{
                // AVSampleBufferDisplayLayer remains a main-thread-facing object even though
                // decoding and conversion happen on the dedicated VSR queue.
                [self->displayLayer enqueueSampleBuffer:rgbSampleBuffer];

                if (isIDRFrame) {
                    // First frame of an IDR: reveal the layer and hide the progress indicator.
                    self->displayLayer.hidden = NO;
                    [self->_callbacks videoContentShown];
                }

                CFRelease(rgbSampleBuffer);
            });
        }
    });

    return YES;
}

// Records the negotiated stream parameters and, when VSR is enabled, (re)sizes the
// MetalFX resources for the new input dimensions and current on-screen output size.
- (void)setupWithVideoFormat:(int)format width:(int)width height:(int)height frameRate:(int)fps
{
    videoFormat = format;
    videoWidth = width;
    videoHeight = height;
    frameRate = fps;

    if (_useVideoSuperResolution) {
        // The output size tracks the display layer's current pixel size on screen.
        _videoSuperResolutionTargetSize = [self videoSuperResolutionTargetSize];
        [_videoSuperResolution setHdrEnabled:_videoSuperResolutionHdrEnabled];

        BOOL configured = [_videoSuperResolution configureWithInputSize:CGSizeMake(width, height)
                                                             outputSize:_videoSuperResolutionTargetSize];
        if (!configured) {
            Log(LOG_W, @"VideoSuperResolution session resource configuration failed");
        }
    }
}

- (void)start
Expand Down Expand Up @@ -142,6 +336,12 @@ - (void)displayLinkCallback:(CADisplayLink *)sender

// Stops rendering: tears down the optional VSR decode session, then the display link.
// No explicit queue drain is needed before teardown: -invalidateVideoSuperResolutionDecoder
// performs its work via dispatch_sync on the serial VSR queue, which already waits for
// every previously enqueued frame block to finish. (The former empty dispatch_sync was
// redundant, and would have deadlocked if -stop ever ran on the VSR queue itself.)
- (void)stop
{
    [self invalidateVideoSuperResolutionDecoder];
    [_displayLink invalidate];
}

Expand Down Expand Up @@ -507,6 +707,10 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i
// Unsupported codec!
abort();
}

if (_useVideoSuperResolution && ![self setupVideoSuperResolutionDecoder]) {
Log(LOG_W, @"VideoSuperResolution setup failed. Falling back to default display pipeline.");
}
}

if (formatDesc == NULL) {
Expand Down Expand Up @@ -594,15 +798,18 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i
return DR_NEED_IDR;
}

// Enqueue the next frame
[self->displayLayer enqueueSampleBuffer:sampleBuffer];

if (du->frameType == FRAME_TYPE_IDR) {
// Ensure the layer is visible now
self->displayLayer.hidden = NO;
if (!_useVideoSuperResolution || ![self enqueueVideoSuperResolutionSampleBuffer:sampleBuffer decodeUnit:du]) {
// Enqueue the next frame on the existing path if VideoSuperResolution is disabled
// or if the optional YUV-to-RGB path couldn't process this frame.
[self->displayLayer enqueueSampleBuffer:sampleBuffer];

// Tell our parent VC to hide the progress indicator
[self->_callbacks videoContentShown];
if (du->frameType == FRAME_TYPE_IDR) {
// Ensure the layer is visible now
self->displayLayer.hidden = NO;

// Tell our parent VC to hide the progress indicator
[self->_callbacks videoContentShown];
}
}

// Dereference the buffers
Expand All @@ -616,6 +823,19 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i
- (void)setHdrMode:(BOOL)enabled {
SS_HDR_METADATA hdrMetadata;

if (_useVideoSuperResolution && _videoSuperResolutionHdrEnabled != enabled) {
_videoSuperResolutionHdrEnabled = enabled;
[_videoSuperResolution setHdrEnabled:enabled];

if (displayLayer != nil && videoWidth > 0 && videoHeight > 0) {
_videoSuperResolutionTargetSize = [self videoSuperResolutionTargetSize];
[_videoSuperResolution configureWithInputSize:CGSizeMake(videoWidth, videoHeight)
outputSize:_videoSuperResolutionTargetSize];
}

[self invalidateVideoSuperResolutionDecoder];
}

BOOL hasMetadata = enabled && LiGetHdrMetadata(&hdrMetadata);
BOOL metadataChanged = NO;

Expand Down
29 changes: 29 additions & 0 deletions Limelight/Stream/VideoSuperResolution.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
//
// VideoSuperResolution.h
// Moonlight
//

#import <Foundation/Foundation.h>
@import CoreMedia;
@import CoreVideo;
@import Metal;

@interface VideoSuperResolution : NSObject

// Creates the Metal/Core Image state shared by the whole streaming session.
// The renderer calls this once up front; size-dependent resources are built
// separately via -configureWithInputSize:outputSize:.
- (void)initializeResources;

// Rebuilds cached resources when the stream switches between SDR and HDR.
- (void)setHdrEnabled:(BOOL)enabled;

// Prepares all resources that only depend on the fixed input and output sizes for the session.
// Returns NO when configuration fails; callers log and continue with the unscaled path.
- (BOOL)configureWithInputSize:(CGSize)inputSize outputSize:(CGSize)outputSize;

// Converts the decoder output from YUV to RGB, optionally upscales it with MetalFX,
// and returns a new sample buffer ready for AVSampleBufferDisplayLayer.
// Follows the CF "copy" rule (CF_RETURNS_RETAINED): the caller owns the returned
// buffer and must CFRelease it. Returns NULL if conversion fails. The optional
// sourceSampleBuffer supplies attachments (e.g. HDR/color metadata) for the output.
- (CMSampleBufferRef _Nullable)copyRGBSampleBufferFromImageBuffer:(CVImageBufferRef _Nonnull)imageBuffer
                                               sourceSampleBuffer:(CMSampleBufferRef _Nullable)sourceSampleBuffer
                                            presentationTimeStamp:(CMTime)presentationTimeStamp
                                                         duration:(CMTime)duration CF_RETURNS_RETAINED;

@end
Loading