author | Frank Elias <francois.elias@livio.io> | 2022-08-10 08:51:19 -0400
committer | Frank Elias <francois.elias@livio.io> | 2022-08-10 08:51:19 -0400
commit | 6032aeb4ef72276515a70d31c9ffb0a632bc9521 (patch)
tree | f6595005852e4ec85b20c873e77d02f66a21a550
parent | 42393ce4226a3a1742a164a7be1c8045a07b215f (diff)
download | sdl_ios-poc/metal-view-capturing.tar.gz
draft changes (poc/metal-view-capturing)
-rw-r--r-- | SmartDeviceLink/private/SDLCarWindow.m | 187
-rw-r--r-- | SmartDeviceLink/private/SDLH264VideoEncoder.m | 47
2 files changed, 187 insertions, 47 deletions
diff --git a/SmartDeviceLink/private/SDLCarWindow.m b/SmartDeviceLink/private/SDLCarWindow.m
index ce5bb704f..516292684 100644
--- a/SmartDeviceLink/private/SDLCarWindow.m
+++ b/SmartDeviceLink/private/SDLCarWindow.m
@@ -23,9 +23,12 @@
 #import "SDLStreamingVideoScaleManager.h"
 #import "SDLStreamingMediaManagerConstants.h"
 #import "SDLVideoStreamingCapability.h"
+#import <Accelerate/Accelerate.h>
 
 NS_ASSUME_NONNULL_BEGIN
 
+@import MetalKit;
+@import ReplayKit;
 @interface SDLCarWindow ()
 
 @property (weak, nonatomic, nullable) SDLStreamingVideoLifecycleManager *streamManager;
@@ -78,34 +81,170 @@ NS_ASSUME_NONNULL_BEGIN
         return;
     }
 
-    UIGraphicsBeginImageContextWithOptions(bounds.size, YES, 1.0f);
-    switch (self.renderingType) {
-        case SDLCarWindowRenderingTypeLayer: {
-            [self.rootViewController.view.layer renderInContext:UIGraphicsGetCurrentContext()];
-        } break;
-        case SDLCarWindowRenderingTypeViewAfterScreenUpdates: {
-            [self.rootViewController.view drawViewHierarchyInRect:bounds afterScreenUpdates:YES];
-        } break;
-        case SDLCarWindowRenderingTypeViewBeforeScreenUpdates: {
-            [self.rootViewController.view drawViewHierarchyInRect:bounds afterScreenUpdates:NO];
-        } break;
-    }
+    BOOL recordingScreen = false;
+    if (@available(iOS 11.0, *)) {
+        if (![[RPScreenRecorder sharedRecorder] isRecording] && !recordingScreen) {
+            recordingScreen = true;
+            [[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
+                if (error) {
+                    SDLLogD(@"Video stream error %@", error.debugDescription);
+                } else {
+//                    [self.streamManager sendVideoData:CMSampleBufferGetImageBuffer(sampleBuffer)];
+                    int outWidth = bounds.size.width, outHeight = bounds.size.height;
+
+                    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+                    CVPixelBufferLockBaseAddress(imageBuffer, 0);
+                    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
+                    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
+
+                    vImage_Buffer inBuff;
+                    inBuff.height = 812;
+                    inBuff.width = 375;
+                    inBuff.rowBytes = bytesPerRow;
+                    inBuff.data = baseAddress;
+
+                    unsigned char *outImg = (unsigned char *)malloc(4 * outWidth * outHeight);
+                    vImage_Buffer outBuff = {outImg, outHeight, outWidth, 4 * outWidth};
+
+                    CVPixelBufferRef bufferRef;
+                    CVPixelBufferCreateWithBytes(kCFAllocatorDefault, bounds.size.width, bounds.size.height, kCVPixelFormatType_64RGBAHalf, outImg, bytesPerRow, nil, nil, nil, &bufferRef);
+                    vImage_Error err = vImageScale_ARGB8888(&inBuff, &outBuff, NULL, 0);
+                    if (err != kvImageNoError) NSLog(@"error %ld", err);
+                }
+            } completionHandler:^(NSError * _Nullable error) {
+                if (error) {
+                    SDLLogD(@"Video stream error %@", error.debugDescription);
+                }
+            }];
+        }
+    } else {
+        // Fallback on earlier versions
+        UIGraphicsBeginImageContextWithOptions(bounds.size, YES, 1.0f);
+        switch (self.renderingType) {
+            case SDLCarWindowRenderingTypeLayer: {
+                [self.rootViewController.view.layer renderInContext:UIGraphicsGetCurrentContext()];
+            } break;
+            case SDLCarWindowRenderingTypeViewAfterScreenUpdates: {
+                [self.rootViewController.view drawViewHierarchyInRect:bounds afterScreenUpdates:YES];
+            } break;
+            case SDLCarWindowRenderingTypeViewBeforeScreenUpdates: {
+                [self.rootViewController.view drawViewHierarchyInRect:bounds afterScreenUpdates:NO];
+            } break;
+        }
 
-    UIImage *screenshot = UIGraphicsGetImageFromCurrentImageContext();
-    UIGraphicsEndImageContext();
+        UIImage *screenshot = UIGraphicsGetImageFromCurrentImageContext();
+        UIGraphicsEndImageContext();
 
-    CGImageRef imageRef = screenshot.CGImage;
-    CVPixelBufferRef pixelBuffer = [self.class sdl_createPixelBufferForImageRef:imageRef usingPool:self.streamManager.pixelBufferPool];
-    if (pixelBuffer != nil) {
-        BOOL success = [self.streamManager sendVideoData:pixelBuffer];
-        if (!success) {
-            SDLLogE(@"Video frame will not be sent because the video frame encoding failed");
-            return;
+        CGImageRef imageRef = screenshot.CGImage;
+//        CVPixelBufferRef pixelBuffer = [self.class sdl_pixelBufferForImageRef:imageRef usingPool:self.streamManager.pixelBufferPool];
+        CVPixelBufferRef pixelBuffer = [self.class sdl_createPixelBufferForImageRef:imageRef usingPool:self.streamManager.pixelBufferPool];
+        if (pixelBuffer != nil) {
+            [self.streamManager sendVideoData:pixelBuffer];
+            CVPixelBufferRelease(pixelBuffer);
         }
-        CVPixelBufferRelease(pixelBuffer);
-    } else {
-        SDLLogE(@"Video frame will not be sent because the pixelBuffer is nil");
     }
+//    BOOL metalViewIsThere = false;
+//    UIView *viewToBeRendered;
+//    MTKView *metalView;
+//    for (UIView *subView in self.rootViewController.view.subviews) {
+//        for (UIView *subView2 in subView.subviews) {
+//            if ([subView2 isKindOfClass:[MTKView class]]) {
+//                NSLog(@"we landed on the moon");
+//                metalViewIsThere = true;
+//                metalView = (MTKView *)subView2;
+//                break;
+//            }
+//        }
+//    }
+
+//    CGImageRef imageRef;
+//    CVPixelBufferRef pixelBuffer = NULL;
+//    if (metalView != nil) {
+//        metalView.framebufferOnly = NO;
+//        id<MTLTexture> lastDrawableDisplayed = metalView.currentDrawable.texture;
+//        if (lastDrawableDisplayed.buffer != nil) {
+//            CVPixelBufferCreateWithBytes(kCFAllocatorDefault, lastDrawableDisplayed.width, lastDrawableDisplayed.height, kCVPixelFormatType_64RGBALE, [lastDrawableDisplayed.buffer contents], lastDrawableDisplayed.bufferBytesPerRow, nil, nil, nil, &pixelBuffer);
+//            CVPixelBufferCreate(kCFAllocatorDefault, lastDrawableDisplayed.width, lastDrawableDisplayed.height, kCVPixelFormatType_64RGBALE, nil, &pixelBuffer);
+//        }
+//        if let datas = targetTexture.texture.buffer?.contents() {
+//            CVPixelBufferCreateWithBytes(kCFAllocatorDefault, targetTexture.width,
+//                                         targetTexture.height, kCVPixelFormatType_64RGBAHalf, datas,
+//                                         targetTexture.texture.bufferBytesPerRow, nil, nil, nil, &outPixelbuffer);
+//        }
+//        int width = (int)[lastDrawableDisplayed width];
+//        int height = (int)[lastDrawableDisplayed height];
+//        int rowBytes = width;
+//        int selfturesize = width * height;
+//
+//        void *p = malloc(selfturesize);
+//
+//        [lastDrawableDisplayed getBytes:p bytesPerRow:rowBytes fromRegion:MTLRegionMake2D(0, 0, width, height) mipmapLevel:0];
+//
+//        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+//        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;
+//        CGDataProviderRef provider = CGDataProviderCreateWithData(nil, p, selfturesize, nil);
+//        imageRef = CGImageCreate(width, height, 8, 32, rowBytes, colorSpace, bitmapInfo, provider, nil, true, (CGColorRenderingIntent)kCGRenderingIntentDefault);
+//        metalView.framebufferOnly = YES;
+
+//        CFRelease(imageRef);
+//        free(p);
+
+//        [[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
+//            switch (bufferType) {
+//                case RPSampleBufferTypeVideo:
+//                    NSLog(@"something");
+//                    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+//                    break;
+//                default:
+//                    break;
+//            }
+//        } completionHandler:^(NSError * _Nullable error) {
NSLog(@"error while capturing the window while streming the cardwindow"); +// }]; +// } else { +// UIGraphicsBeginImageContextWithOptions(bounds.size, YES, 1.0f); +// switch (self.renderingType) { +// case SDLCarWindowRenderingTypeLayer: { +// [self.rootViewController.view.layer renderInContext:UIGraphicsGetCurrentContext()]; +// } break; +// case SDLCarWindowRenderingTypeViewAfterScreenUpdates: { +// viewToBeRendered = [self.rootViewController.view snapshotViewAfterScreenUpdates:YES]; +// // [viewToBeRendered drawViewHierarchyInRect:bounds afterScreenUpdates:YES]; +// // [self.rootViewController.view drawViewHierarchyInRect:bounds afterScreenUpdates:YES]; +// } break; +// case SDLCarWindowRenderingTypeViewBeforeScreenUpdates: { +// viewToBeRendered = [self.rootViewController.view snapshotViewAfterScreenUpdates:NO]; +// // [viewToBeRendered drawViewHierarchyInRect:bounds afterScreenUpdates:NO]; +// // [self.rootViewController.view drawViewHierarchyInRect:bounds afterScreenUpdates:NO]; +// } break; +// } + +// UIImage *screenshot = UIGraphicsGetImageFromCurrentImageContext(); +// UIGraphicsEndImageContext(); +// imageRef = screenshot.CGImage; +// +// pixelBuffer = [self.class sdl_createPixelBufferForImageRef:imageRef usingPool:self.streamManager.pixelBufferPool]; + +// if (pixelBuffer != nil) { +// BOOL success = [self.streamManager sendVideoData:pixelBuffer]; +// if (!success) { +// SDLLogE(@"Video frame will not be sent because the video frame encoding failed"); +// return; +// } +// CVPixelBufferRelease(pixelBuffer); +// } else { +// SDLLogE(@"Video frame will not be sent because the pixelBuffer is nil"); +// } +} + +- (BOOL)viewControllerContainsMetalView:(UIView *)view { + if (view.subviews == 0) { + return NO; + } else if ([view isKindOfClass:[MTKView class]]) { + return YES; + } + return NO; } - (void)updateVideoStreamingCapability:(SDLVideoStreamingCapability *)videoStreamingCapability { diff --git a/SmartDeviceLink/private/SDLH264VideoEncoder.m b/SmartDeviceLink/private/SDLH264VideoEncoder.m index 2d58a1263..6dc724019 100644 --- a/SmartDeviceLink/private/SDLH264VideoEncoder.m +++ b/SmartDeviceLink/private/SDLH264VideoEncoder.m @@ -55,7 +55,7 @@ static NSDictionary<NSString *, id>* _defaultVideoEncoderSettings; if (!self) { return nil; } - + _compressionSession = NULL; _currentFrameNumber = 0; _videoEncoderSettings = properties; @@ -111,7 +111,7 @@ static NSDictionary<NSString *, id>* _defaultVideoEncoderSettings; if (self.videoEncoderSettings[(__bridge NSString *)kVTCompressionPropertyKey_ExpectedFrameRate] != nil) { timeRate = ((NSNumber *)self.videoEncoderSettings[(__bridge NSString *)kVTCompressionPropertyKey_ExpectedFrameRate]).intValue; } - + presentationTimestamp = CMTimeMake((int64_t)self.currentFrameNumber, timeRate); } self.currentFrameNumber++; @@ -130,7 +130,7 @@ static NSDictionary<NSString *, id>* _defaultVideoEncoderSettings; if (self.pixelBufferPool == NULL) { return NULL; } - + CVPixelBufferRef pixelBuffer; CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, self.pixelBufferPool, @@ -174,11 +174,11 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi SDLLogW(@"Error encoding video frame: %d", (int)status); return; } - + if (outputCallbackRefCon == NULL || sourceFrameRefCon == NULL || sampleBuffer == NULL) { return; } - + SDLH264VideoEncoder *encoder = (__bridge SDLH264VideoEncoder *)sourceFrameRefCon; NSArray *nalUnits = [encoder.class sdl_extractNalUnitsFromSampleBuffer:sampleBuffer]; @@ -194,7 +194,7 @@ void 
diff --git a/SmartDeviceLink/private/SDLH264VideoEncoder.m b/SmartDeviceLink/private/SDLH264VideoEncoder.m
index 2d58a1263..6dc724019 100644
--- a/SmartDeviceLink/private/SDLH264VideoEncoder.m
+++ b/SmartDeviceLink/private/SDLH264VideoEncoder.m
@@ -55,7 +55,7 @@ static NSDictionary<NSString *, id>* _defaultVideoEncoderSettings;
     if (!self) {
         return nil;
     }
-    
+
     _compressionSession = NULL;
     _currentFrameNumber = 0;
     _videoEncoderSettings = properties;
@@ -111,7 +111,7 @@ static NSDictionary<NSString *, id>* _defaultVideoEncoderSettings;
         if (self.videoEncoderSettings[(__bridge NSString *)kVTCompressionPropertyKey_ExpectedFrameRate] != nil) {
             timeRate = ((NSNumber *)self.videoEncoderSettings[(__bridge NSString *)kVTCompressionPropertyKey_ExpectedFrameRate]).intValue;
         }
-    
+
         presentationTimestamp = CMTimeMake((int64_t)self.currentFrameNumber, timeRate);
     }
     self.currentFrameNumber++;
@@ -130,7 +130,7 @@ static NSDictionary<NSString *, id>* _defaultVideoEncoderSettings;
     if (self.pixelBufferPool == NULL) {
         return NULL;
     }
-    
+
     CVPixelBufferRef pixelBuffer;
     CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                        self.pixelBufferPool,
@@ -174,11 +174,11 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi
         SDLLogW(@"Error encoding video frame: %d", (int)status);
         return;
     }
-    
+
     if (outputCallbackRefCon == NULL || sourceFrameRefCon == NULL || sampleBuffer == NULL) {
         return;
     }
-    
+
     SDLH264VideoEncoder *encoder = (__bridge SDLH264VideoEncoder *)sourceFrameRefCon;
     NSArray *nalUnits = [encoder.class sdl_extractNalUnitsFromSampleBuffer:sampleBuffer];
@@ -194,7 +194,7 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi
     NSArray *packets = [encoder.packetizer createPackets:nalUnits presentationTimestamp:(presentationTimestamp - encoder.timestampOffset)];
-    
+
     if ([encoder.delegate respondsToSelector:@selector(videoEncoder:hasEncodedFrame:)]) {
         for (NSData *packet in packets) {
             [encoder.delegate videoEncoder:encoder hasEncodedFrame:packet];
@@ -206,17 +206,17 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi
 - (CFDictionaryRef _Nullable)pixelBufferOptions {
     if (_pixelBufferOptions == nil) {
         CFMutableDictionaryRef pixelBufferOptions = CFDictionaryCreateMutable(NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
-    
+
         OSType pixelFormatType = kCVPixelFormatType_32BGRA;
-    
+
         CFNumberRef pixelFormatNumberRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &pixelFormatType);
-    
+
         CFDictionarySetValue(pixelBufferOptions, kCVPixelBufferCGImageCompatibilityKey, kCFBooleanFalse);
         CFDictionarySetValue(pixelBufferOptions, kCVPixelBufferCGBitmapContextCompatibilityKey, kCFBooleanFalse);
         CFDictionarySetValue(pixelBufferOptions, kCVPixelBufferPixelFormatTypeKey, pixelFormatNumberRef);
-    
+
         CFRelease(pixelFormatNumberRef);
-    
+
         _pixelBufferOptions = pixelBufferOptions;
     }
@@ -229,20 +229,20 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi
     NSMutableArray *nalUnits = [NSMutableArray array];
     BOOL isIFrame = NO;
     CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
-    
+
     if (CFArrayGetCount(attachmentsArray)) {
         CFBooleanRef notSync;
         CFDictionaryRef dict = CFArrayGetValueAtIndex(attachmentsArray, 0);
         BOOL keyExists = CFDictionaryGetValueIfPresent(dict, kCMSampleAttachmentKey_NotSync, (const void **)&notSync);
-    
+
         // Find out if the sample buffer contains an I-Frame (sync frame). If so we will write the SPS and PPS NAL units to the elementary stream.
         isIFrame = !keyExists || !CFBooleanGetValue(notSync);
     }
-    
+
     // Write the SPS and PPS NAL units to the elementary stream before every I-Frame
     if (isIFrame) {
         CMFormatDescriptionRef description = CMSampleBufferGetFormatDescription(sampleBuffer);
-    
+
         // Find out how many parameter sets there are
         size_t numberOfParameterSets;
         CMVideoFormatDescriptionGetH264ParameterSetAtIndex(description,
                                                            0,
                                                            NULL,
                                                            NULL,
                                                            &numberOfParameterSets,
                                                            NULL);
-    
+
         // Write each parameter set to the elementary stream
         for (int i = 0; i < numberOfParameterSets; i++) {
             const uint8_t *parameterSetPointer;
             size_t parameterSetLength;
             CMVideoFormatDescriptionGetH264ParameterSetAtIndex(description,
                                                                i,
                                                                &parameterSetPointer,
                                                                &parameterSetLength,
                                                                NULL,
                                                                NULL);
-    
+
             // Output the parameter set
             NSData *nalUnit = [NSData dataWithBytesNoCopy:(uint8_t *)parameterSetPointer length:parameterSetLength freeWhenDone:NO];
             [nalUnits addObject:nalUnit];
         }
     }
-    
+
     // Get a pointer to the raw AVCC NAL unit data in the sample buffer
     size_t blockBufferLength = 0;
     char *bufferDataPointer = NULL;
     CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBuffer);
-    
+
     CMBlockBufferGetDataPointer(blockBufferRef, 0, NULL, &blockBufferLength, &bufferDataPointer);
-    
+
     // Loop through all the NAL units in the block buffer and write them to the elementary stream with start codes instead of AVCC length headers
     size_t bufferOffset = 0;
     static const int AVCCHeaderLength = 4;
@@ -283,14 +283,14 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi
         // Read the NAL unit length
         uint32_t NALUnitLength = 0;
         memcpy(&NALUnitLength, bufferDataPointer + bufferOffset, AVCCHeaderLength);
-    
+
         // Convert the length value from big-endian to host byte order
         NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
-    
+
         // Write the NAL unit without the AVCC length header to the elementary stream
         NSData *nalUnit = [NSData dataWithBytesNoCopy:bufferDataPointer + bufferOffset + AVCCHeaderLength length:NALUnitLength freeWhenDone:NO];
         [nalUnits addObject:nalUnit];
-    
+
         // Move to the next NAL unit in the block buffer
         bufferOffset += AVCCHeaderLength + NALUnitLength;
     }
@@ -304,6 +304,7 @@ void sdl_videoEncoderOutputCallback(void * CM_NULLABLE outputCallbackRefCon, voi
     VTCompressionSessionCompleteFrames(self.compressionSession, kCMTimeInvalid);
     VTCompressionSessionInvalidate(self.compressionSession);
+    if (self.compressionSession == NULL) { return; }
     CFRelease(self.compressionSession);
     self.compressionSession = NULL;
 }
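For reference, the walk in sdl_extractNalUnitsFromSampleBuffer: above relies on AVCC framing, where each NAL unit is prefixed with a 4-byte big-endian length rather than an Annex B start code; the in-tree code hands the raw payloads to the packetizer, which adds its own framing. The standalone sketch below shows the same conversion with explicit start codes and bounds checks (SDLExampleAnnexBNalUnits is a hypothetical name, not part of the commit):

#import <Foundation/Foundation.h>

// Convert an AVCC-framed H.264 buffer (4-byte big-endian length prefixes)
// into Annex B NAL units (0x00000001 start codes).
static NSArray<NSData *> *SDLExampleAnnexBNalUnits(const uint8_t *avccData, size_t length) {
    static const size_t kAVCCHeaderLength = 4;
    static const uint8_t kStartCode[] = {0x00, 0x00, 0x00, 0x01};
    NSMutableArray<NSData *> *nalUnits = [NSMutableArray array];

    size_t offset = 0;
    while (offset + kAVCCHeaderLength <= length) {
        // Read the big-endian length prefix and convert it to host byte order
        uint32_t nalLength = 0;
        memcpy(&nalLength, avccData + offset, kAVCCHeaderLength);
        nalLength = CFSwapInt32BigToHost(nalLength);
        if (offset + kAVCCHeaderLength + nalLength > length) { break; } // truncated buffer

        // Emit a start code followed by the NAL payload, dropping the length header
        NSMutableData *nalUnit = [NSMutableData dataWithBytes:kStartCode length:sizeof(kStartCode)];
        [nalUnit appendBytes:avccData + offset + kAVCCHeaderLength length:nalLength];
        [nalUnits addObject:nalUnit];

        offset += kAVCCHeaderLength + nalLength;
    }
    return nalUnits;
}

The SPS and PPS parameter sets retrieved via CMVideoFormatDescriptionGetH264ParameterSetAtIndex would be framed the same way before each I-frame, exactly as the loop in the diff does for the elementary stream.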