├── .gitignore
├── AVPlayerDecodeiOS
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│   ├── AppIcon.appiconset
│   │   └── Contents.json
│   └── Contents.json
├── Base.lproj
│   ├── LaunchScreen.storyboard
│   └── Main.storyboard
├── Info.plist
├── ViewController.h
├── ViewController.m
└── main.m
├── Application
├── AAPLAppDelegate.h
├── AAPLAppDelegate.m
├── AAPLViewController.h
├── AAPLViewController.m
├── iOS
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   └── Info.plist
├── macOS
│   ├── Base.lproj
│   │   └── Main.storyboard
│   └── Info.plist
├── main.m
└── tvOS
│   ├── Base.lproj
│   │   └── Main.storyboard
│   └── Info.plist
├── Configuration
└── SampleCode.xcconfig
├── CoreVideoDecodeiOS
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│   ├── AppIcon.appiconset
│   │   └── Contents.json
│   └── Contents.json
├── Base.lproj
│   ├── LaunchScreen.storyboard
│   └── Main.storyboard
├── Info.plist
├── ViewController.h
├── ViewController.m
└── main.m
├── Documentation
├── BGRA8UnormBitLayout.png
└── TextureCoordinates.png
├── EmptyiOS
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│   ├── AppIcon.appiconset
│   │   └── Contents.json
│   └── Contents.json
├── Base.lproj
│   ├── LaunchScreen.storyboard
│   └── Main.storyboard
├── Info.plist
├── ViewController.h
├── ViewController.m
└── main.m
├── EmptyiOSTests
├── AppleEncodeDecodeBT709Tests.m
├── CoreImageMetalFilterTests.m
├── EmptyiOSTests.m
├── Info.plist
├── MetalBT709DecoderTests.m
└── MetalSRGBDecoderTests.m
├── LICENSE
└── LICENSE.txt
├── MetalBT709Decoder.xcodeproj
├── .xcodesamplecode.plist
└── project.pbxproj
├── README.md
├── Renderer
├── AAPLImage.h
├── AAPLImage.m
├── AAPLRenderer.h
├── AAPLRenderer.m
├── AAPLShaderTypes.h
├── AAPLShaders.metal
├── AlphaBG.png
├── AlphaBGHalf.png
├── BGDecodeEncode.h
├── BGDecodeEncode.m
├── BGRAToBT709Converter.h
├── BGRAToBT709Converter.m
├── BT709.h
├── CGFrameBuffer.h
├── CGFrameBuffer.m
├── CVPixelBufferUtils.h
├── ColorsAlpha4by4.m4v
├── ColorsAlpha4by4_alpha.m4v
├── DalaiLamaGray_bt709.m4v
├── DalaiLamaGray_srgb.m4v
├── Gamma_test_HD_75Per_24BPP_sRGB_HD.m4v
├── GlobeLEDAlpha.m4v
├── GlobeLEDAlpha_alpha.m4v
├── H264Encoder.h
├── H264Encoder.m
├── Image.tga
├── MetalBT709Decoder.h
├── MetalBT709Decoder.m
├── MetalRenderContext.h
├── MetalRenderContext.m
├── MetalScaleRenderContext.h
├── MetalScaleRenderContext.m
├── QuickTime_Test_Pattern_HD.mov
├── QuickTime_Test_Pattern_HD_calibrated_RGB.png
├── QuickTime_Test_Pattern_HD_grayscale.m4v
├── QuickTime_Test_Pattern_HD_sRGB.png
├── QuickTime_Test_Pattern_SD.mov
├── Rec709Sample.mp4
├── RedCircleOverWhiteA.m4v
├── RedCircleOverWhiteA_alpha.m4v
├── RedFadeAlpha256.m4v
├── RedFadeAlpha256_alpha.m4v
├── WhitePer5.m4v
├── WhitePer50.m4v
├── WhitePer50_alpha.m4v
├── WhitePer5_alpha.m4v
├── big_buck_bunny_HD_apple.m4v
├── big_buck_bunny_HD_srgb.m4v
├── clouds_reflecting_off_the_beach-wallpaper-2048x1536.jpg
├── clouds_reflecting_off_the_beach-wallpaper-2048x1536.m4v
├── drop-of-water-iPad-2048-1536-apple-crf20.m4v
├── drop-of-water-iPad-2048-1536-sRGB-crf20.m4v
├── osxcolor_test_image_24bit_BT709.m4v
├── osxcolor_test_image_iPad_2048_1536.m4v
├── sRGB.h
└── y4m_writer.h
├── encode_h264
└── encode_h264.m
├── gamma_write
└── gamma_write.m
├── srgb_to_bt709
└── srgb_to_bt709.m
└── write_full_range
└── write_full_range.m
/.gitignore:
--------------------------------------------------------------------------------
1 | # See LICENSE folder for this sample’s licensing information. 2 | # 3 | # Apple sample code gitignore configuration.
4 | 5 | # Finder 6 | .DS_Store 7 | 8 | # Xcode - User files 9 | xcuserdata/ 10 | *.xcworkspace 11 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // AVPlayerDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/24/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // AVPlayerDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/24/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | 24 | - (void)applicationWillResignActive:(UIApplication *)application { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 27 | } 28 | 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | } 34 | 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application { 37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 38 | } 39 | 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application { 47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
48 | } 49 | 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | 
UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | UISupportedInterfaceOrientations~ipad 38 | 39 | UIInterfaceOrientationPortrait 40 | UIInterfaceOrientationPortraitUpsideDown 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // AVPlayerDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/24/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // AVPlayerDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/24/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import "ViewController.h" 10 | 11 | #import 12 | #import 13 | 14 | @interface ViewController () 15 | 16 | @property (nonatomic, retain) AVPlayerViewController *avPlayerViewcontroller; 17 | 18 | @end 19 | 20 | @implementation ViewController 21 | 22 | - (void)viewDidLoad { 23 | [super viewDidLoad]; 24 | 25 | UIView *view = self.view; 26 | 27 | NSString *resourceName = @"QuickTime_Test_Pattern_HD.mov"; 28 | 29 | NSString* movieFilePath = [[NSBundle mainBundle] pathForResource:resourceName ofType:nil]; 30 | NSAssert(movieFilePath, @"movieFilePath is nil"); 31 | 32 | NSURL *fileURL = [NSURL fileURLWithPath:movieFilePath]; 33 | 34 | AVPlayerViewController *playerViewController = [[AVPlayerViewController alloc] init]; 35 | 36 | playerViewController.player = [AVPlayer playerWithURL:fileURL]; 37 | 38 | self.avPlayerViewcontroller = playerViewController; 39 | 40 | [self resizePlayerToViewSize]; 41 | 42 | [view addSubview:playerViewController.view]; 43 | 44 | view.autoresizesSubviews = TRUE; 45 | } 46 | 47 | - (void) resizePlayerToViewSize 48 | { 49 | CGRect frame = self.view.frame; 50 | 51 | NSLog(@"frame size %d, %d", (int)frame.size.width, (int)frame.size.height); 52 | 53 | self.avPlayerViewcontroller.view.frame = frame; 54 | } 55 | 56 | @end 57 | -------------------------------------------------------------------------------- /AVPlayerDecodeiOS/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // AVPlayerDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/24/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /Application/AAPLAppDelegate.h: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 
3 | 4 | Abstract: 5 | Header for our iOS & tvOS application delegate 6 | */ 7 | 8 | #import <UIKit/UIKit.h> 9 | 10 | @interface AAPLAppDelegate : UIResponder <UIApplicationDelegate> 11 | 12 | @property (strong, nonatomic) UIWindow *window; 13 | 14 | @end 15 | -------------------------------------------------------------------------------- /Application/AAPLAppDelegate.m: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Implementation of our iOS & tvOS application delegate 6 | */ 7 | 8 | #import "AAPLAppDelegate.h" 9 | 10 | @implementation AAPLAppDelegate 11 | 12 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 13 | return YES; 14 | } 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /Application/AAPLViewController.h: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Header for our cross-platform view controller 6 | */ 7 | 8 | #if defined(TARGET_IOS) || defined(TARGET_TVOS) 9 | @import UIKit; 10 | #define PlatformViewController UIViewController 11 | #else 12 | @import AppKit; 13 | #define PlatformViewController NSViewController 14 | #endif 15 | 16 | @import MetalKit; 17 | 18 | #import "AAPLRenderer.h" 19 | 20 | // Our view controller 21 | @interface AAPLViewController : PlatformViewController 22 | 23 | @end 24 | -------------------------------------------------------------------------------- /Application/AAPLViewController.m: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information.
3 | 4 | Abstract: 5 | Implementation of our cross-platform view controller 6 | */ 7 | 8 | #import "AAPLViewController.h" 9 | #import "AAPLRenderer.h" 10 | 11 | #import 12 | 13 | @implementation AAPLViewController 14 | { 15 | #if TARGET_OS_IOS 16 | IBOutlet UIImageView *imageView; 17 | #else 18 | IBOutlet NSImageView *imageView; 19 | #endif // TARGET_OS_IOS 20 | 21 | IBOutlet MTKView *mtkView; 22 | 23 | AAPLRenderer *_renderer; 24 | } 25 | 26 | - (void)viewDidLoad 27 | { 28 | [super viewDidLoad]; 29 | 30 | BOOL alphaImageBackground = TRUE; 31 | // If alphaImageBackground is FALSE, background can be black or white 32 | BOOL blackBackground = TRUE; 33 | 34 | #if TARGET_OS_IOS 35 | if (alphaImageBackground) { 36 | UIImage *alphaImg = [UIImage imageNamed:@"AlphaBGHalf.png"]; 37 | assert(alphaImg); 38 | UIColor *patternColor = [UIColor colorWithPatternImage:alphaImg]; 39 | imageView.backgroundColor = patternColor; 40 | } else { 41 | UIColor *color; 42 | if (blackBackground == FALSE) { 43 | color = [UIColor whiteColor]; 44 | } else if (blackBackground) { 45 | color = [UIColor blackColor]; 46 | } 47 | imageView.backgroundColor = color; 48 | } 49 | #else 50 | // MacOSX 51 | if (alphaImageBackground) { 52 | NSImage *alphaImg = [NSImage imageNamed:@"AlphaBG.png"]; 53 | assert(alphaImg); 54 | NSColor *patternColor = [NSColor colorWithPatternImage:alphaImg]; 55 | [imageView setWantsLayer:YES]; 56 | imageView.layer.backgroundColor = patternColor.CGColor; 57 | } else { 58 | NSColor *color; 59 | if (blackBackground == FALSE) { 60 | color = [NSColor whiteColor]; 61 | } else if (blackBackground) { 62 | color = [NSColor blackColor]; 63 | } 64 | imageView.layer.backgroundColor = color.CGColor; 65 | } 66 | #endif // TARGET_OS_IOS 67 | 68 | mtkView.device = MTLCreateSystemDefaultDevice(); 69 | 70 | if(!mtkView.device) 71 | { 72 | NSLog(@"Metal is not supported on this device"); 73 | return; 74 | } 75 | 76 | _renderer = [[AAPLRenderer alloc] initWithMetalKitView:mtkView]; 77 | 78 | if(!_renderer) 79 | { 80 | NSLog(@"Renderer failed initialization"); 81 | return; 82 | } 83 | 84 | // Initialize our renderer with the view size 85 | [_renderer mtkView:mtkView drawableSizeWillChange:mtkView.drawableSize]; 86 | 87 | mtkView.delegate = _renderer; 88 | } 89 | 90 | @end 91 | -------------------------------------------------------------------------------- /Application/iOS/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /Application/iOS/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /Application/iOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | 
APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UIStatusBarHidden 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /Application/macOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIconFile 10 | 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleVersion 22 | 1 23 | LSMinimumSystemVersion 24 | $(MACOSX_DEPLOYMENT_TARGET) 25 | NSMainStoryboardFile 26 | Main 27 | NSPrincipalClass 28 | NSApplication 29 | 30 | 31 | -------------------------------------------------------------------------------- /Application/main.m: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Application entry point for all platforms 6 | */ 7 | 8 | #if defined(TARGET_IOS) || defined(TARGET_TVOS) 9 | #import 10 | #import 11 | #import "AAPLAppDelegate.h" 12 | #else 13 | #import 14 | #endif 15 | 16 | #if defined(TARGET_IOS) || defined(TARGET_TVOS) 17 | 18 | int main(int argc, char * argv[]) { 19 | 20 | #if TARGET_OS_SIMULATOR 21 | #error No simulator support for Metal API. 
Must build for a device 22 | #endif 23 | 24 | @autoreleasepool { 25 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AAPLAppDelegate class])); 26 | } 27 | } 28 | 29 | #elif defined(TARGET_MACOS) 30 | 31 | int main(int argc, const char * argv[]) { 32 | return NSApplicationMain(argc, argv); 33 | } 34 | 35 | #endif 36 | -------------------------------------------------------------------------------- /Application/tvOS/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /Application/tvOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UIMainStoryboardFile 24 | Main 25 | UIRequiredDeviceCapabilities 26 | 27 | arm64 28 | 29 | UIUserInterfaceStyle 30 | Automatic 31 | 32 | 33 | -------------------------------------------------------------------------------- /Configuration/SampleCode.xcconfig: -------------------------------------------------------------------------------- 1 | // 2 | // See LICENSE folder for this sample’s licensing information. 3 | // 4 | // SampleCode.xcconfig 5 | // 6 | 7 | // The `SAMPLE_CODE_DISAMBIGUATOR` configuration is to make it easier to build 8 | // and run a sample code project. Once you set your project's development team, 9 | // you'll have a unique bundle identifier. This is because the bundle identifier 10 | // is derived based on the 'SAMPLE_CODE_DISAMBIGUATOR' value. Do not use this 11 | // approach in your own projects—it's only useful for sample code projects because 12 | // they are frequently downloaded and don't have a development team set. 13 | SAMPLE_CODE_DISAMBIGUATOR=${DEVELOPMENT_TEAM} 14 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // CoreVideoDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/23/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // CoreVideoDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/23/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 
20 | return YES; 21 | } 22 | 23 | 24 | - (void)applicationWillResignActive:(UIApplication *)application { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 27 | } 28 | 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | } 34 | 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application { 37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 38 | } 39 | 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application { 47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 48 | } 49 | 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | 
"author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | UISupportedInterfaceOrientations~ipad 38 | 39 | UIInterfaceOrientationPortrait 40 | UIInterfaceOrientationPortraitUpsideDown 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // CoreVideoDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/23/19. 6 | // Copyright © 2019 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // CoreVideoDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/23/19. 6 | // Copyright © 2019 Apple. All rights reserved. 
7 | // 8 | 9 | #import "ViewController.h" 10 | 11 | #import "BGDecodeEncode.h" 12 | 13 | #import "CGFrameBuffer.h" 14 | 15 | @interface ViewController () 16 | 17 | @property (nonatomic, retain) IBOutlet UIImageView *imageView; 18 | 19 | @end 20 | 21 | @implementation ViewController 22 | 23 | - (void)viewDidLoad { 24 | [super viewDidLoad]; 25 | 26 | NSAssert(self.imageView, @"imageView"); 27 | 28 | // Decode a single frame of H264 video to YCbCr data contained in a CoreVideo buffer 29 | 30 | //NSString *resFilename = @"QuickTime_Test_Pattern_HD.mov"; 31 | //NSString *resFilename = @"Rec709Sample.mp4"; 32 | //NSString *resFilename = @"Gamma_test_HD_75Per_24BPP_sRGB_HD.m4v"; 33 | NSString *resFilename = @"osxcolor_test_image_iPad_2048_1536.m4v"; 34 | 35 | NSArray *cvPixelBuffers = [BGDecodeEncode recompressKeyframesOnBackgroundThread:resFilename 36 | frameDuration:1.0/30 37 | renderSize:CGSizeMake(2048, 1536) 38 | aveBitrate:0]; 39 | 40 | CVPixelBufferRef cvPixelBuffer = (__bridge CVPixelBufferRef) cvPixelBuffers[0]; 41 | 42 | int width = (int) CVPixelBufferGetWidth(cvPixelBuffer); 43 | int height = (int) CVPixelBufferGetHeight(cvPixelBuffer); 44 | 45 | NSLog(@"returned %d YCbCr texture : %d x %d", (int)cvPixelBuffers.count, width, height); 46 | 47 | // Access the default HDTV -> sRGB conversion used by CoreVideo via CoreImage API 48 | // that accepts a YCbCr tagged buffer. 49 | 50 | CIImage *rgbFromCVImage = [CIImage imageWithCVPixelBuffer:cvPixelBuffer]; 51 | 52 | CIContext *context = [CIContext contextWithOptions:nil]; 53 | 54 | CGImageRef outCGImageRef = [context createCGImage:rgbFromCVImage fromRect:rgbFromCVImage.extent]; 55 | 56 | UIImage *uiImgFromCIImage = [UIImage imageWithCGImage:outCGImageRef]; 57 | 58 | self.imageView.image = uiImgFromCIImage; 59 | 60 | // Dump PNG that contains the decoded sRGB output pixels 61 | 62 | if ((1)) { 63 | CGFrameBuffer *cgFramebuffer = [CGFrameBuffer cGFrameBufferWithBppDimensions:24 width:width height:height]; 64 | 65 | // Explicitly indicate that framebuffer is in terms of sRGB pixels 66 | CGColorSpaceRef cs = CGColorSpaceCreateWithName(kCGColorSpaceSRGB); 67 | cgFramebuffer.colorspace = cs; 68 | CGColorSpaceRelease(cs); 69 | 70 | [cgFramebuffer renderCGImage:outCGImageRef]; 71 | 72 | if ((1)) { 73 | // Dump RGB of first pixel 74 | uint32_t *pixelPtr = (uint32_t*) cgFramebuffer.pixels; 75 | uint32_t pixel = pixelPtr[0]; 76 | int B = pixel & 0xFF; 77 | int G = (pixel >> 8) & 0xFF; 78 | int R = (pixel >> 16) & 0xFF; 79 | printf("first pixel (R G B) (%3d %3d %3d)\n", R, G, B); 80 | } 81 | 82 | // Dump generated BGRA in sRGB colorspace as PNG 83 | 84 | { 85 | NSString *filename = [NSString stringWithFormat:@"dump_RGB_from_YCbCr_CoreVideo.png"]; 86 | NSString *tmpDir = NSTemporaryDirectory(); 87 | NSString *path = [tmpDir stringByAppendingPathComponent:filename]; 88 | NSData *pngData = [cgFramebuffer formatAsPNG]; 89 | 90 | BOOL worked = [pngData writeToFile:path atomically:TRUE]; 91 | assert(worked); 92 | 93 | NSLog(@"wrote %@", path); 94 | } 95 | } 96 | } 97 | 98 | 99 | @end 100 | -------------------------------------------------------------------------------- /CoreVideoDecodeiOS/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // CoreVideoDecodeiOS 4 | // 5 | // Created by Mo DeJong on 1/23/19. 6 | // Copyright © 2019 Apple. All rights reserved. 
7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /Documentation/BGRA8UnormBitLayout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Documentation/BGRA8UnormBitLayout.png -------------------------------------------------------------------------------- /Documentation/TextureCoordinates.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Documentation/TextureCoordinates.png -------------------------------------------------------------------------------- /EmptyiOS/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // EmptyiOS 4 | // 5 | // Created by Mo DeJong on 12/29/18. 6 | // Copyright © 2018 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /EmptyiOS/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // EmptyiOS 4 | // 5 | // Created by Mo DeJong on 12/29/18. 6 | // Copyright © 2018 Apple. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | 24 | - (void)applicationWillResignActive:(UIApplication *)application { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 27 | } 28 | 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | } 34 | 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application { 37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 38 | } 39 | 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. 
If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application { 47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 48 | } 49 | 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /EmptyiOS/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /EmptyiOS/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /EmptyiOS/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /EmptyiOS/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /EmptyiOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | 
CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | UISupportedInterfaceOrientations~ipad 38 | 39 | UIInterfaceOrientationPortrait 40 | UIInterfaceOrientationPortraitUpsideDown 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /EmptyiOS/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // EmptyiOS 4 | // 5 | // Created by Mo DeJong on 12/29/18. 6 | // Copyright © 2018 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /EmptyiOS/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // EmptyiOS 4 | // 5 | // Created by Mo DeJong on 12/29/18. 6 | // Copyright © 2018 Apple. All rights reserved. 7 | // 8 | 9 | #import "ViewController.h" 10 | 11 | @interface ViewController () 12 | 13 | @end 14 | 15 | @implementation ViewController 16 | 17 | - (void)viewDidLoad { 18 | [super viewDidLoad]; 19 | // Do any additional setup after loading the view, typically from a nib. 20 | } 21 | 22 | 23 | @end 24 | -------------------------------------------------------------------------------- /EmptyiOS/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // EmptyiOS 4 | // 5 | // Created by Mo DeJong on 12/29/18. 6 | // Copyright © 2018 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /EmptyiOSTests/EmptyiOSTests.m: -------------------------------------------------------------------------------- 1 | // 2 | // EmptyiOSTests.m 3 | // EmptyiOSTests 4 | // 5 | // Created by Mo DeJong on 12/29/18. 6 | // Copyright © 2018 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface EmptyiOSTests : XCTestCase 12 | 13 | @end 14 | 15 | @implementation EmptyiOSTests 16 | 17 | 18 | @end 19 | -------------------------------------------------------------------------------- /EmptyiOSTests/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | BNDL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | 22 | 23 | -------------------------------------------------------------------------------- /LICENSE/LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright © 2018 Apple Inc. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | -------------------------------------------------------------------------------- /MetalBT709Decoder.xcodeproj/.xcodesamplecode.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MetalBT709Decoder 2 | 3 | Proper rendering of BT.709 encoded H.264 video using Metal 4 | 5 | ## Overview 6 | 7 | This project is adapted from the Apple BasicTexturing example code. Its render logic addresses the gamma adjustment problem found in Apple-provided example code like AVBasicVideoOutput (and other projects). While that rendering logic works perfectly for video sources encoded with linear gamma, real world BT.709 video uses the non-linear gamma transfer function defined in the specification, so real world video authored to the BT.709 specification will not render properly. This project addresses the problem by including gamma adjustment in the shader logic and by making use of a two pass render process that first decodes the non-linear values and then rescales the sRGB encoded pixels into an MTKView. 8 | 9 | ## Status 10 | 11 | This Metal logic will render BT.709 YCbCr data to an sRGB texture. The implementation takes care to get gamma decoding right according to the BT.709 specification. 12 | 13 | ## Decoding Speed 14 | 15 | The decoder targets the highest quality render possible given an H.264 source with 4:2:0 YCbCr encoding. The implementation is very fast and supports full screen output at 60 FPS. 16 | 17 | ## Implementation 18 | 19 | See AAPLRenderer.m and AAPLShaders.metal for the core GPU rendering logic. This render logic works on both iOS and macOS. 20 | -------------------------------------------------------------------------------- /Renderer/AAPLImage.h: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Header for a very simple container for image data 6 | */ 7 | 8 | #import <Foundation/Foundation.h> 9 | 10 | // Our image 11 | @interface AAPLImage : NSObject 12 | 13 | /// Initialize this image by loading a *very* simple TGA file. Will not load compressed, paletted, 14 | // flipped, or color mapped images.
Only supports TGA files with 32 bits per pixel 15 | -(nullable instancetype) initWithTGAFileAtLocation:(nonnull NSURL *)location; 16 | 17 | // Width of image in pixels 18 | @property (nonatomic, readonly) NSUInteger width; 19 | 20 | // Height of image in pixels 21 | @property (nonatomic, readonly) NSUInteger height; 22 | 23 | // Image data in 32-bits-per-pixel (bpp) BGRA form (which is equivalent to MTLPixelFormatBGRA8Unorm) 24 | @property (nonatomic, readonly, nonnull) NSData *data; 25 | 26 | @end 27 | -------------------------------------------------------------------------------- /Renderer/AAPLImage.m: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Implementation of a very simple container for image data 6 | */ 7 | 8 | #import "AAPLImage.h" 9 | #include 10 | 11 | @implementation AAPLImage 12 | 13 | -(nullable instancetype) initWithTGAFileAtLocation:(nonnull NSURL *)tgaLocation 14 | { 15 | self = [super init]; 16 | if(self) 17 | { 18 | NSString * fileExtension = tgaLocation.pathExtension; 19 | 20 | if(!([fileExtension caseInsensitiveCompare:@"TGA"] == NSOrderedSame)) 21 | { 22 | NSLog(@"This image loader only loads TGA files"); 23 | return nil; 24 | } 25 | 26 | // Structure fitting the layout of a TGA header containing image metadata. 27 | typedef struct __attribute__ ((packed)) TGAHeader 28 | { 29 | uint8_t IDSize; // Size of ID info following header 30 | uint8_t colorMapType; // Whether this is a paletted image 31 | uint8_t imageType; // type of image 0=none, 1=indexed, 2=rgb, 3=grey, +8=rle packed 32 | 33 | int16_t colorMapStart; // Offset to color map in palette 34 | int16_t colorMapLength; // Number of colors in palette 35 | uint8_t colorMapBpp; // number of bits per palette entry 36 | 37 | uint16_t xOffset; // Number of pixels to the right to start of image 38 | uint16_t yOffset; // Number of pixels down to start of image 39 | uint16_t width; // Width in pixels 40 | uint16_t height; // Height in pixels 41 | uint8_t bitsPerPixel; // Bits per pixel 8,16,24,32 42 | uint8_t descriptor; // Descriptor bits (flipping, etc) 43 | } TGAHeader; 44 | 45 | NSError * error; 46 | 47 | // Copy the entire file to this fileData variable 48 | NSData *fileData = [[NSData alloc] initWithContentsOfURL:tgaLocation 49 | options:0x0 50 | error:&error]; 51 | 52 | if (!fileData) 53 | { 54 | NSLog(@"Could not open TGA file: %@", error.localizedDescription); 55 | return nil; 56 | } 57 | 58 | TGAHeader *tgaInfo = (TGAHeader *) fileData.bytes; 59 | 60 | if(tgaInfo->imageType != 2) { 61 | NSLog(@"This image loader only supports non-compressed BGR(A) TGA files"); 62 | return nil; 63 | } 64 | 65 | if(tgaInfo->colorMapType) 66 | { 67 | NSLog(@"This image loader doesn't support TGA files with a colormap"); 68 | return nil; 69 | } 70 | 71 | if(tgaInfo->xOffset || tgaInfo->yOffset) 72 | { 73 | NSLog(@"This image loader doesn't support TGA files with offsets"); 74 | return nil; 75 | } 76 | 77 | if(!(tgaInfo->bitsPerPixel == 32 || tgaInfo->bitsPerPixel == 24)) 78 | { 79 | NSLog(@"This image loader only supports 24-bit and 32-bit TGA files"); 80 | return nil; 81 | } 82 | 83 | if(tgaInfo->bitsPerPixel == 32) 84 | { 85 | if((tgaInfo->descriptor & 0xF) != 8) 86 | { 87 | NSLog(@"Image loader only supports 32-bit TGA files with 8 bits of alpha"); 88 | } 89 | } 90 | else if(tgaInfo->descriptor) 91 | { 92 | NSLog(@"Image loader only supports 24-bit TGA files with the default descriptor"); 93 | return nil;
94 | } 95 | 96 | _width = tgaInfo->width; 97 | _height = tgaInfo->height; 98 | 99 | // Calculate the byte size of our image data. Since we store our image data as 100 | // 32-bits per pixel BGRA data 101 | NSUInteger dataSize = _width * _height * 4; 102 | 103 | if(tgaInfo->bitsPerPixel == 24) 104 | { 105 | // Metal will not understand an image with 24-bpp format so we must convert our 106 | // TGA data from the 24-bit BGR format to a 32-bit BGRA format that Metal does 107 | // understand (as MTLPixelFormatBGRA8Unorm) 108 | 109 | NSMutableData *mutableData = [[NSMutableData alloc] initWithLength:dataSize]; 110 | 111 | // TGA spec says the image data is immediately after the header and the ID so set 112 | // the pointer to file's start + size of the header + size of the ID 113 | // Initialize a source pointer with the source image data that's in BGR form 114 | uint8_t *srcImageData = ((uint8_t*)fileData.bytes + 115 | sizeof(TGAHeader) + 116 | tgaInfo->IDSize); 117 | 118 | // Initialize a destination pointer to which you'll store the converted BGRA 119 | // image data 120 | uint8_t *dstImageData = mutableData.mutableBytes; 121 | 122 | // For every row of the image 123 | for(NSUInteger y = 0; y < _height; y++) 124 | { 125 | // For every column of the current row 126 | for(NSUInteger x = 0; x < _width; x++) 127 | { 128 | // Calculate the index for the first byte of the pixel you're 129 | // converting in both the source and destination images 130 | NSUInteger srcPixelIndex = 3 * (y * _width + x); 131 | NSUInteger dstPixelIndex = 4 * (y * _width + x); 132 | 133 | // Copy BGR channels from the source to the destination 134 | // Set the alpha channel of the destination pixel to 255 135 | dstImageData[dstPixelIndex + 0] = srcImageData[srcPixelIndex + 0]; 136 | dstImageData[dstPixelIndex + 1] = srcImageData[srcPixelIndex + 1]; 137 | dstImageData[dstPixelIndex + 2] = srcImageData[srcPixelIndex + 2]; 138 | dstImageData[dstPixelIndex + 3] = 255; 139 | } 140 | } 141 | _data = mutableData; 142 | } 143 | else 144 | { 145 | // Metal will understand an image with 32-bpp format so we must only create 146 | // an NSData object with the file's image data 147 | 148 | // TGA spec says the image data is immediately after the header and the ID so set 149 | // the pointer to file's start + size of the header + size of the ID 150 | uint8_t *srcImageData = ((uint8_t*)fileData.bytes + 151 | sizeof(TGAHeader) + 152 | tgaInfo->IDSize); 153 | 154 | _data = [[NSData alloc] initWithBytes:srcImageData 155 | length:dataSize]; 156 | } 157 | } 158 | 159 | return self; 160 | } 161 | 162 | @end 163 | -------------------------------------------------------------------------------- /Renderer/AAPLRenderer.h: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Header for renderer class which performs Metal setup and per frame rendering 6 | */ 7 | 8 | @import MetalKit; 9 | 10 | // Our platform independent renderer class 11 | @interface AAPLRenderer : NSObject 12 | 13 | - (nonnull instancetype)initWithMetalKitView:(nonnull MTKView *)mtkView; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /Renderer/AAPLShaderTypes.h: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 
3 | 4 | Abstract: 5 | Header containing types and enum constants shared between Metal shaders and C/ObjC source 6 | */ 7 | 8 | #ifndef AAPLShaderTypes_h 9 | #define AAPLShaderTypes_h 10 | 11 | #include <simd/simd.h> 12 | 13 | // Buffer index values shared between shader and C code to ensure Metal shader buffer inputs match 14 | // Metal API buffer set calls 15 | typedef enum AAPLVertexInputIndex 16 | { 17 | AAPLVertexInputIndexVertices = 0, 18 | } AAPLVertexInputIndex; 19 | 20 | // Texture index values shared between shader and C code to ensure Metal shader buffer inputs match 21 | // Metal API texture set calls 22 | typedef enum 23 | { 24 | AAPLTextureIndexBaseColor = 0, 25 | } AAPLTextureIndex; 26 | 27 | typedef enum 28 | { 29 | AAPLTextureIndexYPlane = 0, 30 | AAPLTextureIndexCbCrPlane = 1, 31 | AAPLTextureIndexAlphaPlane = 2, 32 | } AAPLTextureYCbCrIndex; 33 | 34 | // This structure defines the layout of each vertex in the array of vertices set as an input to our 35 | // Metal vertex shader. Since this header is shared between our .metal shader and C code, 36 | // we can be sure that the layout of the vertex array in the code matches the layout that 37 | // our vertex shader expects 38 | typedef struct 39 | { 40 | // Positions in pixel space (i.e. a value of 100 indicates 100 pixels from the origin/center) 41 | vector_float2 position; 42 | 43 | // 2D texture coordinate 44 | vector_float2 textureCoordinate; 45 | } AAPLVertex; 46 | 47 | #endif /* AAPLShaderTypes_h */ 48 | -------------------------------------------------------------------------------- /Renderer/AAPLShaders.metal: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Metal shaders used for this sample 6 | */ 7 | 8 | #include <metal_stdlib> 9 | #include <simd/simd.h> 10 | 11 | using namespace metal; 12 | 13 | // Include header shared between this Metal shader code and C code executing Metal API commands 14 | #import "AAPLShaderTypes.h" 15 | 16 | // Vertex shader outputs and per-fragment inputs. Includes clip-space position and vertex outputs 17 | // interpolated by the rasterizer and fed to each fragment generated by clip-space primitives. 18 | typedef struct 19 | { 20 | // The [[position]] attribute qualifier of this member indicates this value is the clip space 21 | // position of the vertex when this structure is returned from the vertex shader 22 | float4 clipSpacePosition [[position]]; 23 | 24 | // Since this member does not have a special attribute qualifier, the rasterizer will 25 | // interpolate its value with values of other vertices making up the triangle and 26 | // pass that interpolated value to the fragment shader for each fragment in that triangle. 27 | float2 textureCoordinate; 28 | 29 | } RasterizerData; 30 | 31 | // Vertex function that renders a full screen flipped texture 32 | 33 | vertex RasterizerData 34 | identityVertexShader(uint vertexID [[ vertex_id ]], 35 | constant AAPLVertex *vertexArray [[ buffer(0) ]]) 36 | { 37 | RasterizerData out; 38 | 39 | // Index into our array of positions to get the current vertex 40 | // Our positions are specified in pixel dimensions (i.e. a value of 100 is 100 pixels from 41 | // the origin) 42 | float2 pixelSpacePosition = vertexArray[vertexID].position.xy; 43 | 44 | // The output position of every vertex shader is in clip space (also known as normalized device 45 | // coordinate space, or NDC).
68 |
69 | // Fragment shader that can do a simple rescale. Note that the input
70 | // and output are float here, as opposed to half, to support a 16 bit
71 | // float input texture.
72 |
73 | fragment float4
74 | samplingShader(RasterizerData in [[stage_in]],
75 |                texture2d<float> colorTexture [[ texture(AAPLTextureIndexBaseColor) ]])
76 | {
77 |     constexpr sampler textureSampler (mag_filter::linear,
78 |                                       min_filter::linear);
79 |
80 |     // Sample the texture to obtain a color
81 |     const float4 colorSample = colorTexture.sample(textureSampler, in.textureCoordinate);
82 |
83 |     // We return the color of the texture
84 |     return colorSample;
85 | }
86 |
87 | // BT.709 rendering fragment shader
88 |
89 | // FIXME: note that the Metal "fast math" option would automatically
90 | // replace pow() with exp2(y * log2(x)). This may or may not matter;
91 | // an actual 2x performance improvement can be had by simply
92 | // doing 1 render pass instead of 2 when rendering at the exact pixel size.
93 |
94 | static inline
95 | float BT709_nonLinearNormToLinear(float normV) {
96 |
97 |   if (normV < 0.081f) {
98 |     normV *= (1.0f / 4.5f);
99 |   } else {
100 |     const float a = 0.099f;
101 |     const float gamma = 1.0f / 0.45f; // ~2.22
102 |     normV = (normV + a) * (1.0f / (1.0f + a));
103 |     normV = pow(normV, gamma);
104 |   }
105 |
106 |   return normV;
107 | }
108 |
109 | #define APPLE_GAMMA_196 (1.960938f)
110 |
111 | static inline
112 | float Apple196_nonLinearNormToLinear(float normV) {
113 |   const float xIntercept = 0.05583828f;
114 |
115 |   if (normV < xIntercept) {
116 |     normV *= (1.0f / 16.0f);
117 |   } else {
118 |     const float gamma = APPLE_GAMMA_196;
119 |     normV = pow(normV, gamma);
120 |   }
121 |
122 |   return normV;
123 | }
124 |
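// For reference, a sketch (not in the original source) of the inverse mapping,
// linear back to the Apple 1.96 non-linear space, derived by inverting the
// function above. The linear segment boundary is xIntercept / 16:
//
// static inline
// float Apple196_linearNormToNonLinear(float normV) {
//   const float yIntercept = 0.05583828f / 16.0f; // ~0.00348989
//
//   if (normV < yIntercept) {
//     normV *= 16.0f;
//   } else {
//     normV = pow(normV, 1.0f / APPLE_GAMMA_196);
//   }
//
//   return normV;
// }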
125 | // Convert a non-linear log value to a linear value.
126 | // Note that normV must be normalized in the range [0.0 1.0].
127 |
128 | static inline
129 | float sRGB_nonLinearNormToLinear(float normV)
130 | {
131 |   if (normV <= 0.04045f) {
132 |     normV *= (1.0f / 12.92f);
133 |   } else {
134 |     const float a = 0.055f;
135 |     const float gamma = 2.4f;
136 |     //const float gamma = 1.0f / (1.0f / 2.4f);
137 |     normV = (normV + a) * (1.0f / (1.0f + a));
138 |     normV = pow(normV, gamma);
139 |   }
140 |
141 |   return normV;
142 | }
143 |
144 | // Unused since this does not decode video colors to linear light
145 |
146 | //static inline
147 | //float4 BT709_gamma_decode(float4 rgba) {
148 | //  rgba.r = BT709_nonLinearNormToLinear(rgba.r);
149 | //  rgba.g = BT709_nonLinearNormToLinear(rgba.g);
150 | //  rgba.b = BT709_nonLinearNormToLinear(rgba.b);
151 | //  return rgba;
152 | //}
153 |
154 | static inline
155 | float4 sRGB_gamma_decode(float4 rgba) {
156 |   rgba.r = sRGB_nonLinearNormToLinear(rgba.r);
157 |   rgba.g = sRGB_nonLinearNormToLinear(rgba.g);
158 |   rgba.b = sRGB_nonLinearNormToLinear(rgba.b);
159 |   return rgba;
160 | }
161 |
162 | // Note that the Apple gamma 1.961 decoding logic
163 | // is used when a BT709 flag is detected for an
164 | // input pixel buffer. The original BT709 gamma
165 | // function does not actually convert video
166 | // graded color values back to linear light.
167 |
168 | static inline
169 | float4 Apple196_gamma_decode(float4 rgba) {
170 |   rgba.r = Apple196_nonLinearNormToLinear(rgba.r);
171 |   rgba.g = Apple196_nonLinearNormToLinear(rgba.g);
172 |   rgba.b = Apple196_nonLinearNormToLinear(rgba.b);
173 |   return rgba;
174 | }
175 |
176 | // Common BT.709 decode logic shared by the 2 implementations;
177 | // this method accepts (Y Cb Cr) and returns gamma encoded
178 | // values if the original data was gamma encoded.
179 |
180 | static inline
181 | float4 BT709_decode(const float Y, const float Cb, const float Cr) {
182 |   // Y is a byte value normalized from the range [0 255]
183 |   //
184 |   // Note that the matrix multiply will adjust
185 |   // this byte normalized range to account for
186 |   // the limited range [16 235]
187 |   //
188 |   // Note that while a half float can be read from
189 |   // the input textures, the values need to be full float
190 |   // from this point forward since the bias values
191 |   // need to be precise to avoid toggling blue and green
192 |   // values depending on rounding.
193 |
194 |   float Yn = (Y - (16.0f/255.0f));
195 |
196 |   // Normalize Cb and Cr with zero at 128 and range [0 255]
197 |   // Note that matrix will adjust to limited range [16 240]
198 |
199 |   float Cbn = (Cb - (128.0f/255.0f));
200 |   float Crn = (Cr - (128.0f/255.0f));
201 |
202 |   // Zero out the UV colors
203 |   //Cbn = 0.0h;
204 |   //Crn = 0.0h;
205 |
206 |   // Represent half values as full precision float
207 |   float3 YCbCr = float3(Yn, Cbn, Crn);
208 |
209 |   // BT.709 (HDTV)
210 |   // (col0) (col1) (col2)
211 |   //
212 |   // 1.1644  0.0000  1.7927
213 |   // 1.1644 -0.2132 -0.5329
214 |   // 1.1644  2.1124  0.0000
215 |
216 |   // precise to 4 decimal places
217 |
218 |   const float3x3 kColorConversion709 = float3x3(
219 |     // column 0
220 |     float3(1.1644f, 1.1644f, 1.1644f),
221 |     // column 1
222 |     float3(0.0f, -0.2132f, 2.1124f),
223 |     // column 2
224 |     float3(1.7927f, -0.5329f, 0.0f));
225 |
226 |   // matrix to vector mult
227 |   float3 rgb = kColorConversion709 * YCbCr;
228 |
229 |   // float Rn = (Yn * BT709Mat[0]) + (Cbn * BT709Mat[1]) + (Crn * BT709Mat[2]);
230 |   // float Gn = (Yn * BT709Mat[3]) + (Cbn * BT709Mat[4]) + (Crn * BT709Mat[5]);
231 |   // float Bn = (Yn * BT709Mat[6]) + (Cbn * BT709Mat[7]) + (Crn * BT709Mat[8]);
232 |
233 |   // float3 rgb;
234 |   // rgb.r = (YCbCr[0] * kColorConversion709[0][0]) + (YCbCr[1] * kColorConversion709[1][0]) + (YCbCr[2] * kColorConversion709[2][0]);
235 |   // rgb.g = (YCbCr[0] * kColorConversion709[0][1]) + (YCbCr[1] * kColorConversion709[1][1]) + (YCbCr[2] * kColorConversion709[2][1]);
236 |   // rgb.b = (YCbCr[0] * kColorConversion709[0][2]) + (YCbCr[1] * kColorConversion709[1][2]) + (YCbCr[2] * kColorConversion709[2][2]);
237 |
238 |   rgb = saturate(rgb);
239 |
240 |   // Note that gamma decoding seems to have very little impact
241 |   // on performance since the entire shader is IO bound.
242 |
243 |   return float4(rgb.r, rgb.g, rgb.b, 1.0f);
244 | }
245 |
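// A quick sanity check of the limited range scaling above (editor sketch, not
// from the original source). Reference white is Y=235 with Cb=Cr=128:
//
//   Yn = (235 - 16) / 255 = 219/255 ~ 0.8588
//   R = G = B = 1.1644 * 0.8588 ~ 1.0
//
// Reference black is Y=16, giving Yn = 0 and R = G = B = 0. The 1.1644 scale
// factor is just 255/219, undoing the [16 235] limited range encoding.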
246 | // Decode the Y portion of a BT.709 input value knowing
247 | // that Cb and Cr are both zero.
248 |
249 | static inline
250 | float BT709_decodeAlpha(const float Y) {
251 |   // Y is a byte value normalized from the range [0 255]
252 |   //
253 |   // Note that the matrix multiply will adjust
254 |   // this byte normalized range to account for
255 |   // the limited range [16 235]
256 |   //
257 |   // Note that while a half float can be read from
258 |   // the input textures, the values need to be full float
259 |   // from this point forward since the bias values
260 |   // need to be precise to avoid toggling blue and green
261 |   // values depending on rounding.
262 |
263 |   float Yn = (Y - (16.0f/255.0f));
264 |
265 |   float YMult = 1.1644f;
266 |
267 |   Yn = Yn * YMult;
268 |   Yn = saturate(Yn);
269 |
270 |   return Yn;
271 | }
272 |
273 | // Decode with Apple 196 gamma
274 |
275 | fragment float4
276 | BT709ToLinearSRGBFragment(RasterizerData in [[stage_in]],
277 |                           texture2d<half> inYTexture [[texture(AAPLTextureIndexYPlane)]],
278 |                           texture2d<half> inUVTexture [[texture(AAPLTextureIndexCbCrPlane)]]
279 |                           )
280 | {
281 |   constexpr sampler textureSampler (mag_filter::nearest, min_filter::nearest);
282 |
283 |   float Y = float(inYTexture.sample(textureSampler, in.textureCoordinate).r);
284 |   half2 uvSamples = inUVTexture.sample(textureSampler, in.textureCoordinate).rg;
285 |
286 |   float Cb = float(uvSamples[0]);
287 |   float Cr = float(uvSamples[1]);
288 |
289 |   float4 pixel = BT709_decode(Y, Cb, Cr);
290 |   return Apple196_gamma_decode(pixel);
291 | }
292 |
293 | // Decode with sRGB gamma
294 |
295 | fragment float4
296 | sRGBToLinearSRGBFragment(RasterizerData in [[stage_in]],
297 |                          texture2d<half> inYTexture [[texture(AAPLTextureIndexYPlane)]],
298 |                          texture2d<half> inUVTexture [[texture(AAPLTextureIndexCbCrPlane)]]
299 |                          )
300 | {
301 |   constexpr sampler textureSampler (mag_filter::nearest, min_filter::nearest);
302 |
303 |   float Y = float(inYTexture.sample(textureSampler, in.textureCoordinate).r);
304 |   half2 uvSamples = inUVTexture.sample(textureSampler, in.textureCoordinate).rg;
305 |
306 |   float Cb = float(uvSamples[0]);
307 |   float Cr = float(uvSamples[1]);
308 |
309 |   float4 pixel = BT709_decode(Y, Cb, Cr);
310 |   return sRGB_gamma_decode(pixel);
311 | }
312 |
313 | // Decode without a gamma adjustment, original input is linear
314 | // and output will also be linear.
315 |
316 | fragment float4
317 | LinearToLinearSRGBFragment(RasterizerData in [[stage_in]],
318 |                            texture2d<half> inYTexture [[texture(AAPLTextureIndexYPlane)]],
319 |                            texture2d<half> inUVTexture [[texture(AAPLTextureIndexCbCrPlane)]]
320 |                            )
321 | {
322 |   constexpr sampler textureSampler (mag_filter::nearest, min_filter::nearest);
323 |
324 |   float Y = float(inYTexture.sample(textureSampler, in.textureCoordinate).r);
325 |   half2 uvSamples = inUVTexture.sample(textureSampler, in.textureCoordinate).rg;
326 |
327 |   float Cb = float(uvSamples[0]);
328 |   float Cr = float(uvSamples[1]);
329 |
330 |   float4 pixel = BT709_decode(Y, Cb, Cr);
331 |   return pixel;
332 | }
333 |
334 | // Decode with compute kernel and Apple 196 gamma function
335 |
336 | kernel void
337 | BT709ToLinearSRGBKernel(texture2d<half, access::read> inYTexture [[texture(0)]],
338 |                         texture2d<half, access::read> inUVTexture [[texture(1)]],
339 |                         texture2d<float, access::write> outTexture [[texture(2)]],
340 |                         ushort2 gid [[thread_position_in_grid]])
341 | {
342 |   // Check if the pixel is within the bounds of the output texture
343 |   if((gid.x >= outTexture.get_width()) || (gid.y >= outTexture.get_height()))
344 |   {
345 |     // Return early if the pixel is out of bounds
346 |     return;
347 |   }
348 |
349 |   float Y = float(inYTexture.read(gid).r);
350 |   half2 uvSamples = inUVTexture.read(gid/2).rg;
351 |   float Cb = float(uvSamples[0]);
352 |   float Cr = float(uvSamples[1]);
353 |
354 |   float4 pixel = BT709_decode(Y, Cb, Cr);
355 |   pixel = Apple196_gamma_decode(pixel);
356 |   outTexture.write(pixel, gid);
357 | }
358 |
359 | // Decode with compute kernel and sRGB gamma function
360 |
361 | kernel void
362 | sRGBToLinearSRGBKernel(texture2d<half, access::read> inYTexture [[texture(0)]],
363 |                        texture2d<half, access::read> inUVTexture [[texture(1)]],
364 |                        texture2d<float, access::write> outTexture [[texture(2)]],
365 |                        ushort2 gid [[thread_position_in_grid]])
366 | {
367 |   // Check if the pixel is within the bounds of the output texture
368 |   if((gid.x >= outTexture.get_width()) || (gid.y >= outTexture.get_height()))
369 |   {
370 |     // Return early if the pixel is out of bounds
371 |     return;
372 |   }
373 |
374 |   float Y = float(inYTexture.read(gid).r);
375 |   half2 uvSamples = inUVTexture.read(gid/2).rg;
376 |   float Cb = float(uvSamples[0]);
377 |   float Cr = float(uvSamples[1]);
378 |
379 |   float4 pixel = BT709_decode(Y, Cb, Cr);
380 |   pixel = sRGB_gamma_decode(pixel);
381 |   outTexture.write(pixel, gid);
382 | }
383 |
384 | // Decode without a gamma adjustment, original input is linear
385 | // and output will also be linear.
386 |
387 | kernel void
388 | LinearToLinearSRGBKernel(texture2d<half, access::read> inYTexture [[texture(0)]],
389 |                          texture2d<half, access::read> inUVTexture [[texture(1)]],
390 |                          texture2d<float, access::write> outTexture [[texture(2)]],
391 |                          ushort2 gid [[thread_position_in_grid]])
392 | {
393 |   // Check if the pixel is within the bounds of the output texture
394 |   if((gid.x >= outTexture.get_width()) || (gid.y >= outTexture.get_height()))
395 |   {
396 |     // Return early if the pixel is out of bounds
397 |     return;
398 |   }
399 |
400 |   float Y = float(inYTexture.read(gid).r);
401 |   half2 uvSamples = inUVTexture.read(gid/2).rg;
402 |   float Cb = float(uvSamples[0]);
403 |   float Cr = float(uvSamples[1]);
404 |
405 |   float4 pixel = BT709_decode(Y, Cb, Cr);
406 |   outTexture.write(pixel, gid);
407 | }
408 |
409 | // Decode with sRGB gamma and an alpha channel
410 |
411 | fragment float4
412 | sRGBToLinearSRGBFragmentAlpha(RasterizerData in [[stage_in]],
413 |                               texture2d<half> inYTexture [[texture(AAPLTextureIndexYPlane)]],
414 |                               texture2d<half> inUVTexture [[texture(AAPLTextureIndexCbCrPlane)]],
415 |                               texture2d<half> inATexture [[texture(AAPLTextureIndexAlphaPlane)]]
416 |                               )
417 | {
418 |   constexpr sampler textureSampler (mag_filter::nearest, min_filter::nearest);
419 |
420 |   float Y = float(inYTexture.sample(textureSampler, in.textureCoordinate).r);
421 |   half2 uvSamples = inUVTexture.sample(textureSampler, in.textureCoordinate).rg;
422 |
423 |   float Cb = float(uvSamples[0]);
424 |   float Cr = float(uvSamples[1]);
425 |
426 |   float4 pixel = BT709_decode(Y, Cb, Cr);
427 |   pixel = sRGB_gamma_decode(pixel);
428 |
429 |   // Load alpha value from alpha texture
430 |   float A = float(inATexture.sample(textureSampler, in.textureCoordinate).r);
431 |   A = BT709_decodeAlpha(A);
432 |   // Premultiply
433 |   // pixel.r *= A;
434 |   // pixel.g *= A;
435 |   // pixel.b *= A;
436 |   pixel.a = A;
437 |   return pixel;
438 | }
439 |
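// Note: sRGBToLinearSRGBFragmentAlpha above returns straight (non-premultiplied)
// alpha, since the premultiply lines are commented out. A minimal sketch of a
// matching blend configuration on the host side; pipelineStateDescriptor is a
// hypothetical MTLRenderPipelineDescriptor, not a name from this project:
//
// MTLRenderPipelineColorAttachmentDescriptor *ca = pipelineStateDescriptor.colorAttachments[0];
// ca.blendingEnabled = YES;
// ca.rgbBlendOperation = MTLBlendOperationAdd;
// ca.alphaBlendOperation = MTLBlendOperationAdd;
// ca.sourceRGBBlendFactor = MTLBlendFactorSourceAlpha;
// ca.destinationRGBBlendFactor = MTLBlendFactorOneMinusSourceAlpha;
// ca.sourceAlphaBlendFactor = MTLBlendFactorOne;
// ca.destinationAlphaBlendFactor = MTLBlendFactorOneMinusSourceAlpha;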
-------------------------------------------------------------------------------- /Renderer/AlphaBG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/AlphaBG.png -------------------------------------------------------------------------------- /Renderer/AlphaBGHalf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/AlphaBGHalf.png -------------------------------------------------------------------------------- /Renderer/BGDecodeEncode.h: --------------------------------------------------------------------------------
1 | //
2 | // BGDecodeEncode.h
3 | //
4 | // Created by Mo DeJong on 7/6/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 | // This module provides an easy-to-use background processing queue
9 | // that will decode frames from a H264 source and then re-encode the
10 | // frames as either opaque or transparent frames.
11 |
12 | @import Foundation;
13 |
14 | @import AVFoundation;
15 | @import CoreVideo;
16 | @import CoreImage;
17 | @import CoreMedia;
18 | @import VideoToolbox;
19 |
20 | @interface BGDecodeEncode : NSObject
21 |
22 | // Decompress and then recompress each frame of H264 video as keyframes that
23 | // can be rendered directly without holding a stream decode resource open.
24 | // If an error is encountered during the encode/decode process then FALSE
25 | // is returned (this can happen when the app is put into the background)
26 |
27 | + (BOOL) recompressKeyframes:(NSString*)resourceName
28 |                frameDuration:(float)frameDuration
29 |                   renderSize:(CGSize)renderSize
30 |                   aveBitrate:(int)aveBitrate
31 |                       frames:(NSMutableArray*)frames;
32 |
33 | // Previous API compat
34 |
35 | + (NSArray*) recompressKeyframesOnBackgroundThread:(NSString*)resourceName
36 |                                      frameDuration:(float)frameDuration
37 |                                         renderSize:(CGSize)renderSize
38 |                                         aveBitrate:(int)aveBitrate;
39 |
40 | @end
41 |
-------------------------------------------------------------------------------- /Renderer/BGDecodeEncode.m: --------------------------------------------------------------------------------
1 | //
2 | // BGDecodeEncode.m
3 | //
4 | // Created by Mo DeJong on 4/5/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 |
9 | #import "BGDecodeEncode.h"
10 |
11 | @import AVFoundation;
12 |
13 | @import CoreVideo;
14 | @import CoreImage;
15 | @import CoreMedia;
16 | @import CoreGraphics;
17 | @import VideoToolbox;
18 |
19 | //#if defined(DEBUG)
20 | //static const int dumpFramesImages = 1;
21 | //#else
22 | static const int dumpFramesImages = 0;
23 | //#endif // DEBUG
24 |
25 | #define LOGGING 1
26 | //#define LOGGING_EVERY_FRAME 1
27 |
28 | // Private API
29 |
30 | @interface BGDecodeEncode ()
31 | @end
32 |
33 | @implementation BGDecodeEncode
34 |
35 | // Return the movie decode OS type, typically kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
36 | // but could be kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange. In any case, this value
37 | // needs to match in both the encoder and decoder.
38 |
39 | + (OSType) getPixelType
40 | {
41 |   // Explicitly use video range color matrix
42 |   const OSType movieEncodePixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
43 |   //const OSType movieEncodePixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
44 |   return movieEncodePixelFormatType;
45 | }
46 |
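// Note (editor comment, not from the original source): "video range" here means
// the Y plane is encoded into [16 235] and CbCr into [16 240], while the
// FullRange variant uses the whole [0 255] byte range. The 1.1644 (255/219)
// scale factor in the Metal decode shader exists to undo exactly this video
// range encoding.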
47 | + (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)cgImage
48 |                                   renderSize:(CGSize)renderSize
49 |                                         dump:(BOOL)dump
50 |                                        asYUV:(BOOL)asYUV
51 | {
52 |   NSDictionary *options = @{
53 |     (NSString *)kCVPixelBufferCGImageCompatibilityKey: @(YES),
54 |     (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @(YES)
55 |   };
56 |
57 |   int renderWidth = (int) renderSize.width;
58 |   int renderHeight = (int) renderSize.height;
59 |
60 |   int imageWidth = (int) CGImageGetWidth(cgImage);
61 |   int imageHeight = (int) CGImageGetHeight(cgImage);
62 |
63 |   assert(imageWidth <= renderWidth);
64 |   assert(imageHeight <= renderHeight);
65 |
66 |   // FIXME: instead of creating CoreVideo buffers over and over, just create 1 and
67 |   // then keep using it to do the render operations. Could also use a pool, but
68 |   // not really needed either.
69 |
70 |   CVPixelBufferRef buffer = NULL;
71 |   CVPixelBufferCreate(kCFAllocatorDefault,
72 |                       renderWidth,
73 |                       renderHeight,
74 |                       kCVPixelFormatType_32BGRA,
75 |                       (__bridge CFDictionaryRef)options,
76 |                       &buffer);
77 |
78 |   size_t bytesPerRow, extraBytes;
79 |   bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
80 |   extraBytes = bytesPerRow - renderWidth*sizeof(uint32_t);
81 |   //NSLog(@"bytesPerRow %d extraBytes %d", (int)bytesPerRow, (int)extraBytes);
82 |
83 |   CVPixelBufferLockBaseAddress(buffer, 0);
84 |
85 |   void *baseAddress = CVPixelBufferGetBaseAddress(buffer);
86 |
87 |   //CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
88 |   CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
89 |
90 |   CGContextRef context;
91 |
92 |   context = CGBitmapContextCreate(baseAddress,
93 |                                   renderWidth,
94 |                                   renderHeight,
95 |                                   8,
96 |                                   CVPixelBufferGetBytesPerRow(buffer),
97 |                                   colorSpace,
98 |                                   kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst);
99 |
100 |   // Render frame into top left corner at exact size
101 |
102 |   CGContextClearRect(context, CGRectMake(0.0f, 0.0f, renderWidth, renderHeight));
103 |
104 |   CGContextDrawImage(context, CGRectMake(0.0f, renderHeight - imageHeight, imageWidth, imageHeight), cgImage);
105 |
106 |   CGColorSpaceRelease(colorSpace);
107 |   CGContextRelease(context);
108 |
109 |   CVPixelBufferUnlockBaseAddress(buffer, 0);
110 |
111 |   // Convert from BGRA to YUV representation
112 |
113 |   if (asYUV) {
114 |     NSDictionary *pixelAttributes = @{
115 |       (__bridge NSString*)kCVPixelBufferIOSurfacePropertiesKey : @{},
116 |       // (__bridge NSString*)kCVPixelFormatOpenGLESCompatibility : @(YES),
117 |       (__bridge NSString*)kCVPixelBufferCGImageCompatibilityKey : @(YES),
118 |       (__bridge NSString*)kCVPixelBufferCGBitmapContextCompatibilityKey : @(YES),
119 |     };
120 |
121 |     CVPixelBufferRef yuv420Buffer = NULL;
122 |     CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
123 |                                           renderWidth,
124 |                                           renderHeight,
125 |                                           [self getPixelType],
126 |                                           (__bridge CFDictionaryRef) pixelAttributes,
127 |                                           &yuv420Buffer);
128 |
129 |     CIContext *ciContext = [CIContext contextWithOptions:nil]; // renamed so it does not shadow the CGContextRef above
130 |     NSAssert(ciContext, @"CIContext");
131 |
132 |     CIImage *inImage = [CIImage imageWithCVPixelBuffer:buffer];
133 |
134 |     if (status == kCVReturnSuccess) {
135 |       [ciContext render:inImage toCVPixelBuffer:yuv420Buffer];
136 |     }
137 |
138 |     CVPixelBufferRelease(buffer);
139 |
140 |     return yuv420Buffer;
141 |   }
142 |
143 |   return buffer;
144 | }
145 |
146 | // This method accepts a pixel buffer to be encoded, along with
147 | // an encoder object and an output array that the encoded
148 | // frame will be appended to.
149 |
150 | + (BOOL) encodeAndAppendToArray:(CVPixelBufferRef)pixBuffer
151 |                     frameOffset:(int)frameOffset
152 |                      renderSize:(CGSize)renderSize
153 |              encodedH264Buffers:(NSMutableArray*)encodedH264Buffers
154 |                     resNoSuffix:(NSString*)resNoSuffix
155 | {
156 |   int width = (int) CVPixelBufferGetWidth(pixBuffer);
157 |   int height = (int) CVPixelBufferGetHeight(pixBuffer);
158 |
159 |   CGSize imgSize = CGSizeMake(width, height);
160 |
161 |   // 1920 x 1080 is Full HD and the upper limit of H264 render size for iPad devices.
162 | // When the size of the input and the output exactly match, use input buffer (much faster) 163 | 164 | // 2048 x 1536 seems to work just fine on iPad Retina 165 | 166 | //CGSize renderSize = CGSizeMake(1920, 1080); 167 | //CGSize renderSize = CGSizeMake(2048, 1536); 168 | 169 | //int renderWidth = (int) renderSize.width; 170 | //int renderHeight = (int) renderSize.height; 171 | 172 | // Render CoreVideo to a NxN square so that square pixels do not distort 173 | 174 | #if defined(LOGGING_EVERY_FRAME) 175 | NSLog(@"encode input dimensions %4d x %4d", width, height); 176 | #endif // LOGGING_EVERY_FRAME 177 | 178 | CVPixelBufferRef largerBuffer; 179 | 180 | if (CGSizeEqualToSize(imgSize, renderSize)) { 181 | // No resize needed 182 | largerBuffer = pixBuffer; 183 | 184 | CVPixelBufferRetain(largerBuffer); 185 | } else { 186 | // Do not support resizing CoreVideo buffers 187 | assert(0); 188 | } 189 | 190 | // Render CoreVideo to a NxN square so that square pixels do not distort 191 | 192 | #if defined(LOGGING_EVERY_FRAME) 193 | int largerWidth = (int) CVPixelBufferGetWidth(largerBuffer); 194 | int largerHeight = (int) CVPixelBufferGetHeight(largerBuffer); 195 | NSLog(@"encode output dimensions %4d x %4d", largerWidth, largerHeight); 196 | #endif // LOGGING_EVERY_FRAME 197 | 198 | //NSLog(@"CVPixelBufferRef: %@", pixBuffer); 199 | 200 | __block BOOL encodeFrameErrorCondition = FALSE; 201 | 202 | #if TARGET_IPHONE_SIMULATOR 203 | // No-op 204 | #else 205 | OSType bufferPixelType = CVPixelBufferGetPixelFormatType(largerBuffer); 206 | if (bufferPixelType == kCVPixelFormatType_32BGRA) { 207 | // Already converted from YUV to BGRA 208 | } else { 209 | assert([self getPixelType] == bufferPixelType); 210 | } 211 | #endif // TARGET_IPHONE_SIMULATOR 212 | 213 | BOOL worked = TRUE; 214 | 215 | [encodedH264Buffers addObject:(__bridge id)largerBuffer]; 216 | 217 | CVPixelBufferRelease(largerBuffer); 218 | 219 | // Null out block ref just to make sure 220 | //frameEncoder.sampleBufferBlock = nil; 221 | 222 | if (encodeFrameErrorCondition == TRUE) { 223 | return FALSE; 224 | } 225 | 226 | if (worked == FALSE) { 227 | return FALSE; 228 | } else { 229 | return TRUE; 230 | } 231 | }; 232 | 233 | // Given a .mov generate an array of the frames as CoreVideo buffers. 234 | // This method returns the frames as BGRA pixels or YUV frames. 
235 |
236 | + (BOOL) decodeCoreVideoFramesFromMOV:(NSString*)movPath
237 |                                 asYUV:(BOOL)asYUV
238 |                            renderSize:(CGSize)renderSize
239 |                    encodedH264Buffers:(NSMutableArray*)encodedH264Buffers
240 | {
241 |   if ([[NSFileManager defaultManager] fileExistsAtPath:movPath] == FALSE) {
242 |     return FALSE;
243 |   }
244 |
245 |   NSString *resNoSuffix = [[movPath lastPathComponent] stringByDeletingPathExtension];
246 |
247 |   // Read H.264 frames and extract YUV
248 |
249 |   NSURL *assetURL = [NSURL fileURLWithPath:movPath];
250 |   if (assetURL == nil) {
251 |     NSLog(@"asset as url failed for \"%@\"", movPath);
252 |     return FALSE;
253 |   }
254 |
255 |   NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
256 |                                                       forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
257 |
258 |   AVURLAsset *avUrlAsset = [[AVURLAsset alloc] initWithURL:assetURL options:options];
259 |
260 |   if (avUrlAsset.hasProtectedContent) {
261 |     NSLog(@"hasProtectedContent is set for \"%@\"", movPath);
262 |     return FALSE;
263 |   }
264 |
265 |   if ([avUrlAsset tracks].count == 0) {
266 |     NSLog(@"zero tracks for \"%@\"", movPath);
267 |     return FALSE;
268 |   }
269 |
270 |   NSError *assetError = nil;
271 |   AVAssetReader *aVAssetReader = [AVAssetReader assetReaderWithAsset:avUrlAsset error:&assetError];
272 |
273 |   if (aVAssetReader == nil) {
274 |     NSLog(@"aVAssetReader is nil for \"%@\"", movPath);
275 |     return FALSE;
276 |   }
277 |
278 |   if (assetError) {
279 |     NSLog(@"assetError is \"%@\" for \"%@\"", [assetError description], movPath);
280 |     return FALSE;
281 |   }
282 |
283 |   NSDictionary *videoSettings;
284 |
285 |   if (asYUV) {
286 |     videoSettings = [NSDictionary dictionaryWithObject:
287 |       [NSNumber numberWithUnsignedInt:[self getPixelType]] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
288 |   } else {
289 |     videoSettings = [NSDictionary dictionaryWithObject:
290 |       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
291 |
292 |   }
293 |
294 |   NSArray *videoTracks = [avUrlAsset tracksWithMediaType:AVMediaTypeVideo];
295 |
296 |   const int videoTrackOffset = 0;
297 |   AVAssetTrack *videoTrack = [videoTracks objectAtIndex:videoTrackOffset];
298 |
299 | #if defined(LOGGING_EVERY_FRAME)
300 |   NSArray *availableMetadataFormats = videoTrack.availableMetadataFormats;
301 |   NSLog(@"availableMetadataFormats %@", availableMetadataFormats);
302 | #endif // LOGGING_EVERY_FRAME
303 |
304 |   if (videoTrack.isSelfContained != TRUE) {
305 |     NSLog(@"videoTrack.isSelfContained must be TRUE for \"%@\"", movPath);
306 |     return FALSE;
307 |   }
308 |
309 | #if defined(LOGGING_EVERY_FRAME)
310 |   CGSize uncroppedSize = videoTrack.naturalSize;
311 |   NSLog(@"video track naturalSize w x h : %d x %d", (int)uncroppedSize.width, (int)uncroppedSize.height);
312 | #endif // LOGGING_EVERY_FRAME
313 |
314 |   // Track length in seconds, should map directly to the number of frames
315 |
316 | #if defined(LOGGING_EVERY_FRAME)
317 |   CMTimeRange timeRange = videoTrack.timeRange;
318 |   float duration = (float)CMTimeGetSeconds(timeRange.duration);
319 |   NSLog(@"video track time duration %0.3f", duration);
320 | #endif // LOGGING_EVERY_FRAME
321 |
322 |   // Don't know how many frames at this point
323 |
324 |   //int numFrames = round(duration);
325 |   //NSLog(@"estimated number of frames %d", numFrames);
326 |
327 |   AVAssetReaderTrackOutput *aVAssetReaderOutput = [[AVAssetReaderTrackOutput alloc]
328 |                                                    initWithTrack:videoTrack outputSettings:videoSettings];
329 |
330 |   if (aVAssetReaderOutput == nil) {
331 |
NSLog(@"aVAssetReaderOutput is nil for \"%@\"", movPath); 332 | return FALSE; 333 | } 334 | 335 | aVAssetReaderOutput.alwaysCopiesSampleData = FALSE; 336 | 337 | [aVAssetReader addOutput:aVAssetReaderOutput]; 338 | 339 | // start reading 340 | 341 | BOOL worked = [aVAssetReader startReading]; 342 | 343 | if (worked == FALSE) { 344 | AVAssetReaderStatus status = aVAssetReader.status; 345 | NSError *error = aVAssetReader.error; 346 | 347 | NSLog(@"status = %d", (int)status); 348 | NSLog(@"error = %@", [error description]); 349 | 350 | return FALSE; 351 | } 352 | 353 | // Read N frames as CoreVideo buffers and invoke callback 354 | 355 | BOOL allFramesEncodedSuccessfully = TRUE; 356 | 357 | // Read N frames, convert to BGRA pixels 358 | 359 | for ( int i = 0; 1; i++ ) @autoreleasepool { 360 | 361 | CMSampleBufferRef sampleBuffer = NULL; 362 | sampleBuffer = [aVAssetReaderOutput copyNextSampleBuffer]; 363 | 364 | if (sampleBuffer == NULL) { 365 | // Another frame could not be loaded, this is the normal 366 | // termination condition at the end of the file. 367 | break; 368 | } 369 | 370 | // Process BGRA data in buffer, crop and then read and combine 371 | 372 | CVImageBufferRef imageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer); 373 | if (imageBufferRef == NULL) { 374 | NSLog(@"CMSampleBufferGetImageBuffer() returned NULL at frame %d", i); 375 | allFramesEncodedSuccessfully = FALSE; 376 | break; 377 | } 378 | 379 | CVPixelBufferRef pixBuffer = imageBufferRef; 380 | 381 | BOOL worked = [self encodeAndAppendToArray:pixBuffer 382 | frameOffset:i 383 | renderSize:renderSize 384 | encodedH264Buffers:encodedH264Buffers 385 | resNoSuffix:resNoSuffix]; 386 | 387 | CFRelease(sampleBuffer); 388 | 389 | if (!worked) { 390 | allFramesEncodedSuccessfully = FALSE; 391 | break; 392 | } 393 | } 394 | 395 | [aVAssetReader cancelReading]; 396 | 397 | if (allFramesEncodedSuccessfully == FALSE) { 398 | return FALSE; 399 | } else { 400 | return TRUE; 401 | } 402 | } 403 | 404 | // Previous API compat 405 | 406 | + (NSArray*) recompressKeyframesOnBackgroundThread:(NSString*)resourceName 407 | frameDuration:(float)frameDuration 408 | renderSize:(CGSize)renderSize 409 | aveBitrate:(int)aveBitrate 410 | { 411 | NSMutableArray *encodedH264Buffers = [NSMutableArray array]; 412 | 413 | @autoreleasepool { 414 | [self recompressKeyframesOnBackgroundThreadImpl:resourceName 415 | frameDuration:frameDuration 416 | renderSize:renderSize 417 | aveBitrate:aveBitrate 418 | encodedH264Buffers:encodedH264Buffers]; 419 | } 420 | 421 | NSArray *retArr; 422 | 423 | if (encodedH264Buffers.count == 0) { 424 | retArr = nil; 425 | } else { 426 | retArr = [NSArray arrayWithArray:encodedH264Buffers]; 427 | } 428 | 429 | encodedH264Buffers = nil; 430 | 431 | return retArr; 432 | } 433 | 434 | // Decompress and then recompress each frame of H264 video as keyframes that 435 | // can be rendered directly without holding a stream decode resource open. 
436 | // If an error is encountered during the encode/decode process then FALSE
437 | // is returned (this can happen when the app is put into the background)
438 |
439 | + (BOOL) recompressKeyframes:(NSString*)resourceName
440 |                frameDuration:(float)frameDuration
441 |                   renderSize:(CGSize)renderSize
442 |                   aveBitrate:(int)aveBitrate
443 |                       frames:(NSMutableArray*)frames
444 | {
445 |   //dispatch_sync(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
446 |
447 |   @autoreleasepool {
448 |     [self recompressKeyframesOnBackgroundThreadImpl:resourceName
449 |                                        frameDuration:frameDuration
450 |                                           renderSize:renderSize
451 |                                           aveBitrate:aveBitrate
452 |                                   encodedH264Buffers:frames];
453 |   }
454 |
455 |   //});
456 |
457 |   //[NSThread sleepForTimeInterval:0.1];
458 |
459 |   BOOL worked;
460 |
461 |   if (frames.count == 0) {
462 |     worked = FALSE;
463 |   } else {
464 |     worked = TRUE;
465 |   }
466 |
467 |   return worked;
468 | }
469 |
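// A minimal usage sketch (not from the original source), assuming the bundled
// QuickTime_Test_Pattern_HD.mov is 1920 x 1080 (renderSize must match the
// movie dimensions, since this module does not resize buffers):
//
// NSMutableArray *frames = [NSMutableArray array];
// BOOL worked = [BGDecodeEncode recompressKeyframes:@"QuickTime_Test_Pattern_HD.mov"
//                                     frameDuration:1.0f/30.0f
//                                        renderSize:CGSizeMake(1920, 1080)
//                                        aveBitrate:5000000
//                                            frames:frames];
// // On success, frames holds one CVPixelBufferRef per video frame.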
470 | // This implementation is meant to be called from inside an autorelease block
471 | // so that tmp objects created in the scope of this method execution will
472 | // be cleaned up even if recompressKeyframesOnBackgroundThread is invoked
473 | // over and over in a loop or without leaving a calling scope.
474 | //
475 | // Decompress and then recompress each frame of H264 video as keyframes that
476 | // can be rendered directly without holding a stream decode resource open.
477 | // If an error is encountered during the encode/decode process then the
478 | // output array is left empty (this can happen when the app is put into the background)
479 |
480 | + (void) recompressKeyframesOnBackgroundThreadImpl:(NSString*)resourceName
481 |                                       frameDuration:(float)frameDuration
482 |                                          renderSize:(CGSize)renderSize
483 |                                          aveBitrate:(int)aveBitrate
484 |                                  encodedH264Buffers:(NSMutableArray*)encodedH264Buffers
485 | {
486 | #if defined(LOGGING)
487 |   NSLog(@"recompressKeyframesOnBackgroundThread");
488 | #endif // LOGGING
489 |
490 |   // This operation cannot be executed on the main thread!
491 |   //NSAssert([NSThread isMainThread] == FALSE, @"isMainThread");
492 |
493 |   [encodedH264Buffers removeAllObjects];
494 |
495 |   NSString* movieFilePath = nil;
496 |
497 |   if ([resourceName hasPrefix:@"/"]) {
498 |     // Fully qualified path name, load from filesystem
499 |
500 |     NSString *path = resourceName;
501 |
502 |     NSLog(@"fully qualified filename \"%@\"", path);
503 |
504 |     movieFilePath = path;
505 |
506 |     NSAssert(movieFilePath, @"movieFilePath is nil");
507 |   } else {
508 |     // App resource file name
509 |
510 |     NSString *resTail = [resourceName lastPathComponent];
511 |
512 |     movieFilePath = [[NSBundle mainBundle]
513 |                      pathForResource:resTail ofType:nil];
514 |     NSAssert(movieFilePath, @"movieFilePath is nil");
515 |   }
516 |
517 |   // Previously, asYUV was set to TRUE on device in an attempt to get the best
518 |   // performance by avoiding YUV->RGB->YUV conversion, but it seems to produce
519 |   // some slightly off colors in the reencoded video. Convert the initial movie
520 |   // data to RGB and then encode from RGB so that the defaults match whatever iOS
521 |   // is doing with 601 and 709 automatic detection.
522 |
523 |   BOOL asYUV = TRUE;
524 |
525 |   // BOOL asYUV = TRUE;
526 |   //#if TARGET_IPHONE_SIMULATOR
527 |   // asYUV = FALSE; // Force BGRA buffer when running in simulator
528 |   //#endif // TARGET_IPHONE_SIMULATOR
529 |
530 |   // Setup frame encoder that will encode each frame
531 |
532 |   //H264FrameEncoder *frameEncoder = [[H264FrameEncoder alloc] init];
533 |
534 |   // Hard coded to 24 FPS
535 |   //frameEncoder.frameDuration = 1.0f/24;
536 |   //frameEncoder.frameDuration = frameDuration;
537 |
538 |   // Larger than original but not too big
539 |
540 |   // frameEncoder.aveBitrate = 5000000;
541 |   //frameEncoder.aveBitrate = aveBitrate;
542 |
543 |   // Encode each frame, one at a time, so that total memory used is minimized
544 |
545 |   BOOL worked = [self decodeCoreVideoFramesFromMOV:movieFilePath
546 |                                              asYUV:asYUV
547 |                                         renderSize:renderSize
548 |                                 encodedH264Buffers:encodedH264Buffers];
549 |
550 |   if (worked == FALSE) {
551 |     NSLog(@"decodeCoreVideoFramesFromMOV failed for %@", movieFilePath);
552 |
553 |     [encodedH264Buffers removeAllObjects];
554 |   } else {
555 | #if defined(LOGGING)
556 |     int totalEncodeNumBytes = 0;
557 |     for ( id obj in encodedH264Buffers ) {
558 |       CMSampleBufferRef sampleBuffer = (__bridge CMSampleBufferRef) obj;
559 |       if (1 || asYUV) {
560 |         totalEncodeNumBytes += (int) CVPixelBufferGetDataSize((CVPixelBufferRef)sampleBuffer);
561 |       } else {
562 |         totalEncodeNumBytes += (int) CMSampleBufferGetSampleSize(sampleBuffer, 0);
563 |       }
564 |     }
565 |     int totalkb = totalEncodeNumBytes / 1000;
566 |     int totalmb = totalkb / 1000;
567 |     NSLog(@"decoded \"%@\" as %d frames : %d x %d", [resourceName lastPathComponent], (int)encodedH264Buffers.count, (int)renderSize.width, (int)renderSize.height);
568 |     NSLog(@"total encoded num bytes %d, %d kB, %d mB", totalEncodeNumBytes, totalkb, totalmb);
569 | #endif // LOGGING
570 |   }
571 |
572 |   //[frameEncoder endSession];
573 | }
574 |
575 | @end
576 |
-------------------------------------------------------------------------------- /Renderer/BGRAToBT709Converter.h: --------------------------------------------------------------------------------
1 | //
2 | // BGRAToBT709Converter.h
3 | //
4 | // Created by Mo DeJong on 11/25/18.
5 | //
6 | // Convert a buffer of sRGB BGRA pixels to BT.709 colorspace pixels.
7 | // This logic can do YCbCr 4:2:0 subsampling; the value here is
8 | // that input in the sRGB log space can be internally converted to
9 | // the linear RGB space and then to the BT.709 log colorspace
10 | // without a loss of precision due to rounding to linear byte step values.
11 | // The output is (Y Cb Cr) pixels stored as (B G R A) -> (Y Cb Cr X)
12 |
13 | #import <Foundation/Foundation.h>
14 |
15 | #import <CoreGraphics/CoreGraphics.h>
16 | #import <CoreVideo/CoreVideo.h>
17 |
18 | #import "BT709.h"
19 |
20 | @import Accelerate;
21 |
22 | @class CGFrameBuffer;
23 |
24 | typedef enum {
25 |   BGRAToBT709ConverterSoftware = 0,
26 |   BGRAToBT709ConverterVImage = 1,
27 |   BGRAToBT709ConverterMetal = 2
28 | } BGRAToBT709ConverterTypeEnum;
29 |
30 | @interface BGRAToBT709Converter : NSObject
31 |
32 | // BGRA -> BT709
33 |
34 | + (BOOL) convert:(uint32_t*)inBGRAPixels
35 |   outBT709Pixels:(uint32_t*)outBT709Pixels
36 |            width:(int)width
37 |           height:(int)height
38 |             type:(BGRAToBT709ConverterTypeEnum)type;
39 |
40 | // BT709 -> BGRA
41 |
42 | + (BOOL) unconvert:(uint32_t*)inBT709Pixels
43 |      outBGRAPixels:(uint32_t*)outBGRAPixels
44 |              width:(int)width
45 |             height:(int)height
46 |               type:(BGRAToBT709ConverterTypeEnum)type;
47 |
48 | // Util methods, these are used internally but can be useful
49 | // to other modules.
50 |
51 | // Set the proper attributes on a CVPixelBufferRef so that vImage
52 | // is able to render directly into BT.709 formatted YCbCr planes.
53 |
54 | + (BOOL) setBT709Attributes:(CVPixelBufferRef)cvPixelBuffer;
55 |
56 | // Attach ICC profile data to a pixel buffer, so that pixels rendered
57 | // in the BT.709 colorspace are known to color matching.
58 |
59 | + (BOOL) setBT709Colorspace:(CVPixelBufferRef)cvPixelBuffer;
60 |
61 | // Allocate a CoreVideo buffer for use with BT.709 format YCbCr 2 plane data
62 |
63 | + (CVPixelBufferRef) createCoreVideoYCbCrBuffer:(CGSize)size;
64 |
65 | // Allocate a CoreVideo buffer that contains a single 8 bit component for each pixel
66 |
67 | + (CVPixelBufferRef) createCoreVideoYBuffer:(CGSize)size;
68 |
69 | // Copy pixel data from CoreGraphics source into vImage buffer for processing.
70 | // Note that data is copied as original pixel values, for example if the input
71 | // is in linear RGB then linear RGB values are copied over.
72 |
73 | + (BOOL) convertIntoCoreVideoBuffer:(CGImageRef)inputImageRef
74 |                       cvPixelBuffer:(CVPixelBufferRef)cvPixelBuffer
75 |                          inputGamma:(BT709Gamma)inputGamma
76 |                         outputGamma:(BT709Gamma)outputGamma;
77 |
78 | // Convert the contents of a CoreVideo pixel buffer and write the results
79 | // into the indicated destination vImage buffer.
80 |
81 | + (BOOL) convertFromCoreVideoBuffer:(CVPixelBufferRef)cvPixelBuffer
82 |                           bufferPtr:(vImage_Buffer*)bufferPtr
83 |                          colorspace:(CGColorSpaceRef)colorspace;
84 |
85 | // Copy Y Cb Cr pixel data from the planes of a CoreVideo pixel buffer.
86 | // Writes Y Cb Cr values to grayscale PNG if dump flag is TRUE.
87 |
88 | + (BOOL) copyYCBCr:(CVPixelBufferRef)cvPixelBuffer
89 |                  Y:(NSMutableData*)Y
90 |                 Cb:(NSMutableData*)Cb
91 |                 Cr:(NSMutableData*)Cr
92 |               dump:(BOOL)dump;
93 |
94 | // Dump Y Cb Cr grayscale images to the tmp dir
95 |
96 | + (BOOL) dumpYCBCr:(CVPixelBufferRef)cvPixelBuffer;
97 |
98 | // Given a CGImageRef, create a CVPixelBufferRef and render into it,
99 | // format input BGRA data into BT.709 formatted YCbCr at 4:2:0 subsampling.
100 | // This method returns a new CoreVideo buffer on success, otherwise failure.
101 |
102 | + (CVPixelBufferRef) createYCbCrFromCGImage:(CGImageRef)inputImageRef;
103 |
104 | // Given a CGImageRef, create a CVPixelBufferRef and render into it,
105 | // format input BGRA data into BT.709 formatted YCbCr at 4:2:0 subsampling.
106 | // This method returns a new CoreVideo buffer on success, otherwise failure.
107 | // The isLinear argument forces the conversion logic to treat input pixels 108 | // as linear SRGB with a gamma = 1.0. If instead, the asSRGBGamma flag 109 | // is set to TRUE then the sRGB gamma function is applied to values 110 | // before converting to BT.709 values. 111 | 112 | + (CVPixelBufferRef) createYCbCrFromCGImage:(CGImageRef)inputImageRef 113 | isLinear:(BOOL)isLinear 114 | asSRGBGamma:(BOOL)asSRGBGamma; 115 | 116 | // Copy YCbCr data stored in BGRA pixels into Y CbCr planes in CoreVideo 117 | // pixel buffer. 118 | 119 | + (BOOL) copyBT709ToCoreVideo:(uint32_t*)inBT709Pixels 120 | cvPixelBuffer:(CVPixelBufferRef)cvPixelBuffer; 121 | 122 | // Process a YUV CoreVideo buffer with Metal logic that will convert the BT.709 123 | // colorspace image and resample it into a sRGB output image. Note that this 124 | // implementation is not optimal since it allocates intermediate images 125 | // and a CIContext. 126 | 127 | + (CGFrameBuffer*) processYUVTosRGB:(CVPixelBufferRef)cvPixelBuffer; 128 | 129 | // Unpremultiply a 32 BPP image and return the results as 130 | // a 24 BPP image where the alpha is assumed to be 0xFF. 131 | // This method returns NULL if there was an error. 132 | 133 | + (CGImageRef) unpremultiply:(CGImageRef)inputImageRef; 134 | 135 | @end 136 | -------------------------------------------------------------------------------- /Renderer/CGFrameBuffer.h: -------------------------------------------------------------------------------- 1 | // 2 | // CGFrameBuffer.h 3 | // 4 | // Created by Moses DeJong on 2/13/09. 5 | // 6 | // License terms defined in License.txt. 7 | 8 | #import 9 | #import 10 | 11 | // Avoid incorrect warnings from clang 12 | #ifndef __has_feature // Optional. 13 | #define __has_feature(x) 0 // Compatibility with non-clang compilers. 14 | #endif 15 | 16 | #ifndef CF_RETURNS_RETAINED 17 | #if __has_feature(attribute_cf_returns_retained) 18 | #define CF_RETURNS_RETAINED __attribute__((cf_returns_retained)) 19 | #else 20 | #define CF_RETURNS_RETAINED 21 | #endif 22 | #endif 23 | 24 | @interface CGFrameBuffer : NSObject { 25 | @protected 26 | char *m_pixels; 27 | char *m_zeroCopyPixels; 28 | NSData *m_zeroCopyMappedData; 29 | size_t m_numBytes; 30 | size_t m_numBytesAllocated; 31 | size_t m_width; 32 | size_t m_height; 33 | size_t m_bitsPerPixel; 34 | size_t m_bytesPerPixel; 35 | int32_t m_isLockedByDataProvider; 36 | CGImageRef m_lockedByImageRef; 37 | CGColorSpaceRef m_colorspace; 38 | 39 | #if __has_feature(objc_arc) 40 | NSObject *m_arcRefToSelf; 41 | #endif // objc_arc 42 | } 43 | 44 | @property (readonly) char *pixels; 45 | @property (readonly) char *zeroCopyPixels; 46 | @property (nonatomic, copy) NSData *zeroCopyMappedData; 47 | 48 | // The numBytes property indicates the number of bytes in length 49 | // of the buffer pointed to by the pixels property. In the event 50 | // that an odd number of pixels is allocated, this numBytes value 51 | // could also include a zero padding pixel in order to keep the 52 | // buffer size an even number of pixels. 53 | 54 | @property (readonly) size_t numBytes; 55 | @property (readonly) size_t width; 56 | @property (readonly) size_t height; 57 | @property (readonly) size_t bitsPerPixel; 58 | @property (readonly) size_t bytesPerPixel; 59 | 60 | @property (nonatomic, assign) BOOL isLockedByDataProvider; 61 | @property (nonatomic, readonly) CGImageRef lockedByImageRef; 62 | 63 | // The colorspace will default to device RGB unless explicitly set. 
If set, then
64 | // the indicated colorspace will be used when invoking CGBitmapContextCreate()
65 | // such that a drawing operation will output pixels in the indicated colorspace.
66 | // The same colorspace will be used when creating a CGImageRef via createCGImageRef.
67 | // While this property is marked as assign, it will retain a ref to the indicated colorspace.
68 |
69 | @property (nonatomic, assign) CGColorSpaceRef colorspace;
70 |
71 | + (CGFrameBuffer*) cGFrameBufferWithBppDimensions:(NSInteger)bitsPerPixel width:(NSInteger)width height:(NSInteger)height;
72 |
73 | - (id) initWithBppDimensions:(NSInteger)bitsPerPixel width:(NSInteger)width height:(NSInteger)height;
74 |
75 | // Render a CGImageRef directly into the pixels
76 |
77 | - (BOOL) renderCGImage:(CGImageRef)cgImageRef;
78 |
79 | // Wrap the framebuffer in a CoreGraphics bitmap context.
80 | // This API creates a handle that can be used to render
81 | // directly into the bitmap pixels. The handle must
82 | // be explicitly released by the caller via CGContextRelease()
83 |
84 | - (CGContextRef) createBitmapContext CF_RETURNS_RETAINED;
85 |
86 | // Create a Core Graphics image from the pixel data
87 | // in this buffer. The hasDataProvider property
88 | // will be TRUE while the CGImageRef is in use.
89 | // This name is upper case to avoid warnings from the analyzer.
90 |
91 | - (CGImageRef) createCGImageRef CF_RETURNS_RETAINED;
92 |
93 | // Defines the pixel layout, could be overloaded in a derived class
94 |
95 | - (CGBitmapInfo) getBitmapInfo;
96 |
97 | - (BOOL) isLockedByImageRef:(CGImageRef)cgImageRef;
98 |
99 | // Set all pixels to 0x0
100 |
101 | - (void) clear;
102 |
103 | // Copy data from another framebuffer into this one
104 |
105 | - (void) copyPixels:(CGFrameBuffer *)anotherFrameBuffer;
106 |
107 | // Use memcpy() as opposed to an OS level page copy
108 |
109 | - (void) memcopyPixels:(CGFrameBuffer *)anotherFrameBuffer;
110 |
111 | // Zero copy from an external read-only location
112 |
113 | - (void) zeroCopyPixels:(void*)zeroCopyPtr mappedData:(NSData*)mappedData;
114 | - (void) zeroCopyToPixels;
115 | - (void) doneZeroCopyPixels;
116 |
117 | // Optional opaque pixel writing logic to clear the alpha channel values when
118 | // pixels are known to be 24BPP only. This call sets the alpha channel for
119 | // each pixel to zero.
120 |
121 | - (void) clearAlphaChannel;
122 |
123 | // This method resets the alpha channel for each pixel to be fully opaque.
124 |
125 | - (void) resetAlphaChannel;
126 |
127 | // Convert pixels to a PNG image format that can be easily saved to disk.
128 |
129 | - (NSData*) formatAsPNG;
130 |
131 | // Convert pixels to a BMP image format that can be easily saved to disk.
132 |
133 | - (NSData*) formatAsBMP;
134 |
135 | // Convert pixels to a TIFF image format that can be easily saved to disk.
136 |
137 | - (NSData*) formatAsTIFF;
138 |
139 | @end
140 |
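// A minimal usage sketch (not from the original header), assuming an existing
// CGImageRef named inImage:
//
// CGFrameBuffer *fb = [CGFrameBuffer cGFrameBufferWithBppDimensions:32 width:1920 height:1080];
// BOOL worked = [fb renderCGImage:inImage]; // draw the image into fb.pixels
// NSData *pngData = worked ? [fb formatAsPNG] : nil; // encode the pixels as PNG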
-------------------------------------------------------------------------------- /Renderer/CVPixelBufferUtils.h: --------------------------------------------------------------------------------
1 | //
2 | // CVPixelBufferUtils.h
3 | //
4 | // Created by Moses DeJong on 12/14/18.
5 | //
6 | // Utility functions for CoreVideo pixel buffers.
7 | //
8 | // Licensed under BSD terms.
9 |
10 | #if !defined(_CVPixelBufferUtils_H)
11 | #define _CVPixelBufferUtils_H
12 |
13 | #import "BT709.h"
14 |
15 | // Copy the contents of a specific plane from src to dst, this
16 | // method is optimized so that memcpy() operations will copy
17 | // either the whole buffer when possible, or a row at a time otherwise.
18 |
19 | static inline
20 | void cvpbu_copy_plane(CVPixelBufferRef src, CVPixelBufferRef dst, int plane) {
21 |   int width = (int) CVPixelBufferGetWidth(dst);
22 |   int height = (int) CVPixelBufferGetHeight(dst);
23 |
24 |   // Lock src (read-only) and dst before touching the plane base addresses
25 |
26 |   {
27 |     int status = CVPixelBufferLockBaseAddress(src, kCVPixelBufferLock_ReadOnly);
28 |     assert(status == kCVReturnSuccess);
29 |   }
30 |
31 |   {
32 |     int status = CVPixelBufferLockBaseAddress(dst, 0);
33 |     assert(status == kCVReturnSuccess);
34 |   }
35 |
36 |   uint8_t *yInPlane = (uint8_t *) CVPixelBufferGetBaseAddressOfPlane(src, plane);
37 |   assert(yInPlane);
38 |   const size_t yInBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(src, plane);
39 |
40 |   uint8_t *yOutPlane = (uint8_t *) CVPixelBufferGetBaseAddressOfPlane(dst, plane);
41 |   assert(yOutPlane);
42 |   const size_t yOutBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(dst, plane);
43 |
44 |   if (yInBytesPerRow == yOutBytesPerRow) {
45 |     memcpy(yOutPlane, yInPlane, yInBytesPerRow * height);
46 |   } else {
47 |     for (int row = 0; row < height; row++) {
48 |       uint8_t *rowInPtr = yInPlane + (row * yInBytesPerRow);
49 |       uint8_t *rowOutPtr = yOutPlane + (row * yOutBytesPerRow);
50 | #if defined(DEBUG)
51 |       assert(width <= yOutBytesPerRow);
52 | #endif // DEBUG
53 |       memcpy(rowOutPtr, rowInPtr, width);
54 |     }
55 |   }
56 |
57 |   if ((0)) {
58 |     for (int row = 0; row < height; row++) {
59 |       uint8_t *rowOutPtr = yOutPlane + (row * yOutBytesPerRow);
60 |       for (int col = 0; col < width; col++) {
61 |         uint8_t bVal = rowOutPtr[col];
62 |         printf("%d ", bVal);
63 |       }
64 |     }
65 |   }
66 |
67 |   {
68 |     int status = CVPixelBufferUnlockBaseAddress(src, kCVPixelBufferLock_ReadOnly);
69 |     assert(status == kCVReturnSuccess);
70 |   }
71 |
72 |   {
73 |     int status = CVPixelBufferUnlockBaseAddress(dst, 0);
74 |     assert(status == kCVReturnSuccess);
75 |   }
76 | }
77 |
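// The wrapper functions below take a CVMetalTextureCacheRef; a minimal sketch
// (not from the original source) of creating one, assuming an existing
// id<MTLDevice> named device:
//
// CVMetalTextureCacheRef textureCache = NULL;
// CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache);
// assert(status == kCVReturnSuccess);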
78 | // Wrap a CV pixel buffer plane up as a Metal texture, the plane number
79 | // indicates which plane will be wrapped. The format of the pixels
80 | // is assumed to be a single byte value that will be referenced as a float.
81 |
82 | static inline
83 | id<MTLTexture> cvpbu_wrap_y_plane_as_metal_texture(CVPixelBufferRef cvPixelBuffer,
84 |                                                    int width,
85 |                                                    int height,
86 |                                                    CVMetalTextureCacheRef textureCache,
87 |                                                    int plane)
88 | {
89 |   id<MTLTexture> inputTexture = nil;
90 |
91 |   CVMetalTextureRef textureWrapperRef = NULL;
92 |
93 |   CVReturn ret = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
94 |                                                            textureCache,
95 |                                                            cvPixelBuffer,
96 |                                                            nil, // textureAttributes
97 |                                                            MTLPixelFormatR8Unorm,
98 |                                                            width,
99 |                                                            height,
100 |                                                            plane,
101 |                                                            &textureWrapperRef);
102 |
103 | #if defined(DEBUG)
104 |   assert(ret == kCVReturnSuccess && textureWrapperRef != NULL);
105 | #endif // DEBUG
106 |
107 |   if (ret != kCVReturnSuccess || textureWrapperRef == NULL) {
108 |     return nil;
109 |   }
110 |
111 |   inputTexture = CVMetalTextureGetTexture(textureWrapperRef);
112 |
113 |   CFRelease(textureWrapperRef);
114 |
115 |   return inputTexture;
116 | }
117 |
118 | static inline
119 | id<MTLTexture> cvpbu_wrap_uv_plane_as_metal_texture(CVPixelBufferRef cvPixelBuffer,
120 |                                                     unsigned int width,
121 |                                                     unsigned int height,
122 |                                                     CVMetalTextureCacheRef textureCache,
123 |                                                     int plane)
124 | {
125 |   unsigned int hw = width >> 1;
126 |   unsigned int hh = height >> 1;
127 |
128 |   id<MTLTexture> inputTexture = nil;
129 |
130 |   CVMetalTextureRef textureWrapperRef = NULL;
131 |
132 |   CVReturn ret = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
133 |                                                            textureCache,
134 |                                                            cvPixelBuffer,
135 |                                                            nil, // textureAttributes
136 |                                                            MTLPixelFormatRG8Unorm,
137 |                                                            hw,
138 |                                                            hh,
139 |                                                            plane,
140 |                                                            &textureWrapperRef);
141 |
142 | #if defined(DEBUG)
143 |   assert(ret == kCVReturnSuccess && textureWrapperRef != NULL);
144 | #endif // DEBUG
145 |
146 |   if (ret != kCVReturnSuccess || textureWrapperRef == NULL) {
147 |     return nil;
148 |   }
149 |
150 |   inputTexture = CVMetalTextureGetTexture(textureWrapperRef);
151 |
152 |   CFRelease(textureWrapperRef);
153 |
154 |   return inputTexture;
155 | }
156 |
157 | // Grab the contents of a Y (byte) plane inside a CoreVideo pixel buffer
158 | // and copy the byte values into a NSData object. This method is
159 | // intended to be used for debug purposes and so it does not need
160 | // to be efficient. This method will lock and then unlock the buffer.
161 | 162 | static inline 163 | NSMutableData* cvpbu_get_y_plane_as_data(CVPixelBufferRef cvPixelBuffer, int plane) 164 | { 165 | int width = (int) CVPixelBufferGetWidth(cvPixelBuffer); 166 | int height = (int) CVPixelBufferGetHeight(cvPixelBuffer); 167 | 168 | NSMutableData *mData = [NSMutableData dataWithLength:width*height*sizeof(uint8_t)]; 169 | 170 | { 171 | int status = CVPixelBufferLockBaseAddress(cvPixelBuffer, 0); 172 | assert(status == kCVReturnSuccess); 173 | } 174 | 175 | uint8_t *yPlane = (uint8_t *) CVPixelBufferGetBaseAddressOfPlane(cvPixelBuffer, plane); 176 | size_t yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(cvPixelBuffer, plane); 177 | 178 | uint8_t *yPlanePacked = (uint8_t *) mData.bytes; 179 | 180 | for (int row = 0; row < height; row++) { 181 | uint8_t *inRowPtr = yPlane + (row * yBytesPerRow); 182 | uint8_t *outRowPtr = yPlanePacked + (row * (width * sizeof(uint8_t))); 183 | 184 | for (int col = 0; col < width; col++) { 185 | int Y = inRowPtr[col]; 186 | outRowPtr[col] = Y; 187 | } 188 | } 189 | 190 | { 191 | int status = CVPixelBufferUnlockBaseAddress(cvPixelBuffer, 0); 192 | assert(status == kCVReturnSuccess); 193 | } 194 | 195 | return mData; 196 | } 197 | 198 | static inline 199 | NSMutableData* cvpbu_get_uv_plane_as_data(CVPixelBufferRef cvPixelBuffer, int plane) 200 | { 201 | unsigned int width = (int) CVPixelBufferGetWidth(cvPixelBuffer); 202 | unsigned int height = (int) CVPixelBufferGetHeight(cvPixelBuffer); 203 | 204 | unsigned int hw = width >> 1; 205 | unsigned int hh = height >> 1; 206 | 207 | NSMutableData *mData = [NSMutableData dataWithLength:hw*hh*sizeof(uint16_t)]; 208 | 209 | { 210 | int status = CVPixelBufferLockBaseAddress(cvPixelBuffer, 0); 211 | assert(status == kCVReturnSuccess); 212 | } 213 | 214 | uint16_t *cbcrPlane = (uint16_t *) CVPixelBufferGetBaseAddressOfPlane(cvPixelBuffer, plane); 215 | size_t cbcrBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(cvPixelBuffer, plane); 216 | const size_t cbcrPixelsPerRow = cbcrBytesPerRow / sizeof(uint16_t); 217 | 218 | uint16_t *cbcrPlanePacked = (uint16_t *) mData.bytes; 219 | 220 | for (int row = 0; row < hh; row++) { 221 | uint16_t *inRowPtr = cbcrPlane + (row * cbcrPixelsPerRow); 222 | uint16_t *outRowPtr = cbcrPlanePacked + (row * hw); 223 | 224 | for (int col = 0; col < hw; col++) { 225 | uint16_t uv = inRowPtr[col]; 226 | outRowPtr[col] = uv; 227 | } 228 | } 229 | 230 | { 231 | int status = CVPixelBufferUnlockBaseAddress(cvPixelBuffer, 0); 232 | assert(status == kCVReturnSuccess); 233 | } 234 | 235 | return mData; 236 | } 237 | 238 | // Subsample RGB pixels as YCbCr with linear gamma logic that 239 | // best represents the resized color planes via iterative approach. 
240 | 241 | static inline 242 | void cvpbu_ycbcr_subsample(uint32_t *inPixelsPtr, int width, int height, CVPixelBufferRef dst, const BT709Gamma inputGamma, const BT709Gamma outputGamma) { 243 | const int debug = 0; 244 | 245 | // int width = (int) CVPixelBufferGetWidth(dst); 246 | // int height = (int) CVPixelBufferGetHeight(dst); 247 | 248 | // Copy Y values from cvPixelBufferAlphaIn to cvPixelBufferAlphaOut 249 | 250 | // { 251 | // int status = CVPixelBufferLockBaseAddress(src, kCVPixelBufferLock_ReadOnly); 252 | // assert(status == kCVReturnSuccess); 253 | // } 254 | 255 | { 256 | int status = CVPixelBufferLockBaseAddress(dst, 0); 257 | assert(status == kCVReturnSuccess); 258 | } 259 | 260 | const int yPlane = 0; 261 | const int cbcrPlane = 1; 262 | 263 | //uint32_t *inPixelsPtr = (uint32_t *) CVPixelBufferGetBaseAddress(src); 264 | //assert(inPixelsPtr); 265 | //const size_t inPixelsBytesPerRow = CVPixelBufferGetBytesPerRow(src); 266 | 267 | uint8_t *outYPlanePtr = (uint8_t *) CVPixelBufferGetBaseAddressOfPlane(dst, yPlane); 268 | assert(outYPlanePtr); 269 | const size_t yOutBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(dst, yPlane); 270 | 271 | uint16_t *outCbCrPlanePtr = (uint16_t *) CVPixelBufferGetBaseAddressOfPlane(dst, cbcrPlane); 272 | assert(outCbCrPlanePtr); 273 | const size_t cbcrOutBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(dst, cbcrPlane); 274 | 275 | assert((width % 2) == 0); 276 | assert((height % 2) == 0); 277 | 278 | const int numCbCrPerRow = (int) (cbcrOutBytesPerRow / sizeof(uint16_t)); 279 | 280 | { 281 | uint8_t *outYRowPtr; 282 | uint16_t *outCbCrRowPtr; 283 | 284 | for (int row = 0; row < height; row++) { 285 | if ((row % 2) != 0) { 286 | // Skip odd rows 287 | continue; 288 | } 289 | 290 | //assert((inPixelsBytesPerRow % sizeof(uint32_t)) == 0); 291 | //int numPixelsPerRow = (int) (row * (inPixelsBytesPerRow / sizeof(uint32_t))); 292 | //uint32_t *inPixelsRowPtr = inPixelsPtr + width; 293 | 294 | //outYRowPtr = outYPlanePtr + (row * yOutBytesPerRow); 295 | //outCbCrRowPtr = outCbCrPlanePtr + (row * numCbCrPerRow); 296 | 297 | for (int col = 0; col < width; col += 2) { 298 | uint32_t p1 = inPixelsPtr[(row * width) + col]; 299 | uint32_t p2 = inPixelsPtr[(row * width) + col+1]; 300 | 301 | uint32_t p3 = inPixelsPtr[((row+1) * width) + col]; 302 | uint32_t p4 = inPixelsPtr[((row+1) * width) + col+1]; 303 | 304 | int R1 = (p1 >> 16) & 0xFF; 305 | int G1 = (p1 >> 8) & 0xFF; 306 | int B1 = p1 & 0xFF; 307 | 308 | int R2 = (p2 >> 16) & 0xFF; 309 | int G2 = (p2 >> 8) & 0xFF; 310 | int B2 = p2 & 0xFF; 311 | 312 | int R3 = (p3 >> 16) & 0xFF; 313 | int G3 = (p3 >> 8) & 0xFF; 314 | int B3 = p3 & 0xFF; 315 | 316 | int R4 = (p4 >> 16) & 0xFF; 317 | int G4 = (p4 >> 8) & 0xFF; 318 | int B4 = p4 & 0xFF; 319 | 320 | if (debug) { 321 | printf("R1 G1 B1 %3d %3d %3d : R2 G2 B2 %3d %3d %3d\n", R1, G1, B1, R2, G2, B2); 322 | printf("R3 G3 B3 %3d %3d %3d : R4 G4 B4 %3d %3d %3d\n", R3, G3, B3, R4, G4, B4); 323 | } 324 | 325 | int Y1, Y2, Y3, Y4; 326 | int Cb, Cr; 327 | 328 | BT709_average_pixel_values( 329 | R1, G1, B1, 330 | R2, G2, B2, 331 | R3, G3, B3, 332 | R4, G4, B4, 333 | &Y1, &Y2, &Y3, &Y4, 334 | &Cb, &Cr, 335 | inputGamma, 336 | outputGamma 337 | ); 338 | 339 | if (debug) { 340 | printf("Y1 Y2 Y3 Y4 %3d %3d %3d %3d : Cb Cr %3d %3d\n", Y1, Y2, Y3, Y4, Cb, Cr); 341 | } 342 | 343 | // Write 4 Y values to plane 0 344 | 345 | outYRowPtr = outYPlanePtr + (row * yOutBytesPerRow); 346 | 347 | outYRowPtr[col] = Y1; 348 | outYRowPtr[col+1] = Y2; 349 | 350 | outYRowPtr = 
outYPlanePtr + ((row+1) * yOutBytesPerRow); 351 | 352 | outYRowPtr[col] = Y3; 353 | outYRowPtr[col+1] = Y4; 354 | 355 | // Write CbCr value to plane 1 356 | 357 | outCbCrRowPtr = outCbCrPlanePtr + (row/2 * numCbCrPerRow); 358 | uint16_t cbcrPixel = 0; 359 | cbcrPixel = (uint16_t) Cb; 360 | cbcrPixel |= (((uint16_t) Cr) << 8); 361 | outCbCrRowPtr[col/2] = cbcrPixel; 362 | } 363 | } 364 | } 365 | 366 | if ((0)) { 367 | printf("Y:\n"); 368 | for (int row = 0; row < height; row++) { 369 | uint8_t *rowOutPtr = outYPlanePtr + (row * yOutBytesPerRow); 370 | for (int col = 0; col < width; col++) { 371 | uint8_t bVal = rowOutPtr[col]; 372 | printf("%3d ", bVal); 373 | } 374 | printf("\n"); 375 | } 376 | 377 | printf("CbCr:\n"); 378 | for (int row = 0; row < (height/2); row++) { 379 | uint16_t *rowOutPtr = outCbCrPlanePtr + (row * numCbCrPerRow); 380 | for (int col = 0; col < (width/2); col++) { 381 | uint16_t pVal = rowOutPtr[col]; 382 | uint8_t Cb = (pVal >> 0) & 0xFF; 383 | uint8_t Cr = (pVal >> 8) & 0xFF; 384 | printf("%3d %3d ", Cb, Cr); 385 | } 386 | printf("\n"); 387 | } 388 | } 389 | 390 | // { 391 | // int status = CVPixelBufferUnlockBaseAddress(src, kCVPixelBufferLock_ReadOnly); 392 | // assert(status == kCVReturnSuccess); 393 | // } 394 | 395 | { 396 | int status = CVPixelBufferUnlockBaseAddress(dst, 0); 397 | assert(status == kCVReturnSuccess); 398 | } 399 | } 400 | 401 | 402 | #endif // _CVPixelBufferUtils_H 403 | -------------------------------------------------------------------------------- /Renderer/ColorsAlpha4by4.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/ColorsAlpha4by4.m4v -------------------------------------------------------------------------------- /Renderer/ColorsAlpha4by4_alpha.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/ColorsAlpha4by4_alpha.m4v -------------------------------------------------------------------------------- /Renderer/DalaiLamaGray_bt709.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/DalaiLamaGray_bt709.m4v -------------------------------------------------------------------------------- /Renderer/DalaiLamaGray_srgb.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/DalaiLamaGray_srgb.m4v -------------------------------------------------------------------------------- /Renderer/Gamma_test_HD_75Per_24BPP_sRGB_HD.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/Gamma_test_HD_75Per_24BPP_sRGB_HD.m4v -------------------------------------------------------------------------------- /Renderer/GlobeLEDAlpha.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/GlobeLEDAlpha.m4v -------------------------------------------------------------------------------- /Renderer/GlobeLEDAlpha_alpha.m4v: 
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/GlobeLEDAlpha_alpha.m4v
--------------------------------------------------------------------------------
/Renderer/H264Encoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // H264Encoder.h
3 | //
4 | // Created by Mo DeJong on 7/6/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 | // This module wraps AVFoundation APIs so that a H264 video can
9 | // be created from CoreGraphics references to image data. The
10 | // interface defines a callback that is invoked each time the
11 | // encoder is ready to write a frame of data to the output file.
12 | // All processing is done on a background thread; the main thread
13 | // is not blocked at any time. Note that any error reporting is
14 | // handled by invoking H264EncoderResult to report an error
15 | // on the main thread.
16 |
17 | @import Foundation;
18 | @import AVFoundation;
19 | @import CoreVideo;
20 | @import CoreImage;
21 | @import CoreMedia;
22 | @import VideoToolbox;
23 |
24 | typedef enum
25 | {
26 | H264EncoderErrorCodeSuccess = 0,
27 | H264EncoderErrorCodeNoFrameSource = 1,
28 | H264EncoderErrorCodeSessionNotStarted = 2,
29 | } H264EncoderErrorCode;
30 |
31 | @protocol H264EncoderFrameSource
32 |
33 | // Given a frame number in the range (0, N-1), return a CoreGraphics
34 | // image reference to the corresponding image to be written to
35 | // the encoded file.
36 |
37 | - (CGImageRef) imageForFrame:(int)frameNum;
38 |
39 | // Return TRUE if more frames can be returned by this frame source,
40 | // returning FALSE means that all frames have been encoded.
41 |
42 | - (BOOL) hasMoreFrames;
43 |
44 | @end
45 |
46 | // Implement this protocol to async report the results of an encoding
47 | // operation on a background thread.
48 |
49 | @protocol H264EncoderResult
50 |
51 | // Invoked on the main thread once the encoding operation has
52 | // completed. The code is zero = H264EncoderErrorCodeSuccess
53 | // on success, otherwise an error code indicates what went wrong.
54 |
55 |
56 | - (void) encoderResult:(H264EncoderErrorCode)code;
57 |
58 | @end
59 |
60 | // H264Encoder class
61 |
62 | @interface H264Encoder : NSObject
63 |
64 | // Reference to a H264EncoderFrameSource protocol implementation,
65 | // this must be set to a non-nil value before encoding.
66 |
67 | @property (nonatomic, assign) id<H264EncoderFrameSource> frameSource;
68 |
69 | // Reference to a H264EncoderResult, this must be set to a non-nil
70 | // value before encoding.
71 |
72 | @property (nonatomic, assign) id<H264EncoderResult> encoderResult;
73 |
74 | // This property is FALSE while encoding is in progress; once encoding
75 | // finishes with either a success or error status this property is set to TRUE.
76 | // Note that this property is thread safe as it can be accessed from different threads.
77 |
78 | @property (atomic, assign) BOOL finished;
79 |
80 | // Block the calling thread until encoding is finished
81 |
82 | - (void) blockUntilFinished;
83 |
84 | // convert error code to string
85 |
86 | + (NSString*) ErrorCodeToString:(H264EncoderErrorCode)code;
87 |
88 | // constructor
89 |
90 | + (H264Encoder*) h264Encoder;
91 |
92 | // Invoke this method to start a background thread that will encode
93 | // incoming frames of images and write the result to a H264 file.
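// A hypothetical invocation sketch (path and settings are illustrative;
// frameDuration is assumed to be the per frame duration in seconds):
//
//   H264Encoder *encoder = [H264Encoder h264Encoder];
//   encoder.frameSource = myFrameSource;      // implements H264EncoderFrameSource
//   encoder.encoderResult = myResultHandler;  // implements H264EncoderResult
//   [encoder encodeframes:@"/tmp/out.m4v"
//           frameDuration:(1.0f/30.0f)
//              renderSize:CGSizeMake(1920, 1080)
//              aveBitrate:5000000];
//   [encoder blockUntilFinished];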
94 | // Note that if there are any error conditions, the encoderResult
95 | // protocol callback is invoked to report the error condition.
96 |
97 | - (void) encodeframes:(NSString*)outH264Path
98 | frameDuration:(float)frameDuration
99 | renderSize:(CGSize)renderSize
100 | aveBitrate:(int)aveBitrate;
101 |
102 | // Generate a reference to the hidden "HDTV" colorspace that decodes already
103 | // "brightness adjusted" video data to light linear with a 1.961 gamma combined
104 | // with a linear segment with slope 16.
105 |
106 | + (CGColorSpaceRef) createHDTVColorSpaceRef;
107 |
108 | @end
109 |
--------------------------------------------------------------------------------
/Renderer/Image.tga:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/Image.tga
--------------------------------------------------------------------------------
/Renderer/MetalBT709Decoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // MetalBT709Decoder.h
3 | //
4 | // Created by Mo DeJong on 12/26/18.
5 | //
6 | // Given an input buffer of BT.709 encoded YCbCr data, decode
7 | // pixels into a sRGB texture.
8 |
9 | #import
10 | #import
11 | #import
12 |
13 | @class MetalRenderContext;
14 |
15 | typedef enum {
16 | MetalBT709GammaApple = 0, // default
17 | MetalBT709GammaSRGB,
18 | MetalBT709GammaLinear
19 | } MetalBT709Gamma;
20 |
21 | @interface MetalBT709Decoder : NSObject
22 |
23 | // Setup MetalRenderContext instance before the first decode,
24 | // existing Metal refs will be used if set, otherwise a new
25 | // instance will be allocated.
26 |
27 | @property (nonatomic, retain) MetalRenderContext *metalRenderContext;
28 |
29 | @property (nonatomic, assign) MTLPixelFormat colorPixelFormat;
30 |
31 | // Defaults to apple gamma. Caller must set the specific gamma function
32 | // to use while decoding before Metal is initialized. The decoding
33 | // logic must indicate the gamma function to be used at init time
34 | // even though the actual type of content to be decoded may not
35 | // be known until the first frame of video is read.
36 |
37 | @property (nonatomic, assign) MetalBT709Gamma gamma;
38 |
39 | // If set to TRUE, a compute kernel will be used to render,
40 | // otherwise use a fragment shader.
41 |
42 | @property (nonatomic, assign) BOOL useComputeRenderer;
43 |
44 | // If hasAlphaChannel is set to TRUE then a second alpha channel
45 | // is defined as a linear channel that can be directly
46 | // interpolated.
47 |
48 | @property (nonatomic, assign) BOOL hasAlphaChannel;
49 |
50 | // Set to TRUE once a render context has been setup
51 |
52 | // Setup Metal refs for this instance, this is implicitly
53 | // invoked by decodeBT709 but some test code may want to
54 | // setup metal before invoking the decode method.
55 |
56 | - (BOOL) setupMetal;
57 |
58 | // BT709 -> BGRA conversion that writes directly into a Metal texture.
59 | // This logic assumes that the Metal texture was already allocated at
60 | // exactly the same dimensions as the input YCbCr encoded data.
61 | // Note that in the case where the render should be done in 1 step,
62 | // meaning directly into a view, pass a renderPassDescriptor.
63 | // A 2 stage render would pass nil for renderPassDescriptor.
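// A hypothetical 2 stage invocation sketch (buffer and texture names are
// illustrative): decode into an offscreen sRGB texture by passing nil for
// renderPassDescriptor, then rescale that texture into the view in a
// second pass:
//
//   BOOL worked = [decoder decodeBT709:yCbCrPixelBuffer
//                     alphaPixelBuffer:NULL
//                      bgraSRGBTexture:dstTexture
//                        commandBuffer:commandBuffer
//                 renderPassDescriptor:nil
//                          renderWidth:width
//                         renderHeight:height
//                   waitUntilCompleted:FALSE];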
64 | 65 | - (BOOL) decodeBT709:(CVPixelBufferRef)yCbCrInputTexture 66 | alphaPixelBuffer:(CVPixelBufferRef)alphaPixelBuffer 67 | bgraSRGBTexture:(id)bgraSRGBTexture 68 | commandBuffer:(id)commandBuffer 69 | renderPassDescriptor:(MTLRenderPassDescriptor*)renderPassDescriptor 70 | renderWidth:(int)renderWidth 71 | renderHeight:(int)renderHeight 72 | waitUntilCompleted:(BOOL)waitUntilCompleted; 73 | 74 | @end 75 | -------------------------------------------------------------------------------- /Renderer/MetalRenderContext.h: -------------------------------------------------------------------------------- 1 | // 2 | // MetalRenderContext.h 3 | // 4 | // Copyright 2016 Mo DeJong. 5 | // 6 | // See LICENSE for terms. 7 | // 8 | // This module references Metal objects that are associated with 9 | // a rendering context, like a view but are not defined on a 10 | // render frame. There is 1 render context for N render frames. 11 | 12 | //@import MetalKit; 13 | #include 14 | 15 | @interface MetalRenderContext : NSObject 16 | 17 | @property (nonatomic, retain) id device; 18 | @property (nonatomic, retain) id defaultLibrary; 19 | @property (nonatomic, retain) id commandQueue; 20 | 21 | @property (nonatomic, retain) id identityVerticesBuffer; 22 | @property (nonatomic, assign) int identityNumVertices; 23 | 24 | // Query GPU feature set flags 25 | 26 | //@property (readonly) NSUInteger highestSupportedFeatureSet; 27 | 28 | // Return 1 for A7, otherwise returns 2 29 | 30 | //@property (readonly) NSUInteger featureSetGPUFamily; 31 | 32 | //@property (readonly) BOOL supportsASTCPixelFormats; 33 | 34 | // Invoke this method once a MetalRenderFrame object has been created 35 | // to allocate and create metal resources with the given device instance. 36 | // Note that the device property must be set before this invocation but 37 | // the defaultLibrary and commandQueue properties will be allocated if 38 | // they are not already set as properties. Returns TRUE on success, FALSE 39 | // if any of the Metal setup steps fails. One example FALSE return condition 40 | // could be failing to locate Metal compiled library in the application bundle 41 | // or test bundle. 42 | 43 | - (BOOL) setupMetal; 44 | 45 | // Create a MTLRenderPipelineDescriptor given a vertex and fragment shader 46 | 47 | - (id) makePipeline:(MTLPixelFormat)pixelFormat 48 | pipelineLabel:(NSString*)pipelineLabel 49 | numAttachments:(int)numAttachments 50 | vertexFunctionName:(NSString*)vertexFunctionName 51 | fragmentFunctionName:(NSString*)fragmentFunctionName; 52 | 53 | // Create a pipeline that executes a compute kernel 54 | 55 | - (id) makePipeline:(MTLPixelFormat)pixelFormat 56 | pipelineLabel:(NSString*)pipelineLabel 57 | kernelFunctionName:(NSString*)kernelFunctionName; 58 | 59 | // Util to allocate a BGRA 32 bits per pixel texture 60 | // with the given dimensions. 61 | 62 | - (id) makeBGRATexture:(CGSize)size 63 | pixels:(uint32_t*)pixels 64 | usage:(MTLTextureUsage)usage 65 | isSRGB:(BOOL)isSRGB; 66 | 67 | - (void) fillBGRATexture:(id)texture pixels:(uint32_t*)pixels; 68 | 69 | // Query bytes out of an 32 bit texture, return as NSData that contains uint32_t elements 70 | 71 | - (NSData*) getBGRATexturePixels:(id)texture; 72 | 73 | // Query pixels from a 32 bit texture and then return those 74 | // pixels by reading the B,G,R,A components as byte values 75 | // in a flat NSData. This method does not depend on endian ordering. 
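// A minimal end to end sketch for this class, assuming the default system
// device (names are illustrative):
//
//   MetalRenderContext *mrc = [[MetalRenderContext alloc] init];
//   mrc.device = MTLCreateSystemDefaultDevice();
//   BOOL worked = [mrc setupMetal];
//
//   uint32_t pixel = 0xFF0000FF; // little endian BGRA word: B=0xFF, G=0, R=0, A=0xFF
//   id<MTLTexture> tex = [mrc makeBGRATexture:CGSizeMake(1, 1)
//                                      pixels:&pixel
//                                       usage:MTLTextureUsageShaderRead
//                                      isSRGB:FALSE];
//   NSData *bytes = [mrc getBGRATextureAsBytes:tex]; // 4 bytes: B,G,R,A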
76 |
77 | - (NSData*) getBGRATextureAsBytes:(id<MTLTexture>)texture;
78 |
79 | // Allocate texture that contains an 8 bit int value in the range (0, 255)
80 | // represented by a half float value.
81 |
82 | - (id<MTLTexture>) make8bitTexture:(CGSize)size bytes:(uint8_t*)bytes usage:(MTLTextureUsage)usage;
83 |
84 | // Fill values in an 8 bit texture
85 |
86 | - (void) fill8bitTexture:(id<MTLTexture>)texture bytes:(uint8_t*)bytes;
87 |
88 | // Query bytes out of an 8 bit texture, return as NSData that contains uint8_t elements
89 |
90 | - (NSData*) get8bitTextureBytes:(id<MTLTexture>)texture;
91 |
92 | // Allocate 16 bit unsigned int texture
93 |
94 | - (id<MTLTexture>) make16bitTexture:(CGSize)size halfwords:(uint16_t*)halfwords usage:(MTLTextureUsage)usage;
95 |
96 | - (void) fill16bitTexture:(id<MTLTexture>)texture halfwords:(uint16_t*)halfwords;
97 |
98 | // Query bytes out of a 16 bit texture, return as NSData that contains uint16_t elements
99 |
100 | - (NSData*) get16bitTexturePixels:(id<MTLTexture>)texture;
101 |
102 | // RG texture stores 2 byte values together, typically UV, each an
103 | // 8 bit int value in the range (0, 255) represented by a half float value.
104 |
105 | - (id<MTLTexture>) make16bitRGTexture:(CGSize)size halfwords:(uint16_t*)halfwords usage:(MTLTextureUsage)usage;
106 |
107 | @end
108 |
--------------------------------------------------------------------------------
/Renderer/MetalRenderContext.m:
--------------------------------------------------------------------------------
1 | //
2 | // MetalRenderContext.m
3 | //
4 | // Copyright 2016 Mo DeJong.
5 | //
6 | // See LICENSE for terms.
7 | //
8 | // This module holds Metal references that are associated with a
9 | // rendering context, like a view, but are not defined on a
10 | // render frame. There is 1 render context for N render frames.
11 |
12 | #include "MetalRenderContext.h"
13 |
14 | // Header shared between C code here, which executes Metal API commands, and .metal files, which
15 | // use these types as input to the shaders
16 | //#import "AAPLShaderTypes.h"
17 |
18 | // Private API
19 |
20 | @interface MetalRenderContext ()
21 |
22 | @end
23 |
24 | // Main class performing the rendering
25 | @implementation MetalRenderContext
26 |
27 | - (BOOL) isSetup
28 | {
29 | if (self.identityVerticesBuffer == nil) {
30 | return FALSE;
31 | } else {
32 | return TRUE;
33 | }
34 | }
35 |
36 | - (BOOL) setupMetal
37 | {
38 | if ([self isSetup] == TRUE) {
39 | // Invoking setupMetal again after it has already been run is a nop and returns success
40 | return TRUE;
41 | }
42 |
43 | if (self.device == nil) {
44 | #if defined(DEBUG)
45 | NSAssert(self.device != nil, @"Metal device must be set before invoking setupMetal");
46 | #endif // DEBUG
47 | return FALSE;
48 | }
49 |
50 | id<MTLLibrary> defaultLibrary = self.defaultLibrary;
51 | if (defaultLibrary == nil) {
52 | defaultLibrary = [self.device newDefaultLibrary];
53 | #if defined(DEBUG)
54 | NSAssert(defaultLibrary != nil, @"defaultLibrary is nil, is Metal library compiled into Application or Test target?");
55 | #endif // DEBUG
56 | self.defaultLibrary = defaultLibrary;
57 | }
58 |
59 | id<MTLCommandQueue> commandQueue = self.commandQueue;
60 | if (commandQueue == nil) {
61 | commandQueue = [self.device newCommandQueue];
62 | self.commandQueue = commandQueue;
63 | }
64 |
65 | #if defined(DEBUG)
66 | NSAssert(self.identityVerticesBuffer == nil, @"identityVerticesBuffer must be nil");
67 | #endif // DEBUG
68 |
69 | int tmp = 0;
70 | self.identityVerticesBuffer = [self makeIdentityVertexBuffer:&tmp];
71 | self.identityNumVertices = tmp;
72 |
73 | return TRUE;
74 | }
75 |
76 | // Util to allocate
a BGRA 32 bits per pixel texture 77 | // with the given dimensions. 78 | 79 | - (id) makeBGRATexture:(CGSize)size 80 | pixels:(uint32_t*)pixels 81 | usage:(MTLTextureUsage)usage 82 | isSRGB:(BOOL)isSRGB 83 | { 84 | MTLTextureDescriptor *textureDescriptor = [[MTLTextureDescriptor alloc] init]; 85 | 86 | textureDescriptor.textureType = MTLTextureType2D; 87 | 88 | if (isSRGB) { 89 | textureDescriptor.pixelFormat = MTLPixelFormatBGRA8Unorm_sRGB; 90 | } else { 91 | textureDescriptor.pixelFormat = MTLPixelFormatBGRA8Unorm; 92 | } 93 | 94 | textureDescriptor.width = (int) size.width; 95 | textureDescriptor.height = (int) size.height; 96 | 97 | //textureDescriptor.usage = MTLTextureUsageShaderWrite|MTLTextureUsageShaderRead; 98 | //textureDescriptor.usage = MTLTextureUsageRenderTarget|MTLTextureUsageShaderRead; 99 | textureDescriptor.usage = usage; 100 | 101 | // Create our texture object from the device and our descriptor 102 | id texture = [_device newTextureWithDescriptor:textureDescriptor]; 103 | 104 | if (pixels != NULL) { 105 | NSUInteger bytesPerRow = textureDescriptor.width * sizeof(uint32_t); 106 | 107 | MTLRegion region = { 108 | { 0, 0, 0 }, // MTLOrigin 109 | {textureDescriptor.width, textureDescriptor.height, 1} // MTLSize 110 | }; 111 | 112 | // Copy the bytes from our data object into the texture 113 | [texture replaceRegion:region 114 | mipmapLevel:0 115 | withBytes:pixels 116 | bytesPerRow:bytesPerRow]; 117 | } 118 | 119 | return texture; 120 | } 121 | 122 | - (void) fillBGRATexture:(id)texture pixels:(uint32_t*)pixels 123 | { 124 | NSUInteger bytesPerRow = texture.width * sizeof(uint32_t); 125 | 126 | MTLRegion region = { 127 | { 0, 0, 0 }, // MTLOrigin 128 | {texture.width, texture.height, 1} // MTLSize 129 | }; 130 | 131 | // Copy the bytes from our data object into the texture 132 | [texture replaceRegion:region 133 | mipmapLevel:0 134 | withBytes:pixels 135 | bytesPerRow:bytesPerRow]; 136 | } 137 | 138 | // Query bytes out of an 32 bit texture, return as NSData that contains uint32_t elements 139 | 140 | - (NSData*) getBGRATexturePixels:(id)texture 141 | { 142 | // Copy texture data into debug framebuffer, note that this include 2x scale 143 | 144 | int width = (int) texture.width; 145 | int height = (int) texture.height; 146 | 147 | NSMutableData *mFramebuffer = [NSMutableData dataWithLength:width*height*sizeof(uint32_t)]; 148 | 149 | [texture getBytes:(void*)mFramebuffer.mutableBytes 150 | bytesPerRow:width*sizeof(uint32_t) 151 | bytesPerImage:width*height*sizeof(uint32_t) 152 | fromRegion:MTLRegionMake2D(0, 0, width, height) 153 | mipmapLevel:0 154 | slice:0]; 155 | 156 | return [NSData dataWithData:mFramebuffer]; 157 | } 158 | 159 | // Query pixels from a 32 bit texture and then return those 160 | // pixels by reading the B,G,R,A components as byte values 161 | // in a flat NSData. This method does not depend on endian ordering. 
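// A hypothetical pipeline creation sketch; the shader names must exist in
// the compiled .metal library (identityVertexShader and samplingShader are
// the names used elsewhere in this project):
//
//   id<MTLRenderPipelineState> pipeline =
//       [mrc makePipeline:MTLPixelFormatBGRA8Unorm
//           pipelineLabel:@"Example Pipeline"
//          numAttachments:1
//      vertexFunctionName:@"identityVertexShader"
//    fragmentFunctionName:@"samplingShader"];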
162 | 163 | - (NSData*) getBGRATextureAsBytes:(id)texture 164 | { 165 | // Copy texture data into debug framebuffer, note that this include 2x scale 166 | 167 | int width = (int) texture.width; 168 | int height = (int) texture.height; 169 | 170 | NSMutableData *pixelsData = [NSMutableData dataWithLength:width*height*sizeof(uint32_t)]; 171 | 172 | [texture getBytes:(void*)pixelsData.mutableBytes 173 | bytesPerRow:width*sizeof(uint32_t) 174 | bytesPerImage:width*height*sizeof(uint32_t) 175 | fromRegion:MTLRegionMake2D(0, 0, width, height) 176 | mipmapLevel:0 177 | slice:0]; 178 | 179 | uint32_t *inPixelPtr = (uint32_t *) pixelsData.bytes; 180 | 181 | NSMutableData *bytesData = [NSMutableData dataWithLength:pixelsData.length]; 182 | uint8_t *outBytesPtr = (uint8_t *) bytesData.mutableBytes; 183 | 184 | for (int row = 0; row < height; row++) { 185 | for (int col = 0; col < width; col++) { 186 | int offset = (row * width) + col; 187 | 188 | uint32_t pixel = inPixelPtr[offset]; 189 | 190 | uint8_t b0 = pixel & 0xFF; 191 | uint8_t b1 = (pixel >> 8) & 0xFF; 192 | uint8_t b2 = (pixel >> 16) & 0xFF; 193 | uint8_t b3 = (pixel >> 24) & 0xFF; 194 | 195 | *outBytesPtr++ = b0; 196 | *outBytesPtr++ = b1; 197 | *outBytesPtr++ = b2; 198 | *outBytesPtr++ = b3; 199 | } 200 | } 201 | 202 | return [NSData dataWithData:bytesData]; 203 | } 204 | 205 | // Allocate texture that contains an 8 bit int value in the range (0, 255) 206 | // represented by a half float value. 207 | 208 | - (id) make8bitTexture:(CGSize)size bytes:(uint8_t*)bytes usage:(MTLTextureUsage)usage 209 | { 210 | MTLTextureDescriptor *textureDescriptor = [[MTLTextureDescriptor alloc] init]; 211 | 212 | textureDescriptor.textureType = MTLTextureType2D; 213 | 214 | // Each value in this texture is an 8 bit integer value in the range (0,255) inclusive 215 | // represented by a half float 216 | 217 | textureDescriptor.pixelFormat = MTLPixelFormatR8Unorm; 218 | textureDescriptor.width = (int) size.width; 219 | textureDescriptor.height = (int) size.height; 220 | 221 | textureDescriptor.usage = usage; 222 | 223 | // Create our texture object from the device and our descriptor 224 | id texture = [_device newTextureWithDescriptor:textureDescriptor]; 225 | 226 | if (bytes != NULL) { 227 | NSUInteger bytesPerRow = textureDescriptor.width * sizeof(uint8_t); 228 | 229 | MTLRegion region = { 230 | { 0, 0, 0 }, // MTLOrigin 231 | {textureDescriptor.width, textureDescriptor.height, 1} // MTLSize 232 | }; 233 | 234 | // Copy the bytes from our data object into the texture 235 | [texture replaceRegion:region 236 | mipmapLevel:0 237 | withBytes:bytes 238 | bytesPerRow:bytesPerRow]; 239 | } 240 | 241 | return texture; 242 | } 243 | 244 | // Fill values in an 8 bit texture 245 | 246 | - (void) fill8bitTexture:(id)texture 247 | bytes:(uint8_t*)bytes 248 | { 249 | NSUInteger bytesPerRow = texture.width * sizeof(uint8_t); 250 | 251 | MTLRegion region = { 252 | { 0, 0, 0 }, // MTLOrigin 253 | {texture.width, texture.height, 1} // MTLSize 254 | }; 255 | 256 | // Copy the bytes from our data object into the texture 257 | [texture replaceRegion:region 258 | mipmapLevel:0 259 | withBytes:bytes 260 | bytesPerRow:bytesPerRow]; 261 | } 262 | 263 | // Query bytes out of an 8 bit texture, return as NSData that contains uint8_t elements 264 | 265 | - (NSData*) get8bitTextureBytes:(id)texture 266 | { 267 | int width = (int) texture.width; 268 | int height = (int) texture.height; 269 | 270 | NSMutableData *mFramebuffer = [NSMutableData dataWithLength:width*height*sizeof(uint8_t)]; 
271 | 272 | [texture getBytes:(void*)mFramebuffer.mutableBytes 273 | bytesPerRow:width*sizeof(uint8_t) 274 | bytesPerImage:width*height*sizeof(uint8_t) 275 | fromRegion:MTLRegionMake2D(0, 0, width, height) 276 | mipmapLevel:0 277 | slice:0]; 278 | 279 | return [NSData dataWithData:mFramebuffer]; 280 | } 281 | 282 | // RG texture is 2 byte values stored together, typically UV 283 | // 8 bit int value in the range (0, 255) represented by a half float value. 284 | 285 | - (id) make16bitRGTexture:(CGSize)size halfwords:(uint16_t*)halfwords usage:(MTLTextureUsage)usage 286 | { 287 | MTLTextureDescriptor *textureDescriptor = [[MTLTextureDescriptor alloc] init]; 288 | 289 | textureDescriptor.textureType = MTLTextureType2D; 290 | 291 | // Each value in this texture is an 8 bit integer pair in the range (0,255) inclusive 292 | // represented by a half float pair of values 293 | 294 | textureDescriptor.pixelFormat = MTLPixelFormatRG8Unorm; 295 | textureDescriptor.width = (int) size.width; 296 | textureDescriptor.height = (int) size.height; 297 | 298 | textureDescriptor.usage = usage; 299 | 300 | // Create our texture object from the device and our descriptor 301 | id texture = [_device newTextureWithDescriptor:textureDescriptor]; 302 | 303 | if (halfwords != NULL) { 304 | NSUInteger bytesPerRow = textureDescriptor.width * sizeof(uint16_t); 305 | 306 | MTLRegion region = { 307 | { 0, 0, 0 }, // MTLOrigin 308 | {textureDescriptor.width, textureDescriptor.height, 1} // MTLSize 309 | }; 310 | 311 | // Copy the bytes from our data object into the texture 312 | [texture replaceRegion:region 313 | mipmapLevel:0 314 | withBytes:halfwords 315 | bytesPerRow:bytesPerRow]; 316 | } 317 | 318 | return texture; 319 | } 320 | 321 | // Allocate 16 bit unsigned int texture 322 | 323 | - (id) make16bitTexture:(CGSize)size halfwords:(uint16_t*)halfwords usage:(MTLTextureUsage)usage 324 | { 325 | MTLTextureDescriptor *textureDescriptor = [[MTLTextureDescriptor alloc] init]; 326 | 327 | textureDescriptor.textureType = MTLTextureType2D; 328 | 329 | // Each value in this texture is an 8 bit integer value in the range (0,255) inclusive 330 | 331 | textureDescriptor.pixelFormat = MTLPixelFormatR16Uint; 332 | textureDescriptor.width = (int) size.width; 333 | textureDescriptor.height = (int) size.height; 334 | 335 | textureDescriptor.usage = usage; 336 | 337 | // Create our texture object from the device and our descriptor 338 | id texture = [self.device newTextureWithDescriptor:textureDescriptor]; 339 | 340 | if (halfwords != NULL) { 341 | NSUInteger bytesPerRow = textureDescriptor.width * sizeof(uint16_t); 342 | 343 | MTLRegion region = { 344 | { 0, 0, 0 }, // MTLOrigin 345 | {textureDescriptor.width, textureDescriptor.height, 1} // MTLSize 346 | }; 347 | 348 | // Copy the bytes from our data object into the texture 349 | [texture replaceRegion:region 350 | mipmapLevel:0 351 | withBytes:halfwords 352 | bytesPerRow:bytesPerRow]; 353 | } 354 | 355 | return texture; 356 | } 357 | 358 | - (void) fill16bitTexture:(id)texture halfwords:(uint16_t*)halfwords 359 | { 360 | 361 | NSUInteger bytesPerRow = texture.width * sizeof(uint16_t); 362 | 363 | MTLRegion region = { 364 | { 0, 0, 0 }, // MTLOrigin 365 | {texture.width, texture.height, 1} // MTLSize 366 | }; 367 | 368 | // Copy the bytes from our data object into the texture 369 | [texture replaceRegion:region 370 | mipmapLevel:0 371 | withBytes:halfwords 372 | bytesPerRow:bytesPerRow]; 373 | } 374 | 375 | // Query bytes out of an 16 bit texture, return as NSData that contains 
uint16_t elements 376 | 377 | - (NSData*) get16bitTexturePixels:(id)texture 378 | { 379 | int width = (int) texture.width; 380 | int height = (int) texture.height; 381 | 382 | NSMutableData *mFramebuffer = [NSMutableData dataWithLength:width*height*sizeof(uint16_t)]; 383 | 384 | [texture getBytes:(void*)mFramebuffer.mutableBytes 385 | bytesPerRow:width*sizeof(uint16_t) 386 | bytesPerImage:width*height*sizeof(uint16_t) 387 | fromRegion:MTLRegionMake2D(0, 0, width, height) 388 | mipmapLevel:0 389 | slice:0]; 390 | 391 | return [NSData dataWithData:mFramebuffer]; 392 | } 393 | 394 | // Create identity vertex buffer 395 | 396 | - (id) makeIdentityVertexBuffer:(int*)numPtr 397 | { 398 | typedef struct 399 | { 400 | // Positions in pixel space (i.e. a value of 100 indicates 100 pixels from the origin/center) 401 | vector_float2 position; 402 | 403 | // 2D texture coordinate 404 | vector_float2 textureCoordinate; 405 | } AAPLVertex; 406 | 407 | static const AAPLVertex quadVertices[] = 408 | { 409 | // Positions, Texture Coordinates 410 | { { 1, -1 }, { 1.f, 0.f } }, 411 | { { -1, -1 }, { 0.f, 0.f } }, 412 | { { -1, 1 }, { 0.f, 1.f } }, 413 | 414 | { { 1, -1 }, { 1.f, 0.f } }, 415 | { { -1, 1 }, { 0.f, 1.f } }, 416 | { { 1, 1 }, { 1.f, 1.f } }, 417 | }; 418 | 419 | *numPtr = sizeof(quadVertices) / sizeof(AAPLVertex); 420 | 421 | // Create our vertex buffer, and intializat it with our quadVertices array 422 | return [self.device newBufferWithBytes:quadVertices 423 | length:sizeof(quadVertices) 424 | options:MTLResourceStorageModeShared]; 425 | } 426 | 427 | // Create a MTLRenderPipelineDescriptor given a vertex and fragment shader 428 | 429 | - (id) makePipeline:(MTLPixelFormat)pixelFormat 430 | pipelineLabel:(NSString*)pipelineLabel 431 | numAttachments:(int)numAttachments 432 | vertexFunctionName:(NSString*)vertexFunctionName 433 | fragmentFunctionName:(NSString*)fragmentFunctionName 434 | { 435 | // Load the vertex function from the library 436 | id vertexFunction = [self.defaultLibrary newFunctionWithName:vertexFunctionName]; 437 | NSAssert(vertexFunction, @"vertexFunction \"%@\" could not be loaded", vertexFunctionName); 438 | 439 | // Load the fragment function from the library 440 | id fragmentFunction = [self.defaultLibrary newFunctionWithName:fragmentFunctionName]; 441 | NSAssert(fragmentFunction, @"fragmentFunction \"%@\" could not be loaded", fragmentFunctionName); 442 | 443 | // Set up a descriptor for creating a pipeline state object 444 | MTLRenderPipelineDescriptor *pipelineStateDescriptor = [[MTLRenderPipelineDescriptor alloc] init]; 445 | pipelineStateDescriptor.label = pipelineLabel; 446 | pipelineStateDescriptor.vertexFunction = vertexFunction; 447 | pipelineStateDescriptor.fragmentFunction = fragmentFunction; 448 | 449 | for ( int i = 0; i < numAttachments; i++ ) { 450 | pipelineStateDescriptor.colorAttachments[i].pixelFormat = pixelFormat; 451 | } 452 | 453 | NSError *error = NULL; 454 | 455 | id state = [self.device newRenderPipelineStateWithDescriptor:pipelineStateDescriptor 456 | error:&error]; 457 | 458 | if (!state) 459 | { 460 | // Pipeline State creation could fail if we haven't properly set up our pipeline descriptor. 461 | // If the Metal API validation is enabled, we can find out more information about what 462 | // went wrong. 
(Metal API validation is enabled by default when a debug build is run
463 | // from Xcode)
464 | NSLog(@"Failed to create pipeline state, error %@", error);
465 | }
466 |
467 | return state;
468 | }
469 |
470 | // Create a pipeline that executes a compute kernel
471 |
472 | - (id<MTLComputePipelineState>) makePipeline:(MTLPixelFormat)pixelFormat
473 | pipelineLabel:(NSString*)pipelineLabel
474 | kernelFunctionName:(NSString*)kernelFunctionName
475 | {
476 | // Load the kernel function from the library
477 | id<MTLFunction> kernelFunction = [self.defaultLibrary newFunctionWithName:kernelFunctionName];
478 | NSAssert(kernelFunction, @"kernel function \"%@\" could not be loaded", kernelFunctionName);
479 |
480 | NSError *error = NULL;
481 |
482 | id<MTLComputePipelineState> state = [self.device newComputePipelineStateWithFunction:kernelFunction
483 | error:&error];
484 |
485 | if (!state)
486 | {
487 | // Pipeline State creation could fail if we haven't properly set up our pipeline descriptor.
488 | // If the Metal API validation is enabled, we can find out more information about what
489 | // went wrong. (Metal API validation is enabled by default when a debug build is run
490 | // from Xcode)
491 | NSLog(@"Failed to create pipeline state, error %@", error);
492 | }
493 |
494 | return state;
495 | }
496 |
497 | /*
498 | #if TARGET_OS_IPHONE
499 |
500 | - (NSUInteger)highestSupportedFeatureSet
501 | {
502 | const NSUInteger maxKnownFeatureSet = MTLFeatureSet_iOS_GPUFamily2_v1;
503 |
504 | for (int featureSet = maxKnownFeatureSet; featureSet >= 0; --featureSet)
505 | {
506 | if ([self.device supportsFeatureSet:featureSet])
507 | {
508 | return featureSet;
509 | }
510 | }
511 |
512 | return MTLFeatureSet_iOS_GPUFamily1_v1;
513 | }
514 |
515 | - (NSUInteger)featureSetGPUFamily
516 | {
517 | switch (self.highestSupportedFeatureSet)
518 | {
519 | case MTLFeatureSet_iOS_GPUFamily2_v1:
520 | return 2;
521 | case MTLFeatureSet_iOS_GPUFamily1_v1:
522 | default:
523 | return 1;
524 | }
525 | }
526 |
527 | - (BOOL)supportsASTCPixelFormats
528 | {
529 | return (self.featureSetGPUFamily > 1);
530 | }
531 |
532 | #endif // TARGET_OS_IPHONE
533 | */
534 |
535 | @end
536 |
--------------------------------------------------------------------------------
/Renderer/MetalScaleRenderContext.h:
--------------------------------------------------------------------------------
1 | //
2 | // MetalScaleRenderContext.h
3 | //
4 | // Copyright 2019 Mo DeJong.
5 | //
6 | // See LICENSE for terms.
7 | //
8 | // This module will render into an existing MTKView
9 | // in the case where a 2D rescale operation is needed
10 | // to fit the contents of a Metal texture into a view.
11 |
12 | //@import MetalKit;
13 | #include
14 |
15 | @class MetalRenderContext;
16 |
17 | @interface MetalScaleRenderContext : NSObject
18 |
19 | // Name of fragment shader function
20 |
21 | @property (nonatomic, copy) NSString *fragmentFunction;
22 |
23 | // fragment pipeline
24 |
25 | @property (nonatomic, retain) id<MTLRenderPipelineState> pipelineState;
26 |
27 | // Setup render pipeline to render into the given view.
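// A hypothetical usage sketch: create the scale context once, then render
// from the view draw callback (names are illustrative):
//
//   MetalScaleRenderContext *scaleCtx = [[MetalScaleRenderContext alloc] init];
//   [scaleCtx setupRenderPipelines:mrc mtkView:mtkView];
//
//   [scaleCtx renderScaled:mrc
//                  mtkView:mtkView
//              renderWidth:(int)mtkView.drawableSize.width
//             renderHeight:(int)mtkView.drawableSize.height
//            commandBuffer:commandBuffer
//     renderPassDescriptor:mtkView.currentRenderPassDescriptor
//              bgraTexture:decodedTexture];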
28 |
29 | - (void) setupRenderPipelines:(MetalRenderContext*)mrc
30 | mtkView:(MTKView*)mtkView;
31 |
32 | // Render into MTKView with 2D scale operation
33 |
34 | - (void) renderScaled:(MetalRenderContext*)mrc
35 | mtkView:(nonnull MTKView *)mtkView
36 | renderWidth:(int)renderWidth
37 | renderHeight:(int)renderHeight
38 | commandBuffer:(id<MTLCommandBuffer>)commandBuffer
39 | renderPassDescriptor:(MTLRenderPassDescriptor*)renderPassDescriptor
40 | bgraTexture:(id<MTLTexture>)bgraTexture;
41 |
42 | @end
43 |
--------------------------------------------------------------------------------
/Renderer/MetalScaleRenderContext.m:
--------------------------------------------------------------------------------
1 | //
2 | // MetalScaleRenderContext.m
3 | //
4 | // Copyright 2019 Mo DeJong.
5 | //
6 | // See LICENSE for terms.
7 | //
8 | // This module will render into an existing MTKView
9 | // in the case where a 2D rescale operation is needed
10 | // to fit the contents of a Metal texture into a view.
11 |
12 | #include "MetalScaleRenderContext.h"
13 |
14 | // Header shared between C code here, which executes Metal API commands, and .metal files, which
15 | // use these types as input to the shaders
16 | #import "AAPLShaderTypes.h"
17 |
18 | #import "MetalRenderContext.h"
19 |
20 | // Private API
21 |
22 | @interface MetalScaleRenderContext ()
23 |
24 | @end
25 |
26 | // Main class performing the rendering
27 | @implementation MetalScaleRenderContext
28 |
29 | // Setup render pipelines
30 |
31 | - (void) setupRenderPipelines:(MetalRenderContext*)mrc
32 | mtkView:(MTKView*)mtkView
33 | {
34 | NSString *shader = self.fragmentFunction;
35 |
36 | if (shader == nil) {
37 | shader = @"samplingShader";
38 | }
39 |
40 | // Render from BGRA where 4 grayscale values are packed into
41 | // each pixel into BGRA pixels that are expanded to grayscale
42 | // and cropped to the original image dimensions.
43 |
44 | self.pipelineState = [mrc makePipeline:mtkView.colorPixelFormat
45 | pipelineLabel:@"Rescale Pipeline"
46 | numAttachments:1
47 | vertexFunctionName:@"identityVertexShader"
48 | fragmentFunctionName:shader];
49 |
50 | NSAssert(self.pipelineState, @"pipelineState");
51 | }
52 |
53 | // Render into MTKView with 2D scale operation
54 |
55 | - (void) renderScaled:(MetalRenderContext*)mrc
56 | mtkView:(nonnull MTKView *)mtkView
57 | renderWidth:(int)renderWidth
58 | renderHeight:(int)renderHeight
59 | commandBuffer:(id<MTLCommandBuffer>)commandBuffer
60 | renderPassDescriptor:(MTLRenderPassDescriptor*)renderPassDescriptor
61 | bgraTexture:(id<MTLTexture>)bgraTexture
62 | {
63 | #if defined(DEBUG)
64 | assert(mtkView);
65 | assert(renderWidth > 0);
66 | assert(renderHeight > 0);
67 | assert(mrc);
68 | assert(commandBuffer);
69 | assert(bgraTexture);
70 | #endif // DEBUG
71 |
72 | if (renderPassDescriptor != nil)
73 | {
74 | // Create a render command encoder so we can render into something
75 | id<MTLRenderCommandEncoder> renderEncoder =
76 | [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
77 | renderEncoder.label = @"RescaleRender";
78 |
79 | // Set the region of the drawable to which we'll draw.
80 | [renderEncoder setViewport:(MTLViewport){0.0, 0.0, renderWidth, renderHeight, -1.0, 1.0 }];
81 |
82 | id<MTLRenderPipelineState> pipeline = self.pipelineState;
83 | [renderEncoder setRenderPipelineState:pipeline];
84 |
85 | [renderEncoder setVertexBuffer:mrc.identityVerticesBuffer
86 | offset:0
87 | atIndex:0];
88 |
89 | // Set the texture object.
The AAPLTextureIndexBaseColor enum value corresponds 90 | /// to the 'colorMap' argument in our 'samplingShader' function because its 91 | // texture attribute qualifier also uses AAPLTextureIndexBaseColor for its index 92 | [renderEncoder setFragmentTexture:bgraTexture 93 | atIndex:AAPLTextureIndexBaseColor]; 94 | 95 | // Draw the vertices of our triangles 96 | [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangle 97 | vertexStart:0 98 | vertexCount:mrc.identityNumVertices]; 99 | 100 | [renderEncoder endEncoding]; 101 | 102 | // Schedule a present once the framebuffer is complete using the current drawable 103 | [commandBuffer presentDrawable:mtkView.currentDrawable]; 104 | } 105 | } 106 | 107 | @end 108 | -------------------------------------------------------------------------------- /Renderer/QuickTime_Test_Pattern_HD.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/QuickTime_Test_Pattern_HD.mov -------------------------------------------------------------------------------- /Renderer/QuickTime_Test_Pattern_HD_calibrated_RGB.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/QuickTime_Test_Pattern_HD_calibrated_RGB.png -------------------------------------------------------------------------------- /Renderer/QuickTime_Test_Pattern_HD_grayscale.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/QuickTime_Test_Pattern_HD_grayscale.m4v -------------------------------------------------------------------------------- /Renderer/QuickTime_Test_Pattern_HD_sRGB.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/QuickTime_Test_Pattern_HD_sRGB.png -------------------------------------------------------------------------------- /Renderer/QuickTime_Test_Pattern_SD.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/QuickTime_Test_Pattern_SD.mov -------------------------------------------------------------------------------- /Renderer/Rec709Sample.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/Rec709Sample.mp4 -------------------------------------------------------------------------------- /Renderer/RedCircleOverWhiteA.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/RedCircleOverWhiteA.m4v -------------------------------------------------------------------------------- /Renderer/RedCircleOverWhiteA_alpha.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/RedCircleOverWhiteA_alpha.m4v 
-------------------------------------------------------------------------------- /Renderer/RedFadeAlpha256.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/RedFadeAlpha256.m4v -------------------------------------------------------------------------------- /Renderer/RedFadeAlpha256_alpha.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/RedFadeAlpha256_alpha.m4v -------------------------------------------------------------------------------- /Renderer/WhitePer5.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/WhitePer5.m4v -------------------------------------------------------------------------------- /Renderer/WhitePer50.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/WhitePer50.m4v -------------------------------------------------------------------------------- /Renderer/WhitePer50_alpha.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/WhitePer50_alpha.m4v -------------------------------------------------------------------------------- /Renderer/WhitePer5_alpha.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/WhitePer5_alpha.m4v -------------------------------------------------------------------------------- /Renderer/big_buck_bunny_HD_apple.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/big_buck_bunny_HD_apple.m4v -------------------------------------------------------------------------------- /Renderer/big_buck_bunny_HD_srgb.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/big_buck_bunny_HD_srgb.m4v -------------------------------------------------------------------------------- /Renderer/clouds_reflecting_off_the_beach-wallpaper-2048x1536.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/clouds_reflecting_off_the_beach-wallpaper-2048x1536.jpg -------------------------------------------------------------------------------- /Renderer/clouds_reflecting_off_the_beach-wallpaper-2048x1536.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/clouds_reflecting_off_the_beach-wallpaper-2048x1536.m4v -------------------------------------------------------------------------------- /Renderer/drop-of-water-iPad-2048-1536-apple-crf20.m4v: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/drop-of-water-iPad-2048-1536-apple-crf20.m4v -------------------------------------------------------------------------------- /Renderer/drop-of-water-iPad-2048-1536-sRGB-crf20.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/drop-of-water-iPad-2048-1536-sRGB-crf20.m4v -------------------------------------------------------------------------------- /Renderer/osxcolor_test_image_24bit_BT709.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/osxcolor_test_image_24bit_BT709.m4v -------------------------------------------------------------------------------- /Renderer/osxcolor_test_image_iPad_2048_1536.m4v: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mdejong/MetalBT709Decoder/b736e6199850b4cb01949baab77a3112a969a049/Renderer/osxcolor_test_image_iPad_2048_1536.m4v -------------------------------------------------------------------------------- /Renderer/sRGB.h: -------------------------------------------------------------------------------- 1 | // 2 | // sRGB.h 3 | // 4 | // Created by Moses DeJong on 12/14/18. 5 | // 6 | // Header only interface to mapping for sRGB to linear 7 | // and sRGB to XYZ. 8 | // 9 | // http://www.color.org/srgb.pdf 10 | // 11 | // Licensed under BSD terms. 12 | 13 | #if !defined(_SRGB_H) 14 | #define _SRGB_H 15 | 16 | // saturate limits the range to [0.0, 1.0] 17 | 18 | static inline 19 | float saturatef(float v) { 20 | if (v < 0.0f) { 21 | return 0.0f; 22 | } 23 | if (v > 1.0f) { 24 | return 1.0f; 25 | } 26 | return v; 27 | } 28 | 29 | // Convert a byte range integer [0 255] into a normalized float 30 | // using a multiply operation. 31 | 32 | static inline 33 | float byteNorm(int val) 34 | { 35 | return val * (1.0f/255.0f); 36 | } 37 | 38 | // sRGB 39 | 40 | // Convert a non-linear log value to a linear value. 41 | // Note that normV must be normalized in the range [0.0 1.0]. 42 | 43 | static inline 44 | float sRGB_nonLinearNormToLinear(float normV) 45 | { 46 | if (normV <= 0.04045f) { 47 | normV *= (1.0f / 12.92f); 48 | } else { 49 | const float a = 0.055f; 50 | const float gamma = 2.4f; 51 | //const float gamma = 1.0f / (1.0f / 2.4f); 52 | normV = (normV + a) * (1.0f / (1.0f + a)); 53 | normV = pow(normV, gamma); 54 | } 55 | 56 | return normV; 57 | } 58 | 59 | // Convert a linear log value to a non-linear value. 60 | // Note that normV must be normalized in the range [0.0 1.0] 61 | 62 | static inline 63 | float sRGB_linearNormToNonLinear(float normV) { 64 | 65 | if (normV <= 0.0031308f) { 66 | normV *= 12.92f; 67 | } else { 68 | const float a = 0.055f; 69 | const float gamma = 1.0f / 2.4f; // 0.4166... 
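// Spot check, assuming the standard sRGB constants above: a linear value
// of 0.5 maps to approximately 0.7354 here, and sRGB_nonLinearNormToLinear()
// maps 0.7354 back to approximately 0.5. The two branches agree at the
// crossover point, since 0.0031308 * 12.92 ~= 0.04045.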
70 | normV = (1.0f + a) * pow(normV, gamma) - a; 71 | } 72 | 73 | return normV; 74 | } 75 | 76 | // sRGB to XYZ colorspace conversion (CIE 1931) 77 | 78 | static inline 79 | int sRGB_convertRGBToXYZ( 80 | int R, 81 | int G, 82 | int B, 83 | float *XPtr, 84 | float *YPtr, 85 | float *ZPtr, 86 | int applyGammaMap) 87 | { 88 | const int debug = 0; 89 | 90 | #if defined(DEBUG) 91 | assert(XPtr); 92 | assert(YPtr); 93 | assert(ZPtr); 94 | 95 | assert(R >= 0 && R <= 255); 96 | assert(G >= 0 && G <= 255); 97 | assert(B >= 0 && B <= 255); 98 | #endif // DEBUG 99 | 100 | if (debug) { 101 | printf("R G B : %3d %3d %3d\n", R, G, B); 102 | } 103 | 104 | // Normalize 105 | 106 | float Rn = byteNorm(R); 107 | float Gn = byteNorm(G); 108 | float Bn = byteNorm(B); 109 | 110 | // Convert non-linear sRGB to linear 111 | 112 | if (applyGammaMap) { 113 | 114 | if (debug) { 115 | printf("pre to linear Rn Gn Bn : %.4f %.4f %.4f\n", Rn, Gn, Bn); 116 | } 117 | 118 | Rn = sRGB_nonLinearNormToLinear(Rn); 119 | Gn = sRGB_nonLinearNormToLinear(Gn); 120 | Bn = sRGB_nonLinearNormToLinear(Bn); 121 | 122 | if (debug) { 123 | printf("post to linear Rn Gn Bn : %.4f %.4f %.4f\n", Rn, Gn, Bn); 124 | } 125 | } 126 | 127 | if (debug) { 128 | printf("Rn %.4f\n", Rn); 129 | printf("Gn %.4f\n", Gn); 130 | printf("Bn %.4f\n", Bn); 131 | } 132 | 133 | // sRGB -> XYZ 134 | 135 | const 136 | float Matrix[] = { 137 | 0.4124f, 0.3576f, 0.1805f, 138 | 0.2126f, 0.7152f, 0.0722f, 139 | 0.0193f, 0.1192f, 0.9505f 140 | }; 141 | 142 | if (debug) { 143 | printf("Mat 3x3:\n"); 144 | printf("%.4f %.4f %.4f\n", Matrix[0], Matrix[1], Matrix[2]); 145 | printf("%.4f %.4f %.4f\n", Matrix[3], Matrix[4], Matrix[5]); 146 | printf("%.4f %.4f %.4f\n", Matrix[6], Matrix[7], Matrix[8]); 147 | } 148 | 149 | // Matrix multiply operation 150 | // 151 | // rgb = Matrix * (Rn,Gn,Bn) 152 | 153 | // Convert input X, Y, Z float values 154 | 155 | float X = (Rn * Matrix[0]) + (Gn * Matrix[1]) + (Bn * Matrix[2]); 156 | float Y = (Rn * Matrix[3]) + (Gn * Matrix[4]) + (Bn * Matrix[5]); 157 | float Z = (Rn * Matrix[6]) + (Gn * Matrix[7]) + (Bn * Matrix[8]); 158 | 159 | if (debug) { 160 | printf("X %.4f\n", X); 161 | printf("Y %.4f\n", Y); 162 | printf("Z %.4f\n", Z); 163 | } 164 | 165 | // Scale in terms of (0.9505, 1.0000, 1.08899) 166 | 167 | X *= (1.0f / 0.9505f); 168 | //Y *= 1.0f; 169 | Z *= (1.0f / 1.08899f); 170 | 171 | if (debug) { 172 | printf("scaled to whitepoint\n"); 173 | 174 | printf("X %.4f\n", X); 175 | printf("Y %.4f\n", Y); 176 | printf("Z %.4f\n", Z); 177 | } 178 | 179 | X = saturatef(X); 180 | Y = saturatef(Y); 181 | Z = saturatef(Z); 182 | 183 | // Return float value in XYZ linear colorspace 184 | 185 | *XPtr = X; 186 | *YPtr = Y; 187 | *ZPtr = Z; 188 | 189 | return 0; 190 | } 191 | 192 | // Convert from XYZ (linear gamma) to sRGB (pow gamma) 193 | 194 | static inline 195 | int sRGB_convertXYZToRGB( 196 | float X, 197 | float Y, 198 | float Z, 199 | int *RPtr, 200 | int *GPtr, 201 | int *BPtr, 202 | int applyGammaMap) 203 | { 204 | const int debug = 0; 205 | 206 | #if defined(DEBUG) 207 | assert(RPtr); 208 | assert(GPtr); 209 | assert(BPtr); 210 | #endif // DEBUG 211 | 212 | if (debug) { 213 | printf("X Y Z : %.3f %.3f %.3f\n", X, Y, Z); 214 | } 215 | 216 | // Since XYZ colorspace is always linear, no gamma correction 217 | // step is needed before processing values. 
218 | 219 | // Undo Scale in terms of (0.9505, 1.0000, 1.08899) 220 | 221 | X *= 0.9505f; 222 | //Y *= 1.0f; 223 | Z *= 1.08899f; 224 | 225 | // XYZ -> sRGB 226 | // http://www.ryanjuckett.com/programming/rgb-color-space-conversion/ 227 | 228 | const 229 | float Matrix[3*3] = { 230 | 3.2410f, -1.5374f, -0.4986f, 231 | -0.9692f, 1.8760f, 0.0416f, 232 | 0.0556f, -0.2040f, 1.0570f 233 | }; 234 | 235 | if (debug) { 236 | printf("Mat 3x3:\n"); 237 | printf("%.7f %.7f %.7f\n", Matrix[0], Matrix[1], Matrix[2]); 238 | printf("%.7f %.7f %.7f\n", Matrix[3], Matrix[4], Matrix[5]); 239 | printf("%.7f %.7f %.7f\n", Matrix[6], Matrix[7], Matrix[8]); 240 | } 241 | 242 | // Matrix multiply operation 243 | // 244 | // rgb = Matrix * (Xn,Yn,Zn) 245 | 246 | // Convert input Y, Cb, Cr to normalized float values 247 | 248 | float Rn = (X * Matrix[0]) + (Y * Matrix[1]) + (Z * Matrix[2]); 249 | float Gn = (X * Matrix[3]) + (Y * Matrix[4]) + (Z * Matrix[5]); 250 | float Bn = (X * Matrix[6]) + (Y * Matrix[7]) + (Z * Matrix[8]); 251 | 252 | if (debug) { 253 | printf("unclamped:\n"); 254 | printf("Rn %.4f\n", Rn); 255 | printf("Gn %.4f\n", Gn); 256 | printf("Bn %.4f\n", Bn); 257 | } 258 | 259 | // Saturate limits range to [0.0, 1.0] 260 | 261 | Rn = saturatef(Rn); 262 | Gn = saturatef(Gn); 263 | Bn = saturatef(Bn); 264 | 265 | // Convert linear RGB to sRGB log space 266 | 267 | if (applyGammaMap) { 268 | // Adjust int values in the input range to gamma mapping over same range 269 | 270 | if (debug) { 271 | printf("pre to non-linear Rn Gn Bn : %.4f %.4f %.4f\n", Rn, Gn, Bn); 272 | } 273 | 274 | Rn = sRGB_linearNormToNonLinear(Rn); 275 | Gn = sRGB_linearNormToNonLinear(Gn); 276 | Bn = sRGB_linearNormToNonLinear(Bn); 277 | 278 | if (debug) { 279 | printf("post to non-linear Rn Gn Bn : %.4f %.4f %.4f\n", Rn, Gn, Bn); 280 | } 281 | } 282 | 283 | // Round to nearest int value in range [0, 255] 284 | 285 | int R = (int) round(Rn * 255.0f); 286 | int G = (int) round(Gn * 255.0f); 287 | int B = (int) round(Bn * 255.0f); 288 | 289 | if (debug) { 290 | printf("scaled up to byte range:\n"); 291 | printf("Rn %.4f\n", Rn * 255.0f); 292 | printf("Gn %.4f\n", Gn * 255.0f); 293 | printf("Bn %.4f\n", Bn * 255.0f); 294 | 295 | printf("rounded to int:\n"); 296 | printf("X %3d\n", R); 297 | printf("Y %3d\n", G); 298 | printf("Z %3d\n", B); 299 | } 300 | 301 | assert(R >= 0 && R <= 255); 302 | assert(G >= 0 && G <= 255); 303 | assert(B >= 0 && B <= 255); 304 | 305 | *RPtr = R; 306 | *GPtr = G; 307 | *BPtr = B; 308 | 309 | return 0; 310 | } 311 | 312 | #endif // _SRGB_H 313 | -------------------------------------------------------------------------------- /Renderer/y4m_writer.h: -------------------------------------------------------------------------------- 1 | // 2 | // y4m_writer.h 3 | // 4 | // Created by Moses DeJong on 12/14/18. 5 | // 6 | // Header only interface that supports writing 7 | // a Y4M file that contains tagged YUV bytes 8 | // in 4:2:0 format. 9 | // 10 | // Licensed under BSD terms. 11 | 12 | #if !defined(_Y4M_WRITER_H) 13 | #define _Y4M_WRITER_H 14 | 15 | #include 16 | 17 | typedef enum { 18 | Y4MHeaderFPS_1, 19 | Y4MHeaderFPS_15, 20 | Y4MHeaderFPS_24, 21 | Y4MHeaderFPS_25, 22 | Y4MHeaderFPS_29_97, 23 | Y4MHeaderFPS_30, 24 | Y4MHeaderFPS_60 25 | } Y4MHeaderFPS; 26 | 27 | typedef struct { 28 | int width; 29 | int height; 30 | Y4MHeaderFPS fps; 31 | } Y4MHeaderStruct; 32 | 33 | // Emit a single frame to the output Y4M file. 
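// For reference, the stream header emitted by y4m_write_header() below for
// a 1920x1080 stream at Y4MHeaderFPS_30 is:
//
//   YUV4MPEG2 W1920 H1080 F30:1 Ip A1:1 C420jpeg
//   XYSCSS=420JPEG
//
// Each frame is then the marker "FRAME\n" followed by the packed Y, U and V
// planes; for 4:2:0 data yLen == W*H and uLen == vLen == (W/2)*(H/2).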
34 | 35 | typedef struct { 36 | uint8_t *yPtr; 37 | int yLen; 38 | 39 | uint8_t *uPtr; 40 | int uLen; 41 | 42 | uint8_t *vPtr; 43 | int vLen; 44 | } Y4MFrameStruct; 45 | 46 | // Open output Y4M file descriptor with binary setting 47 | 48 | static inline 49 | FILE* y4m_open_file(const char *outFilePath) { 50 | FILE *outFile = fopen(outFilePath, "wb"); 51 | 52 | if (outFile == NULL) { 53 | fprintf(stderr, "could not open output Y4M file \"%s\"\n", outFilePath); 54 | } 55 | 56 | return outFile; 57 | } 58 | 59 | // Emit header given the options indicated in header 60 | 61 | static inline 62 | int y4m_write_header(FILE *outFile, Y4MHeaderStruct *hsPtr) { 63 | { 64 | char *segment = "YUV4MPEG2 "; 65 | int segmentLen = (int) strlen(segment); 66 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 67 | if (numWritten != 1) { 68 | return 2; 69 | } 70 | } 71 | 72 | { 73 | int width = hsPtr->width; 74 | NSString *formatted = [NSString stringWithFormat:@"W%d ", width]; 75 | char *segment = (char*) [formatted UTF8String]; 76 | int segmentLen = (int) strlen(segment); 77 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 78 | if (numWritten != 1) { 79 | return 2; 80 | } 81 | } 82 | 83 | { 84 | int height = hsPtr->height; 85 | NSString *formatted = [NSString stringWithFormat:@"H%d ", height]; 86 | char *segment = (char*) [formatted UTF8String]; 87 | int segmentLen = (int) strlen(segment); 88 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 89 | if (numWritten != 1) { 90 | return 2; 91 | } 92 | } 93 | 94 | // Framerate : 95 | // 'F30:1' = 30 FPS 96 | // 'F30000:1001' = 29.97 FPS 97 | // '1:1' = 1 FPS 98 | 99 | { 100 | //char *segment = "F30:1 "; 101 | char *segment; 102 | 103 | switch (hsPtr->fps) { 104 | case Y4MHeaderFPS_1: { 105 | segment = "F1:1 "; 106 | break; 107 | } 108 | case Y4MHeaderFPS_15: { 109 | segment = "F15:1 "; 110 | break; 111 | } 112 | case Y4MHeaderFPS_24: { 113 | segment = "F24:1 "; 114 | break; 115 | } 116 | case Y4MHeaderFPS_25: { 117 | segment = "F25:1 "; 118 | break; 119 | } 120 | case Y4MHeaderFPS_29_97: { 121 | // 29.97 standard video rate 122 | segment = "F30000:1001 "; 123 | break; 124 | } 125 | case Y4MHeaderFPS_30: { 126 | segment = "F30:1 "; 127 | break; 128 | } 129 | case Y4MHeaderFPS_60: { 130 | segment = "F60:1 "; 131 | break; 132 | } 133 | default: { 134 | assert(0); 135 | return 3; 136 | break; 137 | } 138 | } 139 | 140 | int segmentLen = (int) strlen(segment); 141 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 142 | if (numWritten != 1) { 143 | return 2; 144 | } 145 | } 146 | 147 | // interlacing progressive 148 | 149 | { 150 | char *segment = "Ip "; 151 | int segmentLen = (int) strlen(segment); 152 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 153 | if (numWritten != 1) { 154 | return 2; 155 | } 156 | } 157 | 158 | // Pixel aspect ratio 159 | 160 | { 161 | char *segment = "A1:1 "; 162 | int segmentLen = (int) strlen(segment); 163 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 164 | if (numWritten != 1) { 165 | return 2; 166 | } 167 | } 168 | 169 | // Colour space = 4:2:0 subsampling 170 | 171 | { 172 | char *segment = "C420jpeg\n"; 173 | int segmentLen = (int) strlen(segment); 174 | int numWritten = (int) fwrite(segment, segmentLen, 1, outFile); 175 | if (numWritten != 1) { 176 | return 2; 177 | } 178 | } 179 | 180 | // Comment 181 | 182 | { 183 | char *segment = "XYSCSS=420JPEG\n"; 184 | int segmentLen = (int) strlen(segment); 185 | int numWritten = (int) 
186 |     if (numWritten != 1) {
187 |       return 2;
188 |     }
189 |   }
190 | 
191 |   return 0;
192 | }
193 | 
194 | static inline
195 | int y4m_write_frame(FILE *outFile, Y4MFrameStruct *fsPtr) {
196 |   // FRAME marker
197 | 
198 |   {
199 |     char *segment = "FRAME\n";
200 |     int segmentLen = (int) strlen(segment);
201 |     int numWritten = (int) fwrite(segment, segmentLen, 1, outFile);
202 |     if (numWritten != 1) {
203 |       return 2;
204 |     }
205 |   }
206 | 
207 |   // Y
208 | 
209 |   {
210 |     uint8_t *segment = (uint8_t *) fsPtr->yPtr;
211 |     int segmentLen = (int) fsPtr->yLen;
212 |     int numWritten = (int) fwrite(segment, segmentLen, 1, outFile);
213 |     if (numWritten != 1) {
214 |       return 2;
215 |     }
216 |   }
217 | 
218 |   // U
219 | 
220 |   {
221 |     uint8_t *segment = (uint8_t *) fsPtr->uPtr;
222 |     int segmentLen = (int) fsPtr->uLen;
223 |     int numWritten = (int) fwrite(segment, segmentLen, 1, outFile);
224 |     if (numWritten != 1) {
225 |       return 2;
226 |     }
227 |   }
228 | 
229 |   // V
230 | 
231 |   {
232 |     uint8_t *segment = (uint8_t *) fsPtr->vPtr;
233 |     int segmentLen = (int) fsPtr->vLen;
234 |     int numWritten = (int) fwrite(segment, segmentLen, 1, outFile);
235 |     if (numWritten != 1) {
236 |       return 2;
237 |     }
238 |   }
239 | 
240 |   return 0;
241 | }
242 | 
243 | #endif // _Y4M_WRITER_H
244 | 
--------------------------------------------------------------------------------
/gamma_write/gamma_write.m:
--------------------------------------------------------------------------------
1 | //
2 | // gamma_write.m
3 | //
4 | // Created by Mo DeJong on 12/31/18.
5 | //
6 | // Command line utility to emit a test image
7 | // that can be passed to the H264 encoder to
8 | // verify that encoding is working properly.
9 | // This test image is specific to the 4:2:0
10 | // YCbCr format since it emits blocks of
11 | // 2x2 pixels with a known grayscale value. This is
12 | // so that adjacent pixels do not change the Cb or
13 | // Cr value of a neighboring pixel, which would
14 | // change the encoded Y value.
15 | 
16 | #import <Foundation/Foundation.h> // framework imports assumed; angle-bracket targets missing from the dump
17 | #import <CoreGraphics/CoreGraphics.h>
18 | 
19 | #import <ImageIO/ImageIO.h>
20 | 
21 | #import <CoreVideo/CoreVideo.h>
22 | 
23 | #import "H264Encoder.h"
24 | 
25 | #import "CGFrameBuffer.h"
26 | #import "BGRAToBT709Converter.h"
27 | 
28 | #import "BGDecodeEncode.h"
29 | 
30 | #import "BT709.h"
31 | 
32 | typedef struct {
33 |   int fps;
34 | } ConfigurationStruct;
35 | 
36 | void usage() {
37 |   printf("gamma_write OUT.png\n");
38 |   fflush(stdout);
39 | }
40 | 
41 | // Helper class
42 | 
43 | @interface EncoderImpl : NSObject
44 | 
45 | //- (CGImageRef) imageForFrame:(int)frameNum;
46 | //- (BOOL) hasMoreFrames;
47 | 
48 | @property (nonatomic, assign) int frameNum;
49 | 
50 | // Array of CGImageRef
51 | 
52 | @property (nonatomic, retain) NSMutableArray *frames;
53 | 
54 | @end
55 | 
56 | @implementation EncoderImpl
57 | 
58 | // Provide frames for H264 encoder interface
59 | 
60 | - (CGImageRef) imageForFrame:(int)frameNum
61 | {
62 |   CGImageRef imageRef = (__bridge CGImageRef) [self.frames objectAtIndex:frameNum];
63 | 
64 |   self.frameNum = self.frameNum + 1; // advance the counter consulted by hasMoreFrames
65 | 
66 |   return imageRef;
67 | }
68 | 
69 | // Return TRUE if more frames can be returned by this frame source,
70 | // returning FALSE means that all frames have been encoded.
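// (A hypothetical sketch of the polling contract, not original source; the
// real H264Encoder API is declared in H264Encoder.h. An encoder is assumed
// to alternate the two calls until this method returns FALSE:)
//
//   int frameNum = 0;
//   while ([source hasMoreFrames]) {
//     CGImageRef imageRef = [source imageForFrame:frameNum++];
//     // submit imageRef to the encoder session here
//   }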
71 | 
72 | - (BOOL) hasMoreFrames
73 | {
74 |   if (self.frameNum < self.frames.count) {
75 |     return TRUE;
76 |   } else {
77 |     return FALSE;
78 |   }
79 | }
80 | 
81 | - (void)encoderResult:(H264EncoderErrorCode)code {
82 |   NSLog(@"encoderResult : %@", [H264Encoder ErrorCodeToString:code]);
83 | }
84 | 
85 | // Emit an array of float data as a CSV file. The labels
86 | // should be NSStrings; they define the column
87 | // headers emitted in the first row.
88 | 
89 | + (BOOL) writeTableToCSV:(NSString*)filename
90 |                labelsArr:(NSArray*)labelsArr
91 |                valuesArr:(NSArray*)valuesArr
92 | {
93 |   //NSString *tmpDir = NSTemporaryDirectory();
94 |   NSString *dirName = [[NSFileManager defaultManager] currentDirectoryPath];
95 |   NSString *path = [dirName stringByAppendingPathComponent:filename];
96 |   FILE *csvFile = fopen([path UTF8String], "w");
97 |   if (csvFile == NULL) {
98 |     return FALSE;
99 |   }
100 | 
101 |   int numColumns = (int) [labelsArr count];
102 | 
103 |   for (int i = 0; i < numColumns; i++) {
104 |     NSString *label = labelsArr[i];
105 |     fprintf(csvFile, "%s", [label UTF8String]);
106 | 
107 |     if (i < (numColumns - 1)) {
108 |       fprintf(csvFile, ",");
109 |     }
110 |   }
111 |   fprintf(csvFile, "\n");
112 | 
113 |   for ( NSArray *tuple in valuesArr ) {
114 |     for (int i = 0; i < numColumns; i++) {
115 |       float v = [tuple[i] floatValue];
116 | 
117 |       fprintf(csvFile, "%.4f", v);
118 | 
119 |       if (i < (numColumns - 1)) {
120 |         fprintf(csvFile, ",");
121 |       }
122 |     }
123 |     fprintf(csvFile, "\n");
124 |     //
125 |     // float y1 = [tuple[0] floatValue];
126 |     // float y2 = [tuple[1] floatValue];
127 |     //
128 |     // NSLog(@"[%4d] (y1, y2) = %.4f,%.4f", x, y1, y2);
129 |     // x++;
130 |     //
131 |     // fprintf(csvFile, "%.4f,%.4f\n", y1, y2);
132 |   }
133 | 
134 |   fclose(csvFile);
135 |   NSLog(@"wrote %@", path);
136 |   return TRUE;
137 | }
138 | 
139 | // Util methods to allocate a tmp buffer to hold pixel data
140 | // and render into a new CGFrameBuffer object.
141 | 
142 | + (CGFrameBuffer*) convertFromColorspaceToColorspace:(CGImageRef)inImage
143 |                                                  bpp:(int)bpp
144 |                                  convertToColorspace:(CGColorSpaceRef)convertToColorspace
145 | {
146 |   int width = (int) CGImageGetWidth(inImage);
147 |   int height = (int) CGImageGetHeight(inImage);
148 | 
149 |   CGFrameBuffer *convertedFB = [CGFrameBuffer cGFrameBufferWithBppDimensions:bpp width:width height:height];
150 | 
151 |   //CGColorSpaceRef cs = CGColorSpaceCreateWithName(convertToColorspace);
152 |   convertedFB.colorspace = convertToColorspace;
153 |   //CGColorSpaceRelease(convertToColorspace);
154 | 
155 |   BOOL worked = [convertedFB renderCGImage:inImage];
156 |   NSAssert(worked, @"renderCGImage");
157 | 
158 |   if (!worked) {
159 |     return nil;
160 |   }
161 | 
162 |   return convertedFB;
163 | }
164 | 
165 | // Convert pixels in one colorspace into a second colorspace; in the
166 | // event that the pixels share the same white point and color bounds
167 | // this invocation will only adjust the gamma.
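// (A minimal usage sketch, not original source: sRGB and BT.709 share the
// D65 white point and the same primaries, so a conversion between them
// through this helper only re-encodes the transfer function. srgbFB below
// is a hypothetical framebuffer already tagged as sRGB:)
//
//   CGColorSpaceRef bt709cs = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709);
//   CGFrameBuffer *bt709FB = [EncoderImpl convertFromColorspaceToColorspace:srgbFB
//                                                        convertToColorspace:bt709cs];
//   CGColorSpaceRelease(bt709cs);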
168 | 
169 | + (CGFrameBuffer*) convertFromColorspaceToColorspace:(CGFrameBuffer*)inFB
170 |                                  convertToColorspace:(CGColorSpaceRef)convertToColorspace
171 | {
172 |   int width = (int) inFB.width;
173 |   int height = (int) inFB.height;
174 | 
175 |   CGFrameBuffer *convertedFB = [CGFrameBuffer cGFrameBufferWithBppDimensions:inFB.bitsPerPixel width:width height:height];
176 | 
177 |   //CGColorSpaceRef cs = CGColorSpaceCreateWithName(convertToColorspace);
178 |   convertedFB.colorspace = convertToColorspace;
179 |   //CGColorSpaceRelease(convertToColorspace);
180 | 
181 |   CGImageRef inFBImageRef = [inFB createCGImageRef];
182 | 
183 |   [convertedFB renderCGImage:inFBImageRef];
184 | 
185 |   CGImageRelease(inFBImageRef);
186 | 
187 |   return convertedFB;
188 | }
189 | 
190 | @end
191 | 
192 | // Load PNG from the filesystem
193 | 
194 | CGImageRef makeImageFromFile(NSString *filenameStr)
195 | {
196 |   CGImageSourceRef sourceRef;
197 |   CGImageRef imageRef;
198 | 
199 |   NSData *image_data = [NSData dataWithContentsOfFile:filenameStr];
200 |   if (image_data == nil) {
201 |     fprintf(stderr, "can't read image data from file \"%s\"\n", [filenameStr UTF8String]);
202 |     exit(1);
203 |   }
204 | 
205 |   // Create image object from src image data.
206 | 
207 |   sourceRef = CGImageSourceCreateWithData((__bridge CFDataRef)image_data, NULL);
208 | 
209 |   // Make sure the image source exists before continuing
210 | 
211 |   if (sourceRef == NULL) {
212 |     fprintf(stderr, "can't create image data from file \"%s\"\n", [filenameStr UTF8String]);
213 |     exit(1);
214 |   }
215 | 
216 |   // Create an image from the first item in the image source.
217 | 
218 |   imageRef = CGImageSourceCreateImageAtIndex(sourceRef, 0, NULL);
219 | 
220 |   CFRelease(sourceRef);
221 | 
222 |   return imageRef;
223 | }
224 | 
225 | int process(NSString *outPNGStr, ConfigurationStruct *configSPtr) {
226 |   // Generate the test image
227 | 
228 |   int genWidth = 2 * 256;
229 | 
230 |   if ((1)) {
231 |     // Generate an image that contains all the grayscale values in linear
232 |     // RGB and then map these values to gamma adjusted values in the BT.709 space
233 | 
234 |     int width = 1920;
235 |     int height = 1080;
236 | 
237 |     // When the Apple supplied BT.709 colorspace is used and every grayscale
238 |     // input value is written into the output, the gamma adjustment in
239 |     // converting from this colorspace to the linear colorspace can be
240 |     // determined by graphing the gamma adjustment.
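    // (Aside, not original source: a pure 1.961 power curve, which the
    // graphed mapping is compared against below, would look like this;
    // the real helpers are Apple196_linearNormToNonLinear() and
    // BT709_linearNormToNonLinear() from BT709.h:)
    //
    //   encoded = powf(linear, 1.0f / 1.961f);  // linear -> gamma adjusted
    //   linear  = powf(encoded, 1.961f);        // gamma adjusted -> linear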
241 | 
242 |     // Mapping each value in this colorspace to linear seems to make use
243 |     // of a gamma = 1.961
244 | 
245 |     CGFrameBuffer *identityFB = [CGFrameBuffer cGFrameBufferWithBppDimensions:24 width:width height:height];
246 | 
247 |     CGColorSpaceRef cs = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
248 |     //CGColorSpaceRef cs = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709);
249 |     identityFB.colorspace = cs;
250 |     CGColorSpaceRelease(cs);
251 | 
252 |     uint32_t *pixelsPtr = (uint32_t *) identityFB.pixels;
253 | 
254 |     const int dumpGrayOut = 0;
255 | 
256 |     if (dumpGrayOut) {
257 |       printf("dumpGrayOut:\n");
258 |     }
259 | 
260 |     // for (int row = 0; row < height; row++) {
261 |     //   for (int col = 0; col < width; col++) {
262 |     //     int offset = (row * width) + col;
263 |     //     int rd2 = row / 2;
264 |     //     int cd2 = col / 2;
265 |     //     int GForPixel = (rd2 * width/2) + cd2;
266 |     //     uint32_t G = GForPixel & 0xFF;
267 |     //     uint32_t grayPixel = (0xFF << 24) | (G << 16) | (G << 8) | (G);
268 |     //     pixelsPtr[offset] = grayPixel;
269 |     //
270 |     //     if (dumpGrayOut) {
271 |     //       printf("%3d ", G);
272 |     //     }
273 |     //   }
274 |     //
275 |     //   if (dumpGrayOut) {
276 |     //     printf("\n");
277 |     //   }
278 |     // }
279 | 
280 |     for (int row = 0; row < height; row++) {
281 |       for (int col = 0; col < width; col++) {
282 |         int offset = (row * width) + col;
283 |         //int rd2 = row / 2;
284 |         int cd2 = col / 2;
285 |         int GForPixel = cd2;
286 |         uint32_t G = GForPixel & 0xFF;
287 |         uint32_t grayPixel = (0xFF << 24) | (G << 16) | (G << 8) | (G);
288 |         pixelsPtr[offset] = grayPixel;
289 | 
290 |         if (dumpGrayOut) {
291 |           printf("%3d ", G);
292 |         }
293 |       }
294 | 
295 |       if (dumpGrayOut) {
296 |         printf("\n");
297 |       }
298 |     }
299 | 
300 |     if ((0)) {
301 |       // Emit png with linear colorspace
302 | 
303 |       NSString *filename = [NSString stringWithFormat:@"TestHDFirst20PerSRGB.png"];
304 |       //NSString *tmpDir = NSTemporaryDirectory();
305 |       NSString *dirName = [[NSFileManager defaultManager] currentDirectoryPath];
306 |       NSString *path = [dirName stringByAppendingPathComponent:filename];
307 |       NSData *pngData = [identityFB formatAsPNG];
308 | 
309 |       BOOL worked = [pngData writeToFile:path atomically:TRUE];
310 |       assert(worked);
311 | 
312 |       NSLog(@"wrote %@", path);
313 |     }
314 | 
315 |     // Convert identity grayscale values to sRGB gamma adjusted values
316 |     // and emit as a PNG. This set of gamma adjusted values can be
317 |     // compared to the known identity values for [0, 255] to see
318 |     // how large the gamma shift was.
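    // (Aside, not original source: the sRGB transfer function referenced
    // here is the standard piecewise curve, roughly gamma 2.2 overall,
    // with a linear toe near black:)
    //
    //   if (v <= 0.0031308f) {
    //     encoded = 12.92f * v;
    //   } else {
    //     encoded = 1.055f * powf(v, 1.0f / 2.4f) - 0.055f;
    //   }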
319 | 
320 |     CGColorSpaceRef sRGBcs = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
321 | 
322 |     CGFrameBuffer *sRGBFB = [EncoderImpl convertFromColorspaceToColorspace:identityFB convertToColorspace:sRGBcs];
323 | 
324 |     CGColorSpaceRelease(sRGBcs);
325 | 
326 |     if ((1)) {
327 |       // Emit png in sRGB colorspace
328 | 
329 |       NSString *filename = [NSString stringWithFormat:@"TestHDAsSRGB.png"];
330 |       //NSString *tmpDir = NSTemporaryDirectory();
331 |       NSString *dirName = [[NSFileManager defaultManager] currentDirectoryPath];
332 |       NSString *path = [dirName stringByAppendingPathComponent:filename];
333 |       NSData *pngData = [sRGBFB formatAsPNG];
334 | 
335 |       BOOL worked = [pngData writeToFile:path atomically:TRUE];
336 |       assert(worked);
337 | 
338 |       NSLog(@"wrote %@", path);
339 |     }
340 | 
341 |     /*
342 | 
343 |     // Convert grayscale range to BT.709 gamma adjusted values
344 | 
345 |     CGColorSpaceRef bt709cs = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709);
346 | 
347 |     CGFrameBuffer *bt709FB = [EncoderImpl convertFromColorspaceToColorspace:identityFB convertToColorspace:bt709cs];
348 | 
349 |     CGColorSpaceRelease(bt709cs);
350 | 
351 |     if ((1)) {
352 |       // Emit png in BT.709 colorspace
353 | 
354 |       NSString *filename = [NSString stringWithFormat:@"TestHDAsBT709.png"];
355 |       //NSString *tmpDir = NSTemporaryDirectory();
356 |       NSString *dirName = [[NSFileManager defaultManager] currentDirectoryPath];
357 |       NSString *path = [dirName stringByAppendingPathComponent:filename];
358 |       NSData *pngData = [bt709FB formatAsPNG];
359 | 
360 |       BOOL worked = [pngData writeToFile:path atomically:TRUE];
361 |       assert(worked);
362 | 
363 |       NSLog(@"wrote %@", path);
364 |     }
365 | 
366 |     */
367 | 
368 |     // Gather value mappings over the entire byte range for lin -> 709
369 | 
370 |     if ((0))
371 |     {
372 |       NSArray *labels = @[ @"G", @"R", @"PG", @"PR", @"AG", @"709" ];
373 | 
374 |       NSMutableArray *yPairsArr = [NSMutableArray array];
375 | 
376 |       // CSV generation logic below depends on getting 256 values that
377 |       // represent the range.
378 | 
379 |       uint32_t *pixelPtr = (uint32_t *) sRGBFB.pixels;
380 | 
381 |       NSMutableArray *mSamples = [NSMutableArray array];
382 | 
383 |       for (int i = 0; i < genWidth; i += 2) {
384 |         uint32_t pixel = pixelPtr[i];
385 |         int grayVal = pixel & 0xFF;
386 |         [mSamples addObject:@(grayVal)];
387 |       }
388 | 
389 |       assert([mSamples count] == 256);
390 | 
391 |       NSMutableDictionary *rangeMap = [NSMutableDictionary dictionary];
392 | 
393 |       for (int i = 0; i < 256; i++) {
394 |         int grayVal = [mSamples[i] intValue];
395 |         rangeMap[@(i)] = @(grayVal);
396 | 
397 |         // Use (Y 128 128) to decode grayscale value to a RGB value.
398 |         // Since the values for Y are setup with a gamma, need to
399 |         // know the gamma to be able to decode ?
400 | 
401 |         // Float amount of the grayscale range that input grayscale
402 |         // value corresponds to.
403 | 
404 |         float percentOfGrayscale = i / 255.0f;
405 |         float percentOfRange = grayVal / 255.0f;
406 | 
407 |         float appleGammaAdjusted = Apple196_linearNormToNonLinear(percentOfGrayscale);
408 | 
409 |         // This actually appears to be a better approximation of the actual current
410 |         // output, so why would anything about the Apple 1.961 curve be useful?
411 | 
412 |         // Perhaps you are only meant to decode with the 1.961 function?
413 | 
414 |         float rec709GammaAdjusted = BT709_linearNormToNonLinear(percentOfGrayscale);
415 | 
416 |         [yPairsArr addObject:@[@(i), @(grayVal), @(percentOfGrayscale), @(percentOfRange), @(appleGammaAdjusted), @(rec709GammaAdjusted)]];
417 |       }
418 | 
419 |       NSLog(@"rangeMap contains %d values", (int)rangeMap.count);
420 |       NSLog(@"");
421 | 
422 |       [EncoderImpl writeTableToCSV:@"Encode_lin_to_sRGB.csv" labelsArr:labels valuesArr:yPairsArr];
423 |     }
424 | 
425 |     // Gather value mappings over the entire byte range for lin -> sRGB
426 | 
427 |     if ((0))
428 |     {
429 |       NSArray *labels = @[ @"G", @"R", @"PG", @"PR", @"AG", @"sRGB" ];
430 | 
431 |       NSMutableArray *yPairsArr = [NSMutableArray array];
432 | 
433 |       // CSV generation logic below depends on getting 256 values that
434 |       // represent the range.
435 | 
436 |       uint32_t *pixelPtr = (uint32_t *) sRGBFB.pixels;
437 | 
438 |       NSMutableArray *mSamples = [NSMutableArray array];
439 | 
440 |       for (int i = 0; i < genWidth; i += 2) {
441 |         uint32_t pixel = pixelPtr[i];
442 |         int grayVal = pixel & 0xFF;
443 |         [mSamples addObject:@(grayVal)];
444 |       }
445 | 
446 |       assert([mSamples count] == 256);
447 | 
448 |       NSMutableDictionary *rangeMap = [NSMutableDictionary dictionary];
449 | 
450 |       for (int i = 0; i < 256; i++) {
451 |         int grayVal = [mSamples[i] intValue];
452 |         rangeMap[@(i)] = @(grayVal);
453 | 
454 |         // Use (Y 128 128) to decode grayscale value to a RGB value.
455 |         // Since the values for Y are setup with a gamma, need to
456 |         // know the gamma to be able to decode ?
457 | 
458 |         // Float amount of the grayscale range that input grayscale
459 |         // value corresponds to.
460 | 
461 |         float percentOfGrayscale = i / 255.0f;
462 |         float percentOfRange = grayVal / 255.0f;
463 | 
464 |         float appleGammaAdjusted = 0.0f;
465 | 
466 |         float sRGBGammaAdjusted = sRGB_linearNormToNonLinear(percentOfGrayscale);
467 | 
468 |         [yPairsArr addObject:@[@(i), @(grayVal), @(percentOfGrayscale), @(percentOfRange), @(appleGammaAdjusted), @(sRGBGammaAdjusted)]];
469 |       }
470 | 
471 |       NSLog(@"rangeMap contains %d values", (int)rangeMap.count);
472 |       NSLog(@"");
473 | 
474 |       [EncoderImpl writeTableToCSV:@"Encode_lin_to_sRGB_GR.csv" labelsArr:labels valuesArr:yPairsArr];
475 |     }
476 | 
477 |     // Emit CSV mapping of the grayscale values to 1.961 power function
478 | 
479 |     if ((1))
480 |     {
481 |       NSArray *labels = @[ @"G", @"R", @"PG", @"PR", @"sRGB", @"BT709", @"AG" ];
482 | 
483 |       NSMutableArray *yPairsArr = [NSMutableArray array];
484 | 
485 |       // CSV generation logic below depends on getting 256 values that
486 |       // represent the range.
487 | 
488 |       uint32_t *pixelPtr = (uint32_t *) sRGBFB.pixels;
489 | 
490 |       NSMutableArray *mSamples = [NSMutableArray array];
491 | 
492 |       for (int i = 0; i < genWidth; i += 2) {
493 |         uint32_t pixel = pixelPtr[i];
494 |         int grayVal = pixel & 0xFF;
495 |         [mSamples addObject:@(grayVal)];
496 |       }
497 | 
498 |       assert([mSamples count] == 256);
499 | 
500 |       NSMutableDictionary *rangeMap = [NSMutableDictionary dictionary];
501 | 
502 |       for (int i = 0; i < 256; i++) {
503 |         int grayVal = [mSamples[i] intValue];
504 |         rangeMap[@(i)] = @(grayVal);
505 | 
506 |         // Use (Y 128 128) to decode grayscale value to a RGB value.
507 |         // Since the values for Y are setup with a gamma, need to
508 |         // know the gamma to be able to decode ?
509 | 
510 |         // Float amount of the grayscale range that input grayscale
511 |         // value corresponds to.
512 | 
513 |         float percentOfGrayscale = i / 255.0f;
514 |         float percentOfRange = grayVal / 255.0f;
515 | 
516 |         float appleGammaAdjusted = Apple196_linearNormToNonLinear(percentOfGrayscale);
517 | 
518 |         float sRGBGammaAdjusted = sRGB_linearNormToNonLinear(percentOfGrayscale);
519 | 
520 |         float bt709GammaAdjusted = BT709_linearNormToNonLinear(percentOfGrayscale);
521 | 
522 |         [yPairsArr addObject:@[@(i), @(grayVal), @(percentOfGrayscale), @(percentOfRange), @(sRGBGammaAdjusted),
523 |                                @(bt709GammaAdjusted),
524 |                                @(appleGammaAdjusted)
525 |                                ]];
526 |       }
527 | 
528 |       NSLog(@"rangeMap contains %d values", (int)rangeMap.count);
529 |       NSLog(@"");
530 | 
531 |       [EncoderImpl writeTableToCSV:@"Encode_lin_to_sRGB_GR.csv" labelsArr:labels valuesArr:yPairsArr];
532 |     }
533 |   }
534 | 
535 |   return 0; // success, main passes this to exit()
536 | }
537 | 
538 | int main(int argc, const char * argv[]) {
539 |   int retcode = 0;
540 | 
541 |   @autoreleasepool {
542 |     char *outPNG = NULL;
543 | 
544 |     //int fps = 30; // Default to 30 frames per second
545 |     //int fps = 1;
546 | 
547 |     ConfigurationStruct configS;
548 |     configS.fps = 1;
549 | 
550 |     if (argc == 2) {
551 |       // No options, output file indicated
552 |       outPNG = (char *) argv[1];
553 |     } else {
554 |       usage();
555 |       exit(1);
556 |     }
557 | 
558 |     NSString *outPNGStr = [NSString stringWithFormat:@"%s", outPNG];
559 | 
560 |     retcode = process(outPNGStr, &configS);
561 |   }
562 | 
563 |   exit(retcode);
564 |   return retcode;
565 | }
566 | 
567 | 
--------------------------------------------------------------------------------
/write_full_range/write_full_range.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // write_full_range
4 | //
5 | // Created by Mo DeJong on 2/3/19.
6 | // Copyright © 2019 Apple. All rights reserved.
7 | //
8 | 
9 | #import <Foundation/Foundation.h> // assumed import; angle-bracket target missing from the dump
10 | 
11 | #import "CGFrameBuffer.h"
12 | #import "BGRAToBT709Converter.h"
13 | 
14 | #import "sRGB.h"
15 | #import "BT709.h"
16 | 
17 | #import "y4m_writer.h"
18 | 
19 | 
20 | int main(int argc, const char * argv[]) {
21 |   @autoreleasepool {
22 |     // Generate YCbCr bytes using full range [0, 255]
23 |     // values as opposed to video range, Y in [16, 235] and Cb/Cr in [16, 240]
24 | 
25 |     const char *outFilename = "write_y4m_ex.y4m";
26 |     FILE *outFile = y4m_open_file(outFilename);
27 | 
28 |     if (outFile == NULL) {
29 |       return 1;
30 |     }
31 | 
32 |     int width = 256;
33 |     int height = 256;
34 | 
35 |     NSMutableData *yData = [NSMutableData dataWithLength:width*height];
36 |     uint8_t *yPtr = (uint8_t*) yData.bytes;
37 | 
38 |     for (int i = 0; i < (width * height); i++) { // alternate max/mid luma, full range values
39 |       if ((i % 2) == 0) {
40 |         yPtr[i] = 255;
41 |       } else {
42 |         yPtr[i] = 128;
43 |       }
44 |     }
45 | 
46 |     int hw = 256 / 2;
47 |     int hh = 256 / 2;
48 | 
49 |     NSMutableData *uData = [NSMutableData dataWithLength:hw*hh];
50 |     NSMutableData *vData = [NSMutableData dataWithLength:hw*hh];
51 | 
52 |     uint8_t *uPtr = (uint8_t*) uData.bytes;
53 |     uint8_t *vPtr = (uint8_t*) vData.bytes;
54 | 
55 |     for (int i = 0; i < (hw * hh); i++) { // both branches write neutral chroma (128), so the frame is pure grayscale
56 |       if ((i % 2) == 0) {
57 |         uPtr[i] = 128;
58 |         vPtr[i] = 128;
59 |       } else {
60 |         uPtr[i] = 128;
61 |         vPtr[i] = 128;
62 |       }
63 |     }
64 | 
65 |     Y4MHeaderStruct header;
66 | 
67 |     header.width = width;
68 |     header.height = height;
69 | 
70 |     header.fps = Y4MHeaderFPS_1;
71 |     //header.fps = Y4MHeaderFPS_30;
72 | 
73 |     int header_result = y4m_write_header(outFile, &header);
74 |     if (header_result != 0) {
75 |       return header_result;
76 |     }
77 | 
78 |     Y4MFrameStruct fs;
79 | 
80 |     fs.yPtr = (uint8_t*) yData.bytes;
81 |     fs.yLen = (int) yData.length;
82 | 
83 |     fs.uPtr = (uint8_t*) uData.bytes;
84 |     fs.uLen = (int) uData.length;
85 | 
86 |     fs.vPtr = (uint8_t*) vData.bytes;
87 |     fs.vLen = (int) vData.length;
88 | 
89 |     int write_frame_result = y4m_write_frame(outFile, &fs);
90 |     if (write_frame_result != 0) {
91 |       return write_frame_result;
92 |     }
93 | 
94 |     fclose(outFile);
95 |   }
96 |   return 0;
97 | }
98 | 
--------------------------------------------------------------------------------
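// (A minimal sketch of the full range vs. video range quantization that
// write_full_range.m exercises, for normalized values in [0.0, 1.0];
// standard BT.709 8-bit quantization, not original source:)
//
//   int yFull  = (int) round(yNorm * 255.0f);       // full range:  Y in [0, 255]
//   int yVideo = 16 + (int) round(yNorm * 219.0f);  // video range: Y in [16, 235]
//   int cVideo = 16 + (int) round(cNorm * 224.0f);  // video range: Cb/Cr in [16, 240]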