├── .gitignore
├── H264SeamlessLooping
│   ├── AppDelegate.h
│   ├── AppDelegate.m
│   ├── Assets.xcassets
│   │   └── AppIcon.appiconset
│   │       └── Contents.json
│   ├── BGDecodeEncode.h
│   ├── BGDecodeEncode.m
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   ├── CGFrameBuffer.h
│   ├── CGFrameBuffer.m
│   ├── CarOverWhiteBG.m4v
│   ├── DetailViewController.h
│   ├── DetailViewController.m
│   ├── H264FrameDecoder.h
│   ├── H264FrameDecoder.m
│   ├── H264FrameEncoder.h
│   ├── H264FrameEncoder.m
│   ├── Info.plist
│   ├── MasterViewController.h
│   ├── MasterViewController.m
│   └── main.m
├── H264SeamlessLooping.xcodeproj
│   ├── project.pbxproj
│   └── project.xcworkspace
│       └── contents.xcworkspacedata
└── license.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | *xcuserdata*
2 |
3 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/CarOverWhiteBG.m4v:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mdejong/H264SeamlessLooping/HEAD/H264SeamlessLooping/CarOverWhiteBG.m4v
--------------------------------------------------------------------------------
/H264SeamlessLooping.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:H264SeamlessLooping.xcodeproj">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/8/16.
6 | // Copyright © 2016 HelpURock. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/8/16.
6 | // Copyright © 2016 HelpURock. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/MasterViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // MasterViewController.h
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/8/16.
6 | // Copyright © 2016 HelpURock. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @class DetailViewController;
12 |
13 | @interface MasterViewController : UITableViewController
14 |
15 | @property (strong, nonatomic) DetailViewController *detailViewController;
16 |
17 |
18 | @end
19 |
20 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/DetailViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // DetailViewController.h
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/8/16.
6 | // Copyright © 2016 HelpURock. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface DetailViewController : UIViewController
12 |
13 | @property (nonatomic, copy) NSString* tag;
14 | @property (weak, nonatomic) IBOutlet UILabel *detailDescriptionLabel;
15 |
16 | - (void)configureView;
17 |
18 | @end
19 |
20 |
--------------------------------------------------------------------------------
/license.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2016, Mo DeJong
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
5 |
6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
7 |
8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
9 |
10 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
11 |
12 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "ipad",
35 | "size" : "29x29",
36 | "scale" : "1x"
37 | },
38 | {
39 | "idiom" : "ipad",
40 | "size" : "29x29",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "40x40",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "40x40",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "76x76",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "76x76",
61 | "scale" : "2x"
62 | }
63 | ],
64 | "info" : {
65 | "version" : 1,
66 | "author" : "xcode"
67 | }
68 | }
--------------------------------------------------------------------------------
/H264SeamlessLooping/BGDecodeEncode.h:
--------------------------------------------------------------------------------
1 | //
2 | // BGDecodeEncode.h
3 | //
4 | // Created by Mo DeJong on 7/6/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 | // This module provides an easy-to-use background processing queue
9 | // that will decode frames from an H264 source and then re-encode
10 | // them as either opaque or transparent frames.
11 |
12 | @import Foundation;
13 |
14 | @import AVFoundation;
15 | @import CoreVideo;
16 | @import CoreImage;
17 | @import CoreMedia;
18 | @import VideoToolbox;
19 |
20 | @interface BGDecodeEncode : NSObject
21 |
22 | // Decompress and then recompress each frame of H264 video as keyframes that
23 | // can be rendered directly without holding a stream decode resource open.
24 | // If an error is encountered during the encode/decode process then nil
25 | // is returned (this can happen when the app is put into the background).
26 |
27 | + (BOOL) recompressKeyframes:(NSString*)resourceName
28 | frameDuration:(float)frameDuration
29 | renderSize:(CGSize)renderSize
30 | aveBitrate:(int)aveBitrate
31 | frames:(NSMutableArray*)frames;
32 |
33 | // Previous API compat
34 |
35 | + (NSArray*) recompressKeyframesOnBackgroundThread:(NSString*)resourceName
36 | frameDuration:(float)frameDuration
37 | renderSize:(CGSize)renderSize
38 | aveBitrate:(int)aveBitrate;
39 |
40 | @end
41 |
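42 | // Usage sketch: recompress a bundled movie into an array of keyframe
43 | // sample buffers. The parameter values here simply mirror those used
44 | // by DetailViewController.m and are not required values.
45 | //
46 | //   NSArray *encodedH264Buffers = [BGDecodeEncode
47 | //     recompressKeyframesOnBackgroundThread:@"CarOverWhiteBG.m4v"
48 | //                             frameDuration:1.0f/30
49 | //                                renderSize:CGSizeMake(1920, 1080)
50 | //                                aveBitrate:5000000];
51 | //
52 | //   // Each element is a CMSampleBufferRef wrapping one H264 keyframe that
53 | //   // can be enqueued directly on an AVSampleBufferDisplayLayer.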
--------------------------------------------------------------------------------
/H264SeamlessLooping/H264FrameDecoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // H264FrameDecoder.h
3 | //
4 | // Created by Mo DeJong on 4/5/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 | // This module makes it easy to decode a CoreVideo frame
9 | // given an encoded CoreMedia frame.
10 |
11 | @import Foundation;
12 |
13 | @import AVFoundation;
14 | @import CoreVideo;
15 | @import CoreImage;
16 | @import CoreMedia;
17 | @import VideoToolbox;
18 |
19 | @interface H264FrameDecoder : NSObject
20 |
21 | // Defaults to kCVPixelFormatType_32BGRA for implicit conversion to iOS native pixel format.
22 | // Other useful values are kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange and
23 | // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
24 |
25 | @property (nonatomic, assign) OSType pixelType;
26 |
27 | // When a CoreVideo pixel buffer is fully decoded, this property is set
28 |
29 | @property (nonatomic, assign) CVPixelBufferRef pixelBuffer;
30 |
31 | @property (nonatomic, copy) void (^pixelBufferBlock)(CVPixelBufferRef);
32 |
33 | // Invoke to decode the compressed sample buffer; the resulting pixel
34 | // buffer is assigned to self.pixelBuffer via async callback.
35 |
36 | - (BOOL) decodeH264CoreMediaFrame:(CMSampleBufferRef)cmSampleBuffer;
37 |
38 | // Optional method to indicate that the session is completed; it is
39 | // also invoked on dealloc.
40 |
41 | - (void) endSession;
42 |
43 | // If the caller wants to explicitly block until the frame decode operation
44 | // is finished then this method can be invoked. Don't invoke on the main
45 | // thread, since it will block.
46 |
47 | - (void) waitForFrame;
48 |
49 | // Flush any pending frames but without blocking
50 |
51 | - (void) finishFrame;
52 |
53 | @end
54 |
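55 | // Usage sketch: a minimal blocking decode of a single frame, assuming
56 | // sampleBuffer holds one compressed H264 frame (see the dumpFramesImages
57 | // path in DetailViewController.m for a complete example).
58 | //
59 | //   H264FrameDecoder *decoder = [[H264FrameDecoder alloc] init];
60 | //   decoder.pixelType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
61 | //   decoder.pixelBufferBlock = ^(CVPixelBufferRef pixBuffer) {
62 | //     // pixBuffer is the decoded frame, or NULL on error
63 | //   };
64 | //   [decoder decodeH264CoreMediaFrame:sampleBuffer];
65 | //   [decoder waitForFrame]; // blocks, so not on the main thread
66 | //   [decoder endSession];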
--------------------------------------------------------------------------------
/H264SeamlessLooping/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/H264SeamlessLooping/H264FrameEncoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // H264FrameEncoder.h
3 | //
4 | // Created by Mo DeJong on 4/5/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 | // This module makes it easy to encode a single CoreVideo frame
9 | // as H264 wrapped as a CoreMedia sample buffer object.
10 |
11 | @import Foundation;
12 |
13 | @import AVFoundation;
14 | @import CoreVideo;
15 | @import CoreImage;
16 | @import CoreMedia;
17 | @import VideoToolbox;
18 |
19 | @interface H264FrameEncoder : NSObject
20 |
21 | // Approx frame duration; defaults to 1.0/30 (30 FPS).
22 |
23 | @property (nonatomic, assign) float frameDuration;
24 |
25 |
26 | // Bitrate provides a way to dial the encoded quality up or down
27 |
28 | // LOW = 100000
29 | // MED = 2000000
30 | // HIGH = 5000000
31 | // CRAZY = 2000000000
32 |
33 | @property (nonatomic, assign) int aveBitrate;
34 |
35 | @property (nonatomic, assign) int frameOffset;
36 |
37 | @property (nonatomic, assign) CMSampleBufferRef sampleBuffer;
38 |
39 | @property (nonatomic, copy) void (^sampleBufferBlock)(CMSampleBufferRef);
40 |
41 | // Encode an uncompressed CoreVideo pixel buffer as a compressed CoreMedia buffer.
42 | // The input is BGRA pixels and the output is a CoreMedia H.264 frame as a data buffer.
43 | // The output CoreMedia buffer is assigned to self.sampleBuffer via async callback.
44 | // Returns TRUE on success or FALSE if a compression session could not be created.
45 |
46 | - (BOOL) encodeH264CoreMediaFrame:(CVPixelBufferRef)cvPixelBuffer;
47 |
48 | // Optional method to indicate that the session is completed; it is
49 | // also invoked on dealloc.
50 |
51 | - (void) endSession;
52 |
53 | // If the caller wants to explicitly block until the frame encode operation
54 | // is finished then this method can be invoked. Don't invoke on the main
55 | // thread, since it will block.
56 |
57 | - (void) waitForFrame;
58 |
59 | @end
60 |
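61 | // Usage sketch: a minimal blocking encode of a single frame, assuming
62 | // cvPixelBuffer holds uncompressed BGRA pixels.
63 | //
64 | //   H264FrameEncoder *encoder = [[H264FrameEncoder alloc] init];
65 | //   encoder.frameDuration = 1.0f/30;
66 | //   encoder.aveBitrate = 5000000;
67 | //   encoder.sampleBufferBlock = ^(CMSampleBufferRef buffer) {
68 | //     // buffer is the compressed H264 keyframe, or NULL on error
69 | //   };
70 | //   [encoder encodeH264CoreMediaFrame:cvPixelBuffer];
71 | //   [encoder waitForFrame]; // blocks, so not on the main thread
72 | //   [encoder endSession];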
--------------------------------------------------------------------------------
/H264SeamlessLooping/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>en</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>APPL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleSignature</key>
20 | <string>????</string>
21 | <key>CFBundleVersion</key>
22 | <string>1</string>
23 | <key>LSRequiresIPhoneOS</key>
24 | <true/>
25 | <key>UILaunchStoryboardName</key>
26 | <string>LaunchScreen</string>
27 | <key>UIMainStoryboardFile</key>
28 | <string>Main</string>
29 | <key>UIRequiredDeviceCapabilities</key>
30 | <array>
31 | <string>armv7</string>
32 | </array>
33 | <key>UIStatusBarTintParameters</key>
34 | <dict>
35 | <key>UINavigationBar</key>
36 | <dict>
37 | <key>Style</key>
38 | <string>UIBarStyleDefault</string>
39 | <key>Translucent</key>
40 | <false/>
41 | </dict>
42 | </dict>
43 | <key>UISupportedInterfaceOrientations</key>
44 | <array>
45 | <string>UIInterfaceOrientationPortrait</string>
46 | <string>UIInterfaceOrientationLandscapeLeft</string>
47 | <string>UIInterfaceOrientationLandscapeRight</string>
48 | </array>
49 | <key>UISupportedInterfaceOrientations~ipad</key>
50 | <array>
51 | <string>UIInterfaceOrientationPortrait</string>
52 | <string>UIInterfaceOrientationPortraitUpsideDown</string>
53 | <string>UIInterfaceOrientationLandscapeLeft</string>
54 | <string>UIInterfaceOrientationLandscapeRight</string>
55 | </array>
56 | </dict>
57 | </plist>
58 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/8/16.
6 | // Copyright © 2016 HelpURock. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 | #import "DetailViewController.h"
11 |
12 | @interface AppDelegate () <UISplitViewControllerDelegate>
13 |
14 | @end
15 |
16 | @implementation AppDelegate
17 |
18 |
19 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
20 | // Override point for customization after application launch.
21 | UISplitViewController *splitViewController = (UISplitViewController *)self.window.rootViewController;
22 | UINavigationController *navigationController = [splitViewController.viewControllers lastObject];
23 | navigationController.topViewController.navigationItem.leftBarButtonItem = splitViewController.displayModeButtonItem;
24 | splitViewController.delegate = self;
25 | return YES;
26 | }
27 |
28 | - (void)applicationWillResignActive:(UIApplication *)application {
29 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
30 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
31 | }
32 |
33 | - (void)applicationDidEnterBackground:(UIApplication *)application {
34 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
35 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
36 | }
37 |
38 | - (void)applicationWillEnterForeground:(UIApplication *)application {
39 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
40 | }
41 |
42 | - (void)applicationDidBecomeActive:(UIApplication *)application {
43 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
44 | }
45 |
46 | - (void)applicationWillTerminate:(UIApplication *)application {
47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
48 | }
49 |
50 | #pragma mark - Split view
51 |
52 | - (BOOL)splitViewController:(UISplitViewController *)splitViewController collapseSecondaryViewController:(UIViewController *)secondaryViewController ontoPrimaryViewController:(UIViewController *)primaryViewController {
53 | if ([secondaryViewController isKindOfClass:[UINavigationController class]] && [[(UINavigationController *)secondaryViewController topViewController] isKindOfClass:[DetailViewController class]] && ([(DetailViewController *)[(UINavigationController *)secondaryViewController topViewController] tag] == nil)) {
54 | // Return YES to indicate that we have handled the collapse by doing nothing; the secondary controller will be discarded.
55 | return YES;
56 | } else {
57 | return NO;
58 | }
59 | }
60 |
61 | @end
62 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/MasterViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // MasterViewController.m
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/8/16.
6 | // Copyright © 2016 HelpURock. All rights reserved.
7 | //
8 |
9 | #import "MasterViewController.h"
10 | #import "DetailViewController.h"
11 |
12 | @interface MasterViewController ()
13 |
14 | @property NSMutableArray *objects;
15 | @end
16 |
17 | @implementation MasterViewController
18 |
19 | - (void)viewDidLoad {
20 | [super viewDidLoad];
21 | // Do any additional setup after loading the view, typically from a nib.
22 | //self.navigationItem.leftBarButtonItem = self.editButtonItem;
23 |
24 | self.navigationItem.title = @"Looping";
25 |
26 | // UIBarButtonItem *addButton = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemAdd target:self action:@selector(insertNewObject:)];
27 | // self.navigationItem.rightBarButtonItem = addButton;
28 | // self.detailViewController = (DetailViewController *)[[self.splitViewController.viewControllers lastObject] topViewController];
29 |
30 | [self insertNewObject:@"AVPlayer builtin"];
31 | [self insertNewObject:@"CoreMedia samples"];
32 | }
33 |
34 | - (void)viewWillAppear:(BOOL)animated {
35 | self.clearsSelectionOnViewWillAppear = self.splitViewController.isCollapsed;
36 | [super viewWillAppear:animated];
37 | }
38 |
39 | - (void)didReceiveMemoryWarning {
40 | [super didReceiveMemoryWarning];
41 | // Dispose of any resources that can be recreated.
42 | }
43 |
44 | - (void)insertNewObject:(NSString*)tag {
45 | if (!self.objects) {
46 | self.objects = [[NSMutableArray alloc] init];
47 | }
48 | [self.objects addObject:tag];
49 | NSIndexPath *indexPath = [NSIndexPath indexPathForRow:(self.objects.count - 1) inSection:0];
50 | [self.tableView insertRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationAutomatic];
51 | }
52 |
53 | #pragma mark - Segues
54 |
55 | - (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
56 | if ([[segue identifier] isEqualToString:@"showDetail"]) {
57 | NSIndexPath *indexPath = [self.tableView indexPathForSelectedRow];
58 | NSString *tag = self.objects[indexPath.row];
59 | DetailViewController *controller = (DetailViewController *)[[segue destinationViewController] topViewController];
60 | controller.tag = tag;
61 | [controller configureView];
62 | controller.navigationItem.leftBarButtonItem = self.splitViewController.displayModeButtonItem;
63 | controller.navigationItem.leftItemsSupplementBackButton = YES;
64 | }
65 | }
66 |
67 | #pragma mark - Table View
68 |
69 | - (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
70 | return 1;
71 | }
72 |
73 | - (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
74 | return self.objects.count;
75 | }
76 |
77 | - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
78 | UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"Cell" forIndexPath:indexPath];
79 |
80 | NSString *object = self.objects[indexPath.row];
81 | cell.textLabel.text = [object description];
82 | return cell;
83 | }
84 |
85 | - (BOOL)tableView:(UITableView *)tableView canEditRowAtIndexPath:(NSIndexPath *)indexPath {
86 | // Return NO if you do not want the specified item to be editable.
87 | return NO;
88 | }
89 |
90 | - (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath {
91 | if (editingStyle == UITableViewCellEditingStyleDelete) {
92 | [self.objects removeObjectAtIndex:indexPath.row];
93 | [tableView deleteRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationFade];
94 | } else if (editingStyle == UITableViewCellEditingStyleInsert) {
95 | // Create a new instance of the appropriate class, insert it into the array, and add a new row to the table view.
96 | }
97 | }
98 |
99 | @end
100 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/CGFrameBuffer.h:
--------------------------------------------------------------------------------
1 | //
2 | // CGFrameBuffer.h
3 | //
4 | // Created by Moses DeJong on 2/13/09.
5 | //
6 | // License terms defined in License.txt.
7 |
8 | #import <Foundation/Foundation.h>
9 | #import <UIKit/UIKit.h>
10 | #import <CoreVideo/CoreVideo.h>
11 |
12 | // Avoid incorrect warnings from clang
13 | #ifndef __has_feature // Optional.
14 | #define __has_feature(x) 0 // Compatibility with non-clang compilers.
15 | #endif
16 |
17 | #ifndef CF_RETURNS_RETAINED
18 | #if __has_feature(attribute_cf_returns_retained)
19 | #define CF_RETURNS_RETAINED __attribute__((cf_returns_retained))
20 | #else
21 | #define CF_RETURNS_RETAINED
22 | #endif
23 | #endif
24 |
25 | @interface CGFrameBuffer : NSObject {
26 | @protected
27 | char *m_pixels;
28 | char *m_zeroCopyPixels;
29 | NSData *m_zeroCopyMappedData;
30 | size_t m_numBytes;
31 | size_t m_numBytesAllocated;
32 | size_t m_width;
33 | size_t m_height;
34 | size_t m_bitsPerPixel;
35 | size_t m_bytesPerPixel;
36 | int32_t m_isLockedByDataProvider;
37 | CGImageRef m_lockedByImageRef;
38 | CGColorSpaceRef m_colorspace;
39 |
40 | #if __has_feature(objc_arc)
41 | NSObject *m_arcRefToSelf;
42 | #endif // objc_arc
43 | }
44 |
45 | @property (readonly) char *pixels;
46 | @property (readonly) char *zeroCopyPixels;
47 | @property (nonatomic, copy) NSData *zeroCopyMappedData;
48 |
49 | // The numBytes property indicates the number of bytes in length
50 | // of the buffer pointed to by the pixels property. In the event
51 | // that an odd number of pixels is allocated, this numBytes value
52 | // could also include a zero padding pixel in order to keep the
53 | // buffer size an even number of pixels.
54 |
55 | @property (readonly) size_t numBytes;
56 | @property (readonly) size_t width;
57 | @property (readonly) size_t height;
58 | @property (readonly) size_t bitsPerPixel;
59 | @property (readonly) size_t bytesPerPixel;
60 |
61 | @property (nonatomic, assign) BOOL isLockedByDataProvider;
62 | @property (nonatomic, readonly) CGImageRef lockedByImageRef;
63 |
64 | // The colorspace will default to device RGB unless explicitly set. If set, then
65 | // the indicated colorspace will be used when invoking CGBitmapContextCreate()
66 | // such that a drawing operation will output pixels in the indicated colorspace.
67 | // The same colorspace will be used when creating a CGImageRef via createCGImageRef.
68 | // While this property is marked as assign, it will retain a ref to the indicated colorspace.
69 |
70 | @property (nonatomic, assign) CGColorSpaceRef colorspace;
71 |
72 | + (CGFrameBuffer*) cGFrameBufferWithBppDimensions:(NSInteger)bitsPerPixel width:(NSInteger)width height:(NSInteger)height;
73 |
74 | - (id) initWithBppDimensions:(NSInteger)bitsPerPixel width:(NSInteger)width height:(NSInteger)height;
75 |
76 | // Render the contents of a view as pixels. Returns TRUE
77 | // if successful, otherwise FALSE. Note that the view
78 | // must be opaque and render all of its pixels.
79 |
80 | - (BOOL) renderView:(UIView*)view;
81 |
82 | // Render a CGImageRef directly into the pixels
83 |
84 | - (BOOL) renderCGImage:(CGImageRef)cgImageRef;
85 |
86 | // Wrap the framebuffer in a CoreGraphics bitmap context.
87 | // This API creates a handle that can be used to render
88 | // directly into the bitmap pixels. The handle must
89 | // be explicitly released by the caller via CGContextRelease()
90 |
91 | - (CGContextRef) createBitmapContext CF_RETURNS_RETAINED;
92 |
93 | // Create a Core Graphics image from the pixel data
94 | // in this buffer. The hasDataProvider property
95 | // will be TRUE while the CGImageRef is in use.
96 | // This name is upper case to avoid warnings from the analyzer.
97 |
98 | - (CGImageRef) createCGImageRef CF_RETURNS_RETAINED;
99 |
100 | // Defines the pixel layout, could be overloaded in a derived class
101 |
102 | - (CGBitmapInfo) getBitmapInfo;
103 |
104 | - (BOOL) isLockedByImageRef:(CGImageRef)cgImageRef;
105 |
106 | // Set all pixels to 0x0
107 |
108 | - (void) clear;
109 |
110 | // Copy data from another framebuffer into this one
111 |
112 | - (void) copyPixels:(CGFrameBuffer *)anotherFrameBuffer;
113 |
114 | // Use memcpy() as opposed to an OS level page copy
115 |
116 | - (void) memcopyPixels:(CGFrameBuffer *)anotherFrameBuffer;
117 |
118 | // Zero copy from an external read-only location
119 |
120 | - (void) zeroCopyPixels:(void*)zeroCopyPtr mappedData:(NSData*)mappedData;
121 | - (void) zeroCopyToPixels;
122 | - (void) doneZeroCopyPixels;
123 |
124 | // Copy from the given CoreVideo pixel buffer to this framebuffer. Note
125 | // that an implicit crop is used in the case where this framebuffer
126 | // is smaller than the other framebuffer.
127 |
128 | - (void) copyFromCVPixelBuffer:(CVPixelBufferRef)cVPixelBufferRef;
129 |
130 | // Optional opaque pixel writing logic to clear the alpha channel values when
131 | // pixels are known to be 24BPP only. This call sets the alpha channel for
132 | // each pixel to zero.
133 |
134 | - (void) clearAlphaChannel;
135 |
136 | // This method resets the alpha channel for each pixel to be fully opaque.
137 |
138 | - (void) resetAlphaChannel;
139 |
140 | // Convert pixels to a PNG image format that can be easily saved to disk.
141 |
142 | - (NSData*) formatAsPNG;
143 |
144 | @end
145 |
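146 | // Usage sketch: render a UIView into a 32BPP framebuffer and save the
147 | // result as a PNG. The output path is just an example value.
148 | //
149 | //   CGFrameBuffer *frameBuffer = [CGFrameBuffer cGFrameBufferWithBppDimensions:32
150 | //     width:(NSInteger)view.bounds.size.width
151 | //     height:(NSInteger)view.bounds.size.height];
152 | //   if ([frameBuffer renderView:view]) {
153 | //     NSData *pngData = [frameBuffer formatAsPNG];
154 | //     [pngData writeToFile:@"/tmp/frame.png" atomically:TRUE];
155 | //   }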
--------------------------------------------------------------------------------
/H264SeamlessLooping/H264FrameDecoder.m:
--------------------------------------------------------------------------------
1 | //
2 | // H264FrameDecoder.m
3 | //
4 | // Created by Mo DeJong on 4/5/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 |
9 | #import "H264FrameDecoder.h"
10 |
11 | void VideoToolboxCallback(
12 | void* decompressionOutputRefCon,
13 | void* sourceFrameRefCon,
14 | OSStatus status,
15 | VTDecodeInfoFlags info,
16 | CVImageBufferRef imageBuffer,
17 | CMTime presentationTimeStamp,
18 | CMTime presentationDuration);
19 |
20 | // Private API
21 |
22 | @interface H264FrameDecoder ()
23 | {
24 | VTDecompressionSessionRef session;
25 | }
26 |
27 | @end
28 |
29 | @implementation H264FrameDecoder
30 |
31 | @synthesize pixelBuffer = m_pixelBuffer;
32 |
33 | - (void) dealloc
34 | {
35 | self.pixelBuffer = NULL;
36 |
37 | self.pixelBufferBlock = nil;
38 |
39 | [self endSession];
40 | }
41 |
42 | - (void) endSession
43 | {
44 | if (self->session != NULL) {
45 | VTDecompressionSessionWaitForAsynchronousFrames(self->session);
46 | VTDecompressionSessionInvalidate(self->session);
47 | CFRelease(self->session);
48 | self->session = NULL;
49 | }
50 | }
51 |
52 | - (void) setPixelBuffer:(CVPixelBufferRef)cvPixelBuffer
53 | {
54 | if (m_pixelBuffer) {
55 | CFRelease(m_pixelBuffer);
56 | }
57 | m_pixelBuffer = cvPixelBuffer;
58 | if (m_pixelBuffer) {
59 | CFRetain(m_pixelBuffer);
60 | }
61 | }
62 |
63 | - (BOOL) decodeH264CoreMediaFrame:(CMSampleBufferRef)cmSampleBuffer {
64 | OSStatus status;
65 |
66 | VTDecompressionOutputCallbackRecord cb = { VideoToolboxCallback, (__bridge void *) self };
67 | CMVideoFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(cmSampleBuffer);
68 |
69 | if (self.pixelType == 0) {
70 | self.pixelType = kCVPixelFormatType_32BGRA;
71 | }
72 |
73 | assert(formatDesc);
74 |
75 | if (self->session == NULL) {
76 | NSDictionary* pixelBufferOptions = @{
77 | // Output pixel type required here since it would default to video range
78 | (NSString*) kCVPixelBufferPixelFormatTypeKey : @(self.pixelType),
79 | (NSString*) kCVPixelBufferOpenGLESCompatibilityKey : @YES,
80 | (NSString*) kCVPixelBufferIOSurfacePropertiesKey : @{}};
81 |
82 | status = VTDecompressionSessionCreate(
83 | kCFAllocatorDefault,
84 | formatDesc,
85 | NULL,
86 | (__bridge CFDictionaryRef)pixelBufferOptions,
87 | &cb,
88 | &session);
89 |
90 | if (status != noErr) {
91 | NSLog(@"VTDecompressionSessionCreate status not `noErr`: %d\n", (int)status);
92 |
93 | if ((int)status == -12983) {
94 | // A previous session was not closed down properly
95 | NSLog(@"VTDecompressionSessionCreate error due to missing endSession?\n");
96 | }
97 |
98 | return FALSE;
99 | }
100 |
101 | // Configure session parameters
102 |
103 | [self configureSessionParameters];
104 | }
105 |
106 | assert(self->session);
107 |
108 | VTDecodeInfoFlags decodedFlags;
109 |
110 | status = VTDecompressionSessionDecodeFrame(session,
111 | cmSampleBuffer,
112 | kVTDecodeFrame_EnableAsynchronousDecompression,
113 | NULL,
114 | &decodedFlags);
115 |
116 | if (status != noErr) {
117 | NSLog(@"VTDecompressionSessionDecodeFrame status not `noErr`: %d\n", (int)status);
118 | return FALSE;
119 | }
120 |
121 | return TRUE;
122 | }
123 |
124 | - (void) didReceiveImageBuffer:(CVImageBufferRef)imageBuffer
125 | {
126 | CVPixelBufferRef cvPixelBufferRef = (CVPixelBufferRef) imageBuffer;
127 |
128 | self.pixelBuffer = cvPixelBufferRef;
129 |
130 | if (self.pixelBufferBlock != nil) {
131 | if (cvPixelBufferRef == NULL) {
132 | // Error case
133 | self.pixelBufferBlock(NULL);
134 | } else {
135 | // Success
136 | self.pixelBufferBlock(cvPixelBufferRef);
137 | }
138 | }
139 |
140 | return;
141 | }
142 |
143 | - (void) configureSessionParameters
144 | {
145 | }
146 |
147 | - (void) waitForFrame
148 | {
149 | OSStatus status;
150 |
151 | // Block until our callback has been called with the last frame.
152 | status = VTDecompressionSessionWaitForAsynchronousFrames(session);
153 |
154 | if (status != noErr) {
155 | NSLog(@"VTDecompressionSessionWaitForAsynchronousFrames status not `noErr`: %d\n", (int)status);
156 | return;
157 | }
158 | }
159 |
160 | - (void) finishFrame
161 | {
162 | OSStatus status;
163 |
164 | // Block until our callback has been called with the last frame.
165 | status = VTDecompressionSessionFinishDelayedFrames(session);
166 |
167 | if (status != noErr) {
168 | NSLog(@"VTDecompressionSessionFinishDelayedFrames status not `noErr`: %d\n", (int)status);
169 | return;
170 | }
171 | }
172 |
173 | @end
174 |
175 | // Video Toolbox callback
176 |
177 | void VideoToolboxCallback(
178 | void* decompressionOutputRefCon,
179 | void* sourceFrameRefCon,
180 | OSStatus status,
181 | VTDecodeInfoFlags info,
182 | CVImageBufferRef imageBuffer,
183 | CMTime presentationTimeStamp,
184 | CMTime presentationDuration)
185 | {
186 | H264FrameDecoder *obj = (__bridge H264FrameDecoder *)decompressionOutputRefCon;
187 | assert(obj);
188 |
189 | if (status != noErr) {
190 | NSLog(@"Error: %@", [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil]);
191 |
192 | [obj didReceiveImageBuffer:nil];
193 | } else {
194 | [obj didReceiveImageBuffer:imageBuffer];
195 | }
196 |
197 | return;
198 | }
199 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/H264SeamlessLooping/H264FrameEncoder.m:
--------------------------------------------------------------------------------
1 | //
2 | // H264FrameEncoder.m
3 | //
4 | // Created by Mo DeJong on 4/5/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 |
9 | #import "H264FrameEncoder.h"
10 |
11 | static
12 | void VideoToolboxCallback(void *outputCallbackRefCon,
13 | void *sourceFrameRefCon,
14 | OSStatus status,
15 | VTEncodeInfoFlags infoFlags,
16 | CMSampleBufferRef sampleBuffer );
17 |
18 | // Private API
19 |
20 | @interface H264FrameEncoder ()
21 | {
22 | VTCompressionSessionRef session;
23 | }
24 |
25 | @end
26 |
27 | @implementation H264FrameEncoder
28 |
29 | @synthesize sampleBuffer = m_sampleBuffer;
30 |
31 | - (void) dealloc
32 | {
33 | self.sampleBuffer = NULL;
34 |
35 | self.sampleBufferBlock = nil;
36 |
37 | [self endSession];
38 | }
39 |
40 | - (void) endSession
41 | {
42 | if (self->session != NULL) {
43 | VTCompressionSessionCompleteFrames(self->session, kCMTimeIndefinite);
44 | VTCompressionSessionInvalidate(self->session);
45 | CFRelease(self->session);
46 | self->session = NULL;
47 | }
48 | }
49 |
50 | - (void) setSampleBuffer:(CMSampleBufferRef)cmSampleBuffer
51 | {
52 | if (m_sampleBuffer) {
53 | CFRelease(m_sampleBuffer);
54 | }
55 | m_sampleBuffer = cmSampleBuffer;
56 | if (m_sampleBuffer) {
57 | CFRetain(m_sampleBuffer);
58 | }
59 | }
60 |
61 | // Encode an uncompressed CoreVideo pixel buffer as a compressed CoreMedia buffer.
62 | // The input is BGRA pixels and the output is a CoreMedia H.264 frame as a data buffer.
63 | // The output CoreMedia buffer is assigned to self.sampleBuffer via async callback.
64 | // Returns TRUE on success or FALSE if a compression session could not be created.
65 |
66 | - (BOOL) encodeH264CoreMediaFrame:(CVPixelBufferRef)cvPixelBuffer {
67 | OSStatus status;
68 |
69 | if (self->session == NULL) {
70 | int width = (int) CVPixelBufferGetWidth(cvPixelBuffer);
71 | int height = (int) CVPixelBufferGetHeight(cvPixelBuffer);
72 |
73 | NSDictionary* pixelBufferOptions = @{
74 | (NSString*) kCVPixelBufferWidthKey : @(width),
75 | (NSString*) kCVPixelBufferHeightKey : @(height),
76 | (NSString*) kCVPixelBufferOpenGLESCompatibilityKey : @YES,
77 | (NSString*) kCVPixelBufferIOSurfacePropertiesKey : @{}};
78 |
79 | CFMutableDictionaryRef encoderSpecifications = NULL;
80 | status = VTCompressionSessionCreate(
81 | kCFAllocatorDefault,
82 | width,
83 | height,
84 | kCMVideoCodecType_H264,
85 | encoderSpecifications,
86 | (__bridge CFDictionaryRef)pixelBufferOptions,
87 | NULL,
88 | (VTCompressionOutputCallback)VideoToolboxCallback,
89 | (__bridge void *)self,
90 | &self->session);
91 |
92 | if (status != noErr) {
93 | NSLog(@"VTCompressionSessionCreate status not `noErr`: %d\n", (int)status);
94 | return FALSE;
95 | }
96 |
97 | // Configure session parameters
98 |
99 | [self configureSessionParameters];
100 |
101 | self.frameOffset = 0;
102 |
103 | if (self.frameDuration <= 0.0f) {
104 | self.frameDuration = 1.0f/30;
105 | }
106 | }
107 |
108 | assert(self->session);
109 |
110 | int offset = self.frameOffset;
111 | self.frameOffset += 1;
112 |
113 | // Map the frame offset to a presentation time expressed in 1/600 second units
114 |
115 | int nSamples = (int) round(self.frameDuration * offset * 600);
116 |
117 | // printf("frame %3d maps to offset time %0.3f which is %5d in 1/600 intervals\n", offset, self.frameDuration * offset, nSamples);
118 |
119 | CMTime pts = CMTimeMake(nSamples, 600);
120 | CMTime dur = CMTimeMake(600, 600);
121 |
122 | self.sampleBuffer = NULL;
123 |
124 | status = VTCompressionSessionEncodeFrame(session, cvPixelBuffer, pts, dur, NULL, NULL, NULL);
125 |
126 | if (status == kVTInvalidSessionErr) {
127 | NSLog(@"VTCompressionSessionEncodeFrame status kVTInvalidSessionErr\n");
128 | return FALSE;
129 | } else if (status != noErr) {
130 | NSLog(@"VTCompressionSessionEncodeFrame status not `noErr`: %d\n", (int)status);
131 | return FALSE;
132 | }
133 |
134 | return TRUE;
135 | }
136 |
137 | - (void) didReceiveSampleBuffer:(CMSampleBufferRef)cmSampleBuffer
138 | {
139 | self.sampleBuffer = cmSampleBuffer;
140 |
141 | if (self.sampleBufferBlock != nil) {
142 | if (cmSampleBuffer == NULL) {
143 | // Error case
144 | self.sampleBufferBlock(NULL);
145 | } else {
146 | // Success
147 | self.sampleBufferBlock(cmSampleBuffer);
148 | }
149 | }
150 |
151 | return;
152 | }
153 |
154 | // If the caller wants to explicitly block until the frame encode operation
155 | // is finished then this method can be invoked. Don't invoke on the main
156 | // thread, since it will block.
157 |
158 | - (void) waitForFrame
159 | {
160 | OSStatus status;
161 |
162 | int offset = self.frameOffset - 1;
163 | if (offset < 0) {
164 | offset = 0;
165 | }
166 |
167 | //CMTime pts = CMTimeMake(600 * offset, 600);
168 | //status = VTCompressionSessionCompleteFrames(session, pts);
169 |
170 | status = VTCompressionSessionCompleteFrames(session, kCMTimeIndefinite);
171 |
172 | if (status == kVTInvalidSessionErr) {
173 | NSLog(@"VTCompressionSessionCompleteFrames status kVTInvalidSessionErr\n");
174 | } else if (status != noErr) {
175 | NSLog(@"VTCompressionSessionCompleteFrames status not `noErr`: %d\n", (int)status);
176 | }
177 | }
178 |
179 | - (void) configureSessionParameters
180 | {
181 | OSStatus status;
182 |
183 | status = VTSessionSetProperty(session,
184 | kVTCompressionPropertyKey_RealTime,
185 | kCFBooleanTrue);
186 |
187 | if (noErr != status) {
188 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_RealTime.\n");
189 | }
190 |
191 | status = VTSessionSetProperty(session,
192 | kVTCompressionPropertyKey_ProfileLevel,
193 | kVTProfileLevel_H264_Main_AutoLevel);
194 |
195 | if (noErr != status) {
196 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_ProfileLevel.\n");
197 | }
198 |
199 | status = VTSessionSetProperty(session,
200 | kVTCompressionPropertyKey_AllowFrameReordering,
201 | kCFBooleanFalse);
202 |
203 | if (noErr != status) {
204 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_AllowFrameReordering.\n");
205 | }
206 |
207 | // Must be a keyframe
208 |
209 | status = VTSessionSetProperty(session,
210 | kVTCompressionPropertyKey_MaxKeyFrameInterval,
211 | (__bridge CFNumberRef)@(1));
212 |
213 | if (noErr != status) {
214 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_MaxKeyFrameInterval.\n");
215 | }
216 |
217 | //NSNumber *bitrate = @(700);
218 | //NSNumber *bitrate = @(20000); // 400
219 |
220 | //NSNumber *bitrate = @(100000); // 1146
221 | //NSNumber *bitrate = @(200000); // 1332
222 | //NSNumber *bitrate = @(500000); // 1956
223 | //NSNumber *bitrate = @(700000); // 2423
224 | //NSNumber *bitrate = @(1000000); // 2756
225 | //NSNumber *bitrate = @(1250000); // 3283
226 | //NSNumber *bitrate = @(1500000); // 3697
227 | //NSNumber *bitrate = @(2000000); // 4121
228 |
229 | // NSNumber *bitrate = @(5000000);
230 | // NSNumber *bitrate = @(2000000000);
231 |
232 | NSNumber *bitrate;
233 |
234 | if (self.aveBitrate == 0) {
235 | bitrate = @(2000000); // Lowish but not super low quality
236 | } else {
237 | bitrate = @(self.aveBitrate);
238 | }
239 |
240 | status = VTSessionSetProperty(session,
241 | kVTCompressionPropertyKey_AverageBitRate,
242 | (__bridge CFNumberRef)bitrate);
243 |
244 | if (noErr != status) {
245 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_AverageBitRate.\n");
246 | }
247 |
248 | // status = VTSessionSetProperty(session, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)@[800 * 1024 / 8, 1]);
249 |
250 | /*
251 |
252 | // Not as effective, baseline profile compression
253 |
254 | status = VTSessionSetProperty(session,
255 | kVTCompressionPropertyKey_H264EntropyMode,
256 | kVTH264EntropyMode_CAVLC);
257 |
258 | if (noErr != status) {
259 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_H264EntropyMode.\n");
260 | }
261 |
262 | */
263 |
264 | // CABAC provides best entropy encoding
265 |
266 | status = VTSessionSetProperty(session,
267 | kVTCompressionPropertyKey_H264EntropyMode,
268 | kVTH264EntropyMode_CABAC);
269 |
270 | if (noErr != status) {
271 | NSLog(@"VTSessionSetProperty: Cannot set kVTCompressionPropertyKey_H264EntropyMode.\n");
272 | }
273 |
274 | status = VTCompressionSessionPrepareToEncodeFrames(session);
275 |
276 | if (noErr != status) {
277 | NSLog(@"VTCompressionSessionPrepareToEncodeFrames %d\n", (int)status);
278 | }
279 | }
280 |
281 | @end
282 |
283 | // Video Toolbox callback
284 |
285 | void VideoToolboxCallback(void *outputCallbackRefCon,
286 | void *sourceFrameRefCon,
287 | OSStatus status,
288 | VTEncodeInfoFlags infoFlags,
289 | CMSampleBufferRef sampleBuffer )
290 | {
291 | H264FrameEncoder *obj = (__bridge H264FrameEncoder *)outputCallbackRefCon;
292 | if (obj == nil) {
293 | assert(obj);
294 | }
295 |
296 | if (status != noErr) {
297 | NSLog(@"Error: %@", [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil]);
298 |
299 | [obj didReceiveSampleBuffer:nil];
300 | } else {
301 | [obj didReceiveSampleBuffer:sampleBuffer];
302 | }
303 |
304 | return;
305 | }
306 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/DetailViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // DetailViewController.m
3 | // H264SeamlessLooping
4 | //
5 | // Created by Mo DeJong on 4/5/16.
6 | //
7 | // See license.txt for BSD license terms.
8 | //
9 |
10 | #import "DetailViewController.h"
11 |
12 | @import AVFoundation;
13 | @import AVKit;
14 |
15 | #import "BGDecodeEncode.h"
16 |
17 | #import "H264FrameEncoder.h"
18 | #import "H264FrameDecoder.h"
19 |
20 | static int dumpFramesImages = 0;
21 |
22 | @interface DetailViewController ()
23 |
24 | @property (nonatomic, copy) NSString *resourceName;
25 |
26 | @property (nonatomic, retain) AVPlayerViewController* avPlayerViewController;
27 |
28 | @property (nonatomic, retain) AVSampleBufferDisplayLayer *sampleBufferLayer;
29 |
30 | @property (nonatomic, retain) NSTimer *displayH264Timer;
31 |
32 | @property (nonatomic, assign) BOOL isWaitingToPlay;
33 |
34 | @property (nonatomic, copy) NSArray *encodedBuffers;
35 | @property (nonatomic, assign) int encodedBufferOffset;
36 |
37 | @end
38 |
39 | @implementation DetailViewController
40 |
41 | @synthesize tag = m_tag;
42 |
43 | #pragma mark - Managing the detail item
44 |
45 | - (void) dealloc {
46 | NSLog(@"DetailViewController : dealloc %p with tag \"%@\"", self, self.tag);
47 |
48 | [[NSNotificationCenter defaultCenter] removeObserver:self];
49 | }
50 |
51 | - (void)configureView {
52 | // Update the user interface for the detail item.
53 | if (self.tag) {
54 | self.detailDescriptionLabel.text = [self.tag description];
55 | }
56 | }
57 |
58 | - (void) loadAVPlayerLayer
59 | {
60 | UIView *view = self.view;
61 | NSString *resourceName = self.resourceName;
62 | NSString* movieFilePath = [[NSBundle mainBundle]
63 | pathForResource:resourceName ofType:nil];
64 | NSAssert(movieFilePath, @"movieFilePath is nil");
65 | NSURL *fileURL = [NSURL fileURLWithPath:movieFilePath];
66 |
67 | AVPlayerViewController *playerViewController = [[AVPlayerViewController alloc] init];
68 | playerViewController.player = [AVPlayer playerWithURL:fileURL];
69 | self.avPlayerViewController = playerViewController;
70 | [self resizePlayerToViewSize];
71 | [view addSubview:playerViewController.view];
72 | view.autoresizesSubviews = TRUE;
73 |
74 | // Deliver notification on movie play end
75 |
76 | AVPlayerItem *playerItem = playerViewController.player.currentItem;
77 | assert(playerItem);
78 |
79 | [[NSNotificationCenter defaultCenter] addObserver:self
80 | selector:@selector(aVPlayerViewControllerDonePlaying:)
81 | name:AVPlayerItemDidPlayToEndTimeNotification
82 | object:playerItem];
83 |
84 | [self addObserverForTimeRanges];
85 |
86 | self.isWaitingToPlay = TRUE;
87 | }
88 |
89 | - (void)viewDidLayoutSubviews {
90 | // Adjust buffer dimensions
91 | [self resizePlayerToViewSize];
92 | }
93 |
94 | - (void) resizePlayerToViewSize
95 | {
96 | CGRect frame = self.view.frame;
97 |
98 | NSLog(@" avPlayerViewController set to frame size %d, %d", (int)frame.size.width, (int)frame.size.height);
99 |
100 | self.avPlayerViewController.view.frame = frame;
101 |
102 | self.sampleBufferLayer.frame = frame;
103 | self.sampleBufferLayer.position = CGPointMake(CGRectGetMidX(self.sampleBufferLayer.bounds), CGRectGetMidY(self.sampleBufferLayer.bounds));
104 | }
105 |
106 | - (void) aVPlayerViewControllerDonePlaying:(NSNotification*)notification
107 | {
108 | AVPlayer *player = self.avPlayerViewController.player;
109 | assert(player);
110 | AVPlayerItem *playerItem = player.currentItem;
111 | assert(playerItem);
112 | [playerItem seekToTime:kCMTimeZero];
113 | [player play];
114 | }
115 |
116 | // Check for avPlayerViewController ready to play
117 |
118 | - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
119 | {
120 | AVPlayer *player = self.avPlayerViewController.player;
121 |
122 | if (object == player && [keyPath isEqualToString:@"currentItem.loadedTimeRanges"]) {
123 | NSArray *timeRanges = (NSArray*)[change objectForKey:NSKeyValueChangeNewKey];
124 | if (timeRanges && [timeRanges count]) {
125 | CMTimeRange timerange = [[timeRanges objectAtIndex:0] CMTimeRangeValue];
126 | float currentBufferDuration = CMTimeGetSeconds(CMTimeAdd(timerange.start, timerange.duration));
127 | CMTime duration = player.currentItem.asset.duration;
128 | float seconds = CMTimeGetSeconds(duration);
129 |
130 | // Two seconds of buffered media should be enough to know whether playback is ready
131 | if (self.isWaitingToPlay && (currentBufferDuration > 2 || currentBufferDuration == seconds)) {
132 |
133 | [self removeObserverForTimeRanges];
134 | self.isWaitingToPlay = FALSE;
135 |
136 | // Start at zero
137 |
138 | [self aVPlayerViewControllerDonePlaying:nil];
139 | }
140 | } else {
141 | [[[UIAlertView alloc] initWithTitle:@"Alert!" message:@"Error trying to play the clip. Please try again" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil, nil] show];
142 | }
143 | }
144 | }
145 |
146 | - (void) addObserverForTimeRanges
147 | {
148 | AVPlayer *player = self.avPlayerViewController.player;
149 | if (player) {
150 | [player addObserver:self forKeyPath:@"currentItem.loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];
151 | }
152 | }
153 |
154 | - (void)removeObserverForTimeRanges
155 | {
156 | AVPlayer *player = self.avPlayerViewController.player;
157 | if (player) {
158 | @try {
159 | [player removeObserver:self forKeyPath:@"currentItem.loadedTimeRanges"];
160 | } @catch(id anException) {
161 | NSLog(@"Exception removing observer: %@. Observer was previously removed or never added.", anException);
162 | // do nothing, the observer wasn't attached since an exception was thrown
163 | }
164 | }
165 | }
166 |
167 | - (void)viewDidLoad {
168 | [super viewDidLoad];
169 |
170 | self.resourceName = @"CarOverWhiteBG.m4v";
171 |
172 | if ([self.tag hasPrefix:@"AVPlayer"]) {
173 | [self loadAVPlayerLayer];
174 | } else if ([self.tag hasPrefix:@"CoreMedia"]) {
175 | [self loadCoreMedia];
176 | } else if (self.tag == nil || [self.tag isEqualToString:@""]) {
177 | // nop
178 | } else {
179 | NSAssert(0, @"unsupported tag \"%@\"", self.tag);
180 | }
181 | }
182 |
183 | - (void) viewDidDisappear:(BOOL)animated
184 | {
185 | [super viewDidDisappear:animated];
186 | [self removeObserverForTimeRanges];
187 |
188 | [self.displayH264Timer invalidate];
189 | self.displayH264Timer = nil;
190 | }
191 |
192 | - (void)didReceiveMemoryWarning {
193 | [super didReceiveMemoryWarning];
194 | // Dispose of any resources that can be recreated.
195 | }
196 |
197 | // Load compressed CoreMedia sample data from a .mov container and
198 | // display the samples via AVSampleBufferDisplayLayer.
199 |
200 | - (void) loadCoreMedia
201 | {
202 | self.title = @"Loading";
203 |
204 | // Setup AVSampleBufferDisplayLayer to display samples from memory
205 |
206 | self.sampleBufferLayer = [[AVSampleBufferDisplayLayer alloc] init];
207 |
208 | self.sampleBufferLayer.videoGravity = AVLayerVideoGravityResizeAspect;
209 |
210 | self.sampleBufferLayer.backgroundColor = [UIColor redColor].CGColor;
211 |
212 | [self.view.layer addSublayer:self.sampleBufferLayer];
213 |
214 | [self resizePlayerToViewSize];
215 |
216 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
217 | [self loadCoreMediaOnBackgroundThread];
218 | });
219 |
220 | return;
221 | }
222 |
223 | // Read H264 sample data from a .mov container on a background thread
224 | // and re-encode each frame as a keyframe.
225 |
226 | - (void) loadCoreMediaOnBackgroundThread
227 | {
228 | NSString *resourceName = self.resourceName;
229 | NSString* movieFilePath = [[NSBundle mainBundle]
230 | pathForResource:resourceName ofType:nil];
231 | NSAssert(movieFilePath, @"movieFilePath is nil");
232 |
233 | // Decode H.264 encoded data from the file and then reencode the image data
234 | // as keyframes that can be accessed randomly.
235 |
236 | float frameDuration = 1.0f/30;
237 | int aveBitrate = 5000000;
238 |
239 | CGSize renderSize = CGSizeMake(1920, 1080);
240 |
241 | NSArray *encodedH264Buffers =
242 | [BGDecodeEncode recompressKeyframesOnBackgroundThread:movieFilePath
243 | frameDuration:frameDuration
244 | renderSize:renderSize
245 | aveBitrate:aveBitrate];
246 |
247 | if (encodedH264Buffers == nil) {
248 | // The H264 frame decode/encode process failed, this could happen
249 | // if the app was put into the background while converting.
250 |
251 | return;
252 | }
253 |
254 | self.encodedBuffers = [NSArray arrayWithArray:encodedH264Buffers];
255 |
256 | // Create timer on main thread
257 |
258 | dispatch_sync(dispatch_get_main_queue(), ^{
259 | [self setupTimer];
260 | });
261 |
262 | return;
263 | }
264 |
265 | // Create a repeating timer on the main thread that feeds the encoded
266 | // samples to the AVSampleBufferDisplayLayer.
267 |
268 | - (void) setupTimer
269 | {
270 | // FIXME: need to decode each frame and then save as a series of images so as to check
271 | // the quality of the encoded video.
272 |
273 | if ((0)) {
274 | // Display just the first encoded frame
275 |
276 | CMSampleBufferRef sampleBufferRef = (__bridge CMSampleBufferRef) self.encodedBuffers[0];
277 |
278 | [self.sampleBufferLayer enqueueSampleBuffer:sampleBufferRef];
279 | }
280 |
281 | if ((1)) {
282 | // Dead simple NSTimer based impl
283 |
284 | NSTimer *timer = [NSTimer timerWithTimeInterval:1.0/30
285 | target:self
286 | selector:@selector(timerFired:)
287 | userInfo:NULL
288 | repeats:TRUE];
289 |
290 | self.displayH264Timer = timer;
291 |
292 | [[NSRunLoop currentRunLoop] addTimer:timer forMode:NSRunLoopCommonModes];
293 |
294 | self.encodedBufferOffset = 0;
295 | self.encodedBuffers = [NSArray arrayWithArray:self.encodedBuffers];
296 |
297 | }
298 |
299 | if ((0)) {
300 | // Send frames to sampleBufferLayer and use embedded display times to control when to display.
301 | // Note that this method is wasteful since it decodes all the H264 data up front
302 |
303 | assert(self.sampleBufferLayer);
304 |
305 | int numSampleBuffers = (int) self.encodedBuffers.count;
306 |
307 | for (int i = 0; i < numSampleBuffers; i++ ) {
308 | CMSampleBufferRef sampleBufferRef = (__bridge CMSampleBufferRef) self.encodedBuffers[i];
309 |
310 | [self.sampleBufferLayer enqueueSampleBuffer:sampleBufferRef];
311 | }
312 |
313 | CMTimebaseRef controlTimebase;
314 | CMTimebaseCreateWithMasterClock(CFAllocatorGetDefault(), CMClockGetHostTimeClock(), &controlTimebase );
315 |
316 | self.sampleBufferLayer.controlTimebase = controlTimebase;
317 | CMTimebaseSetTime(self.sampleBufferLayer.controlTimebase, kCMTimeZero);
318 | CMTimebaseSetRate(self.sampleBufferLayer.controlTimebase, 1.0);
319 |
320 | [self.sampleBufferLayer setNeedsDisplay];
321 | }
322 |
323 | // Reset the bg color
324 |
325 | self.sampleBufferLayer.backgroundColor = [UIColor blackColor].CGColor;
326 |
327 | self.title = @"Looping";
328 |
329 | return;
330 | }
331 |
332 | // Really simplified impl of a repeating timer: just send the next frame to the sampleBufferLayer
333 |
334 | - (void) timerFired:(id)timer {
335 | int offset = self.encodedBufferOffset;
336 |
337 | #if defined(DEBUG)
338 | NSLog(@"timerFired %d", offset);
339 | #endif // DEBUG
340 |
341 | assert(self.encodedBuffers);
342 |
343 | CMSampleBufferRef sampleBufferRef = (__bridge CMSampleBufferRef) self.encodedBuffers[offset];
344 |
345 | // Force display as soon as possible
346 |
347 | CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBufferRef, YES);
348 | CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
349 | CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
350 |
351 | [self.sampleBufferLayer enqueueSampleBuffer:sampleBufferRef];
352 |
353 | [self.sampleBufferLayer setNeedsDisplay];
354 |
355 | self.encodedBufferOffset = self.encodedBufferOffset + 1;
356 |
357 | if (self.encodedBufferOffset >= self.encodedBuffers.count) {
358 | // [timer invalidate];
359 |
360 | // Keep looping
361 |
362 | self.encodedBufferOffset = 0;
363 | }
364 |
365 | // Manually decode the frame data and emit the pixels as PNG
366 |
367 | if (dumpFramesImages) {
368 | NSString *dumpFilename = [NSString stringWithFormat:@"dump_decoded_%0d.png", offset];
369 | NSString *tmpPath = [NSTemporaryDirectory() stringByAppendingPathComponent:dumpFilename];
370 |
371 | H264FrameDecoder *frameDecoder = [[H264FrameDecoder alloc] init];
372 |
373 | frameDecoder.pixelType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
374 |
375 | frameDecoder.pixelBufferBlock = ^(CVPixelBufferRef pixBuffer){
376 | CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixBuffer];
377 |
378 | int width = (int) CVPixelBufferGetWidth(pixBuffer);
379 | int height = (int) CVPixelBufferGetHeight(pixBuffer);
380 |
381 | CGSize imgSize = CGSizeMake(width, height);
382 |
383 | UIGraphicsBeginImageContext(imgSize);
384 | CGRect rect;
385 | rect.origin = CGPointZero;
386 | rect.size = imgSize;
387 | UIImage *remImage = [UIImage imageWithCIImage:ciImage];
388 | [remImage drawInRect:rect];
389 | UIImage *outputImg = UIGraphicsGetImageFromCurrentImageContext();
390 | UIGraphicsEndImageContext();
391 |
392 | NSData *pngData = UIImagePNGRepresentation(outputImg);
393 | [pngData writeToFile:tmpPath atomically:TRUE];
394 |
395 | NSLog(@"wrote \"%@\"", tmpPath);
396 | };
397 |
398 | [frameDecoder decodeH264CoreMediaFrame:sampleBufferRef];
399 |
400 | [frameDecoder waitForFrame];
401 |
402 | [frameDecoder endSession];
403 | }
404 |
405 | return;
406 | }
407 |
408 | @end
409 |
--------------------------------------------------------------------------------
/H264SeamlessLooping.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 3C32E96B1DBFF75E0092E7A9 /* BGDecodeEncode.m in Sources */ = {isa = PBXBuildFile; fileRef = 3C32E96A1DBFF75E0092E7A9 /* BGDecodeEncode.m */; };
11 | 3CE33B551CB84AB80096D6AA /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CE33B541CB84AB80096D6AA /* main.m */; };
12 | 3CE33B581CB84AB80096D6AA /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CE33B571CB84AB80096D6AA /* AppDelegate.m */; };
13 | 3CE33B5B1CB84AB80096D6AA /* MasterViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CE33B5A1CB84AB80096D6AA /* MasterViewController.m */; };
14 | 3CE33B5E1CB84AB80096D6AA /* DetailViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CE33B5D1CB84AB80096D6AA /* DetailViewController.m */; };
15 | 3CE33B611CB84AB80096D6AA /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3CE33B5F1CB84AB80096D6AA /* Main.storyboard */; };
16 | 3CE33B631CB84AB80096D6AA /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 3CE33B621CB84AB80096D6AA /* Assets.xcassets */; };
17 | 3CE33B661CB84AB80096D6AA /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3CE33B641CB84AB80096D6AA /* LaunchScreen.storyboard */; };
18 | 3CE33B791CB8551E0096D6AA /* H264FrameDecoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CE33B751CB8551E0096D6AA /* H264FrameDecoder.m */; };
19 | 3CE33B7A1CB8551E0096D6AA /* H264FrameEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CE33B771CB8551E0096D6AA /* H264FrameEncoder.m */; };
20 | 3CE33B7C1CB85E870096D6AA /* CarOverWhiteBG.m4v in Resources */ = {isa = PBXBuildFile; fileRef = 3CE33B7B1CB85E870096D6AA /* CarOverWhiteBG.m4v */; };
21 | 3CFF2417213E060600153340 /* CGFrameBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 3CFF2416213E060600153340 /* CGFrameBuffer.m */; };
22 | /* End PBXBuildFile section */
23 |
24 | /* Begin PBXFileReference section */
25 | 3C32E9691DBFF75E0092E7A9 /* BGDecodeEncode.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BGDecodeEncode.h; sourceTree = ""; };
26 | 3C32E96A1DBFF75E0092E7A9 /* BGDecodeEncode.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = BGDecodeEncode.m; sourceTree = ""; };
27 | 3CE33B501CB84AB80096D6AA /* H264SeamlessLooping.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = H264SeamlessLooping.app; sourceTree = BUILT_PRODUCTS_DIR; };
28 | 3CE33B541CB84AB80096D6AA /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; };
29 | 3CE33B561CB84AB80096D6AA /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; };
30 | 3CE33B571CB84AB80096D6AA /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; };
31 | 3CE33B591CB84AB80096D6AA /* MasterViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MasterViewController.h; sourceTree = ""; };
32 | 3CE33B5A1CB84AB80096D6AA /* MasterViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MasterViewController.m; sourceTree = ""; };
33 | 3CE33B5C1CB84AB80096D6AA /* DetailViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DetailViewController.h; sourceTree = ""; };
34 | 3CE33B5D1CB84AB80096D6AA /* DetailViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DetailViewController.m; sourceTree = ""; };
35 | 3CE33B601CB84AB80096D6AA /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
36 | 3CE33B621CB84AB80096D6AA /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
37 | 3CE33B651CB84AB80096D6AA /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; };
38 | 3CE33B671CB84AB80096D6AA /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
39 | 3CE33B741CB8551E0096D6AA /* H264FrameDecoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = H264FrameDecoder.h; sourceTree = ""; };
40 | 3CE33B751CB8551E0096D6AA /* H264FrameDecoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = H264FrameDecoder.m; sourceTree = ""; };
41 | 3CE33B761CB8551E0096D6AA /* H264FrameEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = H264FrameEncoder.h; sourceTree = ""; };
42 | 3CE33B771CB8551E0096D6AA /* H264FrameEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = H264FrameEncoder.m; sourceTree = ""; };
43 | 3CE33B7B1CB85E870096D6AA /* CarOverWhiteBG.m4v */ = {isa = PBXFileReference; lastKnownFileType = file; path = CarOverWhiteBG.m4v; sourceTree = ""; };
44 | 3CFF2415213E060600153340 /* CGFrameBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CGFrameBuffer.h; sourceTree = ""; };
45 | 3CFF2416213E060600153340 /* CGFrameBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CGFrameBuffer.m; sourceTree = ""; };
46 | /* End PBXFileReference section */
47 |
48 | /* Begin PBXFrameworksBuildPhase section */
49 | 3CE33B4D1CB84AB80096D6AA /* Frameworks */ = {
50 | isa = PBXFrameworksBuildPhase;
51 | buildActionMask = 2147483647;
52 | files = (
53 | );
54 | runOnlyForDeploymentPostprocessing = 0;
55 | };
56 | /* End PBXFrameworksBuildPhase section */
57 |
58 | /* Begin PBXGroup section */
59 | 3CE33B471CB84AB70096D6AA = {
60 | isa = PBXGroup;
61 | children = (
62 | 3CE33B521CB84AB80096D6AA /* H264SeamlessLooping */,
63 | 3CE33B511CB84AB80096D6AA /* Products */,
64 | );
65 | sourceTree = "";
66 | };
67 | 3CE33B511CB84AB80096D6AA /* Products */ = {
68 | isa = PBXGroup;
69 | children = (
70 | 3CE33B501CB84AB80096D6AA /* H264SeamlessLooping.app */,
71 | );
72 | name = Products;
73 | sourceTree = "";
74 | };
75 | 3CE33B521CB84AB80096D6AA /* H264SeamlessLooping */ = {
76 | isa = PBXGroup;
77 | children = (
78 | 3C32E9691DBFF75E0092E7A9 /* BGDecodeEncode.h */,
79 | 3C32E96A1DBFF75E0092E7A9 /* BGDecodeEncode.m */,
80 | 3CE33B761CB8551E0096D6AA /* H264FrameEncoder.h */,
81 | 3CE33B771CB8551E0096D6AA /* H264FrameEncoder.m */,
82 | 3CE33B741CB8551E0096D6AA /* H264FrameDecoder.h */,
83 | 3CE33B751CB8551E0096D6AA /* H264FrameDecoder.m */,
84 | 3CFF2415213E060600153340 /* CGFrameBuffer.h */,
85 | 3CFF2416213E060600153340 /* CGFrameBuffer.m */,
86 | 3CE33B561CB84AB80096D6AA /* AppDelegate.h */,
87 | 3CE33B571CB84AB80096D6AA /* AppDelegate.m */,
88 | 3CE33B591CB84AB80096D6AA /* MasterViewController.h */,
89 | 3CE33B5A1CB84AB80096D6AA /* MasterViewController.m */,
90 | 3CE33B5C1CB84AB80096D6AA /* DetailViewController.h */,
91 | 3CE33B5D1CB84AB80096D6AA /* DetailViewController.m */,
92 | 3CE33B7B1CB85E870096D6AA /* CarOverWhiteBG.m4v */,
93 | 3CE33B5F1CB84AB80096D6AA /* Main.storyboard */,
94 | 3CE33B621CB84AB80096D6AA /* Assets.xcassets */,
95 | 3CE33B641CB84AB80096D6AA /* LaunchScreen.storyboard */,
96 | 3CE33B671CB84AB80096D6AA /* Info.plist */,
97 | 3CE33B531CB84AB80096D6AA /* Supporting Files */,
98 | );
99 | path = H264SeamlessLooping;
100 | sourceTree = "";
101 | };
102 | 3CE33B531CB84AB80096D6AA /* Supporting Files */ = {
103 | isa = PBXGroup;
104 | children = (
105 | 3CE33B541CB84AB80096D6AA /* main.m */,
106 | );
107 | name = "Supporting Files";
108 | sourceTree = "";
109 | };
110 | /* End PBXGroup section */
111 |
112 | /* Begin PBXNativeTarget section */
113 | 3CE33B4F1CB84AB80096D6AA /* H264SeamlessLooping */ = {
114 | isa = PBXNativeTarget;
115 | buildConfigurationList = 3CE33B6A1CB84AB80096D6AA /* Build configuration list for PBXNativeTarget "H264SeamlessLooping" */;
116 | buildPhases = (
117 | 3CE33B4C1CB84AB80096D6AA /* Sources */,
118 | 3CE33B4D1CB84AB80096D6AA /* Frameworks */,
119 | 3CE33B4E1CB84AB80096D6AA /* Resources */,
120 | );
121 | buildRules = (
122 | );
123 | dependencies = (
124 | );
125 | name = H264SeamlessLooping;
126 | productName = H264SeamlessLooping;
127 | productReference = 3CE33B501CB84AB80096D6AA /* H264SeamlessLooping.app */;
128 | productType = "com.apple.product-type.application";
129 | };
130 | /* End PBXNativeTarget section */
131 |
132 | /* Begin PBXProject section */
133 | 3CE33B481CB84AB80096D6AA /* Project object */ = {
134 | isa = PBXProject;
135 | attributes = {
136 | LastUpgradeCheck = 0730;
137 | ORGANIZATIONNAME = HelpURock;
138 | TargetAttributes = {
139 | 3CE33B4F1CB84AB80096D6AA = {
140 | CreatedOnToolsVersion = 7.3;
141 | DevelopmentTeam = 9F74CLHA49;
142 | };
143 | };
144 | };
145 | buildConfigurationList = 3CE33B4B1CB84AB80096D6AA /* Build configuration list for PBXProject "H264SeamlessLooping" */;
146 | compatibilityVersion = "Xcode 3.2";
147 | developmentRegion = English;
148 | hasScannedForEncodings = 0;
149 | knownRegions = (
150 | en,
151 | Base,
152 | );
153 | mainGroup = 3CE33B471CB84AB70096D6AA;
154 | productRefGroup = 3CE33B511CB84AB80096D6AA /* Products */;
155 | projectDirPath = "";
156 | projectRoot = "";
157 | targets = (
158 | 3CE33B4F1CB84AB80096D6AA /* H264SeamlessLooping */,
159 | );
160 | };
161 | /* End PBXProject section */
162 |
163 | /* Begin PBXResourcesBuildPhase section */
164 | 3CE33B4E1CB84AB80096D6AA /* Resources */ = {
165 | isa = PBXResourcesBuildPhase;
166 | buildActionMask = 2147483647;
167 | files = (
168 | 3CE33B661CB84AB80096D6AA /* LaunchScreen.storyboard in Resources */,
169 | 3CE33B631CB84AB80096D6AA /* Assets.xcassets in Resources */,
170 | 3CE33B611CB84AB80096D6AA /* Main.storyboard in Resources */,
171 | 3CE33B7C1CB85E870096D6AA /* CarOverWhiteBG.m4v in Resources */,
172 | );
173 | runOnlyForDeploymentPostprocessing = 0;
174 | };
175 | /* End PBXResourcesBuildPhase section */
176 |
177 | /* Begin PBXSourcesBuildPhase section */
178 | 3CE33B4C1CB84AB80096D6AA /* Sources */ = {
179 | isa = PBXSourcesBuildPhase;
180 | buildActionMask = 2147483647;
181 | files = (
182 | 3CE33B791CB8551E0096D6AA /* H264FrameDecoder.m in Sources */,
183 | 3CFF2417213E060600153340 /* CGFrameBuffer.m in Sources */,
184 | 3CE33B581CB84AB80096D6AA /* AppDelegate.m in Sources */,
185 | 3CE33B5B1CB84AB80096D6AA /* MasterViewController.m in Sources */,
186 | 3CE33B7A1CB8551E0096D6AA /* H264FrameEncoder.m in Sources */,
187 | 3CE33B551CB84AB80096D6AA /* main.m in Sources */,
188 | 3CE33B5E1CB84AB80096D6AA /* DetailViewController.m in Sources */,
189 | 3C32E96B1DBFF75E0092E7A9 /* BGDecodeEncode.m in Sources */,
190 | );
191 | runOnlyForDeploymentPostprocessing = 0;
192 | };
193 | /* End PBXSourcesBuildPhase section */
194 |
195 | /* Begin PBXVariantGroup section */
196 | 3CE33B5F1CB84AB80096D6AA /* Main.storyboard */ = {
197 | isa = PBXVariantGroup;
198 | children = (
199 | 3CE33B601CB84AB80096D6AA /* Base */,
200 | );
201 | name = Main.storyboard;
202 | sourceTree = "";
203 | };
204 | 3CE33B641CB84AB80096D6AA /* LaunchScreen.storyboard */ = {
205 | isa = PBXVariantGroup;
206 | children = (
207 | 3CE33B651CB84AB80096D6AA /* Base */,
208 | );
209 | name = LaunchScreen.storyboard;
210 | sourceTree = "";
211 | };
212 | /* End PBXVariantGroup section */
213 |
214 | /* Begin XCBuildConfiguration section */
215 | 3CE33B681CB84AB80096D6AA /* Debug */ = {
216 | isa = XCBuildConfiguration;
217 | buildSettings = {
218 | ALWAYS_SEARCH_USER_PATHS = NO;
219 | CLANG_ANALYZER_NONNULL = YES;
220 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
221 | CLANG_CXX_LIBRARY = "libc++";
222 | CLANG_ENABLE_MODULES = YES;
223 | CLANG_ENABLE_OBJC_ARC = YES;
224 | CLANG_WARN_BOOL_CONVERSION = YES;
225 | CLANG_WARN_CONSTANT_CONVERSION = YES;
226 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
227 | CLANG_WARN_EMPTY_BODY = YES;
228 | CLANG_WARN_ENUM_CONVERSION = YES;
229 | CLANG_WARN_INT_CONVERSION = YES;
230 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
231 | CLANG_WARN_UNREACHABLE_CODE = YES;
232 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
233 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
234 | COPY_PHASE_STRIP = NO;
235 | DEBUG_INFORMATION_FORMAT = dwarf;
236 | ENABLE_STRICT_OBJC_MSGSEND = YES;
237 | ENABLE_TESTABILITY = YES;
238 | GCC_C_LANGUAGE_STANDARD = gnu99;
239 | GCC_DYNAMIC_NO_PIC = NO;
240 | GCC_NO_COMMON_BLOCKS = YES;
241 | GCC_OPTIMIZATION_LEVEL = 0;
242 | GCC_PREPROCESSOR_DEFINITIONS = (
243 | "DEBUG=1",
244 | "$(inherited)",
245 | );
246 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
247 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
248 | GCC_WARN_UNDECLARED_SELECTOR = YES;
249 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
250 | GCC_WARN_UNUSED_FUNCTION = YES;
251 | GCC_WARN_UNUSED_VARIABLE = YES;
252 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
253 | MTL_ENABLE_DEBUG_INFO = YES;
254 | ONLY_ACTIVE_ARCH = YES;
255 | SDKROOT = iphoneos;
256 | TARGETED_DEVICE_FAMILY = "1,2";
257 | };
258 | name = Debug;
259 | };
260 | 3CE33B691CB84AB80096D6AA /* Release */ = {
261 | isa = XCBuildConfiguration;
262 | buildSettings = {
263 | ALWAYS_SEARCH_USER_PATHS = NO;
264 | CLANG_ANALYZER_NONNULL = YES;
265 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
266 | CLANG_CXX_LIBRARY = "libc++";
267 | CLANG_ENABLE_MODULES = YES;
268 | CLANG_ENABLE_OBJC_ARC = YES;
269 | CLANG_WARN_BOOL_CONVERSION = YES;
270 | CLANG_WARN_CONSTANT_CONVERSION = YES;
271 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
272 | CLANG_WARN_EMPTY_BODY = YES;
273 | CLANG_WARN_ENUM_CONVERSION = YES;
274 | CLANG_WARN_INT_CONVERSION = YES;
275 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
276 | CLANG_WARN_UNREACHABLE_CODE = YES;
277 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
278 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
279 | COPY_PHASE_STRIP = NO;
280 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
281 | ENABLE_NS_ASSERTIONS = NO;
282 | ENABLE_STRICT_OBJC_MSGSEND = YES;
283 | GCC_C_LANGUAGE_STANDARD = gnu99;
284 | GCC_NO_COMMON_BLOCKS = YES;
285 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
286 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
287 | GCC_WARN_UNDECLARED_SELECTOR = YES;
288 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
289 | GCC_WARN_UNUSED_FUNCTION = YES;
290 | GCC_WARN_UNUSED_VARIABLE = YES;
291 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
292 | MTL_ENABLE_DEBUG_INFO = NO;
293 | SDKROOT = iphoneos;
294 | TARGETED_DEVICE_FAMILY = "1,2";
295 | VALIDATE_PRODUCT = YES;
296 | };
297 | name = Release;
298 | };
299 | 3CE33B6B1CB84AB80096D6AA /* Debug */ = {
300 | isa = XCBuildConfiguration;
301 | buildSettings = {
302 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
303 | DEVELOPMENT_TEAM = 9F74CLHA49;
304 | INFOPLIST_FILE = H264SeamlessLooping/Info.plist;
305 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
306 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
307 | PRODUCT_BUNDLE_IDENTIFIER = com.helpurock.H264SeamlessLooping;
308 | PRODUCT_NAME = "$(TARGET_NAME)";
309 | };
310 | name = Debug;
311 | };
312 | 3CE33B6C1CB84AB80096D6AA /* Release */ = {
313 | isa = XCBuildConfiguration;
314 | buildSettings = {
315 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
316 | DEVELOPMENT_TEAM = 9F74CLHA49;
317 | INFOPLIST_FILE = H264SeamlessLooping/Info.plist;
318 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
319 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
320 | PRODUCT_BUNDLE_IDENTIFIER = com.helpurock.H264SeamlessLooping;
321 | PRODUCT_NAME = "$(TARGET_NAME)";
322 | };
323 | name = Release;
324 | };
325 | /* End XCBuildConfiguration section */
326 |
327 | /* Begin XCConfigurationList section */
328 | 3CE33B4B1CB84AB80096D6AA /* Build configuration list for PBXProject "H264SeamlessLooping" */ = {
329 | isa = XCConfigurationList;
330 | buildConfigurations = (
331 | 3CE33B681CB84AB80096D6AA /* Debug */,
332 | 3CE33B691CB84AB80096D6AA /* Release */,
333 | );
334 | defaultConfigurationIsVisible = 0;
335 | defaultConfigurationName = Release;
336 | };
337 | 3CE33B6A1CB84AB80096D6AA /* Build configuration list for PBXNativeTarget "H264SeamlessLooping" */ = {
338 | isa = XCConfigurationList;
339 | buildConfigurations = (
340 | 3CE33B6B1CB84AB80096D6AA /* Debug */,
341 | 3CE33B6C1CB84AB80096D6AA /* Release */,
342 | );
343 | defaultConfigurationIsVisible = 0;
344 | defaultConfigurationName = Release;
345 | };
346 | /* End XCConfigurationList section */
347 | };
348 | rootObject = 3CE33B481CB84AB80096D6AA /* Project object */;
349 | }
350 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/BGDecodeEncode.m:
--------------------------------------------------------------------------------
1 | //
2 | // BGDecodeEncode.m
3 | //
4 | // Created by Mo DeJong on 4/5/16.
5 | //
6 | // See license.txt for BSD license terms.
7 | //
8 |
9 | #import "BGDecodeEncode.h"
10 |
11 | @import AVFoundation;
12 | @import UIKit;
13 |
14 | @import CoreVideo;
15 | @import CoreImage;
16 | @import CoreMedia;
17 | @import CoreGraphics;
18 | @import VideoToolbox;
19 |
20 | #import "H264FrameEncoder.h"
21 |
22 | #import "CGFrameBuffer.h"
23 |
24 | //#if defined(DEBUG)
25 | //static const int dumpFramesImages = 1;
26 | //#else
27 | static const int dumpFramesImages = 0;
28 | //#endif // DEBUG
29 |
30 | #define LOGGING 1
31 | //#define LOGGING_EVERY_FRAME 1
32 |
33 | // Private API
34 |
35 | @interface BGDecodeEncode ()
36 | @end
37 |
38 | @implementation BGDecodeEncode
39 |
40 | // Return the movie decode OS type, typically kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
41 | // but could be kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange. In any case, this value
42 | // needs to match in both the encoder and decoder.
43 |
44 | + (OSType) getPixelType
45 | {
46 | // Explicitly use video range color matrix
47 | const OSType movieEncodePixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
48 | //const OSType movieEncodePixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
49 | return movieEncodePixelFormatType;
50 | }
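51 | 
52 | // Note: video range maps luma to [16,235] while full range uses [0,255]. If the
53 | // encoder and decoder disagree on the range, output colors look washed out or
54 | // crushed, which is why the value above must match on both sides.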
51 |
52 | + (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)cgImage
53 | renderSize:(CGSize)renderSize
54 | dump:(BOOL)dump
55 | asYUV:(BOOL)asYUV
56 | {
57 | NSDictionary *options = @{
58 | (NSString *)kCVPixelBufferCGImageCompatibilityKey: @(YES),
59 | (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @(YES)
60 | };
61 |
62 | int renderWidth = (int) renderSize.width;
63 | int renderHeight = (int) renderSize.height;
64 |
65 | int imageWidth = (int) CGImageGetWidth(cgImage);
66 | int imageHeight = (int) CGImageGetHeight(cgImage);
67 |
68 | assert(imageWidth <= renderWidth);
69 | assert(imageHeight <= renderHeight);
70 |
71 | // FIXME: instead of creating CoreVideo buffers over and over, just create 1 and
72 | // then keep using it to do the render operations. Could also use a pool, but
73 | // not really needed either.
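74 |   //
75 |   // A minimal sketch of that pool idea (untested, names illustrative): the
76 |   // pool would be created once and recycled buffers pulled from it per frame.
77 |   //
78 |   //   CVPixelBufferPoolRef pool = NULL;
79 |   //   NSDictionary *pixelBufferAttrs = @{
80 |   //     (NSString *)kCVPixelBufferWidthKey : @(renderWidth),
81 |   //     (NSString *)kCVPixelBufferHeightKey : @(renderHeight),
82 |   //     (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
83 |   //   };
84 |   //   CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
85 |   //                           (__bridge CFDictionaryRef)pixelBufferAttrs, &pool);
86 |   //   CVPixelBufferRef recycled = NULL;
87 |   //   CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &recycled);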
74 |
75 | CVPixelBufferRef buffer = NULL;
76 | CVPixelBufferCreate(kCFAllocatorDefault,
77 | renderWidth,
78 | renderHeight,
79 | kCVPixelFormatType_32BGRA,
80 | (__bridge CFDictionaryRef)options,
81 | &buffer);
82 |
83 | size_t bytesPerRow, extraBytes;
84 | bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
85 | extraBytes = bytesPerRow - renderWidth*sizeof(uint32_t);
86 | //NSLog(@"bytesPerRow %d extraBytes %d", (int)bytesPerRow, (int)extraBytes);
87 |
88 | CVPixelBufferLockBaseAddress(buffer, 0);
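89 | 
90 |   // The base address below is only valid for CPU access while the buffer is
91 |   // locked; the matching unlock happens after the bitmap render completes.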
89 |
90 | void *baseAddress = CVPixelBufferGetBaseAddress(buffer);
91 |
92 | //CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
93 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
94 |
95 | CGContextRef context;
96 |
97 | context = CGBitmapContextCreate(baseAddress,
98 | renderWidth,
99 | renderHeight,
100 | 8,
101 | CVPixelBufferGetBytesPerRow(buffer),
102 | colorSpace,
103 | kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst);
104 |
105 | // Render frame into top left corner at exact size
106 |
107 | CGContextClearRect(context, CGRectMake(0.0f, 0.0f, renderWidth, renderHeight));
108 |
109 | CGContextDrawImage(context, CGRectMake(0.0f, renderHeight - imageHeight, imageWidth, imageHeight), cgImage);
110 |
111 | CGColorSpaceRelease(colorSpace);
112 | CGContextRelease(context);
113 |
114 | CVPixelBufferUnlockBaseAddress(buffer, 0);
115 |
116 | // Convert from BGRA to YUV representation
117 |
118 | if (asYUV) {
119 | CVPixelBufferRef yuv420Buffer = NULL;
120 | CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
121 | renderWidth,
122 | renderHeight,
123 | [self getPixelType],
124 | (__bridge CFDictionaryRef) @{
125 | (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey: @{},
126 | (__bridge NSString *)kCVPixelFormatOpenGLESCompatibility : @(YES),
127 | },
128 | &yuv420Buffer);
129 |
130 | CIContext *context = [CIContext contextWithOptions:nil];
131 | NSAssert(context, @"CIContext");
132 |
133 | CIImage *inImage = [CIImage imageWithCVPixelBuffer:buffer];
134 |
135 | if (status == kCVReturnSuccess) {
136 | [context render:inImage toCVPixelBuffer:yuv420Buffer];
137 | }
138 |
139 | CVPixelBufferRelease(buffer);
140 |
141 | return yuv420Buffer;
142 | }
143 |
144 | return buffer;
145 | }
146 |
147 | // This method accepts a pixel buffer to be encoded, along with
148 | // an encoder object and an output array that the encoded
149 | // frame will be appended to.
150 |
151 | + (BOOL) encodeAndAppendToArray:(CVPixelBufferRef)pixBuffer
152 | frameOffset:(int)frameOffset
153 | renderSize:(CGSize)renderSize
154 | frameEncoder:(H264FrameEncoder *)frameEncoder
155 | encodedH264Buffers:(NSMutableArray*)encodedH264Buffers
156 | resNoSuffix:(NSString*)resNoSuffix
157 | {
158 | int width = (int) CVPixelBufferGetWidth(pixBuffer);
159 | int height = (int) CVPixelBufferGetHeight(pixBuffer);
160 |
161 | CGSize imgSize = CGSizeMake(width, height);
162 |
163 | // 1920 x 1080 is Full HD and the upper limit of H264 render size for iPad devices.
164 |   // When the size of the input and the output exactly match, use the input buffer (much faster)
165 |
166 | // 2048 x 1536 seems to work just fine on iPad Retina
167 |
168 | //CGSize renderSize = CGSizeMake(1920, 1080);
169 | //CGSize renderSize = CGSizeMake(2048, 1536);
170 |
171 | //int renderWidth = (int) renderSize.width;
172 | //int renderHeight = (int) renderSize.height;
173 |
174 | // Render CoreVideo to a NxN square so that square pixels do not distort
175 |
176 | #if defined(LOGGING_EVERY_FRAME)
177 | NSLog(@"encode input dimensions %4d x %4d", width, height);
178 | #endif // LOGGING_EVERY_FRAME
179 |
180 | CVPixelBufferRef largerBuffer;
181 |
182 | if (CGSizeEqualToSize(imgSize, renderSize)) {
183 | // No resize needed
184 | largerBuffer = pixBuffer;
185 |
186 | CVPixelBufferRetain(largerBuffer);
187 | } else {
188 | int srcWidth = (int) CVPixelBufferGetWidth(pixBuffer);
189 | int srcHeight = (int) CVPixelBufferGetHeight(pixBuffer);
190 | int pixBufferNumBytes = (int) CVPixelBufferGetBytesPerRow(pixBuffer) * srcHeight;
191 |
192 | {
193 | int status = CVPixelBufferLockBaseAddress(pixBuffer, 0);
194 | assert(status == kCVReturnSuccess);
195 | }
196 | void *pixelsPtr = CVPixelBufferGetBaseAddress(pixBuffer);
197 | assert(pixelsPtr);
198 |
199 | size_t bitsPerComponent = 8;
200 | size_t numComponents = 4;
201 | size_t bitsPerPixel = bitsPerComponent * numComponents;
202 | size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixBuffer);
203 |
204 | CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst;
205 |
206 | CGDataProviderReleaseDataCallback releaseData = NULL;
207 |
208 | CGDataProviderRef dataProviderRef = CGDataProviderCreateWithData(
209 | NULL,
210 | pixelsPtr,
211 | pixBufferNumBytes,
212 | releaseData);
213 |
214 | BOOL shouldInterpolate = TRUE;
215 |
216 | CGColorRenderingIntent renderIntent = kCGRenderingIntentDefault;
217 |
218 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); // iOS sRGB
219 |
220 | CGImageRef inImageRef = CGImageCreate(srcWidth, srcHeight, bitsPerComponent, bitsPerPixel, bytesPerRow,
221 | colorSpace, bitmapInfo, dataProviderRef, NULL,
222 | shouldInterpolate, renderIntent);
223 |
224 | CGDataProviderRelease(dataProviderRef);
225 |
226 | CGColorSpaceRelease(colorSpace);
227 |
228 | assert(inImageRef);
229 |
230 | // Dump original before resize action
231 |
232 | if (dumpFramesImages)
233 | {
234 | NSString *dumpFilename = [NSString stringWithFormat:@"%@_orig_F%d.png", resNoSuffix, frameOffset];
235 | NSString *tmpPath = [NSTemporaryDirectory() stringByAppendingPathComponent:dumpFilename];
236 |
237 | UIImage *rerenderedInputImg = [UIImage imageWithCGImage:inImageRef];
238 | NSData *pngData = UIImagePNGRepresentation(rerenderedInputImg);
239 | [pngData writeToFile:tmpPath atomically:TRUE];
240 |
241 | NSLog(@"wrote \"%@\" at size %d x %d", tmpPath, (int)rerenderedInputImg.size.width, (int)rerenderedInputImg.size.height);
242 | }
243 |
244 | // Output image as CoreGraphics buffer
245 |
246 | CGFrameBuffer *cgFramebuffer = [CGFrameBuffer cGFrameBufferWithBppDimensions:24 width:renderSize.width height:renderSize.height];
247 |
248 | // Render the src image into a large framebuffer
249 |
250 | BOOL worked = [cgFramebuffer renderCGImage:inImageRef];
251 | assert(worked);
252 |
253 | CGImageRelease(inImageRef);
254 |
255 | {
256 | int status = CVPixelBufferUnlockBaseAddress(pixBuffer, 0);
257 | assert(status == kCVReturnSuccess);
258 | }
259 |
260 | CGImageRef resizedCgImgRef = [cgFramebuffer createCGImageRef];
261 |
262 | if (dumpFramesImages)
263 | {
264 | NSString *dumpFilename = [NSString stringWithFormat:@"%@_resized_F%d.png", resNoSuffix, frameOffset];
265 | NSString *tmpPath = [NSTemporaryDirectory() stringByAppendingPathComponent:dumpFilename];
266 |
267 | UIImage *rerenderedInputImg = [UIImage imageWithCGImage:resizedCgImgRef];
268 | NSData *pngData = UIImagePNGRepresentation(rerenderedInputImg);
269 | [pngData writeToFile:tmpPath atomically:TRUE];
270 |
271 | NSLog(@"wrote \"%@\" at size %d x %d", tmpPath, (int)rerenderedInputImg.size.width, (int)rerenderedInputImg.size.height);
272 | }
273 |
274 | largerBuffer = [self.class pixelBufferFromCGImage:resizedCgImgRef
275 | renderSize:renderSize
276 | dump:FALSE
277 | asYUV:FALSE];
278 |
279 | CGImageRelease(resizedCgImgRef);
280 | }
281 |
282 | if (dumpFramesImages)
283 | {
284 | CIImage *largerCiImage = [CIImage imageWithCVPixelBuffer:largerBuffer];
285 |
286 | UIGraphicsBeginImageContext(renderSize);
287 | CGRect rect;
288 | rect.origin = CGPointZero;
289 | rect.size = renderSize;
290 | UIImage *remLargerImage = [UIImage imageWithCIImage:largerCiImage];
291 | [remLargerImage drawInRect:rect];
292 | UIImage *largerRenderedImg = UIGraphicsGetImageFromCurrentImageContext();
293 | UIGraphicsEndImageContext();
294 |
295 | NSString *dumpFilename = [NSString stringWithFormat:@"%@_F%d.png", resNoSuffix, frameOffset];
296 | NSString *tmpPath = [NSTemporaryDirectory() stringByAppendingPathComponent:dumpFilename];
297 |
298 | NSData *pngData = UIImagePNGRepresentation(largerRenderedImg);
299 | [pngData writeToFile:tmpPath atomically:TRUE];
300 |
301 | NSLog(@"wrote \"%@\" at size %d x %d", tmpPath, (int)largerRenderedImg.size.width, (int)largerRenderedImg.size.height);
302 | }
303 |
304 | // Render CoreVideo to a NxN square so that square pixels do not distort
305 |
306 | #if defined(LOGGING_EVERY_FRAME)
307 | int largerWidth = (int) CVPixelBufferGetWidth(largerBuffer);
308 | int largerHeight = (int) CVPixelBufferGetHeight(largerBuffer);
309 | NSLog(@"encode output dimensions %4d x %4d", largerWidth, largerHeight);
310 | #endif // LOGGING_EVERY_FRAME
311 |
312 | //NSLog(@"CVPixelBufferRef: %@", pixBuffer);
313 |
314 | __block BOOL encodeFrameErrorCondition = FALSE;
315 |
316 | frameEncoder.sampleBufferBlock = ^(CMSampleBufferRef sampleBuffer) {
317 | // If sampleBuffer is NULL, then the frame could not be encoded.
318 |
319 | if (sampleBuffer == NULL) {
320 | //NSAssert(sampleBuffer, @"sampleBuffer argument to H264FrameEncoder.sampleBufferBlock is NULL");
321 | encodeFrameErrorCondition = TRUE;
322 | return;
323 | }
324 |
325 | [encodedH264Buffers addObject:(__bridge id)sampleBuffer];
326 |
327 | #if defined(LOGGING_EVERY_FRAME)
328 | int numBytes = (int) CMSampleBufferGetSampleSize(sampleBuffer, 0);
329 | NSLog(@"encoded buffer as %6d H264 bytes", numBytes);
330 | #endif // LOGGING_EVERY_FRAME
331 | };
332 |
333 | #if TARGET_IPHONE_SIMULATOR
334 | // No-op
335 | #else
336 | OSType bufferPixelType = CVPixelBufferGetPixelFormatType(largerBuffer);
337 | if (bufferPixelType == kCVPixelFormatType_32BGRA) {
338 | // Already converted from YUV to BGRA
339 | } else {
340 | assert([self getPixelType] == bufferPixelType);
341 | }
342 | #endif // TARGET_IPHONE_SIMULATOR
343 |
344 | BOOL worked = [frameEncoder encodeH264CoreMediaFrame:largerBuffer];
345 |
346 | if (worked) {
347 | [frameEncoder waitForFrame];
348 | }
349 |
350 | CVPixelBufferRelease(largerBuffer);
351 |
352 | // Null out block ref just to make sure
353 | frameEncoder.sampleBufferBlock = nil;
354 |
355 | if (encodeFrameErrorCondition == TRUE) {
356 | return FALSE;
357 | }
358 |
359 | if (worked == FALSE) {
360 | return FALSE;
361 | } else {
362 | return TRUE;
363 | }
364 | }
365 |
366 | // Given a .mov generate an array of the frames as CoreVideo buffers.
367 | // This method returns the frames as BGRA pixels or YUV frames.
368 |
369 | + (BOOL) decodeCoreVideoFramesFromMOV:(NSString*)movPath
370 | asYUV:(BOOL)asYUV
371 | renderSize:(CGSize)renderSize
372 | frameEncoder:(H264FrameEncoder*)frameEncoder
373 | encodedH264Buffers:(NSMutableArray*)encodedH264Buffers
374 | {
375 | if ([[NSFileManager defaultManager] fileExistsAtPath:movPath] == FALSE) {
376 | return FALSE;
377 | }
378 |
379 | NSString *resNoSuffix = [[movPath lastPathComponent] stringByDeletingPathExtension];
380 |
381 | // Read H.264 frames and convert from YUV to BGRA on the read
382 |
383 | NSURL *assetURL = [NSURL fileURLWithPath:movPath];
384 | assert(assetURL);
385 |
386 | NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
387 | forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
388 |
389 | AVURLAsset *avUrlAsset = [[AVURLAsset alloc] initWithURL:assetURL options:options];
390 |
391 | if (avUrlAsset.hasProtectedContent) {
392 | NSAssert(FALSE, @"DRM");
393 | }
394 |
395 |   if ([avUrlAsset tracks].count == 0) {
396 |     NSAssert(FALSE, @"no tracks");
397 | }
398 |
399 | NSError *assetError = nil;
400 | AVAssetReader *aVAssetReader = [AVAssetReader assetReaderWithAsset:avUrlAsset error:&assetError];
401 |
402 | NSAssert(aVAssetReader, @"aVAssetReader");
403 |
404 | if (assetError) {
405 | NSAssert(FALSE, @"AVAssetReader");
406 | }
407 |
408 | NSDictionary *videoSettings;
409 |
410 | if (asYUV) {
411 | videoSettings = [NSDictionary dictionaryWithObject:
412 | [NSNumber numberWithUnsignedInt:[self getPixelType]] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
413 | } else {
414 | videoSettings = [NSDictionary dictionaryWithObject:
415 | [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
416 |
417 | }
418 |
419 | NSArray *videoTracks = [avUrlAsset tracksWithMediaType:AVMediaTypeVideo];
420 |
421 | NSAssert([videoTracks count] == 1, @"only 1 video track can be decoded");
422 |
423 | AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0];
424 |
425 | #if defined(LOGGING_EVERY_FRAME)
426 | NSArray *availableMetadataFormats = videoTrack.availableMetadataFormats;
427 | NSLog(@"availableMetadataFormats %@", availableMetadataFormats);
428 | #endif // LOGGING_EVERY_FRAME
429 |
430 | NSAssert(videoTrack.isSelfContained, @"isSelfContained");
431 |
432 | #if defined(LOGGING_EVERY_FRAME)
433 | CGSize uncroppedSize = videoTrack.naturalSize;
434 | NSLog(@"video track naturalSize w x h : %d x %d", (int)uncroppedSize.width, (int)uncroppedSize.height);
435 | #endif // LOGGING_EVERY_FRAME
436 |
437 |   // Track length in seconds, should map directly to the number of frames
438 |
439 | #if defined(LOGGING_EVERY_FRAME)
440 | CMTimeRange timeRange = videoTrack.timeRange;
441 | float duration = (float)CMTimeGetSeconds(timeRange.duration);
442 | NSLog(@"video track time duration %0.3f", duration);
443 | #endif // LOGGING_EVERY_FRAME
444 |
445 | // Don't know how many frames at this point
446 |
447 | //int numFrames = round(duration);
448 | //NSLog(@"estimated number of frames %d", numFrames);
449 |
450 | AVAssetReaderTrackOutput *aVAssetReaderOutput = [[AVAssetReaderTrackOutput alloc]
451 | initWithTrack:videoTrack outputSettings:videoSettings];
452 |
453 | NSAssert(aVAssetReaderOutput, @"AVAssetReaderVideoCompositionOutput failed");
454 |
455 | aVAssetReaderOutput.alwaysCopiesSampleData = FALSE;
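456 | 
457 |   // (alwaysCopiesSampleData = FALSE avoids a per frame copy of the decoded
458 |   // sample data; the vended buffers must be treated as read only in return.)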
456 |
457 | [aVAssetReader addOutput:aVAssetReaderOutput];
458 |
459 | // start reading
460 |
461 | NSAssert(aVAssetReader, @"aVAssetReader");
462 |
463 | BOOL worked = [aVAssetReader startReading];
464 |
465 | if (worked == FALSE) {
466 | AVAssetReaderStatus status = aVAssetReader.status;
467 | NSError *error = aVAssetReader.error;
468 |
469 | NSLog(@"status = %d", (int)status);
470 | NSLog(@"error = %@", [error description]);
471 |
472 | return FALSE;
473 | }
474 |
475 | // Read N frames as CoreVideo buffers and invoke callback
476 |
477 | BOOL allFramesEncodedSuccessfully = TRUE;
478 |
479 | // Read N frames, convert to BGRA pixels
480 |
481 | for ( int i = 0; 1; i++ ) @autoreleasepool {
482 |
483 | CMSampleBufferRef sampleBuffer = NULL;
484 | sampleBuffer = [aVAssetReaderOutput copyNextSampleBuffer];
485 |
486 | if (sampleBuffer == NULL) {
487 | // Another frame could not be loaded, this is the normal
488 | // termination condition at the end of the file.
489 | break;
490 | }
491 |
492 | // Process BGRA data in buffer, crop and then read and combine
493 |
494 | CVImageBufferRef imageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer);
495 | if (imageBufferRef == NULL) {
496 | NSLog(@"CMSampleBufferGetImageBuffer() returned NULL at frame %d", i);
497 | allFramesEncodedSuccessfully = FALSE;
498 | break;
499 | }
500 |
501 | CVPixelBufferRef pixBuffer = imageBufferRef;
502 |
503 | BOOL worked = [self encodeAndAppendToArray:pixBuffer
504 | frameOffset:i
505 | renderSize:renderSize
506 | frameEncoder:frameEncoder
507 | encodedH264Buffers:encodedH264Buffers
508 | resNoSuffix:resNoSuffix];
509 |
510 | CFRelease(sampleBuffer);
511 |
512 | if (!worked) {
513 | allFramesEncodedSuccessfully = FALSE;
514 | break;
515 | }
516 | }
517 |
518 | [aVAssetReader cancelReading];
519 |
520 | if (allFramesEncodedSuccessfully == FALSE) {
521 | return FALSE;
522 | } else {
523 | return TRUE;
524 | }
525 | }
526 |
527 | // Previous API compat
528 |
529 | + (NSArray*) recompressKeyframesOnBackgroundThread:(NSString*)resourceName
530 | frameDuration:(float)frameDuration
531 | renderSize:(CGSize)renderSize
532 | aveBitrate:(int)aveBitrate
533 | {
534 | NSMutableArray *encodedH264Buffers = [NSMutableArray array];
535 |
536 | @autoreleasepool {
537 | [self recompressKeyframesOnBackgroundThreadImpl:resourceName
538 | frameDuration:frameDuration
539 | renderSize:renderSize
540 | aveBitrate:aveBitrate
541 | encodedH264Buffers:encodedH264Buffers];
542 | }
543 |
544 | NSArray *retArr;
545 |
546 | if (encodedH264Buffers.count == 0) {
547 | retArr = nil;
548 | } else {
549 | retArr = [NSArray arrayWithArray:encodedH264Buffers];
550 | }
551 |
552 | encodedH264Buffers = nil;
553 |
554 | return retArr;
555 | }
556 |
557 | // Decompress and then recompress each frame of H264 video as keyframes that
558 | // can be rendered directly without holding a stream decode resource open.
559 | // If an error is encountered during the encode/decode process then FALSE
560 | // is returned (this can happen when the app is put into the background)
561 |
562 | + (BOOL) recompressKeyframes:(NSString*)resourceName
563 | frameDuration:(float)frameDuration
564 | renderSize:(CGSize)renderSize
565 | aveBitrate:(int)aveBitrate
566 | frames:(NSMutableArray*)frames
567 | {
568 | //dispatch_sync(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
569 |
570 | @autoreleasepool {
571 | [self recompressKeyframesOnBackgroundThreadImpl:resourceName
572 | frameDuration:frameDuration
573 | renderSize:renderSize
574 | aveBitrate:aveBitrate
575 | encodedH264Buffers:frames];
576 | }
577 |
578 | //});
579 |
580 | //[NSThread sleepForTimeInterval:0.1];
581 |
582 | BOOL worked;
583 |
584 | if (frames.count == 0) {
585 | worked = FALSE;
586 | } else {
587 | worked = TRUE;
588 | }
589 |
590 | return worked;
591 | }
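592 | 
593 | // Typical call, as a sketch (the resource name and sizes here are
594 | // illustrative, and the call must be made off the main thread):
595 | //
596 | //   NSMutableArray *frames = [NSMutableArray array];
597 | //   BOOL worked = [BGDecodeEncode recompressKeyframes:@"CarOverWhiteBG.m4v"
598 | //                                       frameDuration:1.0f/30
599 | //                                          renderSize:CGSizeMake(1920, 1080)
600 | //                                          aveBitrate:5000000
601 | //                                              frames:frames];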
592 |
593 | // This implementation is meant to be called from inside an autorelease block
594 | // so that tmp objects created in the scope of this method execution will
595 | // be cleaned up even if recompressKeyframesOnBackgroundThread is invoked
596 | // over and over in a loop or without leaving a calling scope.
597 | //
598 | // Decompress and then recompress each frame of H264 video as keyframes that
599 | // can be rendered directly without holding a stream decode resource open.
600 | // If an error is encountered during the encode/decode process then the
601 | // output array is emptied (this can happen when the app is put into the background)
602 |
603 | + (void) recompressKeyframesOnBackgroundThreadImpl:(NSString*)resourceName
604 | frameDuration:(float)frameDuration
605 | renderSize:(CGSize)renderSize
606 | aveBitrate:(int)aveBitrate
607 | encodedH264Buffers:(NSMutableArray*)encodedH264Buffers
608 | {
609 | #if defined(LOGGING)
610 | NSLog(@"recompressKeyframesOnBackgroundThread");
611 | #endif // LOGGING
612 |
613 | // This operation cannot be executed on the main thread!
614 | NSAssert([NSThread isMainThread] == FALSE, @"isMainThread");
615 |
616 | [encodedH264Buffers removeAllObjects];
617 |
618 | NSString *resTail = [resourceName lastPathComponent];
619 |
620 | NSString* movieFilePath = [[NSBundle mainBundle]
621 | pathForResource:resTail ofType:nil];
622 | NSAssert(movieFilePath, @"movieFilePath is nil");
623 |
624 | // Previously, asYUV was set to TRUE on device in an attempt to get the best
625 | // performance by avoiding YUV->RGB->YUV conversion, but it seems to produce
626 | // some slightly off colors in the reencoded video. Convert the initial movie
627 | // data to RGB and then encode from RGB so that the defaults match whatever iOS
628 | // is doing with 601 and 709 automatic detection.
629 |
630 | BOOL asYUV = FALSE;
631 |
632 | // BOOL asYUV = TRUE;
633 | //#if TARGET_IPHONE_SIMULATOR
634 | // asYUV = FALSE; // Force BGRA buffer when running in simulator
635 | //#endif // TARGET_IPHONE_SIMULATOR
636 |
637 | // Setup frame encoder that will encode each frame
638 |
639 | H264FrameEncoder *frameEncoder = [[H264FrameEncoder alloc] init];
640 |
641 | // Hard coded to 24 FPS
642 | //frameEncoder.frameDuration = 1.0f/24;
643 | frameEncoder.frameDuration = frameDuration;
644 |
645 | // Larger than original but not too big
646 |
647 | // frameEncoder.aveBitrate = 5000000;
648 | frameEncoder.aveBitrate = aveBitrate;
649 |
650 |   // Encode each frame, one at a time, so that total memory used is minimized
651 |
652 | BOOL worked = [self decodeCoreVideoFramesFromMOV:movieFilePath
653 | asYUV:asYUV
654 | renderSize:renderSize
655 | frameEncoder:frameEncoder
656 | encodedH264Buffers:encodedH264Buffers];
657 |
658 | if (worked == FALSE) {
659 | NSLog(@"decodeCoreVideoFramesFromMOV failed for %@", movieFilePath);
660 |
661 | [encodedH264Buffers removeAllObjects];
662 | } else {
663 | #if defined(LOGGING)
664 | int totalEncodeNumBytes = 0;
665 | for ( id obj in encodedH264Buffers ) {
666 | CMSampleBufferRef sampleBuffer = (__bridge CMSampleBufferRef) obj;
667 | totalEncodeNumBytes += (int) CMSampleBufferGetSampleSize(sampleBuffer, 0);
668 | }
669 | int totalkb = totalEncodeNumBytes / 1000;
670 | int totalmb = totalkb / 1000;
671 | NSLog(@"encoded \"%@\" as %d frames", resTail, (int)encodedH264Buffers.count);
672 | NSLog(@"total encoded num bytes %d, %d kB, %d mB", totalEncodeNumBytes, totalkb, totalmb);
673 | #endif // LOGGING
674 | }
675 |
676 | [frameEncoder endSession];
677 | }
678 |
679 | @end
680 |
--------------------------------------------------------------------------------
/H264SeamlessLooping/CGFrameBuffer.m:
--------------------------------------------------------------------------------
1 | //
2 | // CGFrameBuffer.m
3 | //
4 | // Created by Moses DeJong on 2/13/09.
5 | //
6 | // License terms defined in License.txt.
7 |
8 | #import "CGFrameBuffer.h"
9 |
10 | #import <UIKit/UIKit.h>            // UIView
11 | 
12 | #import <QuartzCore/QuartzCore.h>  // CALayer renderInContext:
13 | 
14 | #import <CoreGraphics/CoreGraphics.h>
15 |
16 | #ifndef __OPTIMIZE__
17 | // Automatically define EXTRA_CHECKS when not optimizing (in debug mode)
18 | # define EXTRA_CHECKS
19 | #endif // __OPTIMIZE__
20 |
21 | // Alignment is not an issue, makes no difference in performance
22 | //#define USE_ALIGNED_VALLOC 1
23 |
24 | // Using page copy makes a huge diff, 24 bpp goes from 15->20 FPS to 30 FPS!
25 | #define USE_MACH_VM_ALLOCATE 1
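26 | // (vm_copy can remap whole pages copy-on-write instead of touching every
27 | // byte, which is where the speedup over a plain memcpy comes from.)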
26 |
27 | #if defined(USE_ALIGNED_VALLOC) || defined(USE_MACH_VM_ALLOCATE)
28 | #import <unistd.h> // getpagesize()
29 | #endif
30 |
31 | #if defined(USE_MACH_VM_ALLOCATE)
32 | #import <mach/mach.h> // vm_allocate(), vm_copy(), mach_task_self()
33 | #endif
34 |
35 | //#define DEBUG_LOGGING
36 |
37 | void CGFrameBufferProviderReleaseData (void *info, const void *data, size_t size);
38 |
39 | // Private API
40 |
41 | @interface CGFrameBuffer ()
42 |
43 | // This property indicates the actual size of the allocated buffer pointed to
44 | // by the pixels property. It is possible that the actual allocated size
45 | // is larger than the value returned by the numBytes property, but this
46 | // is an implementation detail of this class and would not need to be known
47 | // outside this module.
48 |
49 | @property (readonly) size_t numBytesAllocated;
50 |
51 | // This tricky ref to self is needed in ARC mode, since an object cannot invoke retain
52 | // to retain itself. Instead, hold a property that is set to self so that ARC will
53 | // do the retain.
54 |
55 | #if __has_feature(objc_arc)
56 | @property (nonatomic, retain) NSObject *arcRefToSelf;
57 | #endif // objc_arc
58 |
59 | @end
60 |
61 | // class CGFrameBuffer
62 |
63 | @implementation CGFrameBuffer
64 |
65 | @synthesize pixels = m_pixels;
66 | @synthesize zeroCopyPixels = m_zeroCopyPixels;
67 | @synthesize zeroCopyMappedData = m_zeroCopyMappedData;
68 | @synthesize numBytes = m_numBytes;
69 | @synthesize numBytesAllocated = m_numBytesAllocated;
70 | @synthesize width = m_width;
71 | @synthesize height = m_height;
72 | @synthesize bitsPerPixel = m_bitsPerPixel;
73 | @synthesize bytesPerPixel = m_bytesPerPixel;
74 | //@synthesize isLockedByDataProvider = m_isLockedByDataProvider;
75 | @synthesize lockedByImageRef = m_lockedByImageRef;
76 | @synthesize colorspace = m_colorspace;
77 |
78 | #if __has_feature(objc_arc)
79 | @synthesize arcRefToSelf = m_arcRefToSelf;
80 | #endif // objc_arc
81 |
82 | + (CGFrameBuffer*) cGFrameBufferWithBppDimensions:(NSInteger)bitsPerPixel
83 | width:(NSInteger)width
84 | height:(NSInteger)height
85 | {
86 | CGFrameBuffer *obj = [[CGFrameBuffer alloc] initWithBppDimensions:bitsPerPixel width:width height:height];
87 | #if __has_feature(objc_arc)
88 | return obj;
89 | #else
90 | return [obj autorelease];
91 | #endif // objc_arc
92 | }
93 |
94 | - (id) initWithBppDimensions:(NSInteger)bitsPerPixel
95 | width:(NSInteger)width
96 | height:(NSInteger)height
97 | {
98 | // Ensure that memory is allocated in terms of whole words, the
99 | // bitmap context won't make use of the extra half-word.
100 |
101 | size_t numPixels = width * height;
102 | size_t numPixelsToAllocate = numPixels;
103 |
104 | if ((numPixels % 2) != 0) {
105 | numPixelsToAllocate++;
106 | }
107 |
108 | // 16bpp -> 2 bytes per pixel, 24bpp and 32bpp -> 4 bytes per pixel
109 |
110 | size_t bytesPerPixel;
111 | if (bitsPerPixel == 16) {
112 | bytesPerPixel = 2;
113 | } else if (bitsPerPixel == 24 || bitsPerPixel == 32) {
114 | bytesPerPixel = 4;
115 | } else {
116 | bytesPerPixel = 0;
117 | NSAssert(FALSE, @"bitsPerPixel is invalid");
118 | }
119 |
120 | size_t inNumBytes = numPixelsToAllocate * bytesPerPixel;
121 |
122 | // FIXME: if every frame is a key frame, then don't use the kernel memory interface
123 | // since it would not help at all in terms of performance. Would be faster to
124 | // just use different buffers.
125 |
126 | // FIXME: implement runtime switch for mode, so that code can be compiled once to
127 | // test out both modes!
128 |
129 | char* buffer;
130 | size_t allocNumBytes;
131 |
132 | #if defined(USE_MACH_VM_ALLOCATE)
133 | size_t pagesize = (size_t)getpagesize();
134 | size_t numpages = (inNumBytes / pagesize);
135 | if (inNumBytes % pagesize) {
136 | numpages++;
137 | }
138 |
139 | vm_size_t m_size = (vm_size_t)(numpages * pagesize);
140 | allocNumBytes = (size_t)m_size;
141 |
142 | kern_return_t ret = vm_allocate((vm_map_t) mach_task_self(), (vm_address_t*) &buffer, m_size, VM_FLAGS_ANYWHERE);
143 |
144 | if (ret != KERN_SUCCESS) {
145 | buffer = NULL;
146 | }
147 |
148 |   // Note that memory returned by vm_allocate is zero filled by the kernel. The first frame is a
149 |   // keyframe that completely fills the framebuffer; additional frames are created from a copy of the initial frame.
150 | #else
151 | // Regular malloc(), or page aligned malloc()
152 | # if defined(USE_ALIGNED_VALLOC)
153 | size_t pagesize = getpagesize();
154 | size_t numpages = (inNumBytes / pagesize);
155 | if (inNumBytes % pagesize) {
156 | numpages++;
157 | }
158 | allocNumBytes = numpages * pagesize;
159 | buffer = (char*) valloc(allocNumBytes);
160 | if (buffer) {
161 | bzero(buffer, allocNumBytes);
162 | }
163 | # else
164 | allocNumBytes = inNumBytes;
165 | buffer = (char*) malloc(allocNumBytes);
166 | if (buffer) {
167 | bzero(buffer, allocNumBytes);
168 | }
169 | # endif // USE_ALIGNED_VALLOC
170 | #endif
171 |
172 | if (buffer == NULL) {
173 | return nil;
174 | }
175 |
176 |   // Verify page alignment of the image buffer. The self.pixels pointer must be page
177 | // aligned to properly support zero copy blit and whole page copy optimizations.
178 |
179 | if (1) {
180 |     uintptr_t addr = (uintptr_t)buffer; // uintptr_t so a 64 bit pointer is not truncated
181 |     uintptr_t pagesize = (uintptr_t)getpagesize();
182 |     uintptr_t mod = addr % pagesize;
183 | 
184 |     if (mod != 0) {
185 |       NSAssert(0, @"framebuffer is not page aligned : pagesize %d : ptr %p : ptr mod pagesize %d",
186 |                (int)pagesize,
187 |                buffer,
188 |                (int)mod);
190 | // Just in case NSAssert() was disabled in opt mode
191 | assert(0);
192 | }
193 | }
194 |
195 | if ((self = [super init])) {
196 | self->m_bitsPerPixel = bitsPerPixel;
197 | self->m_bytesPerPixel = bytesPerPixel;
198 | self->m_pixels = buffer;
199 | self->m_numBytes = inNumBytes;
200 | self->m_numBytesAllocated = allocNumBytes;
201 | self->m_width = width;
202 | self->m_height = height;
203 | } else {
204 | free(buffer);
205 | }
206 |
207 | return self;
208 | }
209 |
210 | // Getter for the self.pixels property. Normally, this
211 | // just returns what self.pixels was set to, but in
212 | // the case of the "zero copy mode", this method
213 | // returns the pointer to the read only mapped zero
214 | // copy memory.
215 |
216 | - (char*) pixels
217 | {
218 | char *ptr;
219 |
220 | ptr = self.zeroCopyPixels;
221 |
222 | if (ptr != NULL) {
223 | // The framebuffer is in zero copy mode
224 | return ptr;
225 | }
226 |
227 | return self->m_pixels;
228 | }
229 |
230 | - (BOOL) renderView:(UIView*)view
231 | {
232 | [self doneZeroCopyPixels];
233 |
234 | // Capture the pixel content of the View that contains the
235 | // UIImageView. A view that displays at the full width and
236 | // height of the screen will be captured in a 320x480
237 | // bitmap context. Note that any transformations applied
238 | // to the UIImageView will be captured *after* the
239 | // transformation has been applied. Once the bitmap
240 | // context has been captured, it should be rendered with
241 | // no transformations. Also note that the colorspace
242 | // is always ARGB with no alpha, the bitmap capture happens
243 | // *after* any colors in the image have been converted to RGB pixels.
244 |
245 | size_t w = view.layer.bounds.size.width;
246 | size_t h = view.layer.bounds.size.height;
247 |
248 | // if ((self.width != w) || (self.height != h)) {
249 | // return FALSE;
250 | // }
251 |
252 | // BOOL isRotated;
253 |
254 | if ((self.width == w) && (self.height == h)) {
255 | // isRotated = FALSE;
256 |   } else if ((self.width == h) && (self.height == w)) {
257 | // view must have created a rotation transformation
258 | // isRotated = TRUE;
259 | } else {
260 | return FALSE;
261 | }
262 |
263 | size_t bitsPerComponent = 0;
264 | size_t numComponents = 0;
265 | size_t bitsPerPixel = 0;
266 | size_t bytesPerRow = 0;
267 |
268 | if (self.bitsPerPixel == 16) {
269 | bitsPerComponent = 5;
270 | // numComponents = 3;
271 | bitsPerPixel = 16;
272 | bytesPerRow = self.width * (bitsPerPixel / 8);
273 | } else if (self.bitsPerPixel == 24 || self.bitsPerPixel == 32) {
274 | bitsPerComponent = 8;
275 | numComponents = 4;
276 | bitsPerPixel = bitsPerComponent * numComponents;
277 | bytesPerRow = self.width * (bitsPerPixel / 8);
278 | } else {
279 | NSAssert(FALSE, @"unmatched bitsPerPixel");
280 | }
281 |
282 | CGBitmapInfo bitmapInfo = [self getBitmapInfo];
283 |
284 | CGColorSpaceRef colorSpace = self.colorspace;
285 | if (colorSpace) {
286 | CGColorSpaceRetain(colorSpace);
287 | } else {
288 | colorSpace = CGColorSpaceCreateDeviceRGB();
289 | }
290 |
291 | NSAssert(self.pixels != NULL, @"pixels must not be NULL");
292 |
293 | NSAssert(self.isLockedByDataProvider == FALSE, @"renderView: pixel buffer locked by data provider");
294 |
295 | CGContextRef bitmapContext =
296 | CGBitmapContextCreate(self.pixels, self.width, self.height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo);
297 |
298 | CGColorSpaceRelease(colorSpace);
299 |
300 | if (bitmapContext == NULL) {
301 | return FALSE;
302 | }
303 |
304 | // Translation matrix that maps CG space to view space
305 |
306 | CGContextTranslateCTM(bitmapContext, 0.0, self.height);
307 | CGContextScaleCTM(bitmapContext, 1.0, -1.0);
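308 | 
309 |   // (CoreGraphics uses a bottom left origin; the translate plus negative Y
310 |   // scale flips the context so the layer's top left content renders upright.)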
308 |
309 | [view.layer renderInContext:bitmapContext];
310 |
311 | CGContextRelease(bitmapContext);
312 |
313 | return TRUE;
314 | }
315 |
316 | - (BOOL) renderCGImage:(CGImageRef)cgImageRef
317 | {
318 | [self doneZeroCopyPixels];
319 |
320 | // Render the contents of an image to pixels.
321 |
322 | size_t w = CGImageGetWidth(cgImageRef);
323 | size_t h = CGImageGetHeight(cgImageRef);
324 |
325 | BOOL isRotated = FALSE;
326 |
327 | if ((w != h) && (h == self.width) && (w == self.height)) {
328 | // Assume image is rotated to portrait, so rotate and then render
329 | isRotated = TRUE;
330 | } else {
331 | // If sizes do not match, then resize input image to fit into this framebuffer
332 | }
333 |
334 | size_t bitsPerComponent = 0;
335 | size_t numComponents = 0;
336 | size_t bitsPerPixel = 0;
337 | size_t bytesPerRow = 0;
338 |
339 | if (self.bitsPerPixel == 16) {
340 | bitsPerComponent = 5;
341 | // numComponents = 3;
342 | bitsPerPixel = 16;
343 | bytesPerRow = self.width * (bitsPerPixel / 8);
344 | } else if (self.bitsPerPixel == 24 || self.bitsPerPixel == 32) {
345 | bitsPerComponent = 8;
346 | numComponents = 4;
347 | bitsPerPixel = bitsPerComponent * numComponents;
348 | bytesPerRow = self.width * (bitsPerPixel / 8);
349 | } else {
350 | NSAssert(FALSE, @"unmatched bitsPerPixel");
351 | }
352 |
353 | CGBitmapInfo bitmapInfo = [self getBitmapInfo];
354 |
355 | CGColorSpaceRef colorSpace = self.colorspace;
356 | if (colorSpace) {
357 | CGColorSpaceRetain(colorSpace);
358 | } else {
359 | colorSpace = CGColorSpaceCreateDeviceRGB();
360 | }
361 |
362 | NSAssert(self.pixels != NULL, @"pixels must not be NULL");
363 | NSAssert(self.isLockedByDataProvider == FALSE, @"renderCGImage: pixel buffer locked by data provider");
364 |
365 | CGContextRef bitmapContext =
366 | CGBitmapContextCreate(self.pixels, self.width, self.height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo);
367 |
368 | CGColorSpaceRelease(colorSpace);
369 |
370 | if (bitmapContext == NULL) {
371 | return FALSE;
372 | }
373 |
374 | // Translation matrix that maps CG space to view space
375 |
376 | if (isRotated) {
377 | // To landscape : 90 degrees CCW
378 |
379 | CGContextRotateCTM(bitmapContext, M_PI / 2);
380 | }
381 |
382 | CGRect bounds = CGRectMake( 0.0f, 0.0f, self.width, self.height );
383 |
384 | CGContextDrawImage(bitmapContext, bounds, cgImageRef);
385 |
386 | CGContextRelease(bitmapContext);
387 |
388 | return TRUE;
389 | }
390 |
391 | - (CGContextRef) createBitmapContext
392 | {
393 | [self doneZeroCopyPixels];
394 |
395 | size_t bitsPerComponent = 0;
396 | size_t numComponents = 0;
397 | size_t bitsPerPixel = 0;
398 | size_t bytesPerRow = 0;
399 |
400 | if (self.bitsPerPixel == 16) {
401 | bitsPerComponent = 5;
402 | // numComponents = 3;
403 | bitsPerPixel = 16;
404 | bytesPerRow = self.width * (bitsPerPixel / 8);
405 | } else if (self.bitsPerPixel == 24 || self.bitsPerPixel == 32) {
406 | bitsPerComponent = 8;
407 | numComponents = 4;
408 | bitsPerPixel = bitsPerComponent * numComponents;
409 | bytesPerRow = self.width * (bitsPerPixel / 8);
410 | } else {
411 | NSAssert(FALSE, @"unmatched bitsPerPixel");
412 | }
413 |
414 | CGBitmapInfo bitmapInfo = [self getBitmapInfo];
415 |
416 | CGColorSpaceRef colorSpace = self.colorspace;
417 | if (colorSpace) {
418 | CGColorSpaceRetain(colorSpace);
419 | } else {
420 | colorSpace = CGColorSpaceCreateDeviceRGB();
421 | }
422 |
423 | NSAssert(self.pixels != NULL, @"pixels must not be NULL");
424 | NSAssert(self.isLockedByDataProvider == FALSE, @"createBitmapContext: pixel buffer locked by data provider");
425 |
426 | CGContextRef bitmapContext =
427 | CGBitmapContextCreate(self.pixels, self.width, self.height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo);
428 |
429 | CGColorSpaceRelease(colorSpace);
430 |
431 | if (bitmapContext == NULL) {
432 | return NULL;
433 | }
434 |
435 | return bitmapContext;
436 | }
437 |
438 | - (CGImageRef) createCGImageRef
439 | {
440 | // Load pixel data as a core graphics image object.
441 |
442 | NSAssert(self.width > 0 && self.height > 0, @"width or height is zero");
443 |
444 | size_t bitsPerComponent = 0;
445 | size_t numComponents = 0;
446 | size_t bitsPerPixel = 0;
447 | size_t bytesPerRow = 0;
448 |
449 | if (self.bitsPerPixel == 16) {
450 | bitsPerComponent = 5;
451 | // numComponents = 3;
452 | bitsPerPixel = 16;
453 | bytesPerRow = self.width * (bitsPerPixel / 8);
454 | } else if (self.bitsPerPixel == 24 || self.bitsPerPixel == 32) {
455 | bitsPerComponent = 8;
456 | numComponents = 4;
457 | bitsPerPixel = bitsPerComponent * numComponents;
458 | bytesPerRow = self.width * (bitsPerPixel / 8);
459 | } else {
460 | NSAssert(FALSE, @"unmatched bitsPerPixel");
461 | }
462 |
463 | CGBitmapInfo bitmapInfo = [self getBitmapInfo];
464 |
465 | CGDataProviderReleaseDataCallback releaseData = CGFrameBufferProviderReleaseData;
466 |
467 | void *pixelsPtr = self.pixels; // Will return zero copy pointer in zero copy mode. Otherwise self.pixels
468 |
469 | CGDataProviderRef dataProviderRef = CGDataProviderCreateWithData(
470 | #if __has_feature(objc_arc)
471 | (__bridge void *)self,
472 | #else
473 | self,
474 | #endif // objc_arc
475 | pixelsPtr,
476 | self.width * self.height * (bitsPerPixel / 8),
477 | releaseData);
478 |
479 | BOOL shouldInterpolate = FALSE; // images at exact size already
480 |
481 | CGColorRenderingIntent renderIntent = kCGRenderingIntentDefault;
482 |
483 | CGColorSpaceRef colorSpace = self.colorspace;
484 | if (colorSpace) {
485 | CGColorSpaceRetain(colorSpace);
486 | } else {
487 | colorSpace = CGColorSpaceCreateDeviceRGB();
488 | }
489 |
490 | CGImageRef inImageRef = CGImageCreate(self.width, self.height, bitsPerComponent, bitsPerPixel, bytesPerRow,
491 | colorSpace, bitmapInfo, dataProviderRef, NULL,
492 | shouldInterpolate, renderIntent);
493 |
494 | CGDataProviderRelease(dataProviderRef);
495 |
496 | CGColorSpaceRelease(colorSpace);
497 |
498 | if (inImageRef != NULL) {
499 | self.isLockedByDataProvider = TRUE;
500 | self->m_lockedByImageRef = inImageRef; // Don't retain, just save pointer
501 | }
502 |
503 | return inImageRef;
504 | }
505 |
506 | - (BOOL) isLockedByImageRef:(CGImageRef)cgImageRef
507 | {
508 | if (! self->m_isLockedByDataProvider)
509 | return FALSE;
510 |
511 | return (self->m_lockedByImageRef == cgImageRef);
512 | }
513 |
514 | - (CGBitmapInfo) getBitmapInfo
515 | {
516 | CGBitmapInfo bitmapInfo = 0;
517 | if (self.bitsPerPixel == 16) {
518 | bitmapInfo = kCGBitmapByteOrder16Host | kCGImageAlphaNoneSkipFirst;
519 | } else if (self.bitsPerPixel == 24) {
520 | bitmapInfo |= kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst;
521 | } else if (self.bitsPerPixel == 32) {
522 | bitmapInfo |= kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
523 | } else {
524 | assert(0);
525 | }
526 | return bitmapInfo;
527 | }
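528 | 
529 | // Note on getBitmapInfo: 24 bpp content is still stored 4 bytes per pixel with
530 | // the alpha byte skipped, while 32 bpp treats the first byte as premultiplied alpha.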
528 |
529 | // These properties are implemented explicitly to aid
530 | // in debugging of read/write operations. These method
531 | // are used to set values that could be set in one thread
532 | // and read or set in another. The code must take care to
533 | // use these fields correctly to remain thread safe.
534 |
535 | - (BOOL) isLockedByDataProvider
536 | {
537 | return self->m_isLockedByDataProvider;
538 | }
539 |
540 | #if __has_feature(objc_arc)
541 |
542 | // The arc impl uses a property of type NSObject to hold a ref to itself
543 | // during the time that the buffer is locked by CoreGraphics.
544 |
545 | - (void) setIsLockedByDataProvider:(BOOL)newValue
546 | {
547 | NSAssert(m_isLockedByDataProvider == !newValue,
548 | @"isLockedByDataProvider property can only be switched");
549 |
550 | self->m_isLockedByDataProvider = newValue;
551 |
552 | if (self->m_isLockedByDataProvider) {
553 | self.arcRefToSelf = self;
554 | } else {
555 | self.arcRefToSelf = nil;
556 | }
557 | }
558 |
559 | #else
560 |
561 | // The non-ARC impl explicitly invokes retain/release and does some tricky logging.
562 |
563 | - (void) setIsLockedByDataProvider:(BOOL)newValue
564 | {
565 | NSAssert(m_isLockedByDataProvider == !newValue,
566 | @"isLockedByDataProvider property can only be switched");
567 |
568 | self->m_isLockedByDataProvider = newValue;
569 |
570 | if (m_isLockedByDataProvider) {
571 | [self retain]; // retain extra ref to self
572 | } else {
573 | #ifdef DEBUG_LOGGING
574 | if (TRUE)
575 | #else
576 | if (FALSE)
577 | #endif
578 | {
579 | // Catch the case where the very last ref to
580 | // an object is dropped by CoreGraphics
581 |
582 | int refCount = [self retainCount];
583 |
584 | if (refCount == 1) {
585 | // About to drop last ref to this frame buffer
586 |
587 | NSLog(@"dropping last ref to CGFrameBuffer held by DataProvider");
588 | }
589 |
590 | [self release];
591 | } else {
592 | // Regular logic for non-debug situations
593 |
594 | [self release]; // release extra ref to self
595 | }
596 | }
597 | }
598 |
599 | #endif // objc_arc
600 |
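// Sketch of the lifetime rule both setter impls enforce: while locked, the
// buffer keeps itself alive (via a strong self ref under ARC, or an extra
// retain otherwise) so CoreGraphics never reads freed pixel memory.
//
//   [fb setIsLockedByDataProvider:TRUE];  // extra ref taken
//   // ... CoreGraphics reads fb.pixels ...
//   [fb setIsLockedByDataProvider:FALSE]; // extra ref dropped, fb may dealloc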
601 | // Set all pixels to 0x0
602 |
603 | - (void) clear
604 | {
605 | [self doneZeroCopyPixels];
606 | bzero(self.pixels, self.numBytes);
607 | }
608 |
609 | - (void) osCopyImpl:(void*)srcPtr
610 | {
611 | #if defined(USE_MACH_VM_ALLOCATE)
612 | kern_return_t ret;
613 | vm_address_t src = (vm_address_t) srcPtr;
614 | vm_address_t dst = (vm_address_t) self->m_pixels;
615 |
616 | #if defined(EXTRA_CHECKS)
617 | // Do extra checking to ensure that the zero copy region is
618 | // properly page aligned and that the number of bytes to
619 | // copy is an exact multiple of the page size.
620 |
621 | size_t s = getpagesize();
622 |
623 | if ((self.numBytesAllocated % s) != 0) {
624 | assert(0);
625 | }
626 |
627 | if ((dst % s) != 0) {
628 | assert(0);
629 | }
630 | if ((src % s) != 0) {
631 | assert(0);
632 | }
633 | #endif // EXTRA_CHECKS
634 |
635 | ret = vm_copy((vm_map_t) mach_task_self(), src, (vm_size_t) self.numBytesAllocated, dst);
636 | if (ret != KERN_SUCCESS) {
637 | assert(0);
638 | }
639 | #else
640 | // Plain memcpy fallback when USE_MACH_VM_ALLOCATE is not defined
641 | assert(self.numBytes > 0);
642 | memcpy(self->m_pixels, srcPtr, self.numBytes);
643 | #endif
644 | }
645 |
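// vm_copy() can satisfy the copy with copy on write page mapping, so
// "copying" a large framebuffer is closer to a page table operation than a
// byte by byte copy. That is why both regions must be page aligned and the
// length a multiple of the page size, as the EXTRA_CHECKS block verifies:
//
//   size_t pageSize = getpagesize(); // typically 4096 or 16384 bytes
//   assert((self.numBytesAllocated % pageSize) == 0);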
646 | - (void) copyPixels:(CGFrameBuffer *)anotherFrameBuffer
647 | {
648 | assert(self.numBytes == anotherFrameBuffer.numBytes);
649 |
650 | [self doneZeroCopyPixels];
651 | void *anotherFrameBufferPixelsPtr = anotherFrameBuffer.zeroCopyPixels;
652 |
653 | if (anotherFrameBufferPixelsPtr) {
654 | // The other framebuffer has a zero copy pixel buffer; this happens when a keyframe
655 | // is followed by a delta frame. Use the original zero copy pointer as the
656 | // source for an OS level page copy operation, but don't modify the state of
657 | // the other frame buffer in any way, since it could currently be in use by
658 | // the graphics subsystem.
659 | } else {
660 | // copy bytes from other framebuffer
661 | anotherFrameBufferPixelsPtr = anotherFrameBuffer.pixels;
662 | }
663 |
664 | [self osCopyImpl:anotherFrameBufferPixelsPtr];
665 | }
666 |
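// Hypothetical usage: duplicate a decoded keyframe into a scratch buffer
// that delta frames will then patch, without disturbing the original:
//
//   [scratchFrameBuffer copyPixels:keyframeBuffer];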
667 | // Explicitly memcpy pixels instead of doing an OS level page copy.
668 | // This is useful only when the mapped memory is about to be deallocated,
669 | // since an OS page copy would keep that memory mapped.
670 |
671 | - (void) memcopyPixels:(CGFrameBuffer *)anotherFrameBuffer
672 | {
673 | [self doneZeroCopyPixels];
674 | assert(self.zeroCopyMappedData == nil);
675 | assert(self.zeroCopyPixels == NULL);
676 | assert(self.numBytes == anotherFrameBuffer.numBytes);
677 |
678 | void *anotherFrameBufferPixelsPtr = anotherFrameBuffer.zeroCopyPixels;
679 |
680 | if (anotherFrameBufferPixelsPtr) {
681 | // The other framebuffer has a zero copy pixel buffer; this happens when a keyframe
682 | // is followed by a delta frame. Use the original zero copy pointer as the
683 | // source for the memcpy, but don't modify the state of the other frame
684 | // buffer in any way, since it could currently be in use by the graphics
685 | // subsystem.
686 | } else {
687 | // copy bytes from other framebuffer
688 | anotherFrameBufferPixelsPtr = anotherFrameBuffer.pixels;
689 | }
690 |
691 | memcpy(self.pixels, anotherFrameBufferPixelsPtr, anotherFrameBuffer.numBytes);
692 | }
693 |
694 | // Copy the contents of the zero copy buffer to the allocated framebuffer and
695 | // release the zero copy bytes.
696 |
697 | - (void) zeroCopyToPixels
698 | {
699 | if (self.zeroCopyPixels == NULL) {
700 | // No zero copy pixels in use, so this is a no-op
701 | return;
702 | }
703 |
704 | [self osCopyImpl:self.zeroCopyPixels];
705 |
706 | // Release zero copy buffer
707 |
708 | [self doneZeroCopyPixels];
709 | }
710 |
711 | - (void) copyFromCVPixelBuffer:(CVPixelBufferRef)cVPixelBufferRef
712 | {
713 | int width = (int) self.width;
714 | int height = (int) self.height;
715 |
716 | int cvWidth = (int) CVPixelBufferGetWidth(cVPixelBufferRef);
717 | int cvHeight = (int) CVPixelBufferGetHeight(cVPixelBufferRef);
718 |
719 | int numPlanes = (int) CVPixelBufferGetPlaneCount(cVPixelBufferRef);
720 | assert(numPlanes <= 1); // exclude YUV buffers
721 |
722 | // Note that the width and height of the dst of the copy operation
723 | // can be smaller than the source. The image data is cropped to
724 | // the size of the dst in that case.
725 |
726 | assert(width <= cvWidth);
727 | assert(height <= cvHeight);
728 |
729 | char *pixels = (char*) self.pixels;
730 |
731 | CVPixelBufferLockBaseAddress(cVPixelBufferRef, 0);
732 |
733 | char *baseAddress = (char*) CVPixelBufferGetBaseAddress(cVPixelBufferRef);
734 | assert(baseAddress);
735 |
736 | int rowWidthInBytes = width * sizeof(uint32_t); // dst pixels are 32 BPP words
737 | int cvRowWidthInBytes = (int)CVPixelBufferGetBytesPerRow(cVPixelBufferRef);
738 |
739 | for (int row = 0; row < height; row++) {
740 | memcpy(&pixels[row * rowWidthInBytes], &baseAddress[row * cvRowWidthInBytes], rowWidthInBytes);
741 | }
742 |
743 | CVPixelBufferUnlockBaseAddress(cVPixelBufferRef, 0);
744 |
745 | return;
746 | }
747 |
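// Hypothetical usage with a non-planar BGRA buffer emitted by a
// VideoToolbox decode callback (planar YUV buffers are rejected by the
// plane count assert above):
//
//   CVPixelBufferRef pixBuffer = ...; // e.g. kCVPixelFormatType_32BGRA
//   [frameBuffer copyFromCVPixelBuffer:pixBuffer];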
748 | - (void)dealloc {
749 | NSAssert(self.isLockedByDataProvider == FALSE, @"dealloc: buffer still locked by data provider");
750 | [self doneZeroCopyPixels];
751 |
752 | self.colorspace = NULL;
753 |
754 | #if defined(USE_MACH_VM_ALLOCATE)
755 | if (self.pixels != NULL) {
756 | kern_return_t ret;
757 | ret = vm_deallocate((vm_map_t) mach_task_self(), (vm_address_t) self.pixels, (vm_size_t) self.numBytesAllocated);
758 | if (ret != KERN_SUCCESS) {
759 | assert(0);
760 | }
761 | }
762 | #else
763 | if (self.pixels != NULL) {
764 | free(self.pixels);
765 | }
766 | #endif
767 |
768 | #ifdef DEBUG_LOGGING
769 | NSLog(@"deallocate CGFrameBuffer");
770 | #endif
771 |
772 | self.zeroCopyMappedData = nil;
773 |
774 | #if __has_feature(objc_arc)
775 | // It should not actually be possible for this method to be invoked if arcRefToSelf is non-nil
776 | self.arcRefToSelf = nil;
777 | #else
778 | [super dealloc];
779 | #endif // objc_arc
780 | }
781 |
782 | // Save a "zero copy" pointer and a ref to the mapped data. Invoking this function
783 | // means that the self.pixels getter will return the value of the self.zeroCopyPixels
784 | // and the data in the frame buffer will be ignored until doneZeroCopyPixels is invoked.
785 |
786 | - (void) zeroCopyPixels:(void*)zeroCopyPtr
787 | mappedData:(NSData*)mappedData
788 | {
789 | #if defined(EXTRA_CHECKS)
790 | // Do extra checking to ensure that the zero copy region is
791 | // properly page aligned and that the number of bytes to
792 | // copy is an exact multiple of the page size.
793 |
794 | size_t ptr = (size_t) zeroCopyPtr;
795 | size_t s = getpagesize();
796 |
797 | if ((ptr % s) != 0) {
798 | assert(0);
799 | }
800 | #endif // EXTRA_CHECKS
801 |
802 | self->m_zeroCopyPixels = zeroCopyPtr;
803 | self.zeroCopyMappedData = mappedData;
804 | }
805 |
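// Hypothetical usage with a page aligned memory mapped file, so that the
// framebuffer renders directly out of the mapping without copying:
//
//   NSData *mapped = [NSData dataWithContentsOfFile:path
//                                           options:NSDataReadingMappedAlways
//                                             error:nil];
//   [frameBuffer zeroCopyPixels:(void*)mapped.bytes mappedData:mapped];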
806 | // Exit zero copy mode.
807 |
808 | - (void) doneZeroCopyPixels
809 | {
810 | NSAssert(self.isLockedByDataProvider == FALSE, @"isLockedByDataProvider");
811 | self->m_zeroCopyPixels = NULL;
812 | self.zeroCopyMappedData = nil;
813 | }
814 |
815 | - (NSString*) description
816 | {
817 | return [NSString stringWithFormat:@"CGFrameBuffer %p, pixels %p, %d x %d, %d BPP, isLocked %d", self, self.pixels,
818 | (int)self.width, (int)self.height, (int)self.bitsPerPixel, (int)self.isLockedByDataProvider];
819 | }
820 |
821 | // Setter for self.colorspace property. While this property is declared as assign,
822 | // it will actually retain a ref to the colorspace.
823 |
824 | - (void) setColorspace:(CGColorSpaceRef)colorspace
825 | {
826 | if (colorspace) {
827 | CGColorSpaceRetain(colorspace);
828 | }
829 |
830 | if (self->m_colorspace) {
831 | CGColorSpaceRelease(self->m_colorspace);
832 | }
833 |
834 | self->m_colorspace = colorspace;
835 | }
836 |
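// Example: tag the framebuffer as sRGB so CoreGraphics does not apply a
// device specific conversion when the image is rendered:
//
//   CGColorSpaceRef srgb = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
//   frameBuffer.colorspace = srgb;
//   CGColorSpaceRelease(srgb); // the setter retained its own ref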
837 | - (void) clearAlphaChannel
838 | {
839 | assert(self.isLockedByDataProvider == FALSE);
840 | //assert(self.bitsPerPixel == 24);
841 |
842 | uint32_t *pixelsPtr = (uint32_t*) self.pixels;
843 | uint32_t numPixels = (uint32_t)(self.width * self.height);
844 |
845 | for (int i = 0; i < numPixels; i++) {
846 | uint32_t value = pixelsPtr[i];
847 | assert((value >> 24) == 0xFF || (value >> 24) == 0x0);
848 | // Throw out alpha values
849 | value = value & 0xFFFFFF;
850 | pixelsPtr[i] = value;
851 | }
852 | }
853 |
854 | // This method resets the alpha channel for each pixel to be fully opaque.
855 |
856 | - (void) resetAlphaChannel
857 | {
858 | assert(self.isLockedByDataProvider == FALSE);
859 | //assert(self.bitsPerPixel == 24);
860 |
861 | uint32_t *pixelsPtr = (uint32_t*) self.pixels;
862 | uint32_t numPixels = (uint32_t)(self.width * self.height);
863 |
864 | for (int i = 0; i < numPixels; i++) {
865 | uint32_t value = pixelsPtr[i];
866 | value = (0xFF << 24) | value;
867 | pixelsPtr[i] = value;
868 | }
869 | }
870 |
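// For example, a fully opaque white pixel moves through these methods as:
//
//   0xFFFFFFFF  ->  clearAlphaChannel  ->  0x00FFFFFF
//   0x00FFFFFF  ->  resetAlphaChannel  ->  0xFFFFFFFF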
871 | // Convert pixels to a PNG image format that can be easily saved to disk.
872 |
873 | - (NSData*) formatAsPNG
874 | {
875 | NSMutableData *mData = [NSMutableData data];
876 |
877 | @autoreleasepool {
878 |
879 | // Render buffer as a PNG image
880 |
881 | CFStringRef type = kUTTypePNG;
882 | size_t count = 1;
883 | CGImageDestinationRef dataDest;
884 | dataDest = CGImageDestinationCreateWithData(
885 | #if __has_feature(objc_arc)
886 | (__bridge CFMutableDataRef)mData,
887 | #else
888 | (CFMutableDataRef)mData,
889 | #endif // objc_arc
890 | type,
891 | count,
892 | NULL);
893 | assert(dataDest);
894 |
895 | CGImageRef imgRef = [self createCGImageRef];
896 |
897 | CGImageDestinationAddImage(dataDest, imgRef, NULL);
898 | CGImageDestinationFinalize(dataDest);
899 |
900 | CGImageRelease(imgRef);
901 | CFRelease(dataDest);
902 |
903 | // Return the NSData instance that was allocated outside the scope of the pool
904 |
905 | }
906 |
907 | return [NSData dataWithData:mData];
908 | }
909 |
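// Hypothetical usage: dump a frame to the tmp dir while debugging:
//
//   NSData *pngData = [frameBuffer formatAsPNG];
//   NSString *path = [NSTemporaryDirectory()
//                     stringByAppendingPathComponent:@"frame.png"];
//   [pngData writeToFile:path atomically:TRUE];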
910 | @end
911 |
912 | // C callback invoked by core graphics when done with a buffer. This is tricky
913 | // since an extra ref is held on the buffer while it is locked by the
914 | // core graphics layer.
915 |
916 | void CGFrameBufferProviderReleaseData (void *info, const void *data, size_t size) {
917 | #ifdef DEBUG_LOGGING
918 | NSLog(@"CGFrameBufferProviderReleaseData() called");
919 | #endif
920 |
921 | CGFrameBuffer *cgBuffer;
922 | #if __has_feature(objc_arc)
923 | cgBuffer = (__bridge CGFrameBuffer *) info;
924 | #else
925 | cgBuffer = (CGFrameBuffer *) info;
926 | #endif // objc_arc
927 |
928 | cgBuffer.isLockedByDataProvider = FALSE;
929 |
930 | // Note that clearing the lock may have dropped the last ref, in which
931 | // case cgBuffer no longer points to valid memory.
932 | }
933 |
--------------------------------------------------------------------------------