├── mtv.h264
├── H264DecodeDemo.xcodeproj
│   ├── xcuserdata
│   │   └── yaodong.xcuserdatad
│   │       ├── xcdebugger
│   │       │   └── Breakpoints_v2.xcbkptlist
│   │       └── xcschemes
│   │           ├── xcschememanagement.plist
│   │           └── H264DecodeDemo.xcscheme
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   ├── xcuserdata
│   │   │   └── yaodong.xcuserdatad
│   │   │       └── UserInterfaceState.xcuserstate
│   │   └── xcshareddata
│   │       └── H264DecodeDemo.xccheckout
│   └── project.pbxproj
├── H264DecodeDemo
│   ├── ViewController.h
│   ├── VideoFileParser.h
│   ├── AppDelegate.h
│   ├── main.m
│   ├── AAPLEAGLLayer.h
│   ├── Images.xcassets
│   │   └── AppIcon.appiconset
│   │       └── Contents.json
│   ├── Info.plist
│   ├── AppDelegate.m
│   ├── VideoFileParser.m
│   ├── Base.lproj
│   │   ├── Main.storyboard
│   │   └── LaunchScreen.xib
│   ├── ViewController.m
│   └── AAPLEAGLLayer.m
└── README.md
/mtv.h264:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stevenyao/iOSHardwareDecoder/HEAD/mtv.h264
--------------------------------------------------------------------------------
/H264DecodeDemo.xcodeproj/xcuserdata/yaodong.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Bucket
3 | type = "1"
4 | version = "2.0">
5 | </Bucket>
6 | 
--------------------------------------------------------------------------------
/H264DecodeDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 | version = "1.0">
4 | <FileRef
5 | location = "self:H264DecodeDemo.xcodeproj">
6 | </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/H264DecodeDemo.xcodeproj/project.xcworkspace/xcuserdata/yaodong.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stevenyao/iOSHardwareDecoder/HEAD/H264DecodeDemo.xcodeproj/project.xcworkspace/xcuserdata/yaodong.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/H264DecodeDemo/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // H264DecodeDemo
4 | //
5 | // Created by Yao Dong on 15/8/6.
6 | // Copyright (c) 2015 duowan. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface ViewController : UIViewController
12 |
13 |
14 | @end
15 |
16 |
--------------------------------------------------------------------------------
/H264DecodeDemo/VideoFileParser.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 |
3 | @interface VideoPacket : NSObject
4 |
5 | @property uint8_t* buffer;
6 | @property NSInteger size;
7 |
8 | @end
9 |
10 | @interface VideoFileParser : NSObject
11 |
12 | -(BOOL)open:(NSString*)fileName;
13 | -(VideoPacket *)nextPacket;
14 | -(void)close;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
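A minimal usage sketch of this interface (hypothetical call site; `path` stands in for the path of the bundled mtv.h264, and ViewController.m later in this dump shows the real loop):

    VideoFileParser *parser = [[VideoFileParser alloc] init];
    if ([parser open:path]) {
        VideoPacket *vp;
        while ((vp = [parser nextPacket]) != nil) {
            // Each packet still carries its 4-byte 00 00 00 01 start code,
            // and vp.size includes those 4 bytes.
            NSLog(@"NAL unit of %ld bytes", (long)vp.size);
        }
        [parser close];
    }

Note that nextPacket only returns a packet once it sees the *next* start code, so the final NAL unit of a file (which has no start code after it) is silently dropped; the README's warning that this parser is demo-grade is meant literally.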
/H264DecodeDemo/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // H264DecodeDemo
4 | //
5 | // Created by Yao Dong on 15/8/6.
6 | // Copyright (c) 2015 duowan. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/H264DecodeDemo/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // H264DecodeDemo
4 | //
5 | // Created by Yao Dong on 15/8/6.
6 | // Copyright (c) 2015 duowan. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # iOSHardwareDecoder
2 | 
3 | iOS hardware decoder demo
4 | 
5 | This is an example of hardware-decoding H.264 video on iOS.
6 | 
7 | AAPLEAGLLayer.m is a layer that renders YUV with OpenGL; I copied it from an Apple sample.
8 | 
9 | VideoFileParser.m is a very crude parser for raw H.264 files; it exists only to serve this demo, so do not imitate it.
10 | 
11 | ViewController.m is the part to focus on: it demonstrates how to call the VideoToolbox APIs.
12 | 
13 | A few things to note:
14 | 
15 | The NAL units the iOS decoder accepts must be in MP4 (AVCC) format, meaning the first 4 bytes of each packet hold the payload size in big-endian byte order instead of the 00 00 00 01 start code, so the stream has to be converted.
16 | 
17 | The SPS and PPS data used to initialize the decoder must not include the start code.
18 | 
19 | The decoded video jitters during playback, and that is expected: the stream contains B-frames, and the iOS decoder does not reorder them; the app has to reorder output by PTS itself.
20 | 
--------------------------------------------------------------------------------
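The AVCC conversion the README describes is exactly what decodeFile:fileExt: in ViewController.m does byte by byte. As a standalone sketch (annexb_to_avcc is a hypothetical helper, not in the repo), the same rewrite using CFSwapInt32HostToBig:

    #include <CoreFoundation/CFByteOrder.h>
    #include <stdint.h>
    #include <string.h>

    // Rewrite one Annex B NAL unit (leading 00 00 00 01 start code) into
    // AVCC framing: the same 4 header bytes now hold the big-endian length
    // of the payload that follows them.
    static void annexb_to_avcc(uint8_t *nal, size_t totalSize)
    {
        uint32_t payloadSize = (uint32_t)(totalSize - 4);    // bytes after the header
        uint32_t beSize = CFSwapInt32HostToBig(payloadSize); // big-endian on any host
        memcpy(nal, &beSize, 4);                             // overwrite the start code in place
    }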
/H264DecodeDemo/AAPLEAGLLayer.h:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (C) 2014 Apple Inc. All Rights Reserved.
3 | See LICENSE.txt for this sample’s licensing information
4 |
5 | Abstract:
6 |
7 | This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner.
8 |
9 | */
10 |
11 | //@import QuartzCore;
12 | #include <QuartzCore/CAEAGLLayer.h>
13 | #include <CoreVideo/CVPixelBuffer.h>
14 |
15 | @interface AAPLEAGLLayer : CAEAGLLayer
16 | @property CVPixelBufferRef pixelBuffer;
17 | - (id)initWithFrame:(CGRect)frame;
18 | - (void)resetRenderBuffer;
19 | @end
20 |
--------------------------------------------------------------------------------
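The layer's entire public surface is the pixelBuffer property plus resetRenderBuffer. A usage sketch mirroring ViewController.m's viewDidLoad later in this dump (decodedPixelBuffer is a stand-in for a frame coming out of the decoder):

    AAPLEAGLLayer *glLayer = [[AAPLEAGLLayer alloc] initWithFrame:self.view.bounds];
    [self.view.layer addSublayer:glLayer];
    // Assigning the property retains the buffer and redraws immediately.
    glLayer.pixelBuffer = decodedPixelBuffer;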
/H264DecodeDemo.xcodeproj/xcuserdata/yaodong.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>SchemeUserState</key>
6 | <dict>
7 | <key>H264DecodeDemo.xcscheme</key>
8 | <dict>
9 | <key>orderHint</key>
10 | <integer>0</integer>
11 | </dict>
12 | </dict>
13 | <key>SuppressBuildableAutocreation</key>
14 | <dict>
15 | <key>DAE85E941B733B2600062371</key>
16 | <dict>
17 | <key>primary</key>
18 | <true/>
19 | </dict>
20 | <key>DAE85EAD1B733B2600062371</key>
21 | <dict>
22 | <key>primary</key>
23 | <true/>
24 | </dict>
25 | </dict>
26 | </dict>
27 | </plist>
28 | 
--------------------------------------------------------------------------------
/H264DecodeDemo/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "ipad",
35 | "size" : "29x29",
36 | "scale" : "1x"
37 | },
38 | {
39 | "idiom" : "ipad",
40 | "size" : "29x29",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "40x40",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "40x40",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "76x76",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "76x76",
61 | "scale" : "2x"
62 | }
63 | ],
64 | "info" : {
65 | "version" : 1,
66 | "author" : "xcode"
67 | }
68 | }
--------------------------------------------------------------------------------
/H264DecodeDemo/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>en</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>com.yy.$(PRODUCT_NAME:rfc1034identifier)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>APPL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleSignature</key>
20 | <string>????</string>
21 | <key>CFBundleVersion</key>
22 | <string>1</string>
23 | <key>LSRequiresIPhoneOS</key>
24 | <true/>
25 | <key>UILaunchStoryboardName</key>
26 | <string>LaunchScreen</string>
27 | <key>UIMainStoryboardFile</key>
28 | <string>Main</string>
29 | <key>UIRequiredDeviceCapabilities</key>
30 | <array>
31 | <string>armv7</string>
32 | </array>
33 | <key>UISupportedInterfaceOrientations</key>
34 | <array>
35 | <string>UIInterfaceOrientationPortrait</string>
36 | <string>UIInterfaceOrientationLandscapeLeft</string>
37 | <string>UIInterfaceOrientationLandscapeRight</string>
38 | </array>
39 | <key>UISupportedInterfaceOrientations~ipad</key>
40 | <array>
41 | <string>UIInterfaceOrientationPortrait</string>
42 | <string>UIInterfaceOrientationPortraitUpsideDown</string>
43 | <string>UIInterfaceOrientationLandscapeLeft</string>
44 | <string>UIInterfaceOrientationLandscapeRight</string>
45 | </array>
46 | </dict>
47 | </plist>
48 | 
--------------------------------------------------------------------------------
/H264DecodeDemo.xcodeproj/project.xcworkspace/xcshareddata/H264DecodeDemo.xccheckout:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDESourceControlProjectFavoriteDictionaryKey</key>
6 | <false/>
7 | <key>IDESourceControlProjectIdentifier</key>
8 | <string>20A3845F-6960-4AF5-A41C-559A169F122B</string>
9 | <key>IDESourceControlProjectName</key>
10 | <string>H264DecodeDemo</string>
11 | <key>IDESourceControlProjectOriginsDictionary</key>
12 | <dict>
13 | <key>FE08CEBC6019DF8B580694C9FEE9610AECBD952B</key>
14 | <string>https://github.com/stevenyao/iOSHardwareDecoder.git</string>
15 | </dict>
16 | <key>IDESourceControlProjectPath</key>
17 | <string>H264DecodeDemo.xcodeproj</string>
18 | <key>IDESourceControlProjectRelativeInstallPathDictionary</key>
19 | <dict>
20 | <key>FE08CEBC6019DF8B580694C9FEE9610AECBD952B</key>
21 | <string>../..</string>
22 | </dict>
23 | <key>IDESourceControlProjectURL</key>
24 | <string>https://github.com/stevenyao/iOSHardwareDecoder.git</string>
25 | <key>IDESourceControlProjectVersion</key>
26 | <integer>111</integer>
27 | <key>IDESourceControlProjectWCCIdentifier</key>
28 | <string>FE08CEBC6019DF8B580694C9FEE9610AECBD952B</string>
29 | <key>IDESourceControlProjectWCConfigurations</key>
30 | <array>
31 | <dict>
32 | <key>IDESourceControlRepositoryExtensionIdentifierKey</key>
33 | <string>public.vcs.git</string>
34 | <key>IDESourceControlWCCIdentifierKey</key>
35 | <string>FE08CEBC6019DF8B580694C9FEE9610AECBD952B</string>
36 | <key>IDESourceControlWCCName</key>
37 | <string>iOSHardwareDecoder</string>
38 | </dict>
39 | </array>
40 | </dict>
41 | </plist>
42 | 
--------------------------------------------------------------------------------
/H264DecodeDemo/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // H264DecodeDemo
4 | //
5 | // Created by Yao Dong on 15/8/6.
6 | // Copyright (c) 2015 duowan. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 | - (void)applicationWillResignActive:(UIApplication *)application {
24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
26 | }
27 |
28 | - (void)applicationDidEnterBackground:(UIApplication *)application {
29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
31 | }
32 |
33 | - (void)applicationWillEnterForeground:(UIApplication *)application {
34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
35 | }
36 |
37 | - (void)applicationDidBecomeActive:(UIApplication *)application {
38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
39 | }
40 |
41 | - (void)applicationWillTerminate:(UIApplication *)application {
42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
43 | }
44 |
45 | @end
46 |
--------------------------------------------------------------------------------
/H264DecodeDemo/VideoFileParser.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #include "VideoFileParser.h"
3 |
4 | const uint8_t KStartCode[4] = {0, 0, 0, 1};
5 |
6 | @implementation VideoPacket
7 | - (instancetype)initWithSize:(NSInteger)size
8 | {
9 | self = [super init];
10 | self.buffer = malloc(size);
11 | self.size = size;
12 |
13 | return self;
14 | }
15 |
16 | -(void)dealloc
17 | {
18 | free(self.buffer);
19 | }
20 | @end
21 |
22 | @interface VideoFileParser ()
23 | {
24 | uint8_t *_buffer;
25 | NSInteger _bufferSize;
26 | NSInteger _bufferCap;
27 | }
28 | @property NSString *fileName;
29 | @property NSInputStream *fileStream;
30 | @end
31 |
32 | @implementation VideoFileParser
33 |
34 | -(BOOL)open:(NSString *)fileName
35 | {
36 | _bufferSize = 0;
37 | _bufferCap = 512 * 1024;
38 | _buffer = malloc(_bufferCap);
39 | self.fileName = fileName;
40 | self.fileStream = [NSInputStream inputStreamWithFileAtPath:fileName];
41 | [self.fileStream open];
42 |
43 | return YES;
44 | }
45 |
46 | -(VideoPacket*)nextPacket
47 | {
48 | if(_bufferSize < _bufferCap && self.fileStream.hasBytesAvailable) {
49 | NSInteger readBytes = [self.fileStream read:_buffer + _bufferSize maxLength:_bufferCap - _bufferSize];
50 | _bufferSize += readBytes;
51 | }
52 |
53 | if(memcmp(_buffer, KStartCode, 4) != 0) {
54 | return nil;
55 | }
56 |
57 | if(_bufferSize >= 5) {
58 | uint8_t *bufferBegin = _buffer + 4;
59 | uint8_t *bufferEnd = _buffer + _bufferSize;
60 | while(bufferBegin != bufferEnd) {
61 | if(*bufferBegin == 0x01) {
62 | if(memcmp(bufferBegin - 3, KStartCode, 4) == 0) {
63 | NSInteger packetSize = bufferBegin - _buffer - 3;
64 | VideoPacket *vp = [[VideoPacket alloc] initWithSize:packetSize];
65 | memcpy(vp.buffer, _buffer, packetSize);
66 |
67 | memmove(_buffer, _buffer + packetSize, _bufferSize - packetSize);
68 | _bufferSize -= packetSize;
69 |
70 | return vp;
71 | }
72 | }
73 | ++bufferBegin;
74 | }
75 | }
76 |
77 | return nil;
78 | }
79 |
80 | -(void)close
81 | {
82 | free(_buffer);
83 | [self.fileStream close];
84 | }
85 |
86 | @end
--------------------------------------------------------------------------------
/H264DecodeDemo/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
(Interface Builder XML not preserved in this dump; the storyboard defines the app's single view controller scene.)
--------------------------------------------------------------------------------
/H264DecodeDemo/Base.lproj/LaunchScreen.xib:
--------------------------------------------------------------------------------
(Interface Builder XML not preserved in this dump; the default Xcode launch-screen XIB.)
--------------------------------------------------------------------------------
/H264DecodeDemo.xcodeproj/xcuserdata/yaodong.xcuserdatad/xcschemes/H264DecodeDemo.xcscheme:
--------------------------------------------------------------------------------
(Scheme XML not preserved in this dump; a standard build/run scheme for the H264DecodeDemo target.)
--------------------------------------------------------------------------------
/H264DecodeDemo/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // H264DecodeDemo
4 | //
5 | // Created by Yao Dong on 15/8/6.
6 | // Copyright (c) 2015 duowan. All rights reserved.
7 | //
8 |
9 | #import "ViewController.h"
10 | #import "VideoFileParser.h"
11 | #import "AAPLEAGLLayer.h"
12 | #import <VideoToolbox/VideoToolbox.h>
13 |
14 |
15 | @interface ViewController ()
16 | {
17 | uint8_t *_sps;
18 | NSInteger _spsSize;
19 | uint8_t *_pps;
20 | NSInteger _ppsSize;
21 | VTDecompressionSessionRef _decoderSession;
22 | CMVideoFormatDescriptionRef _decoderFormatDescription;
23 |
24 | AAPLEAGLLayer *_glLayer;
25 | }
26 | @end
27 |
28 | static void didDecompress( void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){
29 |
30 | CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
31 | *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
32 | }
33 |
34 | @implementation ViewController
35 |
36 | -(BOOL)initH264Decoder {
37 | if(_decoderSession) {
38 | return YES;
39 | }
40 |
41 | const uint8_t* const parameterSetPointers[2] = { _sps, _pps };
42 | const size_t parameterSetSizes[2] = { _spsSize, _ppsSize };
43 | OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
44 | 2, //param count
45 | parameterSetPointers,
46 | parameterSetSizes,
47 | 4, //nal start code size
48 | &_decoderFormatDescription);
49 |
50 | if(status == noErr) {
51 | CFDictionaryRef attrs = NULL;
52 | const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
53 | // kCVPixelFormatType_420YpCbCr8Planar is YUV420
54 | // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12
55 | uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
56 | const void *values[] = { CFNumberCreate(NULL, kCFNumberSInt32Type, &v) };
57 | attrs = CFDictionaryCreate(NULL, keys, values, 1, NULL, NULL);
58 |
59 | VTDecompressionOutputCallbackRecord callBackRecord;
60 | callBackRecord.decompressionOutputCallback = didDecompress;
61 | callBackRecord.decompressionOutputRefCon = NULL;
62 |
63 | status = VTDecompressionSessionCreate(kCFAllocatorDefault,
64 | _decoderFormatDescription,
65 | NULL, attrs,
66 | &callBackRecord,
67 | &_decoderSession);
68 | CFRelease(attrs);
69 | } else {
70 | NSLog(@"IOS8VT: reset decoder session failed status=%d", status);
71 | }
72 |
73 | return status == noErr;
74 | }
75 |
76 | -(void)clearH264Decoder {
77 | if(_decoderSession) {
78 | VTDecompressionSessionInvalidate(_decoderSession);
79 | CFRelease(_decoderSession);
80 | _decoderSession = NULL;
81 | }
82 |
83 | if(_decoderFormatDescription) {
84 | CFRelease(_decoderFormatDescription);
85 | _decoderFormatDescription = NULL;
86 | }
87 |
88 | free(_sps);
89 | free(_pps);
90 | _spsSize = _ppsSize = 0;
91 | }
92 |
93 | -(CVPixelBufferRef)decode:(VideoPacket*)vp {
94 | CVPixelBufferRef outputPixelBuffer = NULL;
95 |
96 | CMBlockBufferRef blockBuffer = NULL;
97 | OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
98 | (void*)vp.buffer, vp.size,
99 | kCFAllocatorNull,
100 | NULL, 0, vp.size,
101 | 0, &blockBuffer);
102 | if(status == kCMBlockBufferNoErr) {
103 | CMSampleBufferRef sampleBuffer = NULL;
104 | const size_t sampleSizeArray[] = {vp.size};
105 | status = CMSampleBufferCreateReady(kCFAllocatorDefault,
106 | blockBuffer,
107 | _decoderFormatDescription ,
108 | 1, 0, NULL, 1, sampleSizeArray,
109 | &sampleBuffer);
110 | if (status == kCMBlockBufferNoErr && sampleBuffer) {
111 | VTDecodeFrameFlags flags = 0;
112 | VTDecodeInfoFlags flagOut = 0;
113 | OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decoderSession,
114 | sampleBuffer,
115 | flags,
116 | &outputPixelBuffer,
117 | &flagOut);
118 |
119 | if(decodeStatus == kVTInvalidSessionErr) {
120 | NSLog(@"IOS8VT: Invalid session, reset decoder session");
121 | } else if(decodeStatus == kVTVideoDecoderBadDataErr) {
122 | NSLog(@"IOS8VT: decode failed status=%d(Bad data)", decodeStatus);
123 | } else if(decodeStatus != noErr) {
124 | NSLog(@"IOS8VT: decode failed status=%d", decodeStatus);
125 | }
126 |
127 | CFRelease(sampleBuffer);
128 | }
129 | CFRelease(blockBuffer);
130 | }
131 |
132 | return outputPixelBuffer;
133 | }
134 |
135 | -(void)decodeFile:(NSString*)fileName fileExt:(NSString*)fileExt {
136 | NSString *path = [[NSBundle mainBundle] pathForResource:fileName ofType:fileExt];
137 | VideoFileParser *parser = [[VideoFileParser alloc] init];
138 | [parser open:path];
139 |
140 | VideoPacket *vp = nil;
141 | while(true) {
142 | vp = [parser nextPacket];
143 | if(vp == nil) {
144 | break;
145 | }
146 |
147 | uint32_t nalSize = (uint32_t)(vp.size - 4);
148 | uint8_t *pNalSize = (uint8_t*)(&nalSize);
149 | vp.buffer[0] = *(pNalSize + 3);
150 | vp.buffer[1] = *(pNalSize + 2);
151 | vp.buffer[2] = *(pNalSize + 1);
152 | vp.buffer[3] = *(pNalSize);
153 |
154 | CVPixelBufferRef pixelBuffer = NULL;
155 | int nalType = vp.buffer[4] & 0x1F;
156 | switch (nalType) {
157 | case 0x05:
158 | NSLog(@"Nal type is IDR frame");
159 | if([self initH264Decoder]) {
160 | pixelBuffer = [self decode:vp];
161 | }
162 | break;
163 | case 0x07:
164 | NSLog(@"Nal type is SPS");
165 | _spsSize = vp.size - 4;
166 | _sps = malloc(_spsSize);
167 | memcpy(_sps, vp.buffer + 4, _spsSize);
168 | break;
169 | case 0x08:
170 | NSLog(@"Nal type is PPS");
171 | _ppsSize = vp.size - 4;
172 | _pps = malloc(_ppsSize);
173 | memcpy(_pps, vp.buffer + 4, _ppsSize);
174 | break;
175 |
176 | default:
177 | NSLog(@"Nal type is B/P frame");
178 | pixelBuffer = [self decode:vp];
179 | break;
180 | }
181 |
182 | if(pixelBuffer) {
183 | dispatch_sync(dispatch_get_main_queue(), ^{
184 | _glLayer.pixelBuffer = pixelBuffer;
185 | });
186 |
187 | CVPixelBufferRelease(pixelBuffer);
188 | }
189 |
190 | NSLog(@"Read Nalu size %ld", vp.size);
191 | }
192 | [parser close];
193 | }
194 |
195 | -(IBAction)on_playButton_clicked:(id)sender {
196 | dispatch_async(dispatch_get_global_queue(0, 0), ^{
197 | [self decodeFile:@"mtv" fileExt:@"h264"];
198 | });
199 | }
200 |
201 | - (void)viewDidLoad {
202 | [super viewDidLoad];
203 | // Do any additional setup after loading the view, typically from a nib.
204 |
205 | _glLayer = [[AAPLEAGLLayer alloc] initWithFrame:self.view.bounds];
206 | [self.view.layer addSublayer:_glLayer];
207 | }
208 |
209 | - (void)didReceiveMemoryWarning {
210 | [super didReceiveMemoryWarning];
211 | // Dispose of any resources that can be recreated.
212 | }
213 |
214 | @end
215 |
--------------------------------------------------------------------------------
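One gap this controller leaves open, as the README warns, is B-frame reordering: frames are displayed in decode order, and the presentationTimeStamp handed to didDecompress is ignored. A hedged sketch of the PTS reordering an app would add on top (hypothetical classes, not in the repo; kMaxReorderDepth is an assumption, since a real player derives the reorder depth from the stream):

    #import <Foundation/Foundation.h>
    #import <CoreMedia/CoreMedia.h>
    #import <CoreVideo/CoreVideo.h>

    // Frames go in, in decode order, paired with the PTS from the
    // VTDecompressionSession output callback; they come out in PTS order
    // once more than kMaxReorderDepth frames are queued.
    static const NSUInteger kMaxReorderDepth = 4; // assumed maximum B-frame depth

    @interface ReorderedFrame : NSObject
    @property (nonatomic) CMTime pts;
    @property (nonatomic) CVPixelBufferRef pixelBuffer; // retained by this object
    @end

    @implementation ReorderedFrame
    - (void)dealloc { CVPixelBufferRelease(_pixelBuffer); }
    @end

    @interface FrameReorderQueue : NSObject
    - (void)push:(CVPixelBufferRef)pb pts:(CMTime)pts;
    - (CVPixelBufferRef)pop; // NULL until the queue is deep enough; caller releases
    @end

    @implementation FrameReorderQueue {
        NSMutableArray *_frames; // ReorderedFrame objects, kept sorted by ascending PTS
    }
    - (instancetype)init {
        if ((self = [super init])) _frames = [NSMutableArray array];
        return self;
    }
    - (void)push:(CVPixelBufferRef)pb pts:(CMTime)pts {
        ReorderedFrame *f = [ReorderedFrame new];
        f.pixelBuffer = CVPixelBufferRetain(pb);
        f.pts = pts;
        [_frames addObject:f];
        [_frames sortUsingComparator:^NSComparisonResult(ReorderedFrame *a, ReorderedFrame *b) {
            int32_t c = CMTimeCompare(a.pts, b.pts);
            return c < 0 ? NSOrderedAscending : (c > 0 ? NSOrderedDescending : NSOrderedSame);
        }];
    }
    - (CVPixelBufferRef)pop {
        if (_frames.count <= kMaxReorderDepth) return NULL;
        ReorderedFrame *f = _frames.firstObject;
        CVPixelBufferRef pb = CVPixelBufferRetain(f.pixelBuffer);
        [_frames removeObjectAtIndex:0];
        return pb;
    }
    @end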
/H264DecodeDemo.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | DAE85E9B1B733B2600062371 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = DAE85E9A1B733B2600062371 /* main.m */; };
11 | DAE85E9E1B733B2600062371 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = DAE85E9D1B733B2600062371 /* AppDelegate.m */; };
12 | DAE85EA11B733B2600062371 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = DAE85EA01B733B2600062371 /* ViewController.m */; };
13 | DAE85EA41B733B2600062371 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = DAE85EA21B733B2600062371 /* Main.storyboard */; };
14 | DAE85EA61B733B2600062371 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = DAE85EA51B733B2600062371 /* Images.xcassets */; };
15 | DAE85EA91B733B2600062371 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = DAE85EA71B733B2600062371 /* LaunchScreen.xib */; };
16 | DAE85EC01B733FFA00062371 /* VideoFileParser.m in Sources */ = {isa = PBXBuildFile; fileRef = DAE85EBF1B733FFA00062371 /* VideoFileParser.m */; };
17 | DAE85EC51B7349F000062371 /* AAPLEAGLLayer.m in Sources */ = {isa = PBXBuildFile; fileRef = DAE85EC41B7349F000062371 /* AAPLEAGLLayer.m */; };
18 | DAE85ED61B81C94100062371 /* mtv.h264 in Resources */ = {isa = PBXBuildFile; fileRef = DAE85ED51B81C94100062371 /* mtv.h264 */; };
19 | /* End PBXBuildFile section */
20 |
21 | /* Begin PBXFileReference section */
22 | DAE85E951B733B2600062371 /* H264DecodeDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = H264DecodeDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
23 | DAE85E991B733B2600062371 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
24 | DAE85E9A1B733B2600062371 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
25 | DAE85E9C1B733B2600062371 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
26 | DAE85E9D1B733B2600062371 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
27 | DAE85E9F1B733B2600062371 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
28 | DAE85EA01B733B2600062371 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = "<group>"; };
29 | DAE85EA31B733B2600062371 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
30 | DAE85EA51B733B2600062371 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = "<group>"; };
31 | DAE85EA81B733B2600062371 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/LaunchScreen.xib; sourceTree = "<group>"; };
32 | DAE85EBE1B733FFA00062371 /* VideoFileParser.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VideoFileParser.h; sourceTree = "<group>"; };
33 | DAE85EBF1B733FFA00062371 /* VideoFileParser.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = VideoFileParser.m; sourceTree = "<group>"; };
34 | DAE85EC31B7349F000062371 /* AAPLEAGLLayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLEAGLLayer.h; sourceTree = "<group>"; };
35 | DAE85EC41B7349F000062371 /* AAPLEAGLLayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AAPLEAGLLayer.m; sourceTree = "<group>"; };
36 | DAE85ED51B81C94100062371 /* mtv.h264 */ = {isa = PBXFileReference; lastKnownFileType = file; path = mtv.h264; sourceTree = "<group>"; };
37 | /* End PBXFileReference section */
38 |
39 | /* Begin PBXFrameworksBuildPhase section */
40 | DAE85E921B733B2600062371 /* Frameworks */ = {
41 | isa = PBXFrameworksBuildPhase;
42 | buildActionMask = 2147483647;
43 | files = (
44 | );
45 | runOnlyForDeploymentPostprocessing = 0;
46 | };
47 | /* End PBXFrameworksBuildPhase section */
48 |
49 | /* Begin PBXGroup section */
50 | DAE85E8C1B733B2600062371 = {
51 | isa = PBXGroup;
52 | children = (
53 | DAE85ED51B81C94100062371 /* mtv.h264 */,
54 | DAE85E971B733B2600062371 /* H264DecodeDemo */,
55 | DAE85E961B733B2600062371 /* Products */,
56 | );
57 | sourceTree = "<group>";
58 | };
59 | DAE85E961B733B2600062371 /* Products */ = {
60 | isa = PBXGroup;
61 | children = (
62 | DAE85E951B733B2600062371 /* H264DecodeDemo.app */,
63 | );
64 | name = Products;
65 | sourceTree = "<group>";
66 | };
67 | DAE85E971B733B2600062371 /* H264DecodeDemo */ = {
68 | isa = PBXGroup;
69 | children = (
70 | DAE85E9C1B733B2600062371 /* AppDelegate.h */,
71 | DAE85E9D1B733B2600062371 /* AppDelegate.m */,
72 | DAE85EBE1B733FFA00062371 /* VideoFileParser.h */,
73 | DAE85EBF1B733FFA00062371 /* VideoFileParser.m */,
74 | DAE85EC31B7349F000062371 /* AAPLEAGLLayer.h */,
75 | DAE85EC41B7349F000062371 /* AAPLEAGLLayer.m */,
76 | DAE85E9F1B733B2600062371 /* ViewController.h */,
77 | DAE85EA01B733B2600062371 /* ViewController.m */,
78 | DAE85EA21B733B2600062371 /* Main.storyboard */,
79 | DAE85EA51B733B2600062371 /* Images.xcassets */,
80 | DAE85EA71B733B2600062371 /* LaunchScreen.xib */,
81 | DAE85E981B733B2600062371 /* Supporting Files */,
82 | );
83 | path = H264DecodeDemo;
84 | sourceTree = "<group>";
85 | };
86 | DAE85E981B733B2600062371 /* Supporting Files */ = {
87 | isa = PBXGroup;
88 | children = (
89 | DAE85E991B733B2600062371 /* Info.plist */,
90 | DAE85E9A1B733B2600062371 /* main.m */,
91 | );
92 | name = "Supporting Files";
93 | sourceTree = "<group>";
94 | };
95 | /* End PBXGroup section */
96 |
97 | /* Begin PBXNativeTarget section */
98 | DAE85E941B733B2600062371 /* H264DecodeDemo */ = {
99 | isa = PBXNativeTarget;
100 | buildConfigurationList = DAE85EB81B733B2600062371 /* Build configuration list for PBXNativeTarget "H264DecodeDemo" */;
101 | buildPhases = (
102 | DAE85E911B733B2600062371 /* Sources */,
103 | DAE85E921B733B2600062371 /* Frameworks */,
104 | DAE85E931B733B2600062371 /* Resources */,
105 | );
106 | buildRules = (
107 | );
108 | dependencies = (
109 | );
110 | name = H264DecodeDemo;
111 | productName = H264DecodeDemo;
112 | productReference = DAE85E951B733B2600062371 /* H264DecodeDemo.app */;
113 | productType = "com.apple.product-type.application";
114 | };
115 | /* End PBXNativeTarget section */
116 |
117 | /* Begin PBXProject section */
118 | DAE85E8D1B733B2600062371 /* Project object */ = {
119 | isa = PBXProject;
120 | attributes = {
121 | LastUpgradeCheck = 0640;
122 | ORGANIZATIONNAME = duowan;
123 | TargetAttributes = {
124 | DAE85E941B733B2600062371 = {
125 | CreatedOnToolsVersion = 6.4;
126 | };
127 | };
128 | };
129 | buildConfigurationList = DAE85E901B733B2600062371 /* Build configuration list for PBXProject "H264DecodeDemo" */;
130 | compatibilityVersion = "Xcode 3.2";
131 | developmentRegion = English;
132 | hasScannedForEncodings = 0;
133 | knownRegions = (
134 | en,
135 | Base,
136 | );
137 | mainGroup = DAE85E8C1B733B2600062371;
138 | productRefGroup = DAE85E961B733B2600062371 /* Products */;
139 | projectDirPath = "";
140 | projectRoot = "";
141 | targets = (
142 | DAE85E941B733B2600062371 /* H264DecodeDemo */,
143 | );
144 | };
145 | /* End PBXProject section */
146 |
147 | /* Begin PBXResourcesBuildPhase section */
148 | DAE85E931B733B2600062371 /* Resources */ = {
149 | isa = PBXResourcesBuildPhase;
150 | buildActionMask = 2147483647;
151 | files = (
152 | DAE85EA41B733B2600062371 /* Main.storyboard in Resources */,
153 | DAE85ED61B81C94100062371 /* mtv.h264 in Resources */,
154 | DAE85EA91B733B2600062371 /* LaunchScreen.xib in Resources */,
155 | DAE85EA61B733B2600062371 /* Images.xcassets in Resources */,
156 | );
157 | runOnlyForDeploymentPostprocessing = 0;
158 | };
159 | /* End PBXResourcesBuildPhase section */
160 |
161 | /* Begin PBXSourcesBuildPhase section */
162 | DAE85E911B733B2600062371 /* Sources */ = {
163 | isa = PBXSourcesBuildPhase;
164 | buildActionMask = 2147483647;
165 | files = (
166 | DAE85EC01B733FFA00062371 /* VideoFileParser.m in Sources */,
167 | DAE85EA11B733B2600062371 /* ViewController.m in Sources */,
168 | DAE85E9E1B733B2600062371 /* AppDelegate.m in Sources */,
169 | DAE85E9B1B733B2600062371 /* main.m in Sources */,
170 | DAE85EC51B7349F000062371 /* AAPLEAGLLayer.m in Sources */,
171 | );
172 | runOnlyForDeploymentPostprocessing = 0;
173 | };
174 | /* End PBXSourcesBuildPhase section */
175 |
176 | /* Begin PBXVariantGroup section */
177 | DAE85EA21B733B2600062371 /* Main.storyboard */ = {
178 | isa = PBXVariantGroup;
179 | children = (
180 | DAE85EA31B733B2600062371 /* Base */,
181 | );
182 | name = Main.storyboard;
183 | sourceTree = "<group>";
184 | };
185 | DAE85EA71B733B2600062371 /* LaunchScreen.xib */ = {
186 | isa = PBXVariantGroup;
187 | children = (
188 | DAE85EA81B733B2600062371 /* Base */,
189 | );
190 | name = LaunchScreen.xib;
191 | sourceTree = "<group>";
192 | };
193 | /* End PBXVariantGroup section */
194 |
195 | /* Begin XCBuildConfiguration section */
196 | DAE85EB61B733B2600062371 /* Debug */ = {
197 | isa = XCBuildConfiguration;
198 | buildSettings = {
199 | ALWAYS_SEARCH_USER_PATHS = NO;
200 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
201 | CLANG_CXX_LIBRARY = "libc++";
202 | CLANG_ENABLE_MODULES = YES;
203 | CLANG_ENABLE_OBJC_ARC = YES;
204 | CLANG_WARN_BOOL_CONVERSION = YES;
205 | CLANG_WARN_CONSTANT_CONVERSION = YES;
206 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
207 | CLANG_WARN_EMPTY_BODY = YES;
208 | CLANG_WARN_ENUM_CONVERSION = YES;
209 | CLANG_WARN_INT_CONVERSION = YES;
210 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
211 | CLANG_WARN_UNREACHABLE_CODE = YES;
212 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
213 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
214 | COPY_PHASE_STRIP = NO;
215 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
216 | ENABLE_STRICT_OBJC_MSGSEND = YES;
217 | GCC_C_LANGUAGE_STANDARD = gnu99;
218 | GCC_DYNAMIC_NO_PIC = NO;
219 | GCC_NO_COMMON_BLOCKS = YES;
220 | GCC_OPTIMIZATION_LEVEL = 0;
221 | GCC_PREPROCESSOR_DEFINITIONS = (
222 | "DEBUG=1",
223 | "$(inherited)",
224 | );
225 | GCC_SYMBOLS_PRIVATE_EXTERN = NO;
226 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
227 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
228 | GCC_WARN_UNDECLARED_SELECTOR = YES;
229 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
230 | GCC_WARN_UNUSED_FUNCTION = YES;
231 | GCC_WARN_UNUSED_VARIABLE = YES;
232 | IPHONEOS_DEPLOYMENT_TARGET = 8.4;
233 | MTL_ENABLE_DEBUG_INFO = YES;
234 | ONLY_ACTIVE_ARCH = YES;
235 | SDKROOT = iphoneos;
236 | TARGETED_DEVICE_FAMILY = "1,2";
237 | };
238 | name = Debug;
239 | };
240 | DAE85EB71B733B2600062371 /* Release */ = {
241 | isa = XCBuildConfiguration;
242 | buildSettings = {
243 | ALWAYS_SEARCH_USER_PATHS = NO;
244 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
245 | CLANG_CXX_LIBRARY = "libc++";
246 | CLANG_ENABLE_MODULES = YES;
247 | CLANG_ENABLE_OBJC_ARC = YES;
248 | CLANG_WARN_BOOL_CONVERSION = YES;
249 | CLANG_WARN_CONSTANT_CONVERSION = YES;
250 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
251 | CLANG_WARN_EMPTY_BODY = YES;
252 | CLANG_WARN_ENUM_CONVERSION = YES;
253 | CLANG_WARN_INT_CONVERSION = YES;
254 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
255 | CLANG_WARN_UNREACHABLE_CODE = YES;
256 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
257 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
258 | COPY_PHASE_STRIP = NO;
259 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
260 | ENABLE_NS_ASSERTIONS = NO;
261 | ENABLE_STRICT_OBJC_MSGSEND = YES;
262 | GCC_C_LANGUAGE_STANDARD = gnu99;
263 | GCC_NO_COMMON_BLOCKS = YES;
264 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
265 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
266 | GCC_WARN_UNDECLARED_SELECTOR = YES;
267 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
268 | GCC_WARN_UNUSED_FUNCTION = YES;
269 | GCC_WARN_UNUSED_VARIABLE = YES;
270 | IPHONEOS_DEPLOYMENT_TARGET = 8.4;
271 | MTL_ENABLE_DEBUG_INFO = NO;
272 | SDKROOT = iphoneos;
273 | TARGETED_DEVICE_FAMILY = "1,2";
274 | VALIDATE_PRODUCT = YES;
275 | };
276 | name = Release;
277 | };
278 | DAE85EB91B733B2600062371 /* Debug */ = {
279 | isa = XCBuildConfiguration;
280 | buildSettings = {
281 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
282 | INFOPLIST_FILE = H264DecodeDemo/Info.plist;
283 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
284 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
285 | PRODUCT_NAME = "$(TARGET_NAME)";
286 | };
287 | name = Debug;
288 | };
289 | DAE85EBA1B733B2600062371 /* Release */ = {
290 | isa = XCBuildConfiguration;
291 | buildSettings = {
292 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
293 | INFOPLIST_FILE = H264DecodeDemo/Info.plist;
294 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
295 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
296 | PRODUCT_NAME = "$(TARGET_NAME)";
297 | };
298 | name = Release;
299 | };
300 | /* End XCBuildConfiguration section */
301 |
302 | /* Begin XCConfigurationList section */
303 | DAE85E901B733B2600062371 /* Build configuration list for PBXProject "H264DecodeDemo" */ = {
304 | isa = XCConfigurationList;
305 | buildConfigurations = (
306 | DAE85EB61B733B2600062371 /* Debug */,
307 | DAE85EB71B733B2600062371 /* Release */,
308 | );
309 | defaultConfigurationIsVisible = 0;
310 | defaultConfigurationName = Release;
311 | };
312 | DAE85EB81B733B2600062371 /* Build configuration list for PBXNativeTarget "H264DecodeDemo" */ = {
313 | isa = XCConfigurationList;
314 | buildConfigurations = (
315 | DAE85EB91B733B2600062371 /* Debug */,
316 | DAE85EBA1B733B2600062371 /* Release */,
317 | );
318 | defaultConfigurationIsVisible = 0;
319 | defaultConfigurationName = Release;
320 | };
321 | /* End XCConfigurationList section */
322 | };
323 | rootObject = DAE85E8D1B733B2600062371 /* Project object */;
324 | }
325 |
--------------------------------------------------------------------------------
/H264DecodeDemo/AAPLEAGLLayer.m:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (C) 2014 Apple Inc. All Rights Reserved.
3 | See LICENSE.txt for this sample’s licensing information
4 |
5 | Abstract:
6 |
7 | This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner.
8 |
9 | */
10 |
11 | #import "AAPLEAGLLayer.h"
12 |
13 | #import <AVFoundation/AVUtilities.h>
14 | #import <mach/mach_time.h>
15 | #include <AVFoundation/AVFoundation.h>
16 | #import <UIKit/UIScreen.h>
17 | #include <OpenGLES/EAGL.h>
18 | #include <OpenGLES/ES2/gl.h>
19 | #include <OpenGLES/ES2/glext.h>
20 |
21 | // Uniform index.
22 | enum
23 | {
24 | UNIFORM_Y,
25 | UNIFORM_UV,
26 | UNIFORM_ROTATION_ANGLE,
27 | UNIFORM_COLOR_CONVERSION_MATRIX,
28 | NUM_UNIFORMS
29 | };
30 | GLint uniforms[NUM_UNIFORMS];
31 |
32 | // Attribute index.
33 | enum
34 | {
35 | ATTRIB_VERTEX,
36 | ATTRIB_TEXCOORD,
37 | NUM_ATTRIBUTES
38 | };
39 |
40 | // Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)
41 |
42 | // BT.601, which is the standard for SDTV.
43 | static const GLfloat kColorConversion601[] = {
44 | 1.164, 1.164, 1.164,
45 | 0.0, -0.392, 2.017,
46 | 1.596, -0.813, 0.0,
47 | };
48 |
49 | // BT.709, which is the standard for HDTV.
50 | static const GLfloat kColorConversion709[] = {
51 | 1.164, 1.164, 1.164,
52 | 0.0, -0.213, 2.112,
53 | 1.793, -0.533, 0.0,
54 | };
55 |
56 |
57 |
58 | @interface AAPLEAGLLayer ()
59 | {
60 | // The pixel dimensions of the CAEAGLLayer.
61 | GLint _backingWidth;
62 | GLint _backingHeight;
63 |
64 | EAGLContext *_context;
65 | CVOpenGLESTextureRef _lumaTexture;
66 | CVOpenGLESTextureRef _chromaTexture;
67 |
68 | GLuint _frameBufferHandle;
69 | GLuint _colorBufferHandle;
70 |
71 | const GLfloat *_preferredConversion;
72 | }
73 | @property GLuint program;
74 |
75 | @end
76 | @implementation AAPLEAGLLayer
77 | @synthesize pixelBuffer = _pixelBuffer;
78 |
79 | -(CVPixelBufferRef) pixelBuffer
80 | {
81 | return _pixelBuffer;
82 | }
83 |
84 | - (void)setPixelBuffer:(CVPixelBufferRef)pb
85 | {
86 | if(_pixelBuffer) {
87 | CVPixelBufferRelease(_pixelBuffer);
88 | }
89 | _pixelBuffer = CVPixelBufferRetain(pb);
90 |
91 | int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer);
92 | int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer);
93 | [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight];
94 | }
95 |
96 | - (instancetype)initWithFrame:(CGRect)frame
97 | {
98 | self = [super init];
99 | if (self) {
100 | CGFloat scale = [[UIScreen mainScreen] scale];
101 | self.contentsScale = scale;
102 |
103 | self.opaque = TRUE;
104 | self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]};
105 |
106 | [self setFrame:frame];
107 |
108 | // Set the context into which the frames will be drawn.
109 | _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
110 |
111 | if (!_context) {
112 | return nil;
113 | }
114 |
115 | // Set the default conversion to BT.709, which is the standard for HDTV.
116 | _preferredConversion = kColorConversion709;
117 |
118 | [self setupGL];
119 | }
120 |
121 | return self;
122 | }
123 |
124 | - (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight
125 | {
126 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
127 | return;
128 | }
129 |
130 | if(pixelBuffer == NULL) {
131 | NSLog(@"Pixel buffer is null");
132 | return;
133 | }
134 |
135 | CVReturn err;
136 |
137 | size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
138 |
139 | /*
140 | Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix.
141 | */
142 | CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
143 |
144 | if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
145 | _preferredConversion = kColorConversion601;
146 | }
147 | else {
148 | _preferredConversion = kColorConversion709;
149 | }
150 |
151 | /*
152 | CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef.
153 | */
154 |
155 | /*
156 | Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane.
157 | */
158 |
159 | CVOpenGLESTextureCacheRef _videoTextureCache;
160 |
161 | // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
162 | err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
163 | if (err != noErr) {
164 | NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
165 | return;
166 | }
167 |
168 | glActiveTexture(GL_TEXTURE0);
169 |
170 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
171 | _videoTextureCache,
172 | pixelBuffer,
173 | NULL,
174 | GL_TEXTURE_2D,
175 | GL_RED_EXT,
176 | frameWidth,
177 | frameHeight,
178 | GL_RED_EXT,
179 | GL_UNSIGNED_BYTE,
180 | 0,
181 | &_lumaTexture);
182 | if (err) {
183 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
184 | }
185 |
186 | glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
187 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
188 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
189 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
190 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
191 |
192 | if(planeCount == 2) {
193 | // UV-plane.
194 | glActiveTexture(GL_TEXTURE1);
195 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
196 | _videoTextureCache,
197 | pixelBuffer,
198 | NULL,
199 | GL_TEXTURE_2D,
200 | GL_RG_EXT,
201 | frameWidth / 2,
202 | frameHeight / 2,
203 | GL_RG_EXT,
204 | GL_UNSIGNED_BYTE,
205 | 1,
206 | &_chromaTexture);
207 | if (err) {
208 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
209 | }
210 |
211 | glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
212 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
213 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
214 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
215 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
216 | }
217 |
218 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
219 |
220 | // Set the view port to the entire view.
221 | glViewport(0, 0, _backingWidth, _backingHeight);
222 |
223 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
224 | glClear(GL_COLOR_BUFFER_BIT);
225 |
226 | // Use shader program.
227 | glUseProgram(self.program);
228 | // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1);
229 | // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1);
230 | glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
231 | glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
232 |
233 | // Set up the quad vertices with respect to the orientation and aspect ratio of the video.
234 | CGRect viewBounds = self.bounds;
235 | CGSize contentSize = CGSizeMake(frameWidth, frameHeight);
236 | CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);
237 |
238 | // Compute normalized quad coordinates to draw the frame into.
239 | CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
240 | CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width,
241 | vertexSamplingRect.size.height/viewBounds.size.height);
242 |
243 | // Normalize the quad vertices.
244 | if (cropScaleAmount.width > cropScaleAmount.height) {
245 | normalizedSamplingSize.width = 1.0;
246 | normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
247 | }
248 | else {
249 | normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height;
250 | normalizedSamplingSize.height = 1.0;
251 | }
252 |
253 | /*
254 | The quad vertex data defines the region of 2D plane onto which we draw our pixel buffers.
255 | Vertex data formed using (-1,-1) and (1,1) as the bottom left and top right coordinates respectively, covers the entire screen.
256 | */
257 | GLfloat quadVertexData [] = {
258 | -1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
259 | normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
260 | -1 * normalizedSamplingSize.width, normalizedSamplingSize.height,
261 | normalizedSamplingSize.width, normalizedSamplingSize.height,
262 | };
263 |
264 | // Update attribute values.
265 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
266 | glEnableVertexAttribArray(ATTRIB_VERTEX);
267 |
268 | /*
269 | The texture vertices are set up such that we flip the texture vertically. This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system.
270 | */
271 | CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1);
272 | GLfloat quadTextureData[] = {
273 | CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
274 | CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
275 | CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),
276 | CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect)
277 | };
278 |
279 | glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
280 | glEnableVertexAttribArray(ATTRIB_TEXCOORD);
281 |
282 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
283 |
284 | glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
285 | [_context presentRenderbuffer:GL_RENDERBUFFER];
286 |
287 | [self cleanUpTextures];
288 | // Periodic texture cache flush every frame
289 | CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
290 |
291 | if(_videoTextureCache) {
292 | CFRelease(_videoTextureCache);
293 | }
294 | }
295 |
296 | #pragma mark - OpenGL setup
297 |
298 | - (void)setupGL
299 | {
300 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
301 | return;
302 | }
303 |
304 | [self setupBuffers];
305 | [self loadShaders];
306 |
307 | glUseProgram(self.program);
308 |
309 | // 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively.
310 | glUniform1i(uniforms[UNIFORM_Y], 0);
311 | glUniform1i(uniforms[UNIFORM_UV], 1);
312 | glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
313 | glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
314 | }
315 |
316 | #pragma mark - Utilities
317 |
318 | - (void)setupBuffers
319 | {
320 | glDisable(GL_DEPTH_TEST);
321 |
322 | glEnableVertexAttribArray(ATTRIB_VERTEX);
323 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
324 |
325 | glEnableVertexAttribArray(ATTRIB_TEXCOORD);
326 | glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
327 |
328 | [self createBuffers];
329 | }
330 |
331 | - (void) createBuffers
332 | {
333 | glGenFramebuffers(1, &_frameBufferHandle);
334 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
335 |
336 | glGenRenderbuffers(1, &_colorBufferHandle);
337 | glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
338 |
339 | [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self];
340 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
341 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
342 |
343 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
344 | if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
345 | NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
346 | }
347 | }
348 |
349 | - (void) releaseBuffers
350 | {
351 | if(_frameBufferHandle) {
352 | glDeleteFramebuffers(1, &_frameBufferHandle);
353 | _frameBufferHandle = 0;
354 | }
355 |
356 | if(_colorBufferHandle) {
357 | glDeleteRenderbuffers(1, &_colorBufferHandle);
358 | _colorBufferHandle = 0;
359 | }
360 | }
361 |
362 | - (void) resetRenderBuffer
363 | {
364 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
365 | return;
366 | }
367 |
368 | [self releaseBuffers];
369 | [self createBuffers];
370 | }
371 |
372 | - (void) cleanUpTextures
373 | {
374 | if (_lumaTexture) {
375 | CFRelease(_lumaTexture);
376 | _lumaTexture = NULL;
377 | }
378 |
379 | if (_chromaTexture) {
380 | CFRelease(_chromaTexture);
381 | _chromaTexture = NULL;
382 | }
383 | }
384 |
385 | #pragma mark - OpenGL ES 2 shader compilation
386 |
387 | const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;"
388 | "precision mediump float;"
389 | "uniform sampler2D SamplerY;"
390 | "uniform sampler2D SamplerUV;"
391 | "uniform mat3 colorConversionMatrix;"
392 | "void main()"
393 | "{"
394 | " mediump vec3 yuv;"
395 | " lowp vec3 rgb;"
396 | // Subtract constants to map the video range start at 0
397 | " yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));"
398 | " yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));"
399 | " rgb = colorConversionMatrix * yuv;"
400 | " gl_FragColor = vec4(rgb, 1);"
401 | "}";
402 |
403 | const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;"
404 | "attribute vec2 texCoord;"
405 | "uniform float preferredRotation;"
406 | "varying vec2 texCoordVarying;"
407 | "void main()"
408 | "{"
409 | " mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,"
410 | " sin(preferredRotation), cos(preferredRotation), 0.0, 0.0,"
411 | " 0.0, 0.0, 1.0, 0.0,"
412 | " 0.0, 0.0, 0.0, 1.0);"
413 | " gl_Position = position * rotationMatrix;"
414 | " texCoordVarying = texCoord;"
415 | "}";
416 |
417 | - (BOOL)loadShaders
418 | {
419 | GLuint vertShader = 0, fragShader = 0;
420 |
421 | // Create the shader program.
422 | self.program = glCreateProgram();
423 |
424 | if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) {
425 | NSLog(@"Failed to compile vertex shader");
426 | return NO;
427 | }
428 |
429 | if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) {
430 | NSLog(@"Failed to compile fragment shader");
431 | return NO;
432 | }
433 |
434 | // Attach vertex shader to program.
435 | glAttachShader(self.program, vertShader);
436 |
437 | // Attach fragment shader to program.
438 | glAttachShader(self.program, fragShader);
439 |
440 | // Bind attribute locations. This needs to be done prior to linking.
441 | glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
442 | glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord");
443 |
444 | // Link the program.
445 | if (![self linkProgram:self.program]) {
446 | NSLog(@"Failed to link program: %d", self.program);
447 |
448 | if (vertShader) {
449 | glDeleteShader(vertShader);
450 | vertShader = 0;
451 | }
452 | if (fragShader) {
453 | glDeleteShader(fragShader);
454 | fragShader = 0;
455 | }
456 | if (self.program) {
457 | glDeleteProgram(self.program);
458 | self.program = 0;
459 | }
460 |
461 | return NO;
462 | }
463 |
464 | // Get uniform locations.
465 | uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
466 | uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
467 | // uniforms[UNIFORM_LUMA_THRESHOLD] = glGetUniformLocation(self.program, "lumaThreshold");
468 | // uniforms[UNIFORM_CHROMA_THRESHOLD] = glGetUniformLocation(self.program, "chromaThreshold");
469 | uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation");
470 | uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
471 |
472 | // Release vertex and fragment shaders.
473 | if (vertShader) {
474 | glDetachShader(self.program, vertShader);
475 | glDeleteShader(vertShader);
476 | }
477 | if (fragShader) {
478 | glDetachShader(self.program, fragShader);
479 | glDeleteShader(fragShader);
480 | }
481 |
482 | return YES;
483 | }
484 |
485 | - (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString
486 | {
487 | *shader = glCreateShader(type);
488 | glShaderSource(*shader, 1, &shaderString, NULL);
489 | glCompileShader(*shader);
490 |
491 | #if defined(DEBUG)
492 | GLint logLength;
493 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
494 | if (logLength > 0) {
495 | GLchar *log = (GLchar *)malloc(logLength);
496 | glGetShaderInfoLog(*shader, logLength, &logLength, log);
497 | NSLog(@"Shader compile log:\n%s", log);
498 | free(log);
499 | }
500 | #endif
501 |
502 | GLint status = 0;
503 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
504 | if (status == 0) {
505 | glDeleteShader(*shader);
506 | return NO;
507 | }
508 |
509 | return YES;
510 | }
511 |
512 | - (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL
513 | {
514 | NSError *error;
515 | NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
516 | if (sourceString == nil) {
517 | NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]);
518 | return NO;
519 | }
520 |
521 | const GLchar *source = (GLchar *)[sourceString UTF8String];
522 |
523 | return [self compileShaderString:shader type:type shaderString:source];
524 | }
525 |
526 | - (BOOL)linkProgram:(GLuint)prog
527 | {
528 | GLint status;
529 | glLinkProgram(prog);
530 |
531 | #if defined(DEBUG)
532 | GLint logLength;
533 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
534 | if (logLength > 0) {
535 | GLchar *log = (GLchar *)malloc(logLength);
536 | glGetProgramInfoLog(prog, logLength, &logLength, log);
537 | NSLog(@"Program link log:\n%s", log);
538 | free(log);
539 | }
540 | #endif
541 |
542 | glGetProgramiv(prog, GL_LINK_STATUS, &status);
543 | if (status == 0) {
544 | return NO;
545 | }
546 |
547 | return YES;
548 | }
549 |
550 | - (BOOL)validateProgram:(GLuint)prog
551 | {
552 | GLint logLength, status;
553 |
554 | glValidateProgram(prog);
555 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
556 | if (logLength > 0) {
557 | GLchar *log = (GLchar *)malloc(logLength);
558 | glGetProgramInfoLog(prog, logLength, &logLength, log);
559 | NSLog(@"Program validate log:\n%s", log);
560 | free(log);
561 | }
562 |
563 | glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
564 | if (status == 0) {
565 | return NO;
566 | }
567 |
568 | return YES;
569 | }
570 |
571 | - (void)dealloc
572 | {
573 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
574 | return;
575 | }
576 |
577 | [self cleanUpTextures];
578 |
579 | if(_pixelBuffer) {
580 | CVPixelBufferRelease(_pixelBuffer);
581 | }
582 |
583 | if (self.program) {
584 | glDeleteProgram(self.program);
585 | self.program = 0;
586 | }
587 | if(_context) {
588 | //[_context release];
589 | _context = nil;
590 | }
591 | //[super dealloc];
592 | }
593 |
594 | @end
595 |
--------------------------------------------------------------------------------
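For reference, with the column-major matrices above (uploaded with transpose = GL_FALSE), the fragment shader's rgb = colorConversionMatrix * yuv expands for BT.601 to:

    R = 1.164 (Y - 16/255) + 1.596 (Cr - 0.5)
    G = 1.164 (Y - 16/255) - 0.392 (Cb - 0.5) - 0.813 (Cr - 0.5)
    B = 1.164 (Y - 16/255) + 2.017 (Cb - 0.5)

where the 16/255 and 0.5 offsets are the video-range adjustments the shader subtracts before the multiply, i.e. yuv = (Y' - 16/255, Cb - 0.5, Cr - 0.5).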