├── .gitignore
├── LICENSE
├── README.md
├── H264 Encode and Decode
│   └── Images.xcassets
│       └── AppIcon.appiconset
│           └── Contents.json
├── H264 Encode and Decode.xcodeproj
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   ├── xcshareddata
│   │   │   ├── IDEWorkspaceChecks.plist
│   │   │   └── WorkspaceSettings.xcsettings
│   │   └── xcuserdata
│   │       └── AJB.xcuserdatad
│   │           └── UserInterfaceState.xcuserstate
│   └── xcuserdata
│       └── AJB.xcuserdatad
│           ├── xcdebugger
│           │   └── Breakpoints_v2.xcbkptlist
│           └── xcschemes
│               ├── VTDemoOniPad.xcscheme
│               └── xcschememanagement.plist
└── VTDemoOniPad
    ├── AAPLEAGLLayer.h
    ├── AAPLEAGLLayer.m
    ├── AppDelegate.h
    ├── AppDelegate.m
    ├── Assets.xcassets
    │   ├── Contents.json
    │   └── AppIcon.appiconset
    │       ├── Contents.json
    │       ├── Icon-76.png
    │       ├── Icon-76@2x.png
    │       ├── Icon-Small-40.png
    │       ├── Icon-Small-40@2x.png
    │       ├── Icon-Small.png
    │       └── Icon-Small@2x.png
    ├── Base.lproj
    │   ├── LaunchScreen.storyboard
    │   └── Main.storyboard
    ├── H264HwEncoderImpl.h
    ├── H264HwEncoderImpl.m
    ├── Info.plist
    ├── main.m
    ├── VideoFileParser.h
    ├── VideoFileParser.m
    ├── ViewController.h
    └── ViewController.m
/VTDemoOniPad/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-76.png
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-76@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-76@2x.png
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small.png
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/xcuserdata/AJB.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Bucket
3 |    type = "1"
4 |    version = "2.0">
5 | </Bucket>
6 |
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small-40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small-40.png
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x.png
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/project.xcworkspace/xcuserdata/AJB.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LevyGG/iOS-H.264-hareware-encode-and-decode/HEAD/H264 Encode and Decode.xcodeproj/project.xcworkspace/xcuserdata/AJB.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/VTDemoOniPad/VideoFileParser.h:
--------------------------------------------------------------------------------
1 | #include <Foundation/Foundation.h>
2 |
3 | @interface VideoPacket : NSObject
4 |
5 | @property uint8_t* buffer;
6 | @property NSInteger size;
7 |
8 | @end
9 |
10 | @interface VideoFileParser : NSObject
11 |
12 | -(BOOL)open:(NSString*)fileName;
13 | -(VideoPacket *)nextPacket;
14 | -(void)close;
15 |
16 | @end
17 |
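18 | // Note: -nextPacket returns each NAL unit with its 4-byte Annex-B start code
19 | // (00 00 00 01) still attached; the caller overwrites those bytes with a
20 | // big-endian NAL length before handing the buffer to VideoToolbox
21 | // (see -decodeFile:fileExt: in ViewController.m).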
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/VTDemoOniPad/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // VTDemoOniPad
4 | //
5 | // Created by AJB on 16/4/25.
6 | //  Copyright © 2016 AJB. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/VTDemoOniPad/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // VTDemoOniPad
4 | //
5 | // Created by AJB on 16/4/25.
6 | //  Copyright © 2016 AJB. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | // Encoding
11 | #import "H264HwEncoderImpl.h"
12 | @interface ViewController : UIViewController <H264HwEncoderImplDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
13 |
14 |
15 | @end
16 |
17 |
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>BuildSystemType</key>
6 | 	<string>Original</string>
7 | 	<key>PreviewsEnabled</key>
8 | 	<false/>
9 | </dict>
10 | </plist>
11 |
--------------------------------------------------------------------------------
/VTDemoOniPad/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // VTDemoOniPad
4 | //
5 | // Created by AJB on 16/4/25.
6 | //  Copyright © 2016 AJB. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## User settings
6 | xcuserdata/
7 |
8 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
9 | *.xcscmblueprint
10 | *.xccheckout
11 |
12 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
13 | build/
14 | DerivedData/
15 | *.moved-aside
16 | *.pbxuser
17 | !default.pbxuser
18 | *.mode1v3
19 | !default.mode1v3
20 | *.mode2v3
21 | !default.mode2v3
22 | *.perspectivev3
23 | !default.perspectivev3
24 |
--------------------------------------------------------------------------------
/VTDemoOniPad/AAPLEAGLLayer.h:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (C) 2014 Apple Inc. All Rights Reserved.
3 | See LICENSE.txt for this sample’s licensing information
4 |
5 | Abstract:
6 |
7 | This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner.
8 |
9 | */
10 |
11 | //@import QuartzCore;
12 | #include <QuartzCore/QuartzCore.h>
13 | #include <CoreVideo/CVPixelBuffer.h>
14 |
15 | @interface AAPLEAGLLayer : CAEAGLLayer
16 | @property CVPixelBufferRef pixelBuffer;
17 | - (id)initWithFrame:(CGRect)frame;
18 | - (void)resetRenderBuffer;
19 | @end
20 |
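21 | // Usage (see -playerAction: in ViewController.m): create the layer with -initWithFrame:,
22 | // add it as a sublayer of the view's layer, then assign each decoded CVPixelBufferRef
23 | // to the pixelBuffer property.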
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/xcuserdata/AJB.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>VTDemoOniPad.xcscheme</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>0</integer>
11 | 		</dict>
12 | 	</dict>
13 | 	<key>SuppressBuildableAutocreation</key>
14 | 	<dict>
15 | 		<key>2575C6101CCE03EF00160B73</key>
16 | 		<dict>
17 | 			<key>primary</key>
18 | 			<true/>
19 | 		</dict>
20 | 	</dict>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/VTDemoOniPad/H264HwEncoderImpl.h:
--------------------------------------------------------------------------------
1 | //
2 | // H264HwEncoderImpl.h
3 | // h264v1
4 | //
5 | // Created by Ganvir, Manish on 3/31/15.
6 | // Copyright (c) 2015 Ganvir, Manish. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | @import AVFoundation;
11 | @protocol H264HwEncoderImplDelegate <NSObject>
12 |
13 | - (void)gotSpsPps:(NSData*)sps pps:(NSData*)pps;
14 | - (void)gotEncodedData:(NSData*)data isKeyFrame:(BOOL)isKeyFrame;
15 |
16 | @end
17 | @interface H264HwEncoderImpl : NSObject
18 |
19 | - (void) initWithConfiguration;
20 | - (void) initEncode:(int)width height:(int)height;
21 | - (void) encode:(CMSampleBufferRef )sampleBuffer;
22 | - (void) End;
23 |
24 | @property (weak, nonatomic) NSString *error;
25 | @property (weak, nonatomic) id <H264HwEncoderImplDelegate> delegate;
26 |
27 | @end
28 |
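29 | // Typical call sequence (see ViewController.m): -initWithConfiguration once,
30 | // -initEncode:height: when capture starts, -encode: for every captured
31 | // CMSampleBufferRef, and -End to flush and tear the session down. SPS/PPS and
32 | // encoded NAL unit payloads come back through the delegate methods above.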
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 LevyGG
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/VTDemoOniPad/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>en</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIcons</key>
10 | 	<dict/>
11 | 	<key>CFBundleIcons~ipad</key>
12 | 	<dict/>
13 | 	<key>CFBundleIdentifier</key>
14 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
15 | 	<key>CFBundleInfoDictionaryVersion</key>
16 | 	<string>6.0</string>
17 | 	<key>CFBundleName</key>
18 | 	<string>$(PRODUCT_NAME)</string>
19 | 	<key>CFBundlePackageType</key>
20 | 	<string>APPL</string>
21 | 	<key>CFBundleShortVersionString</key>
22 | 	<string>1.0</string>
23 | 	<key>CFBundleSignature</key>
24 | 	<string>????</string>
25 | 	<key>CFBundleVersion</key>
26 | 	<string>1</string>
27 | 	<key>LSRequiresIPhoneOS</key>
28 | 	<true/>
29 | 	<key>NSCameraUsageDescription</key>
30 | 	<string></string>
31 | 	<key>UIFileSharingEnabled</key>
32 | 	<true/>
33 | 	<key>UILaunchStoryboardName</key>
34 | 	<string>LaunchScreen</string>
35 | 	<key>UIMainStoryboardFile</key>
36 | 	<string>Main</string>
37 | 	<key>UIRequiredDeviceCapabilities</key>
38 | 	<array>
39 | 		<string>armv7</string>
40 | 	</array>
41 | 	<key>UISupportedInterfaceOrientations</key>
42 | 	<array>
43 | 		<string>UIInterfaceOrientationPortrait</string>
44 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
45 | 		<string>UIInterfaceOrientationLandscapeRight</string>
46 | 	</array>
47 | </dict>
48 | </plist>
49 |
--------------------------------------------------------------------------------
/VTDemoOniPad/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # iOS-H.264-hareware-encode-and-decode
2 | H.264 hardware encoding and decoding with Video Toolbox
3 |
4 | This demo implements H.264 hardware encoding and hardware decoding: camera frames are compressed into an H.264 stream, and that stream is decompressed back into CVPixelBuffers and rendered with OpenGL. The video source is the camera, and landscape orientation is recommended while testing (it was written on an iPad).
5 |
6 | 1. Tap the "start" button to begin recording: the captured video is encoded to raw H.264 and written into the sandbox as "test.h264" (it is a plain byte stream, so you could just as well name it "xxx.mov", "xxx.txt", or anything else).
7 |
8 | 2. Tap the "stop" button to stop capturing and encoding.
9 |
10 | 3. Tap the "play" button to read test.h264 back from the sandbox, decode it, and replay the recording.
11 |
12 | 4. Tap "stop" again to stop decoding and tear down the player.
13 |
14 | After those steps you can retrieve test.h264 through iTunes: connect the device, select it in iTunes, and look under File Sharing for the file saved in step 1. Copy it to your computer, change the suffix to ".mov", and MPlayerX will play it directly.
15 |
16 | This demo combines H.264 encoding and decoding in one project; it was put together from the blog posts, open-source demos, and official Apple material below.
17 |
18 | Reference blog posts:
19 |
20 | http://www.jianshu.com/p/a6530fa46a88
21 |
22 | http://www.zhihu.com/question/20692215/answer/37458146
23 |
24 | Official Apple reference:
25 |
26 | WWDC 2014 session 513, "Direct Access to Video Encoding and Decoding"
27 |
28 | Reference open-source projects:
29 |
30 | https://github.com/stevenyao/iOSHardwareDecoder (decode)
31 |
32 | https://github.com/manishganvir/iOS-h264Hw-Toolbox (encode)
33 |
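34 | The encoder returns raw NAL unit payloads through `H264HwEncoderImplDelegate`, so each payload has to be prefixed with a 00 00 00 01 Annex-B start code before it is appended to test.h264. A minimal sketch of such a delegate, simplified from what ViewController does (`fileHandle` is assumed to be an NSFileHandle opened for writing):
35 |
36 | ```objc
37 | - (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps {
38 |     // SPS and PPS arrive once per key frame; write each with its own start code.
39 |     const char startCode[] = "\x00\x00\x00\x01";
40 |     NSData *header = [NSData dataWithBytes:startCode length:4];
41 |     [fileHandle writeData:header];
42 |     [fileHandle writeData:sps];
43 |     [fileHandle writeData:header];
44 |     [fileHandle writeData:pps];
45 | }
46 |
47 | - (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame {
48 |     // Every encoded NAL unit payload gets the same 4-byte start code prefix.
49 |     const char startCode[] = "\x00\x00\x00\x01";
50 |     [fileHandle writeData:[NSData dataWithBytes:startCode length:4]];
51 |     [fileHandle writeData:data];
52 | }
53 | ```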
--------------------------------------------------------------------------------
/H264 Encode and Decode/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/VTDemoOniPad/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // VTDemoOniPad
4 | //
5 | // Created by AJB on 16/4/25.
6 | //  Copyright © 2016 AJB. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 | - (void)applicationWillResignActive:(UIApplication *)application {
24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
26 | }
27 |
28 | - (void)applicationDidEnterBackground:(UIApplication *)application {
29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
31 | }
32 |
33 | - (void)applicationWillEnterForeground:(UIApplication *)application {
34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
35 | }
36 |
37 | - (void)applicationDidBecomeActive:(UIApplication *)application {
38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
39 | }
40 |
41 | - (void)applicationWillTerminate:(UIApplication *)application {
42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
43 | }
44 |
45 | @end
46 |
--------------------------------------------------------------------------------
/VTDemoOniPad/VideoFileParser.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #include "VideoFileParser.h"
3 |
4 | const uint8_t KStartCode[4] = {0, 0, 0, 1};
5 |
6 | @implementation VideoPacket
7 | - (instancetype)initWithSize:(NSInteger)size
8 | {
9 | self = [super init];
10 | self.buffer = malloc(size);
11 | self.size = size;
12 |
13 | return self;
14 | }
15 |
16 | -(void)dealloc
17 | {
18 | free(self.buffer);
19 | }
20 | @end
21 |
22 | @interface VideoFileParser ()
23 | {
24 | uint8_t *_buffer;
25 | NSInteger _bufferSize;
26 | NSInteger _bufferCap;
27 | }
28 | @property NSString *fileName;
29 | @property NSInputStream *fileStream;
30 | @end
31 |
32 | @implementation VideoFileParser
33 |
34 | -(BOOL)open:(NSString *)fileName
35 | {
36 | _bufferSize = 0;
37 | // _bufferCap = 512 * 1024;
38 | // _bufferCap = 1080 * 1920;
39 | _bufferCap = 720 * 1280;
40 | _buffer = malloc(_bufferCap);
41 | self.fileName = fileName;
42 | self.fileStream = [NSInputStream inputStreamWithFileAtPath:fileName];
43 | [self.fileStream open];
44 |
45 | return YES;
46 | }
47 |
48 | -(VideoPacket*)nextPacket
49 | {
50 | if(_bufferSize < _bufferCap && self.fileStream.hasBytesAvailable) {
51 | NSInteger readBytes = [self.fileStream read:_buffer + _bufferSize maxLength:_bufferCap - _bufferSize];
52 | _bufferSize += readBytes;
53 | }
54 |
55 | if(memcmp(_buffer, KStartCode, 4) != 0) {
56 | return nil;
57 | }
58 |
59 | if(_bufferSize >= 5) {
60 | uint8_t *bufferBegin = _buffer + 4;
61 | uint8_t *bufferEnd = _buffer + _bufferSize;
62 | while(bufferBegin != bufferEnd) {
63 | if(*bufferBegin == 0x01) {
64 | if(memcmp(bufferBegin - 3, KStartCode, 4) == 0) {
65 | NSInteger packetSize = bufferBegin - _buffer - 3;
66 | VideoPacket *vp = [[VideoPacket alloc] initWithSize:packetSize];
67 | memcpy(vp.buffer, _buffer, packetSize);
68 |
69 | memmove(_buffer, _buffer + packetSize, _bufferSize - packetSize);
70 | _bufferSize -= packetSize;
71 |
72 | return vp;
73 | }
74 | }
75 | ++bufferBegin;
76 | }
77 | }
78 |
79 | return nil;
80 | }
81 |
82 | -(void)close
83 | {
84 | free(_buffer);
85 | [self.fileStream close];
86 | }
87 |
88 | @end
--------------------------------------------------------------------------------
/VTDemoOniPad/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "3x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "2x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "3x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "57x57",
31 | "scale" : "1x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "57x57",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "iphone",
45 | "size" : "60x60",
46 | "scale" : "3x"
47 | },
48 | {
49 | "size" : "29x29",
50 | "idiom" : "ipad",
51 | "filename" : "Icon-Small.png",
52 | "scale" : "1x"
53 | },
54 | {
55 | "size" : "29x29",
56 | "idiom" : "ipad",
57 | "filename" : "Icon-Small@2x.png",
58 | "scale" : "2x"
59 | },
60 | {
61 | "size" : "40x40",
62 | "idiom" : "ipad",
63 | "filename" : "Icon-Small-40.png",
64 | "scale" : "1x"
65 | },
66 | {
67 | "size" : "40x40",
68 | "idiom" : "ipad",
69 | "filename" : "Icon-Small-40@2x.png",
70 | "scale" : "2x"
71 | },
72 | {
73 | "idiom" : "ipad",
74 | "size" : "50x50",
75 | "scale" : "1x"
76 | },
77 | {
78 | "idiom" : "ipad",
79 | "size" : "50x50",
80 | "scale" : "2x"
81 | },
82 | {
83 | "idiom" : "ipad",
84 | "size" : "72x72",
85 | "scale" : "1x"
86 | },
87 | {
88 | "idiom" : "ipad",
89 | "size" : "72x72",
90 | "scale" : "2x"
91 | },
92 | {
93 | "size" : "76x76",
94 | "idiom" : "ipad",
95 | "filename" : "Icon-76.png",
96 | "scale" : "1x"
97 | },
98 | {
99 | "size" : "76x76",
100 | "idiom" : "ipad",
101 | "filename" : "Icon-76@2x.png",
102 | "scale" : "2x"
103 | },
104 | {
105 | "idiom" : "ipad",
106 | "size" : "83.5x83.5",
107 | "scale" : "2x"
108 | }
109 | ],
110 | "info" : {
111 | "version" : 1,
112 | "author" : "xcode"
113 | }
114 | }
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/xcuserdata/AJB.xcuserdatad/xcschemes/VTDemoOniPad.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/VTDemoOniPad/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/VTDemoOniPad/H264HwEncoderImpl.m:
--------------------------------------------------------------------------------
1 | //
2 | // H264HwEncoderImpl.m
3 | // h264v1
4 | //
5 | // Created by Ganvir, Manish on 3/31/15.
6 | // Copyright (c) 2015 Ganvir, Manish. All rights reserved.
7 | //
8 |
9 | #import "H264HwEncoderImpl.h"
10 |
11 | @import VideoToolbox;
12 | @import AVFoundation;
13 |
14 | @implementation H264HwEncoderImpl
15 | {
16 | VTCompressionSessionRef EncodingSession;
17 | dispatch_queue_t aQueue;
18 | CMFormatDescriptionRef format;
19 | CMSampleTimingInfo * timingInfo;
20 | BOOL initialized;
21 | int frameCount;
22 | NSData *sps;
23 | NSData *pps;
24 | }
25 | @synthesize error;
26 |
27 | - (void) initWithConfiguration
28 | {
29 | EncodingSession = nil;
30 | initialized = true;
31 | aQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
32 | frameCount = 0;
33 | sps = NULL;
34 | pps = NULL;
35 | }
36 |
37 | // VTCompressionOutputCallback: invoked by the compression session created in VTCompressionSessionCreate
38 | void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags,
39 | CMSampleBufferRef sampleBuffer )
40 | {
41 | NSLog(@"didCompressH264 called with status %d infoFlags %d", (int)status, (int)infoFlags);
42 | if (status != 0) return;
43 |
44 | if (!CMSampleBufferDataIsReady(sampleBuffer))
45 | {
46 | NSLog(@"didCompressH264 data is not ready ");
47 | return;
48 | }
49 | H264HwEncoderImpl* encoder = (__bridge H264HwEncoderImpl*)outputCallbackRefCon;
50 |
51 | // Check if we have got a key frame first
52 | bool keyframe = !CFDictionaryContainsKey( (CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), kCMSampleAttachmentKey_NotSync);
53 |
54 | if (keyframe)
55 | {
56 | CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
57 | // CFDictionaryRef extensionDict = CMFormatDescriptionGetExtensions(format);
58 | // Get the extensions
59 | // From the extensions get the dictionary with key "SampleDescriptionExtensionAtoms"
60 | // From the dict, get the value for the key "avcC"
61 |
62 | size_t sparameterSetSize, sparameterSetCount;
63 | const uint8_t *sparameterSet;
64 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0 );
65 | if (statusCode == noErr)
66 | {
67 | // Found sps and now check for pps
68 | size_t pparameterSetSize, pparameterSetCount;
69 | const uint8_t *pparameterSet;
70 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0 );
71 | if (statusCode == noErr)
72 | {
73 | // Found pps
74 | encoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
75 | encoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
76 | if (encoder->_delegate)
77 | {
78 | [encoder->_delegate gotSpsPps:encoder->sps pps:encoder->pps];
79 | }
80 | }
81 | }
82 | }
83 |
84 | CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
85 | size_t length, totalLength;
86 | char *dataPointer;
87 | OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
88 | if (statusCodeRet == noErr) {
89 |
90 | size_t bufferOffset = 0;
91 | static const int AVCCHeaderLength = 4;
92 | while (bufferOffset < totalLength - AVCCHeaderLength) {
93 |
94 | // Read the NAL unit length
95 | uint32_t NALUnitLength = 0;
96 | memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
97 |
98 | // Convert the length value from Big-endian to Little-endian
99 | NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
100 |
101 | NSData* data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
102 | [encoder->_delegate gotEncodedData:data isKeyFrame:keyframe];
103 |
104 | // Move to the next NAL unit in the block buffer
105 | bufferOffset += AVCCHeaderLength + NALUnitLength;
106 | }
107 |
108 | }
109 |
110 | }
111 |
112 | - (void) initEncode:(int)width height:(int)height // called only once
113 | {
114 | dispatch_sync(aQueue, ^{
115 |
116 | CFMutableDictionaryRef sessionAttributes = CFDictionaryCreateMutable(
117 | NULL,
118 | 0,
119 | &kCFTypeDictionaryKeyCallBacks,
120 | &kCFTypeDictionaryValueCallBacks);
121 |
122 | // The bitrate setting only takes effect while frames are being compressed, and sometimes not at all; when it is not set, the output size simply follows the video content.
123 | // int fixedBitrate = 2000 * 1024; // 2000 * 1024 -> assume 2 Mbits/s
124 | // CFNumberRef bitrateNum = CFNumberCreate(NULL, kCFNumberSInt32Type, &fixedBitrate);
125 | // CFDictionarySetValue(sessionAttributes, kVTCompressionPropertyKey_AverageBitRate, bitrateNum);
126 | // CFRelease(bitrateNum);
127 |
128 | // CMTime CMTimeMake(int64_t value, int32_t timescale): with timescale set to 1, changing this parameter has no visible effect
129 | // float fixedQuality = 1.0;
130 | // CFNumberRef qualityNum = CFNumberCreate(NULL, kCFNumberFloat32Type, &fixedQuality);
131 | // CFDictionarySetValue(sessionAttributes, kVTCompressionPropertyKey_Quality, qualityNum);
132 | // CFRelease(qualityNum);
133 |
134 | // seems to have no effect
135 | // int DataRateLimits = 2;
136 | // CFNumberRef DataRateLimitsNum = CFNumberCreate(NULL, kCFNumberSInt8Type, &DataRateLimits);
137 | // CFDictionarySetValue(sessionAttributes, kVTCompressionPropertyKey_DataRateLimits, DataRateLimitsNum);
138 | // CFRelease(DataRateLimitsNum);
139 |
140 | // create the compression session
141 | OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, sessionAttributes, NULL, NULL, didCompressH264, (__bridge void *)(self), &EncodingSession);
142 | NSLog(@"H264: VTCompressionSessionCreate %d", (int)status);
143 |
144 | if (status != 0)
145 | {
146 | NSLog(@"H264: Unable to create a H264 session");
147 | error = @"H264: Unable to create a H264 session";
148 | return ;
149 | }
150 |
151 | // set properties (these settings don't seem to take effect either)
152 | VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
153 | VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_High_5_2);
154 | VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
155 |
156 | // prepare the session to encode frames
157 | VTCompressionSessionPrepareToEncodeFrames(EncodingSession);
158 |
159 |
160 | });
161 | }
162 | // reached from the view controller's AVCaptureVideoDataOutputSampleBufferDelegate callback
163 | - (void) encode:(CMSampleBufferRef )sampleBuffer // called for every captured frame
164 | {
165 | dispatch_sync(aQueue, ^{
166 |
167 | frameCount++;
168 | // Get the CV Image buffer
169 | CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
170 | // CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
171 |
172 | // Create properties
173 | CMTime presentationTimeStamp = CMTimeMake(frameCount, 1); // the larger this value, the blurrier the picture
174 | // CMTime duration = CMTimeMake(1, DURATION);
175 | VTEncodeInfoFlags flags;
176 |
177 | // Pass it to the encoder
178 | OSStatus statusCode = VTCompressionSessionEncodeFrame(EncodingSession,
179 | imageBuffer,
180 | presentationTimeStamp,
181 | kCMTimeInvalid,
182 | NULL, NULL, &flags);
183 | // Check for error
184 | if (statusCode != noErr) {
185 | NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
186 | error = @"H264: VTCompressionSessionEncodeFrame failed ";
187 |
188 | // End the session
189 | VTCompressionSessionInvalidate(EncodingSession);
190 | CFRelease(EncodingSession);
191 | EncodingSession = NULL;
192 | error = NULL;
193 | return;
194 | }
195 | NSLog(@"H264: VTCompressionSessionEncodeFrame Success");
196 | });
197 |
198 | }
199 |
200 | - (void) End
201 | {
202 | // Mark the completion
203 | VTCompressionSessionCompleteFrames(EncodingSession, kCMTimeInvalid);
204 |
205 | // End the session
206 | VTCompressionSessionInvalidate(EncodingSession);
207 | CFRelease(EncodingSession);
208 | EncodingSession = NULL;
209 | error = NULL;
210 |
211 | }
212 |
213 |
214 | @end
215 |
--------------------------------------------------------------------------------
/H264 Encode and Decode.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 25BAEF651CDC444200D43A42 /* AAPLEAGLLayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 25BAEF621CDC444200D43A42 /* AAPLEAGLLayer.m */; };
11 | 25BAEF661CDC444200D43A42 /* VideoFileParser.m in Sources */ = {isa = PBXBuildFile; fileRef = 25BAEF641CDC444200D43A42 /* VideoFileParser.m */; };
12 | 25BAEF741CDC446E00D43A42 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 25BAEF701CDC446E00D43A42 /* LaunchScreen.storyboard */; };
13 | 25BAEF751CDC446E00D43A42 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 25BAEF721CDC446E00D43A42 /* Main.storyboard */; };
14 | 25BAEF781CDC448200D43A42 /* H264HwEncoderImpl.m in Sources */ = {isa = PBXBuildFile; fileRef = 25BAEF771CDC448200D43A42 /* H264HwEncoderImpl.m */; };
15 | 25BAEF7F1CDC449300D43A42 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 25BAEF7A1CDC449300D43A42 /* AppDelegate.m */; };
16 | 25BAEF801CDC449300D43A42 /* Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 25BAEF7B1CDC449300D43A42 /* Info.plist */; };
17 | 25BAEF811CDC449300D43A42 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 25BAEF7C1CDC449300D43A42 /* main.m */; };
18 | 25BAEF821CDC449300D43A42 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 25BAEF7E1CDC449300D43A42 /* ViewController.m */; };
19 | BF500D502330D57D0068D2B9 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = BF500D4F2330D57D0068D2B9 /* Images.xcassets */; };
20 | /* End PBXBuildFile section */
21 |
22 | /* Begin PBXFileReference section */
23 | 2572B9161CCF3A620062BD02 /* MediaPlayer.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MediaPlayer.framework; path = System/Library/Frameworks/MediaPlayer.framework; sourceTree = SDKROOT; };
24 | 2575C6111CCE03EF00160B73 /* H264 Encode and Decode.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "H264 Encode and Decode.app"; sourceTree = BUILT_PRODUCTS_DIR; };
25 | 25BAEF611CDC444200D43A42 /* AAPLEAGLLayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLEAGLLayer.h; sourceTree = "<group>"; };
26 | 25BAEF621CDC444200D43A42 /* AAPLEAGLLayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AAPLEAGLLayer.m; sourceTree = "<group>"; };
27 | 25BAEF631CDC444200D43A42 /* VideoFileParser.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VideoFileParser.h; sourceTree = "<group>"; };
28 | 25BAEF641CDC444200D43A42 /* VideoFileParser.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = VideoFileParser.m; sourceTree = "<group>"; };
29 | 25BAEF711CDC446E00D43A42 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
30 | 25BAEF731CDC446E00D43A42 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
31 | 25BAEF761CDC448200D43A42 /* H264HwEncoderImpl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = H264HwEncoderImpl.h; sourceTree = "<group>"; };
32 | 25BAEF771CDC448200D43A42 /* H264HwEncoderImpl.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = H264HwEncoderImpl.m; sourceTree = "<group>"; };
33 | 25BAEF791CDC449300D43A42 /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
34 | 25BAEF7A1CDC449300D43A42 /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
35 | 25BAEF7B1CDC449300D43A42 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
36 | 25BAEF7C1CDC449300D43A42 /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
37 | 25BAEF7D1CDC449300D43A42 /* ViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
38 | 25BAEF7E1CDC449300D43A42 /* ViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = "<group>"; };
39 | BF500D4F2330D57D0068D2B9 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Images.xcassets; path = "H264 Encode and Decode/Images.xcassets"; sourceTree = "<group>"; };
40 | /* End PBXFileReference section */
41 |
42 | /* Begin PBXFrameworksBuildPhase section */
43 | 2575C60E1CCE03EF00160B73 /* Frameworks */ = {
44 | isa = PBXFrameworksBuildPhase;
45 | buildActionMask = 2147483647;
46 | files = (
47 | );
48 | runOnlyForDeploymentPostprocessing = 0;
49 | };
50 | /* End PBXFrameworksBuildPhase section */
51 |
52 | /* Begin PBXGroup section */
53 | 2547F8D11CDC40E100C7DD90 /* Encode */ = {
54 | isa = PBXGroup;
55 | children = (
56 | 25BAEF761CDC448200D43A42 /* H264HwEncoderImpl.h */,
57 | 25BAEF771CDC448200D43A42 /* H264HwEncoderImpl.m */,
58 | );
59 | name = Encode;
60 | sourceTree = "<group>";
61 | };
62 | 2547F8D21CDC410200C7DD90 /* Decode */ = {
63 | isa = PBXGroup;
64 | children = (
65 | 25BAEF611CDC444200D43A42 /* AAPLEAGLLayer.h */,
66 | 25BAEF621CDC444200D43A42 /* AAPLEAGLLayer.m */,
67 | 25BAEF631CDC444200D43A42 /* VideoFileParser.h */,
68 | 25BAEF641CDC444200D43A42 /* VideoFileParser.m */,
69 | );
70 | name = Decode;
71 | sourceTree = "<group>";
72 | };
73 | 2575C6081CCE03EF00160B73 = {
74 | isa = PBXGroup;
75 | children = (
76 | BF500D4F2330D57D0068D2B9 /* Images.xcassets */,
77 | 2572B9161CCF3A620062BD02 /* MediaPlayer.framework */,
78 | 2575C6131CCE03EF00160B73 /* VTDemoOniPad */,
79 | 2575C6121CCE03EF00160B73 /* Products */,
80 | );
81 | sourceTree = "<group>";
82 | };
83 | 2575C6121CCE03EF00160B73 /* Products */ = {
84 | isa = PBXGroup;
85 | children = (
86 | 2575C6111CCE03EF00160B73 /* H264 Encode and Decode.app */,
87 | );
88 | name = Products;
89 | sourceTree = "<group>";
90 | };
91 | 2575C6131CCE03EF00160B73 /* VTDemoOniPad */ = {
92 | isa = PBXGroup;
93 | children = (
94 | 2575C6141CCE03EF00160B73 /* Supporting Files */,
95 | 25BAEF701CDC446E00D43A42 /* LaunchScreen.storyboard */,
96 | 25BAEF721CDC446E00D43A42 /* Main.storyboard */,
97 | 2547F8D21CDC410200C7DD90 /* Decode */,
98 | 2547F8D11CDC40E100C7DD90 /* Encode */,
99 | );
100 | path = VTDemoOniPad;
101 | sourceTree = "<group>";
102 | };
103 | 2575C6141CCE03EF00160B73 /* Supporting Files */ = {
104 | isa = PBXGroup;
105 | children = (
106 | 25BAEF791CDC449300D43A42 /* AppDelegate.h */,
107 | 25BAEF7A1CDC449300D43A42 /* AppDelegate.m */,
108 | 25BAEF7B1CDC449300D43A42 /* Info.plist */,
109 | 25BAEF7C1CDC449300D43A42 /* main.m */,
110 | 25BAEF7D1CDC449300D43A42 /* ViewController.h */,
111 | 25BAEF7E1CDC449300D43A42 /* ViewController.m */,
112 | );
113 | name = "Supporting Files";
114 | sourceTree = "<group>";
115 | };
116 | /* End PBXGroup section */
117 |
118 | /* Begin PBXNativeTarget section */
119 | 2575C6101CCE03EF00160B73 /* H264 Encode and Decode */ = {
120 | isa = PBXNativeTarget;
121 | buildConfigurationList = 2575C6281CCE03EF00160B73 /* Build configuration list for PBXNativeTarget "H264 Encode and Decode" */;
122 | buildPhases = (
123 | 2575C60D1CCE03EF00160B73 /* Sources */,
124 | 2575C60E1CCE03EF00160B73 /* Frameworks */,
125 | 2575C60F1CCE03EF00160B73 /* Resources */,
126 | );
127 | buildRules = (
128 | );
129 | dependencies = (
130 | );
131 | name = "H264 Encode and Decode";
132 | productName = VTDemoOniPad;
133 | productReference = 2575C6111CCE03EF00160B73 /* H264 Encode and Decode.app */;
134 | productType = "com.apple.product-type.application";
135 | };
136 | /* End PBXNativeTarget section */
137 |
138 | /* Begin PBXProject section */
139 | 2575C6091CCE03EF00160B73 /* Project object */ = {
140 | isa = PBXProject;
141 | attributes = {
142 | LastUpgradeCheck = 0730;
143 | ORGANIZATIONNAME = AJB;
144 | TargetAttributes = {
145 | 2575C6101CCE03EF00160B73 = {
146 | CreatedOnToolsVersion = 7.3;
147 | DevelopmentTeam = 4985RR987S;
148 | };
149 | };
150 | };
151 | buildConfigurationList = 2575C60C1CCE03EF00160B73 /* Build configuration list for PBXProject "H264 Encode and Decode" */;
152 | compatibilityVersion = "Xcode 3.2";
153 | developmentRegion = English;
154 | hasScannedForEncodings = 0;
155 | knownRegions = (
156 | English,
157 | en,
158 | Base,
159 | );
160 | mainGroup = 2575C6081CCE03EF00160B73;
161 | productRefGroup = 2575C6121CCE03EF00160B73 /* Products */;
162 | projectDirPath = "";
163 | projectRoot = "";
164 | targets = (
165 | 2575C6101CCE03EF00160B73 /* H264 Encode and Decode */,
166 | );
167 | };
168 | /* End PBXProject section */
169 |
170 | /* Begin PBXResourcesBuildPhase section */
171 | 2575C60F1CCE03EF00160B73 /* Resources */ = {
172 | isa = PBXResourcesBuildPhase;
173 | buildActionMask = 2147483647;
174 | files = (
175 | 25BAEF741CDC446E00D43A42 /* LaunchScreen.storyboard in Resources */,
176 | BF500D502330D57D0068D2B9 /* Images.xcassets in Resources */,
177 | 25BAEF751CDC446E00D43A42 /* Main.storyboard in Resources */,
178 | 25BAEF801CDC449300D43A42 /* Info.plist in Resources */,
179 | );
180 | runOnlyForDeploymentPostprocessing = 0;
181 | };
182 | /* End PBXResourcesBuildPhase section */
183 |
184 | /* Begin PBXSourcesBuildPhase section */
185 | 2575C60D1CCE03EF00160B73 /* Sources */ = {
186 | isa = PBXSourcesBuildPhase;
187 | buildActionMask = 2147483647;
188 | files = (
189 | 25BAEF821CDC449300D43A42 /* ViewController.m in Sources */,
190 | 25BAEF811CDC449300D43A42 /* main.m in Sources */,
191 | 25BAEF7F1CDC449300D43A42 /* AppDelegate.m in Sources */,
192 | 25BAEF651CDC444200D43A42 /* AAPLEAGLLayer.m in Sources */,
193 | 25BAEF661CDC444200D43A42 /* VideoFileParser.m in Sources */,
194 | 25BAEF781CDC448200D43A42 /* H264HwEncoderImpl.m in Sources */,
195 | );
196 | runOnlyForDeploymentPostprocessing = 0;
197 | };
198 | /* End PBXSourcesBuildPhase section */
199 |
200 | /* Begin PBXVariantGroup section */
201 | 25BAEF701CDC446E00D43A42 /* LaunchScreen.storyboard */ = {
202 | isa = PBXVariantGroup;
203 | children = (
204 | 25BAEF711CDC446E00D43A42 /* Base */,
205 | );
206 | name = LaunchScreen.storyboard;
207 | sourceTree = "<group>";
208 | };
209 | 25BAEF721CDC446E00D43A42 /* Main.storyboard */ = {
210 | isa = PBXVariantGroup;
211 | children = (
212 | 25BAEF731CDC446E00D43A42 /* Base */,
213 | );
214 | name = Main.storyboard;
215 | sourceTree = "<group>";
216 | };
217 | /* End PBXVariantGroup section */
218 |
219 | /* Begin XCBuildConfiguration section */
220 | 2575C6261CCE03EF00160B73 /* Debug */ = {
221 | isa = XCBuildConfiguration;
222 | buildSettings = {
223 | ALWAYS_SEARCH_USER_PATHS = NO;
224 | CLANG_ANALYZER_NONNULL = YES;
225 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
226 | CLANG_CXX_LIBRARY = "libc++";
227 | CLANG_ENABLE_MODULES = YES;
228 | CLANG_ENABLE_OBJC_ARC = YES;
229 | CLANG_WARN_BOOL_CONVERSION = YES;
230 | CLANG_WARN_CONSTANT_CONVERSION = YES;
231 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
232 | CLANG_WARN_EMPTY_BODY = YES;
233 | CLANG_WARN_ENUM_CONVERSION = YES;
234 | CLANG_WARN_INT_CONVERSION = YES;
235 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
236 | CLANG_WARN_UNREACHABLE_CODE = YES;
237 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
238 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
239 | COPY_PHASE_STRIP = NO;
240 | DEBUG_INFORMATION_FORMAT = dwarf;
241 | ENABLE_STRICT_OBJC_MSGSEND = YES;
242 | ENABLE_TESTABILITY = YES;
243 | GCC_C_LANGUAGE_STANDARD = gnu99;
244 | GCC_DYNAMIC_NO_PIC = NO;
245 | GCC_NO_COMMON_BLOCKS = YES;
246 | GCC_OPTIMIZATION_LEVEL = 0;
247 | GCC_PREPROCESSOR_DEFINITIONS = (
248 | "DEBUG=1",
249 | "$(inherited)",
250 | );
251 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
252 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
253 | GCC_WARN_UNDECLARED_SELECTOR = YES;
254 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
255 | GCC_WARN_UNUSED_FUNCTION = YES;
256 | GCC_WARN_UNUSED_VARIABLE = YES;
257 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
258 | MTL_ENABLE_DEBUG_INFO = YES;
259 | ONLY_ACTIVE_ARCH = YES;
260 | SDKROOT = iphoneos;
261 | };
262 | name = Debug;
263 | };
264 | 2575C6271CCE03EF00160B73 /* Release */ = {
265 | isa = XCBuildConfiguration;
266 | buildSettings = {
267 | ALWAYS_SEARCH_USER_PATHS = NO;
268 | CLANG_ANALYZER_NONNULL = YES;
269 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
270 | CLANG_CXX_LIBRARY = "libc++";
271 | CLANG_ENABLE_MODULES = YES;
272 | CLANG_ENABLE_OBJC_ARC = YES;
273 | CLANG_WARN_BOOL_CONVERSION = YES;
274 | CLANG_WARN_CONSTANT_CONVERSION = YES;
275 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
276 | CLANG_WARN_EMPTY_BODY = YES;
277 | CLANG_WARN_ENUM_CONVERSION = YES;
278 | CLANG_WARN_INT_CONVERSION = YES;
279 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
280 | CLANG_WARN_UNREACHABLE_CODE = YES;
281 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
282 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
283 | COPY_PHASE_STRIP = NO;
284 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
285 | ENABLE_NS_ASSERTIONS = NO;
286 | ENABLE_STRICT_OBJC_MSGSEND = YES;
287 | GCC_C_LANGUAGE_STANDARD = gnu99;
288 | GCC_NO_COMMON_BLOCKS = YES;
289 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
290 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
291 | GCC_WARN_UNDECLARED_SELECTOR = YES;
292 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
293 | GCC_WARN_UNUSED_FUNCTION = YES;
294 | GCC_WARN_UNUSED_VARIABLE = YES;
295 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
296 | MTL_ENABLE_DEBUG_INFO = NO;
297 | SDKROOT = iphoneos;
298 | VALIDATE_PRODUCT = YES;
299 | };
300 | name = Release;
301 | };
302 | 2575C6291CCE03EF00160B73 /* Debug */ = {
303 | isa = XCBuildConfiguration;
304 | buildSettings = {
305 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
306 | DEVELOPMENT_TEAM = 4985RR987S;
307 | INFOPLIST_FILE = VTDemoOniPad/Info.plist;
308 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
309 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
310 | PRODUCT_BUNDLE_IDENTIFIER = LevyGG.VTDemoOniPad;
311 | PRODUCT_NAME = "H264 Encode and Decode";
312 | TARGETED_DEVICE_FAMILY = "1,2";
313 | };
314 | name = Debug;
315 | };
316 | 2575C62A1CCE03EF00160B73 /* Release */ = {
317 | isa = XCBuildConfiguration;
318 | buildSettings = {
319 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
320 | DEVELOPMENT_TEAM = 4985RR987S;
321 | INFOPLIST_FILE = VTDemoOniPad/Info.plist;
322 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
323 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
324 | PRODUCT_BUNDLE_IDENTIFIER = LevyGG.VTDemoOniPad;
325 | PRODUCT_NAME = "H264 Encode and Decode";
326 | TARGETED_DEVICE_FAMILY = "1,2";
327 | };
328 | name = Release;
329 | };
330 | /* End XCBuildConfiguration section */
331 |
332 | /* Begin XCConfigurationList section */
333 | 2575C60C1CCE03EF00160B73 /* Build configuration list for PBXProject "H264 Encode and Decode" */ = {
334 | isa = XCConfigurationList;
335 | buildConfigurations = (
336 | 2575C6261CCE03EF00160B73 /* Debug */,
337 | 2575C6271CCE03EF00160B73 /* Release */,
338 | );
339 | defaultConfigurationIsVisible = 0;
340 | defaultConfigurationName = Release;
341 | };
342 | 2575C6281CCE03EF00160B73 /* Build configuration list for PBXNativeTarget "H264 Encode and Decode" */ = {
343 | isa = XCConfigurationList;
344 | buildConfigurations = (
345 | 2575C6291CCE03EF00160B73 /* Debug */,
346 | 2575C62A1CCE03EF00160B73 /* Release */,
347 | );
348 | defaultConfigurationIsVisible = 0;
349 | defaultConfigurationName = Release;
350 | };
351 | /* End XCConfigurationList section */
352 | };
353 | rootObject = 2575C6091CCE03EF00160B73 /* Project object */;
354 | }
355 |
--------------------------------------------------------------------------------
/VTDemoOniPad/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // VTDemoOniPad
4 | //
5 | // Created by AJB on 16/4/25.
6 | //  Copyright © 2016 AJB. All rights reserved.
7 | //
8 |
9 | #import "ViewController.h"
10 |
11 | // Decoding
12 | #import "VideoFileParser.h"
13 | #import "AAPLEAGLLayer.h"
14 | #import <VideoToolbox/VideoToolbox.h>
15 |
16 | @interface ViewController ()
17 | {
18 | // Encoding
19 | H264HwEncoderImpl *h264Encoder;
20 | AVCaptureSession *captureSession;
21 | bool startCalled;
22 | AVCaptureVideoPreviewLayer *previewLayer;
23 | NSString *h264FileSavePath;
24 | int fd;
25 | NSFileHandle *fileHandle;
26 | AVCaptureConnection* connection;
27 | AVSampleBufferDisplayLayer *sbDisplayLayer;
28 |
29 | // Decoding
30 | uint8_t *_sps;
31 | NSInteger _spsSize;
32 | uint8_t *_pps;
33 | NSInteger _ppsSize;
34 | VTDecompressionSessionRef _deocderSession;
35 | CMVideoFormatDescriptionRef _decoderFormatDescription;
36 | AAPLEAGLLayer *_glLayer; // player
37 | bool playCalled;
38 | }
39 |
40 | @property (weak, nonatomic) IBOutlet UIButton *startStopBtn;
41 | @property (weak, nonatomic) IBOutlet UIButton *playerBtn;
42 |
43 | @end
44 |
45 | // Decoding
46 | static void didDecompress( void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){
47 |
48 | CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
49 | *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
50 | }
51 |
52 | @implementation ViewController
53 |
54 | - (void)viewDidLoad {
55 | [super viewDidLoad];
56 | // Do any additional setup after loading the view, typically from a nib.
57 | h264Encoder = [[H264HwEncoderImpl alloc] init];
58 | [h264Encoder initWithConfiguration];
59 | startCalled = true;
60 | playCalled = true;
61 |
62 | // store the output file in the Documents folder
63 | NSFileManager *fileManager = [NSFileManager defaultManager];
64 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
65 | NSString *documentsDirectory = [paths objectAtIndex:0];
66 | h264FileSavePath = [documentsDirectory stringByAppendingPathComponent:@"test.h264"];
67 | [fileManager removeItemAtPath:h264FileSavePath error:nil];
68 | [fileManager createFileAtPath:h264FileSavePath contents:nil attributes:nil];
69 |
70 | }
71 |
72 | #pragma mark - Decoding
73 | -(BOOL)initH264Decoder {
74 | if(_deocderSession) {
75 | return YES;
76 | }
77 |
78 | const uint8_t* const parameterSetPointers[2] = { _sps, _pps };
79 | const size_t parameterSetSizes[2] = { _spsSize, _ppsSize };
80 | OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
81 | 2, //param count
82 | parameterSetPointers,
83 | parameterSetSizes,
84 | 4, //nal start code size
85 | &_decoderFormatDescription);
86 |
87 | if(status == noErr) {
88 | CFDictionaryRef attrs = NULL;
89 | const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
90 | // kCVPixelFormatType_420YpCbCr8Planar is YUV420
91 | // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12
92 | uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
93 | const void *values[] = { CFNumberCreate(NULL, kCFNumberSInt32Type, &v) };
94 | attrs = CFDictionaryCreate(NULL, keys, values, 1, NULL, NULL);
95 |
96 | VTDecompressionOutputCallbackRecord callBackRecord;
97 | callBackRecord.decompressionOutputCallback = didDecompress;
98 | callBackRecord.decompressionOutputRefCon = NULL;
99 |
100 | status = VTDecompressionSessionCreate(kCFAllocatorDefault,
101 | _decoderFormatDescription,
102 | NULL, attrs,
103 | &callBackRecord,
104 | &_deocderSession);
105 | CFRelease(attrs);
106 | } else {
107 | NSLog(@"IOS8VT: reset decoder session failed status=%d", (int)status);
108 | }
109 |
110 | return YES;
111 | }
112 | -(void)clearH264Deocder {
113 | if(_deocderSession) {
114 | VTDecompressionSessionInvalidate(_deocderSession);
115 | CFRelease(_deocderSession);
116 | _deocderSession = NULL;
117 | }
118 |
119 | if(_decoderFormatDescription) {
120 | CFRelease(_decoderFormatDescription);
121 | _decoderFormatDescription = NULL;
122 | }
123 |
124 | free(_sps);
125 | free(_pps);
126 | _spsSize = _ppsSize = 0;
127 | }
128 |
129 | -(CVPixelBufferRef)decode:(VideoPacket*)vp {
130 | CVPixelBufferRef outputPixelBuffer = NULL;
131 |
132 | CMBlockBufferRef blockBuffer = NULL;
133 | OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
134 | (void*)vp.buffer, vp.size,
135 | kCFAllocatorNull,
136 | NULL, 0, vp.size,
137 | 0, &blockBuffer);
138 | if(status == kCMBlockBufferNoErr) {
139 | CMSampleBufferRef sampleBuffer = NULL;
140 | const size_t sampleSizeArray[] = {vp.size};
141 | status = CMSampleBufferCreateReady(kCFAllocatorDefault,
142 | blockBuffer,
143 | _decoderFormatDescription ,
144 | 1, 0, NULL, 1, sampleSizeArray,
145 | &sampleBuffer);
146 | if (status == kCMBlockBufferNoErr && sampleBuffer) {
147 | VTDecodeFrameFlags flags = 0;
148 | VTDecodeInfoFlags flagOut = 0;
149 | OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_deocderSession,
150 | sampleBuffer,
151 | flags,
152 | &outputPixelBuffer,
153 | &flagOut);
154 |
155 | if(decodeStatus == kVTInvalidSessionErr) {
156 | NSLog(@"IOS8VT: Invalid session, reset decoder session");
157 | } else if(decodeStatus == kVTVideoDecoderBadDataErr) {
158 | NSLog(@"IOS8VT: decode failed status=%d(Bad data)", (int)decodeStatus);
159 | } else if(decodeStatus != noErr) {
160 | NSLog(@"IOS8VT: decode failed status=%d", (int)decodeStatus);
161 | }
162 |
163 | CFRelease(sampleBuffer);
164 | }
165 | CFRelease(blockBuffer);
166 | }
167 |
168 | return outputPixelBuffer;
169 | }
170 |
171 | -(void)decodeFile:(NSString*)fileName fileExt:(NSString*)fileExt {
172 | VideoFileParser *parser = [[VideoFileParser alloc] init];
173 | [parser open:h264FileSavePath];
174 |
175 | VideoPacket *vp = nil;
176 | while(true) {
177 | vp = [parser nextPacket];
178 | if(vp == nil) {
179 | break;
180 | }
181 |
182 | uint32_t nalSize = (uint32_t)(vp.size - 4);
183 | uint8_t *pNalSize = (uint8_t*)(&nalSize);
184 | vp.buffer[0] = *(pNalSize + 3);
185 | vp.buffer[1] = *(pNalSize + 2);
186 | vp.buffer[2] = *(pNalSize + 1);
187 | vp.buffer[3] = *(pNalSize);
188 |
189 | CVPixelBufferRef pixelBuffer = NULL;
190 | int nalType = vp.buffer[4] & 0x1F;
191 | switch (nalType) {
192 | case 0x05:
193 | NSLog(@"Nal type is IDR frame");
194 | if([self initH264Decoder]) {
195 | pixelBuffer = [self decode:vp];
196 | }
197 | break;
198 | case 0x07:
199 | NSLog(@"Nal type is SPS");
200 | _spsSize = vp.size - 4;
201 | _sps = malloc(_spsSize);
202 | memcpy(_sps, vp.buffer + 4, _spsSize);
203 | break;
204 | case 0x08:
205 | NSLog(@"Nal type is PPS");
206 | _ppsSize = vp.size - 4;
207 | _pps = malloc(_ppsSize);
208 | memcpy(_pps, vp.buffer + 4, _ppsSize);
209 | break;
210 |
211 | default:
212 | NSLog(@"Nal type is B/P frame");
213 | pixelBuffer = [self decode:vp];
214 | break;
215 | }
216 |
217 | if(pixelBuffer) {
218 | dispatch_sync(dispatch_get_main_queue(), ^{
219 | _glLayer.pixelBuffer = pixelBuffer;
220 | });
221 |
222 | CVPixelBufferRelease(pixelBuffer);
223 | }
224 |
225 | NSLog(@"Read Nalu size %ld", (long)vp.size);
226 | }
227 | [parser close];
228 | }
229 |
230 | - (IBAction)playerAction:(id)sender {
231 |
232 | if (playCalled==true) {
233 | playCalled = false;
234 | [_playerBtn setTitle:@"close" forState:UIControlStateNormal];
235 | // Decoding
236 | _glLayer = [[AAPLEAGLLayer alloc] initWithFrame:CGRectMake(0, 20, self.view.frame.size.width, (self.view.frame.size.width * 9) / 16)];
237 | [self.view.layer addSublayer:_glLayer];
238 |
239 | dispatch_async(dispatch_get_global_queue(0, 0), ^{
240 | [self decodeFile:@"test" fileExt:@"h264"];
241 | });
242 | return;
243 | }
244 | if(playCalled==false){
245 | playCalled = true;
246 | [_playerBtn setTitle:@"play" forState:UIControlStateNormal];
247 | [self clearH264Deocder];
248 | [_glLayer removeFromSuperlayer];
249 | }
250 |
251 | }
252 |
253 | #pragma mark - Encoding
254 | // Called when start/stop button is pressed
255 | - (IBAction)StartStopAction:(id)sender {
256 |
257 | if (startCalled)
258 | {
259 | [self startCamera];
260 | startCalled = false;
261 | [_startStopBtn setTitle:@"Stop" forState:UIControlStateNormal];
262 | }
263 | else
264 | {
265 | [_startStopBtn setTitle:@"Start" forState:UIControlStateNormal];
266 | startCalled = true;
267 | [self stopCamera];
268 | [h264Encoder End];
269 | }
270 |
271 | }
272 |
273 | - (void) startCamera
274 | {
275 | // make input device
276 |
277 | NSError *deviceError;
278 |
279 | AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
280 |
281 | AVCaptureDeviceInput *inputDevice = [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice error:&deviceError];
282 |
283 | // make output device
284 | AVCaptureVideoDataOutput *outputDevice = [[AVCaptureVideoDataOutput alloc] init];
285 | NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
286 | NSNumber* val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
287 | NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];
288 | outputDevice.videoSettings = videoSettings;
289 |
290 | [outputDevice setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
291 |
292 | // initialize capture session
293 |
294 | captureSession = [[AVCaptureSession alloc] init];
295 |
296 | [captureSession addInput:inputDevice];
297 | [captureSession addOutput:outputDevice];
298 |
299 | // begin configuration for the AVCaptureSession
300 | [captureSession beginConfiguration];
301 |
302 | // picture resolution
303 | // Request 720p capture to match the encoder's 1280x720 configuration
304 | [captureSession setSessionPreset:AVCaptureSessionPreset1280x720];
305 |
306 | connection = [outputDevice connectionWithMediaType:AVMediaTypeVideo];
307 | [self setRelativeVideoOrientation];
308 |
309 | NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
310 |
311 | [notify addObserver:self
312 | selector:@selector(statusBarOrientationDidChange:)
313 | name:@"StatusBarOrientationDidChange"
314 | object:nil];
315 |
316 |
317 | [captureSession commitConfiguration];
318 |
319 | // Add a second playback layer; it displays the raw capture CMSampleBuffers directly
320 | AVSampleBufferDisplayLayer *sb = [[AVSampleBufferDisplayLayer alloc]init];
321 | sb.backgroundColor = [UIColor blackColor].CGColor;
322 | sbDisplayLayer = sb;
323 | sb.videoGravity = AVLayerVideoGravityResizeAspect;
324 | sbDisplayLayer.frame = CGRectMake(0, 20, self.view.frame.size.width, 600);
325 | [self.view.layer addSublayer:sbDisplayLayer];
326 |
327 |
328 | // Start capturing; encoding happens in the sample buffer callback below
329 | [captureSession startRunning];
330 |
331 | // An earlier version opened the file with POSIX open(); NSFileHandle is used instead
332 | //fd = open([h264FileSavePath UTF8String], O_RDWR);
333 | fileHandle = [NSFileHandle fileHandleForWritingAtPath:h264FileSavePath]; // returns nil unless the file already exists
334 |
335 | [h264Encoder initEncode:1280 height:720];
336 | h264Encoder.delegate = self;
337 |
338 |
339 | }
340 | - (void)statusBarOrientationDidChange:(NSNotification*)notification {
341 | [self setRelativeVideoOrientation];
342 | }
343 |
344 |
345 | - (void)setRelativeVideoOrientation {
346 | switch ([[UIApplication sharedApplication] statusBarOrientation]) { // not -[UIDevice orientation]: its landscape values are swapped relative to UIInterfaceOrientation
347 | case UIInterfaceOrientationPortrait:
348 | #if defined(__IPHONE_8_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_8_0
349 | case UIInterfaceOrientationUnknown:
350 | #endif
351 | connection.videoOrientation = AVCaptureVideoOrientationPortrait;
352 |
353 | break;
354 | case UIInterfaceOrientationPortraitUpsideDown:
355 | connection.videoOrientation =
356 | AVCaptureVideoOrientationPortraitUpsideDown;
357 | break;
358 | case UIInterfaceOrientationLandscapeLeft:
359 | connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
360 | break;
361 | case UIInterfaceOrientationLandscapeRight:
362 | connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
363 | break;
364 | default:
365 | break;
366 | }
367 | }
368 | - (void) stopCamera
369 | {
370 | [captureSession stopRunning];
371 | [previewLayer removeFromSuperlayer];
372 | //close(fd);
373 | [fileHandle closeFile];
374 | fileHandle = nil;
375 | [sbDisplayLayer removeFromSuperlayer];
376 | }
377 |
378 | #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate (camera frame callbacks)
379 | -(void) captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
380 | {
381 | CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer( sampleBuffer );
382 | CGSize imageSize = CVImageBufferGetEncodedSize( imageBuffer );
383 | NSLog(@"ImageBufferSize------width:%.1f,heigh:%.1f",imageSize.width,imageSize.height);
384 |
385 | // Feed the sample buffer straight to the AVSampleBufferDisplayLayer for live preview
386 | [sbDisplayLayer enqueueSampleBuffer:sampleBuffer];
387 |
388 | [h264Encoder encode:sampleBuffer];
389 |
390 |
391 |
392 | }
393 |
394 | #pragma mark - H264HwEncoderImplDelegate (encoder callbacks)
395 | - (void)gotSpsPps:(NSData*)sps pps:(NSData*)pps
396 | {
397 | NSLog(@"gotSpsPps %d %d", (int)[sps length], (int)[pps length]);
398 | //[sps writeToFile:h264FileSavePath atomically:YES];
399 | //[pps writeToFile:h264FileSavePath atomically:YES];
400 | // write(fd, [sps bytes], [sps length]);
401 | //write(fd, [pps bytes], [pps length]);
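    | // Prefix SPS and PPS with the 4-byte Annex B start code so the raw .h264
    | // file can later be split back into NALUs by VideoFileParser.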
402 | const char bytes[] = "\x00\x00\x00\x01";
403 | size_t length = (sizeof bytes) - 1; // string literals carry an implicit trailing '\0'
404 | NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
405 | [fileHandle writeData:byteHeader];
406 | [fileHandle writeData:sps];
407 | [fileHandle writeData:byteHeader];
408 | [fileHandle writeData:pps];
409 |
410 | }
411 | - (void)gotEncodedData:(NSData*)data isKeyFrame:(BOOL)isKeyFrame
412 | {
413 | NSLog(@"gotEncodedData %d", (int)[data length]);
414 | // static int framecount = 1;
415 |
416 | // [data writeToFile:h264FileSavePath atomically:YES];
417 | //write(fd, [data bytes], [data length]);
418 | if (fileHandle != nil)
419 | {
420 | const char bytes[] = "\x00\x00\x00\x01";
421 | size_t length = (sizeof bytes) - 1; // string literals carry an implicit trailing '\0'
422 | NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
423 |
424 |
425 | /*NSData *UnitHeader;
426 | if(isKeyFrame)
427 | {
428 | char header[2];
429 | header[0] = '\x65';
430 | UnitHeader = [NSData dataWithBytes:header length:1];
431 | framecount = 1;
432 | }
433 | else
434 | {
435 | char header[4];
436 | header[0] = '\x41';
437 | //header[1] = '\x9A';
438 | //header[2] = framecount;
439 | UnitHeader = [NSData dataWithBytes:header length:1];
440 | framecount++;
441 | }*/
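    | // Every encoded NALU, key frame or not, gets the same 4-byte start code;
    | // the frame type can be recovered from the NALU header when reading back.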
442 | [fileHandle writeData:byteHeader];
443 | //[fileHandle writeData:UnitHeader];
444 | [fileHandle writeData:data];
445 | }
446 | }
447 |
448 |
449 | @end
450 |
--------------------------------------------------------------------------------
/VTDemoOniPad/AAPLEAGLLayer.m:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (C) 2014 Apple Inc. All Rights Reserved.
3 | See LICENSE.txt for this sample’s licensing information
4 |
5 | Abstract:
6 |
7 | This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner.
8 |
9 | */
10 |
11 | #import "AAPLEAGLLayer.h"
12 |
13 | #import <AVFoundation/AVUtilities.h>
14 | #import <mach/mach_time.h>
15 | #include <AVFoundation/AVFoundation.h>
16 | #import <UIKit/UIScreen.h>
17 | #include <OpenGLES/EAGL.h>
18 | #include <OpenGLES/ES2/gl.h>
19 | #include <OpenGLES/ES2/glext.h>
20 |
21 | // Uniform index.
22 | enum
23 | {
24 | UNIFORM_Y,
25 | UNIFORM_UV,
26 | UNIFORM_ROTATION_ANGLE,
27 | UNIFORM_COLOR_CONVERSION_MATRIX,
28 | NUM_UNIFORMS
29 | };
30 | GLint uniforms[NUM_UNIFORMS];
31 |
32 | // Attribute index.
33 | enum
34 | {
35 | ATTRIB_VERTEX,
36 | ATTRIB_TEXCOORD,
37 | NUM_ATTRIBUTES
38 | };
39 |
40 | // Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)
41 |
42 | // BT.601, which is the standard for SDTV.
43 | static const GLfloat kColorConversion601[] = {
44 | 1.164, 1.164, 1.164,
45 | 0.0, -0.392, 2.017,
46 | 1.596, -0.813, 0.0,
47 | };
48 |
49 | // BT.709, which is the standard for HDTV.
50 | static const GLfloat kColorConversion709[] = {
51 | 1.164, 1.164, 1.164,
52 | 0.0, -0.213, 2.112,
53 | 1.793, -0.533, 0.0,
54 | };
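    | // Both matrices are listed in column-major order, matching
    | // glUniformMatrix3fv with transpose == GL_FALSE: the first column scales
    | // Y, the second applies Cb (U), and the third applies Cr (V).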
55 |
56 |
57 |
58 | @interface AAPLEAGLLayer ()
59 | {
60 | // The pixel dimensions of the CAEAGLLayer.
61 | GLint _backingWidth;
62 | GLint _backingHeight;
63 |
64 | EAGLContext *_context;
65 | CVOpenGLESTextureRef _lumaTexture;
66 | CVOpenGLESTextureRef _chromaTexture;
67 |
68 | GLuint _frameBufferHandle;
69 | GLuint _colorBufferHandle;
70 |
71 | const GLfloat *_preferredConversion;
72 | }
73 | @property GLuint program;
74 |
75 | @end
76 | @implementation AAPLEAGLLayer
77 | @synthesize pixelBuffer = _pixelBuffer;
78 |
79 | -(CVPixelBufferRef) pixelBuffer
80 | {
81 | return _pixelBuffer;
82 | }
83 |
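    | // The setter takes its own retain and releases the previous buffer, so a
    | // caller may release its reference immediately after assigning (as the
    | // view controller does once the frame is handed to the main queue).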
84 | - (void)setPixelBuffer:(CVPixelBufferRef)pb
85 | {
86 | if(_pixelBuffer) {
87 | CVPixelBufferRelease(_pixelBuffer);
88 | }
89 | _pixelBuffer = CVPixelBufferRetain(pb);
90 |
91 | int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer);
92 | int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer);
93 | [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight];
94 | }
95 |
96 | - (instancetype)initWithFrame:(CGRect)frame
97 | {
98 | self = [super init];
99 | if (self) {
100 | CGFloat scale = [[UIScreen mainScreen] scale];
101 | self.contentsScale = scale;
102 |
103 | self.opaque = YES;
104 | self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]};
105 |
106 | [self setFrame:frame];
107 |
108 | // Set the context into which the frames will be drawn.
109 | _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
110 |
111 | if (!_context) {
112 | return nil;
113 | }
114 |
115 | // Set the default conversion to BT.709, which is the standard for HDTV.
116 | _preferredConversion = kColorConversion709;
117 |
118 | [self setupGL];
119 | }
120 |
121 | return self;
122 | }
123 |
124 | - (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight
125 | {
126 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
127 | return;
128 | }
129 |
130 | if(pixelBuffer == NULL) {
131 | NSLog(@"Pixel buffer is null");
132 | return;
133 | }
134 |
135 | CVReturn err;
136 |
137 | size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
138 |
139 | /*
140 | Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix.
141 | */
142 | CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
143 |
144 | if (colorAttachments != NULL && CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
145 | _preferredConversion = kColorConversion601;
146 | }
147 | else {
148 | _preferredConversion = kColorConversion709;
149 | }
150 |
151 | /*
152 | CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef.
153 | */
154 |
155 | /*
156 | Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane.
157 | */
158 |
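    | // Note: this sample creates and destroys a texture cache on every frame,
    | // which works but is wasteful; normally the cache would be created once
    | // (e.g. in setupGL) and kept in an instance variable.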
159 | CVOpenGLESTextureCacheRef _videoTextureCache;
160 |
161 | // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
162 | err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
163 | if (err != noErr) {
164 | NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
165 | return;
166 | }
167 |
168 | glActiveTexture(GL_TEXTURE0);
169 |
170 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
171 | _videoTextureCache,
172 | pixelBuffer,
173 | NULL,
174 | GL_TEXTURE_2D,
175 | GL_RED_EXT,
176 | frameWidth,
177 | frameHeight,
178 | GL_RED_EXT,
179 | GL_UNSIGNED_BYTE,
180 | 0,
181 | &_lumaTexture);
182 | if (err) {
183 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
184 | }
185 |
186 | glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
187 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
188 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
189 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
190 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
191 |
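    | // Bi-planar 4:2:0 (NV12) buffers carry a second, interleaved CbCr plane
    | // at half the luma resolution, hence GL_RG_EXT at width/2 x height/2.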
192 | if(planeCount == 2) {
193 | // UV-plane.
194 | glActiveTexture(GL_TEXTURE1);
195 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
196 | _videoTextureCache,
197 | pixelBuffer,
198 | NULL,
199 | GL_TEXTURE_2D,
200 | GL_RG_EXT,
201 | frameWidth / 2,
202 | frameHeight / 2,
203 | GL_RG_EXT,
204 | GL_UNSIGNED_BYTE,
205 | 1,
206 | &_chromaTexture);
207 | if (err) {
208 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
209 | }
210 |
211 | glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
212 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
213 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
214 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
215 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
216 | }
217 |
218 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
219 |
220 | // Set the view port to the entire view.
221 | glViewport(0, 0, _backingWidth, _backingHeight);
222 |
223 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
224 | glClear(GL_COLOR_BUFFER_BIT);
225 |
226 | // Use shader program.
227 | glUseProgram(self.program);
228 | // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1);
229 | // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1);
230 | glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
231 | glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
232 |
233 | // Set up the quad vertices with respect to the orientation and aspect ratio of the video.
234 | CGRect viewBounds = self.bounds;
235 | CGSize contentSize = CGSizeMake(frameWidth, frameHeight);
236 | CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);
237 |
238 | // Compute normalized quad coordinates to draw the frame into.
239 | CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
240 | CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width,
241 | vertexSamplingRect.size.height/viewBounds.size.height);
242 |
243 | // Normalize the quad vertices.
244 | if (cropScaleAmount.width > cropScaleAmount.height) {
245 | normalizedSamplingSize.width = 1.0;
246 | normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
247 | }
248 | else {
249 | normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height;
250 | normalizedSamplingSize.height = 1.0;
251 | }
252 |
253 | /*
254 | The quad vertex data defines the region of 2D plane onto which we draw our pixel buffers.
255 | Vertex data formed using (-1,-1) and (1,1) as the bottom left and top right coordinates respectively, covers the entire screen.
256 | */
257 | GLfloat quadVertexData [] = {
258 | -1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
259 | normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
260 | -1 * normalizedSamplingSize.width, normalizedSamplingSize.height,
261 | normalizedSamplingSize.width, normalizedSamplingSize.height,
262 | };
263 |
264 | // Update attribute values.
265 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
266 | glEnableVertexAttribArray(ATTRIB_VERTEX);
267 |
268 | /*
269 | The texture vertices are set up such that we flip the texture vertically. This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system.
270 | */
271 | CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1);
272 | GLfloat quadTextureData[] = {
273 | CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
274 | CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
275 | CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),
276 | CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect)
277 | };
278 |
279 | glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
280 | glEnableVertexAttribArray(ATTRIB_TEXCOORD);
281 |
282 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
283 |
284 | glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
285 | [_context presentRenderbuffer:GL_RENDERBUFFER];
286 |
287 | [self cleanUpTextures];
288 | // Periodic texture cache flush every frame
289 | CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
290 |
291 | if(_videoTextureCache) {
292 | CFRelease(_videoTextureCache);
293 | }
294 | }
295 |
296 | #pragma mark - OpenGL setup
297 |
298 | - (void)setupGL
299 | {
300 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
301 | return;
302 | }
303 |
304 | [self setupBuffers];
305 | [self loadShaders];
306 |
307 | glUseProgram(self.program);
308 |
309 | // 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively.
310 | glUniform1i(uniforms[UNIFORM_Y], 0);
311 | glUniform1i(uniforms[UNIFORM_UV], 1);
312 | glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
313 | glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
314 | }
315 |
316 | #pragma mark - Utilities
317 |
318 | - (void)setupBuffers
319 | {
320 | glDisable(GL_DEPTH_TEST);
321 |
322 | glEnableVertexAttribArray(ATTRIB_VERTEX);
323 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
324 |
325 | glEnableVertexAttribArray(ATTRIB_TEXCOORD);
326 | glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
327 |
328 | [self createBuffers];
329 | }
330 |
331 | - (void) createBuffers
332 | {
333 | glGenFramebuffers(1, &_frameBufferHandle);
334 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
335 |
336 | glGenRenderbuffers(1, &_colorBufferHandle);
337 | glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
338 |
339 | [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self];
340 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
341 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
342 |
343 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
344 | if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
345 | NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
346 | }
347 | }
348 |
349 | - (void) releaseBuffers
350 | {
351 | if(_frameBufferHandle) {
352 | glDeleteFramebuffers(1, &_frameBufferHandle);
353 | _frameBufferHandle = 0;
354 | }
355 |
356 | if(_colorBufferHandle) {
357 | glDeleteRenderbuffers(1, &_colorBufferHandle);
358 | _colorBufferHandle = 0;
359 | }
360 | }
361 |
362 | - (void) resetRenderBuffer
363 | {
364 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
365 | return;
366 | }
367 |
368 | [self releaseBuffers];
369 | [self createBuffers];
370 | }
371 |
372 | - (void) cleanUpTextures
373 | {
374 | if (_lumaTexture) {
375 | CFRelease(_lumaTexture);
376 | _lumaTexture = NULL;
377 | }
378 |
379 | if (_chromaTexture) {
380 | CFRelease(_chromaTexture);
381 | _chromaTexture = NULL;
382 | }
383 | }
384 |
385 | #pragma mark - OpenGL ES 2 shader compilation
386 |
387 | const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;"
388 | "precision mediump float;"
389 | "uniform sampler2D SamplerY;"
390 | "uniform sampler2D SamplerUV;"
391 | "uniform mat3 colorConversionMatrix;"
392 | "void main()"
393 | "{"
394 | " mediump vec3 yuv;"
395 | " lowp vec3 rgb;"
396 | // Subtract constants to map the video range start at 0
397 | " yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));"
398 | " yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));"
399 | " rgb = colorConversionMatrix * yuv;"
400 | " gl_FragColor = vec4(rgb, 1);"
401 | "}";
402 |
403 | const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;"
404 | "attribute vec2 texCoord;"
405 | "uniform float preferredRotation;"
406 | "varying vec2 texCoordVarying;"
407 | "void main()"
408 | "{"
409 | " mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,"
410 | " sin(preferredRotation), cos(preferredRotation), 0.0, 0.0,"
411 | " 0.0, 0.0, 1.0, 0.0,"
412 | " 0.0, 0.0, 0.0, 1.0);"
413 | " gl_Position = position * rotationMatrix;"
414 | " texCoordVarying = texCoord;"
415 | "}";
416 |
417 | - (BOOL)loadShaders
418 | {
419 | GLuint vertShader = 0, fragShader = 0;
420 |
421 | // Create the shader program.
422 | self.program = glCreateProgram();
423 |
424 | if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) {
425 | NSLog(@"Failed to compile vertex shader");
426 | return NO;
427 | }
428 |
429 | if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) {
430 | NSLog(@"Failed to compile fragment shader");
431 | return NO;
432 | }
433 |
434 | // Attach vertex shader to program.
435 | glAttachShader(self.program, vertShader);
436 |
437 | // Attach fragment shader to program.
438 | glAttachShader(self.program, fragShader);
439 |
440 | // Bind attribute locations. This needs to be done prior to linking.
441 | glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
442 | glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord");
443 |
444 | // Link the program.
445 | if (![self linkProgram:self.program]) {
446 | NSLog(@"Failed to link program: %d", self.program);
447 |
448 | if (vertShader) {
449 | glDeleteShader(vertShader);
450 | vertShader = 0;
451 | }
452 | if (fragShader) {
453 | glDeleteShader(fragShader);
454 | fragShader = 0;
455 | }
456 | if (self.program) {
457 | glDeleteProgram(self.program);
458 | self.program = 0;
459 | }
460 |
461 | return NO;
462 | }
463 |
464 | // Get uniform locations.
465 | uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
466 | uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
467 | // uniforms[UNIFORM_LUMA_THRESHOLD] = glGetUniformLocation(self.program, "lumaThreshold");
468 | // uniforms[UNIFORM_CHROMA_THRESHOLD] = glGetUniformLocation(self.program, "chromaThreshold");
469 | uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation");
470 | uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
471 |
472 | // Release vertex and fragment shaders.
473 | if (vertShader) {
474 | glDetachShader(self.program, vertShader);
475 | glDeleteShader(vertShader);
476 | }
477 | if (fragShader) {
478 | glDetachShader(self.program, fragShader);
479 | glDeleteShader(fragShader);
480 | }
481 |
482 | return YES;
483 | }
484 |
485 | - (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString
486 | {
487 | *shader = glCreateShader(type);
488 | glShaderSource(*shader, 1, &shaderString, NULL);
489 | glCompileShader(*shader);
490 |
491 | #if defined(DEBUG)
492 | GLint logLength;
493 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
494 | if (logLength > 0) {
495 | GLchar *log = (GLchar *)malloc(logLength);
496 | glGetShaderInfoLog(*shader, logLength, &logLength, log);
497 | NSLog(@"Shader compile log:\n%s", log);
498 | free(log);
499 | }
500 | #endif
501 |
502 | GLint status = 0;
503 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
504 | if (status == 0) {
505 | glDeleteShader(*shader);
506 | return NO;
507 | }
508 |
509 | return YES;
510 | }
511 |
512 | - (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL
513 | {
514 | NSError *error;
515 | NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
516 | if (sourceString == nil) {
517 | NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]);
518 | return NO;
519 | }
520 |
521 | const GLchar *source = (GLchar *)[sourceString UTF8String];
522 |
523 | return [self compileShaderString:shader type:type shaderString:source];
524 | }
525 |
526 | - (BOOL)linkProgram:(GLuint)prog
527 | {
528 | GLint status;
529 | glLinkProgram(prog);
530 |
531 | #if defined(DEBUG)
532 | GLint logLength;
533 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
534 | if (logLength > 0) {
535 | GLchar *log = (GLchar *)malloc(logLength);
536 | glGetProgramInfoLog(prog, logLength, &logLength, log);
537 | NSLog(@"Program link log:\n%s", log);
538 | free(log);
539 | }
540 | #endif
541 |
542 | glGetProgramiv(prog, GL_LINK_STATUS, &status);
543 | if (status == 0) {
544 | return NO;
545 | }
546 |
547 | return YES;
548 | }
549 |
550 | - (BOOL)validateProgram:(GLuint)prog
551 | {
552 | GLint logLength, status;
553 |
554 | glValidateProgram(prog);
555 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
556 | if (logLength > 0) {
557 | GLchar *log = (GLchar *)malloc(logLength);
558 | glGetProgramInfoLog(prog, logLength, &logLength, log);
559 | NSLog(@"Program validate log:\n%s", log);
560 | free(log);
561 | }
562 |
563 | glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
564 | if (status == 0) {
565 | return NO;
566 | }
567 |
568 | return YES;
569 | }
570 |
571 | - (void)dealloc
572 | {
573 | if (!_context || ![EAGLContext setCurrentContext:_context]) {
574 | return;
575 | }
576 |
577 | [self cleanUpTextures];
578 |
579 | if(_pixelBuffer) {
580 | CVPixelBufferRelease(_pixelBuffer);
581 | }
582 |
583 | if (self.program) {
584 | glDeleteProgram(self.program);
585 | self.program = 0;
586 | }
587 | if(_context) {
588 | //[_context release];
589 | _context = nil;
590 | }
591 | //[super dealloc];
592 | }
593 |
594 | @end
595 |
--------------------------------------------------------------------------------