├── JNVideoKitDemo
│   ├── JNVideoKitDemo.xcodeproj
│   │   ├── project.pbxproj
│   │   ├── project.xcworkspace
│   │   │   └── contents.xcworkspacedata
│   │   └── xcuserdata
│   │       └── netease.xcuserdatad
│   │           └── xcschemes
│   │               ├── JNVideoKitDemo.xcscheme
│   │               └── xcschememanagement.plist
│   └── JNVideoKitDemo
│       ├── AppDelegate.h
│       ├── AppDelegate.m
│       ├── Assets.xcassets
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Base.lproj
│       │   ├── LaunchScreen.storyboard
│       │   └── Main.storyboard
│       ├── Info.plist
│       ├── JNVideoKit
│       │   ├── GPUImage
│       │   │   ├── GLProgram.h
│       │   │   ├── GLProgram.m
│       │   │   ├── GPUImage.h
│       │   │   ├── GPUImageFilter.h
│       │   │   ├── GPUImageFilter.m
│       │   │   ├── GPUImageMovie.h
│       │   │   ├── GPUImageMovie.m
│       │   │   ├── GPUImageMovieWriter.h
│       │   │   ├── GPUImageMovieWriter.m
│       │   │   ├── GPUImageOpenGLESContext.h
│       │   │   ├── GPUImageOpenGLESContext.m
│       │   │   ├── GPUImageOutput.h
│       │   │   ├── GPUImageOutput.m
│       │   │   ├── GPUImagePicture.h
│       │   │   ├── GPUImagePicture.m
│       │   │   ├── GPUImageRawData.h
│       │   │   ├── GPUImageRawData.m
│       │   │   ├── GPUImageRotationFilter.h
│       │   │   ├── GPUImageRotationFilter.m
│       │   │   ├── GPUImageVideoCamera.h
│       │   │   ├── GPUImageVideoCamera.m
│       │   │   ├── GPUImageView.h
│       │   │   └── GPUImageView.m
│       │   ├── IFFilters
│       │   │   ├── IFFilters
│       │   │   │   ├── IF1977Filter.h
│       │   │   │   ├── IF1977Filter.m
│       │   │   │   ├── IFAmaroFilter.h
│       │   │   │   ├── IFAmaroFilter.m
│       │   │   │   ├── IFBrannanFilter.h
│       │   │   │   ├── IFBrannanFilter.m
│       │   │   │   ├── IFEarlybirdFilter.h
│       │   │   │   ├── IFEarlybirdFilter.m
│       │   │   │   ├── IFHefeFilter.h
│       │   │   │   ├── IFHefeFilter.m
│       │   │   │   ├── IFHudsonFilter.h
│       │   │   │   ├── IFHudsonFilter.m
│       │   │   │   ├── IFInkwellFilter.h
│       │   │   │   ├── IFInkwellFilter.m
│       │   │   │   ├── IFLomofiFilter.h
│       │   │   │   ├── IFLomofiFilter.m
│       │   │   │   ├── IFLordKelvinFilter.h
│       │   │   │   ├── IFLordKelvinFilter.m
│       │   │   │   ├── IFNashvilleFilter.h
│       │   │   │   ├── IFNashvilleFilter.m
│       │   │   │   ├── IFNormalFilter.h
│       │   │   │   ├── IFNormalFilter.m
│       │   │   │   ├── IFRiseFilter.h
│       │   │   │   ├── IFRiseFilter.m
│       │   │   │   ├── IFRotationFilter.h
│       │   │   │   ├── IFRotationFilter.m
│       │   │   │   ├── IFSierraFilter.h
│       │   │   │   ├── IFSierraFilter.m
│       │   │   │   ├── IFSutroFilter.h
│       │   │   │   ├── IFSutroFilter.m
│       │   │   │   ├── IFToasterFilter.h
│       │   │   │   ├── IFToasterFilter.m
│       │   │   │   ├── IFValenciaFilter.h
│       │   │   │   ├── IFValenciaFilter.m
│       │   │   │   ├── IFWaldenFilter.h
│       │   │   │   ├── IFWaldenFilter.m
│       │   │   │   ├── IFXproIIFilter.h
│       │   │   │   └── IFXproIIFilter.m
│       │   │   ├── IFImage
│       │   │   │   ├── IFImageFilter.h
│       │   │   │   ├── IFImageFilter.m
│       │   │   │   ├── IFVideoCamera.h
│       │   │   │   ├── IFVideoCamera.m
│       │   │   │   └── InstaFilters.h
│       │   │   ├── Resources_for_IF_Filters
│       │   │   │   ├── 1977blowout.png
│       │   │   │   ├── 1977map.png
│       │   │   │   ├── amaroMap.png
│       │   │   │   ├── blackboard1024.png
│       │   │   │   ├── brannanBlowout.png
│       │   │   │   ├── brannanContrast.png
│       │   │   │   ├── brannanLuma.png
│       │   │   │   ├── brannanProcess.png
│       │   │   │   ├── brannanScreen.png
│       │   │   │   ├── earlyBirdCurves.png
│       │   │   │   ├── earlybirdBlowout.png
│       │   │   │   ├── earlybirdMap.png
│       │   │   │   ├── earlybirdOverlayMap.png
│       │   │   │   ├── edgeBurn.png
│       │   │   │   ├── hefeGradientMap.png
│       │   │   │   ├── hefeMap.png
│       │   │   │   ├── hefeMetal.png
│       │   │   │   ├── hefeSoftLight.png
│       │   │   │   ├── hudsonBackground.png
│       │   │   │   ├── hudsonMap.png
│       │   │   │   ├── inkwellMap.png
│       │   │   │   ├── kelvinMap.png
│       │   │   │   ├── lomoMap.png
│       │   │   │   ├── nashvilleMap.png
│       │   │   │   ├── overlayMap.png
│       │   │   │   ├── riseMap.png
│       │   │   │   ├── sierraMap.png
│       │   │   │   ├── sierraVignette.png
│       │   │   │   ├── softLight.png
│       │   │   │   ├── sutroCurves.png
│       │   │   │   ├── sutroEdgeBurn.png
│       │   │   │   ├── sutroMetal.png
│       │   │   │   ├── toasterColorShift.png
│       │   │   │   ├── toasterCurves.png
│       │   │   │   ├── toasterMetal.png
│       │   │   │   ├── toasterOverlayMapWarm.png
│       │   │   │   ├── toasterSoftLight.png
│       │   │   │   ├── valenciaGradientMap.png
│       │   │   │   ├── valenciaMap.png
│       │   │   │   ├── vignetteMap.png
│       │   │   │   ├── waldenMap.png
│       │   │   │   └── xproMap.png
│       │   │   ├── UIImage+Resize.h
│       │   │   └── UIImage+Resize.m
│       │   ├── JNVideoMerge.h
│       │   ├── JNVideoMerge.m
│       │   ├── JNVideoPlayer.h
│       │   ├── JNVideoPlayer.m
│       │   ├── JNVideoRecord.h
│       │   └── JNVideoRecord.m
│       ├── ViewController.h
│       ├── ViewController.m
│       └── main.m
└── README.md
/JNVideoKitDemo/JNVideoKitDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:JNVideoKitDemo.xcodeproj">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo.xcodeproj/xcuserdata/netease.xcuserdatad/xcschemes/JNVideoKitDemo.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo.xcodeproj/xcuserdata/netease.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>JNVideoKitDemo.xcscheme</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>0</integer>
11 | 		</dict>
12 | 	</dict>
13 | 	<key>SuppressBuildableAutocreation</key>
14 | 	<dict>
15 | 		<key>E6C12BCD1DBDE27D0080C8ED</key>
16 | 		<dict>
17 | 			<key>primary</key>
18 | 			<true/>
19 | 		</dict>
20 | 	</dict>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 16/10/24.
6 | // Copyright © 2016 Jonear. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 16/10/24.
6 | // Copyright © 2016 Jonear. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 |
24 | - (void)applicationWillResignActive:(UIApplication *)application {
25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
27 | }
28 |
29 |
30 | - (void)applicationDidEnterBackground:(UIApplication *)application {
31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
33 | }
34 |
35 |
36 | - (void)applicationWillEnterForeground:(UIApplication *)application {
37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
38 | }
39 |
40 |
41 | - (void)applicationDidBecomeActive:(UIApplication *)application {
42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
43 | }
44 |
45 |
46 | - (void)applicationWillTerminate:(UIApplication *)application {
47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
48 | }
49 |
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "ipad",
35 | "size" : "29x29",
36 | "scale" : "1x"
37 | },
38 | {
39 | "idiom" : "ipad",
40 | "size" : "29x29",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "40x40",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "40x40",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "76x76",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "76x76",
61 | "scale" : "2x"
62 | }
63 | ],
64 | "info" : {
65 | "version" : 1,
66 | "author" : "xcode"
67 | }
68 | }
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>NSCameraUsageDescription</key>
6 | 	<string>lala</string>
7 | 	<key>CFBundleDevelopmentRegion</key>
8 | 	<string>en</string>
9 | 	<key>CFBundleExecutable</key>
10 | 	<string>$(EXECUTABLE_NAME)</string>
11 | 	<key>CFBundleIdentifier</key>
12 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
13 | 	<key>CFBundleInfoDictionaryVersion</key>
14 | 	<string>6.0</string>
15 | 	<key>CFBundleName</key>
16 | 	<string>$(PRODUCT_NAME)</string>
17 | 	<key>CFBundlePackageType</key>
18 | 	<string>APPL</string>
19 | 	<key>CFBundleShortVersionString</key>
20 | 	<string>1.0</string>
21 | 	<key>CFBundleVersion</key>
22 | 	<string>1</string>
23 | 	<key>LSRequiresIPhoneOS</key>
24 | 	<true/>
25 | 	<key>UILaunchStoryboardName</key>
26 | 	<string>LaunchScreen</string>
27 | 	<key>UIMainStoryboardFile</key>
28 | 	<string>Main</string>
29 | 	<key>UIRequiredDeviceCapabilities</key>
30 | 	<array>
31 | 		<string>armv7</string>
32 | 	</array>
33 | 	<key>UISupportedInterfaceOrientations</key>
34 | 	<array>
35 | 		<string>UIInterfaceOrientationPortrait</string>
36 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
37 | 		<string>UIInterfaceOrientationLandscapeRight</string>
38 | 	</array>
39 | 	<key>UISupportedInterfaceOrientations~ipad</key>
40 | 	<array>
41 | 		<string>UIInterfaceOrientationPortrait</string>
42 | 		<string>UIInterfaceOrientationPortraitUpsideDown</string>
43 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
44 | 		<string>UIInterfaceOrientationLandscapeRight</string>
45 | 	</array>
46 | </dict>
47 | </plist>
48 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GLProgram.h:
--------------------------------------------------------------------------------
1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
2 | // A description of this can be found at his page on the topic:
3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
4 | // I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
5 |
6 | #import <Foundation/Foundation.h>
7 | #import <OpenGLES/ES2/gl.h>
8 | #import <OpenGLES/ES2/glext.h>
9 |
10 | @interface GLProgram : NSObject
11 | {
12 | NSMutableArray *attributes;
13 | NSMutableArray *uniforms;
14 | GLuint program,
15 | vertShader,
16 | fragShader;
17 | }
18 | - (id)initWithVertexShaderString:(NSString *)vShaderString
19 | fragmentShaderString:(NSString *)fShaderString;
20 | - (id)initWithVertexShaderString:(NSString *)vShaderString
21 | fragmentShaderFilename:(NSString *)fShaderFilename;
22 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
23 | fragmentShaderFilename:(NSString *)fShaderFilename;
24 | - (void)addAttribute:(NSString *)attributeName;
25 | - (GLuint)attributeIndex:(NSString *)attributeName;
26 | - (GLuint)uniformIndex:(NSString *)uniformName;
27 | - (BOOL)link;
28 | - (void)use;
29 | - (NSString *)vertexShaderLog;
30 | - (NSString *)fragmentShaderLog;
31 | - (NSString *)programLog;
32 | - (void)validate;
33 | @end
34 |
--------------------------------------------------------------------------------
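
GLProgram's workflow is: init (which compiles and attaches both shaders), register attributes, link, then use. A minimal driving sketch, assuming an EAGLContext is already current (as GPUImageOpenGLESContext arranges below) and that kMyVertexShader/kMyFragmentShader are placeholder GLSL source strings:

    GLProgram *program = [[GLProgram alloc] initWithVertexShaderString:kMyVertexShader
                                                  fragmentShaderString:kMyFragmentShader];
    // Attributes must be registered before -link: GLProgram binds their
    // locations with glBindAttribLocation, which only takes effect at link time.
    [program addAttribute:@"position"];

    if (![program link])
    {
        NSLog(@"Program link log: %@", [program programLog]);
    }

    [program use]; // glUseProgram under the hood
    GLuint positionAttribute = [program attributeIndex:@"position"];
    GLuint textureUniform = [program uniformIndex:@"inputImageTexture"];
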
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GLProgram.m:
--------------------------------------------------------------------------------
1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
2 | // A description of this can be found at his page on the topic:
3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
4 |
5 |
6 | #import "GLProgram.h"
7 | // START:typedefs
8 | #pragma mark Function Pointer Definitions
9 | typedef void (*GLInfoFunction)(GLuint program,
10 | GLenum pname,
11 | GLint* params);
12 | typedef void (*GLLogFunction) (GLuint program,
13 | GLsizei bufsize,
14 | GLsizei* length,
15 | GLchar* infolog);
16 | // END:typedefs
17 | #pragma mark -
18 | #pragma mark Private Extension Method Declaration
19 | // START:extension
20 | @interface GLProgram()
21 |
22 | - (BOOL)compileShader:(GLuint *)shader
23 | type:(GLenum)type
24 | string:(NSString *)shaderString;
25 | - (NSString *)logForOpenGLObject:(GLuint)object
26 | infoCallback:(GLInfoFunction)infoFunc
27 | logFunc:(GLLogFunction)logFunc;
28 | @end
29 | // END:extension
30 | #pragma mark -
31 |
32 | @implementation GLProgram
33 | // START:init
34 |
35 | - (id)initWithVertexShaderString:(NSString *)vShaderString
36 | fragmentShaderString:(NSString *)fShaderString;
37 | {
38 | if ((self = [super init]))
39 | {
40 | attributes = [[NSMutableArray alloc] init];
41 | uniforms = [[NSMutableArray alloc] init];
42 | program = glCreateProgram();
43 |
44 | if (![self compileShader:&vertShader
45 | type:GL_VERTEX_SHADER
46 | string:vShaderString])
47 | NSLog(@"Failed to compile vertex shader");
48 |
49 | // Create and compile fragment shader
50 | if (![self compileShader:&fragShader
51 | type:GL_FRAGMENT_SHADER
52 | string:fShaderString])
53 | NSLog(@"Failed to compile fragment shader");
54 |
55 | glAttachShader(program, vertShader);
56 | glAttachShader(program, fragShader);
57 | }
58 |
59 | return self;
60 | }
61 |
62 | - (id)initWithVertexShaderString:(NSString *)vShaderString
63 | fragmentShaderFilename:(NSString *)fShaderFilename;
64 | {
65 | NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
66 | NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];
67 |
68 | if ((self = [self initWithVertexShaderString:vShaderString fragmentShaderString:fragmentShaderString]))
69 | {
70 | }
71 |
72 | return self;
73 | }
74 |
75 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
76 | fragmentShaderFilename:(NSString *)fShaderFilename;
77 | {
78 | NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@"vsh"];
79 | NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];
80 |
81 | NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
82 | NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];
83 |
84 | if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString]))
85 | {
86 | }
87 |
88 | return self;
89 | }
90 | // END:init
91 | // START:compile
92 | - (BOOL)compileShader:(GLuint *)shader
93 | type:(GLenum)type
94 | string:(NSString *)shaderString
95 | {
96 | GLint status;
97 | const GLchar *source;
98 |
99 | source =
100 | (GLchar *)[shaderString UTF8String];
101 | if (!source)
102 | {
103 | NSLog(@"Failed to load vertex shader");
104 | return NO;
105 | }
106 |
107 | *shader = glCreateShader(type);
108 | glShaderSource(*shader, 1, &source, NULL);
109 | glCompileShader(*shader);
110 |
111 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
112 |
113 | if (status != GL_TRUE)
114 | {
115 | GLint logLength;
116 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
117 | if (logLength > 0)
118 | {
119 | GLchar *log = (GLchar *)malloc(logLength);
120 | glGetShaderInfoLog(*shader, logLength, &logLength, log);
121 | NSLog(@"Shader compile log:\n%s", log);
122 | free(log);
123 | }
124 | }
125 |
126 | return status == GL_TRUE;
127 | }
128 | // END:compile
129 | #pragma mark -
130 | // START:addattribute
131 | - (void)addAttribute:(NSString *)attributeName
132 | {
133 | if (![attributes containsObject:attributeName])
134 | {
135 | [attributes addObject:attributeName];
136 | glBindAttribLocation(program,
137 | [attributes indexOfObject:attributeName],
138 | [attributeName UTF8String]);
139 | }
140 | }
141 | // END:addattribute
142 | // START:indexmethods
143 | - (GLuint)attributeIndex:(NSString *)attributeName
144 | {
145 | return [attributes indexOfObject:attributeName];
146 | }
147 | - (GLuint)uniformIndex:(NSString *)uniformName
148 | {
149 | return glGetUniformLocation(program, [uniformName UTF8String]);
150 | }
151 | // END:indexmethods
152 | #pragma mark -
153 | // START:link
154 | - (BOOL)link
155 | {
156 | GLint status;
157 |
158 | glLinkProgram(program);
159 | glValidateProgram(program);
160 |
161 | glGetProgramiv(program, GL_LINK_STATUS, &status);
162 | if (status == GL_FALSE)
163 | return NO;
164 |
165 | if (vertShader)
166 | glDeleteShader(vertShader);
167 | if (fragShader)
168 | glDeleteShader(fragShader);
169 |
170 | return YES;
171 | }
172 | // END:link
173 | // START:use
174 | - (void)use
175 | {
176 | glUseProgram(program);
177 | }
178 | // END:use
179 | #pragma mark -
180 | // START:privatelog
181 | - (NSString *)logForOpenGLObject:(GLuint)object
182 | infoCallback:(GLInfoFunction)infoFunc
183 | logFunc:(GLLogFunction)logFunc
184 | {
185 | GLint logLength = 0, charsWritten = 0;
186 |
187 | infoFunc(object, GL_INFO_LOG_LENGTH, &logLength);
188 | if (logLength < 1)
189 | return nil;
190 |
191 | char *logBytes = malloc(logLength);
192 | logFunc(object, logLength, &charsWritten, logBytes);
193 | NSString *log = [[NSString alloc] initWithBytes:logBytes
194 | length:logLength
195 | encoding:NSUTF8StringEncoding];
196 | free(logBytes);
197 | return log;
198 | }
199 | // END:privatelog
200 | // START:log
201 | // Shader objects require the glGetShader* query functions; the glGetProgram*
202 | // variants are only valid for program objects.
203 | - (NSString *)vertexShaderLog
204 | {
205 | return [self logForOpenGLObject:vertShader
206 | infoCallback:(GLInfoFunction)&glGetShaderiv
207 | logFunc:(GLLogFunction)&glGetShaderInfoLog];
208 | }
209 | - (NSString *)fragmentShaderLog
210 | {
211 | return [self logForOpenGLObject:fragShader
212 | infoCallback:(GLInfoFunction)&glGetShaderiv
213 | logFunc:(GLLogFunction)&glGetShaderInfoLog];
214 | }
214 | - (NSString *)programLog
215 | {
216 | return [self logForOpenGLObject:program
217 | infoCallback:(GLInfoFunction)&glGetProgramiv
218 | logFunc:(GLLogFunction)&glGetProgramInfoLog];
219 | }
220 | // END:log
221 |
222 | - (void)validate;
223 | {
224 | GLint logLength;
225 |
226 | glValidateProgram(program);
227 | glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
228 | if (logLength > 0)
229 | {
230 | GLchar *log = (GLchar *)malloc(logLength);
231 | glGetProgramInfoLog(program, logLength, &logLength, log);
232 | NSLog(@"Program validate log:\n%s", log);
233 | free(log);
234 | }
235 | }
236 |
237 | #pragma mark -
238 | // START:dealloc
239 | - (void)dealloc
240 | {
241 |
242 | if (vertShader)
243 | glDeleteShader(vertShader);
244 |
245 | if (fragShader)
246 | glDeleteShader(fragShader);
247 |
248 | if (program)
249 | glDeleteProgram(program);
250 |
251 | }
252 | // END:dealloc
253 | @end
254 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImage.h:
--------------------------------------------------------------------------------
1 | #import "GLProgram.h"
2 |
3 | // Base classes
4 | #import "GPUImageOpenGLESContext.h"
5 | #import "GPUImageOutput.h"
6 | #import "GPUImageView.h"
7 | #import "GPUImageVideoCamera.h"
8 | #import "GPUImageMovie.h"
9 | #import "GPUImagePicture.h"
10 | #import "GPUImageRawData.h"
11 | #import "GPUImageMovieWriter.h"
12 |
13 | // Filters
14 | #import "GPUImageFilter.h"
15 | #import "GPUImageRotationFilter.h"
16 |
--------------------------------------------------------------------------------
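
As the grouping in this umbrella header suggests, GPUImage objects form a chain: a source (camera, movie, or picture) is a GPUImageOutput, and each downstream stage registers as a target. A sketch of the usual live-preview chain; the GPUImageVideoCamera initializer and -startCameraCapture call are assumed from the stock GPUImage API, since those files are not reproduced in this capture:

    #import "GPUImage.h"

    GPUImageVideoCamera *camera = [[GPUImageVideoCamera alloc] init]; // assumed initializer
    GPUImageFilter *filter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"Sepia"]; // placeholder .fsh name
    GPUImageView *previewView = [[GPUImageView alloc] initWithFrame:[UIScreen mainScreen].bounds]; // UIView subclass, assumed initializer

    [camera addTarget:filter];      // camera pushes each frame into the filter
    [filter addTarget:previewView]; // filter renders its output into the view
    [camera startCameraCapture];    // assumed start method, per GPUImage convention
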
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 | #import <UIKit/UIKit.h>
3 |
4 | #define STRINGIZE(x) #x
5 | #define STRINGIZE2(x) STRINGIZE(x)
6 | #define SHADER_STRING(text) @ STRINGIZE2(text)
7 |
8 | extern NSString *const kGPUImageVertexShaderString;
9 |
10 | @interface GPUImageFilter : GPUImageOutput <GPUImageInput>
11 | {
12 | GLuint filterSourceTexture, filterSourceTexture2, filterOutputTexture;
13 |
14 | GLProgram *filterProgram;
15 |
16 | CGSize currentFilterSize;
17 | }
18 |
19 | // Initialization and teardown
20 | - (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
21 | - (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
22 |
23 | // Still image processing
24 | - (UIImage *)imageFromCurrentlyProcessedOutput;
25 | - (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
26 |
27 | // Rendering
28 | - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
29 |
30 | // Input parameters
31 | - (void)setInteger:(GLint)newInteger forUniform:(NSString *)uniformName;
32 | - (void)setFloat:(GLfloat)newFloat forUniform:(NSString *)uniformName;
33 | - (void)setSize:(CGSize)newSize forUniform:(NSString *)uniformName;
34 | - (void)setPoint:(CGPoint)newPoint forUniform:(NSString *)uniformName;
35 | - (void)setFloatVec3:(GLfloat *)newVec3 forUniform:(NSString *)uniformName;
36 | - (void)setFloatVec4:(GLfloat *)newVec4 forUniform:(NSString *)uniformName;
37 |
38 | @end
--------------------------------------------------------------------------------
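
SHADER_STRING is a two-step stringize: STRINGIZE2 expands its argument, STRINGIZE turns the raw GLSL body into a C string, and the leading @ makes it an NSString literal, so shaders can be baked into the binary instead of loaded from .fsh files. A sketch of a custom one-uniform filter built on this header; the shader body and the "brightness" uniform are illustrative, while inputImageTexture is the sampler name GPUImageFilter expects:

    NSString *const kBrightnessFragmentShaderString = SHADER_STRING
    (
     varying highp vec2 textureCoordinate;

     uniform sampler2D inputImageTexture;
     uniform lowp float brightness;

     void main()
     {
         lowp vec4 color = texture2D(inputImageTexture, textureCoordinate);
         gl_FragColor = vec4(color.rgb + vec3(brightness), color.a);
     }
    );

    GPUImageFilter *brightnessFilter =
        [[GPUImageFilter alloc] initWithFragmentShaderFromString:kBrightnessFragmentShaderString];
    [brightnessFilter setFloat:0.2 forUniform:@"brightness"];
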
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 | #import "GPUImagePicture.h"
3 |
4 | // Hardcode the vertex shader for the filter, because it won't change
5 | NSString *const kGPUImageVertexShaderString = SHADER_STRING
6 | (
7 |
8 | attribute vec4 position;
9 | attribute vec4 inputTextureCoordinate;
10 |
11 | varying vec2 textureCoordinate;
12 |
13 | void main()
14 | {
15 | gl_Position = position;
16 | textureCoordinate = inputTextureCoordinate.xy;
17 | }
18 |
19 | );
20 |
21 | void dataProviderReleaseCallback (void *info, const void *data, size_t size);
22 |
23 | @interface GPUImageFilter ()
24 | {
25 | GLint filterPositionAttribute, filterTextureCoordinateAttribute;
26 | GLint filterInputTextureUniform, filterInputTextureUniform2;
27 |
28 | GLuint filterFramebuffer;
29 | }
30 |
31 | // Managing the display FBOs
32 | - (CGSize)sizeOfFBO;
33 | - (void)createFilterFBO;
34 | - (void)destroyFilterFBO;
35 | - (void)setFilterFBO;
36 |
37 | @end
38 |
39 | @implementation GPUImageFilter
40 |
41 | #pragma mark -
42 | #pragma mark Initialization and teardown
43 |
44 | - (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
45 | {
46 | if (!(self = [super init]))
47 | {
48 | return nil;
49 | }
50 |
51 | [GPUImageOpenGLESContext useImageProcessingContext];
52 | filterProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:fragmentShaderString];
53 |
54 | [filterProgram addAttribute:@"position"];
55 | [filterProgram addAttribute:@"inputTextureCoordinate"];
56 |
57 | if (![filterProgram link])
58 | {
59 | NSString *progLog = [filterProgram programLog];
60 | NSLog(@"Program link log: %@", progLog);
61 | NSString *fragLog = [filterProgram fragmentShaderLog];
62 | NSLog(@"Fragment shader compile log: %@", fragLog);
63 | NSString *vertLog = [filterProgram vertexShaderLog];
64 | NSLog(@"Vertex shader compile log: %@", vertLog);
65 | filterProgram = nil;
66 | NSAssert(NO, @"Filter shader link failed");
67 | }
68 |
69 | filterPositionAttribute = [filterProgram attributeIndex:@"position"];
70 | filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
71 | filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
72 | filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
73 |
74 | [filterProgram use];
75 | glEnableVertexAttribArray(filterPositionAttribute);
76 | glEnableVertexAttribArray(filterTextureCoordinateAttribute);
77 |
78 | return self;
79 | }
80 |
81 | - (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
82 | {
83 | NSString *fragmentShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentShaderFilename ofType:@"fsh"];
84 | NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragmentShaderPathname encoding:NSUTF8StringEncoding error:nil];
85 |
86 | if (!(self = [self initWithFragmentShaderFromString:fragmentShaderString]))
87 | {
88 | return nil;
89 | }
90 |
91 | return self;
92 |
93 | }
94 |
95 | - (void)dealloc
96 | {
97 | [self destroyFilterFBO];
98 |
99 | }
100 |
101 | #pragma mark -
102 | #pragma mark Still image processing
103 |
104 | void dataProviderReleaseCallback (void *info, const void *data, size_t size)
105 | {
106 | free((void *)data);
107 | }
108 |
109 | - (UIImage *)imageFromCurrentlyProcessedOutput;
110 | {
111 | [GPUImageOpenGLESContext useImageProcessingContext];
112 | [self setFilterFBO];
113 |
114 | CGSize currentFBOSize = [self sizeOfFBO];
115 |
116 | NSUInteger totalBytesForImage = (int)currentFBOSize.width * (int)currentFBOSize.height * 4;
117 | GLubyte *rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
118 | glReadPixels(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
119 |
120 | CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
121 | CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
122 |
123 | CGImageRef cgImageFromBytes = CGImageCreate((int)currentFBOSize.width, (int)currentFBOSize.height, 8, 32, 4 * (int)currentFBOSize.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault, dataProvider, NULL, NO, kCGRenderingIntentDefault);
124 | UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:UIImageOrientationLeft];
125 |
126 | CGImageRelease(cgImageFromBytes);
127 | CGDataProviderRelease(dataProvider);
128 | CGColorSpaceRelease(defaultRGBColorSpace);
129 | // free(rawImagePixels);
130 |
131 | return finalImage;
132 | }
133 |
134 | - (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
135 | {
136 | GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:imageToFilter];
137 |
138 | [stillImageSource addTarget:self];
139 | [stillImageSource processImage];
140 |
141 | UIImage *processedImage = [self imageFromCurrentlyProcessedOutput];
142 |
143 | [stillImageSource removeTarget:self];
144 | return processedImage;
145 | }
146 |
147 | #pragma mark -
148 | #pragma mark Managing the display FBOs
149 |
150 | - (CGSize)sizeOfFBO;
151 | {
152 | CGSize outputSize = [self maximumOutputSize];
153 | if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )
154 | {
155 | return inputTextureSize;
156 | }
157 | else
158 | {
159 | return outputSize;
160 | }
161 | }
162 |
163 | - (void)createFilterFBO;
164 | {
165 | glActiveTexture(GL_TEXTURE1);
166 | glGenFramebuffers(1, &filterFramebuffer);
167 | glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
168 |
169 | CGSize currentFBOSize = [self sizeOfFBO];
170 | // NSLog(@"Filter size: %f, %f", currentFBOSize.width, currentFBOSize.height);
171 |
172 | glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)currentFBOSize.width, (int)currentFBOSize.height);
173 | glBindTexture(GL_TEXTURE_2D, outputTexture);
174 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
175 | glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);
176 |
177 | GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
178 |
179 | NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
180 | }
181 |
182 | - (void)destroyFilterFBO;
183 | {
184 | if (filterFramebuffer)
185 | {
186 | glDeleteFramebuffers(1, &filterFramebuffer);
187 | filterFramebuffer = 0;
188 | }
189 | }
190 |
191 | - (void)setFilterFBO;
192 | {
193 | if (!filterFramebuffer)
194 | {
195 | [self createFilterFBO];
196 | }
197 |
198 | glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
199 |
200 | CGSize currentFBOSize = [self sizeOfFBO];
201 | glViewport(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height);
202 | }
203 |
204 | #pragma mark -
205 | #pragma mark Rendering
206 |
207 | - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
208 | {
209 | [GPUImageOpenGLESContext useImageProcessingContext];
210 | [self setFilterFBO];
211 |
212 | [filterProgram use];
213 |
214 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
215 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
216 |
217 | glActiveTexture(GL_TEXTURE2);
218 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture);
219 |
220 | glUniform1i(filterInputTextureUniform, 2);
221 |
222 | if (filterSourceTexture2 != 0)
223 | {
224 | glActiveTexture(GL_TEXTURE3);
225 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture2);
226 |
227 | glUniform1i(filterInputTextureUniform2, 3);
228 | }
229 |
230 | glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
231 | glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
232 |
233 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
234 |
235 | for (id<GPUImageInput> currentTarget in targets)
236 | {
237 | [currentTarget setInputSize:inputTextureSize];
238 | [currentTarget newFrameReady];
239 | }
240 | }
241 |
242 | #pragma mark -
243 | #pragma mark Input parameters
244 |
245 | - (void)setInteger:(GLint)newInteger forUniform:(NSString *)uniformName;
246 | {
247 | [GPUImageOpenGLESContext useImageProcessingContext];
248 | [filterProgram use];
249 | GLint uniformIndex = [filterProgram uniformIndex:uniformName];
250 |
251 | glUniform1i(uniformIndex, newInteger);
252 | }
253 |
254 | - (void)setFloat:(GLfloat)newFloat forUniform:(NSString *)uniformName;
255 | {
256 | [GPUImageOpenGLESContext useImageProcessingContext];
257 | [filterProgram use];
258 | GLint uniformIndex = [filterProgram uniformIndex:uniformName];
259 |
260 | glUniform1f(uniformIndex, newFloat);
261 | }
262 |
263 | - (void)setSize:(CGSize)newSize forUniform:(NSString *)uniformName;
264 | {
265 | [GPUImageOpenGLESContext useImageProcessingContext];
266 | [filterProgram use];
267 | GLint uniformIndex = [filterProgram uniformIndex:uniformName];
268 | GLfloat sizeUniform[2];
269 | sizeUniform[0] = newSize.width;
270 | sizeUniform[1] = newSize.height;
271 |
272 | glUniform2fv(uniformIndex, 1, sizeUniform);
273 | }
274 |
275 | - (void)setPoint:(CGPoint)newPoint forUniform:(NSString *)uniformName;
276 | {
277 | [GPUImageOpenGLESContext useImageProcessingContext];
278 | [filterProgram use];
279 | GLint uniformIndex = [filterProgram uniformIndex:uniformName];
280 | GLfloat sizeUniform[2];
281 | sizeUniform[0] = newPoint.x;
282 | sizeUniform[1] = newPoint.y;
283 |
284 | glUniform2fv(uniformIndex, 1, sizeUniform);
285 | }
286 |
287 | - (void)setFloatVec3:(GLfloat *)newVec3 forUniform:(NSString *)uniformName;
288 | {
289 | GLint uniformIndex = [filterProgram uniformIndex:uniformName];
290 | [filterProgram use];
291 |
292 | glUniform3fv(uniformIndex, 1, newVec3);
293 | }
294 |
295 | - (void)setFloatVec4:(GLfloat *)newVec4 forUniform:(NSString *)uniformName;
296 | {
297 | GLint uniformIndex = [filterProgram uniformIndex:uniformName];
298 | [filterProgram use];
299 |
300 | glUniform4fv(uniformIndex, 1, newVec4);
301 | }
302 |
303 | #pragma mark -
304 | #pragma mark GPUImageInput
305 |
306 | - (void)newFrameReady;
307 | {
308 | static const GLfloat squareVertices[] = {
309 | -1.0f, -1.0f,
310 | 1.0f, -1.0f,
311 | -1.0f, 1.0f,
312 | 1.0f, 1.0f,
313 | };
314 |
315 | static const GLfloat squareTextureCoordinates[] = {
316 | 0.0f, 0.0f,
317 | 1.0f, 0.0f,
318 | 0.0f, 1.0f,
319 | 1.0f, 1.0f,
320 | };
321 |
322 | [self renderToTextureWithVertices:squareVertices textureCoordinates:squareTextureCoordinates];
323 | }
324 |
325 | - (NSInteger)nextAvailableTextureIndex;
326 | {
327 | if (filterSourceTexture == 0)
328 | {
329 | return 0;
330 | }
331 | else
332 | {
333 | return 1;
334 | }
335 | }
336 |
337 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
338 | {
339 | if (textureIndex == 0)
340 | {
341 | filterSourceTexture = newInputTexture;
342 | }
343 | else
344 | {
345 | filterSourceTexture2 = newInputTexture;
346 | }
347 | }
348 |
349 | - (void)setInputSize:(CGSize)newSize;
350 | {
351 | inputTextureSize = newSize;
352 | }
353 |
354 | - (CGSize)maximumOutputSize;
355 | {
356 | if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
357 | {
358 | for (id<GPUImageInput> currentTarget in targets)
359 | {
360 | if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
361 | {
362 | cachedMaximumOutputSize = [currentTarget maximumOutputSize];
363 | }
364 | }
365 | }
366 |
367 | return cachedMaximumOutputSize;
368 | }
369 |
370 | #pragma mark -
371 | #pragma mark Accessors
372 |
373 | @end
374 |
--------------------------------------------------------------------------------
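
For one-off stills, -imageByFilteringImage: above wraps the whole pipeline: it builds a temporary GPUImagePicture, renders a single frame through the filter, and reads the pixels back via -imageFromCurrentlyProcessedOutput. From client code it reduces to a sketch like this (the asset name and kMyFragmentShader are placeholders):

    UIImage *inputImage = [UIImage imageNamed:@"sample"];
    GPUImageFilter *filter =
        [[GPUImageFilter alloc] initWithFragmentShaderFromString:kMyFragmentShader];
    UIImage *filteredImage = [filter imageByFilteringImage:inputImage];
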
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageMovie.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <AVFoundation/AVFoundation.h>
3 | #import <CoreMedia/CoreMedia.h>
4 | #import "GPUImageOpenGLESContext.h"
5 | #import "GPUImageOutput.h"
6 |
7 | @interface GPUImageMovie : GPUImageOutput {
8 | CVPixelBufferRef _currentBuffer;
9 | }
10 |
11 | @property (readwrite, retain) NSURL *url;
12 |
13 | -(id)initWithURL:(NSURL *)url;
14 | -(void)startProcessing;
15 | -(void)endProcessing;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageMovie.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMovie.h"
2 |
3 | @implementation GPUImageMovie
4 |
5 | @synthesize url = _url;
6 |
7 | -(id)initWithURL:(NSURL *)url {
8 | if (!(self = [super init])) {
9 | return nil;
10 | }
11 |
12 | self.url = url;
13 |
14 | return self;
15 | }
16 |
17 | -(void)startProcessing {
18 | // AVURLAsset to read input movie (i.e. mov recorded to local storage)
19 | NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
20 | AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
21 |
22 | // Load the input asset tracks information
23 | [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
24 | NSError *error = nil;
25 | // Check status of "tracks", make sure they were loaded
26 | AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
27 | if (tracksStatus != AVKeyValueStatusLoaded) {
28 | // failed to load
29 | return;
30 | }
31 | /* Read video samples from input asset video track */
32 | AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:inputAsset error:&error];
33 |
34 | NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
35 | [outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (NSString*)kCVPixelBufferPixelFormatTypeKey];
36 | AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[inputAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
37 |
38 | // Assign the tracks to the reader and start to read
39 | [reader addOutput:readerVideoTrackOutput];
40 | if ([reader startReading] == NO) {
41 | // Handle error
42 | NSLog(@"Error reading");
43 | }
44 |
45 | while (reader.status == AVAssetReaderStatusReading) {
46 |
47 | CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
48 | if (sampleBufferRef) {
49 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
50 | _currentBuffer = pixelBuffer;
51 | [self performSelectorOnMainThread:@selector(processFrame) withObject:nil waitUntilDone:YES];
52 |
53 | CMSampleBufferInvalidate(sampleBufferRef);
54 | CFRelease(sampleBufferRef);
55 | }
56 | }
57 | }];
58 | }
59 |
60 | -(void)processFrame {
61 | // Upload to texture
62 | CVPixelBufferLockBaseAddress(_currentBuffer, 0);
63 | int bufferHeight = (int)CVPixelBufferGetHeight(_currentBuffer);
64 | int bufferWidth = (int)CVPixelBufferGetWidth(_currentBuffer);
65 |
66 | glBindTexture(GL_TEXTURE_2D, outputTexture);
67 | // Using BGRA extension to pull in video frame data directly
68 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(_currentBuffer));
69 |
70 | CGSize currentSize = CGSizeMake(bufferWidth, bufferHeight);
71 | for (id<GPUImageInput> currentTarget in targets)
72 | {
73 | [currentTarget setInputSize:currentSize];
74 | [currentTarget newFrameReady];
75 | }
76 | CVPixelBufferUnlockBaseAddress(_currentBuffer, 0);
77 | }
78 |
79 | -(void)endProcessing {
80 |
81 | }
82 |
83 | @end
84 |
--------------------------------------------------------------------------------
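
Putting GPUImageMovie to work is the same target-chaining pattern: point it at a local file, attach a filter, and call -startProcessing, which loads the asset's tracks asynchronously and then runs the AVAssetReader loop shown above. A sketch, with a placeholder file path and shader string:

    NSURL *movieURL = [NSURL fileURLWithPath:@"/path/to/recorded.mov"]; // placeholder
    GPUImageMovie *movieSource = [[GPUImageMovie alloc] initWithURL:movieURL];
    GPUImageFilter *filter =
        [[GPUImageFilter alloc] initWithFragmentShaderFromString:kMyFragmentShader];

    [movieSource addTarget:filter];
    [movieSource startProcessing]; // asynchronous; frames flow once "tracks" finishes loading
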
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageMovieWriter.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <AVFoundation/AVFoundation.h>
3 | #import "GPUImageOpenGLESContext.h"
4 |
5 | @interface GPUImageMovieWriter : NSObject <GPUImageInput>
6 | {
7 | CMVideoDimensions videoDimensions;
8 | CMVideoCodecType videoType;
9 |
10 | NSURL *movieURL;
11 | AVAssetWriter *assetWriter;
12 | // AVAssetWriterInput *assetWriterAudioIn;
13 | AVAssetWriterInput *assetWriterVideoInput;
14 | AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
15 | dispatch_queue_t movieWritingQueue;
16 |
17 | CGSize videoSize;
18 | NSString *fileType;
19 | }
20 |
21 | // Initialization and teardown
22 | - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
23 | - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;
24 |
25 | // Movie recording
26 | - (void)startRecording;
27 | - (void)startRecording:(BOOL)isDelay;
28 | - (void)finishRecording;
29 |
30 | - (NSTimeInterval)getRecordTime;
31 |
32 | @end
33 |
--------------------------------------------------------------------------------
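
A sketch of the recording flow this header exposes: size the writer to the output video, hang it off the tail filter of an existing chain, and bracket the session with -startRecording / -finishRecording. The output path is a placeholder, and `filter` stands in for an already-configured chain:

    NSString *moviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
    GPUImageMovieWriter *movieWriter =
        [[GPUImageMovieWriter alloc] initWithMovieURL:[NSURL fileURLWithPath:moviePath]
                                                 size:CGSizeMake(480.0, 640.0)];

    [filter addTarget:movieWriter]; // writer receives frames via the GPUImageInput protocol
    [movieWriter startRecording];
    // ... frames arrive through -newFrameReady while the chain runs ...
    NSLog(@"Recorded %.1f seconds", [movieWriter getRecordTime]);
    [movieWriter finishRecording];
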
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageMovieWriter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMovieWriter.h"
2 |
3 | #import "GPUImageOpenGLESContext.h"
4 | #import "GLProgram.h"
5 | #import "GPUImageFilter.h"
6 |
7 | NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
8 | (
9 | varying highp vec2 textureCoordinate;
10 |
11 | uniform sampler2D inputImageTexture;
12 |
13 | void main()
14 | {
15 | gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
16 | }
17 | );
18 |
19 |
20 | @interface GPUImageMovieWriter ()
21 | {
22 | GLuint movieFramebuffer, movieRenderbuffer;
23 |
24 | GLProgram *colorSwizzlingProgram;
25 | GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;
26 | GLint colorSwizzlingInputTextureUniform;
27 |
28 | GLuint inputTextureForMovieRendering;
29 |
30 | GLubyte *frameData;
31 |
32 | NSDate *startTime;
33 | }
34 |
35 | // Movie recording
36 | - (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
37 |
38 | // Frame rendering
39 | - (void)createDataFBO;
40 | - (void)destroyDataFBO;
41 | - (void)setFilterFBO;
42 | - (void)presentFramebuffer;
43 |
44 | - (void)renderAtInternalSize;
45 |
46 | @end
47 |
48 | @implementation GPUImageMovieWriter
49 |
50 | #pragma mark -
51 | #pragma mark Initialization and teardown
52 |
53 | - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize
54 | {
55 | return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];
56 | }
57 |
58 | - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings
59 | {
60 | if (!(self = [super init]))
61 | {
62 | return nil;
63 | }
64 |
65 | videoSize = newSize;
66 | movieURL = newMovieURL;
67 | fileType = newFileType;
68 |
69 | [GPUImageOpenGLESContext useImageProcessingContext];
70 | colorSwizzlingProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
71 |
72 | [colorSwizzlingProgram addAttribute:@"position"];
73 | [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"];
74 |
75 | if (![colorSwizzlingProgram link])
76 | {
77 | NSString *progLog = [colorSwizzlingProgram programLog];
78 | NSLog(@"Program link log: %@", progLog);
79 | NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];
80 | NSLog(@"Fragment shader compile log: %@", fragLog);
81 | NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];
82 | NSLog(@"Vertex shader compile log: %@", vertLog);
83 | colorSwizzlingProgram = nil;
84 | NSAssert(NO, @"Filter shader link failed");
85 | }
86 |
87 | colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"];
88 | colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"];
89 | colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"];
90 |
91 | [colorSwizzlingProgram use];
92 | glEnableVertexAttribArray(colorSwizzlingPositionAttribute);
93 | glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);
94 |
95 | [self initializeMovieWithOutputSettings:outputSettings];
96 |
97 | return self;
98 | }
99 |
100 | - (void)dealloc;
101 | {
102 | if (frameData != NULL)
103 | {
104 | free(frameData);
105 | }
106 | }
107 |
108 | #pragma mark -
109 | #pragma mark Movie recording
110 |
111 | - (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
112 | {
113 | frameData = (GLubyte *) malloc((int)videoSize.width * (int)videoSize.height * 4);
114 |
115 | // frameData = (GLubyte *) calloc(videoSize.width * videoSize.height * 4, sizeof(GLubyte));
116 |
117 | NSError *error = nil;
118 |
119 | assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
120 | // assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:AVFileTypeQuickTimeMovie error:&error];
121 | if (error != nil)
122 | {
123 | NSLog(@"Error: %@", error);
124 | }
125 |
126 | if (outputSettings == nil) {
127 | NSMutableDictionary * t_outputSettings = [[NSMutableDictionary alloc] init];
128 | [t_outputSettings setObject: AVVideoCodecH264 forKey: AVVideoCodecKey];
129 | [t_outputSettings setObject: [NSNumber numberWithInt: videoSize.width] forKey: AVVideoWidthKey];
130 | [t_outputSettings setObject: [NSNumber numberWithInt: videoSize.height] forKey: AVVideoHeightKey];
131 |
132 | outputSettings = t_outputSettings;
133 | }
134 |
135 | /* NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init];
136 | [compressionProperties setObject: [NSNumber numberWithInt: 1000000] forKey: AVVideoAverageBitRateKey];
137 | [compressionProperties setObject: [NSNumber numberWithInt: 16] forKey: AVVideoMaxKeyFrameIntervalKey];
138 | [compressionProperties setObject: AVVideoProfileLevelH264Main31 forKey: AVVideoProfileLevelKey];
139 |
140 | [outputSettings setObject: compressionProperties forKey: AVVideoCompressionPropertiesKey];*/
141 |
142 | assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
143 | assetWriterVideoInput.expectsMediaDataInRealTime = YES;
144 | //writerInput.expectsMediaDataInRealTime = NO;
145 |
146 | NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
147 | [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
148 | [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
149 | nil];
150 | // NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
151 | // nil];
152 |
153 | assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
154 |
155 | [assetWriter addInput:assetWriterVideoInput];
156 | }
157 |
158 | - (void)startRecording {
159 | [self startRecording:NO];
160 | }
161 | - (void)startRecording:(BOOL)isDelay
162 | {
163 | startTime = [NSDate date];
164 | [assetWriter startWriting];
165 | if (isDelay) {
166 | [assetWriter startSessionAtSourceTime:CMTimeMake(4, 120)]; // trims the brief black screen at the head of appended video segments
167 | } else {
168 | [assetWriter startSessionAtSourceTime:CMTimeMake(60, 120)];
169 | }
170 |
171 | }
172 |
173 | - (void)finishRecording;
174 | {
175 | if(assetWriter.status == AVAssetWriterStatusWriting) {
176 | [assetWriterVideoInput markAsFinished];
177 |
178 | // iOS 6+ SDK
179 | if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
180 | // Running iOS 6+
181 | [assetWriter finishWritingWithCompletionHandler:^{
182 | NSLog(@"~~~~finish write video!");
183 | // postNotifacation(FreeVideoRecordWriteVideoFinsh, nil, nil);
184 | }];
185 | }
186 | else {
187 | // Not running iOS 6
188 | #pragma clang diagnostic push
189 | #pragma clang diagnostic ignored "-Wdeprecated-declarations"
190 | [assetWriter finishWriting];
191 | // postNotifacation(FreeVideoRecordWriteVideoFinsh, nil, nil);
192 | #pragma clang diagnostic pop
193 | }
194 | } else {
195 | // postNotifacation(FreeVideoRecordWriteVideoFailed, nil, nil);
196 | }
197 | }
198 |
199 | - (NSTimeInterval)getRecordTime {
200 | return [[NSDate date] timeIntervalSinceDate:startTime];
201 | }
202 |
203 |
204 | #pragma mark -
205 | #pragma mark Frame rendering
206 |
207 | - (void)createDataFBO;
208 | {
209 | glActiveTexture(GL_TEXTURE1);
210 | glGenFramebuffers(1, &movieFramebuffer);
211 | glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
212 |
213 | glGenRenderbuffers(1, &movieRenderbuffer);
214 | glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
215 |
216 | glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);
217 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);
218 |
219 | GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
220 |
221 | NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
222 | }
223 |
224 | - (void)destroyDataFBO;
225 | {
226 | if (movieFramebuffer)
227 | {
228 | glDeleteFramebuffers(1, &movieFramebuffer);
229 | movieFramebuffer = 0;
230 | }
231 |
232 | if (movieRenderbuffer)
233 | {
234 | glDeleteRenderbuffers(1, &movieRenderbuffer);
235 | movieRenderbuffer = 0;
236 | }
237 | }
238 |
239 | - (void)setFilterFBO;
240 | {
241 | if (!movieFramebuffer)
242 | {
243 | [self createDataFBO];
244 | }
245 |
246 | glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
247 |
248 | glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);
249 | }
250 |
251 | - (void)presentFramebuffer;
252 | {
253 | glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
254 | [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] presentBufferForDisplay];
255 | }
256 |
257 | - (void)renderAtInternalSize;
258 | {
259 | [GPUImageOpenGLESContext useImageProcessingContext];
260 | [self setFilterFBO];
261 |
262 | [colorSwizzlingProgram use];
263 |
264 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
265 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
266 |
267 | // This needs to be flipped to write out to video correctly
268 | static const GLfloat squareVertices[] = {
269 | -1.0f, -1.0f,
270 | 1.0f, -1.0f,
271 | -1.0f, 1.0f,
272 | 1.0f, 1.0f,
273 | };
274 |
275 | static const GLfloat textureCoordinates[] = {
276 | 0.0f, 0.0f,
277 | 1.0f, 0.0f,
278 | 0.0f, 1.0f,
279 | 1.0f, 1.0f,
280 | };
281 |
282 | glActiveTexture(GL_TEXTURE4);
283 | glBindTexture(GL_TEXTURE_2D, inputTextureForMovieRendering);
284 | glUniform1i(colorSwizzlingInputTextureUniform, 4);
285 |
286 | glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
287 | glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
288 |
289 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
290 |
291 | [self presentFramebuffer];
292 | }
293 |
294 | #pragma mark -
295 | #pragma mark GPUImageInput protocol
296 |
297 | - (void)newFrameReady;
298 | {
299 | if (!assetWriterVideoInput.readyForMoreMediaData)
300 | {
301 | // NSLog(@"Had to drop a frame");
302 | return;
303 | }
304 |
305 | // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
306 | [GPUImageOpenGLESContext useImageProcessingContext];
307 | [self renderAtInternalSize];
308 |
309 | CVPixelBufferRef pixel_buffer = NULL;
310 |
311 | CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
312 | if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
313 | {
314 | return;
315 | // NSLog(@"Couldn't pull pixel buffer from pool");
316 | // glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, frameData);
317 | //
318 | // NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
319 | // [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
320 | // [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
321 | //
322 | // CFDictionaryRef optionsDictionary = (__bridge_retained CFDictionaryRef)options;
323 | // CVPixelBufferCreateWithBytes(kCFAllocatorDefault, (int)videoSize.width, (int)videoSize.height, kCVPixelFormatType_32BGRA, frameData, 4 * (int)videoSize.width, NULL, 0, optionsDictionary, &pixel_buffer);
324 | // CFRelease(optionsDictionary);
325 | // CVPixelBufferLockBaseAddress(pixel_buffer, 0);
326 | }
327 | else
328 | {
329 | CVPixelBufferLockBaseAddress(pixel_buffer, 0);
330 |
331 | // NSLog(@"Grabbing pixel buffer");
332 |
333 |
334 | GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
335 | glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
336 | }
337 |
338 | // May need to add a check here, because if two consecutive times with the same value are added to the movie, it aborts recording
339 | CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startTime],120);
340 |
341 | if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:currentTime])
342 | {
343 | NSLog(@"Problem appending pixel buffer at time: %lld", currentTime.value);
344 | }
345 | else
346 | {
347 | // NSLog(@"Recorded pixel buffer at time: %lld", currentTime.value);
348 | }
349 | CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
350 |
351 | CVPixelBufferRelease(pixel_buffer);
352 | }
353 |
354 | - (NSInteger)nextAvailableTextureIndex;
355 | {
356 | return 0;
357 | }
358 |
359 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
360 | {
361 | inputTextureForMovieRendering = newInputTexture;
362 | }
363 |
364 | - (void)setInputSize:(CGSize)newSize;
365 | {
366 | }
367 |
368 | - (CGSize)maximumOutputSize;
369 | {
370 | return CGSizeZero;
371 | }
372 |
373 |
374 | @end
375 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageOpenGLESContext.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <OpenGLES/EAGL.h>
3 | #import <OpenGLES/ES2/gl.h>
4 | #import <OpenGLES/ES2/glext.h>
5 | #import <QuartzCore/QuartzCore.h>
6 |
7 | @interface GPUImageOpenGLESContext : NSObject
8 | {
9 | EAGLContext *context;
10 | }
11 |
12 | @property(readonly) EAGLContext *context;
13 |
14 | + (GPUImageOpenGLESContext *)sharedImageProcessingOpenGLESContext;
15 | + (void)useImageProcessingContext;
16 | + (GLint)maximumTextureSizeForThisDevice;
17 | + (GLint)maximumTextureUnitsForThisDevice;
18 |
19 | - (void)presentBufferForDisplay;
20 |
21 | @end
22 |
23 | @protocol GPUImageInput
24 | - (void)newFrameReady;
25 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
26 | - (NSInteger)nextAvailableTextureIndex;
27 | - (void)setInputSize:(CGSize)newSize;
28 | - (CGSize)maximumOutputSize;
29 | @end
--------------------------------------------------------------------------------
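
Any object adopting the GPUImageInput protocol above can be hung off a GPUImageOutput via -addTarget:. A minimal illustrative sink that only tracks the incoming frame size, to show the protocol's full surface (the class and its bodies are hypothetical):

    #import <UIKit/UIKit.h>
    #import "GPUImageOpenGLESContext.h"

    @interface FrameSizeLogger : NSObject <GPUImageInput>
    @end

    @implementation FrameSizeLogger
    {
        CGSize lastInputSize;
    }

    - (void)newFrameReady
    {
        NSLog(@"Frame ready at %@", NSStringFromCGSize(lastInputSize));
    }

    - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex
    {
        // A real target would bind this texture when rendering.
    }

    - (NSInteger)nextAvailableTextureIndex
    {
        return 0;
    }

    - (void)setInputSize:(CGSize)newSize
    {
        lastInputSize = newSize;
    }

    - (CGSize)maximumOutputSize
    {
        return CGSizeZero; // imposes no size constraint on upstream filters
    }
    @end
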
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageOpenGLESContext.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageOpenGLESContext.h"
2 | #import <OpenGLES/EAGLDrawable.h>
3 |
4 | @implementation GPUImageOpenGLESContext
5 |
6 | // Based on Colin Wheeler's example here: http://cocoasamurai.blogspot.com/2011/04/singletons-your-doing-them-wrong.html
7 | + (GPUImageOpenGLESContext *)sharedImageProcessingOpenGLESContext;
8 | {
9 | static dispatch_once_t pred;
10 | static GPUImageOpenGLESContext *sharedImageProcessingOpenGLESContext = nil;
11 |
12 | dispatch_once(&pred, ^{
13 | sharedImageProcessingOpenGLESContext = [[GPUImageOpenGLESContext alloc] init];
14 | });
15 | return sharedImageProcessingOpenGLESContext;
16 | }
17 |
18 | + (void)useImageProcessingContext;
19 | {
20 | [EAGLContext setCurrentContext:[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context]];
21 | }
22 |
23 | + (GLint)maximumTextureSizeForThisDevice;
24 | {
25 | GLint maxTextureSize;
26 | glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
27 | return maxTextureSize;
28 | }
29 |
30 | + (GLint)maximumTextureUnitsForThisDevice;
31 | {
32 | GLint maxTextureUnits;
33 | glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits);
34 | return maxTextureUnits;
35 | }
36 |
37 | - (void)presentBufferForDisplay;
38 | {
39 | [context presentRenderbuffer:GL_RENDERBUFFER];
40 | }
41 |
42 | #pragma mark -
43 | #pragma mark Accessors
44 |
45 | @synthesize context;
46 |
47 | - (EAGLContext *)context;
48 | {
49 | if (context == nil)
50 | {
51 | context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
52 | NSAssert(context != nil, @"Unable to create an OpenGL ES 2.0 context. The GPUImage framework requires OpenGL ES 2.0 support to work.");
53 | [EAGLContext setCurrentContext:context];
54 |
55 | // Set up a few global settings for the image processing pipeline
56 | glEnable(GL_TEXTURE_2D);
57 | glDisable(GL_DEPTH_TEST);
58 | }
59 |
60 | return context;
61 | }
62 |
63 |
64 | @end
65 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageOutput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOpenGLESContext.h"
2 | #import "GLProgram.h"
3 |
4 | @interface GPUImageOutput : NSObject
5 | {
6 | NSMutableArray *targets, *targetTextureIndices;
7 |
8 | GLuint outputTexture;
9 | CGSize inputTextureSize, cachedMaximumOutputSize;
10 | }
11 |
12 | @property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
13 |
14 | // Managing targets
15 | - (void)addTarget:(id<GPUImageInput>)newTarget;
16 | - (void)removeTarget:(id<GPUImageInput>)targetToRemove;
17 | - (void)removeAllTargets;
18 |
19 | // Manage the output texture
20 | - (void)initializeOutputTexture;
21 | - (void)deleteOutputTexture;
22 |
23 | @end
24 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageOutput.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @implementation GPUImageOutput
4 |
5 | #pragma mark -
6 | #pragma mark Initialization and teardown
7 |
8 | - (id)init;
9 | {
10 | if (!(self = [super init]))
11 | {
12 | return nil;
13 | }
14 |
15 | targets = [[NSMutableArray alloc] init];
16 | targetTextureIndices = [[NSMutableArray alloc] init];
17 |
18 | [self initializeOutputTexture];
19 |
20 | return self;
21 | }
22 |
23 | - (void)dealloc
24 | {
25 | [self removeAllTargets];
26 | [self deleteOutputTexture];
27 | }
28 |
29 | #pragma mark -
30 | #pragma mark Managing targets
31 |
32 | - (void)addTarget:(id)newTarget;
33 | {
34 | cachedMaximumOutputSize = CGSizeZero;
35 | NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];
36 | [newTarget setInputTexture:outputTexture atIndex:nextAvailableTextureIndex];
37 | [targets addObject:newTarget];
38 | [targetTextureIndices addObject:[NSNumber numberWithInteger:nextAvailableTextureIndex]];
39 | }
40 |
41 | - (void)removeTarget:(id)targetToRemove;
42 | {
43 | cachedMaximumOutputSize = CGSizeZero;
44 | [targetToRemove setInputSize:CGSizeZero];
45 |
46 | NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
47 | if (indexOfObject != NSNotFound) {
48 | [targetToRemove setInputTexture:0 atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
49 | [targetTextureIndices removeObjectAtIndex:indexOfObject];
50 | [targets removeObject:targetToRemove];
51 | }
52 | }
53 |
54 | - (void)removeAllTargets;
55 | {
56 | cachedMaximumOutputSize = CGSizeZero;
57 | for (id targetToRemove in targets)
58 | {
59 | [targetToRemove setInputSize:CGSizeZero];
60 |
61 | NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
62 | [targetToRemove setInputTexture:0 atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
63 | }
64 | [targets removeAllObjects];
65 | [targetTextureIndices removeAllObjects];
66 | }
67 |
68 | #pragma mark -
69 | #pragma mark Manage the output texture
70 |
71 | - (void)initializeOutputTexture;
72 | {
73 | [GPUImageOpenGLESContext useImageProcessingContext];
74 |
75 | glActiveTexture(GL_TEXTURE0);
76 | glGenTextures(1, &outputTexture);
77 | glBindTexture(GL_TEXTURE_2D, outputTexture);
78 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
79 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
80 | // This is necessary for non-power-of-two textures
81 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
82 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
83 | }
84 |
85 | - (void)deleteOutputTexture;
86 | {
87 | if (outputTexture)
88 | {
89 | glDeleteTextures(1, &outputTexture);
90 | outputTexture = 0;
91 | }
92 | }
93 |
94 | #pragma mark -
95 | #pragma mark Accessors
96 |
97 | @synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
98 |
99 | @end
100 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImagePicture.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h> // assumed import; the original angle-bracket header was lost in extraction (UIImage requires UIKit)
2 | #import "GPUImageOutput.h"
3 |
4 |
5 | @interface GPUImagePicture : GPUImageOutput
6 | {
7 | UIImage *imageSource;
8 | }
9 |
10 | // Initialization and teardown
11 | - (id)initWithImage:(UIImage *)newImageSource;
12 | - (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
13 |
14 | // Image rendering
15 | - (void)processImage;
16 |
17 | @end
18 |
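A hedged sketch of the still-image path declared above: wrap a UIImage, attach a target, then call processImage to push exactly one frame through the chain (the asset name is hypothetical):

    UIImage *sourceImage = [UIImage imageNamed:@"sample"]; // hypothetical asset
    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:sourceImage smoothlyScaleOutput:YES];
    GPUImageRotationFilter *filter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageFlipVertical];

    [picture addTarget:filter];
    [picture processImage]; // renders the image into the chain once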
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImagePicture.m:
--------------------------------------------------------------------------------
1 | #import "GPUImagePicture.h"
2 |
3 | @implementation GPUImagePicture
4 |
5 | #pragma mark -
6 | #pragma mark Initialization and teardown
7 |
8 | - (id)initWithImage:(UIImage *)newImageSource;
9 | {
10 | if (!(self = [self initWithImage:newImageSource smoothlyScaleOutput:NO]))
11 | {
12 | return nil;
13 | }
14 |
15 | return self;
16 | }
17 |
18 | - (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
19 | {
20 | if (!(self = [super init]))
21 | {
22 | return nil;
23 | }
24 |
25 | self.shouldSmoothlyScaleOutput = smoothlyScaleOutput;
26 | imageSource = newImageSource;
27 |
28 | [GPUImageOpenGLESContext useImageProcessingContext];
29 |
30 | CGSize pointSizeOfImage = [imageSource size];
31 | CGFloat scaleOfImage = [imageSource scale];
32 | CGSize pixelSizeOfImage = CGSizeMake(scaleOfImage * pointSizeOfImage.width, scaleOfImage * pointSizeOfImage.height);
33 |
34 | if (self.shouldSmoothlyScaleOutput)
35 | {
36 | // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill
37 | CGFloat powerClosestToWidth = ceil(log2(pixelSizeOfImage.width));
38 | CGFloat powerClosestToHeight = ceil(log2(pixelSizeOfImage.height));
39 |
40 | pixelSizeOfImage = CGSizeMake(pow(2.0, powerClosestToWidth), pow(2.0, powerClosestToHeight));
41 | }
42 | GLubyte *imageData = (GLubyte *) malloc((int)pixelSizeOfImage.width * (int)pixelSizeOfImage.height * 4);
43 | CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
44 |
45 | CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)pixelSizeOfImage.width, (int)pixelSizeOfImage.height, 8, (int)pixelSizeOfImage.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
46 | // CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)pixelSizeOfImage.width, (int)pixelSizeOfImage.height, 8, (int)pixelSizeOfImage.width * 4, genericRGBColorspace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
47 | CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeOfImage.width, pixelSizeOfImage.height), [newImageSource CGImage]);
48 | CGContextRelease(imageContext);
49 | CGColorSpaceRelease(genericRGBColorspace);
50 |
51 | glBindTexture(GL_TEXTURE_2D, outputTexture);
52 | // Using BGRA extension to pull in video frame data directly
53 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeOfImage.width, (int)pixelSizeOfImage.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, imageData);
54 |
55 | if (self.shouldSmoothlyScaleOutput)
56 | {
57 | glGenerateMipmap(GL_TEXTURE_2D);
58 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
59 | }
60 |
61 | free(imageData);
62 |
63 | return self;
64 | }
65 |
66 |
67 |
68 | #pragma mark -
69 | #pragma mark Image rendering
70 |
71 | - (void)processImage;
72 | {
73 | CGSize pointSizeOfImage = [imageSource size];
74 | CGFloat scaleOfImage = [imageSource scale];
75 | CGSize pixelSizeOfImage = CGSizeMake(scaleOfImage * pointSizeOfImage.width, scaleOfImage * pointSizeOfImage.height);
76 |
77 | for (id currentTarget in targets)
78 | {
79 | [currentTarget setInputSize:pixelSizeOfImage];
80 | [currentTarget newFrameReady];
81 | }
82 | }
83 |
84 | @end
85 |
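To make the power-of-two branch above concrete: a 640x480 image at scale 1.0 gives ceil(log2(640)) = 10 and ceil(log2(480)) = 9, so the texture is allocated at 1024x512 and the image is stretched to fill it before mipmaps are generated:

    // log2(640) ≈ 9.32 -> ceil -> 10 -> 2^10 = 1024
    // log2(480) ≈ 8.91 -> ceil -> 9  -> 2^9  = 512
    CGSize pixelSize = CGSizeMake(640.0, 480.0);
    CGSize potSize = CGSizeMake(pow(2.0, ceil(log2(pixelSize.width))),
                                pow(2.0, ceil(log2(pixelSize.height)))); // {1024, 512}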
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageRawData.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h> // assumed import; the original angle-bracket header was lost in extraction (supplies CGPoint/CGSize via CoreGraphics)
2 | #import "GPUImageOpenGLESContext.h"
3 |
4 | struct GPUByteColorVector {
5 | GLubyte red;
6 | GLubyte green;
7 | GLubyte blue;
8 | GLubyte alpha;
9 | };
10 | typedef struct GPUByteColorVector GPUByteColorVector;
11 |
12 | @protocol GPUImageRawDataProcessor;
13 |
14 | @interface GPUImageRawData : NSObject <GPUImageInput>
15 |
16 | @property(readwrite, unsafe_unretained, nonatomic) id<GPUImageRawDataProcessor> delegate;
17 | @property(readonly) GLubyte *rawBytesForImage;
18 |
19 | // Initialization and teardown
20 | - (id)initWithImageSize:(CGSize)newImageSize;
21 |
22 | // Data access
23 | - (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
24 |
25 | @end
26 |
27 | @protocol GPUImageRawDataProcessor
28 | - (void)newImageFrameAvailableFromDataSource:(GPUImageRawData *)rawDataSource;
29 | @end
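A hedged sketch of the delegate flow declared above; the ColorSampler class is hypothetical and only illustrates the callback plus colorAtLocation::

    // Hypothetical delegate that samples one pixel of each new frame.
    @interface ColorSampler : NSObject <GPUImageRawDataProcessor>
    @end

    @implementation ColorSampler
    - (void)newImageFrameAvailableFromDataSource:(GPUImageRawData *)rawDataSource;
    {
        GPUByteColorVector color = [rawDataSource colorAtLocation:CGPointMake(160.0, 120.0)];
        NSLog(@"Pixel at (160, 120): r=%d g=%d b=%d a=%d", color.red, color.green, color.blue, color.alpha);
    }
    @end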
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageRawData.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageRawData.h"
2 |
3 | #import "GPUImageOpenGLESContext.h"
4 | #import "GLProgram.h"
5 | #import "GPUImageFilter.h"
6 |
7 | NSString *const kGPUImageDataFragmentShaderString = SHADER_STRING
8 | (
9 | varying highp vec2 textureCoordinate;
10 |
11 | uniform sampler2D inputImageTexture;
12 |
13 | void main()
14 | {
15 | gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
16 | }
17 | );
18 |
19 |
20 | @interface GPUImageRawData ()
21 | {
22 | CGSize imageSize;
23 | BOOL hasReadFromTheCurrentFrame;
24 |
25 | GLuint dataFramebuffer, dataRenderbuffer;
26 |
27 | GLuint inputTextureForDisplay;
28 |
29 | GLProgram *dataProgram;
30 | GLint dataPositionAttribute, dataTextureCoordinateAttribute;
31 | GLint dataInputTextureUniform;
32 |
33 | GLubyte *_rawBytesForImage;
34 | }
35 |
36 | // Frame rendering
37 | - (void)createDataFBO;
38 | - (void)destroyDataFBO;
39 | - (void)setFilterFBO;
40 | - (void)presentFramebuffer;
41 |
42 | - (void)renderAtInternalSize;
43 |
44 | @end
45 |
46 | @implementation GPUImageRawData
47 |
48 | #pragma mark -
49 | #pragma mark Initialization and teardown
50 |
51 | - (id)initWithImageSize:(CGSize)newImageSize;
52 | {
53 | if (!(self = [super init]))
54 | {
55 | return nil;
56 | }
57 |
58 | imageSize = newImageSize;
59 | hasReadFromTheCurrentFrame = NO;
60 | _rawBytesForImage = NULL;
61 |
62 | [GPUImageOpenGLESContext useImageProcessingContext];
63 | dataProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageDataFragmentShaderString];
64 |
65 | [dataProgram addAttribute:@"position"];
66 | [dataProgram addAttribute:@"inputTextureCoordinate"];
67 |
68 | if (![dataProgram link])
69 | {
70 | NSString *progLog = [dataProgram programLog];
71 | NSLog(@"Program link log: %@", progLog);
72 | NSString *fragLog = [dataProgram fragmentShaderLog];
73 | NSLog(@"Fragment shader compile log: %@", fragLog);
74 | NSString *vertLog = [dataProgram vertexShaderLog];
75 | NSLog(@"Vertex shader compile log: %@", vertLog);
76 | dataProgram = nil;
77 | NSAssert(NO, @"Filter shader link failed");
78 | }
79 |
80 | dataPositionAttribute = [dataProgram attributeIndex:@"position"];
81 | dataTextureCoordinateAttribute = [dataProgram attributeIndex:@"inputTextureCoordinate"];
82 | dataInputTextureUniform = [dataProgram uniformIndex:@"inputImageTexture"];
83 |
84 | [dataProgram use];
85 | glEnableVertexAttribArray(dataPositionAttribute);
86 | glEnableVertexAttribArray(dataTextureCoordinateAttribute);
87 |
88 | return self;
89 | }
90 |
91 | - (void)dealloc
92 | {
93 | [self destroyDataFBO]; // also release the FBO and renderbuffer created lazily by -setFilterFBO
94 | if (_rawBytesForImage != NULL)
95 | {
96 | free(_rawBytesForImage);
97 | _rawBytesForImage = NULL;
98 | }
99 | }
99 |
100 | #pragma mark -
101 | #pragma mark Frame rendering
102 |
103 | - (void)createDataFBO;
104 | {
105 | glActiveTexture(GL_TEXTURE1);
106 | glGenFramebuffers(1, &dataFramebuffer);
107 | glBindFramebuffer(GL_FRAMEBUFFER, dataFramebuffer);
108 |
109 | glGenRenderbuffers(1, &dataRenderbuffer);
110 | glBindRenderbuffer(GL_RENDERBUFFER, dataRenderbuffer);
111 |
112 | glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)imageSize.width, (int)imageSize.height);
113 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, dataRenderbuffer);
114 |
115 | GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
116 |
117 | NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
118 | }
119 |
120 | - (void)destroyDataFBO;
121 | {
122 | if (dataFramebuffer)
123 | {
124 | glDeleteFramebuffers(1, &dataFramebuffer);
125 | dataFramebuffer = 0;
126 | }
127 |
128 | if (dataRenderbuffer)
129 | {
130 | glDeleteRenderbuffers(1, &dataRenderbuffer);
131 | dataRenderbuffer = 0;
132 | }
133 | }
134 |
135 | - (void)setFilterFBO;
136 | {
137 | if (!dataFramebuffer)
138 | {
139 | [self createDataFBO];
140 | }
141 |
142 | glBindFramebuffer(GL_FRAMEBUFFER, dataFramebuffer);
143 |
144 | glViewport(0, 0, (int)imageSize.width, (int)imageSize.height);
145 | }
146 |
147 | - (void)presentFramebuffer;
148 | {
149 | glBindRenderbuffer(GL_RENDERBUFFER, dataRenderbuffer);
150 | [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] presentBufferForDisplay];
151 | }
152 |
153 | #pragma mark -
154 | #pragma mark Data access
155 |
156 | - (void)renderAtInternalSize;
157 | {
158 | [GPUImageOpenGLESContext useImageProcessingContext];
159 | [self setFilterFBO];
160 |
161 | [dataProgram use];
162 |
163 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
164 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
165 |
166 | static const GLfloat squareVertices[] = {
167 | -1.0f, -1.0f,
168 | 1.0f, -1.0f,
169 | -1.0f, 1.0f,
170 | 1.0f, 1.0f,
171 | };
172 |
173 | static const GLfloat textureCoordinates[] = {
174 | 0.0f, 1.0f,
175 | 1.0f, 1.0f,
176 | 0.0f, 0.0f,
177 | 1.0f, 0.0f,
178 | };
179 |
180 | glActiveTexture(GL_TEXTURE4);
181 | glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay);
182 | glUniform1i(dataInputTextureUniform, 4);
183 |
184 | glVertexAttribPointer(dataPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
185 | glVertexAttribPointer(dataTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
186 |
187 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
188 |
189 | [self presentFramebuffer];
190 | }
191 |
192 | - (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
193 | {
194 | GPUByteColorVector *imageColorBytes = (GPUByteColorVector *)self.rawBytesForImage;
195 | // NSLog(@"Row start");
196 | // for (unsigned int currentXPosition = 0; currentXPosition < (imageSize.width * 2.0); currentXPosition++)
197 | // {
198 | // GPUByteColorVector byteAtPosition = imageColorBytes[currentXPosition];
199 | // NSLog(@"%d - %d, %d, %d", currentXPosition, byteAtPosition.red, byteAtPosition.green, byteAtPosition.blue);
200 | // }
201 | // NSLog(@"Row end");
202 |
203 | // GPUByteColorVector byteAtOne = imageColorBytes[1];
204 | // GPUByteColorVector byteAtWidth = imageColorBytes[(int)imageSize.width - 3];
205 | // GPUByteColorVector byteAtHeight = imageColorBytes[(int)(imageSize.height - 1) * (int)imageSize.width];
206 | // NSLog(@"Byte 1: %d, %d, %d, byte 2: %d, %d, %d, byte 3: %d, %d, %d", byteAtOne.red, byteAtOne.green, byteAtOne.blue, byteAtWidth.red, byteAtWidth.green, byteAtWidth.blue, byteAtHeight.red, byteAtHeight.green, byteAtHeight.blue);
207 |
208 | CGPoint locationToPickFrom = CGPointZero;
209 | locationToPickFrom.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0));
210 | locationToPickFrom.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0));
211 |
212 | return imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];
213 | }
214 |
215 | #pragma mark -
216 | #pragma mark GPUImageInput protocol
217 |
218 | - (void)newFrameReady;
219 | {
220 | hasReadFromTheCurrentFrame = NO;
221 |
222 | [self.delegate newImageFrameAvailableFromDataSource:self];
223 | }
224 |
225 | - (NSInteger)nextAvailableTextureIndex;
226 | {
227 | return 0;
228 | }
229 |
230 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
231 | {
232 | inputTextureForDisplay = newInputTexture;
233 | }
234 |
235 | - (void)setInputSize:(CGSize)newSize;
236 | {
237 | }
238 |
239 | - (CGSize)maximumOutputSize;
240 | {
241 | return imageSize;
242 | }
243 |
244 | #pragma mark -
245 | #pragma mark Accessors
246 |
247 | @synthesize rawBytesForImage = _rawBytesForImage;
248 | @synthesize delegate = _delegate;
249 |
250 | - (GLubyte *)rawBytesForImage;
251 | {
252 | if (_rawBytesForImage == NULL)
253 | {
254 | _rawBytesForImage = (GLubyte *) calloc(imageSize.width * imageSize.height * 4, sizeof(GLubyte));
255 | hasReadFromTheCurrentFrame = NO;
256 | }
257 |
258 | if (hasReadFromTheCurrentFrame)
259 | {
260 | return _rawBytesForImage;
261 | }
262 | else
263 | {
264 | [GPUImageOpenGLESContext useImageProcessingContext];
265 | [self renderAtInternalSize];
266 | glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage);
267 |
268 | return _rawBytesForImage;
269 | }
270 |
271 | }
272 |
273 | @end
274 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageRotationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | //typedef enum { kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal} GPUImageRotationMode;
4 | typedef enum {kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180} GPUImageRotationMode;
5 |
6 | @interface GPUImageRotationFilter : GPUImageFilter
7 | {
8 | GPUImageRotationMode rotationMode;
9 | }
10 |
11 | // Initialization and teardown
12 | - (id)initWithRotation:(GPUImageRotationMode)newRotationMode;
13 |
14 | - (void)setRotation:(GPUImageRotationMode)newRotationMode;
15 | @end
16 |
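A short sketch of wiring this filter between a source and a display; camera and previewView stand in for objects created elsewhere with the other classes in this kit:

    GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
    [camera addTarget:rotationFilter];      // camera: a GPUImageVideoCamera
    [rotationFilter addTarget:previewView]; // previewView: a GPUImageView

    // The mode can be changed later, e.g. after switching to the front camera:
    [rotationFilter setRotation:kGPUImageRotateRightFlipVertical];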
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageRotationFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageRotationFilter.h"
2 |
3 | NSString *const kGPUImageRotationFragmentShaderString = SHADER_STRING
4 | (
5 | varying highp vec2 textureCoordinate;
6 |
7 | uniform sampler2D inputImageTexture;
8 |
9 | void main()
10 | {
11 | gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
12 | }
13 | );
14 |
15 | @implementation GPUImageRotationFilter
16 |
17 | #pragma mark -
18 | #pragma mark Initialization and teardown
19 |
20 | - (id)initWithRotation:(GPUImageRotationMode)newRotationMode;
21 | {
22 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageRotationFragmentShaderString]))
23 | {
24 | return nil;
25 | }
26 |
27 | rotationMode = newRotationMode;
28 |
29 | return self;
30 | }
31 |
32 | - (void)setRotation:(GPUImageRotationMode)newRotationMode;
33 | {
34 | rotationMode = newRotationMode;
35 | }
36 |
37 | #pragma mark -
38 | #pragma mark GPUImageInput
39 |
40 | - (void)setInputSize:(CGSize)newSize;
41 | {
42 | // if ( (rotationMode == kGPUImageRotateLeft) || (rotationMode == kGPUImageRotateRight) )
43 | // {
44 | // inputTextureSize.width = newSize.height;
45 | // inputTextureSize.height = newSize.width;
46 | // }
47 | // else
48 | // {
49 | inputTextureSize = newSize;
50 | // }
51 | }
52 |
53 | - (void)newFrameReady;
54 | {
55 | static const GLfloat rotationSquareVertices[] = {
56 | -1.0f, -1.0f,
57 | 1.0f, -1.0f,
58 | -1.0f, 1.0f,
59 | 1.0f, 1.0f,
60 | };
61 |
62 | static const GLfloat rotateLeftTextureCoordinates[] = {
63 | 1.0f, 0.0f,
64 | 1.0f, 1.0f,
65 | 0.0f, 0.0f,
66 | 0.0f, 1.0f,
67 | };
68 |
69 | static const GLfloat rotateRightTextureCoordinates[] = {
70 | 0.0f, 1.0f,
71 | 0.0f, 0.0f,
72 | 1.0f, 1.0f,
73 | 1.0f, 0.0f,
74 | };
75 |
76 | static const GLfloat verticalFlipTextureCoordinates[] = {
77 | 0.0f, 1.0f,
78 | 1.0f, 1.0f,
79 | 0.0f, 0.0f,
80 | 1.0f, 0.0f,
81 | };
82 |
83 | static const GLfloat horizontalFlipTextureCoordinates[] = {
84 | 1.0f, 0.0f,
85 | 0.0f, 0.0f,
86 | 1.0f, 1.0f,
87 | 0.0f, 1.0f,
88 | };
89 | static const GLfloat noRotationTextureCoordinates[] = {0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f}; // identity coordinates for the default case (kGPUImageNoRotation and the combined modes, which have no dedicated coordinate sets here)
90 | switch (rotationMode)
91 | {
92 | case kGPUImageRotateLeft: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateLeftTextureCoordinates]; break;
93 | case kGPUImageRotateRight: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateRightTextureCoordinates]; break;
94 | case kGPUImageFlipHorizonal: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:horizontalFlipTextureCoordinates]; break;
95 | case kGPUImageFlipVertical: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:verticalFlipTextureCoordinates]; break;
96 | default: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:noRotationTextureCoordinates]; break;
97 | }
98 |
99 | }
100 |
101 | @end
102 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageVideoCamera.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h> // assumed imports on these three lines; the original angle-bracket headers were lost in extraction
2 | #import <AVFoundation/AVFoundation.h>
3 | #import <CoreMedia/CoreMedia.h>
4 | #import "GPUImageOpenGLESContext.h"
5 | #import "GPUImageOutput.h"
6 |
7 | // From the iOS 5.0 release notes:
8 | // "In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight."
9 | // A rotation step is therefore still needed to orient the frames from each camera correctly
10 |
11 | @interface GPUImageVideoCamera : GPUImageOutput
12 | {
13 | AVCaptureSession *captureSession;
14 | CVOpenGLESTextureCacheRef coreVideoTextureCache;
15 |
16 | NSUInteger numberOfFramesCaptured;
17 | CGFloat totalFrameTimeDuringCapture;
18 | BOOL runBenchmark;
19 | }
20 |
21 | @property(readonly) AVCaptureSession *captureSession;
22 | @property(readwrite, nonatomic) BOOL runBenchmark;
23 |
24 | // Initialization and teardown
25 | - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
26 |
27 | // Manage fast texture upload
28 | + (BOOL)supportsFastTextureUpload;
29 |
30 | // Manage the camera video stream
31 | - (void)startCameraCapture;
32 | - (void)stopCameraCapture;
33 |
34 | // Benchmarking
35 | - (CGFloat)averageFrameDurationDuringCapture;
36 |
37 | - (void)rotateCamera;
38 | - (AVCaptureDevicePosition)cameraPosition;
39 |
40 | @end
41 |
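A hedged sketch of the capture lifecycle declared above; someFilter stands in for any GPUImageInput-conforming target:

    GPUImageVideoCamera *videoCamera =
        [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                            cameraPosition:AVCaptureDevicePositionBack];
    [videoCamera addTarget:someFilter];
    [videoCamera startCameraCapture];
    // ... later, e.g. when the preview disappears:
    [videoCamera stopCameraCapture];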
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageVideoCamera.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageVideoCamera.h"
2 |
3 | #pragma mark -
4 | #pragma mark Private methods and instance variables
5 |
6 | @interface GPUImageVideoCamera ()
7 | {
8 | AVCaptureDeviceInput *videoInput;
9 | AVCaptureVideoDataOutput *videoOutput;
10 | dispatch_semaphore_t frameRenderingSemaphore;
11 | }
12 |
13 | @end
14 |
15 | @implementation GPUImageVideoCamera
16 |
17 | #pragma mark -
18 | #pragma mark Initialization and teardown
19 |
20 | - (id)init;
21 | {
22 | if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))
23 | {
24 | return nil;
25 | }
26 |
27 | return self;
28 | }
29 |
30 | - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
31 | {
32 | if (!(self = [super init]))
33 | {
34 | return nil;
35 | }
36 |
37 | runBenchmark = NO;
38 |
39 | if ([GPUImageVideoCamera supportsFastTextureUpload])
40 | {
41 | [GPUImageOpenGLESContext useImageProcessingContext];
42 | CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge CVEAGLContext)((__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context]), NULL, &coreVideoTextureCache);
43 | if (err)
44 | {
45 | NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
46 | }
47 |
48 | // Need to remove the initially created texture
49 | [self deleteOutputTexture];
50 | }
51 |
52 | // Grab the back-facing camera
53 | AVCaptureDevice *backFacingCamera = nil;
54 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
55 | for (AVCaptureDevice *device in devices)
56 | {
57 | if ([device position] == cameraPosition)
58 | {
59 | backFacingCamera = device;
60 | }
61 | }
62 |
63 | // Create the capture session
64 | captureSession = [[AVCaptureSession alloc] init];
65 |
66 | [captureSession beginConfiguration];
67 |
68 | // Add the video input
69 | NSError *error = nil;
70 | videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];
71 | if ([captureSession canAddInput:videoInput])
72 | {
73 | [captureSession addInput:videoInput];
74 | }
75 |
76 | // Add the video frame output
77 | videoOutput = [[AVCaptureVideoDataOutput alloc] init];
78 | [videoOutput setAlwaysDiscardsLateVideoFrames:YES];
79 |
80 | [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
81 |
82 | // dispatch_queue_t videoQueue = dispatch_queue_create("com.sunsetlakesoftware.colortracking.videoqueue", NULL);
83 | // [videoOutput setSampleBufferDelegate:self queue:videoQueue];
84 |
85 | [videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
86 |
87 | if ([captureSession canAddOutput:videoOutput])
88 | {
89 | [captureSession addOutput:videoOutput];
90 | }
91 | else
92 | {
93 | NSLog(@"Couldn't add video output");
94 | }
95 |
96 | [captureSession setSessionPreset:sessionPreset];
97 | [captureSession commitConfiguration];
98 |
99 | // inputTextureSize
100 | frameRenderingSemaphore = dispatch_semaphore_create(1);
101 |
102 | return self;
103 | }
104 |
105 | - (void)dealloc
106 | {
107 | [self stopCameraCapture];
108 | // [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
109 |
110 | [captureSession removeInput:videoInput];
111 | [captureSession removeOutput:videoOutput];
112 |
113 | if ([GPUImageVideoCamera supportsFastTextureUpload])
114 | {
115 | CFRelease(coreVideoTextureCache);
116 | }
117 |
118 | #if !OS_OBJECT_USE_OBJC
119 | if (frameRenderingSemaphore != NULL)
120 | {
121 | dispatch_release(frameRenderingSemaphore);
122 | }
123 | #endif
124 | }
125 |
126 | #pragma mark -
127 | #pragma mark Manage fast texture upload
128 |
129 | + (BOOL)supportsFastTextureUpload;
130 | {
131 | return (CVOpenGLESTextureCacheCreate != NULL);
132 | }
133 |
134 | #pragma mark -
135 | #pragma mark Manage the camera video stream
136 |
137 | - (void)startCameraCapture;
138 | {
139 | if (![captureSession isRunning])
140 | {
141 | [captureSession startRunning];
142 | }
143 | }
144 |
145 | - (void)stopCameraCapture;
146 | {
147 | if ([captureSession isRunning])
148 | {
149 | [captureSession stopRunning];
150 | }
151 | }
152 |
153 | #pragma mark -
154 | #pragma mark Benchmarking
155 |
156 | - (CGFloat)averageFrameDurationDuringCapture;
157 | {
158 | // NSLog(@"Number of frames: %d", numberOfFramesCaptured);
159 | return (totalFrameTimeDuringCapture / (CGFloat)numberOfFramesCaptured) * 1000.0;
160 | }
161 |
162 | #pragma mark -
163 | #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
164 |
165 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
166 | {
167 | CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
168 | CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
169 | int bufferWidth = (int)CVPixelBufferGetWidth(cameraFrame);
170 | int bufferHeight = (int)CVPixelBufferGetHeight(cameraFrame);
171 |
172 | if ([GPUImageVideoCamera supportsFastTextureUpload])
173 | {
174 | // CVPixelBufferLockBaseAddress(cameraFrame, 0);
175 | if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
176 | {
177 | return;
178 | }
179 |
180 | [GPUImageOpenGLESContext useImageProcessingContext];
181 | CVOpenGLESTextureRef texture = NULL;
182 | CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
183 |
184 | if (!texture || err) {
185 | NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
186 | dispatch_semaphore_signal(frameRenderingSemaphore); // release the semaphore acquired above so later frames are not dropped forever
187 | return;
188 | }
188 |
189 | outputTexture = CVOpenGLESTextureGetName(texture);
190 | glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
191 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
192 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
193 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
194 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
195 |
196 | for (id currentTarget in targets)
197 | {
198 | NSInteger indexOfObject = [targets indexOfObject:currentTarget];
199 | [currentTarget setInputTexture:outputTexture atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
200 |
201 | [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight)];
202 | [currentTarget newFrameReady];
203 | }
204 |
205 |
206 | // CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
207 |
208 | glBindTexture(GL_TEXTURE_2D, 0);
209 |
210 | // Flush the CVOpenGLESTexture cache and release the texture
211 | CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
212 | CFRelease(texture);
213 | outputTexture = 0;
214 |
215 | dispatch_semaphore_signal(frameRenderingSemaphore);
216 |
217 | if (runBenchmark)
218 | {
219 | CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
220 | totalFrameTimeDuringCapture += currentFrameTime;
221 | numberOfFramesCaptured++;
222 | // NSLog(@"Average frame time : %f ms", 1000.0 * (totalFrameTimeDuringCapture / numberOfFramesCaptured));
223 | // NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
224 | }
225 | }
226 | else
227 | {
228 | // Upload to texture
229 | // CVPixelBufferLockBaseAddress(cameraFrame, 0);
230 | if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
231 | {
232 | return;
233 | }
234 |
235 | glBindTexture(GL_TEXTURE_2D, outputTexture);
236 | // Using BGRA extension to pull in video frame data directly
237 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
238 |
239 | for (id currentTarget in targets)
240 | {
241 | [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight)];
242 | [currentTarget newFrameReady];
243 | }
244 |
245 | dispatch_semaphore_signal(frameRenderingSemaphore);
246 | // CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
247 |
248 | if (runBenchmark)
249 | {
250 | CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
251 | totalFrameTimeDuringCapture += currentFrameTime;
252 | numberOfFramesCaptured++;
253 | // NSLog(@"Average frame time : %f ms", 1000.0 * (totalFrameTimeDuringCapture / numberOfFramesCaptured));
254 | // NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
255 | }
256 | }
257 | }
258 |
259 | - (AVCaptureDevicePosition)cameraPosition
260 | {
261 | return [[videoInput device] position];
262 | }
263 |
264 | - (void)rotateCamera
265 | {
266 | // if (self.frontFacingCameraPresent == NO)
267 | // return;
268 |
269 | NSError *error;
270 | AVCaptureDeviceInput *newVideoInput;
271 | AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
272 |
273 | if (currentCameraPosition == AVCaptureDevicePositionBack)
274 | {
275 | currentCameraPosition = AVCaptureDevicePositionFront;
276 | }
277 | else
278 | {
279 | currentCameraPosition = AVCaptureDevicePositionBack;
280 | }
281 |
282 | AVCaptureDevice *backFacingCamera = nil;
283 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
284 | for (AVCaptureDevice *device in devices)
285 | {
286 | if ([device position] == currentCameraPosition)
287 | {
288 | backFacingCamera = device;
289 | }
290 | }
291 | newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];
292 |
293 | if (newVideoInput != nil)
294 | {
295 | [captureSession beginConfiguration];
296 |
297 | [captureSession removeInput:videoInput];
298 | if ([captureSession canAddInput:newVideoInput])
299 | {
300 | [captureSession addInput:newVideoInput];
301 | videoInput = newVideoInput;
302 | }
303 | else
304 | {
305 | [captureSession addInput:videoInput];
306 | }
307 | //captureSession.sessionPreset = oriPreset;
308 | [captureSession commitConfiguration];
309 | }
310 | }
311 |
312 | #pragma mark -
313 | #pragma mark Accessors
314 |
315 | @synthesize captureSession;
316 | @synthesize runBenchmark;
317 |
318 | @end
319 |
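A short sketch of the benchmarking hooks implemented above; the reported value is the average per-frame processing time in milliseconds:

    videoCamera.runBenchmark = YES; // accumulate timings in the capture callback
    [videoCamera startCameraCapture];
    // ... after capturing for a while:
    NSLog(@"Average frame time: %f ms", [videoCamera averageFrameDurationDuringCapture]);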
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageView.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h> // assumed import; the original angle-bracket header was lost in extraction (GPUImageView subclasses UIView)
2 | #import "GPUImageOpenGLESContext.h"
3 |
4 | typedef enum {
5 | kGPUImageFillModeStretch, // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio
6 | kGPUImageFillModePreserveAspectRatio, // Maintains the aspect ratio of the source image, adding bars of the specified background color
7 | kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
8 | } GPUImageFillModeType;
9 |
10 | @interface GPUImageView : UIView <GPUImageInput>
11 |
12 | /** The fill mode dictates how images fit within the view; the default is kGPUImageFillModePreserveAspectRatio.
13 | */
14 | @property(readwrite, nonatomic) GPUImageFillModeType fillMode;
15 |
16 | @end
17 |
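A short configuration sketch for the view above, assuming a host view controller:

    GPUImageView *previewView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
    previewView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill; // crop rather than letterbox
    [self.view addSubview:previewView];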
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/GPUImage/GPUImageView.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageView.h"
2 | #import <QuartzCore/QuartzCore.h> // assumed import (lost in extraction): CAEAGLLayer lives in QuartzCore
3 | #import <OpenGLES/ES2/gl.h> // assumed import (lost in extraction)
4 | #import "GPUImageOpenGLESContext.h"
5 | #import "GPUImageFilter.h"
6 | #import <AVFoundation/AVFoundation.h> // assumed import (lost in extraction): supplies AVMakeRectWithAspectRatioInsideRect
7 |
8 | #define SCREEN_WIDTH ([[UIScreen mainScreen] bounds].size.width)
9 | #define SCREEN_HEIGHT ([[UIScreen mainScreen] bounds].size.height)
10 |
11 | NSString *const kGPUImageDisplayFragmentShaderString = SHADER_STRING
12 | (
13 | varying highp vec2 textureCoordinate;
14 |
15 | uniform sampler2D inputImageTexture;
16 |
17 | void main()
18 | {
19 | gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
20 | }
21 | );
22 |
23 | #pragma mark -
24 | #pragma mark Private methods and instance variables
25 |
26 | @interface GPUImageView ()
27 | {
28 | GLuint inputTextureForDisplay;
29 | GLint backingWidth, backingHeight;
30 | GLuint displayRenderbuffer, displayFramebuffer;
31 |
32 | GLProgram *displayProgram;
33 | GLint displayPositionAttribute, displayTextureCoordinateAttribute;
34 | GLint displayInputTextureUniform;
35 |
36 | CGSize inputImageSize;
37 | GLfloat imageVertices[8];
38 | }
39 |
40 | // Initialization and teardown
41 | - (void)commonInit;
42 |
43 | // Managing the display FBOs
44 | - (void)createDisplayFramebuffer;
45 | - (void)destroyDisplayFramebuffer;
46 |
47 | @end
48 |
49 | @implementation GPUImageView
50 |
51 | #pragma mark -
52 | #pragma mark Initialization and teardown
53 |
54 | + (Class)layerClass
55 | {
56 | return [CAEAGLLayer class];
57 | }
58 |
59 | - (id)initWithFrame:(CGRect)frame
60 | {
61 | if (!(self = [super initWithFrame:frame]))
62 | {
63 | return nil;
64 | }
65 |
66 | [self commonInit];
67 |
68 | return self;
69 | }
70 |
71 | -(id)initWithCoder:(NSCoder *)coder
72 | {
73 | if (!(self = [super initWithCoder:coder]))
74 | {
75 | return nil;
76 | }
77 |
78 | [self commonInit];
79 |
80 | return self;
81 | }
82 |
83 | - (void)commonInit;
84 | {
85 | // Set scaling to account for Retina display
86 | if ([self respondsToSelector:@selector(setContentScaleFactor:)])
87 | {
88 | self.contentScaleFactor = [[UIScreen mainScreen] scale];
89 | }
90 |
91 | CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
92 | eaglLayer.opaque = YES;
93 | eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
94 |
95 | [GPUImageOpenGLESContext useImageProcessingContext];
96 | displayProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageDisplayFragmentShaderString];
97 |
98 | [displayProgram addAttribute:@"position"];
99 | [displayProgram addAttribute:@"inputTextureCoordinate"];
100 |
101 | if (![displayProgram link])
102 | {
103 | NSString *progLog = [displayProgram programLog];
104 | NSLog(@"Program link log: %@", progLog);
105 | NSString *fragLog = [displayProgram fragmentShaderLog];
106 | NSLog(@"Fragment shader compile log: %@", fragLog);
107 | NSString *vertLog = [displayProgram vertexShaderLog];
108 | NSLog(@"Vertex shader compile log: %@", vertLog);
109 | displayProgram = nil;
110 | NSAssert(NO, @"Filter shader link failed");
111 | }
112 |
113 | displayPositionAttribute = [displayProgram attributeIndex:@"position"];
114 | displayTextureCoordinateAttribute = [displayProgram attributeIndex:@"inputTextureCoordinate"];
115 | displayInputTextureUniform = [displayProgram uniformIndex:@"inputImageTexture"]; // This assumes the fragment shader names its sampler "inputImageTexture"
116 |
117 | [displayProgram use];
118 | glEnableVertexAttribArray(displayPositionAttribute);
119 | glEnableVertexAttribArray(displayTextureCoordinateAttribute);
120 |
121 |
122 | }
123 |
124 | - (void)dealloc
125 | {
126 | [self destroyDisplayFramebuffer];
127 | }
128 |
129 | #pragma mark -
130 | #pragma mark Managing the display FBOs
131 |
132 | - (void)createDisplayFramebuffer;
133 | {
134 | glGenFramebuffers(1, &displayFramebuffer);
135 | glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
136 |
137 | glGenRenderbuffers(1, &displayRenderbuffer);
138 | glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
139 |
140 | [[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
141 |
142 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
143 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
144 | // NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);
145 |
146 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);
147 |
148 | GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
149 | NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation");
150 | }
151 |
152 | - (void)destroyDisplayFramebuffer;
153 | {
154 | if (displayFramebuffer)
155 | {
156 | glDeleteFramebuffers(1, &displayFramebuffer);
157 | displayFramebuffer = 0;
158 | }
159 |
160 | if (displayRenderbuffer)
161 | {
162 | glDeleteRenderbuffers(1, &displayRenderbuffer);
163 | displayRenderbuffer = 0;
164 | }
165 | }
166 |
167 | - (void)setDisplayFramebuffer;
168 | {
169 | if (!displayFramebuffer)
170 | {
171 | [self createDisplayFramebuffer];
172 | }
173 |
174 | glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
175 |
176 | glViewport(0, 0, backingWidth, backingHeight);
177 | }
178 |
179 | - (void)presentFramebuffer;
180 | {
181 | glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
182 | [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] presentBufferForDisplay];
183 | }
184 |
185 | #pragma mark -
186 | #pragma mark GPUInput protocol
187 |
188 | - (void)newFrameReady;
189 | {
190 | [GPUImageOpenGLESContext useImageProcessingContext];
191 | [self setDisplayFramebuffer];
192 |
193 | [displayProgram use];
194 |
195 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
196 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
197 |
198 | // static const GLfloat squareVertices[] = {
199 | // -1.0f, -1.0f,
200 | // 1.0f, -1.0f,
201 | // -1.0f, 1.0f,
202 | // 1.0f, 1.0f,
203 | // };
204 |
205 | static const GLfloat textureCoordinates[] = {
206 | 0.0f, 1.0f,
207 | 1.0f, 1.0f,
208 | 0.0f, 0.0f,
209 | 1.0f, 0.0f,
210 | };
211 |
212 | glActiveTexture(GL_TEXTURE4);
213 | glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay);
214 | glUniform1i(displayInputTextureUniform, 4);
215 |
216 | glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);
217 | glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
218 |
219 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
220 |
221 | [self presentFramebuffer];
222 | }
223 |
224 | - (NSInteger)nextAvailableTextureIndex;
225 | {
226 | return 0;
227 | }
228 |
229 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
230 | {
231 | inputTextureForDisplay = newInputTexture;
232 | }
233 |
234 | - (void)setInputSize:(CGSize)newSize;
235 | {
236 |
237 | }
238 |
239 |
240 | - (CGSize)maximumOutputSize;
241 | {
242 | if ([self respondsToSelector:@selector(setContentScaleFactor:)])
243 | {
244 | CGSize pointSize = self.bounds.size;
245 | return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height);
246 | }
247 | else
248 | {
249 | return self.bounds.size;
250 | }
251 | }
252 |
253 | - (void)setFillMode:(GPUImageFillModeType)newValue;
254 | {
255 | _fillMode = newValue;
256 | [self recalculateViewGeometry];
257 | }
258 |
259 | #pragma mark -
260 | #pragma mark Handling fill mode
261 |
262 | - (void)recalculateViewGeometry;
263 | {
264 | // runSynchronouslyOnVideoProcessingQueue(^{
265 | CGFloat heightScaling, widthScaling;
266 |
267 | CGSize currentViewSize = self.bounds.size;
268 | inputImageSize = CGSizeMake(SCREEN_WIDTH, SCREEN_WIDTH);
269 | // CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height;
270 | // CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height;
271 |
272 | CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds);
273 |
274 | switch(_fillMode)
275 | {
276 | case kGPUImageFillModeStretch:
277 | {
278 | widthScaling = 1.0;
279 | heightScaling = 1.0;
280 | }; break;
281 | case kGPUImageFillModePreserveAspectRatio:
282 | {
283 | widthScaling = insetRect.size.width / currentViewSize.width;
284 | heightScaling = insetRect.size.height / currentViewSize.height;
285 | }; break;
286 | case kGPUImageFillModePreserveAspectRatioAndFill:
287 | {
288 | // CGFloat widthHolder = insetRect.size.width / currentViewSize.width;
289 | widthScaling = currentViewSize.height / insetRect.size.height;
290 | heightScaling = currentViewSize.width / insetRect.size.width;
291 | }; break;
292 | }
293 |
294 | imageVertices[0] = -widthScaling;
295 | imageVertices[1] = -heightScaling;
296 | imageVertices[2] = widthScaling;
297 | imageVertices[3] = -heightScaling;
298 | imageVertices[4] = -widthScaling;
299 | imageVertices[5] = heightScaling;
300 | imageVertices[6] = widthScaling;
301 | imageVertices[7] = heightScaling;
302 | // });
303 |
304 | // static const GLfloat imageVertices[] = {
305 | // -1.0f, -1.0f,
306 | // 1.0f, -1.0f,
307 | // -1.0f, 1.0f,
308 | // 1.0f, 1.0f,
309 | // };
310 | }
311 |
312 |
313 | @end
314 |
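A worked example of recalculateViewGeometry above: with the square input it assumes (SCREEN_WIDTH x SCREEN_WIDTH) in a 320x480-point view, AVMakeRectWithAspectRatioInsideRect yields a 320x320 inset rect. Under kGPUImageFillModePreserveAspectRatio, widthScaling = 320/320 = 1.0 and heightScaling = 320/480 ≈ 0.67, so the quad is letterboxed vertically; under kGPUImageFillModePreserveAspectRatioAndFill, widthScaling = 480/320 = 1.5 and heightScaling = 320/320 = 1.0, so the quad overflows the view horizontally and the edges are cropped.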
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IF1977Filter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IF1977Filter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IF1977Filter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IF1977Filter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IF1977Filter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IF1977Filter.h"
10 |
11 | NSString *const kIF1977ShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2;
19 |
20 | void main()
21 | {
22 |
23 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
24 |
25 | texel = vec3(
26 | texture2D(inputImageTexture2, vec2(texel.r, .16666)).r,
27 | texture2D(inputImageTexture2, vec2(texel.g, .5)).g,
28 | texture2D(inputImageTexture2, vec2(texel.b, .83333)).b);
29 |
30 | gl_FragColor = vec4(texel, 1.0);
31 | }
32 | );
33 |
34 | @implementation IF1977Filter
35 |
36 | - (id)init;
37 | {
38 | if (!(self = [super initWithFragmentShaderFromString:kIF1977ShaderString]))
39 | {
40 | return nil;
41 | }
42 |
43 | return self;
44 | }
45 |
46 | @end
47 |
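The shader above is a per-channel lookup-table transform: inputImageTexture2 is a map image with one tone-curve row per channel, and the y values .16666, .5, and .83333 sample the centers of those three rows (red, green, and blue respectively). A hedged usage sketch; in this kit the binding of the map texture (e.g. 1977map.png from Resources_for_IF_Filters) to inputImageTexture2 is handled by the hosting camera/picture code, so only the filter end of the chain is shown:

    IF1977Filter *filter = [[IF1977Filter alloc] init];
    [sourcePicture addTarget:filter]; // sourcePicture: a GPUImagePicture
    [filter addTarget:previewView];   // previewView: a GPUImageView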
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFAmaroFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFAmaroFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFAmaroFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFAmaroFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFAmaroFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFAmaroFilter.h"
10 |
11 | NSString *const kIFAmaroShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //blowout;
19 | uniform sampler2D inputImageTexture3; //overlay;
20 | uniform sampler2D inputImageTexture4; //map
21 |
22 | void main()
23 | {
24 |
25 | vec4 texel = texture2D(inputImageTexture, textureCoordinate);
26 | vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;
27 |
28 | texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
29 | texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
30 | texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;
31 |
32 | vec4 mapped;
33 | mapped.r = texture2D(inputImageTexture4, vec2(texel.r, .16666)).r;
34 | mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
35 | mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
36 | mapped.a = 1.0;
37 |
38 | gl_FragColor = mapped;
39 | }
40 | );
41 |
42 | @implementation IFAmaroFilter
43 |
44 | - (id)init;
45 | {
46 | if (!(self = [super initWithFragmentShaderFromString:kIFAmaroShaderString]))
47 | {
48 | return nil;
49 | }
50 |
51 | return self;
52 | }
53 |
54 | @end
55 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFBrannanFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFBrannanFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFBrannanFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFBrannanFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFBrannanFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFBrannanFilter.h"
10 |
11 | NSString *const kIFBrannanShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //process
19 | uniform sampler2D inputImageTexture3; //blowout
20 | uniform sampler2D inputImageTexture4; //contrast
21 | uniform sampler2D inputImageTexture5; //luma
22 | uniform sampler2D inputImageTexture6; //screen
23 |
24 | mat3 saturateMatrix = mat3(
25 | 1.105150,
26 | -0.044850,
27 | -0.046000,
28 | -0.088050,
29 | 1.061950,
30 | -0.089200,
31 | -0.017100,
32 | -0.017100,
33 | 1.132900);
34 |
35 | vec3 luma = vec3(.3, .59, .11);
36 |
37 | void main()
38 | {
39 |
40 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
41 |
42 | vec2 lookup;
43 | lookup.y = 0.5;
44 | lookup.x = texel.r;
45 | texel.r = texture2D(inputImageTexture2, lookup).r;
46 | lookup.x = texel.g;
47 | texel.g = texture2D(inputImageTexture2, lookup).g;
48 | lookup.x = texel.b;
49 | texel.b = texture2D(inputImageTexture2, lookup).b;
50 |
51 | texel = saturateMatrix * texel;
52 |
53 |
54 | vec2 tc = (2.0 * textureCoordinate) - 1.0;
55 | float d = dot(tc, tc);
56 | vec3 sampled;
57 | lookup.y = 0.5;
58 | lookup.x = texel.r;
59 | sampled.r = texture2D(inputImageTexture3, lookup).r;
60 | lookup.x = texel.g;
61 | sampled.g = texture2D(inputImageTexture3, lookup).g;
62 | lookup.x = texel.b;
63 | sampled.b = texture2D(inputImageTexture3, lookup).b;
64 | float value = smoothstep(0.0, 1.0, d);
65 | texel = mix(sampled, texel, value);
66 |
67 | lookup.x = texel.r;
68 | texel.r = texture2D(inputImageTexture4, lookup).r;
69 | lookup.x = texel.g;
70 | texel.g = texture2D(inputImageTexture4, lookup).g;
71 | lookup.x = texel.b;
72 | texel.b = texture2D(inputImageTexture4, lookup).b;
73 |
74 |
75 | lookup.x = dot(texel, luma);
76 | texel = mix(texture2D(inputImageTexture5, lookup).rgb, texel, .5);
77 |
78 | lookup.x = texel.r;
79 | texel.r = texture2D(inputImageTexture6, lookup).r;
80 | lookup.x = texel.g;
81 | texel.g = texture2D(inputImageTexture6, lookup).g;
82 | lookup.x = texel.b;
83 | texel.b = texture2D(inputImageTexture6, lookup).b;
84 |
85 | gl_FragColor = vec4(texel, 1.0);
86 | }
87 | );
88 |
89 | @implementation IFBrannanFilter
90 |
91 | - (id)init;
92 | {
93 | if (!(self = [super initWithFragmentShaderFromString:kIFBrannanShaderString]))
94 | {
95 | return nil;
96 | }
97 |
98 | return self;
99 | }
100 |
101 | @end
102 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFEarlybirdFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFEarlybirdFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFEarlybirdFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFEarlybirdFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFEarlybirdFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFEarlybirdFilter.h"
10 |
11 | NSString *const kIFEarlybirdShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //earlyBirdCurves
19 | uniform sampler2D inputImageTexture3; //earlyBirdOverlay
20 | uniform sampler2D inputImageTexture4; //vig
21 | uniform sampler2D inputImageTexture5; //earlyBirdBlowout
22 | uniform sampler2D inputImageTexture6; //earlyBirdMap
23 |
24 | const mat3 saturate = mat3(
25 | 1.210300,
26 | -0.089700,
27 | -0.091000,
28 | -0.176100,
29 | 1.123900,
30 | -0.177400,
31 | -0.034200,
32 | -0.034200,
33 | 1.265800);
34 | const vec3 rgbPrime = vec3(0.25098, 0.14640522, 0.0);
35 | const vec3 desaturate = vec3(.3, .59, .11);
36 |
37 | void main()
38 | {
39 |
40 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
41 |
42 |
43 | vec2 lookup;
44 | lookup.y = 0.5;
45 |
46 | lookup.x = texel.r;
47 | texel.r = texture2D(inputImageTexture2, lookup).r;
48 |
49 | lookup.x = texel.g;
50 | texel.g = texture2D(inputImageTexture2, lookup).g;
51 |
52 | lookup.x = texel.b;
53 | texel.b = texture2D(inputImageTexture2, lookup).b;
54 |
55 | float desaturatedColor;
56 | vec3 result;
57 | desaturatedColor = dot(desaturate, texel);
58 |
59 |
60 | lookup.x = desaturatedColor;
61 | result.r = texture2D(inputImageTexture3, lookup).r;
62 | lookup.x = desaturatedColor;
63 | result.g = texture2D(inputImageTexture3, lookup).g;
64 | lookup.x = desaturatedColor;
65 | result.b = texture2D(inputImageTexture3, lookup).b;
66 |
67 | texel = saturate * mix(texel, result, .5);
68 |
69 | vec2 tc = (2.0 * textureCoordinate) - 1.0;
70 | float d = dot(tc, tc);
71 |
72 | vec3 sampled;
73 | lookup.y = .5;
74 |
75 | /*
76 | lookup.x = texel.r;
77 | sampled.r = texture2D(inputImageTexture4, lookup).r;
78 |
79 | lookup.x = texel.g;
80 | sampled.g = texture2D(inputImageTexture4, lookup).g;
81 |
82 | lookup.x = texel.b;
83 | sampled.b = texture2D(inputImageTexture4, lookup).b;
84 |
85 | float value = smoothstep(0.0, 1.25, pow(d, 1.35)/1.65);
86 | texel = mix(texel, sampled, value);
87 | */
88 |
89 | //---
90 |
91 | lookup = vec2(d, texel.r);
92 | texel.r = texture2D(inputImageTexture4, lookup).r;
93 | lookup.y = texel.g;
94 | texel.g = texture2D(inputImageTexture4, lookup).g;
95 | lookup.y = texel.b;
96 | texel.b = texture2D(inputImageTexture4, lookup).b;
97 | float value = smoothstep(0.0, 1.25, pow(d, 1.35)/1.65);
98 |
99 | //---
100 |
101 | lookup.x = texel.r;
102 | sampled.r = texture2D(inputImageTexture5, lookup).r;
103 | lookup.x = texel.g;
104 | sampled.g = texture2D(inputImageTexture5, lookup).g;
105 | lookup.x = texel.b;
106 | sampled.b = texture2D(inputImageTexture5, lookup).b;
107 | texel = mix(sampled, texel, value);
108 |
109 |
110 | lookup.x = texel.r;
111 | texel.r = texture2D(inputImageTexture6, lookup).r;
112 | lookup.x = texel.g;
113 | texel.g = texture2D(inputImageTexture6, lookup).g;
114 | lookup.x = texel.b;
115 | texel.b = texture2D(inputImageTexture6, lookup).b;
116 |
117 | gl_FragColor = vec4(texel, 1.0);
118 | }
119 | );
120 |
121 | @implementation IFEarlybirdFilter
122 |
123 | - (id)init;
124 | {
125 | if (!(self = [super initWithFragmentShaderFromString:kIFEarlybirdShaderString]))
126 | {
127 | return nil;
128 | }
129 |
130 | return self;
131 | }
132 |
133 | @end
134 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFHefeFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFHefeFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFHefeFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFHefeFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFHefeFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFHefeFilter.h"
10 |
11 | NSString *const kIFHefeShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //edgeBurn
19 | uniform sampler2D inputImageTexture3; //hefeMap
20 | uniform sampler2D inputImageTexture4; //hefeGradientMap
21 | uniform sampler2D inputImageTexture5; //hefeSoftLight
22 | uniform sampler2D inputImageTexture6; //hefeMetal
23 |
24 | void main()
25 | {
26 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
27 | vec3 edge = texture2D(inputImageTexture2, textureCoordinate).rgb;
28 | texel = texel * edge;
29 |
30 | texel = vec3(
31 | texture2D(inputImageTexture3, vec2(texel.r, .16666)).r,
32 | texture2D(inputImageTexture3, vec2(texel.g, .5)).g,
33 | texture2D(inputImageTexture3, vec2(texel.b, .83333)).b);
34 |
35 | vec3 luma = vec3(.30, .59, .11);
36 | vec3 gradSample = texture2D(inputImageTexture4, vec2(dot(luma, texel), .5)).rgb;
37 | vec3 final = vec3( // note: 'final' is computed but never used; gl_FragColor takes 'metaled' below
38 | texture2D(inputImageTexture5, vec2(gradSample.r, texel.r)).r,
39 | texture2D(inputImageTexture5, vec2(gradSample.g, texel.g)).g,
40 | texture2D(inputImageTexture5, vec2(gradSample.b, texel.b)).b
41 | );
42 |
43 | vec3 metal = texture2D(inputImageTexture6, textureCoordinate).rgb;
44 | vec3 metaled = vec3(
45 | texture2D(inputImageTexture5, vec2(metal.r, texel.r)).r,
46 | texture2D(inputImageTexture5, vec2(metal.g, texel.g)).g,
47 | texture2D(inputImageTexture5, vec2(metal.b, texel.b)).b
48 | );
49 |
50 | gl_FragColor = vec4(metaled, 1.0);
51 | }
52 | );
53 |
54 | @implementation IFHefeFilter
55 |
56 | - (id)init;
57 | {
58 | if (!(self = [super initWithFragmentShaderFromString:kIFHefeShaderString]))
59 | {
60 | return nil;
61 | }
62 |
63 | return self;
64 | }
65 |
66 | @end
67 |
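The vec2(channel, .16666), vec2(channel, .5), vec2(channel, .83333) lookups above recur in Hudson, Rise, Sierra, Nashville, Lomofi, Walden, and XproII below: each filter's map texture appears to stack three one-channel tone curves in horizontal bands, so y = 1/6, 1/2, and 5/6 sample the middle of the red, green, and blue bands respectively. A minimal GLSL sketch of the shared pattern, assuming a map laid out that way (the helper name is ours, not the project's):

    // banded tone-curve lookup; assumes 'map' stacks R/G/B curves in thirds
    vec3 applyBandedCurves(sampler2D map, vec3 texel)
    {
        return vec3(texture2D(map, vec2(texel.r, .16666)).r,   // red band
                    texture2D(map, vec2(texel.g, .5)).g,       // green band
                    texture2D(map, vec2(texel.b, .83333)).b);  // blue band
    }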
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFHudsonFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFHudsonFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFHudsonFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFHudsonFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFHudsonFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFHudsonFilter.h"
10 |
11 | NSString *const kIFHudsonShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //blowout;
19 | uniform sampler2D inputImageTexture3; //overlay;
20 | uniform sampler2D inputImageTexture4; //map
21 |
22 | void main()
23 | {
24 |
25 | vec4 texel = texture2D(inputImageTexture, textureCoordinate);
26 |
27 | vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;
28 |
29 | texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
30 | texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
31 | texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;
32 |
33 | vec4 mapped;
34 | mapped.r = texture2D(inputImageTexture4, vec2(texel.r, .16666)).r;
35 | mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
36 | mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
37 | mapped.a = 1.0;
38 | gl_FragColor = mapped;
39 | }
40 | );
41 |
42 | @implementation IFHudsonFilter
43 |
44 | - (id)init;
45 | {
46 | if (!(self = [super initWithFragmentShaderFromString:kIFHudsonShaderString]))
47 | {
48 | return nil;
49 | }
50 |
51 | return self;
52 | }
53 |
54 | @end
55 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFInkwellFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFInkwellFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFInkwellFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFInkwellFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFInkwellFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFInkwellFilter.h"
10 |
11 | NSString *const kIFInkwellShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2;
19 |
20 | void main()
21 | {
22 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
23 | texel = vec3(dot(vec3(0.3, 0.6, 0.1), texel));
24 | texel = vec3(texture2D(inputImageTexture2, vec2(texel.r, .16666)).r);
25 | gl_FragColor = vec4(texel, 1.0);
26 | }
27 | );
28 |
29 | @implementation IFInkwellFilter
30 |
31 | - (id)init;
32 | {
33 | if (!(self = [super initWithFragmentShaderFromString:kIFInkwellShaderString]))
34 | {
35 | return nil;
36 | }
37 |
38 | return self;
39 | }
40 |
41 | @end
42 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFLomofiFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFLomofiFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFLomofiFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFLomofiFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFLomofiFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFLomofiFilter.h"
10 |
11 | NSString *const kIFLomofiShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2;
19 | uniform sampler2D inputImageTexture3;
20 |
21 | void main()
22 | {
23 |
24 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
25 |
26 | vec2 red = vec2(texel.r, 0.16666);
27 | vec2 green = vec2(texel.g, 0.5);
28 | vec2 blue = vec2(texel.b, 0.83333);
29 |
30 | texel.rgb = vec3(
31 | texture2D(inputImageTexture2, red).r,
32 | texture2D(inputImageTexture2, green).g,
33 | texture2D(inputImageTexture2, blue).b);
34 |
35 | vec2 tc = (2.0 * textureCoordinate) - 1.0;
36 | float d = dot(tc, tc);
37 | vec2 lookup = vec2(d, texel.r);
38 | texel.r = texture2D(inputImageTexture3, lookup).r;
39 | lookup.y = texel.g;
40 | texel.g = texture2D(inputImageTexture3, lookup).g;
41 | lookup.y = texel.b;
42 | texel.b = texture2D(inputImageTexture3, lookup).b;
43 |
44 | gl_FragColor = vec4(texel,1.0);
45 | }
46 | );
47 |
48 | @implementation IFLomofiFilter
49 |
50 | - (id)init;
51 | {
52 | if (!(self = [super initWithFragmentShaderFromString:kIFLomofiShaderString]))
53 | {
54 | return nil;
55 | }
56 |
57 | return self;
58 | }
59 |
60 | @end
61 |
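The vignette block above (repeated in Sutro, Walden, and XproII) rests on one piece of arithmetic: tc = (2.0 * textureCoordinate) - 1.0 recenters the coordinate so tc spans [-1, 1]^2, and d = dot(tc, tc) is then the squared distance from the image center, 0.0 in the middle and 2.0 in the corners. d drives the x axis of the 2D vignette map while y carries the channel value, so the falloff can vary with both radius and brightness. The same lines with comments (vigMap stands in for inputImageTexture3):

    vec2 tc = (2.0 * textureCoordinate) - 1.0;        // [0,1]^2 -> [-1,1]^2
    float d = dot(tc, tc);                            // r^2: 0 at center, 2 in corners
    texel.r = texture2D(vigMap, vec2(d, texel.r)).r;  // same lookup repeats for g and b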
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFLordKelvinFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFLordKelvinFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFLordKelvinFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFLordKelvinFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFLordKelvinFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFLordKelvinFilter.h"
10 |
11 | NSString *const kIFLordKelvinShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2;
19 |
20 | void main()
21 | {
22 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
23 |
24 | vec2 lookup;
25 | lookup.y = .5;
26 |
27 | lookup.x = texel.r;
28 | texel.r = texture2D(inputImageTexture2, lookup).r;
29 |
30 | lookup.x = texel.g;
31 | texel.g = texture2D(inputImageTexture2, lookup).g;
32 |
33 | lookup.x = texel.b;
34 | texel.b = texture2D(inputImageTexture2, lookup).b;
35 |
36 | gl_FragColor = vec4(texel, 1.0);
37 | }
38 | );
39 |
40 | @implementation IFLordKelvinFilter
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kIFLordKelvinShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | return self;
50 | }
51 |
52 | @end
53 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFNashvilleFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFNashvilleFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFNashvilleFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFNashvilleFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFNashvilleFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFNashvilleFilter.h"
10 |
11 | NSString *const kIFNashvilleShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2;
19 |
20 | void main()
21 | {
22 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
23 | texel = vec3(
24 | texture2D(inputImageTexture2, vec2(texel.r, .16666)).r,
25 | texture2D(inputImageTexture2, vec2(texel.g, .5)).g,
26 | texture2D(inputImageTexture2, vec2(texel.b, .83333)).b);
27 | gl_FragColor = vec4(texel, 1.0);
28 | }
29 | );
30 |
31 | @implementation IFNashvilleFilter
32 |
33 | - (id)init;
34 | {
35 | if (!(self = [super initWithFragmentShaderFromString:kIFNashvilleShaderString]))
36 | {
37 | return nil;
38 | }
39 |
40 | return self;
41 | }
42 |
43 | @end
44 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFNormalFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFNormalFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFNormalFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFNormalFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFNormalFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFNormalFilter.h"
10 |
11 | NSString *const kIFNormalShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 |
19 | void main()
20 | {
21 |
22 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
23 |
24 | gl_FragColor = vec4(texel, 1.0);
25 | }
26 | );
27 |
28 | @implementation IFNormalFilter
29 |
30 | - (id)init;
31 | {
32 | if (!(self = [super initWithFragmentShaderFromString:kIFNormalShaderString]))
33 | {
34 | return nil;
35 | }
36 |
37 | return self;
38 | }
39 |
40 | @end
41 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFRiseFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFRiseFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFRiseFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFRiseFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFRiseFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFRiseFilter.h"
10 |
11 | NSString *const kIFRiseShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //blowout;
19 | uniform sampler2D inputImageTexture3; //overlay;
20 | uniform sampler2D inputImageTexture4; //map
21 |
22 | void main()
23 | {
24 |
25 | vec4 texel = texture2D(inputImageTexture, textureCoordinate);
26 | vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;
27 |
28 | texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
29 | texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
30 | texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;
31 |
32 | vec4 mapped;
33 | mapped.r = texture2D(inputImageTexture4, vec2(texel.r, .16666)).r;
34 | mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
35 | mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
36 | mapped.a = 1.0;
37 |
38 | gl_FragColor = mapped;
39 | }
40 | );
41 |
42 | @implementation IFRiseFilter
43 |
44 | - (id)init;
45 | {
46 | if (!(self = [super initWithFragmentShaderFromString:kIFRiseShaderString]))
47 | {
48 | return nil;
49 | }
50 |
51 | return self;
52 | }
53 |
54 | @end
55 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFRotationFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFRotationFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "GPUImageRotationFilter.h"
10 |
11 | @interface IFRotationFilter : GPUImageRotationFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFRotationFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFRotationFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFRotationFilter.h"
10 |
11 | @implementation IFRotationFilter
12 |
13 | //- (void)newFrameReady
14 | //{
15 | // static const GLfloat rotationSquareVertices[] = {
16 | // -1.0f, -1.0f,
17 | // 1.0f, -1.0f,
18 | // -1.0f, 1.0f,
19 | // 1.0f, 1.0f,
20 | // };
21 | //
22 | // static const GLfloat rotateLeftTextureCoordinates[] = {
23 | // 1.0f, 0.0f,
24 | // 1.0f, 1.0f,
25 | // 0.0f, 0.0f,
26 | // 0.0f, 1.0f,
27 | // };
28 | //
29 | // static const GLfloat rotateRightTextureCoordinates[] = {
30 | // 0.0f, 1.0f,
31 | // 0.0f, 0.0f,
32 | // 0.75f, 1.0f,
33 | // 0.75f, 0.0f,
34 | // };
35 | //
36 | // static const GLfloat verticalFlipTextureCoordinates[] = {
37 | // 0.0f, 1.0f,
38 | // 1.0f, 1.0f,
39 | // 0.0f, 0.0f,
40 | // 1.0f, 0.0f,
41 | // };
42 | //
43 | // static const GLfloat horizontalFlipTextureCoordinates[] = {
44 | // 1.0f, 0.0f,
45 | // 0.0f, 0.0f,
46 | // 1.0f, 1.0f,
47 | // 0.0f, 1.0f,
48 | // };
49 | //
50 | // switch (rotationMode)
51 | // {
52 | // case kGPUImageRotateLeft: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateLeftTextureCoordinates]; break;
53 | // case kGPUImageRotateRight: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateRightTextureCoordinates]; break;
54 | // case kGPUImageFlipHorizonal: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:verticalFlipTextureCoordinates]; break;
55 | // case kGPUImageFlipVertical: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:horizontalFlipTextureCoordinates]; break;
56 | // }
57 | //
58 | //}
59 |
60 | - (void)newFrameReady;
61 | {
62 | // static const GLfloat noRotationTextureCoordinates[] = {
63 | // 0.0f, 0.0f,
64 | // 1.0f, 0.0f,
65 | // 0.0f, 1.0f,
66 | // 1.0f, 1.0f,
67 | // };
68 |
69 | static const GLfloat rotationSquareVertices[] = {
70 | -1.0f, -1.0f,
71 | 1.0f, -1.0f,
72 | -1.0f, 1.0f,
73 | 1.0f, 1.0f,
74 | };
75 |
76 | static const GLfloat noRotationTextureCoordinates[] = {
77 | 0.0f, 1.0f,
78 | 0.0f, 0.0f,
79 | 0.75f, 1.0f,
80 | 0.75f, 0.0f,
81 | };
82 |
83 | static const GLfloat rotateRightTextureCoordinates[] = {
84 | 0.0f, 0.0f,
85 | 1.0f, 0.0f,
86 | 0.0f, 1.0f,
87 | 1.0f, 1.0f,
88 | };
89 |
90 | static const GLfloat rotateLeftTextureCoordinates[] = {
91 | 1.0f, 0.0f,
92 | 0.0f, 0.0f,
93 | 1.0f, 1.0f,
94 | 0.0f, 1.0f,
95 | };
96 |
97 | static const GLfloat verticalFlipTextureCoordinates[] = {
98 | 1.0f, 1.0f,
99 | 1.0f, 0.0f,
100 | 0.0f, 1.0f,
101 | 0.0f, 0.0f,
102 | };
103 |
104 | static const GLfloat horizontalFlipTextureCoordinates[] = {
105 | 1.0f, 1.0f,
106 | 0.0f, 1.0f,
107 | 1.0f, 0.0f,
108 | 0.0f, 0.0f,
109 | };
110 |
111 | static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
112 | 1.0f, 0.0f,
113 | 1.0f, 1.0f,
114 | 0.0f, 0.0f,
115 | 0.0f, 1.0f,
116 | };
117 |
118 | static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
119 | 1.0f, 1.0f,
120 | 1.0f, 0.0f,
121 | 0.0f, 1.0f,
122 | 0.0f, 0.0f,
123 | };
124 |
125 | static const GLfloat rotate180TextureCoordinates[] = {
126 | 0.0f, 0.0f,
127 | 0.0f, 1.0f,
128 | 0.75f, 0.0f,
129 | 0.75f, 1.0f,
130 | };
131 |
132 | switch(rotationMode)
133 | {
134 | case kGPUImageNoRotation: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:noRotationTextureCoordinates]; break;
135 | case kGPUImageRotateLeft: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateLeftTextureCoordinates]; break;
136 | case kGPUImageRotateRight: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateRightTextureCoordinates]; break;
137 | case kGPUImageFlipVertical: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:verticalFlipTextureCoordinates]; break;
138 | case kGPUImageFlipHorizonal: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:horizontalFlipTextureCoordinates]; break; // sic: "Horizonal" is GPUImage's own enum spelling
139 | case kGPUImageRotateRightFlipVertical: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateRightVerticalFlipTextureCoordinates]; break;
140 | case kGPUImageRotateRightFlipHorizontal: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateRightHorizontalFlipTextureCoordinates]; break;
141 | case kGPUImageRotate180: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotate180TextureCoordinates]; break;
142 | }
143 | }
144 |
145 | @end
146 |
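Every case in the switch above draws the same full-screen triangle strip (rotationSquareVertices, in bottom-left, bottom-right, top-left, top-right order) and only remaps the per-vertex texture coordinates, so a rotation or flip is just a permutation of (u, v) pairs. The 0.75 x-extent in the no-rotation and 180-degree cases samples only the left three quarters of the source, which looks like a crop baked in for this project's aspect ratio (an inference; the code does not say why). For example, swapping the v values between the two vertex rows yields a vertical flip:

    // one (u, v) per strip vertex: bottom-left, bottom-right, top-left, top-right
    static const GLfloat flipV[] = {
        0.0f, 1.0f,   1.0f, 1.0f,   // bottom row samples the top of the texture
        0.0f, 0.0f,   1.0f, 0.0f,   // top row samples the bottom
    };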
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFSierraFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFSierraFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFSierraFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFSierraFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFSierraFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFSierraFilter.h"
10 |
11 | NSString *const kIFSierraShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //blowout;
19 | uniform sampler2D inputImageTexture3; //overlay;
20 | uniform sampler2D inputImageTexture4; //map
21 |
22 | void main()
23 | {
24 |
25 | vec4 texel = texture2D(inputImageTexture, textureCoordinate);
26 | vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;
27 |
28 | texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
29 | texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
30 | texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;
31 |
32 | vec4 mapped;
33 | mapped.r = texture2D(inputImageTexture4, vec2(texel.r, .16666)).r;
34 | mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
35 | mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
36 | mapped.a = 1.0;
37 |
38 | gl_FragColor = mapped;
39 | }
40 | );
41 |
42 | @implementation IFSierraFilter
43 |
44 | - (id)init;
45 | {
46 | if (!(self = [super initWithFragmentShaderFromString:kIFSierraShaderString]))
47 | {
48 | return nil;
49 | }
50 |
51 | return self;
52 | }
53 |
54 | @end
55 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFSutroFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFSutroFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFSutroFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFSutroFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFSutroFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFSutroFilter.h"
10 |
11 | NSString *const kIFSutroShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //sutroMap;
19 | uniform sampler2D inputImageTexture3; //sutroMetal;
20 | uniform sampler2D inputImageTexture4; //softLight
21 | uniform sampler2D inputImageTexture5; //sutroEdgeburn
22 | uniform sampler2D inputImageTexture6; //sutroCurves
23 |
24 | void main()
25 | {
26 |
27 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
28 |
29 | vec2 tc = (2.0 * textureCoordinate) - 1.0;
30 | float d = dot(tc, tc);
31 | vec2 lookup = vec2(d, texel.r);
32 | texel.r = texture2D(inputImageTexture2, lookup).r;
33 | lookup.y = texel.g;
34 | texel.g = texture2D(inputImageTexture2, lookup).g;
35 | lookup.y = texel.b;
36 | texel.b = texture2D(inputImageTexture2, lookup).b;
37 |
38 | vec3 rgbPrime = vec3(0.1019, 0.0, 0.0);
39 | float m = dot(vec3(.3, .59, .11), texel.rgb) - 0.03058;
40 | texel = mix(texel, rgbPrime + m, 0.32); // blend 32% toward a luma gray warmed in the red channel
41 |
42 | vec3 metal = texture2D(inputImageTexture3, textureCoordinate).rgb;
43 | texel.r = texture2D(inputImageTexture4, vec2(metal.r, texel.r)).r;
44 | texel.g = texture2D(inputImageTexture4, vec2(metal.g, texel.g)).g;
45 | texel.b = texture2D(inputImageTexture4, vec2(metal.b, texel.b)).b;
46 |
47 | texel = texel * texture2D(inputImageTexture5, textureCoordinate).rgb;
48 |
49 | texel.r = texture2D(inputImageTexture6, vec2(texel.r, .16666)).r;
50 | texel.g = texture2D(inputImageTexture6, vec2(texel.g, .5)).g;
51 | texel.b = texture2D(inputImageTexture6, vec2(texel.b, .83333)).b;
52 |
53 |
54 | gl_FragColor = vec4(texel, 1.0);
55 | }
56 | );
57 |
58 | @implementation IFSutroFilter
59 |
60 | - (id)init;
61 | {
62 | if (!(self = [super initWithFragmentShaderFromString:kIFSutroShaderString]))
63 | {
64 | return nil;
65 | }
66 |
67 | return self;
68 | }
69 |
70 | @end
71 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFToasterFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFToasterFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFToasterFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFToasterFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFToasterFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFToasterFilter.h"
10 |
11 | NSString *const kIFToasterShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //toasterMetal
19 | uniform sampler2D inputImageTexture3; //toasterSoftlight
20 | uniform sampler2D inputImageTexture4; //toasterCurves
21 | uniform sampler2D inputImageTexture5; //toasterOverlayMapWarm
22 | uniform sampler2D inputImageTexture6; //toasterColorshift
23 |
24 | void main()
25 | {
26 | lowp vec3 texel;
27 | mediump vec2 lookup;
28 | vec2 blue;
29 | vec2 green;
30 | vec2 red;
31 | lowp vec4 tmpvar_1;
32 | tmpvar_1 = texture2D (inputImageTexture, textureCoordinate);
33 | texel = tmpvar_1.xyz;
34 | lowp vec4 tmpvar_2;
35 | tmpvar_2 = texture2D (inputImageTexture2, textureCoordinate);
36 | lowp vec2 tmpvar_3;
37 | tmpvar_3.x = tmpvar_2.x;
38 | tmpvar_3.y = tmpvar_1.x;
39 | texel.x = texture2D (inputImageTexture3, tmpvar_3).x;
40 | lowp vec2 tmpvar_4;
41 | tmpvar_4.x = tmpvar_2.y;
42 | tmpvar_4.y = tmpvar_1.y;
43 | texel.y = texture2D (inputImageTexture3, tmpvar_4).y;
44 | lowp vec2 tmpvar_5;
45 | tmpvar_5.x = tmpvar_2.z;
46 | tmpvar_5.y = tmpvar_1.z;
47 | texel.z = texture2D (inputImageTexture3, tmpvar_5).z;
48 | red.x = texel.x;
49 | red.y = 0.16666;
50 | green.x = texel.y;
51 | green.y = 0.5;
52 | blue.x = texel.z;
53 | blue.y = 0.833333;
54 | texel.x = texture2D (inputImageTexture4, red).x;
55 | texel.y = texture2D (inputImageTexture4, green).y;
56 | texel.z = texture2D (inputImageTexture4, blue).z;
57 | mediump vec2 tmpvar_6;
58 | tmpvar_6 = ((2.0 * textureCoordinate) - 1.0);
59 | mediump vec2 tmpvar_7;
60 | tmpvar_7.x = dot (tmpvar_6, tmpvar_6);
61 | tmpvar_7.y = texel.x;
62 | lookup = tmpvar_7;
63 | texel.x = texture2D (inputImageTexture5, tmpvar_7).x;
64 | lookup.y = texel.y;
65 | texel.y = texture2D (inputImageTexture5, lookup).y;
66 | lookup.y = texel.z;
67 | texel.z = texture2D (inputImageTexture5, lookup).z;
68 | red.x = texel.x;
69 | green.x = texel.y;
70 | blue.x = texel.z;
71 | texel.x = texture2D (inputImageTexture6, red).x;
72 | texel.y = texture2D (inputImageTexture6, green).y;
73 | texel.z = texture2D (inputImageTexture6, blue).z;
74 | lowp vec4 tmpvar_8;
75 | tmpvar_8.w = 1.0;
76 | tmpvar_8.xyz = texel;
77 | gl_FragColor = tmpvar_8;
78 | }
79 | );
80 |
81 | @implementation IFToasterFilter
82 |
83 | - (id)init;
84 | {
85 | if (!(self = [super initWithFragmentShaderFromString:kIFToasterShaderString]))
86 | {
87 | return nil;
88 | }
89 |
90 | return self;
91 | }
92 |
93 | @end
94 |
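Unlike the other shaders in this directory, Toaster reads like machine output: the tmpvar_* names and fully unrolled assignments are characteristic of a GLSL optimizer/decompiler pass (an inference from the naming, not something the source states). The pipeline is nonetheless the same as its neighbors'. A hand-readable restatement of the first stage (tmpvar_1 through tmpvar_5):

    vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
    vec3 metal = texture2D(inputImageTexture2, textureCoordinate).rgb; // toasterMetal
    texel.r = texture2D(inputImageTexture3, vec2(metal.r, texel.r)).r; // softlight LUT
    texel.g = texture2D(inputImageTexture3, vec2(metal.g, texel.g)).g;
    texel.b = texture2D(inputImageTexture3, vec2(metal.b, texel.b)).b;

The remaining stages follow: the banded curve lookup (inputImageTexture4), the radial warm-overlay lookup keyed on dot(tc, tc) (inputImageTexture5), and a final per-channel color shift (inputImageTexture6).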
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFValenciaFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFValenciaFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFValenciaFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFValenciaFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFValenciaFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFValenciaFilter.h"
10 |
11 |
12 | NSString *const kIFValenciaShaderString = SHADER_STRING
13 | (
14 | precision lowp float;
15 |
16 | varying highp vec2 textureCoordinate;
17 |
18 | uniform sampler2D inputImageTexture;
19 | uniform sampler2D inputImageTexture2; //map
20 | uniform sampler2D inputImageTexture3; //gradMap
21 |
22 | mat3 saturateMatrix = mat3(
23 | 1.1402,
24 | -0.0598,
25 | -0.061,
26 | -0.1174,
27 | 1.0826,
28 | -0.1186,
29 | -0.0228,
30 | -0.0228,
31 | 1.1772);
32 |
33 | vec3 lumaCoeffs = vec3(.3, .59, .11);
34 |
35 | void main()
36 | {
37 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
38 |
39 | texel = vec3(
40 | texture2D(inputImageTexture2, vec2(texel.r, .1666666)).r,
41 | texture2D(inputImageTexture2, vec2(texel.g, .5)).g,
42 | texture2D(inputImageTexture2, vec2(texel.b, .8333333)).b
43 | );
44 |
45 | texel = saturateMatrix * texel;
46 | float luma = dot(lumaCoeffs, texel);
47 | texel = vec3(
48 | texture2D(inputImageTexture3, vec2(luma, texel.r)).r,
49 | texture2D(inputImageTexture3, vec2(luma, texel.g)).g,
50 | texture2D(inputImageTexture3, vec2(luma, texel.b)).b);
51 |
52 | gl_FragColor = vec4(texel, 1.0);
53 | }
54 | );
55 |
56 | @implementation IFValenciaFilter
57 |
58 | - (id)init;
59 | {
60 | if (!(self = [super initWithFragmentShaderFromString:kIFValenciaShaderString]))
61 | {
62 | return nil;
63 | }
64 |
65 | return self;
66 | }
67 |
68 | @end
69 |
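The saturateMatrix above is numerically consistent with a standard saturation boost: with luma weights w = (.3, .59, .11) and strength s = 1.2, the matrix (1 - s) * outerProduct(vec3(1.0), w) + s * I reproduces every entry to about four decimals. An equivalent GLSL formulation, shown for clarity only (the shader itself uses the precomputed matrix):

    // same effect as 'saturateMatrix * texel', with s = 1.2
    float lumaValue = dot(vec3(.3, .59, .11), texel);
    texel = mix(vec3(lumaValue), texel, 1.2);  // GLSL mix() does not clamp t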
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFWaldenFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFWaldenFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFWaldenFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFWaldenFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFWaldenFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFWaldenFilter.h"
10 |
11 | NSString *const kIFWaldenShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //map
19 | uniform sampler2D inputImageTexture3; //vigMap
20 |
21 | void main()
22 | {
23 |
24 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
25 |
26 | texel = vec3(
27 | texture2D(inputImageTexture2, vec2(texel.r, .16666)).r,
28 | texture2D(inputImageTexture2, vec2(texel.g, .5)).g,
29 | texture2D(inputImageTexture2, vec2(texel.b, .83333)).b);
30 |
31 | vec2 tc = (2.0 * textureCoordinate) - 1.0;
32 | float d = dot(tc, tc);
33 | vec2 lookup = vec2(d, texel.r);
34 | texel.r = texture2D(inputImageTexture3, lookup).r;
35 | lookup.y = texel.g;
36 | texel.g = texture2D(inputImageTexture3, lookup).g;
37 | lookup.y = texel.b;
38 | texel.b = texture2D(inputImageTexture3, lookup).b;
39 |
40 | gl_FragColor = vec4(texel, 1.0);
41 | }
42 | );
43 |
44 | @implementation IFWaldenFilter
45 |
46 | - (id)init;
47 | {
48 | if (!(self = [super initWithFragmentShaderFromString:kIFWaldenShaderString]))
49 | {
50 | return nil;
51 | }
52 |
53 | return self;
54 | }
55 |
56 | @end
57 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFXproIIFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFXproIIFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 |
11 | @interface IFXproIIFilter : IFImageFilter
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFFilters/IFXproIIFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFXproIIFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFXproIIFilter.h"
10 |
11 | NSString *const kIFXproIIShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2; //map
19 | uniform sampler2D inputImageTexture3; //vigMap
20 |
21 | void main()
22 | {
23 |
24 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
25 |
26 | vec2 tc = (2.0 * textureCoordinate) - 1.0;
27 | float d = dot(tc, tc);
28 | vec2 lookup = vec2(d, texel.r);
29 | texel.r = texture2D(inputImageTexture3, lookup).r;
30 | lookup.y = texel.g;
31 | texel.g = texture2D(inputImageTexture3, lookup).g;
32 | lookup.y = texel.b;
33 | texel.b = texture2D(inputImageTexture3, lookup).b;
34 |
35 | vec2 red = vec2(texel.r, 0.16666);
36 | vec2 green = vec2(texel.g, 0.5);
37 | vec2 blue = vec2(texel.b, .83333);
38 | texel.r = texture2D(inputImageTexture2, red).r;
39 | texel.g = texture2D(inputImageTexture2, green).g;
40 | texel.b = texture2D(inputImageTexture2, blue).b;
41 |
42 | gl_FragColor = vec4(texel, 1.0);
43 |
44 | }
45 | );
46 |
47 | @implementation IFXproIIFilter
48 |
49 | - (id)init;
50 | {
51 | if (!(self = [super initWithFragmentShaderFromString:kIFXproIIShaderString]))
52 | {
53 | return nil;
54 | }
55 |
56 | return self;
57 | }
58 |
59 | @end
60 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFImage/IFImageFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFImageFilter.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "GPUImageFilter.h"
10 |
11 | @interface IFImageFilter : GPUImageFilter {
12 | GLuint filterSourceTexture3, filterSourceTexture4, filterSourceTexture5, filterSourceTexture6;
13 | }
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFImage/IFImageFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // IFImageFilter.m
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "IFImageFilter.h"
10 | #import "GPUImageOpenGLESContext.h"
11 |
12 | @interface IFImageFilter ()
13 | {
14 | GLint filterPositionAttribute, filterTextureCoordinateAttribute;
15 | GLint filterInputTextureUniform, filterInputTextureUniform2, filterInputTextureUniform3, filterInputTextureUniform4, filterInputTextureUniform5, filterInputTextureUniform6;
16 |
17 | GLuint filterFramebuffer;
18 | }
19 |
20 | @end
21 |
22 |
23 | @implementation IFImageFilter
24 |
25 | - (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
26 | {
27 | if (!(self = [super init]))
28 | {
29 | return nil;
30 | }
31 |
32 | [GPUImageOpenGLESContext useImageProcessingContext];
33 | filterProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:fragmentShaderString];
34 |
35 | [filterProgram addAttribute:@"position"];
36 | [filterProgram addAttribute:@"inputTextureCoordinate"];
37 |
38 | if (![filterProgram link])
39 | {
40 | NSString *progLog = [filterProgram programLog];
41 | NSLog(@"Program link log: %@", progLog);
42 | NSString *fragLog = [filterProgram fragmentShaderLog];
43 | NSLog(@"Fragment shader compile log: %@", fragLog);
44 | NSString *vertLog = [filterProgram vertexShaderLog];
45 | NSLog(@"Vertex shader compile log: %@", vertLog);
46 | filterProgram = nil;
47 | NSAssert(NO, @"Filter shader link failed");
48 | }
49 |
50 | filterPositionAttribute = [filterProgram attributeIndex:@"position"];
51 | filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
52 | filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
53 | filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
54 | filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture3" for the third input texture in the fragment shader
55 | filterInputTextureUniform4 = [filterProgram uniformIndex:@"inputImageTexture4"]; // This does assume a name of "inputImageTexture4" for the fourth input texture in the fragment shader
56 | filterInputTextureUniform5 = [filterProgram uniformIndex:@"inputImageTexture5"]; // This does assume a name of "inputImageTexture5" for the fifth input texture in the fragment shader
57 | filterInputTextureUniform6 = [filterProgram uniformIndex:@"inputImageTexture6"]; // This does assume a name of "inputImageTexture6" for the sixth input texture in the fragment shader
58 |
59 |
60 | [filterProgram use];
61 | glEnableVertexAttribArray(filterPositionAttribute);
62 | glEnableVertexAttribArray(filterTextureCoordinateAttribute);
63 |
64 | return self;
65 | }
66 |
67 | - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
68 | {
69 | [GPUImageOpenGLESContext useImageProcessingContext];
70 | [self setFilterFBO];
71 |
72 | [filterProgram use];
73 |
74 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
75 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
76 |
77 | glActiveTexture(GL_TEXTURE2);
78 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture);
79 |
80 | glUniform1i(filterInputTextureUniform, 2);
81 |
82 | if (filterSourceTexture2 != 0)
83 | {
84 | glActiveTexture(GL_TEXTURE3);
85 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture2);
86 |
87 | glUniform1i(filterInputTextureUniform2, 3);
88 | }
89 | if (filterSourceTexture3 != 0)
90 | {
91 | glActiveTexture(GL_TEXTURE4);
92 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture3);
93 | glUniform1i(filterInputTextureUniform3, 4);
94 | }
95 | if (filterSourceTexture4 != 0)
96 | {
97 | glActiveTexture(GL_TEXTURE5);
98 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture4);
99 | glUniform1i(filterInputTextureUniform4, 5);
100 | }
101 | if (filterSourceTexture5 != 0)
102 | {
103 | glActiveTexture(GL_TEXTURE6);
104 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture5);
105 | glUniform1i(filterInputTextureUniform5, 6);
106 | }
107 | if (filterSourceTexture6 != 0)
108 | {
109 | glActiveTexture(GL_TEXTURE7);
110 | glBindTexture(GL_TEXTURE_2D, filterSourceTexture6);
111 | glUniform1i(filterInputTextureUniform6, 7);
112 | }
113 |
114 | glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
115 | glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
116 |
117 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
118 |
119 | for (id currentTarget in targets)
120 | {
121 | [currentTarget setInputSize:inputTextureSize];
122 | [currentTarget newFrameReady];
123 | }
124 | }
125 |
126 | - (void)setFilterFBO;
127 | {
128 | if (!filterFramebuffer)
129 | {
130 | [self createFilterFBO];
131 | }
132 |
133 | glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
134 |
135 | CGSize currentFBOSize = [self sizeOfFBO];
136 | glViewport(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height);
137 | }
138 |
139 | - (void)createFilterFBO;
140 | {
141 |
142 |
143 | glActiveTexture(GL_TEXTURE1);
144 | glGenFramebuffers(1, &filterFramebuffer);
145 | glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
146 |
147 | CGSize currentFBOSize = [self sizeOfFBO];
148 | // NSLog(@"Filter size: %f, %f", currentFBOSize.width, currentFBOSize.height);
149 |
150 | glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)currentFBOSize.width, (int)currentFBOSize.height); // note: no renderbuffer is bound here; the texture attached below is what the FBO renders to
151 | glBindTexture(GL_TEXTURE_2D, outputTexture);
152 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
153 | glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);
154 |
155 | GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
156 |
157 | NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %u", status);
158 | }
159 |
160 | - (CGSize)sizeOfFBO;
161 | {
162 | CGSize outputSize = [self maximumOutputSize];
163 | if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )
164 | {
165 | return inputTextureSize;
166 | }
167 | else
168 | {
169 | return outputSize;
170 | }
171 | }
172 |
173 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; // index 0 is the primary source; any other index fills the first empty LUT slot
174 | {
175 | if (textureIndex == 0)
176 | {
177 | filterSourceTexture = newInputTexture;
178 | }
179 | else if (filterSourceTexture2 == 0)
180 | {
181 | filterSourceTexture2 = newInputTexture;
182 | }
183 | else if (filterSourceTexture3 == 0) {
184 | filterSourceTexture3 = newInputTexture;
185 | }
186 | else if (filterSourceTexture4 == 0) {
187 | filterSourceTexture4 = newInputTexture;
188 | }
189 | else if (filterSourceTexture5 == 0) {
190 | filterSourceTexture5 = newInputTexture;
191 | }
192 | else if (filterSourceTexture6 == 0) {
193 | filterSourceTexture6 = newInputTexture;
194 | }
195 |
196 | }
197 |
198 | //- (NSInteger)nextAvailableTextureIndex;
199 | //{
200 | // if (filterSourceTexture == 0)
201 | // {
202 | // filterSourceTexture = [firstInputFramebuffer texture];
203 | // }
204 | // else if (filterSourceTexture2 == 0)
205 | // {
206 | // filterSourceTexture2 = [firstInputFramebuffer texture];
207 | // }
208 | // else if (filterSourceTexture3 == 0) {
209 | // filterSourceTexture3 = [firstInputFramebuffer texture];
210 | // }
211 | // else if (filterSourceTexture4 == 0) {
212 | // filterSourceTexture4 = [firstInputFramebuffer texture];
213 | // }
214 | // else if (filterSourceTexture5 == 0) {
215 | // filterSourceTexture5 = [firstInputFramebuffer texture];
216 | // }
217 | // else if (filterSourceTexture6 == 0) {
218 | // filterSourceTexture6 = [firstInputFramebuffer texture];
219 | // }
220 | //}
221 |
222 | //- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex {
223 | // if (filterSourceTexture == 0)
224 | // {
225 | // filterSourceTexture = [newInputFramebuffer texture];
226 | // }
227 | // else if (filterSourceTexture2 == 0)
228 | // {
229 | // filterSourceTexture2 = [newInputFramebuffer texture];
230 | // }
231 | // else if (filterSourceTexture3 == 0) {
232 | // filterSourceTexture3 = [newInputFramebuffer texture];
233 | // }
234 | // else if (filterSourceTexture4 == 0) {
235 | // filterSourceTexture4 = [newInputFramebuffer texture];
236 | // }
237 | // else if (filterSourceTexture5 == 0) {
238 | // filterSourceTexture5 = [newInputFramebuffer texture];
239 | // }
240 | // else if (filterSourceTexture6 == 0) {
241 | // filterSourceTexture6 = [newInputFramebuffer texture];
242 | // }
243 | //}
244 |
245 | @end
246 |
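renderToTextureWithVertices:textureCoordinates: binds the primary input to texture unit GL_TEXTURE2 and each optional LUT to the next unit up, so slot N lands on unit N + 1 and the matching inputImageTextureN sampler receives that unit index via glUniform1i. The slots are populated through setInputTexture:atIndex:, which fills the first empty filterSourceTextureN. A hedged wiring sketch (GPUImagePicture is the in-repo image source; whether the demo feeds LUTs exactly this way is an assumption):

    // load a LUT image and route its texture into the filter's next free slot
    GPUImagePicture *lut = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"lomoMap.png"]];
    [lut addTarget:lomoFilter];  // slot 2 -> GL_TEXTURE3 -> "inputImageTexture2"
    [lut processImage];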
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFImage/IFVideoCamera.h:
--------------------------------------------------------------------------------
1 | //
2 | // IFVideoCamera.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "InstaFilters.h"
11 | #import "IFRotationFilter.h"
12 |
13 | @class IFVideoCamera;
14 |
15 | @protocol IFVideoCameraDelegate <NSObject>
16 |
17 | - (void)IFVideoCameraWillStartCaptureStillImage:(IFVideoCamera *)videoCamera;
18 | - (void)IFVideoCameraDidFinishCaptureStillImage:(IFVideoCamera *)videoCamera;
19 | - (void)IFVideoCameraDidSaveStillImage:(IFVideoCamera *)videoCamera;
20 | - (BOOL)canIFVideoCameraStartRecordingMovie:(IFVideoCamera *)videoCamera;
21 | - (void)IFVideoCameraWillStartProcessingMovie:(IFVideoCamera *)videoCamera;
22 | - (void)IFVideoCameraDidFinishProcessingMovie:(IFVideoCamera *)videoCamera;
23 | @end
24 |
25 | @interface IFVideoCamera : GPUImageVideoCamera
26 |
27 | @property (weak, nonatomic) id<IFVideoCameraDelegate> delegate;
28 |
29 | @property (strong, readonly) GPUImageView *gpuImageView;
30 | @property (strong, readonly) GPUImageView *gpuImageView_HD;
31 | @property (nonatomic, strong) UIImage *rawImage;
32 |
33 | @property (nonatomic, assign, readonly) BOOL isRecordingMovie; // assign: unsafe_unretained is meaningless for a scalar
34 |
35 | @property (nonatomic, strong) IFImageFilter *internalFilter;
36 |
37 | - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition highVideoQuality:(BOOL)isHighQuality;
38 | - (void)switchFilter:(IFFilterType)type;
39 | - (void)cancelAlbumPhotoAndGoBackToNormal;
40 | - (void)takePhoto;
41 | - (void)startRecordingMovie;
42 | - (void)stopRecordingMovie;
43 | - (void)saveCurrentStillImage;
44 | - (void)setRotation:(GPUImageRotationMode)newRotationMode;
45 | @end
46 |
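A minimal usage sketch of the camera API declared above (the preset and position values are standard AVFoundation constants, and startCameraCapture is inherited from GPUImageVideoCamera; none of this is shown in the demo itself):

    IFVideoCamera *camera = [[IFVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                          cameraPosition:AVCaptureDevicePositionBack
                                                        highVideoQuality:NO];
    camera.delegate = self;                       // adopt IFVideoCameraDelegate
    [self.view addSubview:camera.gpuImageView];   // live filtered preview
    [camera switchFilter:IF_LOMOFI_FILTER];
    [camera startCameraCapture];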
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/IFImage/InstaFilters.h:
--------------------------------------------------------------------------------
1 | //
2 | // InstaFilters.h
3 | // InstaFilters
4 | //
5 | // Created by Di Wu on 2/28/12.
6 | // Copyright (c) 2012 twitter:@diwup. All rights reserved.
7 | //
8 |
9 | typedef enum {
10 | IF_NORMAL_FILTER,
11 | IF_AMARO_FILTER,
12 | IF_RISE_FILTER,
13 | IF_HUDSON_FILTER,
14 | IF_XPROII_FILTER,
15 | IF_SIERRA_FILTER,
16 | IF_LOMOFI_FILTER,
17 | IF_EARLYBIRD_FILTER,
18 | IF_SUTRO_FILTER,
19 | IF_TOASTER_FILTER,
20 | IF_BRANNAN_FILTER,
21 | IF_INKWELL_FILTER,
22 | IF_WALDEN_FILTER,
23 | IF_HEFE_FILTER,
24 | IF_VALENCIA_FILTER,
25 | IF_NASHVILLE_FILTER,
26 | IF_1977_FILTER,
27 | IF_LORDKELVIN_FILTER,
28 | IF_FILTER_TOTAL_NUMBER
29 | } IFFilterType;
30 |
31 | #import "UIImage+Resize.h"
32 | #import "IFImageFilter.h"
33 | #import "IFVideoCamera.h"
34 | #import "IFSutroFilter.h"
35 | #import "IFRotationFilter.h"
36 | #import "IFAmaroFilter.h"
37 | #import "IFNormalFilter.h"
38 | #import "IFRiseFilter.h"
39 | #import "IFHudsonFilter.h"
40 | #import "IFXproIIFilter.h"
41 | #import "IFSierraFilter.h"
42 | #import "IFLomofiFilter.h"
43 | #import "IFEarlybirdFilter.h"
44 | #import "IFToasterFilter.h"
45 | #import "IFBrannanFilter.h"
46 | #import "IFInkwellFilter.h"
47 | #import "IFWaldenFilter.h"
48 | #import "IFHefeFilter.h"
49 | #import "IFValenciaFilter.h"
50 | #import "IFNashvilleFilter.h"
51 | #import "IF1977Filter.h"
52 | #import "IFLordKelvinFilter.h"
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/1977blowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/1977blowout.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/1977map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/1977map.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/amaroMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/amaroMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/blackboard1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/blackboard1024.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanBlowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanBlowout.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanContrast.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanContrast.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanLuma.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanLuma.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanProcess.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanProcess.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanScreen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/brannanScreen.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlyBirdCurves.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlyBirdCurves.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlybirdBlowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlybirdBlowout.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlybirdMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlybirdMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlybirdOverlayMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/earlybirdOverlayMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/edgeBurn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/edgeBurn.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeGradientMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeGradientMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeMetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeMetal.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeSoftLight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hefeSoftLight.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hudsonBackground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hudsonBackground.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hudsonMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/hudsonMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/inkwellMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/inkwellMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/kelvinMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/kelvinMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/lomoMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/lomoMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/nashvilleMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/nashvilleMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/overlayMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/overlayMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/riseMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/riseMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sierraMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sierraMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sierraVignette.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sierraVignette.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/softLight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/softLight.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sutroCurves.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sutroCurves.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sutroEdgeBurn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sutroEdgeBurn.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sutroMetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/sutroMetal.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterColorShift.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterColorShift.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterCurves.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterCurves.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterMetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterMetal.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterOverlayMapWarm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterOverlayMapWarm.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterSoftLight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/toasterSoftLight.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/valenciaGradientMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/valenciaGradientMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/valenciaMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/valenciaMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/vignetteMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/vignetteMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/waldenMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/waldenMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/xproMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Jonear/JNVideoKit/986a170afb8d5f498ce37989187aa1274ba3932e/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/Resources_for_IF_Filters/xproMap.png
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/UIImage+Resize.h:
--------------------------------------------------------------------------------
1 | // UIImage+Resize.h
2 | // Created by Trevor Harmon on 8/5/09.
3 | // Free for personal or commercial use, with or without modification.
4 | // No warranty is expressed or implied.
5 |
6 | // Extends the UIImage class to support resizing/cropping
7 | #import <UIKit/UIKit.h>
8 |
9 | @interface UIImage (Resize)
10 | - (UIImage *)croppedImage:(CGRect)bounds;
11 | - (UIImage *)thumbnailImage:(NSInteger)thumbnailSize
12 | transparentBorder:(NSUInteger)borderSize
13 | cornerRadius:(NSUInteger)cornerRadius
14 | interpolationQuality:(CGInterpolationQuality)quality;
15 | - (UIImage *)resizedImage:(CGSize)newSize
16 | interpolationQuality:(CGInterpolationQuality)quality;
17 | - (UIImage *)resizedImageWithContentMode:(UIViewContentMode)contentMode
18 | bounds:(CGSize)bounds
19 | interpolationQuality:(CGInterpolationQuality)quality;
20 |
21 | - (UIImage *)rotatedByDegrees:(CGFloat)degrees;
22 |
23 | @end
24 |
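
A quick sketch of how the category above might be used; the asset name and sizes are illustrative, not part of the kit:

    // Hypothetical call sites for the resize/rotate API.
    UIImage *source = [UIImage imageNamed:@"photo"];
    UIImage *thumb = [source resizedImageWithContentMode:UIViewContentModeScaleAspectFill
                                                  bounds:CGSizeMake(100, 100)
                                    interpolationQuality:kCGInterpolationHigh];
    UIImage *turned = [thumb rotatedByDegrees:90];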
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/IFFilters/UIImage+Resize.m:
--------------------------------------------------------------------------------
1 | //
2 | // UIImage+Resize.m
3 | // SCCaptureCameraDemo
4 | //
5 | // Created by Aevitx on 14-1-17.
6 | // Copyright (c) 2014 Aevitx. All rights reserved.
7 | //
8 |
9 | #import "UIImage+Resize.h"
10 |
11 | @implementation UIImage (Resize)
12 | // Returns a copy of this image that is cropped to the given bounds.
13 | // The bounds will be adjusted using CGRectIntegral.
14 | // This method ignores the image's imageOrientation setting.
15 | - (UIImage *)croppedImage:(CGRect)bounds {
16 | CGImageRef imageRef = CGImageCreateWithImageInRect([self CGImage], bounds);
17 | UIImage *croppedImage = [UIImage imageWithCGImage:imageRef];
18 | CGImageRelease(imageRef);
19 | return croppedImage;
20 | }
21 |
22 | // Returns a rescaled copy of the image, taking into account its orientation
23 | // The image will be scaled disproportionately if necessary to fit the bounds specified by the parameter
24 | - (UIImage *)resizedImage:(CGSize)newSize interpolationQuality:(CGInterpolationQuality)quality {
25 | BOOL drawTransposed;
26 | CGAffineTransform transform = CGAffineTransformIdentity;
27 |
28 | // In iOS 5 the image is already correctly rotated. See Eran Sandler's
29 | // addition here: http://eran.sandler.co.il/2011/11/07/uiimage-in-ios-5-orientation-and-resize/
30 |
31 | if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 5.0) {
32 | // On iOS 5 and later the image data is already upright, so no
33 | // transposition or orientation transform is needed.
34 | drawTransposed = NO;
35 | } else {
36 | switch (self.imageOrientation) {
37 | case UIImageOrientationLeft:
38 | case UIImageOrientationLeftMirrored:
39 | case UIImageOrientationRight:
40 | case UIImageOrientationRightMirrored:
41 | drawTransposed = YES;
42 | break;
43 | default:
44 | drawTransposed = NO;
45 | }
46 |
47 | transform = [self transformForOrientation:newSize];
48 | }
49 | return [self resizedImage:newSize transform:transform drawTransposed:drawTransposed interpolationQuality:quality];
50 | }
51 |
52 | // Resizes the image according to the given content mode, taking into account the image's orientation
53 | - (UIImage *)resizedImageWithContentMode:(UIViewContentMode)contentMode
54 | bounds:(CGSize)bounds
55 | interpolationQuality:(CGInterpolationQuality)quality {
56 | CGFloat horizontalRatio = bounds.width / self.size.width;
57 | CGFloat verticalRatio = bounds.height / self.size.height;
58 | CGFloat ratio;
59 |
60 | switch(contentMode) {
61 | case UIViewContentModeScaleAspectFill:
62 | ratio = MAX(horizontalRatio, verticalRatio);
63 | break;
64 |
65 | case UIViewContentModeScaleAspectFit:
66 | ratio = MIN(horizontalRatio, verticalRatio);
67 | break;
68 |
69 | default:
70 | [NSException raise:NSInvalidArgumentException format:@"Unsupported content mode: %ld", (long)contentMode];
71 | }
72 |
73 | CGSize newSize = CGSizeMake(self.size.width * ratio, self.size.height * ratio);
74 |
75 | return [self resizedImage:newSize interpolationQuality:quality];
76 | }
77 |
78 |
79 | #pragma mark - fix orientation
80 | - (UIImage *)fixOrientation {
81 |
82 | // No-op if the orientation is already correct
83 | if (self.imageOrientation == UIImageOrientationUp) return self;
84 |
85 | // We need to calculate the proper transformation to make the image upright.
86 | // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
87 | CGAffineTransform transform = CGAffineTransformIdentity;
88 |
89 | switch (self.imageOrientation) {
90 | case UIImageOrientationDown:
91 | case UIImageOrientationDownMirrored:
92 | transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
93 | transform = CGAffineTransformRotate(transform, M_PI);
94 | break;
95 |
96 | case UIImageOrientationLeft:
97 | case UIImageOrientationLeftMirrored:
98 | transform = CGAffineTransformTranslate(transform, self.size.width, 0);
99 | transform = CGAffineTransformRotate(transform, M_PI_2);
100 | break;
101 |
102 | case UIImageOrientationRight:
103 | case UIImageOrientationRightMirrored:
104 | transform = CGAffineTransformTranslate(transform, 0, self.size.height);
105 | transform = CGAffineTransformRotate(transform, -M_PI_2);
106 | break;
107 | case UIImageOrientationUp:
108 | case UIImageOrientationUpMirrored:
109 | break;
110 | }
111 |
112 | switch (self.imageOrientation) {
113 | case UIImageOrientationUpMirrored:
114 | case UIImageOrientationDownMirrored:
115 | transform = CGAffineTransformTranslate(transform, self.size.width, 0);
116 | transform = CGAffineTransformScale(transform, -1, 1);
117 | break;
118 |
119 | case UIImageOrientationLeftMirrored:
120 | case UIImageOrientationRightMirrored:
121 | transform = CGAffineTransformTranslate(transform, self.size.height, 0);
122 | transform = CGAffineTransformScale(transform, -1, 1);
123 | break;
124 | case UIImageOrientationUp:
125 | case UIImageOrientationDown:
126 | case UIImageOrientationLeft:
127 | case UIImageOrientationRight:
128 | break;
129 | }
130 |
131 | // Now we draw the underlying CGImage into a new context, applying the transform
132 | // calculated above.
133 | CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
134 | CGImageGetBitsPerComponent(self.CGImage), 0,
135 | CGImageGetColorSpace(self.CGImage),
136 | CGImageGetBitmapInfo(self.CGImage));
137 | CGContextConcatCTM(ctx, transform);
138 | switch (self.imageOrientation) {
139 | case UIImageOrientationLeft:
140 | case UIImageOrientationLeftMirrored:
141 | case UIImageOrientationRight:
142 | case UIImageOrientationRightMirrored:
143 | // Grr...
144 | CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);
145 | break;
146 |
147 | default:
148 | CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);
149 | break;
150 | }
151 |
152 | // And now we just create a new UIImage from the drawing context
153 | CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
154 | UIImage *img = [UIImage imageWithCGImage:cgimg];
155 | CGContextRelease(ctx);
156 | CGImageRelease(cgimg);
157 | return img;
158 | }
159 |
160 | static inline CGFloat DegreesToRadians(CGFloat degrees)
161 | {
162 | return M_PI * (degrees / 180.0);
163 | }
164 |
165 | - (UIImage *)rotatedByDegrees:(CGFloat)degrees
166 | {
167 | // calculate the size of the rotated view's containing box for our drawing space
168 | UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0,0,self.size.width, self.size.height)];
169 | CGAffineTransform t = CGAffineTransformMakeRotation(DegreesToRadians(degrees));
170 | rotatedViewBox.transform = t;
171 | CGSize rotatedSize = rotatedViewBox.frame.size;
172 |
173 | // Create the bitmap context
174 | UIGraphicsBeginImageContext(rotatedSize);
175 | CGContextRef bitmap = UIGraphicsGetCurrentContext();
176 |
177 | // Move the origin to the middle of the image so we will rotate and scale around the center.
178 | CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);
179 |
180 | // Rotate the image context
181 | CGContextRotateCTM(bitmap, DegreesToRadians(degrees));
182 |
183 | // Now, draw the rotated/scaled image into the context
184 | CGContextScaleCTM(bitmap, 1.0, -1.0);
185 | CGContextDrawImage(bitmap, CGRectMake(-self.size.width / 2, -self.size.height / 2, self.size.width, self.size.height), [self CGImage]);
186 |
187 | UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
188 | UIGraphicsEndImageContext();
189 | return newImage;
190 |
191 |
192 | }
193 |
194 | #pragma mark -
195 | #pragma mark Private helper methods
196 |
197 | // Returns a copy of the image that has been transformed using the given affine transform and scaled to the new size
198 | // The new image's orientation will be UIImageOrientationUp, regardless of the current image's orientation
199 | // If the new size is not integral, it will be rounded up
200 | - (UIImage *)resizedImage:(CGSize)newSize
201 | transform:(CGAffineTransform)transform
202 | drawTransposed:(BOOL)transpose
203 | interpolationQuality:(CGInterpolationQuality)quality {
204 | CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));
205 | CGRect transposedRect = CGRectMake(0, 0, newRect.size.height, newRect.size.width);
206 | CGImageRef imageRef = self.CGImage;
207 |
208 | // Fix for a colorspace / transparency issue that affects some types of
209 | // images. See here: http://vocaro.com/trevor/blog/2009/10/12/resize-a-uiimage-the-right-way/comment-page-2/#comment-39951
210 |
211 | CGContextRef bitmap = CGBitmapContextCreate(NULL,
212 | newRect.size.width,
213 | newRect.size.height,
214 | 8,
215 | 0,
216 | CGImageGetColorSpace(imageRef),
217 | kCGImageAlphaNoneSkipLast);
218 |
219 | // Rotate and/or flip the image if required by its orientation
220 | CGContextConcatCTM(bitmap, transform);
221 |
222 | // Set the quality level to use when rescaling
223 | CGContextSetInterpolationQuality(bitmap, quality);
224 |
225 | // Draw into the context; this scales the image
226 | CGContextDrawImage(bitmap, transpose ? transposedRect : newRect, imageRef);
227 |
228 | // Get the resized image from the context and a UIImage
229 | CGImageRef newImageRef = CGBitmapContextCreateImage(bitmap);
230 | UIImage *newImage = [UIImage imageWithCGImage:newImageRef];
231 |
232 | // Clean up
233 | CGContextRelease(bitmap);
234 | CGImageRelease(newImageRef);
235 |
236 | return newImage;
237 | }
238 |
239 | // Returns an affine transform that takes into account the image orientation when drawing a scaled image
240 | - (CGAffineTransform)transformForOrientation:(CGSize)newSize {
241 | CGAffineTransform transform = CGAffineTransformIdentity;
242 |
243 | switch(self.imageOrientation) {
244 | case UIImageOrientationDown: // EXIF = 3
245 | case UIImageOrientationDownMirrored: // EXIF = 4
246 | transform = CGAffineTransformTranslate(transform, newSize.width, newSize.height);
247 | transform = CGAffineTransformRotate(transform, M_PI);
248 | break;
249 |
250 | case UIImageOrientationLeft: // EXIF = 6
251 | case UIImageOrientationLeftMirrored: // EXIF = 5
252 | transform = CGAffineTransformTranslate(transform, newSize.width, 0);
253 | transform = CGAffineTransformRotate(transform, M_PI_2);
254 | break;
255 |
256 | case UIImageOrientationRight: // EXIF = 8
257 | case UIImageOrientationRightMirrored: // EXIF = 7
258 | transform = CGAffineTransformTranslate(transform, 0, newSize.height);
259 | transform = CGAffineTransformRotate(transform, -M_PI_2);
260 | break;
261 | default:
262 | break;
263 | }
264 |
265 | switch(self.imageOrientation) {
266 | case UIImageOrientationUpMirrored: // EXIF = 2
267 | case UIImageOrientationDownMirrored: // EXIF = 4
268 | transform = CGAffineTransformTranslate(transform, newSize.width, 0);
269 | transform = CGAffineTransformScale(transform, -1, 1);
270 | break;
271 |
272 | case UIImageOrientationLeftMirrored: // EXIF = 5
273 | case UIImageOrientationRightMirrored: // EXIF = 7
274 | transform = CGAffineTransformTranslate(transform, newSize.height, 0);
275 | transform = CGAffineTransformScale(transform, -1, 1);
276 | break;
277 | default:
278 | break;
279 | }
280 |
281 | return transform;
282 | }
283 |
284 |
285 | @end
286 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/JNVideoMerge.h:
--------------------------------------------------------------------------------
1 | //
2 | // JNVideoMerge.h
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 12-8-30.
6 | // Copyright (c) 2012 Jonear. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <AVFoundation/AVFoundation.h>
11 |
12 | typedef void (^CompletionBack)(BOOL succes);
13 |
14 | @interface JNVideoMerge : NSObject
15 |
16 | /**
17 | Merge an audio track and a video track into a single file
18 |
19 | @param str_audio_path path of the audio file
20 | @param str_video_path path of the video file
21 | @param str_merge_path output path of the merged file
22 | @param block completion callback
23 |
24 | @return whether execution succeeded
25 | */
26 | + (BOOL)mergeAVFile:(NSString*)str_audio_path
27 | videoFilePath:(NSString*)str_video_path
28 | mergeFilePath:(NSString*)str_merge_path
29 | completion:(CompletionBack)block;
30 |
31 |
32 | /**
33 | Merge multiple video files into a single file
34 |
35 | @param videoPathArray array of video file paths
36 | @param str_merge_path output path of the merged file
37 | @param block completion callback
38 |
39 | @return whether execution succeeded
40 | */
41 | + (BOOL)mergeFreeVideoFilePath:(NSArray *)videoPathArray
42 | mergeFilePath:(NSString*)str_merge_path
43 | completion:(CompletionBack)block;
44 |
45 |
46 |
47 | /**
48 | Crop (trim) a video
49 |
50 | @param videoPath path of the video file
51 | @param str_merge_path output path of the cropped file
52 | @param startTime start time
53 | @param lengthTime length of the segment to keep
54 | @param block completion callback
55 |
56 | @return whether execution succeeded
57 | */
58 | + (BOOL)cropVideoFilePath:(NSString *)videoPath
59 | mergeFilePath:(NSString*)str_merge_path
60 | startTime:(CMTime)startTime
61 | lengthTime:(CMTime)lengthTime
62 | completion:(CompletionBack)block;
63 |
64 | @end
65 |
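
A minimal usage sketch of the interface above; the MergeDemo helper and its input paths are illustrative, not part of the kit:

    #import "JNVideoMerge.h"

    // Hypothetical helper: merge a recorded audio file with a recorded
    // video file into one mp4 in the temporary directory.
    static void MergeDemo(NSString *audioPath, NSString *videoPath) {
        NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"merged.mp4"];
        BOOL ok = [JNVideoMerge mergeAVFile:audioPath
                              videoFilePath:videoPath
                              mergeFilePath:outPath
                                 completion:^(BOOL succes) {
            // Invoked asynchronously when the export finishes.
            NSLog(@"merge finished: %d, output: %@", succes, outPath);
        }];
        if (!ok) {
            NSLog(@"merge could not be started");
        }
    }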
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/JNVideoPlayer.h:
--------------------------------------------------------------------------------
1 | //
2 | // JNVideoPlayer.h
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 12-8-23.
6 | // Copyright (c) 2012 Jonear. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 | #import <AVFoundation/AVFoundation.h>
12 |
13 | extern NSString *const AVPlayerItemDidPlayToEndTimeNotification; // playback finished
14 | extern NSString *const AVPlayerItemFailedToPlayToEndTimeNotification; // playback failed
15 |
16 | @interface JNVideoPlayer : NSObject
17 |
18 | @property (strong, nonatomic, readonly) AVPlayerItem *playerItem;
19 | @property (assign, nonatomic, readonly) BOOL isPlaying;
20 |
21 | - (BOOL)initVideoPlayer:(UIView *)p_view videoFilePath:(NSURL*)videoUrl;
22 |
23 | - (BOOL)play;
24 | - (BOOL)pause;
25 | - (BOOL)stop;
26 | - (BOOL)seek:(CGFloat)f_seek_time;
27 |
28 | - (CGFloat)currentTime;
29 | - (CGFloat)duration;
30 | - (CGFloat)timeScale;
31 |
32 | @end
33 |
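
A minimal usage sketch of the player interface above; containerView and moviePath are illustrative placeholders:

    // Hypothetical wiring inside a view controller.
    JNVideoPlayer *player = [[JNVideoPlayer alloc] init];
    NSURL *url = [NSURL fileURLWithPath:moviePath];
    if ([player initVideoPlayer:containerView videoFilePath:url]) {
        [player play];        // start playback
        [player seek:1.5];    // jump to 1.5 s
        NSLog(@"duration: %.0f s", [player duration]);
    }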
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/JNVideoPlayer.m:
--------------------------------------------------------------------------------
1 | //
2 | // JNVideoPlayer.m
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 12-8-23.
6 | // Copyright (c) 2012 Jonear. All rights reserved.
7 | //
8 |
9 | #import "JNVideoPlayer.h"
10 | #import <AVFoundation/AVFoundation.h>
11 |
12 |
13 | @implementation JNVideoPlayer {
14 | AVPlayer *_player;
15 | }
16 |
17 | - (BOOL)initVideoPlayer:(UIView *)p_view videoFilePath:(NSURL*)videoUrl {
18 | if (p_view && videoUrl) {
19 | // Use the playerItem to read the video's information: current playback time, total duration, etc.
20 | _playerItem = [AVPlayerItem playerItemWithURL:videoUrl];
21 | // The player object controls playback: seeking, pausing, and so on.
22 | _player = [AVPlayer playerWithPlayerItem:_playerItem];
23 | AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
24 | playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
25 | // assign the frame via the layer's setter
26 | playerLayer.frame = p_view.bounds;
27 |
28 | [p_view.layer addSublayer:playerLayer];
29 |
30 | _isPlaying = NO;
31 |
32 |
33 | [[NSNotificationCenter defaultCenter] removeObserver:self];
34 | [[NSNotificationCenter defaultCenter] addObserver:self
35 | selector:@selector(playFinishNotification:)
36 | name:AVPlayerItemDidPlayToEndTimeNotification
37 | object:_playerItem];
38 |
39 | [[NSNotificationCenter defaultCenter] addObserver:self
40 | selector:@selector(playFailedNotification:)
41 | name:AVPlayerItemFailedToPlayToEndTimeNotification
42 | object:_playerItem];
43 |
44 | return YES;
45 | }
46 |
47 | return NO;
48 | }
49 |
50 | - (BOOL)play {
51 | if (_player) {
52 | [_player play];
53 | _isPlaying = YES;
54 | return YES;
55 | }else {
56 | return NO;
57 | }
58 | }
59 |
60 | // pause
61 | - (BOOL)pause{
62 | if (_player) {
63 | [_player pause];
64 | _isPlaying = NO;
65 | return YES;
66 | }else {
67 | return NO;
68 | }
69 | }
70 |
71 | // stop
72 | - (BOOL)stop {
73 | if (_player) {
74 | [self seek:0.];
75 | [_player play];
76 | [_player pause];
77 | _isPlaying = NO;
78 | return YES;
79 | }else {
80 | return NO;
81 | }
82 | }
83 |
84 | - (BOOL)seek:(CGFloat)f_seek_time{
85 | if (_player) {
86 | CMTime time = CMTimeMake(f_seek_time*_player.currentTime.timescale, _player.currentTime.timescale);
87 | [_player seekToTime:time];
88 | return YES;
89 | } else {
90 | return NO;
91 | }
92 | }
93 |
94 | - (CGFloat)currentTime {
95 | if (_player) {
96 | // CMTimeGetSeconds keeps the fractional part and copes with
97 | // invalid/indefinite times, unlike integer value/timescale division.
98 | CGFloat seconds = CMTimeGetSeconds(_player.currentTime);
99 | return isfinite(seconds) ? seconds : 0;
100 | } else {
101 | return 0;
102 | }
103 | }
104 |
105 | - (CGFloat)duration {
106 | if (_player && _playerItem) {
107 | CGFloat seconds = CMTimeGetSeconds(_playerItem.duration);
108 | return isfinite(seconds) ? seconds : 0;
109 | } else {
110 | return 0;
111 | }
112 | }
113 |
114 | - (CGFloat)timeScale {
115 | return _player.currentTime.timescale;
116 | }
117 |
118 | // MARK: - play notification
119 | - (void)playFinishNotification:(NSNotification *)notification {
120 | _isPlaying = NO;
121 | [self seek:0.];
122 | }
123 |
124 | - (void)playFailedNotification:(NSNotification *)notification {
125 | _isPlaying = NO;
126 | }
127 |
128 | - (void)dealloc {
129 | // Balance the observer registrations made in initVideoPlayer:videoFilePath:.
130 | [[NSNotificationCenter defaultCenter] removeObserver:self];
131 | }
132 |
133 | @end
134 |
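
For reference, seek: above builds its CMTime from the player's current timescale. CMTimeMake(value, timescale) denotes value/timescale seconds, so the two constructions below are equivalent (600 is an illustrative, commonly used timescale):

    CMTime a = CMTimeMake(1500, 600);            // 1500/600 = 2.5 s
    CMTime b = CMTimeMakeWithSeconds(2.5, 600);  // same instant, built directly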
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/JNVideoRecord.h:
--------------------------------------------------------------------------------
1 | //
2 | // JNVideoRecord.h
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 14-10-14.
6 | // Copyright (c) 2014 Jonear. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import "GPUImage.h"
11 | #import "GPUImageFilter.h"
12 | #import "IFVideoCamera.h"
13 |
14 | @interface JNVideoRecord : NSObject
15 |
16 | @property (nonatomic, strong) IFVideoCamera *videoCamera;
17 |
18 | @property (nonatomic, strong) GPUImageOutput *imageFilter;
19 | @property (nonatomic, strong) GPUImageView *imageView;
20 |
21 | - (void)initVideoCapture:(GPUImageView *)imageView path:(NSString *)str_video_file_path;
22 |
23 | /**
24 | * Start writing to the output file
25 | */
26 | - (void)startVideoRecord;
27 |
28 | /**
29 | * Switch between the front and rear cameras
30 | */
31 | - (void)rotateCamera;
32 |
33 | /**
34 | * Start capturing and writing to the file
35 | */
36 | - (void)startVideoCapture;
37 |
38 | /**
39 | * Stop the video
40 | */
41 | - (void)stopVideoCapture;
42 |
43 | /**
44 | * Stop writing to the file; the preview keeps running
45 | */
46 | - (void)waitVideoCapture;
47 |
48 | /**
49 | * Pause (used for short videos)
50 | */
51 | - (BOOL)pauseVideoCapture;
52 |
53 | /**
54 | * Resume (used for short videos)
55 | */
56 | - (void)resumeVideoCapture:(NSString *)filePath;
57 |
58 | /**
59 | * Select a filter
60 | */
61 | - (void)setFilterModel:(IFFilterType)effect;
62 |
63 | @end
64 |
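
A minimal usage sketch of the recorder interface above, mirroring how the demo's ViewController drives it; previewView and outputPath are illustrative:

    // Hypothetical setup: preview into a GPUImageView, record to a file.
    JNVideoRecord *record = [[JNVideoRecord alloc] init];
    [record initVideoCapture:previewView path:outputPath];
    [record setFilterModel:IF_EARLYBIRD_FILTER];  // Instagram-style filter
    [record startVideoCapture];                   // start camera + preview
    [record startVideoRecord];                    // begin writing to disk
    // ... later:
    [record stopVideoCapture];                    // finish file, stop camera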
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/JNVideoKit/JNVideoRecord.m:
--------------------------------------------------------------------------------
1 | //
2 | // JNVideoRecord.m
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 14-10-14.
6 | // Copyright (c) 2014 Jonear. All rights reserved.
7 | //
8 |
9 | #import "JNVideoRecord.h"
10 |
11 | #define VIDEO_WIDTH 480
12 | #define VIDEO_HEIGHT 480
13 |
14 | @implementation JNVideoRecord
15 | {
16 | GPUImageMovieWriter *_movieWriter;
17 | }
18 |
19 | - (id)init {
20 | self = [super init];
21 | if (self) {
22 | self.videoCamera = [[IFVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionFront highVideoQuality:YES];
23 |
24 | // self.videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
25 | // self.videoCamera.horizontallyMirrorFrontFacingCamera = YES;
26 | // self.videoCamera.horizontallyMirrorRearFacingCamera = NO;
27 | // [self.videoCamera switchFilter:IF_INKWELL_FILTER];
28 | }
29 | return self;
30 | }
31 |
32 | - (void)initVideoCapture:(GPUImageView *)imageView path:(NSString *)str_video_file_path {
33 | // preview view
34 | _imageView = imageView;
35 | [imageView setFillMode:kGPUImageFillModePreserveAspectRatioAndFill];
36 | [self.videoCamera.internalFilter addTarget:imageView];
37 | // imageView.layer.contentsScale = 2.0f; // for higher quality
38 |
39 | // flip the preview view left/right
40 | [self moveVideoShowPosition];
41 |
42 | // write to the file
43 | if (str_video_file_path.length > 0) {
44 | [[NSFileManager defaultManager] removeItemAtPath:str_video_file_path error:nil];
45 | [self createVideoWriter:str_video_file_path];
46 | }
47 | }
48 |
49 | - (void)createVideoWriter:(NSString *)filePath {
50 | NSMutableDictionary *videoSettings = [[NSMutableDictionary alloc] init];
51 | [videoSettings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
52 | [videoSettings setObject:[NSNumber numberWithInteger:VIDEO_WIDTH] forKey:AVVideoWidthKey];
53 | [videoSettings setObject:[NSNumber numberWithInteger:VIDEO_HEIGHT] forKey:AVVideoHeightKey];
54 |
55 | //init audio setting
56 | AudioChannelLayout channelLayout;
57 | memset(&channelLayout, 0, sizeof(AudioChannelLayout));
58 | channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
59 |
60 | // NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
61 | // [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
62 | // [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
63 | // [ NSNumber numberWithFloat: 16000.0], AVSampleRateKey,
64 | // [ NSData dataWithBytes:&channelLayout length: sizeof( AudioChannelLayout ) ], AVChannelLayoutKey,
65 | // [ NSNumber numberWithInt: 32000 ], AVEncoderBitRateKey,
66 | // nil];
67 |
68 | //init Movie path
69 | unlink([filePath UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
70 | NSURL *movieURL = [NSURL fileURLWithPath:filePath];
71 |
72 | //init movieWriter
73 | _movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(VIDEO_WIDTH, VIDEO_HEIGHT) fileType:AVFileTypeMPEG4 outputSettings:videoSettings];
74 | // _movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
75 |
76 | // [_movieWriter setHasAudioTrack:YES audioSettings:audioSettings];
77 |
78 | [self.videoCamera.internalFilter addTarget:_movieWriter];
79 | }
80 |
81 | - (void)moveVideoShowPosition {
82 | if (self.videoCamera.cameraPosition == AVCaptureDevicePositionFront) {
83 | [self.videoCamera setRotation:kGPUImageRotate180];
84 | } else {
85 | [self.videoCamera setRotation:kGPUImageNoRotation];
86 | }
87 | }
88 |
89 | - (void)startVideoRecord {
90 | [_movieWriter startRecording];
91 | }
92 |
93 | - (void)rotateCamera {
94 | [_videoCamera rotateCamera];
95 | [self moveVideoShowPosition];
96 | }
97 |
98 | - (void)startVideoCapture {
99 | [_videoCamera startCameraCapture];
100 | // [self startVideoRecord];
101 | }
102 |
103 | - (void)stopVideoCapture {
104 | [_movieWriter finishRecording];
105 | [self.videoCamera.internalFilter removeTarget:_movieWriter];
106 | [_videoCamera stopCameraCapture];
107 | }
108 |
109 | - (void)waitVideoCapture {
110 | [_movieWriter finishRecording];
111 | [self.videoCamera.internalFilter removeTarget:_movieWriter];
112 | }
113 |
114 | - (BOOL)pauseVideoCapture {
115 | NSTimeInterval time = [_movieWriter getRecordTime];
116 | if (time > 1.) {
117 | [_movieWriter finishRecording];
118 | [self.videoCamera.internalFilter removeTarget:_movieWriter];
119 | return YES;
120 | } else {
121 | return NO;
122 | }
123 | }
124 |
125 | - (void)resumeVideoCapture:(NSString *)filePath {
126 | if (filePath.length > 0) {
127 | [[NSFileManager defaultManager] removeItemAtPath:filePath error:nil];
128 | [self createVideoWriter:filePath];
129 | } else {
130 | [self.videoCamera.internalFilter addTarget:_movieWriter];
131 | }
132 | [self startVideoRecord];
133 | }
134 |
135 | - (void)setFilterModel:(IFFilterType)effect{
136 | [self.videoCamera switchFilter:effect];
137 | [self updateFilterView];
138 | }
139 |
140 | - (void)updateFilterView
141 | {
142 | // [_videoCamera removeTarget:self.videoCamera.internalFilter];
143 | if (_imageView) {
144 | [self.videoCamera.internalFilter addTarget:_imageView];
145 | }
146 | if (_movieWriter) {
147 | [self.videoCamera.internalFilter addTarget:_movieWriter];
148 | }
149 | // [_videoCamera addTarget:self.videoCamera.internalFilter];
150 | }
151 |
152 | - (void)dealloc
153 | {
154 | [_videoCamera stopCameraCapture];
155 | }
156 |
157 | @end
158 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 16/10/24.
6 | // Copyright © 2016 Jonear. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface ViewController : UIViewController
12 |
13 |
14 | @end
15 |
16 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 16/10/24.
6 | // Copyright © 2016 Jonear. All rights reserved.
7 | //
8 |
9 | #import "ViewController.h"
10 | #import "JNVideoRecord.h"
11 | #import "JNVideoPlayer.h"
12 | #import "JNVideoMerge.h"
13 |
14 | @interface ViewController ()
15 |
16 | @end
17 |
18 | @implementation ViewController {
19 | GPUImageView *_imageView;
20 | JNVideoRecord *_videoRecord;
21 | JNVideoPlayer *_videoPlayer;
22 |
23 | UIButton *_demoButton;
24 | NSString *_strpath;
25 | }
26 |
27 | - (void)viewDidLoad {
28 | [super viewDidLoad];
29 | // Do any additional setup after loading the view, typically from a nib.
30 |
31 | _imageView = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, 320)];
32 | [self.view addSubview:_imageView];
33 |
34 | // video Record
35 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
36 | _strpath = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"a.mp4"];
37 | _videoRecord = [[JNVideoRecord alloc] init];
38 | [_videoRecord initVideoCapture:_imageView path:_strpath];
39 | [_videoRecord setFilterModel:IF_EARLYBIRD_FILTER];
40 | [_videoRecord startVideoCapture];
41 |
42 | _demoButton = [[UIButton alloc] initWithFrame:CGRectMake(0, 400, 100, 100)];
43 | [_demoButton setCenter:CGPointMake(self.view.bounds.size.width/2, _demoButton.center.y)];
44 | [_demoButton.layer setCornerRadius:50];
45 | [_demoButton.layer setMasksToBounds:YES];
46 | [_demoButton addTarget:self action:@selector(buttonClick:) forControlEvents:UIControlEventTouchUpInside];
47 | [_demoButton setBackgroundColor:[UIColor redColor]];
48 | [_demoButton setTitle:@"record" forState:UIControlStateNormal];
49 | [self.view addSubview:_demoButton];
50 |
51 | // video Player
52 | _videoPlayer = [[JNVideoPlayer alloc] init];
53 | // NSURL *videoUrl = [NSURL URLWithString:@"http://115.231.22.25/v.cctv.com/flash/mp4video6/TMS/2011/01/05/cf752b1c12ce452b3040cab2f90bc265_h264818000nero_aac32-1.mp4?wshc_tag=0&wsts_tag=56e4fbf8&wsid_tag=7b3abf44&wsiphost=ipdbm"];
54 | // [_videoPlayer initVideoPlayer:_imageView videoFilePath:videoUrl];
55 | // [_videoPlayer play];
56 |
57 | }
58 |
59 |
60 | - (void)didReceiveMemoryWarning {
61 | [super didReceiveMemoryWarning];
62 | // Dispose of any resources that can be recreated.
63 | }
64 |
65 | - (void)buttonClick:(UIButton *)sender {
66 | NSString *title = [sender titleForState:UIControlStateNormal];
67 |
68 | if ([title isEqualToString:@"record"]) {
69 | [sender setTitle:@"stop record" forState:UIControlStateNormal];
70 | [_videoRecord startVideoRecord];
71 | } else if ([title isEqualToString:@"stop record"]) {
72 | [sender setTitle:@"play" forState:UIControlStateNormal];
73 | [_videoRecord stopVideoCapture];
74 |
75 | [_videoPlayer initVideoPlayer:_imageView videoFilePath:[NSURL fileURLWithPath:_strpath]];
76 |
77 | // notification
78 | [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
79 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playFinishNotification:) name:AVPlayerItemDidPlayToEndTimeNotification object:_videoPlayer.playerItem];
80 | } else if ([title isEqualToString:@"play"]) {
81 | [sender setTitle:@"stop play" forState:UIControlStateNormal];
82 |
83 | [_videoPlayer play];
84 | } else if ([title isEqualToString:@"stop play"]) {
85 | [sender setTitle:@"play" forState:UIControlStateNormal];
86 |
87 | [_videoPlayer pause];
88 | } else if ([title isEqualToString:@"crop"]) {
89 | // crop
90 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
91 | NSString *outpath = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"b.mp4"];
92 |
93 | NSLog(@"~~~~~~~cropping");
94 | CMTime time = CMTimeMake([_videoPlayer duration]/2*[_videoPlayer timeScale], [_videoPlayer timeScale]);
95 | [JNVideoMerge cropVideoFilePath:_strpath
96 | mergeFilePath:outpath
97 | startTime:time
98 | lengthTime:time
99 | completion:^(BOOL succes) {
100 | NSLog(@"~~~~~~~crop success:%d, path:%@", succes, outpath);
101 | [_videoPlayer initVideoPlayer:_imageView videoFilePath:[NSURL fileURLWithPath:outpath]];
102 |
103 | // removeObserver
104 | [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
105 | [_videoPlayer play];
106 |
107 | [sender setTitle:@"finish" forState:UIControlStateNormal];
108 | }];
109 | }
110 | }
111 |
112 | - (void)playFinishNotification:(id)sender {
113 | [_demoButton setTitle:@"crop" forState:UIControlStateNormal];
114 | }
115 |
116 |
117 | @end
118 |
--------------------------------------------------------------------------------
/JNVideoKitDemo/JNVideoKitDemo/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // JNVideoKitDemo
4 | //
5 | // Created by Jonear on 16/10/24.
6 | // Copyright © 2016 Jonear. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # JNVideoKit
2 | iOS video toolkit
3 | (record, play, merge, filters, crop)
4 |
--------------------------------------------------------------------------------