├── .DS_Store
├── .gitignore
├── README.md
└── VideoFilterText
    ├── VideoFilterText.xcodeproj
    │   ├── project.pbxproj
    │   └── project.xcworkspace
    │       └── contents.xcworkspacedata
    ├── VideoFilterText
    │   ├── AppDelegate.h
    │   ├── AppDelegate.m
    │   ├── Assets.xcassets
    │   │   └── AppIcon.appiconset
    │   │       └── Contents.json
    │   ├── Base.lproj
    │   │   └── LaunchScreen.storyboard
    │   ├── Info.plist
    │   ├── Source
    │   │   ├── .DS_Store
    │   │   ├── libGPUImage.a
    │   │   └── usr
    │   │       ├── GLProgram.h
    │   │       ├── GPUImage.h
    │   │       ├── GPUImage3x3ConvolutionFilter.h
    │   │       ├── GPUImage3x3TextureSamplingFilter.h
    │   │       ├── GPUImageAdaptiveThresholdFilter.h
    │   │       ├── GPUImageAddBlendFilter.h
    │   │       ├── GPUImageAlphaBlendFilter.h
    │   │       ├── GPUImageAmatorkaFilter.h
    │   │       ├── GPUImageAverageColor.h
    │   │       ├── GPUImageAverageLuminanceThresholdFilter.h
    │   │       ├── GPUImageBilateralFilter.h
    │   │       ├── GPUImageBoxBlurFilter.h
    │   │       ├── GPUImageBrightnessFilter.h
    │   │       ├── GPUImageBuffer.h
    │   │       ├── GPUImageBulgeDistortionFilter.h
    │   │       ├── GPUImageCGAColorspaceFilter.h
    │   │       ├── GPUImageCannyEdgeDetectionFilter.h
    │   │       ├── GPUImageChromaKeyBlendFilter.h
    │   │       ├── GPUImageChromaKeyFilter.h
    │   │       ├── GPUImageClosingFilter.h
    │   │       ├── GPUImageColorBlendFilter.h
    │   │       ├── GPUImageColorBurnBlendFilter.h
    │   │       ├── GPUImageColorConversion.h
    │   │       ├── GPUImageColorDodgeBlendFilter.h
    │   │       ├── GPUImageColorInvertFilter.h
    │   │       ├── GPUImageColorLocalBinaryPatternFilter.h
    │   │       ├── GPUImageColorMatrixFilter.h
    │   │       ├── GPUImageColorPackingFilter.h
    │   │       ├── GPUImageColourFASTFeatureDetector.h
    │   │       ├── GPUImageColourFASTSamplingOperation.h
    │   │       ├── GPUImageContext.h
    │   │       ├── GPUImageContrastFilter.h
    │   │       ├── GPUImageCropFilter.h
    │   │       ├── GPUImageCrosshairGenerator.h
    │   │       ├── GPUImageCrosshatchFilter.h
    │   │       ├── GPUImageDarkenBlendFilter.h
    │   │       ├── GPUImageDifferenceBlendFilter.h
    │   │       ├── GPUImageDilationFilter.h
    │   │       ├── GPUImageDirectionalNonMaximumSuppressionFilter.h
    │   │       ├── GPUImageDirectionalSobelEdgeDetectionFilter.h
    │   │       ├── GPUImageDissolveBlendFilter.h
    │   │       ├── GPUImageDivideBlendFilter.h
    │   │       ├── GPUImageEmbossFilter.h
    │   │       ├── GPUImageErosionFilter.h
    │   │       ├── GPUImageExclusionBlendFilter.h
    │   │       ├── GPUImageExposureFilter.h
    │   │       ├── GPUImageFASTCornerDetectionFilter.h
    │   │       ├── GPUImageFalseColorFilter.h
    │   │       ├── GPUImageFilter.h
    │   │       ├── GPUImageFilterGroup.h
    │   │       ├── GPUImageFilterPipeline.h
    │   │       ├── GPUImageFourInputFilter.h
    │   │       ├── GPUImageFramebuffer.h
    │   │       ├── GPUImageFramebufferCache.h
    │   │       ├── GPUImageFramework.h
    │   │       ├── GPUImageGammaFilter.h
    │   │       ├── GPUImageGaussianBlurFilter.h
    │   │       ├── GPUImageGaussianBlurPositionFilter.h
    │   │       ├── GPUImageGaussianSelectiveBlurFilter.h
    │   │       ├── GPUImageGlassSphereFilter.h
    │   │       ├── GPUImageGrayscaleFilter.h
    │   │       ├── GPUImageHSBFilter.h
    │   │       ├── GPUImageHalftoneFilter.h
    │   │       ├── GPUImageHardLightBlendFilter.h
    │   │       ├── GPUImageHarrisCornerDetectionFilter.h
    │   │       ├── GPUImageHazeFilter.h
    │   │       ├── GPUImageHighPassFilter.h
    │   │       ├── GPUImageHighlightShadowFilter.h
    │   │       ├── GPUImageHistogramEqualizationFilter.h
    │   │       ├── GPUImageHistogramFilter.h
    │   │       ├── GPUImageHistogramGenerator.h
    │   │       ├── GPUImageHoughTransformLineDetector.h
    │   │       ├── GPUImageHueBlendFilter.h
    │   │       ├── GPUImageHueFilter.h
    │   │       ├── GPUImageJFAVoronoiFilter.h
    │   │       ├── GPUImageKuwaharaFilter.h
    │   │       ├── GPUImageKuwaharaRadius3Filter.h
    │   │       ├── GPUImageLanczosResamplingFilter.h
    │   │       ├── GPUImageLaplacianFilter.h
    │   │       ├── GPUImageLevelsFilter.h
    │   │       ├── GPUImageLightenBlendFilter.h
    │   │       ├── GPUImageLineGenerator.h
    │   │       ├── GPUImageLinearBurnBlendFilter.h
    │   │       ├── GPUImageLocalBinaryPatternFilter.h
    │   │       ├── GPUImageLookupFilter.h
    │   │       ├── GPUImageLowPassFilter.h
    │   │       ├── GPUImageLuminanceRangeFilter.h
    │   │       ├── GPUImageLuminanceThresholdFilter.h
    │   │       ├── GPUImageLuminosity.h
    │   │       ├── GPUImageLuminosityBlendFilter.h
    │   │       ├── GPUImageMaskFilter.h
    │   │       ├── GPUImageMedianFilter.h
    │   │       ├── GPUImageMissEtikateFilter.h
    │   │       ├── GPUImageMonochromeFilter.h
    │   │       ├── GPUImageMosaicFilter.h
    │   │       ├── GPUImageMotionBlurFilter.h
    │   │       ├── GPUImageMotionDetector.h
    │   │       ├── GPUImageMovie.h
    │   │       ├── GPUImageMovieComposition.h
    │   │       ├── GPUImageMovieWriter.h
    │   │       ├── GPUImageMultiplyBlendFilter.h
    │   │       ├── GPUImageNobleCornerDetectionFilter.h
    │   │       ├── GPUImageNonMaximumSuppressionFilter.h
    │   │       ├── GPUImageNormalBlendFilter.h
    │   │       ├── GPUImageOpacityFilter.h
    │   │       ├── GPUImageOpeningFilter.h
    │   │       ├── GPUImageOutput.h
    │   │       ├── GPUImageOverlayBlendFilter.h
    │   │       ├── GPUImageParallelCoordinateLineTransformFilter.h
    │   │       ├── GPUImagePerlinNoiseFilter.h
    │   │       ├── GPUImagePicture+TextureSubimage.h
    │   │       ├── GPUImagePicture.h
    │   │       ├── GPUImagePinchDistortionFilter.h
    │   │       ├── GPUImagePixellateFilter.h
    │   │       ├── GPUImagePixellatePositionFilter.h
    │   │       ├── GPUImagePoissonBlendFilter.h
    │   │       ├── GPUImagePolarPixellateFilter.h
    │   │       ├── GPUImagePolkaDotFilter.h
    │   │       ├── GPUImagePosterizeFilter.h
    │   │       ├── GPUImagePrewittEdgeDetectionFilter.h
    │   │       ├── GPUImageRGBClosingFilter.h
    │   │       ├── GPUImageRGBDilationFilter.h
    │   │       ├── GPUImageRGBErosionFilter.h
    │   │       ├── GPUImageRGBFilter.h
    │   │       ├── GPUImageRGBOpeningFilter.h
    │   │       ├── GPUImageRawDataInput.h
    │   │       ├── GPUImageRawDataOutput.h
    │   │       ├── GPUImageSaturationBlendFilter.h
    │   │       ├── GPUImageSaturationFilter.h
    │   │       ├── GPUImageScreenBlendFilter.h
    │   │       ├── GPUImageSepiaFilter.h
    │   │       ├── GPUImageSharpenFilter.h
    │   │       ├── GPUImageShiTomasiFeatureDetectionFilter.h
    │   │       ├── GPUImageSingleComponentGaussianBlurFilter.h
    │   │       ├── GPUImageSketchFilter.h
    │   │       ├── GPUImageSmoothToonFilter.h
    │   │       ├── GPUImageSobelEdgeDetectionFilter.h
    │   │       ├── GPUImageSoftEleganceFilter.h
    │   │       ├── GPUImageSoftLightBlendFilter.h
    │   │       ├── GPUImageSolarizeFilter.h
    │   │       ├── GPUImageSolidColorGenerator.h
    │   │       ├── GPUImageSourceOverBlendFilter.h
    │   │       ├── GPUImageSphereRefractionFilter.h
    │   │       ├── GPUImageStillCamera.h
    │   │       ├── GPUImageStretchDistortionFilter.h
    │   │       ├── GPUImageSubtractBlendFilter.h
    │   │       ├── GPUImageSwirlFilter.h
    │   │       ├── GPUImageTextureInput.h
    │   │       ├── GPUImageTextureOutput.h
    │   │       ├── GPUImageThreeInputFilter.h
    │   │       ├── GPUImageThresholdEdgeDetectionFilter.h
    │   │       ├── GPUImageThresholdSketchFilter.h
    │   │       ├── GPUImageThresholdedNonMaximumSuppressionFilter.h
    │   │       ├── GPUImageTiltShiftFilter.h
    │   │       ├── GPUImageToneCurveFilter.h
    │   │       ├── GPUImageToonFilter.h
    │   │       ├── GPUImageTransformFilter.h
    │   │       ├── GPUImageTwoInputCrossTextureSamplingFilter.h
    │   │       ├── GPUImageTwoInputFilter.h
    │   │       ├── GPUImageTwoPassFilter.h
    │   │       ├── GPUImageTwoPassTextureSamplingFilter.h
    │   │       ├── GPUImageUIElement.h
    │   │       ├── GPUImageUnsharpMaskFilter.h
    │   │       ├── GPUImageVideoCamera.h
    │   │       ├── GPUImageView.h
    │   │       ├── GPUImageVignetteFilter.h
    │   │       ├── GPUImageVoronoiConsumerFilter.h
    │   │       ├── GPUImageWeakPixelInclusionFilter.h
    │   │       ├── GPUImageWhiteBalanceFilter.h
    │   │       ├── GPUImageXYDerivativeFilter.h
    │   │       ├── GPUImageZoomBlurFilter.h
    │   │       └── GPUImageiOSBlurFilter.h
    │   ├── ViewController.h
    │   ├── ViewController.m
    │   └── main.m
    ├── VideoFilterTextTests
    │   ├── Info.plist
    │   └── VideoFilterTextTests.m
    └── VideoFilterTextUITests
        ├── Info.plist
        └── VideoFilterTextUITests.m

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cwos111509sina/VideoFilterText/0e2e7d3c05804a48204c79905885ac5c9d02cd7d/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## Build generated
6 | build/
7 | DerivedData/
8 |
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 |
20 | ## Other
21 | *.moved-aside
22 | *.xcuserstate
23 |
24 | ## Obj-C/Swift specific
25 | *.hmap
26 | *.ipa
27 | *.dSYM.zip
28 | *.dSYM
29 |
30 | # CocoaPods
31 | #
32 | # We recommend against adding the Pods directory to your .gitignore. However
33 | # you should judge for yourself, the pros and cons are mentioned at:
34 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
35 | #
36 | # Pods/
37 |
38 | # Carthage
39 | #
40 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
41 | # Carthage/Checkouts
42 |
43 | Carthage/Build
44 |
45 | # fastlane
46 | #
47 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
48 | # screenshots whenever they are needed.
49 | # For more information about the recommended setup visit:
50 | # https://github.com/fastlane/fastlane/blob/master/fastlane/docs/Gitignore.md
51 |
52 | fastlane/report.xml
53 | fastlane/screenshots
54 |
55 | #Code Injection
56 | #
57 | # After new code Injection tools there's a generated folder /iOSInjectionProject
58 | # https://github.com/johnno1962/injectionforxcode
59 |
60 | iOSInjectionProject/
61 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # VideoFilterText
2 | Adding filters to video (GPUImage)
3 |
4 | Blog:
5 | [iOS: Using GPUImage to add a filter to a local video](https://cwos111509sina.github.io/Blog/OC/Article/iOS-使用GPUImage为本地视频添加滤镜)
6 |
7 |
--------------------------------------------------------------------------------
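
For reference, a minimal sketch of the local-video filtering flow the README and linked blog post describe, written against GPUImage's standard GPUImageMovie/GPUImageMovieWriter API. The file names, output size, and the choice of a sepia filter are placeholders, not values taken from this project; the movie, filter, and writer objects need to be kept in strong properties for the duration of processing.

    #import "GPUImage.h"

    // Read a local clip, run it through one filter, and re-encode the result.
    NSURL *inputURL  = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"mp4"]; // placeholder asset
    NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"filtered.m4v"]];
    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];

    GPUImageMovie *movieFile = [[GPUImageMovie alloc] initWithURL:inputURL];
    movieFile.playAtActualSpeed = NO;                  // process as fast as possible

    GPUImageSepiaFilter *filter = [[GPUImageSepiaFilter alloc] init];
    [movieFile addTarget:filter];

    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL
                                                                                size:CGSizeMake(480.0, 640.0)];
    [filter addTarget:movieWriter];

    movieWriter.shouldPassthroughAudio = YES;          // copy the audio track unmodified
    movieFile.audioEncodingTarget = movieWriter;
    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

    [movieWriter startRecording];
    [movieFile startProcessing];

    [movieWriter setCompletionBlock:^{
        [filter removeTarget:movieWriter];
        [movieWriter finishRecording];
    }];
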
/VideoFilterText/VideoFilterText.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:VideoFilterText.xcodeproj">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // VideoFilterText
4 | //
5 | // Created by zzjd on 2017/3/9.
6 | // Copyright © 2017年 zzjd. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // VideoFilterText
4 | //
5 | // Created by zzjd on 2017/3/9.
6 | // Copyright © 2017年 zzjd. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | #import "ViewController.h"
12 |
13 | @interface AppDelegate ()
14 |
15 | @end
16 |
17 | @implementation AppDelegate
18 |
19 |
20 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
21 |
22 |
23 |
24 | self.window = [[UIWindow alloc]initWithFrame:[UIScreen mainScreen].bounds];
25 |
26 | self.window.backgroundColor = [UIColor whiteColor];
27 |
28 |
29 | ViewController * VC = [[ViewController alloc]init];
30 |
31 |
32 | self.window.rootViewController = VC;
33 |
34 |
35 | [self.window makeKeyAndVisible];
36 |
37 | // Override point for customization after application launch.
38 | return YES;
39 | }
40 |
41 |
42 | - (void)applicationWillResignActive:(UIApplication *)application {
43 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
44 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
45 | }
46 |
47 |
48 | - (void)applicationDidEnterBackground:(UIApplication *)application {
49 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
50 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
51 | }
52 |
53 |
54 | - (void)applicationWillEnterForeground:(UIApplication *)application {
55 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
56 | }
57 |
58 |
59 | - (void)applicationDidBecomeActive:(UIApplication *)application {
60 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
61 | }
62 |
63 |
64 | - (void)applicationWillTerminate:(UIApplication *)application {
65 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
66 | }
67 |
68 |
69 | @end
70 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | }
43 | ],
44 | "info" : {
45 | "version" : 1,
46 | "author" : "xcode"
47 | }
48 | }
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>CFBundleDevelopmentRegion</key>
6 |     <string>en</string>
7 |     <key>CFBundleExecutable</key>
8 |     <string>$(EXECUTABLE_NAME)</string>
9 |     <key>CFBundleIdentifier</key>
10 |     <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 |     <key>CFBundleInfoDictionaryVersion</key>
12 |     <string>6.0</string>
13 |     <key>CFBundleName</key>
14 |     <string>$(PRODUCT_NAME)</string>
15 |     <key>CFBundlePackageType</key>
16 |     <string>APPL</string>
17 |     <key>CFBundleShortVersionString</key>
18 |     <string>1.0</string>
19 |     <key>CFBundleVersion</key>
20 |     <string>1</string>
21 |     <key>LSRequiresIPhoneOS</key>
22 |     <true/>
23 |     <key>NSPhotoLibraryUsageDescription</key>
24 |     <string>是否允许此App访问你的媒体资料库?</string>
25 |     <key>UILaunchStoryboardName</key>
26 |     <string>LaunchScreen</string>
27 |     <key>UIRequiredDeviceCapabilities</key>
28 |     <array>
29 |         <string>armv7</string>
30 |     </array>
31 |     <key>UISupportedInterfaceOrientations</key>
32 |     <array>
33 |         <string>UIInterfaceOrientationPortrait</string>
34 |         <string>UIInterfaceOrientationLandscapeLeft</string>
35 |         <string>UIInterfaceOrientationLandscapeRight</string>
36 |     </array>
37 | </dict>
38 | </plist>
39 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cwos111509sina/VideoFilterText/0e2e7d3c05804a48204c79905885ac5c9d02cd7d/VideoFilterText/VideoFilterText/Source/.DS_Store
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/libGPUImage.a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cwos111509sina/VideoFilterText/0e2e7d3c05804a48204c79905885ac5c9d02cd7d/VideoFilterText/VideoFilterText/Source/libGPUImage.a
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GLProgram.h:
--------------------------------------------------------------------------------
1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
2 | // A description of this can be found at his page on the topic:
3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
4 | // I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
5 |
6 | #import <Foundation/Foundation.h>
7 |
8 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
9 | #import <OpenGLES/ES2/gl.h>
10 | #import <OpenGLES/ES2/glext.h>
11 | #else
12 | #import <OpenGL/OpenGL.h>
13 | #import <OpenGL/gl.h>
14 | #endif
15 |
16 | @interface GLProgram : NSObject
17 | {
18 | NSMutableArray *attributes;
19 | NSMutableArray *uniforms;
20 | GLuint program,
21 | vertShader,
22 | fragShader;
23 | }
24 |
25 | @property(readwrite, nonatomic) BOOL initialized;
26 | @property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
27 | @property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;
28 | @property(readwrite, copy, nonatomic) NSString *programLog;
29 |
30 | - (id)initWithVertexShaderString:(NSString *)vShaderString
31 | fragmentShaderString:(NSString *)fShaderString;
32 | - (id)initWithVertexShaderString:(NSString *)vShaderString
33 | fragmentShaderFilename:(NSString *)fShaderFilename;
34 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
35 | fragmentShaderFilename:(NSString *)fShaderFilename;
36 | - (void)addAttribute:(NSString *)attributeName;
37 | - (GLuint)attributeIndex:(NSString *)attributeName;
38 | - (GLuint)uniformIndex:(NSString *)uniformName;
39 | - (BOOL)link;
40 | - (void)use;
41 | - (void)validate;
42 | @end
43 |
--------------------------------------------------------------------------------
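
A rough usage sketch for the GLProgram wrapper above, assuming a current EAGLContext is already set up and that kMyVertexShader/kMyFragmentShader are placeholder NSString shader sources; the calls follow the methods declared in the header.

    GLProgram *program = [[GLProgram alloc] initWithVertexShaderString:kMyVertexShader
                                                  fragmentShaderString:kMyFragmentShader];
    // Attributes must be registered before linking so they get stable indices.
    [program addAttribute:@"position"];
    [program addAttribute:@"inputTextureCoordinate"];

    if (![program link])
    {
        NSLog(@"Vertex shader log: %@", [program vertexShaderLog]);
        NSLog(@"Fragment shader log: %@", [program fragmentShaderLog]);
        NSLog(@"Program log: %@", [program programLog]);
    }
    else
    {
        [program use];
        GLuint positionAttribute  = [program attributeIndex:@"position"];
        GLint inputTextureUniform = [program uniformIndex:@"inputImageTexture"];
        // ... feed vertex data via glVertexAttribPointer and bind the texture to inputTextureUniform before drawing.
    }
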
/VideoFilterText/VideoFilterText/Source/usr/GPUImage.h:
--------------------------------------------------------------------------------
1 | #import "GLProgram.h"
2 |
3 | // Base classes
4 | #import "GPUImageContext.h"
5 | #import "GPUImageOutput.h"
6 | #import "GPUImageView.h"
7 | #import "GPUImageVideoCamera.h"
8 | #import "GPUImageStillCamera.h"
9 | #import "GPUImageMovie.h"
10 | #import "GPUImagePicture.h"
11 | #import "GPUImageRawDataInput.h"
12 | #import "GPUImageRawDataOutput.h"
13 | #import "GPUImageMovieWriter.h"
14 | #import "GPUImageFilterPipeline.h"
15 | #import "GPUImageTextureOutput.h"
16 | #import "GPUImageFilterGroup.h"
17 | #import "GPUImageTextureInput.h"
18 | #import "GPUImageUIElement.h"
19 | #import "GPUImageBuffer.h"
20 | #import "GPUImageFramebuffer.h"
21 | #import "GPUImageFramebufferCache.h"
22 |
23 | // Filters
24 | #import "GPUImageFilter.h"
25 | #import "GPUImageTwoInputFilter.h"
26 | #import "GPUImagePixellateFilter.h"
27 | #import "GPUImagePixellatePositionFilter.h"
28 | #import "GPUImageSepiaFilter.h"
29 | #import "GPUImageColorInvertFilter.h"
30 | #import "GPUImageSaturationFilter.h"
31 | #import "GPUImageContrastFilter.h"
32 | #import "GPUImageExposureFilter.h"
33 | #import "GPUImageBrightnessFilter.h"
34 | #import "GPUImageLevelsFilter.h"
35 | #import "GPUImageSharpenFilter.h"
36 | #import "GPUImageGammaFilter.h"
37 | #import "GPUImageSobelEdgeDetectionFilter.h"
38 | #import "GPUImageSketchFilter.h"
39 | #import "GPUImageToonFilter.h"
40 | #import "GPUImageSmoothToonFilter.h"
41 | #import "GPUImageMultiplyBlendFilter.h"
42 | #import "GPUImageDissolveBlendFilter.h"
43 | #import "GPUImageKuwaharaFilter.h"
44 | #import "GPUImageKuwaharaRadius3Filter.h"
45 | #import "GPUImageVignetteFilter.h"
46 | #import "GPUImageGaussianBlurFilter.h"
47 | #import "GPUImageGaussianBlurPositionFilter.h"
48 | #import "GPUImageGaussianSelectiveBlurFilter.h"
49 | #import "GPUImageOverlayBlendFilter.h"
50 | #import "GPUImageDarkenBlendFilter.h"
51 | #import "GPUImageLightenBlendFilter.h"
52 | #import "GPUImageSwirlFilter.h"
53 | #import "GPUImageSourceOverBlendFilter.h"
54 | #import "GPUImageColorBurnBlendFilter.h"
55 | #import "GPUImageColorDodgeBlendFilter.h"
56 | #import "GPUImageScreenBlendFilter.h"
57 | #import "GPUImageExclusionBlendFilter.h"
58 | #import "GPUImageDifferenceBlendFilter.h"
59 | #import "GPUImageSubtractBlendFilter.h"
60 | #import "GPUImageHardLightBlendFilter.h"
61 | #import "GPUImageSoftLightBlendFilter.h"
62 | #import "GPUImageColorBlendFilter.h"
63 | #import "GPUImageHueBlendFilter.h"
64 | #import "GPUImageSaturationBlendFilter.h"
65 | #import "GPUImageLuminosityBlendFilter.h"
66 | #import "GPUImageCropFilter.h"
67 | #import "GPUImageGrayscaleFilter.h"
68 | #import "GPUImageTransformFilter.h"
69 | #import "GPUImageChromaKeyBlendFilter.h"
70 | #import "GPUImageHazeFilter.h"
71 | #import "GPUImageLuminanceThresholdFilter.h"
72 | #import "GPUImagePosterizeFilter.h"
73 | #import "GPUImageBoxBlurFilter.h"
74 | #import "GPUImageAdaptiveThresholdFilter.h"
75 | #import "GPUImageSolarizeFilter.h"
76 | #import "GPUImageUnsharpMaskFilter.h"
77 | #import "GPUImageBulgeDistortionFilter.h"
78 | #import "GPUImagePinchDistortionFilter.h"
79 | #import "GPUImageCrosshatchFilter.h"
80 | #import "GPUImageCGAColorspaceFilter.h"
81 | #import "GPUImagePolarPixellateFilter.h"
82 | #import "GPUImageStretchDistortionFilter.h"
83 | #import "GPUImagePerlinNoiseFilter.h"
84 | #import "GPUImageJFAVoronoiFilter.h"
85 | #import "GPUImageVoronoiConsumerFilter.h"
86 | #import "GPUImageMosaicFilter.h"
87 | #import "GPUImageTiltShiftFilter.h"
88 | #import "GPUImage3x3ConvolutionFilter.h"
89 | #import "GPUImageEmbossFilter.h"
90 | #import "GPUImageCannyEdgeDetectionFilter.h"
91 | #import "GPUImageThresholdEdgeDetectionFilter.h"
92 | #import "GPUImageMaskFilter.h"
93 | #import "GPUImageHistogramFilter.h"
94 | #import "GPUImageHistogramGenerator.h"
95 | #import "GPUImageHistogramEqualizationFilter.h"
96 | #import "GPUImagePrewittEdgeDetectionFilter.h"
97 | #import "GPUImageXYDerivativeFilter.h"
98 | #import "GPUImageHarrisCornerDetectionFilter.h"
99 | #import "GPUImageAlphaBlendFilter.h"
100 | #import "GPUImageNormalBlendFilter.h"
101 | #import "GPUImageNonMaximumSuppressionFilter.h"
102 | #import "GPUImageRGBFilter.h"
103 | #import "GPUImageMedianFilter.h"
104 | #import "GPUImageBilateralFilter.h"
105 | #import "GPUImageCrosshairGenerator.h"
106 | #import "GPUImageToneCurveFilter.h"
107 | #import "GPUImageNobleCornerDetectionFilter.h"
108 | #import "GPUImageShiTomasiFeatureDetectionFilter.h"
109 | #import "GPUImageErosionFilter.h"
110 | #import "GPUImageRGBErosionFilter.h"
111 | #import "GPUImageDilationFilter.h"
112 | #import "GPUImageRGBDilationFilter.h"
113 | #import "GPUImageOpeningFilter.h"
114 | #import "GPUImageRGBOpeningFilter.h"
115 | #import "GPUImageClosingFilter.h"
116 | #import "GPUImageRGBClosingFilter.h"
117 | #import "GPUImageColorPackingFilter.h"
118 | #import "GPUImageSphereRefractionFilter.h"
119 | #import "GPUImageMonochromeFilter.h"
120 | #import "GPUImageOpacityFilter.h"
121 | #import "GPUImageHighlightShadowFilter.h"
122 | #import "GPUImageFalseColorFilter.h"
123 | #import "GPUImageHSBFilter.h"
124 | #import "GPUImageHueFilter.h"
125 | #import "GPUImageGlassSphereFilter.h"
126 | #import "GPUImageLookupFilter.h"
127 | #import "GPUImageAmatorkaFilter.h"
128 | #import "GPUImageMissEtikateFilter.h"
129 | #import "GPUImageSoftEleganceFilter.h"
130 | #import "GPUImageAddBlendFilter.h"
131 | #import "GPUImageDivideBlendFilter.h"
132 | #import "GPUImagePolkaDotFilter.h"
133 | #import "GPUImageLocalBinaryPatternFilter.h"
134 | #import "GPUImageColorLocalBinaryPatternFilter.h"
135 | #import "GPUImageLanczosResamplingFilter.h"
136 | #import "GPUImageAverageColor.h"
137 | #import "GPUImageSolidColorGenerator.h"
138 | #import "GPUImageLuminosity.h"
139 | #import "GPUImageAverageLuminanceThresholdFilter.h"
140 | #import "GPUImageWhiteBalanceFilter.h"
141 | #import "GPUImageChromaKeyFilter.h"
142 | #import "GPUImageLowPassFilter.h"
143 | #import "GPUImageHighPassFilter.h"
144 | #import "GPUImageMotionDetector.h"
145 | #import "GPUImageHalftoneFilter.h"
146 | #import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
147 | #import "GPUImageHoughTransformLineDetector.h"
148 | #import "GPUImageParallelCoordinateLineTransformFilter.h"
149 | #import "GPUImageThresholdSketchFilter.h"
150 | #import "GPUImageLineGenerator.h"
151 | #import "GPUImageLinearBurnBlendFilter.h"
152 | #import "GPUImageGaussianBlurPositionFilter.h"
153 | #import "GPUImagePixellatePositionFilter.h"
154 | #import "GPUImageTwoInputCrossTextureSamplingFilter.h"
155 | #import "GPUImagePoissonBlendFilter.h"
156 | #import "GPUImageMotionBlurFilter.h"
157 | #import "GPUImageZoomBlurFilter.h"
158 | #import "GPUImageLaplacianFilter.h"
159 | #import "GPUImageiOSBlurFilter.h"
160 | #import "GPUImageLuminanceRangeFilter.h"
161 | #import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
162 | #import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
163 | #import "GPUImageSingleComponentGaussianBlurFilter.h"
164 | #import "GPUImageThreeInputFilter.h"
165 | #import "GPUImageFourInputFilter.h"
166 | #import "GPUImageWeakPixelInclusionFilter.h"
167 | #import "GPUImageColorConversion.h"
168 | #import "GPUImageColourFASTFeatureDetector.h"
169 | #import "GPUImageColourFASTSamplingOperation.h"
170 |
171 |
--------------------------------------------------------------------------------
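
The umbrella header above pulls in the whole source/filter/output graph. As an illustration of how those pieces chain together (a hedged sketch, not code from this project), a live camera preview through one filter might look like the following, assuming self.videoCamera and self.sketchFilter are strong properties and filterView is a GPUImageView already in the view hierarchy.

    // The camera must be kept in a strong property; otherwise it is deallocated
    // as soon as this method returns and capture stops.
    self.videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                           cameraPosition:AVCaptureDevicePositionBack];
    self.videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

    self.sketchFilter = [[GPUImageSketchFilter alloc] init];

    [self.videoCamera addTarget:self.sketchFilter];
    [self.sketchFilter addTarget:filterView];   // filterView: a GPUImageView for on-screen display

    [self.videoCamera startCameraCapture];
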
/VideoFilterText/VideoFilterText/Source/usr/GPUImage3x3ConvolutionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | /** Runs a 3x3 convolution kernel against the image
4 | */
5 | @interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
6 | {
7 | GLint convolutionMatrixUniform;
8 | }
9 |
10 | /** Convolution kernel to run against the image
11 |
12 | The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels.
13 | The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three
14 | If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
15 | */
16 | @property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
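
To make the row-major kernel layout above concrete, here is a small sketch (not from this repo) that sets a standard sharpen kernel; its entries sum to 1.0, so overall brightness is preserved, as the header comment notes.

    GPUImage3x3ConvolutionFilter *convolutionFilter = [[GPUImage3x3ConvolutionFilter alloc] init];

    // Row-major: the first row corresponds to one.one, one.two, one.three, and so on.
    GPUMatrix3x3 sharpenKernel = {
        { 0.0f, -1.0f,  0.0f},
        {-1.0f,  5.0f, -1.0f},
        { 0.0f, -1.0f,  0.0f}
    };
    convolutionFilter.convolutionKernel = sharpenKernel;
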
/VideoFilterText/VideoFilterText/Source/usr/GPUImage3x3TextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
4 |
5 | @interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
6 | {
7 | GLint texelWidthUniform, texelHeightUniform;
8 |
9 | CGFloat texelWidth, texelHeight;
10 | BOOL hasOverriddenImageSizeFactor;
11 | }
12 |
13 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
14 | @property(readwrite, nonatomic) CGFloat texelWidth;
15 | @property(readwrite, nonatomic) CGFloat texelHeight;
16 |
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageAdaptiveThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup
4 |
5 | /** A multiplier for the background averaging blur radius in pixels, with a default of 4
6 | */
7 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageAddBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageAddBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageAlphaBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
4 | {
5 | GLint mixUniform;
6 | }
7 |
8 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat mix;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
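
A short sketch of the two-input wiring and the mix property described above; videoCamera, overlayImage, and filterView are placeholders for whatever sources and view an app actually uses.

    GPUImagePicture *overlaySource = [[GPUImagePicture alloc] initWithImage:overlayImage];
    GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
    blendFilter.mix = 0.6;                      // 0.0 = only first input, 1.0 = only second input

    [videoCamera addTarget:blendFilter];        // first input (texture index 0)
    [overlaySource addTarget:blendFilter];      // second input (texture index 1)
    [blendFilter addTarget:filterView];

    [overlaySource processImage];
    [videoCamera startCameraCapture];
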
/VideoFilterText/VideoFilterText/Source/usr/GPUImageAmatorkaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Photoshop action by Amatorka
6 | http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631
7 | */
8 |
9 | // Note: If you want to use this effect you have to add lookup_amatorka.png
10 | // from Resources folder to your application bundle.
11 |
12 | @interface GPUImageAmatorkaFilter : GPUImageFilterGroup
13 | {
14 | GPUImagePicture *lookupImageSource;
15 | }
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
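
As the note in the header says, lookup_amatorka.png from GPUImage's Resources folder has to be copied into the app bundle. With that in place, applying the filter to a still image could look like this sketch, which assumes the framebuffer-capture API from GPUImageOutput and uses inputImage as a placeholder.

    GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageAmatorkaFilter *amatorkaFilter = [[GPUImageAmatorkaFilter alloc] init]; // loads lookup_amatorka.png from the bundle

    [stillImageSource addTarget:amatorkaFilter];
    [amatorkaFilter useNextFrameForImageCapture];
    [stillImageSource processImage];

    UIImage *gradedImage = [amatorkaFilter imageFromCurrentFramebuffer];
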
/VideoFilterText/VideoFilterText/Source/usr/GPUImageAverageColor.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageColorAveragingVertexShaderString;
4 |
5 | @interface GPUImageAverageColor : GPUImageFilter
6 | {
7 | GLint texelWidthUniform, texelHeightUniform;
8 |
9 | NSUInteger numberOfStages;
10 |
11 | GLubyte *rawImagePixels;
12 | CGSize finalStageSize;
13 | }
14 |
15 | // This block is called on the completion of color averaging for a frame
16 | @property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);
17 |
18 | - (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
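
A sketch of the callback-based API above: the block is invoked on GPUImage's processing queue once per frame, so UI work should hop back to the main queue. videoCamera and self.colorIndicatorView are placeholder names.

    GPUImageAverageColor *averageColor = [[GPUImageAverageColor alloc] init];
    __weak UIView *weakIndicatorView = self.colorIndicatorView; // hypothetical view to tint

    [averageColor setColorAverageProcessingFinishedBlock:^(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime) {
        dispatch_async(dispatch_get_main_queue(), ^{
            weakIndicatorView.backgroundColor = [UIColor colorWithRed:redComponent
                                                                green:greenComponent
                                                                 blue:blueComponent
                                                                alpha:1.0];
        });
    }];

    [videoCamera addTarget:averageColor];
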
/VideoFilterText/VideoFilterText/Source/usr/GPUImageAverageLuminanceThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup
4 |
5 | // This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.
6 | @property(readwrite, nonatomic) CGFloat thresholdMultiplier;
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageBilateralFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageGaussianBlurFilter.h"
2 |
3 | @interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
4 | {
5 | CGFloat firstDistanceNormalizationFactorUniform;
6 | CGFloat secondDistanceNormalizationFactorUniform;
7 | }
8 | // A normalization factor for the distance between central color and sample color.
9 | @property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;
10 | @end
11 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageBoxBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageGaussianBlurFilter.h"
2 |
3 | /** A hardware-accelerated box blur of an image
4 | */
5 | @interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageBrightnessFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageBrightnessFilter : GPUImageFilter
4 | {
5 | GLint brightnessUniform;
6 | }
7 |
8 | // Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat brightness;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageBuffer.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageBuffer : GPUImageFilter
4 | {
5 | NSMutableArray *bufferedFramebuffers;
6 | }
7 |
8 | @property(readwrite, nonatomic) NSUInteger bufferSize;
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageBulgeDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /// Creates a bulge distortion on the image
4 | @interface GPUImageBulgeDistortionFilter : GPUImageFilter
5 | {
6 | GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
7 | }
8 |
9 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
10 | @property(readwrite, nonatomic) CGPoint center;
11 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
12 | @property(readwrite, nonatomic) CGFloat radius;
13 | /// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
14 | @property(readwrite, nonatomic) CGFloat scale;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageCGAColorspaceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCGAColorspaceFilter : GPUImageFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageCannyEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGrayscaleFilter;
4 | @class GPUImageSingleComponentGaussianBlurFilter;
5 | @class GPUImageDirectionalSobelEdgeDetectionFilter;
6 | @class GPUImageDirectionalNonMaximumSuppressionFilter;
7 | @class GPUImageWeakPixelInclusionFilter;
8 |
9 | /** This applies the edge detection process described by John Canny in
10 |
11 | Canny, J., A Computational Approach To Edge Detection, IEEE Trans. Pattern Analysis and Machine Intelligence, 8(6):679–698, 1986.
12 |
13 | and implemented in OpenGL ES by
14 |
15 | A. Ensor, S. Hall. GPU-based Image Analysis on Mobile Devices. Proceedings of Image and Vision Computing New Zealand 2011.
16 |
17 | It starts with a conversion to luminance, followed by an accelerated 9-hit Gaussian blur. A Sobel operator is applied to obtain the overall
18 | gradient strength in the blurred image, as well as the direction (in texture sampling steps) of the gradient. A non-maximum suppression filter
19 | acts along the direction of the gradient, highlighting strong edges that pass the threshold and completely removing those that fail the lower
20 | threshold. Finally, pixels from in-between these thresholds are either included in edges or rejected based on neighboring pixels.
21 | */
22 | @interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup
23 | {
24 | GPUImageGrayscaleFilter *luminanceFilter;
25 | GPUImageSingleComponentGaussianBlurFilter *blurFilter;
26 | GPUImageDirectionalSobelEdgeDetectionFilter *edgeDetectionFilter;
27 | GPUImageDirectionalNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
28 | GPUImageWeakPixelInclusionFilter *weakPixelInclusionFilter;
29 | }
30 |
31 | /** The image width and height factors tweak the appearance of the edges.
32 |
33 | These parameters affect the visibility of the detected edges
34 |
35 | By default, they match the inverse of the filter size in pixels
36 | */
37 | @property(readwrite, nonatomic) CGFloat texelWidth;
38 | /** The image width and height factors tweak the appearance of the edges.
39 |
40 | These parameters affect the visibility of the detected edges
41 |
42 | By default, they match the inverse of the filter size in pixels
43 | */
44 | @property(readwrite, nonatomic) CGFloat texelHeight;
45 |
46 | /** The underlying blur radius for the Gaussian blur. Default is 2.0.
47 | */
48 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
49 |
50 | /** The underlying blur texel spacing multiplier. Default is 1.0.
51 | */
52 | @property (readwrite, nonatomic) CGFloat blurTexelSpacingMultiplier;
53 |
54 | /** Any edge with a gradient magnitude above this threshold will pass and show up in the final result.
55 | */
56 | @property(readwrite, nonatomic) CGFloat upperThreshold;
57 |
58 | /** Any edge with a gradient magnitude below this threshold will fail and be removed from the final result.
59 | */
60 | @property(readwrite, nonatomic) CGFloat lowerThreshold;
61 |
62 | @end
63 |
--------------------------------------------------------------------------------
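
Given the multi-stage pipeline described above, tuning usually comes down to the blur radius and the two hysteresis thresholds. A configuration sketch follows; the source and view names are placeholders and the values are illustrative, not this project's settings.

    GPUImageCannyEdgeDetectionFilter *cannyFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];

    cannyFilter.blurRadiusInPixels = 2.0;   // pre-smoothing before the Sobel stage
    cannyFilter.upperThreshold = 0.4;       // gradients above this always count as edges
    cannyFilter.lowerThreshold = 0.1;       // gradients below this are always discarded

    [videoCamera addTarget:cannyFilter];
    [cannyFilter addTarget:filterView];
    [videoCamera startCameraCapture];
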
/VideoFilterText/VideoFilterText/Source/usr/GPUImageChromaKeyBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /** Selectively replaces a color in the first image with the second image
4 | */
5 | @interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
6 | {
7 | GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
8 | }
9 |
10 | /** The threshold sensitivity controls how similar pixels need to be colored to be replaced
11 |
12 | The default value is 0.3
13 | */
14 | @property(readwrite, nonatomic) CGFloat thresholdSensitivity;
15 |
16 | /** The degree of smoothing controls how gradually similar colors are replaced in the image
17 |
18 | The default value is 0.1
19 | */
20 | @property(readwrite, nonatomic) CGFloat smoothing;
21 |
22 | /** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
23 |
24 | The default is green: (0.0, 1.0, 0.0).
25 |
26 | @param redComponent Red component of color to be replaced
27 | @param greenComponent Green component of color to be replaced
28 | @param blueComponent Blue component of color to be replaced
29 | */
30 | - (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
31 |
32 | @end
33 |
--------------------------------------------------------------------------------
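
A green-screen compositing sketch for the blend filter above; greenScreenURL, backgroundImage, and filterView are placeholder inputs, and the sensitivity/smoothing values are illustrative.

    GPUImageMovie *greenScreenMovie = [[GPUImageMovie alloc] initWithURL:greenScreenURL];
    GPUImagePicture *backgroundSource = [[GPUImagePicture alloc] initWithImage:backgroundImage];

    GPUImageChromaKeyBlendFilter *chromaKeyFilter = [[GPUImageChromaKeyBlendFilter alloc] init];
    [chromaKeyFilter setColorToReplaceRed:0.0 green:1.0 blue:0.0]; // key out pure green
    chromaKeyFilter.thresholdSensitivity = 0.4;
    chromaKeyFilter.smoothing = 0.1;

    [greenScreenMovie addTarget:chromaKeyFilter];   // first input: footage containing the key color
    [backgroundSource addTarget:chromaKeyFilter];   // second input: replacement background
    [chromaKeyFilter addTarget:filterView];

    [backgroundSource processImage];
    [greenScreenMovie startProcessing];
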
/VideoFilterText/VideoFilterText/Source/usr/GPUImageChromaKeyFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageChromaKeyFilter : GPUImageFilter
4 | {
5 | GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
6 | }
7 |
8 | /** The threshold sensitivity controls how similar pixels need to be colored to be replaced
9 |
10 | The default value is 0.3
11 | */
12 | @property(readwrite, nonatomic) CGFloat thresholdSensitivity;
13 |
14 | /** The degree of smoothing controls how gradually similar colors are replaced in the image
15 |
16 | The default value is 0.1
17 | */
18 | @property(readwrite, nonatomic) CGFloat smoothing;
19 |
20 | /** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
21 |
22 | The default is green: (0.0, 1.0, 0.0).
23 |
24 | @param redComponent Red component of color to be replaced
25 | @param greenComponent Green component of color to be replaced
26 | @param blueComponent Blue component of color to be replaced
27 | */
28 | - (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
29 |
30 | @end
31 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageClosingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageErosionFilter;
4 | @class GPUImageDilationFilter;
5 |
6 | // A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius.
7 | // This helps to filter out smaller dark elements.
8 |
9 | @interface GPUImageClosingFilter : GPUImageFilterGroup
10 | {
11 | GPUImageErosionFilter *erosionFilter;
12 | GPUImageDilationFilter *dilationFilter;
13 | }
14 |
15 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
16 |
17 | - (id)initWithRadius:(NSUInteger)radius;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageColorBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorBurnBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /** Applies a color burn blend of two images
4 | */
5 | @interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
6 | {
7 | }
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorConversion.h:
--------------------------------------------------------------------------------
1 | #ifndef GPUImageColorConversion_h
2 | #define GPUImageColorConversion_h
3 |
4 | extern GLfloat *kColorConversion601;
5 | extern GLfloat *kColorConversion601FullRange;
6 | extern GLfloat *kColorConversion709;
7 | extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
8 | extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
9 | extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;
10 |
11 |
12 | #endif /* GPUImageColorConversion_h */
13 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorDodgeBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /** Applies a color dodge blend of two images
4 | */
5 | @interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
6 | {
7 | }
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorInvertFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageColorInvertFilter : GPUImageFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorLocalBinaryPatternFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageColorLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorMatrixFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Transforms the colors of an image by applying a matrix to them
4 | */
5 | @interface GPUImageColorMatrixFilter : GPUImageFilter
6 | {
7 | GLint colorMatrixUniform;
8 | GLint intensityUniform;
9 | }
10 |
11 | /** A 4x4 matrix used to transform each color in an image
12 | */
13 | @property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;
14 |
15 | /** The degree to which the new transformed color replaces the original color for each pixel
16 | */
17 | @property(readwrite, nonatomic) CGFloat intensity;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
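
To illustrate the interplay of the 4x4 matrix and the intensity property above, a sketch that warms an image slightly by scaling the red and blue channels; the matrix values are arbitrary examples.

    GPUImageColorMatrixFilter *colorMatrixFilter = [[GPUImageColorMatrixFilter alloc] init];

    // Rows map the input (R, G, B, A) to the output; the identity matrix leaves colors unchanged.
    GPUMatrix4x4 warmMatrix = {
        {1.1f, 0.0f, 0.0f, 0.0f},
        {0.0f, 1.0f, 0.0f, 0.0f},
        {0.0f, 0.0f, 0.9f, 0.0f},
        {0.0f, 0.0f, 0.0f, 1.0f}
    };
    colorMatrixFilter.colorMatrix = warmMatrix;
    colorMatrixFilter.intensity = 1.0;   // 0.0 keeps the original color, 1.0 fully applies the matrix
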
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColorPackingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageColorPackingFilter : GPUImageFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 |
7 | CGFloat texelWidth, texelHeight;
8 | }
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColourFASTFeatureDetector.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | // This generates image-wide feature descriptors using the ColourFAST process, as developed and described in
4 | //
5 | // A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.
6 | //
7 | // Seth Hall, "GPU accelerated feature algorithms for mobile devices", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.
8 | // http://aut.researchgateway.ac.nz/handle/10292/7991
9 |
10 | @class GPUImageColourFASTSamplingOperation;
11 | @class GPUImageBoxBlurFilter;
12 |
13 | @interface GPUImageColourFASTFeatureDetector : GPUImageFilterGroup
14 | {
15 | GPUImageBoxBlurFilter *blurFilter;
16 | GPUImageColourFASTSamplingOperation *colourFASTSamplingOperation;
17 | }
18 | // The blur radius of the underlying box blur. The default is 3.0.
19 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageColourFASTSamplingOperation.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | // This is the feature extraction phase of the ColourFAST feature detector, as described in:
4 | //
5 | // A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.
6 | //
7 | // Seth Hall, "GPU accelerated feature algorithms for mobile devices", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.
8 | // http://aut.researchgateway.ac.nz/handle/10292/7991
9 |
10 | @interface GPUImageColourFASTSamplingOperation : GPUImageTwoInputFilter
11 | {
12 | GLint texelWidthUniform, texelHeightUniform;
13 |
14 | CGFloat texelWidth, texelHeight;
15 | BOOL hasOverriddenImageSizeFactor;
16 | }
17 |
18 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
19 | @property(readwrite, nonatomic) CGFloat texelWidth;
20 | @property(readwrite, nonatomic) CGFloat texelHeight;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageContext.h:
--------------------------------------------------------------------------------
1 | #import "GLProgram.h"
2 | #import "GPUImageFramebuffer.h"
3 | #import "GPUImageFramebufferCache.h"
4 |
5 | #define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal)
6 |
7 | typedef NS_ENUM(NSUInteger, GPUImageRotationMode) {
8 | kGPUImageNoRotation,
9 | kGPUImageRotateLeft,
10 | kGPUImageRotateRight,
11 | kGPUImageFlipVertical,
12 | kGPUImageFlipHorizonal,
13 | kGPUImageRotateRightFlipVertical,
14 | kGPUImageRotateRightFlipHorizontal,
15 | kGPUImageRotate180
16 | };
17 |
18 | @interface GPUImageContext : NSObject
19 |
20 | @property(readonly, nonatomic) dispatch_queue_t contextQueue;
21 | @property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram;
22 | @property(readonly, retain, nonatomic) EAGLContext *context;
23 | @property(readonly) CVOpenGLESTextureCacheRef coreVideoTextureCache;
24 | @property(readonly) GPUImageFramebufferCache *framebufferCache;
25 |
26 | + (void *)contextKey;
27 | + (GPUImageContext *)sharedImageProcessingContext;
28 | + (dispatch_queue_t)sharedContextQueue;
29 | + (GPUImageFramebufferCache *)sharedFramebufferCache;
30 | + (void)useImageProcessingContext;
31 | - (void)useAsCurrentContext;
32 | + (void)setActiveShaderProgram:(GLProgram *)shaderProgram;
33 | - (void)setContextShaderProgram:(GLProgram *)shaderProgram;
34 | + (GLint)maximumTextureSizeForThisDevice;
35 | + (GLint)maximumTextureUnitsForThisDevice;
36 | + (GLint)maximumVaryingVectorsForThisDevice;
37 | + (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;
38 | + (BOOL)deviceSupportsRedTextures;
39 | + (BOOL)deviceSupportsFramebufferReads;
40 | + (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;
41 |
42 | - (void)presentBufferForDisplay;
43 | - (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;
44 |
45 | - (void)useSharegroup:(EAGLSharegroup *)sharegroup;
46 |
47 | // Manage fast texture upload
48 | + (BOOL)supportsFastTextureUpload;
49 |
50 | @end
51 |
52 | @protocol GPUImageInput
53 | - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
54 | - (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
55 | - (NSInteger)nextAvailableTextureIndex;
56 | - (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
57 | - (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
58 | - (CGSize)maximumOutputSize;
59 | - (void)endProcessing;
60 | - (BOOL)shouldIgnoreUpdatesToThisTarget;
61 | - (BOOL)enabled;
62 | - (BOOL)wantsMonochromeInput;
63 | - (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
64 | @end
65 |
--------------------------------------------------------------------------------
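
A few of the capability queries declared above, shown as a short sketch; these class methods just report device limits and are safe to call once a GL context exists.

    GLint maxTextureSize  = [GPUImageContext maximumTextureSizeForThisDevice];
    GLint maxTextureUnits = [GPUImageContext maximumTextureUnitsForThisDevice];
    BOOL fastUpload       = [GPUImageContext supportsFastTextureUpload];

    // Clamp an oversized image to something the GPU can actually hold as a single texture.
    CGSize fittedSize = [GPUImageContext sizeThatFitsWithinATextureForSize:CGSizeMake(8000.0, 8000.0)];

    NSLog(@"Max texture size: %d, units: %d, fast upload: %d, fitted: %@",
          maxTextureSize, maxTextureUnits, fastUpload, NSStringFromCGSize(fittedSize));
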
/VideoFilterText/VideoFilterText/Source/usr/GPUImageContrastFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Adjusts the contrast of the image
4 | */
5 | @interface GPUImageContrastFilter : GPUImageFilter
6 | {
7 | GLint contrastUniform;
8 | }
9 |
10 | /** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level
11 | */
12 | @property(readwrite, nonatomic) CGFloat contrast;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageCropFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCropFilter : GPUImageFilter
4 | {
5 | GLfloat cropTextureCoordinates[8];
6 | }
7 |
8 | // The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image
9 | @property(readwrite, nonatomic) CGRect cropRegion;
10 |
11 | // Initialization and teardown
12 | - (id)initWithCropRegion:(CGRect)newCropRegion;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
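
Because the crop region above is normalized with (0.0, 0.0) at the upper left, cropping the central square of a portrait 480x640 frame means keeping the full width and the middle 75% of the height. A sketch with the filter wiring omitted:

    // x = 0.0, y = 0.125, width = 1.0, height = 0.75
    // On a 480x640 input this keeps a 480x480 square centered vertically (80 px trimmed top and bottom).
    GPUImageCropFilter *cropFilter =
        [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.0, 0.125, 1.0, 0.75)];
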
/VideoFilterText/VideoFilterText/Source/usr/GPUImageCrosshairGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCrosshairGenerator : GPUImageFilter
4 | {
5 | GLint crosshairWidthUniform, crosshairColorUniform;
6 | }
7 |
8 | // The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.
9 | @property(readwrite, nonatomic) CGFloat crosshairWidth;
10 |
11 | // The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
12 | - (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
13 |
14 | // Rendering
15 | - (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageCrosshatchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCrosshatchFilter : GPUImageFilter
4 | {
5 | GLint crossHatchSpacingUniform, lineWidthUniform;
6 | }
7 | // The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
8 | @property(readwrite, nonatomic) CGFloat crossHatchSpacing;
9 |
10 | // A relative width for the crosshatch lines. The default is 0.003.
11 | @property(readwrite, nonatomic) CGFloat lineWidth;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDarkenBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDifferenceBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDilationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
4 | // This extends out bright features, and is most commonly used with black-and-white thresholded images.
5 |
6 | extern NSString *const kGPUImageDilationRadiusOneVertexShaderString;
7 | extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;
8 | extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;
9 | extern NSString *const kGPUImageDilationRadiusFourVertexShaderString;
10 |
11 | @interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter
12 |
13 | // Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
14 | - (id)initWithRadius:(NSUInteger)dilationRadius;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDirectionalNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 | GLint upperThresholdUniform, lowerThresholdUniform;
7 |
8 | BOOL hasOverriddenImageSizeFactor;
9 | }
10 |
11 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
12 | @property(readwrite, nonatomic) CGFloat texelWidth;
13 | @property(readwrite, nonatomic) CGFloat texelHeight;
14 |
15 | // These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)
16 | @property(readwrite, nonatomic) CGFloat upperThreshold;
17 | @property(readwrite, nonatomic) CGFloat lowerThreshold;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDirectionalSobelEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDissolveBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
4 | {
5 | GLint mixUniform;
6 | }
7 |
8 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
9 | @property(readwrite, nonatomic) CGFloat mix;
10 |
11 | @end
12 |
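A short sketch of the mix property with two still images; GPUImagePicture and the useNextFrameForImageCapture / imageFromCurrentFramebuffer capture pattern are assumed to behave as in the stock GPUImage framework, and the image names are placeholders.

#import "GPUImage.h"

GPUImagePicture *first  = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"first.jpg"]];
GPUImagePicture *second = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"second.jpg"]];
GPUImageDissolveBlendFilter *dissolve = [[GPUImageDissolveBlendFilter alloc] init];
dissolve.mix = 0.25; // 75% of the first image, 25% of the second

[first addTarget:dissolve];  // image 1
[second addTarget:dissolve]; // image 2
[dissolve useNextFrameForImageCapture];
[first processImage];
[second processImage];
UIImage *blended = [dissolve imageFromCurrentFramebuffer];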
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageDivideBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageEmbossFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3ConvolutionFilter.h"
2 |
3 | @interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter
4 |
5 | // The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
6 | @property(readwrite, nonatomic) CGFloat intensity;
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageErosionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
4 | // This extends out dark features, and is most commonly used with black-and-white thresholded images.
5 |
6 | @interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter
7 |
8 | // Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
9 | - (id)initWithRadius:(NSUInteger)erosionRadius;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageExclusionBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageExposureFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageExposureFilter : GPUImageFilter
4 | {
5 | GLint exposureUniform;
6 | }
7 |
8 | // Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat exposure;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFASTCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGrayscaleFilter;
4 | @class GPUImage3x3TextureSamplingFilter;
5 | @class GPUImageNonMaximumSuppressionFilter;
6 |
7 | /*
8 | An implementation of the Features from Accelerated Segment Test (FAST) feature detector as described in the following publications:
9 |
10 | E. Rosten and T. Drummond. Fusing points and lines for high performance tracking. IEEE International Conference on Computer Vision, 2005.
11 | E. Rosten and T. Drummond. Machine learning for high-speed corner detection. European Conference on Computer Vision, 2006.
12 |
13 | For more about the FAST feature detector, see the resources here:
14 | http://www.edwardrosten.com/work/fast.html
15 | */
16 |
17 | typedef enum { kGPUImageFAST12Contiguous, kGPUImageFAST12ContiguousNonMaximumSuppressed} GPUImageFASTDetectorType;
18 |
19 | @interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
20 | {
21 | GPUImageGrayscaleFilter *luminanceReductionFilter;
22 | GPUImage3x3TextureSamplingFilter *featureDetectionFilter;
23 | GPUImageNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
24 | // Generate a lookup texture based on the bit patterns
25 |
26 | // Step 1: convert to monochrome if necessary
27 | // Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components
28 | // Step 3: do non-maximum suppression of close corner points
29 | }
30 |
31 | - (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;
32 |
33 | @end
34 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFalseColorFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageFalseColorFilter : GPUImageFilter
4 | {
5 | GLint firstColorUniform, secondColorUniform;
6 | }
7 |
8 | // The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0).
9 | @property(readwrite, nonatomic) GPUVector4 firstColor;
10 | @property(readwrite, nonatomic) GPUVector4 secondColor;
11 |
12 | - (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
13 | - (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
14 |
15 | @end
16 |
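A minimal sketch of the per-component setters above; the color values are illustrative.

GPUImageFalseColorFilter *falseColor = [[GPUImageFalseColorFilter alloc] init];
// Map shadows to deep blue and highlights to orange instead of the defaults.
[falseColor setFirstColorRed:0.0 green:0.0 blue:0.3];
[falseColor setSecondColorRed:1.0 green:0.5 blue:0.0];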
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | #define STRINGIZE(x) #x
4 | #define STRINGIZE2(x) STRINGIZE(x)
5 | #define SHADER_STRING(text) @ STRINGIZE2(text)
6 |
7 | #define GPUImageHashIdentifier #
8 | #define GPUImageWrappedLabel(x) x
9 | #define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a
10 |
11 | extern NSString *const kGPUImageVertexShaderString;
12 | extern NSString *const kGPUImagePassthroughFragmentShaderString;
13 |
14 | struct GPUVector4 {
15 | GLfloat one;
16 | GLfloat two;
17 | GLfloat three;
18 | GLfloat four;
19 | };
20 | typedef struct GPUVector4 GPUVector4;
21 |
22 | struct GPUVector3 {
23 | GLfloat one;
24 | GLfloat two;
25 | GLfloat three;
26 | };
27 | typedef struct GPUVector3 GPUVector3;
28 |
29 | struct GPUMatrix4x4 {
30 | GPUVector4 one;
31 | GPUVector4 two;
32 | GPUVector4 three;
33 | GPUVector4 four;
34 | };
35 | typedef struct GPUMatrix4x4 GPUMatrix4x4;
36 |
37 | struct GPUMatrix3x3 {
38 | GPUVector3 one;
39 | GPUVector3 two;
40 | GPUVector3 three;
41 | };
42 | typedef struct GPUMatrix3x3 GPUMatrix3x3;
43 |
44 | /** GPUImage's base filter class
45 |
46 | Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.
47 | */
48 | @interface GPUImageFilter : GPUImageOutput
49 | {
50 | GPUImageFramebuffer *firstInputFramebuffer;
51 |
52 | GLProgram *filterProgram;
53 | GLint filterPositionAttribute, filterTextureCoordinateAttribute;
54 | GLint filterInputTextureUniform;
55 | GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
56 |
57 | BOOL isEndProcessing;
58 |
59 | CGSize currentFilterSize;
60 | GPUImageRotationMode inputRotation;
61 |
62 | BOOL currentlyReceivingMonochromeInput;
63 |
64 | NSMutableDictionary *uniformStateRestorationBlocks;
65 | dispatch_semaphore_t imageCaptureSemaphore;
66 | }
67 |
68 | @property(readonly) CVPixelBufferRef renderTarget;
69 | @property(readwrite, nonatomic) BOOL preventRendering;
70 | @property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput;
71 |
72 | /// @name Initialization and teardown
73 |
74 | /**
75 | Initialize with vertex and fragment shaders
76 |
77 | You may take advantage of the SHADER_STRING macro to write your shaders in-line.
78 | @param vertexShaderString Source code of the vertex shader to use
79 | @param fragmentShaderString Source code of the fragment shader to use
80 | */
81 | - (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
82 |
83 | /**
84 | Initialize with a fragment shader
85 |
86 | You may take advantage of the SHADER_STRING macro to write your shader in-line.
87 | @param fragmentShaderString Source code of fragment shader to use
88 | */
89 | - (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
90 | /**
91 | Initialize with a fragment shader
92 | @param fragmentShaderFilename Filename of fragment shader to load
93 | */
94 | - (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
95 | - (void)initializeAttributes;
96 | - (void)setupFilterForSize:(CGSize)filterFrameSize;
97 | - (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
98 | - (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
99 |
100 | /// @name Managing the display FBOs
101 | /** Size of the frame buffer object
102 | */
103 | - (CGSize)sizeOfFBO;
104 |
105 | /// @name Rendering
106 | + (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
107 | - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
108 | - (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
109 | - (CGSize)outputFrameSize;
110 |
111 | /// @name Input parameters
112 | - (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
113 | - (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
114 | - (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
115 | - (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
116 | - (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
117 | - (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
118 | - (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
119 | - (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName;
120 |
121 | - (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
122 | - (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
123 | - (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
124 | - (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
125 | - (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
126 | - (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
127 | - (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
128 | - (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
129 | - (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
130 |
131 | - (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
132 | - (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
133 |
134 | @end
135 |
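Since this header documents SHADER_STRING and the shader-string initializers, here is a sketch of the usual way a custom filter is built on top of GPUImageFilter; the invert fragment shader and the ExampleInvertFilter class are illustrative, not part of this project.

#import "GPUImageFilter.h"

NSString *const kExampleInvertFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;

 void main()
 {
     lowp vec4 color = texture2D(inputImageTexture, textureCoordinate);
     gl_FragColor = vec4(1.0 - color.rgb, color.a);
 }
);

@interface ExampleInvertFilter : GPUImageFilter
@end

@implementation ExampleInvertFilter

- (id)init
{
    // The default vertex shader (kGPUImageVertexShaderString) is paired with the custom fragment shader.
    if (!(self = [super initWithFragmentShaderFromString:kExampleInvertFragmentShaderString]))
    {
        return nil;
    }
    return self;
}

@end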
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFilterGroup.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 | #import "GPUImageFilter.h"
3 |
4 | @interface GPUImageFilterGroup : GPUImageOutput
5 | {
6 | NSMutableArray *filters;
7 | BOOL isEndProcessing;
8 | }
9 |
10 | @property(readwrite, nonatomic, strong) GPUImageOutput *terminalFilter;
11 | @property(readwrite, nonatomic, strong) NSArray *initialFilters;
12 | @property(readwrite, nonatomic, strong) GPUImageOutput *inputFilterToIgnoreForUpdates;
13 |
14 | // Filter management
15 | - (void)addFilter:(GPUImageOutput *)newFilter;
16 | - (GPUImageOutput *)filterAtIndex:(NSUInteger)filterIndex;
17 | - (NSUInteger)filterCount;
18 |
19 | @end
20 |
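The initialFilters and terminalFilter properties are what route frames through a group. A minimal subclass sketch, assuming GPUImageSepiaFilter and the Gaussian blur filter are available from this Source tree:

#import "GPUImageFilterGroup.h"
#import "GPUImageSepiaFilter.h"
#import "GPUImageGaussianBlurFilter.h"

@interface ExampleSepiaBlurGroup : GPUImageFilterGroup
@end

@implementation ExampleSepiaBlurGroup

- (id)init
{
    if (!(self = [super init]))
    {
        return nil;
    }

    GPUImageSepiaFilter *sepia = [[GPUImageSepiaFilter alloc] init];
    GPUImageGaussianBlurFilter *blur = [[GPUImageGaussianBlurFilter alloc] init];
    [self addFilter:sepia];
    [self addFilter:blur];
    [sepia addTarget:blur];

    self.initialFilters = @[sepia]; // receives the incoming frame
    self.terminalFilter = blur;     // its output becomes the group's output
    return self;
}

@end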
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFilterPipeline.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import "GPUImageOutput.h"
3 |
4 | @interface GPUImageFilterPipeline : NSObject
5 | {
6 | NSString *stringValue;
7 | }
8 |
9 | @property (strong) NSMutableArray *filters;
10 |
11 | @property (strong) GPUImageOutput *input;
12 | @property (strong) id <GPUImageInput> output;
13 |
14 | - (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
15 | - (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
16 | - (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
17 |
18 | - (void) addFilter:(GPUImageOutput *)filter;
19 | - (void) addFilter:(GPUImageOutput *)filter atIndex:(NSUInteger)insertIndex;
20 | - (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput *)filter;
21 | - (void) replaceAllFilters:(NSArray *) newFilters;
22 | - (void) removeFilter:(GPUImageOutput *)filter;
23 | - (void) removeFilterAtIndex:(NSUInteger)index;
24 | - (void) removeAllFilters;
25 |
26 | - (UIImage *) currentFilteredFrame;
27 | - (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
28 | - (CGImageRef) newCGImageFromCurrentFilteredFrame;
29 |
30 | @end
31 |
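A usage sketch for the pipeline, assuming a GPUImageVideoCamera source, a GPUImageView named filterView, and the sepia, Gaussian blur, and pixellate filters from the framework; the preset and indices are illustrative.

#import "GPUImage.h"
#import "GPUImageFilterPipeline.h"

GPUImageVideoCamera *camera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                                   cameraPosition:AVCaptureDevicePositionBack];
NSArray *orderedFilters = @[[[GPUImageSepiaFilter alloc] init],
                            [[GPUImageGaussianBlurFilter alloc] init]];
GPUImageFilterPipeline *pipeline = [[GPUImageFilterPipeline alloc] initWithOrderedFilters:orderedFilters
                                                                                     input:camera
                                                                                    output:filterView]; // filterView: a GPUImageView (assumed)
[camera startCameraCapture];

// The chain can be edited in place later, e.g. swap the blur for a pixellate filter:
[pipeline replaceFilterAtIndex:1 withFilter:[[GPUImagePixellateFilter alloc] init]];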
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFourInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageThreeInputFilter.h"
2 |
3 | extern NSString *const kGPUImageFourInputTextureVertexShaderString;
4 |
5 | @interface GPUImageFourInputFilter : GPUImageThreeInputFilter
6 | {
7 | GPUImageFramebuffer *fourthInputFramebuffer;
8 |
9 | GLint filterFourthTextureCoordinateAttribute;
10 | GLint filterInputTextureUniform4;
11 | GPUImageRotationMode inputRotation4;
12 | GLuint filterSourceTexture4;
13 | CMTime fourthFrameTime;
14 |
15 | BOOL hasSetThirdTexture, hasReceivedFourthFrame, fourthFrameWasVideo;
16 | BOOL fourthFrameCheckDisabled;
17 | }
18 |
19 | - (void)disableFourthFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFramebuffer.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | #import <OpenGLES/EAGL.h>
5 | #import <OpenGLES/ES2/gl.h>
6 | #import <OpenGLES/ES2/glext.h>
7 | #else
8 | #import <OpenGL/OpenGL.h>
9 | #import <OpenGL/gl.h>
10 | #endif
11 |
12 | #import <QuartzCore/QuartzCore.h>
13 | #import <CoreMedia/CoreMedia.h>
14 |
15 |
16 | typedef struct GPUTextureOptions {
17 | GLenum minFilter;
18 | GLenum magFilter;
19 | GLenum wrapS;
20 | GLenum wrapT;
21 | GLenum internalFormat;
22 | GLenum format;
23 | GLenum type;
24 | } GPUTextureOptions;
25 |
26 | @interface GPUImageFramebuffer : NSObject
27 |
28 | @property(readonly) CGSize size;
29 | @property(readonly) GPUTextureOptions textureOptions;
30 | @property(readonly) GLuint texture;
31 | @property(readonly) BOOL missingFramebuffer;
32 |
33 | // Initialization and teardown
34 | - (id)initWithSize:(CGSize)framebufferSize;
35 | - (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
36 | - (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
37 |
38 | // Usage
39 | - (void)activateFramebuffer;
40 |
41 | // Reference counting
42 | - (void)lock;
43 | - (void)unlock;
44 | - (void)clearAllLocks;
45 | - (void)disableReferenceCounting;
46 | - (void)enableReferenceCounting;
47 |
48 | // Image capture
49 | - (CGImageRef)newCGImageFromFramebufferContents;
50 | - (void)restoreRenderTarget;
51 |
52 | // Raw data bytes
53 | - (void)lockForReading;
54 | - (void)unlockAfterReading;
55 | - (NSUInteger)bytesPerRow;
56 | - (GLubyte *)byteBuffer;
57 | - (CVPixelBufferRef)pixelBuffer;
58 |
59 | @end
60 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFramebufferCache.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <QuartzCore/QuartzCore.h>
3 | #import "GPUImageFramebuffer.h"
4 |
5 | @interface GPUImageFramebufferCache : NSObject
6 |
7 | // Framebuffer management
8 | - (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
9 | - (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
10 | - (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
11 | - (void)purgeAllUnassignedFramebuffers;
12 | - (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
13 | - (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
14 |
15 | @end
16 |
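A sketch of borrowing a framebuffer from the shared cache and reading its bytes; the [GPUImageContext sharedFramebufferCache] accessor is assumed to exist as in the stock framework, and the reference-counting comments follow the lock/unlock API declared above.

#import "GPUImageContext.h"
#import "GPUImageFramebufferCache.h"

GPUImageFramebufferCache *cache = [GPUImageContext sharedFramebufferCache];
GPUImageFramebuffer *framebuffer = [cache fetchFramebufferForSize:CGSizeMake(640.0, 480.0) onlyTexture:NO];

[framebuffer activateFramebuffer]; // render into the framebuffer here

[framebuffer lockForReading];
GLubyte *bytes = [framebuffer byteBuffer];
NSUInteger stride = [framebuffer bytesPerRow];
// inspect bytes row by row using stride
[framebuffer unlockAfterReading];

[framebuffer unlock]; // once the lock count reaches zero, the buffer returns to the cache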
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageFramework.h:
--------------------------------------------------------------------------------
1 | #import
2 |
3 | //! Project version number for GPUImageFramework.
4 | FOUNDATION_EXPORT double GPUImageFrameworkVersionNumber;
5 |
6 | //! Project version string for GPUImageFramework.
7 | FOUNDATION_EXPORT const unsigned char GPUImageFrameworkVersionString[];
8 |
9 | #import
10 |
11 | // Base classes
12 | #import
13 | #import
14 | #import
15 | #import
16 | #import
17 | #import
18 | #import
19 | #import
20 | #import
21 | #import
22 | #import
23 | #import
24 | #import
25 | #import
26 | #import
27 | #import
28 | #import
29 | #import
30 |
31 | // Filters
32 | #import
33 | #import
34 | #import
35 | #import
36 | #import
37 | #import
38 | #import
39 | #import
40 | #import
41 | #import
42 | #import
43 | #import
44 | #import
45 | #import
46 | #import
47 | #import
48 | #import
49 | #import
50 | #import
51 | #import
52 | #import
53 | #import
54 | #import
55 | #import
56 | #import
57 | #import
58 | #import
59 | #import
60 | #import
61 | #import
62 | #import
63 | #import
64 | #import
65 | #import
66 | #import
67 | #import
68 | #import
69 | #import
70 | #import
71 | #import
72 | #import
73 | #import
74 | #import
75 | #import
76 | #import
77 | #import
78 | #import
79 | #import
80 | #import
81 | #import
82 | #import
83 | #import
84 | #import
85 | #import
86 | #import
87 | #import
88 | #import
89 | #import
90 | #import
91 | #import
92 | #import
93 | #import
94 | #import
95 | #import
96 | #import
97 | #import
98 | #import
99 | #import
100 | #import
101 | #import
102 | #import
103 | #import
104 | #import
105 | #import
106 | #import
107 | #import
108 | #import
109 | #import
110 | #import
111 | #import
112 | #import
113 | #import
114 | #import
115 | #import
116 | #import
117 | #import
118 | #import
119 | #import
120 | #import
121 | #import
122 | #import
123 | #import
124 | #import
125 | #import
126 | #import
127 | #import
128 | #import
129 | #import
130 | #import
131 | #import
132 | #import
133 | #import
134 | #import
135 | #import
136 | #import
137 | #import
138 | #import
139 | #import
140 | #import
141 | #import
142 | #import
143 | #import
144 | #import
145 | #import
146 | #import
147 | #import
148 | #import
149 | #import
150 | #import
151 | #import
152 | #import
153 | #import
154 | #import
155 | #import
156 | #import
157 | #import
158 | #import
159 | #import
160 | #import
161 | #import
162 | #import
163 | #import
164 | #import
165 | #import
166 | #import
167 | #import
168 | #import
169 | #import
170 | #import
171 | #import
172 | #import
173 | #import
174 | #import
175 | #import
176 | #import
177 | #import
178 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageGammaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageGammaFilter : GPUImageFilter
4 | {
5 | GLint gammaUniform;
6 | }
7 |
8 | // Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat gamma;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageGaussianBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | /** A Gaussian blur filter
4 | Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
5 | */
6 |
7 | @interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
8 | {
9 | BOOL shouldResizeBlurRadiusWithImageSize;
10 | CGFloat _blurRadiusInPixels;
11 | }
12 |
13 | /** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.
14 | */
15 | @property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;
16 |
17 | /** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
18 | */
19 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
20 |
21 | /** Setting these properties will allow the blur radius to scale with the size of the image. These properties are mutually exclusive; setting either will set the other to 0.
22 | */
23 | @property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;
24 | @property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;
25 |
26 | /// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
27 | @property(readwrite, nonatomic) NSUInteger blurPasses;
28 |
29 | + (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
30 | + (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
31 | + (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
32 | + (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
33 |
34 | - (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
35 |
36 | @end
37 |
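A small sketch of the tuning properties; the numbers are illustrative.

GPUImageGaussianBlurFilter *blur = [[GPUImageGaussianBlurFilter alloc] init];
blur.blurRadiusInPixels = 8.0; // fixed radius; adjusts the Gaussian sigma

// Alternatively, let the radius track the image size instead of using a fixed value:
// blur.blurRadiusAsFractionOfImageWidth = 0.02;

blur.blurPasses = 2; // blur the image twice; smoother, but proportionally slower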
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageGaussianBlurPositionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | /** A more generalized 9x9 Gaussian blur filter
4 | */
5 | @interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter
6 | {
7 | GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;
8 | }
9 |
10 | /** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
11 | */
12 | @property (readwrite, nonatomic) CGFloat blurSize;
13 |
14 | /** Center for the blur, defaults to 0.5, 0.5
15 | */
16 | @property (readwrite, nonatomic) CGPoint blurCenter;
17 |
18 | /** Radius for the blur, defaults to 1.0
19 | */
20 | @property (readwrite, nonatomic) CGFloat blurRadius;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageGaussianSelectiveBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | /** A Gaussian blur that preserves focus within a circular region
6 | */
7 | @interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
8 | {
9 | GPUImageGaussianBlurFilter *blurFilter;
10 | GPUImageFilter *selectiveFocusFilter;
11 | BOOL hasOverriddenAspectRatio;
12 | }
13 |
14 | /** The radius of the circular area being excluded from the blur
15 | */
16 | @property (readwrite, nonatomic) CGFloat excludeCircleRadius;
17 | /** The center of the circular area being excluded from the blur
18 | */
19 | @property (readwrite, nonatomic) CGPoint excludeCirclePoint;
20 | /** The size of the area between the blurred portion and the clear circle
21 | */
22 | @property (readwrite, nonatomic) CGFloat excludeBlurSize;
23 | /** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
24 | */
25 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
26 | /** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
27 | */
28 | @property (readwrite, nonatomic) CGFloat aspectRatio;
29 |
30 | @end
31 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageGlassSphereFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSphereRefractionFilter.h"
2 |
3 | @interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageGrayscaleFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageLuminanceFragmentShaderString;
4 |
5 | /** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
6 | */
7 | @interface GPUImageGrayscaleFilter : GPUImageFilter
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHSBFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorMatrixFilter.h"
2 |
3 | @interface GPUImageHSBFilter : GPUImageColorMatrixFilter
4 |
5 | /** Reset the filter to have no transformations.
6 | */
7 | - (void)reset;
8 |
9 | /** Add a hue rotation to the filter.
10 | The hue rotation is in the range [-360, 360] with 0 being no-change.
11 | Note that this adjustment is additive, so use the reset method if you need to.
12 | */
13 | - (void)rotateHue:(float)h;
14 |
15 | /** Add a saturation adjustment to the filter.
16 | The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
17 | Note that this adjustment is additive, so use the reset method if you need to.
18 | */
19 | - (void)adjustSaturation:(float)s;
20 |
21 | /** Add a brightness adjustment to the filter.
22 | The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
23 | Note that this adjustment is additive, so use the reset method if you need to.
24 | */
25 | - (void)adjustBrightness:(float)b;
26 |
27 | @end
28 |
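Because the adjustments are additive, a typical call sequence resets first and then stacks the desired changes; the values below are illustrative.

GPUImageHSBFilter *hsb = [[GPUImageHSBFilter alloc] init];
[hsb reset];                // start from the identity transform
[hsb rotateHue:30.0];       // degrees
[hsb adjustSaturation:1.2]; // 1.0 = unchanged
[hsb adjustBrightness:0.9];
// Calling the adjust methods again stacks on top of the current matrix; call reset to start over.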
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHalftoneFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImagePixellateFilter.h"
2 |
3 | @interface GPUImageHalftoneFilter : GPUImagePixellateFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHardLightBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHarrisCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 | @class GPUImageXYDerivativeFilter;
5 | @class GPUImageGrayscaleFilter;
6 | @class GPUImageGaussianBlurFilter;
7 | @class GPUImageThresholdedNonMaximumSuppressionFilter;
8 | @class GPUImageColorPackingFilter;
9 |
10 | //#define DEBUGFEATUREDETECTION
11 |
12 | /** Harris corner detector
13 |
14 | First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter)
15 |
16 | Second pass: blur the derivative (GPUImageGaussianBlurFilter)
17 |
18 | Third pass: apply the Harris corner detection calculation
19 |
20 | This is the Harris corner detector, as described in
21 | C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
22 | */
23 | @interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup
24 | {
25 | GPUImageXYDerivativeFilter *derivativeFilter;
26 | GPUImageGaussianBlurFilter *blurFilter;
27 | GPUImageFilter *harrisCornerDetectionFilter;
28 | GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
29 | GPUImageColorPackingFilter *colorPackingFilter;
30 | GLfloat *cornersArray;
31 | GLubyte *rawImagePixels;
32 | }
33 |
34 | /** The radius of the underlying Gaussian blur. The default is 2.0.
35 | */
36 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
37 |
38 | // This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0.
39 | @property(readwrite, nonatomic) CGFloat sensitivity;
40 |
41 | // A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20.
42 | @property(readwrite, nonatomic) CGFloat threshold;
43 |
44 | // This block is called on the detection of new corner points, usually on every processed frame. A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame
45 | @property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime);
46 |
47 | // These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector
48 | @property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;
49 |
50 | // Initialization and teardown
51 | - (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;
52 |
53 | @end
54 |
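A sketch of consuming the corner callback from a live source; camera stands for a GPUImageVideoCamera already feeding frames, and the threshold and sensitivity values are illustrative.

GPUImageHarrisCornerDetectionFilter *cornerDetector = [[GPUImageHarrisCornerDetectionFilter alloc] init];
cornerDetector.threshold = 0.20;
cornerDetector.sensitivity = 5.0;

cornerDetector.cornersDetectedBlock = ^(GLfloat *cornerArray, NSUInteger cornersDetected, CMTime frameTime) {
    // cornerArray holds normalized (x, y) pairs for the detected corners
    for (NSUInteger i = 0; i < cornersDetected; i++)
    {
        CGPoint corner = CGPointMake(cornerArray[i * 2], cornerArray[(i * 2) + 1]);
        NSLog(@"Corner %lu at (%f, %f)", (unsigned long)i, corner.x, corner.y);
    }
};

[camera addTarget:cornerDetector];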
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHazeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /*
4 | * The haze filter can be used to add or remove haze (similar to a UV filter)
5 | *
6 | * @author Alaric Cole
7 | * @creationDate 03/10/12
8 | *
9 | */
10 |
11 | /** The haze filter can be used to add or remove haze
12 |
13 | This is similar to a UV filter
14 | */
15 | @interface GPUImageHazeFilter : GPUImageFilter
16 | {
17 | GLint distanceUniform;
18 | GLint slopeUniform;
19 | }
20 |
21 | /** Strength of the color applied. Default 0. Values between -.3 and .3 are best
22 | */
23 | @property(readwrite, nonatomic) CGFloat distance;
24 |
25 | /** Amount of color change. Default 0. Values between -.3 and .3 are best
26 | */
27 | @property(readwrite, nonatomic) CGFloat slope;
28 |
29 | @end
30 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHighPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageLowPassFilter.h"
3 | #import "GPUImageDifferenceBlendFilter.h"
4 |
5 | @interface GPUImageHighPassFilter : GPUImageFilterGroup
6 | {
7 | GPUImageLowPassFilter *lowPassFilter;
8 | GPUImageDifferenceBlendFilter *differenceBlendFilter;
9 | }
10 |
11 | // This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
12 | @property(readwrite, nonatomic) CGFloat filterStrength;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHighlightShadowFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageHighlightShadowFilter : GPUImageFilter
4 | {
5 | GLint shadowsUniform, highlightsUniform;
6 | }
7 |
8 | /**
9 | * 0 - 1, increase to lighten shadows.
10 | * @default 0
11 | */
12 | @property(readwrite, nonatomic) CGFloat shadows;
13 |
14 | /**
15 | * 0 - 1, decrease to darken highlights.
16 | * @default 1
17 | */
18 | @property(readwrite, nonatomic) CGFloat highlights;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHistogramEqualizationFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageHistogramEqualizationFilter.h
3 | // FilterShowcase
4 | //
5 | // Created by Adam Marcus on 19/08/2014.
6 | // Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.
7 | //
8 |
9 | #import "GPUImageFilterGroup.h"
10 | #import "GPUImageHistogramFilter.h"
11 | #import "GPUImageRawDataOutput.h"
12 | #import "GPUImageRawDataInput.h"
13 | #import "GPUImageTwoInputFilter.h"
14 |
15 | @interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup
16 | {
17 | GPUImageHistogramFilter *histogramFilter;
18 | GPUImageRawDataOutput *rawDataOutputFilter;
19 | GPUImageRawDataInput *rawDataInputFilter;
20 | }
21 |
22 | @property(readwrite, nonatomic) NSUInteger downsamplingFactor;
23 |
24 | - (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
25 |
26 | @end
27 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHistogramFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType;
4 |
5 | @interface GPUImageHistogramFilter : GPUImageFilter
6 | {
7 | GPUImageHistogramType histogramType;
8 |
9 | GLubyte *vertexSamplingCoordinates;
10 |
11 | GLProgram *secondFilterProgram, *thirdFilterProgram;
12 | GLint secondFilterPositionAttribute, thirdFilterPositionAttribute;
13 | }
14 |
15 | // Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.
16 | @property(readwrite, nonatomic) NSUInteger downsamplingFactor;
17 |
18 | // Initialization and teardown
19 | - (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
20 | - (void)initializeSecondaryAttributes;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHistogramGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageHistogramGenerator : GPUImageFilter
4 | {
5 | GLint backgroundColorUniform;
6 | }
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHoughTransformLineDetector.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageThresholdEdgeDetectionFilter.h"
3 | #import "GPUImageParallelCoordinateLineTransformFilter.h"
4 | #import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
5 | #import "GPUImageCannyEdgeDetectionFilter.h"
6 |
7 | // This applies a Hough transform to detect lines in a scene. It starts with a thresholded Sobel edge detection pass,
8 | // then takes those edge points in and applies a Hough transform to convert them to lines. The intersection of these lines
9 | // is then determined via blending and accumulation, and a non-maximum suppression filter is applied to find local maxima.
10 | // These local maxima are then converted back into lines in normal space and returned via a callback block.
11 | //
12 | // Rather than using one of the standard Hough transform types, this filter uses parallel coordinate space which is far more efficient
13 | // to rasterize on a GPU.
14 | //
15 | // This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology
16 | // and described in their publications:
17 | //
18 | // M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
19 | // http://medusa.fit.vutbr.cz/public/data/papers/2011-SCCG-Dubska-Real-Time-Line-Detection-Using-PC-and-OpenGL.pdf
20 | // M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), pp. 1489-1494.
21 | // http://medusa.fit.vutbr.cz/public/data/papers/2011-CVPR-Dubska-PClines.pdf
22 |
23 | //#define DEBUGLINEDETECTION
24 |
25 | @interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup
26 | {
27 | GPUImageOutput *thresholdEdgeDetectionFilter;
28 |
29 | // GPUImageThresholdEdgeDetectionFilter *thresholdEdgeDetectionFilter;
30 | GPUImageParallelCoordinateLineTransformFilter *parallelCoordinateLineTransformFilter;
31 | GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
32 |
33 | GLfloat *linesArray;
34 | GLubyte *rawImagePixels;
35 | }
36 |
37 | // A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9.
38 | @property(readwrite, nonatomic) CGFloat edgeThreshold;
39 |
40 | // A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.20.
41 | @property(readwrite, nonatomic) CGFloat lineDetectionThreshold;
42 |
43 | // This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y=mx+b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame
44 | @property(nonatomic, copy) void(^linesDetectedBlock)(GLfloat* lineArray, NSUInteger linesDetected, CMTime frameTime);
45 |
46 | // These images are only enabled when built with DEBUGLINEDETECTION defined, and are used to examine the intermediate states of the Hough transform
47 | @property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;
48 |
49 | @end
50 |
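A sketch of pairing the detector with the GPUImageLineGenerator declared later in this Source tree, so detected lines can be drawn as an overlay; the threshold is illustrative.

GPUImageHoughTransformLineDetector *lineDetector = [[GPUImageHoughTransformLineDetector alloc] init];
lineDetector.lineDetectionThreshold = 0.3;

GPUImageLineGenerator *lineGenerator = [[GPUImageLineGenerator alloc] init];
[lineGenerator setLineColorRed:1.0 green:0.0 blue:0.0];

lineDetector.linesDetectedBlock = ^(GLfloat *lineArray, NSUInteger linesDetected, CMTime frameTime) {
    // Each pair is (m, b) for y = mx + b in normalized coordinates
    [lineGenerator renderLinesFromArray:lineArray count:linesDetected frameTime:frameTime];
};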
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHueBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageHueBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageHueFilter.h:
--------------------------------------------------------------------------------
1 |
2 | #import "GPUImageFilter.h"
3 |
4 | @interface GPUImageHueFilter : GPUImageFilter
5 | {
6 | GLint hueAdjustUniform;
7 |
8 | }
9 | @property (nonatomic, readwrite) CGFloat hue;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageJFAVoronoiFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageJFAVoronoiFilter : GPUImageFilter
4 | {
5 | GLuint secondFilterOutputTexture;
6 | GLuint secondFilterFramebuffer;
7 |
8 |
9 | GLint sampleStepUniform;
10 | GLint sizeUniform;
11 | NSUInteger numPasses;
12 |
13 | }
14 |
15 | @property (nonatomic, readwrite) CGSize sizeInPixels;
16 |
17 | @end
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageKuwaharaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Kuwahara image abstraction, drawn from the work of Kyprianidis et al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
4 | */
5 | @interface GPUImageKuwaharaFilter : GPUImageFilter
6 | {
7 | GLint radiusUniform;
8 | }
9 |
10 | /// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.
11 | @property(readwrite, nonatomic) NSUInteger radius;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageKuwaharaRadius3Filter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageKuwaharaRadius3Filter.h
3 |
4 | #import "GPUImageFilter.h"
5 |
6 | @interface GPUImageKuwaharaRadius3Filter : GPUImageFilter
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLanczosResamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | @interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter
4 |
5 | @property(readwrite, nonatomic) CGSize originalImageSize;
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLaplacianFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3ConvolutionFilter.h"
2 |
3 | @interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLevelsFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /**
4 | * Levels like Photoshop.
5 | *
6 | * The min, max, minOut and maxOut parameters are floats in the range [0, 1].
7 | * If you have parameters from Photoshop in the range [0, 255] you must first
8 | * convert them to be [0, 1].
9 | * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop.
10 | *
11 | * If you want to apply levels to RGB as well as individual channels you need to use
12 | * this filter twice - first for the individual channels and then for all channels.
13 | */
14 | @interface GPUImageLevelsFilter : GPUImageFilter
15 | {
16 | GLint minUniform;
17 | GLint midUniform;
18 | GLint maxUniform;
19 | GLint minOutputUniform;
20 | GLint maxOutputUniform;
21 |
22 | GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector;
23 | }
24 |
25 | /** Set levels for the red channel */
26 | - (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
27 |
28 | - (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
29 |
30 | /** Set levels for the green channel */
31 | - (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
32 |
33 | - (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
34 |
35 | /** Set levels for the blue channel */
36 | - (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
37 |
38 | - (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
39 |
40 | /** Set levels for all channels at once */
41 | - (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
42 | - (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
43 |
44 | @end
45 |
46 |
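Since the header requires Photoshop-style [0, 255] values to be rescaled to [0, 1], here is the conversion worked through once; the 30/224 input levels and 1.2 gamma are illustrative numbers.

GPUImageLevelsFilter *levels = [[GPUImageLevelsFilter alloc] init];

// Photoshop-style values: input black 30, gamma 1.2, input white 224, full output range.
CGFloat min    = 30.0 / 255.0;   // ~0.118
CGFloat gamma  = 1.2;            // gamma/mid is passed through unchanged
CGFloat max    = 224.0 / 255.0;  // ~0.878
CGFloat minOut = 0.0;
CGFloat maxOut = 1.0;

[levels setMin:min gamma:gamma max:max minOut:minOut maxOut:maxOut]; // applies to all channels at once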
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLightenBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /// Blends two images by taking the maximum value of each color component between the images
4 | @interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
5 | {
6 | }
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLineGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageLineGenerator : GPUImageFilter
4 | {
5 | GLint lineWidthUniform, lineColorUniform;
6 | GLfloat *lineCoordinates;
7 | }
8 |
9 | // The width of the displayed lines, in pixels. The default is 1.
10 | @property(readwrite, nonatomic) CGFloat lineWidth;
11 |
12 | // The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
13 | - (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
14 |
15 | // Rendering
16 | - (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLinearBurnBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLocalBinaryPatternFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLookupFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageLookupFilter : GPUImageTwoInputFilter
4 | {
5 | GLint intensityUniform;
6 | }
7 |
8 | // How To Use:
9 | // 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.
10 | // For this to work properly, each pixel's color must not depend on other pixels (e.g. blur will not work).
11 | // If you need a more complex filter, you can create as many lookup tables as required.
12 | // E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png
13 | // 2) Use your new lookup.png file as the second input for GPUImageLookupFilter.
14 |
15 | // See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.
16 |
17 | // Additional Info:
18 | // Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:
19 | //for (int by = 0; by < 8; by++) {
20 | // for (int bx = 0; bx < 8; bx++) {
21 | // for (int g = 0; g < 64; g++) {
22 | // for (int r = 0; r < 64; r++) {
23 | // image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),
24 | // (int)(g * 255.0 / 63.0 + 0.5),
25 | // (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));
26 | // }
27 | // }
28 | // }
29 | //}
30 |
31 | // Opacity/intensity of lookup filter ranges from 0.0 to 1.0, with 1.0 as the normal setting
32 | @property(readwrite, nonatomic) CGFloat intensity;
33 |
34 | @end
35 |
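A sketch of step 2 from the comments above, feeding the edited lookup.png in as the second input; the photo asset name is a placeholder, and addTarget:atTextureLocation: plus the image-capture pattern are assumed to behave as in the stock framework.

#import "GPUImage.h"

GPUImagePicture *sourcePicture = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"photo.jpg"]];
GPUImagePicture *lookupPicture = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"lookup.png"]];
GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
lookupFilter.intensity = 1.0;

[sourcePicture addTarget:lookupFilter];                     // texture slot 0: the image being filtered
[lookupPicture addTarget:lookupFilter atTextureLocation:1]; // texture slot 1: the lookup table
[lookupFilter useNextFrameForImageCapture];
[sourcePicture processImage];
[lookupPicture processImage];
UIImage *filtered = [lookupFilter imageFromCurrentFramebuffer];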
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLowPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageBuffer.h"
3 | #import "GPUImageDissolveBlendFilter.h"
4 |
5 | @interface GPUImageLowPassFilter : GPUImageFilterGroup
6 | {
7 | GPUImageBuffer *bufferFilter;
8 | GPUImageDissolveBlendFilter *dissolveBlendFilter;
9 | }
10 |
11 | // This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
12 | @property(readwrite, nonatomic) CGFloat filterStrength;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLuminanceRangeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageLuminanceRangeFilter : GPUImageFilter
4 | {
5 | GLint rangeReductionUniform;
6 | }
7 |
8 | /** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
9 | */
10 | @property(readwrite, nonatomic) CGFloat rangeReductionFactor;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLuminanceThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Pixels with a luminance above the threshold will appear white, and those below will be black
4 | */
5 | @interface GPUImageLuminanceThresholdFilter : GPUImageFilter
6 | {
7 | GLint thresholdUniform;
8 | }
9 |
10 | /** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default
11 | */
12 | @property(readwrite, nonatomic) CGFloat threshold;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLuminosity.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageAverageColor.h"
2 |
3 | @interface GPUImageLuminosity : GPUImageAverageColor
4 | {
5 | GLProgram *secondFilterProgram;
6 | GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
7 | GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
8 | GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;
9 | }
10 |
11 | // This block is called on the completion of color averaging for a frame
12 | @property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);
13 |
14 | - (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
15 | - (void)initializeSecondaryAttributes;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageLuminosityBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMaskFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageMaskFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMedianFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMissEtikateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on a Photoshop action by Miss Etikate:
6 | http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961
7 | */
8 |
9 | // Note: If you want to use this effect, you have to add lookup_miss_etikate.png
10 | // from the Resources folder to your application bundle.
11 |
12 | @interface GPUImageMissEtikateFilter : GPUImageFilterGroup
13 | {
14 | GPUImagePicture *lookupImageSource;
15 | }
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMonochromeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageMonochromeFilter : GPUImageFilter
4 | {
5 | GLint intensityUniform, filterColorUniform;
6 | }
7 |
8 | @property(readwrite, nonatomic) CGFloat intensity;
9 | @property(readwrite, nonatomic) GPUVector4 color;
10 |
11 | - (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMosaicFilter.h:
--------------------------------------------------------------------------------
1 |
2 | // This needs a little more work; it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working
3 |
4 | #import "GPUImageTwoInputFilter.h"
5 | #import "GPUImagePicture.h"
6 |
7 | @interface GPUImageMosaicFilter : GPUImageTwoInputFilter {
8 | GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;
9 | GPUImagePicture *pic;
10 | }
11 |
12 | // This filter takes an input tileset; the tiles must ascend in luminance.
13 | // It looks at the input image and replaces each display tile with an input tile
14 | // according to the luminance of that tile. The idea was to replicate the ASCII
15 | // video filters seen in other apps, but the tileset can be anything.
16 | @property(readwrite, nonatomic) CGSize inputTileSize;
17 | @property(readwrite, nonatomic) float numTiles;
18 | @property(readwrite, nonatomic) CGSize displayTileSize;
19 | @property(readwrite, nonatomic) BOOL colorOn;
20 | @property(readwrite, nonatomic, copy) NSString *tileSet;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMotionBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageMotionBlurFilter : GPUImageFilter
4 |
5 | /** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
6 | */
7 | @property (readwrite, nonatomic) CGFloat blurSize;
8 |
9 | /** The angular direction of the blur, in degrees. 0 degrees by default
10 | */
11 | @property (readwrite, nonatomic) CGFloat blurAngle;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMotionDetector.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageLowPassFilter.h"
3 | #import "GPUImageAverageColor.h"
4 |
5 | @interface GPUImageMotionDetector : GPUImageFilterGroup
6 | {
7 | GPUImageLowPassFilter *lowPassFilter;
8 | GPUImageTwoInputFilter *frameComparisonFilter;
9 | GPUImageAverageColor *averageColor;
10 | }
11 |
12 | // This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.
13 | @property(readwrite, nonatomic) CGFloat lowPassFilterStrength;
14 |
15 | // For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.
16 | @property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);
17 |
18 | @end
19 |
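A minimal usage sketch (not from the original sources): attaching the detector to an existing video source and reading the callback. `camera` is an assumed GPUImageVideoCamera instance, and the 0.01 intensity cutoff is an arbitrary example value.

    GPUImageMotionDetector *motionDetector = [[GPUImageMotionDetector alloc] init];
    motionDetector.lowPassFilterStrength = 0.5;    // the default strength, shown here for clarity

    [motionDetector setMotionDetectionBlock:^(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime) {
        if (motionIntensity > 0.01) {              // arbitrary example threshold
            NSLog(@"Motion around (%.2f, %.2f) with intensity %.3f",
                  motionCentroid.x, motionCentroid.y, motionIntensity);
        }
    }];

    [camera addTarget:motionDetector];             // camera: an assumed GPUImageVideoCamera instance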
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMovie.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <AVFoundation/AVFoundation.h>
3 | #import "GPUImageContext.h"
4 | #import "GPUImageOutput.h"
5 |
6 | /** Protocol for getting Movie played callback.
7 | */
8 | @protocol GPUImageMovieDelegate
9 |
10 | - (void)didCompletePlayingMovie;
11 | @end
12 |
13 | /** Source object for filtering movies
14 | */
15 | @interface GPUImageMovie : GPUImageOutput
16 |
17 | @property (readwrite, retain) AVAsset *asset;
18 | @property (readwrite, retain) AVPlayerItem *playerItem;
19 | @property(readwrite, retain) NSURL *url;
20 |
21 | /** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
22 | */
23 | @property(readwrite, nonatomic) BOOL runBenchmark;
24 |
25 | /** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
26 | */
27 | @property(readwrite, nonatomic) BOOL playAtActualSpeed;
28 |
29 | /** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
30 | */
31 | @property(readwrite, nonatomic) BOOL shouldRepeat;
32 |
33 | /** This specifies the progress of the processing on a scale from 0 to 1.0. A value of 0 means the process has not yet begun, and a value of 1.0 means the conversion is complete.
34 | This property is not key-value observable.
35 | */
36 | @property(readonly, nonatomic) float progress;
37 |
38 | /** The delegate that is notified when the movie has completed playing
39 | */
40 | @property (readwrite, nonatomic, assign) id<GPUImageMovieDelegate> delegate;
41 |
42 | @property (readonly, nonatomic) AVAssetReader *assetReader;
43 | @property (readonly, nonatomic) BOOL audioEncodingIsFinished;
44 | @property (readonly, nonatomic) BOOL videoEncodingIsFinished;
45 |
46 | /// @name Initialization and teardown
47 | - (id)initWithAsset:(AVAsset *)asset;
48 | - (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
49 | - (id)initWithURL:(NSURL *)url;
50 | - (void)yuvConversionSetup;
51 |
52 | /// @name Movie processing
53 | - (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
54 | - (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
55 | - (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
56 | - (void)startProcessing;
57 | - (void)endProcessing;
58 | - (void)cancelProcessing;
59 | - (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
60 |
61 | @end
62 |
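A minimal usage sketch (not from the original sources): an offline transcode that reads a movie, runs it through a filter, and writes the result with the GPUImageMovieWriter declared further down. `inputURL` and `outputURL` are placeholder file URLs, and the sepia filter stands in for any filter in the library.

    GPUImageMovie *movieFile = [[GPUImageMovie alloc] initWithURL:inputURL];    // inputURL: placeholder source movie
    movieFile.playAtActualSpeed = NO;                                           // process frames as fast as possible

    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
    [movieFile addTarget:sepiaFilter];

    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL
                                                                                 size:CGSizeMake(640.0, 480.0)];
    [sepiaFilter addTarget:movieWriter];

    movieWriter.shouldPassthroughAudio = YES;                    // copy the source audio track unmodified
    movieFile.audioEncodingTarget = movieWriter;
    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

    [movieWriter startRecording];
    [movieFile startProcessing];

    [movieWriter setCompletionBlock:^{
        [sepiaFilter removeTarget:movieWriter];
        [movieWriter finishRecording];
    }];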
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMovieComposition.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageMovieComposition.h
3 | // Givit
4 | //
5 | // Created by Sean Meiners on 2013/01/25.
6 | //
7 | //
8 |
9 | #import "GPUImageMovie.h"
10 |
11 | @interface GPUImageMovieComposition : GPUImageMovie
12 |
13 | @property (readwrite, retain) AVComposition *compositon;
14 | @property (readwrite, retain) AVVideoComposition *videoComposition;
15 | @property (readwrite, retain) AVAudioMix *audioMix;
16 |
17 | - (id)initWithComposition:(AVComposition*)compositon
18 | andVideoComposition:(AVVideoComposition*)videoComposition
19 | andAudioMix:(AVAudioMix*)audioMix;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMovieWriter.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <AVFoundation/AVFoundation.h>
3 | #import "GPUImageContext.h"
4 |
5 | extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;
6 |
7 | @protocol GPUImageMovieWriterDelegate
8 |
9 | @optional
10 | - (void)movieRecordingCompleted;
11 | - (void)movieRecordingFailedWithError:(NSError*)error;
12 |
13 | @end
14 |
15 | @interface GPUImageMovieWriter : NSObject <GPUImageInput>
16 | {
17 | BOOL alreadyFinishedRecording;
18 |
19 | NSURL *movieURL;
20 | NSString *fileType;
21 | AVAssetWriter *assetWriter;
22 | AVAssetWriterInput *assetWriterAudioInput;
23 | AVAssetWriterInput *assetWriterVideoInput;
24 | AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
25 |
26 | GPUImageContext *_movieWriterContext;
27 | CVPixelBufferRef renderTarget;
28 | CVOpenGLESTextureRef renderTexture;
29 |
30 | CGSize videoSize;
31 | GPUImageRotationMode inputRotation;
32 | }
33 |
34 | @property(readwrite, nonatomic) BOOL hasAudioTrack;
35 | @property(readwrite, nonatomic) BOOL shouldPassthroughAudio;
36 | @property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;
37 | @property(nonatomic, copy) void(^completionBlock)(void);
38 | @property(nonatomic, copy) void(^failureBlock)(NSError*);
39 | @property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;
40 | @property(readwrite, nonatomic) BOOL encodingLiveVideo;
41 | @property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);
42 | @property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);
43 | @property(nonatomic, copy) void(^audioProcessingCallback)(SInt16 **samplesRef, CMItemCount numSamplesInBuffer);
44 | @property(nonatomic) BOOL enabled;
45 | @property(nonatomic, readonly) AVAssetWriter *assetWriter;
46 | @property(nonatomic, readonly) CMTime duration;
47 | @property(nonatomic, assign) CGAffineTransform transform;
48 | @property(nonatomic, copy) NSArray *metaData;
49 | @property(nonatomic, assign, getter = isPaused) BOOL paused;
50 | @property(nonatomic, retain) GPUImageContext *movieWriterContext;
51 |
52 | // Initialization and teardown
53 | - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
54 | - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;
55 |
56 | - (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;
57 |
58 | // Movie recording
59 | - (void)startRecording;
60 | - (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
61 | - (void)finishRecording;
62 | - (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
63 | - (void)cancelRecording;
64 | - (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
65 | - (void)enableSynchronizationCallbacks;
66 |
67 | @end
68 |
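A minimal usage sketch (not from the original sources): recording filtered live video to disk. `filter` and `videoCamera` are assumed to exist already, and the temporary path is just an example destination.

    NSString *moviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Movie.m4v"];
    NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
    [[NSFileManager defaultManager] removeItemAtURL:movieURL error:nil];   // clear any stale file at the example path

    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
                                                                                 size:CGSizeMake(480.0, 640.0)];
    movieWriter.encodingLiveVideo = YES;
    [filter addTarget:movieWriter];                        // filter: the last filter in the assumed live chain
    videoCamera.audioEncodingTarget = movieWriter;         // videoCamera: the assumed GPUImageVideoCamera source
    [movieWriter startRecording];

    // ... later, when recording should stop:
    [filter removeTarget:movieWriter];
    videoCamera.audioEncodingTarget = nil;
    [movieWriter finishRecordingWithCompletionHandler:^{
        NSLog(@"Finished writing %@", movieURL);
    }];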
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageMultiplyBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageNobleCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageHarrisCornerDetectionFilter.h"
2 |
3 | /** Noble corner detector
4 |
5 | This is the Noble variant on the Harris detector, from
6 | Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
7 | */
8 |
9 |
10 | @interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageNormalBlendFilter.h:
--------------------------------------------------------------------------------
1 | // Created by Jorge Garcia on 9/5/12.
2 | //
3 |
4 | #import "GPUImageTwoInputFilter.h"
5 |
6 | @interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageOpacityFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageOpacityFilter : GPUImageFilter
4 | {
5 | GLint opacityUniform;
6 | }
7 |
8 | // Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting
9 | @property(readwrite, nonatomic) CGFloat opacity;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageOpeningFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageErosionFilter;
4 | @class GPUImageDilationFilter;
5 |
6 | // A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
7 | // This helps to filter out smaller bright elements.
8 |
9 | @interface GPUImageOpeningFilter : GPUImageFilterGroup
10 | {
11 | GPUImageErosionFilter *erosionFilter;
12 | GPUImageDilationFilter *dilationFilter;
13 | }
14 |
15 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
16 |
17 | - (id)initWithRadius:(NSUInteger)radius;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageOutput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageContext.h"
2 | #import "GPUImageFramebuffer.h"
3 |
4 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
5 | #import <UIKit/UIKit.h>
6 | #else
7 | // For now, just redefine this on the Mac
8 | typedef NS_ENUM(NSInteger, UIImageOrientation) {
9 | UIImageOrientationUp, // default orientation
10 | UIImageOrientationDown, // 180 deg rotation
11 | UIImageOrientationLeft, // 90 deg CCW
12 | UIImageOrientationRight, // 90 deg CW
13 | UIImageOrientationUpMirrored, // as above but image mirrored along other axis. horizontal flip
14 | UIImageOrientationDownMirrored, // horizontal flip
15 | UIImageOrientationLeftMirrored, // vertical flip
16 | UIImageOrientationRightMirrored, // vertical flip
17 | };
18 | #endif
19 |
20 | dispatch_queue_attr_t GPUImageDefaultQueueAttribute(void);
21 | void runOnMainQueueWithoutDeadlocking(void (^block)(void));
22 | void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
23 | void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));
24 | void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
25 | void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
26 | void reportAvailableMemoryForGPUImage(NSString *tag);
27 |
28 | @class GPUImageMovieWriter;
29 |
30 | /** GPUImage's base source object
31 |
32 | Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:
33 |
34 | - GPUImageVideoCamera (for live video from an iOS camera)
35 | - GPUImageStillCamera (for taking photos with the camera)
36 | - GPUImagePicture (for still images)
37 | - GPUImageMovie (for movies)
38 |
39 | Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
40 | */
41 | @interface GPUImageOutput : NSObject
42 | {
43 | GPUImageFramebuffer *outputFramebuffer;
44 |
45 | NSMutableArray *targets, *targetTextureIndices;
46 |
47 | CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;
48 |
49 | BOOL overrideInputSize;
50 |
51 | BOOL allTargetsWantMonochromeData;
52 | BOOL usingNextFrameForImageCapture;
53 | }
54 |
55 | @property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
56 | @property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
57 | @property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
58 | @property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
59 | @property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
60 | @property(nonatomic) BOOL enabled;
61 | @property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;
62 |
63 | /// @name Managing targets
64 | - (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
65 | - (GPUImageFramebuffer *)framebufferForOutput;
66 | - (void)removeOutputFramebuffer;
67 | - (void)notifyTargetsAboutNewOutputTexture;
68 |
69 | /** Returns an array of the current targets.
70 | */
71 | - (NSArray*)targets;
72 |
73 | /** Adds a target to receive notifications when new frames are available.
74 |
75 | The target will be asked for its next available texture.
76 |
77 | See [GPUImageInput newFrameReadyAtTime:]
78 |
79 | @param newTarget Target to be added
80 | */
81 | - (void)addTarget:(id<GPUImageInput>)newTarget;
82 |
83 | /** Adds a target to receive notifications when new frames are available.
84 |
85 | See [GPUImageInput newFrameReadyAtTime:]
86 |
87 | @param newTarget Target to be added
88 | */
89 | - (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
90 |
91 | /** Removes a target. The target will no longer receive notifications when new frames are available.
92 |
93 | @param targetToRemove Target to be removed
94 | */
95 | - (void)removeTarget:(id<GPUImageInput>)targetToRemove;
96 |
97 | /** Removes all targets.
98 | */
99 | - (void)removeAllTargets;
100 |
101 | /// @name Manage the output texture
102 |
103 | - (void)forceProcessingAtSize:(CGSize)frameSize;
104 | - (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
105 |
106 | /// @name Still image processing
107 |
108 | - (void)useNextFrameForImageCapture;
109 | - (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
110 | - (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
111 |
112 | // Platform-specific image output methods
113 | // To use these methods, remember that you need to call -useNextFrameForImageCapture before running -processImage (or before the video frame you want to capture), or you will get a nil image
114 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
115 | - (UIImage *)imageFromCurrentFramebuffer;
116 | - (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
117 | - (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
118 | - (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
119 | #else
120 | - (NSImage *)imageFromCurrentFramebuffer;
121 | - (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
122 | - (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
123 | - (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
124 | #endif
125 |
126 | - (BOOL)providesMonochromeOutput;
127 |
128 | @end
129 |
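A minimal usage sketch (not from the original sources) of the still-image capture path referenced in the comment above: mark the final filter with -useNextFrameForImageCapture before triggering processing, then read a UIImage back from its framebuffer. `inputImage` is a placeholder UIImage, and GPUImagePicture / GPUImageSepiaFilter are other classes from this library.

    GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:inputImage];  // inputImage: placeholder UIImage
    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
    [stillImageSource addTarget:sepiaFilter];

    [sepiaFilter useNextFrameForImageCapture];      // must precede -processImage, or the capture returns nil
    [stillImageSource processImage];

    UIImage *filteredImage = [sepiaFilter imageFromCurrentFramebuffer];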
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageOverlayBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageParallelCoordinateLineTransformFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | // This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene.
4 | //
5 | // It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications:
6 | // M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
7 | // M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), pp. 1489-1494.
8 |
9 | @interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter
10 | {
11 | GLubyte *rawImagePixels;
12 | GLfloat *lineCoordinates;
13 | unsigned int maxLinePairsToRender, linePairsToRender;
14 | }
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePerlinNoiseFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePerlinNoiseFilter : GPUImageFilter
4 | {
5 | GLint scaleUniform, colorStartUniform, colorFinishUniform;
6 | }
7 |
8 | @property (readwrite, nonatomic) GPUVector4 colorStart;
9 | @property (readwrite, nonatomic) GPUVector4 colorFinish;
10 |
11 | @property (readwrite, nonatomic) float scale;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePicture+TextureSubimage.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImagePicture+TextureSubimage.h
3 | // GPUImage
4 | //
5 | // Created by Jack Wu on 2014-05-28.
6 | // Copyright (c) 2014 Brad Larson. All rights reserved.
7 | //
8 |
9 | #import "GPUImagePicture.h"
10 |
11 | @interface GPUImagePicture (TextureSubimage)
12 |
13 | - (void)replaceTextureWithSubimage:(UIImage*)subimage;
14 | - (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource;
15 |
16 | - (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect;
17 | - (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePicture.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h>
2 | #import "GPUImageOutput.h"
3 |
4 |
5 | @interface GPUImagePicture : GPUImageOutput
6 | {
7 | CGSize pixelSizeOfImage;
8 | BOOL hasProcessedImage;
9 |
10 | dispatch_semaphore_t imageUpdateSemaphore;
11 | }
12 |
13 | // Initialization and teardown
14 | - (id)initWithURL:(NSURL *)url;
15 | - (id)initWithImage:(UIImage *)newImageSource;
16 | - (id)initWithCGImage:(CGImageRef)newImageSource;
17 | - (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
18 | - (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
19 | - (id)initWithImage:(UIImage *)newImageSource removePremultiplication:(BOOL)removePremultiplication;
20 | - (id)initWithCGImage:(CGImageRef)newImageSource removePremultiplication:(BOOL)removePremultiplication;
21 | - (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;
22 | - (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;
23 |
24 | // Image rendering
25 | - (void)processImage;
26 | - (CGSize)outputImageSize;
27 |
28 | /**
29 | * Process image with all targets and filters asynchronously
30 | * The completion handler is called after processing finished in the
31 | * GPU's dispatch queue - and only if this method did not return NO.
32 | *
33 | * @returns NO if resource is blocked and processing is discarded, YES otherwise
34 | */
35 | - (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;
36 | - (void)processImageUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;
37 |
38 | @end
39 |
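A minimal usage sketch (not from the original sources) of the asynchronous variant declared above, which hands back the processed UIImage on the GPU's dispatch queue. `stillImageSource`, `lastFilter`, and `imageView` are assumed to exist already.

    [stillImageSource processImageUpToFilter:lastFilter withCompletionHandler:^(UIImage *processedImage) {
        dispatch_async(dispatch_get_main_queue(), ^{
            imageView.image = processedImage;       // hop back to the main queue before touching UIKit
        });
    }];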
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePinchDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a pinch distortion of the image
4 | */
5 | @interface GPUImagePinchDistortionFilter : GPUImageFilter
6 | {
7 | GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
8 | }
9 |
10 | /** The center about which to apply the distortion, with a default of (0.5, 0.5)
11 | */
12 | @property(readwrite, nonatomic) CGPoint center;
13 | /** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0
14 | */
15 | @property(readwrite, nonatomic) CGFloat radius;
16 | /** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5
17 | */
18 | @property(readwrite, nonatomic) CGFloat scale;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePixellateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePixellateFilter : GPUImageFilter
4 | {
5 | GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;
6 | }
7 |
8 | // The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
9 | @property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
10 |
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePixellatePositionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePixellatePositionFilter : GPUImageFilter
4 | {
5 | GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;
6 | }
7 |
8 | // The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
9 | @property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
10 |
11 | // the center point to start pixelation in texture coordinates, default 0.5, 0.5
12 | @property(readwrite, nonatomic) CGPoint center;
13 |
14 | // the radius (0.0 - 1.0) in which to pixelate, default 1.0
15 | @property(readwrite, nonatomic) CGFloat radius;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePoissonBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputCrossTextureSamplingFilter.h"
2 | #import "GPUImageFilterGroup.h"
3 |
4 | @interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter
5 | {
6 | GLint mixUniform;
7 |
8 | GPUImageFramebuffer *secondOutputFramebuffer;
9 | }
10 |
11 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
12 | @property(readwrite, nonatomic) CGFloat mix;
13 |
14 | // The number of times to propagate the gradients.
15 | // Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
16 | @property(readwrite, nonatomic) NSUInteger numIterations;
17 |
18 | @end
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePolarPixellateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePolarPixellateFilter : GPUImageFilter {
4 | GLint centerUniform, pixelSizeUniform;
5 | }
6 |
7 | // The center about which to apply the distortion, with a default of (0.5, 0.5)
8 | @property(readwrite, nonatomic) CGPoint center;
9 | // The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)
10 | @property(readwrite, nonatomic) CGSize pixelSize;
11 |
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePolkaDotFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImagePixellateFilter.h"
2 |
3 | @interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
4 | {
5 | GLint dotScalingUniform;
6 | }
7 |
8 | @property(readwrite, nonatomic) CGFloat dotScaling;
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePosterizeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
4 | */
5 | @interface GPUImagePosterizeFilter : GPUImageFilter
6 | {
7 | GLint colorLevelsUniform;
8 | }
9 |
10 | /** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
11 | */
12 | @property(readwrite, nonatomic) NSUInteger colorLevels;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImagePrewittEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRGBClosingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageRGBErosionFilter;
4 | @class GPUImageRGBDilationFilter;
5 |
6 | // A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius.
7 | // This helps to filter out smaller dark elements.
8 |
9 | @interface GPUImageRGBClosingFilter : GPUImageFilterGroup
10 | {
11 | GPUImageRGBErosionFilter *erosionFilter;
12 | GPUImageRGBDilationFilter *dilationFilter;
13 | }
14 |
15 | - (id)initWithRadius:(NSUInteger)radius;
16 |
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRGBDilationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
4 | // This extends out brighter colors, and can be used for abstraction of color images.
5 |
6 | @interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter
7 |
8 | // Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
9 | - (id)initWithRadius:(NSUInteger)dilationRadius;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRGBErosionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
4 | // This extends out dark features, and can be used for abstraction of color images.
5 |
6 | @interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter
7 |
8 | // Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
9 | - (id)initWithRadius:(NSUInteger)erosionRadius;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRGBFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageRGBFilter : GPUImageFilter
4 | {
5 | GLint redUniform;
6 | GLint greenUniform;
7 | GLint blueUniform;
8 | }
9 |
10 | // Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
11 | @property (readwrite, nonatomic) CGFloat red;
12 | @property (readwrite, nonatomic) CGFloat green;
13 | @property (readwrite, nonatomic) CGFloat blue;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRGBOpeningFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageRGBErosionFilter;
4 | @class GPUImageRGBDilationFilter;
5 |
6 | // A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius.
7 | // This helps to filter out smaller bright elements.
8 |
9 | @interface GPUImageRGBOpeningFilter : GPUImageFilterGroup
10 | {
11 | GPUImageRGBErosionFilter *erosionFilter;
12 | GPUImageRGBDilationFilter *dilationFilter;
13 | }
14 |
15 | - (id)initWithRadius:(NSUInteger)radius;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRawDataInput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | // The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.
4 | // The bytes are uploaded and stored within a texture, so nothing is kept locally.
5 | // The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:
6 | // The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:
7 |
8 | typedef enum {
9 | GPUPixelFormatBGRA = GL_BGRA,
10 | GPUPixelFormatRGBA = GL_RGBA,
11 | GPUPixelFormatRGB = GL_RGB,
12 | GPUPixelFormatLuminance = GL_LUMINANCE
13 | } GPUPixelFormat;
14 |
15 | typedef enum {
16 | GPUPixelTypeUByte = GL_UNSIGNED_BYTE,
17 | GPUPixelTypeFloat = GL_FLOAT
18 | } GPUPixelType;
19 |
20 | @interface GPUImageRawDataInput : GPUImageOutput
21 | {
22 | CGSize uploadedImageSize;
23 |
24 | dispatch_semaphore_t dataUpdateSemaphore;
25 | }
26 |
27 | // Initialization and teardown
28 | - (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
29 | - (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
30 | - (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;
31 |
32 | /** Input data pixel format
33 | */
34 | @property (readwrite, nonatomic) GPUPixelFormat pixelFormat;
35 | @property (readwrite, nonatomic) GPUPixelType pixelType;
36 |
37 | // Image rendering
38 | - (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
39 | - (void)processData;
40 | - (void)processDataForTimestamp:(CMTime)frameTime;
41 | - (CGSize)outputImageSize;
42 |
43 | @end
44 |
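A minimal usage sketch (not from the original sources): uploading a raw BGRA buffer. The 256x256 buffer filled with 128s is a placeholder, and `someFilter` is any assumed downstream filter.

    CGSize imageSize = CGSizeMake(256.0, 256.0);
    size_t byteCount = (size_t)imageSize.width * (size_t)imageSize.height * 4;
    GLubyte *imageBytes = (GLubyte *)malloc(byteCount);
    memset(imageBytes, 128, byteCount);                       // fill every channel (including alpha) with 128

    GPUImageRawDataInput *rawDataInput = [[GPUImageRawDataInput alloc] initWithBytes:imageBytes
                                                                                 size:imageSize
                                                                          pixelFormat:GPUPixelFormatBGRA];
    [rawDataInput addTarget:someFilter];                      // someFilter: assumed downstream filter
    [rawDataInput processData];

    free(imageBytes);   // safe per the notes above: the bytes live on in a texture once uploaded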
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageRawDataOutput.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import "GPUImageContext.h"
3 |
4 | struct GPUByteColorVector {
5 | GLubyte red;
6 | GLubyte green;
7 | GLubyte blue;
8 | GLubyte alpha;
9 | };
10 | typedef struct GPUByteColorVector GPUByteColorVector;
11 |
12 | @protocol GPUImageRawDataProcessor;
13 |
14 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
15 | @interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
16 | CGSize imageSize;
17 | GPUImageRotationMode inputRotation;
18 | BOOL outputBGRA;
19 | }
20 | #else
21 | @interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
22 | CGSize imageSize;
23 | GPUImageRotationMode inputRotation;
24 | BOOL outputBGRA;
25 | }
26 | #endif
27 |
28 | @property(readonly) GLubyte *rawBytesForImage;
29 | @property(nonatomic, copy) void(^newFrameAvailableBlock)(void);
30 | @property(nonatomic) BOOL enabled;
31 |
32 | // Initialization and teardown
33 | - (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
34 |
35 | // Data access
36 | - (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
37 | - (NSUInteger)bytesPerRowInOutput;
38 |
39 | - (void)setImageSize:(CGSize)newImageSize;
40 |
41 | - (void)lockFramebufferForReading;
42 | - (void)unlockFramebufferAfterReading;
43 |
44 | @end
45 |
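A minimal usage sketch (not from the original sources): reading pixels back on every new frame using the block and lock/unlock pair declared above. `filter` is an assumed upstream filter producing 640x480 output; the weak reference avoids a retain cycle through the block.

    GPUImageRawDataOutput *rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(640.0, 480.0)
                                                                         resultsInBGRAFormat:YES];
    [filter addTarget:rawDataOutput];                          // filter: assumed upstream filter

    __weak GPUImageRawDataOutput *weakOutput = rawDataOutput;
    [rawDataOutput setNewFrameAvailableBlock:^{
        [weakOutput lockFramebufferForReading];
        GPUByteColorVector centerColor = [weakOutput colorAtLocation:CGPointMake(320.0, 240.0)];
        NSLog(@"Center pixel RGBA: %d %d %d %d",
              centerColor.red, centerColor.green, centerColor.blue, centerColor.alpha);
        [weakOutput unlockFramebufferAfterReading];
    }];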
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSaturationBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSaturationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Adjusts the saturation of an image
4 | */
5 | @interface GPUImageSaturationFilter : GPUImageFilter
6 | {
7 | GLint saturationUniform;
8 | }
9 |
10 | /** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level
11 | */
12 | @property(readwrite, nonatomic) CGFloat saturation;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageScreenBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSepiaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorMatrixFilter.h"
2 |
3 | /// Simple sepia tone filter
4 | @interface GPUImageSepiaFilter : GPUImageColorMatrixFilter
5 |
6 | @end
7 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSharpenFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageSharpenFilter : GPUImageFilter
4 | {
5 | GLint sharpnessUniform;
6 | GLint imageWidthFactorUniform, imageHeightFactorUniform;
7 | }
8 |
9 | // Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
10 | @property(readwrite, nonatomic) CGFloat sharpness;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageShiTomasiFeatureDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageHarrisCornerDetectionFilter.h"
2 |
3 | /** Shi-Tomasi feature detector
4 |
5 | This is the Shi-Tomasi feature detector, as described in
6 | J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.
7 | */
8 |
9 | @interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter
10 |
11 | // Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSingleComponentGaussianBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageGaussianBlurFilter.h"
2 |
3 | // This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)
4 |
5 | @interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSketchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | /** Converts video to look like a sketch.
4 |
5 | This is just the Sobel edge detection filter with the colors inverted.
6 | */
7 | @interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
8 | {
9 | }
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSmoothToonFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 | @class GPUImageToonFilter;
5 |
6 | /** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
7 | */
8 | @interface GPUImageSmoothToonFilter : GPUImageFilterGroup
9 | {
10 | GPUImageGaussianBlurFilter *blurFilter;
11 | GPUImageToonFilter *toonFilter;
12 | }
13 |
14 | /// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
15 | @property(readwrite, nonatomic) CGFloat texelWidth;
16 | /// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
17 | @property(readwrite, nonatomic) CGFloat texelHeight;
18 |
19 | /// The radius of the underlying Gaussian blur. The default is 2.0.
20 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
21 |
22 | /// The threshold at which to apply the edges, default of 0.2
23 | @property(readwrite, nonatomic) CGFloat threshold;
24 |
25 | /// The levels of quantization for the posterization of colors within the scene, with a default of 10.0
26 | @property(readwrite, nonatomic) CGFloat quantizationLevels;
27 |
28 | @end
29 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSobelEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassFilter.h"
2 |
3 | @interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform;
6 | BOOL hasOverriddenImageSizeFactor;
7 | }
8 |
9 | // The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels
10 | @property(readwrite, nonatomic) CGFloat texelWidth;
11 | @property(readwrite, nonatomic) CGFloat texelHeight;
12 |
13 | // The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.
14 | @property(readwrite, nonatomic) CGFloat edgeStrength;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSoftEleganceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Soft Elegance Photoshop action
6 | http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603
7 | */
8 |
9 | // Note: If you want to use this effect you have to add
10 | // lookup_soft_elegance_1.png and lookup_soft_elegance_2.png
11 | // from Resources folder to your application bundle.
12 |
13 | @interface GPUImageSoftEleganceFilter : GPUImageFilterGroup
14 | {
15 | GPUImagePicture *lookupImageSource1;
16 | GPUImagePicture *lookupImageSource2;
17 | }
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSoftLightBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSolarizeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Pixels with a luminance above the threshold will invert their color
4 | */
5 | @interface GPUImageSolarizeFilter : GPUImageFilter
6 | {
7 | GLint thresholdUniform;
8 | }
9 |
10 | /** Anything above this luminance will be inverted, and anything below will remain normal. Ranges from 0.0 to 1.0, with 0.5 as the default
11 | */
12 | @property(readwrite, nonatomic) CGFloat threshold;
13 |
14 | @end
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSolidColorGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | // This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
4 | // dimensions, or this won't work correctly
5 |
6 |
7 | @interface GPUImageSolidColorGenerator : GPUImageFilter
8 | {
9 | GLint colorUniform;
10 | GLint useExistingAlphaUniform;
11 | }
12 |
13 | // This color dictates what the output image will be filled with
14 | @property(readwrite, nonatomic) GPUVector4 color;
15 | @property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO
16 |
17 | - (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
18 |
19 | @end
20 |
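A minimal usage sketch (not from the original sources): per the note above, force an output size first, then set the fill color. `blendFilter` is an assumed two-input blend (for example a GPUImageAlphaBlendFilter) that the solid color feeds as its second input.

    GPUImageSolidColorGenerator *solidColorSource = [[GPUImageSolidColorGenerator alloc] init];
    [solidColorSource forceProcessingAtSize:CGSizeMake(320.0, 240.0)];   // required: the generator has no input to size from
    [solidColorSource setColorRed:1.0 green:0.0 blue:0.0 alpha:1.0];     // opaque red fill

    [solidColorSource addTarget:blendFilter atTextureLocation:1];        // blendFilter: assumed two-input blend filter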
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSourceOverBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSphereRefractionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageSphereRefractionFilter : GPUImageFilter
4 | {
5 | GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
6 | }
7 |
8 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
9 | @property(readwrite, nonatomic) CGPoint center;
10 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
11 | @property(readwrite, nonatomic) CGFloat radius;
12 | /// The index of refraction for the sphere, with a default of 0.71
13 | @property(readwrite, nonatomic) CGFloat refractiveIndex;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageStillCamera.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageVideoCamera.h"
2 |
3 | void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress);
4 | void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer);
5 |
6 | @interface GPUImageStillCamera : GPUImageVideoCamera
7 |
8 | /** The JPEG compression quality to use when capturing a photo as a JPEG.
9 | */
10 | @property CGFloat jpegCompressionQuality;
11 |
12 | // Only reliably set inside the context of the completion handler of one of the capture methods
13 | @property (readonly) NSDictionary *currentCaptureMetadata;
14 |
15 | // Photography controls
16 | - (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;
17 | - (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
18 | - (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
19 | - (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
20 | - (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
21 | - (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
22 | - (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
23 |
24 | @end
25 |
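A minimal usage sketch (not from the original sources): capturing a filtered photo as JPEG data. `photoURL` is a placeholder destination, and the orientation property is assumed to come from the GPUImageVideoCamera superclass.

    GPUImageStillCamera *stillCamera = [[GPUImageStillCamera alloc] init];
    stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
    [stillCamera addTarget:sepiaFilter];
    [stillCamera startCameraCapture];

    [stillCamera capturePhotoAsJPEGProcessedUpToFilter:sepiaFilter
                                  withCompletionHandler:^(NSData *processedJPEG, NSError *error) {
        if (error == nil) {
            [processedJPEG writeToURL:photoURL atomically:YES];          // photoURL: placeholder file URL
        }
    }];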
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageStretchDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a stretch distortion of the image
4 | */
5 | @interface GPUImageStretchDistortionFilter : GPUImageFilter {
6 | GLint centerUniform;
7 | }
8 |
9 | /** The center about which to apply the distortion, with a default of (0.5, 0.5)
10 | */
11 | @property(readwrite, nonatomic) CGPoint center;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSubtractBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageSwirlFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a swirl distortion on the image
4 | */
5 | @interface GPUImageSwirlFilter : GPUImageFilter
6 | {
7 | GLint radiusUniform, centerUniform, angleUniform;
8 | }
9 |
10 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
11 | @property(readwrite, nonatomic) CGPoint center;
12 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5
13 | @property(readwrite, nonatomic) CGFloat radius;
14 | /// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0
15 | @property(readwrite, nonatomic) CGFloat angle;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTextureInput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @interface GPUImageTextureInput : GPUImageOutput
4 | {
5 | CGSize textureSize;
6 | }
7 |
8 | // Initialization and teardown
9 | - (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
10 |
11 | // Image rendering
12 | - (void)processTextureWithFrameTime:(CMTime)frameTime;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTextureOutput.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import "GPUImageContext.h"
3 |
4 | @protocol GPUImageTextureOutputDelegate;
5 |
6 | @interface GPUImageTextureOutput : NSObject
7 | {
8 | GPUImageFramebuffer *firstInputFramebuffer;
9 | }
10 |
11 | @property(readwrite, unsafe_unretained, nonatomic) id delegate;
12 | @property(readonly) GLuint texture;
13 | @property(nonatomic) BOOL enabled;
14 |
15 | - (void)doneWithTexture;
16 |
17 | @end
18 |
19 | @protocol GPUImageTextureOutputDelegate
20 | - (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
21 | @end
22 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageThreeInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | extern NSString *const kGPUImageThreeInputTextureVertexShaderString;
4 |
5 | @interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
6 | {
7 | GPUImageFramebuffer *thirdInputFramebuffer;
8 |
9 | GLint filterThirdTextureCoordinateAttribute;
10 | GLint filterInputTextureUniform3;
11 | GPUImageRotationMode inputRotation3;
12 | GLuint filterSourceTexture3;
13 | CMTime thirdFrameTime;
14 |
15 | BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
16 | BOOL thirdFrameCheckDisabled;
17 | }
18 |
19 | - (void)disableThirdFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageThresholdEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
4 | {
5 | GLint thresholdUniform;
6 | }
7 |
8 | /** Any edge above this threshold will be black, and anything below will be white. Ranges from 0.0 to 1.0, with 0.8 as the default
9 | */
10 | @property(readwrite, nonatomic) CGFloat threshold;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageThresholdSketchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageThresholdEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageThresholdedNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
4 | {
5 | GLint thresholdUniform;
6 | }
7 |
8 | /** Any local maximum above this threshold will be white, and anything below will be black. Ranges from 0.0 to 1.0, with 0.8 as the default
9 | */
10 | @property(readwrite, nonatomic) CGFloat threshold;
11 |
12 | - (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTiltShiftFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | /// A simulated tilt shift lens effect
6 | @interface GPUImageTiltShiftFilter : GPUImageFilterGroup
7 | {
8 | GPUImageGaussianBlurFilter *blurFilter;
9 | GPUImageFilter *tiltShiftFilter;
10 | }
11 |
12 | /// The radius of the underlying blur, in pixels. This is 7.0 by default.
13 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
14 |
15 | /// The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4
16 | @property(readwrite, nonatomic) CGFloat topFocusLevel;
17 |
18 | /// The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6
19 | @property(readwrite, nonatomic) CGFloat bottomFocusLevel;
20 |
21 | /// The rate at which the image gets blurry away from the in-focus region, default 0.2
22 | @property(readwrite, nonatomic) CGFloat focusFallOffRate;
23 |
24 | @end
25 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageToneCurveFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageToneCurveFilter : GPUImageFilter
4 |
5 | @property(readwrite, nonatomic, copy) NSArray *redControlPoints;
6 | @property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
7 | @property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
8 | @property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;
9 |
10 | // Initialization and teardown
11 | - (id)initWithACVData:(NSData*)data;
12 |
13 | - (id)initWithACV:(NSString*)curveFilename;
14 | - (id)initWithACVURL:(NSURL*)curveFileURL;
15 |
16 | // This lets you set all three red, green, and blue tone curves at once.
17 | // NOTE: This method is deprecated because the same effect can be accomplished
18 | // using the rgbComposite control points rather than setting all three R, G, and B channels.
19 | - (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;
20 |
21 | - (void)setPointsWithACV:(NSString*)curveFilename;
22 | - (void)setPointsWithACVURL:(NSURL*)curveFileURL;
23 |
24 | // Curve calculation
25 | - (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
26 | - (NSMutableArray *)splineCurve:(NSArray *)points;
27 | - (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
28 | - (void)updateToneCurveTexture;
29 |
30 | @end
31 |
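A minimal usage sketch (not from the original sources) of the two ways to drive the curve: loading a Photoshop .acv file (the filename here is a placeholder), or setting normalized control points directly.

    // From a Photoshop .acv curve bundled with the app:
    GPUImageToneCurveFilter *acvCurve = [[GPUImageToneCurveFilter alloc] initWithACV:@"custom_curve"];  // placeholder name

    // Or by hand, e.g. lifting the midtones of the red channel:
    GPUImageToneCurveFilter *manualCurve = [[GPUImageToneCurveFilter alloc] init];
    manualCurve.redControlPoints = @[[NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)],
                                     [NSValue valueWithCGPoint:CGPointMake(0.5, 0.65)],
                                     [NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)]];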
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageToonFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | /** This uses Sobel edge detection to place a black border around objects,
4 | and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
5 | */
6 | @interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter
7 | {
8 | GLint thresholdUniform, quantizationLevelsUniform;
9 | }
10 |
11 | /** The threshold at which to apply the edges, default of 0.2
12 | */
13 | @property(readwrite, nonatomic) CGFloat threshold;
14 |
15 | /** The levels of quantization for the posterization of colors within the scene, with a default of 10.0
16 | */
17 | @property(readwrite, nonatomic) CGFloat quantizationLevels;
18 |
19 | @end
20 |
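A minimal sketch of wiring this filter between a camera and a view; videoCamera and previewView are assumed to be a GPUImageVideoCamera and a GPUImageView created elsewhere:

GPUImageToonFilter *toon = [[GPUImageToonFilter alloc] init];
toon.threshold = 0.2;            // edge sensitivity
toon.quantizationLevels = 10.0;  // number of color levels in the posterized output
[videoCamera addTarget:toon];
[toon addTarget:previewView];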
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTransformFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageTransformFilter : GPUImageFilter
4 | {
5 | GLint transformMatrixUniform, orthographicMatrixUniform;
6 | GPUMatrix4x4 orthographicMatrix;
7 | }
8 |
9 | // The transform to apply can be either a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).
10 | @property(readwrite, nonatomic) CGAffineTransform affineTransform;
11 | @property(readwrite, nonatomic) CATransform3D transform3D;
12 |
13 | // If set to YES, this applies the transform to the raw frame data; the default of NO takes the aspect ratio of the input image into account when rotating.
14 | @property(readwrite, nonatomic) BOOL ignoreAspectRatio;
15 |
16 | // sets the anchor point to top left corner
17 | @property(readwrite, nonatomic) BOOL anchorTopLeft;
18 |
19 | @end
20 |
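A brief sketch of setting either kind of transform (normally only one is set at a time):

GPUImageTransformFilter *transformFilter = [[GPUImageTransformFilter alloc] init];
transformFilter.affineTransform = CGAffineTransformMakeRotation(M_PI / 8.0); // 2-D rotation
// Or, for a 3-D transform instead:
// transformFilter.transform3D = CATransform3DMakeScale(0.75, 0.75, 1.0);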
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTwoInputCrossTextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 |
7 | CGFloat texelWidth, texelHeight;
8 | BOOL hasOverriddenImageSizeFactor;
9 | }
10 |
11 | // The texel width and height determine how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
12 | @property(readwrite, nonatomic) CGFloat texelWidth;
13 | @property(readwrite, nonatomic) CGFloat texelHeight;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTwoInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
4 |
5 | @interface GPUImageTwoInputFilter : GPUImageFilter
6 | {
7 | GPUImageFramebuffer *secondInputFramebuffer;
8 |
9 | GLint filterSecondTextureCoordinateAttribute;
10 | GLint filterInputTextureUniform2;
11 | GPUImageRotationMode inputRotation2;
12 | CMTime firstFrameTime, secondFrameTime;
13 |
14 | BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
15 | BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;
16 | }
17 |
18 | - (void)disableFirstFrameCheck;
19 | - (void)disableSecondFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTwoPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageTwoPassFilter : GPUImageFilter
4 | {
5 | GPUImageFramebuffer *secondOutputFramebuffer;
6 |
7 | GLProgram *secondFilterProgram;
8 | GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
9 | GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
10 |
11 | NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
12 | }
13 |
14 | // Initialization and teardown
15 | - (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
16 | - (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
17 | - (void)initializeSecondaryAttributes;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageTwoPassTextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassFilter.h"
2 |
3 | @interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
4 | {
5 | GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
6 | GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
7 | CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
8 | }
9 |
10 | // This sets the spacing between texels (in pixels) when sampling for the first and second passes. By default, this is 1.0
11 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageUIElement.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @interface GPUImageUIElement : GPUImageOutput
4 |
5 | // Initialization and teardown
6 | - (id)initWithView:(UIView *)inputView;
7 | - (id)initWithLayer:(CALayer *)inputLayer;
8 |
9 | // Layer management
10 | - (CGSize)layerSizeInPixels;
11 | - (void)update;
12 | - (void)updateUsingCurrentTime;
13 | - (void)updateWithTimestamp:(CMTime)frameTime;
14 |
15 | @end
16 |
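A minimal sketch of blending a UIKit view over live video; overlayView, videoCamera, and previewView are assumed placeholders (a UIView, a GPUImageVideoCamera, and a GPUImageView created elsewhere):

GPUImageUIElement *uiElement = [[GPUImageUIElement alloc] initWithView:overlayView];
GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
blendFilter.mix = 1.0;
[videoCamera addTarget:blendFilter]; // first blend input: camera frames
[uiElement addTarget:blendFilter];   // second blend input: rendered UI layer
[blendFilter addTarget:previewView];
__weak GPUImageUIElement *weakElement = uiElement;
[videoCamera setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
    [weakElement update]; // re-render the UI layer for each camera frame
}];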
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageUnsharpMaskFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | @interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
6 | {
7 | GPUImageGaussianBlurFilter *blurFilter;
8 | GPUImageFilter *unsharpMaskFilter;
9 | }
10 | // The blur radius of the underlying Gaussian blur. The default is 4.0.
11 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
12 |
13 | // The strength of the sharpening, from 0.0 on up, with a default of 1.0
14 | @property(readwrite, nonatomic) CGFloat intensity;
15 |
16 | @end
17 |
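A short configuration sketch with illustrative values:

GPUImageUnsharpMaskFilter *unsharp = [[GPUImageUnsharpMaskFilter alloc] init];
unsharp.blurRadiusInPixels = 4.0; // radius of the underlying Gaussian blur
unsharp.intensity = 1.5;          // amount of sharpening applied on top of the blur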
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageVideoCamera.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <AVFoundation/AVFoundation.h>
3 | #import <CoreMedia/CoreMedia.h>
4 | #import "GPUImageContext.h"
5 | #import "GPUImageOutput.h"
6 | #import "GPUImageColorConversion.h"
7 |
8 | //Optionally override the YUV to RGB matrices
9 | void setColorConversion601( GLfloat conversionMatrix[9] );
10 | void setColorConversion601FullRange( GLfloat conversionMatrix[9] );
11 | void setColorConversion709( GLfloat conversionMatrix[9] );
12 |
13 |
14 | //Delegate protocol for face detection.
15 | @protocol GPUImageVideoCameraDelegate <NSObject>
16 |
17 | @optional
18 | - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
19 | @end
20 |
21 |
22 | /**
23 | A GPUImageOutput that provides frames from either camera
24 | */
25 | @interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
26 | {
27 | NSUInteger numberOfFramesCaptured;
28 | CGFloat totalFrameTimeDuringCapture;
29 |
30 | AVCaptureSession *_captureSession;
31 | AVCaptureDevice *_inputCamera;
32 | AVCaptureDevice *_microphone;
33 | AVCaptureDeviceInput *videoInput;
34 | AVCaptureVideoDataOutput *videoOutput;
35 |
36 | BOOL capturePaused;
37 | GPUImageRotationMode outputRotation, internalRotation;
38 | dispatch_semaphore_t frameRenderingSemaphore;
39 |
40 | BOOL captureAsYUV;
41 | GLuint luminanceTexture, chrominanceTexture;
42 |
43 | __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;
44 | }
45 |
46 | /// Whether or not the underlying AVCaptureSession is running
47 | @property(readonly, nonatomic) BOOL isRunning;
48 |
49 | /// The AVCaptureSession used to capture from the camera
50 | @property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
51 |
52 | /// This enables the capture session preset to be changed on the fly
53 | @property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
54 |
55 | /// This sets the frame rate of the camera (iOS 5 and above only)
56 | /**
57 | Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
58 | */
59 | @property (readwrite) int32_t frameRate;
60 |
61 | /// Easy way to tell which cameras are present on device
62 | @property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
63 | @property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
64 |
65 | /// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
66 | @property(readwrite, nonatomic) BOOL runBenchmark;
67 |
68 | /// Use this property to manage camera settings. Focus point, exposure point, etc.
69 | @property(readonly) AVCaptureDevice *inputCamera;
70 |
71 | /// This determines the rotation applied to the output image, based on the source material
72 | @property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
73 |
74 | /// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
75 | @property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
76 |
77 | @property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;
78 |
79 | /// @name Initialization and teardown
80 |
81 | /** Begin a capture session
82 |
83 | See AVCaptureSession for acceptable values
84 |
85 | @param sessionPreset Session preset to use
86 | @param cameraPosition Camera to capture from
87 | */
88 | - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
89 |
90 | /** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
91 | can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
92 | later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.
93 | */
94 | - (BOOL)addAudioInputsAndOutputs;
95 |
96 | /** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
97 | were removed, or NO if they hadn't already been added.
98 | */
99 | - (BOOL)removeAudioInputsAndOutputs;
100 |
101 | /** Tear down the capture session
102 | */
103 | - (void)removeInputsAndOutputs;
104 |
105 | /// @name Manage the camera video stream
106 |
107 | /** Start camera capturing
108 | */
109 | - (void)startCameraCapture;
110 |
111 | /** Stop camera capturing
112 | */
113 | - (void)stopCameraCapture;
114 |
115 | /** Pause camera capturing
116 | */
117 | - (void)pauseCameraCapture;
118 |
119 | /** Resume camera capturing
120 | */
121 | - (void)resumeCameraCapture;
122 |
123 | /** Process a video sample
124 | @param sampleBuffer Buffer to process
125 | */
126 | - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
127 |
128 | /** Process an audio sample
129 | @param sampleBuffer Buffer to process
130 | */
131 | - (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
132 |
133 | /** Get the position (front, rear) of the source camera
134 | */
135 | - (AVCaptureDevicePosition)cameraPosition;
136 |
137 | /** Get the AVCaptureConnection of the source camera
138 | */
139 | - (AVCaptureConnection *)videoCaptureConnection;
140 |
141 | /** This flips between the front and rear cameras
142 | */
143 | - (void)rotateCamera;
144 |
145 | /// @name Benchmarking
146 |
147 | /** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
148 | */
149 | - (CGFloat)averageFrameDurationDuringCapture;
150 |
151 | - (void)resetBenchmarkAverage;
152 |
153 | + (BOOL)isBackFacingCameraPresent;
154 | + (BOOL)isFrontFacingCameraPresent;
155 |
156 | @end
157 |
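A minimal capture-pipeline sketch, assumed to run inside a view controller (hence self.view); the sepia filter is just an example target:

GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                                       cameraPosition:AVCaptureDevicePositionBack];
videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
GPUImageView *filterView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:filterView];
[videoCamera addTarget:sepiaFilter];
[sepiaFilter addTarget:filterView];
[videoCamera startCameraCapture];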
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageView.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h>
2 | #import "GPUImageContext.h"
3 |
4 | typedef NS_ENUM(NSUInteger, GPUImageFillModeType) {
5 | kGPUImageFillModeStretch, // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio
6 | kGPUImageFillModePreserveAspectRatio, // Maintains the aspect ratio of the source image, adding bars of the specified background color
7 | kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
8 | };
9 |
10 |
11 |
12 | /**
13 | UIView subclass to use as an endpoint for displaying GPUImage outputs
14 | */
15 | @interface GPUImageView : UIView <GPUImageInput>
16 | {
17 | GPUImageRotationMode inputRotation;
18 | }
19 |
20 | /** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio
21 | */
22 | @property(readwrite, nonatomic) GPUImageFillModeType fillMode;
23 |
24 | /** This calculates the current display size, in pixels, taking into account Retina scaling factors
25 | */
26 | @property(readonly, nonatomic) CGSize sizeInPixels;
27 |
28 | @property(nonatomic) BOOL enabled;
29 |
30 | /** Handling fill mode
31 |
32 | @param redComponent Red component for background color
33 | @param greenComponent Green component for background color
34 | @param blueComponent Blue component for background color
35 | @param alphaComponent Alpha component for background color
36 | */
37 | - (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
38 |
39 | - (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
40 |
41 | @end
42 |
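A minimal sketch of configuring this view inside a view controller; someFilter stands in for any GPUImageOutput created elsewhere:

GPUImageView *previewView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
previewView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;
[previewView setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];
[self.view addSubview:previewView];
[someFilter addTarget:previewView]; // any camera, movie, or filter output can target the view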
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageVignetteFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Performs a vignetting effect, fading out the image at the edges
4 | */
5 | @interface GPUImageVignetteFilter : GPUImageFilter
6 | {
7 | GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;
8 | }
9 |
10 | // the center for the vignette in tex coords (defaults to 0.5, 0.5)
11 | @property (nonatomic, readwrite) CGPoint vignetteCenter;
12 |
13 | // The color to use for the Vignette (defaults to black)
14 | @property (nonatomic, readwrite) GPUVector3 vignetteColor;
15 |
16 | // The normalized distance from the center where the vignette effect starts. Default of 0.5.
17 | @property (nonatomic, readwrite) CGFloat vignetteStart;
18 |
19 | // The normalized distance from the center where the vignette effect ends. Default of 0.75.
20 | @property (nonatomic, readwrite) CGFloat vignetteEnd;
21 |
22 | @end
23 |
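A brief configuration sketch with illustrative values:

GPUImageVignetteFilter *vignette = [[GPUImageVignetteFilter alloc] init];
vignette.vignetteCenter = CGPointMake(0.5, 0.5);
vignette.vignetteStart = 0.3;  // darkening begins closer to the center than the 0.5 default
vignette.vignetteEnd = 0.75;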
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageVoronoiConsumerFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
4 | {
5 | GLint sizeUniform;
6 | }
7 |
8 | @property (nonatomic, readwrite) CGSize sizeInPixels;
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageWeakPixelInclusionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageWhiteBalanceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 | /**
3 | * Created by Alaric Cole
4 |  * Allows adjustment of color temperature in terms of the lighting an image was effectively shot under. This means higher Kelvin values will warm the image, while lower values will cool it.
5 |
6 | */
7 | @interface GPUImageWhiteBalanceFilter : GPUImageFilter
8 | {
9 | GLint temperatureUniform, tintUniform;
10 | }
11 | //choose color temperature, in degrees Kelvin
12 | @property(readwrite, nonatomic) CGFloat temperature;
13 |
14 | //adjust tint to compensate
15 | @property(readwrite, nonatomic) CGFloat tint;
16 |
17 | @end
18 |
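A short sketch of the two tunable properties:

GPUImageWhiteBalanceFilter *whiteBalance = [[GPUImageWhiteBalanceFilter alloc] init];
whiteBalance.temperature = 6500.0; // higher Kelvin values warm the image, lower values cool it
whiteBalance.tint = 0.0;           // green/magenta compensation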
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageXYDerivativeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageZoomBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageZoomBlurFilter : GPUImageFilter
4 |
5 | /** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
6 | */
7 | @property (readwrite, nonatomic) CGFloat blurSize;
8 |
9 | /** The normalized center of the blur. (0.5, 0.5) by default
10 | */
11 | @property (readwrite, nonatomic) CGPoint blurCenter;
12 |
13 | @end
14 |
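A quick configuration sketch with illustrative values:

GPUImageZoomBlurFilter *zoomBlur = [[GPUImageZoomBlurFilter alloc] init];
zoomBlur.blurSize = 1.5;                     // stronger than the 1.0 default
zoomBlur.blurCenter = CGPointMake(0.5, 0.5); // blur radiates out from the image center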
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/Source/usr/GPUImageiOSBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageSaturationFilter;
4 | @class GPUImageGaussianBlurFilter;
5 | @class GPUImageLuminanceRangeFilter;
6 |
7 | @interface GPUImageiOSBlurFilter : GPUImageFilterGroup
8 | {
9 | GPUImageSaturationFilter *saturationFilter;
10 | GPUImageGaussianBlurFilter *blurFilter;
11 | GPUImageLuminanceRangeFilter *luminanceRangeFilter;
12 | }
13 |
14 | /** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
15 | */
16 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
17 |
18 | /** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
19 | */
20 | @property (readwrite, nonatomic) CGFloat saturation;
21 |
22 | /** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0
23 | */
24 | @property (readwrite, nonatomic) CGFloat downsampling;
25 |
26 |
27 | /** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
28 | */
29 | @property (readwrite, nonatomic) CGFloat rangeReductionFactor;
30 |
31 | @end
32 |
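A short configuration sketch using the documented defaults as a starting point:

GPUImageiOSBlurFilter *iosBlur = [[GPUImageiOSBlurFilter alloc] init];
iosBlur.blurRadiusInPixels = 12.0;
iosBlur.saturation = 0.8;
iosBlur.downsampling = 4.0;         // downsample before blurring to reduce GPU work
iosBlur.rangeReductionFactor = 0.6;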
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // VideoFilterText
4 | //
5 | // Created by zzjd on 2017/3/9.
6 | // Copyright © 2017 zzjd. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface ViewController : UIViewController
12 |
13 |
14 | @end
15 |
16 |
--------------------------------------------------------------------------------
/VideoFilterText/VideoFilterText/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // VideoFilterText
4 | //
5 | //
6 |
7 | #import "ViewController.h"
8 |
9 | #import "GPUImage.h"
10 |
11 | #import <AssetsLibrary/AssetsLibrary.h>
12 |
13 | #define WIDTH [UIScreen mainScreen].bounds.size.width
14 | #define HEIGHT [UIScreen mainScreen].bounds.size.height
15 |
16 |
17 | #define WINDOW [[UIApplication sharedApplication] keyWindow]
18 |
19 |
20 | @interface ViewController ()
21 |
22 | @property (nonatomic,strong)GPUImageMovie * gpuMovie;//takes over the video data
23 |
24 | @property (nonatomic,strong)GPUImageView * gpuView;//previews the video content
25 |
26 | @property (nonatomic,strong)GPUImageOutput * pixellateFilter;//currently selected video filter
27 |
28 | @property (nonatomic,strong)GPUImageMovieWriter * movieWriter;//writes the filtered video output
29 |
30 | @property (nonatomic,strong)UIScrollView * EditView;//filter-selection view
31 |
32 | @property (nonatomic,strong)NSArray * GPUImgArr;//array holding the available filters
33 |
34 | @property (nonatomic,copy)NSURL * filePath;//URL of the first video in the photo library
35 | @property (nonatomic,copy)NSString * fileSavePath;//path where the composed video is saved
36 |
37 | @property (nonatomic,strong)NSMutableDictionary * dic;//stores the filter used for the previous composition
38 |
39 | @property (nonatomic,assign)NSTimer * timer;//watchdog timer: re-composing with the same filter can take very long, so the UI is rebuilt after a timeout
40 | @property (nonatomic,assign)int timeNum;//elapsed time, in seconds
41 |
42 | @property (nonatomic,strong)UIView * hudView;//loading HUD
43 |
44 | @end
45 |
46 | @implementation ViewController
47 |
48 | - (void)viewDidLoad {
49 |
50 |
51 | [super viewDidLoad];
52 |
53 | self.view.backgroundColor = [UIColor whiteColor];
54 |
55 | _dic = [[NSMutableDictionary alloc]initWithDictionary:@{@"filter":@""}];
56 |
57 | [self getVideoUrl];//fetch the first video file from the system photo library
58 |
59 |
60 | // Do any additional setup after loading the view, typically from a nib.
61 | }
62 |
63 | -(void)getVideoUrl{
64 |
65 | NSString *tipTextWhenNoPhotosAuthorization; // prompt text shown when photo access is not authorized
66 | // Get the app's current photo library authorization status
67 | ALAuthorizationStatus authorizationStatus = [ALAssetsLibrary authorizationStatus];
68 | // If authorization has not been granted, or has been explicitly denied, build a prompt guiding the user to enable access
69 | if (authorizationStatus == ALAuthorizationStatusRestricted || authorizationStatus == ALAuthorizationStatusDenied) {
70 | NSDictionary *mainInfoDictionary = [[NSBundle mainBundle] infoDictionary];
71 | NSString *appName = [mainInfoDictionary objectForKey:@"CFBundleDisplayName"];
72 | tipTextWhenNoPhotosAuthorization = [NSString stringWithFormat:@"请在设备的\"设置-隐私-照片\"选项中,允许%@访问你的手机相册", appName];
73 | // Display the prompt
74 | }
75 |
76 | ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
77 |
78 | [assetsLibrary enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
79 | if (group) {
80 |
81 | [group setAssetsFilter:[ALAssetsFilter allVideos]];
82 | if (group.numberOfAssets > 0) {
83 |
84 | [group enumerateAssetsUsingBlock:^(ALAsset *result, NSUInteger index, BOOL *stop) {
85 |
86 | static int i = 1;
87 |
88 | if (i == 1) {
89 | i++;
90 | NSDateFormatter* dateFormatter = [[NSDateFormatter alloc] init];
91 | [dateFormatter setTimeZone:[NSTimeZone localTimeZone]];
92 | [dateFormatter setDateFormat:@"yyyyMMddHHmmss"]; //Note the format: MM is month, mm is minutes, HH is the 24-hour clock, lowercase hh is the 12-hour clock.
93 | NSString* dateString = [dateFormatter stringFromDate:[result valueForProperty:ALAssetPropertyDate]];
94 |
95 | if (dateString) {
96 | _filePath = result.defaultRepresentation.url;
97 | [self createUI];
98 | }
99 | }
100 | }];
101 | }
102 | }
103 |
104 | } failureBlock:^(NSError *error) {
105 | NSLog(@"Asset group not found!\n");
106 | }];
107 |
108 |
109 |
110 | }
111 |
112 | -(void)createUI{
113 |
114 | _gpuView = [[GPUImageView alloc]initWithFrame:CGRectMake(0, 0, WIDTH, HEIGHT-200)];
115 | //Optionally set the rotation of the preview view:
116 | // [_gpuView setInputRotation:kGPUImageRotateRight atIndex:0];
117 |
118 | [self.view addSubview:_gpuView];
119 |
120 |
121 | NSLog(@"filePath = %@",_filePath);
122 |
123 | _gpuMovie = [[GPUImageMovie alloc]initWithURL:_filePath];
124 | _gpuMovie.shouldRepeat = YES;//loop playback
125 |
126 | [_gpuMovie addTarget:_gpuView];
127 |
128 | [_gpuMovie startProcessing];
129 |
130 | [self createEditView];
131 |
132 | UIButton * composeBtn = [UIButton buttonWithType:UIButtonTypeCustom];
133 |
134 | composeBtn.frame = CGRectMake(30, HEIGHT-80, WIDTH-60, 40);
135 |
136 | composeBtn.backgroundColor = [UIColor blackColor];
137 |
138 | [composeBtn setTitle:@"合成" forState:UIControlStateNormal];
139 |
140 | [composeBtn addTarget:self action:@selector(composeBtnClick:) forControlEvents:UIControlEventTouchUpInside];
141 |
142 | [self.view addSubview:composeBtn];
143 |
144 | }
145 |
146 |
147 |
148 | #pragma mark --------------------------- Filter selection ----------------------------
149 |
150 | -(void)effectImgClick:(UIButton *)button{
151 |
152 | for (int i = 0 ; i<_GPUImgArr.count ;i++) {
153 | UIButton *btn = [_EditView viewWithTag:1000+i];
154 | btn.layer.borderWidth = 0;
155 | btn.userInteractionEnabled = YES;
156 | }
157 | button.userInteractionEnabled = NO;
158 | button.layer.borderWidth = 2;
159 | button.layer.borderColor = [UIColor redColor].CGColor;
160 |
161 |
162 | [_gpuMovie cancelProcessing];
163 | [_gpuMovie removeAllTargets];
164 |
165 | _gpuMovie = [[GPUImageMovie alloc]initWithURL:_filePath];
166 |
167 |
168 | if (button.tag == 1000) {
169 | _pixellateFilter = nil;
170 | [_gpuMovie addTarget:_gpuView];
171 |
172 | }else{
173 | _pixellateFilter = (GPUImageOutput *)[_GPUImgArr[button.tag-1000] objectForKey:@"filter"];
174 | [_gpuMovie addTarget:_pixellateFilter];
175 | [_pixellateFilter addTarget:_gpuView];
176 | }
177 |
178 | [_gpuMovie startProcessing];
179 |
180 | }
181 |
182 |
183 | #pragma mark ---------------------------- Compose-video button handler -------------------------
184 | -(void)composeBtnClick:(UIButton *)btn{
185 | NSLog(@"开始合成");
186 | if ((_pixellateFilter == nil)|| (_pixellateFilter == _dic[@"filter"] )) {
187 | NSLog(@"未选择滤镜、或者与上个滤镜重复。请换个滤镜");
188 |
189 | }else{
190 | [self createHudView];
191 | _timeNum = 0;
192 | _timer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(timeRun) userInfo:nil repeats:YES];
193 |
194 | NSURL *movieURL = [NSURL fileURLWithPath:self.fileSavePath];
195 |
196 | _movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(WIDTH, HEIGHT-200)];//output file URL and output video size
197 |
198 | [_pixellateFilter addTarget:_movieWriter];
199 |
200 | _movieWriter.shouldPassthroughAudio = YES;
201 |
202 | [_gpuMovie enableSynchronizedEncodingUsingMovieWriter:_movieWriter];
203 | [_movieWriter startRecording];
204 |
205 |
206 | __weak ViewController * weakSelf = self;
207 |
208 | [_movieWriter setFailureBlock:^(NSError *error) {
209 | NSLog(@"合成失败 173:error = %@",error.description);
210 |
211 | dispatch_async(dispatch_get_main_queue(), ^{
212 |
213 | weakSelf.hudView.hidden = YES;
214 |
215 | [weakSelf.pixellateFilter removeTarget:weakSelf.movieWriter];
216 | [weakSelf.dic setObject:weakSelf.pixellateFilter forKey:@"filter"];
217 |
218 | [weakSelf.movieWriter finishRecording];
219 |
220 | [weakSelf.timer setFireDate:[NSDate distantFuture]];
221 |
222 | });
223 | }];
224 |
225 | [_movieWriter setCompletionBlock:^{
226 | NSLog(@"视频合成结束: 188 ");
227 |
228 | dispatch_async(dispatch_get_main_queue(), ^{
229 | weakSelf.hudView.hidden = YES;
230 |
231 | [weakSelf.pixellateFilter removeTarget:weakSelf.movieWriter];
232 | [weakSelf.dic setObject:weakSelf.pixellateFilter forKey:@"filter"];
233 | [weakSelf.movieWriter finishRecording];
234 |
235 | [weakSelf.timer setFireDate:[NSDate distantFuture]];
236 |
237 |
238 | });
239 | }];
240 |
241 |
242 | }
243 |
244 |
245 | }
246 |
247 |
248 | #pragma mark ----------------------- Timer --------------------------
249 | -(void)timeRun{
250 |
251 | _timeNum += 1;
252 |
253 | if (_timeNum >= 60) {
254 | NSLog(@"视频处理超时");
255 | [_timer invalidate];
256 | _hudView.hidden = YES;
257 | [self createUI];
258 |
259 | }
260 |
261 | }
262 |
263 |
264 |
265 |
266 | #pragma mark ----------------------------- Create the loading HUD ------------------------
267 |
268 | -(void)createHudView{
269 |
270 | if (!_hudView) {
271 | _hudView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, WIDTH, HEIGHT)];
272 | _hudView.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.6];
273 |
274 |
275 | UIView * huV = [[UIView alloc]initWithFrame:CGRectMake(WIDTH/2-50, HEIGHT/2-50, 100, 100)];
276 | huV.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.6];
277 |
278 | huV.layer.cornerRadius = 5;
279 | huV.clipsToBounds = YES;
280 |
281 | UIActivityIndicatorView * activityView = [[UIActivityIndicatorView alloc]initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhite];
282 |
283 |
284 | activityView.frame = CGRectMake(0, 0,huV.frame.size.width, huV.frame.size.height);
285 |
286 | [activityView startAnimating];
287 |
288 | [huV addSubview:activityView];
289 |
290 | [_hudView addSubview:huV];
291 |
292 |
293 | [WINDOW addSubview:_hudView];
294 |
295 | }else{
296 |
297 | _hudView.hidden = NO;
298 |
299 | }
300 |
301 | }
302 |
303 | #pragma mark ----------------------------- Video save path ------------------------
304 | -(NSString *)fileSavePath{
305 |
306 | NSFileManager* fileManager = [NSFileManager defaultManager];
307 |
308 | NSString *pathDocuments = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) objectAtIndex:0];
309 | NSString *createPath = [NSString stringWithFormat:@"%@/myVidio/333.mp4", pathDocuments];//output video file path
310 | NSString *createPath2 = [NSString stringWithFormat:@"%@/myVidio", pathDocuments];//output video folder
311 | //If the video file already exists, delete it
312 | BOOL blHave=[[NSFileManager defaultManager] fileExistsAtPath:createPath];
313 | if (blHave) {
314 | BOOL blDele= [fileManager removeItemAtPath:createPath error:nil];
315 | if (!blDele) {
316 | [fileManager removeItemAtPath:createPath error:nil];
317 | }
318 | }
319 | //Create the output folder if it does not already exist
320 | BOOL blHave1=[[NSFileManager defaultManager] fileExistsAtPath:createPath2];
321 | if (!blHave1) {
322 | [fileManager createDirectoryAtPath:createPath2 withIntermediateDirectories:YES attributes:nil error:nil];
323 | }
324 |
325 | _fileSavePath = createPath;
326 |
327 |
328 | NSLog(@"视频输出地址 fileSavePath = %@",_fileSavePath);
329 |
330 | return _fileSavePath;
331 | }
332 |
333 |
334 | #pragma mark --------------------------- Create the filter-selection view ----------------------------
335 |
336 | -(void)createEditView{
337 |
338 | _EditView = [[UIScrollView alloc]initWithFrame:CGRectMake(0, HEIGHT-190, WIDTH, 100)];
339 | _EditView.showsVerticalScrollIndicator = NO;
340 | AVURLAsset * myAsset = [AVURLAsset assetWithURL:_filePath];
341 |
342 | //Initialize the AVAssetImageGenerator
343 | AVAssetImageGenerator * imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:myAsset];
344 | imageGenerator.appliesPreferredTrackTransform = YES;
345 |
346 | UIImage *inputImage = [[UIImage alloc]init];
347 |
348 | // First image
349 | //Create the first preview thumbnail
350 | CGImageRef halfWayImage = [imageGenerator copyCGImageAtTime:kCMTimeZero actualTime:nil error:nil];
351 | if (halfWayImage != NULL) {
352 | inputImage = [[UIImage alloc] initWithCGImage:halfWayImage];
353 | }
354 |
355 |
356 | _GPUImgArr = [self CreateGPUArr];
357 |
358 | for (int i = 0; i<_GPUImgArr.count; i++) {
359 |
360 |
361 | UIButton * effectImg = [UIButton buttonWithType:UIButtonTypeCustom];
362 | effectImg.frame = CGRectMake(10+i*((WIDTH-10)/5), 10, (WIDTH-10)/5-10, (WIDTH-10)/5-10);
363 | [effectImg setImage:inputImage forState:UIControlStateNormal];
364 |
365 | if (i>0) {
366 |
367 | GPUImageOutput * disFilter = (GPUImageOutput *)[_GPUImgArr[i] objectForKey:@"filter"];
368 |
369 | //Keep the next rendered frame so it can be captured as a still image
370 | [disFilter useNextFrameForImageCapture];
371 | //Create the still-image source
372 | GPUImagePicture *stillImageSource = [[GPUImagePicture alloc]initWithImage:inputImage];
373 | //Attach the filter
374 | [stillImageSource addTarget:disFilter];
375 | //Start rendering
376 | [stillImageSource processImage];
377 | //Grab the rendered image
378 | UIImage *newImage = [disFilter imageFromCurrentFramebuffer];
379 |
380 |
381 | [effectImg setImage:newImage forState:UIControlStateNormal];
382 |
383 | }
384 |
385 | effectImg.layer.cornerRadius = ((WIDTH-10)/5-10)/2;
386 | effectImg.layer.masksToBounds = YES;
387 | effectImg.tag = 1000+i;
388 |
389 | [effectImg addTarget:self action:@selector(effectImgClick:) forControlEvents:UIControlEventTouchUpInside];
390 |
391 | if (i == 0) {
392 | effectImg.layer.borderWidth = 2;
393 | effectImg.layer.borderColor = [UIColor redColor].CGColor;
394 | }
395 |
396 | UILabel * effectName = [[UILabel alloc]initWithFrame:CGRectMake(effectImg.frame.origin.x, CGRectGetMaxY(effectImg.frame)+10, effectImg.frame.size.width, 20)];
397 | effectName.textColor = [UIColor blackColor];
398 | effectName.textAlignment = NSTextAlignmentCenter;
399 | effectName.font = [UIFont systemFontOfSize:12];
400 | effectName.text = _GPUImgArr[i][@"name"];
401 |
402 | [_EditView addSubview:effectImg];
403 | [_EditView addSubview:effectName];
404 |
405 | _EditView.contentSize = CGSizeMake(_GPUImgArr.count*(WIDTH-10)/5+10, _EditView.frame.size.height);
406 | }
407 |
408 |
409 | [self.view addSubview:_EditView];
410 | }
411 |
412 |
413 | #pragma mark ------------------------ Filter array -----------------------
414 |
415 | -(NSArray *)CreateGPUArr{
416 | NSMutableArray * arr = [[NSMutableArray alloc]init];
417 |
418 | NSString * title0 = @"原图";
419 | NSDictionary * dic0 = [NSDictionary dictionaryWithObjectsAndKeys:@"",@"filter",title0,@"name", nil];
420 | [arr addObject:dic0];
421 |
422 |
423 | GPUImageOutput * Filter5 = [[GPUImageGammaFilter alloc] init];
424 | [(GPUImageGammaFilter *)Filter5 setGamma:1.5];
425 | NSString * title5 = @"伽马线";
426 | NSDictionary * dic5 = [NSDictionary dictionaryWithObjectsAndKeys:Filter5,@"filter",title5,@"name", nil];
427 | [arr addObject:dic5];
428 |
429 |
430 | GPUImageOutput * Filter6 = [[GPUImageColorInvertFilter alloc] init];
431 | NSString * title6 = @"反色";
432 | NSDictionary * dic6 = [NSDictionary dictionaryWithObjectsAndKeys:Filter6,@"filter",title6,@"name", nil];
433 | [arr addObject:dic6];
434 |
435 | GPUImageOutput * Filter7 = [[GPUImageSepiaFilter alloc] init];
436 | NSString * title7 = @"褐色怀旧";
437 | NSDictionary * dic7 = [NSDictionary dictionaryWithObjectsAndKeys:Filter7,@"filter",title7,@"name", nil];
438 | [arr addObject:dic7];
439 |
440 | GPUImageOutput * Filter8 = [[GPUImageGrayscaleFilter alloc] init];
441 | NSString * title8 = @"灰度";
442 | NSDictionary * dic8 = [NSDictionary dictionaryWithObjectsAndKeys:Filter8,@"filter",title8,@"name", nil];
443 | [arr addObject:dic8];
444 |
445 | GPUImageOutput * Filter9 = [[GPUImageHistogramGenerator alloc] init];
446 | NSString * title9 = @"色彩直方图?";
447 | NSDictionary * dic9 = [NSDictionary dictionaryWithObjectsAndKeys:Filter9,@"filter",title9,@"name", nil];
448 | [arr addObject:dic9];
449 |
450 |
451 | GPUImageOutput * Filter10 = [[GPUImageRGBFilter alloc] init];
452 | NSString * title10 = @"RGB";
453 | [(GPUImageRGBFilter *)Filter10 setRed:0.8];
454 | [(GPUImageRGBFilter *)Filter10 setGreen:0.3];
455 | [(GPUImageRGBFilter *)Filter10 setBlue:0.5];
456 | NSDictionary * dic10 = [NSDictionary dictionaryWithObjectsAndKeys:Filter10,@"filter",title10,@"name", nil];
457 | [arr addObject:dic10];
458 |
459 | GPUImageOutput * Filter11 = [[GPUImageMonochromeFilter alloc] init];
460 | [(GPUImageMonochromeFilter *)Filter11 setColorRed:0.3 green:0.5 blue:0.8];
461 | NSString * title11 = @"单色";
462 | NSDictionary * dic11 = [NSDictionary dictionaryWithObjectsAndKeys:Filter11,@"filter",title11,@"name", nil];
463 | [arr addObject:dic11];
464 |
465 | GPUImageOutput * Filter12 = [[GPUImageBoxBlurFilter alloc] init];
466 | // [(GPUImageMonochromeFilter *)Filter11 setColorRed:0.3 green:0.5 blue:0.8];
467 | NSString * title12 = @"模糊";
468 | NSDictionary * dic12 = [NSDictionary dictionaryWithObjectsAndKeys:Filter12,@"filter",title12,@"name", nil];
469 | [arr addObject:dic12];
470 |
471 | GPUImageOutput * Filter13 = [[GPUImageSobelEdgeDetectionFilter alloc] init];
472 | // [(GPUImageSobelEdgeDetectionFilter *)Filter13 ];
473 | NSString * title13 = @"漫画反色";
474 | NSDictionary * dic13 = [NSDictionary dictionaryWithObjectsAndKeys:Filter13,@"filter",title13,@"name", nil];
475 | [arr addObject:dic13];
476 |
477 | GPUImageOutput * Filter14 = [[GPUImageXYDerivativeFilter alloc] init];
478 | // [(GPUImageSobelEdgeDetectionFilter *)Filter13 ];
479 | NSString * title14 = @"蓝绿边缘";
480 | NSDictionary * dic14 = [NSDictionary dictionaryWithObjectsAndKeys:Filter14,@"filter",title14,@"name", nil];
481 | [arr addObject:dic14];
482 |
483 |
484 | GPUImageOutput