├── ObjectOverlayOnVideoDemo.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcuserdata
│ │ └── theappguruz-new-2.xcuserdatad
│ │ └── UserInterfaceState.xcuserstate
└── xcuserdata
│ └── theappguruz-new-2.xcuserdatad
│ ├── xcdebugger
│ └── Breakpoints_v2.xcbkptlist
│ └── xcschemes
│ ├── ObjectOverlayOnVideoDemo.xcscheme
│ └── xcschememanagement.plist
├── ObjectOverlayOnVideoDemo
├── AppDelegate.h
├── AppDelegate.m
├── Base.lproj
│ └── Main.storyboard
├── Images.xcassets
│ ├── AppIcon.appiconset
│ │ └── Contents.json
│ └── LaunchImage.launchimage
│ │ └── Contents.json
├── Missile_Preview.m4v
├── ObjectOverlayOnVideoDemo-Info.plist
├── ObjectOverlayOnVideoDemo-Prefix.pch
├── ThirdParty
│ └── GPU
│ │ └── Source
│ │ ├── GLProgram.h
│ │ ├── GLProgram.m
│ │ ├── GPUImage.h
│ │ ├── GPUImage3x3ConvolutionFilter.h
│ │ ├── GPUImage3x3ConvolutionFilter.m
│ │ ├── GPUImage3x3TextureSamplingFilter.h
│ │ ├── GPUImage3x3TextureSamplingFilter.m
│ │ ├── GPUImageAdaptiveThresholdFilter.h
│ │ ├── GPUImageAdaptiveThresholdFilter.m
│ │ ├── GPUImageAddBlendFilter.h
│ │ ├── GPUImageAddBlendFilter.m
│ │ ├── GPUImageAlphaBlendFilter.h
│ │ ├── GPUImageAlphaBlendFilter.m
│ │ ├── GPUImageAmatorkaFilter.h
│ │ ├── GPUImageAmatorkaFilter.m
│ │ ├── GPUImageAverageColor.h
│ │ ├── GPUImageAverageColor.m
│ │ ├── GPUImageAverageLuminanceThresholdFilter.h
│ │ ├── GPUImageAverageLuminanceThresholdFilter.m
│ │ ├── GPUImageBilateralFilter.h
│ │ ├── GPUImageBilateralFilter.m
│ │ ├── GPUImageBoxBlurFilter.h
│ │ ├── GPUImageBoxBlurFilter.m
│ │ ├── GPUImageBrightnessFilter.h
│ │ ├── GPUImageBrightnessFilter.m
│ │ ├── GPUImageBuffer.h
│ │ ├── GPUImageBuffer.m
│ │ ├── GPUImageBulgeDistortionFilter.h
│ │ ├── GPUImageBulgeDistortionFilter.m
│ │ ├── GPUImageCGAColorspaceFilter.h
│ │ ├── GPUImageCGAColorspaceFilter.m
│ │ ├── GPUImageCannyEdgeDetectionFilter.h
│ │ ├── GPUImageCannyEdgeDetectionFilter.m
│ │ ├── GPUImageChromaKeyBlendFilter.h
│ │ ├── GPUImageChromaKeyBlendFilter.m
│ │ ├── GPUImageChromaKeyFilter.h
│ │ ├── GPUImageChromaKeyFilter.m
│ │ ├── GPUImageClosingFilter.h
│ │ ├── GPUImageClosingFilter.m
│ │ ├── GPUImageColorBlendFilter.h
│ │ ├── GPUImageColorBlendFilter.m
│ │ ├── GPUImageColorBurnBlendFilter.h
│ │ ├── GPUImageColorBurnBlendFilter.m
│ │ ├── GPUImageColorDodgeBlendFilter.h
│ │ ├── GPUImageColorDodgeBlendFilter.m
│ │ ├── GPUImageColorInvertFilter.h
│ │ ├── GPUImageColorInvertFilter.m
│ │ ├── GPUImageColorMatrixFilter.h
│ │ ├── GPUImageColorMatrixFilter.m
│ │ ├── GPUImageColorPackingFilter.h
│ │ ├── GPUImageColorPackingFilter.m
│ │ ├── GPUImageContrastFilter.h
│ │ ├── GPUImageContrastFilter.m
│ │ ├── GPUImageCropFilter.h
│ │ ├── GPUImageCropFilter.m
│ │ ├── GPUImageCrosshairGenerator.h
│ │ ├── GPUImageCrosshairGenerator.m
│ │ ├── GPUImageCrosshatchFilter.h
│ │ ├── GPUImageCrosshatchFilter.m
│ │ ├── GPUImageDarkenBlendFilter.h
│ │ ├── GPUImageDarkenBlendFilter.m
│ │ ├── GPUImageDifferenceBlendFilter.h
│ │ ├── GPUImageDifferenceBlendFilter.m
│ │ ├── GPUImageDilationFilter.h
│ │ ├── GPUImageDilationFilter.m
│ │ ├── GPUImageDirectionalNonMaximumSuppressionFilter.h
│ │ ├── GPUImageDirectionalNonMaximumSuppressionFilter.m
│ │ ├── GPUImageDissolveBlendFilter.h
│ │ ├── GPUImageDissolveBlendFilter.m
│ │ ├── GPUImageDivideBlendFilter.h
│ │ ├── GPUImageDivideBlendFilter.m
│ │ ├── GPUImageEmbossFilter.h
│ │ ├── GPUImageEmbossFilter.m
│ │ ├── GPUImageErosionFilter.h
│ │ ├── GPUImageErosionFilter.m
│ │ ├── GPUImageExclusionBlendFilter.h
│ │ ├── GPUImageExclusionBlendFilter.m
│ │ ├── GPUImageExposureFilter.h
│ │ ├── GPUImageExposureFilter.m
│ │ ├── GPUImageFASTCornerDetectionFilter.h
│ │ ├── GPUImageFASTCornerDetectionFilter.m
│ │ ├── GPUImageFalseColorFilter.h
│ │ ├── GPUImageFalseColorFilter.m
│ │ ├── GPUImageFilter.h
│ │ ├── GPUImageFilter.m
│ │ ├── GPUImageFilterGroup.h
│ │ ├── GPUImageFilterGroup.m
│ │ ├── GPUImageFilterPipeline.h
│ │ ├── GPUImageFilterPipeline.m
│ │ ├── GPUImageGammaFilter.h
│ │ ├── GPUImageGammaFilter.m
│ │ ├── GPUImageGaussianBlurFilter.h
│ │ ├── GPUImageGaussianBlurFilter.m
│ │ ├── GPUImageGaussianBlurPositionFilter.h
│ │ ├── GPUImageGaussianBlurPositionFilter.m
│ │ ├── GPUImageGaussianSelectiveBlurFilter.h
│ │ ├── GPUImageGaussianSelectiveBlurFilter.m
│ │ ├── GPUImageGlassSphereFilter.h
│ │ ├── GPUImageGlassSphereFilter.m
│ │ ├── GPUImageGrayscaleFilter.h
│ │ ├── GPUImageGrayscaleFilter.m
│ │ ├── GPUImageHSBFilter.h
│ │ ├── GPUImageHSBFilter.m
│ │ ├── GPUImageHalftoneFilter.h
│ │ ├── GPUImageHalftoneFilter.m
│ │ ├── GPUImageHardLightBlendFilter.h
│ │ ├── GPUImageHardLightBlendFilter.m
│ │ ├── GPUImageHarrisCornerDetectionFilter.h
│ │ ├── GPUImageHarrisCornerDetectionFilter.m
│ │ ├── GPUImageHazeFilter.h
│ │ ├── GPUImageHazeFilter.m
│ │ ├── GPUImageHighPassFilter.h
│ │ ├── GPUImageHighPassFilter.m
│ │ ├── GPUImageHighlightShadowFilter.h
│ │ ├── GPUImageHighlightShadowFilter.m
│ │ ├── GPUImageHistogramFilter.h
│ │ ├── GPUImageHistogramFilter.m
│ │ ├── GPUImageHistogramGenerator.h
│ │ ├── GPUImageHistogramGenerator.m
│ │ ├── GPUImageHoughTransformLineDetector.h
│ │ ├── GPUImageHoughTransformLineDetector.m
│ │ ├── GPUImageHueBlendFilter.h
│ │ ├── GPUImageHueBlendFilter.m
│ │ ├── GPUImageHueFilter.h
│ │ ├── GPUImageHueFilter.m
│ │ ├── GPUImageJFAVoronoiFilter.h
│ │ ├── GPUImageJFAVoronoiFilter.m
│ │ ├── GPUImageKuwaharaFilter.h
│ │ ├── GPUImageKuwaharaFilter.m
│ │ ├── GPUImageKuwaharaRadius3Filter.h
│ │ ├── GPUImageKuwaharaRadius3Filter.m
│ │ ├── GPUImageLanczosResamplingFilter.h
│ │ ├── GPUImageLanczosResamplingFilter.m
│ │ ├── GPUImageLaplacianFilter.h
│ │ ├── GPUImageLaplacianFilter.m
│ │ ├── GPUImageLevelsFilter.h
│ │ ├── GPUImageLevelsFilter.m
│ │ ├── GPUImageLightenBlendFilter.h
│ │ ├── GPUImageLightenBlendFilter.m
│ │ ├── GPUImageLineGenerator.h
│ │ ├── GPUImageLineGenerator.m
│ │ ├── GPUImageLinearBurnBlendFilter.h
│ │ ├── GPUImageLinearBurnBlendFilter.m
│ │ ├── GPUImageLocalBinaryPatternFilter.h
│ │ ├── GPUImageLocalBinaryPatternFilter.m
│ │ ├── GPUImageLookupFilter.h
│ │ ├── GPUImageLookupFilter.m
│ │ ├── GPUImageLowPassFilter.h
│ │ ├── GPUImageLowPassFilter.m
│ │ ├── GPUImageLuminanceRangeFilter.h
│ │ ├── GPUImageLuminanceRangeFilter.m
│ │ ├── GPUImageLuminanceThresholdFilter.h
│ │ ├── GPUImageLuminanceThresholdFilter.m
│ │ ├── GPUImageLuminosity.h
│ │ ├── GPUImageLuminosity.m
│ │ ├── GPUImageLuminosityBlendFilter.h
│ │ ├── GPUImageLuminosityBlendFilter.m
│ │ ├── GPUImageMaskFilter.h
│ │ ├── GPUImageMaskFilter.m
│ │ ├── GPUImageMedianFilter.h
│ │ ├── GPUImageMedianFilter.m
│ │ ├── GPUImageMissEtikateFilter.h
│ │ ├── GPUImageMissEtikateFilter.m
│ │ ├── GPUImageMonochromeFilter.h
│ │ ├── GPUImageMonochromeFilter.m
│ │ ├── GPUImageMosaicFilter.h
│ │ ├── GPUImageMosaicFilter.m
│ │ ├── GPUImageMotionBlurFilter.h
│ │ ├── GPUImageMotionBlurFilter.m
│ │ ├── GPUImageMotionDetector.h
│ │ ├── GPUImageMotionDetector.m
│ │ ├── GPUImageMovie.h
│ │ ├── GPUImageMovie.m
│ │ ├── GPUImageMovieComposition.h
│ │ ├── GPUImageMovieComposition.m
│ │ ├── GPUImageMultiplyBlendFilter.h
│ │ ├── GPUImageMultiplyBlendFilter.m
│ │ ├── GPUImageNobleCornerDetectionFilter.h
│ │ ├── GPUImageNobleCornerDetectionFilter.m
│ │ ├── GPUImageNonMaximumSuppressionFilter.h
│ │ ├── GPUImageNonMaximumSuppressionFilter.m
│ │ ├── GPUImageNormalBlendFilter.h
│ │ ├── GPUImageNormalBlendFilter.m
│ │ ├── GPUImageOpacityFilter.h
│ │ ├── GPUImageOpacityFilter.m
│ │ ├── GPUImageOpeningFilter.h
│ │ ├── GPUImageOpeningFilter.m
│ │ ├── GPUImageOutput.h
│ │ ├── GPUImageOutput.m
│ │ ├── GPUImageOverlayBlendFilter.h
│ │ ├── GPUImageOverlayBlendFilter.m
│ │ ├── GPUImageParallelCoordinateLineTransformFilter.h
│ │ ├── GPUImageParallelCoordinateLineTransformFilter.m
│ │ ├── GPUImagePerlinNoiseFilter.h
│ │ ├── GPUImagePerlinNoiseFilter.m
│ │ ├── GPUImagePinchDistortionFilter.h
│ │ ├── GPUImagePinchDistortionFilter.m
│ │ ├── GPUImagePixellateFilter.h
│ │ ├── GPUImagePixellateFilter.m
│ │ ├── GPUImagePixellatePositionFilter.h
│ │ ├── GPUImagePixellatePositionFilter.m
│ │ ├── GPUImagePoissonBlendFilter.h
│ │ ├── GPUImagePoissonBlendFilter.m
│ │ ├── GPUImagePolarPixellateFilter.h
│ │ ├── GPUImagePolarPixellateFilter.m
│ │ ├── GPUImagePolkaDotFilter.h
│ │ ├── GPUImagePolkaDotFilter.m
│ │ ├── GPUImagePosterizeFilter.h
│ │ ├── GPUImagePosterizeFilter.m
│ │ ├── GPUImagePrewittEdgeDetectionFilter.h
│ │ ├── GPUImagePrewittEdgeDetectionFilter.m
│ │ ├── GPUImageRGBClosingFilter.h
│ │ ├── GPUImageRGBClosingFilter.m
│ │ ├── GPUImageRGBDilationFilter.h
│ │ ├── GPUImageRGBDilationFilter.m
│ │ ├── GPUImageRGBErosionFilter.h
│ │ ├── GPUImageRGBErosionFilter.m
│ │ ├── GPUImageRGBFilter.h
│ │ ├── GPUImageRGBFilter.m
│ │ ├── GPUImageRGBOpeningFilter.h
│ │ ├── GPUImageRGBOpeningFilter.m
│ │ ├── GPUImageRawDataInput.h
│ │ ├── GPUImageRawDataInput.m
│ │ ├── GPUImageRawDataOutput.h
│ │ ├── GPUImageRawDataOutput.m
│ │ ├── GPUImageSaturationBlendFilter.h
│ │ ├── GPUImageSaturationBlendFilter.m
│ │ ├── GPUImageSaturationFilter.h
│ │ ├── GPUImageSaturationFilter.m
│ │ ├── GPUImageScreenBlendFilter.h
│ │ ├── GPUImageScreenBlendFilter.m
│ │ ├── GPUImageSepiaFilter.h
│ │ ├── GPUImageSepiaFilter.m
│ │ ├── GPUImageSharpenFilter.h
│ │ ├── GPUImageSharpenFilter.m
│ │ ├── GPUImageShiTomasiFeatureDetectionFilter.h
│ │ ├── GPUImageShiTomasiFeatureDetectionFilter.m
│ │ ├── GPUImageSingleComponentGaussianBlurFilter.h
│ │ ├── GPUImageSingleComponentGaussianBlurFilter.m
│ │ ├── GPUImageSketchFilter.h
│ │ ├── GPUImageSketchFilter.m
│ │ ├── GPUImageSmoothToonFilter.h
│ │ ├── GPUImageSmoothToonFilter.m
│ │ ├── GPUImageSobelEdgeDetectionFilter.h
│ │ ├── GPUImageSobelEdgeDetectionFilter.m
│ │ ├── GPUImageSoftEleganceFilter.h
│ │ ├── GPUImageSoftEleganceFilter.m
│ │ ├── GPUImageSoftLightBlendFilter.h
│ │ ├── GPUImageSoftLightBlendFilter.m
│ │ ├── GPUImageSolidColorGenerator.h
│ │ ├── GPUImageSolidColorGenerator.m
│ │ ├── GPUImageSourceOverBlendFilter.h
│ │ ├── GPUImageSourceOverBlendFilter.m
│ │ ├── GPUImageSphereRefractionFilter.h
│ │ ├── GPUImageSphereRefractionFilter.m
│ │ ├── GPUImageStillCamera.h
│ │ ├── GPUImageStillCamera.m
│ │ ├── GPUImageStretchDistortionFilter.h
│ │ ├── GPUImageStretchDistortionFilter.m
│ │ ├── GPUImageSubtractBlendFilter.h
│ │ ├── GPUImageSubtractBlendFilter.m
│ │ ├── GPUImageSwirlFilter.h
│ │ ├── GPUImageSwirlFilter.m
│ │ ├── GPUImageTextureInput.h
│ │ ├── GPUImageTextureInput.m
│ │ ├── GPUImageTextureOutput.h
│ │ ├── GPUImageTextureOutput.m
│ │ ├── GPUImageThreeInputFilter.h
│ │ ├── GPUImageThreeInputFilter.m
│ │ ├── GPUImageThresholdEdgeDetectionFilter.h
│ │ ├── GPUImageThresholdEdgeDetectionFilter.m
│ │ ├── GPUImageThresholdSketchFilter.h
│ │ ├── GPUImageThresholdSketchFilter.m
│ │ ├── GPUImageThresholdedNonMaximumSuppressionFilter.h
│ │ ├── GPUImageThresholdedNonMaximumSuppressionFilter.m
│ │ ├── GPUImageTiltShiftFilter.h
│ │ ├── GPUImageTiltShiftFilter.m
│ │ ├── GPUImageToneCurveFilter.h
│ │ ├── GPUImageToneCurveFilter.m
│ │ ├── GPUImageToonFilter.h
│ │ ├── GPUImageToonFilter.m
│ │ ├── GPUImageTransformFilter.h
│ │ ├── GPUImageTransformFilter.m
│ │ ├── GPUImageTwoInputCrossTextureSamplingFilter.h
│ │ ├── GPUImageTwoInputCrossTextureSamplingFilter.m
│ │ ├── GPUImageTwoInputFilter.h
│ │ ├── GPUImageTwoInputFilter.m
│ │ ├── GPUImageTwoPassFilter.h
│ │ ├── GPUImageTwoPassFilter.m
│ │ ├── GPUImageTwoPassTextureSamplingFilter.h
│ │ ├── GPUImageTwoPassTextureSamplingFilter.m
│ │ ├── GPUImageUIElement.h
│ │ ├── GPUImageUIElement.m
│ │ ├── GPUImageUnsharpMaskFilter.h
│ │ ├── GPUImageUnsharpMaskFilter.m
│ │ ├── GPUImageVideoCamera.h
│ │ ├── GPUImageVideoCamera.m
│ │ ├── GPUImageVignetteFilter.h
│ │ ├── GPUImageVignetteFilter.m
│ │ ├── GPUImageVoronoiConsumerFilter.h
│ │ ├── GPUImageVoronoiConsumerFilter.m
│ │ ├── GPUImageWeakPixelInclusionFilter.h
│ │ ├── GPUImageWeakPixelInclusionFilter.m
│ │ ├── GPUImageWhiteBalanceFilter.h
│ │ ├── GPUImageWhiteBalanceFilter.m
│ │ ├── GPUImageXYDerivativeFilter.h
│ │ ├── GPUImageXYDerivativeFilter.m
│ │ ├── GPUImageZoomBlurFilter.h
│ │ ├── GPUImageZoomBlurFilter.m
│ │ ├── GPUImageiOSBlurFilter.h
│ │ ├── GPUImageiOSBlurFilter.m
│ │ ├── GPUimageDirectionalSobelEdgeDetectionFilter.h
│ │ ├── GPUimageDirectionalSobelEdgeDetectionFilter.m
│ │ └── iOS
│ │ ├── GPUImage-Prefix.pch
│ │ ├── GPUImageContext.h
│ │ ├── GPUImageContext.m
│ │ ├── GPUImageMovieWriter.h
│ │ ├── GPUImageMovieWriter.m
│ │ ├── GPUImagePicture.h
│ │ ├── GPUImagePicture.m
│ │ ├── GPUImageView.h
│ │ └── GPUImageView.m
├── ViewController.h
├── ViewController.m
├── en.lproj
│ └── InfoPlist.strings
└── main.m
├── ObjectOverlayOnVideoDemoTests
├── ObjectOverlayOnVideoDemoTests-Info.plist
├── ObjectOverlayOnVideoDemoTests.m
└── en.lproj
│ └── InfoPlist.strings
└── README.md
/ObjectOverlayOnVideoDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo.xcodeproj/project.xcworkspace/xcuserdata/theappguruz-new-2.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tejas123/iOS-Guide-for-Object-Overlay-on-Video/989373228768ebf135c85d5dcfc9c67efa562352/ObjectOverlayOnVideoDemo.xcodeproj/project.xcworkspace/xcuserdata/theappguruz-new-2.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo.xcodeproj/xcuserdata/theappguruz-new-2.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
8 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo.xcodeproj/xcuserdata/theappguruz-new-2.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | SchemeUserState
6 |
7 | ObjectOverlayOnVideoDemo.xcscheme
8 |
9 | orderHint
10 | 0
11 |
12 |
13 | SuppressBuildableAutocreation
14 |
15 | EDE0888519189D780074E6B6
16 |
17 | primary
18 |
19 |
20 | EDE088A619189D780074E6B6
21 |
22 | primary
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // ObjectOverlayOnVideoDemo
4 | //
5 | // Created by Krupa-iMac on 06/05/14.
6 | // Copyright (c) 2014 TheAppGuruz. All rights reserved.
7 | //
8 |
9 | #import
10 |
11 | @interface AppDelegate : UIResponder
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // ObjectOverlayOnVideoDemo
4 | //
5 | // Created by Krupa-iMac on 06/05/14.
6 | // Copyright (c) 2014 TheAppGuruz. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @implementation AppDelegate
12 |
13 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
14 | {
15 | // Override point for customization after application launch.
16 | return YES;
17 | }
18 |
19 | - (void)applicationWillResignActive:(UIApplication *)application
20 | {
21 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
22 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
23 | }
24 |
25 | - (void)applicationDidEnterBackground:(UIApplication *)application
26 | {
27 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
28 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
29 | }
30 |
31 | - (void)applicationWillEnterForeground:(UIApplication *)application
32 | {
33 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
34 | }
35 |
36 | - (void)applicationDidBecomeActive:(UIApplication *)application
37 | {
38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
39 | }
40 |
41 | - (void)applicationWillTerminate:(UIApplication *)application
42 | {
43 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
44 | }
45 |
46 | @end
47 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "40x40",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "60x60",
16 | "scale" : "2x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/Images.xcassets/LaunchImage.launchimage/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "orientation" : "portrait",
5 | "idiom" : "iphone",
6 | "extent" : "full-screen",
7 | "minimum-system-version" : "7.0",
8 | "scale" : "2x"
9 | },
10 | {
11 | "orientation" : "portrait",
12 | "idiom" : "iphone",
13 | "subtype" : "retina4",
14 | "extent" : "full-screen",
15 | "minimum-system-version" : "7.0",
16 | "scale" : "2x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/Missile_Preview.m4v:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tejas123/iOS-Guide-for-Object-Overlay-on-Video/989373228768ebf135c85d5dcfc9c67efa562352/ObjectOverlayOnVideoDemo/Missile_Preview.m4v
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ObjectOverlayOnVideoDemo-Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleDisplayName
8 | ${PRODUCT_NAME}
9 | CFBundleExecutable
10 | ${EXECUTABLE_NAME}
11 | CFBundleIdentifier
12 | tag.${PRODUCT_NAME:rfc1034identifier}
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | ${PRODUCT_NAME}
17 | CFBundlePackageType
18 | APPL
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleSignature
22 | ????
23 | CFBundleVersion
24 | 1.0
25 | LSRequiresIPhoneOS
26 |
27 | UIMainStoryboardFile
28 | Main
29 | UIRequiredDeviceCapabilities
30 |
31 | armv7
32 |
33 | UISupportedInterfaceOrientations
34 |
35 | UIInterfaceOrientationPortrait
36 | UIInterfaceOrientationLandscapeLeft
37 | UIInterfaceOrientationLandscapeRight
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ObjectOverlayOnVideoDemo-Prefix.pch:
--------------------------------------------------------------------------------
1 | //
2 | // Prefix header
3 | //
4 | // The contents of this file are implicitly included at the beginning of every source file.
5 | //
6 |
7 | #import
8 |
9 | #ifndef __IPHONE_5_0
10 | #warning "This project uses features only available in iOS SDK 5.0 and later."
11 | #endif
12 |
13 | #ifdef __OBJC__
14 | #import
15 | #import
16 |
17 | #define FilePath [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"]
18 | #endif
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GLProgram.h:
--------------------------------------------------------------------------------
1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
2 | // A description of this can be found at his page on the topic:
3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
4 | // I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
5 |
6 | #import
7 |
8 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
9 | #import
10 | #import
11 | #else
12 | #import
13 | #import
14 | #endif
15 |
16 | @interface GLProgram : NSObject
17 | {
18 | NSMutableArray *attributes;
19 | NSMutableArray *uniforms;
20 | GLuint program,
21 | vertShader,
22 | fragShader;
23 | }
24 |
25 | @property(readwrite, nonatomic) BOOL initialized;
26 |
27 | - (id)initWithVertexShaderString:(NSString *)vShaderString
28 | fragmentShaderString:(NSString *)fShaderString;
29 | - (id)initWithVertexShaderString:(NSString *)vShaderString
30 | fragmentShaderFilename:(NSString *)fShaderFilename;
31 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
32 | fragmentShaderFilename:(NSString *)fShaderFilename;
33 | - (void)addAttribute:(NSString *)attributeName;
34 | - (GLuint)attributeIndex:(NSString *)attributeName;
35 | - (GLuint)uniformIndex:(NSString *)uniformName;
36 | - (BOOL)link;
37 | - (void)use;
38 | - (NSString *)vertexShaderLog;
39 | - (NSString *)fragmentShaderLog;
40 | - (NSString *)programLog;
41 | - (void)validate;
42 | @end
43 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImage3x3ConvolutionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | /** Runs a 3x3 convolution kernel against the image
4 | */
5 | @interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
6 | {
7 | GLint convolutionMatrixUniform;
8 | }
9 |
10 | /** Convolution kernel to run against the image
11 |
12 | The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels.
13 | The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three
14 | If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
15 | */
16 | @property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImage3x3TextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
4 |
5 | @interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
6 | {
7 | GLint texelWidthUniform, texelHeightUniform;
8 |
9 | CGFloat texelWidth, texelHeight;
10 | BOOL hasOverriddenImageSizeFactor;
11 | }
12 |
13 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
14 | @property(readwrite, nonatomic) CGFloat texelWidth;
15 | @property(readwrite, nonatomic) CGFloat texelHeight;
16 |
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAdaptiveThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup
4 |
5 | /** A multiplier for the background averaging blur radius in pixels, with a default of 4
6 | */
7 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAddBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageAddBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAlphaBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
4 | {
5 | GLint mixUniform;
6 | }
7 |
8 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat mix;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAlphaBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageAlphaBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | uniform lowp float mixturePercent;
13 |
14 | void main()
15 | {
16 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
17 | lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
18 |
19 | gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
20 | }
21 | );
22 | #else
23 | NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
24 | (
25 | varying vec2 textureCoordinate;
26 | varying vec2 textureCoordinate2;
27 |
28 | uniform sampler2D inputImageTexture;
29 | uniform sampler2D inputImageTexture2;
30 |
31 | uniform float mixturePercent;
32 |
33 | void main()
34 | {
35 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
36 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
37 |
38 | gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
39 | }
40 | );
41 | #endif
42 |
43 | @implementation GPUImageAlphaBlendFilter
44 |
45 | @synthesize mix = _mix;
46 |
47 | - (id)init;
48 | {
49 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageAlphaBlendFragmentShaderString]))
50 | {
51 | return nil;
52 | }
53 |
54 | mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
55 | self.mix = 0.5;
56 |
57 | return self;
58 | }
59 |
60 |
61 | #pragma mark -
62 | #pragma mark Accessors
63 |
64 | - (void)setMix:(CGFloat)newValue;
65 | {
66 | _mix = newValue;
67 |
68 | [self setFloat:_mix forUniform:mixUniform program:filterProgram];
69 | }
70 |
71 |
72 | @end
73 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAmatorkaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Photoshop action by Amatorka
6 | http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631
7 | */
8 |
9 | // Note: If you want to use this effect you have to add lookup_amatorka.png
10 | // from Resources folder to your application bundle.
11 |
12 | @interface GPUImageAmatorkaFilter : GPUImageFilterGroup
13 | {
14 | GPUImagePicture *lookupImageSource;
15 | }
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAmatorkaFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageAmatorkaFilter.h"
2 | #import "GPUImagePicture.h"
3 | #import "GPUImageLookupFilter.h"
4 |
5 | @implementation GPUImageAmatorkaFilter
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [super init]))
10 | {
11 | return nil;
12 | }
13 |
14 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
15 | UIImage *image = [UIImage imageNamed:@"lookup_amatorka.png"];
16 | #else
17 | NSImage *image = [NSImage imageNamed:@"lookup_amatorka.png"];
18 | #endif
19 |
20 | NSAssert(image, @"To use GPUImageAmatorkaFilter you need to add lookup_amatorka.png from GPUImage/framework/Resources to your application bundle.");
21 |
22 | lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
23 | GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
24 | [self addFilter:lookupFilter];
25 |
26 | [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
27 | [lookupImageSource processImage];
28 |
29 | self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
30 | self.terminalFilter = lookupFilter;
31 |
32 | return self;
33 | }
34 |
35 | -(void)prepareForImageCapture {
36 | [lookupImageSource processImage];
37 | [super prepareForImageCapture];
38 | }
39 |
40 | #pragma mark -
41 | #pragma mark Accessors
42 |
43 | @end
44 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAverageColor.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageColorAveragingVertexShaderString;
4 |
5 | @interface GPUImageAverageColor : GPUImageFilter
6 | {
7 | GLint texelWidthUniform, texelHeightUniform;
8 |
9 | NSUInteger numberOfStages;
10 | NSMutableArray *stageTextures, *stageFramebuffers, *stageSizes;
11 |
12 | GLubyte *rawImagePixels;
13 | }
14 |
15 | // This block is called on the completion of color averaging for a frame
16 | @property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);
17 |
18 | - (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAverageLuminanceThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup
4 |
5 | // This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.
6 | @property(readwrite, nonatomic) CGFloat thresholdMultiplier;
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageAverageLuminanceThresholdFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageAverageLuminanceThresholdFilter.h"
#import "GPUImageLuminosity.h"
#import "GPUImageLuminanceThresholdFilter.h"

@interface GPUImageAverageLuminanceThresholdFilter()
{
    GPUImageLuminosity *luminosityFilter;
    GPUImageLuminanceThresholdFilter *luminanceThresholdFilter;
}
@end

@implementation GPUImageAverageLuminanceThresholdFilter

@synthesize thresholdMultiplier = _thresholdMultiplier;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    self.thresholdMultiplier = 1.0;
    
    // Measures the average luminance of each incoming frame.
    luminosityFilter = [[GPUImageLuminosity alloc] init];
    [self addFilter:luminosityFilter];
    
    // Applies the dynamically updated threshold to the same frame.
    luminanceThresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init];
    [self addFilter:luminanceThresholdFilter];
    
    // Use __weak instead of __unsafe_unretained: should the group (or the
    // threshold filter) ever be deallocated while the luminosity callback is
    // still registered, these references become nil instead of dangling, and
    // messaging nil is a harmless no-op. While both objects are alive the
    // behavior is identical to the previous code.
    __weak GPUImageAverageLuminanceThresholdFilter *weakSelf = self;
    __weak GPUImageLuminanceThresholdFilter *weakThreshold = luminanceThresholdFilter;
    
    [luminosityFilter setLuminosityProcessingFinishedBlock:^(CGFloat luminosity, CMTime frameTime) {
        // Drive the threshold from the measured average, scaled by the multiplier.
        weakThreshold.threshold = luminosity * weakSelf.thresholdMultiplier;
    }];
    
    // Both sub-filters receive the source frame; the thresholded image is the output.
    self.initialFilters = [NSArray arrayWithObjects:luminosityFilter, luminanceThresholdFilter, nil];
    self.terminalFilter = luminanceThresholdFilter;
    
    return self;
}

@end
48 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageBilateralFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageGaussianBlurFilter.h"

/** A bilateral (edge-preserving) blur, built on the two-pass Gaussian blur.
 */
@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
{
    // Shader uniform locations. These were previously declared CGFloat, but
    // -[GLProgram uniformIndex:] returns a GLint handle, and every other
    // filter in this library stores its uniform locations as GLint; declare
    // them with the correct type.
    GLint firstDistanceNormalizationFactorUniform;
    GLint secondDistanceNormalizationFactorUniform;
}
// A normalization factor for the distance between central color and sample color.
@property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;
@end
11 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageBoxBlurFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageGaussianBlurFilter.h"

/** A hardware-accelerated box blur of an image
 
 Blur size and texel spacing are configured through the
 GPUImageGaussianBlurFilter superclass.
 */
@interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageBrightnessFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Adjusts image brightness by adding a constant offset to each pixel's RGB
 (see the fragment shader in the .m); alpha is left untouched.
 */
@interface GPUImageBrightnessFilter : GPUImageFilter
{
    GLint brightnessUniform; // location of the `brightness` shader uniform
}

// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat brightness;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageBrightnessFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageBrightnessFilter.h"

// Fragment shader: adds the scalar `brightness` uniform to each pixel's RGB,
// leaving the alpha channel (w) untouched. The OpenGL ES variant carries the
// required precision qualifiers; the desktop GL variant below omits them.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform lowp float brightness;
 
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float brightness;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
 }
);
#endif

@implementation GPUImageBrightnessFilter

@synthesize brightness = _brightness;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageBrightnessFragmentShaderString]))
    {
        return nil;
    }
    
    // Cache the uniform location, then push the neutral default (0.0 = unchanged).
    brightnessUniform = [filterProgram uniformIndex:@"brightness"];
    self.brightness = 0.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the new value and immediately forwards it to the shader uniform.
- (void)setBrightness:(CGFloat)newValue;
{
    _brightness = newValue;
    
    [self setFloat:_brightness forUniform:brightnessUniform program:filterProgram];
}

@end
66 |
67 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageBuffer.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

// Maintains a queue of recent frame textures. NOTE(review): the .m is not
// visible here — presumably the buffered textures are used to emit frames
// delayed by bufferSize; confirm against the implementation.
@interface GPUImageBuffer : GPUImageFilter
{
    NSMutableArray *bufferedTextures; // queue of buffered frame textures
}

// Number of frames to hold in the buffer.
@property(readwrite, nonatomic) NSUInteger bufferSize;

@end
11 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageBulgeDistortionFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/// Creates a bulge distortion on the image
@interface GPUImageBulgeDistortionFilter : GPUImageFilter
{
    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform; // shader uniform locations
}

/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
@property(readwrite, nonatomic) CGFloat radius;
/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
@property(readwrite, nonatomic) CGFloat scale;

@end
17 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageCGAColorspaceFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

// Named for the CGA graphics standard; the palette remapping itself lives in
// the .m (not shown here).
@interface GPUImageCGAColorspaceFilter : GPUImageFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageChromaKeyBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Selectively replaces a color in the first image with the second image
 */
@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
{
    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform; // shader uniform locations
}

/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
 
 The default value is 0.3
 */
@property(readwrite, nonatomic) GLfloat thresholdSensitivity;

/** The degree of smoothing controls how gradually similar colors are replaced in the image
 
 The default value is 0.1
 */
@property(readwrite, nonatomic) GLfloat smoothing;

/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
 
 The default is green: (0.0, 1.0, 0.0).
 
 @param redComponent Red component of color to be replaced
 @param greenComponent Green component of color to be replaced
 @param blueComponent Blue component of color to be replaced
 */
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
33 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageChromaKeyFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

// Single-input counterpart of GPUImageChromaKeyBlendFilter: exposes the same
// keying controls, but operates on one image (keyed-pixel handling is defined
// in the .m, not shown here).
@interface GPUImageChromaKeyFilter : GPUImageFilter
{
    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform; // shader uniform locations
}

/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
 
 The default value is 0.3
 */
@property(readwrite, nonatomic) GLfloat thresholdSensitivity;

/** The degree of smoothing controls how gradually similar colors are replaced in the image
 
 The default value is 0.1
 */
@property(readwrite, nonatomic) GLfloat smoothing;

/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
 
 The default is green: (0.0, 1.0, 0.0).
 
 @param redComponent Red component of color to be replaced
 @param greenComponent Green component of color to be replaced
 @param blueComponent Blue component of color to be replaced
 */
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
31 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageClosingFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"

@class GPUImageErosionFilter;
@class GPUImageDilationFilter;

// A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius.
// This helps to filter out smaller dark elements.

@interface GPUImageClosingFilter : GPUImageFilterGroup
{
    GPUImageErosionFilter *erosionFilter;
    GPUImageDilationFilter *dilationFilter;
}

// Sample spacing in texels; each setter forwards the value to both sub-filters.
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;

// Designated initializer; radius is the morphological radius (in pixels) for both passes.
- (id)initWithRadius:(NSUInteger)radius;

@end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageClosingFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageClosingFilter.h"
#import "GPUImageErosionFilter.h"
#import "GPUImageDilationFilter.h"

@implementation GPUImageClosingFilter

@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;

#pragma mark -
#pragma mark Initialization and teardown

// Convenience initializer: a closing with the minimum radius of 1.
- (id)init;
{
    if (!(self = [self initWithRadius:1]))
    {
        return nil;
    }
    
    return self;
}

// Builds the two-stage morphological closing pipeline.
- (id)initWithRadius:(NSUInteger)radius;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    // Stage one: dilate, expanding bright features to close small dark gaps.
    dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];
    
    // Stage two: erode by the same radius, shrinking features back to size.
    erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];
    
    // Chain the stages and expose them to the group: the dilation receives
    // the source image, and the erosion produces the final output.
    [dilationFilter addTarget:erosionFilter];
    
    self.initialFilters = @[dilationFilter];
    self.terminalFilter = erosionFilter;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Both spacing setters record the value and push it to each sub-filter.
- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    
    erosionFilter.verticalTexelSpacing = newValue;
    dilationFilter.verticalTexelSpacing = newValue;
}

- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    
    erosionFilter.horizontalTexelSpacing = newValue;
    dilationFilter.horizontalTexelSpacing = newValue;
}

@end
58 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Applies a color blend of two images (blend math defined in the .m, not shown here)
 */
@interface GPUImageColorBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorBurnBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Applies a color burn blend of two images
 
 result = 1 - (1 - base) / overlay, per channel (see the fragment shader in the .m).
 */
@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
{
}

@end
10 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorBurnBlendFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageColorBurnBlendFilter.h"

// Fragment shader implementing color burn: white - (white - base) / overlay.
// NOTE(review): an overlay channel of 0 causes a division by zero in the
// shader; this matches the code as written — confirm against upstream before
// changing.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     mediump vec4 whiteColor = vec4(1.0);
     gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     vec4 whiteColor = vec4(1.0);
     gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
 }
);
#endif

@implementation GPUImageColorBurnBlendFilter

// Initializes the two-input filter with the color burn fragment shader.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBurnBlendFragmentShaderString]))
    {
        return nil;
    }
    
    return self;
}

@end
52 |
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorDodgeBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Applies a color dodge blend of two images
 
 Both inputs' alpha channels participate in the blend (see the shader in the .m).
 */
@interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
{
}

@end
10 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorDodgeBlendFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageColorDodgeBlendFilter.h"

// Fragment shader implementing an alpha-aware color dodge blend. The two
// candidate results are computed unconditionally and selected per channel
// without branching, via step() producing a 0/1 mask consumed by mix().
// The inner clamp of overlay.a to [0.01, 1.0] guards the un-premultiply
// division; the outer clamp to 0.99 guards the (1.0 - overlayRGB) divisor.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING
(
 
 precision mediump float;
 
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     
     vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);
     vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);
     
     vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;
     vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);
     
     vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;
     
     vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);
     
     gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     
     vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);
     vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);
     
     vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;
     vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);
     
     vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;
     
     vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);
     
     gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);
 }
);
#endif

@implementation GPUImageColorDodgeBlendFilter

// Initializes the two-input filter with the color dodge fragment shader.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorDodgeBlendFragmentShaderString]))
    {
        return nil;
    }
    
    return self;
}

@end
75 |
76 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorInvertFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Inverts the colors of an image: each RGB channel becomes 1.0 - value,
 with alpha preserved (see the fragment shader in the .m).
 */
@interface GPUImageColorInvertFilter : GPUImageFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorInvertFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageColorInvertFilter.h"

// Fragment shader: inverts RGB (1.0 - color) while keeping the alpha channel (w).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
 }
);
#endif

@implementation GPUImageColorInvertFilter

// Initializes the filter with the color inversion fragment shader; no uniforms to configure.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString]))
    {
        return nil;
    }
    
    return self;
}

@end
46 |
47 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorMatrixFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Transforms the colors of an image by applying a matrix to them
 */
@interface GPUImageColorMatrixFilter : GPUImageFilter
{
    GLint colorMatrixUniform; // location of the `colorMatrix` shader uniform
    GLint intensityUniform;   // location of the `intensity` shader uniform
}

/** A 4x4 matrix used to transform each color in an image
 */
@property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;

/** The degree to which the new transformed color replaces the original color for each pixel
 */
@property(readwrite, nonatomic) CGFloat intensity;

@end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorMatrixFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageColorMatrixFilter.h"

// Fragment shader: multiplies each texel by the 4x4 colorMatrix, then
// linearly interpolates between the original and transformed color by
// `intensity` (1.0 = fully transformed, 0.0 = unchanged).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 uniform lowp mat4 colorMatrix;
 uniform lowp float intensity;
 
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 outputColor = textureColor * colorMatrix;
     
     gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 uniform mat4 colorMatrix;
 uniform float intensity;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 outputColor = textureColor * colorMatrix;
     
     gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);
 }
);
#endif

@implementation GPUImageColorMatrixFilter

@synthesize intensity = _intensity;
@synthesize colorMatrix = _colorMatrix;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorMatrixFragmentShaderString]))
    {
        return nil;
    }
    
    // Cache uniform locations, then set neutral defaults: full intensity
    // with the identity matrix leaves the image unchanged.
    colorMatrixUniform = [filterProgram uniformIndex:@"colorMatrix"];
    intensityUniform = [filterProgram uniformIndex:@"intensity"];
    
    self.intensity = 1.f;
    self.colorMatrix = (GPUMatrix4x4){
        {1.f, 0.f, 0.f, 0.f},
        {0.f, 1.f, 0.f, 0.f},
        {0.f, 0.f, 1.f, 0.f},
        {0.f, 0.f, 0.f, 1.f}
    };
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Each setter stores the value and immediately forwards it to its shader uniform.
- (void)setIntensity:(CGFloat)newIntensity;
{
    _intensity = newIntensity;
    
    [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
}

- (void)setColorMatrix:(GPUMatrix4x4)newColorMatrix;
{
    _colorMatrix = newColorMatrix;
    
    [self setMatrix4f:_colorMatrix forUniform:colorMatrixUniform program:filterProgram];
}

@end
88 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageColorPackingFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

// Packs color information from neighboring texels (implementation in the .m,
// not shown here).
@interface GPUImageColorPackingFilter : GPUImageFilter
{
    GLint texelWidthUniform, texelHeightUniform; // shader uniform locations
    
    CGFloat texelWidth, texelHeight; // size of one texel in texture coordinates
}

@end
11 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageContrastFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Adjusts the contrast of the image
 
 Each channel's distance from mid-gray (0.5) is scaled by the contrast value
 (see the fragment shader in the .m).
 */
@interface GPUImageContrastFilter : GPUImageFilter
{
    GLint contrastUniform; // location of the `contrast` shader uniform
}

/** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level
 */
@property(readwrite, nonatomic) CGFloat contrast;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageContrastFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageContrastFilter.h"

// Fragment shader: scales each channel's distance from mid-gray (0.5) by the
// `contrast` uniform, preserving alpha (w).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform lowp float contrast;
 
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float contrast;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
 }
);
#endif

@implementation GPUImageContrastFilter

@synthesize contrast = _contrast;

#pragma mark -
#pragma mark Initialization

- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageContrastFragmentShaderString]))
    {
        return nil;
    }
    
    // Cache the uniform location, then push the neutral default (1.0 = unchanged).
    contrastUniform = [filterProgram uniformIndex:@"contrast"];
    self.contrast = 1.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the new value and immediately forwards it to the shader uniform.
- (void)setContrast:(CGFloat)newValue;
{
    _contrast = newValue;
    
    [self setFloat:_contrast forUniform:contrastUniform program:filterProgram];
}

@end
66 |
67 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageCropFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Crops the image to a normalized sub-rectangle of the input.
 */
@interface GPUImageCropFilter : GPUImageFilter
{
    // Eight values — presumably the four (u,v) texture-coordinate corner
    // pairs of the crop rectangle; confirm in the .m (not shown here).
    GLfloat cropTextureCoordinates[8];
}

// The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image
@property(readwrite, nonatomic) CGRect cropRegion;

// Initialization and teardown
- (id)initWithCropRegion:(CGRect)newCropRegion;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageCrosshairGenerator.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Generates an image containing crosshairs at supplied positions (e.g. to
 overlay detected feature points).
 */
@interface GPUImageCrosshairGenerator : GPUImageFilter
{
    GLint crosshairWidthUniform, crosshairColorUniform; // shader uniform locations
}

// The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.
@property(readwrite, nonatomic) CGFloat crosshairWidth;

// The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

// Rendering
// crosshairCoordinates presumably holds numberOfCrosshairs (x, y) pairs —
// confirm the layout against the .m (not shown here).
- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;

@end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageCrosshatchFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Converts the image to a black-and-white crosshatch pattern (shader in the .m, not shown here).
 */
@interface GPUImageCrosshatchFilter : GPUImageFilter
{
    GLint crossHatchSpacingUniform, lineWidthUniform; // shader uniform locations
}
// The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
@property(readwrite, nonatomic) CGFloat crossHatchSpacing;

// A relative width for the crosshatch lines. The default is 0.003.
@property(readwrite, nonatomic) CGFloat lineWidth;

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDarkenBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Blends two images by taking the per-channel minimum, weighted by each
 input's alpha (see the fragment shader in the .m).
 */
@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDarkenBlendFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageDarkenBlendFilter.h"

// Fragment shader implementing a darken blend: the per-channel minimum of the
// two (alpha-weighted) inputs, plus each input's contribution where the other
// has no coverage. Output alpha is forced to 1.0.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
     
     gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers).
NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
     
     gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);
 }
);
#endif

@implementation GPUImageDarkenBlendFilter

// Initializes the two-input filter with the darken blend fragment shader.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDarkenBlendFragmentShaderString]))
    {
        return nil;
    }
    
    return self;
}

@end
52 |
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDifferenceBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Blends two images by taking the per-channel absolute difference,
 keeping the first image's alpha (see the fragment shader in the .m).
 */
@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDifferenceBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageDifferenceBlendFilter.h"
2 |
// Fragment shader: per-channel absolute difference of the two inputs; the first
// texture's alpha is passed through unchanged.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
 }
);
#else
// Desktop OpenGL variant: identical logic, without the GLSL ES precision qualifiers.
NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
 }
);
#endif
36 |
@implementation GPUImageDifferenceBlendFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Creates the filter with the difference-blend fragment shader defined above.
- (id)init
{
    self = [super initWithFragmentShaderFromString:kGPUImageDifferenceBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
50 |
51 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDilationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
// For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
// This extends out bright features, and is most commonly used with black-and-white thresholded images.

// Precomputed vertex shaders, one per supported dilation radius (1-4).
extern NSString *const kGPUImageDilationRadiusOneVertexShaderString;
extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;
extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;
extern NSString *const kGPUImageDilationRadiusFourVertexShaderString;

@interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)dilationRadius;

@end
17 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/** Non-maximum suppression along a direction, with hysteresis thresholding.
    NOTE(review): the shader lives in the .m and is not visible here; the exact
    direction semantics should be confirmed against it.
 */
@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
{
    GLint texelWidthUniform, texelHeightUniform;
    GLint upperThresholdUniform, lowerThresholdUniform;

    // YES once the caller explicitly sets texelWidth/texelHeight, so auto-sizing is skipped.
    BOOL hasOverriddenImageSizeFactor;
}

// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;

// These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)
@property(readwrite, nonatomic) CGFloat upperThreshold;
@property(readwrite, nonatomic) CGFloat lowerThreshold;

@end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDissolveBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
/** Cross-dissolves between two inputs using a linear mix controlled by the mix property. */
@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
{
    GLint mixUniform; // location of the "mixturePercent" shader uniform
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
@property(readwrite, nonatomic) CGFloat mix;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDissolveBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageDissolveBlendFilter.h"
2 |
// Fragment shader: linear interpolation between the two inputs by mixturePercent
// (0.0 = only image 1, 1.0 = only image 2).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform lowp float mixturePercent;

 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
 }
);
#else
// Desktop OpenGL variant: identical logic, without the GLSL ES precision qualifiers.
NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform float mixturePercent;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
 }
);
#endif
40 |
@implementation GPUImageDissolveBlendFilter

@synthesize mix = _mix;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter with the dissolve shader, caches the mixturePercent uniform
/// location, and starts at an even 0.5 mix.
- (id)init
{
    self = [super initWithFragmentShaderFromString:kGPUImageDissolveBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
    self.mix = 0.5;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new mix value and forwards it to the shader uniform.
- (void)setMix:(CGFloat)mixValue
{
    _mix = mixValue;
    [self setFloat:mixValue forUniform:mixUniform program:filterProgram];
}

@end
72 |
73 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageDivideBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
/** Two-input divide-blend filter; the fragment shader lives in the corresponding .m (not visible here). */
@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageEmbossFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3ConvolutionFilter.h"
2 |
/** Emboss effect implemented as a 3x3 convolution whose kernel is scaled by intensity. */
@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter

// The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat intensity;

@end
9 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageEmbossFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageEmbossFilter.h"
2 |
@implementation GPUImageEmbossFilter

@synthesize intensity = _intensity;

#pragma mark -
#pragma mark Initialization and teardown

/// Starts with the normal embossing strength of 1.0.
- (id)init
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    self.intensity = 1.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Rebuilds the 3x3 emboss convolution kernel, scaling the diagonal gradient
/// weights by the new intensity, and hands it to the superclass.
- (void)setIntensity:(CGFloat)intensityValue
{
    _intensity = intensityValue;

    GPUMatrix3x3 kernel;
    kernel.one.one = -2.0 * intensityValue;
    kernel.one.two = -intensityValue;
    kernel.one.three = 0.0f;

    kernel.two.one = -intensityValue;
    kernel.two.two = 1.0;
    kernel.two.three = intensityValue;

    kernel.three.one = 0.0f;
    kernel.three.two = intensityValue;
    kernel.three.three = 2.0 * intensityValue;

    self.convolutionKernel = kernel;
}

@end
50 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageErosionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
// For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
// This extends out dark features, and is most commonly used with black-and-white thresholded images.

@interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)erosionRadius;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageExclusionBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
/** Two-input exclusion blend; implements the standard compositing formula
    Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)
    (see the fragment shader in the .m).
 */
@interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageExclusionBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageExclusionBlendFilter.h"
2 |
// Fragment shader for the exclusion blend mode; the compositing formula is
// commented inside each branch.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);

     //     Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)

     gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);
 }
);
#else
// Desktop OpenGL variant: identical logic, without the GLSL ES precision qualifiers.
NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);

     //     Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)

     gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);
 }
);
#endif
42 |
@implementation GPUImageExclusionBlendFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Creates the filter with the exclusion-blend fragment shader defined above.
- (id)init
{
    self = [super initWithFragmentShaderFromString:kGPUImageExclusionBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
56 |
57 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageExposureFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/** Adjusts image exposure: output RGB is scaled by 2^exposure (see shader in the .m). */
@interface GPUImageExposureFilter : GPUImageFilter
{
    GLint exposureUniform; // location of the "exposure" shader uniform
}

// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat exposure;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageExposureFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageExposureFilter.h"
2 |
// Fragment shader: scales the RGB channels by 2^exposure, leaving alpha untouched.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform highp float exposure;

 void main()
 {
     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
 }
);
#else
// Desktop OpenGL variant: identical logic, without the GLSL ES precision qualifiers.
NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform float exposure;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
 }
);
#endif
34 |
@implementation GPUImageExposureFilter

@synthesize exposure = _exposure;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter with the exposure shader, caches the uniform location, and
/// starts at 0.0 (no adjustment).
- (id)init
{
    self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    exposureUniform = [filterProgram uniformIndex:@"exposure"];
    self.exposure = 0.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new exposure and forwards it to the shader uniform.
- (void)setExposure:(CGFloat)exposureValue
{
    _exposure = exposureValue;
    [self setFloat:exposureValue forUniform:exposureUniform program:filterProgram];
}

@end
66 |
67 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageFASTCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
/** Placeholder for a FAST corner detector.
    NOTE(review): the .m for this class is empty in this version — only the plan
    below exists; do not expect this filter to produce output.
 */
@interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
{
    // Generate a lookup texture based on the bit patterns

    // Step 1: convert to monochrome if necessary
    // Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components
    // Step 3: do non-maximum suppression of close corner points
}
@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageFASTCornerDetectionFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageFASTCornerDetectionFilter.h"
2 |
// NOTE(review): intentionally empty — the FAST detector is unimplemented in this
// version of GPUImage; the header only sketches the planned passes.
@implementation GPUImageFASTCornerDetectionFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageFalseColorFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/** Maps image luminance onto a gradient between two configurable colors. */
@interface GPUImageFalseColorFilter : GPUImageFilter
{
    GLint firstColorUniform, secondColorUniform;
}

// The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0).
@property(readwrite, nonatomic) GPUVector4 firstColor;
@property(readwrite, nonatomic) GPUVector4 secondColor;

// Convenience setters for the RGB components of each endpoint color.
- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
16 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageFilterGroup.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 | #import "GPUImageFilter.h"
3 |
/** Container that presents a chain of filters as a single GPUImageOutput. */
@interface GPUImageFilterGroup : GPUImageOutput
{
    NSMutableArray *filters; // all filters added via -addFilter:, in insertion order
    BOOL isEndProcessing;
}

// The filter whose output is the output of the whole group.
@property(readwrite, nonatomic, strong) GPUImageOutput *terminalFilter;
// The filters that receive the group's input directly.
@property(readwrite, nonatomic, strong) NSArray *initialFilters;
@property(readwrite, nonatomic, strong) GPUImageOutput *inputFilterToIgnoreForUpdates;

// Filter management
- (void)addFilter:(GPUImageOutput *)newFilter;
- (GPUImageOutput *)filterAtIndex:(NSUInteger)filterIndex;
- (NSUInteger)filterCount;

@end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageFilterPipeline.h:
--------------------------------------------------------------------------------
#import <Foundation/Foundation.h>
#import "GPUImageFilter.h"

/** Chains an ordered list of filters between one input and one output.
    NOTE(review): angle-bracketed tokens were stripped from this copy by an
    extraction pass (the bare `#import` and the bare `id ` casts were invalid);
    they are restored here to match the upstream GPUImage declaration, which
    types the output as id<GPUImageInput>.
 */
@interface GPUImageFilterPipeline : NSObject
{
    NSString *stringValue;
}

// The ordered filters making up the pipeline.
@property (strong) NSMutableArray *filters;

// Source of frames for the first filter, and the sink fed by the last one.
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;

// Initialization from an explicit filter array, a configuration dictionary, or a configuration file URL.
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;

// Pipeline mutation.
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;

// Snapshots of the current output of the pipeline's terminal filter.
// The newCGImage... variants follow the Create rule: the caller must release the returned CGImageRef.
- (UIImage *) currentFilteredFrame;
- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
- (CGImageRef) newCGImageFromCurrentFilteredFrame;
- (CGImageRef) newCGImageFromCurrentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;

@end
31 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGammaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/** Applies a gamma curve: output RGB = input RGB ^ gamma (see shader in the .m). */
@interface GPUImageGammaFilter : GPUImageFilter
{
    GLint gammaUniform; // location of the "gamma" shader uniform
}

// Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat gamma;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGammaFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageGammaFilter.h"
2 |
// Fragment shader: raises each RGB channel to the gamma power, leaving alpha untouched.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform lowp float gamma;

 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

     gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
 }
);
#else
// Desktop OpenGL variant: identical logic, without the GLSL ES precision qualifiers.
NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform float gamma;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

     gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
 }
);
#endif
34 |
@implementation GPUImageGammaFilter

@synthesize gamma = _gamma;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter with the gamma shader, caches the uniform location, and
/// starts at 1.0 (identity curve).
- (id)init
{
    self = [super initWithFragmentShaderFromString:kGPUImageGammaFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    gammaUniform = [filterProgram uniformIndex:@"gamma"];
    self.gamma = 1.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new gamma and forwards it to the shader uniform.
- (void)setGamma:(CGFloat)gammaValue
{
    _gamma = gammaValue;
    [self setFloat:gammaValue forUniform:gammaUniform program:filterProgram];
}

@end
66 |
67 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGaussianBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
/** A Gaussian blur filter
    Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
 */

@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
{
    // YES when the radius is being derived from the image dimensions (the fraction properties below).
    BOOL shouldResizeBlurRadiusWithImageSize;
    CGFloat _blurRadiusInPixels;
}

/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.
 */
@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;

/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/** Setting these properties will allow the blur radius to scale with the size of the image
 */
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;

/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
@property(readwrite, nonatomic) NSUInteger blurPasses;

// Shader generators: "standard" emits one texture read per tap; "optimized" exploits
// linear texture sampling to halve the number of reads for the same radius/sigma.
+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;

// Swaps the currently compiled shader program for a newly generated pair.
- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;

@end
37 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGaussianBlurPositionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
/** A more generalized 9x9 Gaussian blur filter, restricted to a circular region
    around blurCenter.
 */
@interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter
{
    GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;
}

/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;

/** Center for the blur, defaults to 0.5, 0.5
 */
@property (readwrite, nonatomic) CGPoint blurCenter;

/** Radius for the blur, defaults to 1.0
 */
@property (readwrite, nonatomic) CGFloat blurRadius;

@end
23 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGaussianSelectiveBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
@class GPUImageGaussianBlurFilter;

/** A Gaussian blur that preserves focus within a circular region
 */
@interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter;      // produces the fully blurred frame
    GPUImageFilter *selectiveFocusFilter;        // composites sharp circle over the blur
    BOOL hasOverriddenAspectRatio;               // YES once aspectRatio is set explicitly
}

/** The radius of the circular area being excluded from the blur
 */
@property (readwrite, nonatomic) CGFloat excludeCircleRadius;
/** The center of the circular area being excluded from the blur
 */
@property (readwrite, nonatomic) CGPoint excludeCirclePoint;
/** The size of the area between the blurred portion and the clear circle
 */
@property (readwrite, nonatomic) CGFloat excludeBlurSize;
/** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
/** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
 */
@property (readwrite, nonatomic) CGFloat aspectRatio;

@end
31 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGlassSphereFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSphereRefractionFilter.h"
2 |
/** Glass-sphere variant of the sphere refraction effect; differs from its
    superclass only in the shader used (defined in the .m, not visible here). */
@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageGrayscaleFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Luminance-conversion fragment shader, exported for reuse by other filters.
extern NSString *const kGPUImageLuminanceFragmentShaderString;

/** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
 */
@interface GPUImageGrayscaleFilter : GPUImageFilter

@end
10 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHSBFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorMatrixFilter.h"
2 |
/** Hue/saturation/brightness adjustments layered onto the underlying color matrix.
    Adjustments accumulate into the inherited color matrix until -reset is called. */
@interface GPUImageHSBFilter : GPUImageColorMatrixFilter

/** Reset the filter to have no transformations.
 */
- (void)reset;

/** Add a hue rotation to the filter.
    The hue rotation is in the range [-360, 360] with 0 being no-change.
    Note that this adjustment is additive, so use the reset method if you need to.
 */
- (void)rotateHue:(float)h;

/** Add a saturation adjustment to the filter.
    The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
    Note that this adjustment is additive, so use the reset method if you need to.
 */
- (void)adjustSaturation:(float)s;

/** Add a brightness adjustment to the filter.
    The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
    Note that this adjustment is additive, so use the reset method if you need to.
 */
- (void)adjustBrightness:(float)b;

@end
28 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHalftoneFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImagePixellateFilter.h"
2 |
/** Halftone (dot-pattern) effect built on the pixellate filter; only the shader
    differs from the superclass (defined in the .m, not visible here). */
@interface GPUImageHalftoneFilter : GPUImagePixellateFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHardLightBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
/** Two-input hard-light blend; the fragment shader lives in the corresponding .m (not visible here). */
@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHarrisCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
@class GPUImageGaussianBlurFilter;
@class GPUImageXYDerivativeFilter;
@class GPUImageGrayscaleFilter;
// NOTE(review): duplicate forward declaration below (GPUImageGaussianBlurFilter is
// already declared above); harmless, kept to avoid touching code in a doc pass.
@class GPUImageGaussianBlurFilter;
@class GPUImageThresholdedNonMaximumSuppressionFilter;
@class GPUImageColorPackingFilter;

//#define DEBUGFEATUREDETECTION

/** Harris corner detector

 First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter)

 Second pass: blur the derivative (GPUImageGaussianBlurFilter)

 Third pass: apply the Harris corner detection calculation

 This is the Harris corner detector, as described in
 C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
 */
@interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup
{
    GPUImageXYDerivativeFilter *derivativeFilter;
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageFilter *harrisCornerDetectionFilter;
    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    GPUImageColorPackingFilter *colorPackingFilter;
    GLfloat *cornersArray;   // C buffer reused between frames for detected corner coordinates
    GLubyte *rawImagePixels; // C buffer reused for reading back the processed frame
}

/** The radius of the underlying Gaussian blur. The default is 2.0.
 */
@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;

// This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0.
@property(readwrite, nonatomic) CGFloat sensitivity;

// A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20.
@property(readwrite, nonatomic) CGFloat threshold;

// This block is called on the detection of new corner points, usually on every processed frame. A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame
@property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime);

// These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector
@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;

// Initialization and teardown
- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;

@end
54 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHazeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/*
 * The haze filter can be used to add or remove haze (similar to a UV filter)
 *
 * @author Alaric Cole
 * @creationDate 03/10/12
 *
 */

/** The haze filter can be used to add or remove haze

 This is similar to a UV filter
 */
@interface GPUImageHazeFilter : GPUImageFilter
{
    GLint distanceUniform; // maps to the "hazeDistance" uniform in the shader
    GLint slopeUniform;    // maps to the "slope" uniform in the shader
}

/** Strength of the color applied. Default 0. Values between -.3 and .3 are best
    NOTE(review): the .m actually defaults this to 0.2 in -init; the "Default 0"
    above is inherited from upstream and disagrees — confirm intended default.
 */
@property(readwrite, nonatomic) CGFloat distance;

/** Amount of color change. Default 0. Values between -.3 and .3 are best
 */
@property(readwrite, nonatomic) CGFloat slope;

@end
30 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHazeFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageHazeFilter.h"
2 |
// Fragment shader: subtracts a vertically graded amount (y * slope + hazeDistance)
// of a fixed white haze color from each pixel, then renormalizes.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform lowp float hazeDistance;
 uniform highp float slope;

 void main()
 {
     //todo reconsider precision modifiers

     highp vec4 color = vec4(1.0);//todo reimplement as a parameter

     highp float d = textureCoordinate.y * slope + hazeDistance;

     highp vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply

     c = (c - d * color) / (1.0 -d);

     gl_FragColor = c; //consider using premultiply(c);
 }
);
#else
// Desktop OpenGL variant: identical logic, without the GLSL ES precision qualifiers.
NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform float hazeDistance;
 uniform float slope;

 void main()
 {
     //todo reconsider precision modifiers

     vec4 color = vec4(1.0);//todo reimplement as a parameter

     float d = textureCoordinate.y * slope + hazeDistance;

     vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply

     c = (c - d * color) / (1.0 -d);

     gl_FragColor = c; //consider using premultiply(c);
 }
);
#endif
52 |
53 |
54 |
55 |
@implementation GPUImageHazeFilter

@synthesize distance = _distance;
@synthesize slope = _slope;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter from the haze fragment shader, caches the uniform
/// locations, and applies the default distance (0.2) and slope (0.0).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageHazeFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    distanceUniform = [filterProgram uniformIndex:@"hazeDistance"];
    slopeUniform = [filterProgram uniformIndex:@"slope"];

    self.distance = 0.2;
    self.slope = 0.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new haze distance and pushes it to the "hazeDistance" uniform.
- (void)setDistance:(CGFloat)aDistance;
{
    _distance = aDistance;
    [self setFloat:_distance forUniform:distanceUniform program:filterProgram];
}

/// Stores the new slope and pushes it to the "slope" uniform.
- (void)setSlope:(CGFloat)aSlope;
{
    _slope = aSlope;
    [self setFloat:_slope forUniform:slopeUniform program:filterProgram];
}

@end
96 |
97 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHighPassFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"
#import "GPUImageLowPassFilter.h"
#import "GPUImageDifferenceBlendFilter.h"

/** High pass over time: subtracts a low-passed (temporally accumulated)
 version of the video from the current frame, leaving only what changed
 between frames.
 */
@interface GPUImageHighPassFilter : GPUImageFilterGroup
{
    GPUImageLowPassFilter *lowPassFilter;                  // accumulates previous frames
    GPUImageDifferenceBlendFilter *differenceBlendFilter;  // current frame minus the accumulation
}

// This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat filterStrength;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHighPassFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageHighPassFilter.h"
2 |
@implementation GPUImageHighPassFilter

// Both accessors are implemented by hand below and forward to the internal
// low pass filter, so no backing ivar is needed. @dynamic avoids the unused
// ivar that @synthesize would otherwise generate here.
@dynamic filterStrength;

/// Builds the internal chain: low pass the stream, then difference-blend the
/// accumulated result against the current frame to extract the high pass.
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    // Start with a low pass filter to define the component to be removed
    lowPassFilter = [[GPUImageLowPassFilter alloc] init];
    [self addFilter:lowPassFilter];

    // Take the difference of the current frame from the low pass filtered result to get the high pass
    differenceBlendFilter = [[GPUImageDifferenceBlendFilter alloc] init];
    [self addFilter:differenceBlendFilter];

    // Texture location 0 needs to be the original image for the difference blend
    [lowPassFilter addTarget:differenceBlendFilter atTextureLocation:1];

    self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, differenceBlendFilter, nil];
    self.terminalFilter = differenceBlendFilter;

    self.filterStrength = 0.5;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Forwards the strength (0.0-1.0) to the internal low pass filter.
- (void)setFilterStrength:(CGFloat)newValue;
{
    lowPassFilter.filterStrength = newValue;
}

- (CGFloat)filterStrength;
{
    return lowPassFilter.filterStrength;
}

@end
47 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHighlightShadowFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Adjusts the shadows and highlights of an image independently. */
@interface GPUImageHighlightShadowFilter : GPUImageFilter
{
    GLint shadowsUniform, highlightsUniform;  // fragment shader uniform locations
}

/**
 * 0 - 1, increase to lighten shadows.
 * @default 0
 */
@property(readwrite, nonatomic) CGFloat shadows;

/**
 * 0 - 1, decrease to darken highlights.
 * @default 1
 */
@property(readwrite, nonatomic) CGFloat highlights;

@end
21 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHistogramFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Which color channel(s) the histogram is computed over.
 NS_ENUM gives the enum an explicit underlying type and enables
 switch-exhaustiveness warnings; the constant names and values are unchanged,
 so existing callers are unaffected.
 */
typedef NS_ENUM(NSUInteger, GPUImageHistogramType) {
    kGPUImageHistogramRed,
    kGPUImageHistogramGreen,
    kGPUImageHistogramBlue,
    kGPUImageHistogramRGB,
    kGPUImageHistogramLuminance,
};

@interface GPUImageHistogramFilter : GPUImageFilter
{
    GPUImageHistogramType histogramType;      // channel selection fixed at init time

    GLubyte *vertexSamplingCoordinates;       // C buffer of sample points; see -generatePointCoordinates

    GLProgram *secondFilterProgram, *thirdFilterProgram;
    GLint secondFilterPositionAttribute, thirdFilterPositionAttribute;
}

// Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.
@property(readwrite, nonatomic) NSUInteger downsamplingFactor;

// Initialization and teardown
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
- (void)initializeSecondaryAttributes;

// Rendering
- (void)generatePointCoordinates;

@end
26 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHistogramGenerator.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Renders a histogram (as produced by GPUImageHistogramFilter's output
 texture) into a visible graph over a configurable background color.
 */
@interface GPUImageHistogramGenerator : GPUImageFilter
{
    GLint backgroundColorUniform;  // location of the "backgroundColor" uniform
}

@end
9 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHistogramGenerator.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageHistogramGenerator.h"
2 |
// Vertex shader: samples the histogram texture along a single row (y fixed at
// 0.5) and passes the flipped y coordinate down as the bar "height" to test.
NSString *const kGPUImageHistogramGeneratorVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 varying vec2 textureCoordinate;
 varying float height;

 void main()
 {
     gl_Position = position;
     textureCoordinate = vec2(inputTextureCoordinate.x, 0.5);
     height = 1.0 - inputTextureCoordinate.y;
 }
);

// Fragment shader: for each channel, step() yields 1.0 where the channel's
// histogram value reaches the current height; pixels covered by no channel
// fall through to backgroundColor via the mix().
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: ES GLSL requires precision qualifiers.
NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp float height;

 uniform sampler2D inputImageTexture;
 uniform lowp vec4 backgroundColor;

 void main()
 {
     lowp vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;
     lowp vec4 heightTest = vec4(step(height, colorChannels), 1.0);
     gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);
 }
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying float height;

 uniform sampler2D inputImageTexture;
 uniform vec4 backgroundColor;

 void main()
 {
     vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;
     vec4 heightTest = vec4(step(height, colorChannels), 1.0);
     gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);
 }
);
#endif
52 |
@implementation GPUImageHistogramGenerator

#pragma mark -
#pragma mark Initialization and teardown

/// Sets up the generator with its custom vertex/fragment shader pair and a
/// fully transparent default background color.
- (id)init;
{
    self = [super initWithVertexShaderFromString:kGPUImageHistogramGeneratorVertexShaderString fragmentShaderFromString:kGPUImageHistogramGeneratorFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    backgroundColorUniform = [filterProgram uniformIndex:@"backgroundColor"];
    [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Uploads the background color (normalized RGBA components) to the
/// "backgroundColor" shader uniform.
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
{
    GPUVector4 newBackgroundColor = {redComponent, greenComponent, blueComponent, alphaComponent};
    [self setVec4:newBackgroundColor forUniform:backgroundColorUniform program:filterProgram];
}

@end
88 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHueBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

// Two-input blend filter; shader lives in the .m (not visible here).
// Presumably applies the second image's hue to the first — confirm in the .m.
@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageHueFilter.h:
--------------------------------------------------------------------------------

#import "GPUImageFilter.h"

/** Adjusts the hue of the incoming image. */
@interface GPUImageHueFilter : GPUImageFilter
{
    GLint hueAdjustUniform;  // fragment shader uniform location for the hue adjustment

}
// Hue adjustment to apply. Shader not visible here — units (degrees vs radians)
// and default to be confirmed in the .m.
@property (nonatomic, readwrite) CGFloat hue;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageJFAVoronoiFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Jump Flooding Algorithm (JFA) Voronoi diagram generator. Runs multiple
 shader passes (numPasses), ping-ponging through a second framebuffer.
 */
@interface GPUImageJFAVoronoiFilter : GPUImageFilter
{
    GLuint secondFilterOutputTexture;  // target texture for the intermediate passes
    GLuint secondFilterFramebuffer;    // framebuffer backing the intermediate passes


    GLint sampleStepUniform;   // per-pass jump distance uniform
    GLint sizeUniform;         // image size uniform
    NSUInteger numPasses;      // number of flooding passes

}

// Size of the processed image in pixels; drives the pass count and uniforms.
@property (nonatomic, readwrite) CGSize sizeInPixels;

@end
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageKuwaharaFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Kuwahara image abstraction, drawn from the work of Kyprianidis, et. al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
 */
@interface GPUImageKuwaharaFilter : GPUImageFilter
{
    GLint radiusUniform;  // fragment shader uniform location for the sampling radius
}

/// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.
@property(readwrite, nonatomic) GLuint radius;

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageKuwaharaRadius3Filter.h:
--------------------------------------------------------------------------------
//
//  GPUImageKuwaharaRadius3Filter.h
//
//  Variant of the Kuwahara filter specialized for a fixed radius of 3
//  (per the class name); shader lives in the .m, not visible here.

#import "GPUImageFilter.h"

@interface GPUImageKuwaharaRadius3Filter : GPUImageFilter

@end
9 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLanczosResamplingFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoPassTextureSamplingFilter.h"

/** Two-pass Lanczos resampling (separate horizontal and vertical passes). */
@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter

// Size of the image before resampling; used to derive texel spacing.
@property(readwrite, nonatomic) CGSize originalImageSize;

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLaplacianFilter.h:
--------------------------------------------------------------------------------
#import "GPUImage3x3ConvolutionFilter.h"

// 3x3 convolution specialization; presumably installs a Laplacian edge
// kernel — confirm the kernel values in the .m (not visible here).
@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLevelsFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/**
 * Levels like Photoshop.
 *
 * The min, max, minOut and maxOut parameters are floats in the range [0, 1].
 * If you have parameters from Photoshop in the range [0, 255] you must first
 * convert them to be [0, 1].
 * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop.
 *
 * If you want to apply levels to RGB as well as individual channels you need to use
 * this filter twice - first for the individual channels and then for all channels.
 */
@interface GPUImageLevelsFilter : GPUImageFilter
{
    // Shader uniform locations for the five levels parameters.
    GLint minUniform;
    GLint midUniform;
    GLint maxUniform;
    GLint minOutputUniform;
    GLint maxOutputUniform;

    // Per-channel (R, G, B) values for each parameter, uploaded as vec3s.
    GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector;
}

/** Set levels for the red channel */
- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

// Convenience overload; output range parameters are omitted.
- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

/** Set levels for the green channel */
- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

// Convenience overload; output range parameters are omitted.
- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

/** Set levels for the blue channel */
- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

// Convenience overload; output range parameters are omitted.
- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

/** Set levels for all channels at once */
- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

@end

46 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLightenBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/// Blends two images by taking the maximum value of each color component between the images
@interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
{
}

@end
9 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLightenBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLightenBlendFilter.h"
2 |
// Lighten blend shader: per-component max() of the two input textures.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: ES GLSL requires precision qualifiers.
NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = max(textureColor, textureColor2);
 }
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = max(textureColor, textureColor2);
 }
);
#endif
38 |
@implementation GPUImageLightenBlendFilter

/// Configures the two-input filter with the lighten (per-component max)
/// blend shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLightenBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
52 |
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLineGenerator.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Draws lines (given as slope/intercept pairs) over a transparent output,
 e.g. for visualizing Hough transform line detection results.
 */
@interface GPUImageLineGenerator : GPUImageFilter
{
    GLint lineWidthUniform, lineColorUniform;  // shader uniform locations
    GLfloat *lineCoordinates;                  // scratch vertex buffer for line endpoints
}

// The width of the displayed lines, in pixels. The default is 1.
@property(readwrite, nonatomic) CGFloat lineWidth;

// The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

// Rendering
- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;

@end
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLinearBurnBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/// Linear burn blend: clamp(base + overlay - 1) per RGB component, keeping
/// the first image's alpha (see the shader in the .m).
@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLinearBurnBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLinearBurnBlendFilter.h"
2 |
// Linear burn shader: rgb = clamp(base.rgb + overlay.rgb - 1, 0, 1);
// alpha is taken from the first input.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: ES GLSL requires precision qualifiers.
NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
 }
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
 }
);
#endif
38 |
@implementation GPUImageLinearBurnBlendFilter

/// Configures the two-input filter with the linear burn blend shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLinearBurnBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
51 | @end
52 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLocalBinaryPatternFilter.h:
--------------------------------------------------------------------------------
#import "GPUImage3x3TextureSamplingFilter.h"

// 3x3 neighborhood filter; presumably encodes each pixel's local binary
// pattern (per the class name) — confirm the shader in the .m.
@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLookupFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/** Remaps colors through a 512x512 lookup table supplied as the second input. */
@interface GPUImageLookupFilter : GPUImageTwoInputFilter

// How To Use:
// 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.
//    For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work).
//    If you need a more complex filter you can create as many lookup tables as required.
//    E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png
// 2) Use your new lookup.png file as a second input for GPUImageLookupFilter.

// See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.

// Additional Info:
// Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:
//for (int by = 0; by < 8; by++) {
//    for (int bx = 0; bx < 8; bx++) {
//        for (int g = 0; g < 64; g++) {
//            for (int r = 0; r < 64; r++) {
//                image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),
//                                                              (int)(g * 255.0 / 63.0 + 0.5),
//                                                              (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));
//            }
//        }
//    }
//}

@end
29 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLowPassFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"
#import "GPUImageBuffer.h"
#import "GPUImageDissolveBlendFilter.h"

/** Temporal low pass: dissolve-blends each incoming frame with a buffered
 copy of the previous blended output, smoothing motion over time.
 */
@interface GPUImageLowPassFilter : GPUImageFilterGroup
{
    GPUImageBuffer *bufferFilter;                    // holds the previous blended frame
    GPUImageDissolveBlendFilter *dissolveBlendFilter; // mixes current frame with the buffer
}

// This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat filterStrength;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLowPassFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLowPassFilter.h"
2 |
@implementation GPUImageLowPassFilter

// Both accessors are implemented by hand below and forward to the dissolve
// blend's mix, so no backing ivar is needed. @dynamic avoids the unused ivar
// that @synthesize would otherwise generate here.
@dynamic filterStrength;

/// Builds the feedback chain: the dissolve blend's output is buffered and fed
/// back in as its own second input on the next frame.
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    // Take in the frame and blend it with the previous one
    dissolveBlendFilter = [[GPUImageDissolveBlendFilter alloc] init];
    [self addFilter:dissolveBlendFilter];

    // Buffer the result to be fed back into the blend
    bufferFilter = [[GPUImageBuffer alloc] init];
    [self addFilter:bufferFilter];

    // Texture location 0 needs to be the original image for the dissolve blend
    [bufferFilter addTarget:dissolveBlendFilter atTextureLocation:1];
    [dissolveBlendFilter addTarget:bufferFilter];

    // The feedback input would otherwise make the blend wait for a second
    // frame that never arrives independently.
    [dissolveBlendFilter disableSecondFrameCheck];

    self.initialFilters = [NSArray arrayWithObject:dissolveBlendFilter];
    self.terminalFilter = dissolveBlendFilter;

    self.filterStrength = 0.5;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Forwards the strength (0.0-1.0) to the dissolve blend's mix factor.
- (void)setFilterStrength:(CGFloat)newValue;
{
    dissolveBlendFilter.mix = newValue;
}

- (CGFloat)filterStrength;
{
    return dissolveBlendFilter.mix;
}

@end
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLuminanceRangeFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Compresses the luminance range of an image: dark pixels are lifted and
 bright pixels are lowered toward the midpoint (see the shader in the .m).
 */
@interface GPUImageLuminanceRangeFilter : GPUImageFilter
{
    GLint rangeReductionUniform;  // location of the "rangeReduction" shader uniform
}

/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
 */
@property(readwrite, nonatomic) CGFloat rangeReductionFactor;

@end
13 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLuminanceRangeFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLuminanceRangeFilter.h"
2 |
// Luminance range shader: shifts every channel toward mid-gray by an amount
// proportional to the pixel's distance from 0.5 luminance, scaled by
// rangeReduction. Alpha is passed through unchanged.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: ES GLSL requires precision qualifiers.
NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform lowp float rangeReduction;

 // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
 const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);

 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     mediump float luminance = dot(textureColor.rgb, luminanceWeighting);
     mediump float luminanceRatio = ((0.5 - luminance) * rangeReduction);

     gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);
 }
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform float rangeReduction;

 // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
 const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     float luminance = dot(textureColor.rgb, luminanceWeighting);
     float luminanceRatio = ((0.5 - luminance) * rangeReduction);

     gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);
 }
);
#endif
44 |
@implementation GPUImageLuminanceRangeFilter

@synthesize rangeReductionFactor = _rangeReductionFactor;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter from the luminance range shader, caches the uniform
/// location, and applies the default reduction factor of 0.6.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLuminanceRangeFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    rangeReductionUniform = [filterProgram uniformIndex:@"rangeReduction"];
    self.rangeReductionFactor = 0.6;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new factor and pushes it to the "rangeReduction" uniform.
- (void)setRangeReductionFactor:(CGFloat)aFactor;
{
    _rangeReductionFactor = aFactor;
    [self setFloat:_rangeReductionFactor forUniform:rangeReductionUniform program:filterProgram];
}

@end
77 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLuminanceThresholdFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Pixels with a luminance above the threshold will appear white, and those below will be black
 */
@interface GPUImageLuminanceThresholdFilter : GPUImageFilter
{
    GLint thresholdUniform;  // location of the "threshold" shader uniform
}

/** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default
 */
@property(readwrite, nonatomic) CGFloat threshold;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLuminanceThresholdFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLuminanceThresholdFilter.h"
2 |
// Threshold shader: computes Rec. 709-style luminance (weights W) and uses
// step() to output pure white or black, keeping the input's alpha.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: ES GLSL requires precision qualifiers.
NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform highp float threshold;

 const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);

 void main()
 {
     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     highp float luminance = dot(textureColor.rgb, W);
     highp float thresholdResult = step(threshold, luminance);

     gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
 }
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform float threshold;

 const vec3 W = vec3(0.2125, 0.7154, 0.0721);

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     float luminance = dot(textureColor.rgb, W);
     float thresholdResult = step(threshold, luminance);

     gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
 }
);
#endif
42 |
@implementation GPUImageLuminanceThresholdFilter

@synthesize threshold = _threshold;

#pragma mark -
#pragma mark Initialization

/// Builds the filter from the threshold shader, caches the uniform location,
/// and applies the default threshold of 0.5.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLuminanceThresholdFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    thresholdUniform = [filterProgram uniformIndex:@"threshold"];
    self.threshold = 0.5;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new threshold and pushes it to the "threshold" uniform.
- (void)setThreshold:(CGFloat)aThreshold;
{
    _threshold = aThreshold;
    [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];
}

@end
74 |
75 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLuminosity.h:
--------------------------------------------------------------------------------
#import "GPUImageAverageColor.h"

/** Computes the average luminosity of a frame via GPU reduction, reporting
 the result through luminosityProcessingFinishedBlock.
 */
@interface GPUImageLuminosity : GPUImageAverageColor
{
    // Secondary reduction-stage program and its attribute/uniform locations.
    GLProgram *secondFilterProgram;
    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
    GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;
}

// This block is called on the completion of color averaging for a frame
@property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);

- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
- (void)initializeSecondaryAttributes;

@end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageLuminosityBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
// Two-input luminosity blend; the fragment shader lives in the matching .m file.
@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMaskFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
// Masks the first input image using the second input as the mask: the mask's
// averaged RGB, scaled by its alpha, becomes the output alpha (see the shader
// in GPUImageMaskFilter.m).
@interface GPUImageMaskFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMaskFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMaskFilter.h"
2 |
// Mask fragment shader: averages the mask's (second input's) RGB, scales that
// by the mask's alpha, and uses the result as the alpha of the first input's
// color. OpenGL ES variant, with precision qualifiers.
// Note: comments inside SHADER_STRING are removed by the C preprocessor
// before stringification, so they never reach the GLSL compiler.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageMaskShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     
     //Averages the mask's RGB values, and scales that value by the mask's alpha
     //
     //The dot product should take fewer cycles than doing an average normally
     //
     //Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0
     lowp float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
     
     gl_FragColor = vec4(textureColor.xyz, newAlpha);
     // gl_FragColor = vec4(textureColor2);
 }
);
#else
// Desktop GL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageMaskShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     
     //Averages the mask's RGB values, and scales that value by the mask's alpha
     //
     //The dot product should take fewer cycles than doing an average normally
     //
     //Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0
     float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
     
     gl_FragColor = vec4(textureColor.xyz, newAlpha);
     // gl_FragColor = vec4(textureColor2);
 }
);
#endif
54 |
@implementation GPUImageMaskFilter

// Builds the filter around the two-input mask shader declared above.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMaskShaderString]))
    {
        return nil;
    }
    
    return self;
}

// Renders with alpha blending (SRC_ALPHA / ONE_MINUS_SRC_ALPHA) enabled so the
// shader's computed alpha composites the masked image over the framebuffer,
// then disables blending again so GL state does not leak into other filters.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
{
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates sourceTexture:sourceTexture];
    glDisable(GL_BLEND);
}

@end
76 |
77 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMedianFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
// Median filter over a 3x3 texel neighborhood; shader in the matching .m file.
@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMissEtikateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Photoshop action by Miss Etikate:
6 | http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961
7 | */
8 |
9 | // Note: If you want to use this effect you have to add lookup_miss_etikate.png
10 | // from Resources folder to your application bundle.
11 |
@interface GPUImageMissEtikateFilter : GPUImageFilterGroup
{
    // Source for the lookup_miss_etikate.png color-lookup table fed into the
    // internal GPUImageLookupFilter (see the .m).
    GPUImagePicture *lookupImageSource;
}

@end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMissEtikateFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMissEtikateFilter.h"
2 | #import "GPUImagePicture.h"
3 | #import "GPUImageLookupFilter.h"
4 |
@implementation GPUImageMissEtikateFilter

// Builds a one-stage filter group: a color-lookup filter driven by the
// bundled lookup_miss_etikate.png image.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    // Load the lookup table image for the current platform.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    UIImage *image = [UIImage imageNamed:@"lookup_miss_etikate.png"];
#else
    NSImage *image = [NSImage imageNamed:@"lookup_miss_etikate.png"];
#endif
    
    NSAssert(image, @"To use GPUImageMissEtikateFilter you need to add lookup_miss_etikate.png from GPUImage/framework/Resources to your application bundle.");
    
    lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
    
    GPUImageLookupFilter *lookupStage = [[GPUImageLookupFilter alloc] init];
    [self addFilter:lookupStage];
    
    // Lookup table goes in as the second texture input.
    [lookupImageSource addTarget:lookupStage atTextureLocation:1];
    [lookupImageSource processImage];
    
    self.initialFilters = @[lookupStage];
    self.terminalFilter = lookupStage;
    
    return self;
}

// Re-renders the lookup image before a still capture, then defers to super.
- (void)prepareForImageCapture
{
    [lookupImageSource processImage];
    [super prepareForImageCapture];
}

#pragma mark -
#pragma mark Accessors

@end
44 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMonochromeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Tints the image toward a single color, mixed with the original by `intensity`.
@interface GPUImageMonochromeFilter : GPUImageFilter
{
    GLint intensityUniform, filterColorUniform;  // shader uniform locations
}

// Strength of the effect; exact range/default are defined in the .m.
@property(readwrite, nonatomic) CGFloat intensity;
// The RGBA color used for the monochrome tint.
@property(readwrite, nonatomic) GPUVector4 color;

// Convenience setter for the tint color's RGB components.
- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMosaicFilter.h:
--------------------------------------------------------------------------------
1 |
2 | // This needs a little more work, it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working
3 |
4 | #import "GPUImageTwoInputFilter.h"
5 | #import "GPUImagePicture.h"
6 |
@interface GPUImageMosaicFilter : GPUImageTwoInputFilter {
    GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;
    // Presumably holds the tileset image loaded by setTileSet: — confirm in the .m.
    GPUImagePicture *pic;
}

// This filter takes an input tileset, the tiles must ascend in luminance
// It looks at the input image and replaces each display tile with an input tile
// according to the luminance of that tile. The idea was to replicate the ASCII
// video filters seen in other apps, but the tileset can be anything.
@property(readwrite, nonatomic) CGSize inputTileSize;
@property(readwrite, nonatomic) float numTiles;
@property(readwrite, nonatomic) CGSize displayTileSize;
@property(readwrite, nonatomic) BOOL colorOn;

// NOTE(review): the setter declarations below duplicate the property-generated
// ones; kept because they are part of the published header.
- (void)setNumTiles:(float)numTiles;
- (void)setDisplayTileSize:(CGSize)displayTileSize;
- (void)setInputTileSize:(CGSize)inputTileSize;
// Loads the named tileset resource as the second input.
- (void)setTileSet:(NSString *)tileSet;
- (void)setColorOn:(BOOL)yes;

@end
28 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMotionBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/** Applies a directional motion blur to an image.
 */
@interface GPUImageMotionBlurFilter : GPUImageFilter

/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;

/** The angular direction of the blur, in degrees. 0 degrees by default
 */
@property (readwrite, nonatomic) CGFloat blurAngle;

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMotionDetector.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageLowPassFilter.h"
3 | #import "GPUImageAverageColor.h"
4 |
/** Detects motion by comparing incoming frames against a low-pass-filtered
    history of previous frames, then averaging the difference (see the three
    internal stages below).
 */
@interface GPUImageMotionDetector : GPUImageFilterGroup
{
    GPUImageLowPassFilter *lowPassFilter;           // frame history
    GPUImageTwoInputFilter *frameComparisonFilter;  // current vs. history
    GPUImageAverageColor *averageColor;             // reduces the difference image
}

// This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat lowPassFilterStrength;

// For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.
@property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);

@end
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMovie.h:
--------------------------------------------------------------------------------
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
3 | #import "GPUImageContext.h"
4 | #import "GPUImageOutput.h"
5 |
6 | /** Protocol for getting Movie played callback.
7 | */
@protocol GPUImageMovieDelegate <NSObject>

// Called when the movie source finishes playing its asset.
- (void)didCompletePlayingMovie;
@end
12 |
13 | /** Source object for filtering movies
14 | */
/** Source object for filtering movies
 */
@interface GPUImageMovie : GPUImageOutput

@property (readwrite, retain) AVAsset *asset;
@property (readwrite, retain) AVPlayerItem *playerItem;
@property(readwrite, retain) NSURL *url;

/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
 */
@property(readwrite, nonatomic) BOOL runBenchmark;

/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
 */
@property(readwrite, nonatomic) BOOL playAtActualSpeed;

/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
 */
@property(readwrite, nonatomic) BOOL shouldRepeat;

/** The delegate that receives the movie-did-complete-playing callback.
    NOTE(review): `assign` means this is a non-zeroing reference — callers must
    nil it out before the delegate deallocates.
 */
@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate> delegate;

@property (readonly, nonatomic) AVAssetReader *assetReader;
@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
@property (readonly, nonatomic) BOOL videoEncodingIsFinished;

/// @name Initialization and teardown
- (id)initWithAsset:(AVAsset *)asset;
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
- (id)initWithURL:(NSURL *)url;
- (void)textureCacheSetup;

/// @name Movie processing
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;

@end
57 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMovieComposition.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageMovieComposition.h
3 | // Givit
4 | //
5 | // Created by Sean Meiners on 2013/01/25.
6 | //
7 | //
8 |
9 | #import "GPUImageMovie.h"
10 |
// A GPUImageMovie variant that reads from an AVComposition, honoring an
// optional AVVideoComposition and AVAudioMix (see createAssetReader in the .m).
@interface GPUImageMovieComposition : GPUImageMovie

// NOTE(review): "compositon" is misspelled, but it is public API used by the
// .m and potentially by callers, so the spelling is kept.
@property (readwrite, retain) AVComposition *compositon;
@property (readwrite, retain) AVVideoComposition *videoComposition;
@property (readwrite, retain) AVAudioMix *audioMix;

// Designated initializer storing the composition, video composition, and audio mix.
- (id)initWithComposition:(AVComposition*)compositon
andVideoComposition:(AVVideoComposition*)videoComposition
andAudioMix:(AVAudioMix*)audioMix;

@end
22 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMovieComposition.m:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageMovieComposition.m
3 | // Givit
4 | //
5 | // Created by Sean Meiners on 2013/01/25.
6 | //
7 | //
8 |
9 | #import "GPUImageMovieComposition.h"
10 | #import "GPUImageMovieWriter.h"
11 |
@implementation GPUImageMovieComposition

@synthesize compositon = _compositon;
@synthesize videoComposition = _videoComposition;
@synthesize audioMix = _audioMix;

// Designated initializer: stores the composition trio and sets up the texture
// cache inherited from GPUImageMovie before any reading begins.
- (id)initWithComposition:(AVComposition*)compositon
andVideoComposition:(AVVideoComposition*)videoComposition
andAudioMix:(AVAudioMix*)audioMix {
    if (!(self = [super init]))
    {
        return nil;
    }
    
    [self textureCacheSetup];
    
    self.compositon = compositon;
    self.videoComposition = videoComposition;
    self.audioMix = audioMix;
    
    return self;
}

// Builds an AVAssetReader for the stored composition:
//  - video through an AVAssetReaderVideoCompositionOutput decoding to
//    4:2:0 bi-planar full-range YCbCr,
//  - audio through an AVAssetReaderAudioMixOutput, only when the composition
//    has audio tracks AND an audioEncodingTarget is set.
// NOTE(review): `error` from assetReaderWithAsset:error: is never inspected;
// a nil reader would only surface later — consider checking it.
- (AVAssetReader*)createAssetReader
{
    //NSLog(@"creating reader from composition: %@, video: %@, audio: %@ with duration: %@", _compositon, _videoComposition, _audioMix, CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, _compositon.duration)));
    
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.compositon error:&error];
    
    NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[_compositon tracksWithMediaType:AVMediaTypeVideo]
videoSettings:outputSettings];
#if ! TARGET_IPHONE_SIMULATOR
    // On non-simulator builds, force the default render scale on mutable video compositions.
    if( [_videoComposition isKindOfClass:[AVMutableVideoComposition class]] )
        [(AVMutableVideoComposition*)_videoComposition setRenderScale:1.0];
#endif
    readerVideoOutput.videoComposition = self.videoComposition;
    readerVideoOutput.alwaysCopiesSampleData = NO;  // avoid an extra buffer copy
    [assetReader addOutput:readerVideoOutput];
    
    NSArray *audioTracks = [_compositon tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
    AVAssetReaderAudioMixOutput *readerAudioOutput = nil;
    
    if (shouldRecordAudioTrack)
    {
        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
        
        readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
        readerAudioOutput.audioMix = self.audioMix;
        readerAudioOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerAudioOutput];
    }
    
    return assetReader;
}

@end
71 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMultiplyBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
// Multiply blend of two inputs; shader (premultiplied-alpha form) is in the .m.
@interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageMultiplyBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMultiplyBlendFilter.h"
2 |
// Multiply blend fragment shader. The extra (1 - alpha) terms generalize the
// plain overlay*base product to premultiplied-alpha inputs, so partially
// transparent regions of either input show the other input through.
// OpenGL ES variant, with precision qualifiers.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
     
     gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
 }
);
#else
// Desktop GL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
     
     gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
 }
);
#endif
38 |
@implementation GPUImageMultiplyBlendFilter

// Builds the filter around the multiply-blend fragment shader declared above.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageMultiplyBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    
    return self;
}

@end
52 |
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageNobleCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageHarrisCornerDetectionFilter.h"
2 |
3 | /** Noble corner detector
4 |
5 | This is the Noble variant on the Harris detector, from
6 | Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
7 | */
8 |
9 |
// Variant of the Harris detector described above; the implementation detail
// that distinguishes it lives in the matching .m file.
@interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter

@end
13 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
// Non-maximum suppression over a 3x3 texel neighborhood; shader in the .m file.
@interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageNormalBlendFilter.h:
--------------------------------------------------------------------------------
1 | // Created by Jorge Garcia on 9/5/12.
2 | //
3 |
4 | #import "GPUImageTwoInputFilter.h"
5 |
// Two-input "normal" blend mode; the fragment shader lives in the matching .m file.
@interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter

@end
9 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageOpacityFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Scales the alpha channel of the incoming image by `opacity`; RGB passes
// through unchanged (see the shader in GPUImageOpacityFilter.m).
@interface GPUImageOpacityFilter : GPUImageFilter
{
    GLint opacityUniform;  // location of the "opacity" shader uniform
}

// Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting
@property(readwrite, nonatomic) CGFloat opacity;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageOpacityFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageOpacityFilter.h"
2 |
@implementation GPUImageOpacityFilter

@synthesize opacity = _opacity;

// Fragment shader: multiplies only the alpha channel by the `opacity`
// uniform, leaving RGB untouched. OpenGL ES variant (precision qualifiers).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform lowp float opacity;
 
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
 }
);
#else
// Desktop GL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float opacity;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
 }
);
#endif

#pragma mark -
#pragma mark Initialization and teardown

// Compiles the opacity shader, caches the uniform location, and defaults
// opacity to 1.0 (alpha unmodified).
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageOpacityFragmentShaderString]))
    {
        return nil;
    }
    
    opacityUniform = [filterProgram uniformIndex:@"opacity"];
    self.opacity = 1.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the new opacity and pushes it to the shader uniform immediately.
- (void)setOpacity:(CGFloat)newValue;
{
    _opacity = newValue;
    
    [self setFloat:_opacity forUniform:opacityUniform program:filterProgram];
}

@end
66 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageOpeningFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageErosionFilter;
4 | @class GPUImageDilationFilter;
5 |
6 | // A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
7 | // This helps to filter out smaller bright elements.
8 |
@interface GPUImageOpeningFilter : GPUImageFilterGroup
{
    GPUImageErosionFilter *erosionFilter;    // first pass
    GPUImageDilationFilter *dilationFilter;  // second pass, same radius
}

// Texel spacing forwarded to both internal passes (see the .m setters).
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;

// Designated initializer; `radius` sets the structuring-element size of both passes.
- (id)initWithRadius:(NSUInteger)radius;

@end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageOpeningFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageOpeningFilter.h"
2 | #import "GPUImageErosionFilter.h"
3 | #import "GPUImageDilationFilter.h"
4 |
@implementation GPUImageOpeningFilter

@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;

// Convenience initializer: an opening with a one-pixel structuring radius.
- (id)init;
{
    self = [self initWithRadius:1];
    if (self == nil)
    {
        return nil;
    }
    
    return self;
}

// Designated initializer: chains an erosion into a dilation of equal radius,
// which filters out bright elements smaller than the radius.
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    // First pass: erosion
    erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];
    
    // Second pass: dilation
    dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];
    
    [erosionFilter addTarget:dilationFilter];
    
    self.initialFilters = @[erosionFilter];
    self.terminalFilter = dilationFilter;
    
    return self;
}

// Forwards the vertical sampling distance to both internal passes.
- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    erosionFilter.verticalTexelSpacing = newValue;
    dilationFilter.verticalTexelSpacing = newValue;
}

// Forwards the horizontal sampling distance to both internal passes.
- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    erosionFilter.horizontalTexelSpacing = newValue;
    dilationFilter.horizontalTexelSpacing = newValue;
}

@end
58 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageOverlayBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
// Two-input overlay blend mode; the fragment shader lives in the matching .m file.
@interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageParallelCoordinateLineTransformFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | // This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene.
4 | //
5 | // It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications:
6 | // M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
7 | // M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.
8 |
@interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter
{
    // Presumably a CPU-side readback buffer and the line-pair vertex data —
    // confirm usage in the matching .m file.
    GLubyte *rawImagePixels;
    GLfloat *lineCoordinates;
    unsigned int maxLinePairsToRender, linePairsToRender;
}

@end
17 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePerlinNoiseFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Generates Perlin noise blended between two colors.
@interface GPUImagePerlinNoiseFilter : GPUImageFilter
{
    GLint scaleUniform, colorStartUniform, colorFinishUniform;  // shader uniform locations
}

// The two RGBA colors the noise value interpolates between.
@property (readwrite, nonatomic) GPUVector4 colorStart;
@property (readwrite, nonatomic) GPUVector4 colorFinish;

// Scale of the generated noise pattern.
@property (readwrite, nonatomic) float scale;

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePinchDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a pinch distortion of the image
4 | */
/** Creates a pinch distortion of the image
 */
@interface GPUImagePinchDistortionFilter : GPUImageFilter
{
    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;  // shader uniform locations
}

/** The center about which to apply the distortion, with a default of (0.5, 0.5)
 */
@property(readwrite, nonatomic) CGPoint center;
/** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0
 */
@property(readwrite, nonatomic) CGFloat radius;
/** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5
 */
@property(readwrite, nonatomic) CGFloat scale;

@end
21 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePixellateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Pixellates the image into square blocks sized by fractionalWidthOfAPixel.
@interface GPUImagePixellateFilter : GPUImageFilter
{
    GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;  // shader uniform locations
}

// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;


@end
13 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePixellatePositionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Pixellation restricted to a circular region around a chosen center point.
@interface GPUImagePixellatePositionFilter : GPUImageFilter
{
    GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;  // shader uniform locations
}

// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;

// the center point to start pixelation in texture coordinates, default 0.5, 0.5
@property(readwrite, nonatomic) CGPoint center;

// the radius (0.0 - 1.0) in which to pixelate, default 1.0
@property(readwrite, nonatomic) CGFloat radius;

@end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePoissonBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputCrossTextureSamplingFilter.h"
2 | #import "GPUImageFilterGroup.h"
3 |
// Iterative Poisson (gradient-domain) blend of two inputs.
@interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter
{
    GLint mixUniform;  // shader uniform location for `mix`
    
    // Second render target used to ping-pong between gradient-propagation iterations.
    GLuint secondFilterOutputTexture;
    GLuint secondFilterFramebuffer;
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat mix;

// The number of times to propagate the gradients.
// Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
@property(readwrite, nonatomic) NSUInteger numIterations;

@end
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePolarPixellateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Pixellation performed in polar coordinates around a center point.
@interface GPUImagePolarPixellateFilter : GPUImageFilter {
    GLint centerUniform, pixelSizeUniform;  // shader uniform locations
}

// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
// The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)
@property(readwrite, nonatomic) CGSize pixelSize;


@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePolkaDotFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImagePixellateFilter.h"
2 |
// Pixellation variant that renders each block as a dot instead of a square.
@interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
{
    GLint dotScalingUniform;  // shader uniform location
}

// Fraction of each grid cell occupied by the dot.
@property(readwrite, nonatomic) CGFloat dotScaling;

@end
11 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePosterizeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
4 | */
/** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
 */
@interface GPUImagePosterizeFilter : GPUImageFilter
{
    GLint colorLevelsUniform;  // shader uniform location
}

/** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
 */
@property(readwrite, nonatomic) NSUInteger colorLevels;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePosterizeFilter.m:
--------------------------------------------------------------------------------
#import "GPUImagePosterizeFilter.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform highp float colorLevels;
 
 void main()
 {
     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
 }
);
#else
NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float colorLevels;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
 }
);
#endif

@implementation GPUImagePosterizeFilter

@synthesize colorLevels = _colorLevels;

#pragma mark -
#pragma mark Initialization

/// Installs the posterize fragment shader, caches its uniform handle, and
/// starts at the documented default of 10 color levels.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePosterizeFragmentShaderString]))
    {
        return nil;
    }
    
    colorLevelsUniform = [filterProgram uniformIndex:@"colorLevels"];
    self.colorLevels = 10;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new level count and pushes it to the shader uniform.
/// The header documents a range of 1-256; a value of 0 would make the shader
/// divide by zero (`... / colorLevels`), so clamp the lower bound to 1.
- (void)setColorLevels:(NSUInteger)newValue;
{
    _colorLevels = (newValue < 1) ? 1 : newValue;
    
    [self setFloat:(GLfloat)_colorLevels forUniform:colorLevelsUniform program:filterProgram];
}

@end
66 |
67 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImagePrewittEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageSobelEdgeDetectionFilter.h"

/// Edge detection built on the Sobel filter's two-pass plumbing — presumably
/// swaps in Prewitt kernels in the .m (not visible here); confirm there.
@interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBClosingFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"

@class GPUImageRGBErosionFilter;
@class GPUImageRGBDilationFilter;

// A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius.
// This helps to filter out smaller dark elements.

@interface GPUImageRGBClosingFilter : GPUImageFilterGroup
{
    GPUImageRGBErosionFilter *erosionFilter;    // Second pass of the closing operation
    GPUImageRGBDilationFilter *dilationFilter;  // First pass of the closing operation
}

// Designated initializer; radius is the pixel extent used by both morphological passes.
- (id)initWithRadius:(NSUInteger)radius;


@end
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBClosingFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageRGBClosingFilter.h"
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageRGBDilationFilter.h"

@implementation GPUImageRGBClosingFilter

/// Convenience initializer; uses a 1-pixel radius for both passes.
- (id)init;
{
    return [self initWithRadius:1];
}

/// Designated initializer. A closing is a dilation followed by an erosion of
/// the same radius, which suppresses small dark regions in the image.
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    // Build both stages up front, then wire dilation -> erosion.
    dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];
    
    erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];
    
    [dilationFilter addTarget:erosionFilter];
    
    // Input enters at the dilation stage; output leaves from the erosion stage.
    self.initialFilters = [NSArray arrayWithObject:dilationFilter];
    self.terminalFilter = erosionFilter;
    
    return self;
}


@end
42 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBDilationFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoPassTextureSamplingFilter.h"

// For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
// This extends out brighter colors, and can be used for abstraction of color images.
// Used by GPUImageRGBClosingFilter and GPUImageRGBOpeningFilter as one half of their morphological pipelines.

@interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)dilationRadius;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBErosionFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoPassTextureSamplingFilter.h"

// For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
// (The comment previously said "dilationRadius" — this class's parameter is erosionRadius.)
// This extends out dark features, and can be used for abstraction of color images.
// Used by GPUImageRGBClosingFilter and GPUImageRGBOpeningFilter as one half of their morphological pipelines.

@interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)erosionRadius;

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/// Scales each color channel of the input independently by the red/green/blue
/// multipliers below (see the shader in GPUImageRGBFilter.m).
@interface GPUImageRGBFilter : GPUImageFilter
{
    // Handles for the per-channel adjustment uniforms, cached at init time.
    GLint redUniform;
    GLint greenUniform;
    GLint blueUniform;
}

// Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
@property (readwrite, nonatomic) CGFloat red;
@property (readwrite, nonatomic) CGFloat green;
@property (readwrite, nonatomic) CGFloat blue;

@end
16 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageRGBFilter.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform highp float redAdjustment;
 uniform highp float greenAdjustment;
 uniform highp float blueAdjustment;
 
 void main()
 {
     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);
 }
);
#else
NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float redAdjustment;
 uniform float greenAdjustment;
 uniform float blueAdjustment;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);
 }
);
#endif

@implementation GPUImageRGBFilter

@synthesize red = _red, blue = _blue, green = _green;

#pragma mark -
#pragma mark Initialization and teardown

/// Compiles the RGB-adjustment shader, caches the three uniform handles, and
/// starts every channel multiplier at the identity value of 1.0.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageRGBFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    
    redUniform = [filterProgram uniformIndex:@"redAdjustment"];
    greenUniform = [filterProgram uniformIndex:@"greenAdjustment"];
    blueUniform = [filterProgram uniformIndex:@"blueAdjustment"];
    
    self.red = 1.0;
    self.green = 1.0;
    self.blue = 1.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Each setter records the new multiplier and immediately pushes it to its uniform.

- (void)setRed:(CGFloat)newValue;
{
    _red = newValue;
    [self setFloat:newValue forUniform:redUniform program:filterProgram];
}

- (void)setGreen:(CGFloat)newValue;
{
    _green = newValue;
    [self setFloat:newValue forUniform:greenUniform program:filterProgram];
}

- (void)setBlue:(CGFloat)newValue;
{
    _blue = newValue;
    [self setFloat:newValue forUniform:blueUniform program:filterProgram];
}

@end
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBOpeningFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"

@class GPUImageRGBErosionFilter;
@class GPUImageRGBDilationFilter;

// A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius.
// This helps to filter out smaller bright elements.

@interface GPUImageRGBOpeningFilter : GPUImageFilterGroup
{
    GPUImageRGBErosionFilter *erosionFilter;    // First pass of the opening operation
    GPUImageRGBDilationFilter *dilationFilter;  // Second pass of the opening operation
}

// Designated initializer; radius is the pixel extent used by both morphological passes.
- (id)initWithRadius:(NSUInteger)radius;

@end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRGBOpeningFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageRGBOpeningFilter.h"
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageRGBDilationFilter.h"

@implementation GPUImageRGBOpeningFilter

/// Convenience initializer; uses a 1-pixel radius for both passes.
- (id)init;
{
    return [self initWithRadius:1];
}

/// Designated initializer. An opening is an erosion followed by a dilation of
/// the same radius, which suppresses small bright regions in the image.
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    // Build both stages up front, then wire erosion -> dilation.
    erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];
    
    dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];
    
    [erosionFilter addTarget:dilationFilter];
    
    // Input enters at the erosion stage; output leaves from the dilation stage.
    self.initialFilters = [NSArray arrayWithObject:erosionFilter];
    self.terminalFilter = dilationFilter;
    
    return self;
}


@end
42 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRawDataInput.h:
--------------------------------------------------------------------------------
#import "GPUImageOutput.h"

// The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.
// The bytes are uploaded and stored within a texture, so nothing is kept locally.
// The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:
// The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:

// Pixel channel layouts accepted for uploaded bytes, mapped onto the matching GL format enums.
typedef enum {
	GPUPixelFormatBGRA = GL_BGRA,
	GPUPixelFormatRGBA = GL_RGBA,
	GPUPixelFormatRGB = GL_RGB
} GPUPixelFormat;

// Per-component storage types for uploaded bytes, mapped onto the matching GL type enums.
typedef enum {
	GPUPixelTypeUByte = GL_UNSIGNED_BYTE,
	GPUPixelTypeFloat = GL_FLOAT
} GPUPixelType;

@interface GPUImageRawDataInput : GPUImageOutput
{
    CGSize uploadedImageSize;   // Size in pixels of the most recently uploaded image
    
    // NOTE(review): presumably serializes data updates against downstream
    // processing — confirm against the .m (not visible here).
    dispatch_semaphore_t dataUpdateSemaphore;
}

// Initialization and teardown
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;

/** Input data pixel format
 */
@property (readwrite, nonatomic) GPUPixelFormat pixelFormat;
@property (readwrite, nonatomic) GPUPixelType pixelType;

// Image rendering
- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
- (void)processData;
- (CGSize)outputImageSize;

@end
42 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageRawDataOutput.h:
--------------------------------------------------------------------------------
// The original first line was a bare `#import` — the angle-bracket header was
// evidently stripped during text extraction, which is a hard preprocessor
// error. Restored to the Foundation umbrella header this file depends on.
#import <Foundation/Foundation.h>
#import "GPUImageContext.h"

// Plain RGBA byte quadruple returned by colorAtLocation:.
struct GPUByteColorVector {
    GLubyte red;
    GLubyte green;
    GLubyte blue;
    GLubyte alpha;
};
typedef struct GPUByteColorVector GPUByteColorVector;

@protocol GPUImageRawDataProcessor;

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
@interface GPUImageRawDataOutput : NSObject {
    CGSize imageSize;
    CVOpenGLESTextureCacheRef rawDataTextureCache;
    CVPixelBufferRef renderTarget;
    GPUImageRotationMode inputRotation;
    BOOL outputBGRA;
    CVOpenGLESTextureRef renderTexture;

    // NOTE(review): a protocol qualifier on this ivar appears to have been
    // stripped by the same extraction (upstream GPUImage declares it as
    // id<GPUImageTextureDelegate>) — confirm against GPUImageContext.h before
    // relying on the plain id type.
    __unsafe_unretained id textureDelegate;
}
#else
@interface GPUImageRawDataOutput : NSObject {
    CGSize imageSize;
    CVOpenGLTextureCacheRef rawDataTextureCache;
    CVPixelBufferRef renderTarget;
    GPUImageRotationMode inputRotation;
    BOOL outputBGRA;
    CVOpenGLTextureRef renderTexture;

    // NOTE(review): same likely-stripped protocol qualifier as the iOS branch.
    __unsafe_unretained id textureDelegate;
}
#endif

// Pointer to the latest frame's pixel data; layout is described by bytesPerRowInOutput.
@property(readonly) GLubyte *rawBytesForImage;
// Invoked when a new frame of data is available.
@property(nonatomic, copy) void(^newFrameAvailableBlock)(void);
@property(nonatomic) BOOL enabled;

// Initialization and teardown
- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;

// Data access
- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
- (NSUInteger)bytesPerRowInOutput;

- (void)setImageSize:(CGSize)newImageSize;

@end
52 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSaturationBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/// Two-input blend — presumably the standard "saturation" blend mode (keep the
/// base image's luminance/hue, take saturation from the overlay); confirm
/// against the shader in the .m, which is not visible here.
@interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSaturationFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/** Adjusts the saturation of an image
 */
@interface GPUImageSaturationFilter : GPUImageFilter
{
    GLint saturationUniform;    // Handle for the "saturation" uniform in the fragment shader
}

/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level
 */
@property(readwrite, nonatomic) CGFloat saturation;

@end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSaturationFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageSaturationFilter.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform lowp float saturation;
 
 // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
 const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
 
 void main()
 {
    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
    lowp float luminance = dot(textureColor.rgb, luminanceWeighting);
    lowp vec3 greyScaleColor = vec3(luminance);
    
    gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
     
 }
);
#else
NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float saturation;
 
 // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
 const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
 
 void main()
 {
    vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
    float luminance = dot(textureColor.rgb, luminanceWeighting);
    vec3 greyScaleColor = vec3(luminance);
    
    gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
     
 }
);
#endif

@implementation GPUImageSaturationFilter

@synthesize saturation = _saturation;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the saturation shader, caches its uniform handle, and starts at the
/// unmodified saturation level of 1.0.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSaturationFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    
    saturationUniform = [filterProgram uniformIndex:@"saturation"];
    self.saturation = 1.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new saturation amount and pushes it straight to the shader uniform.
- (void)setSaturation:(CGFloat)newValue;
{
    _saturation = newValue;
    [self setFloat:newValue forUniform:saturationUniform program:filterProgram];
}

@end
78 |
79 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageScreenBlendFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoInputFilter.h"

/// Screen blend of two inputs: result = 1 - (1 - a) * (1 - b),
/// as implemented by the shader in GPUImageScreenBlendFilter.m.
@interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
{
}

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageScreenBlendFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageScreenBlendFilter.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     mediump vec4 whiteColor = vec4(1.0);
     gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
 }
);
#else
NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     vec4 whiteColor = vec4(1.0);
     gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
 }
);
#endif

@implementation GPUImageScreenBlendFilter

/// The screen-blend shader does all the work; initialization only installs it.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageScreenBlendFragmentShaderString];
    return self;
}

@end
52 |
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSepiaFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageColorMatrixFilter.h"

/// Simple sepia tone filter
/// (a GPUImageColorMatrixFilter preloaded with a sepia color matrix at full
/// intensity — see GPUImageSepiaFilter.m).
@interface GPUImageSepiaFilter : GPUImageColorMatrixFilter

@end
7 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSepiaFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageSepiaFilter.h"

@implementation GPUImageSepiaFilter

/// Configures the inherited color-matrix filter with the classic sepia
/// transform at full intensity; all rendering is handled by the superclass.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    self.intensity = 1.0;
    self.colorMatrix = (GPUMatrix4x4){
        {0.3588, 0.7044, 0.1368, 0.0},
        {0.2990, 0.5870, 0.1140, 0.0},
        {0.2392, 0.4696, 0.0912, 0.0},
        {0.0, 0.0, 0.0, 1.0},
    };
    
    return self;
}

@end
24 |
25 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSharpenFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilter.h"

/// Sharpens the image. The shader (in the .m, not visible here) reads the
/// sharpness and image width/height factor uniforms cached below.
@interface GPUImageSharpenFilter : GPUImageFilter
{
    GLint sharpnessUniform;
    GLint imageWidthFactorUniform, imageHeightFactorUniform;
}

// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat sharpness;

@end
13 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageShiTomasiFeatureDetectionFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageHarrisCornerDetectionFilter.h"

/** Shi-Tomasi feature detector

 This is the Shi-Tomasi feature detector, as described in
 J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.

 Reuses the detection pipeline of GPUImageHarrisCornerDetectionFilter and only
 swaps in a different cornerness shader (see the .m).
 */

@interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter

// Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageShiTomasiFeatureDetectionFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageShiTomasiFeatureDetectionFilter.h"

@implementation GPUImageShiTomasiFeatureDetectionFilter

// Fragment shader computing the Shi-Tomasi cornerness response. It reads a
// derivative texture — presumably Ix^2, Iy^2, and a remapped Ixy packed into
// the r/g/b channels by the superclass's pipeline; confirm in
// GPUImageHarrisCornerDetectionFilter. The formula implemented is noted inline.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform lowp float sensitivity;
 
 void main()
 {
     mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
     
     mediump float derivativeDifference = derivativeElements.x - derivativeElements.y;
     mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
     
     // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)
     mediump float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);
     
     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
 }
);
#else
NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float sensitivity;
 
 void main()
 {
     vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
     
     float derivativeDifference = derivativeElements.x - derivativeElements.y;
     float zElement = (derivativeElements.z * 2.0) - 1.0;
     
     // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)
     float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);
     
     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
 }
);
#endif

#pragma mark -
#pragma mark Initialization and teardown

// Installs the Shi-Tomasi cornerness shader into the superclass's detection
// pipeline, then raises the default sensitivity to 1.5 (vs. the Harris
// detector's default, per the header comment).
- (id)init;
{
    if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageShiTomasiCornerDetectionFragmentShaderString]))
    {
        return nil;
    }
    
    self.sensitivity = 1.5;
    
    return self;
}


@end
66 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSingleComponentGaussianBlurFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageGaussianBlurFilter.h"

// This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)
// Inherits all blur configuration (radius, texel spacing) from GPUImageGaussianBlurFilter.

@interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter

@end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSketchFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageSobelEdgeDetectionFilter.h"

/** Converts video to look like a sketch.
 
 This is just the Sobel edge detection filter with the colors inverted.
 All tuning properties (texelWidth/texelHeight, edgeStrength) come from the superclass.
 */
@interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
{
}

@end
12 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSmoothToonFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"

@class GPUImageGaussianBlurFilter;
@class GPUImageToonFilter;

/** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
 */
@interface GPUImageSmoothToonFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter; // First stage: noise smoothing
    GPUImageToonFilter *toonFilter;         // Second stage: edges + color quantization
}

/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelWidth;
/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelHeight;

/// The radius of the underlying Gaussian blur. The default is 2.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/// The threshold at which to apply the edges, default of 0.2
@property(readwrite, nonatomic) CGFloat threshold;

/// The levels of quantization for the posterization of colors within the scene, with a default of 10.0
@property(readwrite, nonatomic) CGFloat quantizationLevels;

@end
29 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSmoothToonFilter.m:
--------------------------------------------------------------------------------
#import "GPUImageSmoothToonFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageToonFilter.h"

@implementation GPUImageSmoothToonFilter

@synthesize threshold;
@synthesize blurRadiusInPixels;
@synthesize quantizationLevels;
@synthesize texelWidth;
@synthesize texelHeight;

/// Chains a Gaussian blur into a toon filter so noise is smoothed away before
/// edge detection and color quantization are applied.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    // Stage one smooths the incoming image; stage two applies the toon effect.
    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
    toonFilter = [[GPUImageToonFilter alloc] init];
    
    [self addFilter:blurFilter];
    [self addFilter:toonFilter];
    [blurFilter addTarget:toonFilter];
    
    // Input enters at the blur; output leaves from the toon filter.
    self.initialFilters = [NSArray arrayWithObject:blurFilter];
    self.terminalFilter = toonFilter;
    
    // Defaults documented in the header.
    self.blurRadiusInPixels = 2.0;
    self.threshold = 0.2;
    self.quantizationLevels = 10.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Every property below forwards to the stage that owns the value: the blur
// radius lives on the Gaussian blur, everything else on the toon filter.

- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
    [blurFilter setBlurRadiusInPixels:newValue];
}

- (CGFloat)blurRadiusInPixels;
{
    return [blurFilter blurRadiusInPixels];
}

- (void)setTexelWidth:(CGFloat)newValue;
{
    [toonFilter setTexelWidth:newValue];
}

- (CGFloat)texelWidth;
{
    return [toonFilter texelWidth];
}

- (void)setTexelHeight:(CGFloat)newValue;
{
    [toonFilter setTexelHeight:newValue];
}

- (CGFloat)texelHeight;
{
    return [toonFilter texelHeight];
}

- (void)setThreshold:(CGFloat)newValue;
{
    [toonFilter setThreshold:newValue];
}

- (CGFloat)threshold;
{
    return [toonFilter threshold];
}

- (void)setQuantizationLevels:(CGFloat)newValue;
{
    [toonFilter setQuantizationLevels:newValue];
}

- (CGFloat)quantizationLevels;
{
    return [toonFilter quantizationLevels];
}

@end
95 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSobelEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageTwoPassFilter.h"

/// Sobel edge-detection filter built on the two-pass filter base; subclassed
/// by GPUImagePrewittEdgeDetectionFilter and GPUImageSketchFilter.
@interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
{
    GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform;
    // Presumably set once texelWidth/texelHeight are explicitly assigned so
    // automatic sizing no longer overwrites them — confirm in the .m.
    BOOL hasOverriddenImageSizeFactor;
}

// The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;

// The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.
@property(readwrite, nonatomic) CGFloat edgeStrength;

@end
17 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSoftEleganceFilter.h:
--------------------------------------------------------------------------------
#import "GPUImageFilterGroup.h"

@class GPUImagePicture;

/** A photo filter based on Soft Elegance Photoshop action
    http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603
 */

// Note: If you want to use this effect you have to add
// lookup_soft_elegance_1.png and lookup_soft_elegance_2.png
// from Resources folder to your application bundle.

@interface GPUImageSoftEleganceFilter : GPUImageFilterGroup
{
    // Strong references keep the lookup-table images alive for the filter's lifetime.
    GPUImagePicture *lookupImageSource1;
    GPUImagePicture *lookupImageSource2;
}

@end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSoftEleganceFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSoftEleganceFilter.h"
2 | #import "GPUImagePicture.h"
3 | #import "GPUImageLookupFilter.h"
4 | #import "GPUImageGaussianBlurFilter.h"
5 | #import "GPUImageAlphaBlendFilter.h"
6 |
7 | @implementation GPUImageSoftEleganceFilter
8 |
9 | - (id)init; // Builds the fixed chain: lookup1 -> (gaussian blur + alpha blend) -> lookup2.
10 | {
11 | if (!(self = [super init]))
12 | {
13 | return nil;
14 | }
15 |
16 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE // UIImage on iOS/simulator, NSImage on the Mac.
17 | UIImage *image1 = [UIImage imageNamed:@"lookup_soft_elegance_1.png"];
18 | UIImage *image2 = [UIImage imageNamed:@"lookup_soft_elegance_2.png"];
19 | #else
20 | NSImage *image1 = [NSImage imageNamed:@"lookup_soft_elegance_1.png"];
21 | NSImage *image2 = [NSImage imageNamed:@"lookup_soft_elegance_2.png"];
22 | #endif
23 |
24 | NSAssert(image1 && image2,
25 | @"To use GPUImageSoftEleganceFilter you need to add lookup_soft_elegance_1.png and lookup_soft_elegance_2.png from GPUImage/framework/Resources to your application bundle.");
26 |
27 | lookupImageSource1 = [[GPUImagePicture alloc] initWithImage:image1]; // Kept in an ivar so the lookup texture outlives this initializer.
28 | GPUImageLookupFilter *lookupFilter1 = [[GPUImageLookupFilter alloc] init];
29 | [self addFilter:lookupFilter1];
30 |
31 | [lookupImageSource1 addTarget:lookupFilter1 atTextureLocation:1]; // Lookup table feeds the filter's second texture slot.
32 | [lookupImageSource1 processImage];
33 |
34 | GPUImageGaussianBlurFilter *gaussianBlur = [[GPUImageGaussianBlurFilter alloc] init];
35 | gaussianBlur.blurRadiusInPixels = 10.0; // Fixed blur radius for the soft-glow component.
36 | [lookupFilter1 addTarget:gaussianBlur];
37 | [self addFilter:gaussianBlur];
38 |
39 | GPUImageAlphaBlendFilter *alphaBlend = [[GPUImageAlphaBlendFilter alloc] init];
40 | alphaBlend.mix = 0.14; // Mix only a small fraction of the blurred image back in.
41 | [lookupFilter1 addTarget:alphaBlend];
42 | [gaussianBlur addTarget:alphaBlend];
43 | [self addFilter:alphaBlend];
44 |
45 | lookupImageSource2 = [[GPUImagePicture alloc] initWithImage:image2];
46 |
47 | GPUImageLookupFilter *lookupFilter2 = [[GPUImageLookupFilter alloc] init];
48 | [alphaBlend addTarget:lookupFilter2];
49 | [lookupImageSource2 addTarget:lookupFilter2]; // Second lookup table is applied to the blended result.
50 | [lookupImageSource2 processImage];
51 | [self addFilter:lookupFilter2];
52 |
53 | self.initialFilters = [NSArray arrayWithObjects:lookupFilter1, nil]; // External input enters the group at lookupFilter1 ...
54 | self.terminalFilter = lookupFilter2; // ... and the group's output is taken from lookupFilter2.
55 |
56 | return self;
57 | }
58 |
59 | -(void)prepareForImageCapture { // Re-render both lookup sources so capture sees up-to-date textures.
60 | [lookupImageSource1 processImage];
61 | [lookupImageSource2 processImage];
62 | [super prepareForImageCapture];
63 | }
64 |
65 | #pragma mark -
66 | #pragma mark Accessors
67 |
68 | @end
69 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSoftLightBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSoftLightBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSoftLightBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
15 | mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | lowp float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output
18 | gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a); // Soft-light blend on alpha-divided base, recombined with the un-overlapped portions of each image.
19 | }
20 | );
21 | #else
22 | NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING
23 | (
24 | varying vec2 textureCoordinate;
25 | varying vec2 textureCoordinate2;
26 |
27 | uniform sampler2D inputImageTexture;
28 | uniform sampler2D inputImageTexture2;
29 |
30 | void main()
31 | {
32 | vec4 base = texture2D(inputImageTexture, textureCoordinate);
33 | vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
34 |
35 | float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output
36 | gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a); // Same blend for desktop GLSL (no precision qualifiers).
37 | }
38 | );
39 | #endif
40 |
41 | @implementation GPUImageSoftLightBlendFilter
42 |
43 | - (id)init; // Installs the soft-light fragment shader defined above; no tunable uniforms.
44 | {
45 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageSoftLightBlendFragmentShaderString]))
46 | {
47 | return nil;
48 | }
49 |
50 | return self;
51 | }
52 |
53 | @end
54 |
55 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSolidColorGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | // This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
4 | // dimensions, or this won't work correctly
5 |
6 |
7 | @interface GPUImageSolidColorGenerator : GPUImageFilter
8 | {
9 | GLint colorUniform;
10 | GLint useExistingAlphaUniform;
11 | }
12 |
13 | // This color dictates what the output image will be filled with
14 | @property(readwrite, nonatomic) GPUVector4 color;
15 | @property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO
16 |
17 | - (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSourceOverBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSourceOverBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSourceOverBlendFilter.h"
2 |
3 | // Standard source-over compositing: the second (overlay) input is laid over the
4 | // first, weighted by the overlay's alpha.
5 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
6 | NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
7 | (
8 | varying highp vec2 textureCoordinate;
9 | varying highp vec2 textureCoordinate2;
10 |
11 | uniform sampler2D inputImageTexture;
12 | uniform sampler2D inputImageTexture2;
13 |
14 | void main()
15 | {
16 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
17 | lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); // Fix: sample the overlay with its own rotation-adjusted coordinate (textureCoordinate2 was declared but unused).
18 |
19 | gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
20 | }
21 | );
22 | #else
23 | NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
24 | (
25 | varying vec2 textureCoordinate;
26 | varying vec2 textureCoordinate2;
27 |
28 | uniform sampler2D inputImageTexture;
29 | uniform sampler2D inputImageTexture2;
30 |
31 | void main()
32 | {
33 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
34 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); // Fix: use the second input's coordinate (was textureCoordinate).
35 |
36 | gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
37 | }
38 | );
39 | #endif
38 |
39 | @implementation GPUImageSourceOverBlendFilter
40 |
41 | - (id)init; // Installs the source-over fragment shader; no uniforms to configure.
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageSourceOverBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSphereRefractionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageSphereRefractionFilter : GPUImageFilter
4 | {
5 | GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
6 | }
7 |
8 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
9 | @property(readwrite, nonatomic) CGPoint center;
10 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
11 | @property(readwrite, nonatomic) CGFloat radius;
12 | /// The index of refraction for the sphere, with a default of 0.71
13 | @property(readwrite, nonatomic) CGFloat refractiveIndex;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageStillCamera.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageVideoCamera.h"
2 |
3 | void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress);
4 | void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer);
5 |
6 | @interface GPUImageStillCamera : GPUImageVideoCamera
7 |
8 | /** The JPEG compression quality to use when capturing a photo as a JPEG.
9 | */
10 | @property CGFloat jpegCompressionQuality;
11 |
12 | // Only reliably set inside the context of the completion handler of one of the capture methods
13 | @property (readonly) NSDictionary *currentCaptureMetadata;
14 |
15 | // Photography controls
16 | - (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;
17 | - (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
18 | - (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
19 | - (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageStretchDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a stretch distortion of the image
4 | */
5 | @interface GPUImageStretchDistortionFilter : GPUImageFilter {
6 | GLint centerUniform;
7 | }
8 |
9 | /** The center about which to apply the distortion, with a default of (0.5, 0.5)
10 | */
11 | @property(readwrite, nonatomic) CGPoint center;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSubtractBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSubtractBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSubtractBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a); // Subtract overlay RGB from base RGB; alpha is taken from the base image.
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
33 |
34 | gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a); // Same subtract blend for desktop GLSL (no precision qualifiers).
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageSubtractBlendFilter
40 |
41 | - (id)init; // Installs the subtract-blend fragment shader; no tunable uniforms.
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageSubtractBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageSwirlFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a swirl distortion on the image
4 | */
5 | @interface GPUImageSwirlFilter : GPUImageFilter
6 | {
7 | GLint radiusUniform, centerUniform, angleUniform;
8 | }
9 |
10 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
11 | @property(readwrite, nonatomic) CGPoint center;
12 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5
13 | @property(readwrite, nonatomic) CGFloat radius;
14 | /// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0
15 | @property(readwrite, nonatomic) CGFloat angle;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTextureInput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @interface GPUImageTextureInput : GPUImageOutput
4 | {
5 | CGSize textureSize;
6 | }
7 |
8 | // Initialization and teardown
9 | - (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
10 |
11 | // Image rendering
12 | - (void)processTextureWithFrameTime:(CMTime)frameTime;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTextureInput.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageTextureInput.h"
2 |
3 | @implementation GPUImageTextureInput
4 |
5 | #pragma mark -
6 | #pragma mark Initialization and teardown
7 |
8 | - (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize; // Wraps an externally created GL texture as a GPUImage source.
9 | {
10 | if (!(self = [super init]))
11 | {
12 | return nil;
13 | }
14 |
15 | runSynchronouslyOnVideoProcessingQueue(^{
16 | [GPUImageContext useImageProcessingContext];
17 |
18 | [self deleteOutputTexture]; // Drop the texture the superclass created; the caller-supplied one is used instead.
19 | });
20 |
21 | outputTexture = newInputTexture; // NOTE(review): ownership appears to stay with the caller — this class never deletes it here.
22 | textureSize = newTextureSize;
23 |
24 | return self;
25 | }
26 |
27 | #pragma mark -
28 | #pragma mark Image rendering
29 |
30 | - (void)processTextureWithFrameTime:(CMTime)frameTime; // Notifies every target that the wrapped texture is ready at the given timestamp.
31 | {
32 | runAsynchronouslyOnVideoProcessingQueue(^{
33 | for (id currentTarget in targets)
34 | {
35 | NSInteger indexOfObject = [targets indexOfObject:currentTarget];
36 | NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; // Each target was registered with its own texture slot.
37 |
38 | [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];
39 | [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];
40 | }
41 | });
42 | }
43 |
44 | @end
45 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTextureOutput.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import "GPUImageContext.h"
3 |
4 | @protocol GPUImageTextureOutputDelegate;
5 |
6 | @interface GPUImageTextureOutput : NSObject <GPUImageInput>
7 | {
8 | __unsafe_unretained id<GPUImageTextureDelegate> textureDelegate;
9 | }
10 |
11 | @property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;
12 | @property(readonly) GLuint texture;
13 | @property(nonatomic) BOOL enabled;
14 |
15 | @end
16 |
17 | @protocol GPUImageTextureOutputDelegate
18 | - (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
19 | @end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTextureOutput.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageTextureOutput.h"
2 |
3 | @implementation GPUImageTextureOutput
4 |
5 | @synthesize delegate = _delegate;
6 | @synthesize texture = _texture;
7 | @synthesize enabled;
8 |
9 | #pragma mark -
10 | #pragma mark Initialization and teardown
11 |
12 | - (id)init;
13 | {
14 | if (!(self = [super init]))
15 | {
16 | return nil;
17 | }
18 |
19 | self.enabled = YES; // Outputs are enabled by default.
20 |
21 | return self;
22 | }
23 |
24 | #pragma mark -
25 | #pragma mark GPUImageInput protocol
26 |
27 | - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; // Forwards frame-ready notifications to the delegate (messaging nil is a no-op).
28 | {
29 | [_delegate newFrameReadyFromTextureOutput:self];
30 | }
31 |
32 | - (NSInteger)nextAvailableTextureIndex; // Single-input target: always slot 0.
33 | {
34 | return 0;
35 | }
36 |
37 | - (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; // Captures the upstream texture name for the delegate to read.
38 | {
39 | _texture = newInputTexture;
40 | }
41 |
42 | - (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; // No-op: the raw texture is handed out without rotation handling.
43 | {
44 | }
45 |
46 | - (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; // No-op: input size is not tracked by this output.
47 | {
48 | }
49 |
50 | - (CGSize)maximumOutputSize; // Zero means no size constraint is imposed on upstream filters.
51 | {
52 | return CGSizeZero;
53 | }
54 |
55 | - (void)endProcessing // No-op: nothing to tear down.
56 | {
57 | }
58 |
59 | - (BOOL)shouldIgnoreUpdatesToThisTarget;
60 | {
61 | return NO;
62 | }
63 |
64 | - (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; // Stored unretained (see ivar declaration in the header).
65 | {
66 | textureDelegate = newTextureDelegate;
67 | }
68 |
69 | - (void)conserveMemoryForNextFrame; // Intentionally empty: this output holds no internal buffers to release.
70 | {
71 |
72 | }
73 |
74 | - (BOOL)wantsMonochromeInput;
75 | {
76 | return NO;
77 | }
78 |
79 | - (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; // No-op: the monochrome fast path is unsupported here.
80 | {
81 |
82 | }
83 |
84 | @end
85 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageThreeInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | extern NSString *const kGPUImageThreeInputTextureVertexShaderString;
4 |
5 | @interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
6 | {
7 | GLint filterThirdTextureCoordinateAttribute;
8 | GLint filterInputTextureUniform3;
9 | GPUImageRotationMode inputRotation3;
10 | GLuint filterSourceTexture3;
11 | CMTime thirdFrameTime;
12 |
13 | BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
14 | BOOL thirdFrameCheckDisabled;
15 |
16 | __unsafe_unretained id<GPUImageTextureDelegate> thirdTextureDelegate;
17 | }
18 |
19 | - (void)disableThirdFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageThresholdEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
4 | {
5 | GLint thresholdUniform;
6 | }
7 |
8 | /** Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default
9 | */
10 | @property(readwrite, nonatomic) CGFloat threshold;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageThresholdSketchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageThresholdEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
4 | {
5 | GLint thresholdUniform;
6 | }
7 |
8 | /** Any local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.8 as the default
9 | */
10 | @property(readwrite, nonatomic) CGFloat threshold;
11 |
12 | - (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTiltShiftFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | /// A simulated tilt shift lens effect
6 | @interface GPUImageTiltShiftFilter : GPUImageFilterGroup
7 | {
8 | GPUImageGaussianBlurFilter *blurFilter;
9 | GPUImageFilter *tiltShiftFilter;
10 | }
11 |
12 | /// The radius of the underlying blur, in pixels. This is 7.0 by default.
13 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
14 |
15 | /// The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4
16 | @property(readwrite, nonatomic) CGFloat topFocusLevel;
17 |
18 | /// The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6
19 | @property(readwrite, nonatomic) CGFloat bottomFocusLevel;
20 |
21 | /// The rate at which the image gets blurry away from the in-focus region, default 0.2
22 | @property(readwrite, nonatomic) CGFloat focusFallOffRate;
23 |
24 | @end
25 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageToneCurveFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageToneCurveFilter : GPUImageFilter
4 |
5 | @property(readwrite, nonatomic, copy) NSArray *redControlPoints;
6 | @property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
7 | @property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
8 | @property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;
9 |
10 | // Initialization and teardown
11 | - (id)initWithACVData:(NSData*)data;
12 |
13 | - (id)initWithACV:(NSString*)curveFilename;
14 | - (id)initWithACVURL:(NSURL*)curveFileURL;
15 |
16 | // This lets you set all three red, green, and blue tone curves at once.
17 | // NOTE: Deprecated this function because this effect can be accomplished
18 | // using the rgbComposite channel rather than setting all 3 R, G, and B channels.
19 | - (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;
20 |
21 | - (void)setPointsWithACV:(NSString*)curveFilename;
22 | - (void)setPointsWithACVURL:(NSURL*)curveFileURL;
23 |
24 | // Curve calculation
25 | - (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
26 | - (NSMutableArray *)splineCurve:(NSArray *)points;
27 | - (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
28 | - (void)updateToneCurveTexture;
29 |
30 | @end
31 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageToonFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | /** This uses Sobel edge detection to place a black border around objects,
4 | and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
5 | */
6 | @interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter
7 | {
8 | GLint thresholdUniform, quantizationLevelsUniform;
9 | }
10 |
11 | /** The threshold at which to apply the edges, default of 0.2
12 | */
13 | @property(readwrite, nonatomic) CGFloat threshold;
14 |
15 | /** The levels of quantization for the posterization of colors within the scene, with a default of 10.0
16 | */
17 | @property(readwrite, nonatomic) CGFloat quantizationLevels;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTransformFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageTransformFilter : GPUImageFilter
4 | {
5 | GLint transformMatrixUniform, orthographicMatrixUniform;
6 | GPUMatrix4x4 orthographicMatrix;
7 | }
8 |
9 | // You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).
10 | @property(readwrite, nonatomic) CGAffineTransform affineTransform;
11 | @property(readwrite, nonatomic) CATransform3D transform3D;
12 |
13 | // This applies the transform to the raw frame data if set to YES, the default of NO takes the aspect ratio of the image input into account when rotating
14 | @property(readwrite, nonatomic) BOOL ignoreAspectRatio;
15 |
16 | // sets the anchor point to top left corner
17 | @property(readwrite, nonatomic) BOOL anchorTopLeft;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTwoInputCrossTextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 |
7 | CGFloat texelWidth, texelHeight;
8 | BOOL hasOverriddenImageSizeFactor;
9 | }
10 |
11 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
12 | @property(readwrite, nonatomic) CGFloat texelWidth;
13 | @property(readwrite, nonatomic) CGFloat texelHeight;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTwoInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
4 |
5 | @interface GPUImageTwoInputFilter : GPUImageFilter
6 | {
7 | GLint filterSecondTextureCoordinateAttribute;
8 | GLint filterInputTextureUniform2;
9 | GPUImageRotationMode inputRotation2;
10 | GLuint filterSourceTexture2;
11 | CMTime firstFrameTime, secondFrameTime;
12 |
13 | BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
14 | BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;
15 |
16 | __unsafe_unretained id<GPUImageTextureDelegate> secondTextureDelegate;
17 | }
18 |
19 | - (void)disableFirstFrameCheck;
20 | - (void)disableSecondFrameCheck;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTwoPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageTwoPassFilter : GPUImageFilter
4 | {
5 | GLuint secondFilterOutputTexture;
6 |
7 | GLProgram *secondFilterProgram;
8 | GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
9 | GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
10 |
11 | GLuint secondFilterFramebuffer;
12 |
13 | NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
14 | }
15 |
16 | // Initialization and teardown
17 | - (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
18 | - (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
19 | - (void)initializeSecondaryAttributes;
20 | - (void)initializeSecondOutputTextureIfNeeded;
21 |
22 | // Managing the display FBOs
23 | - (void)createSecondFilterFBOofSize:(CGSize)currentFBOSize;
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageTwoPassTextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassFilter.h"
2 |
3 | @interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
4 | {
5 | GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
6 | GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
7 | CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
8 | }
9 |
10 | // This sets the spacing between texels (in pixels) when sampling for the first pass. By default, this is 1.0
11 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageUIElement.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @interface GPUImageUIElement : GPUImageOutput
4 |
5 | // Initialization and teardown
6 | - (id)initWithView:(UIView *)inputView;
7 | - (id)initWithLayer:(CALayer *)inputLayer;
8 |
9 | // Layer management
10 | - (CGSize)layerSizeInPixels;
11 | - (void)update;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageUnsharpMaskFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
@class GPUImageGaussianBlurFilter;

// Sharpens an image via unsharp masking: a Gaussian-blurred copy of the input
// is combined with the original, weighted by the intensity property.
@interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter; // produces the blurred copy
    GPUImageFilter *unsharpMaskFilter;      // combines the blurred copy with the original
}
// The blur radius of the underlying Gaussian blur. The default is 4.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

// The strength of the sharpening, from 0.0 on up, with a default of 1.0
@property(readwrite, nonatomic) CGFloat intensity;

@end
17 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageVignetteFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
/** Performs a vignetting effect, fading out the image at the edges
 */
@interface GPUImageVignetteFilter : GPUImageFilter
{
    // Uniform handles for the four vignette parameters exposed below
    GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;
}

// The center for the vignette in texture coordinates (defaults to 0.5, 0.5)
@property (nonatomic, readwrite) CGPoint vignetteCenter;

// The color to use for the vignette (defaults to black)
@property (nonatomic, readwrite) GPUVector3 vignetteColor;

// The normalized distance from the center where the vignette effect starts. Default of 0.5.
@property (nonatomic, readwrite) CGFloat vignetteStart;

// The normalized distance from the center where the vignette effect ends. Default of 0.75.
@property (nonatomic, readwrite) CGFloat vignetteEnd;

@end
23 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageVoronoiConsumerFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
// Two-input filter that uses its second input as a Voronoi lookup texture to
// decide where to sample colors from the first input.
@interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
{
    GLint sizeUniform; // handle for the fragment shader's "size" uniform
}

// Size of the Voronoi point texture. The setter rejects (with a log message)
// any size that is not square and a power of two.
@property (nonatomic, readwrite) CGSize sizeInPixels;

@end
11 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageVoronoiConsumerFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageVoronoiConsumerFilter.h"
2 |
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader. getCoordFromColor unpacks a texel coordinate that
// was packed into the RGB channels of the Voronoi lookup texture (z carries
// high-order bits split into x/y offsets), then the main function samples the
// source image at that unpacked coordinate.
NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING
(

 precision highp float;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform vec2 size;
 varying vec2 textureCoordinate;
 
 vec2 getCoordFromColor(vec4 color)
{
    float z = color.z * 256.0;
    float yoff = floor(z / 8.0);
    float xoff = mod(z, 8.0);
    float x = color.x*256.0 + xoff*256.0;
    float y = color.y*256.0 + yoff*256.0;
    return vec2(x,y) / size;
}
 
 void main(void) {
     vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);
     vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));
     
     gl_FragColor = color;
 }
);
#else
// Desktop OpenGL variant: identical logic, minus the ES-only precision qualifier.
NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform vec2 size;
 varying vec2 textureCoordinate;
 
 vec2 getCoordFromColor(vec4 color)
{
    float z = color.z * 256.0;
    float yoff = floor(z / 8.0);
    float xoff = mod(z, 8.0);
    float x = color.x*256.0 + xoff*256.0;
    float y = color.y*256.0 + yoff*256.0;
    return vec2(x,y) / size;
}
 
 void main(void)
 {
     vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);
     vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));
     
     gl_FragColor = color;
 }
);
#endif
58 |
@implementation GPUImageVoronoiConsumerFilter

@synthesize sizeInPixels = _sizeInPixels;

#pragma mark -
#pragma mark Initialization and teardown

// Sets up the two-input filter with the Voronoi-consumer fragment shader and
// caches the handle of its "size" uniform.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageVoronoiConsumerFragmentShaderString]))
    {
        return nil;
    }
    
    sizeUniform = [filterProgram uniformIndex:@"size"];
    
    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the size of the Voronoi point texture and pushes it to the shader's
// "size" uniform. The texture must be square and a power of two, because the
// shader unpacks texel coordinates from 8-bit color channels; invalid sizes
// are logged and the uniform is left untouched.
- (void)setSizeInPixels:(CGSize)sizeInPixels {
    _sizeInPixels = sizeInPixels;
    
    // Validate that it's a power of 2 and square.
    
    float width = log2(sizeInPixels.width);
    float height = log2(sizeInPixels.height);
    
    if (width != height) {
        NSLog(@"Voronoi point texture must be square");
        return;
    }
    if (width != floor(width) || height != floor(height)) {
        NSLog(@"Voronoi point texture must be a power of 2. Texture size %f, %f", sizeInPixels.width, sizeInPixels.height);
        return;
    }
    
    // Fix: make the shared GL context current and bind this filter's program
    // before writing the uniform. The original called glUniform2f directly,
    // which targets whatever program happens to be active at the time (or
    // fails when no GL context is current on the calling thread).
    [GPUImageContext useImageProcessingContext];
    [GPUImageContext setActiveShaderProgram:filterProgram];
    glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
}

@end
95 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageWeakPixelInclusionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
// 3x3-neighborhood filter; the specific inclusion logic lives in the fragment
// shader in the .m (not visible here). NOTE(review): presumably promotes weak
// edge pixels adjacent to strong ones (Canny-style hysteresis) — confirm.
@interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageWhiteBalanceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
/**
 * Created by Alaric Cole
 * Allows adjustment of color temperature in terms of what an image was effectively shot in.
 * This means higher Kelvin values will warm the image, while lower values will cool it.
 */
@interface GPUImageWhiteBalanceFilter : GPUImageFilter
{
    GLint temperatureUniform, tintUniform; // shader uniform handles for the two properties below
}
// Chosen color temperature, in degrees Kelvin.
// NOTE(review): declared as int here, whereas upstream GPUImage uses CGFloat —
// confirm against the .m before changing either declaration.
@property(readwrite, nonatomic) int temperature;

// Tint adjustment to compensate (green/magenta axis — confirm in the shader).
@property(readwrite, nonatomic) int tint;

@end
18 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageXYDerivativeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
// Reuses the Sobel edge-detection machinery with a different fragment shader
// (in the .m, not visible here) to output per-pixel X/Y intensity derivatives.
@interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageZoomBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
// Applies a directional ("zoom") blur radiating out from blurCenter.
@interface GPUImageZoomBlurFilter : GPUImageFilter

/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;

/** The normalized center of the blur. (0.5, 0.5) by default
 */
@property (readwrite, nonatomic) CGPoint blurCenter;

@end
14 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUImageiOSBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
@class GPUImageSaturationFilter;
@class GPUImageGaussianBlurFilter;
@class GPUImageLuminanceRangeFilter;

// Filter group intended to mimic the iOS system blur: the image is downsampled,
// run through saturation/luminance-range/Gaussian-blur stages, then upsampled.
@interface GPUImageiOSBlurFilter : GPUImageFilterGroup
{
    GPUImageSaturationFilter *saturationFilter;         // saturation adjustment stage
    GPUImageGaussianBlurFilter *blurFilter;             // Gaussian blur stage
    GPUImageLuminanceRangeFilter *luminanceRangeFilter; // luminance-range stage
}

/** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
 */
@property (readwrite, nonatomic) CGFloat saturation;

/** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0
 */
@property (readwrite, nonatomic) CGFloat downsampling;

@end
27 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/GPUimageDirectionalSobelEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
// Sobel edge detection variant; the directional fragment shader lives in the
// .m (not visible here).
// NOTE(review): the class name uses a lowercase "i" ("GPUimage…"), unlike every
// other class in the framework. Renaming would break existing references, so it
// is documented rather than "fixed" here.
@interface GPUimageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter

@end
6 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/iOS/GPUImage-Prefix.pch:
--------------------------------------------------------------------------------
1 | //
2 | // Prefix header for all source files of the 'GPUImage' target in the 'GPUImage' project
3 | //
4 |
5 | #ifdef __OBJC__
6 | #import
7 | #endif
8 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/iOS/GPUImageContext.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import
3 | #import
4 | #import
5 | #import
6 | #import
7 | #import "GLProgram.h"
8 |
// YES when the given rotation mode transposes the image, i.e. output width and height swap.
#define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal)

// Rotation/flip applied to an input texture before processing.
// NOTE(review): "kGPUImageFlipHorizonal" (missing "t") matches the upstream GPUImage spelling; do not correct it without updating every call site.
typedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180 } GPUImageRotationMode;
12 |
// Owns the shared OpenGL ES context and the serial dispatch queue on which all
// GPUImage rendering work is performed.
@interface GPUImageContext : NSObject

@property(readonly, nonatomic) dispatch_queue_t contextQueue;     // serial queue for GL work
@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram; // program currently bound
@property(readonly, retain, nonatomic) EAGLContext *context;      // underlying GL ES context

// Key identifying the context queue (presumably for dispatch_get_specific checks — confirm in the .m).
+ (void *)contextKey;
// Framework-wide singleton context.
+ (GPUImageContext *)sharedImageProcessingContext;
+ (dispatch_queue_t)sharedContextQueue;
// Makes the shared EAGLContext current on the calling thread.
+ (void)useImageProcessingContext;
// Binds the given shader program for subsequent GL calls.
+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;
// Hardware capability queries.
+ (GLint)maximumTextureSizeForThisDevice;
+ (GLint)maximumTextureUnitsForThisDevice;
+ (GLint)maximumVaryingVectorsForThisDevice;
+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;
+ (BOOL)deviceSupportsRedTextures;
+ (BOOL)deviceSupportsFramebufferReads;
// Returns a size no larger than the device's maximum texture dimensions.
+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;

- (void)presentBufferForDisplay;
// Returns a GLProgram compiled from the given shader pair (possibly cached — confirm in the .m).
- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;

// Recreates the context in the given sharegroup so textures can be shared with another context.
- (void)useSharegroup:(EAGLSharegroup *)sharegroup;

// Manage fast texture upload
+ (BOOL)supportsFastTextureUpload;

@end
41 |
@protocol GPUImageTextureDelegate;

// Adopted by anything that can sit downstream of a GPUImageOutput and receive
// rendered frames (filters, views, movie writers, ...).
@protocol GPUImageInput
// Notifies the target that a new frame is available on the given input index.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex;
- (NSInteger)nextAvailableTextureIndex;
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
- (CGSize)maximumOutputSize;
// Called when the upstream source has no more frames to deliver.
- (void)endProcessing;
- (BOOL)shouldIgnoreUpdatesToThisTarget;
- (BOOL)enabled;
- (void)conserveMemoryForNextFrame;
- (BOOL)wantsMonochromeInput;
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
@end

// Lets a texture provider reclaim a texture once a target is done reading it.
@protocol GPUImageTextureDelegate
- (void)textureNoLongerNeededForTarget:(id)textureTarget;
@end
63 |
64 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/iOS/GPUImageMovieWriter.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import
3 | #import "GPUImageContext.h"
4 |
5 | extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;
6 |
// Optional callbacks reporting the outcome of a movie-recording session.
@protocol GPUImageMovieWriterDelegate

@optional
- (void)movieRecordingCompleted;
- (void)movieRecordingFailedWithError:(NSError*)error;

@end
14 |
// GPUImage endpoint that encodes incoming frames to a movie file via
// AVAssetWriter, optionally including an audio track.
@interface GPUImageMovieWriter : NSObject
{
    BOOL alreadyFinishedRecording;

    NSURL *movieURL;    // destination file for the recording
    NSString *fileType; // container file type identifier passed to AVAssetWriter
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterAudioInput;
    AVAssetWriterInput *assetWriterVideoInput;
    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
    dispatch_queue_t movieWritingQueue; // serializes access to the asset writer

    // Fast-texture-upload path: render target shared between GL and Core Video
    CVOpenGLESTextureCacheRef coreVideoTextureCache;
    CVPixelBufferRef renderTarget;
    CVOpenGLESTextureRef renderTexture;

    CGSize videoSize;
    GPUImageRotationMode inputRotation;

    __unsafe_unretained id textureDelegate;
}

@property(readwrite, nonatomic) BOOL hasAudioTrack;
@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;
@property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;
// Called on successful completion / failure of the recording.
@property(nonatomic, copy) void(^completionBlock)(void);
@property(nonatomic, copy) void(^failureBlock)(NSError*);
// NOTE(review): assign rather than weak (presumably for older deployment
// targets); callers must nil this out before the delegate deallocates.
@property(nonatomic, assign) id delegate;
@property(readwrite, nonatomic) BOOL encodingLiveVideo;
@property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);
@property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);
@property(nonatomic) BOOL enabled;
@property(nonatomic, readonly) AVAssetWriter *assetWriter;
@property(nonatomic, readonly) CMTime duration;
@property(nonatomic, assign) CGAffineTransform transform;
@property(nonatomic, copy) NSArray *metaData;
@property(nonatomic, assign, getter = isPaused) BOOL paused;

// Initialization and teardown
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;

// Configures the audio track (presumably must be called before recording starts — confirm in the .m).
- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;

// Movie recording
- (void)startRecording;
- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
- (void)finishRecording;
- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
- (void)cancelRecording;
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
- (void)enableSynchronizationCallbacks;

@end
69 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/iOS/GPUImagePicture.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import "GPUImageOutput.h"
3 |
4 |
// GPUImage source backed by a still image (UIImage, CGImage, or URL) rather
// than a live video stream.
@interface GPUImagePicture : GPUImageOutput
{
    CGSize pixelSizeOfImage; // size of the source image, in pixels
    BOOL hasProcessedImage;  // presumably set once processImage has run — confirm in the .m

    dispatch_semaphore_t imageUpdateSemaphore; // guards against overlapping processing runs
}

// Initialization and teardown
- (id)initWithURL:(NSURL *)url;
- (id)initWithImage:(UIImage *)newImageSource;
- (id)initWithCGImage:(CGImageRef)newImageSource;
// smoothlyScaleOutput presumably enables mipmapped scaling for downsampled output — confirm in the .m.
- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;

// Image rendering
// Pushes the image through all attached targets and filters.
- (void)processImage;
- (CGSize)outputImageSize;

/**
 * Process image with all targets and filters asynchronously
 * The completion handler is called after processing finished in the
 * GPU's dispatch queue - and only if this method did not return NO.
 *
 * @returns NO if resource is blocked and processing is discarded, YES otherwise
 */
- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;

@end
34 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ThirdParty/GPU/Source/iOS/GPUImageView.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import "GPUImageContext.h"
3 |
// How a source image is mapped into the view's bounds.
typedef enum {
    kGPUImageFillModeStretch,                     // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio
    kGPUImageFillModePreserveAspectRatio,         // Maintains the aspect ratio of the source image, adding bars of the specified background color
    kGPUImageFillModePreserveAspectRatioAndFill   // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
} GPUImageFillModeType;

/**
 UIView subclass to use as an endpoint for displaying GPUImage outputs
 */
@interface GPUImageView : UIView
{
    GPUImageRotationMode inputRotation; // rotation applied to incoming frames
    __unsafe_unretained id textureDelegate;
}

/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio
 */
@property(readwrite, nonatomic) GPUImageFillModeType fillMode;

/** This calculates the current display size, in pixels, taking into account Retina scaling factors
 */
@property(readonly, nonatomic) CGSize sizeInPixels;

@property(nonatomic) BOOL enabled;

/** Handling fill mode

 @param redComponent Red component for background color
 @param greenComponent Green component for background color
 @param blueComponent Blue component for background color
 @param alphaComponent Alpha component for background color
 */
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;

@end
41 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // ObjectOverlayOnVideoDemo
4 | //
5 | // Created by Krupa-iMac on 06/05/14.
6 | // Copyright (c) 2014 TheAppGuruz. All rights reserved.
7 | //
8 |
9 | #import
10 | #import "GPUImage.h"
11 |
// Demo controller: runs a movie through a GPUImage filter chain, compositing
// UIKit content (uiElementInput) over the video — see the .m for the pipeline.
@interface ViewController : UIViewController
{
    GPUImageMovie *movieFile;          // source movie being processed
    GPUImageFilter *filter;            // filter the video is routed through
    GPUImageMovieWriter *movieWriter;  // writes the processed output to a file
    GPUImageUIElement *uiElementInput; // UIKit overlay fed into the chain
}

// View in which the video is displayed (wired up in the storyboard).
@property (nonatomic,strong) IBOutlet UIView *vwVideo;

@end
23 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/en.lproj/InfoPlist.strings:
--------------------------------------------------------------------------------
1 | /* Localized versions of Info.plist keys */
2 |
3 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemo/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // ObjectOverlayOnVideoDemo
4 | //
5 | // Created by Krupa-iMac on 06/05/14.
6 | // Copyright (c) 2014 TheAppGuruz. All rights reserved.
7 | //
8 |
9 | #import
10 |
11 | #import "AppDelegate.h"
12 |
// Application entry point: hands control to UIKit, naming AppDelegate as the
// application delegate class.
int main(int argc, char * argv[])
{
    @autoreleasepool {
        NSString *delegateClassName = NSStringFromClass([AppDelegate class]);
        return UIApplicationMain(argc, argv, nil, delegateClassName);
    }
}
19 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemoTests/ObjectOverlayOnVideoDemoTests-Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>${EXECUTABLE_NAME}</string>
	<key>CFBundleIdentifier</key>
	<string>tag.${PRODUCT_NAME:rfc1034identifier}</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>
22 |
23 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemoTests/ObjectOverlayOnVideoDemoTests.m:
--------------------------------------------------------------------------------
1 | //
2 | // ObjectOverlayOnVideoDemoTests.m
3 | // ObjectOverlayOnVideoDemoTests
4 | //
5 | // Created by Krupa-iMac on 06/05/14.
6 | // Copyright (c) 2014 TheAppGuruz. All rights reserved.
7 | //
8 |
9 | #import
10 |
// Default unit-test case generated by the Xcode project template.
@interface ObjectOverlayOnVideoDemoTests : XCTestCase

@end
14 |
@implementation ObjectOverlayOnVideoDemoTests

- (void)setUp
{
    [super setUp];
    // Per-test setup goes here; XCTest invokes this before each test method.
}

- (void)tearDown
{
    // Per-test cleanup goes here; XCTest invokes this after each test method.
    [super tearDown];
}

// Template placeholder: fails deliberately until a real test replaces it.
- (void)testExample
{
    XCTFail(@"No implementation for \"%s\"", __PRETTY_FUNCTION__);
}

@end
35 |
--------------------------------------------------------------------------------
/ObjectOverlayOnVideoDemoTests/en.lproj/InfoPlist.strings:
--------------------------------------------------------------------------------
1 | /* Localized versions of Info.plist keys */
2 |
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | iOS-Guide-for-Object-Overlay-on-Video
2 | =====================================
3 |
The main objective of this post is to describe how to overlay objects such as text and images on video. Since iOS 4 there has been a cleaner way to do this, and this guide demonstrates how.
5 |
This tutorial has been presented by The App Guruz — one of the best mobile application development companies in India.
7 |
--------------------------------------------------------------------------------