├── .gitignore ├── .gitmodules ├── .travis.yml ├── GLVideoFilter.xcodeproj ├── project.pbxproj └── project.xcworkspace │ └── contents.xcworkspacedata ├── GLVideoFilter ├── AppDelegate.h ├── AppDelegate.m ├── Classes │ ├── FilterManager.h │ ├── FilterManager.m │ ├── QuadModel.h │ ├── QuadModel.m │ ├── ShaderManager.h │ └── ShaderManager.m ├── FilterViewController.h ├── FilterViewController.m ├── Filters.plist ├── GLVideoFilter-Info.plist ├── GLVideoFilter-Prefix.pch ├── GLVideoFilter.entitlements ├── Images.xcassets │ ├── AppIcon.appiconset │ │ ├── Contents.json │ │ ├── Icon-72.png │ │ ├── Icon-72@2x.png │ │ ├── Icon-Small-50.png │ │ ├── Icon-Small-50@2x.png │ │ ├── Icon-Small.png │ │ ├── Icon-Small@2x.png │ │ ├── Icon.png │ │ └── Icon@2x.png │ ├── LaunchImage.launchimage │ │ ├── Contents.json │ │ └── Default-568h@2x.png │ ├── Locked.imageset │ │ ├── Contents.json │ │ ├── Locked.png │ │ └── Locked@2x.png │ └── Unlocked.imageset │ │ ├── Contents.json │ │ ├── Unlocked.png │ │ └── Unlocked@2x.png ├── Shaders.plist ├── Shaders │ ├── CannyComic.fsh │ ├── CannyMag.fsh │ ├── CannyThreshold.fsh │ ├── CannyThresholdComposite.fsh │ ├── CannyThresholdInvert.fsh │ ├── Sobel.fsh │ ├── SobelBW.fsh │ ├── SobelBWComposite.fsh │ ├── SobelBlend.fsh │ ├── SobelCanny.fsh │ ├── SobelRGBComposite.fsh │ ├── blurXPass.fsh │ ├── blurYPass.fsh │ ├── passthrough.fsh │ ├── quadInvertY.vsh │ ├── quadKernel.vsh │ ├── quadPassthrough.vsh │ ├── quadScreenKernel.vsh │ └── yuv2rgb.fsh ├── StoryBoard_iPad.storyboard ├── StoryBoard_iPhone.storyboard └── main.m └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | ######################### 2 | # .gitignore file for Xcode4 / OS X Source projects 3 | # 4 | # Version 2.0 5 | # For latest version, see: http://stackoverflow.com/questions/49478/git-ignore-file-for-xcode-projects 6 | # 7 | # 2013 updates: 8 | # - fixed the broken "save personal Schemes" 9 | # 10 | # NB: if you are storing "built" 
products, this WILL NOT WORK, 11 | # and you should use a different .gitignore (or none at all) 12 | # This file is for SOURCE projects, where there are many extra 13 | # files that we want to exclude 14 | # 15 | ######################### 16 | 17 | ##### 18 | # OS X temporary files that should never be committed 19 | 20 | .DS_Store 21 | *.swp 22 | *.lock 23 | profile 24 | 25 | 26 | #### 27 | # Xcode temporary files that should never be committed 28 | # 29 | # NB: NIB/XIB files still exist even on Storyboard projects, so we want this... 30 | 31 | *~.nib 32 | 33 | 34 | #### 35 | # Xcode build files - 36 | # 37 | # NB: slash on the end, so we only remove the FOLDER, not any files that were badly named "DerivedData" 38 | 39 | DerivedData/ 40 | 41 | # NB: slash on the end, so we only remove the FOLDER, not any files that were badly named "build" 42 | 43 | build/ 44 | 45 | 46 | ##### 47 | # Xcode private settings (window sizes, bookmarks, breakpoints, custom executables, smart groups) 48 | # 49 | # This is complicated: 50 | # 51 | # SOMETIMES you need to put this file in version control. 52 | # Apple designed it poorly - if you use "custom executables", they are 53 | # saved in this file. 54 | # 99% of projects do NOT use those, so they do NOT want to version control this file. 55 | # ..but if you're in the 1%, comment out the line "*.pbxuser" 56 | 57 | *.pbxuser 58 | *.mode1v3 59 | *.mode2v3 60 | *.perspectivev3 61 | # NB: also, whitelist the default ones, some projects need to use these 62 | !default.pbxuser 63 | !default.mode1v3 64 | !default.mode2v3 65 | !default.perspectivev3 66 | 67 | 68 | #### 69 | # Xcode 4 - semi-personal settings 70 | # 71 | # 72 | # OPTION 1: --------------------------------- 73 | # throw away ALL personal settings (including custom schemes! 
74 | # - unless they are "shared") 75 | # 76 | # NB: this is exclusive with OPTION 2 below 77 | xcuserdata 78 | 79 | # OPTION 2: --------------------------------- 80 | # get rid of ALL personal settings, but KEEP SOME OF THEM 81 | # - NB: you must manually uncomment the bits you want to keep 82 | # 83 | # NB: this is exclusive with OPTION 1 above 84 | # 85 | #xcuserdata/**/* 86 | 87 | # (requires option 2 above): Personal Schemes 88 | # 89 | #!xcuserdata/**/xcschemes/* 90 | 91 | #### 92 | # XCode 4 workspaces - more detailed 93 | # 94 | # Workspaces are important! They are a core feature of Xcode - don't exclude them :) 95 | # 96 | # Workspace layout is quite spammy. For reference: 97 | # 98 | # /(root)/ 99 | # /(project-name).xcodeproj/ 100 | # project.pbxproj 101 | # /project.xcworkspace/ 102 | # contents.xcworkspacedata 103 | # /xcuserdata/ 104 | # /(your name)/xcuserdatad/ 105 | # UserInterfaceState.xcuserstate 106 | # /xcsshareddata/ 107 | # /xcschemes/ 108 | # (shared scheme name).xcscheme 109 | # /xcuserdata/ 110 | # /(your name)/xcuserdatad/ 111 | # (private scheme).xcscheme 112 | # xcschememanagement.plist 113 | # 114 | # 115 | 116 | #### 117 | # Xcode 4 - Deprecated classes 118 | # 119 | # Allegedly, if you manually "deprecate" your classes, they get moved here. 120 | # 121 | # We're using source-control, so this is a "feature" that we do not want! 122 | 123 | *.moved-aside 124 | 125 | #### 126 | # Cocoapods: cocoapods.org 127 | # 128 | # Ignoring these files means that whoever uses the code will first have to run: 129 | # pod install 130 | # in the App.xcodeproj directory. 131 | # This ensures the latest dependencies are used. 132 | Pods/ 133 | Podfile.lock 134 | 135 | 136 | #### 137 | # UNKNOWN: recommended by others, but I can't discover what these files are 138 | # 139 | # ...none. Everything is now explained. 
140 | 141 | *.xccheckout 142 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "MBProgressHUD"] 2 | path = MBProgressHUD 3 | url = https://github.com/jdg/MBProgressHUD 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: objective-c -------------------------------------------------------------------------------- /GLVideoFilter.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | B66E3E3313E9E79C00D2ACF0 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B66E3E3213E9E79C00D2ACF0 /* UIKit.framework */; }; 11 | B66E3E3513E9E79C00D2ACF0 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B66E3E3413E9E79C00D2ACF0 /* Foundation.framework */; }; 12 | B66E3E3713E9E79C00D2ACF0 /* GLKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B66E3E3613E9E79C00D2ACF0 /* GLKit.framework */; }; 13 | B66E3E3913E9E79C00D2ACF0 /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B66E3E3813E9E79C00D2ACF0 /* OpenGLES.framework */; }; 14 | B66E3E4113E9E79C00D2ACF0 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = B66E3E4013E9E79C00D2ACF0 /* main.m */; }; 15 | B66E3E4513E9E79C00D2ACF0 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = B66E3E4413E9E79C00D2ACF0 /* AppDelegate.m */; }; 16 | B66E3E4713E9E79C00D2ACF0 /* Sobel.fsh in Resources */ = {isa = PBXBuildFile; fileRef = B66E3E4613E9E79C00D2ACF0 /* Sobel.fsh */; }; 17 | B66E3E4913E9E79C00D2ACF0 /* quadInvertY.vsh in Resources */ = {isa = PBXBuildFile; fileRef = 
B66E3E4813E9E79C00D2ACF0 /* quadInvertY.vsh */; }; 18 | B66E3E4C13E9E79C00D2ACF0 /* FilterViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B66E3E4B13E9E79C00D2ACF0 /* FilterViewController.m */; }; 19 | B69AAFE013FC961F00B7125C /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B69AAFDF13FC961F00B7125C /* AVFoundation.framework */; }; 20 | B69AAFE313FC965400B7125C /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B69AAFE213FC965400B7125C /* CoreVideo.framework */; }; 21 | B69AAFE513FC972A00B7125C /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B69AAFE413FC972A00B7125C /* CoreMedia.framework */; }; 22 | DF0D63C318F78A3500D51089 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = DF0D63C218F78A3500D51089 /* CoreGraphics.framework */; }; 23 | DF0D63D718F78DB000D51089 /* MBProgressHUD.m in Sources */ = {isa = PBXBuildFile; fileRef = DF0D63D618F78DB000D51089 /* MBProgressHUD.m */; }; 24 | DF0D63E118F7947800D51089 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = DF0D63E018F7947800D51089 /* Images.xcassets */; }; 25 | DF143B7416EB317E006D35F9 /* SobelRGBComposite.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF143B7116EB316B006D35F9 /* SobelRGBComposite.fsh */; }; 26 | DF143B7516EB317E006D35F9 /* yuv2rgb.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF143B7216EB316B006D35F9 /* yuv2rgb.fsh */; }; 27 | DF2BE98516E6939600FF0C90 /* SobelCanny.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF2BE98416E6796E00FF0C90 /* SobelCanny.fsh */; }; 28 | DF2BE98716E69BE000FF0C90 /* StoryBoard_iPhone.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = DF2BE98616E69BE000FF0C90 /* StoryBoard_iPhone.storyboard */; }; 29 | DF2BE98916E69BFC00FF0C90 /* StoryBoard_iPad.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = DF2BE98816E69BFC00FF0C90 /* StoryBoard_iPad.storyboard */; }; 30 | DF3F45581821C7A80069D69F /* quadKernel.vsh in 
Resources */ = {isa = PBXBuildFile; fileRef = DF3F45561821C4E50069D69F /* quadKernel.vsh */; }; 31 | DF4740B516E6EEFA0073DDB8 /* SobelBlend.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF4740B416E6EEEC0073DDB8 /* SobelBlend.fsh */; }; 32 | DF59483E16E5B91E00C16079 /* quadPassthrough.vsh in Resources */ = {isa = PBXBuildFile; fileRef = DF59483D16E5B90F00C16079 /* quadPassthrough.vsh */; }; 33 | DF59484016E5BB0900C16079 /* passthrough.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF59483F16E5BB0000C16079 /* passthrough.fsh */; }; 34 | DF7EE77518F8C16C000CD7AA /* Filters.plist in Resources */ = {isa = PBXBuildFile; fileRef = DF7EE77418F8C16C000CD7AA /* Filters.plist */; }; 35 | DF7EE77718F8C45A000CD7AA /* Shaders.plist in Resources */ = {isa = PBXBuildFile; fileRef = DF7EE77618F8C45A000CD7AA /* Shaders.plist */; }; 36 | DF7EE78218F915A3000CD7AA /* FilterManager.m in Sources */ = {isa = PBXBuildFile; fileRef = DF7EE77D18F915A3000CD7AA /* FilterManager.m */; }; 37 | DF7EE78318F915A3000CD7AA /* QuadModel.m in Sources */ = {isa = PBXBuildFile; fileRef = DF7EE77F18F915A3000CD7AA /* QuadModel.m */; }; 38 | DF7EE78418F915A3000CD7AA /* ShaderManager.m in Sources */ = {isa = PBXBuildFile; fileRef = DF7EE78118F915A3000CD7AA /* ShaderManager.m */; }; 39 | DF93BCCA16E4983C00C062FC /* SobelBW.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF93BCC816E495B200C062FC /* SobelBW.fsh */; }; 40 | DF9CAE8A1824C18800CE3945 /* blurXPass.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF9CAE881824C16800CE3945 /* blurXPass.fsh */; }; 41 | DF9CAE8B1824C18800CE3945 /* blurYPass.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DF9CAE891824C16800CE3945 /* blurYPass.fsh */; }; 42 | DFA4C5EA18F9D1B90099F4DE /* CannyMag.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DFA4C5E818F9D1B90099F4DE /* CannyMag.fsh */; }; 43 | DFA4C5EB18F9D1B90099F4DE /* CannyThreshold.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DFA4C5E918F9D1B90099F4DE /* CannyThreshold.fsh */; 
}; 44 | DFA4C5ED18F9E7820099F4DE /* quadScreenKernel.vsh in Resources */ = {isa = PBXBuildFile; fileRef = DFA4C5EC18F9E7820099F4DE /* quadScreenKernel.vsh */; }; 45 | DFA4C5EF18F9EBD40099F4DE /* CannyThresholdComposite.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DFA4C5EE18F9EBD40099F4DE /* CannyThresholdComposite.fsh */; }; 46 | DFA4C5F118F9ED6D0099F4DE /* CannyThresholdInvert.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DFA4C5F018F9ED6D0099F4DE /* CannyThresholdInvert.fsh */; }; 47 | DFC5BAC0190C9D7C004FF2B1 /* CannyComic.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DFC5BABF190C9D7C004FF2B1 /* CannyComic.fsh */; }; 48 | DFC750F516E5C4340036CDD8 /* SobelBWComposite.fsh in Resources */ = {isa = PBXBuildFile; fileRef = DFC750F416E5C3E80036CDD8 /* SobelBWComposite.fsh */; }; 49 | /* End PBXBuildFile section */ 50 | 51 | /* Begin PBXFileReference section */ 52 | B66E3E2E13E9E79C00D2ACF0 /* GLVideoFilter.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = GLVideoFilter.app; sourceTree = BUILT_PRODUCTS_DIR; }; 53 | B66E3E3213E9E79C00D2ACF0 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; }; 54 | B66E3E3413E9E79C00D2ACF0 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; 55 | B66E3E3613E9E79C00D2ACF0 /* GLKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = GLKit.framework; path = System/Library/Frameworks/GLKit.framework; sourceTree = SDKROOT; }; 56 | B66E3E3813E9E79C00D2ACF0 /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; }; 57 | 
B66E3E3C13E9E79C00D2ACF0 /* GLVideoFilter-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = "GLVideoFilter-Info.plist"; sourceTree = ""; }; 58 | B66E3E4013E9E79C00D2ACF0 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 59 | B66E3E4213E9E79C00D2ACF0 /* GLVideoFilter-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "GLVideoFilter-Prefix.pch"; sourceTree = ""; }; 60 | B66E3E4313E9E79C00D2ACF0 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 61 | B66E3E4413E9E79C00D2ACF0 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 62 | B66E3E4613E9E79C00D2ACF0 /* Sobel.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = Sobel.fsh; path = Shaders/Sobel.fsh; sourceTree = ""; }; 63 | B66E3E4813E9E79C00D2ACF0 /* quadInvertY.vsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = quadInvertY.vsh; path = Shaders/quadInvertY.vsh; sourceTree = ""; }; 64 | B66E3E4A13E9E79C00D2ACF0 /* FilterViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FilterViewController.h; sourceTree = ""; }; 65 | B66E3E4B13E9E79C00D2ACF0 /* FilterViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FilterViewController.m; sourceTree = ""; }; 66 | B69AAFDF13FC961F00B7125C /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 67 | B69AAFE213FC965400B7125C /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; }; 68 | 
B69AAFE413FC972A00B7125C /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; }; 69 | DF0D63C218F78A3500D51089 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; }; 70 | DF0D63D518F78DB000D51089 /* MBProgressHUD.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = MBProgressHUD.h; path = MBProgressHUD/MBProgressHUD.h; sourceTree = SOURCE_ROOT; }; 71 | DF0D63D618F78DB000D51089 /* MBProgressHUD.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = MBProgressHUD.m; path = MBProgressHUD/MBProgressHUD.m; sourceTree = SOURCE_ROOT; }; 72 | DF0D63E018F7947800D51089 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = ""; }; 73 | DF143B7116EB316B006D35F9 /* SobelRGBComposite.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = SobelRGBComposite.fsh; path = Shaders/SobelRGBComposite.fsh; sourceTree = ""; }; 74 | DF143B7216EB316B006D35F9 /* yuv2rgb.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = yuv2rgb.fsh; path = Shaders/yuv2rgb.fsh; sourceTree = ""; }; 75 | DF2BE98416E6796E00FF0C90 /* SobelCanny.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = SobelCanny.fsh; path = Shaders/SobelCanny.fsh; sourceTree = ""; }; 76 | DF2BE98616E69BE000FF0C90 /* StoryBoard_iPhone.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = StoryBoard_iPhone.storyboard; sourceTree = ""; }; 77 | DF2BE98816E69BFC00FF0C90 /* StoryBoard_iPad.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
file.storyboard; path = StoryBoard_iPad.storyboard; sourceTree = ""; }; 78 | DF3F45561821C4E50069D69F /* quadKernel.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = quadKernel.vsh; path = Shaders/quadKernel.vsh; sourceTree = ""; }; 79 | DF4740B416E6EEEC0073DDB8 /* SobelBlend.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = SobelBlend.fsh; path = Shaders/SobelBlend.fsh; sourceTree = ""; }; 80 | DF59483D16E5B90F00C16079 /* quadPassthrough.vsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = quadPassthrough.vsh; path = Shaders/quadPassthrough.vsh; sourceTree = ""; }; 81 | DF59483F16E5BB0000C16079 /* passthrough.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = passthrough.fsh; path = Shaders/passthrough.fsh; sourceTree = ""; }; 82 | DF7EE77418F8C16C000CD7AA /* Filters.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Filters.plist; sourceTree = ""; }; 83 | DF7EE77618F8C45A000CD7AA /* Shaders.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Shaders.plist; sourceTree = ""; }; 84 | DF7EE77C18F915A3000CD7AA /* FilterManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FilterManager.h; path = Classes/FilterManager.h; sourceTree = ""; }; 85 | DF7EE77D18F915A3000CD7AA /* FilterManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = FilterManager.m; path = Classes/FilterManager.m; sourceTree = ""; }; 86 | DF7EE77E18F915A3000CD7AA /* QuadModel.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = QuadModel.h; path = Classes/QuadModel.h; sourceTree = ""; }; 87 | DF7EE77F18F915A3000CD7AA /* QuadModel.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = QuadModel.m; path = 
Classes/QuadModel.m; sourceTree = ""; }; 88 | DF7EE78018F915A3000CD7AA /* ShaderManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ShaderManager.h; path = Classes/ShaderManager.h; sourceTree = ""; }; 89 | DF7EE78118F915A3000CD7AA /* ShaderManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ShaderManager.m; path = Classes/ShaderManager.m; sourceTree = ""; }; 90 | DF93BCC816E495B200C062FC /* SobelBW.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = SobelBW.fsh; path = Shaders/SobelBW.fsh; sourceTree = ""; }; 91 | DF9CAE881824C16800CE3945 /* blurXPass.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = blurXPass.fsh; path = Shaders/blurXPass.fsh; sourceTree = ""; }; 92 | DF9CAE891824C16800CE3945 /* blurYPass.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = blurYPass.fsh; path = Shaders/blurYPass.fsh; sourceTree = ""; }; 93 | DFA4C5E818F9D1B90099F4DE /* CannyMag.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = CannyMag.fsh; path = Shaders/CannyMag.fsh; sourceTree = ""; }; 94 | DFA4C5E918F9D1B90099F4DE /* CannyThreshold.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = CannyThreshold.fsh; path = Shaders/CannyThreshold.fsh; sourceTree = ""; }; 95 | DFA4C5EC18F9E7820099F4DE /* quadScreenKernel.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = quadScreenKernel.vsh; path = Shaders/quadScreenKernel.vsh; sourceTree = ""; }; 96 | DFA4C5EE18F9EBD40099F4DE /* CannyThresholdComposite.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = CannyThresholdComposite.fsh; path = Shaders/CannyThresholdComposite.fsh; sourceTree = ""; }; 97 | DFA4C5F018F9ED6D0099F4DE /* CannyThresholdInvert.fsh */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = CannyThresholdInvert.fsh; path = Shaders/CannyThresholdInvert.fsh; sourceTree = ""; }; 98 | DFC5BABF190C9D7C004FF2B1 /* CannyComic.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = CannyComic.fsh; path = Shaders/CannyComic.fsh; sourceTree = ""; }; 99 | DFC5BAC1190D7F63004FF2B1 /* GLVideoFilter.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.xml; path = GLVideoFilter.entitlements; sourceTree = ""; }; 100 | DFC750F416E5C3E80036CDD8 /* SobelBWComposite.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = SobelBWComposite.fsh; path = Shaders/SobelBWComposite.fsh; sourceTree = ""; }; 101 | /* End PBXFileReference section */ 102 | 103 | /* Begin PBXFrameworksBuildPhase section */ 104 | B66E3E2B13E9E79C00D2ACF0 /* Frameworks */ = { 105 | isa = PBXFrameworksBuildPhase; 106 | buildActionMask = 2147483647; 107 | files = ( 108 | DF0D63C318F78A3500D51089 /* CoreGraphics.framework in Frameworks */, 109 | B66E3E3313E9E79C00D2ACF0 /* UIKit.framework in Frameworks */, 110 | B66E3E3513E9E79C00D2ACF0 /* Foundation.framework in Frameworks */, 111 | B66E3E3713E9E79C00D2ACF0 /* GLKit.framework in Frameworks */, 112 | B66E3E3913E9E79C00D2ACF0 /* OpenGLES.framework in Frameworks */, 113 | B69AAFE013FC961F00B7125C /* AVFoundation.framework in Frameworks */, 114 | B69AAFE313FC965400B7125C /* CoreVideo.framework in Frameworks */, 115 | B69AAFE513FC972A00B7125C /* CoreMedia.framework in Frameworks */, 116 | ); 117 | runOnlyForDeploymentPostprocessing = 0; 118 | }; 119 | /* End PBXFrameworksBuildPhase section */ 120 | 121 | /* Begin PBXGroup section */ 122 | B66E3E2313E9E79C00D2ACF0 = { 123 | isa = PBXGroup; 124 | children = ( 125 | B66E3E3A13E9E79C00D2ACF0 /* GLVideoFilter */, 126 | B66E3E3113E9E79C00D2ACF0 /* Frameworks */, 127 | B66E3E2F13E9E79C00D2ACF0 /* Products */, 128 | ); 129 | sourceTree = ""; 130 | }; 131 | 
B66E3E2F13E9E79C00D2ACF0 /* Products */ = { 132 | isa = PBXGroup; 133 | children = ( 134 | B66E3E2E13E9E79C00D2ACF0 /* GLVideoFilter.app */, 135 | ); 136 | name = Products; 137 | sourceTree = ""; 138 | }; 139 | B66E3E3113E9E79C00D2ACF0 /* Frameworks */ = { 140 | isa = PBXGroup; 141 | children = ( 142 | DF0D63C218F78A3500D51089 /* CoreGraphics.framework */, 143 | B69AAFE413FC972A00B7125C /* CoreMedia.framework */, 144 | B69AAFE213FC965400B7125C /* CoreVideo.framework */, 145 | B69AAFDF13FC961F00B7125C /* AVFoundation.framework */, 146 | B66E3E3213E9E79C00D2ACF0 /* UIKit.framework */, 147 | B66E3E3413E9E79C00D2ACF0 /* Foundation.framework */, 148 | B66E3E3613E9E79C00D2ACF0 /* GLKit.framework */, 149 | B66E3E3813E9E79C00D2ACF0 /* OpenGLES.framework */, 150 | ); 151 | name = Frameworks; 152 | sourceTree = ""; 153 | }; 154 | B66E3E3A13E9E79C00D2ACF0 /* GLVideoFilter */ = { 155 | isa = PBXGroup; 156 | children = ( 157 | DFC5BAC1190D7F63004FF2B1 /* GLVideoFilter.entitlements */, 158 | B66E3E4313E9E79C00D2ACF0 /* AppDelegate.h */, 159 | B66E3E4413E9E79C00D2ACF0 /* AppDelegate.m */, 160 | B66E3E4A13E9E79C00D2ACF0 /* FilterViewController.h */, 161 | B66E3E4B13E9E79C00D2ACF0 /* FilterViewController.m */, 162 | DF7EE77B18F8FD80000CD7AA /* Supporting Classes */, 163 | B66E3E3B13E9E79C00D2ACF0 /* Supporting Files */, 164 | ); 165 | path = GLVideoFilter; 166 | sourceTree = ""; 167 | }; 168 | B66E3E3B13E9E79C00D2ACF0 /* Supporting Files */ = { 169 | isa = PBXGroup; 170 | children = ( 171 | DF7EE77418F8C16C000CD7AA /* Filters.plist */, 172 | B66E3E3C13E9E79C00D2ACF0 /* GLVideoFilter-Info.plist */, 173 | B66E3E4213E9E79C00D2ACF0 /* GLVideoFilter-Prefix.pch */, 174 | DF0D63E018F7947800D51089 /* Images.xcassets */, 175 | B66E3E4013E9E79C00D2ACF0 /* main.m */, 176 | B6836C54141AE2AA005DB1A4 /* Shaders */, 177 | DF7EE77618F8C45A000CD7AA /* Shaders.plist */, 178 | DF2BE98816E69BFC00FF0C90 /* StoryBoard_iPad.storyboard */, 179 | DF2BE98616E69BE000FF0C90 /* StoryBoard_iPhone.storyboard */, 
180 | ); 181 | name = "Supporting Files"; 182 | sourceTree = ""; 183 | }; 184 | B6836C54141AE2AA005DB1A4 /* Shaders */ = { 185 | isa = PBXGroup; 186 | children = ( 187 | DF9CAE8C1824C19900CE3945 /* Blurs */, 188 | DF9CAE8D1824C1B600CE3945 /* Filters */, 189 | B66E3E4813E9E79C00D2ACF0 /* quadInvertY.vsh */, 190 | DF3F45561821C4E50069D69F /* quadKernel.vsh */, 191 | DFA4C5EC18F9E7820099F4DE /* quadScreenKernel.vsh */, 192 | DF59483D16E5B90F00C16079 /* quadPassthrough.vsh */, 193 | DF59483F16E5BB0000C16079 /* passthrough.fsh */, 194 | DF143B7216EB316B006D35F9 /* yuv2rgb.fsh */, 195 | ); 196 | name = Shaders; 197 | sourceTree = ""; 198 | }; 199 | DF7EE77B18F8FD80000CD7AA /* Supporting Classes */ = { 200 | isa = PBXGroup; 201 | children = ( 202 | DF7EE77C18F915A3000CD7AA /* FilterManager.h */, 203 | DF7EE77D18F915A3000CD7AA /* FilterManager.m */, 204 | DF7EE77E18F915A3000CD7AA /* QuadModel.h */, 205 | DF7EE77F18F915A3000CD7AA /* QuadModel.m */, 206 | DF7EE78018F915A3000CD7AA /* ShaderManager.h */, 207 | DF7EE78118F915A3000CD7AA /* ShaderManager.m */, 208 | DF0D63D518F78DB000D51089 /* MBProgressHUD.h */, 209 | DF0D63D618F78DB000D51089 /* MBProgressHUD.m */, 210 | ); 211 | name = "Supporting Classes"; 212 | sourceTree = ""; 213 | }; 214 | DF9CAE8C1824C19900CE3945 /* Blurs */ = { 215 | isa = PBXGroup; 216 | children = ( 217 | DF9CAE881824C16800CE3945 /* blurXPass.fsh */, 218 | DF9CAE891824C16800CE3945 /* blurYPass.fsh */, 219 | ); 220 | name = Blurs; 221 | sourceTree = ""; 222 | }; 223 | DF9CAE8D1824C1B600CE3945 /* Filters */ = { 224 | isa = PBXGroup; 225 | children = ( 226 | DFC5BABF190C9D7C004FF2B1 /* CannyComic.fsh */, 227 | DFA4C5F018F9ED6D0099F4DE /* CannyThresholdInvert.fsh */, 228 | DFA4C5EE18F9EBD40099F4DE /* CannyThresholdComposite.fsh */, 229 | DFA4C5E818F9D1B90099F4DE /* CannyMag.fsh */, 230 | DFA4C5E918F9D1B90099F4DE /* CannyThreshold.fsh */, 231 | B66E3E4613E9E79C00D2ACF0 /* Sobel.fsh */, 232 | DF4740B416E6EEEC0073DDB8 /* SobelBlend.fsh */, 233 | 
DF93BCC816E495B200C062FC /* SobelBW.fsh */, 234 | DFC750F416E5C3E80036CDD8 /* SobelBWComposite.fsh */, 235 | DF2BE98416E6796E00FF0C90 /* SobelCanny.fsh */, 236 | DF143B7116EB316B006D35F9 /* SobelRGBComposite.fsh */, 237 | ); 238 | name = Filters; 239 | sourceTree = ""; 240 | }; 241 | /* End PBXGroup section */ 242 | 243 | /* Begin PBXNativeTarget section */ 244 | B66E3E2D13E9E79C00D2ACF0 /* GLVideoFilter */ = { 245 | isa = PBXNativeTarget; 246 | buildConfigurationList = B66E3E5513E9E79C00D2ACF0 /* Build configuration list for PBXNativeTarget "GLVideoFilter" */; 247 | buildPhases = ( 248 | B66E3E2A13E9E79C00D2ACF0 /* Sources */, 249 | B66E3E2B13E9E79C00D2ACF0 /* Frameworks */, 250 | B66E3E2C13E9E79C00D2ACF0 /* Resources */, 251 | ); 252 | buildRules = ( 253 | ); 254 | dependencies = ( 255 | ); 256 | name = GLVideoFilter; 257 | productName = GLCameraRipple; 258 | productReference = B66E3E2E13E9E79C00D2ACF0 /* GLVideoFilter.app */; 259 | productType = "com.apple.product-type.application"; 260 | }; 261 | /* End PBXNativeTarget section */ 262 | 263 | /* Begin PBXProject section */ 264 | B66E3E2513E9E79C00D2ACF0 /* Project object */ = { 265 | isa = PBXProject; 266 | attributes = { 267 | LastUpgradeCheck = 0510; 268 | TargetAttributes = { 269 | B66E3E2D13E9E79C00D2ACF0 = { 270 | DevelopmentTeam = G8X4XMPVS9; 271 | SystemCapabilities = { 272 | com.apple.iCloud = { 273 | enabled = 1; 274 | }; 275 | }; 276 | }; 277 | }; 278 | }; 279 | buildConfigurationList = B66E3E2813E9E79C00D2ACF0 /* Build configuration list for PBXProject "GLVideoFilter" */; 280 | compatibilityVersion = "Xcode 3.2"; 281 | developmentRegion = English; 282 | hasScannedForEncodings = 0; 283 | knownRegions = ( 284 | en, 285 | ); 286 | mainGroup = B66E3E2313E9E79C00D2ACF0; 287 | productRefGroup = B66E3E2F13E9E79C00D2ACF0 /* Products */; 288 | projectDirPath = ""; 289 | projectRoot = ""; 290 | targets = ( 291 | B66E3E2D13E9E79C00D2ACF0 /* GLVideoFilter */, 292 | ); 293 | }; 294 | /* End PBXProject section */ 
295 | 296 | /* Begin PBXResourcesBuildPhase section */ 297 | B66E3E2C13E9E79C00D2ACF0 /* Resources */ = { 298 | isa = PBXResourcesBuildPhase; 299 | buildActionMask = 2147483647; 300 | files = ( 301 | DF9CAE8A1824C18800CE3945 /* blurXPass.fsh in Resources */, 302 | DF9CAE8B1824C18800CE3945 /* blurYPass.fsh in Resources */, 303 | DF3F45581821C7A80069D69F /* quadKernel.vsh in Resources */, 304 | DF143B7416EB317E006D35F9 /* SobelRGBComposite.fsh in Resources */, 305 | DF143B7516EB317E006D35F9 /* yuv2rgb.fsh in Resources */, 306 | DF7EE77718F8C45A000CD7AA /* Shaders.plist in Resources */, 307 | DFA4C5F118F9ED6D0099F4DE /* CannyThresholdInvert.fsh in Resources */, 308 | DFA4C5ED18F9E7820099F4DE /* quadScreenKernel.vsh in Resources */, 309 | DF4740B516E6EEFA0073DDB8 /* SobelBlend.fsh in Resources */, 310 | DF2BE98516E6939600FF0C90 /* SobelCanny.fsh in Resources */, 311 | DFC750F516E5C4340036CDD8 /* SobelBWComposite.fsh in Resources */, 312 | DFA4C5EA18F9D1B90099F4DE /* CannyMag.fsh in Resources */, 313 | DF59484016E5BB0900C16079 /* passthrough.fsh in Resources */, 314 | DF0D63E118F7947800D51089 /* Images.xcassets in Resources */, 315 | DFA4C5EF18F9EBD40099F4DE /* CannyThresholdComposite.fsh in Resources */, 316 | DF59483E16E5B91E00C16079 /* quadPassthrough.vsh in Resources */, 317 | DF93BCCA16E4983C00C062FC /* SobelBW.fsh in Resources */, 318 | B66E3E4713E9E79C00D2ACF0 /* Sobel.fsh in Resources */, 319 | B66E3E4913E9E79C00D2ACF0 /* quadInvertY.vsh in Resources */, 320 | DFC5BAC0190C9D7C004FF2B1 /* CannyComic.fsh in Resources */, 321 | DF7EE77518F8C16C000CD7AA /* Filters.plist in Resources */, 322 | DF2BE98716E69BE000FF0C90 /* StoryBoard_iPhone.storyboard in Resources */, 323 | DF2BE98916E69BFC00FF0C90 /* StoryBoard_iPad.storyboard in Resources */, 324 | DFA4C5EB18F9D1B90099F4DE /* CannyThreshold.fsh in Resources */, 325 | ); 326 | runOnlyForDeploymentPostprocessing = 0; 327 | }; 328 | /* End PBXResourcesBuildPhase section */ 329 | 330 | /* Begin PBXSourcesBuildPhase 
section */ 331 | B66E3E2A13E9E79C00D2ACF0 /* Sources */ = { 332 | isa = PBXSourcesBuildPhase; 333 | buildActionMask = 2147483647; 334 | files = ( 335 | DF7EE78318F915A3000CD7AA /* QuadModel.m in Sources */, 336 | B66E3E4113E9E79C00D2ACF0 /* main.m in Sources */, 337 | DF7EE78418F915A3000CD7AA /* ShaderManager.m in Sources */, 338 | B66E3E4513E9E79C00D2ACF0 /* AppDelegate.m in Sources */, 339 | DF7EE78218F915A3000CD7AA /* FilterManager.m in Sources */, 340 | DF0D63D718F78DB000D51089 /* MBProgressHUD.m in Sources */, 341 | B66E3E4C13E9E79C00D2ACF0 /* FilterViewController.m in Sources */, 342 | ); 343 | runOnlyForDeploymentPostprocessing = 0; 344 | }; 345 | /* End PBXSourcesBuildPhase section */ 346 | 347 | /* Begin XCBuildConfiguration section */ 348 | B66E3E5313E9E79C00D2ACF0 /* Debug */ = { 349 | isa = XCBuildConfiguration; 350 | buildSettings = { 351 | ALWAYS_SEARCH_USER_PATHS = NO; 352 | CLANG_ENABLE_OBJC_ARC = YES; 353 | CLANG_WARN_CONSTANT_CONVERSION = YES; 354 | CLANG_WARN_ENUM_CONVERSION = YES; 355 | CLANG_WARN_INT_CONVERSION = YES; 356 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 357 | CODE_SIGN_IDENTITY = "iPhone Developer"; 358 | COPY_PHASE_STRIP = NO; 359 | GCC_C_LANGUAGE_STANDARD = gnu99; 360 | GCC_DYNAMIC_NO_PIC = NO; 361 | GCC_OPTIMIZATION_LEVEL = 0; 362 | GCC_PREPROCESSOR_DEFINITIONS = ( 363 | "DEBUG=1", 364 | "$(inherited)", 365 | ); 366 | GCC_SYMBOLS_PRIVATE_EXTERN = NO; 367 | GCC_VERSION = ""; 368 | GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES; 369 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 370 | GCC_WARN_UNINITIALIZED_AUTOS = YES; 371 | GCC_WARN_UNUSED_VARIABLE = YES; 372 | IPHONEOS_DEPLOYMENT_TARGET = 6.0; 373 | ONLY_ACTIVE_ARCH = YES; 374 | PROVISIONING_PROFILE = ""; 375 | SDKROOT = iphoneos; 376 | SUPPORTED_PLATFORMS = iphoneos; 377 | TARGETED_DEVICE_FAMILY = "1,2"; 378 | }; 379 | name = Debug; 380 | }; 381 | B66E3E5413E9E79C00D2ACF0 /* Release */ = { 382 | isa = XCBuildConfiguration; 383 | buildSettings = { 384 | ALWAYS_SEARCH_USER_PATHS = NO; 385 | 
CLANG_ENABLE_OBJC_ARC = YES; 386 | CLANG_WARN_CONSTANT_CONVERSION = YES; 387 | CLANG_WARN_ENUM_CONVERSION = YES; 388 | CLANG_WARN_INT_CONVERSION = YES; 389 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 390 | CODE_SIGN_IDENTITY = "iPhone Developer"; 391 | COPY_PHASE_STRIP = YES; 392 | GCC_C_LANGUAGE_STANDARD = gnu99; 393 | GCC_VERSION = ""; 394 | GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES; 395 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 396 | GCC_WARN_UNINITIALIZED_AUTOS = YES; 397 | GCC_WARN_UNUSED_VARIABLE = YES; 398 | IPHONEOS_DEPLOYMENT_TARGET = 6.0; 399 | OTHER_CFLAGS = "-DNS_BLOCK_ASSERTIONS=1"; 400 | PROVISIONING_PROFILE = ""; 401 | SDKROOT = iphoneos; 402 | SUPPORTED_PLATFORMS = iphoneos; 403 | TARGETED_DEVICE_FAMILY = "1,2"; 404 | VALIDATE_PRODUCT = YES; 405 | }; 406 | name = Release; 407 | }; 408 | B66E3E5613E9E79C00D2ACF0 /* Debug */ = { 409 | isa = XCBuildConfiguration; 410 | buildSettings = { 411 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 412 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; 413 | CODE_SIGN_ENTITLEMENTS = GLVideoFilter/GLVideoFilter.entitlements; 414 | CODE_SIGN_IDENTITY = "iPhone Developer"; 415 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 416 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 417 | GCC_PREFIX_HEADER = "GLVideoFilter/GLVideoFilter-Prefix.pch"; 418 | "GCC_THUMB_SUPPORT[arch=armv6]" = ""; 419 | INFOPLIST_FILE = "$(SRCROOT)/GLVideoFilter/GLVideoFilter-Info.plist"; 420 | IPHONEOS_DEPLOYMENT_TARGET = 6.0; 421 | LIBRARY_SEARCH_PATHS = "$(inherited)"; 422 | PRODUCT_NAME = GLVideoFilter; 423 | PROVISIONING_PROFILE = ""; 424 | USER_HEADER_SEARCH_PATHS = ""; 425 | WRAPPER_EXTENSION = app; 426 | }; 427 | name = Debug; 428 | }; 429 | B66E3E5713E9E79C00D2ACF0 /* Release */ = { 430 | isa = XCBuildConfiguration; 431 | buildSettings = { 432 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 433 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; 434 | CODE_SIGN_ENTITLEMENTS = GLVideoFilter/GLVideoFilter.entitlements; 435 | 
CODE_SIGN_IDENTITY = "iPhone Developer"; 436 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 437 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 438 | GCC_PREFIX_HEADER = "GLVideoFilter/GLVideoFilter-Prefix.pch"; 439 | "GCC_THUMB_SUPPORT[arch=armv6]" = ""; 440 | INFOPLIST_FILE = "$(SRCROOT)/GLVideoFilter/GLVideoFilter-Info.plist"; 441 | IPHONEOS_DEPLOYMENT_TARGET = 6.0; 442 | LIBRARY_SEARCH_PATHS = "$(inherited)"; 443 | PRODUCT_NAME = GLVideoFilter; 444 | PROVISIONING_PROFILE = ""; 445 | USER_HEADER_SEARCH_PATHS = ""; 446 | WRAPPER_EXTENSION = app; 447 | }; 448 | name = Release; 449 | }; 450 | /* End XCBuildConfiguration section */ 451 | 452 | /* Begin XCConfigurationList section */ 453 | B66E3E2813E9E79C00D2ACF0 /* Build configuration list for PBXProject "GLVideoFilter" */ = { 454 | isa = XCConfigurationList; 455 | buildConfigurations = ( 456 | B66E3E5313E9E79C00D2ACF0 /* Debug */, 457 | B66E3E5413E9E79C00D2ACF0 /* Release */, 458 | ); 459 | defaultConfigurationIsVisible = 0; 460 | defaultConfigurationName = Release; 461 | }; 462 | B66E3E5513E9E79C00D2ACF0 /* Build configuration list for PBXNativeTarget "GLVideoFilter" */ = { 463 | isa = XCConfigurationList; 464 | buildConfigurations = ( 465 | B66E3E5613E9E79C00D2ACF0 /* Debug */, 466 | B66E3E5713E9E79C00D2ACF0 /* Release */, 467 | ); 468 | defaultConfigurationIsVisible = 0; 469 | defaultConfigurationName = Release; 470 | }; 471 | /* End XCConfigurationList section */ 472 | }; 473 | rootObject = B66E3E2513E9E79C00D2ACF0 /* Project object */; 474 | } 475 | -------------------------------------------------------------------------------- /GLVideoFilter.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /GLVideoFilter/AppDelegate.h: -------------------------------------------------------------------------------- 1 | #import 2 | 
3 | @class FilterViewController; 4 | 5 | @interface AppDelegate : UIResponder 6 | 7 | @property (strong, nonatomic) UIWindow *window; 8 | 9 | @end 10 | -------------------------------------------------------------------------------- /GLVideoFilter/AppDelegate.m: -------------------------------------------------------------------------------- 1 | #import "AppDelegate.h" 2 | 3 | #import "FilterViewController.h" 4 | 5 | @implementation AppDelegate 6 | 7 | @synthesize window = _window; 8 | //@synthesize viewController = _viewController; 9 | 10 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 11 | { 12 | // get changes that might have happened while this 13 | // instance of your app wasn't running 14 | [[NSUbiquitousKeyValueStore defaultStore] synchronize]; 15 | 16 | // clear the key value store on launch 17 | /* 18 | NSDictionary *dict = [[NSUbiquitousKeyValueStore defaultStore] dictionaryRepresentation]; 19 | for (NSString *key in dict) { 20 | [[NSUbiquitousKeyValueStore defaultStore] removeObjectForKey:key]; 21 | } 22 | [[NSUbiquitousKeyValueStore defaultStore] synchronize]; 23 | */ 24 | return YES; 25 | } 26 | 27 | - (void)applicationWillResignActive:(UIApplication *)application 28 | { 29 | /* 30 | Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 31 | Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 
32 | */ 33 | } 34 | 35 | - (void)applicationDidEnterBackground:(UIApplication *)application 36 | { 37 | /* 38 | Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 39 | If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 40 | */ 41 | } 42 | 43 | - (void)applicationWillEnterForeground:(UIApplication *)application 44 | { 45 | /* 46 | Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 47 | */ 48 | } 49 | 50 | - (void)applicationDidBecomeActive:(UIApplication *)application 51 | { 52 | /* 53 | Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 54 | */ 55 | } 56 | 57 | - (void)applicationWillTerminate:(UIApplication *)application 58 | { 59 | /* 60 | Called when the application is about to terminate. 61 | Save data if appropriate. 62 | See also applicationDidEnterBackground:. 
63 | */ 64 | } 65 | 66 | @end 67 | -------------------------------------------------------------------------------- /GLVideoFilter/Classes/FilterManager.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | @interface FilterManager : NSObject 5 | 6 | #pragma mark - Class Interface 7 | 8 | +(void)loadFilters; 9 | +(void)teardownFilters; 10 | 11 | #pragma mark - Instance Interface 12 | 13 | -(void)nextFilter; 14 | -(void)prevFilter; 15 | -(NSArray*)getCurrentFilter; 16 | -(NSString*)getCurrentName; 17 | -(void)setFilterByName:(NSString *)name; 18 | @end 19 | -------------------------------------------------------------------------------- /GLVideoFilter/Classes/FilterManager.m: -------------------------------------------------------------------------------- 1 | #import "FilterManager.h" 2 | 3 | static bool _initialized = false; 4 | static NSArray *_filterList = nil; 5 | 6 | @implementation FilterManager 7 | 8 | #pragma mark - Class Methods 9 | 10 | +(void)loadFilters 11 | { 12 | if (!_initialized) 13 | { 14 | NSString *plist = [[NSBundle mainBundle] pathForResource:@"Filters" ofType:@"plist"]; 15 | NSArray *filters = [NSArray arrayWithContentsOfFile:plist]; 16 | NSMutableArray *tempFilterList = [NSMutableArray array]; 17 | for (NSDictionary *filter in filters) 18 | { 19 | #if defined(DEBUG) 20 | NSString *name = [filter objectForKey:@"Name"]; 21 | NSArray *passes = [filter objectForKey:@"Passes"]; 22 | NSLog(@"Found filter '%@' with %lu passes",name,[passes count]); 23 | #endif 24 | [tempFilterList addObject:filter]; 25 | } 26 | _filterList = [NSArray arrayWithArray:tempFilterList]; 27 | _initialized = true; 28 | } 29 | } 30 | 31 | +(void)teardownFilters 32 | { 33 | _filterList = nil; 34 | _initialized = false; 35 | } 36 | 37 | #pragma mark - Instance Methods and Variables 38 | 39 | NSUInteger currentFilter; 40 | 41 | - (id)init { 42 | if (!_initialized) 43 | [FilterManager loadFilters]; 44 | self = [super 
init]; 45 | if (self) 46 | { 47 | currentFilter = 0; 48 | } 49 | return self; 50 | } 51 | 52 | -(void)nextFilter 53 | { 54 | currentFilter++; 55 | if (currentFilter >= [_filterList count]) 56 | currentFilter = 0; 57 | } 58 | 59 | -(void)prevFilter 60 | { 61 | if (currentFilter == 0) 62 | currentFilter = [_filterList count] - 1; 63 | else 64 | currentFilter--; 65 | } 66 | 67 | -(NSArray*)getCurrentFilter 68 | { 69 | NSArray *filter = [[_filterList objectAtIndex:currentFilter] objectForKey:@"Passes"]; 70 | if (filter == nil) 71 | return [NSArray array]; 72 | return filter; 73 | } 74 | 75 | -(NSString*)getCurrentName 76 | { 77 | NSString *name = [[_filterList objectAtIndex:currentFilter] objectForKey:@"Name"]; 78 | if (name == nil) 79 | return @""; 80 | return name; 81 | } 82 | 83 | -(void)setFilterByName:(NSString *)name 84 | { 85 | if (name == nil || [name isEqualToString:@""]) 86 | currentFilter = 0; 87 | for (int i = 0; i < [_filterList count]; i++) 88 | { 89 | if ([name isEqualToString:[[_filterList objectAtIndex:i] objectForKey:@"Name"]]) 90 | currentFilter = i; 91 | } 92 | } 93 | 94 | 95 | @end 96 | -------------------------------------------------------------------------------- /GLVideoFilter/Classes/QuadModel.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #import 4 | 5 | @interface QuadModel : NSObject 6 | 7 | - (GLfloat *)getVertices; 8 | - (GLfloat *)getTexCoords; 9 | - (GLushort *)getIndices; 10 | - (unsigned int)getVertexSize; 11 | - (unsigned int)getIndexSize; 12 | - (unsigned int)getIndexCount; 13 | 14 | - (id)init; 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /GLVideoFilter/Classes/QuadModel.m: -------------------------------------------------------------------------------- 1 | #import "QuadModel.h" 2 | 3 | @interface QuadModel () { 4 | 5 | unsigned int numVerts; 6 | unsigned int numIndicies; 7 | 8 | // data passed to GL 9 | 
GLKVector3 *quadVertices; 10 | GLKVector2 *quadTexCoords; 11 | GLushort *quadIndicies; 12 | } 13 | 14 | @end 15 | 16 | @implementation QuadModel 17 | 18 | 19 | - (void)initMesh 20 | { 21 | numVerts = 4; 22 | numIndicies = 4; 23 | quadVertices = (GLKVector3 *) malloc(4 * sizeof(GLKVector3)); 24 | quadTexCoords = (GLKVector2 *) malloc(4 * sizeof(GLKVector2)); 25 | quadIndicies = (GLushort *) malloc(4 * sizeof(GLushort)); 26 | 27 | quadVertices[0] = GLKVector3Make(-1, -1, 0); 28 | quadVertices[1] = GLKVector3Make(-1, 1, 0); 29 | quadVertices[2] = GLKVector3Make(1, -1, 0); 30 | quadVertices[3] = GLKVector3Make(1, 1, 0); 31 | 32 | quadTexCoords[0] = GLKVector2Make(0, 0); 33 | quadTexCoords[1] = GLKVector2Make(0, 1); 34 | quadTexCoords[2] = GLKVector2Make(1, 0); 35 | quadTexCoords[3] = GLKVector2Make(1, 1); 36 | 37 | quadIndicies[0] = 0; 38 | quadIndicies[1] = 1; 39 | quadIndicies[2] = 2; 40 | quadIndicies[3] = 3; 41 | 42 | } 43 | 44 | - (GLfloat *)getVertices 45 | { 46 | return (GLfloat *) quadVertices; 47 | } 48 | 49 | - (GLfloat *)getTexCoords 50 | { 51 | return (GLfloat *) quadTexCoords; 52 | } 53 | 54 | - (GLushort *)getIndices 55 | { 56 | return quadIndicies; 57 | } 58 | 59 | - (unsigned int)getVertexSize 60 | { 61 | return numVerts * sizeof(GLKVector3); 62 | } 63 | 64 | - (unsigned int)getIndexSize 65 | { 66 | return numIndicies*sizeof(GLushort); 67 | } 68 | 69 | - (unsigned int)getIndexCount 70 | { 71 | return numIndicies; 72 | } 73 | 74 | - (void)freeBuffers 75 | { 76 | free(quadVertices); 77 | free(quadIndicies); 78 | free(quadTexCoords); 79 | } 80 | 81 | - (id)init 82 | { 83 | self = [super init]; 84 | 85 | if (self) 86 | { 87 | [self initMesh]; 88 | } 89 | 90 | return self; 91 | } 92 | 93 | 94 | - (void)dealloc 95 | { 96 | [self freeBuffers]; 97 | } 98 | 99 | @end 100 | -------------------------------------------------------------------------------- /GLVideoFilter/Classes/ShaderManager.h: 
-------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | // Uniform index. 5 | enum 6 | { 7 | UNIFORM_Y, 8 | UNIFORM_UV, 9 | UNIFORM_RGB, 10 | UNIFORM_TEXELSIZE, 11 | UNIFORM_RGBCONVOLUTION, 12 | UNIFORM_COLORCONVOLUTION, 13 | UNIFORM_SCALE, 14 | UNIFORM_LOWTHRESHOLD, 15 | UNIFORM_HIGHTHRESHOLD, 16 | NUM_UNIFORMS 17 | }; 18 | 19 | // Attribute index. 20 | enum 21 | { 22 | ATTRIB_VERTEX, 23 | ATTRIB_TEXCOORD, 24 | NUM_ATTRIBUTES 25 | }; 26 | 27 | typedef struct shaderType { 28 | GLuint handle; 29 | GLint attributes[NUM_ATTRIBUTES]; 30 | GLint uniforms[NUM_UNIFORMS]; 31 | } shader_t; 32 | 33 | 34 | @interface ShaderManager : NSObject 35 | 36 | #pragma mark - Class Interface 37 | 38 | +(void)loadShaders; 39 | +(BOOL) loadShaderNamed:(NSString *)name into:(shader_t *)shader; 40 | +(void)teardownShaders; 41 | 42 | #pragma mark - Instance Interface 43 | 44 | @property GLKVector2 scale; 45 | @property GLKVector2 texelSize; 46 | @property GLfloat lowThreshold; 47 | @property GLfloat highThreshold; 48 | @property GLKMatrix3 rgbConvolution; 49 | @property GLKMatrix3 colorConvolution; 50 | 51 | -(BOOL)setShader:(shader_t)program; 52 | -(BOOL)setShaderNamed:(NSString *)name; 53 | 54 | @end 55 | -------------------------------------------------------------------------------- /GLVideoFilter/Classes/ShaderManager.m: -------------------------------------------------------------------------------- 1 | #import "ShaderManager.h" 2 | #import 3 | 4 | static bool _initialized = false; 5 | static NSDictionary *_shaderList = nil; 6 | 7 | @implementation ShaderManager 8 | 9 | #pragma mark - Class Methods and Variables 10 | 11 | +(BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file 12 | { 13 | GLint status; 14 | const GLchar *source; 15 | source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String]; 16 | if (!source) { 17 | NSLog(@"Failed to load shader 
'%@'",[file lastPathComponent]); 18 | return NO; 19 | } 20 | 21 | *shader = glCreateShader(type); 22 | glShaderSource(*shader, 1, &source, NULL); 23 | glCompileShader(*shader); 24 | 25 | #if defined(DEBUG) 26 | GLint logLength; 27 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); 28 | if (logLength > 0) { 29 | GLchar *log = (GLchar *)malloc(logLength); 30 | glGetShaderInfoLog(*shader, logLength, &logLength, log); 31 | NSLog(@"Shader '%@' compile log:\n%s", [file lastPathComponent], log); 32 | free(log); 33 | } 34 | #endif 35 | 36 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); 37 | if (status == 0) { 38 | glDeleteShader(*shader); 39 | return NO; 40 | } 41 | 42 | return YES; 43 | } 44 | 45 | +(BOOL)linkProgram:(GLuint)prog 46 | { 47 | GLint status; 48 | glLinkProgram(prog); 49 | 50 | #if defined(DEBUG) 51 | GLint logLength; 52 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); 53 | if (logLength > 0) { 54 | GLchar *log = (GLchar *)malloc(logLength); 55 | glGetProgramInfoLog(prog, logLength, &logLength, log); 56 | NSLog(@"Program link log:\n%s", log); 57 | free(log); 58 | } 59 | #endif 60 | 61 | glGetProgramiv(prog, GL_LINK_STATUS, &status); 62 | if (status == 0) { 63 | return NO; 64 | } 65 | return YES; 66 | } 67 | 68 | 69 | +(BOOL)loadShader: (shader_t *) program withVertex: (NSString *) vertexName withFragment: (NSString *) fragmentName 70 | { 71 | GLuint vertShader, fragShader; 72 | NSString *vertShaderPathname, *fragShaderPathname; 73 | 74 | // Create shader program. 75 | program->handle = glCreateProgram(); 76 | 77 | // Create and compile vertex shader. 78 | vertShaderPathname = [[NSBundle mainBundle] pathForResource:vertexName ofType:@"vsh"]; 79 | if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) { 80 | NSLog(@"Failed to compile vertex shader '%@'",vertexName); 81 | return NO; 82 | } 83 | 84 | // Create and compile fragment shader. 
85 | fragShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentName ofType:@"fsh"]; 86 | if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) { 87 | NSLog(@"Failed to compile fragment shader '%@'",fragmentName); 88 | return NO; 89 | } 90 | 91 | // Attach vertex shader to program. 92 | glAttachShader(program->handle, vertShader); 93 | 94 | // Attach fragment shader to program. 95 | glAttachShader(program->handle, fragShader); 96 | 97 | // Bind attribute locations. 98 | // This needs to be done prior to linking. 99 | glBindAttribLocation(program->handle, ATTRIB_VERTEX, "position"); 100 | glBindAttribLocation(program->handle, ATTRIB_TEXCOORD, "texCoord"); 101 | #if defined(DEBUG) 102 | NSLog(@"Linking program with vertex shader '%@' and fragment shader '%@'...",vertexName,fragmentName); 103 | #endif 104 | 105 | // Link program. 106 | if (![self linkProgram:program->handle]) { 107 | NSLog(@"Failed to link program: %d", program->handle); 108 | 109 | if (vertShader) { 110 | glDeleteShader(vertShader); 111 | vertShader = 0; 112 | } 113 | if (fragShader) { 114 | glDeleteShader(fragShader); 115 | fragShader = 0; 116 | } 117 | if (program->handle) { 118 | glDeleteProgram(program->handle); 119 | program->handle = 0; 120 | } 121 | 122 | return NO; 123 | } 124 | 125 | // Get program locations. 
126 | program->uniforms[UNIFORM_Y] = glGetUniformLocation(program->handle, "SamplerY"); 127 | program->uniforms[UNIFORM_UV] = glGetUniformLocation(program->handle, "SamplerUV"); 128 | program->uniforms[UNIFORM_RGB] = glGetUniformLocation(program->handle, "SamplerRGB"); 129 | program->uniforms[UNIFORM_TEXELSIZE] = glGetUniformLocation(program->handle, "texelSize"); 130 | program->uniforms[UNIFORM_RGBCONVOLUTION] = glGetUniformLocation(program->handle, "rgbConvolution"); 131 | program->uniforms[UNIFORM_COLORCONVOLUTION] = glGetUniformLocation(program->handle, "colorConvolution"); 132 | program->uniforms[UNIFORM_SCALE] = glGetUniformLocation(program->handle, "posScale"); 133 | program->uniforms[UNIFORM_LOWTHRESHOLD] = glGetUniformLocation(program->handle, "lowThreshold"); 134 | program->uniforms[UNIFORM_HIGHTHRESHOLD] = glGetUniformLocation(program->handle, "highThreshold"); 135 | // Release vertex and fragment shaders. 136 | if (vertShader) { 137 | glDetachShader(program->handle, vertShader); 138 | glDeleteShader(vertShader); 139 | } 140 | if (fragShader) { 141 | glDetachShader(program->handle, fragShader); 142 | glDeleteShader(fragShader); 143 | } 144 | return YES; 145 | } 146 | 147 | +(void)loadShaders 148 | { 149 | if (!_initialized) 150 | { 151 | NSString *plist = [[NSBundle mainBundle] pathForResource:@"Shaders" ofType:@"plist"]; 152 | NSDictionary *shaders = [NSDictionary dictionaryWithContentsOfFile:plist]; 153 | NSString *key; 154 | NSMutableDictionary *tempShaderList = [NSMutableDictionary dictionary]; 155 | for (key in shaders) { 156 | NSDictionary *shader = [shaders objectForKey:key]; 157 | NSString *vertex = [shader objectForKey:@"Vertex"]; 158 | NSString *fragment = [shader objectForKey:@"Fragment"]; 159 | shader_t temp; 160 | if ([self loadShader:&temp withVertex:vertex withFragment:fragment]) 161 | { 162 | NSData *data = [NSData dataWithBytes:&temp length:sizeof(shader_t)]; 163 | [tempShaderList setObject:data forKey:key]; 164 | } 165 | } 166 | 
_shaderList = [NSDictionary dictionaryWithDictionary:tempShaderList]; 167 | _initialized = true; 168 | } 169 | } 170 | 171 | +(BOOL) loadShaderNamed:(NSString *)name into:(shader_t *)shader 172 | { 173 | if (!_initialized) 174 | return NO; 175 | NSData *temp = [_shaderList objectForKey:name]; 176 | if (temp == nil) 177 | return NO; 178 | [temp getBytes:shader length:sizeof(shader_t)]; 179 | return YES; 180 | } 181 | 182 | +(void)teardownShaders 183 | { 184 | 185 | if (_initialized) 186 | { 187 | NSString *key; 188 | for (key in _shaderList) 189 | { 190 | shader_t shader; 191 | NSData *temp = [_shaderList objectForKey:key]; 192 | [temp getBytes:&shader length:sizeof(shader_t)]; 193 | if (shader.handle) 194 | glDeleteProgram(shader.handle); 195 | } 196 | _shaderList = nil; 197 | _initialized = false; 198 | } 199 | } 200 | 201 | #pragma mark - Instance Methods and Properties 202 | 203 | @synthesize scale=_scale; 204 | @synthesize texelSize=_texelSize; 205 | @synthesize rgbConvolution=_rgbConvolution; 206 | @synthesize colorConvolution=_colorConvolution; 207 | @synthesize lowThreshold=_lowThreshold; 208 | @synthesize highThreshold=_highThreshold; 209 | 210 | - (id)init { 211 | if (!_initialized) 212 | [ShaderManager loadShaders]; 213 | self = [super init]; 214 | if (self) 215 | { 216 | self.scale = GLKVector2Make(1, 1); 217 | self.texelSize = GLKVector2Make(1, 1); 218 | self.lowThreshold = 0.0f; 219 | self.highThreshold = 0.0f; 220 | self.rgbConvolution = GLKMatrix3Identity; 221 | self.colorConvolution = GLKMatrix3Identity; 222 | } 223 | return self; 224 | } 225 | 226 | -(BOOL)setShader:(shader_t)program 227 | { 228 | if (program.handle == 0) 229 | return NO; 230 | glUseProgram(program.handle); 231 | 232 | // bind appropriate uniforms 233 | if (program.uniforms[UNIFORM_Y] > -1) 234 | { 235 | glUniform1i(program.uniforms[UNIFORM_Y], 0); 236 | glUniform1i(program.uniforms[UNIFORM_UV], 1); 237 | } else { 238 | glUniform1i(program.uniforms[UNIFORM_RGB], 0); 239 | } 240 | 
if (program.uniforms[UNIFORM_TEXELSIZE] > -1) 241 | glUniform2fv(program.uniforms[UNIFORM_TEXELSIZE], 1, (GLfloat *) &_texelSize); 242 | if (program.uniforms[UNIFORM_RGBCONVOLUTION] > -1) 243 | glUniformMatrix3fv(program.uniforms[UNIFORM_RGBCONVOLUTION], 1, GL_FALSE, _rgbConvolution.m); 244 | if (program.uniforms[UNIFORM_COLORCONVOLUTION] > -1) 245 | glUniformMatrix3fv(program.uniforms[UNIFORM_COLORCONVOLUTION], 1, GL_FALSE, _colorConvolution.m); 246 | 247 | if (program.uniforms[UNIFORM_SCALE] > -1) 248 | { 249 | glUniform2fv(program.uniforms[UNIFORM_SCALE],1, (GLfloat *) &_scale); 250 | } 251 | if (program.uniforms[UNIFORM_LOWTHRESHOLD] > -1) 252 | glUniform1f(program.uniforms[UNIFORM_LOWTHRESHOLD], _lowThreshold); 253 | if (program.uniforms[UNIFORM_HIGHTHRESHOLD] > -1) 254 | glUniform1f(program.uniforms[UNIFORM_HIGHTHRESHOLD], _highThreshold); 255 | return YES; 256 | } 257 | 258 | -(BOOL)setShaderNamed:(NSString *)name 259 | { 260 | shader_t program; 261 | if ([ShaderManager loadShaderNamed:name into:&program]) 262 | return [self setShader:program]; 263 | else 264 | return NO; 265 | } 266 | @end 267 | -------------------------------------------------------------------------------- /GLVideoFilter/FilterViewController.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #import 4 | #import "MBProgressHUD.h" 5 | #import "QuadModel.h" 6 | #import "ShaderManager.h" 7 | #import "FilterManager.h" 8 | 9 | @interface FilterViewController : GLKViewController { 10 | 11 | NSUbiquitousKeyValueStore *defaults; 12 | MBProgressHUD *_HUD; 13 | UIImage *_lockedIcon; 14 | UIImage *_unlockedIcon; 15 | BOOL _blurMode; 16 | BOOL _modeLock; 17 | NSInteger _colorMode; 18 | QuadModel *_quad; 19 | FilterManager *_filters; 20 | ShaderManager *_shaders; 21 | 22 | IBOutlet UITapGestureRecognizer *singleTapGestureRecognizer; 23 | IBOutlet UITapGestureRecognizer *doubleTapGestureRecognizer; 24 | } 25 | 26 | - 
(IBAction)tapGestureRecgonizer:(UITapGestureRecognizer *)sender; 27 | - (IBAction)swipeGestureRecognizer:(UISwipeGestureRecognizer *)sender; 28 | - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil; 29 | @end 30 | 31 | -------------------------------------------------------------------------------- /GLVideoFilter/FilterViewController.m: -------------------------------------------------------------------------------- 1 | #import 2 | #import "FilterViewController.h" 3 | 4 | 5 | #if __LP64__ 6 | static const bool _is64bit = true; 7 | #else 8 | static const bool _is64bit = false; 9 | #endif 10 | 11 | 12 | @implementation FilterViewController 13 | 14 | typedef enum { 15 | FBO_PING, 16 | FBO_PONG, 17 | FBO_RGB, 18 | NUM_FBOS 19 | } buff_t; 20 | 21 | enum 22 | { 23 | BLUR_NONE, 24 | BLUR_TWOPASS, 25 | NUM_BLURS 26 | }; 27 | 28 | enum 29 | { 30 | REGULAR, 31 | PROTANOPE, 32 | DEUTERANOPE, 33 | TRITANOPE, 34 | NUM_CONVOLUTIONS 35 | }; 36 | 37 | 38 | GLKMatrix3 _rgbConvolution; 39 | GLKMatrix3 _colorConvolution[NUM_CONVOLUTIONS]; 40 | NSArray *_colorConvolutionNames; 41 | 42 | bool _newFrame; 43 | GLuint _positionVBO; 44 | GLuint _texcoordVBO; 45 | GLuint _indexVBO; 46 | 47 | GLuint _fboTextures[NUM_FBOS]; 48 | GLuint _fbo[NUM_FBOS]; 49 | 50 | CGFloat _screenWidth; 51 | CGFloat _screenHeight; 52 | GLsizei _textureWidth; 53 | GLsizei _textureHeight; 54 | 55 | EAGLContext *_context; 56 | 57 | CVOpenGLESTextureRef _lumaTexture; 58 | CVOpenGLESTextureRef _chromaTexture; 59 | 60 | AVCaptureSession *_session; 61 | CVOpenGLESTextureCacheRef _videoTextureCache; 62 | 63 | 64 | // utility shaders 65 | shader_t _blurX, _blurY; 66 | shader_t _yuv2rgb; 67 | 68 | - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil 69 | { 70 | self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil]; 71 | if (self) { 72 | _filters = nil; 73 | _shaders = nil; 74 | _HUD = nil; 75 | _quad = nil; 76 | _lockedIcon = nil; 77 | _unlockedIcon = 
nil; 78 | } 79 | return self; 80 | } 81 | 82 | - (BOOL)prefersStatusBarHidden { 83 | return YES; 84 | } 85 | 86 | - (void)viewWillAppear:(BOOL)animated 87 | { 88 | [[UIApplication sharedApplication] setIdleTimerDisabled: YES]; 89 | defaults = [NSUbiquitousKeyValueStore defaultStore]; 90 | 91 | [[NSNotificationCenter defaultCenter] addObserver:self 92 | selector:@selector(externalUpdate:) 93 | name:NSUbiquitousKeyValueStoreDidChangeExternallyNotification 94 | object:nil]; 95 | [defaults synchronize]; 96 | 97 | _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 98 | if (!_context) { 99 | NSLog(@"Failed to create ES context"); 100 | } 101 | 102 | GLKView *view = (GLKView *)self.view; 103 | view.context = _context; 104 | view.contentScaleFactor = [UIScreen mainScreen].scale; 105 | 106 | _screenHeight = [UIScreen mainScreen].bounds.size.width * [UIScreen mainScreen].scale; 107 | _screenWidth = [UIScreen mainScreen].bounds.size.height * [UIScreen mainScreen].scale; 108 | 109 | _lockedIcon = [UIImage imageNamed:@"Locked"]; 110 | _unlockedIcon = [UIImage imageNamed:@"Unlocked"]; 111 | 112 | [self generateColorConvolutions]; 113 | [self setupGL]; 114 | 115 | 116 | _filters = [[FilterManager alloc] init]; 117 | 118 | [self setupAVCapture]; 119 | _newFrame = false; 120 | 121 | if ([defaults objectForKey:@"blurMode"] != nil) 122 | [self setBlurMode:[defaults boolForKey:@"blurMode"]]; 123 | else 124 | [self setBlurMode:YES]; 125 | [self setLockMode:[defaults boolForKey:@"modeLock"]]; 126 | [self setFilterByName:[defaults stringForKey:@"currentFilter"]]; 127 | [self setColorConvolution:(NSInteger)[defaults doubleForKey:@"colorMode"]]; 128 | [super viewWillAppear:animated]; 129 | 130 | } 131 | 132 | 133 | - (void)viewWillDisappear:(BOOL)animated 134 | { 135 | [_HUD hide:YES]; 136 | [self tearDownAVCapture]; 137 | [self tearDownGL]; 138 | [self updateDefaults]; 139 | [[NSNotificationCenter defaultCenter] removeObserver:self 140 | 
name:NSUbiquitousKeyValueStoreDidChangeExternallyNotification 141 | object:nil]; 142 | 143 | if ([EAGLContext currentContext] == _context) { 144 | [EAGLContext setCurrentContext:nil]; 145 | } 146 | _lockedIcon = nil; 147 | _unlockedIcon = nil; 148 | 149 | [super viewWillDisappear:animated]; 150 | } 151 | 152 | 153 | - (void)viewDidLoad 154 | { 155 | [super viewDidLoad]; 156 | 157 | if ([self respondsToSelector:@selector(setNeedsStatusBarAppearanceUpdate)]) 158 | { 159 | [self prefersStatusBarHidden]; 160 | [self performSelector:@selector(setNeedsStatusBarAppearanceUpdate)]; 161 | } 162 | else 163 | { 164 | // iOS 6 165 | [[UIApplication sharedApplication] setStatusBarHidden:YES withAnimation:UIStatusBarAnimationSlide]; 166 | } 167 | 168 | [singleTapGestureRecognizer requireGestureRecognizerToFail:doubleTapGestureRecognizer]; 169 | self.preferredFramesPerSecond = 60; 170 | } 171 | 172 | - (void)didReceiveMemoryWarning 173 | { 174 | [super didReceiveMemoryWarning]; 175 | // Release any cached data, images, etc. that aren't in use. 176 | } 177 | 178 | - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation 179 | { 180 | // Camera image orientation on screen is fixed 181 | // with respect to the physical camera orientation. 
182 | 183 | // [UIView setAnimationsEnabled:NO]; 184 | /* Your original orientation booleans*/ 185 | 186 | return UIInterfaceOrientationIsLandscape(interfaceOrientation); 187 | } 188 | 189 | -(void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration { 190 | [UIView setAnimationsEnabled:NO]; 191 | } 192 | 193 | 194 | - (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation 195 | { 196 | [UIView setAnimationsEnabled:YES]; 197 | } 198 | 199 | #pragma mark - AV Foundation methods 200 | 201 | - (void)cleanUpTextures 202 | { 203 | if (_lumaTexture) 204 | { 205 | CFRelease(_lumaTexture); 206 | _lumaTexture = NULL; 207 | } 208 | 209 | if (_chromaTexture) 210 | { 211 | CFRelease(_chromaTexture); 212 | _chromaTexture = NULL; 213 | } 214 | 215 | // Periodic texture cache flush every frame 216 | CVOpenGLESTextureCacheFlush(_videoTextureCache, 0); 217 | } 218 | 219 | - (void)captureOutput:(AVCaptureOutput *)captureOutput 220 | didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 221 | fromConnection:(AVCaptureConnection *)connection 222 | { 223 | CVReturn err; 224 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 225 | GLsizei width = (GLsizei) CVPixelBufferGetWidth(pixelBuffer); 226 | GLsizei height = (GLsizei) CVPixelBufferGetHeight(pixelBuffer); 227 | 228 | if (!_videoTextureCache) 229 | { 230 | NSLog(@"No video texture cache"); 231 | return; 232 | } 233 | 234 | if (_quad == nil) 235 | { 236 | // float textureAspect = height / width; 237 | float screenHeight = 238 | (_is64bit 239 | ? 
[UIScreen mainScreen].bounds.size.height * [UIScreen mainScreen].scale 240 | : [UIScreen mainScreen].bounds.size.height ); 241 | 242 | float scale = screenHeight / width; 243 | 244 | if (scale <= 1.0) 245 | { 246 | _textureWidth = ceil(width * scale); 247 | _textureHeight = ceil(height * scale); 248 | } else { 249 | _textureWidth = width; 250 | _textureHeight = height; 251 | } 252 | 253 | _quad = [[QuadModel alloc] init]; 254 | 255 | // set up buffers only *after* you have a source video stream 256 | // This allows the texture sizes to be set to the same as the source video stream 257 | [self setupBuffers]; 258 | } 259 | 260 | 261 | [self cleanUpTextures]; 262 | 263 | // CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture 264 | // optimally from CVImageBufferRef. 265 | 266 | // Y-plane 267 | glActiveTexture(GL_TEXTURE0); 268 | 269 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 270 | _videoTextureCache, 271 | pixelBuffer, 272 | NULL, 273 | GL_TEXTURE_2D, 274 | GL_RED_EXT, 275 | width, 276 | height, 277 | GL_RED_EXT, 278 | GL_UNSIGNED_BYTE, 279 | 0, 280 | &_lumaTexture); 281 | if (err) 282 | { 283 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 284 | } 285 | 286 | glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture)); 287 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 288 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 289 | 290 | // UV-plane 291 | glActiveTexture(GL_TEXTURE1); 292 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 293 | _videoTextureCache, 294 | pixelBuffer, 295 | NULL, 296 | GL_TEXTURE_2D, 297 | GL_RG_EXT, 298 | width/2, 299 | height/2, 300 | GL_RG_EXT, 301 | GL_UNSIGNED_BYTE, 302 | 1, 303 | &_chromaTexture); 304 | if (err) 305 | { 306 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 307 | } 308 | 309 | 
// Finds the highest-frame-rate capture format on `device` whose height does
// not exceed `maxHeight`, applies it (locking min == max frame duration to
// pin the camera at its fastest rate), and returns that maximum frame rate.
// Returns 0 when no format qualifies (messaging a nil range yields 0), in
// which case the device configuration is left untouched.
//
// Extracted because the 64-bit and 32-bit setup paths previously carried two
// verbatim copies of this selection/configuration loop.
static Float64 configureBestFormatForDevice(AVCaptureDevice *device, int maxHeight)
{
    AVCaptureDeviceFormat *bestFormat = nil;
    AVFrameRateRange *bestFrameRateRange = nil;
    for ( AVCaptureDeviceFormat *format in [device formats] ) {
        CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        if (dims.height <= maxHeight)
        {
            for ( AVFrameRateRange *range in format.videoSupportedFrameRateRanges ) {
                // nil bestFrameRateRange.maxFrameRate == 0, so the first
                // candidate range always wins; >= keeps later equal-rate
                // (typically larger) formats, matching the original behavior.
                if ( range.maxFrameRate >= bestFrameRateRange.maxFrameRate ) {
                    bestFormat = format;
                    bestFrameRateRange = range;
                }
            }
        }
    }

    if ( bestFormat ) {
#if defined(DEBUG)
        CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(bestFormat.formatDescription);
        NSLog(@"Setting camera resolution %ix%i@%ffps",dims.width,dims.height,bestFrameRateRange.maxFrameRate);
#endif
        if ( [device lockForConfiguration:NULL] == YES ) {
            device.activeFormat = bestFormat;
            // Both min and max duration are set to the *shortest* frame
            // duration: this locks the camera at its highest frame rate.
            device.activeVideoMinFrameDuration = bestFrameRateRange.minFrameDuration;
            device.activeVideoMaxFrameDuration = bestFrameRateRange.minFrameDuration;
            [device unlockForConfiguration];
        }
    }
    return bestFrameRateRange.maxFrameRate;
}

// Creates the GLES texture cache, configures the capture device/session for
// the best resolution this hardware can filter in real time, wires the video
// data output to the main queue (so GL calls are legal in the delegate), and
// starts the session.
- (void)setupAVCapture
{
    NSString *sessionPreset;
    //-- Create CVOpenGLESTextureCacheRef for optimal CVImageBufferRef to GLES texture conversion.
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
#else
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)_context, NULL, &_videoTextureCache);
#endif
    if (err)
    {
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }

    //-- Setup Capture Session.
    _session = [[AVCaptureSession alloc] init];
    [_session beginConfiguration];

    //-- Create a video device and input from that Device.  Add the input to the capture session.
    AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if(videoDevice == nil)
        assert(0);

    if (_is64bit)
    {
        // 64-bit hardware can filter 720p in real time; pick the fastest
        // format capped at 720 lines and let the format drive the session.
        self.preferredFramesPerSecond = configureBestFormatForDevice(videoDevice, 720);
        sessionPreset = AVCaptureSessionPresetInputPriority;
    } else {

        if ([videoDevice respondsToSelector:@selector(activeVideoMinFrameDuration)])
        {
            // Resolution cap tuned per device class (fill-rate bound).
            int maxHeight = 0;
            if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad)
            {
                // Choosing bigger preset for bigger screen.
                if (self.view.contentScaleFactor == 2.0)
                    maxHeight = 720;
                else
                    maxHeight = 480;
            }
            else
            {
                if (_screenWidth > 480)
                    maxHeight = 540;
                // use a 640x480 video stream for iPhones
                else
                    maxHeight = 480;
            }
            self.preferredFramesPerSecond = configureBestFormatForDevice(videoDevice, maxHeight);
            sessionPreset = AVCaptureSessionPresetInputPriority;
        } else {
            // Legacy path (no activeFormat API, iOS < 7): fixed presets.
            if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad)
            {
                // Choosing bigger preset for bigger screen.
                if (self.view.contentScaleFactor == 2.0)
                    sessionPreset = AVCaptureSessionPreset1280x720;
                else
                    sessionPreset = AVCaptureSessionPreset640x480;
            }
            else
            {
                // use a 640x480 video stream for iPhones
                sessionPreset = AVCaptureSessionPreset640x480;
            }
        }
    }

    //-- Add the device to the session.
    NSError *error;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if(error)
        assert(0);

    //-- Create the output for the capture session.
    AVCaptureVideoDataOutput * dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; // Probably want to set this to NO when recording

    //-- Set to YUV420.
    [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}]; // Necessary for manual preview

    // Set dispatch to be on the main thread so OpenGL can do things with the data
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    //-- Set preset session size.
    [_session setSessionPreset:sessionPreset];

    [_session addInput:input];

    [_session addOutput:dataOutput];
    [_session commitConfiguration];

    [_session startRunning];
}
// Stops using the capture pipeline: releases the per-frame textures and the
// GLES texture cache.
- (void)tearDownAVCapture
{
    [self cleanUpTextures];

    CFRelease(_videoTextureCache);
    // Fix: clear the dangling reference so captureOutput's
    // (!_videoTextureCache) guard holds if a stray frame arrives afterwards.
    _videoTextureCache = NULL;
}

#pragma mark - OpenGL methods

// Makes the GL context current and loads the fixed shader programs used by
// every filter chain (separable blur passes and the YUV->RGB conversion).
- (void)setupGL
{
    [EAGLContext setCurrentContext:_context];

    _shaders = [[ShaderManager alloc] init];

    [ShaderManager loadShaderNamed:@"blur-x" into:&_blurX];
    [ShaderManager loadShaderNamed:@"blur-y" into:&_blurY];
    [ShaderManager loadShaderNamed:@"yuv-rgb" into:&_yuv2rgb];

}

// Creates the quad VBOs and the NUM_FBOS offscreen framebuffer/texture pairs
// sized to the (already computed) _textureWidth x _textureHeight. Called once
// from captureOutput: after the first video frame fixes those dimensions.
- (void)setupBuffers
{
    // create frame vertex buffers
    glGenBuffers(1, &_indexVBO);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexVBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, [_quad getIndexSize], [_quad getIndices], GL_STATIC_DRAW);

    glGenBuffers(1, &_positionVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _positionVBO);
    glBufferData(GL_ARRAY_BUFFER, [_quad getVertexSize], [_quad getVertices], GL_STATIC_DRAW);

    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 3, GL_FLOAT, GL_FALSE, 3*sizeof(GLfloat), 0);

    glGenBuffers(1, &_texcoordVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _texcoordVBO);
    // NOTE(review): texcoord upload uses getVertexSize while the attribute is
    // 2 floats/vertex vs 3 for positions — presumably QuadModel accounts for
    // this; confirm against QuadModel.m.
    glBufferData(GL_ARRAY_BUFFER, [_quad getVertexSize], [_quad getTexCoords], GL_STATIC_DRAW);

    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2*sizeof(GLfloat), 0);

    // generate textures and FBO's
    glGenTextures(NUM_FBOS, &_fboTextures[0]);
    glGenFramebuffers(NUM_FBOS, &_fbo[0]);

    for (int i = 0; i < NUM_FBOS; i++)
    {
        glBindFramebuffer(GL_FRAMEBUFFER, _fbo[i]);
        glBindTexture(GL_TEXTURE_2D, _fboTextures[i]);

        glTexImage2D( GL_TEXTURE_2D,
                     0,
                     GL_RGBA,
                     _textureWidth, _textureHeight,
                     0,
                     GL_RGBA,
                     GL_UNSIGNED_BYTE,
                     NULL);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _fboTextures[i], 0);

        GLenum status;
        status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        switch(status) {
            case GL_FRAMEBUFFER_COMPLETE:
                //NSLog(@"fbo complete");
                break;

            case GL_FRAMEBUFFER_UNSUPPORTED:
                NSLog(@"fbo unsupported");
                break;

            default:
                /* programming error; will fail on all hardware */
                NSLog(@"Framebuffer Error");
                break;
        }
    }

    glBindTexture(GL_TEXTURE_2D, 0);

    // Aspect-correct the quad for the landscape screen.
    GLfloat xScale = 1.0;
    GLfloat yScale = (_screenWidth/ _screenHeight) * ((GLfloat) _textureHeight / (GLfloat) _textureWidth);
    // Fix: the original compared a UIInterfaceOrientation value against
    // UIDeviceOrientationLandscapeRight (a UIDeviceOrientation constant).
    // UIKit defines UIInterfaceOrientationLandscapeLeft ==
    // UIDeviceOrientationLandscapeRight, so this substitution is
    // behavior-identical while using the correct enum type.
    float orient = (self.interfaceOrientation == UIInterfaceOrientationLandscapeLeft) ? -1.0 : 1.0;
    [_shaders setScale:GLKVector2Make(xScale * orient, yScale * orient)];
    [_shaders setTexelSize:GLKVector2Divide(GLKVector2Make(1.0, 1.0), GLKVector2Make(_textureWidth, _textureHeight))];
#if defined(DEBUG)
    NSLog(@"screen: %fx%f text: %ix%i scale: %f",_screenWidth,_screenHeight,_textureWidth,_textureHeight,yScale);
#endif
}
// Releases GL buffer objects, FBO textures/framebuffers, and the shared
// filter/shader managers.
- (void)tearDownGL
{
    [EAGLContext setCurrentContext:_context];

    glDeleteBuffers(1, &_positionVBO);
    glDeleteBuffers(1, &_texcoordVBO);
    glDeleteBuffers(1, &_indexVBO);

    _filters = nil;
    _shaders = nil;
    [FilterManager teardownFilters];
    [ShaderManager teardownShaders];

    glDeleteTextures(NUM_FBOS, &_fboTextures[0]);
    glDeleteFramebuffers(NUM_FBOS, &_fbo[0]);
}

// Shared render path for both drawIntoFBO: variants (the two previously
// duplicated this sequence verbatim): bind destination FBO, set viewport,
// bind source texture, clear, draw the quad. Assumes the shader is already set.
- (void)renderQuadIntoFBO:(int)fboNum sourceTexture:(int)texNum
{
    glBindFramebuffer(GL_FRAMEBUFFER, _fbo[fboNum]);
    glViewport(0, 0, _textureWidth, _textureHeight);

    glBindTexture(GL_TEXTURE_2D, _fboTextures[texNum]);
    glClear(GL_COLOR_BUFFER_BIT);

    glDrawElements(GL_TRIANGLE_STRIP, [_quad getIndexCount], GL_UNSIGNED_SHORT, 0);
}

// Render from one texture into another FBO, selecting the shader by name.
// Returns NO when either index is out of range or the shader lookup fails.
- (BOOL)drawIntoFBO: (int) fboNum WithShaderNamed:(NSString *) name sourceTexture:(int)texNum
{
    if (fboNum >= 0 && fboNum < NUM_FBOS && texNum >= 0 && texNum < NUM_FBOS)
    {
        if (![_shaders setShaderNamed:name])
            return NO;
        [self renderQuadIntoFBO:fboNum sourceTexture:texNum];
        return YES;
    }
    return NO;
}

// Render from one texture into another FBO using an already-loaded shader
// handle. Returns NO when either index is out of range or the shader cannot
// be activated.
- (BOOL)drawIntoFBO: (int) fboNum WithShader:(shader_t) shader sourceTexture:(int)texNum
{
    if (fboNum >= 0 && fboNum < NUM_FBOS && texNum >= 0 && texNum < NUM_FBOS)
    {
        if (![_shaders setShader:shader])
            return NO;
        [self renderQuadIntoFBO:fboNum sourceTexture:texNum];
        return YES;
    }
    return NO;
}
// Render from one texture into the on-screen GLKView using the named shader.
// Returns NO when the view is nil or the shader lookup fails.
- (BOOL)drawIntoView:(GLKView *) view WithShaderNamed:(NSString*) name sourceTexture:(int)texNum
{
    if (view == nil)
        return NO;
    if (![_shaders setShaderNamed:name])
        return NO;

    [view bindDrawable];
    glViewport(0, 0, _screenWidth, _screenHeight);
    glBindTexture(GL_TEXTURE_2D, _fboTextures[texNum]);
    glClear(GL_COLOR_BUFFER_BIT);

    glDrawElements(GL_TRIANGLE_STRIP, [_quad getIndexCount], GL_UNSIGNED_SHORT, 0);

    return YES;
}

// Runs the active filter chain over `source`, ping-ponging intermediate
// passes between FBO_PING and FBO_PONG, then draws the final pass into `dest`.
- (void)filterFrame:(GLuint) source intoView:(GLKView *) dest
{
    NSArray *passes = [_filters getCurrentFilter];
    NSInteger passCount = [passes count];
    GLuint readTex = source;
    GLuint writeFbo = FBO_PING;

    // Optional separable-blur prepass ahead of the filter chain.
    if (_blurMode)
    {
        BOOL blurred = [self drawIntoFBO:FBO_PONG WithShader:_blurX sourceTexture:readTex]
                    && [self drawIntoFBO:writeFbo WithShader:_blurY sourceTexture:FBO_PONG];
        if (blurred)
        {
            readTex = writeFbo;
            writeFbo = (writeFbo == FBO_PING) ? FBO_PONG : FBO_PING;
        }
    }

    // All passes except the last render offscreen, swapping read/write each time.
    for (NSInteger pass = 0; pass + 1 < passCount; pass++)
    {
        NSString *passName = [passes objectAtIndex:pass];
        if ([self drawIntoFBO:writeFbo WithShaderNamed:passName sourceTexture:readTex])
        {
            readTex = writeFbo;
            writeFbo = (writeFbo == FBO_PING) ? FBO_PONG : FBO_PING;
        }
    }

    // The final pass goes straight to the screen.
    if (passCount > 0)
    {
        NSString *lastPass = [passes objectAtIndex:(passCount - 1)];
        [self drawIntoView:dest WithShaderNamed:lastPass sourceTexture:readTex];
    }
}
#pragma mark - GLKView and GLKViewController delegate methods

// Per-frame draw callback: when captureOutput has flagged a new frame,
// refresh the shader uniforms and run the filter chain into the view.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
    // Unbind the chroma texture left on unit 1 by captureOutput, then make
    // unit 0 active for the filter passes.
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, 0);
    glActiveTexture(GL_TEXTURE0);

    if (_newFrame)
    {
        GLfloat xScale = 1.0;
        GLfloat yScale = (_screenWidth/ _screenHeight) * ((GLfloat) _textureHeight / (GLfloat) _textureWidth);
        // Fix: the original compared a UIInterfaceOrientation value against
        // UIDeviceOrientationLandscapeRight; UIKit defines
        // UIInterfaceOrientationLandscapeLeft == UIDeviceOrientationLandscapeRight,
        // so this is the same value with the correct enum type.
        float orient = (self.interfaceOrientation == UIInterfaceOrientationLandscapeLeft) ? -1.0 : 1.0;
        [_shaders setScale:GLKVector2Make(xScale * orient, yScale * orient)];
        [_shaders setTexelSize:GLKVector2Divide(GLKVector2Make(1.0, 1.0), GLKVector2Make(_textureWidth, _textureHeight))];
        // Canny hysteresis thresholds consumed by the threshold shaders.
        [_shaders setLowThreshold:0.1f];
        [_shaders setHighThreshold:0.25f];

        [self filterFrame:FBO_RGB intoView:view];
        _newFrame = false;
    }
}

#pragma mark - Overlay updating methods

// Shared HUD setup (was duplicated between the text and lock overlays):
// replaces any visible HUD with a fresh one that auto-dismisses after 2s.
// Pass customView == nil for a text-only HUD.
-(void)showOverlayWithText:(NSString*)text customView:(UIView *)customView
{
    [MBProgressHUD hideAllHUDsForView:self.view animated:NO];
    _HUD = [[MBProgressHUD alloc] initWithView:self.view];
    [self.view addSubview:_HUD];

    if (customView)
    {
        _HUD.customView = customView;
        _HUD.mode = MBProgressHUDModeCustomView;
    }
    else
    {
        // Configure for text only and offset down
        _HUD.mode = MBProgressHUDModeText;
    }
    _HUD.labelText = text;
    _HUD.margin = 10.f;
    _HUD.delegate = self;
    _HUD.removeFromSuperViewOnHide = YES;

    [_HUD show:YES];
    [_HUD hide:YES afterDelay:2];
}

// Shows a transient text-only HUD.
-(void)updateOverlayWithText:(NSString*)text
{
    [self showOverlayWithText:text customView:nil];
}

// Shows a transient HUD with the locked/unlocked icon matching _modeLock.
-(void)updateOverlayLock {
    NSString *text = (_modeLock ? @"Locked" : @"Unlocked");
    UIImage *image = (_modeLock ? _lockedIcon : _unlockedIcon);

    // configure to use the custom view with image
    [self showOverlayWithText:text customView:[[UIImageView alloc] initWithImage:image]];
}
// Shows a transient HUD reflecting the current blur state.
-(void)updateOverlayBlur {
    // update the overlay
    NSString *status = _blurMode ? @"Blur Enabled" : @"Blur Disabled";
    [self updateOverlayWithText:status];
}

// Toggles the blur prepass; no-op when the state is unchanged.
-(void)setBlurMode:(BOOL)newMode
{
    if (_blurMode == newMode)
        return;

    _blurMode = newMode;
    [self updateOverlayBlur];
    [self saveDefaults];
}

// Toggles the gesture lock; no-op when the state is unchanged.
-(void)setLockMode:(BOOL)newMode
{
    if (_modeLock == newMode)
        return;

    _modeLock = newMode;
    [self updateOverlayLock];
    [self saveDefaults];
}

// Selects a filter chain by name; no-op when it is already active.
-(void)setFilterByName:(NSString *)name
{
    if ([name isEqualToString:[_filters getCurrentName]])
        return;

    [_filters setFilterByName:name];
    [self updateOverlayWithText:[_filters getCurrentName]];
    [self saveDefaults];
}

// Selects a color-blindness simulation matrix, wrapping around at either end
// of the [0, NUM_CONVOLUTIONS) range; no-op when the mode is unchanged.
-(void)setColorConvolution:(NSInteger)mode
{
    if (_colorMode == mode)
        return;

    if (mode >= NUM_CONVOLUTIONS)
        mode = 0;
    else if (mode < 0)
        mode = NUM_CONVOLUTIONS - 1;

    _colorMode = mode;

    [self updateOverlayWithText:[_colorConvolutionNames objectAtIndex:_colorMode]];
    [self saveDefaults];
}

// Advances to the next filter chain and shows its name.
-(void)setNextFilter
{
    [_filters nextFilter];
    [self updateOverlayWithText:[_filters getCurrentName]];
    [self saveDefaults];
}
// Steps back to the previous filter chain and shows its name.
-(void)setPrevFilter
{
    [_filters prevFilter];
    NSString *filterName = [_filters getCurrentName];
    [self updateOverlayWithText:filterName];
    [self saveDefaults];
}

// Builds the YUV->RGB conversion matrix and the color-blindness simulation
// matrices handed to the shaders (indexed by _colorMode), plus their display
// names. NOTE(review): coefficients look like a full-range BT.601-style
// conversion and published dichromacy simulation matrices — confirm sources.
-(void)generateColorConvolutions
{
    _rgbConvolution = GLKMatrix3Make( 1 ,1 , 1,
                                     0 ,-.18732 , 1.8556,
                                     1.57481 , -.46813 , 0);

    _colorConvolution[REGULAR] = GLKMatrix3Identity;
    _colorConvolution[PROTANOPE] = GLKMatrix3MakeAndTranspose(0.20, 0.99, -0.19,
                                                              0.16, 0.79, 0.04,
                                                              0.01, -0.01, 1.00);

    _colorConvolution[DEUTERANOPE] = GLKMatrix3MakeAndTranspose(0.43, 0.72, -0.15,
                                                                0.34, 0.57, 0.09,
                                                                -0.02, 0.03, 1.00);

    _colorConvolution[TRITANOPE] = GLKMatrix3MakeAndTranspose(0.97, 0.11, -0.08,
                                                              0.02, 0.82, 0.16,
                                                              -0.06, 0.88, 0.18);

    _colorConvolutionNames = @[ @"Regular colorspace", @"Simulated Protanope", @"Simulated Deuteranope", @"Simulated Tritanope"];
}

#pragma mark - Touch handling methods

// Single tap toggles the gesture lock; double tap (when unlocked) toggles
// blur. NB: selector name carries a historical typo ("Recgonizer") — it is
// wired to the storyboard, so renaming would break the connection.
- (IBAction)tapGestureRecgonizer:(UITapGestureRecognizer *)sender {
    if (sender.state == UIGestureRecognizerStateEnded)
    {
        if (sender.numberOfTapsRequired == 1)
            [self setLockMode:!_modeLock];
        else if (sender.numberOfTapsRequired == 2 && !_modeLock)
            [self setBlurMode:!_blurMode];
    }
}

// Left/right swipes cycle filters; up/down swipes cycle color-simulation
// modes. All ignored while the mode lock is engaged.
- (IBAction)swipeGestureRecognizer:(UISwipeGestureRecognizer *)sender {
    if (!_modeLock && sender.state == UIGestureRecognizerStateRecognized)
    {
        if (sender.direction == UISwipeGestureRecognizerDirectionLeft)
        {
            [self setNextFilter];
        } else if (sender.direction == UISwipeGestureRecognizerDirectionRight)
        {
            [self setPrevFilter];
        }

        if (sender.direction == UISwipeGestureRecognizerDirectionUp)
        {
            [self setColorConvolution:_colorMode + 1];
            // [self setBlurMode:!_blurMode];
        } else if (sender.direction == UISwipeGestureRecognizerDirectionDown)
        {
            [self setColorConvolution:_colorMode - 1];

            // [self setBlurMode:!_blurMode];
        }
    }
}
// Writes the current UI state into `defaults` (a store declared above this
// chunk — presumably NSUserDefaults mirrored to iCloud; see externalUpdate:).
// colorMode is stored as a double, matching the doubleForKey: read below.
-(void) updateDefaults
{
    NSLog(@"Updating defaults...");
    [defaults setDouble:(double)_colorMode forKey:@"colorMode"];
    NSString *filterName = [_filters getCurrentName];
    [defaults setString:filterName forKey:@"currentFilter"];
    [defaults setBool:_modeLock forKey:@"modeLock"];
    [defaults setBool:_blurMode forKey:@"blurMode"];
    [defaults synchronize];
}

// Debounced save: coalesces rapid setting changes into one updateDefaults
// call 2 seconds after the last change.
-(void) saveDefaults
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(updateDefaults) object:nil];
    [self performSelector:@selector(updateDefaults) withObject:nil afterDelay:(NSTimeInterval) 2.0];
}

// Applies setting changes that arrived from outside (the notification's
// userInfo uses the iCloud key-value-store changed-keys key). Observation of
// NSUserDefaultsDidChangeNotification is suspended while applying so our own
// writes don't re-trigger this handler.
-(void) externalUpdate:(NSNotification*) notificationObject {


    // prevent NSUserDefaultsDidChangeNotification from being posted while we update from iCloud

    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:NSUserDefaultsDidChangeNotification
                                                  object:nil];

    NSArray *keys = [[notificationObject userInfo] objectForKey:@"NSUbiquitousKeyValueStoreChangedKeysKey"];
    for (NSString *key in keys)
    {
#if defined(DEBUG)
        NSLog(@"Key '%@' changed",key);
#endif
        // Each setter below is a no-op when the value is unchanged, so this
        // cannot ping-pong updates back out.
        if ([key isEqualToString:@"modeLock"])
        {
            [self setLockMode:[defaults boolForKey:key]];
        } else if ([key isEqualToString:@"blurMode"])
        {
            [self setBlurMode:[defaults boolForKey:key]];
        } else if ([key isEqualToString:@"currentFilter"])
        {
            [self setFilterByName:[defaults stringForKey:key]];
        } else if ([key isEqualToString:@"colorMode"])
        {
            [self setColorConvolution:(NSInteger)[defaults doubleForKey:key]];
        }
    }
    // enable NSUserDefaultsDidChangeNotification notifications again

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(externalUpdate:)
                                                 name:NSUserDefaultsDidChangeNotification
                                               object:nil];
}
[[NSNotificationCenter defaultCenter] addObserver:self 948 | selector:@selector(externalUpdate:) 949 | name:NSUserDefaultsDidChangeNotification 950 | object:nil]; 951 | } 952 | 953 | 954 | @end 955 | -------------------------------------------------------------------------------- /GLVideoFilter/Filters.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Name 7 | No Filter 8 | Passes 9 | 10 | screen 11 | 12 | 13 | 14 | Name 15 | Color Sobel Filter 16 | Passes 17 | 18 | sobel-rgb 19 | 20 | 21 | 22 | Name 23 | Grey Sobel Filter 24 | Passes 25 | 26 | sobel-bw 27 | screen 28 | 29 | 30 | 31 | Name 32 | Blended Sobel Filter 33 | Passes 34 | 35 | sobel-blend 36 | 37 | 38 | 39 | Name 40 | White Sobel Composite 41 | Passes 42 | 43 | sobel-composite-bw 44 | 45 | 46 | 47 | Name 48 | Color Sobel Composite 49 | Passes 50 | 51 | sobel-composite-rgb 52 | 53 | 54 | 55 | Name 56 | Canny Filter 57 | Passes 58 | 59 | canny-pre 60 | canny-mag 61 | canny-threshold 62 | 63 | 64 | 65 | Name 66 | Canny Composite 67 | Passes 68 | 69 | canny-pre 70 | canny-mag 71 | canny-composite 72 | 73 | 74 | 75 | Name 76 | Comic 77 | Passes 78 | 79 | canny-pre 80 | canny-mag 81 | canny-comic 82 | 83 | 84 | 85 | 86 | -------------------------------------------------------------------------------- /GLVideoFilter/GLVideoFilter-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIconFiles 12 | 13 | Icon.png 14 | Icon@2x.png 15 | Icon-72.png 16 | Icon-72@2x.png 17 | Icon-Small-50.png 18 | Icon-Small-50@2x.png 19 | Icon-Small.png 20 | Icon-Small@2x.png 21 | 22 | CFBundleIcons 23 | 24 | CFBundleIcons~ipad 25 | 26 | CFBundleIdentifier 27 | com.yourcompany.${PRODUCT_NAME:rfc1034identifier} 28 | CFBundleInfoDictionaryVersion 29 | 6.0 30 | CFBundleName 31 | 
${PRODUCT_NAME} 32 | CFBundlePackageType 33 | APPL 34 | CFBundleShortVersionString 35 | 1.0 36 | CFBundleSignature 37 | ???? 38 | CFBundleVersion 39 | 1.0 40 | LSRequiresIPhoneOS 41 | 42 | UIApplicationExitsOnSuspend 43 | 44 | UIMainStoryboardFile 45 | StoryBoard_iPhone 46 | UIMainStoryboardFile~ipad 47 | StoryBoard_iPad 48 | UIRequiredDeviceCapabilities 49 | 50 | video-camera 51 | 52 | UIStatusBarHidden 53 | 54 | UISupportedInterfaceOrientations 55 | 56 | UIInterfaceOrientationLandscapeRight 57 | UIInterfaceOrientationLandscapeLeft 58 | 59 | UISupportedInterfaceOrientations~ipad 60 | 61 | UIInterfaceOrientationLandscapeRight 62 | UIInterfaceOrientationLandscapeLeft 63 | 64 | UIViewControllerBasedStatusBarAppearance 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /GLVideoFilter/GLVideoFilter-Prefix.pch: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | 4 | #ifndef __IPHONE_6_0 5 | #warning "This project uses features only available in iOS SDK 5.0 and later." 
6 | #endif 7 | 8 | #ifdef __OBJC__ 9 | #import 10 | #import 11 | #endif 12 | -------------------------------------------------------------------------------- /GLVideoFilter/GLVideoFilter.entitlements: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | com.apple.developer.ubiquity-container-identifiers 6 | 7 | $(TeamIdentifierPrefix)com.yourcompany.GLVideoFilter 8 | 9 | com.apple.developer.ubiquity-kvstore-identifier 10 | $(TeamIdentifierPrefix)$(CFBundleIdentifier) 11 | 12 | 13 | -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "scale" : "2x", 6 | "size" : "60x60" 7 | }, 8 | { 9 | "size" : "57x57", 10 | "idiom" : "iphone", 11 | "filename" : "Icon.png", 12 | "scale" : "1x" 13 | }, 14 | { 15 | "size" : "57x57", 16 | "idiom" : "iphone", 17 | "filename" : "Icon@2x.png", 18 | "scale" : "2x" 19 | }, 20 | { 21 | "size" : "72x72", 22 | "idiom" : "ipad", 23 | "filename" : "Icon-72.png", 24 | "scale" : "1x" 25 | }, 26 | { 27 | "size" : "72x72", 28 | "idiom" : "ipad", 29 | "filename" : "Icon-72@2x.png", 30 | "scale" : "2x" 31 | }, 32 | { 33 | "idiom" : "ipad", 34 | "scale" : "1x", 35 | "size" : "76x76" 36 | }, 37 | { 38 | "idiom" : "ipad", 39 | "scale" : "2x", 40 | "size" : "76x76" 41 | }, 42 | { 43 | "size" : "29x29", 44 | "idiom" : "iphone", 45 | "filename" : "Icon-Small.png", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "size" : "29x29", 50 | "idiom" : "iphone", 51 | "filename" : "Icon-Small@2x.png", 52 | "scale" : "2x" 53 | }, 54 | { 55 | "idiom" : "iphone", 56 | "scale" : "2x", 57 | "size" : "40x40" 58 | }, 59 | { 60 | "size" : "50x50", 61 | "idiom" : "ipad", 62 | "filename" : "Icon-Small-50.png", 63 | "scale" : "1x" 64 | }, 65 | { 66 | "size" : "50x50", 67 | "idiom" : "ipad", 68 | "filename" : 
"Icon-Small-50@2x.png", 69 | "scale" : "2x" 70 | }, 71 | { 72 | "idiom" : "ipad", 73 | "scale" : "1x", 74 | "size" : "40x40" 75 | }, 76 | { 77 | "idiom" : "ipad", 78 | "scale" : "2x", 79 | "size" : "40x40" 80 | }, 81 | { 82 | "size" : "29x29", 83 | "idiom" : "ipad", 84 | "filename" : "Icon-Small.png", 85 | "scale" : "1x" 86 | }, 87 | { 88 | "size" : "29x29", 89 | "idiom" : "ipad", 90 | "filename" : "Icon-Small@2x.png", 91 | "scale" : "2x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-72.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-72.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-72@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-72@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small-50.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small-50.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small-50@2x.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small-50@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon-Small@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/AppIcon.appiconset/Icon@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/LaunchImage.launchimage/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | 
"scale" : "1x", 6 | "orientation" : "portrait" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "scale" : "2x", 11 | "orientation" : "portrait" 12 | }, 13 | { 14 | "orientation" : "portrait", 15 | "idiom" : "iphone", 16 | "filename" : "Default-568h@2x.png", 17 | "subtype" : "retina4", 18 | "scale" : "2x" 19 | }, 20 | { 21 | "orientation" : "portrait", 22 | "idiom" : "iphone", 23 | "minimum-system-version" : "7.0", 24 | "scale" : "2x" 25 | }, 26 | { 27 | "orientation" : "portrait", 28 | "idiom" : "iphone", 29 | "filename" : "Default-568h@2x.png", 30 | "minimum-system-version" : "7.0", 31 | "subtype" : "retina4", 32 | "scale" : "2x" 33 | }, 34 | { 35 | "orientation" : "portrait", 36 | "idiom" : "ipad", 37 | "extent" : "to-status-bar", 38 | "scale" : "1x" 39 | }, 40 | { 41 | "orientation" : "portrait", 42 | "idiom" : "ipad", 43 | "extent" : "to-status-bar", 44 | "scale" : "2x" 45 | }, 46 | { 47 | "orientation" : "landscape", 48 | "idiom" : "ipad", 49 | "extent" : "to-status-bar", 50 | "scale" : "1x" 51 | }, 52 | { 53 | "orientation" : "landscape", 54 | "idiom" : "ipad", 55 | "extent" : "to-status-bar", 56 | "scale" : "2x" 57 | }, 58 | { 59 | "orientation" : "portrait", 60 | "idiom" : "ipad", 61 | "minimum-system-version" : "7.0", 62 | "extent" : "full-screen", 63 | "scale" : "1x" 64 | }, 65 | { 66 | "orientation" : "portrait", 67 | "idiom" : "ipad", 68 | "minimum-system-version" : "7.0", 69 | "extent" : "full-screen", 70 | "scale" : "2x" 71 | }, 72 | { 73 | "orientation" : "landscape", 74 | "idiom" : "ipad", 75 | "minimum-system-version" : "7.0", 76 | "extent" : "full-screen", 77 | "scale" : "1x" 78 | }, 79 | { 80 | "orientation" : "landscape", 81 | "idiom" : "ipad", 82 | "minimum-system-version" : "7.0", 83 | "extent" : "full-screen", 84 | "scale" : "2x" 85 | } 86 | ], 87 | "info" : { 88 | "version" : 1, 89 | "author" : "xcode" 90 | } 91 | } -------------------------------------------------------------------------------- 
/GLVideoFilter/Images.xcassets/LaunchImage.launchimage/Default-568h@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/LaunchImage.launchimage/Default-568h@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/Locked.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x", 6 | "filename" : "Locked.png" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x", 11 | "filename" : "Locked@2x.png" 12 | } 13 | ], 14 | "info" : { 15 | "version" : 1, 16 | "author" : "xcode" 17 | } 18 | } -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/Locked.imageset/Locked.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/Locked.imageset/Locked.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/Locked.imageset/Locked@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/Locked.imageset/Locked@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/Unlocked.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x", 6 | "filename" : "Unlocked.png" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x", 11 | 
"filename" : "Unlocked@2x.png" 12 | } 13 | ], 14 | "info" : { 15 | "version" : 1, 16 | "author" : "xcode" 17 | } 18 | } -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/Unlocked.imageset/Unlocked.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/Unlocked.imageset/Unlocked.png -------------------------------------------------------------------------------- /GLVideoFilter/Images.xcassets/Unlocked.imageset/Unlocked@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dghost/GLVideoFilter/dece6453edba7412aa7428967b21c3bac6417451/GLVideoFilter/Images.xcassets/Unlocked.imageset/Unlocked@2x.png -------------------------------------------------------------------------------- /GLVideoFilter/Shaders.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | screen 6 | 7 | Vertex 8 | quadPassthrough 9 | Fragment 10 | passthrough 11 | 12 | yuv-rgb 13 | 14 | Vertex 15 | quadInvertY 16 | Fragment 17 | yuv2rgb 18 | 19 | blur-x 20 | 21 | Vertex 22 | quadKernel 23 | Fragment 24 | blurXPass 25 | 26 | blur-y 27 | 28 | Vertex 29 | quadKernel 30 | Fragment 31 | blurYPass 32 | 33 | sobel-rgb 34 | 35 | Vertex 36 | quadScreenKernel 37 | Fragment 38 | Sobel 39 | 40 | sobel-bw 41 | 42 | Vertex 43 | quadKernel 44 | Fragment 45 | SobelBW 46 | 47 | sobel-blend 48 | 49 | Vertex 50 | quadScreenKernel 51 | Fragment 52 | SobelBlend 53 | 54 | sobel-composite-bw 55 | 56 | Vertex 57 | quadScreenKernel 58 | Fragment 59 | SobelBWComposite 60 | 61 | sobel-composite-rgb 62 | 63 | Vertex 64 | quadScreenKernel 65 | Fragment 66 | SobelRGBComposite 67 | 68 | canny-composite 69 | 70 | Vertex 71 | quadScreenKernel 72 | Fragment 73 | CannyThresholdComposite 74 | 75 | 
canny-pre 76 | 77 | Vertex 78 | quadKernel 79 | Fragment 80 | SobelCanny 81 | 82 | canny-inverse 83 | 84 | Vertex 85 | quadScreenKernel 86 | Fragment 87 | CannyThresholdInvert 88 | 89 | canny-comic 90 | 91 | Vertex 92 | quadScreenKernel 93 | Fragment 94 | CannyComic 95 | 96 | canny-mag 97 | 98 | Vertex 99 | quadKernel 100 | Fragment 101 | CannyMag 102 | 103 | canny-threshold 104 | 105 | Vertex 106 | quadScreenKernel 107 | Fragment 108 | CannyThreshold 109 | 110 | 111 | 112 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/CannyComic.fsh: -------------------------------------------------------------------------------- 1 | // CannyComic.fsh 2 | // 3 | // Comic effect using Canny edge detection + thresholding 4 | // 5 | // Takes the input of CannyMag.fsh 6 | // 7 | // Outputs a black pixel if it is over the high threshold 8 | // or if it is over the low threshold and a neighbor is 9 | // over the high threshold. Otherwise, outputs a pixel value 10 | // based on intensity and a simple cell shading filter 11 | // 12 | 13 | 14 | uniform sampler2D SamplerRGB; 15 | 16 | uniform mediump float lowThreshold; 17 | uniform mediump float highThreshold; 18 | uniform highp vec2 texelSize; 19 | 20 | 21 | varying mediump vec2 tc11; 22 | varying mediump vec2 tc12; 23 | varying mediump vec2 tc13; 24 | varying mediump vec2 tc21; 25 | varying mediump vec2 tc22; 26 | varying mediump vec2 tc23; 27 | varying mediump vec2 tc31; 28 | varying mediump vec2 tc32; 29 | varying mediump vec2 tc33; 30 | 31 | #define sampleR(tc) (texture2D(SamplerRGB, tc).r) 32 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 33 | 34 | // parameters that define the comic effect 35 | #define LINE_SLOPE -0.9 36 | #define LINE_INTERVAL 10.0 37 | #define LINE_STRENGTH 1.0 38 | #define BLACK_THRESHOLD 0.2 39 | #define WHITE_THRESHOLD 0.45 40 | 41 | 42 | void main() 43 | { 44 | bool result = false; 45 | mediump vec4 temp = sampleRGBA(tc22); 46 | mediump float m22 
= temp.r; 47 | 48 | 49 | if (m22 >= highThreshold ) 50 | { 51 | result = true; 52 | } 53 | else if (m22 >= lowThreshold){ 54 | mediump float m11 = sampleR(tc11); 55 | mediump float m12 = sampleR(tc12); 56 | mediump float m13 = sampleR(tc13); 57 | mediump float m21 = sampleR(tc21); 58 | mediump float m23 = sampleR(tc23); 59 | mediump float m31 = sampleR(tc31); 60 | mediump float m32 = sampleR(tc32); 61 | mediump float m33 = sampleR(tc33); 62 | if ((m11 >= highThreshold) || (m12 >= highThreshold) || (m13 >= highThreshold ) || 63 | (m21 >= highThreshold) || (m23 >= highThreshold) || 64 | (m31 >= highThreshold) || (m32 >= highThreshold) || (m33 >= highThreshold)) 65 | { 66 | result = true; 67 | } 68 | } 69 | 70 | 71 | // set pixel to white if it passed, or black otherwise 72 | mediump vec3 outColor; 73 | 74 | if (result) 75 | outColor = vec3(0.0); 76 | else 77 | { 78 | if (temp.a > WHITE_THRESHOLD) 79 | { 80 | outColor = vec3(1.0); 81 | } else if (temp.a < BLACK_THRESHOLD) 82 | { 83 | outColor = vec3(0.0); 84 | } else 85 | { 86 | mediump vec2 pixel = tc22 / texelSize; 87 | mediump float b = LINE_SLOPE * pixel.x - pixel.y; 88 | mediump float value = (floor(mod(b,LINE_INTERVAL)) - LINE_STRENGTH > 0.0) ? 
1.0 : 0.0; 89 | outColor = vec3(value); 90 | } 91 | } 92 | gl_FragColor = vec4(outColor,1.0); 93 | } 94 | 95 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/CannyMag.fsh: -------------------------------------------------------------------------------- 1 | // CannyMag.fsh 2 | // 3 | // Canny Edge Detector 4 | // Takes the result of SobelCanny.fsh and discards pixels that fail 5 | // to meet the critera applied by the Canny edge detection algorithms 6 | // 7 | 8 | uniform sampler2D SamplerRGB; 9 | 10 | uniform mediump float threshold; 11 | 12 | varying mediump vec2 tc11; 13 | varying mediump vec2 tc12; 14 | varying mediump vec2 tc13; 15 | varying mediump vec2 tc21; 16 | varying mediump vec2 tc22; 17 | varying mediump vec2 tc23; 18 | varying mediump vec2 tc31; 19 | varying mediump vec2 tc32; 20 | varying mediump vec2 tc33; 21 | 22 | const mediump float pi = 3.1415926535; 23 | 24 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 25 | #define sampleRGB(tc) (texture2D(SamplerRGB, tc).rgb) 26 | #define sampleA(tc) (texture2D(SamplerRGB, tc).a) 27 | 28 | // sample H and V and return the magnitude 29 | mediump float mag(mediump vec2 hv) 30 | { 31 | return (length(hv * 4.0)); 32 | } 33 | 34 | // sample the angle 35 | mediump float unpack(lowp float angle) 36 | { 37 | // convert it from 0.0 - 1.0 to -pi/2 to pi/2 38 | mediump float theta = angle - 0.5; 39 | theta = degrees(theta * pi); 40 | if (theta < 0.0) 41 | theta += 180.0; 42 | return theta; 43 | } 44 | 45 | #define sampleMag(tc) ((texture2D(SamplerRGB,tc).g)) 46 | 47 | #define sampleTheta(tc) (unpack(sampleA(tc))) 48 | 49 | void main() 50 | { 51 | mediump vec3 temp = sampleRGB(tc22); 52 | mediump float angle = unpack(temp.b); 53 | mediump float m11 = sampleMag(tc11); 54 | mediump float m12 = sampleMag(tc12); 55 | mediump float m13 = sampleMag(tc13); 56 | mediump float m21 = sampleMag(tc21); 57 | mediump float m22 = temp.g; 58 | mediump float m23 = 
sampleMag(tc23); 59 | mediump float m31 = sampleMag(tc31); 60 | mediump float m32 = sampleMag(tc32); 61 | mediump float m33 = sampleMag(tc33); 62 | 63 | mediump float result = 0.0; 64 | 65 | bool test = (angle <= 22.5 || angle >= 157.5)&&(m22 > m21 && m22 > m23) 66 | || ((angle <= 112.5 && angle >= 77.5)&&(m22 > m12 && m22 > m32)) 67 | || ((angle <= 77.5 && angle >= 22.5)&&(m22 > m11 && m22 > m33)) 68 | || ((angle >= 112.5 && angle <= 157.5)&&(m22 > m13 && m22 > m31)); 69 | 70 | mediump vec4 outColor = vec4(vec3(0),temp.r); 71 | 72 | if (test) 73 | outColor.rgb = vec3(m22); 74 | 75 | gl_FragColor = outColor; 76 | } 77 | 78 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/CannyThreshold.fsh: -------------------------------------------------------------------------------- 1 | // CannyThreshold.fsh 2 | // 3 | // Canny Edge Detector Thresholding pass 4 | // 5 | // Takes the input of CannyMag.fsh 6 | // 7 | // Outputs a white pixel if it is over the high threshold 8 | // or if it is over the low threshold and a neighbor is 9 | // over the high threshold. 
10 | // 11 | // Future work would be to expand it to a 5x5 area 12 | // 13 | 14 | uniform sampler2D SamplerRGB; 15 | 16 | uniform mediump float lowThreshold; 17 | uniform mediump float highThreshold; 18 | 19 | uniform mediump vec2 texelSize; 20 | 21 | varying mediump vec2 tc11; 22 | varying mediump vec2 tc12; 23 | varying mediump vec2 tc13; 24 | varying mediump vec2 tc21; 25 | varying mediump vec2 tc22; 26 | varying mediump vec2 tc23; 27 | varying mediump vec2 tc31; 28 | varying mediump vec2 tc32; 29 | varying mediump vec2 tc33; 30 | 31 | #define sampleR(tc) (texture2D(SamplerRGB, tc).r) 32 | 33 | void main() 34 | { 35 | mediump float m22 = sampleR(tc22); 36 | mediump float result = 0.0; 37 | 38 | if (m22 >= highThreshold ) 39 | { 40 | result = 1.0; 41 | } 42 | else if (m22 >= lowThreshold){ 43 | mediump float m11 = sampleR(tc11); 44 | mediump float m12 = sampleR(tc12); 45 | mediump float m13 = sampleR(tc13); 46 | mediump float m21 = sampleR(tc21); 47 | mediump float m23 = sampleR(tc23); 48 | mediump float m31 = sampleR(tc31); 49 | mediump float m32 = sampleR(tc32); 50 | mediump float m33 = sampleR(tc33); 51 | if ((m11 >= highThreshold) || (m12 >= highThreshold) || (m13 >= highThreshold ) || 52 | (m21 >= highThreshold) || (m23 >= highThreshold) || 53 | (m31 >= highThreshold) || (m32 >= highThreshold) || (m33 >= highThreshold)) 54 | { 55 | result = 1.0; 56 | } 57 | } 58 | 59 | // set pixel to white if it passed, or black otherwise 60 | mediump vec4 outColor = vec4(vec3(result),1.0); 61 | gl_FragColor = outColor; 62 | } 63 | 64 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/CannyThresholdComposite.fsh: -------------------------------------------------------------------------------- 1 | // CannyThresholdComposite.fsh 2 | // 3 | // Canny Edge Detector Thresholding w/ Greyscale Composite 4 | // 5 | // Takes the input of CannyMag.fsh 6 | // 7 | // Outputs a red pixel if it is over the high threshold 8 | // or if 
it is over the low threshold and a neighbor is 9 | // over the high threshold. Otherwise, outputs the greyscale 10 | // value for that pixel. 11 | // 12 | // Future work would be to expand it to a 5x5 area 13 | // 14 | 15 | 16 | uniform sampler2D SamplerRGB; 17 | 18 | uniform mediump float lowThreshold; 19 | uniform mediump float highThreshold; 20 | 21 | varying mediump vec2 tc11; 22 | varying mediump vec2 tc12; 23 | varying mediump vec2 tc13; 24 | varying mediump vec2 tc21; 25 | varying mediump vec2 tc22; 26 | varying mediump vec2 tc23; 27 | varying mediump vec2 tc31; 28 | varying mediump vec2 tc32; 29 | varying mediump vec2 tc33; 30 | 31 | #define sampleR(tc) (texture2D(SamplerRGB, tc).r) 32 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 33 | 34 | void main() 35 | { 36 | bool result = false; 37 | mediump vec4 temp = sampleRGBA(tc22); 38 | mediump float m22 = temp.r; 39 | 40 | 41 | if (m22 >= highThreshold ) 42 | { 43 | result = true; 44 | } 45 | else if (m22 >= lowThreshold){ 46 | mediump float m11 = sampleR(tc11); 47 | mediump float m12 = sampleR(tc12); 48 | mediump float m13 = sampleR(tc13); 49 | mediump float m21 = sampleR(tc21); 50 | mediump float m23 = sampleR(tc23); 51 | mediump float m31 = sampleR(tc31); 52 | mediump float m32 = sampleR(tc32); 53 | mediump float m33 = sampleR(tc33); 54 | if ((m11 >= highThreshold) || (m12 >= highThreshold) || (m13 >= highThreshold ) || 55 | (m21 >= highThreshold) || (m23 >= highThreshold) || 56 | (m31 >= highThreshold) || (m32 >= highThreshold) || (m33 >= highThreshold)) 57 | { 58 | result = true; 59 | } 60 | } 61 | 62 | 63 | // set pixel to white if it passed, or black otherwise 64 | mediump vec3 outColor; 65 | 66 | if (result) 67 | outColor = vec3(1.0,0.0,0.0); 68 | else 69 | outColor = vec3(temp.a); 70 | 71 | gl_FragColor = vec4(outColor,1.0); 72 | } 73 | 74 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/CannyThresholdInvert.fsh: 
-------------------------------------------------------------------------------- 1 | // CannyThresholdInvert.fsh 2 | // 3 | // Canny Edge Detector Thresholding pass 4 | // 5 | // Takes the input of CannyMag.fsh 6 | // 7 | // Outputs a black pixel if it is over the high threshold 8 | // or if it is over the low threshold and a neighbor is 9 | // over the high threshold. 10 | // 11 | // Future work would be to expand it to a 5x5 area 12 | // 13 | 14 | uniform sampler2D SamplerRGB; 15 | 16 | uniform mediump float lowThreshold; 17 | uniform mediump float highThreshold; 18 | 19 | varying mediump vec2 tc11; 20 | varying mediump vec2 tc12; 21 | varying mediump vec2 tc13; 22 | varying mediump vec2 tc21; 23 | varying mediump vec2 tc22; 24 | varying mediump vec2 tc23; 25 | varying mediump vec2 tc31; 26 | varying mediump vec2 tc32; 27 | varying mediump vec2 tc33; 28 | 29 | #define sampleR(tc) (texture2D(SamplerRGB, tc).r) 30 | 31 | 32 | void main() 33 | { 34 | mediump float m22 = sampleR(tc22); 35 | mediump float result = 1.0; 36 | 37 | if (m22 >= highThreshold ) 38 | { 39 | result = 0.0; 40 | } 41 | else if (m22 >= lowThreshold){ 42 | mediump float m11 = sampleR(tc11); 43 | mediump float m12 = sampleR(tc12); 44 | mediump float m13 = sampleR(tc13); 45 | mediump float m21 = sampleR(tc21); 46 | mediump float m23 = sampleR(tc23); 47 | mediump float m31 = sampleR(tc31); 48 | mediump float m32 = sampleR(tc32); 49 | mediump float m33 = sampleR(tc33); 50 | if ((m11 >= highThreshold) || (m12 >= highThreshold) || (m13 >= highThreshold ) || 51 | (m21 >= highThreshold) || (m23 >= highThreshold) || 52 | (m31 >= highThreshold) || (m32 >= highThreshold) || (m33 >= highThreshold)) 53 | { 54 | result = 0.0; 55 | } 56 | } 57 | 58 | // set pixel to white if it passed, or black otherwise 59 | mediump vec4 outColor = vec4(vec3(result),1.0); 60 | gl_FragColor = outColor; 61 | } 62 | 63 | -------------------------------------------------------------------------------- 
/GLVideoFilter/Shaders/Sobel.fsh: -------------------------------------------------------------------------------- 1 | // Sobel.fsh 2 | // 3 | // Simple Sobel pass 4 | // Operates on RGB source 5 | // 6 | 7 | uniform sampler2D SamplerRGB; 8 | 9 | varying mediump vec2 tc11; 10 | varying mediump vec2 tc12; 11 | varying mediump vec2 tc13; 12 | varying mediump vec2 tc21; 13 | //varying mediump vec2 tc22; 14 | varying mediump vec2 tc23; 15 | varying mediump vec2 tc31; 16 | varying mediump vec2 tc32; 17 | varying mediump vec2 tc33; 18 | 19 | #define sampleRGB(tc) (texture2D(SamplerRGB, tc).rgb) 20 | 21 | void main() 22 | { 23 | 24 | mediump vec3 m11 = sampleRGB(tc11); 25 | mediump vec3 m12 = sampleRGB(tc12); 26 | mediump vec3 m13 = sampleRGB(tc13); 27 | mediump vec3 m21 = sampleRGB(tc21); 28 | // mediump vec3 m22 = sampleRGB(tc22)b; 29 | mediump vec3 m23 = sampleRGB(tc23); 30 | mediump vec3 m31 = sampleRGB(tc31); 31 | mediump vec3 m32 = sampleRGB(tc32); 32 | mediump vec3 m33 = sampleRGB(tc33); 33 | 34 | mediump vec3 H = -m11 - 2.0*m12 - m13 +m31 + 2.0*m32 + m33; 35 | 36 | mediump vec3 V = m11 - m13 + 2.0*m21 - 2.0*m23 + m31 - m33; 37 | 38 | // calculate the length for each channel in the vector 39 | mediump vec3 sobel = sqrt(H*H+V*V); 40 | 41 | gl_FragColor = vec4(sobel,1.0); 42 | } 43 | 44 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/SobelBW.fsh: -------------------------------------------------------------------------------- 1 | // SobelBW.fsh 2 | // 3 | // Simple Sobel pass 4 | // Operates on Illumination (grayscale) source 5 | // 6 | 7 | uniform sampler2D SamplerRGB; 8 | 9 | varying mediump vec2 tc11; 10 | varying mediump vec2 tc12; 11 | varying mediump vec2 tc13; 12 | varying mediump vec2 tc21; 13 | //varying mediump vec2 tc22; 14 | varying mediump vec2 tc23; 15 | varying mediump vec2 tc31; 16 | varying mediump vec2 tc32; 17 | varying mediump vec2 tc33; 18 | 19 | #define sampleRGBA(tc) 
(texture2D(SamplerRGB, tc)) 20 | #define sampleRGB(tc) (texture2D(SamplerRGB, tc).rgb) 21 | #define sampleA(tc) (texture2D(SamplerRGB, tc).a) 22 | 23 | void main() 24 | { 25 | 26 | mediump float m11 = sampleA(tc11); 27 | mediump float m12 = sampleA(tc12); 28 | mediump float m13 = sampleA(tc13); 29 | mediump float m21 = sampleA(tc21); 30 | // mediump float m22 = sampleA(tc22); 31 | mediump float m23 = sampleA(tc23); 32 | mediump float m31 = sampleA(tc31); 33 | mediump float m32 = sampleA(tc32); 34 | mediump float m33 = sampleA(tc33); 35 | 36 | 37 | 38 | mediump float H = -m11 - 2.0*m12 - m13 +m31 + 2.0*m32 + m33; 39 | mediump float V = m11 - m13 + 2.0*m21 - 2.0*m23 + m31 - m33; 40 | 41 | // for a single channel, sqrt(H*H+V*V) is equal to the length 42 | mediump float sobel = length(vec2(H,V)); 43 | 44 | 45 | // output result as gray 46 | mediump vec4 outColor = vec4(sobel); 47 | 48 | gl_FragColor = outColor; 49 | 50 | 51 | } 52 | 53 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/SobelBWComposite.fsh: -------------------------------------------------------------------------------- 1 | // SobelRGBComposite.fsh 2 | // 3 | // Composite Sobel pass 4 | // Shows the scene in grayscale with Sobel results overlaid in red 5 | // 6 | 7 | uniform sampler2D SamplerRGB; 8 | 9 | varying mediump vec2 tc11; 10 | varying mediump vec2 tc12; 11 | varying mediump vec2 tc13; 12 | varying mediump vec2 tc21; 13 | varying mediump vec2 tc22; 14 | varying mediump vec2 tc23; 15 | varying mediump vec2 tc31; 16 | varying mediump vec2 tc32; 17 | varying mediump vec2 tc33; 18 | 19 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 20 | #define sampleRGB(tc) (texture2D(SamplerRGB, tc).rgb) 21 | #define sampleA(tc) (texture2D(SamplerRGB, tc).a) 22 | void main() 23 | { 24 | 25 | mediump float m11 = sampleA(tc11); 26 | mediump float m12 = sampleA(tc12); 27 | mediump float m13 = sampleA(tc13); 28 | mediump float m21 = sampleA(tc21); 29 | 
mediump float m22 = sampleA(tc22); 30 | mediump float m23 = sampleA(tc23); 31 | mediump float m31 = sampleA(tc31); 32 | mediump float m32 = sampleA(tc32); 33 | mediump float m33 = sampleA(tc33); 34 | 35 | mediump float H = -m11 - 2.0*m12 - m13 +m31 + 2.0*m32 + m33; 36 | mediump float V = m11 - m13 + 2.0*m21 - 2.0*m23 + m31 - m33; 37 | mediump float sobel = length(vec2(H,V)); 38 | 39 | 40 | // set base value to be grayscale value at that pixel 41 | mediump vec4 outColor = vec4(vec3(m22),1.0); 42 | 43 | // add sobel result to red channel 44 | outColor.r += sobel; 45 | 46 | gl_FragColor = outColor; 47 | 48 | 49 | } 50 | 51 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/SobelBlend.fsh: -------------------------------------------------------------------------------- 1 | // SobelBlend.fsh 2 | // 3 | // Blended Sobel pass 4 | // Adds 50% of BW Sobel result to the RGB result 5 | // Results in image that is brighter than pure RGB 6 | // 7 | 8 | uniform sampler2D SamplerRGB; 9 | 10 | varying mediump vec2 tc11; 11 | varying mediump vec2 tc12; 12 | varying mediump vec2 tc13; 13 | varying mediump vec2 tc21; 14 | //varying mediump vec2 tc22; 15 | varying mediump vec2 tc23; 16 | varying mediump vec2 tc31; 17 | varying mediump vec2 tc32; 18 | varying mediump vec2 tc33; 19 | 20 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc).rgba) 21 | 22 | void main() 23 | { 24 | mediump vec4 m11 = sampleRGBA(tc11); 25 | mediump vec4 m12 = sampleRGBA(tc12); 26 | mediump vec4 m13 = sampleRGBA(tc13); 27 | mediump vec4 m21 = sampleRGBA(tc21); 28 | //mediump float m22 = sample(tc22); 29 | mediump vec4 m23 = sampleRGBA(tc23); 30 | mediump vec4 m31 = sampleRGBA(tc31); 31 | mediump vec4 m32 = sampleRGBA(tc32); 32 | mediump vec4 m33 = sampleRGBA(tc33); 33 | 34 | // calculate the sobel value for the RGB and Grayscale values 35 | mediump vec4 H = -m11 - 2.0*m12 - m13 +m31 + 2.0*m32 + m33; 36 | mediump vec4 V = m11 - m13 + 2.0*m21 - 2.0*m23 + m31 
- m33; 37 | 38 | // calculate the length of each channel 39 | mediump vec4 sobel = sqrt(H*H+V*V); 40 | 41 | // add 50% of the grayscale sobel to 50% of the RGB sobel. 42 | mediump vec3 rgb = sobel.rgb * 0.5 + vec3(sobel.a * 0.5 ); 43 | mediump vec4 outColor = vec4(rgb,1.0); 44 | gl_FragColor = outColor; 45 | } 46 | 47 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/SobelCanny.fsh: -------------------------------------------------------------------------------- 1 | // SobelRGBComposite.fsh 2 | // 3 | // Sobel pre-pass for the Canny edge detectors 4 | // Packs the image chroma value, sobel magnitude, and angle into the output pixel 5 | // 6 | 7 | uniform sampler2D SamplerRGB; 8 | 9 | varying mediump vec2 tc11; 10 | varying mediump vec2 tc12; 11 | varying mediump vec2 tc13; 12 | varying mediump vec2 tc21; 13 | varying mediump vec2 tc22; 14 | varying mediump vec2 tc23; 15 | varying mediump vec2 tc31; 16 | varying mediump vec2 tc32; 17 | varying mediump vec2 tc33; 18 | 19 | const mediump float invPi = 1.0/3.1415926535; 20 | 21 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 22 | #define sampleRGB(tc) (texture2D(SamplerRGB, tc).rgb) 23 | #define sampleA(tc) (texture2D(SamplerRGB, tc).a) 24 | 25 | void main() 26 | { 27 | 28 | mediump float m11 = sampleA(tc11); 29 | mediump float m12 = sampleA(tc12); 30 | mediump float m13 = sampleA(tc13); 31 | mediump float m21 = sampleA(tc21); 32 | mediump float m22 = sampleA(tc22); 33 | mediump float m23 = sampleA(tc23); 34 | mediump float m31 = sampleA(tc31); 35 | mediump float m32 = sampleA(tc32); 36 | mediump float m33 = sampleA(tc33); 37 | 38 | mediump float H = -m11 - 2.0*m12 - m13 +m31 + 2.0*m32 + m33; 39 | mediump float V = m11 - m13 + 2.0*m21 - 2.0*m23 + m31 - m33; 40 | mediump float sobel = length(vec2(H,V)); 41 | 42 | // atan returns -pi/2 to pi/2 - move it to 0.0-1.0 43 | mediump float theta = atan(H/V) * invPi; 44 | theta += 0.5; 45 | 46 | gl_FragColor = 
vec4(m22,sobel,theta,1.0); 47 | 48 | 49 | } 50 | 51 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/SobelRGBComposite.fsh: -------------------------------------------------------------------------------- 1 | // SobelRGBComposite.fsh 2 | // 3 | // Composite Sobel pass 4 | // Shows the RGB frame with Sobel results overlaid in red 5 | // 6 | 7 | uniform sampler2D SamplerRGB; 8 | 9 | varying mediump vec2 tc11; 10 | varying mediump vec2 tc12; 11 | varying mediump vec2 tc13; 12 | varying mediump vec2 tc21; 13 | varying mediump vec2 tc22; 14 | varying mediump vec2 tc23; 15 | varying mediump vec2 tc31; 16 | varying mediump vec2 tc32; 17 | varying mediump vec2 tc33; 18 | 19 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 20 | #define sampleRGB(tc) (texture2D(SamplerRGB, tc).rgb) 21 | #define sampleA(tc) (texture2D(SamplerRGB, tc).a) 22 | 23 | void main() 24 | { 25 | mediump float m11 = sampleA(tc11); 26 | mediump float m12 = sampleA(tc12); 27 | mediump float m13 = sampleA(tc13); 28 | mediump float m21 = sampleA(tc21); 29 | // mediump float m22 = sampleA(tc22); 30 | mediump float m23 = sampleA(tc23); 31 | mediump float m31 = sampleA(tc31); 32 | mediump float m32 = sampleA(tc32); 33 | mediump float m33 = sampleA(tc33); 34 | 35 | mediump float H = -m11 - 2.0*m12 - m13 +m31 + 2.0*m32 + m33; 36 | mediump float V = m11 - m13 + 2.0*m21 - 2.0*m23 + m31 - m33; 37 | mediump float sobel = length(vec2(H,V)); 38 | 39 | mediump vec3 inColor = sampleRGB(tc22); 40 | 41 | // set base value to be rgb value at that pixel 42 | mediump vec4 outColor = vec4(inColor,1.0); 43 | 44 | // add sobel result to red channel 45 | outColor.r += sobel; 46 | 47 | gl_FragColor = outColor; 48 | } 49 | 50 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/blurXPass.fsh: -------------------------------------------------------------------------------- 1 | // blurXPass.fsh 2 | // 3 | // Performs 
horizontal Gaussian blur 4 | // 5 | 6 | uniform sampler2D SamplerRGB; 7 | 8 | varying mediump vec2 tc21; 9 | varying mediump vec2 tc22; 10 | varying mediump vec2 tc23; 11 | 12 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 13 | 14 | const mediump float blur1 = 1.0 / 4.0; 15 | const mediump float blur2 = 2.0 / 4.0; 16 | 17 | void main() 18 | { 19 | mediump vec4 m21 = sampleRGBA(tc21); 20 | mediump vec4 m22 = sampleRGBA(tc22); 21 | mediump vec4 m23 = sampleRGBA(tc23); 22 | 23 | gl_FragColor = blur1 * (m21 + m23) + blur2 * m22; 24 | } 25 | 26 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/blurYPass.fsh: -------------------------------------------------------------------------------- 1 | // blurYPass.fsh 2 | // 3 | // Performs vertical Gaussian blur 4 | // 5 | 6 | 7 | uniform sampler2D SamplerRGB; 8 | 9 | 10 | varying mediump vec2 tc12; 11 | varying mediump vec2 tc22; 12 | varying mediump vec2 tc32; 13 | 14 | #define sampleRGBA(tc) (texture2D(SamplerRGB, tc)) 15 | 16 | const mediump float blur1 = 1.0 / 4.0; 17 | const mediump float blur2 = 2.0 / 4.0; 18 | 19 | void main() 20 | { 21 | mediump vec4 m12 = sampleRGBA(tc12); 22 | mediump vec4 m22 = sampleRGBA(tc22); 23 | mediump vec4 m32 = sampleRGBA(tc32); 24 | 25 | gl_FragColor = blur1 * (m12 + m32) 26 | + blur2 * m22; 27 | } 28 | 29 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/passthrough.fsh: -------------------------------------------------------------------------------- 1 | // passthrough.fsh 2 | // 3 | // Samples the texture and writes the result to the pixel 4 | // 5 | 6 | uniform sampler2D SamplerRGB; 7 | 8 | varying highp vec2 texCoordVarying; 9 | 10 | void main() 11 | { 12 | gl_FragColor = texture2D(SamplerRGB, texCoordVarying); 13 | } 14 | 15 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/quadInvertY.vsh: 
-------------------------------------------------------------------------------- 1 | // quadInvertY.vsh 2 | // 3 | // Shader that inverts Y axis on source texture 4 | // 5 | 6 | attribute vec4 position; 7 | attribute vec2 texCoord; 8 | 9 | varying vec2 texCoordVarying; 10 | 11 | void main() 12 | { 13 | texCoordVarying = vec2(texCoord.x, 1.0-texCoord.y); 14 | gl_Position = position; 15 | } 16 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/quadKernel.vsh: -------------------------------------------------------------------------------- 1 | // quadKernel.vsh 2 | // 3 | // Pre-computes the texture coordinates for the 3x3 kernel 4 | // and passes them as varyings to the fragment shaders 5 | // 6 | 7 | attribute vec4 position; 8 | attribute vec2 texCoord; 9 | 10 | uniform vec2 texelSize; 11 | 12 | varying vec2 tc11; 13 | varying vec2 tc12; 14 | varying vec2 tc13; 15 | varying vec2 tc21; 16 | varying vec2 tc22; 17 | varying vec2 tc23; 18 | varying vec2 tc31; 19 | varying vec2 tc32; 20 | varying vec2 tc33; 21 | 22 | void main() 23 | { 24 | tc11 = texCoord + vec2(-texelSize.x,+texelSize.y); 25 | tc12 = texCoord + vec2(0.0,+texelSize.y); 26 | tc13 = texCoord + vec2(+texelSize.x,+texelSize.y); 27 | tc21 = texCoord + vec2(-texelSize.x,0.0); 28 | tc22 = texCoord; 29 | tc23 = texCoord + vec2(+texelSize.x,0.0); 30 | tc31 = texCoord + vec2(-texelSize.x,-texelSize.y); 31 | tc32 = texCoord + vec2(0.0,-texelSize.y); 32 | tc33 = texCoord + vec2(+texelSize.x,-texelSize.y); 33 | gl_Position = position; 34 | } 35 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/quadPassthrough.vsh: -------------------------------------------------------------------------------- 1 | // quadPassthrough.vsh 2 | // 3 | // Pass through vertex shader for full screen quad 4 | // Also transforms the vertices to account for screen rotation 5 | // and screen aspect ratio 6 | // 7 | 8 | attribute vec4 
position; 9 | attribute vec2 texCoord; 10 | 11 | uniform vec2 posScale; 12 | 13 | varying vec2 texCoordVarying; 14 | 15 | void main() 16 | { 17 | gl_Position = position * vec4(posScale,1.0,1.0); 18 | texCoordVarying = texCoord; 19 | } 20 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/quadScreenKernel.vsh: -------------------------------------------------------------------------------- 1 | // quadScreenKernel.vsh 2 | // 3 | // It pre-computes the texture coordinates for the 3x3 kernel 4 | // and passes them as varyings to the fragment shaders 5 | // 6 | // It also transforms the vertices to account for screen rotation 7 | // and screen aspect ratio 8 | // 9 | 10 | 11 | attribute vec4 position; 12 | attribute vec2 texCoord; 13 | 14 | uniform vec2 texelSize; 15 | uniform vec2 posScale; 16 | 17 | varying vec2 tc11; 18 | varying vec2 tc12; 19 | varying vec2 tc13; 20 | varying vec2 tc21; 21 | varying vec2 tc22; 22 | varying vec2 tc23; 23 | varying vec2 tc31; 24 | varying vec2 tc32; 25 | varying vec2 tc33; 26 | 27 | void main() 28 | { 29 | tc11 = texCoord + vec2(-texelSize.x,+texelSize.y); 30 | tc12 = texCoord + vec2(0.0,+texelSize.y); 31 | tc13 = texCoord + vec2(+texelSize.x,+texelSize.y); 32 | tc21 = texCoord + vec2(-texelSize.x,0.0); 33 | tc22 = texCoord; 34 | tc23 = texCoord + vec2(+texelSize.x,0.0); 35 | tc31 = texCoord + vec2(-texelSize.x,-texelSize.y); 36 | tc32 = texCoord + vec2(0.0,-texelSize.y); 37 | tc33 = texCoord + vec2(+texelSize.x,-texelSize.y); 38 | gl_Position = position * vec4(posScale,1.0,1.0); 39 | } 40 | -------------------------------------------------------------------------------- /GLVideoFilter/Shaders/yuv2rgb.fsh: -------------------------------------------------------------------------------- 1 | // yuv2rgb.fsh 2 | // 3 | // Convert Y'UV output of camera into packed RGB/Y output 4 | // Also perform additional color space transform if required 5 | // Can be used to mimic colorblindness 6 
| // 7 | 8 | uniform sampler2D SamplerY; 9 | uniform sampler2D SamplerUV; 10 | 11 | varying mediump vec2 texCoordVarying; 12 | 13 | uniform mediump mat3 rgbConvolution; 14 | uniform mediump mat3 colorConvolution; 15 | 16 | 17 | mediump mat3 yuv2rgb = mat3( 1.0 ,1.0 , 1.0 , 18 | 0.0 ,-.18732 , 1.8556, 19 | 20 | 1.57481 , -.46813 , 0.0); 21 | void main() 22 | { 23 | mediump vec3 yuv; 24 | 25 | yuv.x = texture2D(SamplerY, texCoordVarying).r; 26 | yuv.yz = texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5); 27 | 28 | // perform the color convolution 29 | mediump vec3 rgb = clamp(rgbConvolution * yuv,0.0,1.0); 30 | 31 | // perform a color space transform for color blindness 32 | rgb = clamp(colorConvolution * rgb,0.0,1.0); 33 | 34 | // pack the RGB and original Y (grayscale) output into the texture 35 | gl_FragColor = vec4(rgb,yuv.x); 36 | } 37 | 38 | -------------------------------------------------------------------------------- /GLVideoFilter/StoryBoard_iPad.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | -------------------------------------------------------------------------------- /GLVideoFilter/StoryBoard_iPhone.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 
66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | -------------------------------------------------------------------------------- /GLVideoFilter/main.m: -------------------------------------------------------------------------------- 1 | #import 2 | 3 | #import "AppDelegate.h" 4 | 5 | int main(int argc, char *argv[]) 6 | { 7 | @autoreleasepool { 8 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | **Note: This repository is now deprecated.** 2 | 3 | Please see the [AccessibleVideo](https://github.com/dghost/AccessibleVideo) repository for a modern replacement implemented in Swift/Metal. 4 | 5 | ### GLVideoFilter v2.0 6 | Real-time image processing on a live video stream on iOS devices. This project was based on the [GLCameraRipple demo](http://developer.apple.com/library/ios/#samplecode/GLCameraRipple/Introduction/Intro.html), but has been updated to require iOS 6 and take advantage of iOS 7 features when available. 7 | 8 | Note: This only runs on actual devices, and will not run in the simulator. 9 | 10 | #### Description 11 | 12 | Built to support accessibility research by [Isla Schanuel](http://www.islaes.com), the purpose of this app was to evaluate the feasibility of edge detection techniques as an accessibility tool. While the screen size and form-factor of iOS devices is not conducive to real-world use, this was intended as a early prototype to test generating an accessible video stream from a non-stereoscopic video stream. 
As a result, the selection of filters is somewhat narrow in scope and is centered around providing meaningful ways of either enhancing a live video stream (by highlighting object edges) or transforming it entirely into a format that is easier for persons with reduced vision to see. 13 | 14 | #### Features 15 | 16 | - Flexible filter pipeline that supports arbitrary multi-pass filters 17 | - On-demand frame processing allows camera to update asynchronously from the screen 18 | - Supports 60fps cameras under iOS7 19 | - Compatible with ARM64 devices 20 | - Support for iPhones, iPod Touches, and iPads 21 | 22 | ##### Filters 23 | 24 | The following filters have been implemented: 25 | 26 | - Sobel operator using an RGB video stream as source 27 | - Sobel operator using a Grayscale video stream source 28 | - A blended Sobel operator that adds 50% of the grayscale result to the RGB result 29 | - A composite that overlays the result of the Sobel operator on the grayscale video stream. 30 | - A composite that overlays the result of the Sobel operator on the RGB video stream. 31 | - Canny edge detector using a low threshold of 0.2 32 | - A composite that overlays the Canny edge detection results on the grayscale video stream. 33 | - A chained Sobel operator -> Canny edge detector with inverted colors 34 | 35 | Additionally, an optional blur pre-pass can be enabled for any video filter. 36 | 37 | #### Usage 38 | - One-finger left/right swipes cycle between filters. 39 | - One-finger up/down swipes cycle between blur modes. 40 | - One-finger tap locks or unlocks mode changing. 41 | 42 | ##### Thanks go out to... 43 | - [Jonathan George](http://jdg.net), for making [MBProgressHUD](https://github.com/jdg/MBProgressHUD). 44 | - [Gary Gehiere](http://blog.iamgary.com/helloworld/), for creating the [Lock/Unlock icons](http://www.pixelpressicons.com/?p=108) and licensing them as [Creative Commons CA 2.5](http://creativecommons.org/licenses/by/2.5/ca/). 
45 | --------------------------------------------------------------------------------