├── DevelopPlayerDemo.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ └── xcuserdata │ │ └── pepsikirk.xcuserdatad │ │ └── UserInterfaceState.xcuserstate └── xcuserdata │ ├── jiangxincai.xcuserdatad │ ├── xcdebugger │ │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ │ ├── DevelopPlayerDemo.xcscheme │ │ └── xcschememanagement.plist │ └── pepsikirk.xcuserdatad │ └── xcschemes │ ├── DevelopPlayerDemo.xcscheme │ └── xcschememanagement.plist ├── DevelopPlayerDemo ├── AppDelegate.h ├── AppDelegate.m ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── Cat.mp4 ├── Info.plist ├── PKLayerContentsViewController.h ├── PKLayerContentsViewController.m ├── PKOpenGLESViewController.h ├── PKOpenGLESViewController.m ├── PKShortVideoPlayer │ ├── AVAssetReader+Layer.Contents │ │ ├── PKLayerContentsPlayerView.h │ │ ├── PKLayerContentsPlayerView.m │ │ ├── PKLayerVideoDecoder.h │ │ └── PKLayerVideoDecoder.m │ ├── AVAssetReader+OpenGL ES │ │ ├── GPUImage │ │ │ ├── GLProgram.h │ │ │ ├── GLProgram.m │ │ │ ├── GPUImageContext.h │ │ │ ├── GPUImageContext.m │ │ │ ├── GPUImageFramebuffer.h │ │ │ ├── GPUImageFramebuffer.m │ │ │ ├── GPUImageFramebufferCache.h │ │ │ └── GPUImageFramebufferCache.m │ │ ├── PKChatMessagePlayerView.h │ │ ├── PKChatMessagePlayerView.m │ │ ├── PKColorConversion.h │ │ ├── PKColorConversion.m │ │ ├── PKVideoDecoder.h │ │ └── PKVideoDecoder.m │ ├── AVPlayer │ │ ├── PKFullScreenPlayerView.h │ │ ├── PKFullScreenPlayerView.m │ │ ├── PKFullScreenPlayerViewController.h │ │ └── PKFullScreenPlayerViewController.m │ └── Categories │ │ ├── UIImage+PKShortVideoPlayer.h │ │ └── UIImage+PKShortVideoPlayer.m ├── ViewController.h ├── ViewController.m └── main.m └── README.md /DevelopPlayerDemo.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 883CB9B31C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 883CB9B21C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.m */; }; 11 | 884C0B541C434C9A008AABAA /* PKLayerContentsPlayerView.m in Sources */ = {isa = PBXBuildFile; fileRef = 884C0B511C434C9A008AABAA /* PKLayerContentsPlayerView.m */; }; 12 | 884C0B551C434C9A008AABAA /* PKLayerVideoDecoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 884C0B531C434C9A008AABAA /* PKLayerVideoDecoder.m */; }; 13 | 884C0B5E1C434D20008AABAA /* PKFullScreenPlayerView.m in Sources */ = {isa = PBXBuildFile; fileRef = 884C0B5B1C434D20008AABAA /* PKFullScreenPlayerView.m */; }; 14 | 884C0B5F1C434D20008AABAA /* PKFullScreenPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 884C0B5D1C434D20008AABAA /* PKFullScreenPlayerViewController.m */; }; 15 | 884C0B621C4353DC008AABAA /* PKOpenGLESViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 884C0B611C4353DC008AABAA /* PKOpenGLESViewController.m */; }; 16 | 88DDEC341C3A568F00FA04E4 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 88DDEC331C3A568F00FA04E4 /* main.m */; }; 17 | 88DDEC371C3A568F00FA04E4 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 88DDEC361C3A568F00FA04E4 /* AppDelegate.m */; }; 18 | 88DDEC3A1C3A568F00FA04E4 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 88DDEC391C3A568F00FA04E4 /* ViewController.m */; }; 19 | 88DDEC3D1C3A568F00FA04E4 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 88DDEC3B1C3A568F00FA04E4 /* Main.storyboard */; }; 20 | 88DDEC3F1C3A568F00FA04E4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 88DDEC3E1C3A568F00FA04E4 /* Assets.xcassets */; }; 21 | 88DDEC421C3A568F00FA04E4 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 
88DDEC401C3A568F00FA04E4 /* LaunchScreen.storyboard */; }; 22 | 88DDEC4A1C3A576A00FA04E4 /* Cat.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 88DDEC491C3A576A00FA04E4 /* Cat.mp4 */; }; 23 | 88DDEC501C3A5D2100FA04E4 /* PKLayerContentsViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 88DDEC4F1C3A5D2100FA04E4 /* PKLayerContentsViewController.m */; }; 24 | AE4066CE1D30E017002273A8 /* GLProgram.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066C11D30E017002273A8 /* GLProgram.m */; }; 25 | AE4066CF1D30E017002273A8 /* GPUImageContext.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066C31D30E017002273A8 /* GPUImageContext.m */; }; 26 | AE4066D01D30E017002273A8 /* GPUImageFramebuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066C51D30E017002273A8 /* GPUImageFramebuffer.m */; }; 27 | AE4066D11D30E017002273A8 /* GPUImageFramebufferCache.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066C71D30E017002273A8 /* GPUImageFramebufferCache.m */; }; 28 | AE4066D21D30E017002273A8 /* PKChatMessagePlayerView.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066C91D30E017002273A8 /* PKChatMessagePlayerView.m */; }; 29 | AE4066D31D30E017002273A8 /* PKColorConversion.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066CB1D30E017002273A8 /* PKColorConversion.m */; }; 30 | AE4066D41D30E017002273A8 /* PKVideoDecoder.m in Sources */ = {isa = PBXBuildFile; fileRef = AE4066CD1D30E017002273A8 /* PKVideoDecoder.m */; }; 31 | /* End PBXBuildFile section */ 32 | 33 | /* Begin PBXFileReference section */ 34 | 883CB9B11C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "UIImage+PKShortVideoPlayer.h"; path = "PKShortVideoPlayer/Categories/UIImage+PKShortVideoPlayer.h"; sourceTree = ""; }; 35 | 883CB9B21C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = 
"UIImage+PKShortVideoPlayer.m"; path = "PKShortVideoPlayer/Categories/UIImage+PKShortVideoPlayer.m"; sourceTree = ""; }; 36 | 884C0B501C434C9A008AABAA /* PKLayerContentsPlayerView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PKLayerContentsPlayerView.h; path = "PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerContentsPlayerView.h"; sourceTree = ""; }; 37 | 884C0B511C434C9A008AABAA /* PKLayerContentsPlayerView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = PKLayerContentsPlayerView.m; path = "PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerContentsPlayerView.m"; sourceTree = ""; }; 38 | 884C0B521C434C9A008AABAA /* PKLayerVideoDecoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PKLayerVideoDecoder.h; path = "PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerVideoDecoder.h"; sourceTree = ""; }; 39 | 884C0B531C434C9A008AABAA /* PKLayerVideoDecoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = PKLayerVideoDecoder.m; path = "PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerVideoDecoder.m"; sourceTree = ""; }; 40 | 884C0B5A1C434D20008AABAA /* PKFullScreenPlayerView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PKFullScreenPlayerView.h; path = PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerView.h; sourceTree = ""; }; 41 | 884C0B5B1C434D20008AABAA /* PKFullScreenPlayerView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = PKFullScreenPlayerView.m; path = PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerView.m; sourceTree = ""; }; 42 | 884C0B5C1C434D20008AABAA /* PKFullScreenPlayerViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PKFullScreenPlayerViewController.h; path = 
PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerViewController.h; sourceTree = ""; }; 43 | 884C0B5D1C434D20008AABAA /* PKFullScreenPlayerViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = PKFullScreenPlayerViewController.m; path = PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerViewController.m; sourceTree = ""; }; 44 | 884C0B601C4353DC008AABAA /* PKOpenGLESViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PKOpenGLESViewController.h; sourceTree = ""; }; 45 | 884C0B611C4353DC008AABAA /* PKOpenGLESViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PKOpenGLESViewController.m; sourceTree = ""; }; 46 | 88DDEC2F1C3A568F00FA04E4 /* DevelopPlayerDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = DevelopPlayerDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; 47 | 88DDEC331C3A568F00FA04E4 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 48 | 88DDEC351C3A568F00FA04E4 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 49 | 88DDEC361C3A568F00FA04E4 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 50 | 88DDEC381C3A568F00FA04E4 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 51 | 88DDEC391C3A568F00FA04E4 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 52 | 88DDEC3C1C3A568F00FA04E4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 53 | 88DDEC3E1C3A568F00FA04E4 /* Assets.xcassets */ = {isa 
= PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 54 | 88DDEC411C3A568F00FA04E4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 55 | 88DDEC431C3A568F00FA04E4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 56 | 88DDEC491C3A576A00FA04E4 /* Cat.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = Cat.mp4; sourceTree = ""; }; 57 | 88DDEC4E1C3A5D2100FA04E4 /* PKLayerContentsViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PKLayerContentsViewController.h; sourceTree = ""; }; 58 | 88DDEC4F1C3A5D2100FA04E4 /* PKLayerContentsViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PKLayerContentsViewController.m; sourceTree = ""; }; 59 | AE4066C01D30E017002273A8 /* GLProgram.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLProgram.h; sourceTree = ""; }; 60 | AE4066C11D30E017002273A8 /* GLProgram.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLProgram.m; sourceTree = ""; }; 61 | AE4066C21D30E017002273A8 /* GPUImageContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageContext.h; sourceTree = ""; }; 62 | AE4066C31D30E017002273A8 /* GPUImageContext.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageContext.m; sourceTree = ""; }; 63 | AE4066C41D30E017002273A8 /* GPUImageFramebuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFramebuffer.h; sourceTree = ""; }; 64 | AE4066C51D30E017002273A8 /* GPUImageFramebuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.c.objc; path = GPUImageFramebuffer.m; sourceTree = ""; }; 65 | AE4066C61D30E017002273A8 /* GPUImageFramebufferCache.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFramebufferCache.h; sourceTree = ""; }; 66 | AE4066C71D30E017002273A8 /* GPUImageFramebufferCache.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFramebufferCache.m; sourceTree = ""; }; 67 | AE4066C81D30E017002273A8 /* PKChatMessagePlayerView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PKChatMessagePlayerView.h; sourceTree = ""; }; 68 | AE4066C91D30E017002273A8 /* PKChatMessagePlayerView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PKChatMessagePlayerView.m; sourceTree = ""; }; 69 | AE4066CA1D30E017002273A8 /* PKColorConversion.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PKColorConversion.h; sourceTree = ""; }; 70 | AE4066CB1D30E017002273A8 /* PKColorConversion.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PKColorConversion.m; sourceTree = ""; }; 71 | AE4066CC1D30E017002273A8 /* PKVideoDecoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PKVideoDecoder.h; sourceTree = ""; }; 72 | AE4066CD1D30E017002273A8 /* PKVideoDecoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PKVideoDecoder.m; sourceTree = ""; }; 73 | /* End PBXFileReference section */ 74 | 75 | /* Begin PBXFrameworksBuildPhase section */ 76 | 88DDEC2C1C3A568F00FA04E4 /* Frameworks */ = { 77 | isa = PBXFrameworksBuildPhase; 78 | buildActionMask = 2147483647; 79 | files = ( 80 | ); 81 | runOnlyForDeploymentPostprocessing = 0; 82 | }; 83 | /* End PBXFrameworksBuildPhase section */ 84 | 85 | /* Begin PBXGroup section */ 
86 | 883CB9B01C3A7869004D37C6 /* Categories */ = { 87 | isa = PBXGroup; 88 | children = ( 89 | 883CB9B11C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.h */, 90 | 883CB9B21C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.m */, 91 | ); 92 | name = Categories; 93 | sourceTree = ""; 94 | }; 95 | 884C0B4B1C434B39008AABAA /* AVAssetReader+Layer.Contents */ = { 96 | isa = PBXGroup; 97 | children = ( 98 | 884C0B501C434C9A008AABAA /* PKLayerContentsPlayerView.h */, 99 | 884C0B511C434C9A008AABAA /* PKLayerContentsPlayerView.m */, 100 | 884C0B521C434C9A008AABAA /* PKLayerVideoDecoder.h */, 101 | 884C0B531C434C9A008AABAA /* PKLayerVideoDecoder.m */, 102 | ); 103 | name = "AVAssetReader+Layer.Contents"; 104 | sourceTree = ""; 105 | }; 106 | 884C0B591C434CFE008AABAA /* AVPlayer */ = { 107 | isa = PBXGroup; 108 | children = ( 109 | 884C0B5A1C434D20008AABAA /* PKFullScreenPlayerView.h */, 110 | 884C0B5B1C434D20008AABAA /* PKFullScreenPlayerView.m */, 111 | 884C0B5C1C434D20008AABAA /* PKFullScreenPlayerViewController.h */, 112 | 884C0B5D1C434D20008AABAA /* PKFullScreenPlayerViewController.m */, 113 | ); 114 | name = AVPlayer; 115 | sourceTree = ""; 116 | }; 117 | 88DDEC261C3A568F00FA04E4 = { 118 | isa = PBXGroup; 119 | children = ( 120 | 88DDEC311C3A568F00FA04E4 /* DevelopPlayerDemo */, 121 | 88DDEC301C3A568F00FA04E4 /* Products */, 122 | ); 123 | sourceTree = ""; 124 | }; 125 | 88DDEC301C3A568F00FA04E4 /* Products */ = { 126 | isa = PBXGroup; 127 | children = ( 128 | 88DDEC2F1C3A568F00FA04E4 /* DevelopPlayerDemo.app */, 129 | ); 130 | name = Products; 131 | sourceTree = ""; 132 | }; 133 | 88DDEC311C3A568F00FA04E4 /* DevelopPlayerDemo */ = { 134 | isa = PBXGroup; 135 | children = ( 136 | 88DDEC541C3A640000FA04E4 /* PKShortVideoPlayer */, 137 | 88DDEC381C3A568F00FA04E4 /* ViewController.h */, 138 | 88DDEC391C3A568F00FA04E4 /* ViewController.m */, 139 | 88DDEC4E1C3A5D2100FA04E4 /* PKLayerContentsViewController.h */, 140 | 88DDEC4F1C3A5D2100FA04E4 /* PKLayerContentsViewController.m */, 
141 | 884C0B601C4353DC008AABAA /* PKOpenGLESViewController.h */, 142 | 884C0B611C4353DC008AABAA /* PKOpenGLESViewController.m */, 143 | 88DDEC351C3A568F00FA04E4 /* AppDelegate.h */, 144 | 88DDEC361C3A568F00FA04E4 /* AppDelegate.m */, 145 | 88DDEC3B1C3A568F00FA04E4 /* Main.storyboard */, 146 | 88DDEC3E1C3A568F00FA04E4 /* Assets.xcassets */, 147 | 88DDEC401C3A568F00FA04E4 /* LaunchScreen.storyboard */, 148 | 88DDEC491C3A576A00FA04E4 /* Cat.mp4 */, 149 | 88DDEC431C3A568F00FA04E4 /* Info.plist */, 150 | 88DDEC321C3A568F00FA04E4 /* Supporting Files */, 151 | ); 152 | path = DevelopPlayerDemo; 153 | sourceTree = ""; 154 | }; 155 | 88DDEC321C3A568F00FA04E4 /* Supporting Files */ = { 156 | isa = PBXGroup; 157 | children = ( 158 | 88DDEC331C3A568F00FA04E4 /* main.m */, 159 | ); 160 | name = "Supporting Files"; 161 | sourceTree = ""; 162 | }; 163 | 88DDEC541C3A640000FA04E4 /* PKShortVideoPlayer */ = { 164 | isa = PBXGroup; 165 | children = ( 166 | 883CB9B01C3A7869004D37C6 /* Categories */, 167 | 884C0B591C434CFE008AABAA /* AVPlayer */, 168 | 884C0B4B1C434B39008AABAA /* AVAssetReader+Layer.Contents */, 169 | AE4066BE1D30E017002273A8 /* AVAssetReader+OpenGL ES */, 170 | ); 171 | name = PKShortVideoPlayer; 172 | sourceTree = ""; 173 | }; 174 | AE4066BE1D30E017002273A8 /* AVAssetReader+OpenGL ES */ = { 175 | isa = PBXGroup; 176 | children = ( 177 | AE4066BF1D30E017002273A8 /* GPUImage */, 178 | AE4066C81D30E017002273A8 /* PKChatMessagePlayerView.h */, 179 | AE4066C91D30E017002273A8 /* PKChatMessagePlayerView.m */, 180 | AE4066CA1D30E017002273A8 /* PKColorConversion.h */, 181 | AE4066CB1D30E017002273A8 /* PKColorConversion.m */, 182 | AE4066CC1D30E017002273A8 /* PKVideoDecoder.h */, 183 | AE4066CD1D30E017002273A8 /* PKVideoDecoder.m */, 184 | ); 185 | name = "AVAssetReader+OpenGL ES"; 186 | path = "PKShortVideoPlayer/AVAssetReader+OpenGL ES"; 187 | sourceTree = ""; 188 | }; 189 | AE4066BF1D30E017002273A8 /* GPUImage */ = { 190 | isa = PBXGroup; 191 | children = ( 192 | 
AE4066C01D30E017002273A8 /* GLProgram.h */, 193 | AE4066C11D30E017002273A8 /* GLProgram.m */, 194 | AE4066C21D30E017002273A8 /* GPUImageContext.h */, 195 | AE4066C31D30E017002273A8 /* GPUImageContext.m */, 196 | AE4066C41D30E017002273A8 /* GPUImageFramebuffer.h */, 197 | AE4066C51D30E017002273A8 /* GPUImageFramebuffer.m */, 198 | AE4066C61D30E017002273A8 /* GPUImageFramebufferCache.h */, 199 | AE4066C71D30E017002273A8 /* GPUImageFramebufferCache.m */, 200 | ); 201 | path = GPUImage; 202 | sourceTree = ""; 203 | }; 204 | /* End PBXGroup section */ 205 | 206 | /* Begin PBXNativeTarget section */ 207 | 88DDEC2E1C3A568F00FA04E4 /* DevelopPlayerDemo */ = { 208 | isa = PBXNativeTarget; 209 | buildConfigurationList = 88DDEC461C3A568F00FA04E4 /* Build configuration list for PBXNativeTarget "DevelopPlayerDemo" */; 210 | buildPhases = ( 211 | 88DDEC2B1C3A568F00FA04E4 /* Sources */, 212 | 88DDEC2C1C3A568F00FA04E4 /* Frameworks */, 213 | 88DDEC2D1C3A568F00FA04E4 /* Resources */, 214 | ); 215 | buildRules = ( 216 | ); 217 | dependencies = ( 218 | ); 219 | name = DevelopPlayerDemo; 220 | productName = DevelopPlayerDemo; 221 | productReference = 88DDEC2F1C3A568F00FA04E4 /* DevelopPlayerDemo.app */; 222 | productType = "com.apple.product-type.application"; 223 | }; 224 | /* End PBXNativeTarget section */ 225 | 226 | /* Begin PBXProject section */ 227 | 88DDEC271C3A568F00FA04E4 /* Project object */ = { 228 | isa = PBXProject; 229 | attributes = { 230 | LastUpgradeCheck = 0720; 231 | ORGANIZATIONNAME = 1yyg; 232 | TargetAttributes = { 233 | 88DDEC2E1C3A568F00FA04E4 = { 234 | CreatedOnToolsVersion = 7.2; 235 | }; 236 | }; 237 | }; 238 | buildConfigurationList = 88DDEC2A1C3A568F00FA04E4 /* Build configuration list for PBXProject "DevelopPlayerDemo" */; 239 | compatibilityVersion = "Xcode 3.2"; 240 | developmentRegion = English; 241 | hasScannedForEncodings = 0; 242 | knownRegions = ( 243 | en, 244 | Base, 245 | ); 246 | mainGroup = 88DDEC261C3A568F00FA04E4; 247 | productRefGroup = 
88DDEC301C3A568F00FA04E4 /* Products */; 248 | projectDirPath = ""; 249 | projectRoot = ""; 250 | targets = ( 251 | 88DDEC2E1C3A568F00FA04E4 /* DevelopPlayerDemo */, 252 | ); 253 | }; 254 | /* End PBXProject section */ 255 | 256 | /* Begin PBXResourcesBuildPhase section */ 257 | 88DDEC2D1C3A568F00FA04E4 /* Resources */ = { 258 | isa = PBXResourcesBuildPhase; 259 | buildActionMask = 2147483647; 260 | files = ( 261 | 88DDEC421C3A568F00FA04E4 /* LaunchScreen.storyboard in Resources */, 262 | 88DDEC3F1C3A568F00FA04E4 /* Assets.xcassets in Resources */, 263 | 88DDEC4A1C3A576A00FA04E4 /* Cat.mp4 in Resources */, 264 | 88DDEC3D1C3A568F00FA04E4 /* Main.storyboard in Resources */, 265 | ); 266 | runOnlyForDeploymentPostprocessing = 0; 267 | }; 268 | /* End PBXResourcesBuildPhase section */ 269 | 270 | /* Begin PBXSourcesBuildPhase section */ 271 | 88DDEC2B1C3A568F00FA04E4 /* Sources */ = { 272 | isa = PBXSourcesBuildPhase; 273 | buildActionMask = 2147483647; 274 | files = ( 275 | 884C0B5F1C434D20008AABAA /* PKFullScreenPlayerViewController.m in Sources */, 276 | 88DDEC3A1C3A568F00FA04E4 /* ViewController.m in Sources */, 277 | 88DDEC371C3A568F00FA04E4 /* AppDelegate.m in Sources */, 278 | 884C0B541C434C9A008AABAA /* PKLayerContentsPlayerView.m in Sources */, 279 | AE4066CF1D30E017002273A8 /* GPUImageContext.m in Sources */, 280 | AE4066D21D30E017002273A8 /* PKChatMessagePlayerView.m in Sources */, 281 | AE4066CE1D30E017002273A8 /* GLProgram.m in Sources */, 282 | 883CB9B31C3A7935004D37C6 /* UIImage+PKShortVideoPlayer.m in Sources */, 283 | 884C0B5E1C434D20008AABAA /* PKFullScreenPlayerView.m in Sources */, 284 | 884C0B621C4353DC008AABAA /* PKOpenGLESViewController.m in Sources */, 285 | AE4066D11D30E017002273A8 /* GPUImageFramebufferCache.m in Sources */, 286 | 88DDEC501C3A5D2100FA04E4 /* PKLayerContentsViewController.m in Sources */, 287 | AE4066D41D30E017002273A8 /* PKVideoDecoder.m in Sources */, 288 | 884C0B551C434C9A008AABAA /* PKLayerVideoDecoder.m in Sources */, 289 
| 88DDEC341C3A568F00FA04E4 /* main.m in Sources */, 290 | AE4066D31D30E017002273A8 /* PKColorConversion.m in Sources */, 291 | AE4066D01D30E017002273A8 /* GPUImageFramebuffer.m in Sources */, 292 | ); 293 | runOnlyForDeploymentPostprocessing = 0; 294 | }; 295 | /* End PBXSourcesBuildPhase section */ 296 | 297 | /* Begin PBXVariantGroup section */ 298 | 88DDEC3B1C3A568F00FA04E4 /* Main.storyboard */ = { 299 | isa = PBXVariantGroup; 300 | children = ( 301 | 88DDEC3C1C3A568F00FA04E4 /* Base */, 302 | ); 303 | name = Main.storyboard; 304 | sourceTree = ""; 305 | }; 306 | 88DDEC401C3A568F00FA04E4 /* LaunchScreen.storyboard */ = { 307 | isa = PBXVariantGroup; 308 | children = ( 309 | 88DDEC411C3A568F00FA04E4 /* Base */, 310 | ); 311 | name = LaunchScreen.storyboard; 312 | sourceTree = ""; 313 | }; 314 | /* End PBXVariantGroup section */ 315 | 316 | /* Begin XCBuildConfiguration section */ 317 | 88DDEC441C3A568F00FA04E4 /* Debug */ = { 318 | isa = XCBuildConfiguration; 319 | buildSettings = { 320 | ALWAYS_SEARCH_USER_PATHS = NO; 321 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 322 | CLANG_CXX_LIBRARY = "libc++"; 323 | CLANG_ENABLE_MODULES = YES; 324 | CLANG_ENABLE_OBJC_ARC = YES; 325 | CLANG_WARN_BOOL_CONVERSION = YES; 326 | CLANG_WARN_CONSTANT_CONVERSION = YES; 327 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 328 | CLANG_WARN_EMPTY_BODY = YES; 329 | CLANG_WARN_ENUM_CONVERSION = YES; 330 | CLANG_WARN_INT_CONVERSION = YES; 331 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 332 | CLANG_WARN_UNREACHABLE_CODE = YES; 333 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 334 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 335 | COPY_PHASE_STRIP = NO; 336 | DEBUG_INFORMATION_FORMAT = dwarf; 337 | ENABLE_STRICT_OBJC_MSGSEND = YES; 338 | ENABLE_TESTABILITY = YES; 339 | GCC_C_LANGUAGE_STANDARD = gnu99; 340 | GCC_DYNAMIC_NO_PIC = NO; 341 | GCC_NO_COMMON_BLOCKS = YES; 342 | GCC_OPTIMIZATION_LEVEL = 0; 343 | GCC_PREPROCESSOR_DEFINITIONS = ( 344 | "DEBUG=1", 345 | "$(inherited)", 346 
| ); 347 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 348 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 349 | GCC_WARN_UNDECLARED_SELECTOR = YES; 350 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 351 | GCC_WARN_UNUSED_FUNCTION = YES; 352 | GCC_WARN_UNUSED_VARIABLE = YES; 353 | IPHONEOS_DEPLOYMENT_TARGET = 7.0; 354 | MTL_ENABLE_DEBUG_INFO = YES; 355 | ONLY_ACTIVE_ARCH = YES; 356 | SDKROOT = iphoneos; 357 | }; 358 | name = Debug; 359 | }; 360 | 88DDEC451C3A568F00FA04E4 /* Release */ = { 361 | isa = XCBuildConfiguration; 362 | buildSettings = { 363 | ALWAYS_SEARCH_USER_PATHS = NO; 364 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 365 | CLANG_CXX_LIBRARY = "libc++"; 366 | CLANG_ENABLE_MODULES = YES; 367 | CLANG_ENABLE_OBJC_ARC = YES; 368 | CLANG_WARN_BOOL_CONVERSION = YES; 369 | CLANG_WARN_CONSTANT_CONVERSION = YES; 370 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 371 | CLANG_WARN_EMPTY_BODY = YES; 372 | CLANG_WARN_ENUM_CONVERSION = YES; 373 | CLANG_WARN_INT_CONVERSION = YES; 374 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 375 | CLANG_WARN_UNREACHABLE_CODE = YES; 376 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 377 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 378 | COPY_PHASE_STRIP = NO; 379 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 380 | ENABLE_NS_ASSERTIONS = NO; 381 | ENABLE_STRICT_OBJC_MSGSEND = YES; 382 | GCC_C_LANGUAGE_STANDARD = gnu99; 383 | GCC_NO_COMMON_BLOCKS = YES; 384 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 385 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 386 | GCC_WARN_UNDECLARED_SELECTOR = YES; 387 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 388 | GCC_WARN_UNUSED_FUNCTION = YES; 389 | GCC_WARN_UNUSED_VARIABLE = YES; 390 | IPHONEOS_DEPLOYMENT_TARGET = 7.0; 391 | MTL_ENABLE_DEBUG_INFO = NO; 392 | SDKROOT = iphoneos; 393 | VALIDATE_PRODUCT = YES; 394 | }; 395 | name = Release; 396 | }; 397 | 88DDEC471C3A568F00FA04E4 /* Debug */ = { 398 | isa = XCBuildConfiguration; 399 | buildSettings = { 400 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 
401 | INFOPLIST_FILE = DevelopPlayerDemo/Info.plist; 402 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 403 | PRODUCT_BUNDLE_IDENTIFIER = com.pepsikirk.DevelopPlayerDemo; 404 | PRODUCT_NAME = "$(TARGET_NAME)"; 405 | }; 406 | name = Debug; 407 | }; 408 | 88DDEC481C3A568F00FA04E4 /* Release */ = { 409 | isa = XCBuildConfiguration; 410 | buildSettings = { 411 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 412 | INFOPLIST_FILE = DevelopPlayerDemo/Info.plist; 413 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 414 | PRODUCT_BUNDLE_IDENTIFIER = com.pepsikirk.DevelopPlayerDemo; 415 | PRODUCT_NAME = "$(TARGET_NAME)"; 416 | }; 417 | name = Release; 418 | }; 419 | /* End XCBuildConfiguration section */ 420 | 421 | /* Begin XCConfigurationList section */ 422 | 88DDEC2A1C3A568F00FA04E4 /* Build configuration list for PBXProject "DevelopPlayerDemo" */ = { 423 | isa = XCConfigurationList; 424 | buildConfigurations = ( 425 | 88DDEC441C3A568F00FA04E4 /* Debug */, 426 | 88DDEC451C3A568F00FA04E4 /* Release */, 427 | ); 428 | defaultConfigurationIsVisible = 0; 429 | defaultConfigurationName = Release; 430 | }; 431 | 88DDEC461C3A568F00FA04E4 /* Build configuration list for PBXNativeTarget "DevelopPlayerDemo" */ = { 432 | isa = XCConfigurationList; 433 | buildConfigurations = ( 434 | 88DDEC471C3A568F00FA04E4 /* Debug */, 435 | 88DDEC481C3A568F00FA04E4 /* Release */, 436 | ); 437 | defaultConfigurationIsVisible = 0; 438 | defaultConfigurationName = Release; 439 | }; 440 | /* End XCConfigurationList section */ 441 | }; 442 | rootObject = 88DDEC271C3A568F00FA04E4 /* Project object */; 443 | } 444 | -------------------------------------------------------------------------------- /DevelopPlayerDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- 
/DevelopPlayerDemo.xcodeproj/project.xcworkspace/xcuserdata/pepsikirk.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pepsikirk/DevelopPlayerDemo/152f9a0ae7d1bc648b18fbae5ecc425743c8cbb5/DevelopPlayerDemo.xcodeproj/project.xcworkspace/xcuserdata/pepsikirk.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /DevelopPlayerDemo.xcodeproj/xcuserdata/jiangxincai.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 8 | 14 | 15 | 16 | 18 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /DevelopPlayerDemo.xcodeproj/xcuserdata/jiangxincai.xcuserdatad/xcschemes/DevelopPlayerDemo.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /DevelopPlayerDemo.xcodeproj/xcuserdata/jiangxincai.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | DevelopPlayerDemo.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 88DDEC2E1C3A568F00FA04E4 16 | 17 | primary 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /DevelopPlayerDemo.xcodeproj/xcuserdata/pepsikirk.xcuserdatad/xcschemes/DevelopPlayerDemo.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 
| 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /DevelopPlayerDemo.xcodeproj/xcuserdata/pepsikirk.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | DevelopPlayerDemo.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 88DDEC2E1C3A568F00FA04E4 16 | 17 | primary 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/4. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/4. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | - (void)applicationWillResignActive:(UIApplication *)application { 24 | // Sent when the application is about to move from active to inactive state. 
This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 26 | } 27 | 28 | - (void)applicationDidEnterBackground:(UIApplication *)application { 29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 31 | } 32 | 33 | - (void)applicationWillEnterForeground:(UIApplication *)application { 34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 35 | } 36 | 37 | - (void)applicationDidBecomeActive:(UIApplication *)application { 38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 39 | } 40 | 41 | - (void)applicationWillTerminate:(UIApplication *)application { 42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
43 | } 44 | 45 | @end 46 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "29x29", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "29x29", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "40x40", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "40x40", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "60x60", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "60x60", 31 | "scale" : "3x" 32 | } 33 | ], 34 | "info" : { 35 | "version" : 1, 36 | "author" : "xcode" 37 | } 38 | } -------------------------------------------------------------------------------- /DevelopPlayerDemo/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 27 | 34 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | -------------------------------------------------------------------------------- 
/DevelopPlayerDemo/Cat.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pepsikirk/DevelopPlayerDemo/152f9a0ae7d1bc648b18fbae5ecc425743c8cbb5/DevelopPlayerDemo/Cat.mp4 -------------------------------------------------------------------------------- /DevelopPlayerDemo/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIMainStoryboardFile 28 | Main 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKLayerContentsViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // AVAssetReaderViewController.h 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/4. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 
7 | // 8 | 9 | #import 10 | 11 | @interface PKLayerContentsViewController : UIViewController 12 | 13 | @property (nonatomic, strong) NSString *videoPath; 14 | @property (nonatomic, strong) UIImage *image; 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKLayerContentsViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // AVAssetReaderViewController.m 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/4. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 7 | // 8 | 9 | #import "PKLayerContentsViewController.h" 10 | #import "PKLayerContentsPlayerView.h" 11 | 12 | @interface PKLayerContentsViewController () 13 | 14 | @property (nonatomic, strong) PKLayerContentsPlayerView *playerView; 15 | 16 | @end 17 | 18 | @implementation PKLayerContentsViewController 19 | 20 | #pragma mark - View Lifecycle 21 | 22 | - (void)viewDidLoad { 23 | [super viewDidLoad]; 24 | self.view.backgroundColor = [UIColor blackColor]; 25 | } 26 | 27 | - (void)viewDidAppear:(BOOL)animated { 28 | [super viewDidAppear:animated]; 29 | CGSize viewSize = self.view.bounds.size; 30 | CGSize imageSize = self.image.size; 31 | 32 | self.playerView = [[PKLayerContentsPlayerView alloc] initWithFrame:CGRectMake(0, 0, viewSize.width, viewSize.width* (imageSize.height/imageSize.width) ) videoPath:self.videoPath previewImage:self.image]; 33 | self.playerView.center = self.view.center; 34 | 35 | [self.view addSubview:self.playerView]; 36 | } 37 | 38 | - (void)viewWillDisappear:(BOOL)animated { 39 | [super viewWillDisappear:animated]; 40 | [self.playerView stop]; 41 | } 42 | 43 | - (void)didReceiveMemoryWarning { 44 | [super didReceiveMemoryWarning]; 45 | // Dispose of any resources that can be recreated. 
}

@end
-------------------------------------------------------------------------------- /DevelopPlayerDemo/PKOpenGLESViewController.h: --------------------------------------------------------------------------------
//
//  PKOpenGLESViewController.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// NOTE(review): the scrape dropped all angle-bracketed text; given the
// UIViewController superclass below, the original import is <UIKit/UIKit.h>.
#import <UIKit/UIKit.h>

// Plays a short video using the OpenGL ES-backed player
// (PKChatMessagePlayerView). Callers set videoPath and image before
// presenting; playback starts in viewDidAppear:.
@interface PKOpenGLESViewController : UIViewController

// Filesystem path of the video to play.
// FIX: `copy` instead of `strong` — NSString has a mutable subclass, so a
// strong reference would let the caller mutate the path behind this
// controller's back. Interface-compatible with existing callers.
@property (nonatomic, copy) NSString *videoPath;

// Preview image shown before playback; its aspect ratio also sizes the
// player view (see viewDidAppear: in the implementation).
@property (nonatomic, strong) UIImage *image;

@end
-------------------------------------------------------------------------------- /DevelopPlayerDemo/PKOpenGLESViewController.m: --------------------------------------------------------------------------------
//
//  PKOpenGLESViewController.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
7 | // 8 | 9 | #import "PKOpenGLESViewController.h" 10 | #import "PKChatMessagePlayerView.h" 11 | 12 | @interface PKOpenGLESViewController () 13 | 14 | @property (nonatomic, strong) PKChatMessagePlayerView *playerView; 15 | 16 | @end 17 | 18 | @implementation PKOpenGLESViewController 19 | 20 | #pragma mark - View Lifecycle 21 | 22 | - (void)viewDidLoad { 23 | [super viewDidLoad]; 24 | self.view.backgroundColor = [UIColor blackColor]; 25 | } 26 | 27 | - (void)viewDidAppear:(BOOL)animated { 28 | [super viewDidAppear:animated]; 29 | CGSize viewSize = self.view.bounds.size; 30 | CGSize imageSize = self.image.size; 31 | 32 | self.playerView = [[PKChatMessagePlayerView alloc] initWithFrame:CGRectMake(0, 0, viewSize.width, viewSize.width* (imageSize.height/imageSize.width) ) videoPath:self.videoPath previewImage:self.image]; 33 | [self.playerView play]; 34 | self.playerView.center = self.view.center; 35 | 36 | [self.view addSubview:self.playerView]; 37 | } 38 | 39 | - (void)viewWillDisappear:(BOOL)animated { 40 | [super viewWillDisappear:animated]; 41 | [self.playerView stop]; 42 | } 43 | 44 | - (void)didReceiveMemoryWarning { 45 | [super didReceiveMemoryWarning]; 46 | // Dispose of any resources that can be recreated. 47 | } 48 | @end 49 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerContentsPlayerView.h: -------------------------------------------------------------------------------- 1 | // 2 | // PKChatMessagePlayerView.h 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/5. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 
7 | // 8 | 9 | #import 10 | 11 | NS_ASSUME_NONNULL_BEGIN 12 | 13 | @interface PKLayerContentsPlayerView : UIView 14 | 15 | - (instancetype)initWithFrame:(CGRect)frame videoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage; 16 | 17 | - (void)stop; 18 | 19 | @end 20 | 21 | NS_ASSUME_NONNULL_END 22 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerContentsPlayerView.m: -------------------------------------------------------------------------------- 1 | // 2 | // PKChatMessagePlayerView.m 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/5. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 7 | // 8 | 9 | #import "PKLayerContentsPlayerView.h" 10 | @import AVFoundation; 11 | #import "PKLayerVideoDecoder.h" 12 | 13 | @interface PKLayerContentsPlayerView () 14 | 15 | @property (nonatomic, strong) NSString *videoPath; 16 | @property (nonatomic, strong) UIImage *previewImage; 17 | 18 | @property (nonatomic, strong) PKLayerVideoDecoder *videoDecoder; 19 | 20 | @end 21 | 22 | @implementation PKLayerContentsPlayerView 23 | 24 | #pragma mark - Initialization 25 | 26 | - (instancetype)initWithFrame:(CGRect)frame videoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage { 27 | NSParameterAssert(videoPath != nil); 28 | NSParameterAssert(previewImage != nil); 29 | 30 | self = [super initWithFrame:frame]; 31 | if (self) { 32 | _videoPath = videoPath; 33 | _previewImage = previewImage; 34 | 35 | _videoDecoder = [[PKLayerVideoDecoder alloc] initWithVideoPath:videoPath size:self.bounds.size]; 36 | _videoDecoder.delegate = self; 37 | _videoDecoder.loop = YES; 38 | [_videoDecoder start]; 39 | } 40 | return self; 41 | } 42 | 43 | - (void)stop { 44 | [self.videoDecoder stop]; 45 | } 46 | 47 | #pragma mark - PKVideoDecoderDelegate 48 | 49 | - (void)videoDecoderDidDecodeFrame:(PKLayerVideoDecoder *)decoder 
pixelBuffer:(CVImageBufferRef)buffer {
    // Convert the decoded frame to a CGImage and display it via the layer's
    // contents on the main queue. `buffer` is only borrowed from the decoder
    // (Get-rule reference) — it must NOT be released here.
    CGImageRef image = [PKLayerContentsPlayerView imageFromSampleBufferRef:buffer];

    if (!image) {
        return;
    }

    dispatch_async(dispatch_get_main_queue(), ^{
        self.layer.contents = (__bridge id)(image);
        CGImageRelease(image); // balances the +1 returned by imageFromSampleBufferRef:
    });
}

- (void)videoDecoderDidFinishDecoding:(PKLayerVideoDecoder *)decoder {

}

#pragma mark - Utility

// Renders the pixel buffer (BGRA layout, per the decoder's output settings)
// into a bitmap context and returns a +1 CGImage that the caller must release
// with CGImageRelease. Returns NULL if context creation fails.
+ (CGImageRef)imageFromSampleBufferRef:(CVImageBufferRef)imageBuffer {
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // FIX: removed CVBufferRelease(imageBuffer). This method does not own the
    // buffer — it comes from CMSampleBufferGetImageBuffer (a Get-rule accessor
    // that does not transfer ownership), so releasing it here over-releases
    // the pixel buffer and can crash once the owning sample buffer is freed.
    // NOTE(review): CGBitmapContextCreateImage is copy-on-write against the
    // context's backing store, which here is the locked pixel-buffer base
    // address — confirm the returned image's bits stay valid after the buffer
    // is unlocked and recycled by the decoder.

    return quartzImage;
}


@end
-------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerVideoDecoder.h: --------------------------------------------------------------------------------
//
//  PKVideoDecoder.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/7.
//  Copyright © 2016年 pepsikirk. All rights reserved.
7 | // 8 | 9 | #import 10 | #import 11 | 12 | @class PKLayerVideoDecoder; 13 | 14 | @protocol PKVideoDecoderDelegate 15 | 16 | @required 17 | - (void)videoDecoderDidDecodeFrame:(PKLayerVideoDecoder *)decoder pixelBuffer:(CVImageBufferRef)buffer; 18 | 19 | @optional 20 | - (void)videoDecoderDidFinishDecoding:(PKLayerVideoDecoder *)decoder; 21 | 22 | @end 23 | 24 | @interface PKLayerVideoDecoder : NSObject 25 | 26 | @property (nonatomic, assign, readonly) BOOL isRunning, isFinished; 27 | @property (nonatomic, assign) BOOL loop; 28 | 29 | @property (nonatomic, weak) id delegate; 30 | 31 | 32 | 33 | - (instancetype)initWithVideoPath:(NSString *)videoPath size:(CGSize)size; 34 | 35 | - (void)start; 36 | 37 | - (void)pause; 38 | 39 | - (void)stop; 40 | 41 | @end 42 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+Layer.Contents/PKLayerVideoDecoder.m: -------------------------------------------------------------------------------- 1 | // 2 | // PKVideoDecoder.m 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/7. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 
7 | // 8 | 9 | #import "PKLayerVideoDecoder.h" 10 | 11 | @import AVFoundation; 12 | 13 | @interface PKLayerVideoDecoder () 14 | 15 | @property (nonatomic, assign) CGSize size; 16 | @property (nonatomic, assign) double frameRate; 17 | @property (nonatomic, assign) double currentTime; 18 | 19 | @property (nonatomic, strong) AVAsset *asset; 20 | @property (nonatomic, strong) AVAssetReader *assetReader; 21 | @property (nonatomic, strong) AVAssetReaderTrackOutput *assetReaderOutput; 22 | 23 | @property (nonatomic, assign) BOOL initFlag; 24 | @property (nonatomic, assign) BOOL resetFlag; 25 | @property (nonatomic, assign) BOOL finishFlag; 26 | 27 | @property (nonatomic, strong) NSTimer *timer; 28 | @property (nonatomic, strong) NSRecursiveLock *lock; 29 | 30 | @end 31 | 32 | @implementation PKLayerVideoDecoder 33 | 34 | #pragma mark - Initialization 35 | 36 | - (instancetype)initWithVideoPath:(NSString *)videoPath size:(CGSize)size { 37 | self = [super init]; 38 | if (self) { 39 | _size = size; 40 | _lock = [[NSRecursiveLock alloc] init]; 41 | 42 | NSDictionary *opts = @{ 43 | AVURLAssetPreferPreciseDurationAndTimingKey : @YES 44 | }; 45 | _asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:opts]; 46 | _frameRate = 30; 47 | } 48 | return self; 49 | } 50 | 51 | - (void)dealloc { 52 | [_lock lock]; 53 | [_timer invalidate]; 54 | [_lock unlock]; 55 | } 56 | 57 | 58 | 59 | #pragma mark - Public 60 | 61 | - (void)start { 62 | [self.lock lock]; 63 | 64 | if( [self isRunning] ){ 65 | [self.lock unlock]; 66 | return; 67 | } 68 | self.initFlag = NO; 69 | [self preprocessForDecoding]; 70 | //定时器按照帧率获取 71 | self.timer = [NSTimer scheduledTimerWithTimeInterval:(1.0/self.frameRate) target:self selector:@selector(captureLoop) userInfo:nil repeats:YES]; 72 | 73 | [self.lock unlock]; 74 | } 75 | 76 | - (void)pause { 77 | [self.lock lock]; 78 | 79 | if( ![self isRunning] ){ 80 | [self.lock unlock]; 81 | return; 82 | } 83 | [self.timer invalidate]; 84 | 
self.timer = nil; 85 | [self processForPausing]; 86 | 87 | [self.lock unlock]; 88 | } 89 | 90 | - (void)stop { 91 | [self.lock lock]; 92 | 93 | self.currentTime = 0; 94 | [self.timer invalidate]; 95 | self.timer = nil; 96 | [self postprocessForDecoding]; 97 | 98 | [self.lock unlock]; 99 | } 100 | 101 | 102 | 103 | #pragma mark - Private 104 | 105 | - (BOOL)isRunning { 106 | return [self.timer isValid]? YES : NO; 107 | } 108 | 109 | - (void)preprocessForDecoding { 110 | [self initReader]; 111 | } 112 | 113 | - (void)postprocessForDecoding { 114 | [self releaseReader]; 115 | } 116 | 117 | - (void)captureLoop { 118 | dispatch_async(dispatch_get_global_queue(0, 0), ^{ 119 | [self captureNext]; 120 | }); 121 | } 122 | 123 | - (void)captureNext { 124 | [self.lock lock]; 125 | 126 | [self processForDecoding]; 127 | 128 | [self.lock unlock]; 129 | } 130 | 131 | - (void)processForDecoding { 132 | if( self.assetReader.status != AVAssetReaderStatusReading ){ 133 | if(self.assetReader.status == AVAssetReaderStatusCompleted ){ 134 | if(!self.loop ){ 135 | [self.timer invalidate]; 136 | self.timer = nil; 137 | 138 | self.resetFlag = YES; 139 | self.currentTime = 0; 140 | [self releaseReader]; 141 | return; 142 | } else { 143 | self.currentTime = 0; 144 | [self initReader]; 145 | } 146 | if (self.delegate && [self.delegate respondsToSelector:@selector(videoDecoderDidFinishDecoding:)]) { 147 | [self.delegate videoDecoderDidFinishDecoding:self]; 148 | } 149 | } 150 | } 151 | 152 | CMSampleBufferRef sampleBuffer = [self.assetReaderOutput copyNextSampleBuffer]; 153 | if(!sampleBuffer ){ 154 | return; 155 | } 156 | self.currentTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); 157 | CVImageBufferRef pixBuff = CMSampleBufferGetImageBuffer(sampleBuffer); 158 | 159 | if (self.delegate && [self.delegate respondsToSelector:@selector(videoDecoderDidDecodeFrame:pixelBuffer:)]) { 160 | [self.delegate videoDecoderDidDecodeFrame:self pixelBuffer:pixBuff]; 161 | } 162 
| 163 | CMSampleBufferInvalidate(sampleBuffer); 164 | } 165 | 166 | - (void)processForPausing { 167 | 168 | } 169 | 170 | - (BOOL)isFinished { 171 | return (self.assetReader.status == AVAssetReaderStatusCompleted) ? YES : NO; 172 | } 173 | 174 | - (void)releaseReader { 175 | self.assetReader = nil; 176 | self.assetReaderOutput = nil; 177 | } 178 | 179 | - (void)initReader { 180 | AVAssetTrack *track = [[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 181 | 182 | NSDictionary *setting = @{ 183 | (id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA), 184 | (id)kCVPixelBufferWidthKey:@(self.size.width), 185 | (id)kCVPixelBufferHeightKey:@(self.size.height), 186 | }; 187 | // NSDictionary *setting = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:self.format] 188 | // forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 189 | self.assetReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:track outputSettings:setting]; 190 | self.frameRate = @(track.nominalFrameRate).doubleValue; 191 | 192 | self.assetReader = [[AVAssetReader alloc] initWithAsset:self.asset error:nil]; 193 | [self.assetReader addOutput:self.assetReaderOutput]; 194 | 195 | CMTime tm = CMTimeMake((int64_t)(self.currentTime*30000), 30000); 196 | [self.assetReader setTimeRange:CMTimeRangeMake(tm,self.asset.duration)]; 197 | 198 | [self.assetReader startReading]; 199 | } 200 | 201 | @end 202 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/GPUImage/GLProgram.h: -------------------------------------------------------------------------------- 1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book. 
2 | // A description of this can be found at his page on the topic: 3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html 4 | // I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders 5 | 6 | #import 7 | 8 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE 9 | #import 10 | #import 11 | #else 12 | #import 13 | #import 14 | #endif 15 | 16 | @interface GLProgram : NSObject 17 | { 18 | NSMutableArray *attributes; 19 | NSMutableArray *uniforms; 20 | GLuint program, 21 | vertShader, 22 | fragShader; 23 | } 24 | 25 | @property(readwrite, nonatomic) BOOL initialized; 26 | @property(readwrite, copy, nonatomic) NSString *vertexShaderLog; 27 | @property(readwrite, copy, nonatomic) NSString *fragmentShaderLog; 28 | @property(readwrite, copy, nonatomic) NSString *programLog; 29 | 30 | - (id)initWithVertexShaderString:(NSString *)vShaderString 31 | fragmentShaderString:(NSString *)fShaderString; 32 | - (id)initWithVertexShaderString:(NSString *)vShaderString 33 | fragmentShaderFilename:(NSString *)fShaderFilename; 34 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename 35 | fragmentShaderFilename:(NSString *)fShaderFilename; 36 | - (void)addAttribute:(NSString *)attributeName; 37 | - (GLuint)attributeIndex:(NSString *)attributeName; 38 | - (GLuint)uniformIndex:(NSString *)uniformName; 39 | - (BOOL)link; 40 | - (void)use; 41 | - (void)validate; 42 | @end 43 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/GPUImage/GLProgram.m: -------------------------------------------------------------------------------- 1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book. 
2 | // A description of this can be found at his page on the topic: 3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html 4 | 5 | 6 | #import "GLProgram.h" 7 | // START:typedefs 8 | #pragma mark Function Pointer Definitions 9 | typedef void (*GLInfoFunction)(GLuint program, GLenum pname, GLint* params); 10 | typedef void (*GLLogFunction) (GLuint program, GLsizei bufsize, GLsizei* length, GLchar* infolog); 11 | // END:typedefs 12 | #pragma mark - 13 | #pragma mark Private Extension Method Declaration 14 | // START:extension 15 | @interface GLProgram() 16 | 17 | - (BOOL)compileShader:(GLuint *)shader 18 | type:(GLenum)type 19 | string:(NSString *)shaderString; 20 | @end 21 | // END:extension 22 | #pragma mark - 23 | 24 | @implementation GLProgram 25 | // START:init 26 | 27 | @synthesize initialized = _initialized; 28 | 29 | - (id)initWithVertexShaderString:(NSString *)vShaderString 30 | fragmentShaderString:(NSString *)fShaderString; 31 | { 32 | if ((self = [super init])) 33 | { 34 | _initialized = NO; 35 | 36 | attributes = [[NSMutableArray alloc] init]; 37 | uniforms = [[NSMutableArray alloc] init]; 38 | program = glCreateProgram(); 39 | 40 | if (![self compileShader:&vertShader 41 | type:GL_VERTEX_SHADER 42 | string:vShaderString]) 43 | { 44 | NSLog(@"Failed to compile vertex shader"); 45 | } 46 | 47 | // Create and compile fragment shader 48 | if (![self compileShader:&fragShader 49 | type:GL_FRAGMENT_SHADER 50 | string:fShaderString]) 51 | { 52 | NSLog(@"Failed to compile fragment shader"); 53 | } 54 | 55 | glAttachShader(program, vertShader); 56 | glAttachShader(program, fragShader); 57 | } 58 | 59 | return self; 60 | } 61 | 62 | - (id)initWithVertexShaderString:(NSString *)vShaderString 63 | fragmentShaderFilename:(NSString *)fShaderFilename; 64 | { 65 | NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"]; 66 | NSString *fragmentShaderString = [NSString 
stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil]; 67 | 68 | if ((self = [self initWithVertexShaderString:vShaderString fragmentShaderString:fragmentShaderString])) 69 | { 70 | } 71 | 72 | return self; 73 | } 74 | 75 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename 76 | fragmentShaderFilename:(NSString *)fShaderFilename; 77 | { 78 | NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@"vsh"]; 79 | NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil]; 80 | 81 | NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"]; 82 | NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil]; 83 | 84 | if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString])) 85 | { 86 | } 87 | 88 | return self; 89 | } 90 | // END:init 91 | // START:compile 92 | - (BOOL)compileShader:(GLuint *)shader 93 | type:(GLenum)type 94 | string:(NSString *)shaderString 95 | { 96 | // CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); 97 | 98 | GLint status; 99 | const GLchar *source; 100 | 101 | source = 102 | (GLchar *)[shaderString UTF8String]; 103 | if (!source) 104 | { 105 | NSLog(@"Failed to load vertex shader"); 106 | return NO; 107 | } 108 | 109 | *shader = glCreateShader(type); 110 | glShaderSource(*shader, 1, &source, NULL); 111 | glCompileShader(*shader); 112 | 113 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); 114 | 115 | if (status != GL_TRUE) 116 | { 117 | GLint logLength; 118 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); 119 | if (logLength > 0) 120 | { 121 | GLchar *log = (GLchar *)malloc(logLength); 122 | glGetShaderInfoLog(*shader, logLength, &logLength, log); 123 | if (shader == &vertShader) 124 | { 125 | self.vertexShaderLog = [NSString 
stringWithFormat:@"%s", log]; 126 | } 127 | else 128 | { 129 | self.fragmentShaderLog = [NSString stringWithFormat:@"%s", log]; 130 | } 131 | 132 | free(log); 133 | } 134 | } 135 | 136 | // CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime); 137 | // NSLog(@"Compiled in %f ms", linkTime * 1000.0); 138 | 139 | return status == GL_TRUE; 140 | } 141 | // END:compile 142 | #pragma mark - 143 | // START:addattribute 144 | - (void)addAttribute:(NSString *)attributeName 145 | { 146 | if (![attributes containsObject:attributeName]) 147 | { 148 | [attributes addObject:attributeName]; 149 | glBindAttribLocation(program, 150 | (GLuint)[attributes indexOfObject:attributeName], 151 | [attributeName UTF8String]); 152 | } 153 | } 154 | // END:addattribute 155 | // START:indexmethods 156 | - (GLuint)attributeIndex:(NSString *)attributeName 157 | { 158 | return (GLuint)[attributes indexOfObject:attributeName]; 159 | } 160 | - (GLuint)uniformIndex:(NSString *)uniformName 161 | { 162 | return glGetUniformLocation(program, [uniformName UTF8String]); 163 | } 164 | // END:indexmethods 165 | #pragma mark - 166 | // START:link 167 | - (BOOL)link 168 | { 169 | // CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); 170 | 171 | GLint status; 172 | 173 | glLinkProgram(program); 174 | 175 | glGetProgramiv(program, GL_LINK_STATUS, &status); 176 | if (status == GL_FALSE) 177 | return NO; 178 | 179 | if (vertShader) 180 | { 181 | glDeleteShader(vertShader); 182 | vertShader = 0; 183 | } 184 | if (fragShader) 185 | { 186 | glDeleteShader(fragShader); 187 | fragShader = 0; 188 | } 189 | 190 | self.initialized = YES; 191 | 192 | // CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime); 193 | // NSLog(@"Linked in %f ms", linkTime * 1000.0); 194 | 195 | return YES; 196 | } 197 | // END:link 198 | // START:use 199 | - (void)use 200 | { 201 | glUseProgram(program); 202 | } 203 | // END:use 204 | #pragma mark - 205 | 206 | - (void)validate; 207 | { 208 | GLint logLength; 
209 | 210 | glValidateProgram(program); 211 | glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength); 212 | if (logLength > 0) 213 | { 214 | GLchar *log = (GLchar *)malloc(logLength); 215 | glGetProgramInfoLog(program, logLength, &logLength, log); 216 | self.programLog = [NSString stringWithFormat:@"%s", log]; 217 | free(log); 218 | } 219 | } 220 | 221 | #pragma mark - 222 | // START:dealloc 223 | - (void)dealloc 224 | { 225 | if (vertShader) 226 | glDeleteShader(vertShader); 227 | 228 | if (fragShader) 229 | glDeleteShader(fragShader); 230 | 231 | if (program) 232 | glDeleteProgram(program); 233 | 234 | } 235 | // END:dealloc 236 | @end 237 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/GPUImage/GPUImageContext.h: -------------------------------------------------------------------------------- 1 | #import "GLProgram.h" 2 | #import "GPUImageFramebuffer.h" 3 | #import "GPUImageFramebufferCache.h" 4 | 5 | void runSynchronouslyOnVideoProcessingQueue(void (^block)(void)); 6 | void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void)); 7 | 8 | #define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal) 9 | 10 | typedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180 } GPUImageRotationMode; 11 | 12 | @interface GPUImageContext : NSObject 13 | 14 | @property(readonly, nonatomic) dispatch_queue_t contextQueue; 15 | @property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram; 16 | @property(readonly, retain, nonatomic) EAGLContext *context; 17 | @property(readonly) CVOpenGLESTextureCacheRef coreVideoTextureCache; 18 | 
/// Runs `block` on the shared GPUImage video-processing queue and waits for
/// it to finish. If the caller is ALREADY executing on that queue, the block
/// is invoked inline instead — dispatch_sync onto the current serial queue
/// would deadlock.
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
#if !OS_OBJECT_USE_OBJC
    // Older runtimes: dispatch_get_current_queue() is deprecated but is the
    // only current-queue check available here; silence the warning locally.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
    // Modern runtimes: the context queue tags itself with a queue-specific
    // key at creation (see -[GPUImageContext init]); a non-NULL value means
    // we are already on the processing queue.
    if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
    {
        block();
    }else
    {
        dispatch_sync(videoProcessingQueue, block);
    }
}
// Based on Colin Wheeler's example here: http://cocoasamurai.blogspot.com/2011/04/singletons-your-doing-them-wrong.html
/// Process-wide GPUImageContext singleton, created exactly once in a
/// thread-safe way via dispatch_once.
+ (GPUImageContext *)sharedImageProcessingContext;
{
    static dispatch_once_t onceToken;
    static GPUImageContext *singletonContext = nil;

    dispatch_once(&onceToken, ^{
        singletonContext = [[[self class] alloc] init];
    });

    return singletonContext;
}
/// Returns whether the device's GL driver advertises the named OpenGL ES
/// extension. The extension list is queried once and cached, since it
/// cannot change for a given device.
/// @param extension The extension name, e.g. @"GL_EXT_texture_rg".
+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;
{
    static dispatch_once_t pred;
    static NSArray *extensionNames = nil;

    // Cache extensions for later quick reference, since this won't change for a given device
    dispatch_once(&pred, ^{
        [GPUImageContext useImageProcessingContext];
        // glGetString() can return NULL (no current context, or GL error);
        // -stringWithCString:encoding: crashes on NULL, so guard it.
        const char *rawExtensions = (const char *)glGetString(GL_EXTENSIONS);
        if (rawExtensions != NULL)
        {
            NSString *extensionsString = [NSString stringWithCString:rawExtensions encoding:NSASCIIStringEncoding];
            extensionNames = [extensionsString componentsSeparatedByString:@" "];
        }
        else
        {
            extensionNames = @[];
        }
    });

    return [extensionNames containsObject:extension];
}
/// Scales `inputSize` down (preserving aspect ratio) so both dimensions fit
/// within the device's maximum texture size. Sizes already within the limit
/// are returned unchanged.
+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;
{
    GLint textureSizeLimit = [self maximumTextureSizeForThisDevice];
    if ((inputSize.width < textureSizeLimit) && (inputSize.height < textureSizeLimit))
    {
        // Already fits — nothing to do.
        return inputSize;
    }

    CGSize scaledSize;
    if (inputSize.width > inputSize.height)
    {
        // Wider than tall: pin the width to the limit, scale height to match.
        scaledSize.width = (CGFloat)textureSizeLimit;
        scaledSize.height = ((CGFloat)textureSizeLimit / inputSize.width) * inputSize.height;
    }
    else
    {
        // Taller than wide (or square): pin the height instead.
        scaledSize.height = (CGFloat)textureSizeLimit;
        scaledSize.width = ((CGFloat)textureSizeLimit / inputSize.height) * inputSize.width;
    }

    return scaledSize;
}
/// Lazily creates the EAGLContext on first access, makes it current, and
/// applies pipeline-wide GL defaults (depth testing disabled — the image
/// pipeline is purely 2D).
- (EAGLContext *)context;
{
    if (_context != nil)
    {
        return _context;
    }

    _context = [self createContext];
    [EAGLContext setCurrentContext:_context];

    // Set up a few global settings for the image processing pipeline
    glDisable(GL_DEPTH_TEST);

    return _context;
}
/// Lazily instantiated cache of reusable framebuffers for this context.
- (GPUImageFramebufferCache *)framebufferCache;
{
    if (!_framebufferCache)
    {
        _framebufferCache = [[GPUImageFramebufferCache alloc] init];
    }

    return _framebufferCache;
}
counting 42 | - (void)lock; 43 | - (void)unlock; 44 | - (void)clearAllLocks; 45 | - (void)disableReferenceCounting; 46 | - (void)enableReferenceCounting; 47 | 48 | // Image capture 49 | - (CGImageRef)newCGImageFromFramebufferContents; 50 | - (void)restoreRenderTarget; 51 | 52 | // Raw data bytes 53 | - (void)lockForReading; 54 | - (void)unlockAfterReading; 55 | - (NSUInteger)bytesPerRow; 56 | - (GLubyte *)byteBuffer; 57 | 58 | @end 59 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/GPUImage/GPUImageFramebuffer.m: -------------------------------------------------------------------------------- 1 | #import "GPUImageFramebuffer.h" 2 | #import "GPUImageContext.h" 3 | 4 | @interface GPUImageFramebuffer() 5 | { 6 | GLuint framebuffer; 7 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE 8 | CVPixelBufferRef renderTarget; 9 | CVOpenGLESTextureRef renderTexture; 10 | NSUInteger readLockCount; 11 | #else 12 | #endif 13 | NSUInteger framebufferReferenceCount; 14 | BOOL referenceCountingDisabled; 15 | } 16 | 17 | - (void)generateFramebuffer; 18 | - (void)generateTexture; 19 | - (void)destroyFramebuffer; 20 | 21 | @end 22 | 23 | void dataProviderReleaseCallback (void *info, const void *data, size_t size); 24 | void dataProviderUnlockCallback (void *info, const void *data, size_t size); 25 | 26 | @implementation GPUImageFramebuffer 27 | 28 | @synthesize size = _size; 29 | @synthesize textureOptions = _textureOptions; 30 | @synthesize texture = _texture; 31 | @synthesize missingFramebuffer = _missingFramebuffer; 32 | 33 | #pragma mark - 34 | #pragma mark Initialization and teardown 35 | 36 | - (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture; 37 | { 38 | if (!(self = [super init])) 39 | { 40 | return nil; 41 | } 42 | 43 | _textureOptions = fboTextureOptions; 44 | _size = framebufferSize; 45 | 
/// Convenience initializer: a framebuffer-backed texture of the given size
/// using the stock texture options (linear filtering, clamp-to-edge
/// wrapping, RGBA internal format with BGRA byte layout).
- (id)initWithSize:(CGSize)framebufferSize;
{
    GPUTextureOptions stockOptions;
    stockOptions.minFilter = GL_LINEAR;
    stockOptions.magFilter = GL_LINEAR;
    stockOptions.wrapS = GL_CLAMP_TO_EDGE;
    stockOptions.wrapT = GL_CLAMP_TO_EDGE;
    stockOptions.internalFormat = GL_RGBA;
    stockOptions.format = GL_BGRA;
    stockOptions.type = GL_UNSIGNED_BYTE;

    self = [self initWithSize:framebufferSize textureOptions:stockOptions onlyTexture:NO];
    return self;
}
/// Creates the GL framebuffer object and its backing texture, always on the
/// shared video-processing queue. On devices with texture-cache support the
/// texture is backed by an IOSurface-mapped CVPixelBuffer (`renderTarget`),
/// which lets the CPU read rendered pixels without glReadPixels; otherwise a
/// plain GL texture is allocated and attached.
- (void)generateFramebuffer;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        
        glGenFramebuffers(1, &framebuffer);
        glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
        
        // By default, all framebuffers on iOS 5.0+ devices are backed by texture caches, using one shared cache
        if ([GPUImageContext supportsFastTextureUpload])
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            CVOpenGLESTextureCacheRef coreVideoTextureCache = [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache];
            // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
            
            CFDictionaryRef empty; // empty value for attr value.
            CFMutableDictionaryRef attrs;
            // An empty IOSurface-properties dictionary is required for the
            // pixel buffer to be IOSurface-backed (and hence GPU-shareable).
            empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary
            attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
            CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);
            
            CVReturn err = CVPixelBufferCreate(kCFAllocatorDefault, (int)_size.width, (int)_size.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget);
            if (err)
            {
                NSLog(@"FBO size: %f, %f", _size.width, _size.height);
                NSAssert(NO, @"Error at CVPixelBufferCreate %d", err);
            }
            
            // Map the pixel buffer into a GL texture via the shared texture cache.
            err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                                NULL, // texture attributes
                                                                GL_TEXTURE_2D,
                                                                _textureOptions.internalFormat, // opengl format
                                                                (int)_size.width,
                                                                (int)_size.height,
                                                                _textureOptions.format, // native iOS format
                                                                _textureOptions.type,
                                                                0,
                                                                &renderTexture);
            if (err)
            {
                NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            CFRelease(attrs);
            CFRelease(empty);
            
            glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
            _texture = CVOpenGLESTextureGetName(renderTexture);
            // Clamp wrapping is required for non-power-of-two textures.
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);
            
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
#endif
        }
        else
        {
            // No texture-cache support: plain GL texture as the color attachment.
            [self generateTexture];
            
            glBindTexture(GL_TEXTURE_2D, _texture);
            
            glTexImage2D(GL_TEXTURE_2D, 0, _textureOptions.internalFormat, (int)_size.width, (int)_size.height, 0, _textureOptions.format, _textureOptions.type, 0);
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _texture, 0);
        }
        
#ifndef NS_BLOCK_ASSERTIONS
        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
#endif
        
        glBindTexture(GL_TEXTURE_2D, 0);
    });
}
/// Creates a CGImage from the framebuffer's current contents. Follows the
/// Core Foundation "Create" rule: the caller owns the returned CGImageRef
/// and must CGImageRelease() it.
///
/// Fast-texture-upload path: the CGImage reads directly from the
/// IOSurface-backed pixel buffer; `self` is handed to the data provider via
/// __bridge_retained and released in dataProviderUnlockCallback, keeping
/// the buffer alive for the CGImage's lifetime. Fallback path: pixels are
/// copied out with glReadPixels into a malloc'd buffer freed by
/// dataProviderReleaseCallback.
- (CGImageRef)newCGImageFromFramebufferContents;
{
    // a CGImage can only be created from a 'normal' color texture
    NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
    NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
    
    __block CGImageRef cgImageFromBytes;
    
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        
        NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
        // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache
        
        GLubyte *rawImagePixels;
        
        CGDataProviderRef dataProvider = NULL;
        if ([GPUImageContext supportsFastTextureUpload])
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            // Row stride may exceed width*4 due to texture-cache padding.
            NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0;
            NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;
            
            // Ensure the GPU has finished rendering before the CPU reads.
            glFinish();
            CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
            [self lockForReading];
            rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
            dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
#else
#endif
        }
        else
        {
            // Slow path: copy the pixels out of the framebuffer; the provider
            // frees the malloc'd copy when the CGImage is destroyed.
            [self activateFramebuffer];
            rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
            glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
            dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
            [self unlock]; // Don't need to keep this around anymore
        }
        
        CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
        
        if ([GPUImageContext supportsFastTextureUpload])
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            // BGRA little-endian, premultiplied alpha — matches the pixel
            // buffer's kCVPixelFormatType_32BGRA layout.
            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
#else
#endif
        }
        else
        {
            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
        }
        
        // Capture image with current device orientation
        CGDataProviderRelease(dataProvider);
        CGColorSpaceRelease(defaultRGBColorSpace);
        
    });
    
    return cgImageFromBytes;
}
/// The raw OpenGL texture name backing this framebuffer.
- (GLuint)texture;
{
    return _texture;
}
/// Unregisters the memory-warning observation set up in -init.
///
/// The observer was registered with the block-based
/// -addObserverForName:object:queue:usingBlock: API, which returns an opaque
/// token (stored in `memoryWarningObserver`). Calling removeObserver:self
/// does NOT unregister that token — the token itself must be removed, or
/// the observation block leaks and keeps firing with a dangling
/// __unsafe_unretained pointer to this deallocated cache.
- (void)dealloc;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    if (memoryWarningObserver != nil)
    {
        [[NSNotificationCenter defaultCenter] removeObserver:memoryWarningObserver];
        memoryWarningObserver = nil;
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#else
#endif
}
| else 75 | { 76 | return [NSString stringWithFormat:@"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type]; 77 | } 78 | } 79 | 80 | - (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture; 81 | { 82 | __block GPUImageFramebuffer *framebufferFromCache = nil; 83 | // dispatch_sync(framebufferCacheQueue, ^{ 84 | runSynchronouslyOnVideoProcessingQueue(^{ 85 | NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture]; 86 | NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash]; 87 | NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue]; 88 | 89 | if ([numberOfMatchingTexturesInCache integerValue] < 1) 90 | { 91 | // Nothing in the cache, create a new framebuffer to use 92 | framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture]; 93 | } 94 | else 95 | { 96 | // Something found, pull the old framebuffer and decrement the count 97 | NSInteger currentTextureID = (numberOfMatchingTextures - 1); 98 | while ((framebufferFromCache == nil) && (currentTextureID >= 0)) 99 | { 100 | NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)currentTextureID]; 101 | framebufferFromCache = [framebufferCache objectForKey:textureHash]; 102 | // Test the values in the cache first, to see if they got invalidated behind our back 103 | if (framebufferFromCache != nil) 104 | { 105 | // Withdraw this from the cache while it's in use 106 | [framebufferCache removeObjectForKey:textureHash]; 107 | } 108 | currentTextureID--; 109 | } 110 | 111 | currentTextureID++; 112 | 113 | [framebufferTypeCounts 
setObject:[NSNumber numberWithInteger:currentTextureID] forKey:lookupHash]; 114 | 115 | if (framebufferFromCache == nil) 116 | { 117 | framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture]; 118 | } 119 | } 120 | }); 121 | 122 | [framebufferFromCache lock]; 123 | return framebufferFromCache; 124 | } 125 | 126 | - (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture; 127 | { 128 | GPUTextureOptions defaultTextureOptions; 129 | defaultTextureOptions.minFilter = GL_LINEAR; 130 | defaultTextureOptions.magFilter = GL_LINEAR; 131 | defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE; 132 | defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE; 133 | defaultTextureOptions.internalFormat = GL_RGBA; 134 | defaultTextureOptions.format = GL_BGRA; 135 | defaultTextureOptions.type = GL_UNSIGNED_BYTE; 136 | 137 | return [self fetchFramebufferForSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:onlyTexture]; 138 | } 139 | 140 | - (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer; 141 | { 142 | [framebuffer clearAllLocks]; 143 | 144 | // dispatch_async(framebufferCacheQueue, ^{ 145 | runAsynchronouslyOnVideoProcessingQueue(^{ 146 | CGSize framebufferSize = framebuffer.size; 147 | GPUTextureOptions framebufferTextureOptions = framebuffer.textureOptions; 148 | NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:framebufferTextureOptions onlyTexture:framebuffer.missingFramebuffer]; 149 | NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash]; 150 | NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue]; 151 | 152 | NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)numberOfMatchingTextures]; 153 | 154 | // [framebufferCache setObject:framebuffer forKey:textureHash cost:round(framebufferSize.width * framebufferSize.height 
* 4.0)]; 155 | [framebufferCache setObject:framebuffer forKey:textureHash]; 156 | [framebufferTypeCounts setObject:[NSNumber numberWithInteger:(numberOfMatchingTextures + 1)] forKey:lookupHash]; 157 | }); 158 | } 159 | 160 | - (void)purgeAllUnassignedFramebuffers; 161 | { 162 | runAsynchronouslyOnVideoProcessingQueue(^{ 163 | // dispatch_async(framebufferCacheQueue, ^{ 164 | [framebufferCache removeAllObjects]; 165 | [framebufferTypeCounts removeAllObjects]; 166 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE 167 | CVOpenGLESTextureCacheFlush([[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], 0); 168 | #else 169 | #endif 170 | }); 171 | } 172 | 173 | - (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer; 174 | { 175 | runAsynchronouslyOnVideoProcessingQueue(^{ 176 | // dispatch_async(framebufferCacheQueue, ^{ 177 | [activeImageCaptureList addObject:framebuffer]; 178 | }); 179 | } 180 | 181 | - (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer; 182 | { 183 | runAsynchronouslyOnVideoProcessingQueue(^{ 184 | // dispatch_async(framebufferCacheQueue, ^{ 185 | [activeImageCaptureList removeObject:framebuffer]; 186 | }); 187 | } 188 | 189 | @end 190 | -------------------------------------------------------------------------------- /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/PKChatMessagePlayerView.h: -------------------------------------------------------------------------------- 1 | // 2 | // PKChatMessagePlayerView.h 3 | // DevelopPlayerDemo 4 | // 5 | // Created by jiangxincai on 16/1/11. 6 | // Copyright © 2016年 pepsikirk. All rights reserved. 
//

// NOTE(review): the original #import target was lost in extraction; UIKit is
// required for UIView/UIImage — confirm against the repository.
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// A looping chat-bubble video player that renders decoded frames into its
/// own CAEAGLLayer via OpenGL ES.
@interface PKChatMessagePlayerView : UIView

/// The backing render-target size in pixels.
@property (readonly, nonatomic) CGSize sizeInPixels;

/// Designated initializer.
/// @param frame        Initial frame; also determines the decoder output size.
/// @param videoPath    Path to the local video file to play. Must not be nil.
/// @param previewImage Placeholder image. Must not be nil.
- (instancetype)initWithFrame:(CGRect)frame videoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage;

/// Starts decoding and rendering.
- (void)play;
/// Cancels decoding.
- (void)stop;

@end

NS_ASSUME_NONNULL_END

// ============================================================================
// /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/PKChatMessagePlayerView.m
// ============================================================================
//
//  PKChatMessagePlayerView.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "PKChatMessagePlayerView.h"
#import "PKVideoDecoder.h"
#import "GPUImageContext.h"
#import "PKColorConversion.h"

// FIX: declare PKVideoDecoderDelegate conformance — this class assigns itself
// as the decoder's delegate and implements the delegate callbacks, but the
// original extension never declared the protocol (compiler warning on the
// `_decoder.delegate = self` assignment).
@interface PKChatMessagePlayerView () <PKVideoDecoderDelegate> {
    GLuint displayRenderbuffer, displayFramebuffer;
    GLint displayPositionAttribute, displayTextureCoordinateAttribute;
    GLint displayInputTextureUniform;
    GLfloat imageVertices[8];
    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
}

@property (nonatomic, strong) GLProgram *displayProgram;
@property (nonatomic, strong) GPUImageFramebuffer *inputFramebufferForDisplay;

@property (nonatomic, assign) CGSize inputImageSize;
@property (nonatomic, assign) CGSize boundsSizeAtFrameBufferEpoch;

// FIX: NSString property uses `copy` to guard against mutable subclasses.
@property (nonatomic, copy) NSString *videoPath;
@property (nonatomic, strong) UIImage *previewImage;
@property (nonatomic, assign) GPUImageRotationMode rotationMode;

@property (nonatomic, readwrite) CGSize sizeInPixels;

@property (nonatomic, strong) PKVideoDecoder *decoder;

@end

@implementation PKChatMessagePlayerView

#pragma mark - Initialization

// Back this view with an OpenGL ES-capable layer.
+ (Class)layerClass {
    return [CAEAGLLayer class];
}

- (instancetype)initWithFrame:(CGRect)frame videoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage {
    NSParameterAssert(videoPath != nil);
    NSParameterAssert(previewImage != nil);

    self = [super initWithFrame:frame];
    if (self) {
        _videoPath = videoPath;
        _previewImage = previewImage;

        [self commonInit];

        _decoder = [[PKVideoDecoder alloc] initWithVideoPath:videoPath size:frame.size];
        _decoder.delegate = self;
    }
    return self;
}

// One-time GL/layer setup: scale factor, rotation mode from the video track's
// preferred transform, EAGL layer configuration, shader program + FBO.
- (void)commonInit {
    // Set scaling to account for Retina display
    if ([self respondsToSelector:@selector(setContentScaleFactor:)]) {
        self.contentScaleFactor = [[UIScreen mainScreen] scale];
    }

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:self.videoPath] options:nil];
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];

    if (videoTracks.count > 0) {
        AVAssetTrack *videoTrack = videoTracks[0];
        CGAffineTransform transform = videoTrack.preferredTransform;

        // Map the track's preferred transform onto one of four fixed rotations.
        if (transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0) {
            self.rotationMode = kGPUImageRotateRight;       // Portrait
        } else if (transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0) {
            self.rotationMode = kGPUImageRotateLeft;        // PortraitUpsideDown
        } else if (transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0) {
            self.rotationMode = kGPUImageNoRotation;        // LandscapeRight
        } else if (transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0) {
            self.rotationMode = kGPUImageRotate180;         // LandscapeLeft
        }
    }

    self.opaque = YES;
    self.hidden = NO;
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
    eaglLayer.opaque = YES;
    eaglLayer.drawableProperties = @{
        kEAGLDrawablePropertyRetainedBacking : @(NO),
        kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8,
    };

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        self.displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
        if (!self.displayProgram.initialized) {
            [self.displayProgram addAttribute:@"position"];
            [self.displayProgram addAttribute:@"inputTextureCoordinate"];

            if (![self.displayProgram link]) {
                NSString *progLog = [self.displayProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [self.displayProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [self.displayProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                self.displayProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        displayPositionAttribute = [self.displayProgram attributeIndex:@"position"];
        displayTextureCoordinateAttribute = [self.displayProgram attributeIndex:@"inputTextureCoordinate"];
        // This does assume a name of "inputImageTexture" for the fragment shader
        displayInputTextureUniform = [self.displayProgram uniformIndex:@"inputImageTexture"];

        [GPUImageContext setActiveShaderProgram:self.displayProgram];
        glEnableVertexAttribArray(displayPositionAttribute);
        glEnableVertexAttribArray(displayTextureCoordinateAttribute);

        [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];

        [self createDisplayFramebuffer];
    });
}

- (void)layoutSubviews {
    [super layoutSubviews];

    // The frame buffer needs to be trashed and re-created when the view size changes.
    if (!CGSizeEqualToSize(self.bounds.size, self.boundsSizeAtFrameBufferEpoch) &&
        !CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {
        runSynchronouslyOnVideoProcessingQueue(^{
            [self destroyDisplayFramebuffer];
            [self createDisplayFramebuffer];
        });
    } else if (!CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {
        [self recalculateViewGeometry];
    }
}

- (void)dealloc {
    runSynchronouslyOnVideoProcessingQueue(^{
        [self destroyDisplayFramebuffer];
    });
    [_decoder cancelProcessing];
}

#pragma mark - Public

- (void)play {
    [self.decoder startProcessing];
}

- (void)stop {
    [self.decoder cancelProcessing];
}

#pragma mark Managing the display FBOs

// Creates the on-screen framebuffer/renderbuffer pair backed by the EAGL layer
// and records the backing store's pixel dimensions.
- (void)createDisplayFramebuffer {
    [GPUImageContext useImageProcessingContext];

    glGenFramebuffers(1, &displayFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);

    glGenRenderbuffers(1, &displayRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);

    [[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];

    GLint backingWidth, backingHeight;
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);

    // A zero-sized backing store means the layer isn't ready yet; back out.
    if ((backingWidth == 0) || (backingHeight == 0)) {
        [self destroyDisplayFramebuffer];
        return;
    }

    _sizeInPixels.width = (CGFloat)backingWidth;
    _sizeInPixels.height = (CGFloat)backingHeight;

    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);

    __unused GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation for display of size: %f, %f", self.bounds.size.width, self.bounds.size.height);
    self.boundsSizeAtFrameBufferEpoch = self.bounds.size;

    [self recalculateViewGeometry];
}

- (void)destroyDisplayFramebuffer {
    [GPUImageContext useImageProcessingContext];

    if (displayFramebuffer) {
        glDeleteFramebuffers(1, &displayFramebuffer);
        displayFramebuffer = 0;
    }

    if (displayRenderbuffer) {
        glDeleteRenderbuffers(1, &displayRenderbuffer);
        displayRenderbuffer = 0;
    }
}

// Binds (creating on demand) the display framebuffer and sets the viewport.
- (void)setDisplayFramebuffer {
    if (!displayFramebuffer) {
        [self createDisplayFramebuffer];
    }

    glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
    glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height);
}

- (void)presentFramebuffer {
    glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
    [[GPUImageContext sharedImageProcessingContext] presentBufferForDisplay];
}

#pragma mark Handling fill mode

// Recomputes the quad vertices so the input image is aspect-fit inside the
// view's bounds (normalized device coordinates, -1..1).
- (void)recalculateViewGeometry {
    runSynchronouslyOnVideoProcessingQueue(^{
        CGSize currentViewSize = self.bounds.size;
        CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(self.inputImageSize, self.bounds);

        CGFloat widthScaling = insetRect.size.width / currentViewSize.width;
        CGFloat heightScaling = insetRect.size.height / currentViewSize.height;

        imageVertices[0] = -widthScaling;
        imageVertices[1] = -heightScaling;
        imageVertices[2] = widthScaling;
        imageVertices[3] = -heightScaling;
        imageVertices[4] = -widthScaling;
        imageVertices[5] = heightScaling;
        imageVertices[6] = widthScaling;
        imageVertices[7] = heightScaling;
    });
}

-
(void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent {
    // Stored and used as the glClearColor for each rendered frame.
    backgroundColorRed = redComponent;
    backgroundColorGreen = greenComponent;
    backgroundColorBlue = blueComponent;
    backgroundColorAlpha = alphaComponent;
}

// Texture coordinate sets for each supported rotation/flip mode.
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode {
    static const GLfloat noRotationTextureCoordinates[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    static const GLfloat rotateRightTextureCoordinates[] = {
        1.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        0.0f, 0.0f,
    };

    static const GLfloat rotateLeftTextureCoordinates[] = {
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        1.0f, 1.0f,
    };

    static const GLfloat verticalFlipTextureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    static const GLfloat horizontalFlipTextureCoordinates[] = {
        1.0f, 1.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 0.0f,
    };

    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
        1.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        0.0f, 1.0f,
    };

    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
        1.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        0.0f, 0.0f,
    };

    static const GLfloat rotate180TextureCoordinates[] = {
        1.0f, 0.0f,
        0.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 1.0f,
    };

    switch (rotationMode) {
        case kGPUImageNoRotation:                return noRotationTextureCoordinates;
        case kGPUImageRotateLeft:                return rotateLeftTextureCoordinates;
        case kGPUImageRotateRight:               return rotateRightTextureCoordinates;
        case kGPUImageFlipVertical:              return verticalFlipTextureCoordinates;
        case kGPUImageFlipHorizonal:             return horizontalFlipTextureCoordinates;
        case kGPUImageRotateRightFlipVertical:   return rotateRightVerticalFlipTextureCoordinates;
        case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;
        case kGPUImageRotate180:                 return rotate180TextureCoordinates;
    }
}

#pragma mark - PKVideoDecoderDelegate

// Draws the decoded framebuffer into the display FBO and presents it.
// The framebuffer is locked on the caller's thread and unlocked after drawing.
- (void)didDecodeInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer inputSize:(CGSize)newSize frameTime:(CMTime)frameTime {
    self.inputFramebufferForDisplay = newInputFramebuffer;
    [self.inputFramebufferForDisplay lock];

    runSynchronouslyOnVideoProcessingQueue(^{
        CGSize rotatedSize = newSize;

        if (!CGSizeEqualToSize(self.inputImageSize, rotatedSize)) {
            self.inputImageSize = rotatedSize;
            [self recalculateViewGeometry];
        }
    });

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:self.displayProgram];
        [self setDisplayFramebuffer];

        glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glActiveTexture(GL_TEXTURE4);
        glBindTexture(GL_TEXTURE_2D, [self.inputFramebufferForDisplay texture]);
        glUniform1i(displayInputTextureUniform, 4);

        glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);
        glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [PKChatMessagePlayerView textureCoordinatesForRotation:self.rotationMode]);

        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        [self presentFramebuffer];
        [self.inputFramebufferForDisplay unlock];
        self.inputFramebufferForDisplay = nil;
    });
}

- (void)didCompletePlayingMovie {
    // Intentionally empty: this view loops via the decoder's keepLooping flag.
}

#pragma mark - Getter

- (CGSize)sizeInPixels {
    if
(CGSizeEqualToSize(_sizeInPixels, CGSizeZero)) {
        // Framebuffer not created yet; derive pixel size from bounds × scale.
        if ([self respondsToSelector:@selector(setContentScaleFactor:)]) {
            CGSize pointSize = self.bounds.size;
            return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height);
        }
        else {
            return self.bounds.size;
        }
    }
    else {
        return _sizeInPixels;
    }
}

@end

// ============================================================================
// /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/PKColorConversion.h
// ============================================================================
//
//  PKColorConversion.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#ifndef PKColorConversion_h
#define PKColorConversion_h

#import "GPUImageContext.h"

// YUV → RGB conversion matrices (column-major 3x3) and shared shader sources.
extern GLfloat *kColorConversion601;
extern GLfloat *kColorConversion601FullRange;
extern GLfloat *kColorConversion709;
extern NSString *const kGPUImageVertexShaderString;
extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
extern NSString *const kGPUImagePassthroughFragmentShaderString;

#endif /* PKColorConversion_h */

// ============================================================================
// /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/PKColorConversion.m
// ============================================================================
//
//  PKColorConversion.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "PKColorConversion.h"

// BT.601 (video range) YUV → RGB.
GLfloat kColorConversion601Default[] = {
    1.164, 1.164, 1.164,
    0.0, -0.392, 2.017,
    1.596, -0.813, 0.0,
};

// BT.709 YUV → RGB.
GLfloat kColorConversion709Default[] = {
    1.164, 1.164, 1.164,
    0.0, -0.213, 2.112,
    1.793, -0.533, 0.0,
};

// BT.601 full-range YUV → RGB.
GLfloat kColorConversion601FullRangeDefault[] = {
    1.0, 1.0, 1.0,
    0.0, -0.343, 1.765,
    1.4, -0.711, 0.0,
};


GLfloat *kColorConversion601 = kColorConversion601Default;
GLfloat *kColorConversion601FullRange = kColorConversion601FullRangeDefault;
GLfloat *kColorConversion709 = kColorConversion709Default;

#define STRINGIZE(x) #x
#define STRINGIZE2(x) STRINGIZE(x)
#define SHADER_STRING(text) @ STRINGIZE2(text)

NSString *const kGPUImageVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 varying vec2 textureCoordinate;

 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
 }
);

NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
 }
);

// ============================================================================
// /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/PKVideoDecoder.h
// ============================================================================
//
//  PKVideoDecoder.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// NOTE(review): the original #import targets were lost in extraction; Foundation
// and AVFoundation (AVAsset/AVAssetReader/CMTime) are required — confirm.
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

@class GPUImageFramebuffer;

NS_ASSUME_NONNULL_BEGIN

// FIX: restore the protocol's base conformance and the typed delegate below —
// the angle-bracketed generics were stripped by extraction.
@protocol PKVideoDecoderDelegate <NSObject>

/// Called when the movie finishes (only when keepLooping is NO).
- (void)didCompletePlayingMovie;

/// Called for every decoded frame with the RGB-converted framebuffer.
- (void)didDecodeInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer inputSize:(CGSize)newSize frameTime:(CMTime)frameTime;

@end

/// Decodes a local video file with AVAssetReader and converts each YUV frame
/// to an RGB GPUImageFramebuffer, pacing output to the frames' timestamps.
@interface PKVideoDecoder : NSObject

@property (nonatomic, strong) AVAsset *asset;
// FIX: NSString property uses `copy`.
@property (nonatomic, copy) NSString *videoPath;
// NOTE(review): no visible backing for this readonly property in the .m, which
// uses a private `reader` property instead — as written it reads as always nil;
// verify against the implementation.
@property (nonatomic, strong, readonly) AVAssetReader *assetReader;
// NOTE(review): `progress` is never assigned anywhere visible — confirm intent.
@property (nonatomic, assign, readonly) CGFloat progress;

/// When YES (default), playback restarts from the beginning on completion.
@property (nonatomic, assign) BOOL keepLooping;

@property (nonatomic, weak) id<PKVideoDecoderDelegate> delegate;

- (instancetype)initWithVideoPath:(NSString *)videoPath size:(CGSize)size;

- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;

@end

NS_ASSUME_NONNULL_END

// ============================================================================
// /DevelopPlayerDemo/PKShortVideoPlayer/AVAssetReader+OpenGL ES/PKVideoDecoder.m
// ============================================================================
//
//  PKVideoDecoder.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/11.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "PKVideoDecoder.h"
#import "GLProgram.h"
#import "GPUImageContext.h"
#import "PKColorConversion.h"

@interface PKVideoDecoder () {
    GPUImageFramebuffer *outputFramebuffer;
    GLProgram *yuvConversionProgram;

    CMTime previousFrameTime, processingFrameTime;
    CFAbsoluteTime previousActualFrameTime;

    GLuint luminanceTexture, chrominanceTexture;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;

    const GLfloat *_preferredConversion;

    int imageBufferWidth, imageBufferHeight;
}

@property (nonatomic, assign) CGSize size;

@property (nonatomic, strong, readwrite) AVAssetReader *reader;

@end

@implementation PKVideoDecoder

#pragma mark - Initialization

- (instancetype)initWithVideoPath:(NSString *)videoPath size:(CGSize)size {
    self = [super init];
    if (self) {
        _videoPath = [videoPath copy]; // defensive copy against mutable strings
        _size = size;
        _asset = nil;
        _keepLooping = YES;
        [self yuvConversionSetup];
    }
    return self;
}

// Compiles/links the YUV→RGB conversion program and caches its attribute and
// uniform locations. Runs synchronously on the shared video processing queue.
- (void)yuvConversionSetup {
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        _preferredConversion = kColorConversion709;
        yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];

        if (!yuvConversionProgram.initialized) {
            [yuvConversionProgram addAttribute:@"position"];
            [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

            if (![yuvConversionProgram link]) {
                NSString *progLog = [yuvConversionProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                yuvConversionProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
        yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
        yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
        yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
        yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

        [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

        glEnableVertexAttribArray(yuvConversionPositionAttribute);
        glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
    });
}

// Builds an AVAssetReader with a biplanar-YUV track output, clamping the
// output size to the track's natural size. Returns nil on failure.
- (AVAssetReader *)createAssetReader {
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
    if (assetReader == nil) {
        // FIX: the original ignored `error` and a nil reader.
        NSLog(@"Failed to create AVAssetReader: %@", error);
        return nil;
    }

    // FIX: guard against assets with no video track — the original crashed on
    // objectAtIndex:0 for audio-only or unreadable files.
    AVAssetTrack *assetTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (assetTrack == nil) {
        NSLog(@"No video track in asset at path: %@", self.videoPath);
        return nil;
    }

    // Never upscale beyond the track's natural size.
    CGSize outputSize = CGSizeZero;
    if (self.size.width > assetTrack.naturalSize.width) {
        outputSize = assetTrack.naturalSize;
    } else {
        outputSize = self.size;
    }

    NSDictionary *outputSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
        (id)kCVPixelBufferWidthKey:@(outputSize.width),
        (id)kCVPixelBufferHeightKey:@(outputSize.height),
    };

    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetTrack outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

    return assetReader;
}

// Drives the read loop on the current (background) thread and restarts
// playback from the top when keepLooping is set.
- (void)processAsset {
    self.reader = [self createAssetReader];
    if (self.reader == nil) {
        // FIX: bail out cleanly when the reader could not be built.
        return;
    }

    AVAssetReaderOutput *readerVideoTrackOutput = nil;

    for (AVAssetReaderOutput *output in self.reader.outputs) {
        if ([output.mediaType isEqualToString:AVMediaTypeVideo]) {
            readerVideoTrackOutput = output;
        }
    }

    if ([self.reader startReading] == NO) {
        NSLog(@"Error reading from file at Path: %@", self.videoPath);
        return;
    }

    __weak typeof(self)weakSelf = self;

    while (self.reader.status == AVAssetReaderStatusReading) {
        [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
    }

    if (self.reader.status == AVAssetReaderStatusCompleted) {

        [self.reader cancelReading];

        if (self.keepLooping) {
            self.reader = nil;
            dispatch_async(dispatch_get_main_queue(), ^{
                [self startProcessing];
            });
        } else {
            [weakSelf endProcessing];
        }

    }
}

#pragma mark - Public

/// Loads the asset's tracks asynchronously (first run) and starts the decode
/// loop on a background queue.
- (void)startProcessing {
    previousFrameTime = kCMTimeZero;
    previousActualFrameTime = CFAbsoluteTimeGetCurrent();

    if (self.asset) {
        dispatch_async(dispatch_get_global_queue(0, 0), ^{
            [self processAsset];
        });
        return;
    }

    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:self.videoPath] options:inputOptions];

    PKVideoDecoder __block *blockSelf = self;

    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^{
        NSError *error = nil;
        AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
        if (tracksStatus != AVKeyValueStatusLoaded) {
            return;
        }
        blockSelf.asset = inputAsset;
        [blockSelf processAsset];
        blockSelf = nil;
    }];
}

- (void)endProcessing {
    if (self.delegate && [self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
        [self.delegate didCompletePlayingMovie];
    }
}

- (void)cancelProcessing {
    if (self.reader) {
        [self.reader cancelReading];
    }
    [self endProcessing];
}

#pragma mark - Accessors

// FIX: back the public readonly `assetReader` property declared in the header.
// Previously its auto-synthesized ivar was never assigned, so the public
// getter always returned nil; forward to the private `reader` property.
- (AVAssetReader *)assetReader {
    return self.reader;
}

// Renders the current luminance/chrominance textures through the YUV→RGB
// program into `outputFramebuffer`.
- (void)convertYUVToRGBOutput {
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}


#pragma mark - Private

/// Pulls the next video sample from the reader output, paces decode speed
/// against wall-clock time, then hands the frame to the processing queue.
/// Returns YES when a frame was consumed; NO when the reader is drained or
/// not in the reading state.
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput {
    if (self.reader.status == AVAssetReaderStatusReading) {
        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef) {
            // Throttle to real time: if presentation time has advanced more
            // than wall-clock time since the last frame, sleep the difference.
            CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
            CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
            CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();

            CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
            CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;

            if (frameTimeDifference > actualTimeDifference) {
                usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
            }

            previousFrameTime = currentSampleTime;
            previousActualFrameTime = CFAbsoluteTimeGetCurrent();

            __weak typeof(self) weakSelf = self;
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:sampleBufferRef];
                // This block owns the buffer (copyNextSampleBuffer), so it
                // must invalidate and release it even if weakSelf is gone.
                CMSampleBufferInvalidate(sampleBufferRef);
                CFRelease(sampleBufferRef);
            });

            return YES;
        }
        else {
            // No more samples: signal completion unless we are looping.
            if (!self.keepLooping) {
                [self endProcessing];
            }
        }
    }
    return NO;
}

/// Extracts the pixel buffer and output presentation time from the sample
/// buffer, records the current frame time, and forwards both to the YUV
/// upload path.
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer {
    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = currentSampleTime;
    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}

/// Uploads the Y and CbCr planes of a bi-planar YUV pixel buffer as GL
/// textures, converts them to RGB, and notifies the delegate with the
/// resulting framebuffer.
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime {
    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);

    // Pick the YUV->RGB conversion matrix from the buffer's color
    // attachment; fall back to full-range BT.601 when it is absent.
    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL) {
        if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
            _preferredConversion = kColorConversion601FullRange;
        }
        else {
            _preferredConversion = kColorConversion709;
        }
    }
    else {
        _preferredConversion = kColorConversion601FullRange;
    }

    [GPUImageContext useImageProcessingContext];

    CVOpenGLESTextureRef luminanceTextureRef = NULL;
    CVOpenGLESTextureRef chrominanceTextureRef = NULL;

    if (CVPixelBufferGetPlaneCount(movieFrame) > 0) { // YUV planar input: do RGB conversion
        // FIX: was `&&`, which skipped the cached-size update when only one
        // dimension changed.
        if ((imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight)) {
            imageBufferWidth = bufferWidth;
            imageBufferHeight = bufferHeight;
        }

        // FIX: balance the CVPixelBufferUnlockBaseAddress call below — the
        // original unlocked a buffer it never locked.
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        // NOTE(review): both branches of the former deviceSupportsRedTextures
        // test issued byte-identical calls, so the branch was removed.
        CVReturn err;

        // Y plane -> texture unit 4.
        glActiveTexture(GL_TEXTURE4);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);

        glBindTexture(GL_TEXTURE_2D, luminanceTexture);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // Interleaved CbCr plane (half resolution) -> texture unit 5.
        glActiveTexture(GL_TEXTURE5);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);

        glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        [self convertYUVToRGBOutput];

        if (self.delegate && [self.delegate respondsToSelector:@selector(didDecodeInputFramebuffer:inputSize:frameTime:)]) {
            [self.delegate didDecodeInputFramebuffer:outputFramebuffer inputSize:CGSizeMake(bufferWidth, bufferHeight) frameTime:currentSampleTime];
        }
        [outputFramebuffer unlock];

        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
        // FIX: guard the releases — a failed texture-cache create leaves the
        // refs NULL, and CFRelease(NULL) crashes.
        if (luminanceTextureRef) {
            CFRelease(luminanceTextureRef);
        }
        if (chrominanceTextureRef) {
            CFRelease(chrominanceTextureRef);
        }
    }
}

#pragma mark - Getter

/// Fraction of the asset decoded so far, clamped to [0, 1].
- (CGFloat)progress {
    if (AVAssetReaderStatusReading == self.reader.status) {
        // FIX: the original divided raw CMTime fields, which yields NaN
        // before the first frame (0/0) or for an indefinite duration.
        Float64 current = CMTIME_IS_NUMERIC(processingFrameTime) ? CMTimeGetSeconds(processingFrameTime) : 0.0;
        Float64 duration = CMTimeGetSeconds(self.asset.duration);
        if (duration <= 0.0) {
            return 0.f;
        }
        return (CGFloat)MIN(current / duration, 1.0);
    }
    else if (AVAssetReaderStatusCompleted == self.reader.status) {
        return 1.f;
    }
    else {
        return 0.f;
    }
}

/// Exposes the underlying AVAssetReader.
- (AVAssetReader *)assetReader {
    return self.reader;
}


@end
--------------------------------------------------------------------------------
/DevelopPlayerDemo/PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerView.h:
--------------------------------------------------------------------------------
//
//  PKFullScreenPlayerView.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// FIX: the framework import target was lost in this listing; UIKit is required.
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// Full-screen looping video player view backed by AVPlayer.
@interface PKFullScreenPlayerView : UIView

/// Designated initializer.
/// @param videoPath Path to a local video file. Must not be nil.
/// @param previewImage Poster image for the video. Must not be nil.
- (instancetype)initWithFrame:(CGRect)frame videoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage;

/// Resumes playback (no-op until the player has been created).
- (void)play;

/// Pauses playback.
- (void)pause;

@end

NS_ASSUME_NONNULL_END

--------------------------------------------------------------------------------
/DevelopPlayerDemo/PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerView.m:
--------------------------------------------------------------------------------
//
//  PKFullScreenPlayerView.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "PKFullScreenPlayerView.h"
@import AVFoundation;

@interface PKFullScreenPlayerView ()

// FIX: `copy` (not `strong`) for NSString to guard against mutable subclasses.
@property (nonatomic, copy) NSString *videoPath;

@property (nonatomic, strong) UIImage *previewImage;

@property (nonatomic, strong) AVPlayer *player;
@property (nonatomic, strong) AVPlayerLayer *playerLayer;

@end



@implementation PKFullScreenPlayerView

#pragma mark - Initialization

/// Designated initializer. Kicks off asynchronous loading of the asset's
/// "playable" key; playback starts on the main queue once loading finishes.
- (instancetype)initWithFrame:(CGRect)frame videoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage {
    NSParameterAssert(videoPath != nil);
    NSParameterAssert(previewImage != nil);

    self = [super initWithFrame:frame];
    if (self) {
        _videoPath = [videoPath copy];
        _previewImage = previewImage;

        AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];

        // Weak capture: the completion handler must not keep the view alive.
        __weak typeof(self) weakSelf = self;
        [asset loadValuesAsynchronouslyForKeys:@[@"playable"] completionHandler:^{
            // The handler may run on an arbitrary queue; hop to main for UI work.
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf prepareToPlayAsset:asset];
            });
        }];
    }
    return self;
}

- (void)dealloc {
    // Stop playback and abandon outstanding async work before teardown.
    [_player pause];
    [_player.currentItem cancelPendingSeeks];
    [_player.currentItem.asset cancelLoading];
    [_player replaceCurrentItemWithPlayerItem:nil];
    _player = nil;

    [[NSNotificationCenter defaultCenter] removeObserver:self];
}



#pragma mark - Layout

// FIX: the original set the layer frame only once in -prepareToPlayAsset:,
// so the video did not follow later bounds changes (rotation, resizing).
- (void)layoutSubviews {
    [super layoutSubviews];
    self.playerLayer.frame = self.bounds;
}



#pragma mark Prepare to play asset, URL

/// Validates the loaded asset and, on success, builds the player and layer
/// and starts looping playback.
- (void)prepareToPlayAsset:(AVAsset *)asset {
    NSError *error = nil;
    AVKeyValueStatus keyStatus = [asset statusOfValueForKey:@"playable" error:&error];
    if (keyStatus == AVKeyValueStatusFailed) {
        [self assetFailedToPrepareForPlayback:error];
        return;
    }

    if (!asset.playable) {
        // NOTE(review): the domain string doubles as a human-readable
        // message; a reverse-DNS domain plus NSLocalizedDescriptionKey would
        // be more conventional. Kept as-is to preserve behavior.
        NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"Item cannot be played" code:0 userInfo:nil];

        [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
        return;
    }

    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:asset];
    // Observe end-of-item so playback can loop (see -playerItemDidReachEnd:).
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playerItemDidReachEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem];

    self.player = [AVPlayer playerWithPlayerItem:playerItem];

    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    self.playerLayer.frame = self.bounds;
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.layer addSublayer:self.playerLayer];

    [self.player play];
}



#pragma mark - Error Handle

/// Informs the user that the video cannot be played.
// NOTE(review): UIAlertView is deprecated since iOS 8; migrate to
// UIAlertController when a presenting view controller is available.
- (void)assetFailedToPrepareForPlayback:(NSError *)error {
    UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Video cannot be played" message:@"Video cannot be played" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
    [alertView show];
}



#pragma mark - Notification

/// Loops playback: rewind to zero and play again when the item ends.
- (void)playerItemDidReachEnd:(NSNotification *)notification {
    [self.player seekToTime:kCMTimeZero];

    [self.player play];
}



#pragma mark - Public

- (void)play {
    if (!self.player) {
        return;
    }
    [self.player play];
}

- (void)pause {
    if (!self.player) {
        return;
    }
    [self.player pause];
}


@end
--------------------------------------------------------------------------------
/DevelopPlayerDemo/PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerViewController.h:
--------------------------------------------------------------------------------
//
//  PKFullScreenPlayerViewController.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// FIX: the framework import target was lost in this listing; UIKit is required.
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// Full-screen container view controller for PKFullScreenPlayerView.
@interface PKFullScreenPlayerViewController : UIViewController

/// @param videoPath Path to a local video file. Must not be nil.
/// @param previewImage Poster image used to size the player. Must not be nil.
- (instancetype)initWithVideoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage;

@end

NS_ASSUME_NONNULL_END
--------------------------------------------------------------------------------
/DevelopPlayerDemo/PKShortVideoPlayer/AVPlayer/PKFullScreenPlayerViewController.m:
--------------------------------------------------------------------------------
//
//  PKFullScreenPlayerViewController.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "PKFullScreenPlayerViewController.h"
#import "PKFullScreenPlayerView.h"

@interface PKFullScreenPlayerViewController ()

@property (nonatomic, strong) PKFullScreenPlayerView *playerView;

// FIX: `copy` (not `strong`) for NSString to guard against mutable subclasses.
@property (nonatomic, copy) NSString *videoPath;
@property (nonatomic, strong) UIImage *image;

@end

@implementation PKFullScreenPlayerViewController

#pragma mark - Initialization

/// Designated initializer storing the video path and poster image.
- (instancetype)initWithVideoPath:(NSString *)videoPath previewImage:(UIImage *)previewImage {
    NSParameterAssert(videoPath != nil);
    NSParameterAssert(previewImage != nil);

    self = [super init];
    if (self) {
        _videoPath = [videoPath copy];
        _image = previewImage;
    }
    return self;
}



#pragma mark - View Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor blackColor];

    CGSize viewSize = self.view.bounds.size;
    CGSize imageSize = self.image.size;

    // Size the player to the full width, preserving the poster image's
    // aspect ratio, and center it in the view.
    // NOTE(review): assumes imageSize.width > 0 — a zero-width image would
    // produce an unusable height here; confirm previews always have size.
    self.playerView = [[PKFullScreenPlayerView alloc] initWithFrame:CGRectMake(0, 0, viewSize.width, viewSize.width * (imageSize.height / imageSize.width)) videoPath:self.videoPath previewImage:self.image];
    self.playerView.center = self.view.center;

    [self.view addSubview:self.playerView];

    // Tap anywhere to leave the player.
    UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapGestureRecognizer:)];
    [self.view addGestureRecognizer:tap];
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];

    [self.navigationController setNavigationBarHidden:YES animated:YES];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];

    [self.navigationController setNavigationBarHidden:NO animated:YES];
}

- (BOOL)prefersStatusBarHidden {
    return YES;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}



#pragma mark - Tap GestureRecognizer

/// Pauses playback and leaves the screen with a cross-fade: pops when inside
/// a navigation stack, dismisses otherwise.
- (void)tapGestureRecognizer:(UITapGestureRecognizer *)tap {
    [self.playerView pause];

    CATransition *animation = [CATransition animation];
    [animation setDuration:0.25];
    [animation setType:kCATransitionFade];

    [animation setSubtype:kCATransitionFromLeft];
    [animation setTimingFunction:[CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear]];

    [self.navigationController.view.layer addAnimation:animation forKey:nil];

    if (self.navigationController) {
        [self.navigationController popViewControllerAnimated:NO];
    } else {
        [self dismissViewControllerAnimated:YES completion:NULL];
    }
}

@end
--------------------------------------------------------------------------------
/DevelopPlayerDemo/PKShortVideoPlayer/Categories/UIImage+PKShortVideoPlayer.h:
--------------------------------------------------------------------------------
//
//  UIImage+PKShortVideoPlayer.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// FIX: the framework import target was lost in this listing; UIKit is required.
#import <UIKit/UIKit.h>
@class AVAsset;

NS_ASSUME_NONNULL_BEGIN

@interface UIImage (PKShortVideoPlayer)

/// Generates a poster frame for the video at the given file URL.
/// @return A preview image taken near the start of the video.
+ (UIImage *)pk_previewImageWithVideoURL:(NSURL *)videoURL;

@end

NS_ASSUME_NONNULL_END
--------------------------------------------------------------------------------
/DevelopPlayerDemo/PKShortVideoPlayer/Categories/UIImage+PKShortVideoPlayer.m:
--------------------------------------------------------------------------------
//
//  UIImage+PKShortVideoPlayer.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "UIImage+PKShortVideoPlayer.h"
@import AVFoundation;

@implementation UIImage (PKShortVideoPlayer)

/// Grabs a frame at t = 1/timescale (effectively the first frame), with the
/// preferred track transform applied so the preview matches the video's
/// display orientation.
+ (UIImage *)pk_previewImageWithVideoURL:(NSURL *)videoURL {
    AVAsset *asset = [AVAsset assetWithURL:videoURL];

    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;

    // FIX: the original passed error:nil and fed a possibly-NULL CGImage to
    // +imageWithCGImage:. Capture the error and fail gracefully instead.
    NSError *error = nil;
    CGImageRef img = [generator copyCGImageAtTime:CMTimeMake(1, asset.duration.timescale) actualTime:NULL error:&error];
    if (img == NULL) {
        NSLog(@"pk_previewImageWithVideoURL failed: %@", error);
        // NOTE(review): the header sits in an NS_ASSUME_NONNULL region; the
        // only visible caller nil-checks the result, but consider marking
        // the return value nullable.
        return nil;
    }

    UIImage *image = [UIImage imageWithCGImage:img];
    CGImageRelease(img);
    return image;
}

@end
--------------------------------------------------------------------------------
/DevelopPlayerDemo/ViewController.h:
--------------------------------------------------------------------------------
//
//  ViewController.h
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// FIX: the framework import target was lost in this listing; UIKit is required.
#import <UIKit/UIKit.h>

/// Root screen of the demo: loads the sample video and routes to the player
/// implementations.
@interface ViewController : UIViewController


@end

--------------------------------------------------------------------------------
/DevelopPlayerDemo/ViewController.m:
--------------------------------------------------------------------------------
//
//  ViewController.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "ViewController.h"
#import "PKFullScreenPlayerViewController.h"
#import "UIImage+PKShortVideoPlayer.h"
#import "PKLayerContentsViewController.h"
#import "PKOpenGLESViewController.h"

@interface ViewController ()

// FIX: `copy` (not `strong`) for NSString to guard against mutable subclasses.
@property (nonatomic, copy) NSString *videoPath;
@property (nonatomic, strong) UIImage *image;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Load the bundled sample clip and derive its poster image.
    self.videoPath = [[NSBundle mainBundle] pathForResource:@"Cat" ofType:@"mp4"];

    // FIX: guard against a missing resource — +fileURLWithPath: raises an
    // exception when handed nil.
    if (self.videoPath) {
        self.image = [UIImage pk_previewImageWithVideoURL:[NSURL fileURLWithPath:self.videoPath]];
    }
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)goToAVPlayer:(id)sender {
    // The preview image is derived from the video in -viewDidLoad, so a nil
    // image means there is nothing to play.
    if (!self.image) {
        return;
    }

    PKFullScreenPlayerViewController *playerViewController = [[PKFullScreenPlayerViewController alloc] initWithVideoPath:self.videoPath previewImage:self.image];

    // Push with a cross-fade instead of the default slide animation.
    CATransition *fade = [CATransition animation];
    fade.duration = 0.25;
    fade.type = kCATransitionFade;
    fade.subtype = kCATransitionFromLeft;
    fade.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];

    [self.navigationController.view.layer addAnimation:fade forKey:nil];
    [self.navigationController pushViewController:playerViewController animated:NO];
}

#pragma mark - Segue

/// Hands the shared video path and poster image to whichever demo screen the
/// storyboard segue is about to present.
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    NSString *identifier = segue.identifier;
    if ([identifier isEqualToString:@"segueLayerContents"]) {
        PKLayerContentsViewController *vc = segue.destinationViewController;
        vc.videoPath = self.videoPath;
        vc.image = self.image;
    } else if ([identifier isEqualToString:@"segueOpenGLES"]) {
        PKOpenGLESViewController *vc = segue.destinationViewController;
        vc.videoPath = self.videoPath;
        vc.image = self.image;
    }
}

@end
--------------------------------------------------------------------------------
/DevelopPlayerDemo/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  DevelopPlayerDemo
//
//  Created by jiangxincai on 16/1/4.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

// FIX: the framework import target was lost in this listing; UIKit is required.
#import <UIKit/UIKit.h>
#import "AppDelegate.h"

/// App entry point: starts the UIKit run loop with AppDelegate as the
/// application delegate class.
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# DevelopPlayerDemo

A demo project comparing ways to play short videos on iOS: AVPlayer,
CALayer-contents rendering, and OpenGL ES (AVAssetReader-based decoding).