├── .gitignore
├── README.md
├── README_rtc.md
├── ReplayKit_Live.md
├── UPLiveSDKDemo.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ ├── IDEWorkspaceChecks.plist
│ └── WorkspaceSettings.xcsettings
├── UPLiveSDKDemo
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── BeautifyFilter.h
├── BeautifyFilter.m
├── Info.plist
├── LiveVC.h
├── LiveVC.m
├── LiveVC.xib
├── PlayerVC.h
├── PlayerVC.m
├── PlayerVC.xib
├── UPLivePlayerDemoViewController.h
├── UPLivePlayerDemoViewController.m
├── UPLivePlayerVC.h
├── UPLivePlayerVC.m
├── UPLivePlayerVC.xib
├── UPLiveSDKDemoHomeVC.h
├── UPLiveSDKDemoHomeVC.m
├── UPLiveSDKDemoHomeVC.xib
├── UPLiveStreamerDemoViewController.h
├── UPLiveStreamerDemoViewController.m
├── UPLiveStreamerLivingVC.h
├── UPLiveStreamerLivingVC.m
├── UPLiveStreamerLivingVC.xib
├── UPLiveStreamerSettingVC.h
├── UPLiveStreamerSettingVC.m
├── UPLiveStreamerSettingVC.xib
├── huzi.png
├── main.m
└── upyun_logo.png
└── UPLiveService
├── GPUImage
├── GLProgram.h
├── GLProgram.m
├── GPUImage.h
├── GPUImage3x3ConvolutionFilter.h
├── GPUImage3x3ConvolutionFilter.m
├── GPUImage3x3TextureSamplingFilter.h
├── GPUImage3x3TextureSamplingFilter.m
├── GPUImageAdaptiveThresholdFilter.h
├── GPUImageAdaptiveThresholdFilter.m
├── GPUImageAddBlendFilter.h
├── GPUImageAddBlendFilter.m
├── GPUImageAlphaBlendFilter.h
├── GPUImageAlphaBlendFilter.m
├── GPUImageAmatorkaFilter.h
├── GPUImageAmatorkaFilter.m
├── GPUImageAverageColor.h
├── GPUImageAverageColor.m
├── GPUImageAverageLuminanceThresholdFilter.h
├── GPUImageAverageLuminanceThresholdFilter.m
├── GPUImageBilateralFilter.h
├── GPUImageBilateralFilter.m
├── GPUImageBoxBlurFilter.h
├── GPUImageBoxBlurFilter.m
├── GPUImageBrightnessFilter.h
├── GPUImageBrightnessFilter.m
├── GPUImageBuffer.h
├── GPUImageBuffer.m
├── GPUImageBulgeDistortionFilter.h
├── GPUImageBulgeDistortionFilter.m
├── GPUImageCGAColorspaceFilter.h
├── GPUImageCGAColorspaceFilter.m
├── GPUImageCannyEdgeDetectionFilter.h
├── GPUImageCannyEdgeDetectionFilter.m
├── GPUImageChromaKeyBlendFilter.h
├── GPUImageChromaKeyBlendFilter.m
├── GPUImageChromaKeyFilter.h
├── GPUImageChromaKeyFilter.m
├── GPUImageClosingFilter.h
├── GPUImageClosingFilter.m
├── GPUImageColorBlendFilter.h
├── GPUImageColorBlendFilter.m
├── GPUImageColorBurnBlendFilter.h
├── GPUImageColorBurnBlendFilter.m
├── GPUImageColorConversion.h
├── GPUImageColorConversion.m
├── GPUImageColorDodgeBlendFilter.h
├── GPUImageColorDodgeBlendFilter.m
├── GPUImageColorInvertFilter.h
├── GPUImageColorInvertFilter.m
├── GPUImageColorLocalBinaryPatternFilter.h
├── GPUImageColorLocalBinaryPatternFilter.m
├── GPUImageColorMatrixFilter.h
├── GPUImageColorMatrixFilter.m
├── GPUImageColorPackingFilter.h
├── GPUImageColorPackingFilter.m
├── GPUImageColourFASTFeatureDetector.h
├── GPUImageColourFASTFeatureDetector.m
├── GPUImageColourFASTSamplingOperation.h
├── GPUImageColourFASTSamplingOperation.m
├── GPUImageContrastFilter.h
├── GPUImageContrastFilter.m
├── GPUImageCropFilter.h
├── GPUImageCropFilter.m
├── GPUImageCrosshairGenerator.h
├── GPUImageCrosshairGenerator.m
├── GPUImageCrosshatchFilter.h
├── GPUImageCrosshatchFilter.m
├── GPUImageDarkenBlendFilter.h
├── GPUImageDarkenBlendFilter.m
├── GPUImageDifferenceBlendFilter.h
├── GPUImageDifferenceBlendFilter.m
├── GPUImageDilationFilter.h
├── GPUImageDilationFilter.m
├── GPUImageDirectionalNonMaximumSuppressionFilter.h
├── GPUImageDirectionalNonMaximumSuppressionFilter.m
├── GPUImageDirectionalSobelEdgeDetectionFilter.h
├── GPUImageDirectionalSobelEdgeDetectionFilter.m
├── GPUImageDissolveBlendFilter.h
├── GPUImageDissolveBlendFilter.m
├── GPUImageDivideBlendFilter.h
├── GPUImageDivideBlendFilter.m
├── GPUImageEmbossFilter.h
├── GPUImageEmbossFilter.m
├── GPUImageErosionFilter.h
├── GPUImageErosionFilter.m
├── GPUImageExclusionBlendFilter.h
├── GPUImageExclusionBlendFilter.m
├── GPUImageExposureFilter.h
├── GPUImageExposureFilter.m
├── GPUImageFASTCornerDetectionFilter.h
├── GPUImageFASTCornerDetectionFilter.m
├── GPUImageFalseColorFilter.h
├── GPUImageFalseColorFilter.m
├── GPUImageFilter.h
├── GPUImageFilter.m
├── GPUImageFilterGroup.h
├── GPUImageFilterGroup.m
├── GPUImageFilterPipeline.h
├── GPUImageFilterPipeline.m
├── GPUImageFourInputFilter.h
├── GPUImageFourInputFilter.m
├── GPUImageFramebuffer.h
├── GPUImageFramebuffer.m
├── GPUImageFramebufferCache.h
├── GPUImageFramebufferCache.m
├── GPUImageGammaFilter.h
├── GPUImageGammaFilter.m
├── GPUImageGaussianBlurFilter.h
├── GPUImageGaussianBlurFilter.m
├── GPUImageGaussianBlurPositionFilter.h
├── GPUImageGaussianBlurPositionFilter.m
├── GPUImageGaussianSelectiveBlurFilter.h
├── GPUImageGaussianSelectiveBlurFilter.m
├── GPUImageGlassSphereFilter.h
├── GPUImageGlassSphereFilter.m
├── GPUImageGrayscaleFilter.h
├── GPUImageGrayscaleFilter.m
├── GPUImageHSBFilter.h
├── GPUImageHSBFilter.m
├── GPUImageHalftoneFilter.h
├── GPUImageHalftoneFilter.m
├── GPUImageHardLightBlendFilter.h
├── GPUImageHardLightBlendFilter.m
├── GPUImageHarrisCornerDetectionFilter.h
├── GPUImageHarrisCornerDetectionFilter.m
├── GPUImageHazeFilter.h
├── GPUImageHazeFilter.m
├── GPUImageHighPassFilter.h
├── GPUImageHighPassFilter.m
├── GPUImageHighlightShadowFilter.h
├── GPUImageHighlightShadowFilter.m
├── GPUImageHighlightShadowTintFilter.h
├── GPUImageHighlightShadowTintFilter.m
├── GPUImageHistogramEqualizationFilter.h
├── GPUImageHistogramEqualizationFilter.m
├── GPUImageHistogramFilter.h
├── GPUImageHistogramFilter.m
├── GPUImageHistogramGenerator.h
├── GPUImageHistogramGenerator.m
├── GPUImageHoughTransformLineDetector.h
├── GPUImageHoughTransformLineDetector.m
├── GPUImageHueBlendFilter.h
├── GPUImageHueBlendFilter.m
├── GPUImageHueFilter.h
├── GPUImageHueFilter.m
├── GPUImageJFAVoronoiFilter.h
├── GPUImageJFAVoronoiFilter.m
├── GPUImageKuwaharaFilter.h
├── GPUImageKuwaharaFilter.m
├── GPUImageKuwaharaRadius3Filter.h
├── GPUImageKuwaharaRadius3Filter.m
├── GPUImageLanczosResamplingFilter.h
├── GPUImageLanczosResamplingFilter.m
├── GPUImageLaplacianFilter.h
├── GPUImageLaplacianFilter.m
├── GPUImageLevelsFilter.h
├── GPUImageLevelsFilter.m
├── GPUImageLightenBlendFilter.h
├── GPUImageLightenBlendFilter.m
├── GPUImageLineGenerator.h
├── GPUImageLineGenerator.m
├── GPUImageLinearBurnBlendFilter.h
├── GPUImageLinearBurnBlendFilter.m
├── GPUImageLocalBinaryPatternFilter.h
├── GPUImageLocalBinaryPatternFilter.m
├── GPUImageLookupFilter.h
├── GPUImageLookupFilter.m
├── GPUImageLowPassFilter.h
├── GPUImageLowPassFilter.m
├── GPUImageLuminanceRangeFilter.h
├── GPUImageLuminanceRangeFilter.m
├── GPUImageLuminanceThresholdFilter.h
├── GPUImageLuminanceThresholdFilter.m
├── GPUImageLuminosity.h
├── GPUImageLuminosity.m
├── GPUImageLuminosityBlendFilter.h
├── GPUImageLuminosityBlendFilter.m
├── GPUImageMaskFilter.h
├── GPUImageMaskFilter.m
├── GPUImageMedianFilter.h
├── GPUImageMedianFilter.m
├── GPUImageMissEtikateFilter.h
├── GPUImageMissEtikateFilter.m
├── GPUImageMonochromeFilter.h
├── GPUImageMonochromeFilter.m
├── GPUImageMosaicFilter.h
├── GPUImageMosaicFilter.m
├── GPUImageMotionBlurFilter.h
├── GPUImageMotionBlurFilter.m
├── GPUImageMotionDetector.h
├── GPUImageMotionDetector.m
├── GPUImageMovie.h
├── GPUImageMovie.m
├── GPUImageMovieComposition.h
├── GPUImageMovieComposition.m
├── GPUImageMultiplyBlendFilter.h
├── GPUImageMultiplyBlendFilter.m
├── GPUImageNobleCornerDetectionFilter.h
├── GPUImageNobleCornerDetectionFilter.m
├── GPUImageNonMaximumSuppressionFilter.h
├── GPUImageNonMaximumSuppressionFilter.m
├── GPUImageNormalBlendFilter.h
├── GPUImageNormalBlendFilter.m
├── GPUImageOpacityFilter.h
├── GPUImageOpacityFilter.m
├── GPUImageOpeningFilter.h
├── GPUImageOpeningFilter.m
├── GPUImageOutput.h
├── GPUImageOutput.m
├── GPUImageOverlayBlendFilter.h
├── GPUImageOverlayBlendFilter.m
├── GPUImageParallelCoordinateLineTransformFilter.h
├── GPUImageParallelCoordinateLineTransformFilter.m
├── GPUImagePerlinNoiseFilter.h
├── GPUImagePerlinNoiseFilter.m
├── GPUImagePinchDistortionFilter.h
├── GPUImagePinchDistortionFilter.m
├── GPUImagePixellateFilter.h
├── GPUImagePixellateFilter.m
├── GPUImagePixellatePositionFilter.h
├── GPUImagePixellatePositionFilter.m
├── GPUImagePoissonBlendFilter.h
├── GPUImagePoissonBlendFilter.m
├── GPUImagePolarPixellateFilter.h
├── GPUImagePolarPixellateFilter.m
├── GPUImagePolkaDotFilter.h
├── GPUImagePolkaDotFilter.m
├── GPUImagePosterizeFilter.h
├── GPUImagePosterizeFilter.m
├── GPUImagePrewittEdgeDetectionFilter.h
├── GPUImagePrewittEdgeDetectionFilter.m
├── GPUImageRGBClosingFilter.h
├── GPUImageRGBClosingFilter.m
├── GPUImageRGBDilationFilter.h
├── GPUImageRGBDilationFilter.m
├── GPUImageRGBErosionFilter.h
├── GPUImageRGBErosionFilter.m
├── GPUImageRGBFilter.h
├── GPUImageRGBFilter.m
├── GPUImageRGBOpeningFilter.h
├── GPUImageRGBOpeningFilter.m
├── GPUImageRawDataInput.h
├── GPUImageRawDataInput.m
├── GPUImageRawDataOutput.h
├── GPUImageRawDataOutput.m
├── GPUImageSaturationBlendFilter.h
├── GPUImageSaturationBlendFilter.m
├── GPUImageSaturationFilter.h
├── GPUImageSaturationFilter.m
├── GPUImageScreenBlendFilter.h
├── GPUImageScreenBlendFilter.m
├── GPUImageSepiaFilter.h
├── GPUImageSepiaFilter.m
├── GPUImageSharpenFilter.h
├── GPUImageSharpenFilter.m
├── GPUImageShiTomasiFeatureDetectionFilter.h
├── GPUImageShiTomasiFeatureDetectionFilter.m
├── GPUImageSingleComponentGaussianBlurFilter.h
├── GPUImageSingleComponentGaussianBlurFilter.m
├── GPUImageSketchFilter.h
├── GPUImageSketchFilter.m
├── GPUImageSkinToneFilter.h
├── GPUImageSkinToneFilter.m
├── GPUImageSmoothToonFilter.h
├── GPUImageSmoothToonFilter.m
├── GPUImageSobelEdgeDetectionFilter.h
├── GPUImageSobelEdgeDetectionFilter.m
├── GPUImageSoftEleganceFilter.h
├── GPUImageSoftEleganceFilter.m
├── GPUImageSoftLightBlendFilter.h
├── GPUImageSoftLightBlendFilter.m
├── GPUImageSolarizeFilter.h
├── GPUImageSolarizeFilter.m
├── GPUImageSolidColorGenerator.h
├── GPUImageSolidColorGenerator.m
├── GPUImageSourceOverBlendFilter.h
├── GPUImageSourceOverBlendFilter.m
├── GPUImageSphereRefractionFilter.h
├── GPUImageSphereRefractionFilter.m
├── GPUImageStillCamera.h
├── GPUImageStillCamera.m
├── GPUImageStretchDistortionFilter.h
├── GPUImageStretchDistortionFilter.m
├── GPUImageSubtractBlendFilter.h
├── GPUImageSubtractBlendFilter.m
├── GPUImageSwirlFilter.h
├── GPUImageSwirlFilter.m
├── GPUImageTextureInput.h
├── GPUImageTextureInput.m
├── GPUImageTextureOutput.h
├── GPUImageTextureOutput.m
├── GPUImageThreeInputFilter.h
├── GPUImageThreeInputFilter.m
├── GPUImageThresholdEdgeDetectionFilter.h
├── GPUImageThresholdEdgeDetectionFilter.m
├── GPUImageThresholdSketchFilter.h
├── GPUImageThresholdSketchFilter.m
├── GPUImageThresholdedNonMaximumSuppressionFilter.h
├── GPUImageThresholdedNonMaximumSuppressionFilter.m
├── GPUImageTiltShiftFilter.h
├── GPUImageTiltShiftFilter.m
├── GPUImageToneCurveFilter.h
├── GPUImageToneCurveFilter.m
├── GPUImageToonFilter.h
├── GPUImageToonFilter.m
├── GPUImageTransformFilter.h
├── GPUImageTransformFilter.m
├── GPUImageTwoInputCrossTextureSamplingFilter.h
├── GPUImageTwoInputCrossTextureSamplingFilter.m
├── GPUImageTwoInputFilter.h
├── GPUImageTwoInputFilter.m
├── GPUImageTwoPassFilter.h
├── GPUImageTwoPassFilter.m
├── GPUImageTwoPassTextureSamplingFilter.h
├── GPUImageTwoPassTextureSamplingFilter.m
├── GPUImageUIElement.h
├── GPUImageUIElement.m
├── GPUImageUnsharpMaskFilter.h
├── GPUImageUnsharpMaskFilter.m
├── GPUImageVibranceFilter.h
├── GPUImageVibranceFilter.m
├── GPUImageVideoCamera.h
├── GPUImageVideoCamera.m
├── GPUImageVignetteFilter.h
├── GPUImageVignetteFilter.m
├── GPUImageVoronoiConsumerFilter.h
├── GPUImageVoronoiConsumerFilter.m
├── GPUImageWeakPixelInclusionFilter.h
├── GPUImageWeakPixelInclusionFilter.m
├── GPUImageWhiteBalanceFilter.h
├── GPUImageWhiteBalanceFilter.m
├── GPUImageXYDerivativeFilter.h
├── GPUImageXYDerivativeFilter.m
├── GPUImageZoomBlurFilter.h
├── GPUImageZoomBlurFilter.m
├── GPUImageiOSBlurFilter.h
├── GPUImageiOSBlurFilter.m
└── iOS
│ ├── Framework
│ ├── GPUImageFramework.h
│ └── module.modulemap
│ ├── GPUImage-Prefix.pch
│ ├── GPUImageContext.h
│ ├── GPUImageContext.m
│ ├── GPUImageMovieWriter.h
│ ├── GPUImageMovieWriter.m
│ ├── GPUImagePicture+TextureSubimage.h
│ ├── GPUImagePicture+TextureSubimage.m
│ ├── GPUImagePicture.h
│ ├── GPUImagePicture.m
│ ├── GPUImageView.h
│ └── GPUImageView.m
├── UPAVCapturer
├── Class
│ ├── AudioMonitorPlayer.h
│ ├── AudioMonitorPlayer.m
│ ├── UPAudioCapture.h
│ ├── UPAudioCapture.m
│ ├── UPAudioGraph.h
│ ├── UPAudioGraph.m
│ ├── UPVideoCapture.h
│ ├── UPVideoCapture.m
│ └── focus.png
├── ImageProcessor
│ ├── Custom Filters
│ │ ├── FILTERSIMAGE
│ │ │ ├── 1977blowout.png
│ │ │ ├── 1977map.png
│ │ │ ├── amaroMap.png
│ │ │ ├── blackboard1024.png
│ │ │ ├── brannanBlowout.png
│ │ │ ├── brannanContrast.png
│ │ │ ├── brannanLuma.png
│ │ │ ├── brannanProcess.png
│ │ │ ├── brannanScreen.png
│ │ │ ├── earlyBirdCurves.png
│ │ │ ├── earlybirdBlowout.png
│ │ │ ├── earlybirdMap.png
│ │ │ ├── earlybirdOverlayMap.png
│ │ │ ├── edgeBurn.png
│ │ │ ├── hefeGradientMap.png
│ │ │ ├── hefeMap.png
│ │ │ ├── hefeMetal.png
│ │ │ ├── hefeSoftLight.png
│ │ │ ├── hudsonBackground.png
│ │ │ ├── hudsonMap.png
│ │ │ ├── inkwellMap.png
│ │ │ ├── kelvinMap.png
│ │ │ ├── lomoMap.png
│ │ │ ├── lookup.png
│ │ │ ├── lookup_amatorka.png
│ │ │ ├── lookup_miss_etikate.png
│ │ │ ├── lookup_soft_elegance_1.png
│ │ │ ├── lookup_soft_elegance_2.png
│ │ │ ├── nashvilleMap.png
│ │ │ ├── overlayMap.png
│ │ │ ├── riseMap.png
│ │ │ ├── sierraMap.png
│ │ │ ├── sierraVignette.png
│ │ │ ├── softLight.png
│ │ │ ├── sutroCurves.png
│ │ │ ├── sutroEdgeBurn.png
│ │ │ ├── sutroMetal.png
│ │ │ ├── toasterColorShift.png
│ │ │ ├── toasterCurves.png
│ │ │ ├── toasterMetal.png
│ │ │ ├── toasterOverlayMapWarm.png
│ │ │ ├── toasterSoftLight.png
│ │ │ ├── valenciaGradientMap.png
│ │ │ ├── valenciaMap.png
│ │ │ ├── vignetteMap.png
│ │ │ ├── waldenMap.png
│ │ │ └── xproMap.png
│ │ ├── FW1977Filter.h
│ │ ├── FW1977Filter.m
│ │ ├── FWAmaroFilter.h
│ │ ├── FWAmaroFilter.m
│ │ ├── FWBrannanFilter.h
│ │ ├── FWBrannanFilter.m
│ │ ├── FWEarlybirdFilter.h
│ │ ├── FWEarlybirdFilter.m
│ │ ├── FWFiveInputFilter.h
│ │ ├── FWFiveInputFilter.m
│ │ ├── FWHefeFilter.h
│ │ ├── FWHefeFilter.m
│ │ ├── FWHudsonFilter.h
│ │ ├── FWHudsonFilter.m
│ │ ├── FWInkwellFilter.h
│ │ ├── FWInkwellFilter.m
│ │ ├── FWLomofiFilter.h
│ │ ├── FWLomofiFilter.m
│ │ ├── FWLordKelvinFilter.h
│ │ ├── FWLordKelvinFilter.m
│ │ ├── FWNashvilleFilter.h
│ │ ├── FWNashvilleFilter.m
│ │ ├── FWRiseFilter.h
│ │ ├── FWRiseFilter.m
│ │ ├── FWSierraFilter.h
│ │ ├── FWSierraFilter.m
│ │ ├── FWSixInputFilter.h
│ │ ├── FWSixInputFilter.m
│ │ ├── FWSutroFilter.h
│ │ ├── FWSutroFilter.m
│ │ ├── FWToasterFilter.h
│ │ ├── FWToasterFilter.m
│ │ ├── FWValenciaFilter.h
│ │ ├── FWValenciaFilter.m
│ │ ├── FWWaldenFilter.h
│ │ ├── FWWaldenFilter.m
│ │ ├── FWXproIIFilter.h
│ │ ├── FWXproIIFilter.m
│ │ ├── UPCustonFilters.h
│ │ └── UPCustonFilters.m
│ └── VideoFilter
│ │ ├── GPUImageBeautifyFilter.h
│ │ ├── GPUImageBeautifyFilter.m
│ │ ├── LFGPUImageBeautyFilter.h
│ │ └── LFGPUImageBeautyFilter.m
├── UPAVCapturer.h
└── UPAVCapturer.m
└── UPLiveSDKDll.framework
├── Headers
├── AudioProcessor.h
├── RtcManager.h
├── UPAVPlayer.h
├── UPAVStreamer.h
├── UPLiveSDKConfig.h
└── UPLiveSDKLogger.h
├── Info.plist
└── UPLiveSDKDll
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## Build generated
6 | build/
7 | DerivedData/
8 |
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 |
20 | ## Other
21 | *.moved-aside
22 | *.xccheckout
23 | *.xcscmblueprint
24 |
25 | ## Obj-C/Swift specific
26 | *.hmap
27 | *.ipa
28 |
29 | # CocoaPods
30 | #
31 | # We recommend against adding the Pods directory to your .gitignore. However
32 | # you should judge for yourself, the pros and cons are mentioned at:
33 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
34 | #
35 | # Pods/
36 |
37 | # Carthage
38 | #
39 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
40 | # Carthage/Checkouts
41 |
42 | Carthage/Build
43 |
44 | # fastlane
45 | #
46 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
47 | # screenshots whenever they are needed.
48 | # For more information about the recommended setup visit:
49 | # https://github.com/fastlane/fastlane/blob/master/docs/Gitignore.md
50 |
51 | fastlane/report.xml
52 | fastlane/screenshots
--------------------------------------------------------------------------------
/ReplayKit_Live.md:
--------------------------------------------------------------------------------
1 | ### Overview
2 | If the `ReplayKit` concepts are unfamiliar, this detailed article is a good starting point: [iOS 10 ReplayKit Live 与 Broadcast UI/Upload Extension](http://blog.lessfun.com/blog/2016/09/21/ios-10-replaykit-live-and-broadcast-extension/)
3 |
4 | ### Usage
5 | [Recording app](http://test654123.b0.upaiyun.com/UPLiveSDKDemo.zip)
6 |
7 | [App to be recorded](https://github.com/Mobcrush/ReplayKitDemo): a game `demo` used for testing.
8 |
9 | First install the recording `demo (UPLiveSDKDemo)` on the device, then download and `build` the app to be recorded (the game `demo` above). In the game, tap the button that looks like a `Wi-Fi` icon, choose "UPYUN录屏验证", and wait for the `Extension` to start.
10 |
11 | ### Recommendations
12 |
13 | Environment: `iOS 10.0` and `Xcode 8.0` or later
14 |
15 | 1. iOS 10.1 and later (10.1 included) run most stably; 10.0 also works, but the displayed picture is slightly worse.
16 |
17 | 2. A newly created `Broadcast Upload Extension` needs its `Info.plist` adjusted; for details, see the demo or the [comments](http://blog.lessfun.com/blog/2016/09/21/ios-10-replaykit-live-and-broadcast-extension/) on the article recommended above.
18 |
19 | 3. The `Broadcast Upload Extension` should use a singleton (the `demo` already provides the `Uploader` class for this).
20 |
21 | 4. When creating the `Broadcast Upload Extension`, you can check `include UI Extension` so that a separate `UI Extension` does not need to be created; choose `activate` after creation.
22 |
23 | 5. Because the `App` and the `Extension` do not share code, the newly created `Broadcast Upload Extension` needs its own project dependencies (if our `SDK` has already been added, the files do not need to be copied again, but the `Extension`'s dependent libraries still have to be configured). For how to add them, see the demo's [project dependencies](https://github.com/upyun/ios-live-sdk#%E5%B7%A5%E7%A8%8B%E4%BE%9D%E8%B5%96).
24 |
25 | __Note__: `bitcode` is not supported.
26 |
27 | For screen-recording related requirements or questions, feel free to contact us; we will answer and provide support.
28 | Email: livesdk@upai.com
29 | QQ: 3392887145
30 |
--------------------------------------------------------------------------------
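The notes above describe wiring a Broadcast Upload Extension to a singleton uploader. Below is a minimal, hypothetical sketch of such a handler; the `Uploader` interface is an assumption standing in for the demo's own `Uploader` class, and the push URL is a placeholder.

// SampleHandler.m -- hypothetical sketch, not the demo's actual Uploader.
#import <ReplayKit/ReplayKit.h>

@interface Uploader : NSObject
+ (instancetype)sharedUploader;                 // singleton, as recommended in point 3 above
- (void)startWithPushURL:(NSString *)url;
- (void)pushVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)pushAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)stop;
@end

@implementation Uploader
+ (instancetype)sharedUploader {
    static Uploader *shared;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{ shared = [Uploader new]; });
    return shared;
}
// The real demo would forward these calls to UPAVStreamer; they are left empty in this sketch.
- (void)startWithPushURL:(NSString *)url {}
- (void)pushVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {}
- (void)pushAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer {}
- (void)stop {}
@end

@interface SampleHandler : RPBroadcastSampleHandler
@end

@implementation SampleHandler

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
    // Placeholder RTMP address; use your own push path.
    [[Uploader sharedUploader] startWithPushURL:@"rtmp://example.com/live/streamId"];
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
                   withType:(RPSampleBufferType)sampleBufferType {
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            [[Uploader sharedUploader] pushVideoSampleBuffer:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
            [[Uploader sharedUploader] pushAudioSampleBuffer:sampleBuffer];
            break;
        default:
            break;  // microphone audio is ignored in this sketch
    }
}

- (void)broadcastFinished {
    [[Uploader sharedUploader] stop];
}

@end
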
/UPLiveSDKDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | PreviewsEnabled
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/4/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // UPAVPlayerDemo
4 | //
5 | // Created by DING FENG on 2/16/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 | #import "UPLiveSDKDemoHomeVC.h"
11 |
12 |
13 |
14 |
15 | @interface AppDelegate ()
16 |
17 | @end
18 |
19 | @implementation AppDelegate
20 |
21 |
22 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
23 |
24 | UPLiveSDKDemoHomeVC *vc = [[UPLiveSDKDemoHomeVC alloc] init];
25 | UINavigationController *navController = [[UINavigationController alloc] initWithRootViewController:vc];
26 | self.window.rootViewController = navController;
27 |
28 | return YES;
29 | }
30 |
31 |
32 |
33 | @end
34 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | }
43 | ],
44 | "info" : {
45 | "version" : 1,
46 | "author" : "xcode"
47 | }
48 | }
--------------------------------------------------------------------------------
/UPLiveSDKDemo/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/BeautifyFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // BeautifyFilter.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 6/21/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import
10 | #import "GPUImageBeautifyFilter.h"
11 | #import
12 |
13 |
14 | typedef void(^TextChange)(NSString *text);
15 |
16 | @interface BeautifyFilter : NSObject
17 | @property (nonatomic) int level;
18 | @property (nonatomic, strong)GPUImageUIElement *UIElement;
19 | @property (nonatomic, copy)TextChange change;
20 |
21 | - (CGImageRef)filterImage:(CGImageRef)image;
22 |
23 | - (CGImageRef)filterImageWithWatermark:(CGImageRef)image;
24 |
25 | - (CGImageRef)imageWithWatermark:(CGImageRef)image;
26 |
27 | @end
28 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/LiveVC.h:
--------------------------------------------------------------------------------
1 | //
2 | // LiveVC.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 20/07/2017.
6 | // Copyright © 2017 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface LiveVC : UIViewController
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/PlayerVC.h:
--------------------------------------------------------------------------------
1 | //
2 | // PlayerVC.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 20/07/2017.
6 | // Copyright © 2017 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface PlayerVC : UIViewController
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/PlayerVC.m:
--------------------------------------------------------------------------------
1 | //
2 | // PlayerVC.m
3 | // UPLiveSDKDemo
4 | //
5 | // Copyright © 2017 upyun.com. All rights reserved.
6 |
7 | #import "PlayerVC.h"
8 | #import <UPLiveSDKDll/UPAVPlayer.h>
9 |
10 | @interface PlayerVC () <UPAVPlayerDelegate>
11 | {
12 | UPAVPlayer *_player;
13 | }
14 | @end
15 |
16 | @implementation PlayerVC
17 |
18 | - (void)viewDidLoad {
19 | [super viewDidLoad];
20 | }
21 |
22 | - (void)viewWillAppear:(BOOL)animated {
23 | //1. Initialize the player
24 | _player = [[UPAVPlayer alloc] initWithURL:@"rtmp://live.hkstv.hk.lxdns.com/live/hks"];
25 |
26 | //2. Set the delegate to receive callbacks for playback errors, progress and state changes
27 | _player.delegate = self;
28 |
29 | //3. Set the frame of the player's playView
30 | [_player setFrame:self.view.bounds];
31 |
32 | //4. Add the player's playView to the view hierarchy
33 | [self.view insertSubview:_player.playView atIndex:0];
34 |
35 | //5. Start playback
36 | [_player play];
37 | }
38 |
39 | - (void)viewWillDisappear:(BOOL)animated {
40 | //6. When leaving this page the player must be stopped; only then is it released.
41 | [_player stop];
42 | }
43 |
44 | #pragma mark UPAVPlayerDelegate
45 | - (void)player:(UPAVPlayer *)player playerError:(NSError *)error {
46 | //7. Handle playback errors.
47 | UIAlertController* alert = [UIAlertController alertControllerWithTitle:@"播放失败" message:error.description preferredStyle:UIAlertControllerStyleAlert];
48 | UIAlertAction* defaultAction = [UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleDefault handler:nil];
49 | [alert addAction:defaultAction];
50 | [self presentViewController:alert animated:YES completion:nil];
51 | }
52 |
53 |
54 | #pragma mark Play / Stop buttons
55 | - (IBAction)playBtnTap:(id)sender {
56 | //8. Play button.
57 | [_player play];
58 | }
59 |
60 | - (IBAction)stopBtnTap:(id)sender {
61 | //9. Stop button.
62 | [_player stop];
63 | }
64 |
65 | @end
66 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/UPLivePlayerDemoViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLivePlayerDemoViewController.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/19/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface UPLivePlayerDemoViewController : UIViewController
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/UPLivePlayerVC.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLivePlayerVC.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/20/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface UPLivePlayerVC : UIViewController
12 |
13 | @property (nonatomic, strong) NSString *url;
14 | @property (nonatomic) int bufferingTime;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/UPLiveSDKDemoHomeVC.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLiveSDKDemoHomeVC.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 20/07/2017.
6 | // Copyright © 2017 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface UPLiveSDKDemoHomeVC : UIViewController
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/UPLiveSDKDemoHomeVC.m:
--------------------------------------------------------------------------------
1 | //
2 | // UPLiveSDKDemoHomeVC.m
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 20/07/2017.
6 | // Copyright © 2017 upyun.com. All rights reserved.
7 | //
8 |
9 | #import "UPLiveSDKDemoHomeVC.h"
10 |
11 |
12 | #import "UPLivePlayerDemoViewController.h"//包括播放器详细功能设置
13 | #import "UPLiveStreamerDemoViewController.h"//包括推流器详细功能设置,和连麦演示功能
14 |
15 | #import "LiveVC.h"//精简版推流页面。不包括连麦,美颜,混音等功能逻辑
16 | #import "PlayerVC.h"//精简版播放界面。
17 |
18 | @interface UPLiveSDKDemoHomeVC ()
19 |
20 | @end
21 |
22 | @implementation UPLiveSDKDemoHomeVC
23 |
24 | - (void)viewDidLoad {
25 | self.title = @"又拍云直播SDK";
26 | }
27 |
28 | - (IBAction)playerBtn1Tap:(id)sender {
29 | UPLivePlayerDemoViewController *vc = [UPLivePlayerDemoViewController new];
30 | vc.title = @"播放器";
31 | vc.view.backgroundColor = [UIColor whiteColor];
32 | [self.navigationController pushViewController:vc animated:YES];
33 | }
34 | - (IBAction)streamerBtn1Tap:(id)sender {
35 | UPLiveStreamerDemoViewController *vc = [UPLiveStreamerDemoViewController new];
36 | vc.title = @"推流器";
37 | vc.view.backgroundColor = [UIColor whiteColor];
38 | [self.navigationController pushViewController:vc animated:YES];
39 | }
40 |
41 |
42 | - (IBAction)playerBtn2Tap:(id)sender {
43 | PlayerVC *vc = [PlayerVC new];
44 | vc.title = @"播放";
45 | vc.view.backgroundColor = [UIColor whiteColor];
46 | [self.navigationController pushViewController:vc animated:YES];
47 | }
48 | - (IBAction)streamerBtn2Tap:(id)sender {
49 | LiveVC *vc = [LiveVC new];
50 | vc.title = @"直播";
51 | vc.view.backgroundColor = [UIColor whiteColor];
52 | [self.navigationController pushViewController:vc animated:YES];
53 | }
54 |
55 | @end
56 |
57 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/UPLiveStreamerDemoViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLiveStreamerDemoViewController.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/19/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "UPAVCapturer.h"
11 |
12 | @interface Settings : NSObject
13 |
14 |
15 | @property (nonatomic, strong) NSString *streamId;
16 | @property (nonatomic, strong) NSString *rtmpServerPushPath;
17 | @property (nonatomic, strong) NSString *rtmpServerPlayPath;
18 | @property (nonatomic) int fps; // capture frame rate
19 | @property (nonatomic) BOOL beautifyOn; // beautify filter on/off
20 | @property (nonatomic) BOOL streamingOn; // whether streaming is on (when off, video is still captured but not pushed)
21 | @property (nonatomic) BOOL camaraTorchOn; // camera torch on/off
22 | @property (nonatomic) AVCaptureDevicePosition camaraPosition; // front or back camera
23 | @property (nonatomic) AVCaptureVideoOrientation videoOrientation; // capture orientation (portrait or landscape)
24 | @property (nonatomic) UPAVCapturerPresetLevel level;
25 | @property (nonatomic, assign) int filterLevel;
26 | @property (nonatomic) BOOL fullScreenPreviewOn;
27 |
28 | @end
29 |
30 |
31 | @interface UPLiveStreamerDemoViewController : UIViewController
32 | @property (nonatomic, strong) Settings *settings;
33 | @end
34 |
--------------------------------------------------------------------------------
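As a hedged illustration of the Settings model declared above (not code from the demo): the values below are placeholders, and the preset/filter level constants defined in UPAVCapturer.h are deliberately left out since that header is not shown here.

// Illustrative only: configure the demo's Settings model and hand it to the living view controller.
#import <AVFoundation/AVFoundation.h>
#import "UPLiveStreamerDemoViewController.h"
#import "UPLiveStreamerLivingVC.h"

static UPLiveStreamerLivingVC *MakeLivingViewController(void) {
    Settings *settings = [Settings new];
    settings.streamId = @"yourStreamId";                        // placeholder
    settings.rtmpServerPushPath = @"rtmp://example.com/live/";  // placeholder push path
    settings.fps = 24;                                          // capture frame rate
    settings.beautifyOn = YES;                                  // enable the beautify filter
    settings.streamingOn = YES;                                 // capture and push
    settings.camaraTorchOn = NO;
    settings.camaraPosition = AVCaptureDevicePositionBack;
    settings.videoOrientation = AVCaptureVideoOrientationPortrait;
    // settings.level and settings.filterLevel take constants from UPAVCapturer.h (not shown here).

    UPLiveStreamerLivingVC *livingVC = [UPLiveStreamerLivingVC new];
    livingVC.settings = settings;
    return livingVC;
}
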
/UPLiveSDKDemo/UPLiveStreamerLivingVC.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLiveStreamerLivingVC.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/19/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "UPLiveStreamerDemoViewController.h"
11 |
12 |
13 | @interface UPLiveStreamerLivingVC : UIViewController
14 | @property (nonatomic, strong) Settings *settings;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/UPLiveStreamerSettingVC.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLiveStreamerSettingVC.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/20/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "UPLiveStreamerDemoViewController.h"
11 |
12 |
13 |
14 | @interface UPLiveStreamerSettingVC : UIViewController
15 | @property (nonatomic, weak) UPLiveStreamerDemoViewController *demoVC;
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/huzi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveSDKDemo/huzi.png
--------------------------------------------------------------------------------
/UPLiveSDKDemo/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 5/4/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/UPLiveSDKDemo/upyun_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveSDKDemo/upyun_logo.png
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GLProgram.h:
--------------------------------------------------------------------------------
1 | // This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
2 | // A description of this can be found at his page on the topic:
3 | // http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
4 | // I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
5 |
6 | #import <Foundation/Foundation.h>
7 |
8 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
9 | #import <OpenGLES/ES2/gl.h>
10 | #import <OpenGLES/ES2/glext.h>
11 | #else
12 | #import <OpenGL/OpenGL.h>
13 | #import <OpenGL/gl.h>
14 | #endif
15 |
16 | @interface GLProgram : NSObject
17 | {
18 | NSMutableArray *attributes;
19 | NSMutableArray *uniforms;
20 | GLuint program,
21 | vertShader,
22 | fragShader;
23 | }
24 |
25 | @property(readwrite, nonatomic) BOOL initialized;
26 | @property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
27 | @property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;
28 | @property(readwrite, copy, nonatomic) NSString *programLog;
29 |
30 | - (id)initWithVertexShaderString:(NSString *)vShaderString
31 | fragmentShaderString:(NSString *)fShaderString;
32 | - (id)initWithVertexShaderString:(NSString *)vShaderString
33 | fragmentShaderFilename:(NSString *)fShaderFilename;
34 | - (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
35 | fragmentShaderFilename:(NSString *)fShaderFilename;
36 | - (void)addAttribute:(NSString *)attributeName;
37 | - (GLuint)attributeIndex:(NSString *)attributeName;
38 | - (GLuint)uniformIndex:(NSString *)uniformName;
39 | - (BOOL)link;
40 | - (void)use;
41 | - (void)validate;
42 | @end
43 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImage3x3ConvolutionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | /** Runs a 3x3 convolution kernel against the image
4 | */
5 | @interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
6 | {
7 | GLint convolutionMatrixUniform;
8 | }
9 |
10 | /** Convolution kernel to run against the image
11 |
12 | The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels.
13 | The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three
14 | If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
15 | */
16 | @property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
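A small usage example for the kernel property documented above (a sketch, assuming GPUImage's standard GPUMatrix3x3 struct layout): a sharpen kernel whose entries sum to 1.0, so overall brightness is preserved.

#import "GPUImage3x3ConvolutionFilter.h"

// Sharpen kernel; rows are given in row-major order, and the entries sum to 1.0.
static GPUImage3x3ConvolutionFilter *MakeSharpenConvolutionFilter(void) {
    GPUImage3x3ConvolutionFilter *convolution = [[GPUImage3x3ConvolutionFilter alloc] init];
    GPUMatrix3x3 sharpen = {
        { 0.0f, -1.0f,  0.0f },
        {-1.0f,  5.0f, -1.0f },
        { 0.0f, -1.0f,  0.0f }
    };
    convolution.convolutionKernel = sharpen;
    return convolution;
}
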
/UPLiveService/GPUImage/GPUImage3x3TextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
4 |
5 | @interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
6 | {
7 | GLint texelWidthUniform, texelHeightUniform;
8 |
9 | CGFloat texelWidth, texelHeight;
10 | BOOL hasOverriddenImageSizeFactor;
11 | }
12 |
13 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
14 | @property(readwrite, nonatomic) CGFloat texelWidth;
15 | @property(readwrite, nonatomic) CGFloat texelHeight;
16 |
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAdaptiveThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup
4 |
5 | /** A multiplier for the background averaging blur radius in pixels, with a default of 4
6 | */
7 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAddBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageAddBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAlphaBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
4 | {
5 | GLint mixUniform;
6 | }
7 |
8 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat mix;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAmatorkaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Photoshop action by Amatorka
6 | http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631
7 | */
8 |
9 | // Note: If you want to use this effect you have to add lookup_amatorka.png
10 | // from Resources folder to your application bundle.
11 |
12 | @interface GPUImageAmatorkaFilter : GPUImageFilterGroup
13 | {
14 | GPUImagePicture *lookupImageSource;
15 | }
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAmatorkaFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageAmatorkaFilter.h"
2 | #import "GPUImagePicture.h"
3 | #import "GPUImageLookupFilter.h"
4 |
5 | @implementation GPUImageAmatorkaFilter
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [super init]))
10 | {
11 | return nil;
12 | }
13 |
14 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
15 | UIImage *image = [UIImage imageNamed:@"lookup_amatorka.png"];
16 | #else
17 | NSImage *image = [NSImage imageNamed:@"lookup_amatorka.png"];
18 | #endif
19 |
20 | NSAssert(image, @"To use GPUImageAmatorkaFilter you need to add lookup_amatorka.png from GPUImage/framework/Resources to your application bundle.");
21 |
22 | lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
23 | GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
24 | [self addFilter:lookupFilter];
25 |
26 | [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
27 | [lookupImageSource processImage];
28 |
29 | self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
30 | self.terminalFilter = lookupFilter;
31 |
32 | return self;
33 | }
34 |
35 | #pragma mark -
36 | #pragma mark Accessors
37 |
38 | @end
39 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAverageColor.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageColorAveragingVertexShaderString;
4 |
5 | @interface GPUImageAverageColor : GPUImageFilter
6 | {
7 | GLint texelWidthUniform, texelHeightUniform;
8 |
9 | NSUInteger numberOfStages;
10 |
11 | GLubyte *rawImagePixels;
12 | CGSize finalStageSize;
13 | }
14 |
15 | // This block is called on the completion of color averaging for a frame
16 | @property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);
17 |
18 | - (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
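A short sketch of hooking the completion block declared above; the source it is attached to (e.g. a GPUImageVideoCamera) is assumed to be set up elsewhere.

#import "GPUImageAverageColor.h"

// Log the per-frame average color reported by GPUImageAverageColor.
static GPUImageAverageColor *MakeAverageColorProbe(void) {
    GPUImageAverageColor *averageColor = [[GPUImageAverageColor alloc] init];
    [averageColor setColorAverageProcessingFinishedBlock:^(CGFloat red, CGFloat green, CGFloat blue, CGFloat alpha, CMTime frameTime) {
        NSLog(@"average color r=%.3f g=%.3f b=%.3f a=%.3f", red, green, blue, alpha);
    }];
    // Attach it to a source, e.g. [videoCamera addTarget:averageColor];
    return averageColor;
}
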
/UPLiveService/GPUImage/GPUImageAverageLuminanceThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup
4 |
5 | // This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.
6 | @property(readwrite, nonatomic) CGFloat thresholdMultiplier;
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageAverageLuminanceThresholdFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageAverageLuminanceThresholdFilter.h"
2 | #import "GPUImageLuminosity.h"
3 | #import "GPUImageLuminanceThresholdFilter.h"
4 |
5 | @interface GPUImageAverageLuminanceThresholdFilter()
6 | {
7 | GPUImageLuminosity *luminosityFilter;
8 | GPUImageLuminanceThresholdFilter *luminanceThresholdFilter;
9 | }
10 | @end
11 |
12 | @implementation GPUImageAverageLuminanceThresholdFilter
13 |
14 | @synthesize thresholdMultiplier = _thresholdMultiplier;
15 |
16 | #pragma mark -
17 | #pragma mark Initialization and teardown
18 |
19 | - (id)init;
20 | {
21 | if (!(self = [super init]))
22 | {
23 | return nil;
24 | }
25 |
26 | self.thresholdMultiplier = 1.0;
27 |
28 | luminosityFilter = [[GPUImageLuminosity alloc] init];
29 | [self addFilter:luminosityFilter];
30 |
31 | luminanceThresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init];
32 | [self addFilter:luminanceThresholdFilter];
33 |
34 | __unsafe_unretained GPUImageAverageLuminanceThresholdFilter *weakSelf = self;
35 | __unsafe_unretained GPUImageLuminanceThresholdFilter *weakThreshold = luminanceThresholdFilter;
36 |
37 | [luminosityFilter setLuminosityProcessingFinishedBlock:^(CGFloat luminosity, CMTime frameTime) {
38 | weakThreshold.threshold = luminosity * weakSelf.thresholdMultiplier;
39 | }];
40 |
41 | self.initialFilters = [NSArray arrayWithObjects:luminosityFilter, luminanceThresholdFilter, nil];
42 | self.terminalFilter = luminanceThresholdFilter;
43 |
44 | return self;
45 | }
46 |
47 | @end
48 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageBilateralFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageGaussianBlurFilter.h"
2 |
3 | @interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
4 | {
5 | CGFloat firstDistanceNormalizationFactorUniform;
6 | CGFloat secondDistanceNormalizationFactorUniform;
7 | }
8 | // A normalization factor for the distance between central color and sample color.
9 | @property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;
10 | @end
11 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageBoxBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageGaussianBlurFilter.h"
2 |
3 | /** A hardware-accelerated box blur of an image
4 | */
5 | @interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageBrightnessFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageBrightnessFilter : GPUImageFilter
4 | {
5 | GLint brightnessUniform;
6 | }
7 |
8 | // Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat brightness;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageBrightnessFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageBrightnessFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 | uniform lowp float brightness;
10 |
11 | void main()
12 | {
13 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
14 |
15 | gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
16 | }
17 | );
18 | #else
19 | NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
20 | (
21 | varying vec2 textureCoordinate;
22 |
23 | uniform sampler2D inputImageTexture;
24 | uniform float brightness;
25 |
26 | void main()
27 | {
28 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
29 |
30 | gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
31 | }
32 | );
33 | #endif
34 |
35 | @implementation GPUImageBrightnessFilter
36 |
37 | @synthesize brightness = _brightness;
38 |
39 | #pragma mark -
40 | #pragma mark Initialization and teardown
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageBrightnessFragmentShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | brightnessUniform = [filterProgram uniformIndex:@"brightness"];
50 | self.brightness = 0.0;
51 |
52 | return self;
53 | }
54 |
55 | #pragma mark -
56 | #pragma mark Accessors
57 |
58 | - (void)setBrightness:(CGFloat)newValue;
59 | {
60 | _brightness = newValue;
61 |
62 | [self setFloat:_brightness forUniform:brightnessUniform program:filterProgram];
63 | }
64 |
65 | @end
66 |
67 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageBuffer.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageBuffer : GPUImageFilter
4 | {
5 | NSMutableArray *bufferedFramebuffers;
6 | }
7 |
8 | @property(readwrite, nonatomic) NSUInteger bufferSize;
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageBulgeDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /// Creates a bulge distortion on the image
4 | @interface GPUImageBulgeDistortionFilter : GPUImageFilter
5 | {
6 | GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
7 | }
8 |
9 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
10 | @property(readwrite, nonatomic) CGPoint center;
11 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
12 | @property(readwrite, nonatomic) CGFloat radius;
13 | /// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
14 | @property(readwrite, nonatomic) CGFloat scale;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageCGAColorspaceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCGAColorspaceFilter : GPUImageFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageChromaKeyBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /** Selectively replaces a color in the first image with the second image
4 | */
5 | @interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
6 | {
7 | GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
8 | }
9 |
10 | /** The threshold sensitivity controls how similar pixels need to be colored to be replaced
11 |
12 | The default value is 0.3
13 | */
14 | @property(readwrite, nonatomic) CGFloat thresholdSensitivity;
15 |
16 | /** The degree of smoothing controls how gradually similar colors are replaced in the image
17 |
18 | The default value is 0.1
19 | */
20 | @property(readwrite, nonatomic) CGFloat smoothing;
21 |
22 | /** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
23 |
24 | The default is green: (0.0, 1.0, 0.0).
25 |
26 | @param redComponent Red component of color to be replaced
27 | @param greenComponent Green component of color to be replaced
28 | @param blueComponent Blue component of color to be replaced
29 | */
30 | - (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
31 |
32 | @end
33 |
--------------------------------------------------------------------------------
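A hedged still-image sketch of the parameters documented above: key pure green out of a foreground picture and blend it over a background (standard GPUImage two-input usage, not code from this repository).

#import "GPUImageChromaKeyBlendFilter.h"
#import "GPUImagePicture.h"

// Replace green areas of `foreground` with the corresponding pixels of `background`.
static UIImage *ComposeOverBackground(UIImage *foreground, UIImage *background) {
    GPUImagePicture *foregroundSource = [[GPUImagePicture alloc] initWithImage:foreground];
    GPUImagePicture *backgroundSource = [[GPUImagePicture alloc] initWithImage:background];

    GPUImageChromaKeyBlendFilter *chromaKey = [[GPUImageChromaKeyBlendFilter alloc] init];
    [chromaKey setColorToReplaceRed:0.0 green:1.0 blue:0.0];  // key out pure green
    chromaKey.thresholdSensitivity = 0.4;                     // default is 0.3
    chromaKey.smoothing = 0.1;                                // default is 0.1

    [foregroundSource addTarget:chromaKey];   // first input: image being keyed
    [backgroundSource addTarget:chromaKey];   // second input: replacement image
    [chromaKey useNextFrameForImageCapture];
    [foregroundSource processImage];
    [backgroundSource processImage];
    return [chromaKey imageFromCurrentFramebuffer];
}
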
/UPLiveService/GPUImage/GPUImageChromaKeyFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageChromaKeyFilter : GPUImageFilter
4 | {
5 | GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
6 | }
7 |
8 | /** The threshold sensitivity controls how similar pixels need to be colored to be replaced
9 |
10 | The default value is 0.3
11 | */
12 | @property(readwrite, nonatomic) CGFloat thresholdSensitivity;
13 |
14 | /** The degree of smoothing controls how gradually similar colors are replaced in the image
15 |
16 | The default value is 0.1
17 | */
18 | @property(readwrite, nonatomic) CGFloat smoothing;
19 |
20 | /** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
21 |
22 | The default is green: (0.0, 1.0, 0.0).
23 |
24 | @param redComponent Red component of color to be replaced
25 | @param greenComponent Green component of color to be replaced
26 | @param blueComponent Blue component of color to be replaced
27 | */
28 | - (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
29 |
30 | @end
31 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageClosingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageErosionFilter;
4 | @class GPUImageDilationFilter;
5 |
6 | // A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius.
7 | // This helps to filter out smaller dark elements.
8 |
9 | @interface GPUImageClosingFilter : GPUImageFilterGroup
10 | {
11 | GPUImageErosionFilter *erosionFilter;
12 | GPUImageDilationFilter *dilationFilter;
13 | }
14 |
15 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
16 |
17 | - (id)initWithRadius:(NSUInteger)radius;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageClosingFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageClosingFilter.h"
2 | #import "GPUImageErosionFilter.h"
3 | #import "GPUImageDilationFilter.h"
4 |
5 | @implementation GPUImageClosingFilter
6 |
7 | @synthesize verticalTexelSpacing = _verticalTexelSpacing;
8 | @synthesize horizontalTexelSpacing = _horizontalTexelSpacing;
9 |
10 | - (id)init;
11 | {
12 | if (!(self = [self initWithRadius:1]))
13 | {
14 | return nil;
15 | }
16 |
17 | return self;
18 | }
19 |
20 | - (id)initWithRadius:(NSUInteger)radius;
21 | {
22 | if (!(self = [super init]))
23 | {
24 | return nil;
25 | }
26 |
27 | // First pass: dilation
28 | dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
29 | [self addFilter:dilationFilter];
30 |
31 | // Second pass: erosion
32 | erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
33 | [self addFilter:erosionFilter];
34 |
35 | [dilationFilter addTarget:erosionFilter];
36 |
37 | self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil];
38 | self.terminalFilter = erosionFilter;
39 |
40 | return self;
41 | }
42 |
43 | - (void)setVerticalTexelSpacing:(CGFloat)newValue;
44 | {
45 | _verticalTexelSpacing = newValue;
46 | erosionFilter.verticalTexelSpacing = newValue;
47 | dilationFilter.verticalTexelSpacing = newValue;
48 | }
49 |
50 | - (void)setHorizontalTexelSpacing:(CGFloat)newValue;
51 | {
52 | _horizontalTexelSpacing = newValue;
53 | erosionFilter.horizontalTexelSpacing = newValue;
54 | dilationFilter.horizontalTexelSpacing = newValue;
55 | }
56 |
57 | @end
58 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageColorBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorBurnBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /** Applies a color burn blend of two images
4 | */
5 | @interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
6 | {
7 | }
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorBurnBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorBurnBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 | mediump vec4 whiteColor = vec4(1.0);
17 | gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
33 | vec4 whiteColor = vec4(1.0);
34 | gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageColorBurnBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBurnBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorConversion.h:
--------------------------------------------------------------------------------
1 | #ifndef GPUImageColorConversion_h
2 | #define GPUImageColorConversion_h
3 |
4 | extern GLfloat *kColorConversion601;
5 | extern GLfloat *kColorConversion601FullRange;
6 | extern GLfloat *kColorConversion709;
7 | extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
8 | extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
9 | extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;
10 |
11 |
12 | #endif /* GPUImageColorConversion_h */
13 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorDodgeBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /** Applies a color dodge blend of two images
4 | */
5 | @interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
6 | {
7 | }
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorInvertFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageColorInvertFilter : GPUImageFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorInvertFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorInvertFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 |
10 | void main()
11 | {
12 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
13 |
14 | gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
15 | }
16 | );
17 | #else
18 | NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
19 | (
20 | varying vec2 textureCoordinate;
21 |
22 | uniform sampler2D inputImageTexture;
23 |
24 | void main()
25 | {
26 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
27 |
28 | gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
29 | }
30 | );
31 | #endif
32 |
33 | @implementation GPUImageColorInvertFilter
34 |
35 | - (id)init;
36 | {
37 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString]))
38 | {
39 | return nil;
40 | }
41 |
42 | return self;
43 | }
44 |
45 | @end
46 |
47 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorLocalBinaryPatternFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageColorLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorMatrixFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Transforms the colors of an image by applying a matrix to them
4 | */
5 | @interface GPUImageColorMatrixFilter : GPUImageFilter
6 | {
7 | GLint colorMatrixUniform;
8 | GLint intensityUniform;
9 | }
10 |
11 | /** A 4x4 matrix used to transform each color in an image
12 | */
13 | @property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;
14 |
15 | /** The degree to which the new transformed color replaces the original color for each pixel
16 | */
17 | @property(readwrite, nonatomic) CGFloat intensity;
18 |
19 | @end
20 |
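As a usage illustration of the matrix and intensity properties above, the sketch below (hypothetical, not part of the library) builds a matrix that swaps the red and blue channels and applies it at half strength. It assumes the GPUVector4/GPUMatrix4x4 structs defined elsewhere in this GPUImage copy, accessed with the same nested field names that GPUImageEmbossFilter.m uses for its 3x3 kernel.

    GPUImageColorMatrixFilter *matrixFilter = [[GPUImageColorMatrixFilter alloc] init];

    // A permutation matrix that swaps the red and blue channels.
    GPUMatrix4x4 swapRedBlue;
    swapRedBlue.one   = (GPUVector4){0.0, 0.0, 1.0, 0.0}; // red output taken from blue
    swapRedBlue.two   = (GPUVector4){0.0, 1.0, 0.0, 0.0}; // green passes through
    swapRedBlue.three = (GPUVector4){1.0, 0.0, 0.0, 0.0}; // blue output taken from red
    swapRedBlue.four  = (GPUVector4){0.0, 0.0, 0.0, 1.0}; // alpha passes through

    matrixFilter.colorMatrix = swapRedBlue;
    matrixFilter.intensity = 0.5; // blend the transformed color 50/50 with the original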
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColorPackingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageColorPackingFilter : GPUImageFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 |
7 | CGFloat texelWidth, texelHeight;
8 | }
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColourFASTFeatureDetector.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | // This generates image-wide feature descriptors using the ColourFAST process, as developed and described in
4 | //
5 | // A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.
6 | //
7 | // Seth Hall, "GPU accelerated feature algorithms for mobile devices", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.
8 | // http://aut.researchgateway.ac.nz/handle/10292/7991
9 |
10 | @class GPUImageColourFASTSamplingOperation;
11 | @class GPUImageBoxBlurFilter;
12 |
13 | @interface GPUImageColourFASTFeatureDetector : GPUImageFilterGroup
14 | {
15 | GPUImageBoxBlurFilter *blurFilter;
16 | GPUImageColourFASTSamplingOperation *colourFASTSamplingOperation;
17 | }
18 | // The blur radius of the underlying box blur. The default is 3.0.
19 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColourFASTFeatureDetector.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageColourFASTFeatureDetector.h"
2 | #import "GPUImageColourFASTSamplingOperation.h"
3 | #import "GPUImageBoxBlurFilter.h"
4 |
5 | @implementation GPUImageColourFASTFeatureDetector
6 |
7 | @synthesize blurRadiusInPixels;
8 |
9 | - (id)init;
10 | {
11 | if (!(self = [super init]))
12 | {
13 | return nil;
14 | }
15 |
16 | // First pass: apply a box blur to smooth the input
17 | blurFilter = [[GPUImageBoxBlurFilter alloc] init];
18 | [self addFilter:blurFilter];
19 |
20 | // Second pass: combine the blurred image with the original sharp one
21 | colourFASTSamplingOperation = [[GPUImageColourFASTSamplingOperation alloc] init];
22 | [self addFilter:colourFASTSamplingOperation];
23 |
24 | // Texture location 0 needs to be the sharp image for both the blur and the second stage processing
25 | [blurFilter addTarget:colourFASTSamplingOperation atTextureLocation:1];
26 |
27 | self.initialFilters = [NSArray arrayWithObjects:blurFilter, colourFASTSamplingOperation, nil];
28 | self.terminalFilter = colourFASTSamplingOperation;
29 |
30 | self.blurRadiusInPixels = 3.0;
31 |
32 | return self;
33 | }
34 |
35 | #pragma mark -
36 | #pragma mark Accessors
37 |
38 | - (void)setBlurRadiusInPixels:(CGFloat)newValue;
39 | {
40 | blurFilter.blurRadiusInPixels = newValue;
41 | }
42 |
43 | - (CGFloat)blurRadiusInPixels;
44 | {
45 | return blurFilter.blurRadiusInPixels;
46 | }
47 |
48 | @end
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageColourFASTSamplingOperation.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | // This is the feature extraction phase of the ColourFAST feature detector, as described in:
4 | //
5 | // A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.
6 | //
7 | // Seth Hall, "GPU accelerated feature algorithms for mobile devices", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.
8 | // http://aut.researchgateway.ac.nz/handle/10292/7991
9 |
10 | @interface GPUImageColourFASTSamplingOperation : GPUImageTwoInputFilter
11 | {
12 | GLint texelWidthUniform, texelHeightUniform;
13 |
14 | CGFloat texelWidth, texelHeight;
15 | BOOL hasOverriddenImageSizeFactor;
16 | }
17 |
18 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
19 | @property(readwrite, nonatomic) CGFloat texelWidth;
20 | @property(readwrite, nonatomic) CGFloat texelHeight;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageContrastFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Adjusts the contrast of the image
4 | */
5 | @interface GPUImageContrastFilter : GPUImageFilter
6 | {
7 | GLint contrastUniform;
8 | }
9 |
10 | /** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level
11 | */
12 | @property(readwrite, nonatomic) CGFloat contrast;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageContrastFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageContrastFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 | uniform lowp float contrast;
10 |
11 | void main()
12 | {
13 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
14 |
15 | gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
16 | }
17 | );
18 | #else
19 | NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
20 | (
21 | varying vec2 textureCoordinate;
22 |
23 | uniform sampler2D inputImageTexture;
24 | uniform float contrast;
25 |
26 | void main()
27 | {
28 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
29 |
30 | gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
31 | }
32 | );
33 | #endif
34 |
35 | @implementation GPUImageContrastFilter
36 |
37 | @synthesize contrast = _contrast;
38 |
39 | #pragma mark -
40 | #pragma mark Initialization
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageContrastFragmentShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | contrastUniform = [filterProgram uniformIndex:@"contrast"];
50 | self.contrast = 1.0;
51 |
52 | return self;
53 | }
54 |
55 | #pragma mark -
56 | #pragma mark Accessors
57 |
58 | - (void)setContrast:(CGFloat)newValue;
59 | {
60 | _contrast = newValue;
61 |
62 | [self setFloat:_contrast forUniform:contrastUniform program:filterProgram];
63 | }
64 |
65 | @end
66 |
67 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageCropFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCropFilter : GPUImageFilter
4 | {
5 | GLfloat cropTextureCoordinates[8];
6 | }
7 |
8 | // The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image
9 | @property(readwrite, nonatomic) CGRect cropRegion;
10 |
11 | // Initialization and teardown
12 | - (id)initWithCropRegion:(CGRect)newCropRegion;
13 |
14 | @end
15 |
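A minimal usage sketch of the normalized crop region described above, keeping the central 50% of the frame; the rectangle is given in the 0.0-1.0 coordinate space with (0.0, 0.0) at the upper left of the image.

    GPUImageCropFilter *cropFilter =
        [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.25, 0.25, 0.5, 0.5)];

    // The region can also be changed afterwards, e.g. to keep only the top half:
    cropFilter.cropRegion = CGRectMake(0.0, 0.0, 1.0, 0.5);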
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageCrosshairGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCrosshairGenerator : GPUImageFilter
4 | {
5 | GLint crosshairWidthUniform, crosshairColorUniform;
6 | }
7 |
8 | // The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.
9 | @property(readwrite, nonatomic) CGFloat crosshairWidth;
10 |
11 | // The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
12 | - (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
13 |
14 | // Rendering
15 | - (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageCrosshatchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageCrosshatchFilter : GPUImageFilter
4 | {
5 | GLint crossHatchSpacingUniform, lineWidthUniform;
6 | }
7 | // The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
8 | @property(readwrite, nonatomic) CGFloat crossHatchSpacing;
9 |
10 | // A relative width for the crosshatch lines. The default is 0.003.
11 | @property(readwrite, nonatomic) CGFloat lineWidth;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDarkenBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDarkenBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageDarkenBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
15 | lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 base = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
33 |
34 | gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageDarkenBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageDarkenBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDifferenceBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDifferenceBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageDifferenceBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 | gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
17 | }
18 | );
19 | #else
20 | NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
21 | (
22 | varying vec2 textureCoordinate;
23 | varying vec2 textureCoordinate2;
24 |
25 | uniform sampler2D inputImageTexture;
26 | uniform sampler2D inputImageTexture2;
27 |
28 | void main()
29 | {
30 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
31 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
32 | gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
33 | }
34 | );
35 | #endif
36 |
37 | @implementation GPUImageDifferenceBlendFilter
38 |
39 | - (id)init;
40 | {
41 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageDifferenceBlendFragmentShaderString]))
42 | {
43 | return nil;
44 | }
45 |
46 | return self;
47 | }
48 |
49 | @end
50 |
51 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDilationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
4 | // This extends out bright features, and is most commonly used with black-and-white thresholded images.
5 |
6 | extern NSString *const kGPUImageDilationRadiusOneVertexShaderString;
7 | extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;
8 | extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;
9 | extern NSString *const kGPUImageDilationRadiusFourVertexShaderString;
10 |
11 | @interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter
12 |
13 | // Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
14 | - (id)initWithRadius:(NSUInteger)dilationRadius;
15 |
16 | @end
17 |
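A brief sketch of how this might be used: dilate and then erode with the same radius (a hand-rolled morphological close) to fill small dark gaps in a thresholded image. GPUImageErosionFilter is declared later in this source, and only radii of 1-4 are accepted, per the comment above.

    GPUImageDilationFilter *dilate = [[GPUImageDilationFilter alloc] initWithRadius:2];
    GPUImageErosionFilter *erode = [[GPUImageErosionFilter alloc] initWithRadius:2];

    // Chain the two passes; the thresholded source image is fed into 'dilate'.
    [dilate addTarget:erode];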
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDirectionalNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 | GLint upperThresholdUniform, lowerThresholdUniform;
7 |
8 | BOOL hasOverriddenImageSizeFactor;
9 | }
10 |
11 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
12 | @property(readwrite, nonatomic) CGFloat texelWidth;
13 | @property(readwrite, nonatomic) CGFloat texelHeight;
14 |
15 | // These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)
16 | @property(readwrite, nonatomic) CGFloat upperThreshold;
17 | @property(readwrite, nonatomic) CGFloat lowerThreshold;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDirectionalSobelEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDissolveBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
4 | {
5 | GLint mixUniform;
6 | }
7 |
8 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
9 | @property(readwrite, nonatomic) CGFloat mix;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageDivideBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageEmbossFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3ConvolutionFilter.h"
2 |
3 | @interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter
4 |
5 | // The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
6 | @property(readwrite, nonatomic) CGFloat intensity;
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageEmbossFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageEmbossFilter.h"
2 |
3 | @implementation GPUImageEmbossFilter
4 |
5 | @synthesize intensity = _intensity;
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [super init]))
10 | {
11 | return nil;
12 | }
13 |
14 | self.intensity = 1.0;
15 |
16 | return self;
17 | }
18 |
19 | #pragma mark -
20 | #pragma mark Accessors
21 |
22 | - (void)setIntensity:(CGFloat)newValue;
23 | {
24 | // [(GPUImage3x3ConvolutionFilter *)filter setConvolutionMatrix:(GPUMatrix3x3){
25 | // {-2.0f, -1.0f, 0.0f},
26 | // {-1.0f, 1.0f, 1.0f},
27 | // { 0.0f, 1.0f, 2.0f}
28 | // }];
29 |
30 | _intensity = newValue;
31 |
32 | GPUMatrix3x3 newConvolutionMatrix;
33 | newConvolutionMatrix.one.one = _intensity * (-2.0);
34 | newConvolutionMatrix.one.two = -_intensity;
35 | newConvolutionMatrix.one.three = 0.0f;
36 |
37 | newConvolutionMatrix.two.one = -_intensity;
38 | newConvolutionMatrix.two.two = 1.0;
39 | newConvolutionMatrix.two.three = _intensity;
40 |
41 | newConvolutionMatrix.three.one = 0.0f;
42 | newConvolutionMatrix.three.two = _intensity;
43 | newConvolutionMatrix.three.three = _intensity * 2.0;
44 |
45 | self.convolutionKernel = newConvolutionMatrix;
46 | }
47 |
48 |
49 | @end
50 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageErosionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
4 | // This extends out dark features, and is most commonly used with black-and-white thresholded images.
5 |
6 | @interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter
7 |
8 | // Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
9 | - (id)initWithRadius:(NSUInteger)erosionRadius;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageExclusionBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageExposureFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageExposureFilter : GPUImageFilter
4 | {
5 | GLint exposureUniform;
6 | }
7 |
8 | // Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat exposure;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageExposureFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageExposureFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 | uniform highp float exposure;
10 |
11 | void main()
12 | {
13 | highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
14 |
15 | gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
16 | }
17 | );
18 | #else
19 | NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
20 | (
21 | varying vec2 textureCoordinate;
22 |
23 | uniform sampler2D inputImageTexture;
24 | uniform float exposure;
25 |
26 | void main()
27 | {
28 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
29 |
30 | gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
31 | }
32 | );
33 | #endif
34 |
35 | @implementation GPUImageExposureFilter
36 |
37 | @synthesize exposure = _exposure;
38 |
39 | #pragma mark -
40 | #pragma mark Initialization and teardown
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | exposureUniform = [filterProgram uniformIndex:@"exposure"];
50 | self.exposure = 0.0;
51 |
52 | return self;
53 | }
54 |
55 | #pragma mark -
56 | #pragma mark Accessors
57 |
58 | - (void)setExposure:(CGFloat)newValue;
59 | {
60 | _exposure = newValue;
61 |
62 | [self setFloat:_exposure forUniform:exposureUniform program:filterProgram];
63 | }
64 |
65 | @end
66 |
67 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFASTCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGrayscaleFilter;
4 | @class GPUImage3x3TextureSamplingFilter;
5 | @class GPUImageNonMaximumSuppressionFilter;
6 |
7 | /*
8 | An implementation of the Features from Accelerated Segment Test (FAST) feature detector as described in the following publications:
9 |
10 | E. Rosten and T. Drummond. Fusing points and lines for high performance tracking. IEEE International Conference on Computer Vision, 2005.
11 | E. Rosten and T. Drummond. Machine learning for high-speed corner detection. European Conference on Computer Vision, 2006.
12 |
13 | For more about the FAST feature detector, see the resources here:
14 | http://www.edwardrosten.com/work/fast.html
15 | */
16 |
17 | typedef enum { kGPUImageFAST12Contiguous, kGPUImageFAST12ContiguousNonMaximumSuppressed} GPUImageFASTDetectorType;
18 |
19 | @interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
20 | {
21 | GPUImageGrayscaleFilter *luminanceReductionFilter;
22 | GPUImage3x3TextureSamplingFilter *featureDetectionFilter;
23 | GPUImageNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
24 | // Generate a lookup texture based on the bit patterns
25 |
26 | // Step 1: convert to monochrome if necessary
27 | // Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components
28 | // Step 3: do non-maximum suppression of close corner points
29 | }
30 |
31 | - (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;
32 |
33 | @end
34 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFalseColorFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageFalseColorFilter : GPUImageFilter
4 | {
5 | GLint firstColorUniform, secondColorUniform;
6 | }
7 |
8 | // The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0).
9 | @property(readwrite, nonatomic) GPUVector4 firstColor;
10 | @property(readwrite, nonatomic) GPUVector4 secondColor;
11 |
12 | - (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
13 | - (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFilterGroup.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 | #import "GPUImageFilter.h"
3 |
4 | @interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>
5 | {
6 | NSMutableArray *filters;
7 | BOOL isEndProcessing;
8 | }
9 |
10 | @property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *terminalFilter;
11 | @property(readwrite, nonatomic, strong) NSArray *initialFilters;
12 | @property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *inputFilterToIgnoreForUpdates;
13 |
14 | // Filter management
15 | - (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
16 | - (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
17 | - (NSUInteger)filterCount;
18 |
19 | @end
20 |
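The sketch below (hypothetical, not part of the library) shows the wiring pattern that the concrete groups in this source follow, for example GPUImageColourFASTFeatureDetector.m above and GPUImageHighPassFilter.m further down: each stage is registered with -addFilter:, the entry stage goes into initialFilters, and the last stage becomes terminalFilter.

    #import "GPUImageFilterGroup.h"
    #import "GPUImageGrayscaleFilter.h"
    #import "GPUImageContrastFilter.h"

    @interface SketchGrayContrastFilterGroup : GPUImageFilterGroup
    @end

    @implementation SketchGrayContrastFilterGroup

    - (id)init
    {
        if (!(self = [super init]))
        {
            return nil;
        }

        // Stage 1: reduce to luminance; Stage 2: boost contrast of the result.
        GPUImageGrayscaleFilter *grayscaleFilter = [[GPUImageGrayscaleFilter alloc] init];
        GPUImageContrastFilter *contrastFilter = [[GPUImageContrastFilter alloc] init];
        contrastFilter.contrast = 1.5;

        [self addFilter:grayscaleFilter];
        [self addFilter:contrastFilter];
        [grayscaleFilter addTarget:contrastFilter];

        self.initialFilters = [NSArray arrayWithObject:grayscaleFilter];
        self.terminalFilter = contrastFilter;

        return self;
    }

    @end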
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFilterPipeline.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import "GPUImageOutput.h"
3 |
4 | @interface GPUImageFilterPipeline : NSObject
5 | {
6 | NSString *stringValue;
7 | }
8 |
9 | @property (strong) NSMutableArray *filters;
10 |
11 | @property (strong) GPUImageOutput *input;
12 | @property (strong) id <GPUImageInput> output;
13 |
14 | - (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
15 | - (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
16 | - (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
17 |
18 | - (void) addFilter:(GPUImageOutput *)filter;
19 | - (void) addFilter:(GPUImageOutput *)filter atIndex:(NSUInteger)insertIndex;
20 | - (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput *)filter;
21 | - (void) replaceAllFilters:(NSArray *) newFilters;
22 | - (void) removeFilter:(GPUImageOutput *)filter;
23 | - (void) removeFilterAtIndex:(NSUInteger)index;
24 | - (void) removeAllFilters;
25 |
26 | - (UIImage *) currentFilteredFrame;
27 | - (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
28 | - (CGImageRef) newCGImageFromCurrentFilteredFrame;
29 |
30 | @end
31 |
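A hypothetical usage sketch of the pipeline API above. GPUImagePicture and GPUImageView are assumed to be the stock GPUImage still-image source and display classes bundled alongside this copy; the image name is a placeholder.

    UIImage *inputImage = [UIImage imageNamed:@"sample.jpg"]; // placeholder asset name
    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageView *preview = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 0, 320, 480)];

    NSArray *stages = @[[[GPUImageGrayscaleFilter alloc] init],
                        [[GPUImageGammaFilter alloc] init]];
    GPUImageFilterPipeline *pipeline =
        [[GPUImageFilterPipeline alloc] initWithOrderedFilters:stages
                                                         input:source
                                                        output:preview];

    // Stages can be swapped in place while keeping the same input and output:
    [pipeline replaceFilterAtIndex:1 withFilter:[[GPUImageContrastFilter alloc] init]];
    [source processImage];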
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFourInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageThreeInputFilter.h"
2 |
3 | extern NSString *const kGPUImageFourInputTextureVertexShaderString;
4 |
5 | @interface GPUImageFourInputFilter : GPUImageThreeInputFilter
6 | {
7 | GPUImageFramebuffer *fourthInputFramebuffer;
8 |
9 | GLint filterFourthTextureCoordinateAttribute;
10 | GLint filterInputTextureUniform4;
11 | GPUImageRotationMode inputRotation4;
12 | GLuint filterSourceTexture4;
13 | CMTime fourthFrameTime;
14 |
15 | BOOL hasSetThirdTexture, hasReceivedFourthFrame, fourthFrameWasVideo;
16 | BOOL fourthFrameCheckDisabled;
17 | }
18 |
19 | - (void)disableFourthFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFramebuffer.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | #import <OpenGLES/EAGL.h>
5 | #import <OpenGLES/ES2/gl.h>
6 | #import <OpenGLES/ES2/glext.h>
7 | #else
8 | #import <OpenGL/OpenGL.h>
9 | #import <OpenGL/gl.h>
10 | #endif
11 |
12 | #import <QuartzCore/QuartzCore.h>
13 | #import <CoreMedia/CoreMedia.h>
14 |
15 |
16 | typedef struct GPUTextureOptions {
17 | GLenum minFilter;
18 | GLenum magFilter;
19 | GLenum wrapS;
20 | GLenum wrapT;
21 | GLenum internalFormat;
22 | GLenum format;
23 | GLenum type;
24 | } GPUTextureOptions;
25 |
26 | @interface GPUImageFramebuffer : NSObject
27 |
28 | @property(readonly) CGSize size;
29 | @property(readonly) GPUTextureOptions textureOptions;
30 | @property(readonly) GLuint texture;
31 | @property(readonly) BOOL missingFramebuffer;
32 |
33 | // Initialization and teardown
34 | - (id)initWithSize:(CGSize)framebufferSize;
35 | - (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
36 | - (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
37 |
38 | // Usage
39 | - (void)activateFramebuffer;
40 |
41 | // Reference counting
42 | - (void)lock;
43 | - (void)unlock;
44 | - (void)clearAllLocks;
45 | - (void)disableReferenceCounting;
46 | - (void)enableReferenceCounting;
47 |
48 | // Image capture
49 | - (CGImageRef)newCGImageFromFramebufferContents;
50 | - (void)restoreRenderTarget;
51 |
52 | // Raw data bytes
53 | - (void)lockForReading;
54 | - (void)unlockAfterReading;
55 | - (NSUInteger)bytesPerRow;
56 | - (GLubyte *)byteBuffer;
57 | - (CVPixelBufferRef)pixelBuffer;
58 |
59 | @end
60 |
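A short sketch of the raw-data path declared above: lock the framebuffer for CPU access, walk its pixel bytes row by row, then unlock. 'framebuffer' is assumed to be a GPUImageFramebuffer produced by a rendered filter; each pixel occupies four bytes.

    [framebuffer lockForReading];

    GLubyte *bytes = [framebuffer byteBuffer];
    NSUInteger stride = [framebuffer bytesPerRow];
    CGSize size = framebuffer.size;

    for (NSUInteger row = 0; row < (NSUInteger)size.height; row++)
    {
        GLubyte *firstPixel = bytes + row * stride; // 4 bytes per pixel
        NSLog(@"row %lu starts with (%d, %d, %d, %d)",
              (unsigned long)row, firstPixel[0], firstPixel[1], firstPixel[2], firstPixel[3]);
    }

    [framebuffer unlockAfterReading];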
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageFramebufferCache.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <QuartzCore/QuartzCore.h>
3 | #import "GPUImageFramebuffer.h"
4 |
5 | @interface GPUImageFramebufferCache : NSObject
6 |
7 | // Framebuffer management
8 | - (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
9 | - (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
10 | - (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
11 | - (void)purgeAllUnassignedFramebuffers;
12 | - (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
13 | - (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
14 |
15 | @end
16 |
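A hypothetical sketch of fetching and returning a scratch framebuffer. It assumes the shared cache accessor on GPUImageContext from the stock GPUImage framework; in that implementation a fetched framebuffer already holds one lock, so -unlock hands it back to the cache once nothing else is using it.

    GPUImageFramebufferCache *cache = [GPUImageContext sharedFramebufferCache]; // assumed accessor

    GPUImageFramebuffer *scratch = [cache fetchFramebufferForSize:CGSizeMake(640.0, 480.0)
                                                      onlyTexture:NO];
    [scratch activateFramebuffer];
    // ... render into 'scratch' here ...
    [scratch unlock]; // returns it to the cache once its lock count reaches zero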
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageGammaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageGammaFilter : GPUImageFilter
4 | {
5 | GLint gammaUniform;
6 | }
7 |
8 | // Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level
9 | @property(readwrite, nonatomic) CGFloat gamma;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageGammaFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageGammaFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 | uniform lowp float gamma;
10 |
11 | void main()
12 | {
13 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
14 |
15 | gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
16 | }
17 | );
18 | #else
19 | NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
20 | (
21 | varying vec2 textureCoordinate;
22 |
23 | uniform sampler2D inputImageTexture;
24 | uniform float gamma;
25 |
26 | void main()
27 | {
28 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
29 |
30 | gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
31 | }
32 | );
33 | #endif
34 |
35 | @implementation GPUImageGammaFilter
36 |
37 | @synthesize gamma = _gamma;
38 |
39 | #pragma mark -
40 | #pragma mark Initialization and teardown
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageGammaFragmentShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | gammaUniform = [filterProgram uniformIndex:@"gamma"];
50 | self.gamma = 1.0;
51 |
52 | return self;
53 | }
54 |
55 | #pragma mark -
56 | #pragma mark Accessors
57 |
58 | - (void)setGamma:(CGFloat)newValue;
59 | {
60 | _gamma = newValue;
61 |
62 | [self setFloat:_gamma forUniform:gammaUniform program:filterProgram];
63 | }
64 |
65 | @end
66 |
67 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageGaussianBlurPositionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | /** A more generalized 9x9 Gaussian blur filter
4 | */
5 | @interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter
6 | {
7 | GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;
8 | }
9 |
10 | /** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
11 | */
12 | @property (readwrite, nonatomic) CGFloat blurSize;
13 |
14 | /** Center for the blur, defaults to 0.5, 0.5
15 | */
16 | @property (readwrite, nonatomic) CGPoint blurCenter;
17 |
18 | /** Radius for the blur, defaults to 1.0
19 | */
20 | @property (readwrite, nonatomic) CGFloat blurRadius;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageGaussianSelectiveBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | /** A Gaussian blur that preserves focus within a circular region
6 | */
7 | @interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
8 | {
9 | GPUImageGaussianBlurFilter *blurFilter;
10 | GPUImageFilter *selectiveFocusFilter;
11 | BOOL hasOverriddenAspectRatio;
12 | }
13 |
14 | /** The radius of the circular area being excluded from the blur
15 | */
16 | @property (readwrite, nonatomic) CGFloat excludeCircleRadius;
17 | /** The center of the circular area being excluded from the blur
18 | */
19 | @property (readwrite, nonatomic) CGPoint excludeCirclePoint;
20 | /** The size of the area between the blurred portion and the clear circle
21 | */
22 | @property (readwrite, nonatomic) CGFloat excludeBlurSize;
23 | /** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
24 | */
25 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
26 | /** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
27 | */
28 | @property (readwrite, nonatomic) CGFloat aspectRatio;
29 |
30 | @end
31 |
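A minimal usage sketch of the properties documented above: blur the whole frame except a circle around its center.

    GPUImageGaussianSelectiveBlurFilter *selectiveBlur =
        [[GPUImageGaussianSelectiveBlurFilter alloc] init];

    selectiveBlur.excludeCirclePoint = CGPointMake(0.5, 0.5); // center of the image
    selectiveBlur.excludeCircleRadius = 0.25;                 // normalized radius kept in focus
    selectiveBlur.excludeBlurSize = 0.1;                      // soften the sharp-to-blurred transition
    selectiveBlur.blurRadiusInPixels = 8.0;                   // stronger than the 5.0 default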
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageGlassSphereFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSphereRefractionFilter.h"
2 |
3 | @interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageGrayscaleFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageLuminanceFragmentShaderString;
4 |
5 | /** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
6 | */
7 | @interface GPUImageGrayscaleFilter : GPUImageFilter
8 |
9 | @end
10 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHSBFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorMatrixFilter.h"
2 |
3 | @interface GPUImageHSBFilter : GPUImageColorMatrixFilter
4 |
5 | /** Reset the filter to have no transformations.
6 | */
7 | - (void)reset;
8 |
9 | /** Add a hue rotation to the filter.
10 | The hue rotation is in the range [-360, 360] with 0 being no-change.
11 | Note that this adjustment is additive, so use the reset method if you need to.
12 | */
13 | - (void)rotateHue:(float)h;
14 |
15 | /** Add a saturation adjustment to the filter.
16 | The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
17 | Note that this adjustment is additive, so use the reset method if you need to.
18 | */
19 | - (void)adjustSaturation:(float)s;
20 |
21 | /** Add a brightness adjustment to the filter.
22 | The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
23 | Note that this adjustment is additive, so use the reset method if you need to.
24 | */
25 | - (void)adjustBrightness:(float)b;
26 |
27 | @end
28 |
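Because the adjustments above are additive, callers typically reset the filter before applying a fresh set of values, as in this brief sketch (for example when a UI slider changes):

    GPUImageHSBFilter *hsbFilter = [[GPUImageHSBFilter alloc] init];

    [hsbFilter reset];                 // discard previously accumulated transforms
    [hsbFilter rotateHue:90.0];        // degrees, in [-360, 360]
    [hsbFilter adjustSaturation:1.3];  // [0.0, 2.0], 1.0 = unchanged
    [hsbFilter adjustBrightness:0.9];  // [0.0, 2.0], 1.0 = unchanged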
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHalftoneFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImagePixellateFilter.h"
2 |
3 | @interface GPUImageHalftoneFilter : GPUImagePixellateFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHardLightBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHazeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /*
4 | * The haze filter can be used to add or remove haze (similar to a UV filter)
5 | *
6 | * @author Alaric Cole
7 | * @creationDate 03/10/12
8 | *
9 | */
10 |
11 | /** The haze filter can be used to add or remove haze
12 |
13 | This is similar to a UV filter
14 | */
15 | @interface GPUImageHazeFilter : GPUImageFilter
16 | {
17 | GLint distanceUniform;
18 | GLint slopeUniform;
19 | }
20 |
21 | /** Strength of the color applied. Default 0. Values between -.3 and .3 are best
22 | */
23 | @property(readwrite, nonatomic) CGFloat distance;
24 |
25 | /** Amount of color change. Default 0. Values between -.3 and .3 are best
26 | */
27 | @property(readwrite, nonatomic) CGFloat slope;
28 |
29 | @end
30 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHighPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageLowPassFilter.h"
3 | #import "GPUImageDifferenceBlendFilter.h"
4 |
5 | @interface GPUImageHighPassFilter : GPUImageFilterGroup
6 | {
7 | GPUImageLowPassFilter *lowPassFilter;
8 | GPUImageDifferenceBlendFilter *differenceBlendFilter;
9 | }
10 |
11 | // This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
12 | @property(readwrite, nonatomic) CGFloat filterStrength;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHighPassFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageHighPassFilter.h"
2 |
3 | @implementation GPUImageHighPassFilter
4 |
5 | @synthesize filterStrength;
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [super init]))
10 | {
11 | return nil;
12 | }
13 |
14 | // Start with a low pass filter to define the component to be removed
15 | lowPassFilter = [[GPUImageLowPassFilter alloc] init];
16 | [self addFilter:lowPassFilter];
17 |
18 | // Take the difference of the current frame from the low pass filtered result to get the high pass
19 | differenceBlendFilter = [[GPUImageDifferenceBlendFilter alloc] init];
20 | [self addFilter:differenceBlendFilter];
21 |
22 | // Texture location 0 needs to be the original image for the difference blend
23 | [lowPassFilter addTarget:differenceBlendFilter atTextureLocation:1];
24 |
25 | self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, differenceBlendFilter, nil];
26 | self.terminalFilter = differenceBlendFilter;
27 |
28 | self.filterStrength = 0.5;
29 |
30 | return self;
31 | }
32 |
33 | #pragma mark -
34 | #pragma mark Accessors
35 |
36 | - (void)setFilterStrength:(CGFloat)newValue;
37 | {
38 | lowPassFilter.filterStrength = newValue;
39 | }
40 |
41 | - (CGFloat)filterStrength;
42 | {
43 | return lowPassFilter.filterStrength;
44 | }
45 |
46 | @end
47 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHighlightShadowFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageHighlightShadowFilter : GPUImageFilter
4 | {
5 | GLint shadowsUniform, highlightsUniform;
6 | }
7 |
8 | /**
9 | * 0 - 1, increase to lighten shadows.
10 | * @default 0
11 | */
12 | @property(readwrite, nonatomic) CGFloat shadows;
13 |
14 | /**
15 | * 0 - 1, decrease to darken highlights.
16 | * @default 1
17 | */
18 | @property(readwrite, nonatomic) CGFloat highlights;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHighlightShadowTintFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageHighlightShadowTintFilter.h
3 | //
4 | //
5 | // Created by github.com/r3mus on 8/14/15.
6 | //
7 | //
8 |
9 | #import "GPUImageFilter.h"
10 |
11 | @interface GPUImageHighlightShadowTintFilter : GPUImageFilter
12 | {
13 | GLint shadowTintIntensityUniform, highlightTintIntensityUniform, shadowTintColorUniform, highlightTintColorUniform;
14 | }
15 |
16 | // The shadowTint and highlightTint colors specify what colors replace the dark and light areas of the image, respectively. The defaults are black for shadows and white for highlights.
17 | @property(readwrite, nonatomic) GLfloat shadowTintIntensity;
18 | @property(readwrite, nonatomic) GPUVector4 shadowTintColor;
19 | @property(readwrite, nonatomic) GLfloat highlightTintIntensity;
20 | @property(readwrite, nonatomic) GPUVector4 highlightTintColor;
21 |
22 | - (void)setShadowTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
23 | - (void)setHighlightTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHistogramEqualizationFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageHistogramEqualizationFilter.h
3 | // FilterShowcase
4 | //
5 | // Created by Adam Marcus on 19/08/2014.
6 | // Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.
7 | //
8 |
9 | #import "GPUImageFilterGroup.h"
10 | #import "GPUImageHistogramFilter.h"
11 | #import "GPUImageRawDataOutput.h"
12 | #import "GPUImageRawDataInput.h"
13 | #import "GPUImageTwoInputFilter.h"
14 |
15 | @interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup
16 | {
17 | GPUImageHistogramFilter *histogramFilter;
18 | GPUImageRawDataOutput *rawDataOutputFilter;
19 | GPUImageRawDataInput *rawDataInputFilter;
20 | }
21 |
22 | @property(readwrite, nonatomic) NSUInteger downsamplingFactor;
23 |
24 | - (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
25 |
26 | @end
27 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHistogramFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType;
4 |
5 | @interface GPUImageHistogramFilter : GPUImageFilter
6 | {
7 | GPUImageHistogramType histogramType;
8 |
9 | GLubyte *vertexSamplingCoordinates;
10 |
11 | GLProgram *secondFilterProgram, *thirdFilterProgram;
12 | GLint secondFilterPositionAttribute, thirdFilterPositionAttribute;
13 | }
14 |
15 | // Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.
16 | @property(readwrite, nonatomic) NSUInteger downsamplingFactor;
17 |
18 | // Initialization and teardown
19 | - (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
20 | - (void)initializeSecondaryAttributes;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHistogramGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageHistogramGenerator : GPUImageFilter
4 | {
5 | GLint backgroundColorUniform;
6 | }
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHueBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageHueBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageHueFilter.h:
--------------------------------------------------------------------------------
1 |
2 | #import "GPUImageFilter.h"
3 |
4 | @interface GPUImageHueFilter : GPUImageFilter
5 | {
6 | GLint hueAdjustUniform;
7 |
8 | }
9 | @property (nonatomic, readwrite) CGFloat hue;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageJFAVoronoiFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageJFAVoronoiFilter : GPUImageFilter
4 | {
5 | GLuint secondFilterOutputTexture;
6 | GLuint secondFilterFramebuffer;
7 |
8 |
9 | GLint sampleStepUniform;
10 | GLint sizeUniform;
11 | NSUInteger numPasses;
12 |
13 | }
14 |
15 | @property (nonatomic, readwrite) CGSize sizeInPixels;
16 |
17 | @end
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageKuwaharaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Kuwahara image abstraction, drawn from the work of Kyprianidis, et al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
4 | */
5 | @interface GPUImageKuwaharaFilter : GPUImageFilter
6 | {
7 | GLint radiusUniform;
8 | }
9 |
10 | /// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.
11 | @property(readwrite, nonatomic) NSUInteger radius;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageKuwaharaRadius3Filter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageKuwaharaRadius3Filter.h
3 |
4 | #import "GPUImageFilter.h"
5 |
6 | @interface GPUImageKuwaharaRadius3Filter : GPUImageFilter
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLanczosResamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | @interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter
4 |
5 | @property(readwrite, nonatomic) CGSize originalImageSize;
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLaplacianFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3ConvolutionFilter.h"
2 |
3 | @interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLightenBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | /// Blends two images by taking the maximum value of each color component between the images
4 | @interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
5 | {
6 | }
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLightenBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLightenBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | gl_FragColor = max(textureColor, textureColor2);
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
33 |
34 | gl_FragColor = max(textureColor, textureColor2);
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageLightenBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageLightenBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLineGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageLineGenerator : GPUImageFilter
4 | {
5 | GLint lineWidthUniform, lineColorUniform;
6 | GLfloat *lineCoordinates;
7 | }
8 |
9 | // The width of the displayed lines, in pixels. The default is 1.
10 | @property(readwrite, nonatomic) CGFloat lineWidth;
11 |
12 | // The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
13 | - (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
14 |
15 | // Rendering
16 | - (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLinearBurnBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLinearBurnBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageLinearBurnBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
33 |
34 | gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageLinearBurnBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageLinearBurnBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLocalBinaryPatternFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLookupFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageLookupFilter : GPUImageTwoInputFilter
4 | {
5 | GLint intensityUniform;
6 | }
7 |
8 | // How To Use:
9 | // 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.
10 | // For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work).
11 | //       If you need a more complex filter, you can create as many lookup tables as required.
12 | //       E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png
13 | // 2) Use your new lookup.png file as a second input for GPUImageLookupFilter.
14 |
15 | // See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.
16 |
17 | // Additional Info:
18 | // Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:
19 | //for (int by = 0; by < 8; by++) {
20 | // for (int bx = 0; bx < 8; bx++) {
21 | // for (int g = 0; g < 64; g++) {
22 | // for (int r = 0; r < 64; r++) {
23 | // image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),
24 | // (int)(g * 255.0 / 63.0 + 0.5),
25 | // (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));
26 | // }
27 | // }
28 | // }
29 | //}
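// Usage sketch (assumes a "lookup.png" added to the app bundle and an existing GPUImagePicture
// named sourcePicture; this follows the same wiring pattern as GPUImageMissEtikateFilter):
//
//   GPUImagePicture *lookupImageSource = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"lookup.png"]];
//   GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
//   [sourcePicture addTarget:lookupFilter];                          // image to be filtered -> texture slot 0
//   [lookupImageSource addTarget:lookupFilter atTextureLocation:1];  // lookup table -> texture slot 1
//   [lookupImageSource processImage];
//   [sourcePicture processImage];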
30 |
31 | // Opacity/intensity of lookup filter ranges from 0.0 to 1.0, with 1.0 as the normal setting
32 | @property(readwrite, nonatomic) CGFloat intensity;
33 |
34 | @end
35 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLowPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageBuffer.h"
3 | #import "GPUImageDissolveBlendFilter.h"
4 |
5 | @interface GPUImageLowPassFilter : GPUImageFilterGroup
6 | {
7 | GPUImageBuffer *bufferFilter;
8 | GPUImageDissolveBlendFilter *dissolveBlendFilter;
9 | }
10 |
11 | // This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
12 | @property(readwrite, nonatomic) CGFloat filterStrength;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLuminanceRangeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageLuminanceRangeFilter : GPUImageFilter
4 | {
5 | GLint rangeReductionUniform;
6 | }
7 |
8 | /** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
9 | */
10 | @property(readwrite, nonatomic) CGFloat rangeReductionFactor;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLuminanceThresholdFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Pixels with a luminance above the threshold will appear white, and those below will be black
4 | */
5 | @interface GPUImageLuminanceThresholdFilter : GPUImageFilter
6 | {
7 | GLint thresholdUniform;
8 | }
9 |
10 | /** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default
11 | */
12 | @property(readwrite, nonatomic) CGFloat threshold;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLuminosity.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageAverageColor.h"
2 |
3 | @interface GPUImageLuminosity : GPUImageAverageColor
4 | {
5 | GLProgram *secondFilterProgram;
6 | GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
7 | GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
8 | GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;
9 | }
10 |
11 | // This block is called on the completion of color averaging for a frame
12 | @property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);
13 |
14 | - (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
15 | - (void)initializeSecondaryAttributes;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageLuminosityBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMaskFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageMaskFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMedianFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMissEtikateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Photoshop action by Miss Etikate:
6 | http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961
7 | */
8 |
9 | // Note: If you want to use this effect you have to add lookup_miss_etikate.png
10 | // from Resources folder to your application bundle.
11 |
12 | @interface GPUImageMissEtikateFilter : GPUImageFilterGroup
13 | {
14 | GPUImagePicture *lookupImageSource;
15 | }
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMissEtikateFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMissEtikateFilter.h"
2 | #import "GPUImagePicture.h"
3 | #import "GPUImageLookupFilter.h"
4 |
5 | @implementation GPUImageMissEtikateFilter
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [super init]))
10 | {
11 | return nil;
12 | }
13 |
14 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
15 | UIImage *image = [UIImage imageNamed:@"lookup_miss_etikate.png"];
16 | #else
17 | NSImage *image = [NSImage imageNamed:@"lookup_miss_etikate.png"];
18 | #endif
19 |
20 | NSAssert(image, @"To use GPUImageMissEtikateFilter you need to add lookup_miss_etikate.png from GPUImage/framework/Resources to your application bundle.");
21 |
22 | lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
23 | GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
24 | [self addFilter:lookupFilter];
25 |
26 | [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
27 | [lookupImageSource processImage];
28 |
29 | self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
30 | self.terminalFilter = lookupFilter;
31 |
32 | return self;
33 | }
34 |
35 | #pragma mark -
36 | #pragma mark Accessors
37 |
38 | @end
39 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMonochromeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageMonochromeFilter : GPUImageFilter
4 | {
5 | GLint intensityUniform, filterColorUniform;
6 | }
7 |
8 | @property(readwrite, nonatomic) CGFloat intensity;
9 | @property(readwrite, nonatomic) GPUVector4 color;
10 |
11 | - (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMosaicFilter.h:
--------------------------------------------------------------------------------
1 |
2 | // This needs a little more work; it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working.
3 |
4 | #import "GPUImageTwoInputFilter.h"
5 | #import "GPUImagePicture.h"
6 |
7 | @interface GPUImageMosaicFilter : GPUImageTwoInputFilter {
8 | GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;
9 | GPUImagePicture *pic;
10 | }
11 |
12 | // This filter takes an input tileset; the tiles must ascend in luminance.
13 | // It looks at the input image and replaces each display tile with an input tile
14 | // according to the luminance of that tile. The idea was to replicate the ASCII
15 | // video filters seen in other apps, but the tileset can be anything (see the usage sketch below).
16 | @property(readwrite, nonatomic) CGSize inputTileSize;
17 | @property(readwrite, nonatomic) float numTiles;
18 | @property(readwrite, nonatomic) CGSize displayTileSize;
19 | @property(readwrite, nonatomic) BOOL colorOn;
20 | @property(readwrite, nonatomic, copy) NSString *tileSet;
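// Usage sketch (assumes "squares.png" is a tile-sheet image added to the app bundle, with tiles
// ascending in luminance, and that videoCamera / filterView already exist; values are illustrative):
//
//   GPUImageMosaicFilter *mosaicFilter = [[GPUImageMosaicFilter alloc] init];
//   mosaicFilter.tileSet = @"squares.png";
//   mosaicFilter.colorOn = NO;
//   mosaicFilter.displayTileSize = CGSizeMake(0.025, 0.025);
//   [videoCamera addTarget:mosaicFilter];
//   [mosaicFilter addTarget:filterView];   // filterView: a GPUImageView used for display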
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMotionBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageMotionBlurFilter : GPUImageFilter
4 |
5 | /** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
6 | */
7 | @property (readwrite, nonatomic) CGFloat blurSize;
8 |
9 | /** The angular direction of the blur, in degrees. 0 degrees by default
10 | */
11 | @property (readwrite, nonatomic) CGFloat blurAngle;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMotionDetector.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 | #import "GPUImageLowPassFilter.h"
3 | #import "GPUImageAverageColor.h"
4 |
5 | @interface GPUImageMotionDetector : GPUImageFilterGroup
6 | {
7 | GPUImageLowPassFilter *lowPassFilter;
8 | GPUImageTwoInputFilter *frameComparisonFilter;
9 | GPUImageAverageColor *averageColor;
10 | }
11 |
12 | // This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.
13 | @property(readwrite, nonatomic) CGFloat lowPassFilterStrength;
14 |
15 | // For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.
16 | @property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);
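// Usage sketch (assumes videoCamera is an existing GPUImageVideoCamera; the intensity threshold
// is illustrative):
//
//   GPUImageMotionDetector *motionDetector = [[GPUImageMotionDetector alloc] init];
//   motionDetector.lowPassFilterStrength = 0.5;
//   motionDetector.motionDetectionBlock = ^(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime) {
//       if (motionIntensity > 0.01) {
//           NSLog(@"Motion at (%f, %f), intensity %f", motionCentroid.x, motionCentroid.y, motionIntensity);
//       }
//   };
//   [videoCamera addTarget:motionDetector];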
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMovieComposition.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageMovieComposition.h
3 | // Givit
4 | //
5 | // Created by Sean Meiners on 2013/01/25.
6 | //
7 | //
8 |
9 | #import "GPUImageMovie.h"
10 |
11 | @interface GPUImageMovieComposition : GPUImageMovie
12 |
13 | @property (readwrite, retain) AVComposition *compositon;
14 | @property (readwrite, retain) AVVideoComposition *videoComposition;
15 | @property (readwrite, retain) AVAudioMix *audioMix;
16 |
17 | - (id)initWithComposition:(AVComposition*)compositon
18 | andVideoComposition:(AVVideoComposition*)videoComposition
19 | andAudioMix:(AVAudioMix*)audioMix;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMultiplyBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageMultiplyBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageMultiplyBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
15 | lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 base = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
33 |
34 | gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageMultiplyBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageMultiplyBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageNobleCornerDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageHarrisCornerDetectionFilter.h"
2 |
3 | /** Noble corner detector
4 |
5 | This is the Noble variant on the Harris detector, from
6 | Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
7 | */
8 |
9 |
10 | @interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageNormalBlendFilter.h:
--------------------------------------------------------------------------------
1 | // Created by Jorge Garcia on 9/5/12.
2 | //
3 |
4 | #import "GPUImageTwoInputFilter.h"
5 |
6 | @interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageOpacityFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageOpacityFilter : GPUImageFilter
4 | {
5 | GLint opacityUniform;
6 | }
7 |
8 | // Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting
9 | @property(readwrite, nonatomic) CGFloat opacity;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageOpacityFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageOpacityFilter.h"
2 |
3 | @implementation GPUImageOpacityFilter
4 |
5 | @synthesize opacity = _opacity;
6 |
7 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
8 | NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
9 | (
10 | varying highp vec2 textureCoordinate;
11 |
12 | uniform sampler2D inputImageTexture;
13 | uniform lowp float opacity;
14 |
15 | void main()
16 | {
17 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
18 |
19 | gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
20 | }
21 | );
22 | #else
23 | NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
24 | (
25 | varying vec2 textureCoordinate;
26 |
27 | uniform sampler2D inputImageTexture;
28 | uniform float opacity;
29 |
30 | void main()
31 | {
32 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
33 |
34 | gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
35 | }
36 | );
37 | #endif
38 |
39 | #pragma mark -
40 | #pragma mark Initialization and teardown
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageOpacityFragmentShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | opacityUniform = [filterProgram uniformIndex:@"opacity"];
50 | self.opacity = 1.0;
51 |
52 | return self;
53 | }
54 |
55 | #pragma mark -
56 | #pragma mark Accessors
57 |
58 | - (void)setOpacity:(CGFloat)newValue;
59 | {
60 | _opacity = newValue;
61 |
62 | [self setFloat:_opacity forUniform:opacityUniform program:filterProgram];
63 | }
64 |
65 | @end
66 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageOpeningFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageErosionFilter;
4 | @class GPUImageDilationFilter;
5 |
6 | // A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
7 | // This helps to filter out smaller bright elements.
8 |
9 | @interface GPUImageOpeningFilter : GPUImageFilterGroup
10 | {
11 | GPUImageErosionFilter *erosionFilter;
12 | GPUImageDilationFilter *dilationFilter;
13 | }
14 |
15 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
16 |
17 | - (id)initWithRadius:(NSUInteger)radius;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageOpeningFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageOpeningFilter.h"
2 | #import "GPUImageErosionFilter.h"
3 | #import "GPUImageDilationFilter.h"
4 |
5 | @implementation GPUImageOpeningFilter
6 |
7 | @synthesize verticalTexelSpacing = _verticalTexelSpacing;
8 | @synthesize horizontalTexelSpacing = _horizontalTexelSpacing;
9 |
10 | - (id)init;
11 | {
12 | if (!(self = [self initWithRadius:1]))
13 | {
14 | return nil;
15 | }
16 |
17 | return self;
18 | }
19 |
20 | - (id)initWithRadius:(NSUInteger)radius;
21 | {
22 | if (!(self = [super init]))
23 | {
24 | return nil;
25 | }
26 |
27 | // First pass: erosion
28 | erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
29 | [self addFilter:erosionFilter];
30 |
31 | // Second pass: dilation
32 | dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
33 | [self addFilter:dilationFilter];
34 |
35 | [erosionFilter addTarget:dilationFilter];
36 |
37 | self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];
38 | self.terminalFilter = dilationFilter;
39 |
40 | return self;
41 | }
42 |
43 | - (void)setVerticalTexelSpacing:(CGFloat)newValue;
44 | {
45 | _verticalTexelSpacing = newValue;
46 | erosionFilter.verticalTexelSpacing = newValue;
47 | dilationFilter.verticalTexelSpacing = newValue;
48 | }
49 |
50 | - (void)setHorizontalTexelSpacing:(CGFloat)newValue;
51 | {
52 | _horizontalTexelSpacing = newValue;
53 | erosionFilter.horizontalTexelSpacing = newValue;
54 | dilationFilter.horizontalTexelSpacing = newValue;
55 | }
56 |
57 | @end
58 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageOverlayBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageParallelCoordinateLineTransformFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | // This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene.
4 | //
5 | // It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications:
6 | // M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
7 | // M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.
8 |
9 | @interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter
10 | {
11 | GLubyte *rawImagePixels;
12 | GLfloat *lineCoordinates;
13 | unsigned int maxLinePairsToRender, linePairsToRender;
14 | }
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePerlinNoiseFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePerlinNoiseFilter : GPUImageFilter
4 | {
5 | GLint scaleUniform, colorStartUniform, colorFinishUniform;
6 | }
7 |
8 | @property (readwrite, nonatomic) GPUVector4 colorStart;
9 | @property (readwrite, nonatomic) GPUVector4 colorFinish;
10 |
11 | @property (readwrite, nonatomic) float scale;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePinchDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a pinch distortion of the image
4 | */
5 | @interface GPUImagePinchDistortionFilter : GPUImageFilter
6 | {
7 | GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
8 | }
9 |
10 | /** The center about which to apply the distortion, with a default of (0.5, 0.5)
11 | */
12 | @property(readwrite, nonatomic) CGPoint center;
13 | /** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0
14 | */
15 | @property(readwrite, nonatomic) CGFloat radius;
16 | /** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5
17 | */
18 | @property(readwrite, nonatomic) CGFloat scale;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePixellateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePixellateFilter : GPUImageFilter
4 | {
5 | GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;
6 | }
7 |
8 | // The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
9 | @property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
10 |
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePixellatePositionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePixellatePositionFilter : GPUImageFilter
4 | {
5 | GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;
6 | }
7 |
8 | // The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
9 | @property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
10 |
11 | // the center point to start pixelation in texture coordinates, default 0.5, 0.5
12 | @property(readwrite, nonatomic) CGPoint center;
13 |
14 | // the radius (0.0 - 1.0) in which to pixelate, default 1.0
15 | @property(readwrite, nonatomic) CGFloat radius;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePoissonBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputCrossTextureSamplingFilter.h"
2 | #import "GPUImageFilterGroup.h"
3 |
4 | @interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter
5 | {
6 | GLint mixUniform;
7 |
8 | GPUImageFramebuffer *secondOutputFramebuffer;
9 | }
10 |
11 | // Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
12 | @property(readwrite, nonatomic) CGFloat mix;
13 |
14 | // The number of times to propagate the gradients.
15 | // Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
16 | @property(readwrite, nonatomic) NSUInteger numIterations;
17 |
18 | @end
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePolarPixellateFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImagePolarPixellateFilter : GPUImageFilter {
4 | GLint centerUniform, pixelSizeUniform;
5 | }
6 |
7 | // The center about which to apply the distortion, with a default of (0.5, 0.5)
8 | @property(readwrite, nonatomic) CGPoint center;
9 | // The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)
10 | @property(readwrite, nonatomic) CGSize pixelSize;
11 |
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePolkaDotFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImagePixellateFilter.h"
2 |
3 | @interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
4 | {
5 | GLint dotScalingUniform;
6 | }
7 |
8 | @property(readwrite, nonatomic) CGFloat dotScaling;
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePosterizeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
4 | */
5 | @interface GPUImagePosterizeFilter : GPUImageFilter
6 | {
7 | GLint colorLevelsUniform;
8 | }
9 |
10 | /** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
11 | */
12 | @property(readwrite, nonatomic) NSUInteger colorLevels;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePosterizeFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImagePosterizeFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 | uniform highp float colorLevels;
10 |
11 | void main()
12 | {
13 | highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
14 |
15 | gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
16 | }
17 | );
18 | #else
19 | NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
20 | (
21 | varying vec2 textureCoordinate;
22 |
23 | uniform sampler2D inputImageTexture;
24 | uniform float colorLevels;
25 |
26 | void main()
27 | {
28 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
29 |
30 | gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
31 | }
32 | );
33 | #endif
34 |
35 | @implementation GPUImagePosterizeFilter
36 |
37 | @synthesize colorLevels = _colorLevels;
38 |
39 | #pragma mark -
40 | #pragma mark Initialization
41 |
42 | - (id)init;
43 | {
44 | if (!(self = [super initWithFragmentShaderFromString:kGPUImagePosterizeFragmentShaderString]))
45 | {
46 | return nil;
47 | }
48 |
49 | colorLevelsUniform = [filterProgram uniformIndex:@"colorLevels"];
50 | self.colorLevels = 10;
51 |
52 | return self;
53 | }
54 |
55 | #pragma mark -
56 | #pragma mark Accessors
57 |
58 | - (void)setColorLevels:(NSUInteger)newValue;
59 | {
60 | _colorLevels = newValue;
61 |
62 | [self setFloat:_colorLevels forUniform:colorLevelsUniform program:filterProgram];
63 | }
64 |
65 | @end
66 |
67 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImagePrewittEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBClosingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageRGBErosionFilter;
4 | @class GPUImageRGBDilationFilter;
5 |
6 | // A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius.
7 | // This helps to filter out smaller dark elements.
8 |
9 | @interface GPUImageRGBClosingFilter : GPUImageFilterGroup
10 | {
11 | GPUImageRGBErosionFilter *erosionFilter;
12 | GPUImageRGBDilationFilter *dilationFilter;
13 | }
14 |
15 | - (id)initWithRadius:(NSUInteger)radius;
16 |
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBClosingFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageRGBClosingFilter.h"
2 | #import "GPUImageRGBErosionFilter.h"
3 | #import "GPUImageRGBDilationFilter.h"
4 |
5 | @implementation GPUImageRGBClosingFilter
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [self initWithRadius:1]))
10 | {
11 | return nil;
12 | }
13 |
14 | return self;
15 | }
16 |
17 | - (id)initWithRadius:(NSUInteger)radius;
18 | {
19 | if (!(self = [super init]))
20 | {
21 | return nil;
22 | }
23 |
24 | // First pass: dilation
25 | dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
26 | [self addFilter:dilationFilter];
27 |
28 | // Second pass: erosion
29 | erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
30 | [self addFilter:erosionFilter];
31 |
32 | [dilationFilter addTarget:erosionFilter];
33 |
34 | self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil];
35 | self.terminalFilter = erosionFilter;
36 |
37 | return self;
38 | }
39 |
40 |
41 | @end
42 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBDilationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
4 | // This extends out brighter colors, and can be used for abstraction of color images.
5 |
6 | @interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter
7 |
8 | // Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
9 | - (id)initWithRadius:(NSUInteger)dilationRadius;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBErosionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassTextureSamplingFilter.h"
2 |
3 | // For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
4 | // This extends out dark features, and can be used for abstraction of color images.
5 |
6 | @interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter
7 |
8 | // Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
9 | - (id)initWithRadius:(NSUInteger)erosionRadius;
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageRGBFilter : GPUImageFilter
4 | {
5 | GLint redUniform;
6 | GLint greenUniform;
7 | GLint blueUniform;
8 | }
9 |
10 | // Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
11 | @property (readwrite, nonatomic) CGFloat red;
12 | @property (readwrite, nonatomic) CGFloat green;
13 | @property (readwrite, nonatomic) CGFloat blue;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBOpeningFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageRGBErosionFilter;
4 | @class GPUImageRGBDilationFilter;
5 |
6 | // A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius.
7 | // This helps to filter out smaller bright elements.
8 |
9 | @interface GPUImageRGBOpeningFilter : GPUImageFilterGroup
10 | {
11 | GPUImageRGBErosionFilter *erosionFilter;
12 | GPUImageRGBDilationFilter *dilationFilter;
13 | }
14 |
15 | - (id)initWithRadius:(NSUInteger)radius;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRGBOpeningFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageRGBOpeningFilter.h"
2 | #import "GPUImageRGBErosionFilter.h"
3 | #import "GPUImageRGBDilationFilter.h"
4 |
5 | @implementation GPUImageRGBOpeningFilter
6 |
7 | - (id)init;
8 | {
9 | if (!(self = [self initWithRadius:1]))
10 | {
11 | return nil;
12 | }
13 |
14 | return self;
15 | }
16 |
17 | - (id)initWithRadius:(NSUInteger)radius;
18 | {
19 | if (!(self = [super init]))
20 | {
21 | return nil;
22 | }
23 |
24 | // First pass: erosion
25 | erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
26 | [self addFilter:erosionFilter];
27 |
28 | // Second pass: dilation
29 | dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
30 | [self addFilter:dilationFilter];
31 |
32 | [erosionFilter addTarget:dilationFilter];
33 |
34 | self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];
35 | self.terminalFilter = dilationFilter;
36 |
37 | return self;
38 | }
39 |
40 |
41 | @end
42 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRawDataInput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | // The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.
4 | // The bytes are uploaded and stored within a texture, so nothing is kept locally.
5 | // The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:
6 | // The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:
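// Usage sketch (assumes bgraBytes points to width * height * 4 bytes of BGRA pixel data owned by
// the caller, and someFilter is an existing GPUImage filter):
//
//   GPUImageRawDataInput *rawDataInput = [[GPUImageRawDataInput alloc] initWithBytes:bgraBytes
//                                                                               size:CGSizeMake(width, height)];
//   [rawDataInput addTarget:someFilter];
//   [rawDataInput processData];   // uploads the bytes into a texture and pushes a frame to all targets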
7 |
8 | typedef enum {
9 | GPUPixelFormatBGRA = GL_BGRA,
10 | GPUPixelFormatRGBA = GL_RGBA,
11 | GPUPixelFormatRGB = GL_RGB,
12 | GPUPixelFormatLuminance = GL_LUMINANCE
13 | } GPUPixelFormat;
14 |
15 | typedef enum {
16 | GPUPixelTypeUByte = GL_UNSIGNED_BYTE,
17 | GPUPixelTypeFloat = GL_FLOAT
18 | } GPUPixelType;
19 |
20 | @interface GPUImageRawDataInput : GPUImageOutput
21 | {
22 | CGSize uploadedImageSize;
23 |
24 | dispatch_semaphore_t dataUpdateSemaphore;
25 | }
26 |
27 | // Initialization and teardown
28 | - (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
29 | - (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
30 | - (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;
31 |
32 | /** Input data pixel format
33 | */
34 | @property (readwrite, nonatomic) GPUPixelFormat pixelFormat;
35 | @property (readwrite, nonatomic) GPUPixelType pixelType;
36 |
37 | // Image rendering
38 | - (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
39 | - (void)processData;
40 | - (void)processDataForTimestamp:(CMTime)frameTime;
41 | - (CGSize)outputImageSize;
42 |
43 | @end
44 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageRawDataOutput.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import "GPUImageContext.h"
3 |
4 | struct GPUByteColorVector {
5 | GLubyte red;
6 | GLubyte green;
7 | GLubyte blue;
8 | GLubyte alpha;
9 | };
10 | typedef struct GPUByteColorVector GPUByteColorVector;
11 |
12 | @protocol GPUImageRawDataProcessor;
13 |
14 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
15 | @interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
16 | CGSize imageSize;
17 | GPUImageRotationMode inputRotation;
18 | BOOL outputBGRA;
19 | }
20 | #else
21 | @interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
22 | CGSize imageSize;
23 | GPUImageRotationMode inputRotation;
24 | BOOL outputBGRA;
25 | }
26 | #endif
27 |
28 | @property(readonly) GLubyte *rawBytesForImage;
29 | @property(nonatomic, copy) void(^newFrameAvailableBlock)(void);
30 | @property(nonatomic) BOOL enabled;
31 |
32 | // Initialization and teardown
33 | - (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
34 |
35 | // Data access
36 | - (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
37 | - (NSUInteger)bytesPerRowInOutput;
38 |
39 | - (void)setImageSize:(CGSize)newImageSize;
40 |
41 | - (void)lockFramebufferForReading;
42 | - (void)unlockFramebufferAfterReading;
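// Usage sketch (assumes filter is an existing GPUImage filter producing 640x480 output; names are
// illustrative):
//
//   GPUImageRawDataOutput *rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(640.0, 480.0)
//                                                                        resultsInBGRAFormat:YES];
//   [filter addTarget:rawDataOutput];
//   __weak GPUImageRawDataOutput *weakOutput = rawDataOutput;
//   rawDataOutput.newFrameAvailableBlock = ^{
//       [weakOutput lockFramebufferForReading];
//       GLubyte *outputBytes = [weakOutput rawBytesForImage];
//       NSUInteger bytesPerRow = [weakOutput bytesPerRowInOutput];
//       // ... read the BGRA pixels here, one row every bytesPerRow bytes ...
//       [weakOutput unlockFramebufferAfterReading];
//   };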
43 |
44 | @end
45 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSaturationBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSaturationFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Adjusts the saturation of an image
4 | */
5 | @interface GPUImageSaturationFilter : GPUImageFilter
6 | {
7 | GLint saturationUniform;
8 | }
9 |
10 | /** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level
11 | */
12 | @property(readwrite, nonatomic) CGFloat saturation;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageScreenBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageScreenBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageScreenBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 | mediump vec4 whiteColor = vec4(1.0);
17 | gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
33 | vec4 whiteColor = vec4(1.0);
34 | gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageScreenBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageScreenBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSepiaFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageColorMatrixFilter.h"
2 |
3 | /// Simple sepia tone filter
4 | @interface GPUImageSepiaFilter : GPUImageColorMatrixFilter
5 |
6 | @end
7 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSepiaFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSepiaFilter.h"
2 |
3 | @implementation GPUImageSepiaFilter
4 |
5 | - (id)init;
6 | {
7 | if (!(self = [super init]))
8 | {
9 | return nil;
10 | }
11 |
12 | self.intensity = 1.0;
13 | self.colorMatrix = (GPUMatrix4x4){
14 | {0.3588, 0.7044, 0.1368, 0.0},
15 | {0.2990, 0.5870, 0.1140, 0.0},
16 | {0.2392, 0.4696, 0.0912 ,0.0},
17 | {0,0,0,1.0},
18 | };
19 |
20 | return self;
21 | }
22 |
23 | @end
24 |
25 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSharpenFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageSharpenFilter : GPUImageFilter
4 | {
5 | GLint sharpnessUniform;
6 | GLint imageWidthFactorUniform, imageHeightFactorUniform;
7 | }
8 |
9 | // Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
10 | @property(readwrite, nonatomic) CGFloat sharpness;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageShiTomasiFeatureDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageHarrisCornerDetectionFilter.h"
2 |
3 | /** Shi-Tomasi feature detector
4 |
5 | This is the Shi-Tomasi feature detector, as described in
6 | J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.
7 | */
8 |
9 | @interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter
10 |
11 | // Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSingleComponentGaussianBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageGaussianBlurFilter.h"
2 |
3 | // This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)
4 |
5 | @interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSketchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | /** Converts video to look like a sketch.
4 |
5 | This is just the Sobel edge detection filter with the colors inverted.
6 | */
7 | @interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
8 | {
9 | }
10 |
11 | @end
12 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSkinToneFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageSkinToneFilter.h
3 | //
4 | //
5 | // Created by github.com/r3mus on 8/14/15.
6 | //
7 | //
8 |
9 | #import "GPUImageTwoInputFilter.h"
10 |
11 | typedef NS_ENUM(NSUInteger, GPUImageSkinToneUpperColor) {
12 | GPUImageSkinToneUpperColorGreen,
13 | GPUImageSkinToneUpperColorOrange
14 | };
15 |
16 | extern NSString *const kGPUImageSkinToneFragmentShaderString;
17 |
18 | @interface GPUImageSkinToneFilter : GPUImageFilter
19 | {
20 | GLint skinToneAdjustUniform;
21 | GLint skinHueUniform;
22 | GLint skinHueThresholdUniform;
23 | GLint maxHueShiftUniform;
24 | GLint maxSaturationShiftUniform;
25 | GLint upperSkinToneColorUniform;
26 | }
27 |
28 | // The amount of effect to apply, between -1.0 (pink) and +1.0 (orange OR green). Default is 0.0.
29 | @property (nonatomic, readwrite) CGFloat skinToneAdjust;
30 |
31 | // The initial hue of skin to adjust. Default is 0.05 (a common skin red).
32 | @property (nonatomic, readwrite) CGFloat skinHue;
33 |
34 | // The bell curve "breadth" of the skin hue adjustment (i.e. how far from the original skinHue the modifications will still apply).
35 | // Default is 40.0
36 | @property (nonatomic, readwrite) CGFloat skinHueThreshold;
37 |
38 | // The maximum amount of hue shift allowed in the adjustments that affect hue (pink, green). Default = 0.25.
39 | @property (nonatomic, readwrite) CGFloat maxHueShift;
40 |
41 | // The maximum amount of saturation shift allowed in the adjustments that affect saturation (orange). Default = 0.4.
42 | @property (nonatomic, readwrite) CGFloat maxSaturationShift;
43 |
44 | // Defines whether the upper range (> 0.0) will change the skin tone to green (hue) or orange (saturation)
45 | @property (nonatomic, readwrite) GPUImageSkinToneUpperColor upperSkinToneColor;
46 |
47 | @end
48 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSmoothToonFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 | @class GPUImageToonFilter;
5 |
6 | /** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
7 | */
8 | @interface GPUImageSmoothToonFilter : GPUImageFilterGroup
9 | {
10 | GPUImageGaussianBlurFilter *blurFilter;
11 | GPUImageToonFilter *toonFilter;
12 | }
13 |
14 | /// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
15 | @property(readwrite, nonatomic) CGFloat texelWidth;
16 | /// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
17 | @property(readwrite, nonatomic) CGFloat texelHeight;
18 |
19 | /// The radius of the underlying Gaussian blur. The default is 2.0.
20 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
21 |
22 | /// The threshold at which to apply the edges, default of 0.2
23 | @property(readwrite, nonatomic) CGFloat threshold;
24 |
25 | /// The levels of quantization for the posterization of colors within the scene, with a default of 10.0
26 | @property(readwrite, nonatomic) CGFloat quantizationLevels;
27 |
28 | @end
29 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSobelEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassFilter.h"
2 |
3 | @interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform;
6 | BOOL hasOverriddenImageSizeFactor;
7 | }
8 |
9 | // The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels
10 | @property(readwrite, nonatomic) CGFloat texelWidth;
11 | @property(readwrite, nonatomic) CGFloat texelHeight;
12 |
13 | // The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.
14 | @property(readwrite, nonatomic) CGFloat edgeStrength;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSoftEleganceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImagePicture;
4 |
5 | /** A photo filter based on Soft Elegance Photoshop action
6 | http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603
7 | */
8 |
9 | // Note: If you want to use this effect you have to add
10 | // lookup_soft_elegance_1.png and lookup_soft_elegance_2.png
11 | // from Resources folder to your application bundle.
12 |
13 | @interface GPUImageSoftEleganceFilter : GPUImageFilterGroup
14 | {
15 | GPUImagePicture *lookupImageSource1;
16 | GPUImagePicture *lookupImageSource2;
17 | }
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSoftLightBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSolarizeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Pixels with a luminance above the threshold will invert their color
4 | */
5 | @interface GPUImageSolarizeFilter : GPUImageFilter
6 | {
7 | GLint thresholdUniform;
8 | }
9 |
10 | /** Anything above this luminance will be inverted, and anything below will remain normal. Ranges from 0.0 to 1.0, with 0.5 as the default
11 | */
12 | @property(readwrite, nonatomic) CGFloat threshold;
13 |
14 | @end
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSolidColorGenerator.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | // This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
4 | // dimensions, or this won't work correctly
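// Usage sketch (size and color are illustrative; someBlendFilter is assumed to exist):
//
//   GPUImageSolidColorGenerator *colorGenerator = [[GPUImageSolidColorGenerator alloc] init];
//   [colorGenerator forceProcessingAtSize:CGSizeMake(640.0, 480.0)];   // required to give the output a size
//   [colorGenerator setColorRed:1.0 green:0.0 blue:0.0 alpha:1.0];
//   [colorGenerator addTarget:someBlendFilter];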
5 |
6 |
7 | @interface GPUImageSolidColorGenerator : GPUImageFilter
8 | {
9 | GLint colorUniform;
10 | GLint useExistingAlphaUniform;
11 | }
12 |
13 | // This color dictates what the output image will be filled with
14 | @property(readwrite, nonatomic) GPUVector4 color;
15 | @property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO
16 |
17 | - (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSourceOverBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSourceOverBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSourceOverBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);
16 |
17 | gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);
33 |
34 | gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageSourceOverBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageSourceOverBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSphereRefractionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageSphereRefractionFilter : GPUImageFilter
4 | {
5 | GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
6 | }
7 |
8 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
9 | @property(readwrite, nonatomic) CGPoint center;
10 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
11 | @property(readwrite, nonatomic) CGFloat radius;
12 | /// The index of refraction for the sphere, with a default of 0.71
13 | @property(readwrite, nonatomic) CGFloat refractiveIndex;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageStretchDistortionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a stretch distortion of the image
4 | */
5 | @interface GPUImageStretchDistortionFilter : GPUImageFilter {
6 | GLint centerUniform;
7 | }
8 |
9 | /** The center about which to apply the distortion, with a default of (0.5, 0.5)
10 | */
11 | @property(readwrite, nonatomic) CGPoint center;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSubtractBlendFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSubtractBlendFilter.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageSubtractBlendFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 | varying highp vec2 textureCoordinate2;
8 |
9 | uniform sampler2D inputImageTexture;
10 | uniform sampler2D inputImageTexture2;
11 |
12 | void main()
13 | {
14 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
15 | lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
16 |
17 | gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);
18 | }
19 | );
20 | #else
21 | NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
22 | (
23 | varying vec2 textureCoordinate;
24 | varying vec2 textureCoordinate2;
25 |
26 | uniform sampler2D inputImageTexture;
27 | uniform sampler2D inputImageTexture2;
28 |
29 | void main()
30 | {
31 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
32 | vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
33 |
34 | gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);
35 | }
36 | );
37 | #endif
38 |
39 | @implementation GPUImageSubtractBlendFilter
40 |
41 | - (id)init;
42 | {
43 | if (!(self = [super initWithFragmentShaderFromString:kGPUImageSubtractBlendFragmentShaderString]))
44 | {
45 | return nil;
46 | }
47 |
48 | return self;
49 | }
50 |
51 | @end
52 |
53 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageSwirlFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Creates a swirl distortion on the image
4 | */
5 | @interface GPUImageSwirlFilter : GPUImageFilter
6 | {
7 | GLint radiusUniform, centerUniform, angleUniform;
8 | }
9 |
10 | /// The center about which to apply the distortion, with a default of (0.5, 0.5)
11 | @property(readwrite, nonatomic) CGPoint center;
12 | /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5
13 | @property(readwrite, nonatomic) CGFloat radius;
14 | /// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0
15 | @property(readwrite, nonatomic) CGFloat angle;
16 |
17 | @end
18 |
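A minimal still-image sketch for this distortion; the parameter values and `inputImage` are illustrative only:

    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageSwirlFilter *swirl = [[GPUImageSwirlFilter alloc] init];
    swirl.center = CGPointMake(0.5, 0.5);
    swirl.radius = 0.4;
    swirl.angle  = 1.0;

    [source addTarget:swirl];
    [swirl useNextFrameForImageCapture];
    [source processImage];
    UIImage *swirled = [swirl imageFromCurrentFramebuffer];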
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTextureInput.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @interface GPUImageTextureInput : GPUImageOutput
4 | {
5 | CGSize textureSize;
6 | }
7 |
8 | // Initialization and teardown
9 | - (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
10 |
11 | // Image rendering
12 | - (void)processTextureWithFrameTime:(CMTime)frameTime;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTextureInput.m:
--------------------------------------------------------------------------------
1 | #import "GPUImageTextureInput.h"
2 |
3 | @implementation GPUImageTextureInput
4 |
5 | #pragma mark -
6 | #pragma mark Initialization and teardown
7 |
8 | - (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
9 | {
10 | if (!(self = [super init]))
11 | {
12 | return nil;
13 | }
14 |
15 | runSynchronouslyOnVideoProcessingQueue(^{
16 | [GPUImageContext useImageProcessingContext];
17 | });
18 |
19 | textureSize = newTextureSize;
20 |
21 | runSynchronouslyOnVideoProcessingQueue(^{
22 | outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:newTextureSize overriddenTexture:newInputTexture];
23 | });
24 |
25 | return self;
26 | }
27 |
28 | #pragma mark -
29 | #pragma mark Image rendering
30 |
31 | - (void)processTextureWithFrameTime:(CMTime)frameTime;
32 | {
33 | runAsynchronouslyOnVideoProcessingQueue(^{
34 | for (id currentTarget in targets)
35 | {
36 | NSInteger indexOfObject = [targets indexOfObject:currentTarget];
37 | NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
38 |
39 | [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];
40 | [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
41 | [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];
42 | }
43 | });
44 | }
45 |
46 | @end
47 |
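A sketch of pushing an externally rendered OpenGL ES texture through a filter chain; `existingTexture` is assumed to be a texture id created on a context shared with GPUImage, and the vignette filter is an arbitrary choice:

    GPUImageTextureInput *textureInput =
        [[GPUImageTextureInput alloc] initWithTexture:existingTexture size:CGSizeMake(1280.0, 720.0)];
    GPUImageVignetteFilter *vignette = [[GPUImageVignetteFilter alloc] init];
    [textureInput addTarget:vignette];

    // The caller decides when the texture holds a new frame:
    [textureInput processTextureWithFrameTime:kCMTimeZero];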
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTextureOutput.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import "GPUImageContext.h"
3 |
4 | @protocol GPUImageTextureOutputDelegate;
5 |
6 | @interface GPUImageTextureOutput : NSObject <GPUImageInput>
7 | {
8 | GPUImageFramebuffer *firstInputFramebuffer;
9 | }
10 |
11 | @property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;
12 | @property(readonly) GLuint texture;
13 | @property(nonatomic) BOOL enabled;
14 |
15 | - (void)doneWithTexture;
16 |
17 | @end
18 |
19 | @protocol GPUImageTextureOutputDelegate
20 | - (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageThreeInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | extern NSString *const kGPUImageThreeInputTextureVertexShaderString;
4 |
5 | @interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
6 | {
7 | GPUImageFramebuffer *thirdInputFramebuffer;
8 |
9 | GLint filterThirdTextureCoordinateAttribute;
10 | GLint filterInputTextureUniform3;
11 | GPUImageRotationMode inputRotation3;
12 | GLuint filterSourceTexture3;
13 | CMTime thirdFrameTime;
14 |
15 | BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
16 | BOOL thirdFrameCheckDisabled;
17 | }
18 |
19 | - (void)disableThirdFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageThresholdEdgeDetectionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
4 | {
5 | GLint thresholdUniform;
6 | }
7 |
8 | /** Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default
9 | */
10 | @property(readwrite, nonatomic) CGFloat threshold;
11 |
12 | @end
13 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageThresholdSketchFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageThresholdEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageThresholdedNonMaximumSuppressionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
4 | {
5 | GLint thresholdUniform;
6 | }
7 |
8 | /** Any local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.8 as the default
9 | */
10 | @property(readwrite, nonatomic) CGFloat threshold;
11 |
12 | - (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTiltShiftFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | /// A simulated tilt shift lens effect
6 | @interface GPUImageTiltShiftFilter : GPUImageFilterGroup
7 | {
8 | GPUImageGaussianBlurFilter *blurFilter;
9 | GPUImageFilter *tiltShiftFilter;
10 | }
11 |
12 | /// The radius of the underlying blur, in pixels. This is 7.0 by default.
13 | @property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
14 |
15 | /// The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4
16 | @property(readwrite, nonatomic) CGFloat topFocusLevel;
17 |
18 | /// The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6
19 | @property(readwrite, nonatomic) CGFloat bottomFocusLevel;
20 |
21 | /// The rate at which the image gets blurry away from the in-focus region, default 0.2
22 | @property(readwrite, nonatomic) CGFloat focusFallOffRate;
23 |
24 | @end
25 |
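A brief configuration sketch; note that topFocusLevel must stay below bottomFocusLevel, and `sourcePicture` stands for any upstream GPUImageOutput defined elsewhere:

    GPUImageTiltShiftFilter *tiltShift = [[GPUImageTiltShiftFilter alloc] init];
    tiltShift.blurRadiusInPixels = 9.0;
    tiltShift.topFocusLevel = 0.35;      // in-focus band: 35% ...
    tiltShift.bottomFocusLevel = 0.65;   // ... to 65% of the image height
    tiltShift.focusFallOffRate = 0.2;

    [sourcePicture addTarget:tiltShift];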
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageToneCurveFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageToneCurveFilter : GPUImageFilter
4 |
5 | @property(readwrite, nonatomic, copy) NSArray *redControlPoints;
6 | @property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
7 | @property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
8 | @property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;
9 |
10 | // Initialization and teardown
11 | - (id)initWithACVData:(NSData*)data;
12 |
13 | - (id)initWithACV:(NSString*)curveFilename;
14 | - (id)initWithACVURL:(NSURL*)curveFileURL;
15 |
16 | // This lets you set all three red, green, and blue tone curves at once.
17 | // NOTE: This method is deprecated because the same effect can be accomplished
18 | // using the rgbComposite channel rather than setting all three R, G, and B channels.
19 | - (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;
20 |
21 | - (void)setPointsWithACV:(NSString*)curveFilename;
22 | - (void)setPointsWithACVURL:(NSURL*)curveFileURL;
23 |
24 | // Curve calculation
25 | - (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
26 | - (NSMutableArray *)splineCurve:(NSArray *)points;
27 | - (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
28 | - (void)updateToneCurveTexture;
29 |
30 | @end
31 |
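Two hedged ways of configuring this filter: loading a bundled Photoshop curve (the resource name "my_curve" is hypothetical) or setting composite control points directly as NSValue-wrapped CGPoints:

    // From an .acv file in the app bundle:
    GPUImageToneCurveFilter *toneCurve = [[GPUImageToneCurveFilter alloc] initWithACV:@"my_curve"];

    // Or a gentle S-curve applied to the RGB composite channel:
    toneCurve.rgbCompositeControlPoints = @[[NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)],
                                            [NSValue valueWithCGPoint:CGPointMake(0.5, 0.6)],
                                            [NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)]];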
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageToonFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | /** This uses Sobel edge detection to place a black border around objects,
4 | and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
5 | */
6 | @interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter
7 | {
8 | GLint thresholdUniform, quantizationLevelsUniform;
9 | }
10 |
11 | /** The threshold at which to apply the edges, default of 0.2
12 | */
13 | @property(readwrite, nonatomic) CGFloat threshold;
14 |
15 | /** The levels of quantization for the posterization of colors within the scene, with a default of 10.0
16 | */
17 | @property(readwrite, nonatomic) CGFloat quantizationLevels;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTransformFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageTransformFilter : GPUImageFilter
4 | {
5 | GLint transformMatrixUniform, orthographicMatrixUniform;
6 | GPUMatrix4x4 orthographicMatrix;
7 | }
8 |
9 | // You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).
10 | @property(readwrite, nonatomic) CGAffineTransform affineTransform;
11 | @property(readwrite, nonatomic) CATransform3D transform3D;
12 |
13 | // This applies the transform to the raw frame data if set to YES; the default of NO takes the aspect ratio of the image input into account when rotating
14 | @property(readwrite, nonatomic) BOOL ignoreAspectRatio;
15 |
16 | // sets the anchor point to top left corner
17 | @property(readwrite, nonatomic) BOOL anchorTopLeft;
18 |
19 | @end
20 |
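A small sketch of the 2-D affine path; `sourcePicture` is a placeholder for any upstream output:

    // Rotate incoming frames by 45 degrees around the center.
    GPUImageTransformFilter *transformFilter = [[GPUImageTransformFilter alloc] init];
    transformFilter.affineTransform = CGAffineTransformMakeRotation(M_PI_4);
    // transformFilter.ignoreAspectRatio = YES;   // operate on raw frame data instead

    [sourcePicture addTarget:transformFilter];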
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTwoInputCrossTextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter
4 | {
5 | GLint texelWidthUniform, texelHeightUniform;
6 |
7 | CGFloat texelWidth, texelHeight;
8 | BOOL hasOverriddenImageSizeFactor;
9 | }
10 |
11 | // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
12 | @property(readwrite, nonatomic) CGFloat texelWidth;
13 | @property(readwrite, nonatomic) CGFloat texelHeight;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTwoInputFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
4 |
5 | @interface GPUImageTwoInputFilter : GPUImageFilter
6 | {
7 | GPUImageFramebuffer *secondInputFramebuffer;
8 |
9 | GLint filterSecondTextureCoordinateAttribute;
10 | GLint filterInputTextureUniform2;
11 | GPUImageRotationMode inputRotation2;
12 | CMTime firstFrameTime, secondFrameTime;
13 |
14 | BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
15 | BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;
16 | }
17 |
18 | - (void)disableFirstFrameCheck;
19 | - (void)disableSecondFrameCheck;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTwoPassFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageTwoPassFilter : GPUImageFilter
4 | {
5 | GPUImageFramebuffer *secondOutputFramebuffer;
6 |
7 | GLProgram *secondFilterProgram;
8 | GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
9 | GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
10 |
11 | NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
12 | }
13 |
14 | // Initialization and teardown
15 | - (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
16 | - (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
17 | - (void)initializeSecondaryAttributes;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageTwoPassTextureSamplingFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoPassFilter.h"
2 |
3 | @interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
4 | {
5 | GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
6 | GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
7 | CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
8 | }
9 |
10 | // This sets the spacing between texels (in pixels) when sampling in the two passes. By default, this is 1.0
11 | @property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageUIElement.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageOutput.h"
2 |
3 | @interface GPUImageUIElement : GPUImageOutput
4 |
5 | // Initialization and teardown
6 | - (id)initWithView:(UIView *)inputView;
7 | - (id)initWithLayer:(CALayer *)inputLayer;
8 |
9 | // Layer management
10 | - (CGSize)layerSizeInPixels;
11 | - (void)update;
12 | - (void)updateUsingCurrentTime;
13 | - (void)updateWithTimestamp:(CMTime)frameTime;
14 |
15 | @end
16 |
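A sketch of overlaying a UIKit view onto a video stream; `videoCamera` is a placeholder GPUImageVideoCamera set up elsewhere, and the pass-through filter exists only to provide a per-frame completion hook:

    UILabel *watermark = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 240, 60)];
    watermark.text = @"UPYUN";

    GPUImageUIElement *uiElement = [[GPUImageUIElement alloc] initWithView:watermark];
    GPUImageFilter *passthrough = [[GPUImageFilter alloc] init];
    GPUImageAlphaBlendFilter *blend = [[GPUImageAlphaBlendFilter alloc] init];
    blend.mix = 1.0;

    [videoCamera addTarget:passthrough];
    [passthrough addTarget:blend];     // first input: the video frames
    [uiElement addTarget:blend];       // second input: the rendered UIKit layer

    // Re-snapshot the UIKit content after each processed video frame.
    [passthrough setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
        [uiElement update];
    }];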
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageUnsharpMaskFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageGaussianBlurFilter;
4 |
5 | @interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
6 | {
7 | GPUImageGaussianBlurFilter *blurFilter;
8 | GPUImageFilter *unsharpMaskFilter;
9 | }
10 | // The blur radius of the underlying Gaussian blur. The default is 4.0.
11 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
12 |
13 | // The strength of the sharpening, from 0.0 on up, with a default of 1.0
14 | @property(readwrite, nonatomic) CGFloat intensity;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageVibranceFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageVibranceFilter.h
3 | //
4 | //
5 | // Created by github.com/r3mus on 8/14/15.
6 | //
7 | //
8 |
9 | #import "GPUImageFilter.h"
10 |
11 | @interface GPUImageVibranceFilter : GPUImageFilter
12 | {
13 | GLint vibranceUniform;
14 | }
15 |
16 | // Modifies the saturation of desaturated colors, leaving saturated colors unmodified.
17 | // Value -1 to 1 (-1 is minimum vibrance, 0 is no change, and 1 is maximum vibrance)
18 | @property (readwrite, nonatomic) GLfloat vibrance;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageVignetteFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | /** Performs a vignetting effect, fading out the image at the edges
4 | */
5 | @interface GPUImageVignetteFilter : GPUImageFilter
6 | {
7 | GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;
8 | }
9 |
10 | // the center for the vignette in tex coords (defaults to 0.5, 0.5)
11 | @property (nonatomic, readwrite) CGPoint vignetteCenter;
12 |
13 | // The color to use for the Vignette (defaults to black)
14 | @property (nonatomic, readwrite) GPUVector3 vignetteColor;
15 |
16 | // The normalized distance from the center where the vignette effect starts. Default of 0.5.
17 | @property (nonatomic, readwrite) CGFloat vignetteStart;
18 |
19 | // The normalized distance from the center where the vignette effect ends. Default of 0.75.
20 | @property (nonatomic, readwrite) CGFloat vignetteEnd;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageVoronoiConsumerFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageTwoInputFilter.h"
2 |
3 | @interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
4 | {
5 | GLint sizeUniform;
6 | }
7 |
8 | @property (nonatomic, readwrite) CGSize sizeInPixels;
9 |
10 | @end
11 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageWeakPixelInclusionFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImage3x3TextureSamplingFilter.h"
2 |
3 | @interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageWhiteBalanceFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 | /**
3 | * Created by Alaric Cole
4 | * Allows adjustment of color temperature in terms of what an image was effectively shot in. This means higher Kelvin values will warm the image, while lower values will cool it.
5 |
6 | */
7 | @interface GPUImageWhiteBalanceFilter : GPUImageFilter
8 | {
9 | GLint temperatureUniform, tintUniform;
10 | }
11 | //choose color temperature, in degrees Kelvin
12 | @property(readwrite, nonatomic) CGFloat temperature;
13 |
14 | //adjust tint to compensate
15 | @property(readwrite, nonatomic) CGFloat tint;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageXYDerivativeFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageSobelEdgeDetectionFilter.h"
2 |
3 | @interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter
4 |
5 | @end
6 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageZoomBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface GPUImageZoomBlurFilter : GPUImageFilter
4 |
5 | /** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
6 | */
7 | @property (readwrite, nonatomic) CGFloat blurSize;
8 |
9 | /** The normalized center of the blur. (0.5, 0.5) by default
10 | */
11 | @property (readwrite, nonatomic) CGPoint blurCenter;
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/GPUImageiOSBlurFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilterGroup.h"
2 |
3 | @class GPUImageSaturationFilter;
4 | @class GPUImageGaussianBlurFilter;
5 | @class GPUImageLuminanceRangeFilter;
6 |
7 | @interface GPUImageiOSBlurFilter : GPUImageFilterGroup
8 | {
9 | GPUImageSaturationFilter *saturationFilter;
10 | GPUImageGaussianBlurFilter *blurFilter;
11 | GPUImageLuminanceRangeFilter *luminanceRangeFilter;
12 | }
13 |
14 | /** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
15 | */
16 | @property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
17 |
18 | /** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
19 | */
20 | @property (readwrite, nonatomic) CGFloat saturation;
21 |
22 | /** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0
23 | */
24 | @property (readwrite, nonatomic) CGFloat downsampling;
25 |
26 |
27 | /** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
28 | */
29 | @property (readwrite, nonatomic) CGFloat rangeReductionFactor;
30 |
31 | @end
32 |
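A minimal still-image sketch of this blur group; `screenshotImage` is a placeholder UIImage:

    GPUImageiOSBlurFilter *blur = [[GPUImageiOSBlurFilter alloc] init];
    blur.blurRadiusInPixels = 12.0;
    blur.saturation = 0.8;
    blur.downsampling = 4.0;

    GPUImagePicture *snapshot = [[GPUImagePicture alloc] initWithImage:screenshotImage];
    [snapshot addTarget:blur];
    [blur useNextFrameForImageCapture];
    [snapshot processImage];
    UIImage *blurred = [blur imageFromCurrentFramebuffer];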
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/iOS/Framework/module.modulemap:
--------------------------------------------------------------------------------
1 | framework module GPUImage {
2 | umbrella header "GPUImageFramework.h"
3 |
4 | export *
5 | module * { export * }
6 | }
7 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/iOS/GPUImage-Prefix.pch:
--------------------------------------------------------------------------------
1 | //
2 | // Prefix header for all source files of the 'GPUImage' target in the 'GPUImage' project
3 | //
4 |
5 | #ifdef __OBJC__
6 | #import <Foundation/Foundation.h>
7 | #endif
8 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/iOS/GPUImagePicture+TextureSubimage.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImagePicture+TextureSubimage.h
3 | // GPUImage
4 | //
5 | // Created by Jack Wu on 2014-05-28.
6 | // Copyright (c) 2014 Brad Larson. All rights reserved.
7 | //
8 |
9 | #import "GPUImagePicture.h"
10 |
11 | @interface GPUImagePicture (TextureSubimage)
12 |
13 | - (void)replaceTextureWithSubimage:(UIImage*)subimage;
14 | - (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource;
15 |
16 | - (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect;
17 | - (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/iOS/GPUImagePicture.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h>
2 | #import "GPUImageOutput.h"
3 |
4 |
5 | @interface GPUImagePicture : GPUImageOutput
6 | {
7 | CGSize pixelSizeOfImage;
8 | BOOL hasProcessedImage;
9 |
10 | dispatch_semaphore_t imageUpdateSemaphore;
11 | }
12 |
13 | // Initialization and teardown
14 | - (id)initWithURL:(NSURL *)url;
15 | - (id)initWithImage:(UIImage *)newImageSource;
16 | - (id)initWithCGImage:(CGImageRef)newImageSource;
17 | - (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
18 | - (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
19 | - (id)initWithImage:(UIImage *)newImageSource removePremultiplication:(BOOL)removePremultiplication;
20 | - (id)initWithCGImage:(CGImageRef)newImageSource removePremultiplication:(BOOL)removePremultiplication;
21 | - (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;
22 | - (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;
23 |
24 | // Image rendering
25 | - (void)processImage;
26 | - (CGSize)outputImageSize;
27 |
28 | /**
29 | * Process image with all targets and filters asynchronously
30 |  * The completion handler is called after processing has finished on the
31 |  * GPU's dispatch queue, and only if this method did not return NO.
32 | *
33 | * @returns NO if resource is blocked and processing is discarded, YES otherwise
34 | */
35 | - (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;
36 | - (void)processImageUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;
37 |
38 | @end
39 |
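A sketch of the asynchronous still-image path declared above; `inputImage` is a placeholder and the vignette filter is an arbitrary choice:

    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageVignetteFilter *vignette = [[GPUImageVignetteFilter alloc] init];
    [picture addTarget:vignette];
    [vignette useNextFrameForImageCapture];

    BOOL accepted = [picture processImageWithCompletionHandler:^{
        UIImage *result = [vignette imageFromCurrentFramebuffer];
        // hand `result` back to the main queue for display
    }];
    if (!accepted) {
        // the picture was still busy, so this processing request was discarded
    }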
--------------------------------------------------------------------------------
/UPLiveService/GPUImage/iOS/GPUImageView.h:
--------------------------------------------------------------------------------
1 | #import <UIKit/UIKit.h>
2 | #import "GPUImageContext.h"
3 |
4 | typedef NS_ENUM(NSUInteger, GPUImageFillModeType) {
5 | kGPUImageFillModeStretch, // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio
6 | kGPUImageFillModePreserveAspectRatio, // Maintains the aspect ratio of the source image, adding bars of the specified background color
7 | kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
8 | };
9 |
10 |
11 |
12 | /**
13 | UIView subclass to use as an endpoint for displaying GPUImage outputs
14 | */
15 | @interface GPUImageView : UIView <GPUImageInput>
16 | {
17 | GPUImageRotationMode inputRotation;
18 | }
19 |
20 | /** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio
21 | */
22 | @property(readwrite, nonatomic) GPUImageFillModeType fillMode;
23 |
24 | /** This calculates the current display size, in pixels, taking into account Retina scaling factors
25 | */
26 | @property(readonly, nonatomic) CGSize sizeInPixels;
27 |
28 | @property(nonatomic) BOOL enabled;
29 |
30 | /** Handling fill mode
31 |
32 | @param redComponent Red component for background color
33 | @param greenComponent Green component for background color
34 | @param blueComponent Blue component for background color
35 | @param alphaComponent Alpha component for background color
36 | */
37 | - (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
38 |
39 | - (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
40 |
41 | @end
42 |
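A sketch of a live camera preview using this view; it assumes the code runs inside a view controller, and the session preset and orientation are illustrative:

    GPUImageVideoCamera *camera =
        [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                            cameraPosition:AVCaptureDevicePositionBack];
    camera.outputImageOrientation = UIInterfaceOrientationPortrait;

    GPUImageView *previewView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
    previewView.fillMode = kGPUImageFillModePreserveAspectRatio;
    [previewView setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];   // black bars
    [self.view addSubview:previewView];

    [camera addTarget:previewView];
    [camera startCameraCapture];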
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/Class/AudioMonitorPlayer.h:
--------------------------------------------------------------------------------
1 | //
2 | // AudioMonitorPlayer.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 12/06/2017.
6 | // Copyright © 2017 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <AudioToolbox/AudioToolbox.h>
11 |
12 | @interface AudioMonitorPlayer : NSObject
13 | @property (nonatomic, assign) BOOL mute;
14 |
15 | - (void)start;
16 | - (void)renderAudioBuffer:(AudioBuffer)audioBuffer info:(AudioStreamBasicDescription)asbd;
17 | - (void)stop;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/Class/UPAudioCapture.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPAudioCapture.h
3 | // Test_audioUnitRecorderAndPlayer
4 | //
5 | // Created by DING FENG on 7/20/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <AudioToolbox/AudioToolbox.h>
11 |
12 | typedef NS_ENUM(NSInteger, UPAudioUnitCategory) {
13 | UPAudioUnitCategory_recorder,
14 | UPAudioUnitCategory_player,
15 | UPAudioUnitCategory_recorderAndplayer
16 | };
17 |
18 | @protocol UPAudioCaptureProtocol
19 | - (void)didReceiveBuffer:(AudioBuffer)audioBuffer info:(AudioStreamBasicDescription)asbd;
20 | @end
21 |
22 | @interface UPAudioCapture : NSObject
23 |
24 | @property (nonatomic, weak) id<UPAudioCaptureProtocol> delegate;
25 | @property (nonatomic) int increaserRate;// 0 = mute, 100 = original volume, 200 = 2x volume gain
26 | @property (nonatomic) BOOL deNoise;
27 | @property (nonatomic) int bgmPlayerType;
28 |
29 |
30 |
31 |
32 | - (id)initWith:(UPAudioUnitCategory)category;
33 | - (id)initWith:(UPAudioUnitCategory)category samplerate:(int)samplerate;
34 | - (void)start;
35 | - (void)stop;
36 |
37 |
38 |
39 | @property (nonatomic, strong) NSString *backgroudMusicUrl;
40 | @property (nonatomic) BOOL backgroudMusicOn;
41 | @property (nonatomic, assign) Float32 backgroudMusicVolume;// defaults to 1, i.e. the original volume
42 |
43 |
44 | @end
45 |
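A hedged sketch of wiring this capture class to the AudioMonitorPlayer declared earlier; only methods and properties visible in these headers are used, and AudioLoopback itself is an illustrative class name:

    #import "UPAudioCapture.h"
    #import "AudioMonitorPlayer.h"

    @interface AudioLoopback : NSObject <UPAudioCaptureProtocol>
    @property (nonatomic, strong) UPAudioCapture *capture;
    @property (nonatomic, strong) AudioMonitorPlayer *monitor;
    - (void)startLoopback;
    @end

    @implementation AudioLoopback

    - (void)startLoopback {
        self.capture = [[UPAudioCapture alloc] initWith:UPAudioUnitCategory_recorder];
        self.capture.delegate = self;
        self.capture.increaserRate = 100;          // original volume

        self.monitor = [[AudioMonitorPlayer alloc] init];
        [self.monitor start];
        [self.capture start];
    }

    // UPAudioCaptureProtocol: route captured buffers into local monitoring.
    - (void)didReceiveBuffer:(AudioBuffer)audioBuffer info:(AudioStreamBasicDescription)asbd {
        [self.monitor renderAudioBuffer:audioBuffer info:asbd];
    }

    @end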
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/Class/UPAudioGraph.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPAudioGraph.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 9/10/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <AudioToolbox/AudioToolbox.h>
11 |
12 | @class UPAudioGraph;
13 |
14 | @protocol UPAudioGraphProtocol
15 | - (void)audioGraph:(UPAudioGraph *)audioGraph
16 | didOutputBuffer:(AudioBuffer)audioBuffer
17 | info:(AudioStreamBasicDescription)asbd;
18 | @end
19 |
20 | @interface UPAudioGraph : NSObject
21 | @property (nonatomic, weak) id<UPAudioGraphProtocol> delegate;
22 |
23 |
24 | - (void)setMixerInputCallbackStruct:(AURenderCallbackStruct)callbackStruct;
25 | - (void)start;
26 | - (void)stop;
27 | - (void)setMixerInputPcmInfo:(AudioStreamBasicDescription)asbd forBusIndex:(int)bus;
28 | - (void)needRenderFramesNum:(UInt32)framesNum
29 | timeStamp:(const AudioTimeStamp *)inTimeStamp
30 | flag:(AudioUnitRenderActionFlags *)ioActionFlags;
31 |
32 | // volume = 1.0 is the original volume
33 | @property (nonatomic, assign) Float32 volumeOfInputBus0;
34 | @property (nonatomic, assign) Float32 volumeOfInputBus1;
35 | @property (nonatomic, assign) Float32 volumeOfOutput;
36 |
37 | @end
38 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/Class/focus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/Class/focus.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/1977blowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/1977blowout.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/1977map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/1977map.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/amaroMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/amaroMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/blackboard1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/blackboard1024.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanBlowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanBlowout.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanContrast.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanContrast.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanLuma.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanLuma.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanProcess.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanProcess.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanScreen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/brannanScreen.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlyBirdCurves.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlyBirdCurves.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlybirdBlowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlybirdBlowout.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlybirdMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlybirdMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlybirdOverlayMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/earlybirdOverlayMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/edgeBurn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/edgeBurn.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeGradientMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeGradientMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeMetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeMetal.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeSoftLight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hefeSoftLight.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hudsonBackground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hudsonBackground.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hudsonMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/hudsonMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/inkwellMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/inkwellMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/kelvinMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/kelvinMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lomoMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lomoMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_amatorka.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_amatorka.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_miss_etikate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_miss_etikate.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_soft_elegance_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_soft_elegance_1.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_soft_elegance_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/lookup_soft_elegance_2.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/nashvilleMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/nashvilleMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/overlayMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/overlayMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/riseMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/riseMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sierraMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sierraMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sierraVignette.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sierraVignette.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/softLight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/softLight.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sutroCurves.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sutroCurves.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sutroEdgeBurn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sutroEdgeBurn.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sutroMetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/sutroMetal.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterColorShift.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterColorShift.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterCurves.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterCurves.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterMetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterMetal.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterOverlayMapWarm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterOverlayMapWarm.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterSoftLight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/toasterSoftLight.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/valenciaGradientMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/valenciaGradientMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/valenciaMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/valenciaMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/vignetteMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/vignetteMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/waldenMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/waldenMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/xproMap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FILTERSIMAGE/xproMap.png
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FW1977Filter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FW1977Filter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter9 : GPUImageThreeInputFilter
12 |
13 | @end
14 |
15 | @interface FW1977Filter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | }
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWAmaroFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWAmaroFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter3 : GPUImageFourInputFilter
12 |
13 | @end
14 |
15 | @interface FWAmaroFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | GPUImagePicture *imageSource3;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWBrannanFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWBrannanFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "FWSixInputFilter.h"
11 |
12 | @interface FWFilter16 : FWSixInputFilter
13 |
14 | @end
15 |
16 | @interface FWBrannanFilter : GPUImageFilterGroup
17 | {
18 | GPUImagePicture *imageSource1;
19 | GPUImagePicture *imageSource2;
20 | GPUImagePicture *imageSource3;
21 | GPUImagePicture *imageSource4;
22 | GPUImagePicture *imageSource5;
23 | }
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWEarlybirdFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWEarlybirdFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "FWSixInputFilter.h"
11 |
12 | @interface FWFilter13 : FWSixInputFilter
13 |
14 | @end
15 |
16 | @interface FWEarlybirdFilter : GPUImageFilterGroup
17 | {
18 | GPUImagePicture *imageSource1;
19 | GPUImagePicture *imageSource2;
20 | GPUImagePicture *imageSource3;
21 | GPUImagePicture *imageSource4;
22 | GPUImagePicture *imageSource5;
23 | }
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWFiveInputFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWFiveInputFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFiveInputFilter : GPUImageFourInputFilter
12 | {
13 | GPUImageFramebuffer *fifthInputFramebuffer;
14 |
15 | GLint filterFifthTextureCoordinateAttribute;
16 | GLint filterInputTextureUniform5;
17 | GPUImageRotationMode inputRotation5;
18 | GLuint filterSourceTexture5;
19 | CMTime fifthFrameTime;
20 |
21 | BOOL hasSetFourthTexture, hasReceivedFifthFrame, fifthFrameWasVideo;
22 | BOOL fifthFrameCheckDisabled;
23 | }
24 |
25 | - (void)disableFifthFrameCheck;
26 |
27 | @end
28 |
--------------------------------------------------------------------------------
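FWFiveInputFilter and FWSixInputFilter (below) extend GPUImage's stock four-input filter with extra texture slots, so a single blend shader can mix the camera frame with several lookup images at once. As a hedged sketch of the wiring pattern the FW*Filter groups in this directory use (FWFilter13 is the six-input shader declared in FWEarlybirdFilter.h below; the lookup image name here is illustrative, not the exact SDK code):

    // Inside a hypothetical GPUImageFilterGroup subclass's -init:
    FWFilter13 *blend = [[FWFilter13 alloc] init];      // a six-input blend shader
    [self addFilter:blend];

    // Texture location 0 receives the video frame; locations 1..5 take lookup maps.
    GPUImagePicture *map1 = [[GPUImagePicture alloc]
        initWithImage:[UIImage imageNamed:@"someLookupMap"]];   // illustrative name
    [map1 addTarget:blend atTextureLocation:1];
    [map1 processImage];

    self.initialFilters = @[blend];
    self.terminalFilter = blend;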
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWHefeFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWHefeFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "FWSixInputFilter.h"
11 |
12 | @interface FWFilter17 : FWSixInputFilter
13 |
14 | @end
15 |
16 | @interface FWHefeFilter : GPUImageFilterGroup
17 | {
18 | GPUImagePicture *imageSource1;
19 | GPUImagePicture *imageSource2;
20 | GPUImagePicture *imageSource3;
21 | GPUImagePicture *imageSource4;
22 | GPUImagePicture *imageSource5;
23 | }
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWHudsonFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWHudsonFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter5 : GPUImageFourInputFilter
12 |
13 | @end
14 |
15 | @interface FWHudsonFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | GPUImagePicture *imageSource3;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWInkwellFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWInkwellFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter10 : GPUImageTwoInputFilter
12 |
13 | @end
14 |
15 | @interface FWInkwellFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource;
18 | }
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWInkwellFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // FWInkwellFilter.m
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "FWInkwellFilter.h"
10 |
11 | NSString *const kFWInkWellShaderString = SHADER_STRING
12 | (
13 | precision lowp float;
14 |
15 | varying highp vec2 textureCoordinate;
16 |
17 | uniform sampler2D inputImageTexture;
18 | uniform sampler2D inputImageTexture2;
19 |
20 | void main()
21 | {
22 | vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;
23 | texel = vec3(dot(vec3(0.3, 0.6, 0.1), texel));
24 | texel = vec3(texture2D(inputImageTexture2, vec2(texel.r, .16666)).r);
25 | gl_FragColor = vec4(texel, 1.0);
26 | }
27 | );
28 |
29 | @implementation FWFilter10
30 |
31 | - (id)init;
32 | {
33 | if (!(self = [super initWithFragmentShaderFromString:kFWInkWellShaderString]))
34 | {
35 | return nil;
36 | }
37 |
38 | return self;
39 | }
40 |
41 | @end
42 |
43 | @implementation FWInkwellFilter
44 |
45 | - (id)init
46 | {
47 | if (!(self = [super init]))
48 | {
49 | return nil;
50 | }
51 |
52 | UIImage *image = [UIImage imageNamed:@"inkwellMap"];
53 |
54 | imageSource = [[GPUImagePicture alloc] initWithImage:image];
55 | FWFilter10 *filter = [[FWFilter10 alloc] init];
56 |
57 | [self addFilter:filter];
58 | [imageSource addTarget:filter atTextureLocation:1];
59 | [imageSource processImage];
60 |
61 | self.initialFilters = [NSArray arrayWithObjects:filter, nil];
62 | self.terminalFilter = filter;
63 |
64 | return self;
65 | }
66 |
67 | @end
68 |
--------------------------------------------------------------------------------
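FWInkwellFilter.m is the template every FW group follows: load a lookup PNG into a GPUImagePicture, bind it to texture slot 1 of the blend shader, and expose that shader as both the initial and terminal filter of the group. Applying the finished group to a still image then uses the ordinary GPUImage one-shot pattern; a minimal sketch, assuming inkwellMap.png is in the app bundle and the input image name is illustrative:

    UIImage *input = [UIImage imageNamed:@"sample"];   // illustrative image name
    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:input];
    FWInkwellFilter *inkwell = [[FWInkwellFilter alloc] init];

    [source addTarget:inkwell];
    [inkwell useNextFrameForImageCapture];   // keep the framebuffer for readback
    [source processImage];

    UIImage *output = [inkwell imageFromCurrentFramebuffer];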
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWLomofiFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWLomofiFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter6 : GPUImageThreeInputFilter
12 |
13 | @end
14 |
15 | @interface FWLomofiFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | }
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWLordKelvinFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWLordKelvinFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/8.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "GPUImageFilter.h"
11 | #import "GPUImageFilterGroup.h"
12 |
13 | @interface FWFilter2 : GPUImageTwoInputFilter
14 |
15 | @end
16 |
17 | @interface FWLordKelvinFilter : GPUImageFilterGroup
18 | {
19 | GPUImagePicture *imageSource;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWNashvilleFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWNashvilleFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/8.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter1 : GPUImageTwoInputFilter
12 |
13 |
14 |
15 | @end
16 |
17 | @interface FWNashvilleFilter : GPUImageFilterGroup
18 | {
19 | GPUImagePicture *imageSource;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWRiseFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWRiseFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter4 : GPUImageFourInputFilter
12 |
13 | @end
14 |
15 | @interface FWRiseFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | GPUImagePicture *imageSource3;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWSierraFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWSierraFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter11 : GPUImageFourInputFilter
12 |
13 | @end
14 |
15 | @interface FWSierraFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | GPUImagePicture *imageSource3;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWSixInputFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWSixInputFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "FWFiveInputFilter.h"
11 |
12 | @interface FWSixInputFilter : FWFiveInputFilter
13 | {
14 | GPUImageFramebuffer *sixthInputFramebuffer;
15 |
16 | GLint filterSixthTextureCoordinateAttribute;
17 | GLint filterInputTextureUniform6;
18 | GPUImageRotationMode inputRotation6;
19 | GLuint filterSourceTexture6;
20 | CMTime sixthFrameTime;
21 |
22 | BOOL hasSetFifthTexture, hasReceivedSixthFrame, sixthFrameWasVideo;
23 | BOOL sixthFrameCheckDisabled;
24 | }
25 |
26 | - (void)disableSixthFrameCheck;
27 | @end
28 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWSutroFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWSutroFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "FWSixInputFilter.h"
11 |
12 | @interface FWFilter14 : FWSixInputFilter
13 |
14 | @end
15 |
16 | @interface FWSutroFilter : GPUImageFilterGroup
17 | {
18 | GPUImagePicture *imageSource1;
19 | GPUImagePicture *imageSource2;
20 | GPUImagePicture *imageSource3;
21 | GPUImagePicture *imageSource4;
22 | GPUImagePicture *imageSource5;
23 | }
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWToasterFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWToasterFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "FWSixInputFilter.h"
11 |
12 | @interface FWFilter15 : FWSixInputFilter
13 |
14 | @end
15 |
16 | @interface FWToasterFilter : GPUImageFilterGroup
17 | {
18 | GPUImagePicture *imageSource1;
19 | GPUImagePicture *imageSource2;
20 | GPUImagePicture *imageSource3;
21 | GPUImagePicture *imageSource4;
22 | GPUImagePicture *imageSource5;
23 | }
24 |
25 | @end
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWValenciaFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWValenciaFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 | #import "GPUImageThreeInputFilter.h"
11 |
12 | @interface FWFilter8 : GPUImageThreeInputFilter
13 |
14 | @end
15 |
16 | @interface FWValenciaFilter : GPUImageFilterGroup
17 | {
18 | GPUImagePicture *imageSource1;
19 | GPUImagePicture *imageSource2;
20 | }
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWWaldenFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWWaldenFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter7 : GPUImageThreeInputFilter
12 |
13 | @end
14 |
15 | @interface FWWaldenFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | }
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/FWXproIIFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // FWXproIIFilter.h
3 | // FWMeituApp
4 | //
5 | // Created by hzkmn on 16/1/11.
6 | // Copyright © 2016年 ForrestWoo co,.ltd. All rights reserved.
7 | //
8 |
9 | #import "GPUImage.h"
10 |
11 | @interface FWFilter12 : GPUImageThreeInputFilter
12 |
13 | @end
14 |
15 | @interface FWXproIIFilter : GPUImageFilterGroup
16 | {
17 | GPUImagePicture *imageSource1;
18 | GPUImagePicture *imageSource2;
19 | }
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/UPCustonFilters.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPCustonFilters.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by 林港 on 16/8/17.
6 | // Copyright © 2016年 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 |
11 |
12 | #import "FWNashvilleFilter.h"
13 | #import "FWLordKelvinFilter.h"
14 | #import "GPUImageVibranceFilter.h"
15 | #import "FWAmaroFilter.h"
16 | #import "FWRiseFilter.h"
17 | #import "FWHudsonFilter.h"
18 | #import "FW1977Filter.h"
19 | #import "FWValenciaFilter.h"
20 | #import "FWXproIIFilter.h"
21 | #import "FWWaldenFilter.h"
22 | #import "FWLomofiFilter.h"
23 | #import "FWInkwellFilter.h"
24 | #import "FWSierraFilter.h"
25 | #import "FWEarlybirdFilter.h"
26 | #import "FWSutroFilter.h"
27 | #import "FWToasterFilter.h"
28 | #import "FWBrannanFilter.h"
29 | #import "FWHefeFilter.h"
30 |
31 | @interface UPCustonFilters : NSObject
32 |
33 | @end
34 |
--------------------------------------------------------------------------------
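UPCustonFilters.h is an umbrella header: it only aggregates the FW* Instagram-style filter headers so callers can pick a filter by class. A hedged sketch of selecting one at runtime (the name-to-class table, `camera`, and `preview` are illustrative; the lookup PNGs under FILTERSIMAGE must be bundled for these groups to work):

    NSDictionary<NSString *, Class> *styles = @{ @"Valencia" : [FWValenciaFilter class],
                                                 @"Walden"   : [FWWaldenFilter class],
                                                 @"1977"     : [FW1977Filter class] };
    Class chosen = styles[@"Walden"];
    GPUImageFilterGroup *filter = [[chosen alloc] init];

    // Wire it like any other GPUImage filter (camera and preview are assumed to exist):
    [camera addTarget:filter];
    [filter addTarget:preview];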
/UPLiveService/UPAVCapturer/ImageProcessor/Custom Filters/UPCustonFilters.m:
--------------------------------------------------------------------------------
1 | //
2 | // UPCustonFilters.m
3 | // UPLiveSDKDemo
4 | //
5 | // Created by 林港 on 16/8/17.
6 | // Copyright © 2016年 upyun.com. All rights reserved.
7 | //
8 |
9 | #import "UPCustonFilters.h"
10 |
11 |
12 |
13 | @implementation UPCustonFilters
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/UPLiveService/UPAVCapturer/ImageProcessor/VideoFilter/GPUImageBeautifyFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // GPUImageBeautifyFilter.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by 林港 on 16/8/17.
6 | // Copyright © 2016年 upyun.com. All rights reserved.
7 | //
8 |
9 | //
10 | // GPUImageBeautifyFilter.h
11 | // BeautifyFaceDemo
12 | //
13 | // Created by guikz on 16/4/28.
14 | // Copyright © 2016年 guikz. All rights reserved.
15 | //
16 |
17 | #import "GPUImage.h"
18 |
19 | @class GPUImageCombinationFilter;
20 | @class GPUImageSobelEdgeDetectionFilter;
21 |
22 | @interface GPUImageBeautifyFilter : GPUImageFilterGroup {
23 |     // For tuning details, see http://www.jianshu.com/p/dde412cab8db
24 |
25 | }
26 |
27 |
28 |
29 |
30 |
31 | /// Beautify strength. Larger values give a stronger effect; adjust as needed.
32 | @property (nonatomic, assign) CGFloat level; // default 0.6
33 |
34 | /// Skin smoothing (bilateral blur). Smaller values give a stronger effect; keeping the default is recommended.
35 | @property (nonatomic, assign) CGFloat bilateralLevel; // default 4.0
36 |
37 | /// Saturation. Smaller values wash the image out; larger values intensify color. Adjust as needed.
38 | @property (nonatomic, assign) CGFloat saturationLevel; // default 1.1
39 |
40 | /// Brightness. Smaller values darken the image; larger values brighten it. Adjust as needed.
41 | @property (nonatomic, assign) CGFloat brightnessLevel; // default 1.1
42 |
43 |
44 | @property (nonatomic, strong) GPUImageHSBFilter *hsbFilter;
45 | @property (nonatomic, strong) GPUImageBilateralFilter *bilateralFilter;
46 | @property (nonatomic, strong) GPUImageSobelEdgeDetectionFilter *sobelEdgeFilter;
47 | @property (nonatomic, strong) GPUImageCombinationFilter *combinationFilter;
48 |
49 |
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
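The four properties above are the whole tuning surface of the beautify group: `level` controls the overall smoothing mix, `bilateralLevel` the blur strength, and the last two feed the HSB pass. A minimal sketch of a live preview chain, assuming a plain GPUImageVideoCamera and GPUImageView rather than the SDK's own capturer:

    GPUImageVideoCamera *camera =
        [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                            cameraPosition:AVCaptureDevicePositionFront];
    camera.outputImageOrientation = UIInterfaceOrientationPortrait;

    GPUImageBeautifyFilter *beautify = [[GPUImageBeautifyFilter alloc] init];
    beautify.level = 0.6;            // default; larger is stronger
    beautify.saturationLevel = 1.1;  // default
    beautify.brightnessLevel = 1.1;  // default

    GPUImageView *preview = (GPUImageView *)self.view;   // hypothetical preview view
    [camera addTarget:beautify];
    [beautify addTarget:preview];
    [camera startCameraCapture];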
/UPLiveService/UPAVCapturer/ImageProcessor/VideoFilter/LFGPUImageBeautyFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface LFGPUImageBeautyFilter : GPUImageFilter {
4 | }
5 |
6 | @property (nonatomic, assign) CGFloat beautyLevel;
7 | @property (nonatomic, assign) CGFloat brightLevel;
8 | @property (nonatomic, assign) CGFloat toneLevel;
9 | @end
10 |
--------------------------------------------------------------------------------
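LFGPUImageBeautyFilter is the lighter, single-pass alternative to the group above: one GPUImageFilter with three knobs. It drops into a pipeline the same way; a brief sketch (the 0.5 values and the per-property comments are inferred from the property names, not documented here):

    LFGPUImageBeautyFilter *beauty = [[LFGPUImageBeautyFilter alloc] init];
    beauty.beautyLevel = 0.5;   // skin smoothing
    beauty.brightLevel = 0.5;   // brightening
    beauty.toneLevel   = 0.5;   // skin-tone adjustment
    // Chain as usual: [camera addTarget:beauty]; [beauty addTarget:preview];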
/UPLiveService/UPLiveSDKDll.framework/Headers/AudioProcessor.h:
--------------------------------------------------------------------------------
1 | //
2 | // AudioProcessor.h
3 | // UPLiveSDKDemo
4 | //
5 | // Created by DING FENG on 8/22/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 |
11 | @interface AudioProcessor : NSObject
12 |
13 |
14 | /*
15 |  PCM samples must be in s16 format.
16 |  The noise-suppression level is a negative dB value; the default is -8.
17 |  PCM sample rate; the default is 44100.
18 | */
19 |
20 |
21 |
22 | - (id)initWithNoiseSuppress:(int)level samplerate:(int)rate;
23 | - (NSData *)noiseSuppression:(NSData *)pcmInput;
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
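AudioProcessor wraps a noise-suppression pass over raw PCM: input must be signed 16-bit samples, the suppression level is a negative dB figure (default -8), and the sample rate defaults to 44100 Hz. A minimal sketch of using it from an audio callback (the wrapper method is hypothetical; only the two declared AudioProcessor calls are real):

    // Hypothetical helper; pcmData is assumed to hold interleaved s16 samples.
    - (NSData *)denoisePCM:(NSData *)pcmData {
        static AudioProcessor *denoiser;
        if (!denoiser) {
            denoiser = [[AudioProcessor alloc] initWithNoiseSuppress:-8 samplerate:44100];
        }
        return [denoiser noiseSuppression:pcmData];   // returns the denoised PCM
    }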
/UPLiveService/UPLiveSDKDll.framework/Headers/UPLiveSDKConfig.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPLiveSDKConfig.h
3 | // UPLiveSDKLib
4 | //
5 | // Created by DING FENG on 6/29/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 |
11 | #define UPLiveSDK_Version @"4.1.7"
12 |
13 | typedef NS_ENUM(NSInteger, UPLiveSDKLogger_level) {
14 | UP_Level_debug,
15 | UP_Level_warn,
16 | UP_Level_error
17 | };
18 |
19 | @interface UPLiveSDKConfig : NSObject
20 | /// Log output level.
21 | + (void)setLogLevel:(UPLiveSDKLogger_level)level;
22 | /// Playback quality statistics; enabled by default.
23 | + (void)setStatistcsOn:(BOOL)onOff;
24 |
25 | @end
26 |
27 |
28 |
--------------------------------------------------------------------------------
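Both switches are class methods with global effect, so they are usually called once at startup; a minimal example (placing it in application:didFinishLaunchingWithOptions: is just the obvious spot, not a requirement):

    [UPLiveSDKConfig setLogLevel:UP_Level_warn];   // UP_Level_debug / warn / error
    [UPLiveSDKConfig setStatistcsOn:YES];          // playback QoS statistics, on by default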
/UPLiveService/UPLiveSDKDll.framework/Headers/UPLiveSDKLogger.h:
--------------------------------------------------------------------------------
1 | //
2 | // UPAVPlayerLogger.h
3 | // UPAVPlayerDemo
4 | //
5 | // Created by DING FENG on 2/23/16.
6 | // Copyright © 2016 upyun.com. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import "UPLiveSDKConfig.h"
11 |
12 |
13 |
14 |
15 | #define logActiveTags (UP_Tag_stream + UP_Tag_video + UP_Tag_audio + UP_Tag_default + UP_Tag_event + UP_Tag_verbose)
16 |
17 |
18 | typedef NS_ENUM(NSInteger, UPLiveSDKLogger_tag) {
19 | UP_Tag_stream = 1 << 0,
20 | UP_Tag_video = 1 << 1,
21 | UP_Tag_audio = 1 << 2,
22 | UP_Tag_default = 1 << 3,
23 | UP_Tag_event = 1 << 4,
24 | UP_Tag_verbose = 1 << 5
25 | };
26 |
27 | @interface UPLiveSDKLogger : NSObject
28 |
29 | @property (nonatomic)UPLiveSDKLogger_level UP_LOG_LEVEL_LIMIT;
30 |
31 | + (UPLiveSDKLogger *)sharedInstance;
32 |
33 | + (void)log:(NSString *)message level:(UPLiveSDKLogger_level)level tag:(UPLiveSDKLogger_tag)tag;
34 | + (void)setLogLevel:(UPLiveSDKLogger_level)level;
35 |
36 | @end
37 |
--------------------------------------------------------------------------------
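The logger pairs a severity level with a bitmask of tags; logActiveTags simply combines every tag (written as a sum of distinct single-bit values). A short sketch using only the class methods declared above:

    [UPLiveSDKLogger setLogLevel:UP_Level_debug];
    [UPLiveSDKLogger log:@"rtmp connected" level:UP_Level_debug tag:UP_Tag_stream];

    // Tags are single bits, so membership in logActiveTags is a mask test:
    BOOL streamLogsEnabled = (logActiveTags & UP_Tag_stream) != 0;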
/UPLiveService/UPLiveSDKDll.framework/Info.plist:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPLiveSDKDll.framework/Info.plist
--------------------------------------------------------------------------------
/UPLiveService/UPLiveSDKDll.framework/UPLiveSDKDll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/upyun/ios-live-sdk/d90e4427181b0404051c8574cb88276789a26a40/UPLiveService/UPLiveSDKDll.framework/UPLiveSDKDll
--------------------------------------------------------------------------------