├── .github
│ └── FUNDING.yml
├── .gitignore
├── LICENSE
├── Podfile
├── Podfile.lock
├── Pods
│ ├── Manifest.lock
│ ├── Pods.xcodeproj
│ │ └── project.pbxproj
│ ├── Target Support Files
│ │ ├── Pods-iOS-Depth-Sampler
│ │ │ ├── Pods-iOS-Depth-Sampler-Info.plist
│ │ │ ├── Pods-iOS-Depth-Sampler-acknowledgements.markdown
│ │ │ ├── Pods-iOS-Depth-Sampler-acknowledgements.plist
│ │ │ ├── Pods-iOS-Depth-Sampler-dummy.m
│ │ │ ├── Pods-iOS-Depth-Sampler-frameworks-Debug-input-files.xcfilelist
│ │ │ ├── Pods-iOS-Depth-Sampler-frameworks-Debug-output-files.xcfilelist
│ │ │ ├── Pods-iOS-Depth-Sampler-frameworks-Release-input-files.xcfilelist
│ │ │ ├── Pods-iOS-Depth-Sampler-frameworks-Release-output-files.xcfilelist
│ │ │ ├── Pods-iOS-Depth-Sampler-frameworks.sh
│ │ │ ├── Pods-iOS-Depth-Sampler-umbrella.h
│ │ │ ├── Pods-iOS-Depth-Sampler.debug.xcconfig
│ │ │ ├── Pods-iOS-Depth-Sampler.modulemap
│ │ │ └── Pods-iOS-Depth-Sampler.release.xcconfig
│ │ └── Vivid
│ │ │ ├── Vivid-Info.plist
│ │ │ ├── Vivid-dummy.m
│ │ │ ├── Vivid-prefix.pch
│ │ │ ├── Vivid-umbrella.h
│ │ │ ├── Vivid.debug.xcconfig
│ │ │ ├── Vivid.modulemap
│ │ │ └── Vivid.release.xcconfig
│ └── Vivid
│ │ ├── LICENSE
│ │ ├── README.md
│ │ └── Sources
│ │ │ ├── YUCIBilateralFilter.cikernel
│ │ │ ├── YUCIBilateralFilter.h
│ │ │ ├── YUCIBilateralFilter.m
│ │ │ ├── YUCIBlobsGenerator.cikernel
│ │ │ ├── YUCIBlobsGenerator.h
│ │ │ ├── YUCIBlobsGenerator.m
│ │ │ ├── YUCICLAHE.cikernel
│ │ │ ├── YUCICLAHE.h
│ │ │ ├── YUCICLAHE.m
│ │ │ ├── YUCIColorLookup.cikernel
│ │ │ ├── YUCIColorLookup.h
│ │ │ ├── YUCIColorLookup.m
│ │ │ ├── YUCIColorLookupTableDefault.png
│ │ │ ├── YUCICrossZoomTransition.cikernel
│ │ │ ├── YUCICrossZoomTransition.h
│ │ │ ├── YUCICrossZoomTransition.m
│ │ │ ├── YUCIFXAA.cikernel
│ │ │ ├── YUCIFXAA.h
│ │ │ ├── YUCIFXAA.m
│ │ │ ├── YUCIFilmBurnTransition.cikernel
│ │ │ ├── YUCIFilmBurnTransition.h
│ │ │ ├── YUCIFilmBurnTransition.m
│ │ │ ├── YUCIFilterConstructor.h
│ │ │ ├── YUCIFilterConstructor.m
│ │ │ ├── YUCIFilterPreviewGenerator.h
│ │ │ ├── YUCIFilterPreviewGenerator.m
│ │ │ ├── YUCIFilterUtilities.h
│ │ │ ├── YUCIFilterUtilities.m
│ │ │ ├── YUCIFlashTransition.cikernel
│ │ │ ├── YUCIFlashTransition.h
│ │ │ ├── YUCIFlashTransition.m
│ │ │ ├── YUCIHSLToRGB.cikernel
│ │ │ ├── YUCIHistogramEqualization.h
│ │ │ ├── YUCIHistogramEqualization.m
│ │ │ ├── YUCIRGBToHSL.cikernel
│ │ │ ├── YUCIRGBToneCurve.cikernel
│ │ │ ├── YUCIRGBToneCurve.h
│ │ │ ├── YUCIRGBToneCurve.m
│ │ │ ├── YUCIReflectedTile.cikernel
│ │ │ ├── YUCIReflectedTile.h
│ │ │ ├── YUCIReflectedTile.m
│ │ │ ├── YUCIReflectedTileROICalculator.h
│ │ │ ├── YUCIReflectedTileROICalculator.m
│ │ │ ├── YUCISkyGenerator.cikernel
│ │ │ ├── YUCISkyGenerator.h
│ │ │ ├── YUCISkyGenerator.m
│ │ │ ├── YUCIStarfieldGenerator.cikernel
│ │ │ ├── YUCIStarfieldGenerator.h
│ │ │ ├── YUCIStarfieldGenerator.m
│ │ │ ├── YUCISurfaceBlur.cikernel
│ │ │ ├── YUCISurfaceBlur.h
│ │ │ ├── YUCISurfaceBlur.m
│ │ │ ├── YUCITriangularPixellate.cikernel
│ │ │ ├── YUCITriangularPixellate.h
│ │ │ ├── YUCITriangularPixellate.m
│ │ │ ├── YUCIUtilities.h
│ │ │ └── YUCIUtilities.m
├── README.md
├── README_resources
│ ├── 3d.gif
│ ├── arkit-depth.gif
│ ├── blend.gif
│ ├── depth_1.gif
│ ├── depth_baby_histoeq.jpg
│ └── portraitmatte.gif
├── iOS-Depth-Sampler.xcodeproj
│ ├── project.pbxproj
│ └── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
├── iOS-Depth-Sampler.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
└── iOS-Depth-Sampler
├── AppDelegate.swift
├── Assets.xcassets
├── AppIcon.appiconset
│ └── Contents.json
├── BigBang.imageset
│ ├── BigBang-184895179-web-2.jpg
│ └── Contents.json
├── Contents.json
├── burn
│ ├── Contents.json
│ ├── burn000001.imageset
│ │ ├── Contents.json
│ │ └── burn000001.jpg
│ ├── burn000002.imageset
│ │ ├── Contents.json
│ │ └── burn000002.jpg
│ ├── burn000003.imageset
│ │ ├── Contents.json
│ │ └── burn000003.jpg
│ ├── burn000004.imageset
│ │ ├── Contents.json
│ │ └── burn000004.jpg
│ ├── burn000005.imageset
│ │ ├── Contents.json
│ │ └── burn000005.jpg
│ ├── burn000006.imageset
│ │ ├── Contents.json
│ │ └── burn000006.jpg
│ ├── burn000007.imageset
│ │ ├── Contents.json
│ │ └── burn000007.jpg
│ ├── burn000008.imageset
│ │ ├── Contents.json
│ │ └── burn000008.jpg
│ ├── burn000009.imageset
│ │ ├── Contents.json
│ │ └── burn000009.jpg
│ ├── burn000010.imageset
│ │ ├── Contents.json
│ │ └── burn000010.jpg
│ ├── burn000011.imageset
│ │ ├── Contents.json
│ │ └── burn000011.jpg
│ ├── burn000012.imageset
│ │ ├── Contents.json
│ │ └── burn000012.jpg
│ ├── burn000013.imageset
│ │ ├── Contents.json
│ │ └── burn000013.jpg
│ ├── burn000014.imageset
│ │ ├── Contents.json
│ │ └── burn000014.jpg
│ ├── burn000015.imageset
│ │ ├── Contents.json
│ │ └── burn000015.jpg
│ ├── burn000016.imageset
│ │ ├── Contents.json
│ │ └── burn000016.jpg
│ ├── burn000017.imageset
│ │ ├── Contents.json
│ │ └── burn000017.jpg
│ ├── burn000018.imageset
│ │ ├── Contents.json
│ │ └── burn000018.jpg
│ ├── burn000019.imageset
│ │ ├── Contents.json
│ │ └── burn000019.jpg
│ ├── burn000020.imageset
│ │ ├── Contents.json
│ │ └── burn000020.jpg
│ ├── burn000021.imageset
│ │ ├── Contents.json
│ │ └── burn000021.jpg
│ ├── burn000022.imageset
│ │ ├── Contents.json
│ │ └── burn000022.jpg
│ ├── burn000023.imageset
│ │ ├── Contents.json
│ │ └── burn000023.jpg
│ └── burn000024.imageset
│ │ ├── Contents.json
│ │ └── burn000024.jpg
├── earth.imageset
│ ├── 10207-1.jpg
│ └── Contents.json
├── something.imageset
│ ├── 7341d720.jpg
│ └── Contents.json
└── warp
│ ├── 000001.imageset
│ ├── 000001.jpg
│ └── Contents.json
│ ├── 000002.imageset
│ ├── 000002.jpg
│ └── Contents.json
│ ├── 000003.imageset
│ ├── 000003.jpg
│ └── Contents.json
│ ├── 000004.imageset
│ ├── 000004.jpg
│ └── Contents.json
│ ├── 000005.imageset
│ ├── 000005.jpg
│ └── Contents.json
│ ├── 000006.imageset
│ ├── 000006.jpg
│ └── Contents.json
│ ├── 000007.imageset
│ ├── 000007.jpg
│ └── Contents.json
│ ├── 000008.imageset
│ ├── 000008.jpg
│ └── Contents.json
│ ├── 000009.imageset
│ ├── 000009.jpg
│ └── Contents.json
│ ├── 000010.imageset
│ ├── 000010.jpg
│ └── Contents.json
│ ├── 000011.imageset
│ ├── 000011.jpg
│ └── Contents.json
│ ├── 000012.imageset
│ ├── 000012.jpg
│ └── Contents.json
│ ├── 000013.imageset
│ ├── 000013.jpg
│ └── Contents.json
│ ├── 000014.imageset
│ ├── 000014.jpg
│ └── Contents.json
│ ├── 000015.imageset
│ ├── 000015.jpg
│ └── Contents.json
│ ├── 000016.imageset
│ ├── 000016.jpg
│ └── Contents.json
│ ├── 000017.imageset
│ ├── 000017.jpg
│ └── Contents.json
│ ├── 000018.imageset
│ ├── 000018.jpg
│ └── Contents.json
│ ├── 000019.imageset
│ ├── 000019.jpg
│ └── Contents.json
│ ├── 000020.imageset
│ ├── 000020.jpg
│ └── Contents.json
│ ├── 000021.imageset
│ ├── 000021.jpg
│ └── Contents.json
│ ├── 000022.imageset
│ ├── 000022.jpg
│ └── Contents.json
│ ├── 000023.imageset
│ ├── 000023.jpg
│ └── Contents.json
│ ├── 000024.imageset
│ ├── 000024.jpg
│ └── Contents.json
│ └── Contents.json
├── Base.lproj
└── LaunchScreen.storyboard
├── DepthImagePickableViewController.swift
├── Info.plist
├── Main.storyboard
├── Renderer
├── MetalRenderer.swift
└── PassThrough.metal
├── Resources
├── image-with-depth.jpg
└── image-with-matte.jpg
├── RootViewCell.swift
├── RootViewController.swift
├── SampleDataSource.swift
├── Samples
├── ARKit
│ ├── ARKitDepth.storyboard
│ ├── ARKitDepthViewController.swift
│ └── TrackingState+Description.swift
├── Depth-from-Camera-Roll
│ ├── DepthFromCameraRoll.storyboard
│ └── DepthFromCameraRollViewController.swift
├── PointCloud
│ ├── PointCloud.storyboard
│ ├── PointCloud.swift
│ └── PointCloudViewController.swift
├── Portrait-Matte
│ ├── PortraitMatte.storyboard
│ └── PortraitMatteViewController.swift
├── Realtime-Depth
│ ├── Base.lproj
│ │ └── RealtimeDepth.storyboard
│ └── RealtimeDepthViewController.swift
└── Realtime-Mask
│ ├── Base.lproj
│ └── RealtimeDepthMask.storyboard
│ └── RealtimeDepthMaskViewController.swift
├── Utils
├── AVDepthData+Utils.swift
├── CGImageSource+Depth.swift
├── CIImage+Utils.swift
├── CVPixelBuffer+CIImage.swift
├── PhotosUtils.swift
├── UIAlertController+Utils.swift
└── UIImage+Utils.swift
├── VideoCapture
├── AVCaptureDevice+Extension.swift
├── VideoCameraType.swift
└── VideoCapture.swift
├── iOS-Depth-Sampler-Bridging-Header.h
└── iOS-Depth-Sampler.entitlements
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [shu223]
4 | custom: ['https://paypal.me/shu223', 'https://note.com/shu223/m/me1aa6761ab16']
5 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## Build generated
6 | build/
7 | DerivedData/
8 |
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 |
20 | ## Other
21 | *.moved-aside
22 | *.xccheckout
23 | *.xcscmblueprint
24 |
25 | ## Obj-C/Swift specific
26 | *.hmap
27 | *.ipa
28 | *.dSYM.zip
29 | *.dSYM
30 |
31 | ## Playgrounds
32 | timeline.xctimeline
33 | playground.xcworkspace
34 |
35 | # Swift Package Manager
36 | #
37 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
38 | # Packages/
39 | # Package.pins
40 | # Package.resolved
41 | .build/
42 |
43 | # CocoaPods
44 | #
45 | # We recommend against adding the Pods directory to your .gitignore. However
46 | # you should judge for yourself, the pros and cons are mentioned at:
47 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
48 | #
49 | # Pods/
50 |
51 | # Carthage
52 | #
53 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
54 | # Carthage/Checkouts
55 |
56 | Carthage/Build
57 |
58 | # fastlane
59 | #
60 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
61 | # screenshots whenever they are needed.
62 | # For more information about the recommended setup visit:
63 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
64 |
65 | fastlane/report.xml
66 | fastlane/Preview.html
67 | fastlane/screenshots/**/*.png
68 | fastlane/test_output
69 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Shuichi Tsutsumi
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Podfile:
--------------------------------------------------------------------------------
1 | platform :ios, '11.0'
2 |
3 | target 'iOS-Depth-Sampler' do
4 | use_frameworks!
5 |
6 | pod 'Vivid'
7 |
8 | end
9 |
10 | post_install do |installer|
11 | installer.pods_project.targets.each do |target|
12 | target.build_configurations.each do |config|
13 | config.build_settings['GCC_WARN_INHIBIT_ALL_WARNINGS'] = 'YES'
14 | if target.name == 'Vivid'
15 | config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '11.0'
16 | end
17 | end
18 | end
19 | end
20 |
--------------------------------------------------------------------------------
/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - Vivid (0.9)
3 |
4 | DEPENDENCIES:
5 | - Vivid
6 |
7 | SPEC REPOS:
8 | trunk:
9 | - Vivid
10 |
11 | SPEC CHECKSUMS:
12 | Vivid: 0fde7409beac71224deb151dd78f98a5bb860497
13 |
14 | PODFILE CHECKSUM: 65cab38e29d6fc7cbd9f3e58214ad09faeee5145
15 |
16 | COCOAPODS: 1.11.2
17 |
--------------------------------------------------------------------------------
/Pods/Manifest.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - Vivid (0.9)
3 |
4 | DEPENDENCIES:
5 | - Vivid
6 |
7 | SPEC REPOS:
8 | trunk:
9 | - Vivid
10 |
11 | SPEC CHECKSUMS:
12 | Vivid: 0fde7409beac71224deb151dd78f98a5bb860497
13 |
14 | PODFILE CHECKSUM: 65cab38e29d6fc7cbd9f3e58214ad09faeee5145
15 |
16 | COCOAPODS: 1.11.2
17 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>en</string>
7 |   <key>CFBundleExecutable</key>
8 |   <string>${EXECUTABLE_NAME}</string>
9 |   <key>CFBundleIdentifier</key>
10 |   <string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
11 |   <key>CFBundleInfoDictionaryVersion</key>
12 |   <string>6.0</string>
13 |   <key>CFBundleName</key>
14 |   <string>${PRODUCT_NAME}</string>
15 |   <key>CFBundlePackageType</key>
16 |   <string>FMWK</string>
17 |   <key>CFBundleShortVersionString</key>
18 |   <string>1.0.0</string>
19 |   <key>CFBundleSignature</key>
20 |   <string>????</string>
21 |   <key>CFBundleVersion</key>
22 |   <string>${CURRENT_PROJECT_VERSION}</string>
23 |   <key>NSPrincipalClass</key>
24 |   <string></string>
25 | </dict>
26 | </plist>
27 | 
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-acknowledgements.markdown:
--------------------------------------------------------------------------------
1 | # Acknowledgements
2 | This application makes use of the following third party libraries:
3 |
4 | ## Vivid
5 |
6 | The MIT License (MIT)
7 |
8 | Copyright (c) 2016 Yu Ao
9 |
10 | Permission is hereby granted, free of charge, to any person obtaining a copy
11 | of this software and associated documentation files (the "Software"), to deal
12 | in the Software without restriction, including without limitation the rights
13 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | copies of the Software, and to permit persons to whom the Software is
15 | furnished to do so, subject to the following conditions:
16 |
17 | The above copyright notice and this permission notice shall be included in all
18 | copies or substantial portions of the Software.
19 |
20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26 | SOFTWARE.
27 |
28 | Generated by CocoaPods - https://cocoapods.org
29 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-acknowledgements.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | PreferenceSpecifiers
6 |
7 |
8 | FooterText
9 | This application makes use of the following third party libraries:
10 | Title
11 | Acknowledgements
12 | Type
13 | PSGroupSpecifier
14 |
15 |
16 | FooterText
17 | The MIT License (MIT)
18 |
19 | Copyright (c) 2016 Yu Ao
20 |
21 | Permission is hereby granted, free of charge, to any person obtaining a copy
22 | of this software and associated documentation files (the "Software"), to deal
23 | in the Software without restriction, including without limitation the rights
24 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
25 | copies of the Software, and to permit persons to whom the Software is
26 | furnished to do so, subject to the following conditions:
27 |
28 | The above copyright notice and this permission notice shall be included in all
29 | copies or substantial portions of the Software.
30 |
31 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
32 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
33 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
34 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
35 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
36 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
37 | SOFTWARE.
38 |
39 | License
40 | MIT
41 | Title
42 | Vivid
43 | Type
44 | PSGroupSpecifier
45 |
46 |
47 | FooterText
48 | Generated by CocoaPods - https://cocoapods.org
49 | Title
50 |
51 | Type
52 | PSGroupSpecifier
53 |
54 |
55 | StringsTable
56 | Acknowledgements
57 | Title
58 | Acknowledgements
59 |
60 |
61 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-dummy.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | @interface PodsDummy_Pods_iOS_Depth_Sampler : NSObject
3 | @end
4 | @implementation PodsDummy_Pods_iOS_Depth_Sampler
5 | @end
6 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-frameworks-Debug-input-files.xcfilelist:
--------------------------------------------------------------------------------
1 | ${PODS_ROOT}/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-frameworks.sh
2 | ${BUILT_PRODUCTS_DIR}/Vivid/Vivid.framework
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-frameworks-Debug-output-files.xcfilelist:
--------------------------------------------------------------------------------
1 | ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Vivid.framework
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-frameworks-Release-input-files.xcfilelist:
--------------------------------------------------------------------------------
1 | ${PODS_ROOT}/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-frameworks.sh
2 | ${BUILT_PRODUCTS_DIR}/Vivid/Vivid.framework
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-frameworks-Release-output-files.xcfilelist:
--------------------------------------------------------------------------------
1 | ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Vivid.framework
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler-umbrella.h:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 |
14 | FOUNDATION_EXPORT double Pods_iOS_Depth_SamplerVersionNumber;
15 | FOUNDATION_EXPORT const unsigned char Pods_iOS_Depth_SamplerVersionString[];
16 |
17 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler.debug.xcconfig:
--------------------------------------------------------------------------------
1 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
2 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/Vivid"
3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
4 | HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/Vivid/Vivid.framework/Headers"
5 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
6 | OTHER_LDFLAGS = $(inherited) -framework "Vivid"
7 | PODS_BUILD_DIR = ${BUILD_DIR}
8 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
9 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
10 | PODS_ROOT = ${SRCROOT}/Pods
11 | PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
12 | USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
13 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler.modulemap:
--------------------------------------------------------------------------------
1 | framework module Pods_iOS_Depth_Sampler {
2 | umbrella header "Pods-iOS-Depth-Sampler-umbrella.h"
3 |
4 | export *
5 | module * { export * }
6 | }
7 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Pods-iOS-Depth-Sampler/Pods-iOS-Depth-Sampler.release.xcconfig:
--------------------------------------------------------------------------------
1 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
2 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/Vivid"
3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
4 | HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/Vivid/Vivid.framework/Headers"
5 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
6 | OTHER_LDFLAGS = $(inherited) -framework "Vivid"
7 | PODS_BUILD_DIR = ${BUILD_DIR}
8 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
9 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
10 | PODS_ROOT = ${SRCROOT}/Pods
11 | PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
12 | USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
13 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid-Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>en</string>
7 |   <key>CFBundleExecutable</key>
8 |   <string>${EXECUTABLE_NAME}</string>
9 |   <key>CFBundleIdentifier</key>
10 |   <string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
11 |   <key>CFBundleInfoDictionaryVersion</key>
12 |   <string>6.0</string>
13 |   <key>CFBundleName</key>
14 |   <string>${PRODUCT_NAME}</string>
15 |   <key>CFBundlePackageType</key>
16 |   <string>FMWK</string>
17 |   <key>CFBundleShortVersionString</key>
18 |   <string>0.9.0</string>
19 |   <key>CFBundleSignature</key>
20 |   <string>????</string>
21 |   <key>CFBundleVersion</key>
22 |   <string>${CURRENT_PROJECT_VERSION}</string>
23 |   <key>NSPrincipalClass</key>
24 |   <string></string>
25 | </dict>
26 | </plist>
27 | 
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid-dummy.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | @interface PodsDummy_Vivid : NSObject
3 | @end
4 | @implementation PodsDummy_Vivid
5 | @end
6 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid-prefix.pch:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid-umbrella.h:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 | #import "YUCIBilateralFilter.h"
14 | #import "YUCIBlobsGenerator.h"
15 | #import "YUCICLAHE.h"
16 | #import "YUCIColorLookup.h"
17 | #import "YUCICrossZoomTransition.h"
18 | #import "YUCIFilmBurnTransition.h"
19 | #import "YUCIFilterConstructor.h"
20 | #import "YUCIFilterPreviewGenerator.h"
21 | #import "YUCIFilterUtilities.h"
22 | #import "YUCIFlashTransition.h"
23 | #import "YUCIFXAA.h"
24 | #import "YUCIHistogramEqualization.h"
25 | #import "YUCIReflectedTile.h"
26 | #import "YUCIReflectedTileROICalculator.h"
27 | #import "YUCIRGBToneCurve.h"
28 | #import "YUCISkyGenerator.h"
29 | #import "YUCIStarfieldGenerator.h"
30 | #import "YUCISurfaceBlur.h"
31 | #import "YUCITriangularPixellate.h"
32 | #import "YUCIUtilities.h"
33 |
34 | FOUNDATION_EXPORT double VividVersionNumber;
35 | FOUNDATION_EXPORT const unsigned char VividVersionString[];
36 |
37 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid.debug.xcconfig:
--------------------------------------------------------------------------------
1 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
2 | CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/Vivid
3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
4 | PODS_BUILD_DIR = ${BUILD_DIR}
5 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
6 | PODS_ROOT = ${SRCROOT}
7 | PODS_TARGET_SRCROOT = ${PODS_ROOT}/Vivid
8 | PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
9 | PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
10 | SKIP_INSTALL = YES
11 | USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
12 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid.modulemap:
--------------------------------------------------------------------------------
1 | framework module Vivid {
2 | umbrella header "Vivid-umbrella.h"
3 |
4 | export *
5 | module * { export * }
6 | }
7 |
--------------------------------------------------------------------------------
/Pods/Target Support Files/Vivid/Vivid.release.xcconfig:
--------------------------------------------------------------------------------
1 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
2 | CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/Vivid
3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
4 | PODS_BUILD_DIR = ${BUILD_DIR}
5 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
6 | PODS_ROOT = ${SRCROOT}
7 | PODS_TARGET_SRCROOT = ${PODS_ROOT}/Vivid
8 | PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
9 | PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
10 | SKIP_INSTALL = YES
11 | USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
12 |
--------------------------------------------------------------------------------
/Pods/Vivid/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 Yu Ao
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Pods/Vivid/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | 
4 | 
5 | 
6 |
7 | A set of filters and utilities for Apple's [Core Image](https://developer.apple.com/library/mac/documentation/GraphicsImaging/Conceptual/CoreImaging/ci_intro/ci_intro.html) framework.
8 |
9 | Available on both OS X and iOS.
10 |
11 | Including...
12 |
13 | ##Core Image Filters
14 |
15 | ###Filters
16 |
17 | ####YUCIRGBToneCurve
18 |
19 | Adjusts tone response of the R, G, and B channels of an image.
20 |
21 | The filter takes in an array of control points that define the spline curve for each color component, or for all three in the composite.
22 |
23 | These are stored as `CIVector`s in an `NSArray`, with normalized X and Y coordinates from `0` to `1`.
24 |
25 | The defaults are `[(0,0), (0.5,0.5), (1,1)]`
26 |
27 | 
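A minimal usage sketch, assuming the filter has been registered under its class name (the `+load` implementations in `Sources` do this); the `inputRGBCompositeControlPoints` key name is an assumption here, so verify it against `YUCIRGBToneCurve.h`:

```objc
#import <CoreImage/CoreImage.h>

// Hypothetical sketch; `sourceImage` stands for any existing CIImage.
CIFilter *toneCurve = [CIFilter filterWithName:@"YUCIRGBToneCurve"];
[toneCurve setValue:sourceImage forKey:kCIInputImageKey];
// Normalized (0...1) control points stored as CIVectors in an NSArray, here a gentle S-curve:
NSArray<CIVector *> *curve = @[[CIVector vectorWithX:0.0 Y:0.0],
                               [CIVector vectorWithX:0.5 Y:0.6],
                               [CIVector vectorWithX:1.0 Y:1.0]];
[toneCurve setValue:curve forKey:@"inputRGBCompositeControlPoints"]; // assumed key name
CIImage *adjusted = toneCurve.outputImage;
```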
28 |
29 | ####YUCIColorLookup
30 |
31 | Uses a color lookup table (LUT) to remap the colors in an image. The default LUT can be found at `Sources/YUCIColorLookupTableDefault.png`
32 |
33 | *This filter may not work well in the default light-linear working color space. Use `kCIContextWorkingColorSpace` key to specify a working color space when creating the `CIContext` object.*
34 |
35 | 
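A minimal sketch of the recommended setup, using only the behavior declared in `YUCIColorLookup.h` and `YUCIColorLookup.m`; `image` is a placeholder for any CIImage:

```objc
#import <CoreImage/CoreImage.h>
#import <CoreGraphics/CoreGraphics.h>

// Renders `image` through YUCIColorLookup with an sRGB working color space,
// as the note above recommends. The caller releases the returned CGImage.
static CGImageRef RenderWithColorLookup(CIImage *image) {
    CGColorSpaceRef srgb = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
    CIContext *context = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: (__bridge id)srgb}];
    CGColorSpaceRelease(srgb);

    CIFilter *lookup = [CIFilter filterWithName:@"YUCIColorLookup"]; // registered by +load in YUCIColorLookup.m
    [lookup setValue:image forKey:kCIInputImageKey];                 // inputColorLookupTable falls back to the default LUT
    CIImage *output = lookup.outputImage;
    return [context createCGImage:output fromRect:output.extent];
}
```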
36 |
37 | ####YUCISurfaceBlur
38 |
39 | A bilateral filter. Blurs an image while preserving edges. This filter is almost identical to Photoshop's "Surface Blur" filter.
40 |
41 | Useful for creating special effects and for removing noise or graininess. It becomes slow at large `inputRadius` values.
42 |
43 | 
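A minimal sketch, assuming only what the note above states (that the filter exposes an `inputRadius` parameter); check `YUCISurfaceBlur.h` for the exact defaults:

```objc
#import <CoreImage/CoreImage.h>

// Hypothetical sketch; `sourceImage` stands for any existing CIImage.
CIFilter *surfaceBlur = [CIFilter filterWithName:@"YUCISurfaceBlur"];
[surfaceBlur setValue:sourceImage forKey:kCIInputImageKey];
[surfaceBlur setValue:@(8) forKey:@"inputRadius"]; // larger radii get slow, per the note above
CIImage *smoothed = surfaceBlur.outputImage;
```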
44 |
45 | ####YUCITriangularPixellate
46 |
47 | Maps an image to colored triangles.
48 |
49 | 
50 |
51 | ####YUCIFXAA
52 |
53 | A basic implementation of FXAA (Fast Approximate Anti-Aliasing).
54 |
55 | 
56 |
57 | ####YUCIHistogramEqualization
58 |
59 | Performs a [Histogram Equalization](https://en.wikipedia.org/wiki/Histogram_equalization) on the input image. Internally it uses `Accelerate.framework`.
60 |
61 | 
62 |
63 | ####YUCIReflectedTile
64 |
65 | Produces a tiled image from a source image by reflecting pixels over the edges.
66 |
67 | 
68 |
69 | ####YUCICLAHE
70 |
71 | Performs a [Contrast Limited Adaptive Histogram Equalization](https://en.wikipedia.org/wiki/Adaptive_histogram_equalization#Contrast_Limited_AHE) on the lightness channel of the input image.
72 |
73 | 
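A minimal sketch using the properties declared in `YUCICLAHE.h`; `image` is a placeholder for any CIImage:

```objc
#import "YUCICLAHE.h"

YUCICLAHE *clahe = [[YUCICLAHE alloc] init];
clahe.inputImage = image;                               // any CIImage
clahe.inputClipLimit = @(2.0);                          // contrast limit; the header default is 1.0
clahe.inputTileGridSize = [CIVector vectorWithX:8 Y:8]; // the header default is an 8x8 tile grid
CIImage *equalized = clahe.outputImage;
```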
74 |
75 | ###Transitions
76 |
77 | ####YUCICrossZoomTransition
78 |
79 | A transition that pushes the `inputImage` toward the viewer and then snaps back with the `inputTargetImage`.
80 |
81 | 
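A sketch of driving the transition with the properties declared in `YUCICrossZoomTransition.h`; `fromImage`, `toImage`, and `progress` are placeholders supplied by the caller:

```objc
#import "YUCICrossZoomTransition.h"

YUCICrossZoomTransition *transition = [[YUCICrossZoomTransition alloc] init];
transition.inputImage = fromImage;      // CIImage the transition starts from
transition.inputTargetImage = toImage;  // CIImage the transition ends on
transition.inputStrength = @(0.3);      // header default
transition.inputTime = @(progress);     // 0.0 ... 1.0, advanced by your animation clock
CIImage *frame = transition.outputImage;
```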
82 |
83 | ####YUCIFlashTransition
84 |
85 | Transitions from one image to another by creating a flash effect.
86 |
87 | 
88 |
89 | ###Generators
90 |
91 | ####YUCIStarfieldGenerator
92 |
93 | Generates a starfield image, animatable by changing the `inputTime` parameter. Based on [Star Nest](https://www.shadertoy.com/view/XlfGRj) by Pablo Román Andrioli.
94 |
95 | 
96 |
97 | ####YUCIBlobsGenerator
98 |
99 | Generates an image with colorful blobs, animatable by changing the `inputTime` parameter. Based on [Blobs](https://www.shadertoy.com/view/lsfGzr) by [@paulofalcao](https://twitter.com/paulofalcao).
100 |
101 | 
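A sketch of rendering one animation frame with the properties declared in `YUCIBlobsGenerator.h`; `elapsedSeconds` is a placeholder for your animation clock:

```objc
#import "YUCIBlobsGenerator.h"

YUCIBlobsGenerator *blobs = [[YUCIBlobsGenerator alloc] init];
blobs.inputExtent = [CIVector vectorWithCGRect:CGRectMake(0, 0, 640, 480)]; // matches the implementation default
blobs.inputTime = @(elapsedSeconds);                                        // advance per frame to animate
CIImage *frame = blobs.outputImage;
```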
102 |
103 | ##Utilities
104 |
105 | ####YUCIFilterConstructor
106 |
107 | A singleton that conforms to the `CIFilterConstructor` protocol.
108 |
109 | Can be used in `+[CIFilter registerFilterName:constructor:classAttributes:]` to register a `CIFilter`. This filter constructor simply assumes that the `filterName` is the class name of the custom `CIFilter` subclass and calls `[[FilterClass alloc] init]` to construct the filter.
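For example, a custom filter can be registered like this, mirroring the `+load` implementations in `Sources` (`MyCustomFilter` is a hypothetical CIFilter subclass name); call it once, e.g. from the subclass's `+load`:

```objc
#import <CoreImage/CoreImage.h>
#import "YUCIFilterConstructor.h"

// The constructor resolves @"MyCustomFilter" to the class of the same name
// and returns [[MyCustomFilter alloc] init] whenever the filter is requested by name.
[CIFilter registerFilterName:@"MyCustomFilter"
                 constructor:[YUCIFilterConstructor constructor]
             classAttributes:@{kCIAttributeFilterDisplayName: @"My Custom Filter"}];
```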
110 |
111 | ####YUCIFilterPreviewGenerator
112 |
113 | Can be used to generate a preview image/gif for a filter. All the preview images/gifs on this page are generated by this utility. __For demonstration/testing purposes only, do not use it in your production code.__
114 |
115 | ##Next
116 |
117 | - [x] Add filter previews to readme.
118 | - [x] AA for triangular pixellate filter.
119 | - [x] CLAHE
120 | - [ ] Write a paper on the implementation of CLAHE.
121 | - [ ] Kuwahara filter
122 |
123 | ##Related Projects
124 |
125 | ####[YUCIHighPassSkinSmoothing](https://github.com/YuAo/YUCIHighPassSkinSmoothing)
126 |
127 | An implementation of High Pass Skin Smoothing.
128 |
129 | ####[YUCIImageView](https://github.com/YuAo/YUCIImageView)
130 |
131 | An image view for rendering CIImage with Metal/OpenGL/CoreGraphics.
132 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIBilateralFilter.cikernel:
--------------------------------------------------------------------------------
1 | const int GAUSSIAN_SAMPLES = MACRO_GAUSSIAN_SAMPLES;
2 |
3 | kernel vec4 filterKernel(sampler inputImage, vec2 texelOffset, float distanceNormalizationFactor) {
4 |
5 | float GAUSSIAN_WEIGHTS[(GAUSSIAN_SAMPLES + 1)/2];
6 |
7 | MACRO_SETUP_GAUSSIAN_WEIGHTS
8 |
9 | vec2 blurCoordinates[GAUSSIAN_SAMPLES];
10 |
11 | for (int i = 0; i < GAUSSIAN_SAMPLES; i++) {
12 | int multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));
13 | vec2 blurStep = float(multiplier) * texelOffset;
14 | blurCoordinates[i] = destCoord() + blurStep;
15 | }
16 |
17 | int centralIndex = (GAUSSIAN_SAMPLES - 1)/2;
18 |
19 | vec4 centralColor;
20 | float gaussianWeightTotal;
21 | vec4 sum;
22 | vec4 sampleColor;
23 | float distanceFromCentralColor;
24 | float gaussianWeight;
25 |
26 | centralColor = sample(inputImage,samplerTransform(inputImage, blurCoordinates[centralIndex]));
27 | gaussianWeightTotal = GAUSSIAN_WEIGHTS[0];
28 | sum = centralColor * GAUSSIAN_WEIGHTS[0];
29 |
30 | for (int i = 0; i < centralIndex; i++) {
31 | sampleColor = sample(inputImage,samplerTransform(inputImage, blurCoordinates[i]));
32 | distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
33 | gaussianWeight = GAUSSIAN_WEIGHTS[centralIndex - i] * (1.0 - distanceFromCentralColor);
34 | gaussianWeightTotal += gaussianWeight;
35 | sum += sampleColor * gaussianWeight;
36 | }
37 |
38 | for (int i = centralIndex + 1; i < GAUSSIAN_SAMPLES; i++) {
39 | sampleColor = sample(inputImage,samplerTransform(inputImage, blurCoordinates[i]));
40 | distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
41 | gaussianWeight = GAUSSIAN_WEIGHTS[i-centralIndex] * (1.0 - distanceFromCentralColor);
42 | gaussianWeightTotal += gaussianWeight;
43 | sum += sampleColor * gaussianWeight;
44 | }
45 |
46 | vec4 result = sum / gaussianWeightTotal;
47 |
48 | return result;
49 | }
50 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIBilateralFilter.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIBilateralFilter.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/2/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIBilateralFilter : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @property (nonatomic, copy, null_resettable) NSNumber *inputRadius; //default 10
16 | @property (nonatomic, copy, null_resettable) NSNumber *inputDistanceNormalizationFactor; //default 5.0
17 | @property (nonatomic, copy, null_resettable) NSNumber *inputTexelSpacingMultiplier; //default 1.0
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIBilateralFilter.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIBilateralFilter.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/2/16.
6 | //
7 | //
8 |
9 | #import "YUCIBilateralFilter.h"
10 | #import "YUCIFilterUtilities.h"
11 |
12 | @implementation YUCIBilateralFilter
13 |
14 | static NSDictionary *YUCIBilateralFilterKernels;
15 |
16 | + (NSInteger)sampleCountForRadius:(NSInteger)radius {
17 | CGFloat minimumWeightToFindEdgeOfSamplingArea = 1.0/256.0;
18 | radius = floor(sqrt(-2.0 * pow(radius, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(radius, 2.0))) ));
19 | if (radius % 2 == 0) {
20 | radius = radius - 1;
21 | }
22 | if (radius <= 0) {
23 | radius = 1;
24 | }
25 | return radius;
26 | }
27 |
28 | + (CIKernel *)filterKernelForRadius:(NSInteger)radius {
29 | NSInteger sampleCount = [self sampleCountForRadius:radius];
30 |
31 | CIKernel *kernel = YUCIBilateralFilterKernels[@(sampleCount)];
32 |
33 | if (kernel) {
34 | return kernel;
35 | } else {
36 | double sigma = radius;
37 |
38 | double sum = 0;
39 | NSMutableArray *wights = [NSMutableArray array];
40 | for (NSInteger i = 0; i < (sampleCount + 1)/2; ++i) {
41 | double wight = YUCIGaussianDistributionPDF(i, sigma);
42 | if (i == 0) {
43 | sum += wight;
44 | } else {
45 | sum += wight * 2;
46 | }
47 | [wights addObject:@(wight)];
48 | }
49 | double scale = 1.0/sum;
50 |
51 | NSString *setupString = @"";
52 | for (NSInteger i = 0; i < wights.count; ++i) {
53 | setupString = [setupString stringByAppendingFormat:@"GAUSSIAN_WEIGHTS[%@] = %@; \n",@(i),@([wights[i] doubleValue] * scale)];
54 | }
55 |
56 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIBilateralFilter class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
57 | kernelString = [kernelString stringByReplacingOccurrencesOfString:@"MACRO_GAUSSIAN_SAMPLES" withString:@(sampleCount).stringValue];
58 | kernelString = [kernelString stringByReplacingOccurrencesOfString:@"MACRO_SETUP_GAUSSIAN_WEIGHTS" withString:setupString];
59 | kernel = [CIKernel kernelWithString:kernelString];
60 |
61 | NSMutableDictionary *newKernels = [NSMutableDictionary dictionaryWithDictionary:YUCIBilateralFilterKernels];
62 | [newKernels setObject:kernel forKey:@(sampleCount)];
63 | YUCIBilateralFilterKernels = newKernels.copy;
64 |
65 | return kernel;
66 | }
67 | }
68 |
69 | - (NSNumber *)inputRadius {
70 | if (!_inputRadius) {
71 | _inputRadius = @(10);
72 | }
73 | return _inputRadius;
74 | }
75 |
76 | - (NSNumber *)inputTexelSpacingMultiplier {
77 | if (!_inputTexelSpacingMultiplier) {
78 | _inputTexelSpacingMultiplier = @(1.0);
79 | }
80 | return _inputTexelSpacingMultiplier;
81 | }
82 |
83 | - (NSNumber *)inputDistanceNormalizationFactor {
84 | if (!_inputDistanceNormalizationFactor) {
85 | _inputDistanceNormalizationFactor = @(5.0);
86 | }
87 | return _inputDistanceNormalizationFactor;
88 | }
89 |
90 | - (void)setDefaults {
91 | self.inputRadius = nil;
92 | self.inputTexelSpacingMultiplier = nil;
93 | self.inputDistanceNormalizationFactor = nil;
94 | }
95 |
96 | - (CIImage *)outputImage {
97 | if (!self.inputImage) {
98 | return nil;
99 | }
100 |
101 | CIKernel *kernel = [YUCIBilateralFilter filterKernelForRadius:[self.inputRadius integerValue]];
102 | CGFloat inset = -([self.class sampleCountForRadius:self.inputRadius.integerValue] * self.inputTexelSpacingMultiplier.doubleValue)/2.0;
103 | CIImage *horizontalPassResult = [kernel applyWithExtent:self.inputImage.extent
104 | roiCallback:^CGRect(int index, CGRect destRect) {
105 | return CGRectInset(destRect, inset, 0);
106 | } arguments:@[self.inputImage,
107 | [CIVector vectorWithX:[self.inputTexelSpacingMultiplier doubleValue] Y:0],
108 | self.inputDistanceNormalizationFactor]];
109 | CIImage *verticalPassResult = [kernel applyWithExtent:horizontalPassResult.extent
110 | roiCallback:^CGRect(int index, CGRect destRect) {
111 | return CGRectInset(destRect, 0, inset);
112 | } arguments:@[horizontalPassResult,
113 | [CIVector vectorWithX:0 Y:[self.inputTexelSpacingMultiplier doubleValue]],
114 | self.inputDistanceNormalizationFactor]];
115 | return verticalPassResult;
116 | }
117 |
118 | @end
119 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIBlobsGenerator.cikernel:
--------------------------------------------------------------------------------
1 | // Blobs by @paulofalcao
2 | // https://www.shadertoy.com/view/lsfGzr
3 |
4 | float makePoint(float x,float y,float fx,float fy,float sx,float sy,float t){
5 | float xx=x+sin(t*fx)*sx;
6 | float yy=y+cos(t*fy)*sy;
7 | return 1.0/sqrt(xx*xx+yy*yy);
8 | }
9 |
10 | kernel vec4 coreImageKernel(vec4 inputExtent, float inputTime) {
11 | vec2 fragCoord = destCoord();
12 | vec2 iResolution = inputExtent.zw;
13 | float time = inputTime;
14 |
15 | vec2 p=(fragCoord.xy/iResolution.x)*2.0-vec2(1.0,iResolution.y/iResolution.x);
16 |
17 | p=p*2.0;
18 |
19 | float x=p.x;
20 | float y=p.y;
21 |
22 | float a=makePoint(x,y,3.3,2.9,0.3,0.3,time);
23 | a=a+makePoint(x,y,1.9,2.0,0.4,0.4,time);
24 | a=a+makePoint(x,y,0.8,0.7,0.4,0.5,time);
25 | a=a+makePoint(x,y,2.3,0.1,0.6,0.3,time);
26 | a=a+makePoint(x,y,0.8,1.7,0.5,0.4,time);
27 | a=a+makePoint(x,y,0.3,1.0,0.4,0.4,time);
28 | a=a+makePoint(x,y,1.4,1.7,0.4,0.5,time);
29 | a=a+makePoint(x,y,1.3,2.1,0.6,0.3,time);
30 | a=a+makePoint(x,y,1.8,1.7,0.5,0.4,time);
31 |
32 | float b=makePoint(x,y,1.2,1.9,0.3,0.3,time);
33 | b=b+makePoint(x,y,0.7,2.7,0.4,0.4,time);
34 | b=b+makePoint(x,y,1.4,0.6,0.4,0.5,time);
35 | b=b+makePoint(x,y,2.6,0.4,0.6,0.3,time);
36 | b=b+makePoint(x,y,0.7,1.4,0.5,0.4,time);
37 | b=b+makePoint(x,y,0.7,1.7,0.4,0.4,time);
38 | b=b+makePoint(x,y,0.8,0.5,0.4,0.5,time);
39 | b=b+makePoint(x,y,1.4,0.9,0.6,0.3,time);
40 | b=b+makePoint(x,y,0.7,1.3,0.5,0.4,time);
41 |
42 | float c=makePoint(x,y,3.7,0.3,0.3,0.3,time);
43 | c=c+makePoint(x,y,1.9,1.3,0.4,0.4,time);
44 | c=c+makePoint(x,y,0.8,0.9,0.4,0.5,time);
45 | c=c+makePoint(x,y,1.2,1.7,0.6,0.3,time);
46 | c=c+makePoint(x,y,0.3,0.6,0.5,0.4,time);
47 | c=c+makePoint(x,y,0.3,0.3,0.4,0.4,time);
48 | c=c+makePoint(x,y,1.4,0.8,0.4,0.5,time);
49 | c=c+makePoint(x,y,0.2,0.6,0.6,0.3,time);
50 | c=c+makePoint(x,y,1.3,0.5,0.5,0.4,time);
51 |
52 | vec3 d=vec3(a,b,c)/32.0;
53 |
54 | return vec4(d.x,d.y,d.z,1.0);
55 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIBlobsGenerator.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIBlobsGenerator.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/6/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIBlobsGenerator : CIFilter
12 |
13 | @property (nonatomic, copy, null_resettable) CIVector *inputExtent; //default (0, 0, 640, 480)
14 |
15 | @property (nonatomic, copy, null_resettable) NSNumber *inputTime; //default 0
16 |
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIBlobsGenerator.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIBlobsGenerator.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/6/16.
6 | //
7 | //
8 |
9 | #import "YUCIBlobsGenerator.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCIBlobsGenerator
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCIBlobsGenerator class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryGenerator],
22 | kCIAttributeFilterDisplayName: @"Blobs Generator"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIColorKernel *)filterKernel {
29 | static CIColorKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIBlobsGenerator class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIColorKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (CIVector *)inputExtent {
39 | if (!_inputExtent) {
40 | _inputExtent = [CIVector vectorWithCGRect:CGRectMake(0, 0, 640, 480)];
41 | }
42 | return _inputExtent;
43 | }
44 |
45 | - (NSNumber *)inputTime {
46 | if (!_inputTime) {
47 | _inputTime = @(0);
48 | }
49 | return _inputTime;
50 | }
51 |
52 | - (void)setDefaults {
53 | self.inputExtent = nil;
54 | self.inputTime = nil;
55 | }
56 |
57 | - (CIImage *)outputImage {
58 | return [[YUCIBlobsGenerator filterKernel] applyWithExtent:self.inputExtent.CGRectValue
59 | arguments:@[self.inputExtent,self.inputTime]];
60 | }
61 |
62 | @end
63 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCICLAHE.cikernel:
--------------------------------------------------------------------------------
1 |
2 | #define YUCICLAHE_LOOKUP(LUTs, LUTIndex, value) \
3 | (sample(LUTs,samplerTransform(LUTs,vec2(value * 255.0 + 0.5 + samplerExtent(LUTs).x, LUTIndex + 0.5 + samplerExtent(LUTs).y))).r)
4 |
5 | kernel vec4 filterKernel(sampler inputImage, sampler LUTs, vec2 tileGridSize, vec2 tileSize) {
6 | vec4 textureColor = sample(inputImage,samplerCoord(inputImage)); /* HSL Color */
7 |
8 | vec2 coord = destCoord() - samplerExtent(inputImage).xy;
9 |
10 | float txf = coord.x / tileSize.x - 0.5;
11 |
12 | float tx1 = floor(txf);
13 | float tx2 = tx1 + 1.0;
14 |
15 | float xa_p = txf - tx1;
16 | float xa1_p = 1.0 - xa_p;
17 |
18 | tx1 = max(tx1, 0.0);
19 | tx2 = min(tx2, tileGridSize.x - 1.0);
20 |
21 | float tyf = coord.y / tileSize.y - 0.5;
22 |
23 | float ty1 = floor(tyf);
24 | float ty2 = ty1 + 1.0;
25 |
26 | float ya = tyf - ty1;
27 | float ya1 = 1.0 - ya;
28 |
29 | ty1 = max(ty1, 0.0);
30 | ty2 = min(ty2, tileGridSize.y - 1.0);
31 |
32 | float srcVal = textureColor.b;
33 |
34 | float lutPlane1_ind1 = YUCICLAHE_LOOKUP(LUTs, ty1 * tileGridSize.x + tx1, srcVal);
35 | float lutPlane1_ind2 = YUCICLAHE_LOOKUP(LUTs, ty1 * tileGridSize.x + tx2, srcVal);
36 | float lutPlane2_ind1 = YUCICLAHE_LOOKUP(LUTs, ty2 * tileGridSize.x + tx1, srcVal);
37 | float lutPlane2_ind2 = YUCICLAHE_LOOKUP(LUTs, ty2 * tileGridSize.x + tx2, srcVal);
38 |
39 | float res = (lutPlane1_ind1 * xa1_p + lutPlane1_ind2 * xa_p) * ya1 + (lutPlane2_ind1 * xa1_p + lutPlane2_ind2 * xa_p) * ya;
40 |
41 | return vec4(vec3(textureColor.r, textureColor.g, clamp(res,0.0,1.0)),textureColor.a);
42 | }
43 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCICLAHE.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCICLAHE.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/16/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCICLAHE : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @property (nonatomic, copy, null_resettable) NSNumber *inputClipLimit; //default 1.0;
16 |
17 | @property (nonatomic, copy, null_resettable) CIVector *inputTileGridSize; //default (x:8 y:8)
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIColorLookup.cikernel:
--------------------------------------------------------------------------------
1 |
2 | kernel vec4 filterKernel(sampler inputImage, sampler inputLUT, float intensity) {
3 | vec4 textureColor = sample(inputImage,samplerCoord(inputImage));
4 | textureColor = clamp(textureColor, vec4(0.0), vec4(1.0));
5 |
6 | float blueColor = textureColor.b * 63.0;
7 |
8 | vec2 quad1;
9 | quad1.y = floor(floor(blueColor) / 8.0);
10 | quad1.x = floor(blueColor) - (quad1.y * 8.0);
11 |
12 | vec2 quad2;
13 | quad2.y = floor(ceil(blueColor) / 8.0);
14 | quad2.x = ceil(blueColor) - (quad2.y * 8.0);
15 |
16 | vec2 texPos1;
17 | texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
18 | texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
19 |
20 | vec2 texPos2;
21 | texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
22 | texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
23 |
24 | texPos1.y = 1.0 - texPos1.y;
25 | texPos2.y = 1.0 - texPos2.y;
26 |
27 | vec4 inputLUTExtent = samplerExtent(inputLUT);
28 |
29 | vec4 newColor1 = sample(inputLUT, samplerTransform(inputLUT, texPos1 * vec2(512.0) + inputLUTExtent.xy));
30 | vec4 newColor2 = sample(inputLUT, samplerTransform(inputLUT, texPos2 * vec2(512.0) + inputLUTExtent.xy));
31 |
32 | vec4 newColor = mix(newColor1, newColor2, fract(blueColor));
33 | return mix(textureColor, vec4(newColor.rgb, textureColor.a), intensity);
34 | }
35 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIColorLookup.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIColorLookupFilter.h
3 | // CoreImageSkinEnhancementFilter
4 | //
5 | // Created by YuAo on 1/20/16.
6 | // Copyright © 2016 YuAo. All rights reserved.
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | /*
12 | Note:
13 |
14 | Requires a CIContext object with an sRGB working color space instead of the default light-linear color space.
15 |
16 | */
17 |
18 | @interface YUCIColorLookup : CIFilter
19 |
20 | @property (nonatomic, strong, nullable) CIImage *inputImage;
21 |
22 | @property (nonatomic, strong, null_resettable) CIImage *inputColorLookupTable;
23 |
24 | @property (nonatomic, copy, null_resettable) NSNumber *inputIntensity;
25 |
26 | @end
27 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIColorLookup.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIColorLookupFilter.m
3 | // CoreImageSkinEnhancementFilter
4 | //
5 | // Created by YuAo on 1/20/16.
6 | // Copyright © 2016 YuAo. All rights reserved.
7 | //
8 |
9 | #import "YUCIColorLookup.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCIColorLookup
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCIColorLookup class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryColorEffect,kCICategoryInterlaced,kCICategoryNonSquarePixels],
22 | kCIAttributeFilterDisplayName: @"Color Lookup"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIKernel *)filterKernel {
29 | static CIKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIColorLookup class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (NSNumber *)inputIntensity {
39 | if (!_inputIntensity) {
40 | _inputIntensity = @(1.0);
41 | }
42 | return _inputIntensity;
43 | }
44 |
45 | - (CIImage *)inputColorLookupTable {
46 | if (!_inputColorLookupTable) {
47 | _inputColorLookupTable = [CIImage imageWithContentsOfURL:[[NSBundle bundleForClass:self.class] URLForResource:@"YUCIColorLookupTableDefault" withExtension:@"png"]];
48 | }
49 | return _inputColorLookupTable;
50 | }
51 |
52 | - (void)setDefaults {
53 | self.inputIntensity = nil;
54 | self.inputColorLookupTable = nil;
55 | }
56 |
57 | - (CIImage *)outputImage {
58 | if (!self.inputImage) {
59 | return nil;
60 | }
61 |
62 | return [[YUCIColorLookup filterKernel] applyWithExtent:self.inputImage.extent
63 | roiCallback:^CGRect(int index, CGRect destRect) {
64 | if (index == 0) {
65 | return destRect;
66 | } else {
67 | return self.inputColorLookupTable.extent;
68 | }
69 | }
70 | arguments:@[self.inputImage,self.inputColorLookupTable,self.inputIntensity]];
71 | }
72 |
73 | @end
74 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIColorLookupTableDefault.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/Pods/Vivid/Sources/YUCIColorLookupTableDefault.png
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCICrossZoomTransition.cikernel:
--------------------------------------------------------------------------------
1 |
2 | const float PI = 3.141592653589793;
3 |
4 | float linearEase(float begin, float change, float duration, float time) {
5 | return change * time / duration + begin;
6 | }
7 |
8 | float exponentialEaseInOut(float begin, float change, float duration, float time) {
9 | if (time == 0.0) {
10 | return begin;
11 | } else if (time == duration) {
12 | return begin + change;
13 | }
14 | time = time / (duration / 2.0);
15 | if (time < 1.0) {
16 | return change / 2.0 * pow(2.0, 10.0 * (time - 1.0)) + begin;
17 | } else {
18 | return change / 2.0 * (-pow(2.0, -10.0 * (time - 1.0)) + 2.0) + begin;
19 | }
20 | }
21 |
22 | float sinusoidalEaseInOut(float begin, float change, float duration, float time) {
23 | return -change / 2.0 * (cos(PI * time / duration) - 1.0) + begin;
24 | }
25 |
26 | /* random number between 0 and 1 */
27 | float random(vec3 scale, float seed) {
28 | /* use the fragment position for randomness */
29 | return fract(sin(dot(gl_FragCoord.xyz + seed, scale)) * 43758.5453 + seed);
30 | }
31 |
32 | kernel vec4 filterKernel(sampler inputImage, sampler inputTargetImage, float inputStrength, vec4 inputExtent, float progress) {
33 | // Linear interpolate center across center half of the image
34 | vec2 center = vec2(linearEase(0.25, 0.5, 1.0, progress), 0.5);
35 | float dissolve = exponentialEaseInOut(0.0, 1.0, 1.0, progress);
36 |
37 | // Mirrored sinusoidal loop. 0->strength then strength->0
38 | float strength = sinusoidalEaseInOut(0.0, inputStrength, 0.5, progress);
39 |
40 | vec4 color = vec4(0.0);
41 | float total = 0.0;
42 | vec2 textureCoordinate = ((destCoord() - inputExtent.xy)/inputExtent.zw);
43 | vec2 toCenter = center - textureCoordinate;
44 |
45 | /* randomize the lookup values to hide the fixed number of samples */
46 | float offset = random(vec3(12.9898, 78.233, 151.7182), 0.0);
47 |
48 | for (float t = 0.0; t <= 10.0; t++) {
49 | float percent = (t + offset) / 10.0;
50 | float weight = 4.0 * (percent - percent * percent);
51 |
52 | vec2 uv = (textureCoordinate + toCenter * percent * strength) * inputExtent.zw + inputExtent.xy;
53 | vec4 crossFade = mix(sample(inputImage, samplerTransform(inputImage,uv)), sample(inputTargetImage, samplerTransform(inputTargetImage,uv)), dissolve);
54 | color += crossFade * weight;
55 | total += weight;
56 | }
57 | return color/total;
58 | }
59 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCICrossZoomTransition.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCICrossZoomTransition.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/4/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCICrossZoomTransition : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 | @property (nonatomic, strong, nullable) CIImage *inputTargetImage;
15 |
16 | @property (nonatomic, copy, nullable) CIVector *inputExtent;
17 |
18 | @property (nonatomic, copy, null_resettable) NSNumber *inputStrength; //default 0.3
19 |
20 | @property (nonatomic, copy, null_resettable) NSNumber *inputTime; /* 0 to 1 */
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCICrossZoomTransition.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCICrossZoomTransition.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/4/16.
6 | //
7 | //
8 |
9 | #import "YUCICrossZoomTransition.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCICrossZoomTransition
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCICrossZoomTransition class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryTransition],
22 | kCIAttributeFilterDisplayName: @"Cross Zoom Transition"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIKernel *)filterKernel {
29 | static CIKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCICrossZoomTransition class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (NSNumber *)inputStrength {
39 | if (!_inputStrength) {
40 | _inputStrength = @(0.3);
41 | }
42 | return _inputStrength;
43 | }
44 |
45 | - (NSNumber *)inputTime {
46 | if (!_inputTime) {
47 | _inputTime = @(0.0);
48 | }
49 | return _inputTime;
50 | }
51 |
52 | - (void)setDefaults {
53 | self.inputStrength = nil;
54 | self.inputTime = nil;
55 | }
56 |
57 | - (CIImage *)outputImage {
58 | if (!self.inputImage || !self.inputTargetImage) {
59 | return nil;
60 | }
61 |
62 | CIVector *defaultInputExtent = [CIVector vectorWithCGRect:CGRectUnion(self.inputImage.extent, self.inputTargetImage.extent)];
63 | CIVector *extent = self.inputExtent?:defaultInputExtent;
64 | return [[YUCICrossZoomTransition filterKernel] applyWithExtent:extent.CGRectValue
65 | roiCallback:^CGRect(int index, CGRect destRect) {
66 | return extent.CGRectValue;
67 | }
68 | arguments:@[self.inputImage,self.inputTargetImage,self.inputStrength,extent,self.inputTime]];
69 | }
70 |
71 | @end
72 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFXAA.cikernel:
--------------------------------------------------------------------------------
1 |
2 | #define YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, coord) \
3 | (sample(tex, samplerTransform(tex, coord)))
4 |
5 | kernel vec4 filterKernel(sampler tex) {
6 | vec2 fragCoord = destCoord();
7 | vec2 v_rgbNW = (fragCoord + vec2(-1.0, -1.0));
8 | vec2 v_rgbNE = (fragCoord + vec2(1.0, -1.0));
9 | vec2 v_rgbSW = (fragCoord + vec2(-1.0, 1.0));
10 | vec2 v_rgbSE = (fragCoord + vec2(1.0, 1.0));
11 | vec2 v_rgbM = fragCoord;
12 |
13 | const float FXAA_REDUCE_MIN = (1.0/ 128.0);
14 | const float FXAA_REDUCE_MUL = (1.0 / 8.0);
15 | const float FXAA_SPAN_MAX = 8.0;
16 |
17 | vec4 color;
18 | vec3 rgbNW = YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, v_rgbNW).xyz;
19 | vec3 rgbNE = YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, v_rgbNE).xyz;
20 | vec3 rgbSW = YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, v_rgbSW).xyz;
21 | vec3 rgbSE = YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, v_rgbSE).xyz;
22 | vec4 texColor = YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, v_rgbM);
23 | vec3 rgbM = texColor.xyz;
24 | vec3 luma = vec3(0.299, 0.587, 0.114);
25 | float lumaNW = dot(rgbNW, luma);
26 | float lumaNE = dot(rgbNE, luma);
27 | float lumaSW = dot(rgbSW, luma);
28 | float lumaSE = dot(rgbSE, luma);
29 | float lumaM = dot(rgbM, luma);
30 | float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));
31 | float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));
32 |
33 | vec2 dir;
34 | dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));
35 | dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));
36 |
37 | float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *
38 | (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);
39 |
40 | float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);
41 | dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),
42 | max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),
43 | dir * rcpDirMin));
44 |
45 | vec3 rgbA = 0.5 * (YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, fragCoord + dir * (1.0 / 3.0 - 0.5)).xyz + YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, fragCoord + dir * (2.0 / 3.0 - 0.5)).xyz);
46 |
47 | vec3 rgbB = rgbA * 0.5 + 0.25 * (YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, fragCoord + dir * -0.5).xyz + YUCIFXAA_SAMPLE_TEXTURE_AT_LOCATION(tex, fragCoord + dir * 0.5).xyz);
48 |
49 | float lumaB = dot(rgbB, luma);
50 | if ((lumaB < lumaMin) || (lumaB > lumaMax))
51 | color = vec4(rgbA, texColor.a);
52 | else
53 | color = vec4(rgbB, texColor.a);
54 | return color;
55 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFXAA.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFXAA.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/14/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIFXAA : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFXAA.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFXAA.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/14/16.
6 | //
7 | //
8 |
9 | #import "YUCIFXAA.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCIFXAA
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCIFXAA class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo],
22 | kCIAttributeFilterDisplayName: @"Fast Approximate Anti-Aliasing"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIKernel *)filterKernel {
29 | static CIKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIFXAA class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (CIImage *)outputImage {
39 | if (!self.inputImage) {
40 | return nil;
41 | }
42 |
43 | return [[YUCIFXAA filterKernel] applyWithExtent:self.inputImage.extent
44 | roiCallback:^CGRect(int index, CGRect destRect) {
45 | return CGRectInset(destRect, -8, -8); //FXAA_SPAN_MAX
46 | } arguments:@[self.inputImage.imageByClampingToExtent]];
47 | }
48 |
49 | @end
50 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilmBurnTransition.cikernel:
--------------------------------------------------------------------------------
1 |
2 | const float pi = 3.141592653589793;
3 |
4 | const float Seed = 2.31;
5 |
6 | const float FPS = 40.0;
7 |
8 | float sigmoid(float x, float a) {
9 | float b = pow(x*2.,a)/2.;
10 | if (x > .5) {
11 | b = 1.-pow(2.-(x*2.),a)/2.;
12 | }
13 | return b;
14 | }
15 |
16 | float rand(float co){
17 | return fract(sin((co*24.9898)+Seed)*43758.5453);
18 | }
19 |
20 | float rand(vec2 co){
21 | return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
22 | }
23 |
24 | float apow(float a,float b) {
25 | return pow(abs(a),b)*sign(b);
26 | }
27 |
28 | vec3 pow3(vec3 a,vec3 b) {
29 | return vec3(apow(a.r,b.r),apow(a.g,b.g),apow(a.b,b.b));
30 | }
31 |
32 | float smooth_mix(float a,float b,float c) {
33 | return mix(a,b,sigmoid(c,2.));
34 | }
35 |
36 | float random(vec2 co, float shft){
37 | co += 10.;
38 | return smooth_mix(fract(sin(dot(co.xy ,vec2(12.9898+(floor(shft)*.5),78.233+Seed))) * 43758.5453),fract(sin(dot(co.xy ,vec2(12.9898+(floor(shft+1.)*.5),78.233+Seed))) * 43758.5453),fract(shft));
39 | }
40 |
41 | float smooth_random(vec2 co, float shft) {
42 | return smooth_mix(smooth_mix(random(floor(co),shft),random(floor(co+vec2(1.,0.)),shft),fract(co.x)),smooth_mix(random(floor(co+vec2(0.,1.)),shft),random(floor(co+vec2(1.,1.)),shft),fract(co.x)),fract(co.y));
43 | }
44 |
45 | vec4 sampleTexture(vec2 p, sampler from, sampler to, float progress) {
46 | return mix(sample(from, samplerTransform(from,p)), sample(to, samplerTransform(to,p)), sigmoid(progress,10.));
47 | }
48 |
49 | #define clamps(x) clamp(x,0.,1.)
50 |
51 | kernel vec4 filterKernel(sampler inputImage, sampler inputTargetImage, vec4 inputExtent, float progress) {
52 | vec2 textureCoordinate = ((destCoord() - inputExtent.xy)/inputExtent.zw);
53 | vec2 p = textureCoordinate;
54 | vec3 f = vec3(0.);
55 | for (float i = 0.; i < 13.; i++) {
56 | f += sin(((p.x*rand(i)*6.)+(progress*8.))+rand(i+1.43))*sin(((p.y*rand(i+4.4)*6.)+(progress*6.))+rand(i+2.4));
57 | f += 1.-clamps(length(p-vec2(smooth_random(vec2(progress*1.3),i+1.),smooth_random(vec2(progress*.5),i+6.25)))*mix(20.,70.,rand(i)));
58 | }
59 | f += 4.;
60 | f /= 11.;
61 | f = pow3(f*vec3(1.,0.7,0.6),vec3(1.,2.-sin(progress*pi),1.3));
62 | f *= sin(progress*pi);
63 |
64 | p -= .5;
65 | p *= 1.+(smooth_random(vec2(progress*5.),6.3)*sin(progress*pi)*.05);
66 | p += .5;
67 |
68 | vec4 blurred_image = vec4(0.);
69 | float bluramount = sin(progress*pi)*.03;
70 |
71 | #define repeats 30.
72 | for (float i = 0.; i < repeats; i++) {
73 | vec2 q = vec2(cos(degrees((i/repeats)*360.)),sin(degrees((i/repeats)*360.))) * (rand(vec2(i,p.x+p.y))+bluramount);
74 | vec2 uv2 = p+(q*bluramount);
75 | uv2 = uv2 * inputExtent.zw + inputExtent.xy;
76 | blurred_image += sampleTexture(uv2, inputImage, inputTargetImage, progress);
77 | }
78 | blurred_image /= repeats;
79 |
80 | return blurred_image+vec4(f,0.);
81 | }
82 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilmBurnTransition.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilmBurnTransition.h
3 | // Pods
4 | //
5 | // Created by YuAo on 22/05/2017.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIFilmBurnTransition : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 | @property (nonatomic, strong, nullable) CIImage *inputTargetImage;
15 |
16 | @property (nonatomic, copy, nullable) CIVector *inputExtent;
17 |
18 | @property (nonatomic, copy, null_resettable) NSNumber *inputTime; /* 0 to 1 */
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilmBurnTransition.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilmBurnTransition.m
3 | // Pods
4 | //
5 | // Created by YuAo on 22/05/2017.
6 | //
7 | //
8 |
9 | #import "YUCIFilmBurnTransition.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCIFilmBurnTransition
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCIFilmBurnTransition class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryTransition],
22 | kCIAttributeFilterDisplayName: @"Film Burn Transition"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIKernel *)filterKernel {
29 | static CIKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIFilmBurnTransition class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (NSNumber *)inputTime {
39 | if (!_inputTime) {
40 | _inputTime = @(0.0);
41 | }
42 | return _inputTime;
43 | }
44 |
45 | - (void)setDefaults {
46 | self.inputExtent = nil;
47 | self.inputTime = nil;
48 | }
49 |
50 | - (CIImage *)outputImage {
51 | if (!self.inputImage || !self.inputTargetImage) {
52 | return nil;
53 | }
54 |
55 | CIVector *defaultInputExtent = [CIVector vectorWithCGRect:CGRectUnion(self.inputImage.extent, self.inputTargetImage.extent)];
56 | CIVector *extent = self.inputExtent?:defaultInputExtent;
57 | return [[YUCIFilmBurnTransition filterKernel] applyWithExtent:extent.CGRectValue
58 | roiCallback:^CGRect(int index, CGRect destRect) {
59 | return extent.CGRectValue;
60 | }
61 | arguments:@[self.inputImage,self.inputTargetImage,extent,self.inputTime]];
62 | }
63 |
64 | @end
65 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilterConstructor.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilterConstructor.h
3 | // Pods
4 | //
5 | // Created by YuAo on 1/23/16.
6 | //
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <CoreImage/CoreImage.h>
11 |
12 | NS_ASSUME_NONNULL_BEGIN
13 | /*
14 | Using class with name `filterName` to construct a filter object.
15 | */
16 |
17 | @interface YUCIFilterConstructor : NSObject <CIFilterConstructor>
18 |
19 | + (instancetype)constructor;
20 |
21 | - (instancetype)init NS_UNAVAILABLE;
22 |
23 | @end
24 |
25 | NS_ASSUME_NONNULL_END
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilterConstructor.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilterConstructor.m
3 | // Pods
4 | //
5 | // Created by YuAo on 1/23/16.
6 | //
7 | //
8 |
9 | #import "YUCIFilterConstructor.h"
10 |
11 | @implementation YUCIFilterConstructor
12 |
13 | + (instancetype)constructor {
14 | static YUCIFilterConstructor *constructor;
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | constructor = [[YUCIFilterConstructor alloc] initForSharedConstructor];
18 | });
19 | return constructor;
20 | }
21 |
22 | - (instancetype)initForSharedConstructor {
23 | if (self = [super init]) {
24 |
25 | }
26 | return self;
27 | }
28 |
29 | - (CIFilter *)filterWithName:(NSString *)name {
30 | return [[NSClassFromString(name) alloc] init];
31 | }
32 |
33 | @end
34 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilterPreviewGenerator.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilterPreviewGenerator.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/14/16.
6 | //
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <CoreImage/CoreImage.h>
11 |
12 | NS_ASSUME_NONNULL_BEGIN
13 |
14 | @interface YUCIFilterPreviewGenerator : NSObject
15 |
16 | + (void)generatePreviewForFilter:(CIFilter *)filter
17 | context:(nullable CIContext *)context
18 | completion:(void (^)(NSData *previewData, NSString *preferredFilename))completion;
19 |
20 | @end
21 |
22 | NS_ASSUME_NONNULL_END
23 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilterUtilities.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilterUtilities.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/3/16.
6 | //
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 |
11 | FOUNDATION_EXPORT double YUCIGaussianDistributionPDF(double x, double sigma);
12 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFilterUtilities.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFilterUtilities.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/3/16.
6 | //
7 | //
8 |
9 | #import "YUCIFilterUtilities.h"
10 |
11 | double YUCIGaussianDistributionPDF(double x, double sigma) {
12 | return 1.0/sqrt(2 * M_PI * sigma * sigma) * exp((- x * x) / (2 * sigma * sigma));
13 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFlashTransition.cikernel:
--------------------------------------------------------------------------------
1 | const vec3 flashColor = vec3(1.0, 0.8, 0.3);
2 | const float flashVelocity = 3.0;
3 |
4 | kernel vec4 filterKernel(__sample inputImage, __sample inputTargetImage,
5 | float flashPhase,
6 | float flashIntensity,
7 | float flashZoomEffect,
8 | vec4 inputExtent, float progress)
9 | {
10 | vec2 p = (destCoord() - inputExtent.xy)/inputExtent.zw;
11 | float intensity = mix(1.0, 2.0*distance(p, vec2(0.5, 0.5)), flashZoomEffect) * flashIntensity * pow(smoothstep(flashPhase, 0.0, distance(0.5, progress)), flashVelocity);
12 | vec4 c = mix(inputImage, inputTargetImage, smoothstep(0.5*(1.0-flashPhase), 0.5*(1.0+flashPhase), progress));
13 | c += intensity * vec4(flashColor, 1.0);
14 | return c;
15 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFlashTransition.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFlashTransition.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/4/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIFlashTransition : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 | @property (nonatomic, strong, nullable) CIImage *inputTargetImage;
15 |
16 | @property (nonatomic, copy, null_resettable) NSNumber *inputFlashPhase;
17 | @property (nonatomic, copy, null_resettable) NSNumber *inputFlashIntensity;
18 | @property (nonatomic, copy, null_resettable) NSNumber *inputFlashZoom;
19 |
20 | @property (nonatomic, copy, nullable) CIVector *inputExtent;
21 |
22 | @property (nonatomic, copy, null_resettable) NSNumber *inputTime; /* 0 to 1 */
23 |
24 | @end
25 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIFlashTransition.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIFlashTransition.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/4/16.
6 | //
7 | //
8 |
9 | #import "YUCIFlashTransition.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCIFlashTransition
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCIFlashTransition class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryTransition],
22 | kCIAttributeFilterDisplayName: @"Flash Transition"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIColorKernel *)filterKernel {
29 | static CIColorKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIFlashTransition class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIColorKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (NSNumber *)inputFlashPhase {
39 | if (!_inputFlashPhase) {
40 | _inputFlashPhase = @(0.6);
41 | }
42 | return _inputFlashPhase;
43 | }
44 |
45 | - (NSNumber *)inputFlashIntensity {
46 | if (!_inputFlashIntensity) {
47 | _inputFlashIntensity = @(3.0);
48 | }
49 | return _inputFlashIntensity;
50 | }
51 |
52 | - (NSNumber *)inputFlashZoom {
53 | if (!_inputFlashZoom) {
54 | _inputFlashZoom = @(0.5);
55 | }
56 | return _inputFlashZoom;
57 | }
58 |
59 | - (NSNumber *)inputTime {
60 | if (!_inputTime) {
61 | _inputTime = @(0);
62 | }
63 | return _inputTime;
64 | }
65 |
66 | - (void)setDefaults {
67 | self.inputFlashPhase = nil;
68 | self.inputFlashIntensity = nil;
69 | self.inputFlashZoom = nil;
70 | self.inputTime = nil;
71 | }
72 |
73 | - (CIImage *)outputImage {
74 | if (!self.inputImage || !self.inputTargetImage) {
75 | return nil;
76 | }
77 |
78 | CIVector *defaultInputExtent = [CIVector vectorWithCGRect:CGRectUnion(self.inputImage.extent, self.inputTargetImage.extent)];
79 | CIVector *extent = self.inputExtent?:defaultInputExtent;
80 | return [[YUCIFlashTransition filterKernel] applyWithExtent:extent.CGRectValue
81 | arguments:@[self.inputImage,self.inputTargetImage,self.inputFlashPhase,self.inputFlashIntensity,self.inputFlashZoom,extent,self.inputTime]];
82 | }
83 |
84 | @end
85 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIHSLToRGB.cikernel:
--------------------------------------------------------------------------------
1 | float hue2rgb(float p, float q, float t){
2 | if(t < 0.0) t += 1.0;
3 | if(t > 1.0) t -= 1.0;
4 | if(t < 1.0/6.0) return p + (q - p) * 6.0 * t;
5 | if(t < 1.0/2.0) return q;
6 | if(t < 2.0/3.0) return p + (q - p) * (2.0/3.0 - t) * 6.0;
7 | return p;
8 | }
9 |
10 | kernel vec4 hsl2rgb(__sample inputColor)
11 | {
12 | vec4 color = clamp(inputColor,vec4(0.0),vec4(1.0));
13 |
14 | float h = color.r;
15 | float s = color.g;
16 | float l = color.b;
17 |
18 | float r,g,b;
19 | if(s <= 0.0){
20 | r = g = b = l;
21 | }else{
22 | float q = l < 0.5 ? (l * (1.0 + s)) : (l + s - l * s);
23 | float p = 2.0 * l - q;
24 | r = hue2rgb(p, q, h + 1.0/3.0);
25 | g = hue2rgb(p, q, h);
26 | b = hue2rgb(p, q, h - 1.0/3.0);
27 | }
28 | return vec4(r,g,b,color.a);
29 | }
30 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIHistogramEqualization.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIHistogramEqualization.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/15/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIHistogramEqualization : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIHistogramEqualization.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIHistogramEqualization.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/15/16.
6 | //
7 | //
8 |
9 | #import "YUCIHistogramEqualization.h"
10 | #import "YUCIFilterConstructor.h"
11 | #import <Accelerate/Accelerate.h>
12 |
13 | @interface YUCIHistogramEqualization ()
14 |
15 | @property (nonatomic, strong) CIContext *context;
16 |
17 | @end
18 |
19 | @implementation YUCIHistogramEqualization
20 |
21 | + (void)load {
22 | static dispatch_once_t onceToken;
23 | dispatch_once(&onceToken, ^{
24 | @autoreleasepool {
25 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
26 | [CIFilter registerFilterName:NSStringFromClass([YUCIHistogramEqualization class])
27 | constructor:[YUCIFilterConstructor constructor]
28 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryColorAdjustment],
29 | kCIAttributeFilterDisplayName: @"Histogram Equalization"}];
30 | }
31 | }
32 | });
33 | }
34 |
35 | - (CIContext *)context {
36 | if (!_context) {
37 | _context = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: CFBridgingRelease(CGColorSpaceCreateWithName(kCGColorSpaceSRGB))}];
38 | }
39 | return _context;
40 | }
41 |
42 | - (CIImage *)outputImage {
43 | if (!self.inputImage) {
44 | return nil;
45 | }
46 |
47 | ptrdiff_t rowBytes = self.inputImage.extent.size.width * 4; // ARGB has 4 components
48 | uint8_t *byteBuffer = calloc(rowBytes * self.inputImage.extent.size.height, sizeof(uint8_t)); // Buffer to render into
49 | [self.context render:self.inputImage
50 | toBitmap:byteBuffer
51 | rowBytes:rowBytes
52 | bounds:self.inputImage.extent
53 | format:kCIFormatARGB8
54 | colorSpace:self.context.workingColorSpace];
55 |
56 | vImage_Buffer vImageBuffer;
57 | vImageBuffer.data = byteBuffer;
58 | vImageBuffer.width = self.inputImage.extent.size.width;
59 | vImageBuffer.height = self.inputImage.extent.size.height;
60 | vImageBuffer.rowBytes = rowBytes;
61 |
62 | vImageEqualization_ARGB8888(&vImageBuffer, &vImageBuffer, kvImageNoFlags);
63 |
64 | NSData *bitmapData = [NSData dataWithBytesNoCopy:vImageBuffer.data length:vImageBuffer.rowBytes * vImageBuffer.height freeWhenDone:YES];
65 | CIImage *result = [[CIImage alloc] initWithBitmapData:bitmapData bytesPerRow:vImageBuffer.rowBytes size:CGSizeMake(vImageBuffer.width, vImageBuffer.height) format:kCIFormatARGB8 colorSpace:self.context.workingColorSpace];
66 | return result;
67 | }
68 |
69 | @end
70 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIRGBToHSL.cikernel:
--------------------------------------------------------------------------------
1 | kernel vec4 rgb2hsl(__sample inputColor)
2 | {
3 | vec4 color = clamp(inputColor,vec4(0.0),vec4(1.0));
4 |
5 | //Compute min and max component values
6 | float MAX = max(color.r, max(color.g, color.b));
7 | float MIN = min(color.r, min(color.g, color.b));
8 |
9 | //Make sure MAX > MIN to avoid division by zero later
10 | MAX = max(MIN + 1e-6, MAX);
11 |
12 | //Compute luminosity
13 | float l = (MIN + MAX) / 2.0;
14 |
15 | //Compute saturation
16 | float s = (l < 0.5 ? (MAX - MIN) / (MIN + MAX) : (MAX - MIN) / (2.0 - MAX - MIN));
17 |
18 | //Compute hue
19 | float h = (MAX == color.r ? (color.g - color.b) / (MAX - MIN) : (MAX == color.g ? 2.0 + (color.b - color.r) / (MAX - MIN) : 4.0 + (color.r - color.g) / (MAX - MIN)));
20 | h /= 6.0;
21 | h = (h < 0.0 ? 1.0 + h : h);
22 |
23 | return vec4(h, s, l, color.a);
24 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIRGBToneCurve.cikernel:
--------------------------------------------------------------------------------
1 |
2 | kernel vec4 filterKernel(sampler inputImage, sampler toneCurveTexture, float intensity) {
3 | vec4 textureColor = sample(inputImage,samplerCoord(inputImage));
4 | vec4 toneCurveTextureExtent = samplerExtent(toneCurveTexture);
5 |
6 | vec2 redCoord = samplerTransform(toneCurveTexture,vec2(textureColor.r * 255.0 + 0.5 + toneCurveTextureExtent.x, toneCurveTextureExtent.y + 0.5));
7 | vec2 greenCoord = samplerTransform(toneCurveTexture,vec2(textureColor.g * 255.0 + 0.5 + toneCurveTextureExtent.x, toneCurveTextureExtent.y + 0.5));
8 | vec2 blueCoord = samplerTransform(toneCurveTexture,vec2(textureColor.b * 255.0 + 0.5 + toneCurveTextureExtent.x, toneCurveTextureExtent.y + 0.5));
9 |
10 | float redCurveValue = sample(toneCurveTexture, redCoord).r;
11 | float greenCurveValue = sample(toneCurveTexture, greenCoord).g;
12 | float blueCurveValue = sample(toneCurveTexture, blueCoord).b;
13 | return vec4(mix(textureColor.rgb,vec3(redCurveValue, greenCurveValue, blueCurveValue),intensity),textureColor.a);
14 | }
15 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIRGBToneCurve.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIRGBToneCurveFilter.h
3 | // Pods
4 | //
5 | // Created by YuAo on 1/21/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIRGBToneCurve : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @property (nonatomic, copy, null_resettable) NSArray *inputRedControlPoints;
16 | @property (nonatomic, copy, null_resettable) NSArray *inputGreenControlPoints;
17 | @property (nonatomic, copy, null_resettable) NSArray *inputBlueControlPoints;
18 | @property (nonatomic, copy, null_resettable) NSArray *inputRGBCompositeControlPoints;
19 |
20 | @property (nonatomic, copy, null_resettable) NSNumber *inputIntensity; //default 1.0
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIReflectedTile.cikernel:
--------------------------------------------------------------------------------
1 |
2 | kernel vec2 filterKernel(float inputMode, vec4 inputExtent) {
3 | vec2 coord = destCoord();
4 | if (coord.x >= inputExtent.x &&
5 | coord.x < inputExtent.x + inputExtent.z &&
6 | coord.y >= inputExtent.y &&
7 | coord.y < inputExtent.y + inputExtent.w
8 | ) {
9 | return coord;
10 | } else {
11 | int mode = int(floor(inputMode + 0.5));
12 | float w = inputExtent.z - (mode == 0 ? 1.0: 0.0);
13 | float h = inputExtent.w - (mode == 0 ? 1.0: 0.0);
14 |
15 | float x = coord.x - inputExtent.x - inputExtent.z;
16 | float nx = floor(x/w);
17 | float dx = x - nx * w;
18 |
19 | float y = coord.y - inputExtent.y - inputExtent.w;
20 | float ny = floor(y/h);
21 | float dy = y - ny * h;
22 |
23 | if (int(mod(nx,2.0)) == 1) {
24 | coord.x = inputExtent.x + inputExtent.z - (w - dx);
25 | } else {
26 | coord.x = inputExtent.x + (w - dx);
27 | }
28 |
29 | if (int(mod(ny,2.0)) == 1) {
30 | coord.y = inputExtent.y + inputExtent.w - (h - dy);
31 | } else {
32 | coord.y = inputExtent.y + (h - dy);
33 | }
34 | return coord;
35 | }
36 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIReflectedTile.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIReflectTile.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/16/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | typedef NS_ENUM(NSInteger, YUCIReflectedTileMode) {
12 | YUCIReflectedTileModeReflectWithoutBorder = 0,
13 | YUCIReflectedTileModeReflectWithBorder = 1,
14 | };
15 |
16 | @interface YUCIReflectedTile : CIFilter
17 |
18 | @property (nonatomic, strong, nullable) CIImage *inputImage;
19 |
20 | @property (nonatomic, copy, null_resettable) NSNumber *inputMode; //default: YUCIReflectedTileModeReflectWithoutBorder
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIReflectedTile.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIReflectTile.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/16/16.
6 | //
7 | //
8 |
9 | #import "YUCIReflectedTile.h"
10 | #import "YUCIFilterConstructor.h"
11 | #import "YUCIReflectedTileROICalculator.h"
12 |
13 | @implementation YUCIReflectedTile
14 |
15 | + (void)load {
16 | static dispatch_once_t onceToken;
17 | dispatch_once(&onceToken, ^{
18 | @autoreleasepool {
19 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
20 | [CIFilter registerFilterName:NSStringFromClass([YUCIReflectedTile class])
21 | constructor:[YUCIFilterConstructor constructor]
22 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryTileEffect],
23 | kCIAttributeFilterDisplayName: @"Reflected Tile"}];
24 | }
25 | }
26 | });
27 | }
28 |
29 | + (CIWarpKernel *)filterKernel {
30 | static CIWarpKernel *kernel;
31 | static dispatch_once_t onceToken;
32 | dispatch_once(&onceToken, ^{
33 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIReflectedTile class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
34 | kernel = [CIWarpKernel kernelWithString:kernelString];
35 | });
36 | return kernel;
37 | }
38 |
39 | - (NSNumber *)inputMode {
40 | if (!_inputMode) {
41 | _inputMode = @(YUCIReflectedTileModeReflectWithoutBorder);
42 | }
43 | return _inputMode;
44 | }
45 |
46 | - (void)setDefaults {
47 | self.inputMode = nil;
48 | }
49 |
50 | - (CIImage *)outputImage {
51 | if (!self.inputImage) {
52 | return nil;
53 | }
54 |
55 | CGRect inputExtent = self.inputImage.extent;
56 | return [[YUCIReflectedTile filterKernel] applyWithExtent:CGRectInfinite
57 | roiCallback:^CGRect(int index, CGRect destRect) {
58 | return [YUCIReflectedTileROICalculator ROIForDestinationRect:destRect inputImageExtent:inputExtent mode:self.inputMode.integerValue];
59 | }
60 | inputImage:self.inputImage
61 | arguments:@[self.inputMode,[CIVector vectorWithCGRect:self.inputImage.extent]]];
62 | }
63 |
64 | @end
65 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIReflectedTileROICalculator.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIReflectedTileROICalculator.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/19/16.
6 | //
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import "YUCIReflectedTile.h"
11 |
12 | @interface YUCIReflectedTileROICalculator : NSObject
13 |
14 | + (CGRect)ROIForDestinationRect:(CGRect)destRect inputImageExtent:(CGRect)inputExtent mode:(YUCIReflectedTileMode)mode;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCISkyGenerator.cikernel:
--------------------------------------------------------------------------------
1 | //https://github.com/wwwtyro/glsl-atmosphere
2 |
3 | #define PI 3.141592
4 | #define iSteps 16
5 | #define jSteps 8
6 |
7 | float rsi(vec3 r0, vec3 rd, float sr) {
8 | // Simplified ray-sphere intersection that assumes
9 | // the ray starts inside the sphere and that the
10 | // sphere is centered at the origin. Always intersects.
11 | float a = dot(rd, rd);
12 | float b = 2.0 * dot(rd, r0);
13 | float c = dot(r0, r0) - (sr * sr);
14 | return (-b + sqrt((b*b) - 4.0*a*c))/(2.0*a);
15 | }
16 |
17 | vec3 atmosphere(vec3 r, vec3 r0, vec3 pSun, float iSun, float rPlanet, float rAtmos, vec3 kRlh, float kMie, float shRlh, float shMie, float g) {
18 | // Normalize the sun and view directions.
19 | pSun = normalize(pSun);
20 | r = normalize(r);
21 |
22 | // Calculate the step size of the primary ray.
23 | float iStepSize = rsi(r0, r, rAtmos) / float(iSteps);
24 |
25 | // Initialize the primary ray time.
26 | float iTime = 0.0;
27 |
28 | // Initialize accumulators for Rayleigh and Mie scattering.
29 | vec3 totalRlh = vec3(0.0);
30 | vec3 totalMie = vec3(0.0);
31 |
32 | // Initialize optical depth accumulators for the primary ray.
33 | float iOdRlh = 0.0;
34 | float iOdMie = 0.0;
35 |
36 | // Calculate the Rayleigh and Mie phases.
37 | float mu = dot(r, pSun);
38 | float mumu = mu * mu;
39 | float gg = g * g;
40 | float pRlh = 3.0 / (16.0 * PI) * (1.0 + mumu);
41 | float pMie = 3.0 / (8.0 * PI) * ((1.0 - gg) * (mumu + 1.0)) / (pow(1.0 + gg - 2.0 * mu * g, 1.5) * (2.0 + gg));
42 |
43 | // Sample the primary ray.
44 | for (int i = 0; i < iSteps; i++) {
45 |
46 | // Calculate the primary ray sample position.
47 | vec3 iPos = r0 + r * (iTime + iStepSize * 0.5);
48 |
49 | // Calculate the height of the sample.
50 | float iHeight = length(iPos) - rPlanet;
51 |
52 | // Calculate the optical depth of the Rayleigh and Mie scattering for this step.
53 | float odStepRlh = exp(-iHeight / shRlh) * iStepSize;
54 | float odStepMie = exp(-iHeight / shMie) * iStepSize;
55 |
56 | // Accumulate optical depth.
57 | iOdRlh += odStepRlh;
58 | iOdMie += odStepMie;
59 |
60 | // Calculate the step size of the secondary ray.
61 | float jStepSize = rsi(iPos, pSun, rAtmos) / float(jSteps);
62 |
63 | // Initialize the secondary ray time.
64 | float jTime = 0.0;
65 |
66 | // Initialize optical depth accumulators for the secondary ray.
67 | float jOdRlh = 0.0;
68 | float jOdMie = 0.0;
69 |
70 | // Sample the secondary ray.
71 | for (int j = 0; j < jSteps; j++) {
72 |
73 | // Calculate the secondary ray sample position.
74 | vec3 jPos = iPos + pSun * (jTime + jStepSize * 0.5);
75 |
76 | // Calculate the height of the sample.
77 | float jHeight = length(jPos) - rPlanet;
78 |
79 | // Accumulate the optical depth.
80 | jOdRlh += exp(-jHeight / shRlh) * jStepSize;
81 | jOdMie += exp(-jHeight / shMie) * jStepSize;
82 |
83 | // Increment the secondary ray time.
84 | jTime += jStepSize;
85 | }
86 |
87 | // Calculate attenuation.
88 | vec3 attn = exp(-(kMie * (iOdMie + jOdMie) + kRlh * (iOdRlh + jOdRlh)));
89 |
90 | // Accumulate scattering.
91 | totalRlh += odStepRlh * attn;
92 | totalMie += odStepMie * attn;
93 |
94 | // Increment the primary ray time.
95 | iTime += iStepSize;
96 |
97 | }
98 |
99 | // Calculate and return the final color.
100 | return iSun * (pRlh * kRlh * totalRlh + pMie * kMie * totalMie);
101 | }
102 |
103 | kernel vec4 coreImageKernel(vec4 inputExtent, vec2 viewPoint, vec3 sunPosition, float sunIntensity) {
104 | vec2 fragCoord = destCoord();
105 | vec2 iResolution = inputExtent.zw;
106 | vec3 color = atmosphere(
107 | normalize(vec3((vec2(0.5) - fragCoord/iResolution) * vec2(-2.0) + viewPoint,-1.0)), // normalized ray direction
108 | vec3(0,6372e3,0), // ray origin
109 | sunPosition, // position of the sun
110 | sunIntensity, // intensity of the sun
111 | 6371e3, // radius of the planet in meters
112 | 6471e3, // radius of the atmosphere in meters
113 | vec3(5.5e-6, 13.0e-6, 22.4e-6), // Rayleigh scattering coefficient
114 | 21e-6, // Mie scattering coefficient
115 | 8e3, // Rayleigh scale height
116 | 1.2e3, // Mie scale height
117 | 0.758 // Mie preferred scattering direction
118 | );
119 |
120 | // Apply exposure.
121 | color = 1.0 - exp(-1.0 * color);
122 |
123 | return vec4(color, 1.0);
124 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCISkyGenerator.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCISkyGenerator.h
3 | // Pods
4 | //
5 | // Created by YuAo on 3/15/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCISkyGenerator : CIFilter
12 |
13 | @property (nonatomic, copy, null_resettable) CIVector *inputExtent;
14 |
15 | @property (nonatomic, copy, null_resettable) CIVector *inputSunPosition;
16 |
17 | @property (nonatomic, copy, null_resettable) NSNumber *inputSunIntensity;
18 |
19 | @property (nonatomic, copy, null_resettable) CIVector *inputViewPointOffset;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCISkyGenerator.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCISkyGenerator.m
3 | // Pods
4 | //
5 | // Created by YuAo on 3/15/16.
6 | //
7 | //
8 |
9 | #import "YUCISkyGenerator.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCISkyGenerator
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCISkyGenerator class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryGenerator],
22 | kCIAttributeFilterDisplayName: @"Sky Generator"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIColorKernel *)filterKernel {
29 | static CIColorKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCISkyGenerator class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIColorKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (CIVector *)inputExtent {
39 | if (!_inputExtent) {
40 | _inputExtent = [CIVector vectorWithCGRect:CGRectMake(0, 0, 640, 480)];
41 | }
42 | return _inputExtent;
43 | }
44 |
45 | - (CIVector *)inputViewPointOffset {
46 | if (!_inputViewPointOffset) {
47 | _inputViewPointOffset = [CIVector vectorWithX:0 Y:0.9];
48 | }
49 | return _inputViewPointOffset;
50 | }
51 |
52 | - (CIVector *)inputSunPosition {
53 | if (!_inputSunPosition) {
54 | _inputSunPosition = [CIVector vectorWithX:0 Y:0.1 Z:-1.0];
55 | }
56 | return _inputSunPosition;
57 | }
58 |
59 | - (NSNumber *)inputSunIntensity {
60 | if (!_inputSunIntensity) {
61 | _inputSunIntensity = @30;
62 | }
63 | return _inputSunIntensity;
64 | }
65 |
66 | - (void)setDefaults {
67 | self.inputExtent = nil;
68 | self.inputViewPointOffset = nil;
69 | self.inputSunPosition = nil;
70 | self.inputSunIntensity = nil;
71 | }
72 |
73 | - (CIImage *)outputImage {
74 | return [[YUCISkyGenerator filterKernel] applyWithExtent:self.inputExtent.CGRectValue
75 | arguments:@[self.inputExtent,self.inputViewPointOffset,self.inputSunPosition,self.inputSunIntensity]];
76 | }
77 |
78 | @end
79 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIStarfieldGenerator.cikernel:
--------------------------------------------------------------------------------
1 | // Star Nest by Pablo Román Andrioli
2 |
3 | // This content is under the MIT License.
4 |
5 | //mat2 is not supported by core image, use vec4. https://developer.apple.com/library/mac/documentation/GraphicsImaging/Reference/CIKernelLangRef/ci_gslang_ext.html
6 | vec2 vec2MultiplyMat2(vec2 v, vec4 m) {
7 | return vec2(m.x * v.x + m.y * v.y, m.z * v.x + m.w * v.y);
8 | }
9 |
10 | kernel vec4 coreImageKernel(vec4 inputExtent, float inputTime, vec2 inputRotation)
11 | {
12 | const int iterations = 17;
13 | const float formuparam = 0.53;
14 |
15 | const int volsteps = 20;
16 | const float stepsize = 0.1;
17 |
18 | const float zoom = 0.800;
19 | const float tile = 0.850;
20 | const float speed = 0.010;
21 |
22 | const float brightness = 0.0015;
23 | const float darkmatter = 0.300;
24 | const float distfading = 0.730;
25 | const float saturation = 0.850;
26 |
27 | vec2 fragCoord = destCoord();
28 | vec2 iResolution = inputExtent.zw;
29 |
30 | //get coords and direction
31 | vec2 uv=fragCoord.xy/iResolution.xy-.5;
32 | uv.y*=iResolution.y/iResolution.x;
33 | vec3 dir=vec3(uv*zoom,1.);
34 | float time=inputTime*speed+.25;
35 |
36 | //mouse rotation
37 | vec2 iMouse = inputRotation;
38 | float a1=.5+iMouse.x/iResolution.x*2.;
39 | float a2=.8+iMouse.y/iResolution.y*2.;
40 | vec4 rot1=vec4(cos(a1),sin(a1),-sin(a1),cos(a1));
41 | vec4 rot2=vec4(cos(a2),sin(a2),-sin(a2),cos(a2));
42 | dir.xz = vec2MultiplyMat2(dir.xz,rot1);
43 | dir.xy = vec2MultiplyMat2(dir.xy,rot2);
44 | vec3 from=vec3(1.,.5,0.5);
45 | from+=vec3(time*2.,time,-2.);
46 | from.xz = vec2MultiplyMat2(from.xz,rot1);
47 | from.xy = vec2MultiplyMat2(from.xy,rot2);
48 |
49 | //volumetric rendering
50 | float s=0.1,fade=1.;
51 | vec3 v=vec3(0.);
52 | for (int r=0; r<volsteps; r++) {
53 | vec3 p=from+s*dir*.5;
54 | p = abs(vec3(tile)-mod(p,vec3(tile*2.))); // tiling fold
55 | float pa,a=pa=0.;
56 | for (int i=0; i<iterations; i++) {
57 | p=abs(p)/dot(p,p)-formuparam; // the magic formula
58 | a+=abs(length(p)-pa); // absolute sum of average change
59 | pa=length(p);
60 | }
61 | float dm=max(0.,darkmatter-a*a*.001); // dark matter
62 | a*=a*a; // add contrast
63 | if (r>6) { fade*=1.-dm; } // dark matter, don't render near
64 |
65 | v+=fade;
66 | v+=vec3(s,s*s,s*s*s*s)*a*brightness*fade; // coloring based on distance
67 | fade*=distfading; // distance fading
68 | s+=stepsize;
69 | }
70 | v=mix(vec3(length(v)),v,saturation); //color adjust
71 | return vec4(v*.01,1.);
72 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIStarfieldGenerator.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIStarNestGenerator.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/4/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCIStarfieldGenerator : CIFilter
12 |
13 | @property (nonatomic, copy, null_resettable) CIVector *inputExtent;
14 |
15 | @property (nonatomic, copy, null_resettable) NSNumber *inputTime;
16 |
17 | @property (nonatomic, copy, null_resettable) CIVector *inputRotation;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIStarfieldGenerator.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIStarNestGenerator.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/4/16.
6 | //
7 | //
8 |
9 | #import "YUCIStarfieldGenerator.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCIStarfieldGenerator
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCIStarfieldGenerator class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryGenerator],
22 | kCIAttributeFilterDisplayName: @"Starfield Generator"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIColorKernel *)filterKernel {
29 | static CIColorKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCIStarfieldGenerator class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIColorKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (CIVector *)inputExtent {
39 | if (!_inputExtent) {
40 | _inputExtent = [CIVector vectorWithCGRect:CGRectMake(0, 0, 640, 480)];
41 | }
42 | return _inputExtent;
43 | }
44 |
45 | - (NSNumber *)inputTime {
46 | if (!_inputTime) {
47 | _inputTime = @(0);
48 | }
49 | return _inputTime;
50 | }
51 |
52 | - (CIVector *)inputRotation {
53 | if (!_inputRotation) {
54 | _inputRotation = [CIVector vectorWithX:0 Y:0];
55 | }
56 | return _inputRotation;
57 | }
58 |
59 | - (void)setDefaults {
60 | self.inputExtent = nil;
61 | self.inputTime = nil;
62 | self.inputRotation = nil;
63 | }
64 |
65 | - (CIImage *)outputImage {
66 | return [[YUCIStarfieldGenerator filterKernel] applyWithExtent:self.inputExtent.CGRectValue
67 | arguments:@[self.inputExtent,self.inputTime,self.inputRotation]];
68 | }
69 |
70 | @end
71 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCISurfaceBlur.cikernel:
--------------------------------------------------------------------------------
1 | float normpdf(in float x, in float sigma)
2 | {
3 | return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
4 | }
5 |
6 | float normpdf3(in vec3 v, in float sigma)
7 | {
8 | return 0.39894*exp(-0.5*dot(v,v)/(sigma*sigma))/sigma;
9 | }
10 |
11 | kernel vec4 filterKernel(sampler inputImage, float bsigma) {
12 | const int SAMPLES_COUNT = MACRO_SAMPLES_COUNT;
13 |
14 | float GAUSSIAN_WEIGHTS[SAMPLES_COUNT];
15 | MACRO_SETUP_GAUSSIAN_WEIGHTS
16 | /*
17 | float sigma = float(kSize);
18 | for (int j = 0; j <= kSize; ++j)
19 | {
20 | GAUSSIAN_WEIGHTS[kSize+j] = GAUSSIAN_WEIGHTS[kSize-j] = normpdf(float(j), sigma);
21 | }
22 | */
23 |
24 | const int kSize = (SAMPLES_COUNT-1)/2;
25 | vec3 finalColor = vec3(0.0);
26 |
27 | vec3 c = sample(inputImage,samplerCoord(inputImage)).rgb;
28 |
29 | float Z = 0.0;
30 |
31 | vec3 cc;
32 | float factor;
33 | float bZ = 1.0/normpdf(0.0, bsigma);
34 |
35 | for (int i=-kSize; i <= kSize; ++i)
36 | {
37 | for (int j=-kSize; j <= kSize; ++j)
38 | {
39 | cc = sample(inputImage,samplerTransform(inputImage, destCoord().xy + vec2(float(i),float(j)))).rgb;
40 | factor = normpdf3(cc-c, bsigma)*bZ*GAUSSIAN_WEIGHTS[kSize+j]*GAUSSIAN_WEIGHTS[kSize+i];
41 | Z += factor;
42 | finalColor += factor*cc;
43 | }
44 | }
45 |
46 | return vec4(finalColor/Z, 1.0);
47 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCISurfaceBlur.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCISurfaceBlur.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/3/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCISurfaceBlur : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @property (nonatomic, copy, null_resettable) NSNumber *inputRadius; //default 10
16 |
17 | @property (nonatomic, copy, null_resettable) NSNumber *inputThreshold; //0.0-255.0; //default 10
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCISurfaceBlur.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCISurfaceBlur.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/3/16.
6 | //
7 | //
8 |
9 | #import "YUCISurfaceBlur.h"
10 | #import "YUCIFilterUtilities.h"
11 | #import "YUCIFilterConstructor.h"
12 |
13 | @implementation YUCISurfaceBlur
14 |
15 | + (void)load {
16 | static dispatch_once_t onceToken;
17 | dispatch_once(&onceToken, ^{
18 | @autoreleasepool {
19 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
20 | [CIFilter registerFilterName:NSStringFromClass([YUCISurfaceBlur class])
21 | constructor:[YUCIFilterConstructor constructor]
22 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryBlur],
23 | kCIAttributeFilterDisplayName: @"Surface Blur"}];
24 | }
25 | }
26 | });
27 | }
28 |
29 | static NSDictionary *YUCISurfaceBlurKernels;
30 |
31 | + (CIKernel *)filterKernelForRadius:(NSInteger)radius {
32 | CIKernel *kernel = YUCISurfaceBlurKernels[@(radius)];
33 |
34 | if (kernel) {
35 | return kernel;
36 | } else {
37 | NSMutableArray *wights = [NSMutableArray array];
38 | for (NSInteger i = 0; i < radius; ++i) {
39 | double wight = YUCIGaussianDistributionPDF(i, radius);
40 | [wights addObject:@(wight)];
41 | }
42 |
43 | NSString *gaussianWeightsSetupProgram = @"";
44 | for (NSInteger i = 0; i < wights.count; ++i) {
45 | gaussianWeightsSetupProgram = [gaussianWeightsSetupProgram stringByAppendingFormat:@"GAUSSIAN_WEIGHTS[%@] = GAUSSIAN_WEIGHTS[%@] = %@; \n",@(radius - 1 - i),@(radius - 1 + i),wights[i]];
46 | }
47 |
48 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCISurfaceBlur class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
49 | kernelString = [kernelString stringByReplacingOccurrencesOfString:@"MACRO_SAMPLES_COUNT" withString:@(radius * 2).stringValue];
50 | kernelString = [kernelString stringByReplacingOccurrencesOfString:@"MACRO_SETUP_GAUSSIAN_WEIGHTS" withString:gaussianWeightsSetupProgram];
51 | kernel = [CIKernel kernelWithString:kernelString];
52 |
53 | NSMutableDictionary *newKernels = [NSMutableDictionary dictionaryWithDictionary:YUCISurfaceBlurKernels];
54 | [newKernels setObject:kernel forKey:@(radius)];
55 | YUCISurfaceBlurKernels = newKernels.copy;
56 |
57 | return kernel;
58 | }
59 | }
60 |
61 | - (NSNumber *)inputRadius {
62 | if (!_inputRadius) {
63 | _inputRadius = @(10);
64 | }
65 | return _inputRadius;
66 | }
67 |
68 | - (NSNumber *)inputThreshold {
69 | if (!_inputThreshold) {
70 | _inputThreshold = @(10);
71 | }
72 | return _inputThreshold;
73 | }
74 |
75 | - (void)setDefaults {
76 | self.inputRadius = nil;
77 | self.inputThreshold = nil;
78 | }
79 |
80 | - (CIImage *)outputImage {
81 | if (!self.inputImage) {
82 | return nil;
83 | }
84 |
85 | if (self.inputRadius.integerValue <= 0 || self.inputThreshold.doubleValue < 0) {
86 | return self.inputImage;
87 | }
88 |
89 | CIKernel *kernel = [YUCISurfaceBlur filterKernelForRadius:[self.inputRadius integerValue]];
90 | return [kernel applyWithExtent:self.inputImage.extent
91 | roiCallback:^CGRect(int index, CGRect destRect) {
92 | return CGRectInset(destRect, -self.inputRadius.integerValue, -self.inputRadius.integerValue);
93 | } arguments:@[self.inputImage.imageByClampingToExtent,@(self.inputThreshold.doubleValue/255.0 * 2.0)]];
94 | }
95 |
96 |
97 | @end
98 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCITriangularPixellate.cikernel:
--------------------------------------------------------------------------------
1 |
2 | kernel vec4 filterKernel(sampler inputImage, vec2 center, float scale, float tan_halfInputAngle) {
3 | const int MSAA_SAMPLE_COUNT = 4; //4xMSAA RGSS
4 | vec2 MSAASampleCoords[MSAA_SAMPLE_COUNT];
5 | MSAASampleCoords[0] = destCoord() + vec2(-0.375,0.125);
6 | MSAASampleCoords[1] = destCoord() + vec2(0.125,0.375);
7 | MSAASampleCoords[2] = destCoord() + vec2(0.375,-0.125);
8 | MSAASampleCoords[3] = destCoord() + vec2(-0.125,-0.375);
9 |
10 | float scaleY = scale/2.0 / tan_halfInputAngle;
11 |
12 | vec4 color = vec4(0.0);
13 | for (int i = 0; i < MSAA_SAMPLE_COUNT; ++i) {
14 | vec2 sampleCoord = MSAASampleCoords[i] - center;
15 | float nx = floor(sampleCoord.x/scale);
16 | float ny = floor(sampleCoord.y/scaleY);
17 | float ly = sampleCoord.y - ny * scaleY;
18 | float lx = sampleCoord.x - nx * scale;
19 | float dx = (scaleY - ly) * tan_halfInputAngle;
20 | vec2 coord;
21 | if (lx > scale/2.0 + dx) {
22 | coord = vec2((nx + 1.0) * scale, (ny + 0.5) * scaleY);
23 | } else if (lx < scale/2.0 - dx) {
24 | coord = vec2(nx * scale, (ny + 0.5) * scaleY);
25 | } else {
26 | coord = vec2((nx + 0.5) * scale, (ny + 0.5) * scaleY);
27 | }
28 | color += sample(inputImage,samplerTransform(inputImage,coord + center));
29 | }
30 | return color/float(MSAA_SAMPLE_COUNT);
31 | }
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCITriangularPixellate.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCITriangularPixellate.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/9/16.
6 | //
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface YUCITriangularPixellate : CIFilter
12 |
13 | @property (nonatomic, strong, nullable) CIImage *inputImage;
14 |
15 | @property (nonatomic, copy, null_resettable) NSNumber *inputVertexAngle; //default: M_PI/2.0
16 |
17 | @property (nonatomic, copy, null_resettable) NSNumber *inputScale; //default: 20.0
18 |
19 | @property (nonatomic, copy, null_resettable) CIVector *inputCenter; //default: (0,0)
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCITriangularPixellate.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCITriangularPixellate.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/9/16.
6 | //
7 | //
8 |
9 | #import "YUCITriangularPixellate.h"
10 | #import "YUCIFilterConstructor.h"
11 |
12 | @implementation YUCITriangularPixellate
13 |
14 | + (void)load {
15 | static dispatch_once_t onceToken;
16 | dispatch_once(&onceToken, ^{
17 | @autoreleasepool {
18 | if ([CIFilter respondsToSelector:@selector(registerFilterName:constructor:classAttributes:)]) {
19 | [CIFilter registerFilterName:NSStringFromClass([YUCITriangularPixellate class])
20 | constructor:[YUCIFilterConstructor constructor]
21 | classAttributes:@{kCIAttributeFilterCategories: @[kCICategoryStillImage,kCICategoryVideo,kCICategoryStylize],
22 | kCIAttributeFilterDisplayName: @"Triangular Pixellate"}];
23 | }
24 | }
25 | });
26 | }
27 |
28 | + (CIKernel *)filterKernel {
29 | static CIKernel *kernel;
30 | static dispatch_once_t onceToken;
31 | dispatch_once(&onceToken, ^{
32 | NSString *kernelString = [[NSString alloc] initWithContentsOfURL:[[NSBundle bundleForClass:self] URLForResource:NSStringFromClass([YUCITriangularPixellate class]) withExtension:@"cikernel"] encoding:NSUTF8StringEncoding error:nil];
33 | kernel = [CIKernel kernelWithString:kernelString];
34 | });
35 | return kernel;
36 | }
37 |
38 | - (NSNumber *)inputScale {
39 | if (!_inputScale) {
40 | _inputScale = @(20);
41 | }
42 | return _inputScale;
43 | }
44 |
45 | - (NSNumber *)inputVertexAngle {
46 | if (!_inputVertexAngle) {
47 | _inputVertexAngle = @(M_PI/2.0);
48 | }
49 | return _inputVertexAngle;
50 | }
51 |
52 | - (CIVector *)inputCenter {
53 | if (!_inputCenter) {
54 | _inputCenter = [CIVector vectorWithX:0 Y:0];
55 | }
56 | return _inputCenter;
57 | }
58 |
59 | - (void)setDefaults {
60 | self.inputScale = nil;
61 | self.inputVertexAngle = nil;
62 | self.inputCenter = nil;
63 | }
64 |
65 | - (CIImage *)outputImage {
66 | if (!self.inputImage) {
67 | return nil;
68 | }
69 |
70 | CGFloat tanHalfInputAngle = tan(self.inputVertexAngle.floatValue/2.0);
71 | return [[YUCITriangularPixellate filterKernel] applyWithExtent:self.inputImage.extent
72 | roiCallback:^CGRect(int index, CGRect destRect) {
73 | return CGRectInset(destRect, -self.inputScale.floatValue, -self.inputScale.floatValue/2.0 * tanHalfInputAngle);
74 | } arguments:@[self.inputImage.imageByClampingToExtent,self.inputCenter,self.inputScale,@(tanHalfInputAngle)]];
75 | }
76 |
77 | @end
78 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIUtilities.h:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIUtilities.h
3 | // Pods
4 | //
5 | // Created by YuAo on 2/17/16.
6 | //
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 |
11 | #define YUCI_METAMACRO_CONCAT(A, B) \
12 | YUCI_METAMACRO_CONCAT_(A, B)
13 |
14 | #define YUCI_METAMACRO_CONCAT_(A, B) A ## B
15 |
16 | #define YUCIDefer \
17 | try {} @finally {} \
18 | __strong YUCIDeferCleanupBlock YUCI_METAMACRO_CONCAT(YUCIDeferBlock_, __LINE__) __attribute__((cleanup(YUCIDeferExecuteCleanupBlock), unused)) = ^
19 |
20 | typedef void (^YUCIDeferCleanupBlock)();
21 |
22 | void YUCIDeferExecuteCleanupBlock (__strong YUCIDeferCleanupBlock *block);
23 |
--------------------------------------------------------------------------------
/Pods/Vivid/Sources/YUCIUtilities.m:
--------------------------------------------------------------------------------
1 | //
2 | // YUCIUtilities.m
3 | // Pods
4 | //
5 | // Created by YuAo on 2/17/16.
6 | //
7 | //
8 |
9 | #import "YUCIUtilities.h"
10 |
11 | void YUCIDeferExecuteCleanupBlock (__strong YUCIDeferCleanupBlock *block) {
12 | (*block)();
13 | }
14 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # iOS-Depth-Sampler
2 |
3 | [Platform: iOS](https://developer.apple.com/iphone/index.action)
5 | [Language: Swift](https://developer.apple.com/swift)
7 | [License: MIT](http://mit-license.org)
9 | [Twitter: @shu223](http://twitter.com/shu223)
10 |
11 | Code examples of Depth APIs in iOS
12 |
13 | ## Requirements
14 |
15 | Use a device that has a **dual camera** (e.g. iPhone 8 Plus) or a **TrueDepth camera** (e.g. iPhone X).
16 |
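If you want to gate the samples at runtime, a quick way (a sketch, not code from this project) is to ask AVFoundation whether any depth-capable camera exists:

```swift
import AVFoundation

/// Returns true if this device has a camera that can deliver depth data
/// (a TrueDepth front camera or a dual rear camera).
func depthCaptureIsSupported() -> Bool {
    let discovery = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInTrueDepthCamera, .builtInDualCamera],
        mediaType: .video,
        position: .unspecified)
    return !discovery.devices.isEmpty
}
```
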
17 | ## How to build
18 |
19 | Open `iOS-Depth-Sampler.xcworkspace` with Xcode 10 and build it!
20 |
21 | It can **NOT** run on the **Simulator** because it uses Metal.
22 |
23 |
24 | ## Contents
25 |
26 | ### Real-time Depth
27 |
28 | Depth visualization in real time using AV Foundation.
29 |
30 | 
31 |
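The core AV Foundation setup looks roughly like this (a simplified sketch, not the sample's actual class; the class and queue names are made up):

```swift
import AVFoundation

final class DepthCaptureSketch: NSObject, AVCaptureDepthDataOutputDelegate {
    let session = AVCaptureSession()
    private let depthOutput = AVCaptureDepthDataOutput()
    private let depthQueue = DispatchQueue(label: "depth.output.queue")

    func start() throws {
        guard let camera = AVCaptureDevice.default(.builtInTrueDepthCamera,
                                                   for: .video, position: .front) else { return }
        session.beginConfiguration()
        let input = try AVCaptureDeviceInput(device: camera)
        if session.canAddInput(input) { session.addInput(input) }
        if session.canAddOutput(depthOutput) { session.addOutput(depthOutput) }
        depthOutput.isFilteringEnabled = true   // fill holes in the depth map
        depthOutput.setDelegate(self, callbackQueue: depthQueue)
        session.commitConfiguration()
        session.startRunning()
    }

    // Called for every depth frame; convert to disparity and visualize the pixel buffer.
    func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData,
                         timestamp: CMTime, connection: AVCaptureConnection) {
        let disparity = depthData.converting(toDepthDataType: kCVPixelFormatType_DisparityFloat32)
        _ = disparity.depthDataMap  // CVPixelBuffer to turn into a grayscale image
    }
}
```
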
32 | ### Real-time Depth Mask
33 |
34 | Blending a background image with a mask created from depth.
35 |
36 | 
37 |
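The blend itself is a standard Core Image mask operation; a sketch (assuming the depth map has already been normalized and upscaled into `depthMask`):

```swift
import CoreImage

/// Shows `background` wherever the depth-derived mask is black and keeps
/// the camera image wherever it is white.
func blend(image: CIImage, background: CIImage, depthMask: CIImage) -> CIImage? {
    let filter = CIFilter(name: "CIBlendWithMask", parameters: [
        kCIInputImageKey: image,
        kCIInputBackgroundImageKey: background,
        kCIInputMaskImageKey: depthMask
    ])
    return filter?.outputImage
}
```
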
38 | ### Depth from Camera Roll
39 |
40 | Depth visualization from pictures in the camera roll.
41 |
42 |
43 |
44 | Please try this after taking **a picture with the Camera app in PORTRAIT mode**.
45 |
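Under the hood the depth map is stored as an auxiliary image inside the photo; a sketch of reading it with ImageIO (not the sample's exact code):

```swift
import AVFoundation
import ImageIO

/// Loads the depth (or disparity) map embedded in a Portrait-mode photo, if any.
func depthData(forImageAt url: URL) -> AVDepthData? {
    guard let source = CGImageSourceCreateWithURL(url as CFURL, nil) else { return nil }
    // Portrait photos store depth as an auxiliary image; try disparity first, then depth.
    for auxType in [kCGImageAuxiliaryDataTypeDisparity, kCGImageAuxiliaryDataTypeDepth] {
        if let info = CGImageSourceCopyAuxiliaryDataInfoAtIndex(source, 0, auxType) as? [AnyHashable: Any],
           let depth = try? AVDepthData(fromDictionaryRepresentation: info) {
            return depth
        }
    }
    return nil
}
```
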
46 | ### Portrait Matte
47 |
48 | Background removal demo using Portrait Effects Matte (also known as Portrait Matte).
49 |
50 | 
51 |
52 | Please try this after taking **a picture of a HUMAN in PORTRAIT mode**.
53 |
54 | Available in iOS 12 or later.
55 |
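The matte is read the same way as the depth map, via an auxiliary image; a sketch (iOS 12+, illustrative only):

```swift
import AVFoundation
import ImageIO

/// Extracts the Portrait Effects Matte embedded in a Portrait photo of a person.
func portraitMatte(forImageAt url: URL) -> AVPortraitEffectsMatte? {
    guard let source = CGImageSourceCreateWithURL(url as CFURL, nil),
          let info = CGImageSourceCopyAuxiliaryDataInfoAtIndex(
              source, 0, kCGImageAuxiliaryDataTypePortraitEffectsMatte) as? [AnyHashable: Any]
    else { return nil }
    // `mattingImage` on the result is a high-resolution CVPixelBuffer usable as a mask.
    return try? AVPortraitEffectsMatte(fromDictionaryRepresentation: info)
}
```
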
56 | ### ARKit Depth
57 |
58 | Depth visualization on ARKit. With ARKit, depth data is available only when using `ARFaceTrackingConfiguration`.
59 |
60 | 
61 |
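A minimal sketch of where the depth comes from in this mode (the class name is illustrative): with `ARFaceTrackingConfiguration`, each `ARFrame` may carry `capturedDepthData` from the TrueDepth camera.

```swift
import ARKit

final class FaceDepthSketch: NSObject, ARSessionDelegate {
    let session = ARSession()

    func start() {
        guard ARFaceTrackingConfiguration.isSupported else { return }
        session.delegate = self
        session.run(ARFaceTrackingConfiguration())
    }

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // Depth arrives at a lower frame rate than video, so it is often nil.
        guard let depth = frame.capturedDepthData else { return }
        _ = depth.depthDataMap  // CVPixelBuffer to visualize
    }
}
```
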
62 | ### 2D image in 3D space
63 |
64 | A demo to render a 2D image in 3D space.
65 |
66 | 
67 |
68 |
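The idea is to back-project each depth sample into camera space using the camera intrinsics; a sketch of the math (illustrative, not this sample's code):

```swift
import simd

/// Back-projects one depth-map sample into camera space using the camera intrinsics.
/// `depth` is in meters; (u, v) are pixel coordinates in the depth map's resolution.
func unproject(u: Float, v: Float, depth: Float, intrinsics K: simd_float3x3) -> SIMD3<Float> {
    let fx = K[0][0], fy = K[1][1]   // focal lengths in pixels
    let cx = K[2][0], cy = K[2][1]   // principal point
    return SIMD3<Float>((u - cx) / fx * depth,
                        (v - cy) / fy * depth,
                        depth)
}
```
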
69 | ### AR occlusion
70 |
71 | [WIP] An occlusion sample on ARKit using depth.
72 |
73 | ## Author
74 |
75 | **Shuichi Tsutsumi**
76 |
77 | Freelance iOS programmer from Japan.
78 |
79 |
80 |
81 |
82 |
83 |
84 | - PAST WORKS: [My Profile Summary](https://medium.com/@shu223/my-profile-summary-f14bfc1e7099#.vdh0i7clr)
85 | - PROFILES: [LinkedIn](https://www.linkedin.com/in/shuichi-tsutsumi-525b755b/)
86 | - BLOGS: [English](https://medium.com/@shu223/) / [Japanese](http://d.hatena.ne.jp/shu223/)
87 | - CONTACTS: [Twitter](https://twitter.com/shu223) / [Facebook](https://www.facebook.com/shuichi.tsutsumi)
88 |
--------------------------------------------------------------------------------
/README_resources/3d.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/README_resources/3d.gif
--------------------------------------------------------------------------------
/README_resources/arkit-depth.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/README_resources/arkit-depth.gif
--------------------------------------------------------------------------------
/README_resources/blend.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/README_resources/blend.gif
--------------------------------------------------------------------------------
/README_resources/depth_1.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/README_resources/depth_1.gif
--------------------------------------------------------------------------------
/README_resources/depth_baby_histoeq.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/README_resources/depth_baby_histoeq.jpg
--------------------------------------------------------------------------------
/README_resources/portraitmatte.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/README_resources/portraitmatte.gif
--------------------------------------------------------------------------------
/iOS-Depth-Sampler.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/08/20.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | @UIApplicationMain
12 | class AppDelegate: UIResponder, UIApplicationDelegate {
13 |
14 | var window: UIWindow?
15 |
16 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
17 | return true
18 | }
19 |
20 | }
21 |
22 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/BigBang.imageset/BigBang-184895179-web-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/BigBang.imageset/BigBang-184895179-web-2.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/BigBang.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "BigBang-184895179-web-2.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000001.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000001.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000001.imageset/burn000001.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000001.imageset/burn000001.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000002.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000002.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000002.imageset/burn000002.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000002.imageset/burn000002.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000003.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000003.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000003.imageset/burn000003.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000003.imageset/burn000003.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000004.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000004.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000004.imageset/burn000004.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000004.imageset/burn000004.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000005.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000005.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000005.imageset/burn000005.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000005.imageset/burn000005.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000006.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000006.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000006.imageset/burn000006.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000006.imageset/burn000006.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000007.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000007.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000007.imageset/burn000007.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000007.imageset/burn000007.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000008.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000008.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000008.imageset/burn000008.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000008.imageset/burn000008.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000009.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000009.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000009.imageset/burn000009.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000009.imageset/burn000009.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000010.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000010.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000010.imageset/burn000010.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000010.imageset/burn000010.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000011.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000011.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000011.imageset/burn000011.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000011.imageset/burn000011.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000012.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000012.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000012.imageset/burn000012.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000012.imageset/burn000012.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000013.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000013.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000013.imageset/burn000013.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000013.imageset/burn000013.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000014.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000014.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000014.imageset/burn000014.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000014.imageset/burn000014.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000015.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000015.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000015.imageset/burn000015.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000015.imageset/burn000015.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000016.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000016.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000016.imageset/burn000016.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000016.imageset/burn000016.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000017.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000017.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000017.imageset/burn000017.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000017.imageset/burn000017.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000018.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000018.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000018.imageset/burn000018.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000018.imageset/burn000018.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000019.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000019.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000019.imageset/burn000019.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000019.imageset/burn000019.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000020.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000020.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000020.imageset/burn000020.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000020.imageset/burn000020.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000021.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000021.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000021.imageset/burn000021.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000021.imageset/burn000021.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000022.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000022.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000022.imageset/burn000022.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000022.imageset/burn000022.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000023.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000023.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000023.imageset/burn000023.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000023.imageset/burn000023.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000024.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "burn000024.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/burn/burn000024.imageset/burn000024.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/burn/burn000024.imageset/burn000024.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/earth.imageset/10207-1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/earth.imageset/10207-1.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/earth.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "10207-1.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/something.imageset/7341d720.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/something.imageset/7341d720.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/something.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "7341d720.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000001.imageset/000001.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000001.imageset/000001.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000001.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000001.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000002.imageset/000002.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000002.imageset/000002.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000002.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000002.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000003.imageset/000003.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000003.imageset/000003.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000003.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000003.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000004.imageset/000004.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000004.imageset/000004.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000004.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000004.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000005.imageset/000005.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000005.imageset/000005.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000005.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000005.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000006.imageset/000006.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000006.imageset/000006.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000006.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000006.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000007.imageset/000007.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000007.imageset/000007.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000007.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000007.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000008.imageset/000008.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000008.imageset/000008.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000008.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000008.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000009.imageset/000009.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000009.imageset/000009.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000009.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000009.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000010.imageset/000010.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000010.imageset/000010.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000010.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000010.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000011.imageset/000011.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000011.imageset/000011.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000011.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000011.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000012.imageset/000012.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000012.imageset/000012.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000012.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000012.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000013.imageset/000013.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000013.imageset/000013.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000013.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000013.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000014.imageset/000014.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000014.imageset/000014.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000014.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000014.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000015.imageset/000015.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000015.imageset/000015.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000015.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000015.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000016.imageset/000016.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000016.imageset/000016.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000016.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000016.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000017.imageset/000017.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000017.imageset/000017.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000017.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000017.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000018.imageset/000018.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000018.imageset/000018.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000018.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000018.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000019.imageset/000019.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000019.imageset/000019.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000019.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000019.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000020.imageset/000020.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000020.imageset/000020.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000020.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000020.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000021.imageset/000021.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000021.imageset/000021.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000021.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000021.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000022.imageset/000022.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000022.imageset/000022.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000022.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000022.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000023.imageset/000023.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000023.imageset/000023.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000023.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000023.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000024.imageset/000024.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Assets.xcassets/warp/000024.imageset/000024.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/000024.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "000024.jpg",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Assets.xcassets/warp/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
(Interface Builder storyboard XML — angle-bracketed markup was stripped in this dump; nothing beyond the file name is recoverable.)
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/DepthImagePickableViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DepthImagePickableViewController.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2023/08/11.
6 | // Copyright © 2023 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import PhotosUI
11 |
12 | class DepthImagePickableViewController: UIViewController {
13 |
14 | // override
15 | func loadImage(at url: URL) {
16 | }
17 |
18 | @IBAction func pickerBtnTapped() {
19 | var configuration = PHPickerConfiguration()
20 | configuration.filter = .depthEffectPhotos
21 | configuration.selectionLimit = 1
22 | // configuration.preferredAssetRepresentationMode = .current
23 | let picker = PHPickerViewController(configuration: configuration)
24 | picker.delegate = self
25 | present(picker, animated: true, completion: nil)
26 | }
27 | }
28 |
29 | extension DepthImagePickableViewController: PHPickerViewControllerDelegate {
30 | func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
31 | defer {
32 | picker.dismiss(animated: true, completion: nil)
33 | }
34 | guard let provider = results.first?.itemProvider else { return }
35 | guard let typeIdentifier = provider.registeredContentTypes.contains(UTType.heic) ? UTType.heic.identifier : provider.registeredTypeIdentifiers.first else { return }
36 | guard provider.hasItemConformingToTypeIdentifier(typeIdentifier) else { return }
37 |
38 | provider.loadFileRepresentation(forTypeIdentifier: typeIdentifier) { [weak self] (url, error) in
39 | guard let self = self else { return }
40 | if let error = error {
41 | print("loadFileRepresentation failed with error: \(error)")
42 | }
43 | if let url = url {
44 | self.loadImage(at: url)
45 | }
46 | }
47 | }
48 | }
49 |
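The camera-roll and portrait-matte samples further down subclass this controller and supply the actual `loadImage(at:)` body. A minimal sketch of that pattern follows; the `MyDepthViewController` name and its logging body are illustrative, not part of the repo.

import UIKit
import ImageIO

// Hypothetical subclass: the PHPicker plumbing above is inherited; only the
// handling of the picked file URL has to be provided.
class MyDepthViewController: DepthImagePickableViewController {
    override func loadImage(at url: URL) {
        // e.g. open a CGImageSource so auxiliary depth/matte data can be read later
        guard let source = CGImageSourceCreateWithURL(url as CFURL, nil) else { return }
        print("Picked \(url.lastPathComponent) containing \(CGImageSourceGetCount(source)) image(s)")
    }
}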
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>$(DEVELOPMENT_LANGUAGE)</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>APPL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleVersion</key>
20 | <string>1</string>
21 | <key>LSRequiresIPhoneOS</key>
22 | <true/>
23 | <key>NSCameraUsageDescription</key>
24 | <string>Needs to use camera</string>
25 | <key>NSPhotoLibraryUsageDescription</key>
26 | <string>Needs to use pictures in the Photo Library</string>
27 | <key>UILaunchStoryboardName</key>
28 | <string>LaunchScreen</string>
29 | <key>UIMainStoryboardFile</key>
30 | <string>Main</string>
31 | <key>UIRequiredDeviceCapabilities</key>
32 | <array>
33 | <string>armv7</string>
34 | </array>
35 | <key>UISupportedInterfaceOrientations</key>
36 | <array>
37 | <string>UIInterfaceOrientationPortrait</string>
38 | </array>
39 | <key>UISupportedInterfaceOrientations~ipad</key>
40 | <array>
41 | <string>UIInterfaceOrientationPortrait</string>
42 | <string>UIInterfaceOrientationPortraitUpsideDown</string>
43 | <string>UIInterfaceOrientationLandscapeLeft</string>
44 | <string>UIInterfaceOrientationLandscapeRight</string>
45 | </array>
46 | </dict>
47 | </plist>
48 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Renderer/PassThrough.metal:
--------------------------------------------------------------------------------
1 | //
2 | // PassThrough.metal
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/08/29.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | #include <metal_stdlib>
9 | using namespace metal;
10 |
11 | typedef struct {
12 | float2 position [[ attribute(0) ]];
13 | float2 texCoord [[ attribute(1) ]];
14 | } Vertex;
15 |
16 | typedef struct {
17 | float4 position [[ position ]];
18 | float2 texCoord;
19 | } ColorInOut;
20 |
21 | vertex ColorInOut passThroughVertex(Vertex in [[ stage_in ]])
22 | {
23 | ColorInOut out;
24 | out.position = float4(in.position, 0.0, 1.0);
25 | out.texCoord = in.texCoord;
26 | return out;
27 | }
28 |
29 | fragment float4 passThroughFragment(ColorInOut in [[ stage_in ]],
30 | texture2d<float> texture [[ texture(0) ]])
31 | {
32 | constexpr sampler colorSampler;
33 | float4 color = texture.sample(colorSampler, in.texCoord);
34 | return color;
35 | }
36 |
37 |
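A sketch of how this shader pair could be wired into a render pipeline from Swift. The repo's MetalRenderer.swift (not reproduced in this excerpt) presumably does something similar; the interleaved float2-position/float2-texCoord vertex layout assumed here is an illustration, not taken from the repo.

import MetalKit

// Sketch: build a pipeline state around passThroughVertex / passThroughFragment.
func makePassThroughPipeline(device: MTLDevice, view: MTKView) throws -> MTLRenderPipelineState {
    guard let library = device.makeDefaultLibrary() else { fatalError("no default .metallib") }

    // [[ stage_in ]] with [[ attribute(n) ]] requires a matching vertex descriptor.
    let vertexDescriptor = MTLVertexDescriptor()
    vertexDescriptor.attributes[0].format = .float2                      // position
    vertexDescriptor.attributes[0].offset = 0
    vertexDescriptor.attributes[0].bufferIndex = 0
    vertexDescriptor.attributes[1].format = .float2                      // texCoord
    vertexDescriptor.attributes[1].offset = MemoryLayout<Float>.size * 2
    vertexDescriptor.attributes[1].bufferIndex = 0
    vertexDescriptor.layouts[0].stride = MemoryLayout<Float>.size * 4

    let descriptor = MTLRenderPipelineDescriptor()
    descriptor.vertexFunction = library.makeFunction(name: "passThroughVertex")
    descriptor.fragmentFunction = library.makeFunction(name: "passThroughFragment")
    descriptor.vertexDescriptor = vertexDescriptor
    descriptor.colorAttachments[0].pixelFormat = view.colorPixelFormat
    return try device.makeRenderPipelineState(descriptor: descriptor)
}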
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Resources/image-with-depth.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Resources/image-with-depth.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Resources/image-with-matte.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shu223/iOS-Depth-Sampler/ed5c96f9d753fce86a633f069defd3458e90a8c5/iOS-Depth-Sampler/Resources/image-with-matte.jpg
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/RootViewCell.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RootViewCell.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/11.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | class RootViewCell: UITableViewCell {
12 |
13 | @IBOutlet private weak var titleLabel: UILabel!
14 | @IBOutlet private weak var detailLabel: UILabel!
15 |
16 | override func awakeFromNib() {
17 | super.awakeFromNib()
18 | }
19 |
20 | override func setSelected(_ selected: Bool, animated: Bool) {
21 | super.setSelected(selected, animated: animated)
22 | }
23 |
24 | func showSample(_ sample: Sample) {
25 | titleLabel.text = sample.title
26 | detailLabel.text = sample.detail
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/RootViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RootViewController.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/11.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | class RootViewController: UITableViewController {
12 |
13 | private let dataSource = SampleDataSource()
14 |
15 | override func viewDidLoad() {
16 | super.viewDidLoad()
17 | }
18 |
19 | override func didReceiveMemoryWarning() {
20 | super.didReceiveMemoryWarning()
21 | }
22 |
23 | // MARK: UITableViewDataSource
24 |
25 | override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
26 | return dataSource.samples.count
27 | }
28 |
29 | override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
30 | guard let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath) as? RootViewCell else {fatalError()}
31 |
32 | let sample = dataSource.samples[(indexPath as NSIndexPath).row]
33 | cell.showSample(sample)
34 |
35 | return cell
36 | }
37 |
38 | override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
39 | let sample = dataSource.samples[(indexPath as NSIndexPath).row]
40 |
41 | navigationController?.pushViewController(sample.controller(), animated: true)
42 |
43 | tableView.deselectRow(at: indexPath, animated: true)
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/SampleDataSource.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SampleDataSource.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/11.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | struct Sample {
12 | let title: String
13 | let detail: String
14 | let classPrefix: String
15 |
16 | func controller() -> UIViewController {
17 | let storyboard = UIStoryboard(name: classPrefix, bundle: nil)
18 | guard let controller = storyboard.instantiateInitialViewController() else {fatalError()}
19 | controller.title = title
20 | return controller
21 | }
22 | }
23 |
24 | struct SampleDataSource {
25 | let samples = [
26 | Sample(
27 | title: "Real-time Depth",
28 | detail: "Depth visualization in real time using AV Foundation",
29 | classPrefix: "RealtimeDepth"
30 | ),
31 | Sample(
32 | title: "Real-time Depth Mask",
33 | detail: "Blending a background image with a mask created from depth",
34 | classPrefix: "RealtimeDepthMask"
35 | ),
36 | Sample(
37 | title: "Depth from Camera Roll",
38 | detail: "Depth visualization from pictures in the camera roll",
39 | classPrefix: "DepthFromCameraRoll"
40 | ),
41 | Sample(
42 | title: "Portrait Matte",
43 | detail: "Background removal demo using Portrait Matte",
44 | classPrefix: "PortraitMatte"
45 | ),
46 | Sample(
47 | title: "ARKit Depth",
48 | detail: "Depth visualization on ARKit",
49 | classPrefix: "ARKitDepth"
50 | ),
51 | Sample(
52 | title: "2D image in 3D space",
53 | detail: "A demo to render a 2D image in 3D space",
54 | classPrefix: "PointCloud"
55 | ),
56 | ]
57 | }
58 |
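`Sample.controller()` resolves its storyboard by `classPrefix`, so every entry above has a matching storyboard of the same name in the project. A hedged sketch of what registering an additional sample would look like; the `MyNewSample` name is illustrative and would require a storyboard of that name to exist.

// Illustrative only: this entry is not in the repo, and controller() would
// trap unless a "MyNewSample.storyboard" with an initial view controller exists.
let extraSample = Sample(
    title: "My New Sample",
    detail: "Example of how the list is extended",
    classPrefix: "MyNewSample"
)
let vc = extraSample.controller()   // instantiates MyNewSample.storyboard's initial view controller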
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/ARKit/ARKitDepth.storyboard:
--------------------------------------------------------------------------------
(Interface Builder storyboard XML — angle-bracketed markup was stripped in this dump; nothing beyond the file name is recoverable.)
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/ARKit/ARKitDepthViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ARKitDepthViewController.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/08/08.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import ARKit
10 | import MetalKit
11 |
12 | class ARKitDepthViewController: UIViewController {
13 |
14 | @IBOutlet weak var mtkView: MTKView!
15 | @IBOutlet weak var sceneView: ARSCNView!
16 | @IBOutlet weak var trackingStateLabel: UILabel!
17 |
18 | private var faceGeometry: ARSCNFaceGeometry!
19 | private let faceNode = SCNNode()
20 |
21 | private var renderer: MetalRenderer!
22 | private var depthImage: CIImage?
23 | private var currentDrawableSize: CGSize!
24 |
25 | override func viewDidLoad() {
26 | super.viewDidLoad()
27 |
28 | guard ARFaceTrackingConfiguration.isSupported else { fatalError() }
29 |
30 | sceneView.delegate = self
31 | sceneView.automaticallyUpdatesLighting = true
32 | sceneView.scene = SCNScene()
33 |
34 | guard let device = sceneView.device else { fatalError("This device doesn't support Metal.") }
35 | mtkView.device = device
36 | mtkView.backgroundColor = UIColor.clear
37 | mtkView.delegate = self
38 | renderer = MetalRenderer(metalDevice: device, renderDestination: mtkView)
39 | currentDrawableSize = mtkView.currentDrawable!.layer.drawableSize
40 |
41 | faceGeometry = ARSCNFaceGeometry(device: device, fillMesh: true)
42 | if let material = faceGeometry.firstMaterial {
43 | material.diffuse.contents = UIColor.green
44 | material.lightingModel = .physicallyBased
45 | }
46 | faceNode.geometry = faceGeometry
47 |
48 | }
49 |
50 | override func viewWillAppear(_ animated: Bool) {
51 | super.viewWillAppear(animated)
52 | let configuration = ARFaceTrackingConfiguration()
53 | configuration.isLightEstimationEnabled = true
54 | sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
55 | }
56 |
57 | override func viewWillDisappear(_ animated: Bool) {
58 | sceneView.session.pause()
59 | mtkView.delegate = nil
60 | super.viewWillDisappear(animated)
61 | }
62 | }
63 |
64 | extension ARKitDepthViewController: ARSCNViewDelegate {
65 | func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
66 | DispatchQueue.global(qos: .default).async {
67 | guard let frame = self.sceneView.session.currentFrame else { return }
68 | if let depthImage = frame.transformedDepthImage(targetSize: self.currentDrawableSize) {
69 | self.depthImage = depthImage
70 | }
71 | }
72 | }
73 |
74 | func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
75 | print("trackingState: \(camera.trackingState)")
76 | trackingStateLabel.text = camera.trackingState.description
77 | }
78 |
79 | func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
80 | print("anchor:\(anchor), node: \(node), node geometry: \(String(describing: node.geometry))")
81 | guard let faceAnchor = anchor as? ARFaceAnchor else { return }
82 |
83 | faceGeometry.update(from: faceAnchor.geometry)
84 |
85 | node.addChildNode(faceNode)
86 | }
87 |
88 | func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
89 | guard let faceAnchor = anchor as? ARFaceAnchor else { return }
90 |
91 | faceGeometry.update(from: faceAnchor.geometry)
92 | }
93 |
94 | func renderer(_ renderer: SCNSceneRenderer, didRemove node: SCNNode, for anchor: ARAnchor) {
95 | print("\(self.classForCoder)/" + #function)
96 | }
97 | }
98 |
99 | extension ARKitDepthViewController: MTKViewDelegate {
100 | func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
101 | currentDrawableSize = size
102 | }
103 |
104 | func draw(in view: MTKView) {
105 | if let image = depthImage {
106 | renderer.update(with: image)
107 | }
108 | }
109 | }
110 |
111 | extension ARFrame {
112 | func transformedDepthImage(targetSize: CGSize) -> CIImage? {
113 | guard let depthData = capturedDepthData else { return nil }
114 | return depthData.depthDataMap.transformedImage(targetSize: CGSize(width: targetSize.height, height: targetSize.width), rotationAngle: -CGFloat.pi/2)
115 | }
116 | }
117 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/ARKit/TrackingState+Description.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TrackingState+Description.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2017/08/25.
5 | // Copyright © 2017 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import ARKit
9 |
10 | extension ARCamera.TrackingState {
11 | public var description: String {
12 | switch self {
13 | case .notAvailable:
14 | return "TRACKING UNAVAILABLE"
15 | case .normal:
16 | return "TRACKING NORMAL"
17 | case .limited(let reason):
18 | switch reason {
19 | case .excessiveMotion:
20 | return "TRACKING LIMITED\nToo much camera movement"
21 | case .insufficientFeatures:
22 | return "TRACKING LIMITED\nNot enough surface detail"
23 | case .initializing:
24 | return "Tracking LIMITED\nInitialization in progress."
25 | case .relocalizing:
26 | return "Tracking LIMITED\nRelocalizing."
27 | @unknown default:
28 | fatalError()
29 | }
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/Depth-from-Camera-Roll/DepthFromCameraRollViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DepthFromCameraRollViewController.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/08/22.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import Photos
10 |
11 | class DepthFromCameraRollViewController: DepthImagePickableViewController {
12 |
13 | @IBOutlet weak var imageView: UIImageView!
14 | @IBOutlet weak var typeSegmentedCtl: UISegmentedControl!
15 |
16 | private var image: UIImage?
17 | private var disparityPixelBuffer: CVPixelBuffer?
18 | private var depthPixelBuffer: CVPixelBuffer?
19 |
20 | override func viewDidLoad() {
21 | super.viewDidLoad()
22 |
23 | PHPhotoLibrary.requestAuthorization({ status in
24 | switch status {
25 | case .authorized:
26 | let url = Bundle.main.url(forResource: "image-with-depth", withExtension: "jpg")!
27 | self.loadImage(at: url)
28 | default:
29 | fatalError()
30 | }
31 | })
32 | }
33 |
34 | override func loadImage(at url: URL) {
35 | let imageSource = CGImageSourceCreateWithURL(url as CFURL, nil)!
36 | processImageSource(imageSource)
37 | guard let image = UIImage(contentsOfFile: url.path) else { fatalError() }
38 | self.image = image
39 |
40 | DispatchQueue.main.async { [weak self] in
41 | guard let self = self else { return }
42 | typeSegmentedCtl.selectedSegmentIndex = 0
43 | }
44 | drawImage(image)
45 | }
46 |
47 | private func drawImage(_ image: UIImage?) {
48 | DispatchQueue.main.async {
49 | self.imageView.image = image
50 | }
51 | }
52 |
53 | private func draw(pixelBuffer: CVPixelBuffer?) {
54 | var image: UIImage? = nil
55 | if let pixelBuffer = pixelBuffer {
56 | if let depthMapImage = UIImage(pixelBuffer: pixelBuffer) {
57 | image = depthMapImage
58 | }
59 | // Histogram Equalization
60 | // if let cgImage = image?.cgImage {
61 | // var ciImage = CIImage(cgImage: cgImage)
62 | // ciImage = ciImage.applyingFilter("YUCIHistogramEqualization")
63 | // image = UIImage(ciImage: ciImage)
64 | // }
65 | }
66 | drawImage(image)
67 | }
68 |
69 | private func processImageSource(_ imageSource: CGImageSource) {
70 | self.disparityPixelBuffer = imageSource.getDisparityData()?.depthDataMap
71 | self.depthPixelBuffer = imageSource.getDepthData()?.depthDataMap
72 | }
73 |
74 | private func updateView() {
75 | switch typeSegmentedCtl.selectedSegmentIndex {
76 | case 0:
77 | drawImage(image)
78 | case 1:
79 | draw(pixelBuffer: disparityPixelBuffer)
80 | case 2:
81 | draw(pixelBuffer: depthPixelBuffer)
82 | default:
83 | fatalError()
84 | }
85 | }
86 |
87 | // MARK: - Actions
88 |
89 | @IBAction func typeSegmentChanged(_ sender: UISegmentedControl) {
90 | updateView()
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/PointCloud/PointCloud.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PointCloud.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/09/14.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 | // Reference: https://github.com/eugeneu/PoindCloudRenderer
8 |
9 | import SceneKit
10 |
11 | struct PointCloudVertex {
12 | var x: Float, y: Float, z: Float
13 | var r: Float, g: Float, b: Float
14 | }
15 |
16 | @objc class PointCloud: NSObject {
17 |
18 | var points: [SCNVector3] = []
19 | var colors: [UInt8] = []
20 |
21 | public func pointCloudNode() -> SCNNode {
22 | var vertices = Array(repeating: PointCloudVertex(x: 0,y: 0,z: 0,r: 0,g: 0,b: 0), count: points.count)
23 |
24 | for i in 0...(points.count-1) {
25 | let p = points[i]
26 | vertices[i].x = Float(p.x)
27 | vertices[i].y = Float(p.y)
28 | vertices[i].z = Float(p.z)
29 | vertices[i].r = Float(colors[i * 4]) / 255.0
30 | vertices[i].g = Float(colors[i * 4 + 1]) / 255.0
31 | vertices[i].b = Float(colors[i * 4 + 2]) / 255.0
32 | }
33 |
34 | let node = buildNode(points: vertices)
35 | return node
36 | }
37 |
38 | private func buildNode(points: [PointCloudVertex]) -> SCNNode {
39 | let vertexData = NSData(
40 | bytes: points,
41 | length: MemoryLayout<PointCloudVertex>.size * points.count
42 | )
43 | let positionSource = SCNGeometrySource(
44 | data: vertexData as Data,
45 | semantic: SCNGeometrySource.Semantic.vertex,
46 | vectorCount: points.count,
47 | usesFloatComponents: true,
48 | componentsPerVector: 3,
49 | bytesPerComponent: MemoryLayout<Float>.size,
50 | dataOffset: 0,
51 | dataStride: MemoryLayout<PointCloudVertex>.size
52 | )
53 | let colorSource = SCNGeometrySource(
54 | data: vertexData as Data,
55 | semantic: SCNGeometrySource.Semantic.color,
56 | vectorCount: points.count,
57 | usesFloatComponents: true,
58 | componentsPerVector: 3,
59 | bytesPerComponent: MemoryLayout<Float>.size,
60 | dataOffset: MemoryLayout<Float>.size * 3,
61 | dataStride: MemoryLayout<PointCloudVertex>.size
62 | )
63 | let element = SCNGeometryElement(
64 | data: nil,
65 | primitiveType: .point,
66 | primitiveCount: points.count,
67 | bytesPerIndex: MemoryLayout<Int>.size
68 | )
69 |
70 | // for bigger dots
71 | element.pointSize = 1
72 | element.minimumPointScreenSpaceRadius = 1
73 | element.maximumPointScreenSpaceRadius = 7
74 |
75 | let pointsGeometry = SCNGeometry(sources: [positionSource, colorSource], elements: [element])
76 |
77 | return SCNNode(geometry: pointsGeometry)
78 | }
79 | }
80 |
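A small usage sketch with illustrative values: fill `points` with XYZ positions and `colors` with four RGBA bytes per point, then attach the generated node to a scene.

import SceneKit

let cloud = PointCloud()
cloud.points = [SCNVector3(0, 0, -1), SCNVector3(0.1, 0, -1)]
cloud.colors = [255, 0, 0, 255,   0, 255, 0, 255]   // RGBA per point; only RGB is read
let scene = SCNScene()
scene.rootNode.addChildNode(cloud.pointCloudNode())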
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/Portrait-Matte/PortraitMatteViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PortraitMatteViewController.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/08/22.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import Photos
10 |
11 | class PortraitMatteViewController: DepthImagePickableViewController {
12 |
13 | @IBOutlet weak var imageView: UIImageView!
14 | @IBOutlet weak var typeSegmentedCtl: UISegmentedControl!
15 |
16 | private var image: UIImage?
17 | private var imageSource: CGImageSource? {
18 | didSet {
19 | let isEnabled = imageSource != nil
20 | DispatchQueue.main.async(execute: {
21 | self.typeSegmentedCtl.isEnabled = isEnabled
22 | })
23 | }
24 | }
25 | private var mattePixelBuffer: CVPixelBuffer?
26 |
27 | override func viewDidLoad() {
28 | super.viewDidLoad()
29 |
30 | resetControls()
31 |
32 | PHPhotoLibrary.requestAuthorization({ status in
33 | switch status {
34 | case .authorized:
35 | let url = Bundle.main.url(forResource: "image-with-matte", withExtension: "jpg")!
36 | self.loadImage(at: url)
37 | default:
38 | fatalError()
39 | }
40 | })
41 | }
42 |
43 | override func loadImage(at url: URL) {
44 | imageSource = CGImageSourceCreateWithURL(url as CFURL, nil)!
45 | getPortraitMatte()
46 | guard let image = UIImage(contentsOfFile: url.path) else { fatalError() }
47 | self.image = image
48 |
49 | DispatchQueue.main.async { [weak self] in
50 | guard let self = self else { return }
51 | typeSegmentedCtl.selectedSegmentIndex = 0
52 | }
53 | drawImage(image)
54 | }
55 |
56 | private func showNoPortraitMatteAlert() {
57 | UIAlertController.showAlert(title: "No Portrait Matte", message: "This picture doesn't have portrait matte info. Please take a picture of a HUMAN with PORTRAIT mode.", on: self)
58 | }
59 |
60 | private func resetControls() {
61 | typeSegmentedCtl.isEnabled = false
62 | }
63 |
64 | private func drawImage(_ image: UIImage?) {
65 | DispatchQueue.main.async {
66 | self.imageView.image = image
67 | }
68 | }
69 |
70 | private func draw(pixelBuffer: CVPixelBuffer?) {
71 | var image: UIImage? = nil
72 | if let pixelBuffer = pixelBuffer {
73 | if let depthMapImage = UIImage(pixelBuffer: pixelBuffer) {
74 | image = depthMapImage
75 | }
76 | }
77 | drawImage(image)
78 | }
79 |
80 | private func getPortraitMatte() {
81 | var depthDataMap: CVPixelBuffer? = nil
82 | if let matteData = imageSource?.getPortraitEffectsMatteData() {
83 | depthDataMap = matteData.mattingImage
84 | }
85 | mattePixelBuffer = depthDataMap
86 | }
87 |
88 | private func updateView() {
89 | switch typeSegmentedCtl.selectedSegmentIndex {
90 | case 0:
91 | drawImage(image)
92 | case 1:
93 | guard let matte = mattePixelBuffer else {
94 | showNoPortraitMatteAlert()
95 | return
96 | }
97 | draw(pixelBuffer: matte)
98 | case 2:
99 | guard let cgOriginalImage = image?.cgImage else { return }
100 | guard let matte = mattePixelBuffer else {
101 | showNoPortraitMatteAlert()
102 | return
103 | }
104 | let orgImage = CIImage(cgImage: cgOriginalImage)
105 | let maskImage = CIImage(cvPixelBuffer: matte).resizeToSameSize(as: orgImage)
106 | let filter = CIFilter(name: "CIBlendWithMask", parameters: [
107 | kCIInputImageKey: orgImage,
108 | kCIInputMaskImageKey: maskImage])!
109 | let outputImage = filter.outputImage!
110 | drawImage(UIImage(ciImage: outputImage))
111 | default:
112 | fatalError()
113 | }
114 | }
115 |
116 | // MARK: - Actions
117 |
118 | @IBAction func typeSegmentChanged(_ sender: UISegmentedControl) {
119 | updateView()
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Samples/Realtime-Depth/RealtimeDepthViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RealtimeDepthViewController.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/08/20.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import MetalKit
10 | import AVFoundation
11 |
12 | class RealtimeDepthViewController: UIViewController {
13 |
14 | @IBOutlet weak var previewView: UIView!
15 | @IBOutlet weak var mtkView: MTKView!
16 | @IBOutlet weak var filterSwitch: UISwitch!
17 | @IBOutlet weak var disparitySwitch: UISwitch!
18 | @IBOutlet weak var equalizeSwitch: UISwitch!
19 |
20 | private var videoCapture: VideoCapture!
21 | var currentCameraType: CameraType = .back(true)
22 | private let serialQueue = DispatchQueue(label: "com.shu223.iOS-Depth-Sampler.queue")
23 |
24 | private var renderer: MetalRenderer!
25 | private var depthImage: CIImage?
26 | private var currentDrawableSize: CGSize!
27 |
28 | private var videoImage: CIImage?
29 |
30 | override func viewDidLoad() {
31 | super.viewDidLoad()
32 |
33 | let device = MTLCreateSystemDefaultDevice()!
34 | mtkView.device = device
35 | mtkView.backgroundColor = UIColor.clear
36 | mtkView.delegate = self
37 | renderer = MetalRenderer(metalDevice: device, renderDestination: mtkView)
38 | currentDrawableSize = mtkView.currentDrawable!.layer.drawableSize
39 |
40 | videoCapture = VideoCapture(cameraType: currentCameraType,
41 | preferredSpec: nil,
42 | previewContainer: previewView.layer)
43 |
44 | videoCapture.syncedDataBufferHandler = { [weak self] videoPixelBuffer, depthData, face in
45 | guard let self = self else { return }
46 |
47 | self.videoImage = CIImage(cvPixelBuffer: videoPixelBuffer)
48 |
49 | var useDisparity: Bool = false
50 | var applyHistoEq: Bool = false
51 | DispatchQueue.main.sync(execute: {
52 | useDisparity = self.disparitySwitch.isOn
53 | applyHistoEq = self.equalizeSwitch.isOn
54 | })
55 |
56 | self.serialQueue.async {
57 | guard let depthData = useDisparity ? depthData?.convertToDisparity() : depthData else { return }
58 |
59 | guard let ciImage = depthData.depthDataMap.transformedImage(targetSize: self.currentDrawableSize, rotationAngle: 0) else { return }
60 | self.depthImage = applyHistoEq ? ciImage.applyingFilter("YUCIHistogramEqualization") : ciImage
61 | }
62 | }
63 | videoCapture.setDepthFilterEnabled(filterSwitch.isOn)
64 | }
65 |
66 | override func viewWillAppear(_ animated: Bool) {
67 | super.viewWillAppear(animated)
68 | guard let videoCapture = videoCapture else {return}
69 | videoCapture.startCapture()
70 | }
71 |
72 | override func viewDidLayoutSubviews() {
73 | super.viewDidLayoutSubviews()
74 | guard let videoCapture = videoCapture else {return}
75 | videoCapture.resizePreview()
76 | }
77 |
78 | override func viewWillDisappear(_ animated: Bool) {
79 | guard let videoCapture = videoCapture else {return}
80 | videoCapture.imageBufferHandler = nil
81 | videoCapture.stopCapture()
82 | mtkView.delegate = nil
83 | super.viewWillDisappear(animated)
84 | }
85 |
86 | // MARK: - Actions
87 |
88 | @IBAction func cameraSwitchBtnTapped(_ sender: UIButton) {
89 | switch currentCameraType {
90 | case .back:
91 | currentCameraType = .front(true)
92 | case .front:
93 | currentCameraType = .back(true)
94 | }
95 | videoCapture.changeCamera(with: currentCameraType)
96 | }
97 |
98 | @IBAction func filterSwitched(_ sender: UISwitch) {
99 | videoCapture.setDepthFilterEnabled(sender.isOn)
100 | }
101 | }
102 |
103 | extension RealtimeDepthViewController: MTKViewDelegate {
104 | func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
105 | currentDrawableSize = size
106 | }
107 |
108 | func draw(in view: MTKView) {
109 | if let image = depthImage {
110 | renderer.update(with: image)
111 | }
112 | }
113 | }
114 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Utils/AVDepthData+Utils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVDepthData+Utils.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/12.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | extension AVDepthData {
12 |
13 | func convertToDepth() -> AVDepthData {
14 | let targetType: OSType
15 | switch depthDataType {
16 | case kCVPixelFormatType_DisparityFloat16:
17 | targetType = kCVPixelFormatType_DepthFloat16
18 | case kCVPixelFormatType_DisparityFloat32:
19 | targetType = kCVPixelFormatType_DepthFloat32
20 | default:
21 | return self
22 | }
23 | return converting(toDepthDataType: targetType)
24 | }
25 |
26 | func convertToDisparity() -> AVDepthData {
27 | let targetType: OSType
28 | switch depthDataType {
29 | case kCVPixelFormatType_DepthFloat16:
30 | targetType = kCVPixelFormatType_DisparityFloat16
31 | case kCVPixelFormatType_DepthFloat32:
32 | targetType = kCVPixelFormatType_DisparityFloat32
33 | default:
34 | return self
35 | }
36 | return converting(toDepthDataType: targetType)
37 | }
38 | }
39 |
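RealtimeDepthViewController above calls `convertToDisparity()` before visualizing. A minimal sketch of how these helpers might be used, assuming `depthData` comes from an AVCaptureDepthDataOutput or an image source; the function name is illustrative.

import AVFoundation

// Sketch: normalize whatever the pipeline delivered to the preferred representation.
func visualizableMap(from depthData: AVDepthData, preferDisparity: Bool) -> CVPixelBuffer {
    // Disparity (1/distance) usually reads better on screen: nearer pixels are brighter.
    let converted = preferDisparity ? depthData.convertToDisparity()
                                    : depthData.convertToDepth()
    return converted.depthDataMap
}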
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Utils/CIImage+Utils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CIImage+Utils.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/14.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | extension CIImage {
12 | func resizeToSameSize(as anotherImage: CIImage) -> CIImage {
13 | let size1 = extent.size
14 | let size2 = anotherImage.extent.size
15 | let transform = CGAffineTransform(scaleX: size2.width / size1.width, y: size2.height / size1.height)
16 | return transformed(by: transform)
17 | }
18 |
19 | func createCGImage() -> CGImage {
20 | let context = CIContext(options: nil)
21 | guard let cgImage = context.createCGImage(self, from: extent) else { fatalError() }
22 | return cgImage
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Utils/CVPixelBuffer+CIImage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CVPixelBuffer+CIImage.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/12.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import CoreVideo
10 | import CoreImage
11 |
12 | extension CVPixelBuffer {
13 | func transformedImage(targetSize: CGSize, rotationAngle: CGFloat) -> CIImage? {
14 | let image = CIImage(cvPixelBuffer: self, options: [:])
15 | let scaleFactor = Float(targetSize.width) / Float(image.extent.width)
16 | return image.transformed(by: CGAffineTransform(rotationAngle: rotationAngle)).applyingFilter("CIBicubicScaleTransform", parameters: ["inputScale": scaleFactor])
17 | }
18 | }
19 |
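Both real-time samples pass the MTKView's drawable size into this helper. A sketch of typical use, applying the quarter-turn rotation the ARKit sample uses for portrait orientation; the function name and parameters are illustrative.

import CoreVideo
import CoreImage
import UIKit

// Sketch: scale a depth map to the on-screen drawable size and wrap it for display.
func depthPreview(from buffer: CVPixelBuffer, drawableSize: CGSize) -> UIImage? {
    guard let scaled = buffer.transformedImage(targetSize: drawableSize,
                                               rotationAngle: -CGFloat.pi / 2) else { return nil }
    return UIImage(ciImage: scaled)
}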
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Utils/PhotosUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PhotosUtils.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/12.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import Photos
10 | import UIKit
11 |
12 | extension PHAsset {
13 | class func fetchAssetsWithDepth() -> [PHAsset] {
14 | let resultCollections = PHAssetCollection.fetchAssetCollections(
15 | with: .smartAlbum,
16 | subtype: .smartAlbumDepthEffect,
17 | options: nil)
18 | var assets: [PHAsset] = []
19 | resultCollections.enumerateObjects({ collection, index, stop in
20 | let result = PHAsset.fetchAssets(in: collection, options: nil)
21 | result.enumerateObjects({ asset, index, stop in
22 | assets.append(asset)
23 | })
24 | })
25 | return assets
26 | }
27 |
28 | func requestColorImage(handler: @escaping (UIImage?) -> Void) {
29 | PHImageManager.default().requestImage(for: self, targetSize: PHImageManagerMaximumSize, contentMode: PHImageContentMode.aspectFit, options: nil) { (image, info) in
30 | handler(image)
31 | }
32 | }
33 |
34 | func hasPortraitMatte() -> Bool {
35 | var result: Bool = false
36 | let semaphore = DispatchSemaphore(value: 0)
37 | requestContentEditingInput(with: nil) { contentEditingInput, info in
38 | let imageSource = contentEditingInput?.createImageSource()
39 | result = imageSource?.getPortraitEffectsMatteData() != nil
40 | semaphore.signal()
41 | }
42 | semaphore.wait()
43 | return result
44 | }
45 | }
46 |
47 | extension PHContentEditingInput {
48 | func createDepthImage() -> CIImage {
49 | guard let url = fullSizeImageURL else { fatalError() }
50 | return CIImage(contentsOf: url, options: [CIImageOption.auxiliaryDisparity : true])!
51 | }
52 |
53 | func createImageSource() -> CGImageSource {
54 | guard let url = fullSizeImageURL else { fatalError() }
55 | return CGImageSourceCreateWithURL(url as CFURL, nil)!
56 | }
57 | }
58 |
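A sketch of how these helpers combine. It assumes photo-library authorization has already been granted, and runs off the main thread because `hasPortraitMatte()` blocks on a semaphore while Photos calls back.

import Photos

DispatchQueue.global(qos: .userInitiated).async {
    let assets = PHAsset.fetchAssetsWithDepth()
    for asset in assets where asset.hasPortraitMatte() {
        asset.requestColorImage { image in
            print("Matte-capable asset \(asset.localIdentifier), image size: \(String(describing: image?.size))")
        }
    }
}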
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Utils/UIAlertController+Utils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UIAlertController+Utils.swift
3 | // iOS-Depth-Sampler
4 | //
5 | // Created by Shuichi Tsutsumi on 2018/09/12.
6 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | extension UIAlertController {
12 |
13 | class func showAlert(title: String, message: String, on viewController: UIViewController) {
14 | let alert = UIAlertController(title: title, message: message,
15 | preferredStyle: .alert)
16 | let okAction = UIAlertAction(title: "OK", style: .cancel, handler: nil)
17 | alert.addAction(okAction)
18 | viewController.present(alert, animated: true, completion: nil)
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/Utils/UIImage+Utils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UIImage+Utils.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 2018/08/28.
5 | // Copyright © 2018 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | //import VideoToolbox
10 |
11 | extension UIImage {
12 | // https://github.com/hollance/CoreMLHelpers
13 | // NOTE: This only works for RGB pixel buffers, not for grayscale.
14 | // public convenience init?(pixelBuffer: CVPixelBuffer) {
15 | // var cgImage: CGImage?
16 | // VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
17 | //
18 | // if let cgImage = cgImage {
19 | // self.init(cgImage: cgImage)
20 | // } else {
21 | // return nil
22 | // }
23 | // }
24 | //
25 | // /**
26 | // Creates a new UIImage from a CVPixelBuffer, using Core Image.
27 | // */
28 | // public convenience init?(pixelBuffer: CVPixelBuffer, context: CIContext) {
29 | // let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
30 | // let rect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer),
31 | // height: CVPixelBufferGetHeight(pixelBuffer))
32 | // if let cgImage = context.createCGImage(ciImage, from: rect) {
33 | // self.init(cgImage: cgImage)
34 | // } else {
35 | // return nil
36 | // }
37 | // }
38 |
39 | public convenience init?(pixelBuffer: CVPixelBuffer) {
40 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
41 | let pixelBufferWidth = CGFloat(CVPixelBufferGetWidth(pixelBuffer))
42 | let pixelBufferHeight = CGFloat(CVPixelBufferGetHeight(pixelBuffer))
43 | let imageRect:CGRect = CGRect(x: 0, y: 0, width: pixelBufferWidth, height: pixelBufferHeight)
44 | let ciContext = CIContext.init()
45 | guard let cgImage = ciContext.createCGImage(ciImage, from: imageRect) else {
46 | return nil
47 | }
48 | self.init(cgImage: cgImage)
49 | }
50 |
51 | func adjustedCIImage(targetSize: CGSize) -> CIImage? {
52 | guard let cgImage = cgImage else { fatalError() }
53 |
54 | let imageWidth = cgImage.width
55 | let imageHeight = cgImage.height
56 |
57 | // Video preview is running at 1280x720. Downscale background to same resolution
58 | let videoWidth = Int(targetSize.width)
59 | let videoHeight = Int(targetSize.height)
60 |
61 | let scaleX = CGFloat(imageWidth) / CGFloat(videoWidth)
62 | let scaleY = CGFloat(imageHeight) / CGFloat(videoHeight)
63 |
64 | let scale = min(scaleX, scaleY)
65 |
66 | // crop the image to have the right aspect ratio
67 | let cropSize = CGSize(width: CGFloat(videoWidth) * scale, height: CGFloat(videoHeight) * scale)
68 | let croppedImage = cgImage.cropping(to: CGRect(origin: CGPoint(
69 | x: (imageWidth - Int(cropSize.width)) / 2,
70 | y: (imageHeight - Int(cropSize.height)) / 2), size: cropSize))
71 |
72 | let colorSpace = CGColorSpaceCreateDeviceRGB()
73 | guard let context = CGContext(data: nil,
74 | width: videoWidth,
75 | height: videoHeight,
76 | bitsPerComponent: 8,
77 | bytesPerRow: 0,
78 | space: colorSpace,
79 | bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) else {
80 | print("error")
81 | return nil
82 | }
83 |
84 | let bounds = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: videoWidth, height: videoHeight))
85 | context.clear(bounds)
86 |
87 | context.draw(croppedImage!, in: bounds)
88 |
89 | guard let scaledImage = context.makeImage() else {
90 | print("failed")
91 | return nil
92 | }
93 |
94 | return CIImage(cgImage: scaledImage)
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/VideoCapture/AVCaptureDevice+Extension.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVCaptureDevice+Extension.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 4/3/16.
5 | // Copyright © 2016 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import AVFoundation
9 |
10 | extension AVCaptureDevice {
11 | private func formatWithHighestResolution(_ availableFormats: [AVCaptureDevice.Format]) -> AVCaptureDevice.Format?
12 | {
13 | var maxWidth: Int32 = 0
14 | var selectedFormat: AVCaptureDevice.Format?
15 | for format in availableFormats {
16 | let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
17 | let width = dimensions.width
18 | if width >= maxWidth {
19 | maxWidth = width
20 | selectedFormat = format
21 | }
22 | }
23 | return selectedFormat
24 | }
25 |
26 | func selectDepthFormat() {
27 | let availableFormats = formats.filter { format -> Bool in
28 | let validDepthFormats = format.supportedDepthDataFormats.filter{ depthFormat in
29 | return CMFormatDescriptionGetMediaSubType(depthFormat.formatDescription) == kCVPixelFormatType_DepthFloat32
30 | }
31 | return validDepthFormats.count > 0
32 | }
33 | guard let selectedFormat = formatWithHighestResolution(availableFormats) else { fatalError() }
34 |
35 | let depthFormats = selectedFormat.supportedDepthDataFormats
36 | let depth32formats = depthFormats.filter {
37 | CMFormatDescriptionGetMediaSubType($0.formatDescription) == kCVPixelFormatType_DepthFloat32
38 | }
39 | guard !depth32formats.isEmpty else { fatalError() }
40 | let selectedDepthFormat = depth32formats.max(by: {
41 | CMVideoFormatDescriptionGetDimensions($0.formatDescription).width
42 | < CMVideoFormatDescriptionGetDimensions($1.formatDescription).width
43 | })!
44 |
45 | print("selected format: \(selectedFormat), depth format: \(selectedDepthFormat)")
46 | try! lockForConfiguration()
47 | activeFormat = selectedFormat
48 | activeDepthDataFormat = selectedDepthFormat
49 | unlockForConfiguration()
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/VideoCapture/VideoCameraType.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoCameraType.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 4/3/16.
5 | // Copyright © 2016 Shuichi Tsutsumi. All rights reserved.
6 | //
7 |
8 | import AVFoundation
9 |
10 | enum CameraType {
11 | case back(Bool)
12 | case front(Bool)
13 |
14 | func captureDevice() -> AVCaptureDevice {
15 | let devices: [AVCaptureDevice]
16 | switch self {
17 | case .front(let requireDepth):
18 | var deviceTypes: [AVCaptureDevice.DeviceType] = [.builtInTrueDepthCamera]
19 | if !requireDepth {
20 | deviceTypes.append(.builtInWideAngleCamera)
21 | }
22 | devices = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: .video, position: .front).devices
23 | case .back(let requireDepth):
24 | var deviceTypes: [AVCaptureDevice.DeviceType] = [.builtInDualCamera]
25 | if !requireDepth {
26 | deviceTypes.append(contentsOf: [.builtInWideAngleCamera, .builtInTelephotoCamera])
27 | }
28 | devices = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: .video, position: .back).devices
29 | }
30 | guard let device = devices.first else {
31 | return AVCaptureDevice.default(for: .video)!
32 | }
33 | return device
34 | }
35 | }
36 |
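A sketch combining this with the `selectDepthFormat()` extension above; VideoCapture (not reproduced in this excerpt) presumably does the equivalent internally when it configures its session.

import AVFoundation

// Resolve a depth-capable back camera and lock in its best Float32 depth format.
let device = CameraType.back(true).captureDevice()
device.selectDepthFormat()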
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/iOS-Depth-Sampler-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // Use this file to import your target's public headers that you would like to expose to Swift.
3 | //
4 |
--------------------------------------------------------------------------------
/iOS-Depth-Sampler/iOS-Depth-Sampler.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>com.apple.security.app-sandbox</key>
6 | <true/>
7 | <key>com.apple.security.device.camera</key>
8 | <true/>
9 | <key>com.apple.security.network.client</key>
10 | <true/>
11 | <key>com.apple.security.personal-information.photos-library</key>
12 | <true/>
13 | </dict>
14 | </plist>
15 |
--------------------------------------------------------------------------------