├── .gitignore
├── .travis.yml
├── Example
│   ├── Podfile
│   ├── Podfile.lock
│   ├── Pods
│   │   ├── Local Podspecs
│   │   │   └── SuperResolutionKit.podspec.json
│   │   ├── Manifest.lock
│   │   ├── Pods.xcodeproj
│   │   │   └── project.pbxproj
│   │   └── Target Support Files
│   │       ├── Pods-SuperResolutionKit_Example
│   │       │   ├── Info.plist
│   │       │   ├── Pods-SuperResolutionKit_Example-acknowledgements.markdown
│   │       │   ├── Pods-SuperResolutionKit_Example-acknowledgements.plist
│   │       │   ├── Pods-SuperResolutionKit_Example-dummy.m
│   │       │   ├── Pods-SuperResolutionKit_Example-frameworks.sh
│   │       │   ├── Pods-SuperResolutionKit_Example-resources.sh
│   │       │   ├── Pods-SuperResolutionKit_Example-umbrella.h
│   │       │   ├── Pods-SuperResolutionKit_Example.debug.xcconfig
│   │       │   ├── Pods-SuperResolutionKit_Example.modulemap
│   │       │   └── Pods-SuperResolutionKit_Example.release.xcconfig
│   │       ├── Pods-SuperResolutionKit_Tests
│   │       │   ├── Info.plist
│   │       │   ├── Pods-SuperResolutionKit_Tests-acknowledgements.markdown
│   │       │   ├── Pods-SuperResolutionKit_Tests-acknowledgements.plist
│   │       │   ├── Pods-SuperResolutionKit_Tests-dummy.m
│   │       │   ├── Pods-SuperResolutionKit_Tests-frameworks.sh
│   │       │   ├── Pods-SuperResolutionKit_Tests-resources.sh
│   │       │   ├── Pods-SuperResolutionKit_Tests-umbrella.h
│   │       │   ├── Pods-SuperResolutionKit_Tests.debug.xcconfig
│   │       │   ├── Pods-SuperResolutionKit_Tests.modulemap
│   │       │   └── Pods-SuperResolutionKit_Tests.release.xcconfig
│   │       └── SuperResolutionKit
│   │           ├── Info.plist
│   │           ├── ResourceBundle-SuperResolutionKit-Info.plist
│   │           ├── SuperResolutionKit-dummy.m
│   │           ├── SuperResolutionKit-prefix.pch
│   │           ├── SuperResolutionKit-umbrella.h
│   │           ├── SuperResolutionKit.modulemap
│   │           └── SuperResolutionKit.xcconfig
│   ├── SuperResolutionKit.xcodeproj
│   │   ├── project.pbxproj
│   │   ├── project.xcworkspace
│   │   │   └── contents.xcworkspacedata
│   │   └── xcshareddata
│   │       └── xcschemes
│   │           └── SuperResolutionKit-Example.xcscheme
│   ├── SuperResolutionKit.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcshareddata
│   │       └── IDEWorkspaceChecks.plist
│   ├── SuperResolutionKit
│   │   ├── AppDelegate.swift
│   │   ├── Base.lproj
│   │   │   ├── LaunchScreen.xib
│   │   │   └── Main.storyboard
│   │   ├── Images.xcassets
│   │   │   └── AppIcon.appiconset
│   │   │       └── Contents.json
│   │   ├── Info.plist
│   │   ├── ListViewController.swift
│   │   ├── MangaViewController.swift
│   │   ├── ViewController.swift
│   │   ├── images
│   │   │   ├── Blackjack
│   │   │   │   └── lr
│   │   │   │       ├── 01bj-page10.jpg
│   │   │   │       ├── 01bj-page11.jpg
│   │   │   │       ├── 01bj-page12.jpg
│   │   │   │       ├── 01bj-page13.jpg
│   │   │   │       ├── 01bj-page14.jpg
│   │   │   │       ├── 01bj-page15.jpg
│   │   │   │       ├── 01bj-page7.jpg
│   │   │   │       ├── 01bj-page8.jpg
│   │   │   │       └── 01bj-page9.jpg
│   │   │   ├── Penki
│   │   │   │   ├── hr
│   │   │   │   │   ├── p1.png
│   │   │   │   │   ├── p10.png
│   │   │   │   │   ├── p2.png
│   │   │   │   │   ├── p3.png
│   │   │   │   │   ├── p4.png
│   │   │   │   │   ├── p5.png
│   │   │   │   │   ├── p6.png
│   │   │   │   │   ├── p7.png
│   │   │   │   │   ├── p8.png
│   │   │   │   │   └── p9.png
│   │   │   │   └── lr
│   │   │   │       ├── p1.png
│   │   │   │       ├── p10.png
│   │   │   │       ├── p2.png
│   │   │   │       ├── p3.png
│   │   │   │       ├── p4.png
│   │   │   │       ├── p5.png
│   │   │   │       ├── p6.png
│   │   │   │       ├── p7.png
│   │   │   │       ├── p8.png
│   │   │   │       └── p9.png
│   │   │   └── Photos
│   │   │       ├── hr
│   │   │       │   ├── img_001_SRF_2_HR_1_org.png
│   │   │       │   └── lena_org.jpg
│   │   │       └── lr
│   │   │           ├── img_002_SRF_2_HR.png
│   │   │           ├── img_002_SRF_8_HR.png
│   │   │           ├── img_003_SRF_2_HR.png
│   │   │           ├── img_003_SRF_8_HR.png
│   │   │           ├── img_012_SRF_2_HR.png
│   │   │           ├── img_012_SRF_8_HR.png
│   │   │           ├── img_013_SRF_2_HR.png
│   │   │           └── img_013_SRF_8_HR.png
│   │   └── lr_imgs
│   │       ├── 01bj-page10.jpg
│   │       ├── 01bj-page11.jpg
│   │       ├── 01bj-page12.jpg
│   │       ├── 01bj-page13.jpg
│   │       ├── 01bj-page14.jpg
│   │       ├── 01bj-page15.jpg
│   │       ├── 01bj-page7.jpg
│   │       ├── 01bj-page8.jpg
│   │       └── 01bj-page9.jpg
│   └── Tests
│       ├── Info.plist
│       └── Tests.swift
├── LICENSE
├── README.md
├── SuperResolutionKit.podspec
├── SuperResolutionKit
│   ├── Assets
│   │   ├── .gitkeep
│   │   ├── FSRCNN.mlmodelc
│   │   │   ├── coremldata.bin
│   │   │   ├── model.espresso.net
│   │   │   ├── model.espresso.shape
│   │   │   ├── model.espresso.weights
│   │   │   └── model
│   │   │       └── coremldata.bin
│   │   ├── SRCNN-photo.mlmodelc
│   │   │   ├── coremldata.bin
│   │   │   ├── model.espresso.net
│   │   │   ├── model.espresso.shape
│   │   │   ├── model.espresso.weights
│   │   │   └── model
│   │   │       └── coremldata.bin
│   │   └── SRCNN.mlmodelc
│   │       ├── coremldata.bin
│   │       ├── model.espresso.net
│   │       ├── model.espresso.shape
│   │       ├── model.espresso.weights
│   │       └── model
│   │           └── coremldata.bin
│   └── Classes
│       ├── .gitkeep
│       ├── CoreMLHelpers
│       │   ├── Array.swift
│       │   ├── CVPixelBuffer+Helpers.swift
│       │   ├── MLMultiArray+Image.swift
│       │   ├── Math.swift
│       │   ├── MultiArray.swift
│       │   ├── NonMaxSuppression.swift
│       │   ├── Predictions.swift
│       │   └── UIImage+CVPixelBuffer.swift
│       ├── FSRCNNConverter.swift
│       ├── SRCNN.swift
│       ├── SRCNNConverter.swift
│       ├── SRCNNKit.h
│       └── UIImageView+SRCNN.swift
├── _Pods.xcodeproj
└── script
    ├── convert.py
    ├── coreml_convert.py
    ├── coreml_predict.py
    ├── dump_srcnn_inter_layer.py
    ├── dump_weight.py
    ├── fsrcnn
    │   ├── convert.py
    │   ├── pred_keras.py
    │   └── train.py
    ├── packages.txt
    ├── pdf2img
    │   └── pdf2png.workflow
    │       └── Contents
    │           ├── Info.plist
    │           ├── QuickLook
    │           │   └── Preview.png
    │           └── document.wflow
    ├── plot.py
    ├── pred.py
    ├── s3sync.py
    └── train.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # OS X
2 | .DS_Store
3 |
4 | # Xcode
5 | build/
6 | *.pbxuser
7 | !default.pbxuser
8 | *.mode1v3
9 | !default.mode1v3
10 | *.mode2v3
11 | !default.mode2v3
12 | *.perspectivev3
13 | !default.perspectivev3
14 | xcuserdata/
15 | *.xccheckout
16 | profile
17 | *.moved-aside
18 | DerivedData
19 | *.hmap
20 | *.ipa
21 |
22 | # Bundler
23 | .bundle
24 |
25 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
26 | # Carthage/Checkouts
27 |
28 | Carthage/Build
29 |
30 | # We recommend against adding the Pods directory to your .gitignore. However
31 | # you should judge for yourself, the pros and cons are mentioned at:
32 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-ignore-the-pods-directory-in-source-control
33 | #
34 | # Note: if you ignore the Pods directory, make sure to uncomment
35 | # `pod install` in .travis.yml
36 | #
37 | # Pods/
38 |
39 | data
40 | *.pyc
41 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | # references:
2 | # * https://www.objc.io/issues/6-build-tools/travis-ci/
3 | # * https://github.com/supermarin/xcpretty#usage
4 |
5 | osx_image: xcode7.3
6 | language: objective-c
7 | # cache: cocoapods
8 | # podfile: Example/Podfile
9 | # before_install:
10 | # - gem install cocoapods # Since Travis is not always on latest version
11 | # - pod install --project-directory=Example
12 | script:
13 | - set -o pipefail && xcodebuild test -enableCodeCoverage YES -workspace Example/SuperResolutionKit.xcworkspace -scheme SuperResolutionKit-Example -sdk iphonesimulator9.3 ONLY_ACTIVE_ARCH=NO | xcpretty
14 | - pod lib lint
15 |
--------------------------------------------------------------------------------
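Note that the CI image pinned above (osx_image: xcode7.3, -sdk iphonesimulator9.3) predates the iOS 11.0 deployment target and Swift 4.1 declared in the podspec, so the xcodebuild step will likely need a newer image and SDK to pass. A rough local equivalent of the same checks, assuming a current Xcode plus the cocoapods and xcpretty gems are installed (the simulator name is only a placeholder), is:

    gem install cocoapods xcpretty
    pod install --project-directory=Example
    set -o pipefail && xcodebuild test -enableCodeCoverage YES \
      -workspace Example/SuperResolutionKit.xcworkspace \
      -scheme SuperResolutionKit-Example \
      -destination 'platform=iOS Simulator,name=iPhone 8' | xcpretty
    pod lib lint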
/Example/Podfile:
--------------------------------------------------------------------------------
1 | use_frameworks!
2 | platform :ios, '11.0'
3 |
4 | target 'SuperResolutionKit_Example' do
5 | pod 'SuperResolutionKit', :path => '../'
6 |
7 | target 'SuperResolutionKit_Tests' do
8 | inherit! :search_paths
9 |
10 |
11 | end
12 | end
13 |
--------------------------------------------------------------------------------
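The Podfile above wires SuperResolutionKit up as a development pod (:path => '../'), so the example app always builds against the working copy at the repository root, and the SuperResolutionKit_Tests target only inherits its search paths. For comparison, a minimal consumer Podfile is sketched below, assuming the pod is published to the CocoaPods trunk under the same name; 'MyApp' and the '~> 0.1' requirement are placeholders:

    platform :ios, '11.0'
    use_frameworks!

    target 'MyApp' do
      pod 'SuperResolutionKit', '~> 0.1'
    end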
/Example/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - SuperResolutionKit (0.1.0)
3 |
4 | DEPENDENCIES:
5 | - SuperResolutionKit (from `../`)
6 |
7 | EXTERNAL SOURCES:
8 | SuperResolutionKit:
9 | :path: "../"
10 |
11 | SPEC CHECKSUMS:
12 | SuperResolutionKit: 8925ef36ad912a9f4d7d648d73ce4937f264281d
13 |
14 | PODFILE CHECKSUM: e8521b55d2f3428cc0d5dd6d81c51d3c9aaa590e
15 |
16 | COCOAPODS: 1.5.3
17 |
--------------------------------------------------------------------------------
/Example/Pods/Local Podspecs/SuperResolutionKit.podspec.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "SuperResolutionKit",
3 | "version": "0.1.0",
4 | "summary": "Super resolution implementation with Keras/CoreML",
5 | "description": "This is a super resolution implementation with Keras/CoreML",
6 | "homepage": "https://github.com/kenmaz/",
7 | "license": {
8 | "type": "MIT",
9 | "file": "LICENSE"
10 | },
11 | "authors": {
12 | "kenmaz": "kentaro.matsumae@gmail.com"
13 | },
14 | "source": {
15 | "git": "https://github.com/kenmaz/SuperResolutionKit.git",
16 | "tag": "0.1.0"
17 | },
18 | "platforms": {
19 | "ios": "11.0"
20 | },
21 | "swift_version": "4.1",
22 | "source_files": [
23 | "SuperResolutionKit/Classes/*",
24 | "SuperResolutionKit/Classes/CoreMLHelpers/*"
25 | ],
26 | "resource_bundles": {
27 | "SuperResolutionKit": [
28 | "SuperResolutionKit/Assets/*"
29 | ]
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
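This JSON file is the serialized copy of the podspec that CocoaPods caches under Pods/Local Podspecs when `pod install` resolves the development pod. Translated back into the Ruby podspec DSL, a sketch reconstructed from the JSON above (not a verbatim copy of the SuperResolutionKit.podspec at the repository root) looks like:

    Pod::Spec.new do |s|
      s.name             = 'SuperResolutionKit'
      s.version          = '0.1.0'
      s.summary          = 'Super resolution implementation with Keras/CoreML'
      s.description      = 'This is a super resolution implementation with Keras/CoreML'
      s.homepage         = 'https://github.com/kenmaz/'
      s.license          = { :type => 'MIT', :file => 'LICENSE' }
      s.author           = { 'kenmaz' => 'kentaro.matsumae@gmail.com' }
      s.source           = { :git => 'https://github.com/kenmaz/SuperResolutionKit.git', :tag => s.version.to_s }
      s.ios.deployment_target = '11.0'
      s.swift_version    = '4.1'
      s.source_files     = 'SuperResolutionKit/Classes/*', 'SuperResolutionKit/Classes/CoreMLHelpers/*'
      s.resource_bundles = { 'SuperResolutionKit' => ['SuperResolutionKit/Assets/*'] }
    end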
/Example/Pods/Manifest.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - SuperResolutionKit (0.1.0)
3 |
4 | DEPENDENCIES:
5 | - SuperResolutionKit (from `../`)
6 |
7 | EXTERNAL SOURCES:
8 | SuperResolutionKit:
9 | :path: "../"
10 |
11 | SPEC CHECKSUMS:
12 | SuperResolutionKit: 8925ef36ad912a9f4d7d648d73ce4937f264281d
13 |
14 | PODFILE CHECKSUM: e8521b55d2f3428cc0d5dd6d81c51d3c9aaa590e
15 |
16 | COCOAPODS: 1.5.3
17 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>en</string>
7 |   <key>CFBundleExecutable</key>
8 |   <string>${EXECUTABLE_NAME}</string>
9 |   <key>CFBundleIdentifier</key>
10 |   <string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
11 |   <key>CFBundleInfoDictionaryVersion</key>
12 |   <string>6.0</string>
13 |   <key>CFBundleName</key>
14 |   <string>${PRODUCT_NAME}</string>
15 |   <key>CFBundlePackageType</key>
16 |   <string>FMWK</string>
17 |   <key>CFBundleShortVersionString</key>
18 |   <string>1.0.0</string>
19 |   <key>CFBundleSignature</key>
20 |   <string>????</string>
21 |   <key>CFBundleVersion</key>
22 |   <string>${CURRENT_PROJECT_VERSION}</string>
23 |   <key>NSPrincipalClass</key>
24 |   <string></string>
25 | </dict>
26 | </plist>
27 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example-acknowledgements.markdown:
--------------------------------------------------------------------------------
1 | # Acknowledgements
2 | This application makes use of the following third party libraries:
3 |
4 | ## SuperResolutionKit
5 |
6 | Copyright (c) 2018 kenmaz <kentaro.matsumae@gmail.com>
7 |
8 | Permission is hereby granted, free of charge, to any person obtaining a copy
9 | of this software and associated documentation files (the "Software"), to deal
10 | in the Software without restriction, including without limitation the rights
11 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | copies of the Software, and to permit persons to whom the Software is
13 | furnished to do so, subject to the following conditions:
14 |
15 | The above copyright notice and this permission notice shall be included in
16 | all copies or substantial portions of the Software.
17 |
18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 | THE SOFTWARE.
25 |
26 | Generated by CocoaPods - https://cocoapods.org
27 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example-acknowledgements.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>PreferenceSpecifiers</key>
6 |   <array>
7 |     <dict>
8 |       <key>FooterText</key>
9 |       <string>This application makes use of the following third party libraries:</string>
10 |       <key>Title</key>
11 |       <string>Acknowledgements</string>
12 |       <key>Type</key>
13 |       <string>PSGroupSpecifier</string>
14 |     </dict>
15 |     <dict>
16 |       <key>FooterText</key>
17 |       <string>Copyright (c) 2018 kenmaz &lt;kentaro.matsumae@gmail.com&gt;
18 |
19 | Permission is hereby granted, free of charge, to any person obtaining a copy
20 | of this software and associated documentation files (the "Software"), to deal
21 | in the Software without restriction, including without limitation the rights
22 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
23 | copies of the Software, and to permit persons to whom the Software is
24 | furnished to do so, subject to the following conditions:
25 |
26 | The above copyright notice and this permission notice shall be included in
27 | all copies or substantial portions of the Software.
28 |
29 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
30 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
31 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
32 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
33 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
34 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
35 | THE SOFTWARE.
36 | </string>
37 |       <key>License</key>
38 |       <string>MIT</string>
39 |       <key>Title</key>
40 |       <string>SuperResolutionKit</string>
41 |       <key>Type</key>
42 |       <string>PSGroupSpecifier</string>
43 |     </dict>
44 |     <dict>
45 |       <key>FooterText</key>
46 |       <string>Generated by CocoaPods - https://cocoapods.org</string>
47 |       <key>Title</key>
48 |       <string></string>
49 |       <key>Type</key>
50 |       <string>PSGroupSpecifier</string>
51 |     </dict>
52 |   </array>
53 |   <key>StringsTable</key>
54 |   <string>Acknowledgements</string>
55 |   <key>Title</key>
56 |   <string>Acknowledgements</string>
57 | </dict>
58 | </plist>
59 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example-dummy.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | @interface PodsDummy_Pods_SuperResolutionKit_Example : NSObject
3 | @end
4 | @implementation PodsDummy_Pods_SuperResolutionKit_Example
5 | @end
6 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example-frameworks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | set -e
3 | set -u
4 | set -o pipefail
5 |
6 | if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
7 | # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
8 | # frameworks to, so exit 0 (signalling the script phase was successful).
9 | exit 0
10 | fi
11 |
12 | echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
13 | mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
14 |
15 | COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
16 | SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
17 |
18 | # Used as a return value for each invocation of `strip_invalid_archs` function.
19 | STRIP_BINARY_RETVAL=0
20 |
21 | # This protects against multiple targets copying the same framework dependency at the same time. The solution
22 | # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
23 | RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
24 |
25 | # Copies and strips a vendored framework
26 | install_framework()
27 | {
28 | if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
29 | local source="${BUILT_PRODUCTS_DIR}/$1"
30 | elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
31 | local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
32 | elif [ -r "$1" ]; then
33 | local source="$1"
34 | fi
35 |
36 | local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
37 |
38 | if [ -L "${source}" ]; then
39 | echo "Symlinked..."
40 | source="$(readlink "${source}")"
41 | fi
42 |
43 | # Use filter instead of exclude so missing patterns don't throw errors.
44 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
45 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
46 |
47 | local basename
48 | basename="$(basename -s .framework "$1")"
49 | binary="${destination}/${basename}.framework/${basename}"
50 | if ! [ -r "$binary" ]; then
51 | binary="${destination}/${basename}"
52 | fi
53 |
54 | # Strip invalid architectures so "fat" simulator / device frameworks work on device
55 | if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
56 | strip_invalid_archs "$binary"
57 | fi
58 |
59 | # Resign the code if required by the build settings to avoid unstable apps
60 | code_sign_if_enabled "${destination}/$(basename "$1")"
61 |
62 | # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
63 | if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
64 | local swift_runtime_libs
65 | swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
66 | for lib in $swift_runtime_libs; do
67 | echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
68 | rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
69 | code_sign_if_enabled "${destination}/${lib}"
70 | done
71 | fi
72 | }
73 |
74 | # Copies and strips a vendored dSYM
75 | install_dsym() {
76 | local source="$1"
77 | if [ -r "$source" ]; then
78 | # Copy the dSYM into a the targets temp dir.
79 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
80 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
81 |
82 | local basename
83 | basename="$(basename -s .framework.dSYM "$source")"
84 | binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
85 |
86 | # Strip invalid architectures so "fat" simulator / device frameworks work on device
87 | if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
88 | strip_invalid_archs "$binary"
89 | fi
90 |
91 | if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
92 | # Move the stripped file into its final destination.
93 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
94 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
95 | else
96 | # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
97 | touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
98 | fi
99 | fi
100 | }
101 |
102 | # Signs a framework with the provided identity
103 | code_sign_if_enabled() {
104 | if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
105 | # Use the current code_sign_identitiy
106 | echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
107 | local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
108 |
109 | if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
110 | code_sign_cmd="$code_sign_cmd &"
111 | fi
112 | echo "$code_sign_cmd"
113 | eval "$code_sign_cmd"
114 | fi
115 | }
116 |
117 | # Strip invalid architectures
118 | strip_invalid_archs() {
119 | binary="$1"
120 | # Get architectures for current target binary
121 | binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
122 | # Intersect them with the architectures we are building for
123 | intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
124 | # If there are no archs supported by this binary then warn the user
125 | if [[ -z "$intersected_archs" ]]; then
126 | echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
127 | STRIP_BINARY_RETVAL=0
128 | return
129 | fi
130 | stripped=""
131 | for arch in $binary_archs; do
132 | if ! [[ "${ARCHS}" == *"$arch"* ]]; then
133 | # Strip non-valid architectures in-place
134 | lipo -remove "$arch" -output "$binary" "$binary" || exit 1
135 | stripped="$stripped $arch"
136 | fi
137 | done
138 | if [[ "$stripped" ]]; then
139 | echo "Stripped $binary of architectures:$stripped"
140 | fi
141 | STRIP_BINARY_RETVAL=1
142 | }
143 |
144 |
145 | if [[ "$CONFIGURATION" == "Debug" ]]; then
146 | install_framework "${BUILT_PRODUCTS_DIR}/SuperResolutionKit/SuperResolutionKit.framework"
147 | fi
148 | if [[ "$CONFIGURATION" == "Release" ]]; then
149 | install_framework "${BUILT_PRODUCTS_DIR}/SuperResolutionKit/SuperResolutionKit.framework"
150 | fi
151 | if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
152 | wait
153 | fi
154 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example-resources.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | set -e
3 | set -u
4 | set -o pipefail
5 |
6 | if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
7 | # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
8 | # resources to, so exit 0 (signalling the script phase was successful).
9 | exit 0
10 | fi
11 |
12 | mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
13 |
14 | RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
15 | > "$RESOURCES_TO_COPY"
16 |
17 | XCASSET_FILES=()
18 |
19 | # This protects against multiple targets copying the same framework dependency at the same time. The solution
20 | # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
21 | RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
22 |
23 | case "${TARGETED_DEVICE_FAMILY:-}" in
24 | 1,2)
25 | TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
26 | ;;
27 | 1)
28 | TARGET_DEVICE_ARGS="--target-device iphone"
29 | ;;
30 | 2)
31 | TARGET_DEVICE_ARGS="--target-device ipad"
32 | ;;
33 | 3)
34 | TARGET_DEVICE_ARGS="--target-device tv"
35 | ;;
36 | 4)
37 | TARGET_DEVICE_ARGS="--target-device watch"
38 | ;;
39 | *)
40 | TARGET_DEVICE_ARGS="--target-device mac"
41 | ;;
42 | esac
43 |
44 | install_resource()
45 | {
46 | if [[ "$1" = /* ]] ; then
47 | RESOURCE_PATH="$1"
48 | else
49 | RESOURCE_PATH="${PODS_ROOT}/$1"
50 | fi
51 | if [[ ! -e "$RESOURCE_PATH" ]] ; then
52 | cat << EOM
53 | error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
54 | EOM
55 | exit 1
56 | fi
57 | case $RESOURCE_PATH in
58 | *.storyboard)
59 | echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
60 | ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
61 | ;;
62 | *.xib)
63 | echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
64 | ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
65 | ;;
66 | *.framework)
67 | echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
68 | mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
69 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
70 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
71 | ;;
72 | *.xcdatamodel)
73 | echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
74 | xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
75 | ;;
76 | *.xcdatamodeld)
77 | echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
78 | xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
79 | ;;
80 | *.xcmappingmodel)
81 | echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
82 | xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
83 | ;;
84 | *.xcassets)
85 | ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
86 | XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
87 | ;;
88 | *)
89 | echo "$RESOURCE_PATH" || true
90 | echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
91 | ;;
92 | esac
93 | }
94 |
95 | mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
96 | rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
97 | if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
98 | mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
99 | rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
100 | fi
101 | rm -f "$RESOURCES_TO_COPY"
102 |
103 | if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
104 | then
105 | # Find all other xcassets (this unfortunately includes those of path pods and other targets).
106 | OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
107 | while read line; do
108 | if [[ $line != "${PODS_ROOT}*" ]]; then
109 | XCASSET_FILES+=("$line")
110 | fi
111 | done <<<"$OTHER_XCASSETS"
112 |
113 | if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
114 | printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
115 | else
116 | printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
117 | fi
118 | fi
119 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example-umbrella.h:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 |
14 | FOUNDATION_EXPORT double Pods_SuperResolutionKit_ExampleVersionNumber;
15 | FOUNDATION_EXPORT const unsigned char Pods_SuperResolutionKit_ExampleVersionString[];
16 |
17 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example.debug.xcconfig:
--------------------------------------------------------------------------------
1 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES
2 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit"
3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
4 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
5 | OTHER_CFLAGS = $(inherited) -iquote "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit/SuperResolutionKit.framework/Headers"
6 | OTHER_LDFLAGS = $(inherited) -framework "SuperResolutionKit"
7 | OTHER_SWIFT_FLAGS = $(inherited) "-D" "COCOAPODS"
8 | PODS_BUILD_DIR = ${BUILD_DIR}
9 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
10 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
11 | PODS_ROOT = ${SRCROOT}/Pods
12 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example.modulemap:
--------------------------------------------------------------------------------
1 | framework module Pods_SuperResolutionKit_Example {
2 | umbrella header "Pods-SuperResolutionKit_Example-umbrella.h"
3 |
4 | export *
5 | module * { export * }
6 | }
7 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Example/Pods-SuperResolutionKit_Example.release.xcconfig:
--------------------------------------------------------------------------------
1 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES
2 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit"
3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
4 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
5 | OTHER_CFLAGS = $(inherited) -iquote "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit/SuperResolutionKit.framework/Headers"
6 | OTHER_LDFLAGS = $(inherited) -framework "SuperResolutionKit"
7 | OTHER_SWIFT_FLAGS = $(inherited) "-D" "COCOAPODS"
8 | PODS_BUILD_DIR = ${BUILD_DIR}
9 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
10 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
11 | PODS_ROOT = ${SRCROOT}/Pods
12 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>en</string>
7 |   <key>CFBundleExecutable</key>
8 |   <string>${EXECUTABLE_NAME}</string>
9 |   <key>CFBundleIdentifier</key>
10 |   <string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
11 |   <key>CFBundleInfoDictionaryVersion</key>
12 |   <string>6.0</string>
13 |   <key>CFBundleName</key>
14 |   <string>${PRODUCT_NAME}</string>
15 |   <key>CFBundlePackageType</key>
16 |   <string>FMWK</string>
17 |   <key>CFBundleShortVersionString</key>
18 |   <string>1.0.0</string>
19 |   <key>CFBundleSignature</key>
20 |   <string>????</string>
21 |   <key>CFBundleVersion</key>
22 |   <string>${CURRENT_PROJECT_VERSION}</string>
23 |   <key>NSPrincipalClass</key>
24 |   <string></string>
25 | </dict>
26 | </plist>
27 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests-acknowledgements.markdown:
--------------------------------------------------------------------------------
1 | # Acknowledgements
2 | This application makes use of the following third party libraries:
3 | Generated by CocoaPods - https://cocoapods.org
4 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests-acknowledgements.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>PreferenceSpecifiers</key>
6 |   <array>
7 |     <dict>
8 |       <key>FooterText</key>
9 |       <string>This application makes use of the following third party libraries:</string>
10 |       <key>Title</key>
11 |       <string>Acknowledgements</string>
12 |       <key>Type</key>
13 |       <string>PSGroupSpecifier</string>
14 |     </dict>
15 |     <dict>
16 |       <key>FooterText</key>
17 |       <string>Generated by CocoaPods - https://cocoapods.org</string>
18 |       <key>Title</key>
19 |       <string></string>
20 |       <key>Type</key>
21 |       <string>PSGroupSpecifier</string>
22 |     </dict>
23 |   </array>
24 |   <key>StringsTable</key>
25 |   <string>Acknowledgements</string>
26 |   <key>Title</key>
27 |   <string>Acknowledgements</string>
28 | </dict>
29 | </plist>
30 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests-dummy.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | @interface PodsDummy_Pods_SuperResolutionKit_Tests : NSObject
3 | @end
4 | @implementation PodsDummy_Pods_SuperResolutionKit_Tests
5 | @end
6 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests-frameworks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | set -e
3 | set -u
4 | set -o pipefail
5 |
6 | if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
7 | # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
8 | # frameworks to, so exit 0 (signalling the script phase was successful).
9 | exit 0
10 | fi
11 |
12 | echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
13 | mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
14 |
15 | COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
16 | SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
17 |
18 | # Used as a return value for each invocation of `strip_invalid_archs` function.
19 | STRIP_BINARY_RETVAL=0
20 |
21 | # This protects against multiple targets copying the same framework dependency at the same time. The solution
22 | # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
23 | RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
24 |
25 | # Copies and strips a vendored framework
26 | install_framework()
27 | {
28 | if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
29 | local source="${BUILT_PRODUCTS_DIR}/$1"
30 | elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
31 | local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
32 | elif [ -r "$1" ]; then
33 | local source="$1"
34 | fi
35 |
36 | local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
37 |
38 | if [ -L "${source}" ]; then
39 | echo "Symlinked..."
40 | source="$(readlink "${source}")"
41 | fi
42 |
43 | # Use filter instead of exclude so missing patterns don't throw errors.
44 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
45 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
46 |
47 | local basename
48 | basename="$(basename -s .framework "$1")"
49 | binary="${destination}/${basename}.framework/${basename}"
50 | if ! [ -r "$binary" ]; then
51 | binary="${destination}/${basename}"
52 | fi
53 |
54 | # Strip invalid architectures so "fat" simulator / device frameworks work on device
55 | if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
56 | strip_invalid_archs "$binary"
57 | fi
58 |
59 | # Resign the code if required by the build settings to avoid unstable apps
60 | code_sign_if_enabled "${destination}/$(basename "$1")"
61 |
62 | # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
63 | if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
64 | local swift_runtime_libs
65 | swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
66 | for lib in $swift_runtime_libs; do
67 | echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
68 | rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
69 | code_sign_if_enabled "${destination}/${lib}"
70 | done
71 | fi
72 | }
73 |
74 | # Copies and strips a vendored dSYM
75 | install_dsym() {
76 | local source="$1"
77 | if [ -r "$source" ]; then
78 | # Copy the dSYM into a the targets temp dir.
79 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
80 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
81 |
82 | local basename
83 | basename="$(basename -s .framework.dSYM "$source")"
84 | binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
85 |
86 | # Strip invalid architectures so "fat" simulator / device frameworks work on device
87 | if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
88 | strip_invalid_archs "$binary"
89 | fi
90 |
91 | if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
92 | # Move the stripped file into its final destination.
93 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
94 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
95 | else
96 | # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
97 | touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
98 | fi
99 | fi
100 | }
101 |
102 | # Signs a framework with the provided identity
103 | code_sign_if_enabled() {
104 | if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
105 | # Use the current code_sign_identitiy
106 | echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
107 | local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
108 |
109 | if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
110 | code_sign_cmd="$code_sign_cmd &"
111 | fi
112 | echo "$code_sign_cmd"
113 | eval "$code_sign_cmd"
114 | fi
115 | }
116 |
117 | # Strip invalid architectures
118 | strip_invalid_archs() {
119 | binary="$1"
120 | # Get architectures for current target binary
121 | binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
122 | # Intersect them with the architectures we are building for
123 | intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
124 | # If there are no archs supported by this binary then warn the user
125 | if [[ -z "$intersected_archs" ]]; then
126 | echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
127 | STRIP_BINARY_RETVAL=0
128 | return
129 | fi
130 | stripped=""
131 | for arch in $binary_archs; do
132 | if ! [[ "${ARCHS}" == *"$arch"* ]]; then
133 | # Strip non-valid architectures in-place
134 | lipo -remove "$arch" -output "$binary" "$binary" || exit 1
135 | stripped="$stripped $arch"
136 | fi
137 | done
138 | if [[ "$stripped" ]]; then
139 | echo "Stripped $binary of architectures:$stripped"
140 | fi
141 | STRIP_BINARY_RETVAL=1
142 | }
143 |
144 | if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
145 | wait
146 | fi
147 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests-resources.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | set -e
3 | set -u
4 | set -o pipefail
5 |
6 | if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
7 | # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
8 | # resources to, so exit 0 (signalling the script phase was successful).
9 | exit 0
10 | fi
11 |
12 | mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
13 |
14 | RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
15 | > "$RESOURCES_TO_COPY"
16 |
17 | XCASSET_FILES=()
18 |
19 | # This protects against multiple targets copying the same framework dependency at the same time. The solution
20 | # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
21 | RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
22 |
23 | case "${TARGETED_DEVICE_FAMILY:-}" in
24 | 1,2)
25 | TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
26 | ;;
27 | 1)
28 | TARGET_DEVICE_ARGS="--target-device iphone"
29 | ;;
30 | 2)
31 | TARGET_DEVICE_ARGS="--target-device ipad"
32 | ;;
33 | 3)
34 | TARGET_DEVICE_ARGS="--target-device tv"
35 | ;;
36 | 4)
37 | TARGET_DEVICE_ARGS="--target-device watch"
38 | ;;
39 | *)
40 | TARGET_DEVICE_ARGS="--target-device mac"
41 | ;;
42 | esac
43 |
44 | install_resource()
45 | {
46 | if [[ "$1" = /* ]] ; then
47 | RESOURCE_PATH="$1"
48 | else
49 | RESOURCE_PATH="${PODS_ROOT}/$1"
50 | fi
51 | if [[ ! -e "$RESOURCE_PATH" ]] ; then
52 | cat << EOM
53 | error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
54 | EOM
55 | exit 1
56 | fi
57 | case $RESOURCE_PATH in
58 | *.storyboard)
59 | echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
60 | ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
61 | ;;
62 | *.xib)
63 | echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
64 | ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
65 | ;;
66 | *.framework)
67 | echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
68 | mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
69 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
70 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
71 | ;;
72 | *.xcdatamodel)
73 | echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
74 | xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
75 | ;;
76 | *.xcdatamodeld)
77 | echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
78 | xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
79 | ;;
80 | *.xcmappingmodel)
81 | echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
82 | xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
83 | ;;
84 | *.xcassets)
85 | ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
86 | XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
87 | ;;
88 | *)
89 | echo "$RESOURCE_PATH" || true
90 | echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
91 | ;;
92 | esac
93 | }
94 |
95 | mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
96 | rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
97 | if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
98 | mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
99 | rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
100 | fi
101 | rm -f "$RESOURCES_TO_COPY"
102 |
103 | if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
104 | then
105 | # Find all other xcassets (this unfortunately includes those of path pods and other targets).
106 | OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
107 | while read line; do
108 | if [[ $line != "${PODS_ROOT}*" ]]; then
109 | XCASSET_FILES+=("$line")
110 | fi
111 | done <<<"$OTHER_XCASSETS"
112 |
113 | if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
114 | printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
115 | else
116 | printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
117 | fi
118 | fi
119 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests-umbrella.h:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 |
14 | FOUNDATION_EXPORT double Pods_SuperResolutionKit_TestsVersionNumber;
15 | FOUNDATION_EXPORT const unsigned char Pods_SuperResolutionKit_TestsVersionString[];
16 |
17 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests.debug.xcconfig:
--------------------------------------------------------------------------------
1 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit"
2 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
3 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
4 | OTHER_CFLAGS = $(inherited) -iquote "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit/SuperResolutionKit.framework/Headers"
5 | PODS_BUILD_DIR = ${BUILD_DIR}
6 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
7 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
8 | PODS_ROOT = ${SRCROOT}/Pods
9 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests.modulemap:
--------------------------------------------------------------------------------
1 | framework module Pods_SuperResolutionKit_Tests {
2 | umbrella header "Pods-SuperResolutionKit_Tests-umbrella.h"
3 |
4 | export *
5 | module * { export * }
6 | }
7 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/Pods-SuperResolutionKit_Tests/Pods-SuperResolutionKit_Tests.release.xcconfig:
--------------------------------------------------------------------------------
1 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit"
2 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
3 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
4 | OTHER_CFLAGS = $(inherited) -iquote "${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit/SuperResolutionKit.framework/Headers"
5 | PODS_BUILD_DIR = ${BUILD_DIR}
6 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
7 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
8 | PODS_ROOT = ${SRCROOT}/Pods
9 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/SuperResolutionKit/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>en</string>
7 |   <key>CFBundleExecutable</key>
8 |   <string>${EXECUTABLE_NAME}</string>
9 |   <key>CFBundleIdentifier</key>
10 |   <string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
11 |   <key>CFBundleInfoDictionaryVersion</key>
12 |   <string>6.0</string>
13 |   <key>CFBundleName</key>
14 |   <string>${PRODUCT_NAME}</string>
15 |   <key>CFBundlePackageType</key>
16 |   <string>FMWK</string>
17 |   <key>CFBundleShortVersionString</key>
18 |   <string>0.1.0</string>
19 |   <key>CFBundleSignature</key>
20 |   <string>????</string>
21 |   <key>CFBundleVersion</key>
22 |   <string>${CURRENT_PROJECT_VERSION}</string>
23 |   <key>NSPrincipalClass</key>
24 |   <string></string>
25 | </dict>
26 | </plist>
27 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/SuperResolutionKit/ResourceBundle-SuperResolutionKit-Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>en</string>
7 |   <key>CFBundleIdentifier</key>
8 |   <string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
9 |   <key>CFBundleInfoDictionaryVersion</key>
10 |   <string>6.0</string>
11 |   <key>CFBundleName</key>
12 |   <string>${PRODUCT_NAME}</string>
13 |   <key>CFBundlePackageType</key>
14 |   <string>BNDL</string>
15 |   <key>CFBundleShortVersionString</key>
16 |   <string>0.1.0</string>
17 |   <key>CFBundleSignature</key>
18 |   <string>????</string>
19 |   <key>CFBundleVersion</key>
20 |   <string>1</string>
21 |   <key>NSPrincipalClass</key>
22 |   <string></string>
23 | </dict>
24 | </plist>
25 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/SuperResolutionKit/SuperResolutionKit-dummy.m:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | @interface PodsDummy_SuperResolutionKit : NSObject
3 | @end
4 | @implementation PodsDummy_SuperResolutionKit
5 | @end
6 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/SuperResolutionKit/SuperResolutionKit-prefix.pch:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/SuperResolutionKit/SuperResolutionKit-umbrella.h:
--------------------------------------------------------------------------------
1 | #ifdef __OBJC__
2 | #import <UIKit/UIKit.h>
3 | #else
4 | #ifndef FOUNDATION_EXPORT
5 | #if defined(__cplusplus)
6 | #define FOUNDATION_EXPORT extern "C"
7 | #else
8 | #define FOUNDATION_EXPORT extern
9 | #endif
10 | #endif
11 | #endif
12 |
13 | #import "SRCNNKit.h"
14 |
15 | FOUNDATION_EXPORT double SuperResolutionKitVersionNumber;
16 | FOUNDATION_EXPORT const unsigned char SuperResolutionKitVersionString[];
17 |
18 |
--------------------------------------------------------------------------------
/Example/Pods/Target Support Files/SuperResolutionKit/SuperResolutionKit.modulemap:
--------------------------------------------------------------------------------
1 | framework module SuperResolutionKit {
2 | umbrella header "SuperResolutionKit-umbrella.h"
3 |
4 | export *
5 | module * { export * }
6 | }
7 |
--------------------------------------------------------------------------------
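Together, the umbrella header (which re-exports SRCNNKit.h) and the module map above expose the pod as the SuperResolutionKit framework module, so Swift code in the example app only needs a module import. The sketch below is hypothetical: the commented call is a stand-in, since the concrete API of SRCNNConverter.swift / UIImageView+SRCNN.swift is not reproduced at this point in the dump.

    import UIKit
    import SuperResolutionKit

    final class PreviewViewController: UIViewController {
        @IBOutlet private weak var imageView: UIImageView!

        func display(_ page: UIImage) {
            // Plain rendering always works; the commented line stands in for the
            // kit's upscaling entry point, whose real name may differ.
            imageView.image = page
            // imageView.setSRImage(page)  // hypothetical SuperResolutionKit call
        }
    }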
/Example/Pods/Target Support Files/SuperResolutionKit/SuperResolutionKit.xcconfig:
--------------------------------------------------------------------------------
1 | CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/SuperResolutionKit
2 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
3 | OTHER_SWIFT_FLAGS = $(inherited) "-D" "COCOAPODS"
4 | PODS_BUILD_DIR = ${BUILD_DIR}
5 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
6 | PODS_ROOT = ${SRCROOT}
7 | PODS_TARGET_SRCROOT = ${PODS_ROOT}/../..
8 | PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
9 | SKIP_INSTALL = YES
10 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Example/SuperResolutionKit.xcodeproj/xcshareddata/xcschemes/SuperResolutionKit-Example.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Example/SuperResolutionKit.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Example/SuperResolutionKit.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>IDEDidComputeMac32BitWarning</key>
6 |   <true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // SuperResolutionKit
4 | //
5 | // Created by kenmaz on 07/11/2018.
6 | // Copyright (c) 2018 kenmaz. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | @UIApplicationMain
12 | class AppDelegate: UIResponder, UIApplicationDelegate {
13 |
14 | var window: UIWindow?
15 |
16 |
17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
18 | // Override point for customization after application launch.
19 | return true
20 | }
21 |
22 | func applicationWillResignActive(_ application: UIApplication) {
23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
24 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
25 | }
26 |
27 | func applicationDidEnterBackground(_ application: UIApplication) {
28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
30 | }
31 |
32 | func applicationWillEnterForeground(_ application: UIApplication) {
33 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
34 | }
35 |
36 | func applicationDidBecomeActive(_ application: UIApplication) {
37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
38 | }
39 |
40 | func applicationWillTerminate(_ application: UIApplication) {
41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
42 | }
43 |
44 |
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/Base.lproj/LaunchScreen.xib:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/Base.lproj/LaunchScreen.xib
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/Base.lproj/Main.storyboard
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ios-marketing",
45 | "size" : "1024x1024",
46 | "scale" : "1x"
47 | }
48 | ],
49 | "info" : {
50 | "version" : 1,
51 | "author" : "xcode"
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>en</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>APPL</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleSignature</key>
20 | 	<string>????</string>
21 | 	<key>CFBundleVersion</key>
22 | 	<string>1</string>
23 | 	<key>LSRequiresIPhoneOS</key>
24 | 	<true/>
25 | 	<key>UILaunchStoryboardName</key>
26 | 	<string>LaunchScreen</string>
27 | 	<key>UIMainStoryboardFile</key>
28 | 	<string>Main</string>
29 | 	<key>UIRequiredDeviceCapabilities</key>
30 | 	<array>
31 | 		<string>armv7</string>
32 | 	</array>
33 | 	<key>UISupportedInterfaceOrientations</key>
34 | 	<array>
35 | 		<string>UIInterfaceOrientationPortrait</string>
36 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
37 | 	</array>
38 | </dict>
39 | </plist>
40 | 
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/ListViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ListViewController.swift
3 | // SuperResolutionKit_Example
4 | //
5 | // Created by kenmaz on 2018/08/31.
6 | // Copyright © 2018 CocoaPods. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | class ListViewController: UITableViewController {
12 |
13 | enum MangaTitle: String {
14 | case blackJack = "Black Jack"
15 | case penki = "Penki"
16 | case photo = "Photo"
17 | }
18 |
19 | private let titles: [MangaTitle] = [.blackJack, .penki, .photo]
20 |
21 | override func viewDidLoad() {
22 | super.viewDidLoad()
23 | }
24 |
25 | // MARK: - Table view data source
26 |
27 | override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
28 | return titles.count
29 | }
30 |
31 | override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
32 | let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath)
33 | cell.textLabel?.text = titles[indexPath.row].rawValue
34 | return cell
35 | }
36 |
37 | override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
38 | tableView.deselectRow(at: indexPath, animated: true)
39 | let title = titles[indexPath.row]
40 | let images: [UIImage]
41 | switch title {
42 | case .blackJack:
43 | images = [#imageLiteral(resourceName: "01bj-page7.jpg"),#imageLiteral(resourceName: "01bj-page11.jpg"),#imageLiteral(resourceName: "01bj-page10.jpg"),#imageLiteral(resourceName: "01bj-page12.jpg"),#imageLiteral(resourceName: "01bj-page13.jpg"),#imageLiteral(resourceName: "01bj-page14.jpg"),#imageLiteral(resourceName: "01bj-page9.jpg"),#imageLiteral(resourceName: "01bj-page8.jpg"),#imageLiteral(resourceName: "01bj-page15.jpg")]
44 | case .penki:
45 | images = [#imageLiteral(resourceName: "p10.png"),#imageLiteral(resourceName: "p9.png"),#imageLiteral(resourceName: "p8.png"),#imageLiteral(resourceName: "p3.png"),#imageLiteral(resourceName: "p2.png"),#imageLiteral(resourceName: "p1.png"),#imageLiteral(resourceName: "p5.png"),#imageLiteral(resourceName: "p4.png"),#imageLiteral(resourceName: "p6.png"),#imageLiteral(resourceName: "p7.png")]
46 | case .photo:
47 | images = [#imageLiteral(resourceName: "img_002_SRF_2_HR.png"),#imageLiteral(resourceName: "img_002_SRF_8_HR.png"),#imageLiteral(resourceName: "img_003_SRF_8_HR.png"),#imageLiteral(resourceName: "img_012_SRF_2_HR.png"),#imageLiteral(resourceName: "img_012_SRF_8_HR.png"),#imageLiteral(resourceName: "img_013_SRF_2_HR.png"),#imageLiteral(resourceName: "img_013_SRF_8_HR.png")]
48 |
49 | }
50 | let vc = storyboard?.instantiateViewController(withIdentifier: "MangaViewController") as! MangaViewController
51 | vc.images = images
52 | present(vc, animated: true, completion: nil)
53 | }
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/MangaViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MangaViewController.swift
3 | // SuperResolutionKit_Example
4 | //
5 | // Created by Kentaro Matsumae on 2018/08/18.
6 | // Copyright © 2018 CocoaPods. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import SuperResolutionKit
11 |
12 | class MangaViewController: UIViewController {
13 |
14 | var images: [UIImage] = []
15 |
16 | private let pageViewController = UIPageViewController(transitionStyle: .scroll, navigationOrientation: .horizontal, options: nil)
17 |
18 | private var pageVCs: [UIViewController] = []
19 | private var currentPid: String = ""
20 |
21 | @IBOutlet weak var loadingIndicator: UIActivityIndicatorView!
22 | @IBOutlet weak var loadingView: UIView!
23 | @IBOutlet weak var loadingLabel: UILabel!
24 |
25 | override func viewDidLoad() {
26 | super.viewDidLoad()
27 |
28 | loadingView.isHidden = true
29 |
30 | self.pageVCs = images.map {
31 | let vc = self.storyboard!.instantiateViewController(withIdentifier: "View") as! ViewController
32 | vc.image = $0
33 | return vc
34 | }
35 | pageViewController.delegate = self
36 | pageViewController.dataSource = self
37 | pageViewController.setViewControllers([pageVCs[0]], direction: .forward, animated: true, completion: nil)
38 |
39 | view.insertSubview(pageViewController.view, at: 0)
40 | addChildViewController(pageViewController)
41 | pageViewController.didMove(toParentViewController: self)
42 | }
43 |
44 | var currentViewController: ViewController {
45 | return pageViewController.viewControllers?.first as! ViewController
46 | }
47 |
48 | @IBAction func closeButtonDidTap(_ sender: Any) {
49 | dismiss(animated: true, completion: nil)
50 | }
51 |
52 | @IBAction func resetDidTap(_ sender: Any) {
53 | currentViewController.reset(sender)
54 | }
55 |
56 | @IBAction func action1DidTap(_ sender: Any) {
57 | process { (completion) in
58 | currentViewController.imageView.setSRImage(image: currentViewController.image!, completion: completion)
59 | }
60 | }
61 |
62 | @IBAction func action1CDidTap(_ sender: Any) {
63 | process {[weak self] (completion) in
64 | let imageView = self!.currentViewController.imageView
65 | let src = self!.currentViewController.image!
66 | imageView.image = src
67 |
68 | DispatchQueue.global().async {
69 | let converter = SRCNNConverter(modelName: "SRCNN-photo")
70 | if let output = converter.convert(from: src) {
71 | DispatchQueue.main.async {
72 | imageView.image = output
73 | imageView.layer.add(CATransition(), forKey: nil)
74 | completion()
75 | }
76 | }
77 | }
78 | }
79 | }
80 |
81 | @IBAction func action2DidTap(_ sender: Any) {
82 | process { (completion) in
83 | currentViewController.imageView.setFSRImage(image: currentViewController.image!, completion: completion)
84 | }
85 | }
86 |
87 | private func process(execute:(_: @escaping ()->Void)->Void) {
88 | let pid = UUID().uuidString
89 | currentPid = pid
90 |
91 | loadingView.isHidden = false
92 | loadingIndicator.isHidden = false
93 | loadingLabel.text = "Processing..."
94 | let start = Date()
95 | execute() { [weak self] in
96 | let elapsed = Date().timeIntervalSince(start)
97 | self?.loadingIndicator.isHidden = true
98 | self?.loadingLabel.text = String(format: "Done: %.2f sec", elapsed)
99 | DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 3, execute: {
100 | if self?.currentPid == pid {
101 | self?.loadingView.isHidden = true
102 | }
103 | })
104 | }
105 | }
106 | }
107 |
108 | extension MangaViewController: UIPageViewControllerDataSource {
109 |
110 | public func pageViewController(_ pageViewController: UIPageViewController, viewControllerBefore viewController: UIViewController) -> UIViewController? {
111 | if let idx = pageVCs.index(of: viewController), 0 <= idx - 1 {
112 | return pageVCs[idx - 1]
113 | } else {
114 | return nil
115 | }
116 | }
117 |
118 | public func pageViewController(_ pageViewController: UIPageViewController, viewControllerAfter viewController: UIViewController) -> UIViewController? {
119 | if let idx = pageVCs.index(of: viewController), idx + 1 < pageVCs.count {
120 | return pageVCs[idx + 1]
121 | } else {
122 | return nil
123 | }
124 | }
125 |
126 | /*
127 | public func presentationCount(for pageViewController: UIPageViewController) -> Int {
128 | }
129 |
130 | public func presentationIndex(for pageViewController: UIPageViewController) -> Int {
131 | }
132 | */
133 | }
134 |
135 | extension MangaViewController: UIPageViewControllerDelegate {
136 | /*
137 | public func pageViewController(_ pageViewController: UIPageViewController, willTransitionTo pendingViewControllers: [UIViewController]) {
138 |
139 | }
140 |
141 | public func pageViewController(_ pageViewController: UIPageViewController, didFinishAnimating finished: Bool, previousViewControllers: [UIViewController], transitionCompleted completed: Bool) {
142 |
143 | }
144 |
145 | public func pageViewController(_ pageViewController: UIPageViewController, spineLocationFor orientation: UIInterfaceOrientation) -> UIPageViewControllerSpineLocation {
146 |
147 | }
148 |
149 | public func pageViewControllerSupportedInterfaceOrientations(_ pageViewController: UIPageViewController) -> UIInterfaceOrientationMask {
150 |
151 | }
152 |
153 | public func pageViewControllerPreferredInterfaceOrientationForPresentation(_ pageViewController: UIPageViewController) -> UIInterfaceOrientation {
154 |
155 | }
156 | */
157 |
158 | }
159 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.swift
3 | // SuperResolutionKit
4 | //
5 | // Created by kenmaz on 07/11/2018.
6 | // Copyright (c) 2018 kenmaz. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import SuperResolutionKit
11 |
12 | class ViewController: UIViewController {
13 |
14 | var image: UIImage? = nil
15 | let imageView = UIImageView(image: nil)
16 |
17 | @IBOutlet weak var scrollView: UIScrollView!
18 |
19 | override func viewDidLoad() {
20 | super.viewDidLoad()
21 | scrollView.addSubview(imageView)
22 |
23 | reset(self)
24 | imageView.sizeToFit()
25 | imageView.frame.origin.y = 100
26 |
27 | if let size = image?.size {
28 | let ratio = view.frame.size.width / size.width
29 | scrollView.minimumZoomScale = ratio
30 | scrollView.setZoomScale(ratio, animated: false)
31 | }
32 | }
33 |
34 | func runSR(_ sender: Any) {
35 | imageView.setSRImage(image: image!)
36 | }
37 |
38 | func runFSR(_ sender: Any) {
39 | imageView.setFSRImage(image: image!)
40 | }
41 |
42 | func reset(_ sender: Any) {
43 | imageView.image = image
44 | imageView.sizeToFit()
45 | }
46 |
47 | @IBAction func doubleTap(_ sender: Any) {
48 | if scrollView.zoomScale == scrollView.minimumZoomScale {
49 | scrollView.setZoomScale(scrollView.maximumZoomScale, animated: true)
50 | } else {
51 | scrollView.setZoomScale(scrollView.minimumZoomScale, animated: true)
52 | }
53 | }
54 | }
55 |
56 | extension ViewController: UIScrollViewDelegate {
57 | func viewForZooming(in scrollView: UIScrollView) -> UIView? {
58 | return imageView
59 | }
60 | }
61 |
62 |
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page10.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page11.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page11.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page12.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page13.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page13.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page14.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page14.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page15.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page15.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page7.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page7.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page8.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page8.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page9.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Blackjack/lr/01bj-page9.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p1.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p10.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p2.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p3.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p4.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p5.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p6.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p7.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p8.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/hr/p9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/hr/p9.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p1.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p10.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p2.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p3.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p4.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p5.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p6.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p7.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p8.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Penki/lr/p9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Penki/lr/p9.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/hr/img_001_SRF_2_HR_1_org.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/hr/img_001_SRF_2_HR_1_org.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/hr/lena_org.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/hr/lena_org.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_002_SRF_2_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_002_SRF_2_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_002_SRF_8_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_002_SRF_8_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_003_SRF_2_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_003_SRF_2_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_003_SRF_8_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_003_SRF_8_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_012_SRF_2_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_012_SRF_2_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_012_SRF_8_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_012_SRF_8_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_013_SRF_2_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_013_SRF_2_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/images/Photos/lr/img_013_SRF_8_HR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/images/Photos/lr/img_013_SRF_8_HR.png
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page10.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page11.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page11.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page12.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page13.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page13.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page14.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page14.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page15.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page15.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page7.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page7.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page8.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page8.jpg
--------------------------------------------------------------------------------
/Example/SuperResolutionKit/lr_imgs/01bj-page9.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/Example/SuperResolutionKit/lr_imgs/01bj-page9.jpg
--------------------------------------------------------------------------------
/Example/Tests/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>en</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>BNDL</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleSignature</key>
20 | 	<string>????</string>
21 | 	<key>CFBundleVersion</key>
22 | 	<string>1</string>
23 | </dict>
24 | </plist>
25 | 
--------------------------------------------------------------------------------
/Example/Tests/Tests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | import SuperResolutionKit
3 |
4 | class Tests: XCTestCase {
5 |
6 | override func setUp() {
7 | super.setUp()
8 | // Put setup code here. This method is called before the invocation of each test method in the class.
9 | }
10 |
11 | override func tearDown() {
12 | // Put teardown code here. This method is called after the invocation of each test method in the class.
13 | super.tearDown()
14 | }
15 |
16 | func testExample() {
17 | // This is an example of a functional test case.
18 | XCTAssert(true, "Pass")
19 | }
20 |
21 | func testPerformanceExample() {
22 | // This is an example of a performance test case.
23 | self.measure() {
24 | // Put the code you want to measure the time of here.
25 | }
26 | }
27 |
28 | }
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 kenmaz
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SuperResolutionKit
2 |
3 | [![CI Status](https://img.shields.io/travis/kenmaz/SuperResolutionKit.svg?style=flat)](https://travis-ci.org/kenmaz/SuperResolutionKit)
4 | [![Version](https://img.shields.io/cocoapods/v/SuperResolutionKit.svg?style=flat)](https://cocoapods.org/pods/SuperResolutionKit)
5 | [![License](https://img.shields.io/cocoapods/l/SuperResolutionKit.svg?style=flat)](https://cocoapods.org/pods/SuperResolutionKit)
6 | [![Platform](https://img.shields.io/cocoapods/p/SuperResolutionKit.svg?style=flat)](https://cocoapods.org/pods/SuperResolutionKit)
7 |
8 | Super resolution implementation with CoreML and Keras.
9 | See my presentation: https://www.tryswift.co/events/2018/nyc/#coreml
10 |
11 | ## Requirements
12 |
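- iOS 11.0+
- Swift 4.1
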
13 | ## Installation
14 |
15 | SuperResolutionKit is available through [CocoaPods](https://cocoapods.org). To install
16 | it, simply add the following line to your Podfile:
17 |
18 | ```ruby
19 | pod 'SuperResolutionKit'
20 | ```
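
Then run `pod install`.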
21 |
22 | ## Usage
23 |
24 | ```swift
25 | // Using SRCNN
26 | imageView.setSRImage(image: image)
27 |
28 | // Using F-SRCNN (faster)
29 | imageView.setFSRImage(image: image)
30 | ```
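
For more control over threading and the model used, the bundled example app (`Example/SuperResolutionKit/MangaViewController.swift`) calls the converter type directly. Below is a minimal sketch of that pattern; the `upscale` helper name is illustrative and not part of the library:

```swift
import UIKit
import SuperResolutionKit

// Run the SRCNN model off the main thread, then apply the result on the main
// queue — the same flow the example app's MangaViewController uses.
func upscale(_ source: UIImage, into imageView: UIImageView) {
    DispatchQueue.global().async {
        let converter = SRCNNConverter(modelName: "SRCNN-photo")
        if let output = converter.convert(from: source) {
            DispatchQueue.main.async {
                imageView.image = output
            }
        }
    }
}
```

The example app also uses `setSRImage(image:completion:)` and `setFSRImage(image:completion:)`, which accept a completion closure when you need to know when processing has finished.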
31 |
32 | ## Author
33 |
34 | kenmaz, kentaro.matsumae@gmail.com
35 |
36 | ## License
37 |
38 | SuperResolutionKit is available under the MIT license. See the LICENSE file for more info.
39 |
40 | ### CoreMLHelpers
41 | CoreMLHelpers is copyright 2017 Matthijs Hollemans and is licensed under the terms of the [MIT license](https://github.com/hollance/CoreMLHelpers/blob/master/LICENSE.txt).
42 |
--------------------------------------------------------------------------------
/SuperResolutionKit.podspec:
--------------------------------------------------------------------------------
1 | #
2 | # Be sure to run `pod lib lint SuperResolutionKit.podspec' to ensure this is a
3 | # valid spec before submitting.
4 | #
5 | # Any lines starting with a # are optional, but their use is encouraged
6 | # To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
7 | #
8 |
9 | Pod::Spec.new do |s|
10 | s.name = 'SuperResolutionKit'
11 | s.version = '0.0.1'
12 | s.summary = 'Super resolution implementation with Keras/CoreML'
13 |
14 | # This description is used to generate tags and improve search results.
15 | # * Think: What does it do? Why did you write it? What is the focus?
16 | # * Try to keep it short, snappy and to the point.
17 | # * Write the description between the DESC delimiters below.
18 | # * Finally, don't worry about the indent, CocoaPods strips it!
19 |
20 | s.description = <<-DESC
21 | This is a super resolution implementation with Keras/CoreML
22 | DESC
23 |
24 | s.homepage = 'https://github.com/kenmaz/'
25 | # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
26 | s.license = { :type => 'MIT', :file => 'LICENSE' }
27 | s.author = { 'kenmaz' => 'kentaro.matsumae@gmail.com' }
28 | s.source = { :git => 'https://github.com/kenmaz/SuperResolutionKit.git', :tag => s.version.to_s }
29 | s.social_media_url = 'https://twitter.com/kenmaz'
30 |
31 | s.ios.deployment_target = '11.0'
32 | s.swift_version = '4.1'
33 |
34 | s.source_files = [
35 | 'SuperResolutionKit/Classes/*',
36 | 'SuperResolutionKit/Classes/CoreMLHelpers/*'
37 | ]
38 | s.resource_bundles = {
39 | 'SuperResolutionKit' => [
40 | 'SuperResolutionKit/Assets/*'
41 | ]
42 | }
43 |
44 | # s.public_header_files = 'Pod/Classes/**/*.h'
45 | # s.frameworks = 'UIKit', 'MapKit'
46 | # s.dependency 'AFNetworking', '~> 2.3'
47 | end
48 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/.gitkeep
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/FSRCNN.mlmodelc/coremldata.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/FSRCNN.mlmodelc/coremldata.bin
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/FSRCNN.mlmodelc/model.espresso.net:
--------------------------------------------------------------------------------
1 | {
2 | "storage" : "model.espresso.weights",
3 | "properties" : {
4 |
5 | },
6 | "format_version" : 200,
7 | "layers" : [
8 | {
9 | "pad_r" : 0,
10 | "fused_relu" : 1,
11 | "fused_tanh" : 0,
12 | "pad_fill_mode" : 0,
13 | "pad_b" : 0,
14 | "pad_l" : 0,
15 | "top" : "conv2d_1__activation___output",
16 | "blob_weights" : 3,
17 | "K" : 3,
18 | "blob_biases" : 1,
19 | "name" : "conv2d_1",
20 | "has_batch_norm" : 0,
21 | "type" : "convolution",
22 | "n_groups" : 1,
23 | "pad_t" : 0,
24 | "has_biases" : 1,
25 | "C" : 56,
26 | "bottom" : "image",
27 | "weights" : {
28 |
29 | },
30 | "pad_mode" : 1,
31 | "Nx" : 5,
32 | "pad_value" : 0,
33 | "Ny" : 5,
34 | "n_parallel" : 1
35 | },
36 | {
37 | "pad_r" : 0,
38 | "fused_relu" : 1,
39 | "fused_tanh" : 0,
40 | "pad_fill_mode" : 0,
41 | "pad_b" : 0,
42 | "pad_l" : 0,
43 | "top" : "conv2d_2__activation___output",
44 | "blob_weights" : 7,
45 | "K" : 56,
46 | "blob_biases" : 5,
47 | "name" : "conv2d_2",
48 | "has_batch_norm" : 0,
49 | "type" : "convolution",
50 | "n_groups" : 1,
51 | "pad_t" : 0,
52 | "has_biases" : 1,
53 | "C" : 12,
54 | "bottom" : "conv2d_1__activation___output",
55 | "weights" : {
56 |
57 | },
58 | "pad_mode" : 1,
59 | "Nx" : 1,
60 | "pad_value" : 0,
61 | "Ny" : 1,
62 | "n_parallel" : 1
63 | },
64 | {
65 | "pad_r" : 0,
66 | "fused_relu" : 1,
67 | "fused_tanh" : 0,
68 | "pad_fill_mode" : 0,
69 | "pad_b" : 0,
70 | "pad_l" : 0,
71 | "top" : "conv2d_3__activation___output",
72 | "blob_weights" : 11,
73 | "K" : 12,
74 | "blob_biases" : 9,
75 | "name" : "conv2d_3",
76 | "has_batch_norm" : 0,
77 | "type" : "convolution",
78 | "n_groups" : 1,
79 | "pad_t" : 0,
80 | "has_biases" : 1,
81 | "C" : 12,
82 | "bottom" : "conv2d_2__activation___output",
83 | "weights" : {
84 |
85 | },
86 | "pad_mode" : 1,
87 | "Nx" : 3,
88 | "pad_value" : 0,
89 | "Ny" : 3,
90 | "n_parallel" : 1
91 | },
92 | {
93 | "pad_r" : 0,
94 | "fused_relu" : 1,
95 | "fused_tanh" : 0,
96 | "pad_fill_mode" : 0,
97 | "pad_b" : 0,
98 | "pad_l" : 0,
99 | "top" : "conv2d_4__activation___output",
100 | "blob_weights" : 15,
101 | "K" : 12,
102 | "blob_biases" : 13,
103 | "name" : "conv2d_4",
104 | "has_batch_norm" : 0,
105 | "type" : "convolution",
106 | "n_groups" : 1,
107 | "pad_t" : 0,
108 | "has_biases" : 1,
109 | "C" : 12,
110 | "bottom" : "conv2d_3__activation___output",
111 | "weights" : {
112 |
113 | },
114 | "pad_mode" : 1,
115 | "Nx" : 3,
116 | "pad_value" : 0,
117 | "Ny" : 3,
118 | "n_parallel" : 1
119 | },
120 | {
121 | "pad_r" : 0,
122 | "fused_relu" : 1,
123 | "fused_tanh" : 0,
124 | "pad_fill_mode" : 0,
125 | "pad_b" : 0,
126 | "pad_l" : 0,
127 | "top" : "conv2d_5__activation___output",
128 | "blob_weights" : 19,
129 | "K" : 12,
130 | "blob_biases" : 17,
131 | "name" : "conv2d_5",
132 | "has_batch_norm" : 0,
133 | "type" : "convolution",
134 | "n_groups" : 1,
135 | "pad_t" : 0,
136 | "has_biases" : 1,
137 | "C" : 12,
138 | "bottom" : "conv2d_4__activation___output",
139 | "weights" : {
140 |
141 | },
142 | "pad_mode" : 1,
143 | "Nx" : 3,
144 | "pad_value" : 0,
145 | "Ny" : 3,
146 | "n_parallel" : 1
147 | },
148 | {
149 | "pad_r" : 0,
150 | "fused_relu" : 1,
151 | "fused_tanh" : 0,
152 | "pad_fill_mode" : 0,
153 | "pad_b" : 0,
154 | "pad_l" : 0,
155 | "top" : "conv2d_6__activation___output",
156 | "blob_weights" : 23,
157 | "K" : 12,
158 | "blob_biases" : 21,
159 | "name" : "conv2d_6",
160 | "has_batch_norm" : 0,
161 | "type" : "convolution",
162 | "n_groups" : 1,
163 | "pad_t" : 0,
164 | "has_biases" : 1,
165 | "C" : 12,
166 | "bottom" : "conv2d_5__activation___output",
167 | "weights" : {
168 |
169 | },
170 | "pad_mode" : 1,
171 | "Nx" : 3,
172 | "pad_value" : 0,
173 | "Ny" : 3,
174 | "n_parallel" : 1
175 | },
176 | {
177 | "pad_r" : 0,
178 | "fused_relu" : 0,
179 | "fused_tanh" : 0,
180 | "pad_fill_mode" : 0,
181 | "pad_b" : 0,
182 | "pad_l" : 0,
183 | "top" : "conv2d_7_output",
184 | "blob_weights" : 27,
185 | "K" : 12,
186 | "blob_biases" : 25,
187 | "name" : "conv2d_7",
188 | "has_batch_norm" : 0,
189 | "type" : "convolution",
190 | "n_groups" : 1,
191 | "pad_t" : 0,
192 | "has_biases" : 1,
193 | "C" : 56,
194 | "bottom" : "conv2d_6__activation___output",
195 | "weights" : {
196 |
197 | },
198 | "pad_mode" : 1,
199 | "Nx" : 1,
200 | "pad_value" : 0,
201 | "Ny" : 1,
202 | "n_parallel" : 1
203 | },
204 | {
205 | "pad_r" : 0,
206 | "fused_relu" : 0,
207 | "fused_tanh" : 0,
208 | "pad_fill_mode" : 0,
209 | "pad_b" : 0,
210 | "pad_l" : 0,
211 | "top" : "output1",
212 | "blob_weights" : 31,
213 | "K" : 56,
214 | "blob_biases" : 29,
215 | "stride_x" : 2,
216 | "name" : "conv2d_transpose_1",
217 | "deconv_out_height" : 200,
218 | "has_batch_norm" : 0,
219 | "type" : "deconvolution",
220 | "n_groups" : 1,
221 | "pad_t" : 0,
222 | "stride_y" : 2,
223 | "has_biases" : 1,
224 | "C" : 3,
225 | "bottom" : "conv2d_7_output",
226 | "weights" : {
227 |
228 | },
229 | "Nx" : 9,
230 | "pad_mode" : 1,
231 | "deconv_out_width" : 200,
232 | "pad_value" : 0,
233 | "Ny" : 9,
234 | "n_parallel" : 1
235 | }
236 | ]
237 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/FSRCNN.mlmodelc/model.espresso.shape:
--------------------------------------------------------------------------------
1 | {
2 | "layer_shapes" : {
3 | "output1" : {
4 | "k" : 3,
5 | "w" : 200,
6 | "n" : 1,
7 | "h" : 200
8 | },
9 | "conv2d_6__activation___output" : {
10 | "k" : 12,
11 | "w" : 100,
12 | "n" : 1,
13 | "h" : 100
14 | },
15 | "conv2d_2__activation___output" : {
16 | "k" : 12,
17 | "w" : 100,
18 | "n" : 1,
19 | "h" : 100
20 | },
21 | "conv2d_1__activation___output" : {
22 | "k" : 56,
23 | "w" : 100,
24 | "n" : 1,
25 | "h" : 100
26 | },
27 | "conv2d_4__activation___output" : {
28 | "k" : 12,
29 | "w" : 100,
30 | "n" : 1,
31 | "h" : 100
32 | },
33 | "conv2d_5__activation___output" : {
34 | "k" : 12,
35 | "w" : 100,
36 | "n" : 1,
37 | "h" : 100
38 | },
39 | "image" : {
40 | "k" : 3,
41 | "w" : 100,
42 | "n" : 1,
43 | "h" : 100
44 | },
45 | "conv2d_3__activation___output" : {
46 | "k" : 12,
47 | "w" : 100,
48 | "n" : 1,
49 | "h" : 100
50 | },
51 | "conv2d_7_output" : {
52 | "k" : 56,
53 | "w" : 100,
54 | "n" : 1,
55 | "h" : 100
56 | }
57 | }
58 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/FSRCNN.mlmodelc/model.espresso.weights:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/FSRCNN.mlmodelc/model.espresso.weights
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/FSRCNN.mlmodelc/model/coremldata.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/FSRCNN.mlmodelc/model/coremldata.bin
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/coremldata.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/coremldata.bin
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/model.espresso.net:
--------------------------------------------------------------------------------
1 | {
2 | "storage" : "model.espresso.weights",
3 | "properties" : {
4 |
5 | },
6 | "format_version" : 200,
7 | "layers" : [
8 | {
9 | "pad_r" : 0,
10 | "fused_relu" : 1,
11 | "fused_tanh" : 0,
12 | "pad_fill_mode" : 0,
13 | "pad_b" : 0,
14 | "pad_l" : 0,
15 | "top" : "conv2d_1__activation___output",
16 | "blob_weights" : 3,
17 | "K" : 3,
18 | "blob_biases" : 1,
19 | "name" : "conv2d_1",
20 | "has_batch_norm" : 0,
21 | "type" : "convolution",
22 | "n_groups" : 1,
23 | "pad_t" : 0,
24 | "has_biases" : 1,
25 | "C" : 128,
26 | "bottom" : "image",
27 | "weights" : {
28 |
29 | },
30 | "pad_mode" : 1,
31 | "Nx" : 9,
32 | "pad_value" : 0,
33 | "Ny" : 9,
34 | "n_parallel" : 1
35 | },
36 | {
37 | "pad_r" : 0,
38 | "fused_relu" : 1,
39 | "fused_tanh" : 0,
40 | "pad_fill_mode" : 0,
41 | "pad_b" : 0,
42 | "pad_l" : 0,
43 | "top" : "conv2d_2__activation___output",
44 | "blob_weights" : 7,
45 | "K" : 128,
46 | "blob_biases" : 5,
47 | "name" : "conv2d_2",
48 | "has_batch_norm" : 0,
49 | "type" : "convolution",
50 | "n_groups" : 1,
51 | "pad_t" : 0,
52 | "has_biases" : 1,
53 | "C" : 64,
54 | "bottom" : "conv2d_1__activation___output",
55 | "weights" : {
56 |
57 | },
58 | "pad_mode" : 1,
59 | "Nx" : 3,
60 | "pad_value" : 0,
61 | "Ny" : 3,
62 | "n_parallel" : 1
63 | },
64 | {
65 | "pad_r" : 0,
66 | "fused_relu" : 0,
67 | "fused_tanh" : 0,
68 | "pad_fill_mode" : 0,
69 | "pad_b" : 0,
70 | "pad_l" : 0,
71 | "top" : "output1",
72 | "blob_weights" : 11,
73 | "K" : 64,
74 | "blob_biases" : 9,
75 | "name" : "conv2d_3",
76 | "has_batch_norm" : 0,
77 | "type" : "convolution",
78 | "n_groups" : 1,
79 | "pad_t" : 0,
80 | "has_biases" : 1,
81 | "C" : 3,
82 | "bottom" : "conv2d_2__activation___output",
83 | "weights" : {
84 |
85 | },
86 | "pad_mode" : 1,
87 | "Nx" : 5,
88 | "pad_value" : 0,
89 | "Ny" : 5,
90 | "n_parallel" : 1
91 | }
92 | ]
93 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/model.espresso.shape:
--------------------------------------------------------------------------------
1 | {
2 | "layer_shapes" : {
3 | "conv2d_2__activation___output" : {
4 | "k" : 64,
5 | "w" : 200,
6 | "n" : 1,
7 | "h" : 200
8 | },
9 | "conv2d_1__activation___output" : {
10 | "k" : 128,
11 | "w" : 200,
12 | "n" : 1,
13 | "h" : 200
14 | },
15 | "image" : {
16 | "k" : 3,
17 | "w" : 200,
18 | "n" : 1,
19 | "h" : 200
20 | },
21 | "output1" : {
22 | "k" : 3,
23 | "w" : 200,
24 | "n" : 1,
25 | "h" : 200
26 | }
27 | }
28 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/model.espresso.weights:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/model.espresso.weights
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/model/coremldata.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/SRCNN-photo.mlmodelc/model/coremldata.bin
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN.mlmodelc/coremldata.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/SRCNN.mlmodelc/coremldata.bin
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN.mlmodelc/model.espresso.net:
--------------------------------------------------------------------------------
1 | {
2 | "storage" : "model.espresso.weights",
3 | "properties" : {
4 |
5 | },
6 | "format_version" : 200,
7 | "layers" : [
8 | {
9 | "pad_r" : 0,
10 | "fused_relu" : 1,
11 | "fused_tanh" : 0,
12 | "pad_fill_mode" : 0,
13 | "pad_b" : 0,
14 | "pad_l" : 0,
15 | "top" : "conv2d_1__activation___output",
16 | "blob_weights" : 3,
17 | "K" : 3,
18 | "blob_biases" : 1,
19 | "name" : "conv2d_1",
20 | "has_batch_norm" : 0,
21 | "type" : "convolution",
22 | "n_groups" : 1,
23 | "pad_t" : 0,
24 | "has_biases" : 1,
25 | "C" : 128,
26 | "bottom" : "image",
27 | "weights" : {
28 |
29 | },
30 | "pad_mode" : 1,
31 | "Nx" : 9,
32 | "pad_value" : 0,
33 | "Ny" : 9,
34 | "n_parallel" : 1
35 | },
36 | {
37 | "pad_r" : 0,
38 | "fused_relu" : 1,
39 | "fused_tanh" : 0,
40 | "pad_fill_mode" : 0,
41 | "pad_b" : 0,
42 | "pad_l" : 0,
43 | "top" : "conv2d_2__activation___output",
44 | "blob_weights" : 7,
45 | "K" : 128,
46 | "blob_biases" : 5,
47 | "name" : "conv2d_2",
48 | "has_batch_norm" : 0,
49 | "type" : "convolution",
50 | "n_groups" : 1,
51 | "pad_t" : 0,
52 | "has_biases" : 1,
53 | "C" : 64,
54 | "bottom" : "conv2d_1__activation___output",
55 | "weights" : {
56 |
57 | },
58 | "pad_mode" : 1,
59 | "Nx" : 3,
60 | "pad_value" : 0,
61 | "Ny" : 3,
62 | "n_parallel" : 1
63 | },
64 | {
65 | "pad_r" : 0,
66 | "fused_relu" : 0,
67 | "fused_tanh" : 0,
68 | "pad_fill_mode" : 0,
69 | "pad_b" : 0,
70 | "pad_l" : 0,
71 | "top" : "output1",
72 | "blob_weights" : 11,
73 | "K" : 64,
74 | "blob_biases" : 9,
75 | "name" : "conv2d_3",
76 | "has_batch_norm" : 0,
77 | "type" : "convolution",
78 | "n_groups" : 1,
79 | "pad_t" : 0,
80 | "has_biases" : 1,
81 | "C" : 3,
82 | "bottom" : "conv2d_2__activation___output",
83 | "weights" : {
84 |
85 | },
86 | "pad_mode" : 1,
87 | "Nx" : 5,
88 | "pad_value" : 0,
89 | "Ny" : 5,
90 | "n_parallel" : 1
91 | }
92 | ]
93 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN.mlmodelc/model.espresso.shape:
--------------------------------------------------------------------------------
1 | {
2 | "layer_shapes" : {
3 | "conv2d_2__activation___output" : {
4 | "k" : 64,
5 | "w" : 200,
6 | "n" : 1,
7 | "h" : 200
8 | },
9 | "conv2d_1__activation___output" : {
10 | "k" : 128,
11 | "w" : 200,
12 | "n" : 1,
13 | "h" : 200
14 | },
15 | "image" : {
16 | "k" : 3,
17 | "w" : 200,
18 | "n" : 1,
19 | "h" : 200
20 | },
21 | "output1" : {
22 | "k" : 3,
23 | "w" : 200,
24 | "n" : 1,
25 | "h" : 200
26 | }
27 | }
28 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN.mlmodelc/model.espresso.weights:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/SRCNN.mlmodelc/model.espresso.weights
--------------------------------------------------------------------------------
/SuperResolutionKit/Assets/SRCNN.mlmodelc/model/coremldata.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Assets/SRCNN.mlmodelc/model/coremldata.bin
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/SuperResolutionKit/Classes/.gitkeep
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/Array.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Swift
24 |
25 | extension Array where Element: Comparable {
26 | /**
27 | Returns the index and value of the largest element in the array.
28 | */
29 | public func argmax() -> (Int, Element) {
30 | precondition(self.count > 0)
31 | var maxIndex = 0
32 | var maxValue = self[0]
33 | for i in 1..<self.count {
34 | if self[i] > maxValue {
35 | maxValue = self[i]
36 | maxIndex = i
37 | }
38 | }
39 | return (maxIndex, maxValue)
40 | }
41 |
42 | /**
43 | Returns the indices of the array's elements in sorted order.
44 | */
45 | public func argsort(by areInIncreasingOrder: (Element, Element) -> Bool) -> [Array.Index] {
46 | return self.indices.sorted { areInIncreasingOrder(self[$0], self[$1]) }
47 | }
48 |
49 | /**
50 | Returns a new array containing the elements at the specified indices.
51 | */
52 | public func gather(indices: [Array.Index]) -> [Element] {
53 | var a = [Element]()
54 | for i in indices { a.append(self[i]) }
55 | return a
56 | }
57 | }
58 |
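A minimal usage sketch for the helpers above (not part of the repository sources; the sample array is made up):

    let scores = [0.1, 0.7, 0.2]
    let (bestIndex, bestValue) = scores.argmax()               // (1, 0.7)
    let order = scores.argsort(by: >)                          // [1, 2, 0] -- highest value first
    let top2 = scores.gather(indices: Array(order.prefix(2)))  // [0.7, 0.2]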
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/CVPixelBuffer+Helpers.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Foundation
24 | import Accelerate
25 | import CoreImage
26 |
27 | /**
28 | Creates a RGB pixel buffer of the specified width and height.
29 | */
30 | public func createPixelBuffer(width: Int, height: Int) -> CVPixelBuffer? {
31 | var pixelBuffer: CVPixelBuffer?
32 | let status = CVPixelBufferCreate(nil, width, height,
33 | kCVPixelFormatType_32BGRA, nil,
34 | &pixelBuffer)
35 | if status != kCVReturnSuccess {
36 | print("Error: could not create resized pixel buffer", status)
37 | return nil
38 | }
39 | return pixelBuffer
40 | }
41 |
42 | /**
43 | First crops the pixel buffer, then resizes it.
44 | */
45 | public func resizePixelBuffer(_ srcPixelBuffer: CVPixelBuffer,
46 | cropX: Int,
47 | cropY: Int,
48 | cropWidth: Int,
49 | cropHeight: Int,
50 | scaleWidth: Int,
51 | scaleHeight: Int) -> CVPixelBuffer? {
52 |
53 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
54 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
55 | print("Error: could not get pixel buffer base address")
56 | return nil
57 | }
58 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
59 | let offset = cropY*srcBytesPerRow + cropX*4
60 | var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
61 | height: vImagePixelCount(cropHeight),
62 | width: vImagePixelCount(cropWidth),
63 | rowBytes: srcBytesPerRow)
64 |
65 | let destBytesPerRow = scaleWidth*4
66 | guard let destData = malloc(scaleHeight*destBytesPerRow) else {
67 | print("Error: out of memory")
68 | return nil
69 | }
70 | var destBuffer = vImage_Buffer(data: destData,
71 | height: vImagePixelCount(scaleHeight),
72 | width: vImagePixelCount(scaleWidth),
73 | rowBytes: destBytesPerRow)
74 |
75 | let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImage_Flags(0))
76 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
77 | if error != kvImageNoError {
78 | print("Error:", error)
79 | free(destData)
80 | return nil
81 | }
82 |
83 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
84 | if let ptr = ptr {
85 | free(UnsafeMutableRawPointer(mutating: ptr))
86 | }
87 | }
88 |
89 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
90 | var dstPixelBuffer: CVPixelBuffer?
91 | let status = CVPixelBufferCreateWithBytes(nil, scaleWidth, scaleHeight,
92 | pixelFormat, destData,
93 | destBytesPerRow, releaseCallback,
94 | nil, nil, &dstPixelBuffer)
95 | if status != kCVReturnSuccess {
96 | print("Error: could not create new pixel buffer")
97 | free(destData)
98 | return nil
99 | }
100 | return dstPixelBuffer
101 | }
102 |
103 | /**
104 | Resizes a CVPixelBuffer to a new width and height.
105 | */
106 | public func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
107 | width: Int, height: Int) -> CVPixelBuffer? {
108 | return resizePixelBuffer(pixelBuffer, cropX: 0, cropY: 0,
109 | cropWidth: CVPixelBufferGetWidth(pixelBuffer),
110 | cropHeight: CVPixelBufferGetHeight(pixelBuffer),
111 | scaleWidth: width, scaleHeight: height)
112 | }
113 |
114 | /**
115 | Resizes a CVPixelBuffer to a new width and height.
116 | */
117 | public func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
118 | width: Int, height: Int,
119 | output: CVPixelBuffer, context: CIContext) {
120 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
121 | let sx = CGFloat(width) / CGFloat(CVPixelBufferGetWidth(pixelBuffer))
122 | let sy = CGFloat(height) / CGFloat(CVPixelBufferGetHeight(pixelBuffer))
123 | let scaleTransform = CGAffineTransform(scaleX: sx, y: sy)
124 | let scaledImage = ciImage.transformed(by: scaleTransform)
125 | context.render(scaledImage, to: output)
126 | }
127 |
128 | /**
129 | Rotates CVPixelBuffer by the provided factor of 90 counterclock-wise.
130 | */
131 |
132 | public func rotate90PixelBuffer(_ srcPixelBuffer: CVPixelBuffer, factor: UInt8) -> CVPixelBuffer? {
133 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
134 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
135 | print("Error: could not get pixel buffer base address")
136 | return nil
137 | }
138 | let sourceWidth = CVPixelBufferGetWidth(srcPixelBuffer)
139 | let sourceHeight = CVPixelBufferGetHeight(srcPixelBuffer)
140 | var destWidth = sourceHeight
141 | var destHeight = sourceWidth
142 | var color = UInt8(0)
143 |
144 | if factor % 2 == 0 {
145 | destWidth = sourceWidth
146 | destHeight = sourceHeight
147 | }
148 |
149 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
150 | var srcBuffer = vImage_Buffer(data: srcData,
151 | height: vImagePixelCount(sourceHeight),
152 | width: vImagePixelCount(sourceWidth),
153 | rowBytes: srcBytesPerRow)
154 |
155 | let destBytesPerRow = destWidth*4
156 | guard let destData = malloc(destHeight*destBytesPerRow) else {
157 | print("Error: out of memory")
158 | return nil
159 | }
160 | var destBuffer = vImage_Buffer(data: destData,
161 | height: vImagePixelCount(destHeight),
162 | width: vImagePixelCount(destWidth),
163 | rowBytes: destBytesPerRow)
164 |
165 | let error = vImageRotate90_ARGB8888(&srcBuffer, &destBuffer, factor, &color, vImage_Flags(0))
166 |
167 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
168 | if error != kvImageNoError {
169 | print("Error:", error)
170 | free(destData)
171 | return nil
172 | }
173 |
174 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
175 | if let ptr = ptr {
176 | free(UnsafeMutableRawPointer(mutating: ptr))
177 | }
178 | }
179 |
180 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
181 | var dstPixelBuffer: CVPixelBuffer?
182 | let status = CVPixelBufferCreateWithBytes(nil, destWidth, destHeight,
183 | pixelFormat, destData,
184 | destBytesPerRow, releaseCallback,
185 | nil, nil, &dstPixelBuffer)
186 | if status != kCVReturnSuccess {
187 | print("Error: could not create new pixel buffer")
188 | free(destData)
189 | return nil
190 | }
191 | return dstPixelBuffer
192 | }
193 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/MLMultiArray+Image.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Foundation
24 | import CoreML
25 |
26 | extension MLMultiArray {
27 | /**
28 | Converts the multi-array to a UIImage.
29 | */
30 | public func image<T: MultiArrayType>(offset: T, scale: T) -> UIImage? {
31 | return MultiArray(self).image(offset: offset, scale: scale)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/Math.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Foundation
24 |
25 | public func clamp<T: Comparable>(_ x: T, min: T, max: T) -> T {
26 | if x < min { return min }
27 | if x > max { return max }
28 | return x
29 | }
30 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/MultiArray.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Foundation
24 | import CoreML
25 | import Swift
26 |
27 | public protocol MultiArrayType: Comparable {
28 | static var multiArrayDataType: MLMultiArrayDataType { get }
29 | static func +(lhs: Self, rhs: Self) -> Self
30 | static func *(lhs: Self, rhs: Self) -> Self
31 | init(_: Int)
32 | var toUInt8: UInt8 { get }
33 | }
34 |
35 | extension Double: MultiArrayType {
36 | public static var multiArrayDataType: MLMultiArrayDataType { return .double }
37 | public var toUInt8: UInt8 { return UInt8(self) }
38 | }
39 |
40 | extension Float: MultiArrayType {
41 | public static var multiArrayDataType: MLMultiArrayDataType { return .float32 }
42 | public var toUInt8: UInt8 { return UInt8(self) }
43 | }
44 |
45 | extension Int32: MultiArrayType {
46 | public static var multiArrayDataType: MLMultiArrayDataType { return .int32 }
47 | public var toUInt8: UInt8 { return UInt8(self) }
48 | }
49 |
50 | /**
51 | Wrapper around MLMultiArray to make it more Swifty.
52 | */
53 | public struct MultiArray<T: MultiArrayType> {
54 | public let array: MLMultiArray
55 | public let pointer: UnsafeMutablePointer<T>
56 |
57 | private(set) public var strides: [Int]
58 | private(set) public var shape: [Int]
59 |
60 | /**
61 | Creates a new multi-array filled with all zeros.
62 | */
63 | public init(shape: [Int]) {
64 | let m = try! MLMultiArray(shape: shape as [NSNumber], dataType: T.multiArrayDataType)
65 | self.init(m)
66 | memset(pointer, 0, MemoryLayout<T>.stride * count)
67 | }
68 |
69 | /**
70 | Creates a new multi-array initialized with the specified value.
71 | */
72 | public init(shape: [Int], initial: T) {
73 | self.init(shape: shape)
74 | for i in 0..<count {
75 | pointer[i] = initial
76 | }
77 | }
78 |
79 | /**
80 | Creates a multi-array that wraps an existing MLMultiArray.
81 | */
82 | public init(_ array: MLMultiArray) {
83 | self.init(array, array.shape.map { $0.intValue }, array.strides.map { $0.intValue })
84 | }
85 |
86 | init(_ array: MLMultiArray, _ shape: [Int], _ strides: [Int]) {
87 | self.array = array
88 | self.shape = shape
89 | self.strides = strides
90 | self.pointer = UnsafeMutablePointer<T>(OpaquePointer(array.dataPointer))
91 | }
92 |
93 | /**
94 | Returns the number of elements in the entire array.
95 | */
96 | public var count: Int {
97 | return shape.reduce(1, *)
98 | }
99 |
100 | public subscript(a: Int) -> T {
101 | get { return pointer[a] }
102 | set { pointer[a] = newValue }
103 | }
104 |
105 | public subscript(a: Int, b: Int) -> T {
106 | get { return pointer[a*strides[0] + b*strides[1]] }
107 | set { pointer[a*strides[0] + b*strides[1]] = newValue }
108 | }
109 |
110 | public subscript(a: Int, b: Int, c: Int) -> T {
111 | get { return pointer[a*strides[0] + b*strides[1] + c*strides[2]] }
112 | set { pointer[a*strides[0] + b*strides[1] + c*strides[2]] = newValue }
113 | }
114 |
115 | public subscript(a: Int, b: Int, c: Int, d: Int) -> T {
116 | get { return pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3]] }
117 | set { pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3]] = newValue }
118 | }
119 |
120 | public subscript(a: Int, b: Int, c: Int, d: Int, e: Int) -> T {
121 | get { return pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3] + e*strides[4]] }
122 | set { pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3] + e*strides[4]] = newValue }
123 | }
124 |
125 | public subscript(indices: [Int]) -> T {
126 | get { return pointer[offset(for: indices)] }
127 | set { pointer[offset(for: indices)] = newValue }
128 | }
129 |
130 | func offset(for indices: [Int]) -> Int {
131 | var offset = 0
132 | for i in 0..<indices.count {
133 | offset += indices[i] * strides[i]
134 | }
135 | return offset
136 | }
137 |
138 | /**
139 | Returns a transposed version of the MultiArray with the dimensions permuted
140 | by the given order. The underlying storage is shared; only the strides change.
141 | */
142 | public func transposed(_ order: [Int]) -> MultiArray {
143 | precondition(order.count == strides.count)
144 | var newShape = shape
145 | var newStrides = strides
146 | for i in 0..<order.count {
147 | newShape[i] = shape[order[i]]
148 | newStrides[i] = strides[order[i]]
149 | }
150 | return MultiArray(array, newShape, newStrides)
151 | }
152 |
153 | /**
154 | Returns a reshaped version of the MultiArray with the specified dimensions.
155 | */
156 | public func reshaped(_ dimensions: [Int]) -> MultiArray {
157 | let newCount = dimensions.reduce(1, *)
158 | precondition(newCount == count, "Cannot reshape \(shape) to \(dimensions)")
159 |
160 | var newStrides = [Int](repeating: 0, count: dimensions.count)
161 | newStrides[dimensions.count - 1] = 1
162 | for i in stride(from: dimensions.count - 1, to: 0, by: -1) {
163 | newStrides[i - 1] = newStrides[i] * dimensions[i]
164 | }
165 |
166 | return MultiArray(array, dimensions, newStrides)
167 | }
168 | }
169 |
170 | extension MultiArray: CustomStringConvertible {
171 | public var description: String {
172 | return description([])
173 | }
174 |
175 | func description(_ indices: [Int]) -> String {
176 | func indent(_ x: Int) -> String {
177 | return String(repeating: " ", count: x)
178 | }
179 |
180 | // This function is called recursively for every dimension.
181 | // Add an entry for this dimension to the end of the array.
182 | var indices = indices + [0]
183 |
184 | let d = indices.count - 1 // the current dimension
185 | let N = shape[d] // how many elements in this dimension
186 |
187 | var s = "["
188 | if indices.count < shape.count { // not last dimension yet?
189 | for i in 0.. UIImage? {
226 | if shape.count == 3, let (b, w, h) = toRawBytesRGBA(offset: offset, scale: scale) {
227 | return UIImage.fromByteArrayRGBA(b, width: w, height: h)
228 | } else if shape.count == 2, let (b, w, h) = toRawBytesGray(offset: offset, scale: scale) {
229 | return UIImage.fromByteArrayGray(b, width: w, height: h)
230 | } else {
231 | return nil
232 | }
233 | }
234 |
235 | /**
236 | Converts the multi-array into an array of RGBA pixels.
237 |
238 | - Note: The multi-array must have shape (3, height, width). If your array
239 | has a different shape, use `reshape()` or `transpose()` first.
240 | */
241 | public func toRawBytesRGBA(offset: T, scale: T)
242 | -> (bytes: [UInt8], width: Int, height: Int)? {
243 | guard shape.count == 3 else {
244 | print("Expected a multi-array with 3 dimensions, got \(shape)")
245 | return nil
246 | }
247 | guard shape[0] == 3 else {
248 | print("Expected first dimension to have 3 channels, got \(shape[0])")
249 | return nil
250 | }
251 |
252 | let height = shape[1]
253 | let width = shape[2]
254 | var bytes = [UInt8](repeating: 0, count: height * width * 4)
255 |
256 | for h in 0.. (bytes: [UInt8], width: Int, height: Int)? {
280 | guard shape.count == 2 else {
281 | print("Expected a multi-array with 2 dimensions, got \(shape)")
282 | return nil
283 | }
284 |
285 | let height = shape[0]
286 | let width = shape[1]
287 | var bytes = [UInt8](repeating: 0, count: height * width)
288 |
289 | for h in 0.. UIImage? {
306 | guard shape.count == 3 else {
307 | print("Expected a multi-array with 3 dimensions, got \(shape)")
308 | return nil
309 | }
310 | guard channel >= 0 && channel < shape[0] else {
311 | print("Channel must be between 0 and \(shape[0] - 1)")
312 | return nil
313 | }
314 |
315 | let height = shape[1]
316 | let width = shape[2]
317 | var a = MultiArray(shape: [height, width])
318 | for y in 0..<height {
319 | for x in 0..<width {
320 | a[y, x] = self[channel, y, x]
321 | }
322 | }
323 | return a.image(offset: offset, scale: scale)
324 | }
325 | }
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/NonMaxSuppression.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Foundation
24 | import UIKit
25 |
26 | /**
27 | Computes intersection-over-union overlap between two bounding boxes.
28 |
29 | - Returns: a value between 0 and 1.
30 | */
31 | public func IOU(_ a: CGRect, _ b: CGRect) -> Float {
32 | let areaA = a.width * a.height
33 | if areaA <= 0 { return 0 }
34 |
35 | let areaB = b.width * b.height
36 | if areaB <= 0 { return 0 }
37 |
38 | let intersectionMinX = max(a.minX, b.minX)
39 | let intersectionMinY = max(a.minY, b.minY)
40 | let intersectionMaxX = min(a.maxX, b.maxX)
41 | let intersectionMaxY = min(a.maxY, b.maxY)
42 | let intersectionArea = max(intersectionMaxY - intersectionMinY, 0) *
43 | max(intersectionMaxX - intersectionMinX, 0)
44 | return Float(intersectionArea / (areaA + areaB - intersectionArea))
45 | }
46 |
47 | public typealias NMSPrediction = (classIndex: Int, score: Float, rect: CGRect)
48 |
49 | /**
50 | Removes bounding boxes that overlap too much with other boxes that have
51 | a higher score.
52 | */
53 | public func nonMaxSuppression(predictions: [NMSPrediction], iouThreshold: Float, maxBoxes: Int) -> [Int] {
54 | return nonMaxSuppression(predictions: predictions,
55 | indices: Array(predictions.indices),
56 | iouThreshold: iouThreshold,
57 | maxBoxes: maxBoxes)
58 | }
59 |
60 | /**
61 | Removes bounding boxes that overlap too much with other boxes that have
62 | a higher score.
63 |
64 | Based on code from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/non_max_suppression_op.cc
65 |
66 | - Note: This version of NMS ignores the class of the bounding boxes. Since it
67 | selects the bounding boxes in a greedy fashion, if a certain class has many
68 | boxes that are selected, then it is possible none of the boxes of the other
69 | classes get selected.
70 |
71 | - Parameters:
72 | - predictions: an array of bounding boxes and their scores
73 | - indices: which predictions to look at
74 | - iouThreshold: used to decide whether boxes overlap too much
75 | - maxBoxes: the maximum number of boxes that will be selected
76 |
77 | - Returns: the array indices of the selected bounding boxes
78 | */
79 | public func nonMaxSuppression(predictions: [NMSPrediction],
80 | indices: [Int],
81 | iouThreshold: Float,
82 | maxBoxes: Int) -> [Int] {
83 |
84 | // Sort the boxes based on their confidence scores, from high to low.
85 | let sortedIndices = indices.sorted { predictions[$0].score > predictions[$1].score }
86 |
87 | var selected: [Int] = []
88 |
89 | // Loop through the bounding boxes, from highest score to lowest score,
90 | // and determine whether or not to keep each box.
91 | for i in 0..<sortedIndices.count {
92 | if selected.count >= maxBoxes { break }
93 |
93 |
94 | var shouldSelect = true
95 | let boxA = predictions[sortedIndices[i]]
96 |
97 | // Does the current box overlap one of the selected boxes more than the
98 | // given threshold amount? Then it's too similar, so don't keep it.
99 | for j in 0..<selected.count {
100 | let boxB = predictions[selected[j]]
101 | if IOU(boxA.rect, boxB.rect) > iouThreshold {
102 | shouldSelect = false
103 | break
104 | }
105 | }
106 |
107 | // This bounding box did not overlap too much with any previously selected
108 | // bounding box, so we'll keep it.
109 | if shouldSelect {
110 | selected.append(sortedIndices[i])
111 | }
112 | }
113 |
114 | return selected
115 | }
116 |
117 | /**
118 | Multi-class version of non maximum suppression.
119 |
120 | Where `nonMaxSuppression()` does not look at the class of the predictions at
121 | all, the multi-class version first selects the best bounding boxes for each
122 | class, and then keeps the best ones of those.
123 |
124 | With this method you can usually expect to see at least one bounding box for
125 | each class (unless all the scores for a given class are really low).
126 |
127 | Based on code from: https://github.com/tensorflow/models/blob/master/object_detection/core/post_processing.py
128 |
129 | - Parameters:
130 | - numClasses: the number of classes
131 | - predictions: an array of bounding boxes and their scores
132 | - scoreThreshold: used to only keep bounding boxes with a high enough score
133 | - iouThreshold: used to decide whether boxes overlap too much
134 | - maxPerClass: the maximum number of boxes that will be selected per class
135 | - maxTotal: maximum number of boxes that will be selected over all classes
136 |
137 | - Returns: the array indices of the selected bounding boxes
138 | */
139 | public func nonMaxSuppressionMultiClass(numClasses: Int,
140 | predictions: [NMSPrediction],
141 | scoreThreshold: Float,
142 | iouThreshold: Float,
143 | maxPerClass: Int,
144 | maxTotal: Int) -> [Int] {
145 | var selectedBoxes: [Int] = []
146 |
147 | // Look at all the classes one-by-one.
148 | for c in 0..<numClasses {
149 |
150 | // Gather only the predictions for this class and ignore the ones
151 | // whose score is too low.
152 | var filteredBoxes = [Int]()
153 |
154 | for p in 0..<predictions.count {
155 | let prediction = predictions[p]
156 | if prediction.classIndex == c {
157 | if prediction.score > scoreThreshold {
158 | filteredBoxes.append(p)
159 | }
160 | }
161 | }
162 |
163 | // Only keep the best bounding boxes for this class.
164 | let nmsBoxes = nonMaxSuppression(predictions: predictions,
165 | indices: filteredBoxes,
166 | iouThreshold: iouThreshold,
167 | maxBoxes: maxPerClass)
168 |
169 | // Add the indices of the surviving boxes to the big list.
170 | selectedBoxes.append(contentsOf: nmsBoxes)
171 | }
172 |
173 | // Sort all the surviving boxes by score and only keep the best ones.
174 | let sortedBoxes = selectedBoxes.sorted { predictions[$0].score > predictions[$1].score }
175 | return Array(sortedBoxes.prefix(maxTotal))
176 | }
177 |
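An illustrative call (not from the repository) showing how the NMS helpers above fit together; the three predictions are invented values:

    import CoreGraphics

    let predictions: [NMSPrediction] = [
        (classIndex: 0, score: 0.9, rect: CGRect(x: 0,   y: 0,   width: 100, height: 100)),
        (classIndex: 0, score: 0.8, rect: CGRect(x: 10,  y: 10,  width: 100, height: 100)),
        (classIndex: 1, score: 0.7, rect: CGRect(x: 300, y: 300, width: 80,  height: 80))
    ]
    // Greedily keep the highest-scoring boxes, dropping any box that overlaps
    // an already-kept box by more than 50% IOU.
    let kept = nonMaxSuppression(predictions: predictions, iouThreshold: 0.5, maxBoxes: 10)
    // kept == [0, 2]: box 1 overlaps box 0 too much and is suppressed.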
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/Predictions.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import Vision
24 |
25 | /**
26 | Returns the top `k` predictions from Core ML classification results as an
27 | array of `(String, Double)` pairs.
28 | */
29 | public func top(_ k: Int, _ prob: [String: Double]) -> [(String, Double)] {
30 | return Array(prob.map { x in (x.key, x.value) }
31 | .sorted(by: { a, b -> Bool in a.1 > b.1 })
32 | .prefix(through: min(k, prob.count) - 1))
33 | }
34 |
35 | /**
36 | Returns the top `k` predictions from Vision classification results as an
37 | array of `(String, Double)` pairs.
38 | */
39 | public func top(_ k: Int, _ observations: [VNClassificationObservation]) -> [(String, Double)] {
40 | // The Vision observations are sorted by confidence already.
41 | return observations.prefix(through: min(k, observations.count) - 1)
42 | .map { ($0.identifier, Double($0.confidence)) }
43 | }
44 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/CoreMLHelpers/UIImage+CVPixelBuffer.swift:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2017 M.I. Hollemans
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy
5 | of this software and associated documentation files (the "Software"), to
6 | deal in the Software without restriction, including without limitation the
7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | sell copies of the Software, and to permit persons to whom the Software is
9 | furnished to do so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in
12 | all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | IN THE SOFTWARE.
21 | */
22 |
23 | import UIKit
24 | import VideoToolbox
25 |
26 | extension UIImage {
27 | /**
28 | Resizes the image to width x height and converts it to an RGB CVPixelBuffer.
29 | */
30 | public func pixelBuffer(width: Int, height: Int) -> CVPixelBuffer? {
31 | return pixelBuffer(width: width, height: height,
32 | pixelFormatType: kCVPixelFormatType_32ARGB,
33 | colorSpace: CGColorSpaceCreateDeviceRGB(),
34 | alphaInfo: .noneSkipFirst)
35 | }
36 |
37 | /**
38 | Resizes the image to width x height and converts it to a grayscale CVPixelBuffer.
39 | */
40 | public func pixelBufferGray(width: Int, height: Int) -> CVPixelBuffer? {
41 | return pixelBuffer(width: width, height: height,
42 | pixelFormatType: kCVPixelFormatType_OneComponent8,
43 | colorSpace: CGColorSpaceCreateDeviceGray(),
44 | alphaInfo: .none)
45 | }
46 |
47 | func pixelBuffer(width: Int, height: Int, pixelFormatType: OSType,
48 | colorSpace: CGColorSpace, alphaInfo: CGImageAlphaInfo) -> CVPixelBuffer? {
49 | var maybePixelBuffer: CVPixelBuffer?
50 | let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
51 | kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue]
52 | let status = CVPixelBufferCreate(kCFAllocatorDefault,
53 | width,
54 | height,
55 | pixelFormatType,
56 | attrs as CFDictionary,
57 | &maybePixelBuffer)
58 |
59 | guard status == kCVReturnSuccess, let pixelBuffer = maybePixelBuffer else {
60 | return nil
61 | }
62 |
63 | CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
64 | let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
65 |
66 | guard let context = CGContext(data: pixelData,
67 | width: width,
68 | height: height,
69 | bitsPerComponent: 8,
70 | bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
71 | space: colorSpace,
72 | bitmapInfo: alphaInfo.rawValue)
73 | else {
74 | return nil
75 | }
76 |
77 | UIGraphicsPushContext(context)
78 | context.translateBy(x: 0, y: CGFloat(height))
79 | context.scaleBy(x: 1, y: -1)
80 | self.draw(in: CGRect(x: 0, y: 0, width: width, height: height))
81 | UIGraphicsPopContext()
82 |
83 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
84 | return pixelBuffer
85 | }
86 | }
87 |
88 | extension UIImage {
89 | /**
90 | Creates a new UIImage from a CVPixelBuffer.
91 | NOTE: This only works for RGB pixel buffers, not for grayscale.
92 | */
93 | public convenience init?(pixelBuffer: CVPixelBuffer) {
94 | var cgImage: CGImage?
95 | VTCreateCGImageFromCVPixelBuffer(pixelBuffer, nil, &cgImage)
96 |
97 | if let cgImage = cgImage {
98 | self.init(cgImage: cgImage)
99 | } else {
100 | return nil
101 | }
102 | }
103 |
104 | /**
105 | Creates a new UIImage from a CVPixelBuffer, using Core Image.
106 | */
107 | public convenience init?(pixelBuffer: CVPixelBuffer, context: CIContext) {
108 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
109 | let rect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer),
110 | height: CVPixelBufferGetHeight(pixelBuffer))
111 | if let cgImage = context.createCGImage(ciImage, from: rect) {
112 | self.init(cgImage: cgImage)
113 | } else {
114 | return nil
115 | }
116 | }
117 | }
118 |
119 | extension UIImage {
120 | /**
121 | Creates a new UIImage from an array of RGBA bytes.
122 | */
123 | @nonobjc public class func fromByteArrayRGBA(_ bytes: [UInt8],
124 | width: Int,
125 | height: Int,
126 | scale: CGFloat = 0,
127 | orientation: UIImageOrientation = .up) -> UIImage? {
128 | return fromByteArray(bytes, width: width, height: height,
129 | scale: scale, orientation: orientation,
130 | bytesPerRow: width * 4,
131 | colorSpace: CGColorSpaceCreateDeviceRGB(),
132 | alphaInfo: .premultipliedLast)
133 | }
134 |
135 | /**
136 | Creates a new UIImage from an array of grayscale bytes.
137 | */
138 | @nonobjc public class func fromByteArrayGray(_ bytes: [UInt8],
139 | width: Int,
140 | height: Int,
141 | scale: CGFloat = 0,
142 | orientation: UIImageOrientation = .up) -> UIImage? {
143 | return fromByteArray(bytes, width: width, height: height,
144 | scale: scale, orientation: orientation,
145 | bytesPerRow: width,
146 | colorSpace: CGColorSpaceCreateDeviceGray(),
147 | alphaInfo: .none)
148 | }
149 |
150 | @nonobjc class func fromByteArray(_ bytes: [UInt8],
151 | width: Int,
152 | height: Int,
153 | scale: CGFloat,
154 | orientation: UIImageOrientation,
155 | bytesPerRow: Int,
156 | colorSpace: CGColorSpace,
157 | alphaInfo: CGImageAlphaInfo) -> UIImage? {
158 | var image: UIImage?
159 | bytes.withUnsafeBytes { ptr in
160 | if let context = CGContext(data: UnsafeMutableRawPointer(mutating: ptr.baseAddress!),
161 | width: width,
162 | height: height,
163 | bitsPerComponent: 8,
164 | bytesPerRow: bytesPerRow,
165 | space: colorSpace,
166 | bitmapInfo: alphaInfo.rawValue),
167 | let cgImage = context.makeImage() {
168 | image = UIImage(cgImage: cgImage, scale: scale, orientation: orientation)
169 | }
170 | }
171 | return image
172 | }
173 | }
174 |
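A hedged round-trip sketch (not part of the sources) using the conversions above; someImage stands in for any UIImage:

    import UIKit

    func roundTrip(_ someImage: UIImage) -> UIImage? {
        // Draw the image into a 200 x 200 ARGB pixel buffer (the SRCNN patch size)...
        guard let buffer = someImage.pixelBuffer(width: 200, height: 200) else { return nil }
        // ...and convert the buffer back to a UIImage via VideoToolbox.
        return UIImage(pixelBuffer: buffer)
    }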
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/FSRCNNConverter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FSRCNNConverter.swift
3 | // SuperResolutionKit
4 | //
5 | // Created by Kentaro Matsumae on 2018/07/14.
6 | //
7 |
8 | import Foundation
9 | import UIKit
10 | import CoreML
11 |
12 | public class FSRCNNConverter {
13 |
14 | public static let shared = FSRCNNConverter()
15 | private let shrinkSize = 0
16 |
17 | private let patchInSize = 100
18 | private let patchOutSize = 200
19 | private let model = SRCNN(modelName: "FSRCNN")
20 |
21 | private func resize2x(src: UIImage) -> UIImage? {
22 | let w = src.size.width
23 | let h = src.size.height
24 | let targetSize = CGSize(width: w * 2, height: h * 2)
25 | UIGraphicsBeginImageContext(targetSize)
26 | let ctx = UIGraphicsGetCurrentContext()!
27 | ctx.interpolationQuality = CGInterpolationQuality.high
28 | let transform = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: targetSize.height)
29 | ctx.concatenate(transform)
30 | let rect = CGRect(x: 0, y: 0, width: targetSize.width, height: targetSize.height)
31 | ctx.draw(src.cgImage!, in: rect)
32 | let dst = UIGraphicsGetImageFromCurrentImageContext()
33 | return dst
34 | }
35 |
36 | private func expand(src: UIImage) -> UIImage? {
37 | let w = Int(src.size.width)
38 | let h = Int(src.size.height)
39 | let exW = w + shrinkSize * 2
40 | let exH = h + shrinkSize * 2
41 | let targetSize = CGSize(width: exW, height: exH)
42 |
43 | UIGraphicsBeginImageContext(targetSize)
44 | let ctx = UIGraphicsGetCurrentContext()!
45 | ctx.setFillColor(UIColor.black.cgColor)
46 | ctx.addRect(CGRect(x: 0, y: 0, width: targetSize.width, height: targetSize.height))
47 | ctx.drawPath(using: .fill)
48 | let transform = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: targetSize.height)
49 | ctx.concatenate(transform)
50 | let rect = CGRect(x: shrinkSize, y: shrinkSize, width: w, height: h)
51 | ctx.draw(src.cgImage!, in: rect)
52 | let dst = UIGraphicsGetImageFromCurrentImageContext()
53 | return dst
54 | }
55 |
56 | struct PatchIn {
57 | let buff: CVPixelBuffer
58 | let position: CGPoint
59 | }
60 | struct PatchOut {
61 | let buff: MLMultiArray
62 | let position: CGPoint
63 | }
64 |
65 | struct Patch {
66 | let patchOutImage: CGImage
67 | let position: CGPoint
68 | }
69 |
70 | private func crop(src: UIImage) -> [PatchIn] {
71 | var patchesIn: [PatchIn] = []
72 |
73 | guard let cgimage = src.cgImage else {
74 | return []
75 | }
76 | let numY = Int(src.size.height) / patchInSize
77 | let numX = Int(src.size.width) / patchInSize
78 |
79 | for y in 0..<numY {
80 | for x in 0..<numX {
81 | let rect = CGRect(x: x * patchInSize, y: y * patchInSize,
82 | width: patchInSize, height: patchInSize)
83 | guard let cropped = cgimage.cropping(to: rect) else {
84 | continue
85 | }
86 | let patch = UIImage(cgImage: cropped)
87 | guard let buff = patch.pixelBuffer(width: patchInSize, height: patchInSize) else {
88 | continue
89 | }
90 | let patchIn = PatchIn(buff: buff, position: CGPoint(x: x, y: y))
91 | patchesIn.append(patchIn)
92 | }
93 | }
94 | return patchesIn
95 | }
96 |
97 | private func predict(patches: [PatchIn]) -> [PatchOut] {
98 | var outs: [PatchOut] = []
99 |
100 | for patch in patches {
101 | do {
102 | let res = try model.prediction(image: patch.buff)
103 | let out = PatchOut(buff: res.output1, position: patch.position)
104 | outs.append(out)
105 | } catch {
106 | print(error)
107 | continue
108 | }
109 | }
110 | return outs
111 | }
112 |
113 | private func render(patches: [PatchOut], size: CGSize) -> UIImage? {
114 | UIGraphicsBeginImageContext(size)
115 |
116 | for patch in patches {
117 | let pos = patch.position
118 | guard let image = patch.buff.image(offset: 0, scale: 255) else {
119 | fatalError()
120 | continue
121 | }
122 | let rect = CGRect(x: pos.x * CGFloat(patchOutSize),
123 | y: pos.y * CGFloat(patchOutSize),
124 | width: CGFloat(patchOutSize),
125 | height: CGFloat(patchOutSize))
126 | image.draw(in: rect)
127 | }
128 |
129 | let dst = UIGraphicsGetImageFromCurrentImageContext()
130 | return dst
131 | }
132 |
133 | public func convert(from src: UIImage) -> UIImage? {
134 | mesure("start Fast-SRCNN",0)
135 | let t = Date()
136 |
137 | let t0 = Date()
138 | mesure("resize", t0.timeIntervalSince(t))
139 |
140 | let t1 = Date()
141 | mesure("expand",t1.timeIntervalSince(t0))
142 |
143 | /////////////
144 | let patches = crop(src: src)
145 |
146 | let t2 = Date()
147 | mesure("crop",t2.timeIntervalSince(t1))
148 |
149 | /////////////
150 | let outPatches = predict(patches: patches)
151 |
152 | let t3 = Date()
153 | mesure("predict",t3.timeIntervalSince(t2))
154 | /////////////
155 | var size = src.size
156 | size.width *= 2
157 | size.height *= 2
158 | let res = render(patches: outPatches, size: size)
159 |
160 | let t4 = Date()
161 | mesure("render",t4.timeIntervalSince(t3))
162 | /////////////
163 |
164 | mesure("total",t4.timeIntervalSince(t))
165 | return res
166 | }
167 | private func mesure(_ msg: String, _ time: TimeInterval) {
168 | print(String(format: "\(msg):\t%.2f", time))
169 | }
170 | }
171 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/SRCNN.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SRCNN.swift
3 | //
4 | // This file was automatically generated and should not be edited.
5 | //
6 |
7 | import CoreML
8 |
9 |
10 | /// Model Prediction Input Type
11 | @available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *)
12 | class SRCNNInput : MLFeatureProvider {
13 |
14 | /// image as color (kCVPixelFormatType_32BGRA) image buffer, 200 pixels wide by 200 pixels high
15 | var image: CVPixelBuffer
16 |
17 | var featureNames: Set<String> {
18 | get {
19 | return ["image"]
20 | }
21 | }
22 |
23 | func featureValue(for featureName: String) -> MLFeatureValue? {
24 | if (featureName == "image") {
25 | return MLFeatureValue(pixelBuffer: image)
26 | }
27 | return nil
28 | }
29 |
30 | init(image: CVPixelBuffer) {
31 | self.image = image
32 | }
33 | }
34 |
35 |
36 | /// Model Prediction Output Type
37 | @available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *)
38 | class SRCNNOutput : MLFeatureProvider {
39 |
40 | /// output1 as 3 x 200 x 200 3-dimensional array of doubles
41 | let output1: MLMultiArray
42 |
43 | var featureNames: Set<String> {
44 | get {
45 | return ["output1"]
46 | }
47 | }
48 |
49 | func featureValue(for featureName: String) -> MLFeatureValue? {
50 | if (featureName == "output1") {
51 | return MLFeatureValue(multiArray: output1)
52 | }
53 | return nil
54 | }
55 |
56 | init(output1: MLMultiArray) {
57 | self.output1 = output1
58 | }
59 | }
60 |
61 |
62 | /// Class for model loading and prediction
63 | @available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *)
64 | class SRCNN {
65 | var model: MLModel
66 |
67 | /**
68 | Construct a model with explicit path to mlmodel file
69 | - parameters:
70 | - url: the file url of the model
71 | - throws: an NSError object that describes the problem
72 | */
73 | init(contentsOf url: URL) throws {
74 | self.model = try MLModel(contentsOf: url)
75 | }
76 |
77 | /// Construct a model that automatically loads the model from the app's bundle
78 | convenience init(modelName: String) {
79 | let bundle = Bundle(for: SRCNN.self)
80 | let url = bundle.url(forResource: "SuperResolutionKit", withExtension: "bundle")!
81 | let srBundle = Bundle(url: url)!
82 | let assetPath = srBundle.url(forResource: modelName, withExtension: "mlmodelc")
83 | try! self.init(contentsOf: assetPath!)
84 | }
85 |
86 | /**
87 | Make a prediction using the structured interface
88 | - parameters:
89 | - input: the input to the prediction as SRCNNInput
90 | - throws: an NSError object that describes the problem
91 | - returns: the result of the prediction as SRCNNOutput
92 | */
93 | func prediction(input: SRCNNInput) throws -> SRCNNOutput {
94 | let outFeatures = try model.prediction(from: input)
95 | let result = SRCNNOutput(output1: outFeatures.featureValue(for: "output1")!.multiArrayValue!)
96 | return result
97 | }
98 |
99 | /**
100 | Make a prediction using the convenience interface
101 | - parameters:
102 | - image as color (kCVPixelFormatType_32BGRA) image buffer, 200 pixels wide by 200 pixels high
103 | - throws: an NSError object that describes the problem
104 | - returns: the result of the prediction as SRCNNOutput
105 | */
106 | func prediction(image: CVPixelBuffer) throws -> SRCNNOutput {
107 | let input_ = SRCNNInput(image: image)
108 | return try self.prediction(input: input_)
109 | }
110 | }
111 |
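A sketch of driving the generated wrapper by hand, which is what SRCNNConverter (next file) does per 200 x 200 patch. The image name is hypothetical, and the code assumes it runs inside the framework target, since SRCNN is not public:

    import UIKit

    let srcnn = SRCNN(modelName: "SRCNN")   // loads SRCNN.mlmodelc from the pod's resource bundle
    if let patch = UIImage(named: "some_200x200_patch")?.pixelBuffer(width: 200, height: 200),
       let result = try? srcnn.prediction(image: patch) {
        // result.output1 is a 3 x 200 x 200 MLMultiArray; the CoreMLHelpers
        // extension turns it back into a UIImage.
        let upscaledPatch = result.output1.image(offset: 0, scale: 255)
        print(upscaledPatch?.size ?? .zero)
    }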
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/SRCNNConverter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SRCNN.swift
3 | // SRCNNKit
4 | //
5 | // Copyright (c) 2018 DeNA Co., Ltd. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import CoreML
10 |
11 | public class SRCNNConverter {
12 |
13 | public static let shared = SRCNNConverter()
14 | private let shrinkSize = 0
15 |
16 | private let patchInSize = 200
17 | private let patchOutSize = 200
18 | private let model: SRCNN
19 |
20 | public init(modelName: String = "SRCNN") {
21 | model = SRCNN(modelName: modelName)
22 | }
23 |
24 | private func resize2x(src: UIImage) -> UIImage? {
25 | let w = src.size.width
26 | let h = src.size.height
27 | let targetSize = CGSize(width: w * 2, height: h * 2)
28 | UIGraphicsBeginImageContext(targetSize)
29 | let ctx = UIGraphicsGetCurrentContext()!
30 | ctx.interpolationQuality = CGInterpolationQuality.high
31 | let transform = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: targetSize.height)
32 | ctx.concatenate(transform)
33 | let rect = CGRect(x: 0, y: 0, width: targetSize.width, height: targetSize.height)
34 | ctx.draw(src.cgImage!, in: rect)
35 | let dst = UIGraphicsGetImageFromCurrentImageContext()
36 | return dst
37 | }
38 |
39 | private func expand(src: UIImage) -> UIImage? {
40 | let w = Int(src.size.width)
41 | let h = Int(src.size.height)
42 | let exW = w + shrinkSize * 2
43 | let exH = h + shrinkSize * 2
44 | let targetSize = CGSize(width: exW, height: exH)
45 |
46 | UIGraphicsBeginImageContext(targetSize)
47 | let ctx = UIGraphicsGetCurrentContext()!
48 | ctx.setFillColor(UIColor.black.cgColor)
49 | ctx.addRect(CGRect(x: 0, y: 0, width: targetSize.width, height: targetSize.height))
50 | ctx.drawPath(using: .fill)
51 | let transform = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: targetSize.height)
52 | ctx.concatenate(transform)
53 | let rect = CGRect(x: shrinkSize, y: shrinkSize, width: w, height: h)
54 | ctx.draw(src.cgImage!, in: rect)
55 | let dst = UIGraphicsGetImageFromCurrentImageContext()
56 | return dst
57 | }
58 |
59 | struct PatchIn {
60 | let buff: CVPixelBuffer
61 | let position: CGPoint
62 | }
63 | struct PatchOut {
64 | let buff: MLMultiArray
65 | let position: CGPoint
66 | }
67 |
68 | struct Patch {
69 | let patchOutImage: CGImage
70 | let position: CGPoint
71 | }
72 |
73 | private func crop(src: UIImage) -> [PatchIn] {
74 | var patchesIn: [PatchIn] = []
75 |
76 | guard let cgimage = src.cgImage else {
77 | return []
78 | }
79 | let numY = Int(src.size.height) / patchOutSize
80 | let numX = Int(src.size.width) / patchOutSize
81 |
82 | for y in 0..<numY {
83 | for x in 0..<numX {
84 | let rect = CGRect(x: x * patchInSize, y: y * patchInSize,
85 | width: patchInSize, height: patchInSize)
86 | guard let cropped = cgimage.cropping(to: rect) else {
87 | continue
88 | }
89 | let patch = UIImage(cgImage: cropped)
90 | guard let buff = patch.pixelBuffer(width: patchInSize, height: patchInSize) else {
91 | continue
92 | }
93 | let patchIn = PatchIn(buff: buff, position: CGPoint(x: x, y: y))
94 | patchesIn.append(patchIn)
95 | }
96 | }
97 | return patchesIn
98 | }
99 |
100 | private func predict(patches: [PatchIn]) -> [PatchOut] {
101 | var outs: [PatchOut] = []
102 |
103 | for patch in patches {
104 | do {
105 | let res = try model.prediction(image: patch.buff)
106 | let out = PatchOut(buff: res.output1, position: patch.position)
107 | outs.append(out)
108 | } catch {
109 | print(error)
110 | continue
111 | }
112 | }
113 | return outs
114 | }
115 |
116 | private func render(patches: [PatchOut], size: CGSize) -> UIImage? {
117 | UIGraphicsBeginImageContext(size)
118 |
119 | for patch in patches {
120 | let pos = patch.position
121 | guard let image = patch.buff.image(offset: 0, scale: 255) else {
122 | fatalError()
123 | continue
124 | }
125 | let rect = CGRect(x: pos.x * CGFloat(patchOutSize),
126 | y: pos.y * CGFloat(patchOutSize),
127 | width: CGFloat(patchOutSize),
128 | height: CGFloat(patchOutSize))
129 | image.draw(in: rect)
130 | }
131 |
132 | let dst = UIGraphicsGetImageFromCurrentImageContext()
133 | return dst
134 | }
135 |
136 | public func convert(from src: UIImage) -> UIImage? {
137 | mesure("start SRCNN",0)
138 | let t = Date()
139 |
140 | /////////////
141 | guard let resized = resize2x(src: src) else {
142 | return nil
143 | }
144 | let t0 = Date()
145 | mesure("resize",t0.timeIntervalSince(t))
146 |
147 | /////////////
148 | guard let expanded = expand(src: resized) else {
149 | return nil
150 | }
151 |
152 | let t1 = Date()
153 | mesure("expand",t1.timeIntervalSince(t0))
154 |
155 | /////////////
156 | let patches = crop(src: expanded)
157 |
158 | let t2 = Date()
159 | mesure("crop",t2.timeIntervalSince(t1))
160 |
161 | /////////////
162 | let outPatches = predict(patches: patches)
163 |
164 | let t3 = Date()
165 | mesure("predict",t3.timeIntervalSince(t2))
166 | /////////////
167 | let res = render(patches: outPatches, size: resized.size)
168 |
169 | let t4 = Date()
170 | mesure("render",t4.timeIntervalSince(t3))
171 | /////////////
172 |
173 | mesure("total",t4.timeIntervalSince(t))
174 | return res
175 | }
176 | private func mesure(_ msg: String, _ time: TimeInterval) {
177 | print(String(format: "\(msg):\t%.2f", time))
178 | }
179 | }
180 |
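A minimal call into the converter above (the image name is a placeholder). convert(from:) is synchronous and relatively slow, so callers normally dispatch it off the main thread, as the UIImageView extension in the next file does:

    import UIKit

    if let lowRes = UIImage(named: "low_res_page"),
       let highRes = SRCNNConverter.shared.convert(from: lowRes) {
        // highRes is roughly 2x the pixel size of lowRes, rebuilt patch by patch.
        print(highRes.size)
    }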
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/SRCNNKit.h:
--------------------------------------------------------------------------------
1 | //
2 | // SRCNNKit.h
3 | // SRCNNKit
4 | //
5 | // Copyright (c) 2018 DeNA Co., Ltd. All rights reserved.
6 | //
7 |
8 | #import <UIKit/UIKit.h>
9 |
10 | //! Project version number for SRCNNKit.
11 | FOUNDATION_EXPORT double SRCNNKitVersionNumber;
12 |
13 | //! Project version string for SRCNNKit.
14 | FOUNDATION_EXPORT const unsigned char SRCNNKitVersionString[];
15 |
16 | // In this header, you should import all the public headers of your framework using statements like #import <SRCNNKit/PublicHeader.h>
17 |
18 |
19 |
--------------------------------------------------------------------------------
/SuperResolutionKit/Classes/UIImageView+SRCNN.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UIImageView+SRCNN.swift
3 | // SRCNNKit
4 | //
5 | // Copyright (c) 2018 DeNA Co., Ltd. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
10 | extension UIImageView {
11 |
12 | public func setSRImage(image src: UIImage) {
13 | setSRImage(image: src, completion: {})
14 | }
15 |
16 | public func setSRImage(image src: UIImage, completion: @escaping (() -> Void)) {
17 | self.image = src
18 | DispatchQueue.global().async { [weak self] in
19 | if let output = SRCNNConverter.shared.convert(from: src) {
20 | DispatchQueue.main.async {
21 | self?.image = output
22 | self?.layer.add(CATransition(), forKey: nil)
23 | completion()
24 | }
25 | }
26 | }
27 | }
28 |
29 | public func setFSRImage(image src: UIImage) {
30 | setFSRImage(image: src, completion: {})
31 | }
32 |
33 | public func setFSRImage(image src: UIImage, completion: @escaping (() -> Void)) {
34 | self.image = src
35 | DispatchQueue.global().async { [weak self] in
36 | if let output = FSRCNNConverter.shared.convert(from: src) {
37 | DispatchQueue.main.async {
38 | self?.image = output
39 | self?.layer.add(CATransition(), forKey: nil)
40 | completion()
41 | }
42 | }
43 | }
44 | }
45 | }
46 |
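A sketch of the typical call site (the view controller, outlet, and image name are placeholders): the extension shows the original image immediately and swaps in the super-resolved version once the background conversion completes:

    import UIKit
    import SuperResolutionKit

    class PageViewController: UIViewController {
        @IBOutlet weak var imageView: UIImageView!

        override func viewDidLoad() {
            super.viewDidLoad()
            if let page = UIImage(named: "page1") {
                imageView.setSRImage(image: page) {
                    print("super-resolution finished")
                }
            }
        }
    }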
--------------------------------------------------------------------------------
/_Pods.xcodeproj:
--------------------------------------------------------------------------------
1 | Example/Pods/Pods.xcodeproj
--------------------------------------------------------------------------------
/script/convert.py:
--------------------------------------------------------------------------------
1 | from os import listdir, makedirs
2 | from os.path import isfile, join, exists
3 |
4 | import argparse
5 |
6 | parser = argparse.ArgumentParser()
7 | parser.add_argument("input_dir", help="Data input directory")
8 | parser.add_argument("output_dir", help="Data output directory")
9 | args = parser.parse_args()
10 |
11 | import numpy as np
12 | from scipy import misc
13 | from PIL import Image
14 |
15 | scale = 2.0
16 | input_size = 200
17 | label_size = 200
18 | stride = 200
19 |
20 | if not exists(args.output_dir):
21 | makedirs(args.output_dir)
22 | if not exists(join(args.output_dir, "input")):
23 | makedirs(join(args.output_dir, "input"))
24 | if not exists(join(args.output_dir, "label")):
25 | makedirs(join(args.output_dir, "label"))
26 |
27 | count = 1
28 | for f in listdir(args.input_dir):
29 | f = join(args.input_dir, f)
30 | if not isfile(f):
31 | continue
32 |
33 | image = np.asarray(Image.open(f).convert('RGB'))
34 | print(f, image.shape)
35 |
36 | h, w, c = image.shape
37 |
38 | scaled = misc.imresize(image, 1.0/scale, 'bicubic')
39 | scaled = misc.imresize(scaled, scale/1.0, 'bicubic')
40 |
41 | for y in range(0, h - input_size + 1, stride):
42 | for x in range(0, w - input_size + 1, stride):
43 | print(y,x)
44 | sub_img = scaled[y : y + input_size, x : x + input_size]
45 | sub_img_label = image[y : y + label_size, x : x + label_size]
46 | misc.imsave(join(args.output_dir, "input", str(count) + '.png'), sub_img)
47 | misc.imsave(join(args.output_dir, "label", str(count) + '.png'), sub_img_label)
48 |
49 | count += 1
50 |
--------------------------------------------------------------------------------
/script/coreml_convert.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | parser = argparse.ArgumentParser()
3 | parser.add_argument("model")
4 | parser.add_argument("out")
5 | args = parser.parse_args()
6 |
7 | import coremltools
8 | coreml_model = coremltools.converters.keras.convert(args.model,
9 | input_names = 'image',
10 | image_input_names = 'image',
11 | image_scale = 0.00392156863)
12 |
13 | import os
14 | coreml_model.save(os.path.join(args.out, 'SRCNN.mlmodel'))
15 |
16 |
--------------------------------------------------------------------------------
/script/coreml_predict.py:
--------------------------------------------------------------------------------
1 | import coremltools
2 | import numpy as np
3 | import argparse
4 | from PIL import Image
5 |
6 | parser = argparse.ArgumentParser()
7 | parser.add_argument("model", help="model file path")
8 | parser.add_argument("input", help="input patch image")
9 | parser.add_argument("output", help="output patch image")
10 | args = parser.parse_args()
11 |
12 | model = coremltools.models.MLModel(args.model)
13 | img = Image.open(args.input).convert('RGB')
14 | x = {'image': img}
15 | res = model.predict(x)
16 |
17 | out = np.asarray(res['output1'] * 255., np.uint8)
18 | print(out.shape)
19 | out = np.rollaxis(out, 0, 3)
20 | print(out.shape)
21 | outimg = Image.fromarray(out)
22 | outimg.save(args.output)
23 |
24 |
--------------------------------------------------------------------------------
/script/dump_srcnn_inter_layer.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential, Model
2 | from keras.layers import Conv2D, Input, BatchNormalization
3 | from keras.callbacks import ModelCheckpoint
4 | from keras.optimizers import SGD, Adam
5 | import numpy as np
6 | import math
7 | import os
8 | from keras.models import load_model
9 | from PIL import Image
10 |
11 | patch_size = 200
12 |
13 | def setup_session():
14 | import tensorflow as tf
15 | from keras.backend import tensorflow_backend
16 | config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
17 | session = tf.Session(config=config)
18 | tensorflow_backend.set_session(session)
19 |
20 | def predict(model, input_lr_patch_file, out_dir, scale = 2.0, ch = 1):
21 | basename = os.path.basename(input_lr_patch_file)
22 | filename, ext = os.path.splitext(basename)
23 |
24 | img = Image.open(input_lr_patch_file)
25 | img = img.convert('YCbCr')
26 |
27 | y_ary = np.asarray(img)[:,:,0]
28 | y_ary = np.uint8(y_ary)/255.
29 | y_ary = np.reshape(y_ary, (1, y_ary.shape[0],y_ary.shape[1], ch))
30 | dump_intermediate_layer('conv2d_1', model, y_ary, out_dir)
31 | dump_intermediate_layer('conv2d_2', model, y_ary, out_dir)
32 | dump_intermediate_layer('conv2d_3', model, y_ary, out_dir)
33 | """
34 | res = res*255.
35 | res = np.clip(res, 0, 255) #important
36 | res = np.uint8(res)
37 | res = res.reshape(res.shape[1],res.shape[2],res.shape[3])
38 | """
39 |
40 | def dump_intermediate_layer(layer_name, model, patch, out_dir):
41 | dump_model = Model(
42 | inputs=model.input,
43 | outputs=model.get_layer(layer_name).output)
44 | dump = dump_model.predict(patch, batch_size=1)
45 | num_ch = dump.shape[3]
46 |
47 | grid_layout = [int(math.ceil(num_ch/10.)), 10]
48 | grid_size = [grid_layout[0] * patch_size, grid_layout[1] * patch_size]
49 | dist_img = Image.new('L', grid_size)
50 | print(grid_layout)
51 |
52 | for ch in range(0, num_ch):
53 | res = dump[0,:,:,ch]
54 |
55 | w1 = res
56 | baseline = abs(min(w1.flatten().tolist()))
57 | w1 = w1 + baseline
58 | topval = max(w1.flatten().tolist())
59 | w1 = w1 / topval * 255
60 | w1 = np.uint8(w1)
61 | res = w1
62 |
63 | #res = res*255.
64 | #res = np.clip(res, 0, 255) #important
65 | #res = np.uint8(res)
66 | img = Image.fromarray(res)
67 | pos = (int(ch/grid_layout[1])*patch_size, int(ch%grid_layout[1])*patch_size)
68 | dist_img.paste(img, pos)
69 |
70 | path = '%s/%s_grid.png' % (out_dir, layer_name)
71 | print(path)
72 | dist_img.save(path)
73 |
74 | def save_as_img(prefix, out_dir, patch):
75 | patch = np.reshape(patch, (patch.shape[0],patch.shape[1]))
76 | img = Image.fromarray(patch)
77 | path = '%s/%s_patch.png' % (out_dir,prefix)
78 | img.save(path)
79 |
80 | if __name__ == "__main__":
81 | import argparse
82 | parser = argparse.ArgumentParser()
83 | parser.add_argument("model", help="model file path")
84 | parser.add_argument("input", help="row res patch image path")
85 | parser.add_argument("out_dir", help="output dir")
86 | parser.add_argument("-scale", type=int, default=2)
87 | args = parser.parse_args()
88 | #print(args)
89 |
90 | if not os.path.exists(args.out_dir):
91 | os.makedirs(args.out_dir)
92 |
93 | setup_session()
94 | model = load_model(args.model)
95 |
96 | predict(model, args.input, args.out_dir, args.scale)
97 | #print('fin')
98 |
--------------------------------------------------------------------------------
/script/dump_weight.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential, Model
2 | from keras.layers import Conv2D, Input, BatchNormalization
3 | from keras.callbacks import ModelCheckpoint
4 | from keras.optimizers import SGD, Adam
5 | import numpy as np
6 | import math
7 | import os
8 | from keras.models import load_model
9 | from PIL import Image
10 |
11 | def setup_session():
12 | import tensorflow as tf
13 | from keras.backend import tensorflow_backend
14 | config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
15 | session = tf.Session(config=config)
16 | tensorflow_backend.set_session(session)
17 |
18 | def save_as_img(prefix, out_dir, patch):
19 | patch = np.reshape(patch, (patch.shape[0],patch.shape[1]))
20 | img = Image.fromarray(patch)
21 | path = '%s/%s_patch.png' % (out_dir,prefix)
22 | img.save(path)
23 |
24 | def dump_weight(model, out_dir):
25 | weights = model.get_layer('conv2d_1').get_weights()
26 | w = weights[0]
27 | ch = w.shape[3]
28 | patch_size = w.shape[0]
29 |
30 | grid_layout = [int(math.ceil(ch/10.)), 10]
31 | grid_size = [grid_layout[0] * patch_size, grid_layout[1] * patch_size]
32 | space = 5
33 | grid_size = [grid_size[0] + space * grid_layout[0], grid_size[1] + space * grid_layout[1]]
34 | dist_img = Image.new('L', grid_size)
35 | print(grid_layout)
36 |
37 | for c in range(0, ch):
38 | mask = w[:,:,0,c]
39 |
40 | print(weights[1][c]) # bias term for this filter
41 |
42 | w1 = mask
43 | baseline = abs(min(w1.flatten().tolist()))
44 | w1 = w1 + baseline
45 | topval = max(w1.flatten().tolist())
46 | w1 = w1 / topval * 255
47 | w1 = np.uint8(w1)
48 | mask = w1
49 |
50 | #save_as_img('weight-%d'%c, out_dir, mask)
51 | img = Image.fromarray(mask)
52 | x = int(c/grid_layout[1])
53 | y = int(c%grid_layout[1])
54 | pos = (x*patch_size + x*space, y*patch_size + y*space)
55 | #print(pos)
56 | dist_img.paste(img, pos)
57 |
58 | path = '%s/weights.png' % out_dir
59 | print(path)
60 | dist_img.save(path)
61 |
62 | if __name__ == "__main__":
63 | import argparse
64 | parser = argparse.ArgumentParser()
65 | parser.add_argument("model", help="model file path")
66 | parser.add_argument("out_dir", help="output dir")
67 | args = parser.parse_args()
68 | #print(args)
69 |
70 | if not os.path.exists(args.out_dir):
71 | os.makedirs(args.out_dir)
72 |
73 | model = load_model(args.model)
74 | dump_weight(model, args.out_dir)
75 |
76 |
--------------------------------------------------------------------------------
/script/fsrcnn/convert.py:
--------------------------------------------------------------------------------
1 | from os import listdir, makedirs
2 | from os.path import isfile, join, exists
3 |
4 | import argparse
5 |
6 | parser = argparse.ArgumentParser()
7 | parser.add_argument("input_dir", help="Data input directory")
8 | parser.add_argument("output_dir", help="Data output directory")
9 | parser.add_argument("-scale", type=int, default=2, help="Scale")
10 | args = parser.parse_args()
11 | print(args)
12 |
13 | import numpy as np
14 | from scipy import misc
15 | from PIL import Image
16 | from tqdm import tqdm
17 |
18 | scale = float(args.scale)
19 | patch_size = 100
20 | label_size = int(patch_size * scale)
21 | stride = patch_size
22 |
23 | if not exists(args.output_dir):
24 | makedirs(args.output_dir)
25 | if not exists(join(args.output_dir, "input")):
26 | makedirs(join(args.output_dir, "input"))
27 | if not exists(join(args.output_dir, "label")):
28 | makedirs(join(args.output_dir, "label"))
29 |
30 | count = 1
31 | dirs = listdir(args.input_dir)
32 |
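   | # For each source image: bicubic-downscale it by the scale factor, then cut aligned pairs of low-res input patches (patch_size) and high-res label patches (label_size = patch_size * scale) into output_dir/input and output_dir/label.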
33 | for i in tqdm(range(len(dirs))):
34 | f = dirs[i]
35 | f = join(args.input_dir, f)
36 | if not isfile(f):
37 | continue
38 |
39 | image = np.asarray(Image.open(f).convert('RGB'))
40 | #print(f, image.shape)
41 |
42 | h, w, c = image.shape
43 |
44 | scaled = misc.imresize(image, 1.0/scale, 'bicubic')
45 |
46 | for y in range(0, h - label_size + 1, stride):
47 | for x in range(0, w - label_size + 1, stride):
48 | (x_p, y_p) = (int(x/scale), int(y/scale))
49 | #print(y,x,y_p,x_p)
50 | sub_img_patch = scaled[y_p : y_p + patch_size, x_p : x_p + patch_size]
51 | sub_img_label = image[y : y + label_size, x : x + label_size]
52 | misc.imsave(join(args.output_dir, "input", str(count) + '.png'), sub_img_patch)
53 | misc.imsave(join(args.output_dir, "label", str(count) + '.png'), sub_img_label)
54 |
55 | count += 1
56 |
--------------------------------------------------------------------------------
/script/fsrcnn/pred_keras.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Conv2D, Input, BatchNormalization
3 | from keras.callbacks import ModelCheckpoint
4 | from keras.optimizers import SGD, Adam
5 | import numpy
6 | import math
7 | import os
8 | from keras.models import load_model
9 | from PIL import Image
10 |
11 | patch_size = 100
12 | input_size = 200
13 | label_size = 200
14 |
15 | def setup_session():
16 | import tensorflow as tf
17 | from keras.backend import tensorflow_backend
18 | config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
19 | session = tf.Session(config=config)
20 | tensorflow_backend.set_session(session)
21 |
22 | def predict2(model, input_file, out_dir, scale = 2.0):
23 | basename = os.path.basename(input_file)
24 | filename, ext = os.path.splitext(basename)
25 |
26 | img = Image.open(input_file)
27 | img = img.convert('RGB')
28 | img.save('%s/%s_1_org%s' % (out_dir, filename, ext))
29 |
30 | lr_size = tuple([int(x/scale) for x in img.size])
31 | lr_img = img.resize(lr_size, Image.BICUBIC)
32 | lr_img.resize(img.size, Image.BICUBIC).save('%s/%s_3_lr%s' % (out_dir, filename, ext))
33 |
34 | lr_img = numpy.asarray(lr_img)
35 | hr_img = numpy.zeros((img.size[1], img.size[0], 3)) # h<->w exchanged
36 | print(hr_img.shape)
37 | print(lr_img.shape)
38 |
39 | h,w,c = lr_img.shape
40 | eh = h if h % patch_size == 0 else h + patch_size - (h % patch_size) # e.g. 240 % 100 = 40, so 240 + 100 - 40 = 300
41 | ew = w if w % patch_size == 0 else w + patch_size - (w % patch_size)
42 | print('extend:',eh,ew,c)
43 | lr_base = numpy.zeros((eh, ew, 3), dtype='uint8')
44 | lr_base[0:h, 0:w] = lr_img
45 | lr_img = lr_base
46 | hr_img = numpy.zeros((int(eh * scale), int(ew * scale), 3)) # h<->w exchanged
47 |
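   | # Tiled inference: the low-res image was padded to a multiple of patch_size above; super-resolve each patch independently and paste the upscaled result into the high-res canvas.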
48 | for y in range(0, h, patch_size):
49 | for x in range(0, w, patch_size):
50 | patch = lr_img[y:y+patch_size, x:x+patch_size]
51 | save_as_img('lr',out_dir, patch, y, x)
52 | patch = patch/255.
53 | patch = patch.reshape(1, patch.shape[0],patch.shape[1],patch.shape[2])
54 | res = model.predict(patch, batch_size=1)
55 | res = res*255.
56 | res = numpy.clip(res, 0, 255) #important
57 | res = numpy.uint8(res)
58 | res = res.reshape(res.shape[1],res.shape[2],res.shape[3])
59 | save_as_img('hr',out_dir, res, y, x)
60 | dy = int(y * scale)
61 | dh = dy + int(patch_size * scale)
62 | dx = int(x * scale)
63 | dw = dx + int(patch_size * scale)
64 | hr_img[dy:dh,dx:dw] = res
65 |
66 | hr_img = numpy.uint8(hr_img)
67 | hr_img = Image.fromarray(hr_img)
68 | hr_img = hr_img.convert('RGB')
69 | h,w,c = numpy.asarray(img).shape
70 | hr_img = numpy.asarray(hr_img)[0:h,0:w]
71 | hr_img = Image.fromarray(hr_img)
72 | hr_img.save('%s/%s_2_hr%s' % (out_dir, filename, ext))
73 |
74 | def save_as_img(prefix, out_dir, patch, y, x):
75 | img = Image.fromarray(patch)
76 | path = '%s/%s_patch_%d_%d.png' % (out_dir,prefix,y,x)
77 | img.save(path)
78 |
79 | if __name__ == "__main__":
80 | import argparse
81 | parser = argparse.ArgumentParser()
82 | parser.add_argument("model", help="model file path")
83 | parser.add_argument("input", help="low-res image path")
84 | parser.add_argument("out_dir", help="output dir")
85 | args = parser.parse_args()
86 | print(args)
87 |
88 | if not os.path.exists(args.out_dir):
89 | os.makedirs(args.out_dir)
90 |
91 | setup_session()
92 | model = load_model(args.model)
93 |
94 | predict2(model, args.input, args.out_dir)
95 | print('fin')
96 |
--------------------------------------------------------------------------------
/script/fsrcnn/train.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Conv2D, Conv2DTranspose, Input, BatchNormalization, PReLU
3 | from keras.callbacks import ModelCheckpoint, Callback, TensorBoard
4 | from keras.optimizers import SGD, Adam
5 | import numpy as np
6 | import math
7 | import os
8 | import random
9 | from os import listdir, makedirs
10 | from os.path import isfile, join, exists
11 | from PIL import Image
12 |
13 | import os.path, sys
14 | sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
15 | from s3sync import S3SyncCallback
16 |
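   | # FSRCNN-style network: feature extraction (d filters, 5x5), shrinking (s filters, 1x1), m mapping layers (3x3), expanding back to d filters (1x1), and a 9x9 transposed convolution that upscales by the given scale factor.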
17 | def model(scale = 2):
18 | d = 56
19 | s = 12
20 | m = 4
21 | c = 3
22 | SRCNN = Sequential()
23 | SRCNN.add(Conv2D(nb_filter=d, nb_row=5, nb_col=5, init='glorot_uniform', border_mode='same', bias=True, input_shape=(100, 100, 3)))
24 | SRCNN.add(PReLU(shared_axes=[1, 2]))
25 | SRCNN.add(Conv2D(nb_filter=s, nb_row=1, nb_col=1, init='glorot_uniform', border_mode='same', bias=True))
26 | SRCNN.add(PReLU(shared_axes=[1, 2]))
27 | for i in range(m):
28 | SRCNN.add(Conv2D(nb_filter=s, nb_row=3, nb_col=3, init='glorot_uniform', border_mode='same', bias=True))
29 | SRCNN.add(PReLU(shared_axes=[1, 2]))
30 | SRCNN.add(Conv2D(nb_filter=d, nb_row=1, nb_col=1, init='glorot_uniform', border_mode='same', bias=True))
31 | SRCNN.add(PReLU(shared_axes=[1, 2]))
32 | SRCNN.add(Conv2DTranspose(filters=3, kernel_size=(9,9), strides=(scale, scale), init='glorot_uniform', border_mode='same', bias=True))
33 |
34 | adam = Adam(lr=0.0003)
35 | SRCNN.compile(optimizer=adam, loss='mean_squared_error', metrics=['mean_squared_error'])
36 | return SRCNN
37 |
38 | class MyDataGenerator(object):
39 |
40 | def flow_from_directory(self, input_dir, label_dir, batch_size=32):
41 | images = []
42 | labels = []
43 | while True:
44 | files = listdir(input_dir)
45 | random.shuffle(files)
46 | for f in files:
47 | images.append(self.load_image(input_dir, f))
48 | labels.append(self.load_image(label_dir, f))
49 | if len(images) == batch_size:
50 | x_inputs = np.asarray(images)
51 | x_labels = np.asarray(labels)
52 | images = []
53 | labels = []
54 | yield x_inputs, x_labels
55 |
56 | def load_image(self, src_dir, f):
57 | X = np.asarray(Image.open(join(src_dir, f)).convert('RGB'), dtype='float32')
58 | X /= 255.
59 | return X
60 |
61 | def train(log_dir, model_dir, train_dir, test_dir, eval_img, scale, epochs, steps):
62 | srcnn_model = model(scale)
63 | print(srcnn_model.summary())
64 |
65 | datagen = MyDataGenerator()
66 | train_gen = datagen.flow_from_directory(os.path.join(
67 | train_dir, 'input'),
68 | os.path.join(train_dir, 'label'),
69 | batch_size = 10)
70 |
71 | val_gen = datagen.flow_from_directory(
72 | os.path.join(test_dir, 'input'),
73 | os.path.join(test_dir, 'label'),
74 | batch_size = 10)
75 |
76 | class PredCallback(Callback):
77 | def on_epoch_end(self, epoch, logs=None):
78 | pass
79 | #pred.predict(self.model, eval_img, 'base-%d.png' % epoch, 'out-%d.png' % epoch, False)
80 |
81 | class PSNRCallback(Callback):
82 | def on_epoch_end(self, epoch, logs=None):
83 | loss = logs['loss'] * (255. ** 2) # MSE is computed on [0,1] images; scale by 255^2 to report PSNR on the 0-255 range
84 | val_loss = logs['val_loss'] * (255. ** 2)
85 | psnr = 20 * math.log10(255. / math.sqrt(loss))
86 | val_psnr = 20 * math.log10(255. / math.sqrt(val_loss))
87 | print("\n")
88 | print("PSNR:%s" % psnr)
89 | print("PSNR(val):%s" % val_psnr)
90 |
91 | pd_cb = PredCallback()
92 | ps_cb = PSNRCallback()
93 | md_cb = ModelCheckpoint(os.path.join(model_dir,'check.h5'), monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='min', period=1)
94 | tb_cb = TensorBoard(log_dir=log_dir)
95 | s3_cb = S3SyncCallback(s3_base_url='s3://tryswift/super-resolution-kit/log', log_dir=log_dir)
96 |
97 | srcnn_model.fit_generator(
98 | generator = train_gen,
99 | steps_per_epoch = steps,
100 | validation_data = val_gen,
101 | validation_steps = steps,
102 | epochs = epochs,
103 | callbacks=[ps_cb, md_cb, tb_cb, s3_cb])
104 |
105 | srcnn_model.save(os.path.join(model_dir,'model.h5'))
106 |
107 | if __name__ == "__main__":
108 | import argparse
109 | parser = argparse.ArgumentParser()
110 | parser.add_argument("log_dir")
111 | parser.add_argument("model_dir")
112 | parser.add_argument("train_dir")
113 | parser.add_argument("test_dir")
114 | parser.add_argument("--eval_img")
115 | parser.add_argument("-scale", type=int, default=2)
116 | parser.add_argument("-epochs", type=int, default=100)
117 | parser.add_argument("-steps", type=int, default=100)
118 | args = parser.parse_args()
119 | print(args)
120 |
121 | if not exists(args.model_dir):
122 | makedirs(args.model_dir)
123 |
124 | train(args.log_dir, args.model_dir, args.train_dir, args.test_dir, args.eval_img, args.scale, args.epochs, args.steps)
125 |
--------------------------------------------------------------------------------
/script/packages.txt:
--------------------------------------------------------------------------------
1 | absl-py==0.1.10
2 | backports.weakref==1.0.post1
3 | bleach==1.5.0
4 | coremltools==0.8
5 | enum34==1.1.6
6 | funcsigs==1.0.2
7 | futures==3.2.0
8 | graphviz==0.8.2
9 | h5py==2.7.1
10 | html5lib==0.9999999
11 | Keras==2.1.3
12 | Markdown==2.6.11
13 | mock==2.0.0
14 | numpy==1.14.0
15 | pbr==4.1.0
16 | Pillow==5.0.0
17 | protobuf==3.5.1
18 | pydot==1.2.4
19 | pydot-ng==1.0.0
20 | pyparsing==2.2.0
21 | python-utils==2.3.0
22 | PyYAML==3.12
23 | scipy==1.0.0
24 | six==1.10.0
25 | tensorflow==1.5.0
26 | tensorflow-tensorboard==1.5.1
27 | tqdm==4.23.4
28 | Werkzeug==0.14.1
29 |
--------------------------------------------------------------------------------
/script/pdf2img/pdf2png.workflow/Contents/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleName</key>
6 | 	<string>pdf2png</string>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/script/pdf2img/pdf2png.workflow/Contents/QuickLook/Preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenmaz/SuperResolutionKit/18415c2e748c35cc48b6b7e4f7d05fda8372c1c7/script/pdf2img/pdf2png.workflow/Contents/QuickLook/Preview.png
--------------------------------------------------------------------------------
/script/plot.py:
--------------------------------------------------------------------------------
1 | from keras.models import load_model
2 | from keras.utils import plot_model
3 | import argparse
4 | import os
5 |
6 | parser = argparse.ArgumentParser()
7 | parser.add_argument("model", help="model file path")
8 | parser.add_argument("out_dir")
9 | args = parser.parse_args()
10 | print(args)
11 |
12 | model = load_model(args.model)
13 | plot_model(model, show_shapes=True, to_file=os.path.join(args.out_dir,'model.png'))
14 |
15 |
16 |
--------------------------------------------------------------------------------
/script/pred.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Conv2D, Input, BatchNormalization
3 | from keras.callbacks import ModelCheckpoint
4 | from keras.optimizers import SGD, Adam
5 | import numpy
6 | import math
7 | import os
8 | from keras.models import load_model
9 | import cv2
10 | from PIL import Image
11 |
12 | input_size = 200
13 | label_size = 200
14 |
15 | def setup_session():
16 | import tensorflow as tf
17 | from keras.backend import tensorflow_backend
18 | config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
19 | session = tf.Session(config=config)
20 | tensorflow_backend.set_session(session)
21 |
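   | # Slide a label_size window over the image; for each tile build a bicubic down/up-scaled baseline and an SRCNN prediction, then stitch the tiles into base.png and out.png.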
22 | def predict(srcnn_model, input_path, out_dir, use_coreml):
23 | img = cv2.imread(input_path, cv2.IMREAD_COLOR)
24 | dst = numpy.copy(img)
25 | h,w,c = img.shape
26 | blk = numpy.zeros((h,w,c))
27 | blk[0:h, 0:w, :] = img
28 | dst_base = numpy.copy(blk)
29 | for y in range(0, h, label_size):
30 | for x in range(0, w, label_size):
31 | part = blk[y:y+input_size,x:x+input_size]
32 | in_path = "out/pred_in/%d-%d.png" % (y,x)
33 | ba_path = "out/pred_base/%d-%d.png" % (y,x)
34 | ot_path = "out/pred_out/%d-%d.png" % (y,x)
35 | #cv2.imwrite(in_path, part)
36 |
37 | base = make_base(part, ba_path, ot_path)
38 | dst_base[y:y+input_size,x:x+input_size] = base
39 |
40 | out = exec_pred(srcnn_model, part, ba_path, ot_path, use_coreml)
41 | dst[y:y+label_size,x:x+label_size] = out
42 | base_path = os.path.join(out_dir, 'base.png')
43 | out_path = os.path.join(out_dir, 'out.png')
44 | print(base_path, out_path)
45 | cv2.imwrite(base_path, dst_base)
46 | cv2.imwrite(out_path, dst)
47 |
48 | def make_base(img, ba_path, ot_path):
49 | h,w,c = img.shape
50 | if h < input_size or w < input_size:
51 | return
52 | img = img.astype(numpy.uint8)
53 | shape = img.shape
54 | img = cv2.resize(img, (int(shape[1] / 2), int(shape[0] / 2)), cv2.INTER_CUBIC)
55 | img = cv2.resize(img, (shape[1], shape[0]), cv2.INTER_CUBIC)
56 | return img
57 |
58 | def exec_pred(srcnn_model, img, ba_path, ot_path, use_coreml):
59 | h,w,c = img.shape
60 | if h < input_size or w < input_size:
61 | return
62 | img = img.astype(numpy.uint8)
63 | shape = img.shape
64 | img = cv2.resize(img, (int(shape[1] / 2), int(shape[0] / 2)), cv2.INTER_CUBIC)
65 | img = cv2.resize(img, (shape[1], shape[0]), cv2.INTER_CUBIC)
66 | cv2.imwrite(ba_path, img)
67 |
68 | Y = numpy.zeros((1, img.shape[0], img.shape[1], c), dtype=float)
69 | Y[0] = img.astype(float) / 255.
70 | #print(Y.shape)
71 | pre = run_pred(srcnn_model, Y, use_coreml)
72 | #print('pred_shape', pre.shape)
73 | pre[pre[:] > 255] = 255
74 | pre[pre[:] < 0] = 0
75 | pre = pre.astype(numpy.uint8)
76 | #img[6: -6, 6: -6] = pre[0]
77 | #img = img[6:-6, 6:-6]
78 | #cv2.imwrite(ot_path, img)
79 | return pre
80 |
81 | def run_pred(model, Y, use_coreml):
82 | if use_coreml:
83 | _, h, w, c = Y.shape
84 | img = Y.reshape((h,w,c))
85 | img = Image.fromarray(img)
86 | x = {'image': img}
87 | res = model.predict(x)
88 | out = numpy.asarray(res['output1'] * 255., numpy.uint8)
89 | out = numpy.rollaxis(out, 0, 3)
90 | return out
91 | else:
92 | return model.predict(Y, batch_size=1) * 255.
93 |
94 | if __name__ == "__main__":
95 | import argparse
96 | parser = argparse.ArgumentParser()
97 | parser.add_argument("model", help="model file path")
98 | parser.add_argument("input", help="input image path")
99 | parser.add_argument("out_dir", help="output dir")
100 | parser.add_argument("-coreml", help="FIXME:use coreml model", action='store_true')
101 | args = parser.parse_args()
102 | print(args)
103 |
104 | if not os.path.exists(args.out_dir):
105 | os.makedirs(args.out_dir)
106 |
107 | if args.coreml:
108 | import coremltools
109 | model = coremltools.models.MLModel(args.model)
110 | print(model)
111 | else:
112 | setup_session()
113 | model = load_model(args.model)
114 |
115 | predict(model, args.input, args.out_dir, args.coreml)
116 | print('fin')
117 |
--------------------------------------------------------------------------------
/script/s3sync.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta, tzinfo
2 | from keras.callbacks import Callback
3 | import datetime
4 | import os
5 |
6 | class JST(tzinfo):
7 | def utcoffset(self, dt):
8 | return timedelta(hours=9)
9 |
10 | def dst(self, dt):
11 | return timedelta(0)
12 |
13 | def tzname(self, dt):
14 | return 'JST'
15 |
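   | # Keras callback that mirrors the training log directory to a timestamped S3 prefix (via 'aws s3 sync') every interval_epochs epochs.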
16 | class S3SyncCallback(Callback):
17 |
18 | def __init__(self, s3_base_url, log_dir, interval_epochs = 10):
19 | target = datetime.datetime.now(tz=JST()).strftime('%Y%m%d_%H%M%S')
20 | self.s3_url = '%s/%s' % (s3_base_url, target)
21 | self.log_dir = log_dir
22 | self.interval_epochs = interval_epochs
23 | print('s3 sync: %s, interval %d' % (self.s3_url, self.interval_epochs))
24 |
25 | def on_epoch_end(self, epoch, logs=None):
26 | if epoch > 0 and epoch % self.interval_epochs == 0:
27 | print('s3 sync..')
28 | self.sync()
29 |
30 | def sync(self):
31 | cmd = "aws s3 sync %s %s" % (self.log_dir, self.s3_url)
32 | print(cmd)
33 | res = (os.system(cmd) == 0)
34 | print(res)
35 | return res
36 |
37 |
--------------------------------------------------------------------------------
/script/train.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Conv2D, Input, BatchNormalization
3 | from keras.callbacks import ModelCheckpoint, Callback, TensorBoard
4 | from keras.optimizers import SGD, Adam
5 | import numpy as np
6 | import math
7 | import os
8 | import random
9 | from os import listdir, makedirs
10 | from os.path import isfile, join, exists
11 | from PIL import Image
12 |
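   | # Plain 3-layer SRCNN: 9x9/128 and 3x3/64 ReLU feature layers followed by a 5x5/3 linear reconstruction layer, trained with MSE on 200x200 RGB patches.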
13 | def model():
14 | SRCNN = Sequential()
15 | SRCNN.add(Conv2D(nb_filter=128, nb_row=9, nb_col=9, init='glorot_uniform', activation='relu', border_mode='same', bias=True, input_shape=(200, 200, 3)))
16 | SRCNN.add(Conv2D(nb_filter=64, nb_row=3, nb_col=3, init='glorot_uniform', activation='relu', border_mode='same', bias=True))
17 | SRCNN.add(Conv2D(nb_filter=3, nb_row=5, nb_col=5, init='glorot_uniform', activation='linear', border_mode='same', bias=True))
18 | adam = Adam(lr=0.0003)
19 | SRCNN.compile(optimizer=adam, loss='mean_squared_error', metrics=['mean_squared_error'])
20 | return SRCNN
21 |
22 | class MyDataGenerator(object):
23 |
24 | def flow_from_directory(self, input_dir, label_dir, batch_size=32):
25 | images = []
26 | labels = []
27 | while True:
28 | files = listdir(input_dir)
29 | random.shuffle(files)
30 | for f in files:
31 | images.append(self.load_image(input_dir, f))
32 | labels.append(self.load_image(label_dir, f))
33 | if len(images) == batch_size:
34 | x_inputs = np.asarray(images)
35 | x_labels = np.asarray(labels)
36 | images = []
37 | labels = []
38 | yield x_inputs, x_labels
39 |
40 | def load_image(self, src_dir, f):
41 | X = np.asarray(Image.open(join(src_dir, f)).convert('RGB'), dtype='float32')
42 | X /= 255.
43 | return X
44 |
45 | def train(log_dir, model_dir, train_dir, test_dir, eval_img):
46 | srcnn_model = model()
47 | print(srcnn_model.summary())
48 |
49 | datagen = MyDataGenerator()
50 | train_gen = datagen.flow_from_directory(os.path.join(
51 | train_dir, 'input'),
52 | os.path.join(train_dir, 'label'),
53 | batch_size = 10)
54 |
55 | val_gen = datagen.flow_from_directory(
56 | os.path.join(test_dir, 'input'),
57 | os.path.join(test_dir, 'label'),
58 | batch_size = 10)
59 |
60 | class PredCallback(Callback):
61 | def on_epoch_end(self, epoch, logs=None):
62 | pass
63 | #pred.predict(self.model, eval_img, 'base-%d.png' % epoch, 'out-%d.png' % epoch, False)
64 |
65 | class PSNRCallback(Callback):
66 | def on_epoch_end(self, epoch, logs=None):
67 | loss = logs['loss'] * (255. ** 2) # MSE is computed on [0,1] images; scale by 255^2 to report PSNR on the 0-255 range
68 | val_loss = logs['val_loss'] * (255. ** 2)
69 | psnr = 20 * math.log10(255. / math.sqrt(loss))
70 | val_psnr = 20 * math.log10(255. / math.sqrt(val_loss))
71 | print("\n")
72 | print("PSNR:%s" % psnr)
73 | print("PSNR(val):%s" % val_psnr)
74 |
75 | pd_cb = PredCallback()
76 | ps_cb = PSNRCallback()
77 | md_cb = ModelCheckpoint(os.path.join(model_dir,'check.h5'), monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='min', period=1)
78 | tb_cb = TensorBoard(log_dir=log_dir)
79 |
80 | srcnn_model.fit_generator(
81 | generator = train_gen,
82 | steps_per_epoch = 100,
83 | validation_data = val_gen,
84 | validation_steps = 100,
85 | epochs = 100,
86 | callbacks=[pd_cb, ps_cb, md_cb, tb_cb])
87 |
88 | srcnn_model.save(os.path.join(model_dir,'model.h5'))
89 |
90 | if __name__ == "__main__":
91 | import argparse
92 | parser = argparse.ArgumentParser()
93 | parser.add_argument("log_dir")
94 | parser.add_argument("model_dir")
95 | parser.add_argument("train_dir")
96 | parser.add_argument("test_dir")
97 | parser.add_argument("--eval_img")
98 | args = parser.parse_args()
99 | print(args)
100 |
101 | if not exists(args.model_dir):
102 | makedirs(args.model_dir)
103 |
104 | train(args.log_dir, args.model_dir, args.train_dir, args.test_dir, args.eval_img)
105 |
--------------------------------------------------------------------------------