├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── android
└── mlkit-automl
│ ├── .gitignore
│ ├── .idea
│ ├── codeStyles
│ │ ├── Project.xml
│ │ └── codeStyleConfig.xml
│ ├── encodings.xml
│ ├── inspectionProfiles
│ │ ├── ktlint.xml
│ │ └── profiles_settings.xml
│ ├── misc.xml
│ └── runConfigurations.xml
│ ├── app
│ ├── build.gradle
│ └── src
│ │ └── main
│ │ ├── AndroidManifest.xml
│ │ ├── assets
│ │ └── automl
│ │ │ ├── dict.txt
│ │ │ ├── manifest.json
│ │ │ └── model.tflite
│ │ ├── java
│ │ └── com
│ │ │ └── google
│ │ │ └── firebase
│ │ │ └── codelab
│ │ │ └── mlkit
│ │ │ └── automl
│ │ │ ├── BaseActivity.kt
│ │ │ ├── ImageClassifier.kt
│ │ │ └── StillImageActivity.kt
│ │ └── res
│ │ ├── drawable-anydpi
│ │ ├── ic_photo_camera.xml
│ │ └── ic_photo_library.xml
│ │ ├── drawable-hdpi
│ │ ├── dandelion_2817950_640.jpg
│ │ ├── dandelion_4110356_640.jpg
│ │ ├── rose_1463562_640.jpg
│ │ ├── rose_3063284_640.jpg
│ │ ├── sunflower_1627193_640.jpg
│ │ └── sunflower_3292932_640.jpg
│ │ ├── layout
│ │ └── activity_still_image.xml
│ │ ├── mipmap-hdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-mdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xhdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xxhdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xxxhdpi
│ │ └── ic_launcher.png
│ │ ├── values
│ │ ├── colors.xml
│ │ ├── dimens.xml
│ │ ├── strings.xml
│ │ └── styles.xml
│ │ └── xml
│ │ └── file_paths.xml
│ ├── build.gradle
│ ├── gradle.properties
│ ├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
│ ├── gradlew
│ ├── gradlew.bat
│ └── settings.gradle
├── flower_photos.zip
└── ios
└── mlkit-automl
├── .gitignore
├── Images.xcassets
├── AppIcon.appiconset
│ ├── 100.png
│ ├── 1024.png
│ ├── 114.png
│ ├── 120.png
│ ├── 144.png
│ ├── 152.png
│ ├── 167.png
│ ├── 180.png
│ ├── 20.png
│ ├── 29.png
│ ├── 40.png
│ ├── 50.png
│ ├── 57.png
│ ├── 58.png
│ ├── 60.png
│ ├── 72.png
│ ├── 76.png
│ ├── 80.png
│ ├── 87.png
│ └── Contents.json
├── Contents.json
├── LaunchImage.launchimage
│ └── Contents.json
├── Logo.imageset
│ ├── Contents.json
│ ├── logo1024-universal-341@1x.png
│ ├── logo1024-universal-341@2x.png
│ └── logo1024-universal-341@3x.png
├── ic_account_circle_36pt.imageset
│ ├── Contents.json
│ ├── ic_account_circle_36pt.png
│ ├── ic_account_circle_36pt_2x.png
│ └── ic_account_circle_36pt_3x.png
├── ic_more_vert_white.imageset
│ ├── Contents.json
│ ├── ic_more_vert_white.png
│ ├── ic_more_vert_white_2x.png
│ └── ic_more_vert_white_3x.png
├── ic_send.imageset
│ ├── Contents.json
│ ├── ic_send.png
│ ├── ic_send_2x.png
│ └── ic_send_3x.png
├── photo_camera.imageset
│ ├── Contents.json
│ ├── photo_camera_2x.png
│ └── photo_camera_3x.png
├── photo_library.imageset
│ ├── Contents.json
│ ├── photo_library_2x.png
│ └── photo_library_3x.png
├── switch_camera.imageset
│ ├── Contents.json
│ ├── switch_camera_2x.png
│ └── switch_camera_3x.png
└── video_camera.imageset
│ ├── Contents.json
│ ├── video_camera_2x.png
│ └── video_camera_3x.png
├── LaunchScreen.xib
├── MLVisionExample.xcodeproj
└── project.pbxproj
├── MLVisionExample
├── AppDelegate.swift
├── Base.lproj
│ └── Main.storyboard
├── CameraViewController.swift
├── ImageClassifier.swift
├── Info.plist
├── MLKitExtensions.swift
├── UIUtilities.swift
└── ViewController.swift
├── Podfile
├── Podfile.lock
└── Resources
├── automl
├── dict.txt
├── manifest.json
└── model.tflite
└── flowers
├── dandelion_2817950_640.jpg
├── dandelion_4110356_640.jpg
├── rose_1463562_640.jpg
├── rose_3063284_640.jpg
├── sunflower_1627193_640.jpg
└── sunflower_3292932_640.jpg
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
11 | part of the project. Head over to <https://cla.developers.google.com/> to see
12 | your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Code reviews
19 |
20 | All submissions, including submissions by project members, require review. We
21 | use GitHub pull requests for this purpose. Consult
22 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
23 | information on using pull requests.
24 |
25 | ## Community Guidelines
26 |
27 | This project follows [Google's Open Source Community
28 | Guidelines](https://opensource.google.com/conduct/).
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Codelabs for ML Kit for Firebase
2 | ============
3 |
4 | This repository contains the code for the AutoML Vision for ML Kit codelab:
5 |
6 | * [Train and deploy on-device image classification model with AutoML Vision Edge in ML Kit](https://codelabs.developers.google.com/codelabs/automl-vision-edge-in-mlkit)
7 |
8 | Introduction
9 | ------------
10 | In these codelabs, you will learn:
11 |
12 | * How to train an image classification model using AutoML Vision Edge in ML Kit.
13 | * How to run it in a sample Android or iOS app using the ML Kit SDK.
14 |
15 | Pre-requisites
16 | --------------
17 | None.
18 |
19 | Getting Started
20 | ---------------
21 | Visit the Google codelabs site to follow along the guided steps.
22 |
23 | Screenshots
24 | -----------
25 |
26 | Support
27 | -------
28 |
29 | - Stack Overflow: http://stackoverflow.com/questions/tagged/firebase-mlkit
30 |
31 | If you've found an error in this sample, please file an issue:
32 | https://github.com/googlecodelabs/automl-vision-edge-in-mlkit/issues
33 |
34 | Patches are encouraged, and may be submitted by forking this project and
35 | submitting a pull request through GitHub.
36 |
37 | License
38 | -------
39 |
40 | Copyright 2019 Google LLC
41 |
42 | Licensed under the Apache License, Version 2.0 (the "License");
43 | you may not use this file except in compliance with the License.
44 | You may obtain a copy of the License at
45 |
46 | https://www.apache.org/licenses/LICENSE-2.0
47 |
48 | Unless required by applicable law or agreed to in writing, software
49 | distributed under the License is distributed on an "AS IS" BASIS,
50 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
51 | See the License for the specific language governing permissions and
52 | limitations under the License.
53 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.ap_
4 | *.aab
5 |
6 | # Files for the ART/Dalvik VM
7 | *.dex
8 |
9 | # Java class files
10 | *.class
11 |
12 | # Generated files
13 | bin/
14 | gen/
15 | out/
16 |
17 | # Gradle files
18 | .gradle/
19 | build/
20 |
21 | # Local configuration file (sdk path, etc)
22 | local.properties
23 |
24 | # Proguard folder generated by Eclipse
25 | proguard/
26 |
27 | # Log Files
28 | *.log
29 |
30 | # Android Studio Navigation editor temp files
31 | .navigation/
32 |
33 | # Android Studio captures folder
34 | captures/
35 |
36 | # IntelliJ
37 | *.iml
38 | .idea/workspace.xml
39 | .idea/tasks.xml
40 | .idea/gradle.xml
41 | .idea/assetWizardSettings.xml
42 | .idea/dictionaries
43 | .idea/libraries
44 | .idea/caches
45 | # Android Studio 3 in .gitignore file.
46 | .idea/caches/build_file_checksums.ser
47 | .idea/modules.xml
48 |
49 | # Keystore files
50 | # Uncomment the following lines if you do not want to check your keystore files in.
51 | #*.jks
52 | #*.keystore
53 |
54 | # External native build folder generated in Android Studio 2.2 and later
55 | .externalNativeBuild
56 |
57 | # Google Services (e.g. APIs or Firebase)
58 | google-services.json
59 |
60 | # Freeline
61 | freeline.py
62 | freeline/
63 | freeline_project_description.json
64 |
65 | # fastlane
66 | fastlane/report.xml
67 | fastlane/Preview.html
68 | fastlane/screenshots
69 | fastlane/test_output
70 | fastlane/readme.md
71 |
72 | # Version control
73 | vcs.xml
74 |
75 | # lint
76 | lint/intermediates/
77 | lint/generated/
78 | lint/outputs/
79 | lint/tmp/
80 | # lint/reports/
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/codeStyles/codeStyleConfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/inspectionProfiles/ktlint.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/android/mlkit-automl/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/build.gradle:
--------------------------------------------------------------------------------
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
// NOTE(review): the original applied 'kotlin-android-extensions' twice (once at the top
// and once after the dependencies block). Plugin application is idempotent, but the
// duplicate is noise — it is applied exactly once here.
apply plugin: 'kotlin-android-extensions'

android {
    compileSdkVersion 28
    buildToolsVersion '28.0.3'

    defaultConfig {
        applicationId "com.google.firebase.codelab.mlkit.automl"
        minSdkVersion 21
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    lintOptions {
        // Codelab convenience: do not fail the build on lint errors.
        abortOnError false
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }

    aaptOptions {
        // Keep the TensorFlow Lite model uncompressed so it can be memory-mapped at runtime.
        noCompress "tflite"
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    packagingOptions {
        exclude 'META-INF/atomicfu.kotlin_module'
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])

    implementation 'androidx.appcompat:appcompat:1.0.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    implementation 'com.google.android.material:material:1.0.0'
    implementation 'androidx.annotation:annotation:1.0.0'
    implementation 'androidx.legacy:legacy-support-v13:1.0.0'

    // ML Kit for Firebase (AutoML Vision Edge image labeling).
    implementation 'com.google.firebase:firebase-ml-vision:20.0.0'
    implementation 'com.google.firebase:firebase-ml-vision-automl:16.0.0'

    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"

    implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:1.2.0"
    implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.2.0"
}

// Applied last, after the dependencies block, per the google-services plugin convention.
apply plugin: 'com.google.gms.google-services'
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
17 |
18 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
34 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/assets/automl/dict.txt:
--------------------------------------------------------------------------------
1 | daisy
2 | dandelion
3 | roses
4 | sunflowers
5 | tulips
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/assets/automl/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "modelFile": "model.tflite",
3 | "labelsFile": "dict.txt",
4 | "modelType": "IMAGE_LABELING"
5 | }
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/assets/automl/model.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/assets/automl/model.tflite
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/java/com/google/firebase/codelab/mlkit/automl/BaseActivity.kt:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2019 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.google.firebase.codelab.mlkit.automl
18 |
19 | import android.content.pm.PackageManager
20 | import androidx.appcompat.app.AppCompatActivity
21 | import android.os.Bundle
22 | import android.util.Log
23 | import androidx.core.app.ActivityCompat
24 | import androidx.core.content.ContextCompat
25 |
26 | /** Base activity that requests all needed permission at launch */
/**
 * Base activity that, at launch, requests every permission declared in the app's
 * manifest that the user has not yet granted.
 */
abstract class BaseActivity : AppCompatActivity(),
  ActivityCompat.OnRequestPermissionsResultCallback {

  // Permissions declared in this app's manifest, read lazily via
  // PackageManager.getPackageInfo(..., GET_PERMISSIONS). Falls back to an empty
  // array when the package cannot be found or declares no permissions.
  // NOTE(review): the dump stripped the generic argument ("Array by lazy");
  // restored to Array<String>, which is what requestedPermissions returns.
  private val requiredPermissions: Array<String> by lazy {
    try {
      this.packageManager.getPackageInfo(
        this.packageName,
        PackageManager.GET_PERMISSIONS
      ).requestedPermissions ?: arrayOf<String>()
    } catch (e: PackageManager.NameNotFoundException) {
      arrayOf<String>()
    }
  }

  /** Returns true when every manifest-declared permission has been granted. */
  private fun allPermissionsGranted() = requiredPermissions.all { isPermissionGranted(it) }

  /** Requests every not-yet-granted permission; no-op when none are missing. */
  private fun requestRuntimePermissions() {
    val neededPermissions = requiredPermissions.filter { !isPermissionGranted(it) }

    if (neededPermissions.isNotEmpty()) {
      ActivityCompat.requestPermissions(
        this,
        neededPermissions.toTypedArray(),
        PERMISSION_REQUESTS
      )
    }
  }

  /** Checks and logs whether a single [permission] has been granted. */
  private fun isPermissionGranted(permission: String): Boolean {
    return if (ContextCompat.checkSelfPermission(this, permission) ==
      PackageManager.PERMISSION_GRANTED
    ) {
      Log.i(TAG, "Permission granted: $permission")
      true
    } else {
      Log.i(TAG, "Permission NOT granted: $permission")
      false
    }
  }

  override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)

    // Ask for any missing permissions as soon as the activity is created.
    if (!allPermissionsGranted()) {
      requestRuntimePermissions()
    }
  }

  companion object {

    /** Tag for the [Log]. */
    private const val TAG = "BaseActivity"

    /** Request code passed to [ActivityCompat.requestPermissions]. */
    private const val PERMISSION_REQUESTS = 1
  }
}
85 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/java/com/google/firebase/codelab/mlkit/automl/ImageClassifier.kt:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2019 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.google.firebase.codelab.mlkit.automl
18 |
19 | import android.content.Context
20 | import android.graphics.Bitmap
21 | import android.os.SystemClock
22 | import android.util.Log
23 | import android.widget.Toast
24 | import com.google.android.gms.tasks.Task
25 | import com.google.android.gms.tasks.TaskCompletionSource
26 | import com.google.firebase.ml.common.FirebaseMLException
27 | import com.google.firebase.ml.common.modeldownload.FirebaseLocalModel
28 | import com.google.firebase.ml.common.modeldownload.FirebaseModelManager
29 | import com.google.firebase.ml.common.modeldownload.FirebaseRemoteModel
30 | import com.google.firebase.ml.vision.FirebaseVision
31 | import com.google.firebase.ml.vision.common.FirebaseVisionImage
32 | import com.google.firebase.ml.vision.label.FirebaseVisionImageLabel
33 | import com.google.firebase.ml.vision.label.FirebaseVisionImageLabeler
34 | import com.google.firebase.ml.vision.label.FirebaseVisionOnDeviceAutoMLImageLabelerOptions
35 | import java.io.IOException
36 | import java.util.Locale
37 |
/** Classifies images with ML Kit AutoML. */
class ImageClassifier
/** Initializes an `ImageClassifier`. */
@Throws(FirebaseMLException::class)
internal constructor(context: Context) {

  /** ML Kit AutoML image labeler backed by the bundled local model and the hosted remote model. */
  private val labeler: FirebaseVisionImageLabeler?

  // Flipped to true once the remote model download completes successfully.
  private var remoteModelDownloadSucceeded = false

  init {
    // Register the remote model hosted on the Firebase ML Kit server.
    val remoteModel = FirebaseRemoteModel.Builder(REMOTE_MODEL_NAME).build()
    FirebaseModelManager.getInstance()
        .registerRemoteModel(remoteModel)

    // Register the local model bundled in assets so labeling works before (or without)
    // a successful remote download.
    FirebaseModelManager.getInstance()
        .registerLocalModel(
            FirebaseLocalModel.Builder(LOCAL_MODEL_NAME)
                .setAssetFilePath(LOCAL_MODEL_PATH)
                .build()
        )

    val options = FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder()
        .setConfidenceThreshold(CONFIDENCE_THRESHOLD)
        .setLocalModelName(LOCAL_MODEL_NAME)
        .setRemoteModelName(REMOTE_MODEL_NAME)
        .build()

    labeler = FirebaseVision.getInstance().getOnDeviceAutoMLImageLabeler(options)

    Toast.makeText(
        context,
        "Begin downloading the remote AutoML model.",
        Toast.LENGTH_SHORT
    ).show()

    // Track the remote model download progress.
    FirebaseModelManager.getInstance()
        .downloadRemoteModelIfNeeded(remoteModel)
        .addOnCompleteListener { task ->
          if (task.isSuccessful) {
            Toast.makeText(
                context,
                "Download remote AutoML model success.",
                Toast.LENGTH_SHORT
            ).show()
            remoteModelDownloadSucceeded = true
          } else {
            val downloadingError = "Error downloading remote model."
            Log.e(TAG, downloadingError, task.exception)
            Toast.makeText(context, downloadingError, Toast.LENGTH_SHORT).show()
          }
        }

    Log.d(TAG, "Created a Firebase ML Kit AutoML Image Labeler.")
  }

  /**
   * Classifies a frame from the preview stream.
   *
   * @param bitmap the image to classify.
   * @return a [Task] resolving to a human-readable summary (model source, latency, top labels),
   *     or failing with the labeler's error.
   */
  internal fun classifyFrame(bitmap: Bitmap): Task<String> {
    val labeler = this.labeler
    if (labeler == null) {
      Log.e(TAG, "Image classifier has not been initialized; Skipped.")
      // Surface the failure through the Task API instead of throwing, so callers
      // handle it in their usual completion listener.
      val completionSource = TaskCompletionSource<String>()
      completionSource.setException(IllegalStateException("Uninitialized Classifier."))
      return completionSource.task
    }

    val startTime = SystemClock.uptimeMillis()
    val image = FirebaseVisionImage.fromBitmap(bitmap)

    return labeler.processImage(image).continueWith { task ->
      val latencyMs = SystemClock.uptimeMillis() - startTime
      Log.d(TAG, "Time to run model inference: $latencyMs")

      val labelProbList = task.result

      // Indicate whether the remote or local model is used.
      // Note: in most common cases, once a remote model is downloaded it will be used. However, in
      // very rare cases, the model itself might not be valid, and thus the local model is used. In
      // addition, since model download failures can be transient, and model download can also be
      // triggered in the background during inference, it is possible that a remote model is used
      // even if the first download fails.
      val source = if (remoteModelDownloadSucceeded) "Remote" else "Local"
      var textToShow = "Source: $source model\n"
      textToShow += "Latency: ${latencyMs}ms\n"
      textToShow += if (labelProbList.isNullOrEmpty()) "No Result" else printTopKLabels(labelProbList)

      // The summary string becomes the result of the returned Task.
      textToShow
    }
  }

  /** Closes the labeler to release model resources. Safe to call if initialization failed. */
  internal fun close() {
    try {
      labeler?.close()
    } catch (e: IOException) {
      Log.e(TAG, "Unable to close the labeler instance", e)
    }
  }

  /** Formats the top-K labels, to be shown in UI as the results. */
  private fun printTopKLabels(labelProbList: List<FirebaseVisionImageLabel>): String =
      labelProbList.joinToString(
          separator = "\n",
          limit = RESULTS_TO_SHOW
      ) { label ->
        String.format(Locale.getDefault(), "Label: %s, Confidence: %4.2f", label.text, label.confidence)
      }

  companion object {

    /** Tag for the [Log]. */
    private const val TAG = "MLKitAutoMLCodelab"

    /** Name of the local model file stored in Assets. */
    private const val LOCAL_MODEL_NAME = "automl_image_labeling_model"

    /** Path of local model file stored in Assets. */
    private const val LOCAL_MODEL_PATH = "automl/manifest.json"

    /** Name of the remote model in Firebase ML Kit server. */
    private const val REMOTE_MODEL_NAME = "mlkit_flowers"

    /** Number of results to show in the UI. */
    private const val RESULTS_TO_SHOW = 3

    /** Min probability to classify the given image as belonging to a category. */
    private const val CONFIDENCE_THRESHOLD = 0.6f
  }
}
176 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/java/com/google/firebase/codelab/mlkit/automl/StillImageActivity.kt:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2019 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.google.firebase.codelab.mlkit.automl
18 |
19 | import android.app.Activity
20 | import android.content.Intent
21 | import android.graphics.Bitmap
22 | import android.graphics.BitmapFactory
23 | import android.net.Uri
24 | import android.os.Bundle
25 | import android.provider.MediaStore
26 | import android.util.Log
27 | import android.widget.Button
28 | import android.widget.ImageButton
29 | import android.widget.ImageView
30 | import android.widget.TextView
31 | import androidx.appcompat.app.AppCompatActivity
32 | import androidx.core.content.FileProvider
33 | import com.google.firebase.ml.common.FirebaseMLException
34 | import com.google.firebase.ml.vision.common.FirebaseVisionImage
35 | import java.io.File
36 | import java.io.IOException
37 | import java.text.SimpleDateFormat
38 | import java.util.Date
39 |
class StillImageActivity : BaseActivity() {

  // File where the camera app writes the captured photo.
  private var currentPhotoFile: File? = null
  private var imagePreview: ImageView? = null
  private var textView: TextView? = null

  private var classifier: ImageClassifier? = null
  private var currentImageIndex = 0
  // Drawable resource names of the demo images bundled with the app.
  private var bundledImageList: Array<String>? = null

  override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)

    setContentView(R.layout.activity_still_image)
    imagePreview = findViewById(R.id.image_preview)
    textView = findViewById(R.id.result_text)
    findViewById<ImageButton>(R.id.photo_camera_button)?.setOnClickListener { takePhoto() }
    findViewById<ImageButton>(R.id.photo_library_button)?.setOnClickListener { chooseFromLibrary() }
    findViewById<Button>(R.id.next_image_button)?.setOnClickListener { clickNextImage() }

    // Get list of bundled images.
    bundledImageList = resources.getStringArray(R.array.image_name_array)

    // Setup image classifier.
    try {
      classifier = ImageClassifier(this)
    } catch (e: FirebaseMLException) {
      textView?.text = getString(R.string.fail_to_initialize_img_classifier)
    }

    // Classify the first image in the bundled list.
    classifyBundledImage(currentImageIndex)
  }

  override fun onDestroy() {
    // Release the model resources held by the classifier.
    classifier?.close()
    super.onDestroy()
  }

  /**
   * Creates an empty image file in the cache dir for the camera app to write to.
   *
   * @throws IOException if the file could not be created.
   */
  @Throws(IOException::class)
  private fun createImageFile(): File {
    // Timestamped name; an explicit locale keeps the digits ASCII on every device locale.
    val timeStamp: String = SimpleDateFormat("yyyyMMdd_HHmmss", java.util.Locale.US).format(Date())
    val storageDir = cacheDir
    return File.createTempFile(
        "JPEG_${timeStamp}_", /* prefix */
        ".jpg", /* suffix */
        storageDir /* directory */
    ).apply {
      // Remember the file so onActivityResult can read the captured photo back.
      currentPhotoFile = this
    }
  }

  override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
    // Forward the result so fragments / the support library see it too.
    super.onActivityResult(requestCode, resultCode, data)
    if (resultCode != Activity.RESULT_OK) return

    when (requestCode) {
      // Make use of FirebaseVisionImage.fromFilePath to take into account
      // Exif Orientation of the image files.
      REQUEST_IMAGE_CAPTURE -> {
        FirebaseVisionImage.fromFilePath(this, Uri.fromFile(currentPhotoFile)).also {
          classifyImage(it.bitmap)
        }
      }
      REQUEST_PHOTO_LIBRARY -> {
        // Bail out if the picker returned no data.
        val selectedImageUri = data?.data ?: return
        FirebaseVisionImage.fromFilePath(this, selectedImageUri).also {
          classifyImage(it.bitmap)
        }
      }
    }
  }

  /** Runs image classification on the given [Bitmap] and shows the result on screen. */
  private fun classifyImage(bitmap: Bitmap) {
    val classifier = this.classifier
    if (classifier == null) {
      textView?.text = getString(R.string.uninitialized_img_classifier_or_invalid_context)
      return
    }

    // Show image on screen.
    imagePreview?.setImageBitmap(bitmap)

    // Classify image asynchronously; render either the result text or the error message.
    classifier.classifyFrame(bitmap).addOnCompleteListener { task ->
      if (task.isSuccessful) {
        textView?.text = task.result
      } else {
        val e = task.exception
        Log.e(TAG, "Error classifying frame", e)
        textView?.text = e?.message
      }
    }
  }

  /** Launches the system photo picker, restricted to JPEG/PNG images. */
  private fun chooseFromLibrary() {
    val intent = Intent(Intent.ACTION_PICK)
    intent.type = "image/*"

    // Only accept JPEG or PNG format.
    val mimeTypes = arrayOf("image/jpeg", "image/png")
    intent.putExtra(Intent.EXTRA_MIME_TYPES, mimeTypes)

    startActivityForResult(intent, REQUEST_PHOTO_LIBRARY)
  }

  /** Launches the camera app to capture a photo into [currentPhotoFile]. */
  private fun takePhoto() {
    Intent(MediaStore.ACTION_IMAGE_CAPTURE).also { takePictureIntent ->
      // Ensure that there's a camera activity to handle the intent.
      takePictureIntent.resolveActivity(packageManager)?.also {
        // Create the File where the photo should go.
        val photoFile: File? = try {
          createImageFile()
        } catch (e: IOException) {
          // Error occurred while creating the File.
          Log.e(TAG, "Unable to save image to run classification.", e)
          null
        }
        // Continue only if the File was successfully created.
        photoFile?.also {
          // Share the file via FileProvider; the raw path is not accessible to the camera app.
          val photoURI: Uri = FileProvider.getUriForFile(
              this,
              "com.google.firebase.codelab.mlkit.automl.fileprovider",
              it
          )
          takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI)
          startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE)
        }
      }
    }
  }

  /** Advances to the next bundled image (wrapping around) and classifies it. */
  private fun clickNextImage() {
    val imageList = bundledImageList
    if (imageList.isNullOrEmpty()) { return }

    currentImageIndex = (currentImageIndex + 1) % imageList.size
    classifyBundledImage(currentImageIndex)
  }

  /** Decodes the bundled drawable at [index] and classifies it. */
  private fun classifyBundledImage(index: Int) {
    val imageList = bundledImageList
    if (imageList.isNullOrEmpty()) { return }

    val imageName = imageList[index]
    val drawableId = resources.getIdentifier(imageName, "drawable", packageName)
    // getIdentifier returns 0 when no drawable matches; decoding would then crash.
    if (drawableId == 0) {
      Log.e(TAG, "Bundled image not found in drawables: $imageName")
      return
    }
    val bitmap = BitmapFactory.decodeResource(resources, drawableId) ?: return

    classifyImage(bitmap)
  }

  companion object {

    /** Tag for the [Log]. */
    private const val TAG = "StillImageActivity"

    /** Request code for starting photo capture activity */
    private const val REQUEST_IMAGE_CAPTURE = 1

    /** Request code for starting photo library activity */
    private const val REQUEST_PHOTO_LIBRARY = 2

  }
}
207 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-anydpi/ic_photo_camera.xml:
--------------------------------------------------------------------------------
1 |
7 |
10 |
13 |
14 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-anydpi/ic_photo_library.xml:
--------------------------------------------------------------------------------
1 |
7 |
10 |
11 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-hdpi/dandelion_2817950_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/drawable-hdpi/dandelion_2817950_640.jpg
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-hdpi/dandelion_4110356_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/drawable-hdpi/dandelion_4110356_640.jpg
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-hdpi/rose_1463562_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/drawable-hdpi/rose_1463562_640.jpg
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-hdpi/rose_3063284_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/drawable-hdpi/rose_3063284_640.jpg
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-hdpi/sunflower_1627193_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/drawable-hdpi/sunflower_1627193_640.jpg
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/drawable-hdpi/sunflower_3292932_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/drawable-hdpi/sunflower_3292932_640.jpg
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/layout/activity_still_image.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
14 |
15 |
23 |
24 |
32 |
33 |
34 |
35 |
41 |
42 |
49 |
50 |
56 |
57 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 | #cc4285f4
19 | #e9e9e9
20 |
21 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 20sp
4 |
5 |
6 | 16dp
7 | 16dp
8 | 40dp
9 | 112dp
10 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 | AutoML Codelab
18 |
19 | Next Image
20 |
21 | Failed to initialize an image classifier.
22 | Uninitialized Classifier or invalid context.
23 |
24 |
25 | - sunflower_1627193_640
26 | - sunflower_3292932_640
27 | - dandelion_2817950_640
28 | - dandelion_4110356_640
29 | - rose_1463562_640
30 | - rose_3063284_640
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/android/mlkit-automl/app/src/main/res/xml/file_paths.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/android/mlkit-automl/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    // Kotlin version shared by the plugin classpath below and module dependencies.
    ext.kotlin_version = '1.3.21'

    repositories {
        // NOTE(review): JCenter has been sunset (read-only since 2021); consider
        // migrating to mavenCentral() — verify all artifacts resolve first.
        jcenter()
        google()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:3.4.0'
        classpath 'com.google.gms:google-services:4.2.0'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        mavenLocal()
        google()
        jcenter()
    }
}

// Deletes the root build directory on `gradlew clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}
--------------------------------------------------------------------------------
/android/mlkit-automl/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | android.enableJetifier=true
13 | android.useAndroidX=true
14 | org.gradle.jvmargs=-Xmx1536m
15 |
16 | # When configured, Gradle will run in incubating parallel mode.
17 | # This option should only be used with decoupled projects. More details, visit
18 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
19 | # org.gradle.parallel=true
20 |
--------------------------------------------------------------------------------
/android/mlkit-automl/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/android/mlkit-automl/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/android/mlkit-automl/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Tue Apr 23 17:29:53 JST 2019
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
7 |
--------------------------------------------------------------------------------
/android/mlkit-automl/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# NOTE(review): generated by Gradle's 'wrapper' task; prefer regenerating it
# (./gradlew wrapper) over editing it by hand.

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a message without terminating the script.
warn ( ) {
    echo "$*"
}

# Print a message and abort with exit code 1.
die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
esac

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
    JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"

exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/android/mlkit-automl/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem NOTE(review): generated by Gradle's 'wrapper' task; prefer regenerating
@rem it over editing by hand.

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*
goto execute

:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
91 |
--------------------------------------------------------------------------------
/android/mlkit-automl/settings.gradle:
--------------------------------------------------------------------------------
// Modules that are part of this Gradle build.
include ':app'
2 |
--------------------------------------------------------------------------------
/flower_photos.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/flower_photos.zip
--------------------------------------------------------------------------------
/ios/mlkit-automl/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## Build generated
6 | build/
7 | DerivedData/
8 |
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 | xcschemes/
20 |
21 | ## Other
22 | *.moved-aside
23 | *.xccheckout
24 | *.xcscmblueprint
25 |
26 | ## Obj-C/Swift specific
27 | *.hmap
28 | *.ipa
29 | *.dSYM.zip
30 | *.dSYM
31 |
32 | ## Playgrounds
33 | timeline.xctimeline
34 | playground.xcworkspace
35 |
36 | # Swift Package Manager
37 | #
38 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
39 | # Packages/
40 | # Package.pins
41 | # Package.resolved
42 | .build/
43 |
44 | # CocoaPods
45 | #
46 | # We recommend against adding the Pods directory to your .gitignore. However
47 | # you should judge for yourself, the pros and cons are mentioned at:
48 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
49 | #
50 | Pods/
51 | #
52 | # Add this line if you want to avoid checking in source code from the Xcode workspace
53 | *.xcworkspace
54 |
55 | # Carthage
56 | #
57 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
58 | # Carthage/Checkouts
59 |
60 | Carthage/Build
61 |
62 | # fastlane
63 | #
64 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
65 | # screenshots whenever they are needed.
66 | # For more information about the recommended setup visit:
67 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
68 |
69 | fastlane/report.xml
70 | fastlane/Preview.html
71 | fastlane/screenshots/**/*.png
72 | fastlane/test_output
73 |
74 | # Code Injection
75 | #
76 | # After new code Injection tools there's a generated folder /iOSInjectionProject
77 | # https://github.com/johnno1962/injectionforxcode
78 |
79 | iOSInjectionProject/
80 |
81 | # Firebase
82 | GoogleService-Info.plist
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/100.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/1024.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/114.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/114.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/120.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/120.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/144.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/152.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/152.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/167.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/167.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/180.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/180.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/20.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/20.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/29.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/29.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/40.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/50.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/50.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/57.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/57.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/58.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/58.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/60.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/60.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/72.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/72.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/76.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/80.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/80.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/87.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/87.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "size" : "20x20",
5 | "idiom" : "iphone",
6 | "filename" : "40.png",
7 | "scale" : "2x"
8 | },
9 | {
10 | "size" : "20x20",
11 | "idiom" : "iphone",
12 | "filename" : "60.png",
13 | "scale" : "3x"
14 | },
15 | {
16 | "size" : "29x29",
17 | "idiom" : "iphone",
18 | "filename" : "29.png",
19 | "scale" : "1x"
20 | },
21 | {
22 | "size" : "29x29",
23 | "idiom" : "iphone",
24 | "filename" : "58.png",
25 | "scale" : "2x"
26 | },
27 | {
28 | "size" : "29x29",
29 | "idiom" : "iphone",
30 | "filename" : "87.png",
31 | "scale" : "3x"
32 | },
33 | {
34 | "size" : "40x40",
35 | "idiom" : "iphone",
36 | "filename" : "80.png",
37 | "scale" : "2x"
38 | },
39 | {
40 | "size" : "40x40",
41 | "idiom" : "iphone",
42 | "filename" : "120.png",
43 | "scale" : "3x"
44 | },
45 | {
46 | "size" : "57x57",
47 | "idiom" : "iphone",
48 | "filename" : "57.png",
49 | "scale" : "1x"
50 | },
51 | {
52 | "size" : "57x57",
53 | "idiom" : "iphone",
54 | "filename" : "114.png",
55 | "scale" : "2x"
56 | },
57 | {
58 | "size" : "60x60",
59 | "idiom" : "iphone",
60 | "filename" : "120.png",
61 | "scale" : "2x"
62 | },
63 | {
64 | "size" : "60x60",
65 | "idiom" : "iphone",
66 | "filename" : "180.png",
67 | "scale" : "3x"
68 | },
69 | {
70 | "size" : "20x20",
71 | "idiom" : "ipad",
72 | "filename" : "20.png",
73 | "scale" : "1x"
74 | },
75 | {
76 | "size" : "20x20",
77 | "idiom" : "ipad",
78 | "filename" : "40.png",
79 | "scale" : "2x"
80 | },
81 | {
82 | "size" : "29x29",
83 | "idiom" : "ipad",
84 | "filename" : "29.png",
85 | "scale" : "1x"
86 | },
87 | {
88 | "size" : "29x29",
89 | "idiom" : "ipad",
90 | "filename" : "58.png",
91 | "scale" : "2x"
92 | },
93 | {
94 | "size" : "40x40",
95 | "idiom" : "ipad",
96 | "filename" : "40.png",
97 | "scale" : "1x"
98 | },
99 | {
100 | "size" : "40x40",
101 | "idiom" : "ipad",
102 | "filename" : "80.png",
103 | "scale" : "2x"
104 | },
105 | {
106 | "size" : "50x50",
107 | "idiom" : "ipad",
108 | "filename" : "50.png",
109 | "scale" : "1x"
110 | },
111 | {
112 | "size" : "50x50",
113 | "idiom" : "ipad",
114 | "filename" : "100.png",
115 | "scale" : "2x"
116 | },
117 | {
118 | "size" : "72x72",
119 | "idiom" : "ipad",
120 | "filename" : "72.png",
121 | "scale" : "1x"
122 | },
123 | {
124 | "size" : "72x72",
125 | "idiom" : "ipad",
126 | "filename" : "144.png",
127 | "scale" : "2x"
128 | },
129 | {
130 | "size" : "76x76",
131 | "idiom" : "ipad",
132 | "filename" : "76.png",
133 | "scale" : "1x"
134 | },
135 | {
136 | "size" : "76x76",
137 | "idiom" : "ipad",
138 | "filename" : "152.png",
139 | "scale" : "2x"
140 | },
141 | {
142 | "size" : "83.5x83.5",
143 | "idiom" : "ipad",
144 | "filename" : "167.png",
145 | "scale" : "2x"
146 | },
147 | {
148 | "size" : "1024x1024",
149 | "idiom" : "ios-marketing",
150 | "filename" : "1024.png",
151 | "scale" : "1x"
152 | }
153 | ],
154 | "info" : {
155 | "version" : 1,
156 | "author" : "xcode"
157 | }
158 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/LaunchImage.launchimage/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "orientation" : "portrait",
5 | "idiom" : "iphone",
6 | "extent" : "full-screen",
7 | "minimum-system-version" : "8.0",
8 | "subtype" : "736h",
9 | "scale" : "3x"
10 | },
11 | {
12 | "orientation" : "landscape",
13 | "idiom" : "iphone",
14 | "extent" : "full-screen",
15 | "minimum-system-version" : "8.0",
16 | "subtype" : "736h",
17 | "scale" : "3x"
18 | },
19 | {
20 | "orientation" : "portrait",
21 | "idiom" : "iphone",
22 | "extent" : "full-screen",
23 | "minimum-system-version" : "8.0",
24 | "subtype" : "667h",
25 | "scale" : "2x"
26 | },
27 | {
28 | "orientation" : "portrait",
29 | "idiom" : "iphone",
30 | "extent" : "full-screen",
31 | "minimum-system-version" : "7.0",
32 | "scale" : "2x"
33 | },
34 | {
35 | "orientation" : "portrait",
36 | "idiom" : "iphone",
37 | "extent" : "full-screen",
38 | "minimum-system-version" : "7.0",
39 | "subtype" : "retina4",
40 | "scale" : "2x"
41 | },
42 | {
43 | "orientation" : "portrait",
44 | "idiom" : "ipad",
45 | "extent" : "full-screen",
46 | "minimum-system-version" : "7.0",
47 | "scale" : "1x"
48 | },
49 | {
50 | "orientation" : "landscape",
51 | "idiom" : "ipad",
52 | "extent" : "full-screen",
53 | "minimum-system-version" : "7.0",
54 | "scale" : "1x"
55 | },
56 | {
57 | "orientation" : "portrait",
58 | "idiom" : "ipad",
59 | "extent" : "full-screen",
60 | "minimum-system-version" : "7.0",
61 | "scale" : "2x"
62 | },
63 | {
64 | "orientation" : "landscape",
65 | "idiom" : "ipad",
66 | "extent" : "full-screen",
67 | "minimum-system-version" : "7.0",
68 | "scale" : "2x"
69 | },
70 | {
71 | "orientation" : "portrait",
72 | "idiom" : "iphone",
73 | "extent" : "full-screen",
74 | "scale" : "1x"
75 | },
76 | {
77 | "orientation" : "portrait",
78 | "idiom" : "iphone",
79 | "extent" : "full-screen",
80 | "scale" : "2x"
81 | },
82 | {
83 | "orientation" : "portrait",
84 | "idiom" : "iphone",
85 | "extent" : "full-screen",
86 | "subtype" : "retina4",
87 | "scale" : "2x"
88 | },
89 | {
90 | "orientation" : "portrait",
91 | "idiom" : "ipad",
92 | "extent" : "to-status-bar",
93 | "scale" : "1x"
94 | },
95 | {
96 | "orientation" : "portrait",
97 | "idiom" : "ipad",
98 | "extent" : "full-screen",
99 | "scale" : "1x"
100 | },
101 | {
102 | "orientation" : "landscape",
103 | "idiom" : "ipad",
104 | "extent" : "to-status-bar",
105 | "scale" : "1x"
106 | },
107 | {
108 | "orientation" : "landscape",
109 | "idiom" : "ipad",
110 | "extent" : "full-screen",
111 | "scale" : "1x"
112 | },
113 | {
114 | "orientation" : "portrait",
115 | "idiom" : "ipad",
116 | "extent" : "to-status-bar",
117 | "scale" : "2x"
118 | },
119 | {
120 | "orientation" : "portrait",
121 | "idiom" : "ipad",
122 | "extent" : "full-screen",
123 | "scale" : "2x"
124 | },
125 | {
126 | "orientation" : "landscape",
127 | "idiom" : "ipad",
128 | "extent" : "to-status-bar",
129 | "scale" : "2x"
130 | },
131 | {
132 | "orientation" : "landscape",
133 | "idiom" : "ipad",
134 | "extent" : "full-screen",
135 | "scale" : "2x"
136 | }
137 | ],
138 | "info" : {
139 | "version" : 1,
140 | "author" : "xcode"
141 | }
142 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/Logo.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "logo1024-universal-341@1x.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "logo1024-universal-341@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "logo1024-universal-341@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/Logo.imageset/logo1024-universal-341@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/Logo.imageset/logo1024-universal-341@1x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/Logo.imageset/logo1024-universal-341@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/Logo.imageset/logo1024-universal-341@2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/Logo.imageset/logo1024-universal-341@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/Logo.imageset/logo1024-universal-341@3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "ic_account_circle_36pt.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "ic_account_circle_36pt_2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "ic_account_circle_36pt_3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | },
23 | "properties" : {
24 | "template-rendering-intent" : "template"
25 | }
26 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/ic_account_circle_36pt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/ic_account_circle_36pt.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/ic_account_circle_36pt_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/ic_account_circle_36pt_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/ic_account_circle_36pt_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_account_circle_36pt.imageset/ic_account_circle_36pt_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "ic_more_vert_white.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "ic_more_vert_white_2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "ic_more_vert_white_3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | },
23 | "properties" : {
24 | "template-rendering-intent" : "template"
25 | }
26 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/ic_more_vert_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/ic_more_vert_white.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/ic_more_vert_white_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/ic_more_vert_white_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/ic_more_vert_white_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_more_vert_white.imageset/ic_more_vert_white_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_send.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "ic_send.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "ic_send_2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "ic_send_3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | },
23 | "properties" : {
24 | "template-rendering-intent" : "template"
25 | }
26 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_send.imageset/ic_send.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_send.imageset/ic_send.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_send.imageset/ic_send_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_send.imageset/ic_send_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/ic_send.imageset/ic_send_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/ic_send.imageset/ic_send_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/photo_camera.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "photo_camera_2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "filename" : "photo_camera_3x.png",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "version" : 1,
20 | "author" : "xcode"
21 | }
22 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/photo_camera.imageset/photo_camera_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/photo_camera.imageset/photo_camera_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/photo_camera.imageset/photo_camera_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/photo_camera.imageset/photo_camera_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/photo_library.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "photo_library_2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "filename" : "photo_library_3x.png",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "version" : 1,
20 | "author" : "xcode"
21 | }
22 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/photo_library.imageset/photo_library_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/photo_library.imageset/photo_library_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/photo_library.imageset/photo_library_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/photo_library.imageset/photo_library_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/switch_camera.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "switch_camera_2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "filename" : "switch_camera_3x.png",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "version" : 1,
20 | "author" : "xcode"
21 | }
22 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/switch_camera.imageset/switch_camera_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/switch_camera.imageset/switch_camera_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/switch_camera.imageset/switch_camera_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/switch_camera.imageset/switch_camera_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/video_camera.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "video_camera_2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "filename" : "video_camera_3x.png",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "version" : 1,
20 | "author" : "xcode"
21 | }
22 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/video_camera.imageset/video_camera_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/video_camera.imageset/video_camera_2x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/Images.xcassets/video_camera.imageset/video_camera_3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Images.xcassets/video_camera.imageset/video_camera_3x.png
--------------------------------------------------------------------------------
/ios/mlkit-automl/LaunchScreen.xib:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 48;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 1001E6F720CEE0DD00445CB3 /* UIUtilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1001E6F620CEE0DD00445CB3 /* UIUtilities.swift */; };
11 | 1001E6F920CF021E00445CB3 /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1001E6F820CF021E00445CB3 /* CameraViewController.swift */; };
12 | 10224B582087E43100A77316 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 10224B572087E43100A77316 /* ViewController.swift */; };
13 | 10224B5E2087E49F00A77316 /* MLKitExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 10224B5B2087E49F00A77316 /* MLKitExtensions.swift */; };
14 | 1084896A2243DBC2004D37A8 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 108489652243DBC2004D37A8 /* LaunchScreen.xib */; };
15 | 55FB34094B5F048189396A40 /* Pods_MLVisionExample.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0A150F53C4F41C9D05AC3FD /* Pods_MLVisionExample.framework */; };
16 | 7FB1B7DB226721F900887A8A /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B7DA226721F900887A8A /* GoogleService-Info.plist */; };
17 | 7FB1B7DE2268311D00887A8A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B7DD2268311D00887A8A /* Images.xcassets */; };
18 | 7FB1B7E02268344B00887A8A /* ImageClassifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FB1B7DF2268344B00887A8A /* ImageClassifier.swift */; };
19 | 7FB1B7FF22688A5100887A8A /* automl in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B7FE22688A5100887A8A /* automl */; };
20 | 7FB1B80722688A6300887A8A /* dandelion_4110356_640.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B80122688A6300887A8A /* dandelion_4110356_640.jpg */; };
21 | 7FB1B80822688A6300887A8A /* rose_3063284_640.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B80222688A6300887A8A /* rose_3063284_640.jpg */; };
22 | 7FB1B80922688A6300887A8A /* rose_1463562_640.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B80322688A6300887A8A /* rose_1463562_640.jpg */; };
23 | 7FB1B80A22688A6300887A8A /* sunflower_3292932_640.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B80422688A6300887A8A /* sunflower_3292932_640.jpg */; };
24 | 7FB1B80B22688A6300887A8A /* sunflower_1627193_640.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B80522688A6300887A8A /* sunflower_1627193_640.jpg */; };
25 | 7FB1B80C22688A6300887A8A /* dandelion_2817950_640.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 7FB1B80622688A6300887A8A /* dandelion_2817950_640.jpg */; };
26 | BB287B3920729CE90069707A /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BB287B3820729CE90069707A /* AppDelegate.swift */; };
27 | BB287B3E20729CE90069707A /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = BB287B3C20729CE90069707A /* Main.storyboard */; };
28 | /* End PBXBuildFile section */
29 |
30 | /* Begin PBXFileReference section */
31 | 1001E6F620CEE0DD00445CB3 /* UIUtilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UIUtilities.swift; sourceTree = ""; };
32 | 1001E6F820CF021E00445CB3 /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = ""; };
33 | 10224B572087E43100A77316 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; };
34 | 10224B5B2087E49F00A77316 /* MLKitExtensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MLKitExtensions.swift; sourceTree = ""; };
35 | 108489652243DBC2004D37A8 /* LaunchScreen.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = LaunchScreen.xib; sourceTree = ""; };
36 | 316D2FE96CBFD69012921885 /* Pods-MLVisionExampleObjc.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MLVisionExampleObjc.debug.xcconfig"; path = "Pods/Target Support Files/Pods-MLVisionExampleObjc/Pods-MLVisionExampleObjc.debug.xcconfig"; sourceTree = ""; };
37 | 7FB1B7DA226721F900887A8A /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = ""; };
38 | 7FB1B7DD2268311D00887A8A /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = ""; };
39 | 7FB1B7DF2268344B00887A8A /* ImageClassifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageClassifier.swift; sourceTree = ""; };
40 | 7FB1B7FE22688A5100887A8A /* automl */ = {isa = PBXFileReference; lastKnownFileType = folder; name = automl; path = Resources/automl; sourceTree = ""; };
41 | 7FB1B80122688A6300887A8A /* dandelion_4110356_640.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = dandelion_4110356_640.jpg; sourceTree = ""; };
42 | 7FB1B80222688A6300887A8A /* rose_3063284_640.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = rose_3063284_640.jpg; sourceTree = ""; };
43 | 7FB1B80322688A6300887A8A /* rose_1463562_640.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = rose_1463562_640.jpg; sourceTree = ""; };
44 | 7FB1B80422688A6300887A8A /* sunflower_3292932_640.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = sunflower_3292932_640.jpg; sourceTree = ""; };
45 | 7FB1B80522688A6300887A8A /* sunflower_1627193_640.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = sunflower_1627193_640.jpg; sourceTree = ""; };
46 | 7FB1B80622688A6300887A8A /* dandelion_2817950_640.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = dandelion_2817950_640.jpg; sourceTree = ""; };
47 | A9BAAA1028D4A7BF4FFB4AC4 /* Pods_MLVisionExampleObjc.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_MLVisionExampleObjc.framework; sourceTree = BUILT_PRODUCTS_DIR; };
48 | BB287B3520729CE90069707A /* MLVisionExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = MLVisionExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
49 | BB287B3820729CE90069707A /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; };
50 | BB287B3D20729CE90069707A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
51 | BB287B4420729CE90069707A /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
52 | D0A150F53C4F41C9D05AC3FD /* Pods_MLVisionExample.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_MLVisionExample.framework; sourceTree = BUILT_PRODUCTS_DIR; };
53 | D3C0DE3E5E6077DACBC1038C /* Pods-MLVisionExampleObjc.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MLVisionExampleObjc.release.xcconfig"; path = "Pods/Target Support Files/Pods-MLVisionExampleObjc/Pods-MLVisionExampleObjc.release.xcconfig"; sourceTree = ""; };
54 | EC96B2749DA03A4E771B4331 /* Pods-MLVisionExample.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MLVisionExample.release.xcconfig"; path = "Pods/Target Support Files/Pods-MLVisionExample/Pods-MLVisionExample.release.xcconfig"; sourceTree = ""; };
55 | FD59483180A934624B3A92C2 /* Pods-MLVisionExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MLVisionExample.debug.xcconfig"; path = "Pods/Target Support Files/Pods-MLVisionExample/Pods-MLVisionExample.debug.xcconfig"; sourceTree = ""; };
56 | /* End PBXFileReference section */
57 |
58 | /* Begin PBXFrameworksBuildPhase section */
59 | BB287B3220729CE90069707A /* Frameworks */ = {
60 | isa = PBXFrameworksBuildPhase;
61 | buildActionMask = 2147483647;
62 | files = (
63 | 55FB34094B5F048189396A40 /* Pods_MLVisionExample.framework in Frameworks */,
64 | );
65 | runOnlyForDeploymentPostprocessing = 0;
66 | };
67 | /* End PBXFrameworksBuildPhase section */
68 |
69 | /* Begin PBXGroup section */
70 | 10224B642087E52600A77316 /* Resources */ = {
71 | isa = PBXGroup;
72 | children = (
73 | 7FB1B7FE22688A5100887A8A /* automl */,
74 | 7FB1B7DA226721F900887A8A /* GoogleService-Info.plist */,
75 | 108489652243DBC2004D37A8 /* LaunchScreen.xib */,
76 | 7FB1B7DD2268311D00887A8A /* Images.xcassets */,
77 | 7FB1B80022688A6300887A8A /* flowers */,
78 | );
79 | name = Resources;
80 | sourceTree = "&lt;group&gt;";
81 | };
82 | 446F9D8D143ACBBBD4B9BF24 /* Frameworks */ = {
83 | isa = PBXGroup;
84 | children = (
85 | D0A150F53C4F41C9D05AC3FD /* Pods_MLVisionExample.framework */,
86 | A9BAAA1028D4A7BF4FFB4AC4 /* Pods_MLVisionExampleObjc.framework */,
87 | );
88 | name = Frameworks;
89 | sourceTree = "&lt;group&gt;";
90 | };
91 | 72F589E8F4E2E28DA03DDCD7 /* Pods */ = {
92 | isa = PBXGroup;
93 | children = (
94 | FD59483180A934624B3A92C2 /* Pods-MLVisionExample.debug.xcconfig */,
95 | EC96B2749DA03A4E771B4331 /* Pods-MLVisionExample.release.xcconfig */,
96 | 316D2FE96CBFD69012921885 /* Pods-MLVisionExampleObjc.debug.xcconfig */,
97 | D3C0DE3E5E6077DACBC1038C /* Pods-MLVisionExampleObjc.release.xcconfig */,
98 | );
99 | name = Pods;
100 | sourceTree = "&lt;group&gt;";
101 | };
102 | 7FB1B80022688A6300887A8A /* flowers */ = {
103 | isa = PBXGroup;
104 | children = (
105 | 7FB1B80122688A6300887A8A /* dandelion_4110356_640.jpg */,
106 | 7FB1B80222688A6300887A8A /* rose_3063284_640.jpg */,
107 | 7FB1B80322688A6300887A8A /* rose_1463562_640.jpg */,
108 | 7FB1B80422688A6300887A8A /* sunflower_3292932_640.jpg */,
109 | 7FB1B80522688A6300887A8A /* sunflower_1627193_640.jpg */,
110 | 7FB1B80622688A6300887A8A /* dandelion_2817950_640.jpg */,
111 | );
112 | name = flowers;
113 | path = Resources/flowers;
114 | sourceTree = "&lt;group&gt;";
115 | };
116 | BB287B2C20729CE90069707A = {
117 | isa = PBXGroup;
118 | children = (
119 | BB287B3720729CE90069707A /* MLVisionExample */,
120 | 10224B642087E52600A77316 /* Resources */,
121 | BB287B3620729CE90069707A /* Products */,
122 | 72F589E8F4E2E28DA03DDCD7 /* Pods */,
123 | 446F9D8D143ACBBBD4B9BF24 /* Frameworks */,
124 | );
125 | sourceTree = "&lt;group&gt;";
126 | };
127 | BB287B3620729CE90069707A /* Products */ = {
128 | isa = PBXGroup;
129 | children = (
130 | BB287B3520729CE90069707A /* MLVisionExample.app */,
131 | );
132 | name = Products;
133 | sourceTree = "&lt;group&gt;";
134 | };
135 | BB287B3720729CE90069707A /* MLVisionExample */ = {
136 | isa = PBXGroup;
137 | children = (
138 | BB287B3820729CE90069707A /* AppDelegate.swift */,
139 | 10224B572087E43100A77316 /* ViewController.swift */,
140 | 10224B5B2087E49F00A77316 /* MLKitExtensions.swift */,
141 | 1001E6F620CEE0DD00445CB3 /* UIUtilities.swift */,
142 | 1001E6F820CF021E00445CB3 /* CameraViewController.swift */,
143 | 7FB1B7DF2268344B00887A8A /* ImageClassifier.swift */,
144 | BB287B4A2072A0200069707A /* Supporting Files */,
145 | BB287B4420729CE90069707A /* Info.plist */,
146 | );
147 | path = MLVisionExample;
148 | sourceTree = "&lt;group&gt;";
149 | };
150 | BB287B4A2072A0200069707A /* Supporting Files */ = {
151 | isa = PBXGroup;
152 | children = (
153 | BB287B3C20729CE90069707A /* Main.storyboard */,
154 | );
155 | name = "Supporting Files";
156 | sourceTree = "&lt;group&gt;";
157 | };
158 | /* End PBXGroup section */
159 |
160 | /* Begin PBXNativeTarget section */
161 | BB287B3420729CE90069707A /* MLVisionExample */ = {
162 | isa = PBXNativeTarget;
163 | buildConfigurationList = BB287B4720729CE90069707A /* Build configuration list for PBXNativeTarget "MLVisionExample" */;
164 | buildPhases = (
165 | 6EEA900773BA2BF92881E2E3 /* [CP] Check Pods Manifest.lock */,
166 | BB287B3120729CE90069707A /* Sources */,
167 | BB287B3220729CE90069707A /* Frameworks */,
168 | BB287B3320729CE90069707A /* Resources */,
169 | 4EF1B425A8A6D802772066EA /* [CP] Embed Pods Frameworks */,
170 | );
171 | buildRules = (
172 | );
173 | dependencies = (
174 | );
175 | name = MLVisionExample;
176 | productName = "ml-kit-sampler";
177 | productReference = BB287B3520729CE90069707A /* MLVisionExample.app */;
178 | productType = "com.apple.product-type.application";
179 | };
180 | /* End PBXNativeTarget section */
181 |
182 | /* Begin PBXProject section */
183 | BB287B2D20729CE90069707A /* Project object */ = {
184 | isa = PBXProject;
185 | attributes = {
186 | LastSwiftUpdateCheck = 0930;
187 | LastUpgradeCheck = 0930;
188 | ORGANIZATIONNAME = "Google Inc.";
189 | TargetAttributes = {
190 | BB287B3420729CE90069707A = {
191 | CreatedOnToolsVersion = 9.2;
192 | LastSwiftMigration = 1020;
193 | ProvisioningStyle = Manual;
194 | };
195 | };
196 | };
197 | buildConfigurationList = BB287B3020729CE90069707A /* Build configuration list for PBXProject "MLVisionExample" */;
198 | compatibilityVersion = "Xcode 8.0";
199 | developmentRegion = en;
200 | hasScannedForEncodings = 0;
201 | knownRegions = (
202 | en,
203 | Base,
204 | );
205 | mainGroup = BB287B2C20729CE90069707A;
206 | productRefGroup = BB287B3620729CE90069707A /* Products */;
207 | projectDirPath = "";
208 | projectRoot = "";
209 | targets = (
210 | BB287B3420729CE90069707A /* MLVisionExample */,
211 | );
212 | };
213 | /* End PBXProject section */
214 |
215 | /* Begin PBXResourcesBuildPhase section */
216 | BB287B3320729CE90069707A /* Resources */ = {
217 | isa = PBXResourcesBuildPhase;
218 | buildActionMask = 2147483647;
219 | files = (
220 | 7FB1B80822688A6300887A8A /* rose_3063284_640.jpg in Resources */,
221 | 7FB1B7DB226721F900887A8A /* GoogleService-Info.plist in Resources */,
222 | 1084896A2243DBC2004D37A8 /* LaunchScreen.xib in Resources */,
223 | 7FB1B80A22688A6300887A8A /* sunflower_3292932_640.jpg in Resources */,
224 | 7FB1B80B22688A6300887A8A /* sunflower_1627193_640.jpg in Resources */,
225 | 7FB1B80722688A6300887A8A /* dandelion_4110356_640.jpg in Resources */,
226 | 7FB1B7DE2268311D00887A8A /* Images.xcassets in Resources */,
227 | 7FB1B7FF22688A5100887A8A /* automl in Resources */,
228 | BB287B3E20729CE90069707A /* Main.storyboard in Resources */,
229 | 7FB1B80922688A6300887A8A /* rose_1463562_640.jpg in Resources */,
230 | 7FB1B80C22688A6300887A8A /* dandelion_2817950_640.jpg in Resources */,
231 | );
232 | runOnlyForDeploymentPostprocessing = 0;
233 | };
234 | /* End PBXResourcesBuildPhase section */
235 |
236 | /* Begin PBXShellScriptBuildPhase section */
237 | 4EF1B425A8A6D802772066EA /* [CP] Embed Pods Frameworks */ = {
238 | isa = PBXShellScriptBuildPhase;
239 | buildActionMask = 2147483647;
240 | files = (
241 | );
242 | inputFileListPaths = (
243 | );
244 | inputPaths = (
245 | "${SRCROOT}/Pods/Target Support Files/Pods-MLVisionExample/Pods-MLVisionExample-frameworks.sh",
246 | "${BUILT_PRODUCTS_DIR}/GTMSessionFetcher/GTMSessionFetcher.framework",
247 | "${BUILT_PRODUCTS_DIR}/GoogleAPIClientForREST/GoogleAPIClientForREST.framework",
248 | "${BUILT_PRODUCTS_DIR}/GoogleToolboxForMac/GoogleToolboxForMac.framework",
249 | "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework",
250 | "${BUILT_PRODUCTS_DIR}/Protobuf/Protobuf.framework",
251 | );
252 | name = "[CP] Embed Pods Frameworks";
253 | outputFileListPaths = (
254 | );
255 | outputPaths = (
256 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/GTMSessionFetcher.framework",
257 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/GoogleAPIClientForREST.framework",
258 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/GoogleToolboxForMac.framework",
259 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/GoogleUtilities.framework",
260 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Protobuf.framework",
261 | );
262 | runOnlyForDeploymentPostprocessing = 0;
263 | shellPath = /bin/sh;
264 | shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-MLVisionExample/Pods-MLVisionExample-frameworks.sh\"\n";
265 | showEnvVarsInLog = 0;
266 | };
267 | 6EEA900773BA2BF92881E2E3 /* [CP] Check Pods Manifest.lock */ = {
268 | isa = PBXShellScriptBuildPhase;
269 | buildActionMask = 2147483647;
270 | files = (
271 | );
272 | inputFileListPaths = (
273 | );
274 | inputPaths = (
275 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
276 | "${PODS_ROOT}/Manifest.lock",
277 | );
278 | name = "[CP] Check Pods Manifest.lock";
279 | outputFileListPaths = (
280 | );
281 | outputPaths = (
282 | "$(DERIVED_FILE_DIR)/Pods-MLVisionExample-checkManifestLockResult.txt",
283 | );
284 | runOnlyForDeploymentPostprocessing = 0;
285 | shellPath = /bin/sh;
286 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
287 | showEnvVarsInLog = 0;
288 | };
289 | /* End PBXShellScriptBuildPhase section */
290 |
291 | /* Begin PBXSourcesBuildPhase section */
292 | BB287B3120729CE90069707A /* Sources */ = {
293 | isa = PBXSourcesBuildPhase;
294 | buildActionMask = 2147483647;
295 | files = (
296 | 10224B582087E43100A77316 /* ViewController.swift in Sources */,
297 | 1001E6F920CF021E00445CB3 /* CameraViewController.swift in Sources */,
298 | BB287B3920729CE90069707A /* AppDelegate.swift in Sources */,
299 | 10224B5E2087E49F00A77316 /* MLKitExtensions.swift in Sources */,
300 | 7FB1B7E02268344B00887A8A /* ImageClassifier.swift in Sources */,
301 | 1001E6F720CEE0DD00445CB3 /* UIUtilities.swift in Sources */,
302 | );
303 | runOnlyForDeploymentPostprocessing = 0;
304 | };
305 | /* End PBXSourcesBuildPhase section */
306 |
307 | /* Begin PBXVariantGroup section */
308 | BB287B3C20729CE90069707A /* Main.storyboard */ = {
309 | isa = PBXVariantGroup;
310 | children = (
311 | BB287B3D20729CE90069707A /* Base */,
312 | );
313 | name = Main.storyboard;
314 | sourceTree = "&lt;group&gt;";
315 | };
316 | /* End PBXVariantGroup section */
317 |
318 | /* Begin XCBuildConfiguration section */
319 | BB287B4520729CE90069707A /* Debug */ = {
320 | isa = XCBuildConfiguration;
321 | buildSettings = {
322 | ALWAYS_SEARCH_USER_PATHS = NO;
323 | CLANG_ANALYZER_NONNULL = YES;
324 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
325 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
326 | CLANG_CXX_LIBRARY = "libc++";
327 | CLANG_ENABLE_MODULES = YES;
328 | CLANG_ENABLE_OBJC_ARC = YES;
329 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
330 | CLANG_WARN_BOOL_CONVERSION = YES;
331 | CLANG_WARN_COMMA = YES;
332 | CLANG_WARN_CONSTANT_CONVERSION = YES;
333 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
334 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
335 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
336 | CLANG_WARN_EMPTY_BODY = YES;
337 | CLANG_WARN_ENUM_CONVERSION = YES;
338 | CLANG_WARN_INFINITE_RECURSION = YES;
339 | CLANG_WARN_INT_CONVERSION = YES;
340 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
341 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
342 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
343 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
344 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
345 | CLANG_WARN_STRICT_PROTOTYPES = YES;
346 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
347 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
348 | CLANG_WARN_UNREACHABLE_CODE = YES;
349 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
350 | CODE_SIGN_IDENTITY = "iPhone Developer";
351 | COPY_PHASE_STRIP = NO;
352 | DEBUG_INFORMATION_FORMAT = dwarf;
353 | ENABLE_STRICT_OBJC_MSGSEND = YES;
354 | ENABLE_TESTABILITY = YES;
355 | GCC_C_LANGUAGE_STANDARD = gnu11;
356 | GCC_DYNAMIC_NO_PIC = NO;
357 | GCC_NO_COMMON_BLOCKS = YES;
358 | GCC_OPTIMIZATION_LEVEL = 0;
359 | GCC_PREPROCESSOR_DEFINITIONS = (
360 | "DEBUG=1",
361 | "$(inherited)",
362 | );
363 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
364 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
365 | GCC_WARN_UNDECLARED_SELECTOR = YES;
366 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
367 | GCC_WARN_UNUSED_FUNCTION = YES;
368 | GCC_WARN_UNUSED_VARIABLE = YES;
369 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
370 | MTL_ENABLE_DEBUG_INFO = YES;
371 | ONLY_ACTIVE_ARCH = YES;
372 | SDKROOT = iphoneos;
373 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
374 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
375 | };
376 | name = Debug;
377 | };
378 | BB287B4620729CE90069707A /* Release */ = {
379 | isa = XCBuildConfiguration;
380 | buildSettings = {
381 | ALWAYS_SEARCH_USER_PATHS = NO;
382 | CLANG_ANALYZER_NONNULL = YES;
383 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
384 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
385 | CLANG_CXX_LIBRARY = "libc++";
386 | CLANG_ENABLE_MODULES = YES;
387 | CLANG_ENABLE_OBJC_ARC = YES;
388 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
389 | CLANG_WARN_BOOL_CONVERSION = YES;
390 | CLANG_WARN_COMMA = YES;
391 | CLANG_WARN_CONSTANT_CONVERSION = YES;
392 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
393 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
394 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
395 | CLANG_WARN_EMPTY_BODY = YES;
396 | CLANG_WARN_ENUM_CONVERSION = YES;
397 | CLANG_WARN_INFINITE_RECURSION = YES;
398 | CLANG_WARN_INT_CONVERSION = YES;
399 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
400 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
401 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
402 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
403 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
404 | CLANG_WARN_STRICT_PROTOTYPES = YES;
405 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
406 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
407 | CLANG_WARN_UNREACHABLE_CODE = YES;
408 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
409 | CODE_SIGN_IDENTITY = "iPhone Developer";
410 | COPY_PHASE_STRIP = NO;
411 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
412 | ENABLE_NS_ASSERTIONS = NO;
413 | ENABLE_STRICT_OBJC_MSGSEND = YES;
414 | GCC_C_LANGUAGE_STANDARD = gnu11;
415 | GCC_NO_COMMON_BLOCKS = YES;
416 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
417 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
418 | GCC_WARN_UNDECLARED_SELECTOR = YES;
419 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
420 | GCC_WARN_UNUSED_FUNCTION = YES;
421 | GCC_WARN_UNUSED_VARIABLE = YES;
422 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
423 | MTL_ENABLE_DEBUG_INFO = NO;
424 | SDKROOT = iphoneos;
425 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
426 | VALIDATE_PRODUCT = YES;
427 | };
428 | name = Release;
429 | };
430 | BB287B4820729CE90069707A /* Debug */ = {
431 | isa = XCBuildConfiguration;
432 | baseConfigurationReference = FD59483180A934624B3A92C2 /* Pods-MLVisionExample.debug.xcconfig */;
433 | buildSettings = {
434 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
435 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
436 | CODE_SIGN_IDENTITY = "iPhone Developer";
437 | CODE_SIGN_STYLE = Manual;
438 | DEVELOPMENT_TEAM = PNR9X9TVFG;
439 | ENABLE_BITCODE = NO;
440 | INFOPLIST_FILE = "$(SRCROOT)/MLVisionExample/Info.plist";
441 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
442 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
443 | PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.MLVisionExample;
444 | PRODUCT_NAME = "$(TARGET_NAME)";
445 | PROVISIONING_PROFILE = "";
446 | PROVISIONING_PROFILE_SPECIFIER = Wildcard;
447 | SWIFT_VERSION = 5.0;
448 | TARGETED_DEVICE_FAMILY = "1,2";
449 | };
450 | name = Debug;
451 | };
452 | BB287B4920729CE90069707A /* Release */ = {
453 | isa = XCBuildConfiguration;
454 | baseConfigurationReference = EC96B2749DA03A4E771B4331 /* Pods-MLVisionExample.release.xcconfig */;
455 | buildSettings = {
456 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
457 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
458 | CODE_SIGN_IDENTITY = "iPhone Developer";
459 | CODE_SIGN_STYLE = Manual;
460 | DEVELOPMENT_TEAM = PNR9X9TVFG;
461 | ENABLE_BITCODE = NO;
462 | INFOPLIST_FILE = "$(SRCROOT)/MLVisionExample/Info.plist";
463 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
464 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
465 | PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.MLVisionExample;
466 | PRODUCT_NAME = "$(TARGET_NAME)";
467 | PROVISIONING_PROFILE = "";
468 | PROVISIONING_PROFILE_SPECIFIER = Wildcard;
469 | SWIFT_VERSION = 5.0;
470 | TARGETED_DEVICE_FAMILY = "1,2";
471 | };
472 | name = Release;
473 | };
474 | /* End XCBuildConfiguration section */
475 |
476 | /* Begin XCConfigurationList section */
477 | BB287B3020729CE90069707A /* Build configuration list for PBXProject "MLVisionExample" */ = {
478 | isa = XCConfigurationList;
479 | buildConfigurations = (
480 | BB287B4520729CE90069707A /* Debug */,
481 | BB287B4620729CE90069707A /* Release */,
482 | );
483 | defaultConfigurationIsVisible = 0;
484 | defaultConfigurationName = Release;
485 | };
486 | BB287B4720729CE90069707A /* Build configuration list for PBXNativeTarget "MLVisionExample" */ = {
487 | isa = XCConfigurationList;
488 | buildConfigurations = (
489 | BB287B4820729CE90069707A /* Debug */,
490 | BB287B4920729CE90069707A /* Release */,
491 | );
492 | defaultConfigurationIsVisible = 0;
493 | defaultConfigurationName = Release;
494 | };
495 | /* End XCConfigurationList section */
496 | };
497 | rootObject = BB287B2D20729CE90069707A /* Project object */;
498 | }
499 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright 2019 Google LLC
3 | //
4 | // Licensed under the Apache License, Version 2.0 (the "License");
5 | // you may not use this file except in compliance with the License.
6 | // You may obtain a copy of the License at
7 | //
8 | // https://www.apache.org/licenses/LICENSE-2.0
9 | //
10 | // Unless required by applicable law or agreed to in writing, software
11 | // distributed under the License is distributed on an "AS IS" BASIS,
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | // See the License for the specific language governing permissions and
14 | // limitations under the License.
15 | //
16 |
17 | import UIKit
18 | import FirebaseCore
19 |
// Application delegate: configures Firebase before the first view controller
// loads so the ML Kit AutoML APIs used elsewhere can reach their backend.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

  // Root window, populated from Main.storyboard at launch.
  var window: UIWindow?

  func application(
    _ application: UIApplication,
    didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
  ) -> Bool {
    // Reads GoogleService-Info.plist; must run exactly once, before any other
    // Firebase API call.
    FirebaseApp.configure()
    return true
  }
}
33 |
34 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/CameraViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright 2019 Google LLC
3 | //
4 | // Licensed under the Apache License, Version 2.0 (the "License");
5 | // you may not use this file except in compliance with the License.
6 | // You may obtain a copy of the License at
7 | //
8 | // https://www.apache.org/licenses/LICENSE-2.0
9 | //
10 | // Unless required by applicable law or agreed to in writing, software
11 | // distributed under the License is distributed on an "AS IS" BASIS,
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | // See the License for the specific language governing permissions and
14 | // limitations under the License.
15 | //
16 |
17 | import AVFoundation
18 | import CoreVideo
19 | import UIKit
20 |
// Streams frames from the device camera through an AVCaptureSession and
// classifies each frame on-device with the AutoML model, showing the result
// text over a still-image preview of the most recent frame.
//
// Threading: session configuration and start/stop run on sessionQueue; frames
// arrive on the video-output queue (see the delegate extension below).
class CameraViewController: UIViewController {

  // Which camera feeds the session; flipped by switchCamera(_:).
  private var isUsingFrontCamera = false
  // NOTE(review): created in viewDidLoad and resized in viewDidLayoutSubviews,
  // but never added as a sublayer of cameraView anywhere in this file — the
  // on-screen preview appears to come from previewOverlayView instead.
  // Confirm whether this layer is still needed.
  private var previewLayer: AVCaptureVideoPreviewLayer!
  private lazy var captureSession = AVCaptureSession()
  // Serial queue serializing all capture-session mutations and start/stop.
  private lazy var sessionQueue = DispatchQueue(label: Constant.sessionQueueLabel)
  // NOTE(review): type and property are spelled "ImageClassifer"/"classifer";
  // verify this matches the class actually declared in ImageClassifier.swift.
  private lazy var classifer = ImageClassifer()
  // Most recent frame from the camera; rendered into previewOverlayView after
  // each classification completes.
  private var lastFrame: CMSampleBuffer?

  // Image view showing the latest processed camera frame.
  private lazy var previewOverlayView: UIImageView = {
    precondition(isViewLoaded)
    let previewOverlayView = UIImageView(frame: .zero)
    previewOverlayView.translatesAutoresizingMaskIntoConstraints = false
    return previewOverlayView
  }()

  // MARK: - IBOutlets

  @IBOutlet fileprivate weak var cameraView: UIView!
  @IBOutlet fileprivate weak var detectionResultText: UILabel!

  // MARK: - UIViewController

  override func viewDidLoad() {
    super.viewDidLoad()

    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    setUpPreviewOverlayView()
    setUpCaptureSessionOutput()
    setUpCaptureSessionInput()
  }

  override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)

    startSession()
  }

  override func viewDidDisappear(_ animated: Bool) {
    super.viewDidDisappear(animated)

    stopSession()
  }

  override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()

    // Keeps the (unattached — see note above) preview layer sized to the view.
    previewLayer.frame = cameraView.frame
  }

  // MARK: - IBActions

  @IBAction func switchCamera(_ sender: Any) {
    isUsingFrontCamera = !isUsingFrontCamera
    // Rebuilding the input on sessionQueue swaps between front/back cameras.
    setUpCaptureSessionInput()
  }

  // MARK: - On-Device AutoML Detections

  // Classifies one frame and publishes the result (or error text) to the UI.
  // The DispatchGroup + wait() deliberately blocks the caller — the video
  // output queue — until classification finishes, so frames arriving in the
  // meantime are dropped rather than queued up.
  private func detectImageLabelsAutoMLOnDevice(in sampleBuffer: CMSampleBuffer) {

    let group = DispatchGroup()
    group.enter()
    classifer.classifySampleBuffer(sampleBuffer, isUsingFrontCamera: isUsingFrontCamera) {
      resultText, error in
      defer { group.leave() }

      // NOTE(review): UIKit views are mutated below — this assumes the
      // classifier invokes its completion on the main thread; confirm.
      self.updatePreviewOverlayView()

      if let error = error {
        self.detectionResultText.text = error.localizedDescription
        return
      }

      self.detectionResultText.text = resultText
    }

    group.wait()
  }

  // MARK: - Private

  // Adds a 32BGRA video-data output to the session (on sessionQueue).
  private func setUpCaptureSessionOutput() {
    sessionQueue.async {
      self.captureSession.beginConfiguration()
      // When performing latency tests to determine ideal capture settings,
      // run the app in 'release' mode to get accurate performance metrics.
      self.captureSession.sessionPreset = AVCaptureSession.Preset.medium

      let output = AVCaptureVideoDataOutput()
      output.videoSettings =
        [(kCVPixelBufferPixelFormatTypeKey as String): kCVPixelFormatType_32BGRA]
      let outputQueue = DispatchQueue(label: Constant.videoDataOutputQueueLabel)
      output.setSampleBufferDelegate(self, queue: outputQueue)
      guard self.captureSession.canAddOutput(output) else {
        print("Failed to add capture session output.")
        return
      }
      self.captureSession.addOutput(output)
      self.captureSession.commitConfiguration()
    }
  }

  // (Re)attaches the camera input matching isUsingFrontCamera, removing any
  // previously attached inputs first (on sessionQueue).
  private func setUpCaptureSessionInput() {
    sessionQueue.async {
      let cameraPosition: AVCaptureDevice.Position = self.isUsingFrontCamera ? .front : .back
      guard let device = self.captureDevice(forPosition: cameraPosition) else {
        print("Failed to get capture device for camera position: \(cameraPosition)")
        return
      }
      do {
        self.captureSession.beginConfiguration()
        let currentInputs = self.captureSession.inputs
        for input in currentInputs {
          self.captureSession.removeInput(input)
        }

        let input = try AVCaptureDeviceInput(device: device)
        guard self.captureSession.canAddInput(input) else {
          print("Failed to add capture session input.")
          return
        }
        self.captureSession.addInput(input)
        self.captureSession.commitConfiguration()
      } catch {
        print("Failed to create capture device input: \(error.localizedDescription)")
      }
    }
  }

  private func startSession() {
    sessionQueue.async {
      self.captureSession.startRunning()
    }
  }

  private func stopSession() {
    sessionQueue.async {
      self.captureSession.stopRunning()
    }
  }

  // Pins the overlay image view inside cameraView: full width, vertically
  // centered, never extending beyond the camera view's top/bottom edges.
  private func setUpPreviewOverlayView() {
    cameraView.addSubview(previewOverlayView)
    NSLayoutConstraint.activate([
      previewOverlayView.topAnchor.constraint(greaterThanOrEqualTo: cameraView.topAnchor),
      previewOverlayView.centerYAnchor.constraint(equalTo: cameraView.centerYAnchor),
      previewOverlayView.leadingAnchor.constraint(equalTo: cameraView.leadingAnchor),
      previewOverlayView.trailingAnchor.constraint(equalTo: cameraView.trailingAnchor),
      previewOverlayView.bottomAnchor.constraint(lessThanOrEqualTo: cameraView.bottomAnchor),
    ])
  }

  // Returns the built-in wide-angle camera at the requested position, or nil
  // when unavailable (always nil before iOS 10, where DiscoverySession does
  // not exist).
  private func captureDevice(forPosition position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    if #available(iOS 10.0, *) {
      let discoverySession = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInWideAngleCamera],
        mediaType: .video,
        position: .unspecified
      )
      return discoverySession.devices.first { $0.position == position }
    }
    return nil
  }

  // Converts lastFrame into a UIImage (rotated, and mirrored for the front
  // camera) and displays it in previewOverlayView. Bails out silently when no
  // frame or pixel buffer is available.
  private func updatePreviewOverlayView() {
    guard let lastFrame = lastFrame,
      let imageBuffer = CMSampleBufferGetImageBuffer(lastFrame)
    else {
      return
    }

    let ciImage = CIImage(cvPixelBuffer: imageBuffer)
    let context = CIContext(options: nil)

    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
      return
    }

    // Rotates the camera feed to match with device's orientation
    let rotatedImage = UIImage(
      cgImage: cgImage,
      scale: Constant.originalScale,
      orientation: .right
    )

    // Mirrors the camera feed if using front camera to make it looks natural
    if isUsingFrontCamera {
      guard let rotatedCGImage = rotatedImage.cgImage else {
        return
      }
      let mirroredImage = UIImage(
        cgImage: rotatedCGImage,
        scale: Constant.originalScale,
        orientation: .leftMirrored
      )
      previewOverlayView.image = mirroredImage
    } else {
      previewOverlayView.image = rotatedImage
    }
  }
}
223 |
224 | // MARK: AVCaptureVideoDataOutputSampleBufferDelegate
225 |
extension CameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

  /// Called by AVFoundation for every video frame delivered by the output.
  /// Retains the frame (so `updatePreviewOverlayView()` can draw it) and feeds
  /// it to the on-device AutoML label detector.
  func captureOutput(
    _ output: AVCaptureOutput,
    didOutput sampleBuffer: CMSampleBuffer,
    from connection: AVCaptureConnection
  ) {
    // Keep the latest frame around for the preview overlay.
    lastFrame = sampleBuffer
    detectImageLabelsAutoMLOnDevice(in: sampleBuffer)
  }
}
237 |
238 | // MARK: - Constants
239 |
/// File-private constants for the camera view controller.
private enum Constant {
  // Label for the dispatch queue that receives video output callbacks
  // (presumably used when configuring the capture output earlier in this file).
  static let videoDataOutputQueueLabel = "com.google.firebaseml.visiondetector.VideoDataOutputQueue"
  // Label for the serial queue on which the capture session is configured,
  // started, and stopped.
  static let sessionQueueLabel = "com.google.firebaseml.visiondetector.SessionQueue"
  // Scale used when re-wrapping CGImages into UIImages (1.0 = no scaling).
  static let originalScale: CGFloat = 1.0
}
245 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/ImageClassifier.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright 2019 Google LLC
3 | //
4 | // Licensed under the Apache License, Version 2.0 (the "License");
5 | // you may not use this file except in compliance with the License.
6 | // You may obtain a copy of the License at
7 | //
8 | // https://www.apache.org/licenses/LICENSE-2.0
9 | //
10 | // Unless required by applicable law or agreed to in writing, software
11 | // distributed under the License is distributed on an "AS IS" BASIS,
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | // See the License for the specific language governing permissions and
14 | // limitations under the License.
15 | //
16 |
17 | import Foundation
18 | import FirebaseMLCommon
19 | import FirebaseMLVisionAutoML
20 |
/// Wraps the ML Kit AutoML Vision image labeler, configured with both a local
/// (bundled) model and a remote model hosted in the Firebase Console.
///
/// NOTE(review): the type name is misspelled ("ImageClassifer" instead of
/// "ImageClassifier"). Renaming it would require updating every call site
/// (e.g. ViewController), so it is documented here rather than changed.
class ImageClassifer {

  private let vision = Vision.vision()
  private let modelManager = ModelManager.modelManager()

  // Labeler backed by the AutoML model(s); configured once in init().
  private let autoMLOnDeviceLabeler: VisionImageLabeler

  // The AutoML model bundled with the app, or nil when the manifest file
  // cannot be found in the app bundle.
  private let localModel: LocalModel? = {
    guard let localModelFilePath = Bundle.main.path(
      forResource: Constant.autoMLManifestFileName,
      ofType: Constant.autoMLManifestFileType,
      inDirectory: Constant.autoMLManifestFolder
    ) else {
      print("Failed to find AutoML local model manifest file.")
      return nil
    }

    return LocalModel(name: Constant.localAutoMLModelName, path: localModelFilePath)
  }()

  // The AutoML model hosted in the Firebase Console. Initial download has no
  // restrictions; updates are restricted to Wi-Fi but may run in the background.
  private let remoteModel: RemoteModel = {
    let initialConditions = ModelDownloadConditions()
    let updateConditions = ModelDownloadConditions(
      allowsCellularAccess: false,
      allowsBackgroundDownloading: true
    )
    return RemoteModel(
      name: Constant.remoteAutoMLModelName,
      allowsModelUpdates: true,
      initialConditions: initialConditions,
      updateConditions: updateConditions
    )
  }()

  /// Registers and downloads the remote model, registers the local model when
  /// available, and creates the AutoML image labeler that prefers the remote
  /// model and falls back to the local one.
  init() {
    // Load the remote AutoML model. Registration must happen before download.
    modelManager.register(remoteModel)
    modelManager.download(remoteModel)

    // Load the local AutoML model. localModelName stays nil if the bundled
    // manifest was not found, so the labeler is remote-only in that case.
    var localModelName: String?
    if let model = localModel {
      modelManager.register(model)
      localModelName = Constant.localAutoMLModelName
    }

    // Create AutoML image labeler.
    let options = VisionOnDeviceAutoMLImageLabelerOptions(
      remoteModelName: Constant.remoteAutoMLModelName,
      localModelName: localModelName
    )
    options.confidenceThreshold = Constant.labelConfidenceThreshold
    autoMLOnDeviceLabeler = vision.onDeviceAutoMLImageLabeler(options: options)

    // Set up to get notified when remote model download succeeded.
    setupRemoteModelDownloadNotification()
  }
}
78 |
79 | // MARK: - Private
80 |
extension ImageClassifer {

  /// Registers observers that log the outcome of remote model downloads.
  ///
  /// NOTE(review): the observer tokens returned by `addObserver(forName:)` are
  /// never retained, so these observers cannot be removed later. That is
  /// acceptable while this classifier lives for the whole app session; revisit
  /// if its lifetime ever becomes shorter.
  private func setupRemoteModelDownloadNotification() {
    NotificationCenter.default.addObserver(
      forName: .firebaseMLModelDownloadDidSucceed,
      object: nil,
      queue: OperationQueue.main
    ) { _ in
      // Fixed typo in the log message ("Sucessfully" -> "Successfully").
      print("Successfully downloaded AutoML remote model")
    }

    NotificationCenter.default.addObserver(
      forName: .firebaseMLModelDownloadDidFail,
      object: nil,
      queue: OperationQueue.main
    ) { _ in
      print("Error: AutoML remote model download failed. Check if Constant.remoteAutoMLModelName",
            "matches with the model name you published in the Firebase Console.")
    }
  }

}
104 |
105 | // MARK: - Classify image
106 |
extension ImageClassifer {

  /// Classify the given UIImage instance. This method is useful to classify still images.
  ///
  /// - Parameters:
  ///   - image: The image to classify.
  ///   - completionHandler: Receives either a formatted result string or an error.
  func classifyImage(_ image: UIImage, completionHandler: @escaping ImageClassificationCompletion) {
    // Rotate the image so that its imageOrientation is always "up"
    guard let rotatedImage = image.imageOrientedUp() else {
      completionHandler(nil, ClassificationError.invalidInput)
      return
    }

    // Initialize a VisionImage object with the rotated image.
    let visionImage = VisionImage(image: rotatedImage)

    // Feed the image to ML Kit AutoML SDK.
    classifyVisionImage(visionImage, completionHandler: completionHandler)
  }

  /// Classify the given CMSampleBuffer instance.
  /// This method is useful to classify frames of video streams.
  ///
  /// - Parameters:
  ///   - sampleBuffer: The video frame to classify.
  ///   - isUsingFrontCamera: Whether the frame came from the front camera; used
  ///     to compute the correct orientation metadata for the detector.
  ///   - completionHandler: Receives either a formatted result string or an error.
  func classifySampleBuffer(
    _ sampleBuffer: CMSampleBuffer,
    isUsingFrontCamera: Bool,
    completionHandler: @escaping ImageClassificationCompletion
  ) {

    let visionImage = VisionImage(buffer: sampleBuffer)
    let metadata = VisionImageMetadata()
    let orientation = UIUtilities.imageOrientation(
      fromDevicePosition: isUsingFrontCamera ? .front : .back
    )
    metadata.orientation = UIUtilities.visionImageOrientation(from: orientation)
    visionImage.metadata = metadata

    // Feed the image to ML Kit AutoML SDK.
    classifyVisionImage(visionImage, completionHandler: completionHandler)
  }

  /// Classify a VisionImage instance. This private method provides actual implementation of
  /// the image classification logic and is called by the public "classify" methods.
  private func classifyVisionImage(
    _ visionImage: VisionImage,
    completionHandler: @escaping ImageClassificationCompletion
  ) {

    // Return error if AutoML local model is not available.
    guard localModel != nil else {
      completionHandler(nil, ClassificationError.localModelNotAvailable)
      return
    }

    // Indicate whether the remote or local model is used.
    // Note: in most common cases, once a remote model is downloaded it will be used. However, in
    // very rare cases, the model itself might not be valid, and thus the local model is used. In
    // addition, since model download failures can be transient, and model download can also be
    // triggered in the background during inference, it is possible that a remote model is used
    // even if the first download fails.
    let isRemoteModelDownloaded = modelManager.isRemoteModelDownloaded(remoteModel)
    var result = "Source: " + (isRemoteModelDownloaded ? "Remote" : "Local") + " model\n"

    // Record the start time so inference latency can be reported to the user.
    let startTime = DispatchTime.now()

    // `result` is a captured var: it is appended to inside this escaping
    // closure before being handed to the completion handler.
    autoMLOnDeviceLabeler.process(visionImage) { detectedLabels, error in
      guard error == nil else {
        completionHandler(nil, error)
        return
      }

      // Measure inference latency and format it to show to user.
      let endTime = DispatchTime.now()
      let nanoTime = endTime.uptimeNanoseconds - startTime.uptimeNanoseconds
      let latencyInMs = round(Double(nanoTime) / 1_000_000)
      result += "Latency: \(latencyInMs)ms\n"

      // Format detection result to show to user.
      result += detectedLabels?.map { label -> String in
        return "Label: \(label.text), Confidence: \(label.confidence ?? 0)"
      }.joined(separator: "\n") ?? "No Result"

      completionHandler(result, nil)
    }
  }
}
189 |
190 | // MARK: - Constants and types
191 |
/// File-private configuration for the AutoML classifier.
private enum Constant {
  /// Definition of AutoML local model: the model is registered under this name
  /// and loaded from "automl/manifest.json" in the app bundle.
  static let localAutoMLModelName = "automl_image_labeling_model"
  static let autoMLManifestFileName = "manifest"
  static let autoMLManifestFileType = "json"
  static let autoMLManifestFolder = "automl"

  /// Definition of AutoML remote model. Must match the model name published
  /// in the Firebase Console, or remote downloads will fail.
  static let remoteAutoMLModelName = "mlkit_flowers"

  /// Config for AutoML Image Labeler classification task.
  /// Labels below this confidence are dropped from the results.
  static let labelConfidenceThreshold: Float = 0.6
}
205 |
/// Errors surfaced by the image classification pipeline.
private enum ClassificationError: Error {
  case invalidInput
  case localModelNotAvailable
}

extension ClassificationError: LocalizedError {
  /// User-facing description for each failure case.
  public var errorDescription: String? {
    let message: String
    switch self {
    case .invalidInput:
      message = "Invalid input image."
    case .localModelNotAvailable:
      message = "AutoML local model is not available. Please check if you have " +
        "downloaded and added the model files to this project."
    }
    return message
  }
}

/// Completion signature shared by all classify methods: a formatted result
/// string on success, or an error on failure.
typealias ImageClassificationCompletion = (String?, Error?) -> Void
224 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIcons
10 |
11 | CFBundleIcons~ipad
12 |
13 | CFBundleIdentifier
14 | $(PRODUCT_BUNDLE_IDENTIFIER)
15 | CFBundleInfoDictionaryVersion
16 | 6.0
17 | CFBundleName
18 | $(PRODUCT_NAME)
19 | CFBundlePackageType
20 | APPL
21 | CFBundleShortVersionString
22 | 1.0
23 | CFBundleVersion
24 | 1
25 | LSRequiresIPhoneOS
26 |
27 | NSCameraUsageDescription
28 | We use the data from the camera for the image recognition.
29 | NSPhotoLibraryUsageDescription
30 | We use the data from the photo library for the image recognition.
31 | UILaunchStoryboardName
32 | LaunchScreen
33 | UIMainStoryboardFile
34 | Main
35 | UIRequiredDeviceCapabilities
36 |
37 | armv7
38 |
39 | UIRequiresFullScreen
40 |
41 | UISupportedInterfaceOrientations
42 |
43 | UIInterfaceOrientationPortrait
44 |
45 | UISupportedInterfaceOrientations~ipad
46 |
47 | UIInterfaceOrientationPortrait
48 |
49 |
50 |
51 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/MLKitExtensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright 2019 Google LLC
3 | //
4 | // Licensed under the Apache License, Version 2.0 (the "License");
5 | // you may not use this file except in compliance with the License.
6 | // You may obtain a copy of the License at
7 | //
8 | // https://www.apache.org/licenses/LICENSE-2.0
9 | //
10 | // Unless required by applicable law or agreed to in writing, software
11 | // distributed under the License is distributed on an "AS IS" BASIS,
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | // See the License for the specific language governing permissions and
14 | // limitations under the License.
15 | //
16 |
17 | import CoreGraphics
18 | import UIKit
19 |
20 | // MARK: - UIImage
21 |
extension UIImage {

  /// Make a copy of the image and rotate it so that imageOrientation == .up.
  /// - Returns: The rotated image or `nil` if image could not be rotated.
  func imageOrientedUp() -> UIImage? {
    guard imageOrientation != .up else {
      // The image orientation is already up, so we don't need to do anything.
      return self.copy() as? UIImage
    }

    guard let cgImage = self.cgImage else {
      // CGImage is not available.
      return nil
    }

    // NOTE(review): bitmapInfo is forced to premultipliedLast; CGContext
    // creation can fail (returns nil) for color spaces that do not support
    // this configuration, in which case we bail out with nil.
    guard let colorSpace = cgImage.colorSpace,
      let context = CGContext(
        data: nil,
        width: Int(size.width),
        height: Int(size.height),
        bitsPerComponent: cgImage.bitsPerComponent,
        bytesPerRow: 0,
        space: colorSpace,
        bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
      )
    else {
      // Unable to create CGContext.
      return nil
    }

    var transform = CGAffineTransform.identity

    // First pass: rotate so the raw pixels end up in .up orientation.
    switch imageOrientation {
    case .down, .downMirrored:
      transform = transform.translatedBy(x: size.width, y: size.height)
      transform = transform.rotated(by: CGFloat.pi)
    case .left, .leftMirrored:
      transform = transform.translatedBy(x: size.width, y: 0)
      transform = transform.rotated(by: CGFloat.pi / 2.0)
    case .right, .rightMirrored:
      transform = transform.translatedBy(x: 0, y: size.height)
      transform = transform.rotated(by: CGFloat.pi / -2.0)
    case .up, .upMirrored:
      break
    @unknown default:
      break
    }

    // Flip image one more time if needed to, this is to prevent flipped image.
    switch imageOrientation {
    case .upMirrored, .downMirrored:
      transform = transform.translatedBy(x: size.width, y: 0)
      transform = transform.scaledBy(x: -1, y: 1)
    case .leftMirrored, .rightMirrored:
      transform = transform.translatedBy(x: size.height, y: 0)
      transform = transform.scaledBy(x: -1, y: 1)
    case .up, .down, .left, .right:
      break
    @unknown default:
      break
    }

    context.concatenate(transform)

    // For 90-degree rotations the draw rect swaps width and height, since
    // `size` already reflects the rotated (display) dimensions.
    switch imageOrientation {
    case .left, .leftMirrored, .right, .rightMirrored:
      context.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width))
    default:
      context.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
    }

    guard let newCGImage = context.makeImage() else { return nil }

    return .init(cgImage: newCGImage, scale: 1.0, orientation: .up)
  }

  // MARK: - Private

  /// The PNG or JPEG data representation of the image or `nil` if the conversion failed.
  private var data: Data? {
    return self.pngData() ?? self.jpegData(compressionQuality: Constant.jpegCompressionQuality)
  }
}
105 |
106 | // MARK: - Constants
107 |
/// File-private constants for the UIImage extension.
private enum Constant {
  // Compression quality used when falling back to JPEG encoding (0.0-1.0).
  static let jpegCompressionQuality: CGFloat = 0.8
}
111 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/UIUtilities.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright 2019 Google LLC
3 | //
4 | // Licensed under the Apache License, Version 2.0 (the "License");
5 | // you may not use this file except in compliance with the License.
6 | // You may obtain a copy of the License at
7 | //
8 | // https://www.apache.org/licenses/LICENSE-2.0
9 | //
10 | // Unless required by applicable law or agreed to in writing, software
11 | // distributed under the License is distributed on an "AS IS" BASIS,
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | // See the License for the specific language governing permissions and
14 | // limitations under the License.
15 | //
16 |
17 | import AVFoundation
18 | import UIKit
19 |
20 | import FirebaseMLVision
21 |
22 | /// Defines UI-related utilitiy methods for vision detection.
/// Defines UI-related utilitiy methods for vision detection.
public class UIUtilities {

  // MARK: - Public

  /// Returns the UIImage orientation that matches the current device
  /// orientation for a camera at the given position. Front-camera results are
  /// mirrored so the captured image matches what the user sees.
  public static func imageOrientation(
    fromDevicePosition devicePosition: AVCaptureDevice.Position = .back
  ) -> UIImage.Orientation {

    var deviceOrientation = UIDevice.current.orientation
    // faceUp/faceDown/unknown give no usable rotation; fall back to the UI
    // (status bar) orientation instead.
    if deviceOrientation == .faceDown || deviceOrientation == .faceUp ||
      deviceOrientation == .unknown {
      deviceOrientation = currentUIOrientation()
    }
    switch deviceOrientation {
    case .portrait:
      return devicePosition == .front ? .leftMirrored : .right
    case .landscapeLeft:
      return devicePosition == .front ? .downMirrored : .up
    case .portraitUpsideDown:
      return devicePosition == .front ? .rightMirrored : .left
    case .landscapeRight:
      return devicePosition == .front ? .upMirrored : .down
    case .faceDown, .faceUp, .unknown:
      return .up
    @unknown default:
      fatalError()
    }
  }

  /// Maps a UIImage orientation to the equivalent ML Kit Vision detector
  /// orientation (which corner of the buffer is the image's top-left).
  public static func visionImageOrientation(
    from imageOrientation: UIImage.Orientation
  ) -> VisionDetectorImageOrientation {

    switch imageOrientation {
    case .up:
      return .topLeft
    case .down:
      return .bottomRight
    case .left:
      return .leftBottom
    case .right:
      return .rightTop
    case .upMirrored:
      return .topRight
    case .downMirrored:
      return .bottomLeft
    case .leftMirrored:
      return .leftTop
    case .rightMirrored:
      return .rightBottom
    @unknown default:
      fatalError()
    }
  }

  // MARK: - Private

  /// Derives a device orientation from the status bar orientation, hopping to
  /// the main thread if necessary (statusBarOrientation is main-thread-only).
  private static func currentUIOrientation() -> UIDeviceOrientation {
    let deviceOrientation = { () -> UIDeviceOrientation in
      // Landscape is swapped here: interface and device landscape
      // orientations are defined in opposite rotation directions.
      switch UIApplication.shared.statusBarOrientation {
      case .landscapeLeft:
        return .landscapeRight
      case .landscapeRight:
        return .landscapeLeft
      case .portraitUpsideDown:
        return .portraitUpsideDown
      case .portrait, .unknown:
        return .portrait
      @unknown default:
        fatalError()
      }
    }
    guard Thread.isMainThread else {
      var currentOrientation: UIDeviceOrientation = .portrait
      DispatchQueue.main.sync {
        currentOrientation = deviceOrientation()
      }
      return currentOrientation
    }
    return deviceOrientation()
  }
}
105 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/MLVisionExample/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright 2019 Google LLC
3 | //
4 | // Licensed under the Apache License, Version 2.0 (the "License");
5 | // you may not use this file except in compliance with the License.
6 | // You may obtain a copy of the License at
7 | //
8 | // https://www.apache.org/licenses/LICENSE-2.0
9 | //
10 | // Unless required by applicable law or agreed to in writing, software
11 | // distributed under the License is distributed on an "AS IS" BASIS,
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | // See the License for the specific language governing permissions and
14 | // limitations under the License.
15 | //
16 |
17 | import UIKit
18 |
/// Main screen: shows a bundled image (or one picked from the library/camera)
/// and displays the AutoML classification result for it.
class ViewController: UIViewController, UINavigationControllerDelegate {

  /// An image picker for accessing the photo library or camera.
  private var imagePicker = UIImagePickerController()

  /// Index into `Constant.images` of the bundled image currently shown.
  private var currentImage = 0

  /// AutoML image classifier wrapper (lazy: Firebase setup is deferred until
  /// first use). Note: the type name "ImageClassifer" is misspelled in its
  /// defining file.
  private lazy var classifier = ImageClassifer()

  // MARK: - IBOutlets

  @IBOutlet fileprivate weak var imageView: UIImageView!
  @IBOutlet fileprivate weak var photoCameraButton: UIBarButtonItem!
  @IBOutlet fileprivate weak var videoCameraButton: UIBarButtonItem!
  @IBOutlet fileprivate weak var resultsLabelView: UILabel!

  // MARK: - UIViewController

  override func viewDidLoad() {
    super.viewDidLoad()

    imagePicker.delegate = self
    imagePicker.sourceType = .photoLibrary

    let isCameraAvailable = UIImagePickerController.isCameraDeviceAvailable(.front) ||
      UIImagePickerController.isCameraDeviceAvailable(.rear)
    if isCameraAvailable {
      // `CameraViewController` uses `AVCaptureDevice.DiscoverySession` which is only supported for
      // iOS 10 or newer.
      if #available(iOS 10.0, *) {
        videoCameraButton.isEnabled = true
      }
    } else {
      photoCameraButton.isEnabled = false
    }

    // Set up image view and classify the first image in the bundle.
    imageView.image = UIImage(named: Constant.images[currentImage])
    classifyImage()
  }

  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)

    navigationController?.navigationBar.isHidden = true
  }

  override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)

    navigationController?.navigationBar.isHidden = false
  }

  // MARK: - IBActions

  @IBAction func openPhotoLibrary(_ sender: Any) {
    imagePicker.sourceType = .photoLibrary
    present(imagePicker, animated: true)
  }

  @IBAction func openCamera(_ sender: Any) {
    guard UIImagePickerController.isCameraDeviceAvailable(.front) ||
      UIImagePickerController.isCameraDeviceAvailable(.rear)
    else {
      return
    }

    imagePicker.sourceType = .camera
    present(imagePicker, animated: true)
  }

  @IBAction func changeImage(_ sender: Any) {
    nextImageAndClassify()
  }

  // MARK: - Private

  /// Clears the results text view and removes any frames that are visible.
  private func clearResults() {
    resultsLabelView.text = ""
    imageView.image = nil
  }

  /// Update the results text view with classification result.
  /// NOTE(review): assumes the classifier invokes its completion on the main
  /// thread (UIKit requirement) — confirm against the ML Kit SDK behavior.
  private func showResult(_ resultText: String) {
    self.resultsLabelView.text = resultText
  }

  /// Change to the next image available in app's bundle, and run image classification.
  private func nextImageAndClassify() {
    clearResults()

    currentImage = (currentImage + 1) % Constant.images.count
    imageView.image = UIImage(named: Constant.images[currentImage])

    classifyImage()
  }

  /// Run image classification on the image currently displayed in imageView.
  private func classifyImage() {
    guard let image = imageView.image else {
      print("Error: Attempted to run classification on a nil object")
      showResult("Error: invalid image")
      return
    }

    classifier.classifyImage(image) { resultText, error in
      // Handle classification error.
      guard error == nil else {
        self.showResult(error!.localizedDescription)
        return
      }

      // We don't expect resultText and error to be both nil, so this is just a safeguard.
      // (Uses guard-let instead of a force unwrap; also fixes the "occured" typo.)
      guard let resultText = resultText else {
        self.showResult("Error: Unknown error occurred")
        return
      }

      self.showResult(resultText)
    }
  }
}
144 |
145 | // MARK: - UIImagePickerControllerDelegate
146 |
extension ViewController: UIImagePickerControllerDelegate {

  /// Shows and classifies the picked image, then dismisses the picker.
  public func imagePickerController(
    _ picker: UIImagePickerController,
    didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]
  ) {
    // The picker is dismissed whether or not a usable image was returned.
    defer { dismiss(animated: true) }

    guard let pickedImage = info[.originalImage] as? UIImage else { return }

    clearResults()
    imageView.image = pickedImage
    classifyImage()
  }
}
163 |
164 | // MARK: - Constants
165 |
/// File-private constants for the still-image screen.
private enum Constant {
  // Bundled sample images cycled through by the "change image" button,
  // in display order.
  static let images = ["sunflower_1627193_640.jpg", "sunflower_3292932_640.jpg",
                       "dandelion_4110356_640.jpg", "dandelion_2817950_640.jpg",
                       "rose_1463562_640.jpg", "rose_3063284_640.jpg"]
}
171 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/Podfile:
--------------------------------------------------------------------------------
# CocoaPods dependencies for the MLVisionExample iOS app.
source 'https://github.com/CocoaPods/Specs.git'

platform :ios, '9.0'
use_frameworks!

target 'MLVisionExample' do
  # Firebase ML Kit Vision + AutoML SDKs, pinned to the exact version this
  # codelab was written against.
  pod 'FirebaseMLVision', '0.16.0'
  pod 'FirebaseMLCommon', '0.16.0'
  pod 'FirebaseMLVisionAutoML', '0.16.0'
end
11 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - FirebaseCore (6.0.0):
3 | - GoogleUtilities/Environment (~> 6.0)
4 | - GoogleUtilities/Logger (~> 6.0)
5 | - FirebaseInstanceID (4.0.0):
6 | - FirebaseCore (~> 6.0)
7 | - GoogleUtilities/Environment (~> 6.0)
8 | - GoogleUtilities/UserDefaults (~> 6.0)
9 | - FirebaseMLCommon (0.16.0):
10 | - FirebaseCore (~> 6.0)
11 | - FirebaseInstanceID (~> 4.0)
12 | - GoogleUtilities/UserDefaults (~> 6.0)
13 | - GTMSessionFetcher/Core (~> 1.1)
14 | - FirebaseMLVision (0.16.0):
15 | - FirebaseCore (~> 6.0)
16 | - FirebaseMLCommon (~> 0.16)
17 | - GoogleAPIClientForREST/Core (~> 1.3)
18 | - GoogleAPIClientForREST/Vision (~> 1.3)
19 | - GoogleMobileVision/Detector (~> 1.4)
20 | - FirebaseMLVisionAutoML (0.16.0):
21 | - FirebaseCore (~> 6.0)
22 | - FirebaseMLCommon (~> 0.16)
23 | - FirebaseMLVision (~> 0.16)
24 | - TensorFlowLite (= 1.13.1)
25 | - GoogleAPIClientForREST/Core (1.3.8):
26 | - GTMSessionFetcher (>= 1.1.7)
27 | - GoogleAPIClientForREST/Vision (1.3.8):
28 | - GoogleAPIClientForREST/Core
29 | - GTMSessionFetcher (>= 1.1.7)
30 | - GoogleMobileVision/Detector (1.6.0):
31 | - GoogleToolboxForMac/Logger (~> 2.1)
32 | - "GoogleToolboxForMac/NSData+zlib (~> 2.1)"
33 | - GTMSessionFetcher/Core (~> 1.1)
34 | - Protobuf (~> 3.1)
35 | - GoogleToolboxForMac/Defines (2.2.0)
36 | - GoogleToolboxForMac/Logger (2.2.0):
37 | - GoogleToolboxForMac/Defines (= 2.2.0)
38 | - "GoogleToolboxForMac/NSData+zlib (2.2.0)":
39 | - GoogleToolboxForMac/Defines (= 2.2.0)
40 | - GoogleUtilities/Environment (6.0.0)
41 | - GoogleUtilities/Logger (6.0.0):
42 | - GoogleUtilities/Environment
43 | - GoogleUtilities/UserDefaults (6.0.0):
44 | - GoogleUtilities/Logger
45 | - GTMSessionFetcher (1.2.1):
46 | - GTMSessionFetcher/Full (= 1.2.1)
47 | - GTMSessionFetcher/Core (1.2.1)
48 | - GTMSessionFetcher/Full (1.2.1):
49 | - GTMSessionFetcher/Core (= 1.2.1)
50 | - Protobuf (3.7.0)
51 | - TensorFlowLite (1.13.1)
52 |
53 | DEPENDENCIES:
54 | - FirebaseMLCommon (= 0.16.0)
55 | - FirebaseMLVision (= 0.16.0)
56 | - FirebaseMLVisionAutoML (= 0.16.0)
57 |
58 | SPEC REPOS:
59 | https://github.com/cocoapods/specs.git:
60 | - FirebaseCore
61 | - FirebaseInstanceID
62 | - FirebaseMLCommon
63 | - FirebaseMLVision
64 | - FirebaseMLVisionAutoML
65 | - GoogleAPIClientForREST
66 | - GoogleMobileVision
67 | - GoogleToolboxForMac
68 | - GoogleUtilities
69 | - GTMSessionFetcher
70 | - Protobuf
71 | - TensorFlowLite
72 |
73 | SPEC CHECKSUMS:
74 | FirebaseCore: e38f025287b413255a53acc1945d048a112047f7
75 | FirebaseInstanceID: 0e0348a3c00a734fa376a070f5ad4533ad975cb5
76 | FirebaseMLCommon: d430756ba2a16bac8fefc81ea416e7f822f052fd
77 | FirebaseMLVision: 35d0a720334b141d70a55e7e817c1cac23f32285
78 | FirebaseMLVisionAutoML: ce20b5b0e169cafa07f5bf61048a86aa7df6232c
79 | GoogleAPIClientForREST: 5447a194eae517986cafe6421a5330b80b820591
80 | GoogleMobileVision: 31cfb4319fd0c03d80105680abd9eae9da5e3b47
81 | GoogleToolboxForMac: ff31605b7d66400dcec09bed5861689aebadda4d
82 | GoogleUtilities: f1faafc033ea203adf1783ce00af455bb99d0e5b
83 | GTMSessionFetcher: 32aeca0aa144acea523e1c8e053089dec2cb98ca
84 | Protobuf: 7a877b7f3e5964e3fce995e2eb323dbc6831bb5a
85 | TensorFlowLite: 8b9dc4eb32eac0f8cb660c66bca7604da56dcc5a
86 |
87 | PODFILE CHECKSUM: 66423bd622fd64de3d822190e84cb0430f513653
88 |
89 | COCOAPODS: 1.5.3
90 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/automl/dict.txt:
--------------------------------------------------------------------------------
1 | daisy
2 | dandelion
3 | roses
4 | sunflowers
5 | tulips
6 |
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/automl/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "modelFile": "model.tflite",
3 | "labelsFile": "dict.txt",
4 | "modelType": "IMAGE_LABELING"
5 | }
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/automl/model.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/automl/model.tflite
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/flowers/dandelion_2817950_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/flowers/dandelion_2817950_640.jpg
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/flowers/dandelion_4110356_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/flowers/dandelion_4110356_640.jpg
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/flowers/rose_1463562_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/flowers/rose_1463562_640.jpg
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/flowers/rose_3063284_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/flowers/rose_3063284_640.jpg
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/flowers/sunflower_1627193_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/flowers/sunflower_1627193_640.jpg
--------------------------------------------------------------------------------
/ios/mlkit-automl/Resources/flowers/sunflower_3292932_640.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/googlecodelabs/automl-vision-edge-in-mlkit/bec51615d216da3a93e15789b81bf57f80b98622/ios/mlkit-automl/Resources/flowers/sunflower_3292932_640.jpg
--------------------------------------------------------------------------------