├── .gitignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── android
├── .classpath
├── .gitignore
├── .idea
│ └── misc.xml
├── .project
├── .settings
│ └── org.eclipse.buildship.core.prefs
├── build.gradle
├── gradle.properties
├── settings.gradle
└── src
│ └── main
│ ├── AndroidManifest.xml
│ └── java
│ └── sq
│ └── flutter
│ └── tflite
│ └── TflitePlugin.java
├── example
├── .gitignore
├── .metadata
├── README.md
├── android
│ ├── .gitignore
│ ├── .project
│ ├── .settings
│ │ └── org.eclipse.buildship.core.prefs
│ ├── app
│ │ ├── .classpath
│ │ ├── .project
│ │ ├── .settings
│ │ │ └── org.eclipse.buildship.core.prefs
│ │ ├── build.gradle
│ │ └── src
│ │ │ └── main
│ │ │ ├── AndroidManifest.xml
│ │ │ ├── java
│ │ │ └── sq
│ │ │ │ └── flutter
│ │ │ │ └── tfliteexample
│ │ │ │ └── MainActivity.java
│ │ │ └── res
│ │ │ ├── drawable
│ │ │ └── launch_background.xml
│ │ │ ├── mipmap-hdpi
│ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-mdpi
│ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-xhdpi
│ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-xxxhdpi
│ │ │ └── ic_launcher.png
│ │ │ └── values
│ │ │ └── styles.xml
│ ├── build.gradle
│ ├── gradle.properties
│ ├── gradle
│ │ └── wrapper
│ │ │ ├── gradle-wrapper.jar
│ │ │ └── gradle-wrapper.properties
│ ├── gradlew
│ ├── gradlew.bat
│ ├── settings.gradle
│ └── settings_aar.gradle
├── assets
│ ├── deeplabv3_257_mv_gpu.tflite
│ ├── deeplabv3_257_mv_gpu.txt
│ ├── mobilenet_v1_1.0_224.tflite
│ ├── mobilenet_v1_1.0_224.txt
│ ├── posenet_mv1_075_float_from_checkpoints.tflite
│ ├── ssd_mobilenet.tflite
│ ├── ssd_mobilenet.txt
│ ├── yolov2_tiny.tflite
│ └── yolov2_tiny.txt
├── ios
│ ├── .gitignore
│ ├── Flutter
│ │ ├── AppFrameworkInfo.plist
│ │ ├── Debug.xcconfig
│ │ └── Release.xcconfig
│ ├── Podfile
│ ├── Podfile.lock
│ ├── Runner.xcodeproj
│ │ ├── project.pbxproj
│ │ ├── project.xcworkspace
│ │ │ └── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── xcschemes
│ │ │ └── Runner.xcscheme
│ ├── Runner.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ └── Runner
│ │ ├── AppDelegate.h
│ │ ├── AppDelegate.m
│ │ ├── Assets.xcassets
│ │ ├── AppIcon.appiconset
│ │ │ ├── Contents.json
│ │ │ ├── Icon-App-1024x1024@1x.png
│ │ │ ├── Icon-App-20x20@1x.png
│ │ │ ├── Icon-App-20x20@2x.png
│ │ │ ├── Icon-App-20x20@3x.png
│ │ │ ├── Icon-App-29x29@1x.png
│ │ │ ├── Icon-App-29x29@2x.png
│ │ │ ├── Icon-App-29x29@3x.png
│ │ │ ├── Icon-App-40x40@1x.png
│ │ │ ├── Icon-App-40x40@2x.png
│ │ │ ├── Icon-App-40x40@3x.png
│ │ │ ├── Icon-App-60x60@2x.png
│ │ │ ├── Icon-App-60x60@3x.png
│ │ │ ├── Icon-App-76x76@1x.png
│ │ │ ├── Icon-App-76x76@2x.png
│ │ │ └── Icon-App-83.5x83.5@2x.png
│ │ └── LaunchImage.imageset
│ │ │ ├── Contents.json
│ │ │ ├── LaunchImage.png
│ │ │ ├── LaunchImage@2x.png
│ │ │ ├── LaunchImage@3x.png
│ │ │ └── README.md
│ │ ├── Base.lproj
│ │ ├── LaunchScreen.storyboard
│ │ └── Main.storyboard
│ │ ├── Info.plist
│ │ └── main.m
├── lib
│ └── main.dart
├── pubspec.yaml
├── test
│ └── widget_test.dart
└── yolo.jpg
├── ios
├── .gitignore
├── Assets
│ └── .gitkeep
├── Classes
│ ├── TflitePlugin.h
│ ├── TflitePlugin.mm
│ ├── ios_image_load.h
│ └── ios_image_load.mm
└── tflite.podspec
├── lib
└── tensorflow_lite_flutter.dart
├── pubspec.yaml
└── test
└── tflite_test.dart
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .dart_tool/
3 |
4 | .packages
5 | .pub/
6 | pubspec.lock
7 |
8 | build/
9 |
10 | # IntelliJ
11 | *.iml
12 | .idea/workspace.xml
13 | .idea/tasks.xml
14 | .idea/gradle.xml
15 | .idea/assetWizardSettings.xml
16 | .idea/dictionaries
17 | .idea/libraries
18 | .idea/caches
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 3.0.0 (2025-04-27)
2 |
3 | * Updated to support Flutter 3.16+ and Dart 3.2+
4 | * Improved documentation for all API methods with detailed parameter descriptions
5 | * Fixed lint errors and improved code formatting
6 | * Added comprehensive API documentation with examples for each ML task
7 | * Enhanced README with better installation and usage instructions
8 | * Updated dependencies to their latest versions
9 | * Added repository and issue tracker URLs to pubspec.yaml
10 |
11 | ## 2.0.1
12 |
13 | * Fixed compatibility issues with newer Flutter versions
14 | * Minor bug fixes and improvements
15 |
16 | ## 2.0.0
17 |
18 | * Initial release after fixing all issues from old plugin.
19 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 Qian Sha
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
14 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
15 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
17 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
18 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
19 | OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TensorFlow Lite Flutter
2 |
3 | [![pub package](https://img.shields.io/pub/v/tensorflow_lite_flutter.svg)](https://pub.dev/packages/tensorflow_lite_flutter)
4 |
5 | A comprehensive Flutter plugin for accessing TensorFlow Lite API. This plugin provides a Dart interface to TensorFlow Lite models, allowing Flutter apps to perform on-device machine learning with high performance and low latency.
6 |
7 | ## Features
8 |
9 | Supports multiple ML tasks on both iOS and Android:
10 |
11 | - ✅ Image Classification
12 | - ✅ Object Detection (SSD MobileNet and YOLO)
13 | - ✅ Pix2Pix Image-to-Image Translation
14 | - ✅ Semantic Segmentation (Deeplab)
15 | - ✅ Pose Estimation (PoseNet)
16 |
17 | ## Table of Contents
18 |
19 | - [Installation](#installation)
20 | - [Setup](#setup)
21 | - [Android Configuration](#android-configuration)
22 | - [iOS Configuration](#ios-configuration)
23 | - [Usage](#usage)
24 | - [Loading Models](#loading-models)
25 | - [Image Classification](#image-classification)
26 | - [Object Detection](#object-detection)
27 | - [Pix2Pix](#pix2pix)
28 | - [Semantic Segmentation](#semantic-segmentation)
29 | - [Pose Estimation](#pose-estimation)
30 | - [Advanced Usage](#advanced-usage)
31 | - [GPU Acceleration](#gpu-acceleration)
32 | - [Performance Optimization](#performance-optimization)
33 | - [Examples](#examples)
34 | - [Troubleshooting](#troubleshooting)
35 |
36 | ## Version History
37 |
38 | ### v3.0.0 (Latest)
39 |
40 | - Updated to support Flutter 3.16+ and Dart 3.2+
41 | - Improved documentation and examples
42 | - Performance optimizations
43 |
44 | ### v2.0.1
45 |
46 | - iOS TensorFlow Lite library upgraded to TensorFlowLiteObjC 2.x
47 | - Changes to native code are denoted with `TFLITE2`
48 |
49 | ### v1.0.0
50 |
51 | - Updated to TensorFlow Lite API v1.12.0
52 | - No longer accepts parameter `inputSize` and `numChannels` (retrieved from input tensor)
53 | - `numThreads` moved to `Tflite.loadModel`
54 |
55 | ## Installation
56 |
57 | Add `tensorflow_lite_flutter` as a dependency in your `pubspec.yaml` file:
58 |
59 | ```yaml
60 | dependencies:
61 | flutter:
62 | sdk: flutter
63 | tensorflow_lite_flutter: ^3.0.0
64 | ```
65 |
66 | Then run:
67 |
68 | ```bash
69 | flutter pub get
70 | ```
71 |
72 | ## Setup
73 |
74 | ### Android Configuration
75 |
76 | 1. In `android/app/build.gradle`, add the following setting in the `android` block to ensure TensorFlow Lite model files aren't compressed:
77 |
78 | ```gradle
79 | aaptOptions {
80 | noCompress 'tflite'
81 | noCompress 'lite'
82 | }
83 | ```
84 |
85 | 2. If you're using models larger than 100MB, you may need to enable split APKs by adding the following to your `android/app/build.gradle` file:
86 |
87 | ```gradle
88 | android {
89 | // Other settings...
90 | defaultConfig {
91 | // Other settings...
92 | ndk {
93 | abiFilters 'armeabi-v7a', 'arm64-v8a'
94 | }
95 | }
96 |
97 | splits {
98 | abi {
99 | enable true
100 | reset()
101 | include 'armeabi-v7a', 'arm64-v8a'
102 | universalApk false
103 | }
104 | }
105 | }
106 | ```
107 |
108 | ### iOS Configuration
109 |
110 | Solutions to common build errors on iOS:
111 |
112 | 1. **'vector' file not found**
113 |
114 | Open `ios/Runner.xcworkspace` in Xcode, click Runner > Targets > Runner > Build Settings, search for `Compile Sources As`, and change the value to `Objective-C++`
115 |
116 | 2. **'tensorflow/lite/kernels/register.h' file not found**
117 |
118 | The plugin assumes the TensorFlow header files are located in path "tensorflow/lite/kernels".
119 |
120 | For earlier versions of TensorFlow, the header path may be "tensorflow/contrib/lite/kernels".
121 |
122 | Use `CONTRIB_PATH` to toggle the path. Uncomment `//#define CONTRIB_PATH` in the iOS implementation if needed.
123 |
124 | 3. **Network access for remote models**
125 |
126 | If your app needs to download models over plain HTTP, allow arbitrary loads by adding the following keys to your `ios/Runner/Info.plist`:
127 |
128 | ```xml
129 | <key>NSAppTransportSecurity</key>
130 | <dict>
131 |     <key>NSAllowsArbitraryLoads</key>
132 |     <true/>
133 | </dict>
134 | ```
135 |
136 | ## Usage
137 |
138 | ### Getting Started
139 |
140 | 1. Create an `assets` folder and place your model and label files in it. Add them to your `pubspec.yaml`:
141 |
142 | ```yaml
143 | assets:
144 | - assets/labels.txt
145 | - assets/mobilenet_v1_1.0_224.tflite
146 | ```
147 |
148 | 2. Import the library in your Dart code:
149 |
150 | ```dart
151 | import 'package:tensorflow_lite_flutter/tensorflow_lite_flutter.dart';
152 | ```
153 |
154 | ### Loading Models
155 |
156 | Before using any TensorFlow Lite model, you need to load it into memory:
157 |
158 | ```dart
159 | Future loadModel() async {
160 | try {
161 | String? result = await Tflite.loadModel(
162 | model: "assets/mobilenet_v1_1.0_224.tflite",
163 | labels: "assets/labels.txt",
164 | numThreads: 2, // Number of threads to use (default: 1)
165 | isAsset: true, // Is the model file an asset or a file? (default: true)
166 | useGpuDelegate: false // Use GPU acceleration? (default: false)
167 | );
168 | print('Model loaded successfully: $result');
169 | } catch (e) {
170 | print('Failed to load model: $e');
171 | }
172 | }
173 | ```
174 |
175 | ### Releasing Resources
176 |
177 | When you're done using the model, release the resources to free up memory:
178 |
179 | ```dart
180 | Future disposeModel() async {
181 | await Tflite.close();
182 | print('Model resources released');
183 | }
184 | ```
185 |
186 | ### GPU Acceleration
187 |
188 | To use GPU acceleration for faster inference:
189 |
190 | 1. Set `useGpuDelegate: true` when loading the model
191 | 2. For optimal performance in release mode, follow the [TensorFlow Lite GPU delegate optimization guide](https://www.tensorflow.org/lite/performance/gpu#step_5_release_mode)
192 |
193 | ```dart
194 | // Example with GPU acceleration enabled
195 | await Tflite.loadModel(
196 | model: "assets/model.tflite",
197 | labels: "assets/labels.txt",
198 | useGpuDelegate: true // Enable GPU acceleration
199 | );
200 | ```
201 |
202 | > **Note**: GPU acceleration works best for floating-point models and may not improve performance for quantized models.
203 |
204 | ### Image Classification
205 |
206 | #### Overview
207 |
208 | Image classification identifies what's in an image from a predefined set of categories. This plugin supports various image classification models like MobileNet, EfficientNet, and custom TensorFlow Lite models.
209 |
210 | #### Output Format
211 |
212 | The model returns a list of classifications with their confidence scores:
213 |
214 | ```json
215 | [
216 | {
217 | "index": 0,
218 | "label": "person",
219 | "confidence": 0.629
220 | },
221 | {
222 | "index": 1,
223 | "label": "dog",
224 | "confidence": 0.324
225 | }
226 | ]
227 | ```
228 |
229 | #### Classifying Images
230 |
231 | **From a file path:**
232 |
233 | ```dart
234 | Future classifyImage(String imagePath) async {
235 | try {
236 | // Run inference
237 | List? recognitions = await Tflite.runModelOnImage(
238 | path: imagePath, // Required: Path to the image file
239 | imageMean: 127.5, // Default: 117.0 (depends on your model)
240 | imageStd: 127.5, // Default: 1.0 (depends on your model)
241 | numResults: 5, // Default: 5 (maximum number of results)
242 | threshold: 0.2, // Default: 0.1 (minimum confidence threshold)
243 | asynch: true // Default: true (run in background)
244 | );
245 |
246 | // Process results
247 | if (recognitions != null) {
248 | for (var result in recognitions) {
249 | print('${result["label"]} - ${(result["confidence"] * 100).toStringAsFixed(2)}%');
250 | }
251 | }
252 | } catch (e) {
253 | print('Error classifying image: $e');
254 | }
255 | }
256 | ```
257 |
258 | **From binary data (useful for camera frames):**
259 |
260 | ```dart
261 | Future classifyImageBinary(Uint8List imageBytes, int inputSize) async {
262 | try {
263 | // Process image data to match model input requirements
264 | Uint8List processedData = prepareImageData(imageBytes, inputSize, 127.5, 127.5);
265 |
266 | // Run inference
267 | List? recognitions = await Tflite.runModelOnBinary(
268 | binary: processedData, // Required: Processed image data
269 | numResults: 5, // Default: 5
270 | threshold: 0.1, // Default: 0.1
271 | asynch: true // Default: true
272 | );
273 |
274 | // Process results
275 | if (recognitions != null) {
276 | for (var result in recognitions) {
277 | print('${result["label"]} - ${(result["confidence"] * 100).toStringAsFixed(2)}%');
278 | }
279 | }
280 | } catch (e) {
281 | print('Error classifying binary image: $e');
282 | }
283 | }
284 |
285 | // Helper function to prepare raw image bytes for the model input
286 | Uint8List prepareImageData(Uint8List imageBytes, int inputSize, double mean, double std) {
287 | var convertedBytes = Float32List(1 * inputSize * inputSize * 3);
288 | var buffer = Float32List.view(convertedBytes.buffer);
289 | int pixelIndex = 0;
290 |
291 | // Decode imageBytes and write normalized pixels into buffer here
292 | // ... (implementation depends on your image processing needs)
293 |
294 | return convertedBytes.buffer.asUint8List();
295 | }
296 |
297 | Uint8List imageToByteListFloat32(
298 | img.Image image, int inputSize, double mean, double std) {
299 | var convertedBytes = Float32List(1 * inputSize * inputSize * 3);
300 | var buffer = Float32List.view(convertedBytes.buffer);
301 | int pixelIndex = 0;
302 | for (var i = 0; i < inputSize; i++) {
303 | for (var j = 0; j < inputSize; j++) {
304 | var pixel = image.getPixel(j, i);
305 | buffer[pixelIndex++] = (img.getRed(pixel) - mean) / std;
306 | buffer[pixelIndex++] = (img.getGreen(pixel) - mean) / std;
307 | buffer[pixelIndex++] = (img.getBlue(pixel) - mean) / std;
308 | }
309 | }
310 | return convertedBytes.buffer.asUint8List();
311 | }
312 |
313 | Uint8List imageToByteListUint8(img.Image image, int inputSize) {
314 | var convertedBytes = Uint8List(1 * inputSize * inputSize * 3);
315 | var buffer = Uint8List.view(convertedBytes.buffer);
316 | int pixelIndex = 0;
317 | for (var i = 0; i < inputSize; i++) {
318 | for (var j = 0; j < inputSize; j++) {
319 | var pixel = image.getPixel(j, i);
320 | buffer[pixelIndex++] = img.getRed(pixel);
321 | buffer[pixelIndex++] = img.getGreen(pixel);
322 | buffer[pixelIndex++] = img.getBlue(pixel);
323 | }
324 | }
325 | return convertedBytes.buffer.asUint8List();
326 | }
327 | ```
328 |
329 | - Run on image stream (video frame):
330 |
331 | > Works with [camera plugin 4.0.0](https://pub.dartlang.org/packages/camera). Video format: (iOS) kCVPixelFormatType_32BGRA, (Android) YUV_420_888.
332 |
333 | ```dart
334 | var recognitions = await Tflite.runModelOnFrame(
335 | bytesList: img.planes.map((plane) {return plane.bytes;}).toList(),// required
336 | imageHeight: img.height,
337 | imageWidth: img.width,
338 | imageMean: 127.5, // defaults to 127.5
339 | imageStd: 127.5, // defaults to 127.5
340 | rotation: 90, // defaults to 90, Android only
341 | numResults: 2, // defaults to 5
342 | threshold: 0.1, // defaults to 0.1
343 | asynch: true // defaults to true
344 | );
345 | ```
346 |
347 | ### Object Detection
348 |
349 | #### Overview
350 |
351 | Object detection identifies and locates objects within an image. This plugin supports two popular object detection architectures:
352 |
353 | 1. **SSD MobileNet** - Fast and efficient for mobile devices
354 | 2. **YOLO** (You Only Look Once) - Higher accuracy but more computationally intensive
355 |
356 | #### SSD MobileNet
357 |
358 | **Output Format:**
359 |
360 | ```json
361 | [
362 | {
363 | "detectedClass": "hot dog",
364 | "confidenceInClass": 0.923,
365 | "rect": {
366 | "x": 0.15, // Normalized coordinates (0-1)
367 | "y": 0.33, // Normalized coordinates (0-1)
368 | "w": 0.80, // Width as percentage of image width
369 | "h": 0.27 // Height as percentage of image height
370 | }
371 | },
372 | {
373 | "detectedClass": "person",
374 | "confidenceInClass": 0.845,
375 | "rect": {
376 | "x": 0.52,
377 | "y": 0.18,
378 | "w": 0.35,
379 | "h": 0.75
380 | }
381 | }
382 | ]
383 | ```
384 |
385 | **Detecting Objects from an Image File:**
386 |
387 | ```dart
388 | Future detectObjectsOnImage(String imagePath) async {
389 | try {
390 | // Run inference
391 | List? detections = await Tflite.detectObjectOnImage(
392 | path: imagePath, // Required: Path to the image file
393 | model: "SSDMobileNet", // Default: "SSDMobileNet"
394 | imageMean: 127.5, // Default: 127.5
395 | imageStd: 127.5, // Default: 127.5
396 | threshold: 0.4, // Default: 0.1 (confidence threshold)
397 | numResultsPerClass: 2, // Default: 5 (max detections per class)
398 | asynch: true // Default: true (run in background)
399 | );
400 |
401 | // Process results
402 | if (detections != null) {
403 | for (var detection in detections) {
404 | final rect = detection["rect"];
405 | print('${detection["detectedClass"]} - ${(detection["confidenceInClass"] * 100).toStringAsFixed(2)}%');
406 | print('Location: x=${rect["x"]}, y=${rect["y"]}, w=${rect["w"]}, h=${rect["h"]}');
407 | }
408 | }
409 | } catch (e) {
410 | print('Error detecting objects: $e');
411 | }
412 | }
413 | ```
414 |
415 | **Detecting Objects from Binary Data:**
416 |
417 | ```dart
418 | Future detectObjectsOnBinary(Uint8List imageBytes) async {
419 | try {
420 | List? detections = await Tflite.detectObjectOnBinary(
421 | binary: imageBytes, // Required: Binary image data
422 | model: "SSDMobileNet", // Default: "SSDMobileNet"
423 | threshold: 0.4, // Default: 0.1
424 | numResultsPerClass: 2, // Default: 5
425 | asynch: true // Default: true
426 | );
427 |
428 | // Process results
429 | if (detections != null) {
430 | for (var detection in detections) {
431 | print('${detection["detectedClass"]} - ${(detection["confidenceInClass"] * 100).toStringAsFixed(2)}%');
432 | }
433 | }
434 | } catch (e) {
435 | print('Error detecting objects from binary: $e');
436 | }
437 | }
438 | ```
439 |
440 | **Detecting Objects from Camera Frames:**
441 |
442 | > Works with [camera plugin](https://pub.dev/packages/camera). Video format: (iOS) kCVPixelFormatType_32BGRA, (Android) YUV_420_888.
443 |
444 | ```dart
445 | Future detectObjectsOnFrame(CameraImage cameraImage) async {
446 | try {
447 | List? detections = await Tflite.detectObjectOnFrame(
448 | bytesList: cameraImage.planes.map((plane) => plane.bytes).toList(), // Required
449 | model: "SSDMobileNet", // Default: "SSDMobileNet"
450 | imageHeight: cameraImage.height,
451 | imageWidth: cameraImage.width,
452 | imageMean: 127.5, // Default: 127.5
453 | imageStd: 127.5, // Default: 127.5
454 | rotation: 90, // Default: 90, Android only
455 | numResults: 5, // Default: 5
456 | threshold: 0.4, // Default: 0.1
457 | asynch: true // Default: true
458 | );
459 |
460 | // Process results
461 | if (detections != null) {
462 | for (var detection in detections) {
463 | print('${detection["detectedClass"]} - ${(detection["confidenceInClass"] * 100).toStringAsFixed(2)}%');
464 | }
465 | }
466 | } catch (e) {
467 | print('Error detecting objects on frame: $e');
468 | }
469 | }
470 | ```
471 |
472 | #### YOLO (You Only Look Once)
473 |
474 | YOLO is another popular object detection model that's more accurate but slightly more computationally intensive than SSD MobileNet.
475 |
476 | **Using YOLO for Object Detection:**
477 |
478 | ```dart
479 | Future detectObjectsWithYOLO(String imagePath) async {
480 | // YOLO-specific anchors (can be customized based on your model)
481 | final List<double> anchors = [
482 | 0.57273, 0.677385, 1.87446, 2.06253, 3.33843,
483 | 5.47434, 7.88282, 3.52778, 9.77052, 9.16828
484 | ];
485 |
486 | try {
487 | List? detections = await Tflite.detectObjectOnImage(
488 | path: imagePath, // Required: Path to the image file
489 | model: "YOLO", // Use YOLO model
490 | imageMean: 0.0, // Default: 127.5 (but YOLO typically uses 0.0)
491 | imageStd: 255.0, // Default: 127.5 (but YOLO typically uses 255.0)
492 | threshold: 0.3, // Default: 0.1
493 | numResultsPerClass: 2, // Default: 5
494 | anchors: anchors, // YOLO-specific parameter
495 | blockSize: 32, // Default: 32
496 | numBoxesPerBlock: 5, // Default: 5
497 | asynch: true // Default: true
498 | );
499 |
500 | // Process results (same format as SSD MobileNet)
501 | if (detections != null) {
502 | for (var detection in detections) {
503 | final rect = detection["rect"];
504 | print('${detection["detectedClass"]} - ${(detection["confidenceInClass"] * 100).toStringAsFixed(2)}%');
505 | print('Location: x=${rect["x"]}, y=${rect["y"]}, w=${rect["w"]}, h=${rect["h"]}');
506 | }
507 | }
508 | } catch (e) {
509 | print('Error detecting objects with YOLO: $e');
510 | }
511 | }
512 | ```
513 |
514 | - Run on binary:
515 |
516 | ```dart
517 | var recognitions = await Tflite.detectObjectOnBinary(
518 | binary: imageToByteListFloat32(resizedImage, 416, 0.0, 255.0), // required
519 | model: "YOLO",
520 | threshold: 0.3, // defaults to 0.1
521 | numResultsPerClass: 2,// defaults to 5
522 | anchors: anchors, // defaults to [0.57273,0.677385,1.87446,2.06253,3.33843,5.47434,7.88282,3.52778,9.77052,9.16828]
523 | blockSize: 32, // defaults to 32
524 | numBoxesPerBlock: 5, // defaults to 5
525 | asynch: true // defaults to true
526 | );
527 | ```
528 |
529 | - Run on image stream (video frame):
530 |
531 | > Works with [camera plugin 4.0.0](https://pub.dartlang.org/packages/camera). Video format: (iOS) kCVPixelFormatType_32BGRA, (Android) YUV_420_888.
532 |
533 | ```dart
534 | var recognitions = await Tflite.detectObjectOnFrame(
535 | bytesList: img.planes.map((plane) {return plane.bytes;}).toList(),// required
536 | model: "YOLO",
537 | imageHeight: img.height,
538 | imageWidth: img.width,
539 | imageMean: 0, // defaults to 127.5
540 | imageStd: 255.0, // defaults to 127.5
541 | numResults: 2, // defaults to 5
542 | threshold: 0.1, // defaults to 0.1
543 | numResultsPerClass: 2,// defaults to 5
544 | anchors: anchors, // defaults to [0.57273,0.677385,1.87446,2.06253,3.33843,5.47434,7.88282,3.52778,9.77052,9.16828]
545 | blockSize: 32, // defaults to 32
546 | numBoxesPerBlock: 5, // defaults to 5
547 | asynch: true // defaults to true
548 | );
549 | ```
550 |
551 | ### Pix2Pix
552 |
553 | > Thanks to [RP](https://github.com/shaqian/flutter_tflite/pull/18) from [Green Appers](https://github.com/GreenAppers)
554 |
555 | - Output format:
556 |
557 | The output of Pix2Pix inference is Uint8List type. Depending on the `outputType` used, the output is:
558 |
559 | - (if outputType is png) byte array of a png image
560 |
561 | - (otherwise) byte array of the raw output
562 |
563 | - Run on image:
564 |
565 | ```dart
566 | var result = await runPix2PixOnImage(
567 | path: filepath, // required
568 | imageMean: 0.0, // defaults to 0.0
569 | imageStd: 255.0, // defaults to 255.0
570 | asynch: true // defaults to true
571 | );
572 | ```
573 |
574 | - Run on binary:
575 |
576 | ```dart
577 | var result = await runPix2PixOnBinary(
578 | binary: binary, // required
579 | asynch: true // defaults to true
580 | );
581 | ```
582 |
583 | - Run on image stream (video frame):
584 |
585 | ```dart
586 | var result = await runPix2PixOnFrame(
587 | bytesList: img.planes.map((plane) {return plane.bytes;}).toList(),// required
588 | imageHeight: img.height, // defaults to 1280
589 | imageWidth: img.width, // defaults to 720
590 | imageMean: 127.5, // defaults to 0.0
591 | imageStd: 127.5, // defaults to 255.0
592 | rotation: 90, // defaults to 90, Android only
593 | asynch: true // defaults to true
594 | );
595 | ```
596 |
597 | ### Deeplab
598 |
599 | > Thanks to [RP](https://github.com/shaqian/flutter_tflite/pull/22) from [see--](https://github.com/see--) for Android implementation.
600 |
601 | - Output format:
602 |
603 | The output of Deeplab inference is Uint8List type. Depending on the `outputType` used, the output is:
604 |
605 | - (if outputType is png) byte array of a png image
606 |
607 | - (otherwise) byte array of r, g, b, a values of the pixels
608 |
609 | - Run on image:
610 |
611 | ```dart
612 | var result = await runSegmentationOnImage(
613 | path: filepath, // required
614 | imageMean: 0.0, // defaults to 0.0
615 | imageStd: 255.0, // defaults to 255.0
616 | labelColors: [...], // defaults to https://github.com/shaqian/flutter_tflite/blob/master/lib/tflite.dart#L219
617 | outputType: "png", // defaults to "png"
618 | asynch: true // defaults to true
619 | );
620 | ```
621 |
622 | - Run on binary:
623 |
624 | ```dart
625 | var result = await runSegmentationOnBinary(
626 | binary: binary, // required
627 | labelColors: [...], // defaults to https://github.com/shaqian/flutter_tflite/blob/master/lib/tflite.dart#L219
628 | outputType: "png", // defaults to "png"
629 | asynch: true // defaults to true
630 | );
631 | ```
632 |
633 | - Run on image stream (video frame):
634 |
635 | ```dart
636 | var result = await runSegmentationOnFrame(
637 | bytesList: img.planes.map((plane) {return plane.bytes;}).toList(),// required
638 | imageHeight: img.height, // defaults to 1280
639 | imageWidth: img.width, // defaults to 720
640 | imageMean: 127.5, // defaults to 0.0
641 | imageStd: 127.5, // defaults to 255.0
642 | rotation: 90, // defaults to 90, Android only
643 | labelColors: [...], // defaults to https://github.com/shaqian/flutter_tflite/blob/master/lib/tflite.dart#L219
644 | outputType: "png", // defaults to "png"
645 | asynch: true // defaults to true
646 | );
647 | ```
648 |
649 | ### PoseNet
650 |
651 | > Model is from [StackOverflow thread](https://stackoverflow.com/a/55288616).
652 |
653 | - Output format:
654 |
655 | `x, y` are between [0, 1]. You can scale `x` by the width and `y` by the height of the image.
656 |
657 | ```
658 | [ // array of poses/persons
659 | { // pose #1
660 | score: 0.6324902,
661 | keypoints: {
662 | 0: {
663 | x: 0.250,
664 | y: 0.125,
665 | part: nose,
666 | score: 0.9971070
667 | },
668 | 1: {
669 | x: 0.230,
670 | y: 0.105,
671 | part: leftEye,
672 | score: 0.9978438
673 | }
674 | ......
675 | }
676 | },
677 | { // pose #2
678 | score: 0.32534285,
679 | keypoints: {
680 | 0: {
681 | x: 0.402,
682 | y: 0.538,
683 | part: nose,
684 | score: 0.8798978
685 | },
686 | 1: {
687 | x: 0.380,
688 | y: 0.513,
689 | part: leftEye,
690 | score: 0.7090239
691 | }
692 | ......
693 | }
694 | },
695 | ......
696 | ]
697 | ```
698 |
699 | - Run on image:
700 |
701 | ```dart
702 | var result = await runPoseNetOnImage(
703 | path: filepath, // required
704 | imageMean: 125.0, // defaults to 125.0
705 | imageStd: 125.0, // defaults to 125.0
706 | numResults: 2, // defaults to 5
707 | threshold: 0.7, // defaults to 0.5
708 | nmsRadius: 10, // defaults to 20
709 | asynch: true // defaults to true
710 | );
711 | ```
712 |
713 | - Run on binary:
714 |
715 | ```dart
716 | var result = await runPoseNetOnBinary(
717 | binary: binary, // required
718 | numResults: 2, // defaults to 5
719 | threshold: 0.7, // defaults to 0.5
720 | nmsRadius: 10, // defaults to 20
721 | asynch: true // defaults to true
722 | );
723 | ```
724 |
725 | - Run on image stream (video frame):
726 |
727 | ```dart
728 | var result = await runPoseNetOnFrame(
729 | bytesList: img.planes.map((plane) {return plane.bytes;}).toList(),// required
730 | imageHeight: img.height, // defaults to 1280
731 | imageWidth: img.width, // defaults to 720
732 | imageMean: 125.0, // defaults to 125.0
733 | imageStd: 125.0, // defaults to 125.0
734 | rotation: 90, // defaults to 90, Android only
735 | numResults: 2, // defaults to 5
736 | threshold: 0.7, // defaults to 0.5
737 | nmsRadius: 10, // defaults to 20
738 | asynch: true // defaults to true
739 | );
740 | ```
741 |
742 | ## Example
743 |
744 | ### Prediction in Static Images
745 |
746 | Refer to the [example](https://github.com/draz26648/tensorflow_lite_flutter/tree/master/example).
747 |
748 | ## Run test cases
749 |
750 | `flutter test test/tflite_test.dart`
--------------------------------------------------------------------------------
/android/.classpath:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/android/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/workspace.xml
5 | /.idea/libraries
6 | .DS_Store
7 | /build
8 | /captures
9 |
--------------------------------------------------------------------------------
/android/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/android/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | tflite
4 | Project tflite created by Buildship.
5 |
6 |
7 |
8 |
9 | org.eclipse.jdt.core.javabuilder
10 |
11 |
12 |
13 |
14 | org.eclipse.buildship.core.gradleprojectbuilder
15 |
16 |
17 |
18 |
19 |
20 | org.eclipse.jdt.core.javanature
21 | org.eclipse.buildship.core.gradleprojectnature
22 |
23 |
24 |
25 | 1703491490494
26 |
27 | 30
28 |
29 | org.eclipse.core.resources.regexFilterMatcher
30 | node_modules|\.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/android/.settings/org.eclipse.buildship.core.prefs:
--------------------------------------------------------------------------------
1 | arguments=--init-script C\:\\Users\\Draz\\AppData\\Roaming\\Code\\User\\globalStorage\\redhat.java\\1.25.1\\config_win\\org.eclipse.osgi\\55\\0\\.cp\\gradle\\init\\init.gradle --init-script C\:\\Users\\Draz\\AppData\\Roaming\\Code\\User\\globalStorage\\redhat.java\\1.25.1\\config_win\\org.eclipse.osgi\\55\\0\\.cp\\gradle\\protobuf\\init.gradle
2 | auto.sync=false
3 | build.scans.enabled=false
4 | connection.gradle.distribution=GRADLE_DISTRIBUTION(VERSION(8.1.1))
5 | connection.project.dir=
6 | eclipse.preferences.version=1
7 | gradle.user.home=
8 | java.home=C\:/Users/Draz/.vscode/extensions/redhat.java-1.25.1-win32-x64/jre/17.0.9-win32-x86_64
9 | jvm.arguments=
10 | offline.mode=false
11 | override.workspace.settings=true
12 | show.console.view=true
13 | show.executions.view=true
14 |
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
group 'sq.flutter.tflite'
version '1.0-SNAPSHOT'

buildscript {
    repositories {
        google()
        // jcenter() has been read-only since 2021 and is being sunset;
        // mavenCentral() hosts the artifacts this build needs.
        mavenCentral()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:3.6.3'
    }
}

rootProject.allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

apply plugin: 'com.android.library'

android {
    compileSdkVersion 31

    defaultConfig {
        minSdkVersion 23
        testInstrumentationRunner 'androidx.test.runner.AndroidJUnitRunner'
    }
    lintOptions {
        // The plugin has no Android application package of its own.
        disable 'InvalidPackage'
    }
}

// Declared at the project level. Previously this block was nested inside
// android {} — it only worked there because Groovy delegates unresolved
// method calls up to the Project object, which is confusing and fragile.
dependencies {
    // NOTE(review): '+' dynamic versions make builds non-reproducible;
    // consider pinning explicit tensorflow-lite versions.
    implementation 'org.tensorflow:tensorflow-lite:+'
    implementation 'org.tensorflow:tensorflow-lite-gpu:+'
}
40 |
--------------------------------------------------------------------------------
/android/gradle.properties:
--------------------------------------------------------------------------------
1 | org.gradle.jvmargs=-Xmx1536M
2 | android.useAndroidX=true
3 | android.enableJetifier=true
4 | android.enableR8=true
--------------------------------------------------------------------------------
/android/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'tflite'
2 |
--------------------------------------------------------------------------------
/android/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/example/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .dart_tool/
3 |
4 | .packages
5 | .pub/
6 |
7 | build/
8 |
9 | .flutter-plugins
10 | .flutter-plugins-dependencies
11 |
12 | flutter_export_environment.sh
13 | Flutter.podspec
14 |
15 | # IntelliJ
16 | *.iml
17 | .idea/workspace.xml
18 | .idea/tasks.xml
19 | .idea/gradle.xml
20 | .idea/assetWizardSettings.xml
21 | .idea/dictionaries
22 | .idea/libraries
23 | .idea/caches
--------------------------------------------------------------------------------
/example/.metadata:
--------------------------------------------------------------------------------
1 | # This file tracks properties of this Flutter project.
2 | # Used by Flutter tool to assess capabilities and perform upgrades etc.
3 | #
4 | # This file should be version controlled and should not be manually edited.
5 |
6 | version:
7 | revision: 3b309bda072a6b326e8aa4591a5836af600923ce
8 | channel: beta
9 |
--------------------------------------------------------------------------------
/example/README.md:
--------------------------------------------------------------------------------
1 | # tflite_example
2 |
3 | Use tflite plugin to run model on images. The image is captured by camera or selected from gallery (with the help of [image_picker](https://pub.dartlang.org/packages/image_picker) plugin).
4 |
5 | 
6 |
7 | ## Prerequisites
8 |
9 | Create an `assets` folder. From https://github.com/shaqian/flutter_tflite/tree/master/example/assets
10 | download the following files and place them in the `assets` folder.
11 | - mobilenet_v1_1.0_224.tflite
12 | - mobilenet_v1_1.0_224.txt
13 | - ssd_mobilenet.tflite
14 | - ssd_mobilenet.txt
15 | - yolov2_tiny.tflite
16 | - yolov2_tiny.txt
17 | - deeplabv3_257_mv_gpu.tflite
18 | - deeplabv3_257_mv_gpu.txt
19 | - posenet_mv1_075_float_from_checkpoints.tflite
20 |
21 | ## Install
22 |
23 | ```
24 | flutter packages get
25 | ```
26 |
27 | ## Run
28 |
29 | ```
30 | flutter run
31 | ```
32 |
33 | ## Caveat
34 |
35 | ```recognizeImageBinary(image)``` (sample code for ```runModelOnBinary```) is slow on iOS when decoding image due to a [known issue](https://github.com/brendan-duncan/image/issues/55) with image package.
36 |
--------------------------------------------------------------------------------
/example/android/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | *.class
3 | .gradle
4 | /local.properties
5 | /.idea/workspace.xml
6 | /.idea/libraries
7 | .DS_Store
8 | /build
9 | /captures
10 | GeneratedPluginRegistrant.java
11 |
--------------------------------------------------------------------------------
/example/android/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | android
4 | Project android created by Buildship.
5 |
6 |
7 |
8 |
9 | org.eclipse.buildship.core.gradleprojectbuilder
10 |
11 |
12 |
13 |
14 |
15 | org.eclipse.buildship.core.gradleprojectnature
16 |
17 |
18 |
--------------------------------------------------------------------------------
/example/android/.settings/org.eclipse.buildship.core.prefs:
--------------------------------------------------------------------------------
1 | connection.project.dir=
2 | eclipse.preferences.version=1
3 |
--------------------------------------------------------------------------------
/example/android/app/.classpath:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/example/android/app/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | app
4 | Project app created by Buildship.
5 |
6 |
7 |
8 |
9 | org.eclipse.jdt.core.javabuilder
10 |
11 |
12 |
13 |
14 | org.eclipse.buildship.core.gradleprojectbuilder
15 |
16 |
17 |
18 |
19 |
20 | org.eclipse.jdt.core.javanature
21 | org.eclipse.buildship.core.gradleprojectnature
22 |
23 |
24 |
25 | 1703492759744
26 |
27 | 30
28 |
29 | org.eclipse.core.resources.regexFilterMatcher
30 | node_modules|\.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/example/android/app/.settings/org.eclipse.buildship.core.prefs:
--------------------------------------------------------------------------------
1 | connection.project.dir=..
2 | eclipse.preferences.version=1
3 |
--------------------------------------------------------------------------------
/example/android/app/build.gradle:
--------------------------------------------------------------------------------
// Read the settings the Flutter tool writes into local.properties
// (not checked in to version control).
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
    localPropertiesFile.withReader('UTF-8') { reader ->
        localProperties.load(reader)
    }
}

// The Flutter SDK location is mandatory; everything below depends on it.
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
    throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}

// Fall back to defaults when the tool has not recorded a version yet.
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
flutterVersionCode = (flutterVersionCode != null) ? flutterVersionCode : '1'

def flutterVersionName = localProperties.getProperty('flutter.versionName')
flutterVersionName = (flutterVersionName != null) ? flutterVersionName : '1.0'

apply plugin: 'com.android.application'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"

android {
    compileSdkVersion 28

    lintOptions {
        disable 'InvalidPackage'
    }

    // Keep .tflite model files uncompressed in the APK so they can be
    // memory-mapped at runtime.
    aaptOptions {
        noCompress 'tflite'
    }

    defaultConfig {
        // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
        applicationId "sq.flutter.tfliteexample"
        minSdkVersion 19
        targetSdkVersion 28
        versionCode flutterVersionCode.toInteger()
        versionName flutterVersionName
        testInstrumentationRunner 'androidx.test.runner.AndroidJUnitRunner'
    }

    buildTypes {
        release {
            // TODO: Add your own signing config for the release build.
            // Signing with the debug keys for now, so `flutter run --release` works.
            signingConfig signingConfigs.debug
        }
    }
}

flutter {
    source '../..'
}

dependencies {
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.0'
}
66 |
--------------------------------------------------------------------------------
/example/android/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
8 |
9 |
10 |
15 |
19 |
26 |
30 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/example/android/app/src/main/java/sq/flutter/tfliteexample/MainActivity.java:
--------------------------------------------------------------------------------
package sq.flutter.tfliteexample;

import android.os.Bundle;
import io.flutter.app.FlutterActivity;
import io.flutter.plugins.GeneratedPluginRegistrant;

// Entry-point activity for the example app.
// NOTE(review): io.flutter.app.FlutterActivity is the legacy v1 Android
// embedding; current Flutter templates use
// io.flutter.embedding.android.FlutterActivity, which registers plugins
// automatically. Migrating requires coordinated manifest changes.
public class MainActivity extends FlutterActivity {
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Wires up all plugins listed by the Flutter tool (tflite, image_picker, ...).
    GeneratedPluginRegistrant.registerWith(this);
  }
}
14 |
--------------------------------------------------------------------------------
/example/android/app/src/main/res/drawable/launch_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
12 |
13 |
--------------------------------------------------------------------------------
/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/example/android/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
9 |
--------------------------------------------------------------------------------
/example/android/build.gradle:
--------------------------------------------------------------------------------
buildscript {
    repositories {
        google()
        // jcenter() shut down in 2021; mavenCentral() now hosts the
        // artifacts previously resolved from it.
        mavenCentral()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:3.6.1'
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

// Flutter convention: keep all Gradle output under the project-level build/
// directory, one subdirectory per module.
rootProject.buildDir = '../build'
subprojects {
    project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
    // Plugin subprojects read configuration from :app, so :app must be
    // evaluated first.
    project.evaluationDependsOn(':app')
}

task clean(type: Delete) {
    delete rootProject.buildDir
}
30 |
--------------------------------------------------------------------------------
/example/android/gradle.properties:
--------------------------------------------------------------------------------
1 | org.gradle.jvmargs=-Xmx1536M
2 | target-platform=android-arm64
3 | android.useAndroidX=true
4 | android.enableJetifier=true
5 |
--------------------------------------------------------------------------------
/example/android/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/android/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/example/android/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sat Mar 28 00:33:22 ICT 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
7 |
--------------------------------------------------------------------------------
/example/android/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/example/android/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windowz variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/example/android/settings.gradle:
--------------------------------------------------------------------------------
include ':app'

// The Flutter tool records every plugin and its on-disk location as
// name=path entries in the .flutter-plugins file at the project root.
// Wire each plugin's android module in as a Gradle subproject.
def flutterRootDir = rootProject.projectDir.parentFile.toPath()

def pluginEntries = new Properties()
def pluginRegistry = new File(flutterRootDir.toFile(), '.flutter-plugins')
if (pluginRegistry.exists()) {
    pluginRegistry.withReader('UTF-8') { source -> pluginEntries.load(source) }
}

pluginEntries.each { pluginName, pluginPath ->
    def androidModuleDir = flutterRootDir.resolve(pluginPath).resolve('android').toFile()
    include ":$pluginName"
    project(":$pluginName").projectDir = androidModuleDir
}
16 |
--------------------------------------------------------------------------------
/example/android/settings_aar.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------
/example/assets/deeplabv3_257_mv_gpu.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/assets/deeplabv3_257_mv_gpu.tflite
--------------------------------------------------------------------------------
/example/assets/deeplabv3_257_mv_gpu.txt:
--------------------------------------------------------------------------------
1 | background
2 | aeroplane
3 | bicycle
4 | bird
5 | boat
6 | bottle
7 | bus
8 | car
9 | cat
10 | chair
11 | cow
12 | diningtable
13 | dog
14 | horse
15 | motorbike
16 | person
17 | potted plant
18 | sheep
19 | sofa
20 | train
21 | tv-monitor
22 |
--------------------------------------------------------------------------------
/example/assets/mobilenet_v1_1.0_224.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/assets/mobilenet_v1_1.0_224.tflite
--------------------------------------------------------------------------------
/example/assets/mobilenet_v1_1.0_224.txt:
--------------------------------------------------------------------------------
1 | background
2 | tench
3 | goldfish
4 | great white shark
5 | tiger shark
6 | hammerhead
7 | electric ray
8 | stingray
9 | cock
10 | hen
11 | ostrich
12 | brambling
13 | goldfinch
14 | house finch
15 | junco
16 | indigo bunting
17 | robin
18 | bulbul
19 | jay
20 | magpie
21 | chickadee
22 | water ouzel
23 | kite
24 | bald eagle
25 | vulture
26 | great grey owl
27 | European fire salamander
28 | common newt
29 | eft
30 | spotted salamander
31 | axolotl
32 | bullfrog
33 | tree frog
34 | tailed frog
35 | loggerhead
36 | leatherback turtle
37 | mud turtle
38 | terrapin
39 | box turtle
40 | banded gecko
41 | common iguana
42 | American chameleon
43 | whiptail
44 | agama
45 | frilled lizard
46 | alligator lizard
47 | Gila monster
48 | green lizard
49 | African chameleon
50 | Komodo dragon
51 | African crocodile
52 | American alligator
53 | triceratops
54 | thunder snake
55 | ringneck snake
56 | hognose snake
57 | green snake
58 | king snake
59 | garter snake
60 | water snake
61 | vine snake
62 | night snake
63 | boa constrictor
64 | rock python
65 | Indian cobra
66 | green mamba
67 | sea snake
68 | horned viper
69 | diamondback
70 | sidewinder
71 | trilobite
72 | harvestman
73 | scorpion
74 | black and gold garden spider
75 | barn spider
76 | garden spider
77 | black widow
78 | tarantula
79 | wolf spider
80 | tick
81 | centipede
82 | black grouse
83 | ptarmigan
84 | ruffed grouse
85 | prairie chicken
86 | peacock
87 | quail
88 | partridge
89 | African grey
90 | macaw
91 | sulphur-crested cockatoo
92 | lorikeet
93 | coucal
94 | bee eater
95 | hornbill
96 | hummingbird
97 | jacamar
98 | toucan
99 | drake
100 | red-breasted merganser
101 | goose
102 | black swan
103 | tusker
104 | echidna
105 | platypus
106 | wallaby
107 | koala
108 | wombat
109 | jellyfish
110 | sea anemone
111 | brain coral
112 | flatworm
113 | nematode
114 | conch
115 | snail
116 | slug
117 | sea slug
118 | chiton
119 | chambered nautilus
120 | Dungeness crab
121 | rock crab
122 | fiddler crab
123 | king crab
124 | American lobster
125 | spiny lobster
126 | crayfish
127 | hermit crab
128 | isopod
129 | white stork
130 | black stork
131 | spoonbill
132 | flamingo
133 | little blue heron
134 | American egret
135 | bittern
136 | crane
137 | limpkin
138 | European gallinule
139 | American coot
140 | bustard
141 | ruddy turnstone
142 | red-backed sandpiper
143 | redshank
144 | dowitcher
145 | oystercatcher
146 | pelican
147 | king penguin
148 | albatross
149 | grey whale
150 | killer whale
151 | dugong
152 | sea lion
153 | Chihuahua
154 | Japanese spaniel
155 | Maltese dog
156 | Pekinese
157 | Shih-Tzu
158 | Blenheim spaniel
159 | papillon
160 | toy terrier
161 | Rhodesian ridgeback
162 | Afghan hound
163 | basset
164 | beagle
165 | bloodhound
166 | bluetick
167 | black-and-tan coonhound
168 | Walker hound
169 | English foxhound
170 | redbone
171 | borzoi
172 | Irish wolfhound
173 | Italian greyhound
174 | whippet
175 | Ibizan hound
176 | Norwegian elkhound
177 | otterhound
178 | Saluki
179 | Scottish deerhound
180 | Weimaraner
181 | Staffordshire bullterrier
182 | American Staffordshire terrier
183 | Bedlington terrier
184 | Border terrier
185 | Kerry blue terrier
186 | Irish terrier
187 | Norfolk terrier
188 | Norwich terrier
189 | Yorkshire terrier
190 | wire-haired fox terrier
191 | Lakeland terrier
192 | Sealyham terrier
193 | Airedale
194 | cairn
195 | Australian terrier
196 | Dandie Dinmont
197 | Boston bull
198 | miniature schnauzer
199 | giant schnauzer
200 | standard schnauzer
201 | Scotch terrier
202 | Tibetan terrier
203 | silky terrier
204 | soft-coated wheaten terrier
205 | West Highland white terrier
206 | Lhasa
207 | flat-coated retriever
208 | curly-coated retriever
209 | golden retriever
210 | Labrador retriever
211 | Chesapeake Bay retriever
212 | German short-haired pointer
213 | vizsla
214 | English setter
215 | Irish setter
216 | Gordon setter
217 | Brittany spaniel
218 | clumber
219 | English springer
220 | Welsh springer spaniel
221 | cocker spaniel
222 | Sussex spaniel
223 | Irish water spaniel
224 | kuvasz
225 | schipperke
226 | groenendael
227 | malinois
228 | briard
229 | kelpie
230 | komondor
231 | Old English sheepdog
232 | Shetland sheepdog
233 | collie
234 | Border collie
235 | Bouvier des Flandres
236 | Rottweiler
237 | German shepherd
238 | Doberman
239 | miniature pinscher
240 | Greater Swiss Mountain dog
241 | Bernese mountain dog
242 | Appenzeller
243 | EntleBucher
244 | boxer
245 | bull mastiff
246 | Tibetan mastiff
247 | French bulldog
248 | Great Dane
249 | Saint Bernard
250 | Eskimo dog
251 | malamute
252 | Siberian husky
253 | dalmatian
254 | affenpinscher
255 | basenji
256 | pug
257 | Leonberg
258 | Newfoundland
259 | Great Pyrenees
260 | Samoyed
261 | Pomeranian
262 | chow
263 | keeshond
264 | Brabancon griffon
265 | Pembroke
266 | Cardigan
267 | toy poodle
268 | miniature poodle
269 | standard poodle
270 | Mexican hairless
271 | timber wolf
272 | white wolf
273 | red wolf
274 | coyote
275 | dingo
276 | dhole
277 | African hunting dog
278 | hyena
279 | red fox
280 | kit fox
281 | Arctic fox
282 | grey fox
283 | tabby
284 | tiger cat
285 | Persian cat
286 | Siamese cat
287 | Egyptian cat
288 | cougar
289 | lynx
290 | leopard
291 | snow leopard
292 | jaguar
293 | lion
294 | tiger
295 | cheetah
296 | brown bear
297 | American black bear
298 | ice bear
299 | sloth bear
300 | mongoose
301 | meerkat
302 | tiger beetle
303 | ladybug
304 | ground beetle
305 | long-horned beetle
306 | leaf beetle
307 | dung beetle
308 | rhinoceros beetle
309 | weevil
310 | fly
311 | bee
312 | ant
313 | grasshopper
314 | cricket
315 | walking stick
316 | cockroach
317 | mantis
318 | cicada
319 | leafhopper
320 | lacewing
321 | dragonfly
322 | damselfly
323 | admiral
324 | ringlet
325 | monarch
326 | cabbage butterfly
327 | sulphur butterfly
328 | lycaenid
329 | starfish
330 | sea urchin
331 | sea cucumber
332 | wood rabbit
333 | hare
334 | Angora
335 | hamster
336 | porcupine
337 | fox squirrel
338 | marmot
339 | beaver
340 | guinea pig
341 | sorrel
342 | zebra
343 | hog
344 | wild boar
345 | warthog
346 | hippopotamus
347 | ox
348 | water buffalo
349 | bison
350 | ram
351 | bighorn
352 | ibex
353 | hartebeest
354 | impala
355 | gazelle
356 | Arabian camel
357 | llama
358 | weasel
359 | mink
360 | polecat
361 | black-footed ferret
362 | otter
363 | skunk
364 | badger
365 | armadillo
366 | three-toed sloth
367 | orangutan
368 | gorilla
369 | chimpanzee
370 | gibbon
371 | siamang
372 | guenon
373 | patas
374 | baboon
375 | macaque
376 | langur
377 | colobus
378 | proboscis monkey
379 | marmoset
380 | capuchin
381 | howler monkey
382 | titi
383 | spider monkey
384 | squirrel monkey
385 | Madagascar cat
386 | indri
387 | Indian elephant
388 | African elephant
389 | lesser panda
390 | giant panda
391 | barracouta
392 | eel
393 | coho
394 | rock beauty
395 | anemone fish
396 | sturgeon
397 | gar
398 | lionfish
399 | puffer
400 | abacus
401 | abaya
402 | academic gown
403 | accordion
404 | acoustic guitar
405 | aircraft carrier
406 | airliner
407 | airship
408 | altar
409 | ambulance
410 | amphibian
411 | analog clock
412 | apiary
413 | apron
414 | ashcan
415 | assault rifle
416 | backpack
417 | bakery
418 | balance beam
419 | balloon
420 | ballpoint
421 | Band Aid
422 | banjo
423 | bannister
424 | barbell
425 | barber chair
426 | barbershop
427 | barn
428 | barometer
429 | barrel
430 | barrow
431 | baseball
432 | basketball
433 | bassinet
434 | bassoon
435 | bathing cap
436 | bath towel
437 | bathtub
438 | beach wagon
439 | beacon
440 | beaker
441 | bearskin
442 | beer bottle
443 | beer glass
444 | bell cote
445 | bib
446 | bicycle-built-for-two
447 | bikini
448 | binder
449 | binoculars
450 | birdhouse
451 | boathouse
452 | bobsled
453 | bolo tie
454 | bonnet
455 | bookcase
456 | bookshop
457 | bottlecap
458 | bow
459 | bow tie
460 | brass
461 | brassiere
462 | breakwater
463 | breastplate
464 | broom
465 | bucket
466 | buckle
467 | bulletproof vest
468 | bullet train
469 | butcher shop
470 | cab
471 | caldron
472 | candle
473 | cannon
474 | canoe
475 | can opener
476 | cardigan
477 | car mirror
478 | carousel
479 | carpenter's kit
480 | carton
481 | car wheel
482 | cash machine
483 | cassette
484 | cassette player
485 | castle
486 | catamaran
487 | CD player
488 | cello
489 | cellular telephone
490 | chain
491 | chainlink fence
492 | chain mail
493 | chain saw
494 | chest
495 | chiffonier
496 | chime
497 | china cabinet
498 | Christmas stocking
499 | church
500 | cinema
501 | cleaver
502 | cliff dwelling
503 | cloak
504 | clog
505 | cocktail shaker
506 | coffee mug
507 | coffeepot
508 | coil
509 | combination lock
510 | computer keyboard
511 | confectionery
512 | container ship
513 | convertible
514 | corkscrew
515 | cornet
516 | cowboy boot
517 | cowboy hat
518 | cradle
519 | crane
520 | crash helmet
521 | crate
522 | crib
523 | Crock Pot
524 | croquet ball
525 | crutch
526 | cuirass
527 | dam
528 | desk
529 | desktop computer
530 | dial telephone
531 | diaper
532 | digital clock
533 | digital watch
534 | dining table
535 | dishrag
536 | dishwasher
537 | disk brake
538 | dock
539 | dogsled
540 | dome
541 | doormat
542 | drilling platform
543 | drum
544 | drumstick
545 | dumbbell
546 | Dutch oven
547 | electric fan
548 | electric guitar
549 | electric locomotive
550 | entertainment center
551 | envelope
552 | espresso maker
553 | face powder
554 | feather boa
555 | file
556 | fireboat
557 | fire engine
558 | fire screen
559 | flagpole
560 | flute
561 | folding chair
562 | football helmet
563 | forklift
564 | fountain
565 | fountain pen
566 | four-poster
567 | freight car
568 | French horn
569 | frying pan
570 | fur coat
571 | garbage truck
572 | gasmask
573 | gas pump
574 | goblet
575 | go-kart
576 | golf ball
577 | golfcart
578 | gondola
579 | gong
580 | gown
581 | grand piano
582 | greenhouse
583 | grille
584 | grocery store
585 | guillotine
586 | hair slide
587 | hair spray
588 | half track
589 | hammer
590 | hamper
591 | hand blower
592 | hand-held computer
593 | handkerchief
594 | hard disc
595 | harmonica
596 | harp
597 | harvester
598 | hatchet
599 | holster
600 | home theater
601 | honeycomb
602 | hook
603 | hoopskirt
604 | horizontal bar
605 | horse cart
606 | hourglass
607 | iPod
608 | iron
609 | jack-o'-lantern
610 | jean
611 | jeep
612 | jersey
613 | jigsaw puzzle
614 | jinrikisha
615 | joystick
616 | kimono
617 | knee pad
618 | knot
619 | lab coat
620 | ladle
621 | lampshade
622 | laptop
623 | lawn mower
624 | lens cap
625 | letter opener
626 | library
627 | lifeboat
628 | lighter
629 | limousine
630 | liner
631 | lipstick
632 | Loafer
633 | lotion
634 | loudspeaker
635 | loupe
636 | lumbermill
637 | magnetic compass
638 | mailbag
639 | mailbox
640 | maillot
641 | maillot
642 | manhole cover
643 | maraca
644 | marimba
645 | mask
646 | matchstick
647 | maypole
648 | maze
649 | measuring cup
650 | medicine chest
651 | megalith
652 | microphone
653 | microwave
654 | military uniform
655 | milk can
656 | minibus
657 | miniskirt
658 | minivan
659 | missile
660 | mitten
661 | mixing bowl
662 | mobile home
663 | Model T
664 | modem
665 | monastery
666 | monitor
667 | moped
668 | mortar
669 | mortarboard
670 | mosque
671 | mosquito net
672 | motor scooter
673 | mountain bike
674 | mountain tent
675 | mouse
676 | mousetrap
677 | moving van
678 | muzzle
679 | nail
680 | neck brace
681 | necklace
682 | nipple
683 | notebook
684 | obelisk
685 | oboe
686 | ocarina
687 | odometer
688 | oil filter
689 | organ
690 | oscilloscope
691 | overskirt
692 | oxcart
693 | oxygen mask
694 | packet
695 | paddle
696 | paddlewheel
697 | padlock
698 | paintbrush
699 | pajama
700 | palace
701 | panpipe
702 | paper towel
703 | parachute
704 | parallel bars
705 | park bench
706 | parking meter
707 | passenger car
708 | patio
709 | pay-phone
710 | pedestal
711 | pencil box
712 | pencil sharpener
713 | perfume
714 | Petri dish
715 | photocopier
716 | pick
717 | pickelhaube
718 | picket fence
719 | pickup
720 | pier
721 | piggy bank
722 | pill bottle
723 | pillow
724 | ping-pong ball
725 | pinwheel
726 | pirate
727 | pitcher
728 | plane
729 | planetarium
730 | plastic bag
731 | plate rack
732 | plow
733 | plunger
734 | Polaroid camera
735 | pole
736 | police van
737 | poncho
738 | pool table
739 | pop bottle
740 | pot
741 | potter's wheel
742 | power drill
743 | prayer rug
744 | printer
745 | prison
746 | projectile
747 | projector
748 | puck
749 | punching bag
750 | purse
751 | quill
752 | quilt
753 | racer
754 | racket
755 | radiator
756 | radio
757 | radio telescope
758 | rain barrel
759 | recreational vehicle
760 | reel
761 | reflex camera
762 | refrigerator
763 | remote control
764 | restaurant
765 | revolver
766 | rifle
767 | rocking chair
768 | rotisserie
769 | rubber eraser
770 | rugby ball
771 | rule
772 | running shoe
773 | safe
774 | safety pin
775 | saltshaker
776 | sandal
777 | sarong
778 | sax
779 | scabbard
780 | scale
781 | school bus
782 | schooner
783 | scoreboard
784 | screen
785 | screw
786 | screwdriver
787 | seat belt
788 | sewing machine
789 | shield
790 | shoe shop
791 | shoji
792 | shopping basket
793 | shopping cart
794 | shovel
795 | shower cap
796 | shower curtain
797 | ski
798 | ski mask
799 | sleeping bag
800 | slide rule
801 | sliding door
802 | slot
803 | snorkel
804 | snowmobile
805 | snowplow
806 | soap dispenser
807 | soccer ball
808 | sock
809 | solar dish
810 | sombrero
811 | soup bowl
812 | space bar
813 | space heater
814 | space shuttle
815 | spatula
816 | speedboat
817 | spider web
818 | spindle
819 | sports car
820 | spotlight
821 | stage
822 | steam locomotive
823 | steel arch bridge
824 | steel drum
825 | stethoscope
826 | stole
827 | stone wall
828 | stopwatch
829 | stove
830 | strainer
831 | streetcar
832 | stretcher
833 | studio couch
834 | stupa
835 | submarine
836 | suit
837 | sundial
838 | sunglass
839 | sunglasses
840 | sunscreen
841 | suspension bridge
842 | swab
843 | sweatshirt
844 | swimming trunks
845 | swing
846 | switch
847 | syringe
848 | table lamp
849 | tank
850 | tape player
851 | teapot
852 | teddy
853 | television
854 | tennis ball
855 | thatch
856 | theater curtain
857 | thimble
858 | thresher
859 | throne
860 | tile roof
861 | toaster
862 | tobacco shop
863 | toilet seat
864 | torch
865 | totem pole
866 | tow truck
867 | toyshop
868 | tractor
869 | trailer truck
870 | tray
871 | trench coat
872 | tricycle
873 | trimaran
874 | tripod
875 | triumphal arch
876 | trolleybus
877 | trombone
878 | tub
879 | turnstile
880 | typewriter keyboard
881 | umbrella
882 | unicycle
883 | upright
884 | vacuum
885 | vase
886 | vault
887 | velvet
888 | vending machine
889 | vestment
890 | viaduct
891 | violin
892 | volleyball
893 | waffle iron
894 | wall clock
895 | wallet
896 | wardrobe
897 | warplane
898 | washbasin
899 | washer
900 | water bottle
901 | water jug
902 | water tower
903 | whiskey jug
904 | whistle
905 | wig
906 | window screen
907 | window shade
908 | Windsor tie
909 | wine bottle
910 | wing
911 | wok
912 | wooden spoon
913 | wool
914 | worm fence
915 | wreck
916 | yawl
917 | yurt
918 | web site
919 | comic book
920 | crossword puzzle
921 | street sign
922 | traffic light
923 | book jacket
924 | menu
925 | plate
926 | guacamole
927 | consomme
928 | hot pot
929 | trifle
930 | ice cream
931 | ice lolly
932 | French loaf
933 | bagel
934 | pretzel
935 | cheeseburger
936 | hotdog
937 | mashed potato
938 | head cabbage
939 | broccoli
940 | cauliflower
941 | zucchini
942 | spaghetti squash
943 | acorn squash
944 | butternut squash
945 | cucumber
946 | artichoke
947 | bell pepper
948 | cardoon
949 | mushroom
950 | Granny Smith
951 | strawberry
952 | orange
953 | lemon
954 | fig
955 | pineapple
956 | banana
957 | jackfruit
958 | custard apple
959 | pomegranate
960 | hay
961 | carbonara
962 | chocolate sauce
963 | dough
964 | meat loaf
965 | pizza
966 | potpie
967 | burrito
968 | red wine
969 | espresso
970 | cup
971 | eggnog
972 | alp
973 | bubble
974 | cliff
975 | coral reef
976 | geyser
977 | lakeside
978 | promontory
979 | sandbar
980 | seashore
981 | valley
982 | volcano
983 | ballplayer
984 | groom
985 | scuba diver
986 | rapeseed
987 | daisy
988 | yellow lady's slipper
989 | corn
990 | acorn
991 | hip
992 | buckeye
993 | coral fungus
994 | agaric
995 | gyromitra
996 | stinkhorn
997 | earthstar
998 | hen-of-the-woods
999 | bolete
1000 | ear
1001 | toilet tissue
1002 |
--------------------------------------------------------------------------------
/example/assets/posenet_mv1_075_float_from_checkpoints.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/assets/posenet_mv1_075_float_from_checkpoints.tflite
--------------------------------------------------------------------------------
/example/assets/ssd_mobilenet.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/assets/ssd_mobilenet.tflite
--------------------------------------------------------------------------------
/example/assets/ssd_mobilenet.txt:
--------------------------------------------------------------------------------
1 | ???
2 | person
3 | bicycle
4 | car
5 | motorcycle
6 | airplane
7 | bus
8 | train
9 | truck
10 | boat
11 | traffic light
12 | fire hydrant
13 | ???
14 | stop sign
15 | parking meter
16 | bench
17 | bird
18 | cat
19 | dog
20 | horse
21 | sheep
22 | cow
23 | elephant
24 | bear
25 | zebra
26 | giraffe
27 | ???
28 | backpack
29 | umbrella
30 | ???
31 | ???
32 | handbag
33 | tie
34 | suitcase
35 | frisbee
36 | skis
37 | snowboard
38 | sports ball
39 | kite
40 | baseball bat
41 | baseball glove
42 | skateboard
43 | surfboard
44 | tennis racket
45 | bottle
46 | ???
47 | wine glass
48 | cup
49 | fork
50 | knife
51 | spoon
52 | bowl
53 | banana
54 | apple
55 | sandwich
56 | orange
57 | broccoli
58 | carrot
59 | hot dog
60 | pizza
61 | donut
62 | cake
63 | chair
64 | couch
65 | potted plant
66 | bed
67 | ???
68 | dining table
69 | ???
70 | ???
71 | toilet
72 | ???
73 | tv
74 | laptop
75 | mouse
76 | remote
77 | keyboard
78 | cell phone
79 | microwave
80 | oven
81 | toaster
82 | sink
83 | refrigerator
84 | ???
85 | book
86 | clock
87 | vase
88 | scissors
89 | teddy bear
90 | hair drier
91 | toothbrush
92 |
--------------------------------------------------------------------------------
/example/assets/yolov2_tiny.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/assets/yolov2_tiny.tflite
--------------------------------------------------------------------------------
/example/assets/yolov2_tiny.txt:
--------------------------------------------------------------------------------
1 | person
2 | bicycle
3 | car
4 | motorbike
5 | aeroplane
6 | bus
7 | train
8 | truck
9 | boat
10 | traffic light
11 | fire hydrant
12 | stop sign
13 | parking meter
14 | bench
15 | bird
16 | cat
17 | dog
18 | horse
19 | sheep
20 | cow
21 | elephant
22 | bear
23 | zebra
24 | giraffe
25 | backpack
26 | umbrella
27 | handbag
28 | tie
29 | suitcase
30 | frisbee
31 | skis
32 | snowboard
33 | sports ball
34 | kite
35 | baseball bat
36 | baseball glove
37 | skateboard
38 | surfboard
39 | tennis racket
40 | bottle
41 | wine glass
42 | cup
43 | fork
44 | knife
45 | spoon
46 | bowl
47 | banana
48 | apple
49 | sandwich
50 | orange
51 | broccoli
52 | carrot
53 | hot dog
54 | pizza
55 | donut
56 | cake
57 | chair
58 | sofa
59 | pottedplant
60 | bed
61 | diningtable
62 | toilet
63 | tvmonitor
64 | laptop
65 | mouse
66 | remote
67 | keyboard
68 | cell phone
69 | microwave
70 | oven
71 | toaster
72 | sink
73 | refrigerator
74 | book
75 | clock
76 | vase
77 | scissors
78 | teddy bear
79 | hair drier
80 | toothbrush
81 |
--------------------------------------------------------------------------------
/example/ios/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | .vagrant/
3 | .sconsign.dblite
4 | .svn/
5 |
6 | .DS_Store
7 | *.swp
8 | profile
9 |
10 | DerivedData/
11 | build/
12 | GeneratedPluginRegistrant.h
13 | GeneratedPluginRegistrant.m
14 |
15 | .generated/
16 |
17 | *.pbxuser
18 | *.mode1v3
19 | *.mode2v3
20 | *.perspectivev3
21 |
22 | !default.pbxuser
23 | !default.mode1v3
24 | !default.mode2v3
25 | !default.perspectivev3
26 |
27 | xcuserdata
28 |
29 | *.moved-aside
30 |
31 | *.pyc
32 | *sync/
33 | Icon?
34 | .tags*
35 |
36 | /Flutter/app.flx
37 | /Flutter/app.zip
38 | /Flutter/flutter_assets/
39 | /Flutter/App.framework
40 | /Flutter/Flutter.framework
41 | /Flutter/Generated.xcconfig
42 | /ServiceDefinitions.json
43 |
44 | Pods/
45 | .symlinks/
46 |
--------------------------------------------------------------------------------
/example/ios/Flutter/AppFrameworkInfo.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | App
9 | CFBundleIdentifier
10 | io.flutter.flutter.app
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | App
15 | CFBundlePackageType
16 | FMWK
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1.0
23 | MinimumOSVersion
24 | 8.0
25 |
26 |
27 |
--------------------------------------------------------------------------------
/example/ios/Flutter/Debug.xcconfig:
--------------------------------------------------------------------------------
1 | #include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
2 | #include "Generated.xcconfig"
3 |
--------------------------------------------------------------------------------
/example/ios/Flutter/Release.xcconfig:
--------------------------------------------------------------------------------
1 | #include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
2 | #include "Generated.xcconfig"
3 |
--------------------------------------------------------------------------------
/example/ios/Podfile:
--------------------------------------------------------------------------------
1 | # Uncomment this line to define a global platform for your project
2 | # platform :ios, '9.0'
3 |
4 | # CocoaPods analytics sends network stats synchronously affecting flutter build latency.
5 | ENV['COCOAPODS_DISABLE_STATS'] = 'true'
6 |
7 | project 'Runner', {
8 | 'Debug' => :debug,
9 | 'Profile' => :release,
10 | 'Release' => :release,
11 | }
12 |
13 | def flutter_root
14 | generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
15 | unless File.exist?(generated_xcode_build_settings_path)
16 | raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
17 | end
18 |
19 | File.foreach(generated_xcode_build_settings_path) do |line|
20 | matches = line.match(/FLUTTER_ROOT\=(.*)/)
21 | return matches[1].strip if matches
22 | end
23 | raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
24 | end
25 |
26 | require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)
27 |
28 | flutter_ios_podfile_setup
29 |
30 | target 'Runner' do
31 | flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
32 | end
33 |
34 | post_install do |installer|
35 | installer.pods_project.targets.each do |target|
36 | flutter_additional_ios_build_settings(target)
37 | end
38 | end
39 |
--------------------------------------------------------------------------------
/example/ios/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - Flutter (1.0.0)
3 | - image_picker (0.0.1):
4 | - Flutter
5 | - TensorFlowLiteC (2.2.0)
6 | - tflite (1.1.2):
7 | - Flutter
8 | - TensorFlowLiteC
9 |
10 | DEPENDENCIES:
11 | - Flutter (from `Flutter`)
12 | - image_picker (from `.symlinks/plugins/image_picker/ios`)
13 | - tflite (from `.symlinks/plugins/tflite/ios`)
14 |
15 | SPEC REPOS:
16 | trunk:
17 | - TensorFlowLiteC
18 |
19 | EXTERNAL SOURCES:
20 | Flutter:
21 | :path: Flutter
22 | image_picker:
23 | :path: ".symlinks/plugins/image_picker/ios"
24 | tflite:
25 | :path: ".symlinks/plugins/tflite/ios"
26 |
27 | SPEC CHECKSUMS:
28 | Flutter: 434fef37c0980e73bb6479ef766c45957d4b510c
29 | image_picker: a211f28b95a560433c00f5cd3773f4710a20404d
30 | TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
31 | tflite: f0403a894740019d63ab5662253bba5b2dd37296
32 |
33 | PODFILE CHECKSUM: 8e679eca47255a8ca8067c4c67aab20e64cb974d
34 |
35 | COCOAPODS: 1.10.1
36 |
--------------------------------------------------------------------------------
/example/ios/Runner.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
11 | 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
12 | 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 9740EEB21CF90195004384FC /* Debug.xcconfig */; };
13 | 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
14 | 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
15 | 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
16 | 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
17 | 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
18 | A8FCB07931B147D0C738D807 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = A4A034B01AB21E851714E03C /* libPods-Runner.a */; };
19 | /* End PBXBuildFile section */
20 |
21 | /* Begin PBXCopyFilesBuildPhase section */
22 | 9705A1C41CF9048500538489 /* Embed Frameworks */ = {
23 | isa = PBXCopyFilesBuildPhase;
24 | buildActionMask = 2147483647;
25 | dstPath = "";
26 | dstSubfolderSpec = 10;
27 | files = (
28 | );
29 | name = "Embed Frameworks";
30 | runOnlyForDeploymentPostprocessing = 0;
31 | };
32 | /* End PBXCopyFilesBuildPhase section */
33 |
34 | /* Begin PBXFileReference section */
35 | 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; };
36 | 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; };
37 | 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; };
38 | 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; };
39 | 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; };
40 | 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; };
41 | 864E0E2308AE5F3A9409E901 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; };
42 | 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; };
43 | 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; };
44 | 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
45 | 97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; };
46 | 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
47 | 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
48 | 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; };
49 | 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
50 | A4A034B01AB21E851714E03C /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; };
51 | E0C0C115F9024C6ADB3B2DB5 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; };
52 | /* End PBXFileReference section */
53 |
54 | /* Begin PBXFrameworksBuildPhase section */
55 | 97C146EB1CF9000F007C117D /* Frameworks */ = {
56 | isa = PBXFrameworksBuildPhase;
57 | buildActionMask = 2147483647;
58 | files = (
59 | A8FCB07931B147D0C738D807 /* libPods-Runner.a in Frameworks */,
60 | );
61 | runOnlyForDeploymentPostprocessing = 0;
62 | };
63 | /* End PBXFrameworksBuildPhase section */
64 |
65 | /* Begin PBXGroup section */
66 | 7670CC45CF9B055E20C18D9C /* Frameworks */ = {
67 | isa = PBXGroup;
68 | children = (
69 | A4A034B01AB21E851714E03C /* libPods-Runner.a */,
70 | );
71 | name = Frameworks;
72 | sourceTree = "";
73 | };
74 | 8EE3D73475BA2048B61051C2 /* Pods */ = {
75 | isa = PBXGroup;
76 | children = (
77 | E0C0C115F9024C6ADB3B2DB5 /* Pods-Runner.debug.xcconfig */,
78 | 864E0E2308AE5F3A9409E901 /* Pods-Runner.release.xcconfig */,
79 | );
80 | name = Pods;
81 | sourceTree = "";
82 | };
83 | 9740EEB11CF90186004384FC /* Flutter */ = {
84 | isa = PBXGroup;
85 | children = (
86 | 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
87 | 9740EEB21CF90195004384FC /* Debug.xcconfig */,
88 | 7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
89 | 9740EEB31CF90195004384FC /* Generated.xcconfig */,
90 | );
91 | name = Flutter;
92 | sourceTree = "";
93 | };
94 | 97C146E51CF9000F007C117D = {
95 | isa = PBXGroup;
96 | children = (
97 | 9740EEB11CF90186004384FC /* Flutter */,
98 | 97C146F01CF9000F007C117D /* Runner */,
99 | 97C146EF1CF9000F007C117D /* Products */,
100 | 8EE3D73475BA2048B61051C2 /* Pods */,
101 | 7670CC45CF9B055E20C18D9C /* Frameworks */,
102 | );
103 | sourceTree = "";
104 | };
105 | 97C146EF1CF9000F007C117D /* Products */ = {
106 | isa = PBXGroup;
107 | children = (
108 | 97C146EE1CF9000F007C117D /* Runner.app */,
109 | );
110 | name = Products;
111 | sourceTree = "";
112 | };
113 | 97C146F01CF9000F007C117D /* Runner */ = {
114 | isa = PBXGroup;
115 | children = (
116 | 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */,
117 | 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */,
118 | 97C146FA1CF9000F007C117D /* Main.storyboard */,
119 | 97C146FD1CF9000F007C117D /* Assets.xcassets */,
120 | 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
121 | 97C147021CF9000F007C117D /* Info.plist */,
122 | 97C146F11CF9000F007C117D /* Supporting Files */,
123 | 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
124 | 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
125 | );
126 | path = Runner;
127 | sourceTree = "";
128 | };
129 | 97C146F11CF9000F007C117D /* Supporting Files */ = {
130 | isa = PBXGroup;
131 | children = (
132 | 97C146F21CF9000F007C117D /* main.m */,
133 | );
134 | name = "Supporting Files";
135 | sourceTree = "";
136 | };
137 | /* End PBXGroup section */
138 |
139 | /* Begin PBXNativeTarget section */
140 | 97C146ED1CF9000F007C117D /* Runner */ = {
141 | isa = PBXNativeTarget;
142 | buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
143 | buildPhases = (
144 | FAEC5F0CFA3366178E53C4C5 /* [CP] Check Pods Manifest.lock */,
145 | 9740EEB61CF901F6004384FC /* Run Script */,
146 | 97C146EA1CF9000F007C117D /* Sources */,
147 | 97C146EB1CF9000F007C117D /* Frameworks */,
148 | 97C146EC1CF9000F007C117D /* Resources */,
149 | 9705A1C41CF9048500538489 /* Embed Frameworks */,
150 | 3B06AD1E1E4923F5004D2608 /* Thin Binary */,
151 | );
152 | buildRules = (
153 | );
154 | dependencies = (
155 | );
156 | name = Runner;
157 | productName = Runner;
158 | productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
159 | productType = "com.apple.product-type.application";
160 | };
161 | /* End PBXNativeTarget section */
162 |
163 | /* Begin PBXProject section */
164 | 97C146E61CF9000F007C117D /* Project object */ = {
165 | isa = PBXProject;
166 | attributes = {
167 | LastUpgradeCheck = 0910;
168 | ORGANIZATIONNAME = "The Chromium Authors";
169 | TargetAttributes = {
170 | 97C146ED1CF9000F007C117D = {
171 | CreatedOnToolsVersion = 7.3.1;
172 | DevelopmentTeam = ZJG3P98JS9;
173 | };
174 | };
175 | };
176 | buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
177 | compatibilityVersion = "Xcode 3.2";
178 | developmentRegion = English;
179 | hasScannedForEncodings = 0;
180 | knownRegions = (
181 | English,
182 | en,
183 | Base,
184 | );
185 | mainGroup = 97C146E51CF9000F007C117D;
186 | productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
187 | projectDirPath = "";
188 | projectRoot = "";
189 | targets = (
190 | 97C146ED1CF9000F007C117D /* Runner */,
191 | );
192 | };
193 | /* End PBXProject section */
194 |
195 | /* Begin PBXResourcesBuildPhase section */
196 | 97C146EC1CF9000F007C117D /* Resources */ = {
197 | isa = PBXResourcesBuildPhase;
198 | buildActionMask = 2147483647;
199 | files = (
200 | 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
201 | 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
202 | 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */,
203 | 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
204 | 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
205 | );
206 | runOnlyForDeploymentPostprocessing = 0;
207 | };
208 | /* End PBXResourcesBuildPhase section */
209 |
210 | /* Begin PBXShellScriptBuildPhase section */
211 | 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
212 | isa = PBXShellScriptBuildPhase;
213 | buildActionMask = 2147483647;
214 | files = (
215 | );
216 | inputPaths = (
217 | );
218 | name = "Thin Binary";
219 | outputPaths = (
220 | );
221 | runOnlyForDeploymentPostprocessing = 0;
222 | shellPath = /bin/sh;
223 | shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
224 | };
225 | 9740EEB61CF901F6004384FC /* Run Script */ = {
226 | isa = PBXShellScriptBuildPhase;
227 | buildActionMask = 2147483647;
228 | files = (
229 | );
230 | inputPaths = (
231 | );
232 | name = "Run Script";
233 | outputPaths = (
234 | );
235 | runOnlyForDeploymentPostprocessing = 0;
236 | shellPath = /bin/sh;
237 | shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
238 | };
239 | FAEC5F0CFA3366178E53C4C5 /* [CP] Check Pods Manifest.lock */ = {
240 | isa = PBXShellScriptBuildPhase;
241 | buildActionMask = 2147483647;
242 | files = (
243 | );
244 | inputPaths = (
245 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
246 | "${PODS_ROOT}/Manifest.lock",
247 | );
248 | name = "[CP] Check Pods Manifest.lock";
249 | outputPaths = (
250 | "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
251 | );
252 | runOnlyForDeploymentPostprocessing = 0;
253 | shellPath = /bin/sh;
254 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
255 | showEnvVarsInLog = 0;
256 | };
257 | /* End PBXShellScriptBuildPhase section */
258 |
259 | /* Begin PBXSourcesBuildPhase section */
260 | 97C146EA1CF9000F007C117D /* Sources */ = {
261 | isa = PBXSourcesBuildPhase;
262 | buildActionMask = 2147483647;
263 | files = (
264 | 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */,
265 | 97C146F31CF9000F007C117D /* main.m in Sources */,
266 | 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
267 | );
268 | runOnlyForDeploymentPostprocessing = 0;
269 | };
270 | /* End PBXSourcesBuildPhase section */
271 |
272 | /* Begin PBXVariantGroup section */
273 | 97C146FA1CF9000F007C117D /* Main.storyboard */ = {
274 | isa = PBXVariantGroup;
275 | children = (
276 | 97C146FB1CF9000F007C117D /* Base */,
277 | );
278 | name = Main.storyboard;
279 | sourceTree = "";
280 | };
281 | 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
282 | isa = PBXVariantGroup;
283 | children = (
284 | 97C147001CF9000F007C117D /* Base */,
285 | );
286 | name = LaunchScreen.storyboard;
287 | sourceTree = "";
288 | };
289 | /* End PBXVariantGroup section */
290 |
291 | /* Begin XCBuildConfiguration section */
292 | 97C147031CF9000F007C117D /* Debug */ = {
293 | isa = XCBuildConfiguration;
294 | buildSettings = {
295 | ALWAYS_SEARCH_USER_PATHS = NO;
296 | CLANG_ANALYZER_NONNULL = YES;
297 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
298 | CLANG_CXX_LIBRARY = "libc++";
299 | CLANG_ENABLE_MODULES = YES;
300 | CLANG_ENABLE_OBJC_ARC = YES;
301 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
302 | CLANG_WARN_BOOL_CONVERSION = YES;
303 | CLANG_WARN_COMMA = YES;
304 | CLANG_WARN_CONSTANT_CONVERSION = YES;
305 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
306 | CLANG_WARN_EMPTY_BODY = YES;
307 | CLANG_WARN_ENUM_CONVERSION = YES;
308 | CLANG_WARN_INFINITE_RECURSION = YES;
309 | CLANG_WARN_INT_CONVERSION = YES;
310 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
311 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
312 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
313 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
314 | CLANG_WARN_STRICT_PROTOTYPES = YES;
315 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
316 | CLANG_WARN_UNREACHABLE_CODE = YES;
317 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
318 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
319 | COPY_PHASE_STRIP = NO;
320 | DEBUG_INFORMATION_FORMAT = dwarf;
321 | ENABLE_STRICT_OBJC_MSGSEND = YES;
322 | ENABLE_TESTABILITY = YES;
323 | GCC_C_LANGUAGE_STANDARD = gnu99;
324 | GCC_DYNAMIC_NO_PIC = NO;
325 | GCC_NO_COMMON_BLOCKS = YES;
326 | GCC_OPTIMIZATION_LEVEL = 0;
327 | GCC_PREPROCESSOR_DEFINITIONS = (
328 | "DEBUG=1",
329 | "$(inherited)",
330 | );
331 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
332 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
333 | GCC_WARN_UNDECLARED_SELECTOR = YES;
334 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
335 | GCC_WARN_UNUSED_FUNCTION = YES;
336 | GCC_WARN_UNUSED_VARIABLE = YES;
337 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
338 | MTL_ENABLE_DEBUG_INFO = YES;
339 | ONLY_ACTIVE_ARCH = YES;
340 | SDKROOT = iphoneos;
341 | TARGETED_DEVICE_FAMILY = "1,2";
342 | };
343 | name = Debug;
344 | };
345 | 97C147041CF9000F007C117D /* Release */ = {
346 | isa = XCBuildConfiguration;
347 | buildSettings = {
348 | ALWAYS_SEARCH_USER_PATHS = NO;
349 | CLANG_ANALYZER_NONNULL = YES;
350 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
351 | CLANG_CXX_LIBRARY = "libc++";
352 | CLANG_ENABLE_MODULES = YES;
353 | CLANG_ENABLE_OBJC_ARC = YES;
354 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
355 | CLANG_WARN_BOOL_CONVERSION = YES;
356 | CLANG_WARN_COMMA = YES;
357 | CLANG_WARN_CONSTANT_CONVERSION = YES;
358 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
359 | CLANG_WARN_EMPTY_BODY = YES;
360 | CLANG_WARN_ENUM_CONVERSION = YES;
361 | CLANG_WARN_INFINITE_RECURSION = YES;
362 | CLANG_WARN_INT_CONVERSION = YES;
363 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
364 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
365 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
366 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
367 | CLANG_WARN_STRICT_PROTOTYPES = YES;
368 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
369 | CLANG_WARN_UNREACHABLE_CODE = YES;
370 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
371 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
372 | COPY_PHASE_STRIP = NO;
373 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
374 | ENABLE_NS_ASSERTIONS = NO;
375 | ENABLE_STRICT_OBJC_MSGSEND = YES;
376 | GCC_C_LANGUAGE_STANDARD = gnu99;
377 | GCC_NO_COMMON_BLOCKS = YES;
378 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
379 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
380 | GCC_WARN_UNDECLARED_SELECTOR = YES;
381 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
382 | GCC_WARN_UNUSED_FUNCTION = YES;
383 | GCC_WARN_UNUSED_VARIABLE = YES;
384 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
385 | MTL_ENABLE_DEBUG_INFO = NO;
386 | ONLY_ACTIVE_ARCH = YES;
387 | SDKROOT = iphoneos;
388 | TARGETED_DEVICE_FAMILY = "1,2";
389 | VALIDATE_PRODUCT = YES;
390 | };
391 | name = Release;
392 | };
393 | 97C147061CF9000F007C117D /* Debug */ = {
394 | isa = XCBuildConfiguration;
395 | baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
396 | buildSettings = {
397 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
398 | CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
399 | DEVELOPMENT_TEAM = ZJG3P98JS9;
400 | ENABLE_BITCODE = NO;
401 | FRAMEWORK_SEARCH_PATHS = (
402 | "$(inherited)",
403 | "$(PROJECT_DIR)/Flutter",
404 | );
405 | HEADER_SEARCH_PATHS = (
406 | "$(inherited)",
407 | "'${SRCROOT}/Pods/TensorFlowLite/Frameworks/tensorflow_lite.framework/Headers'",
408 | "\"${PODS_ROOT}/Headers/Public\"",
409 | "\"${PODS_ROOT}/Headers/Public/Flutter\"",
410 | "\"${PODS_ROOT}/Headers/Public/TensorFlowLite\"",
411 | "\"${PODS_ROOT}/Headers/Public/tflite\"",
412 | );
413 | INFOPLIST_FILE = Runner/Info.plist;
414 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
415 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
416 | LIBRARY_SEARCH_PATHS = (
417 | "$(inherited)",
418 | "$(PROJECT_DIR)/Flutter",
419 | );
420 | PRODUCT_BUNDLE_IDENTIFIER = sq.flutter.tfliteExample;
421 | PRODUCT_NAME = "$(TARGET_NAME)";
422 | VERSIONING_SYSTEM = "apple-generic";
423 | };
424 | name = Debug;
425 | };
426 | 97C147071CF9000F007C117D /* Release */ = {
427 | isa = XCBuildConfiguration;
428 | baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
429 | buildSettings = {
430 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
431 | CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
432 | DEVELOPMENT_TEAM = ZJG3P98JS9;
433 | ENABLE_BITCODE = NO;
434 | FRAMEWORK_SEARCH_PATHS = (
435 | "$(inherited)",
436 | "$(PROJECT_DIR)/Flutter",
437 | );
438 | HEADER_SEARCH_PATHS = (
439 | "$(inherited)",
440 | "'${SRCROOT}/Pods/TensorFlowLite/Frameworks/tensorflow_lite.framework/Headers'",
441 | "\"${PODS_ROOT}/Headers/Public\"",
442 | "\"${PODS_ROOT}/Headers/Public/Flutter\"",
443 | "\"${PODS_ROOT}/Headers/Public/TensorFlowLite\"",
444 | "\"${PODS_ROOT}/Headers/Public/tflite\"",
445 | );
446 | INFOPLIST_FILE = Runner/Info.plist;
447 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
448 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
449 | LIBRARY_SEARCH_PATHS = (
450 | "$(inherited)",
451 | "$(PROJECT_DIR)/Flutter",
452 | );
453 | PRODUCT_BUNDLE_IDENTIFIER = sq.flutter.tfliteExample;
454 | PRODUCT_NAME = "$(TARGET_NAME)";
455 | VERSIONING_SYSTEM = "apple-generic";
456 | };
457 | name = Release;
458 | };
459 | /* End XCBuildConfiguration section */
460 |
461 | /* Begin XCConfigurationList section */
462 | 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
463 | isa = XCConfigurationList;
464 | buildConfigurations = (
465 | 97C147031CF9000F007C117D /* Debug */,
466 | 97C147041CF9000F007C117D /* Release */,
467 | );
468 | defaultConfigurationIsVisible = 0;
469 | defaultConfigurationName = Release;
470 | };
471 | 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
472 | isa = XCConfigurationList;
473 | buildConfigurations = (
474 | 97C147061CF9000F007C117D /* Debug */,
475 | 97C147071CF9000F007C117D /* Release */,
476 | );
477 | defaultConfigurationIsVisible = 0;
478 | defaultConfigurationName = Release;
479 | };
480 | /* End XCConfigurationList section */
481 | };
482 | rootObject = 97C146E61CF9000F007C117D /* Project object */;
483 | }
484 |
--------------------------------------------------------------------------------
/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
37 |
38 |
39 |
40 |
41 |
42 |
52 |
54 |
60 |
61 |
62 |
63 |
69 |
71 |
77 |
78 |
79 |
80 |
82 |
83 |
86 |
87 |
88 |
--------------------------------------------------------------------------------
/example/ios/Runner.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/example/ios/Runner/AppDelegate.h:
--------------------------------------------------------------------------------
// App delegate interface for the Flutter example app. Subclasses
// FlutterAppDelegate so Flutter manages the engine and root view controller.
// NOTE: the two framework imports below were blank (invalid `#import` with no
// header) — restored to the standard Flutter template headers.
#import <Flutter/Flutter.h>
#import <UIKit/UIKit.h>

@interface AppDelegate : FlutterAppDelegate

@end
7 |
--------------------------------------------------------------------------------
/example/ios/Runner/AppDelegate.m:
--------------------------------------------------------------------------------
#include "AppDelegate.h"
#include "GeneratedPluginRegistrant.h"

@implementation AppDelegate

// Standard Flutter launch hook: registers every plugin listed in the
// generated registrant with this app's plugin registry, then defers to
// FlutterAppDelegate's own launch handling.
- (BOOL)application:(UIApplication *)application
didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
[GeneratedPluginRegistrant registerWithRegistry:self];
// Override point for customization after application launch.
return [super application:application didFinishLaunchingWithOptions:launchOptions];
}

@end
14 |
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "size" : "20x20",
5 | "idiom" : "iphone",
6 | "filename" : "Icon-App-20x20@2x.png",
7 | "scale" : "2x"
8 | },
9 | {
10 | "size" : "20x20",
11 | "idiom" : "iphone",
12 | "filename" : "Icon-App-20x20@3x.png",
13 | "scale" : "3x"
14 | },
15 | {
16 | "size" : "29x29",
17 | "idiom" : "iphone",
18 | "filename" : "Icon-App-29x29@1x.png",
19 | "scale" : "1x"
20 | },
21 | {
22 | "size" : "29x29",
23 | "idiom" : "iphone",
24 | "filename" : "Icon-App-29x29@2x.png",
25 | "scale" : "2x"
26 | },
27 | {
28 | "size" : "29x29",
29 | "idiom" : "iphone",
30 | "filename" : "Icon-App-29x29@3x.png",
31 | "scale" : "3x"
32 | },
33 | {
34 | "size" : "40x40",
35 | "idiom" : "iphone",
36 | "filename" : "Icon-App-40x40@2x.png",
37 | "scale" : "2x"
38 | },
39 | {
40 | "size" : "40x40",
41 | "idiom" : "iphone",
42 | "filename" : "Icon-App-40x40@3x.png",
43 | "scale" : "3x"
44 | },
45 | {
46 | "size" : "60x60",
47 | "idiom" : "iphone",
48 | "filename" : "Icon-App-60x60@2x.png",
49 | "scale" : "2x"
50 | },
51 | {
52 | "size" : "60x60",
53 | "idiom" : "iphone",
54 | "filename" : "Icon-App-60x60@3x.png",
55 | "scale" : "3x"
56 | },
57 | {
58 | "size" : "20x20",
59 | "idiom" : "ipad",
60 | "filename" : "Icon-App-20x20@1x.png",
61 | "scale" : "1x"
62 | },
63 | {
64 | "size" : "20x20",
65 | "idiom" : "ipad",
66 | "filename" : "Icon-App-20x20@2x.png",
67 | "scale" : "2x"
68 | },
69 | {
70 | "size" : "29x29",
71 | "idiom" : "ipad",
72 | "filename" : "Icon-App-29x29@1x.png",
73 | "scale" : "1x"
74 | },
75 | {
76 | "size" : "29x29",
77 | "idiom" : "ipad",
78 | "filename" : "Icon-App-29x29@2x.png",
79 | "scale" : "2x"
80 | },
81 | {
82 | "size" : "40x40",
83 | "idiom" : "ipad",
84 | "filename" : "Icon-App-40x40@1x.png",
85 | "scale" : "1x"
86 | },
87 | {
88 | "size" : "40x40",
89 | "idiom" : "ipad",
90 | "filename" : "Icon-App-40x40@2x.png",
91 | "scale" : "2x"
92 | },
93 | {
94 | "size" : "76x76",
95 | "idiom" : "ipad",
96 | "filename" : "Icon-App-76x76@1x.png",
97 | "scale" : "1x"
98 | },
99 | {
100 | "size" : "76x76",
101 | "idiom" : "ipad",
102 | "filename" : "Icon-App-76x76@2x.png",
103 | "scale" : "2x"
104 | },
105 | {
106 | "size" : "83.5x83.5",
107 | "idiom" : "ipad",
108 | "filename" : "Icon-App-83.5x83.5@2x.png",
109 | "scale" : "2x"
110 | },
111 | {
112 | "size" : "1024x1024",
113 | "idiom" : "ios-marketing",
114 | "filename" : "Icon-App-1024x1024@1x.png",
115 | "scale" : "1x"
116 | }
117 | ],
118 | "info" : {
119 | "version" : 1,
120 | "author" : "xcode"
121 | }
122 | }
123 |
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "LaunchImage.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "LaunchImage@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "LaunchImage@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png
--------------------------------------------------------------------------------
/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md:
--------------------------------------------------------------------------------
1 | # Launch Screen Assets
2 |
3 | You can customize the launch screen with your own desired assets by replacing the image files in this directory.
4 |
5 | You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.
--------------------------------------------------------------------------------
/example/ios/Runner/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/example/ios/Runner/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/example/ios/Runner/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | tflite_example
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | $(FLUTTER_BUILD_NAME)
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | $(FLUTTER_BUILD_NUMBER)
23 | LSRequiresIPhoneOS
24 |
25 | UILaunchStoryboardName
26 | LaunchScreen
27 | UIMainStoryboardFile
28 | Main
29 | UISupportedInterfaceOrientations
30 |
31 | UIInterfaceOrientationPortrait
32 | UIInterfaceOrientationLandscapeLeft
33 | UIInterfaceOrientationLandscapeRight
34 |
35 | UISupportedInterfaceOrientations~ipad
36 |
37 | UIInterfaceOrientationPortrait
38 | UIInterfaceOrientationPortraitUpsideDown
39 | UIInterfaceOrientationLandscapeLeft
40 | UIInterfaceOrientationLandscapeRight
41 |
42 | UIViewControllerBasedStatusBarAppearance
43 |
44 | NSPhotoLibraryUsageDescription
45 | We need your permission to access photo gallery
46 | NSCameraUsageDescription
47 | We need your permission to use phone camera
48 | NSMicrophoneUsageDescription
49 | We need your permission to use microphone
50 |
51 |
52 |
--------------------------------------------------------------------------------
/example/ios/Runner/main.m:
--------------------------------------------------------------------------------
// NOTE: the two framework imports below were blank (invalid `#import` with no
// header) — restored to the standard Flutter template headers.
#import <Flutter/Flutter.h>
#import <UIKit/UIKit.h>
#import "AppDelegate.h"

// Standard iOS entry point: hands control to UIKit with AppDelegate as the
// application delegate class.
int main(int argc, char* argv[]) {
  @autoreleasepool {
    return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
  }
}
10 |
--------------------------------------------------------------------------------
/example/lib/main.dart:
--------------------------------------------------------------------------------
1 | // ignore_for_file: unnecessary_null_comparison
2 |
3 | import 'dart:async';
4 | import 'dart:io';
5 | import 'dart:math';
6 | import 'dart:typed_data';
7 | import 'package:flutter/material.dart';
8 | import 'package:flutter/services.dart';
9 | import 'package:image_picker/image_picker.dart';
10 | import 'package:tensorflow_lite_flutter/tensorflow_lite_flutter.dart';
11 | import 'package:image/image.dart' as img;
12 |
13 | void main() => runApp(new App());
14 |
15 | const String mobile = "MobileNet";
16 | const String ssd = "SSD MobileNet";
17 | const String yolo = "Tiny YOLOv2";
18 | const String deeplab = "DeepLab";
19 | const String posenet = "PoseNet";
20 |
/// Application root: wraps the demo screen in a [MaterialApp].
class App extends StatelessWidget {
  @override
  Widget build(BuildContext context) => MaterialApp(home: MyApp());
}
29 |
/// Stateful demo screen; all picking/inference logic lives in [_MyAppState].
class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => _MyAppState();
}
34 |
35 | class _MyAppState extends State {
36 | late File _image;
37 | late List _recognitions;
38 | String _model = mobile;
39 | late double _imageHeight;
40 | late double _imageWidth;
41 | bool _busy = false;
42 |
43 | Future predictImagePicker() async {
44 | var imagePicker = ImagePicker();
45 | var image = await imagePicker.pickImage(source: ImageSource.gallery);
46 | if (image == null) return;
47 | setState(() {
48 | _busy = true;
49 | });
50 | predictImage(image as File);
51 | }
52 |
  /// Dispatches [image] to the inference routine for the selected [_model],
  /// records the image's pixel dimensions (used to scale overlays), and
  /// stores the image for display.
  Future predictImage(File image) async {
    switch (_model) {
      case yolo:
        await yolov2Tiny(image);
        break;
      case ssd:
        await ssdMobileNet(image);
        break;
      case deeplab:
        await segmentMobileNet(image);
        break;
      case posenet:
        await poseNet(image);
        break;
      default:
        // MobileNet classification is the fallback.
        await recognizeImage(image);
        // await recognizeImageBinary(image);
    }

    // Resolve the image asynchronously to learn its width/height; the
    // overlay builders (renderBoxes / renderKeypoints) scale by these.
    new FileImage(image)
        .resolve(new ImageConfiguration())
        .addListener(ImageStreamListener((ImageInfo info, bool _) {
      setState(() {
        _imageHeight = info.image.height.toDouble();
        _imageWidth = info.image.width.toDouble();
      });
    }));

    setState(() {
      _image = image;
      _busy = false;
    });
  }
86 |
87 | @override
88 | void initState() {
89 | super.initState();
90 |
91 | _busy = true;
92 |
93 | loadModel().then((val) {
94 | setState(() {
95 | _busy = false;
96 | });
97 | });
98 | }
99 |
  /// Closes any previously loaded interpreter, then loads the TFLite model
  /// (and label file, where the model has one) matching the current [_model]
  /// selection from the bundled assets. Prints the load result, or an error
  /// message if the platform call fails.
  Future loadModel() async {
    // Release the native interpreter from any earlier selection first.
    Tflite.close();
    try {
      String? res;
      switch (_model) {
        case yolo:
          res = await Tflite.loadModel(
            model: "assets/yolov2_tiny.tflite",
            labels: "assets/yolov2_tiny.txt",
            // useGpuDelegate: true,
          );
          break;
        case ssd:
          res = await Tflite.loadModel(
            model: "assets/ssd_mobilenet.tflite",
            labels: "assets/ssd_mobilenet.txt",
            // useGpuDelegate: true,
          );
          break;
        case deeplab:
          res = await Tflite.loadModel(
            model: "assets/deeplabv3_257_mv_gpu.tflite",
            labels: "assets/deeplabv3_257_mv_gpu.txt",
            // useGpuDelegate: true,
          );
          break;
        case posenet:
          // PoseNet has no label file.
          res = await Tflite.loadModel(
            model: "assets/posenet_mv1_075_float_from_checkpoints.tflite",
            // useGpuDelegate: true,
          );
          break;
        default:
          // MobileNet classification is the default model.
          res = await Tflite.loadModel(
            model: "assets/mobilenet_v1_1.0_224.tflite",
            labels: "assets/mobilenet_v1_1.0_224.txt",
            // useGpuDelegate: true,
          );
      }
      print(res);
    } on PlatformException {
      print('Failed to load model.');
    }
  }
144 |
145 | Uint8List imageToByteListFloat32(
146 | img.Image image, int inputSize, double mean, double std) {
147 | var convertedBytes = Float32List(1 * inputSize * inputSize * 3);
148 | var buffer = Float32List.view(convertedBytes.buffer);
149 | int pixelIndex = 0;
150 | for (var i = 0; i < inputSize; i++) {
151 | for (var j = 0; j < inputSize; j++) {
152 | var pixel = image.getPixel(j, i);
153 | buffer[pixelIndex++] = (img.getRed(pixel) - mean) / std;
154 | buffer[pixelIndex++] = (img.getGreen(pixel) - mean) / std;
155 | buffer[pixelIndex++] = (img.getBlue(pixel) - mean) / std;
156 | }
157 | }
158 | return convertedBytes.buffer.asUint8List();
159 | }
160 |
161 | Uint8List imageToByteListUint8(img.Image image, int inputSize) {
162 | var convertedBytes = Uint8List(1 * inputSize * inputSize * 3);
163 | var buffer = Uint8List.view(convertedBytes.buffer);
164 | int pixelIndex = 0;
165 | for (var i = 0; i < inputSize; i++) {
166 | for (var j = 0; j < inputSize; j++) {
167 | var pixel = image.getPixel(j, i);
168 | buffer[pixelIndex++] = img.getRed(pixel);
169 | buffer[pixelIndex++] = img.getGreen(pixel);
170 | buffer[pixelIndex++] = img.getBlue(pixel);
171 | }
172 | }
173 | return convertedBytes.buffer.asUint8List();
174 | }
175 |
176 | Future recognizeImage(File image) async {
177 | int startTime = new DateTime.now().millisecondsSinceEpoch;
178 | var recognitions = await Tflite.runModelOnImage(
179 | path: image.path,
180 | numResults: 6,
181 | threshold: 0.05,
182 | imageMean: 127.5,
183 | imageStd: 127.5,
184 | );
185 | setState(() {
186 | _recognitions = recognitions!;
187 | });
188 | int endTime = new DateTime.now().millisecondsSinceEpoch;
189 | print("Inference took ${endTime - startTime}ms");
190 | }
191 |
192 | Future recognizeImageBinary(File image) async {
193 | int startTime = new DateTime.now().millisecondsSinceEpoch;
194 | var imageBytes = (await rootBundle.load(image.path)).buffer;
195 | img.Image? oriImage = img.decodeJpg(imageBytes.asUint8List());
196 | img.Image resizedImage = img.copyResize(oriImage!, height: 224, width: 224);
197 | var recognitions = await Tflite.runModelOnBinary(
198 | binary: imageToByteListFloat32(resizedImage, 224, 127.5, 127.5),
199 | numResults: 6,
200 | threshold: 0.05,
201 | );
202 | setState(() {
203 | _recognitions = recognitions!;
204 | });
205 | int endTime = new DateTime.now().millisecondsSinceEpoch;
206 | print("Inference took ${endTime - startTime}ms");
207 | }
208 |
  /// Runs Tiny YOLOv2 object detection on [image] and stores the results.
  /// Inputs are normalised with mean 0 and std 255 (i.e. scaled to [0, 1]).
  Future yolov2Tiny(File image) async {
    int startTime = new DateTime.now().millisecondsSinceEpoch;
    var recognitions = await Tflite.detectObjectOnImage(
      path: image.path,
      model: "YOLO",
      threshold: 0.3,
      imageMean: 0.0,
      imageStd: 255.0,
      numResultsPerClass: 1,
    );
    // Alternative path kept for reference: run detection on raw bytes
    // rather than a file path.
    // var imageBytes = (await rootBundle.load(image.path)).buffer;
    // img.Image oriImage = img.decodeJpg(imageBytes.asUint8List());
    // img.Image resizedImage = img.copyResize(oriImage, 416, 416);
    // var recognitions = await Tflite.detectObjectOnBinary(
    //   binary: imageToByteListFloat32(resizedImage, 416, 0.0, 255.0),
    //   model: "YOLO",
    //   threshold: 0.3,
    //   numResultsPerClass: 1,
    // );
    setState(() {
      _recognitions = recognitions!;
    });
    int endTime = new DateTime.now().millisecondsSinceEpoch;
    print("Inference took ${endTime - startTime}ms");
  }
234 |
  /// Runs SSD MobileNet object detection on [image] (the plugin's default
  /// detection model) and stores the results.
  Future ssdMobileNet(File image) async {
    int startTime = new DateTime.now().millisecondsSinceEpoch;
    var recognitions = await Tflite.detectObjectOnImage(
      path: image.path,
      numResultsPerClass: 1,
    );
    // Alternative path kept for reference: run detection on raw bytes
    // rather than a file path.
    // var imageBytes = (await rootBundle.load(image.path)).buffer;
    // img.Image oriImage = img.decodeJpg(imageBytes.asUint8List());
    // img.Image resizedImage = img.copyResize(oriImage, 300, 300);
    // var recognitions = await Tflite.detectObjectOnBinary(
    //   binary: imageToByteListUint8(resizedImage, 300),
    //   numResultsPerClass: 1,
    // );
    setState(() {
      _recognitions = recognitions!;
    });
    int endTime = new DateTime.now().millisecondsSinceEpoch;
    print("Inference took ${endTime - startTime}ms");
  }
254 |
255 | Future segmentMobileNet(File image) async {
256 | int startTime = new DateTime.now().millisecondsSinceEpoch;
257 | var recognitions = await Tflite.runSegmentationOnImage(
258 | path: image.path,
259 | imageMean: 127.5,
260 | imageStd: 127.5,
261 | );
262 |
263 | setState(() {
264 | _recognitions = recognitions!;
265 | });
266 | int endTime = new DateTime.now().millisecondsSinceEpoch;
267 | print("Inference took ${endTime - startTime}");
268 | }
269 |
270 | Future poseNet(File image) async {
271 | int startTime = new DateTime.now().millisecondsSinceEpoch;
272 | var recognitions = await Tflite.runPoseNetOnImage(
273 | path: image.path,
274 | numResults: 2,
275 | );
276 |
277 | print(recognitions);
278 |
279 | setState(() {
280 | _recognitions = recognitions!;
281 | });
282 | int endTime = new DateTime.now().millisecondsSinceEpoch;
283 | print("Inference took ${endTime - startTime}ms");
284 | }
285 |
  /// Handles a model selection from the menu: clears old results, reloads
  /// the interpreter for [model], then re-runs inference on the current
  /// image.
  onSelect(model) async {
    setState(() {
      _busy = true;
      _model = model;
      _recognitions = [];
    });
    await loadModel();

    // NOTE(review): _image is declared `late`; selecting a model before any
    // image has been picked makes this throw LateInitializationError —
    // consider making _image nullable and guarding here. TODO confirm.
    predictImage(_image);
  }
296 |
/// Builds one positioned, labelled bounding box per entry in [_recognitions],
/// scaling the detector's normalized (0-1) rect to [screen] coordinates.
List renderBoxes(Size screen) {
  final double scaleX = screen.width;
  final double scaleY = _imageHeight / _imageWidth * screen.width;
  final Color boxColor = Color.fromRGBO(37, 213, 253, 1.0);
  final boxes = [];
  for (final det in _recognitions) {
    boxes.add(Positioned(
      left: det["rect"]["x"] * scaleX,
      top: det["rect"]["y"] * scaleY,
      width: det["rect"]["w"] * scaleX,
      height: det["rect"]["h"] * scaleY,
      child: Container(
        decoration: BoxDecoration(
          borderRadius: BorderRadius.all(Radius.circular(8.0)),
          border: Border.all(
            color: boxColor,
            width: 2,
          ),
        ),
        child: Text(
          "${det["detectedClass"]} ${(det["confidenceInClass"] * 100).toStringAsFixed(0)}%",
          style: TextStyle(
            background: Paint()..color = boxColor,
            color: Colors.white,
            fontSize: 12.0,
          ),
        ),
      ),
    ));
  }
  return boxes;
}
327 |
/// Builds a positioned "● part-name" label for every keypoint of every
/// detected pose, using one random color per pose.
List renderKeypoints(Size screen) {
  double factorX = screen.width;
  double factorY = _imageHeight / _imageWidth * screen.width;

  var lists = [];
  _recognitions.forEach((re) {
    // Random 24-bit RGB; withOpacity(1.0) supplies the alpha channel the
    // bare 24-bit value lacks. (Fix: removed a no-op "<< 0" shift.)
    var color =
        Color((Random().nextDouble() * 0xFFFFFF).toInt()).withOpacity(1.0);
    var list = re["keypoints"].values.map((k) {
      return Positioned(
        left: k["x"] * factorX - 6,
        top: k["y"] * factorY - 6,
        width: 100,
        height: 12,
        child: Text(
          "● ${k["part"]}",
          style: TextStyle(
            color: color,
            fontSize: 12.0,
          ),
        ),
      );
    }).toList();

    // Fix: plain method call — the cascade in `lists..addAll(list)` added
    // nothing (its result was discarded anyway).
    lists.addAll(list);
  });

  return lists;
}
357 |
@override
Widget build(BuildContext context) {
  Size size = MediaQuery.of(context).size;
  // Fix: restored the type argument lost in transit ("List" alone).
  List<Widget> stackChildren = [];

  if (_model == deeplab) {
    // DeepLab: _recognitions holds PNG mask bytes; paint them over a faded
    // copy of the source photo.
    stackChildren.add(Positioned(
      top: 0.0,
      left: 0.0,
      width: size.width,
      child: _image == null
          ? Text('No image selected.')
          : Container(
              decoration: BoxDecoration(
                  image: DecorationImage(
                      alignment: Alignment.topCenter,
                      image: MemoryImage(Uint8List.fromList(
                          _recognitions.map((e) => e as int).toList())),
                      fit: BoxFit.fill)),
              child: Opacity(opacity: 0.3, child: Image.file(_image))),
    ));
  } else {
    stackChildren.add(Positioned(
      top: 0.0,
      left: 0.0,
      width: size.width,
      child: _image == null ? Text('No image selected.') : Image.file(_image),
    ));
  }

  if (_model == mobile) {
    // Classification: one text line per result.
    stackChildren.add(Center(
      child: Column(
        children: _recognitions != null
            ? _recognitions.map((res) {
                return Text(
                  "${res["index"]} - ${res["label"]}: ${res["confidence"].toStringAsFixed(3)}",
                  style: TextStyle(
                    color: Colors.black,
                    fontSize: 20.0,
                    background: Paint()..color = Colors.white,
                  ),
                );
              }).toList()
            : [],
      ),
    ));
  } else if (_model == ssd || _model == yolo) {
    stackChildren.addAll(renderBoxes(size));
  } else if (_model == posenet) {
    stackChildren.addAll(renderKeypoints(size));
  }

  if (_busy) {
    // Modal grey overlay plus spinner while a model loads / inference runs.
    stackChildren.add(const Opacity(
      child: ModalBarrier(dismissible: false, color: Colors.grey),
      opacity: 0.3,
    ));
    stackChildren.add(const Center(child: CircularProgressIndicator()));
  }

  return Scaffold(
    appBar: AppBar(
      title: const Text('tflite example app'),
      actions: [
        PopupMenuButton(
          onSelected: onSelect,
          itemBuilder: (context) {
            // Fix: restored the garbled "List>" to its full generic type.
            List<PopupMenuItem<String>> menuEntries = [
              const PopupMenuItem(
                child: Text(mobile),
                value: mobile,
              ),
              const PopupMenuItem(
                child: Text(ssd),
                value: ssd,
              ),
              const PopupMenuItem(
                child: Text(yolo),
                value: yolo,
              ),
              const PopupMenuItem(
                child: Text(deeplab),
                value: deeplab,
              ),
              const PopupMenuItem(
                child: Text(posenet),
                value: posenet,
              )
            ];
            return menuEntries;
          },
        )
      ],
    ),
    body: Stack(
      children: stackChildren,
    ),
    floatingActionButton: FloatingActionButton(
      onPressed: predictImagePicker,
      tooltip: 'Pick Image',
      child: Icon(Icons.image),
    ),
  );
}
463 | }
464 |
--------------------------------------------------------------------------------
/example/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: tflite_example
2 | description: Demonstrates how to use the tflite plugin.
3 |
4 | # The following defines the version and build number for your application.
5 | # A version number is three numbers separated by dots, like 1.2.43
6 | # followed by an optional build number separated by a +.
7 | # Both the version and the builder number may be overridden in flutter
8 | # build by specifying --build-name and --build-number, respectively.
9 | # Read more about versioning at semver.org.
10 | version: 1.0.0+1
11 |
12 | environment:
13 | sdk: ">=3.0.6-dev.68.0 <4.0.0"
14 |
15 | dependencies:
16 |
17 | # The following adds the Cupertino Icons font to your application.
18 | # Use with the CupertinoIcons class for iOS style icons.
19 | cupertino_icons: ^1.0.2
20 | flutter:
21 | sdk: flutter
22 |
23 | dev_dependencies:
24 | flutter_test:
25 | sdk: flutter
26 | image: ^3.0.1
27 | image_picker: ^1.0.5
28 | test: ^1.12.0
29 | tensorflow_lite_flutter:
30 | path: ../
31 | # For information on the generic Dart part of this file, see the
32 | # following page: https://dart.dev/tools/pub/pubspec
33 | # The following section is specific to Flutter.
34 | flutter:
35 |
36 | # The following line ensures that the Material Icons font is
37 | # included with your application, so that you can use the icons in
38 | # the material Icons class.
39 | uses-material-design: true
40 |
41 | # To add assets to your application, add an assets section, like this:
42 | assets:
43 | - assets/mobilenet_v1_1.0_224.txt
44 | - assets/mobilenet_v1_1.0_224.tflite
45 | - assets/yolov2_tiny.tflite
46 | - assets/yolov2_tiny.txt
47 | - assets/ssd_mobilenet.tflite
48 | - assets/ssd_mobilenet.txt
49 | - assets/deeplabv3_257_mv_gpu.tflite
50 | - assets/deeplabv3_257_mv_gpu.txt
51 | - assets/posenet_mv1_075_float_from_checkpoints.tflite
52 |
--------------------------------------------------------------------------------
/example/test/widget_test.dart:
--------------------------------------------------------------------------------
1 | // This is a basic Flutter widget test.
2 | // To perform an interaction with a widget in your test, use the WidgetTester utility that Flutter
3 | // provides. For example, you can send tap and scroll gestures. You can also use WidgetTester to
4 | // find child widgets in the widget tree, read text, and verify that the values of widget properties
5 | // are correct.
6 |
7 | import 'package:flutter/material.dart';
8 | import 'package:flutter_test/flutter_test.dart';
9 |
10 | import '../lib/main.dart';
11 |
void main() {
  testWidgets('Verify Platform version', (WidgetTester tester) async {
    // Build our app and trigger a frame.
    await tester.pumpWidget(MyApp()); // Fix: dropped the redundant `new`.

    // NOTE(review): this predicate is the stock plugin-template test and
    // expects a Text starting with 'Running on:'; nothing in the example UI
    // visibly renders such a widget — confirm this test still passes.
    expect(
        find.byWidgetPredicate(
          (Widget widget) =>
              widget is Text && widget.data!.startsWith('Running on:'),
        ),
        findsOneWidget);
  });
}
26 |
--------------------------------------------------------------------------------
/example/yolo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/example/yolo.jpg
--------------------------------------------------------------------------------
/ios/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | .vagrant/
3 | .sconsign.dblite
4 | .svn/
5 |
6 | .DS_Store
7 | *.swp
8 | profile
9 |
10 | DerivedData/
11 | build/
12 | GeneratedPluginRegistrant.h
13 | GeneratedPluginRegistrant.m
14 |
15 | .generated/
16 |
17 | *.pbxuser
18 | *.mode1v3
19 | *.mode2v3
20 | *.perspectivev3
21 |
22 | !default.pbxuser
23 | !default.mode1v3
24 | !default.mode2v3
25 | !default.perspectivev3
26 |
27 | xcuserdata
28 |
29 | *.moved-aside
30 |
31 | *.pyc
32 | *sync/
33 | Icon?
34 | .tags*
35 |
36 | /Flutter/Generated.xcconfig
37 |
--------------------------------------------------------------------------------
/ios/Assets/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/draz26648/tensorflow_lite_flutter/fc55c07816f7c35b354bc401c578fee20972a7b1/ios/Assets/.gitkeep
--------------------------------------------------------------------------------
/ios/Classes/TflitePlugin.h:
--------------------------------------------------------------------------------
// Reconstructed: the dump stripped everything inside angle brackets.
// Standard Flutter plugin header: import the Flutter umbrella header and
// declare the plugin as conforming to FlutterPlugin.
#import <Flutter/Flutter.h>

@interface TflitePlugin : NSObject<FlutterPlugin>
@end
5 |
--------------------------------------------------------------------------------
/ios/Classes/ios_image_load.h:
--------------------------------------------------------------------------------
// Reconstructed: the dump stripped angle-bracketed includes and template
// arguments. LoadImageFromFile returns RGBA bytes (see the .mm: 4 channels).
#include <vector>
#include <cstdint>
#import <Foundation/Foundation.h>

// Decodes a PNG/JPEG file into raw RGBA bytes; writes image dimensions and
// channel count through the out-pointers (all set to 0 on failure).
std::vector<uint8_t> LoadImageFromFile(const char* file_name,
                                       int* out_width,
                                       int* out_height,
                                       int* out_channels);

// Re-encodes raw RGBA pixel data as PNG; returns nil on failure.
NSData *CompressImage(NSMutableData*,
                      int width,
                      int height,
                      int bytesPerPixel);
13 |
--------------------------------------------------------------------------------
/ios/Classes/ios_image_load.mm:
--------------------------------------------------------------------------------
#import <UIKit/UIKit.h>
#include "ios_image_load.h"

#include <cassert>
#include <cstdio>
#include <cstdlib>
#include <cstring>

#import <CoreImage/CoreImage.h>
#import <ImageIO/ImageIO.h>
11 |
// Decodes the PNG/JPEG at file_name into raw RGBA bytes via CoreGraphics.
// On success writes width/height/channels through the out-pointers and
// returns the pixel buffer; on failure zeroes them and returns empty.
std::vector<uint8_t> LoadImageFromFile(const char* file_name,
                                       int* out_width, int* out_height,
                                       int* out_channels) {
  FILE* file_handle = fopen(file_name, "rb");
  // Fix: fopen can fail; the original dereferenced a NULL handle.
  if (!file_handle) {
    fprintf(stderr, "Could not open file '%s'\n", file_name);
    *out_width = 0;
    *out_height = 0;
    *out_channels = 0;
    return std::vector<uint8_t>();
  }
  fseek(file_handle, 0, SEEK_END);
  const size_t bytes_in_file = ftell(file_handle);
  fseek(file_handle, 0, SEEK_SET);
  std::vector<uint8_t> file_data(bytes_in_file);
  fread(file_data.data(), 1, bytes_in_file, file_handle);
  fclose(file_handle);

  // No-copy CFData wrapper over file_data; file_data stays alive for the
  // whole function, covering every use of the provider below.
  CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
                                                        bytes_in_file,
                                                        kCFAllocatorNull);
  CGDataProviderRef image_provider = CGDataProviderCreateWithCFData(file_data_ref);

  // Pick the decoder from the file extension.
  const char* suffix = strrchr(file_name, '.');
  if (!suffix || suffix == file_name) {
    suffix = "";
  }
  CGImageRef image;
  if (strcasecmp(suffix, ".png") == 0) {
    image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
                                             kCGRenderingIntentDefault);
  } else if ((strcasecmp(suffix, ".jpg") == 0) ||
             (strcasecmp(suffix, ".jpeg") == 0)) {
    image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
                                              kCGRenderingIntentDefault);
  } else {
    CFRelease(image_provider);
    CFRelease(file_data_ref);
    fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
    // Fix: the original wrote `out_width = 0; out_height = 0;` — assigning
    // the pointers themselves, leaving the caller's ints uninitialized.
    *out_width = 0;
    *out_height = 0;
    *out_channels = 0;
    return std::vector<uint8_t>();
  }

  int width = (int)CGImageGetWidth(image);
  int height = (int)CGImageGetHeight(image);
  const int channels = 4;
  CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
  const int bytes_per_row = (width * channels);
  const int bytes_in_image = (bytes_per_row * height);
  std::vector<uint8_t> result(bytes_in_image);
  const int bits_per_component = 8;

  // Render the decoded image into an RGBA8888 bitmap backed by `result`.
  CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
      bits_per_component, bytes_per_row, color_space,
      kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
  CGColorSpaceRelease(color_space);
  CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
  CGContextRelease(context);
  CFRelease(image);
  CFRelease(image_provider);
  CFRelease(file_data_ref);

  *out_width = width;
  *out_height = height;
  *out_channels = channels;
  return result;
}
74 |
// Re-encodes raw 4-channel pixel data as PNG via a CGBitmapContext.
// Returns nil if any CoreGraphics object cannot be created.
//
// NOTE(review): despite its name, `bytesPerPixel` is used as bytes PER
// COMPONENT — it is multiplied by 8 for bits-per-component and by
// width*channels for the row stride; bytesPerPixel == 4 selects
// float-component pixels (model output), otherwise 8-bit big-endian RGBA.
// Confirm callers pass it with that meaning.
NSData *CompressImage(NSMutableData *image, int width, int height, int bytesPerPixel) {
  const int channels = 4;
  CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
  CGContextRef context = CGBitmapContextCreate([image mutableBytes], width, height,
      bytesPerPixel*8, width*channels*bytesPerPixel, color_space,
      kCGImageAlphaPremultipliedLast | (bytesPerPixel == 4 ? kCGBitmapFloatComponents : kCGBitmapByteOrder32Big));
  CGColorSpaceRelease(color_space);
  if (context == nil) return nil;

  // Snapshot the bitmap into an immutable CGImage, then wrap and PNG-encode.
  CGImageRef imgRef = CGBitmapContextCreateImage(context);
  CGContextRelease(context);
  if (imgRef == nil) return nil;

  UIImage* img = [UIImage imageWithCGImage:imgRef];
  CGImageRelease(imgRef);
  if (img == nil) return nil;

  return UIImagePNGRepresentation(img);
}
94 |
--------------------------------------------------------------------------------
/ios/tflite.podspec:
--------------------------------------------------------------------------------
1 | #
2 | # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
3 | #
4 | Pod::Spec.new do |s|
5 | s.name = 'tflite'
6 | s.version = '1.1.2'
7 | s.summary = 'A Flutter plugin for accessing TensorFlow Lite.'
8 | s.description = <<-DESC
9 | A Flutter plugin for accessing TensorFlow Lite. Supports both iOS and Android.
10 | DESC
11 | s.homepage = 'https://github.com/shaqian/flutter_tflite'
12 | s.license = { :file => '../LICENSE' }
13 | s.author = { 'Qian Sha' => 'https://github.com/shaqian' }
14 | s.source = { :path => '.' }
15 | s.source_files = 'Classes/**/*'
16 | s.public_header_files = 'Classes/**/*.h'
17 | s.dependency 'Flutter'
18 | s.dependency 'TensorFlowLiteC'
19 | s.xcconfig = { 'USER_HEADER_SEARCH_PATHS' => '$(inherited) "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/tflite" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/Flutter" "${PODS_ROOT}/Headers/Public/TensorFlowLite/tensorflow_lite" "${PODS_ROOT}/Headers/Public/tflite" "${PODS_ROOT}/TensorFlowLite/Frameworks/tensorflow_lite.framework/Headers" "${PODS_ROOT}/TensorFlowLiteC/Frameworks/TensorFlowLiteC.framework/Headers"' }
20 |
21 | s.ios.deployment_target = '9.0'
22 | s.static_framework = true
23 | end
24 |
25 |
--------------------------------------------------------------------------------
/lib/tensorflow_lite_flutter.dart:
--------------------------------------------------------------------------------
1 | import 'dart:async';
2 | import 'package:flutter/services.dart';
3 |
4 | /// TensorFlow Lite plugin for Flutter
5 | ///
6 | /// This class provides methods to interact with TensorFlow Lite models for
7 | /// various machine learning tasks including image classification, object detection,
8 | /// image-to-image translation, semantic segmentation, and pose estimation.
9 | class Tflite {
10 | /// Method channel for communicating with native code
11 | static const MethodChannel _channel = MethodChannel('tflite');
12 |
/// Loads a TensorFlow Lite model (and optional label file) into memory.
///
/// [model] path to the .tflite model; [labels] optional label-file path;
/// [numThreads] interpreter threads; [isAsset] whether paths are Flutter
/// assets (vs. filesystem paths); [useGpuDelegate] enables GPU acceleration.
///
/// Returns the status message reported by the platform implementation.
/// (Return type reconstructed — the dump had stripped the generic.)
static Future<String?> loadModel({
  required String model,
  String labels = "",
  int numThreads = 1,
  bool isAsset = true,
  bool useGpuDelegate = false,
}) async {
  return await _channel.invokeMethod(
    'loadModel',
    {
      "model": model,
      "labels": labels,
      "numThreads": numThreads,
      "isAsset": isAsset,
      'useGpuDelegate': useGpuDelegate
    },
  );
}
40 |
/// Classifies the image file at [path].
///
/// [imageMean]/[imageStd] normalize pixels; [numResults] caps the result
/// count; [threshold] is the minimum confidence; [asynch] runs inference off
/// the platform thread.
///
/// Each result map carries `index`, `label` (when a label file was loaded)
/// and `confidence` in 0-1.
static Future<List<dynamic>?> runModelOnImage({
  required String path,
  double imageMean = 117.0,
  double imageStd = 1.0,
  int numResults = 5,
  double threshold = 0.1,
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runModelOnImage',
    {
      "path": path,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "numResults": numResults,
      "threshold": threshold,
      "asynch": asynch,
    },
  );
}
74 |
/// Classifies raw image bytes [binary] (pre-processed by the caller).
///
/// [numResults] caps the result count; [threshold] is the minimum
/// confidence; [asynch] runs inference off the platform thread.
///
/// Each result map carries `index`, `label` (when a label file was loaded)
/// and `confidence` in 0-1.
static Future<List<dynamic>?> runModelOnBinary({
  required Uint8List binary,
  int numResults = 5,
  double threshold = 0.1,
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runModelOnBinary',
    {
      "binary": binary,
      "numResults": numResults,
      "threshold": threshold,
      "asynch": asynch,
    },
  );
}
102 |
/// Classifies a camera frame supplied as per-plane byte arrays [bytesList].
///
/// [imageHeight]/[imageWidth] give the frame size; [imageMean]/[imageStd]
/// normalize pixels; [rotation] (degrees, Android only) corrects sensor
/// orientation; [numResults]/[threshold] filter results; [asynch] runs
/// inference off the platform thread.
///
/// Each result map carries `index`, `label` (when a label file was loaded)
/// and `confidence` in 0-1.
static Future<List<dynamic>?> runModelOnFrame({
  required List<Uint8List> bytesList,
  int imageHeight = 1280,
  int imageWidth = 720,
  double imageMean = 127.5,
  double imageStd = 127.5,
  int rotation = 90, // Android only
  int numResults = 5,
  double threshold = 0.1,
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runModelOnFrame',
    {
      "bytesList": bytesList,
      "imageHeight": imageHeight,
      "imageWidth": imageWidth,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "rotation": rotation,
      "numResults": numResults,
      "threshold": threshold,
      "asynch": asynch,
    },
  );
}
145 |
/// Default YOLO anchor values (used when the caller supplies none), defining
/// the default bounding-box shapes at different scales.
/// (Element type reconstructed — the dump had stripped the generic.)
static const List<double> anchors = [
  0.57273,
  0.677385,
  1.87446,
  2.06253,
  3.33843,
  5.47434,
  7.88282,
  3.52778,
  9.77052,
  9.16828
];
162 |
/// Detects objects in the image file at [path] using "SSDMobileNet" or
/// "YOLO" ([model]).
///
/// [imageMean]/[imageStd] normalize pixels; [threshold] is the minimum
/// confidence; [numResultsPerClass] caps results per class; [anchors],
/// [blockSize] and [numBoxesPerBlock] apply to YOLO only; [asynch] runs
/// inference off the platform thread.
///
/// Each result map carries `detectedClass`, `confidenceInClass` (0-1) and
/// `rect` with normalized `x`, `y`, `w`, `h`.
static Future<List<dynamic>?> detectObjectOnImage({
  required String path,
  String model = "SSDMobileNet",
  double imageMean = 127.5,
  double imageStd = 127.5,
  double threshold = 0.1,
  int numResultsPerClass = 5,
  // Used in YOLO only
  List<double> anchors = anchors,
  int blockSize = 32,
  int numBoxesPerBlock = 5,
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'detectObjectOnImage',
    {
      "path": path,
      "model": model,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "threshold": threshold,
      "numResultsPerClass": numResultsPerClass,
      "anchors": anchors,
      "blockSize": blockSize,
      "numBoxesPerBlock": numBoxesPerBlock,
      "asynch": asynch,
    },
  );
}
209 |
/// Detects objects in raw image bytes [binary] using "SSDMobileNet" or
/// "YOLO" ([model]).
///
/// [threshold] is the minimum confidence; [numResultsPerClass] caps results
/// per class; [anchors], [blockSize] and [numBoxesPerBlock] apply to YOLO
/// only; [asynch] runs inference off the platform thread.
///
/// Each result map carries `detectedClass`, `confidenceInClass` (0-1) and
/// `rect` with normalized `x`, `y`, `w`, `h`.
static Future<List<dynamic>?> detectObjectOnBinary({
  required Uint8List binary,
  String model = "SSDMobileNet",
  double threshold = 0.1,
  int numResultsPerClass = 5,
  // Used in YOLO only
  List<double> anchors = anchors,
  int blockSize = 32,
  int numBoxesPerBlock = 5,
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'detectObjectOnBinary',
    {
      "binary": binary,
      "model": model,
      "threshold": threshold,
      "numResultsPerClass": numResultsPerClass,
      "anchors": anchors,
      "blockSize": blockSize,
      "numBoxesPerBlock": numBoxesPerBlock,
      "asynch": asynch,
    },
  );
}
250 |
/// Detects objects in a camera frame (per-plane byte arrays [bytesList])
/// using "SSDMobileNet" or "YOLO" ([model]).
///
/// [imageHeight]/[imageWidth] give the frame size; [imageMean]/[imageStd]
/// normalize pixels; [rotation] (degrees, Android only) corrects sensor
/// orientation; [threshold]/[numResultsPerClass] filter results; [anchors],
/// [blockSize] and [numBoxesPerBlock] apply to YOLO only; [asynch] runs
/// inference off the platform thread.
///
/// Each result map carries `detectedClass`, `confidenceInClass` (0-1) and
/// `rect` with normalized `x`, `y`, `w`, `h`.
static Future<List<dynamic>?> detectObjectOnFrame({
  required List<Uint8List> bytesList,
  String model = "SSDMobileNet",
  int imageHeight = 1280,
  int imageWidth = 720,
  double imageMean = 127.5,
  double imageStd = 127.5,
  double threshold = 0.1,
  int numResultsPerClass = 5,
  int rotation = 90, // Android only
  // Used in YOLO only
  List<double> anchors = anchors,
  int blockSize = 32,
  int numBoxesPerBlock = 5,
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'detectObjectOnFrame',
    {
      "bytesList": bytesList,
      "model": model,
      "imageHeight": imageHeight,
      "imageWidth": imageWidth,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "rotation": rotation,
      "threshold": threshold,
      "numResultsPerClass": numResultsPerClass,
      "anchors": anchors,
      "blockSize": blockSize,
      "numBoxesPerBlock": numBoxesPerBlock,
      "asynch": asynch,
    },
  );
}
306 |
/// Releases the loaded model and its native resources on the platform side.
///
/// Call when inference is finished (e.g. from `dispose`); load a model
/// again with [loadModel] before running further inference.
static Future close() async {
  return await _channel.invokeMethod('close');
}
310 |
/// Runs Pix2Pix image-to-image translation on the image file at [path].
///
/// [imageMean]/[imageStd] normalize pixels; [outputType] selects "png"
/// (displayable PNG bytes) or "raw" (raw RGBA pixels); [asynch] runs
/// inference off the platform thread.
static Future<Uint8List?> runPix2PixOnImage({
  required String path,
  double imageMean = 0,
  double imageStd = 255.0,
  String outputType = "png",
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runPix2PixOnImage',
    {
      "path": path,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "outputType": outputType,
      "asynch": asynch,
    },
  );
}
343 |
/// Runs Pix2Pix image-to-image translation on raw image bytes [binary].
///
/// [outputType] selects "png" (displayable PNG bytes) or "raw" (raw RGBA
/// pixels); [asynch] runs inference off the platform thread.
static Future<Uint8List?> runPix2PixOnBinary({
  required Uint8List binary,
  String outputType = "png",
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runPix2PixOnBinary',
    {
      "binary": binary,
      "outputType": outputType,
      "asynch": asynch,
    },
  );
}
367 |
/// Runs Pix2Pix image-to-image translation on a camera frame (per-plane
/// byte arrays [bytesList]).
///
/// [imageHeight]/[imageWidth] give the frame size; [imageMean]/[imageStd]
/// normalize pixels; [rotation] (degrees, Android only) corrects sensor
/// orientation; [outputType] selects "png" or "raw"; [asynch] runs
/// inference off the platform thread.
static Future<Uint8List?> runPix2PixOnFrame({
  required List<Uint8List> bytesList,
  int imageHeight = 1280,
  int imageWidth = 720,
  double imageMean = 0,
  double imageStd = 255.0,
  int rotation = 90, // Android only
  String outputType = "png",
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runPix2PixOnFrame',
    {
      "bytesList": bytesList,
      "imageHeight": imageHeight,
      "imageWidth": imageWidth,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "rotation": rotation,
      "outputType": outputType,
      "asynch": asynch,
    },
  );
}
406 |
// Palette taken from pytorch-semseg's Pascal VOC loader:
// https://github.com/meetshah1995/pytorch-semseg/blob/master/ptsemseg/loader/pascal_voc_loader.py
/// Default ARGB color per Pascal VOC class, used to visualize segmentation
/// output. (Element type reconstructed — the dump had stripped the generic.)
static final List<int> pascalVOCLabelColors = [
  Color.fromARGB(255, 0, 0, 0).value, // background
  Color.fromARGB(255, 128, 0, 0).value, // aeroplane
  Color.fromARGB(255, 0, 128, 0).value, // bicycle
  Color.fromARGB(255, 128, 128, 0).value, // bird
  Color.fromARGB(255, 0, 0, 128).value, // boat
  Color.fromARGB(255, 128, 0, 128).value, // bottle
  Color.fromARGB(255, 0, 128, 128).value, // bus
  Color.fromARGB(255, 128, 128, 128).value, // car
  Color.fromARGB(255, 64, 0, 0).value, // cat
  Color.fromARGB(255, 192, 0, 0).value, // chair
  Color.fromARGB(255, 64, 128, 0).value, // cow
  Color.fromARGB(255, 192, 128, 0).value, // diningtable
  Color.fromARGB(255, 64, 0, 128).value, // dog
  Color.fromARGB(255, 192, 0, 128).value, // horse
  Color.fromARGB(255, 64, 128, 128).value, // motorbike
  Color.fromARGB(255, 192, 128, 128).value, // person
  Color.fromARGB(255, 0, 64, 0).value, // potted plant
  Color.fromARGB(255, 128, 64, 0).value, // sheep
  Color.fromARGB(255, 0, 192, 0).value, // sofa
  Color.fromARGB(255, 128, 192, 0).value, // train
  Color.fromARGB(255, 0, 64, 128).value, // tv-monitor
];
435 |
/// Runs semantic segmentation on the image file at [path], assigning each
/// pixel a class.
///
/// [imageMean]/[imageStd] normalize pixels; [labelColors] maps class index
/// to ARGB color (defaults to [pascalVOCLabelColors]); [outputType] selects
/// "png" (colored mask PNG) or "raw" (raw class/pixel data); [asynch] runs
/// inference off the platform thread.
static Future<Uint8List?> runSegmentationOnImage({
  required String path,
  double imageMean = 0,
  double imageStd = 255.0,
  List<int>? labelColors,
  String outputType = "png",
  bool asynch = true,
}) async {
  return await _channel.invokeMethod(
    'runSegmentationOnImage',
    {
      "path": path,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "labelColors": labelColors ?? pascalVOCLabelColors,
      "outputType": outputType,
      "asynch": asynch,
    },
  );
}
471 |
/// Runs semantic segmentation on binary image data.
///
/// [binary] - Binary image data (required)
/// [labelColors] - ARGB color ints used for visualization
///   (default: [pascalVOCLabelColors])
/// [outputType] - Output format, either "png" or "raw" (default: "png")
/// [asynch] - Whether to run inference asynchronously (default: true)
///
/// Returns a [Uint8List] containing:
/// - If [outputType] is "png": PNG image data with colored segmentation mask
/// - If [outputType] is "raw": Raw pixel data with class indices
/// Returns null if the platform side produces no result.
static Future<Uint8List?> runSegmentationOnBinary({
  required Uint8List binary,
  List<int>? labelColors,
  String outputType = "png",
  bool asynch = true,
}) async {
  // Typed invokeMethod matches the documented Uint8List result instead of
  // returning dynamic.
  return await _channel.invokeMethod<Uint8List>(
    'runSegmentationOnBinary',
    {
      "binary": binary,
      "labelColors": labelColors ?? pascalVOCLabelColors,
      "outputType": outputType,
      "asynch": asynch,
    },
  );
}
498 |
/// Runs semantic segmentation on camera frame data.
///
/// [bytesList] - List of byte arrays from camera planes (required)
/// [imageHeight] - Height of the image (default: 1280)
/// [imageWidth] - Width of the image (default: 720)
/// [imageMean] - Mean normalization value (default: 0)
/// [imageStd] - Standard deviation normalization value (default: 255.0)
/// [rotation] - Rotation of the image in degrees, Android only (default: 90)
/// [labelColors] - ARGB color ints used for visualization
///   (default: [pascalVOCLabelColors])
/// [outputType] - Output format, either "png" or "raw" (default: "png")
/// [asynch] - Whether to run inference asynchronously (default: true)
///
/// Returns a [Uint8List] containing:
/// - If [outputType] is "png": PNG image data with colored segmentation mask
/// - If [outputType] is "raw": Raw pixel data with class indices
/// Returns null if the platform side produces no result.
static Future<Uint8List?> runSegmentationOnFrame({
  required List<Uint8List> bytesList,
  int imageHeight = 1280,
  int imageWidth = 720,
  double imageMean = 0,
  double imageStd = 255.0,
  int rotation = 90, // Android only
  List<int>? labelColors,
  String outputType = "png",
  bool asynch = true,
}) async {
  // Typed invokeMethod matches the documented Uint8List result instead of
  // returning dynamic.
  return await _channel.invokeMethod<Uint8List>(
    'runSegmentationOnFrame',
    {
      "bytesList": bytesList,
      "imageHeight": imageHeight,
      "imageWidth": imageWidth,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "rotation": rotation,
      "labelColors": labelColors ?? pascalVOCLabelColors,
      "outputType": outputType,
      "asynch": asynch,
    },
  );
}
540 |
/// Runs PoseNet human pose estimation on an image file.
///
/// PoseNet detects human figures in images and estimates the pose by finding
/// body keypoints (e.g., nose, eyes, ears, shoulders, elbows, wrists, etc.).
///
/// [path] - Path to the image file (required)
/// [imageMean] - Mean normalization value (default: 127.5)
/// [imageStd] - Standard deviation normalization value (default: 127.5)
/// [numResults] - Maximum number of pose results to return (default: 5)
/// [threshold] - Minimum confidence threshold for keypoints (default: 0.5)
/// [nmsRadius] - Non-maximum suppression radius (default: 20)
/// [asynch] - Whether to run inference asynchronously (default: true)
///
/// Returns a list of detected poses, each containing:
/// - score: Overall confidence score for the pose
/// - keypoints: Map of keypoint positions and confidence scores
///   Each keypoint contains x, y (normalized 0-1), part name, and confidence
///   score.
/// Returns null if the platform side produces no result.
static Future<List<dynamic>?> runPoseNetOnImage({
  required String path,
  double imageMean = 127.5,
  double imageStd = 127.5,
  int numResults = 5,
  double threshold = 0.5,
  int nmsRadius = 20,
  bool asynch = true,
}) async {
  // Typed invokeMethod matches the documented list-of-poses result instead of
  // returning dynamic.
  return await _channel.invokeMethod<List<dynamic>>(
    'runPoseNetOnImage',
    {
      "path": path,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "numResults": numResults,
      "threshold": threshold,
      "nmsRadius": nmsRadius,
      "asynch": asynch,
    },
  );
}
580 |
/// Runs PoseNet human pose estimation on binary image data.
///
/// [binary] - Binary image data (required)
/// [numResults] - Maximum number of pose results to return (default: 5)
/// [threshold] - Minimum confidence threshold for keypoints (default: 0.5)
/// [nmsRadius] - Non-maximum suppression radius (default: 20)
/// [asynch] - Whether to run inference asynchronously (default: true)
///
/// Returns a list of detected poses, each containing:
/// - score: Overall confidence score for the pose
/// - keypoints: Map of keypoint positions and confidence scores
///   Each keypoint contains x, y (normalized 0-1), part name, and confidence
///   score.
/// Returns null if the platform side produces no result.
static Future<List<dynamic>?> runPoseNetOnBinary({
  required Uint8List binary,
  int numResults = 5,
  double threshold = 0.5,
  int nmsRadius = 20,
  bool asynch = true,
}) async {
  // Typed invokeMethod matches the documented list-of-poses result instead of
  // returning dynamic.
  return await _channel.invokeMethod<List<dynamic>>(
    'runPoseNetOnBinary',
    {
      "binary": binary,
      "numResults": numResults,
      "threshold": threshold,
      "nmsRadius": nmsRadius,
      "asynch": asynch,
    },
  );
}
611 |
/// Runs PoseNet human pose estimation on camera frame data.
///
/// [bytesList] - List of byte arrays from camera planes (required)
/// [imageHeight] - Height of the image (default: 1280)
/// [imageWidth] - Width of the image (default: 720)
/// [imageMean] - Mean normalization value (default: 127.5)
/// [imageStd] - Standard deviation normalization value (default: 127.5)
/// [rotation] - Rotation of the image in degrees, Android only (default: 90)
/// [numResults] - Maximum number of pose results to return (default: 5)
/// [threshold] - Minimum confidence threshold for keypoints (default: 0.5)
/// [nmsRadius] - Non-maximum suppression radius (default: 20)
/// [asynch] - Whether to run inference asynchronously (default: true)
///
/// Returns a list of detected poses, each containing:
/// - score: Overall confidence score for the pose
/// - keypoints: Map of keypoint positions and confidence scores
///   Each keypoint contains x, y (normalized 0-1), part name, and confidence
///   score.
/// Returns null if the platform side produces no result.
static Future<List<dynamic>?> runPoseNetOnFrame({
  required List<Uint8List> bytesList,
  int imageHeight = 1280,
  int imageWidth = 720,
  double imageMean = 127.5,
  double imageStd = 127.5,
  int rotation = 90, // Android only
  int numResults = 5,
  double threshold = 0.5,
  int nmsRadius = 20,
  bool asynch = true,
}) async {
  // Typed invokeMethod matches the documented list-of-poses result instead of
  // returning dynamic.
  return await _channel.invokeMethod<List<dynamic>>(
    'runPoseNetOnFrame',
    {
      "bytesList": bytesList,
      "imageHeight": imageHeight,
      "imageWidth": imageWidth,
      "imageMean": imageMean,
      "imageStd": imageStd,
      "rotation": rotation,
      "numResults": numResults,
      "threshold": threshold,
      "nmsRadius": nmsRadius,
      "asynch": asynch,
    },
  );
}
657 | }
658 |
--------------------------------------------------------------------------------
/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: tensorflow_lite_flutter
2 | description: A Flutter plugin for accessing TensorFlow Lite. Supports both iOS and Android.
3 | version: 3.0.0
4 | homepage: https://github.com/draz26648/tflite_edited
5 | repository: https://github.com/draz26648/tflite_edited
6 | issue_tracker: https://github.com/draz26648/tflite_edited/issues
7 |
8 | environment:
9 | sdk: '>=3.2.0 <4.0.0'
10 | flutter: ">=3.16.0"
11 |
12 | dependencies:
13 | flutter:
14 | sdk: flutter
15 | meta: ^1.10.0
16 |
17 | dev_dependencies:
18 | flutter_test:
19 | sdk: flutter
20 | flutter_lints: ^3.0.1
21 | test: ^1.24.9
22 |
23 | # The following section is specific to Flutter.
24 | flutter:
25 | plugin:
26 | platforms:
27 | android:
28 | package: sq.flutter.tflite
29 | pluginClass: TflitePlugin
30 | ios:
31 | pluginClass: TflitePlugin
32 |
--------------------------------------------------------------------------------
/test/tflite_test.dart:
--------------------------------------------------------------------------------
1 | // ignore_for_file: deprecated_member_use
2 |
3 | import 'package:flutter/services.dart';
4 | import 'package:flutter_test/flutter_test.dart';
5 | import 'package:tensorflow_lite_flutter/tensorflow_lite_flutter.dart';
6 |
/// Verifies the Dart-to-platform contract of each Tflite API: every call must
/// produce exactly one method-channel invocation on the 'tflite' channel with
/// the expected method name and argument map.
void main() {
  TestWidgetsFlutterBinding.ensureInitialized();
  const MethodChannel channel = MethodChannel(
    'tflite',
  );

  // Every platform-channel call recorded during the current test.
  final List<MethodCall> log = <MethodCall>[];

  // Shared fixture builders so call sites and expectations use structurally
  // identical values.
  Uint8List sampleBinary() => Uint8List.fromList(<int>[0, 1, 2]);
  List<Uint8List> samplePlanes() =>
      <Uint8List>[sampleBinary(), sampleBinary()];

  setUp(() async {
    // Use the non-deprecated mock-handler API (channel.setMockMethodCallHandler
    // is deprecated) so the tests keep working on current Flutter SDKs.
    TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger
        .setMockMethodCallHandler(channel, (MethodCall methodCall) async {
      log.add(methodCall);
      return null;
    });
    log.clear();
  });

  test('loadModel', () async {
    await Tflite.loadModel(
      model: 'assets/mobilenet_v1_1.0_224.tflite',
      labels: 'assets/mobilenet_v1_1.0_224.txt',
      numThreads: 2,
      isAsset: false,
      useGpuDelegate: true,
    );
    expect(log, <Matcher>[
      isMethodCall('loadModel', arguments: <String, dynamic>{
        'model': 'assets/mobilenet_v1_1.0_224.tflite',
        'labels': 'assets/mobilenet_v1_1.0_224.txt',
        'numThreads': 2,
        'isAsset': false,
        'useGpuDelegate': true,
      }),
    ]);
  });

  test('runModelOnImage', () async {
    await Tflite.runModelOnImage(
      path: '/image/path',
      imageMean: 127.5,
      imageStd: 0.5,
      numResults: 6,
      threshold: 0.1,
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runModelOnImage', arguments: <String, dynamic>{
        'path': '/image/path',
        'imageMean': 127.5,
        'imageStd': 0.5,
        'numResults': 6,
        'threshold': 0.1,
        'asynch': false,
      }),
    ]);
  });

  test('runModelOnBinary', () async {
    await Tflite.runModelOnBinary(
      binary: sampleBinary(),
      numResults: 15,
      threshold: 0.8,
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runModelOnBinary', arguments: <String, dynamic>{
        'binary': sampleBinary(),
        'numResults': 15,
        'threshold': 0.8,
        'asynch': false,
      }),
    ]);
  });

  test('runModelOnFrame', () async {
    await Tflite.runModelOnFrame(
      bytesList: samplePlanes(),
      imageHeight: 100,
      imageWidth: 200,
      imageMean: 127.5,
      imageStd: 0.5,
      rotation: 30,
      numResults: 10,
      threshold: 0.2,
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runModelOnFrame', arguments: <String, dynamic>{
        'bytesList': samplePlanes(),
        'imageHeight': 100,
        'imageWidth': 200,
        'imageMean': 127.5,
        'imageStd': 0.5,
        'rotation': 30,
        'numResults': 10,
        'threshold': 0.2,
        'asynch': false,
      }),
    ]);
  });

  test('detectObjectOnImage', () async {
    await Tflite.detectObjectOnImage(
      path: '/image/path',
      model: 'YOLO',
      imageMean: 127.5,
      imageStd: 0.5,
      threshold: 0.1,
      numResultsPerClass: 5,
      anchors: [1, 2, 3, 4],
      blockSize: 32,
      numBoxesPerBlock: 5,
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('detectObjectOnImage', arguments: <String, dynamic>{
        'path': '/image/path',
        'model': 'YOLO',
        'imageMean': 127.5,
        'imageStd': 0.5,
        'threshold': 0.1,
        'numResultsPerClass': 5,
        'anchors': [1, 2, 3, 4],
        'blockSize': 32,
        'numBoxesPerBlock': 5,
        'asynch': false,
      }),
    ]);
  });

  test('detectObjectOnBinary', () async {
    await Tflite.detectObjectOnBinary(
      binary: sampleBinary(),
      model: "YOLO",
      threshold: 0.2,
      numResultsPerClass: 10,
      anchors: [1, 2, 3, 4],
      blockSize: 32,
      numBoxesPerBlock: 5,
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('detectObjectOnBinary', arguments: <String, dynamic>{
        'binary': sampleBinary(),
        'model': "YOLO",
        'threshold': 0.2,
        'numResultsPerClass': 10,
        'anchors': [1, 2, 3, 4],
        'blockSize': 32,
        'numBoxesPerBlock': 5,
        'asynch': false,
      }),
    ]);
  });

  test('detectObjectOnFrame', () async {
    await Tflite.detectObjectOnFrame(
      bytesList: samplePlanes(),
      model: "YOLO",
      imageHeight: 100,
      imageWidth: 200,
      imageMean: 127.5,
      imageStd: 0.5,
      rotation: 30,
      threshold: 0.2,
      numResultsPerClass: 10,
      anchors: [1, 2, 3, 4],
      blockSize: 32,
      numBoxesPerBlock: 5,
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('detectObjectOnFrame', arguments: <String, dynamic>{
        'bytesList': samplePlanes(),
        'model': "YOLO",
        'imageHeight': 100,
        'imageWidth': 200,
        'imageMean': 127.5,
        'imageStd': 0.5,
        'rotation': 30,
        'threshold': 0.2,
        'numResultsPerClass': 10,
        'anchors': [1, 2, 3, 4],
        'blockSize': 32,
        'numBoxesPerBlock': 5,
        'asynch': false,
      }),
    ]);
  });

  test('runPix2PixOnImage', () async {
    await Tflite.runPix2PixOnImage(
      path: '/image/path',
      imageMean: 127.5,
      imageStd: 0.5,
      outputType: 'png',
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runPix2PixOnImage', arguments: <String, dynamic>{
        'path': '/image/path',
        'imageMean': 127.5,
        'imageStd': 0.5,
        'outputType': 'png',
        'asynch': false,
      }),
    ]);
  });

  test('runPix2PixOnBinary', () async {
    await Tflite.runPix2PixOnBinary(
      binary: sampleBinary(),
      outputType: 'png',
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runPix2PixOnBinary', arguments: <String, dynamic>{
        'binary': sampleBinary(),
        'outputType': 'png',
        'asynch': false,
      }),
    ]);
  });

  test('runPix2PixOnFrame', () async {
    await Tflite.runPix2PixOnFrame(
      bytesList: samplePlanes(),
      imageHeight: 100,
      imageWidth: 200,
      imageMean: 127.5,
      imageStd: 0.5,
      rotation: 30,
      outputType: 'png',
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runPix2PixOnFrame', arguments: <String, dynamic>{
        'bytesList': samplePlanes(),
        'imageHeight': 100,
        'imageWidth': 200,
        'imageMean': 127.5,
        'imageStd': 0.5,
        'rotation': 30,
        'outputType': 'png',
        'asynch': false,
      }),
    ]);
  });

  test('runSegmentationOnImage', () async {
    await Tflite.runSegmentationOnImage(
      path: '/image/path',
      imageMean: 127.5,
      imageStd: 0.5,
      labelColors: [1, 2, 3],
      outputType: 'png',
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runSegmentationOnImage', arguments: <String, dynamic>{
        'path': '/image/path',
        'imageMean': 127.5,
        'imageStd': 0.5,
        'labelColors': [1, 2, 3],
        'outputType': 'png',
        'asynch': false,
      }),
    ]);
  });

  test('runSegmentationOnBinary', () async {
    await Tflite.runSegmentationOnBinary(
      binary: sampleBinary(),
      labelColors: [1, 2, 3],
      outputType: 'png',
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runSegmentationOnBinary', arguments: <String, dynamic>{
        'binary': sampleBinary(),
        'labelColors': [1, 2, 3],
        'outputType': 'png',
        'asynch': false,
      }),
    ]);
  });

  test('runSegmentationOnFrame', () async {
    await Tflite.runSegmentationOnFrame(
      bytesList: samplePlanes(),
      imageHeight: 100,
      imageWidth: 200,
      imageMean: 127.5,
      imageStd: 0.5,
      rotation: 30,
      labelColors: [1, 2, 3],
      outputType: 'png',
      asynch: false,
    );
    expect(log, <Matcher>[
      isMethodCall('runSegmentationOnFrame', arguments: <String, dynamic>{
        'bytesList': samplePlanes(),
        'imageHeight': 100,
        'imageWidth': 200,
        'imageMean': 127.5,
        'imageStd': 0.5,
        'rotation': 30,
        'labelColors': [1, 2, 3],
        'outputType': 'png',
        'asynch': false,
      }),
    ]);
  });
}
571 |
--------------------------------------------------------------------------------