├── ios ├── Assets │ └── .gitkeep ├── Classes │ ├── FLTGoogleMlVisionPlugin.h │ ├── ImageLabeler.m │ ├── TextRecognizer.m │ ├── BarcodeDetector.m │ ├── FLTGoogleMlVisionPlugin.m │ └── FaceDetector.m └── google_ml_vision.podspec ├── android ├── gradle.properties ├── settings.gradle ├── src │ └── main │ │ ├── AndroidManifest.xml │ │ └── java │ │ └── com │ │ └── brianmtully │ │ └── flutter │ │ └── plugins │ │ └── googlemlvision │ │ ├── Detector.java │ │ ├── GoogleMlVisionPlugin.java │ │ ├── GMLKImageLabeler.java │ │ ├── GMLKTextRecognizer.java │ │ ├── GoogleMlVisionHandler.java │ │ ├── GMLKFaceDetector.java │ │ └── GMLKBarcodeDetector.java ├── user-agent.gradle └── build.gradle ├── example ├── android │ ├── settings_aar.gradle │ ├── gradle.properties │ ├── app │ │ ├── src │ │ │ ├── main │ │ │ │ ├── res │ │ │ │ │ ├── mipmap-hdpi │ │ │ │ │ │ └── ic_launcher.png │ │ │ │ │ ├── mipmap-mdpi │ │ │ │ │ │ └── ic_launcher.png │ │ │ │ │ ├── mipmap-xhdpi │ │ │ │ │ │ └── ic_launcher.png │ │ │ │ │ ├── mipmap-xxhdpi │ │ │ │ │ │ └── ic_launcher.png │ │ │ │ │ ├── mipmap-xxxhdpi │ │ │ │ │ │ └── ic_launcher.png │ │ │ │ │ ├── values │ │ │ │ │ │ └── styles.xml │ │ │ │ │ └── drawable │ │ │ │ │ │ └── launch_background.xml │ │ │ │ ├── java │ │ │ │ │ └── com │ │ │ │ │ │ └── brianmtully │ │ │ │ │ │ └── flutter │ │ │ │ │ │ └── plugins │ │ │ │ │ │ └── googlemlvisionexample │ │ │ │ │ │ └── MainActivity.java │ │ │ │ └── AndroidManifest.xml │ │ │ └── androidTest │ │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── brianmtully │ │ │ │ └── flutter │ │ │ │ └── plugins │ │ │ │ └── googlemlvision │ │ │ │ └── MainActivityTest.java │ │ ├── gradle │ │ │ └── wrapper │ │ │ │ └── gradle-wrapper.properties │ │ └── build.gradle │ ├── gradle │ │ └── wrapper │ │ │ └── gradle-wrapper.properties │ ├── settings.gradle │ └── build.gradle ├── assets │ ├── span_book.jpg │ ├── test_face.jpg │ ├── test_text.png │ ├── test_barcode.jpg │ ├── test_contact_barcode.jpg │ └── test_driver_license_barcode.png ├── ios │ ├── Flutter │ │ 
├── Debug.xcconfig │ │ ├── Release.xcconfig │ │ └── AppFrameworkInfo.plist │ ├── Runner │ │ ├── AppDelegate.h │ │ ├── Assets.xcassets │ │ │ ├── LaunchImage.imageset │ │ │ │ ├── LaunchImage.png │ │ │ │ ├── LaunchImage@2x.png │ │ │ │ ├── LaunchImage@3x.png │ │ │ │ ├── README.md │ │ │ │ └── Contents.json │ │ │ └── AppIcon.appiconset │ │ │ │ ├── Icon-App-20x20@1x.png │ │ │ │ ├── Icon-App-20x20@2x.png │ │ │ │ ├── Icon-App-20x20@3x.png │ │ │ │ ├── Icon-App-29x29@1x.png │ │ │ │ ├── Icon-App-29x29@2x.png │ │ │ │ ├── Icon-App-29x29@3x.png │ │ │ │ ├── Icon-App-40x40@1x.png │ │ │ │ ├── Icon-App-40x40@2x.png │ │ │ │ ├── Icon-App-40x40@3x.png │ │ │ │ ├── Icon-App-60x60@2x.png │ │ │ │ ├── Icon-App-60x60@3x.png │ │ │ │ ├── Icon-App-76x76@1x.png │ │ │ │ ├── Icon-App-76x76@2x.png │ │ │ │ ├── Icon-App-1024x1024@1x.png │ │ │ │ ├── Icon-App-83.5x83.5@2x.png │ │ │ │ └── Contents.json │ │ ├── main.m │ │ ├── AppDelegate.m │ │ ├── GoogleService-Info.plist │ │ ├── Base.lproj │ │ │ ├── Main.storyboard │ │ │ └── LaunchScreen.storyboard │ │ └── Info.plist │ ├── Runner.xcodeproj │ │ ├── project.xcworkspace │ │ │ ├── contents.xcworkspacedata │ │ │ └── xcshareddata │ │ │ │ └── IDEWorkspaceChecks.plist │ │ └── xcshareddata │ │ │ └── xcschemes │ │ │ └── Runner.xcscheme │ └── Runner.xcworkspace │ │ ├── contents.xcworkspacedata │ │ └── xcshareddata │ │ └── IDEWorkspaceChecks.plist ├── .metadata ├── analysis_options.yaml ├── lib │ ├── colors.dart │ ├── main.dart │ ├── scanner_utils.dart │ ├── picture_scanner.dart │ ├── camera_preview_scanner.dart │ └── detector_painters.dart ├── test_driver │ ├── google_ml_vision_test.dart │ ├── image_labeler.dart │ ├── text_recognizer.dart │ ├── face_detector.dart │ ├── google_ml_vision.dart │ └── barcode_detector.dart ├── README.md └── pubspec.yaml ├── CHANGELOG.md ├── lib ├── google_ml_vision.dart └── src │ ├── image_labeler.dart │ ├── text_recognizer.dart │ ├── google_vision.dart │ ├── face_detector.dart │ └── barcode_detector.dart ├── pubspec.yaml ├── .gitignore 
├── LICENSE ├── test └── image_labeler_test.dart └── README.md /ios/Assets/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /android/gradle.properties: -------------------------------------------------------------------------------- 1 | org.gradle.jvmargs=-Xmx1536M 2 | -------------------------------------------------------------------------------- /example/android/settings_aar.gradle: -------------------------------------------------------------------------------- 1 | include ':app' 2 | -------------------------------------------------------------------------------- /android/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'google_ml_vision' 2 | -------------------------------------------------------------------------------- /example/assets/span_book.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/assets/span_book.jpg -------------------------------------------------------------------------------- /example/assets/test_face.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/assets/test_face.jpg -------------------------------------------------------------------------------- /example/assets/test_text.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/assets/test_text.png -------------------------------------------------------------------------------- /example/assets/test_barcode.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/assets/test_barcode.jpg -------------------------------------------------------------------------------- /example/ios/Flutter/Debug.xcconfig: -------------------------------------------------------------------------------- 1 | #include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" 2 | #include "Generated.xcconfig" 3 | -------------------------------------------------------------------------------- /example/android/gradle.properties: -------------------------------------------------------------------------------- 1 | org.gradle.jvmargs=-Xmx1536M 2 | android.enableJetifier=true 3 | android.useAndroidX=true 4 | android.enableR8=true 5 | -------------------------------------------------------------------------------- /example/assets/test_contact_barcode.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/assets/test_contact_barcode.jpg -------------------------------------------------------------------------------- /example/ios/Flutter/Release.xcconfig: -------------------------------------------------------------------------------- 1 | #include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" 2 | #include "Generated.xcconfig" 3 | -------------------------------------------------------------------------------- /example/ios/Runner/AppDelegate.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | @interface AppDelegate : FlutterAppDelegate 5 | 6 | @end 7 | -------------------------------------------------------------------------------- /example/assets/test_driver_license_barcode.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/assets/test_driver_license_barcode.png -------------------------------------------------------------------------------- /android/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | -------------------------------------------------------------------------------- /example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brianmtully/flutter_google_ml_vision/HEAD/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png -------------------------------------------------------------------------------- /example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /example/android/app/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | 
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /example/android/app/src/main/java/com/brianmtully/flutter/plugins/googlemlvisionexample/MainActivity.java: -------------------------------------------------------------------------------- 1 | package com.brianmtully.flutter.plugins.googlemlvisionexample; 2 | 3 | import io.flutter.embedding.android.FlutterActivity; 4 | 5 | public class MainActivity extends FlutterActivity {} 6 | -------------------------------------------------------------------------------- /example/ios/Runner/main.m: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #import "AppDelegate.h" 4 | 5 | int main(int argc, char* argv[]) { 6 | @autoreleasepool { 7 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /example/android/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Fri Jun 23 08:50:38 CEST 2017 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip 7 | -------------------------------------------------------------------------------- /example/ios/Runner.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /example/.metadata: -------------------------------------------------------------------------------- 1 | # This file tracks properties of this Flutter 
project. 2 | # Used by Flutter tool to assess capabilities and perform upgrades etc. 3 | # 4 | # This file should be version controlled and should not be manually edited. 5 | 6 | version: 7 | revision: d454629a0646fcbd0f5a8d61998d0ea801d52909 8 | channel: master 9 | -------------------------------------------------------------------------------- /example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md: -------------------------------------------------------------------------------- 1 | # Launch Screen Assets 2 | 3 | You can customize the launch screen with your own desired assets by replacing the image files in this directory. 4 | 5 | You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images. 
-------------------------------------------------------------------------------- /example/android/app/src/main/res/values/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | -------------------------------------------------------------------------------- /example/analysis_options.yaml: -------------------------------------------------------------------------------- 1 | # This is a temporary file to allow us to land a new set of linter rules in a 2 | # series of manageable patches instead of one gigantic PR. It disables some of 3 | # the new lints that are already failing on this plugin, for this plugin. It 4 | # should be deleted and the failing lints addressed as soon as possible. 5 | 6 | include: ../../../analysis_options.yaml 7 | 8 | linter: 9 | rules: 10 | avoid_print: false 11 | -------------------------------------------------------------------------------- /example/lib/colors.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | import 'package:flutter/material.dart'; 8 | 9 | const Color kShrinePink50 = Color(0xFFFEEAE6); 10 | const Color kShrinePink100 = Color(0xFFFEDBD0); 11 | const Color kShrineFrameBrown = Color(0x8A442C2E); 12 | const Color kShrineScrim = Color(0x73442C2E); 13 | -------------------------------------------------------------------------------- /example/ios/Runner/AppDelegate.m: -------------------------------------------------------------------------------- 1 | #include "AppDelegate.h" 2 | #include "GeneratedPluginRegistrant.h" 3 | 4 | @implementation AppDelegate 5 | 6 | - (BOOL)application:(UIApplication *)application 7 | didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 8 | [GeneratedPluginRegistrant registerWithRegistry:self]; 9 | // Override point for customization after application launch. 10 | return [super application:application didFinishLaunchingWithOptions:launchOptions]; 11 | } 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /example/test_driver/google_ml_vision_test.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | // @dart=2.9 6 | 7 | import 'package:flutter_driver/flutter_driver.dart'; 8 | 9 | Future main() async { 10 | final FlutterDriver driver = await FlutterDriver.connect(); 11 | await driver.requestData(null, timeout: const Duration(minutes: 1)); 12 | await driver.close(); 13 | } 14 | -------------------------------------------------------------------------------- /example/README.md: -------------------------------------------------------------------------------- 1 | # google_ml_vision_example 2 | 3 | Demonstrates how to use the google_ml_vision plugin. 
4 | 5 | ## Usage 6 | 7 | This example uses the *image_picker* plugin to get images from the device gallery. If using an iOS 8 | device you will have to configure you project with the correct permissions seen under iOS 9 | configuration [here.](https://pub.dev/packages/image_picker). 10 | 11 | ## Getting Started 12 | 13 | For help getting started with Flutter, view our online 14 | [documentation.](https://flutter.io/) 15 | -------------------------------------------------------------------------------- /example/android/app/src/main/res/drawable/launch_background.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 12 | 13 | -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "LaunchImage.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "filename" : "LaunchImage@2x.png", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "idiom" : "universal", 15 | "filename" : "LaunchImage@3x.png", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "version" : 1, 21 | "author" : "xcode" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /example/android/settings.gradle: -------------------------------------------------------------------------------- 1 | include ':app' 2 | 3 | def flutterProjectRoot = rootProject.projectDir.parentFile.toPath() 4 | 5 | def plugins = new Properties() 6 | def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins') 7 | if (pluginsFile.exists()) { 8 | pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) } 9 | } 10 | 11 | plugins.each { name, path -> 12 | def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile() 13 | include 
":$name" 14 | project(":$name").projectDir = pluginDirectory 15 | } 16 | -------------------------------------------------------------------------------- /example/android/app/src/androidTest/java/com/brianmtully/flutter/plugins/googlemlvision/MainActivityTest.java: -------------------------------------------------------------------------------- 1 | package com.brianmtully.flutter.plugins.googlemlvision; 2 | 3 | import androidx.test.rule.ActivityTestRule; 4 | import dev.flutter.plugins.e2e.FlutterTestRunner; 5 | import com.brianmtully.flutter.plugins.googlemlvisionexample.MainActivity; 6 | import org.junit.Rule; 7 | import org.junit.runner.RunWith; 8 | 9 | @RunWith(FlutterTestRunner.class) 10 | public class MainActivityTest { 11 | @Rule public ActivityTestRule rule = new ActivityTestRule<>(MainActivity.class); 12 | } 13 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## 0.0.1 2 | 3 | * Initial release 4 | 5 | 6 | ## 0.0.3+1 7 | 8 | * Null Safety 9 | 10 | ## 0.0.4 11 | 12 | * Face Detection All Points functionality 13 | * dependency updates 14 | * dart format code 15 | 16 | ## 0.0.4+1 17 | 18 | * Update LICENSE 19 | 20 | ## 0.0.4+2 21 | 22 | * iOS dependency issue 23 | 24 | ## 0.0.5 25 | 26 | * Face Detection AllPoints fix 27 | * Example app changes 28 | 29 | ## 0.0.6 30 | 31 | * Library Version Updates For Firebase Compatibility 32 | 33 | ## 0.0.7 34 | 35 | * fix driver License detection 36 | 37 | ## 0.0.8 38 | 39 | * Updated podspec file -------------------------------------------------------------------------------- /lib/google_ml_vision.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | library google_ml_vision; 6 | 7 | import 'dart:async'; 8 | import 'dart:io'; 9 | import 'dart:typed_data'; 10 | import 'dart:ui'; 11 | 12 | import 'package:flutter/foundation.dart'; 13 | import 'package:flutter/services.dart'; 14 | 15 | part 'src/barcode_detector.dart'; 16 | part 'src/face_detector.dart'; 17 | part 'src/google_vision.dart'; 18 | part 'src/image_labeler.dart'; 19 | part 'src/text_recognizer.dart'; 20 | -------------------------------------------------------------------------------- /android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/Detector.java: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | package com.brianmtully.flutter.plugins.googlemlvision; 6 | 7 | import com.google.mlkit.vision.common.InputImage; 8 | import io.flutter.plugin.common.MethodChannel; 9 | import java.io.IOException; 10 | 11 | public interface Detector { 12 | void handleDetection(final InputImage image, final MethodChannel.Result result); 13 | 14 | void close() throws IOException; 15 | } 16 | -------------------------------------------------------------------------------- /example/android/build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | 3 | 4 | repositories { 5 | google() 6 | jcenter() 7 | } 8 | 9 | dependencies { 10 | classpath 'com.android.tools.build:gradle:3.5.4' 11 | } 12 | } 13 | 14 | allprojects { 15 | repositories { 16 | google() 17 | jcenter() 18 | mavenLocal() 19 | } 20 | } 21 | 22 | rootProject.buildDir = '../build' 23 | subprojects { 24 | project.buildDir = "${rootProject.buildDir}/${project.name}" 25 | } 26 | subprojects { 27 | project.evaluationDependsOn(':app') 28 | } 29 | 30 | task clean(type: Delete) { 31 | delete rootProject.buildDir 32 | } 33 | 
-------------------------------------------------------------------------------- /pubspec.yaml: -------------------------------------------------------------------------------- 1 | name: google_ml_vision 2 | description: Flutter plugin for Google ML Kit on-device vision apis. It includes barcode scanning, image labeling, text recognition and face detection. 3 | homepage: https://github.com/brianmtully/flutter_google_ml_vision 4 | version: 0.0.8 5 | 6 | environment: 7 | sdk: '>=2.12.0 <3.0.0' 8 | flutter: ">=1.20.0" 9 | 10 | dependencies: 11 | flutter: 12 | sdk: flutter 13 | 14 | dev_dependencies: 15 | flutter_driver: 16 | sdk: flutter 17 | flutter_test: 18 | sdk: flutter 19 | pedantic: ^1.8.0 20 | test: any 21 | 22 | flutter: 23 | plugin: 24 | platforms: 25 | android: 26 | package: com.brianmtully.flutter.plugins.googlemlvision 27 | pluginClass: GoogleMlVisionPlugin 28 | ios: 29 | pluginClass: FLTGoogleMlVisionPlugin 30 | 31 | false_secrets: 32 | - 'example/ios/Runner/GoogleService-Info.plist' -------------------------------------------------------------------------------- /ios/Classes/FLTGoogleMlVisionPlugin.h: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | #import 6 | 7 | @import MLKitVision; 8 | 9 | @interface FLTGoogleMlVisionPlugin : NSObject 10 | + (void)handleError:(NSError *)error result:(FlutterResult)result; 11 | @end 12 | 13 | @protocol Detector 14 | @required 15 | - (instancetype)initWithOptions:(NSDictionary *)options; 16 | - (void)handleDetection:(MLKVisionImage *)image result:(FlutterResult)result; 17 | @optional 18 | @end 19 | 20 | @interface BarcodeDetector : NSObject 21 | @end 22 | 23 | @interface FaceDetector : NSObject 24 | @end 25 | 26 | @interface ImageLabeler : NSObject 27 | @end 28 | 29 | @interface TextRecognizer : NSObject 30 | @end 31 | -------------------------------------------------------------------------------- /example/ios/Flutter/AppFrameworkInfo.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | App 9 | CFBundleIdentifier 10 | io.flutter.flutter.app 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | App 15 | CFBundlePackageType 16 | FMWK 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1.0 23 | MinimumOSVersion 24 | 8.0 25 | 26 | 27 | -------------------------------------------------------------------------------- /example/test_driver/image_labeler.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | part of 'google_ml_vision.dart'; 8 | 9 | void imageLabelerTests() { 10 | group('$ImageLabeler', () { 11 | final ImageLabeler labeler = GoogleVision.instance.imageLabeler(); 12 | 13 | test('processImage', () async { 14 | final String tmpFilename = await _loadImage('assets/test_barcode.jpg'); 15 | final GoogleVisionImage visionImage = 16 | GoogleVisionImage.fromFilePath(tmpFilename); 17 | 18 | final List labels = await labeler.processImage(visionImage); 19 | 20 | expect(labels.length, greaterThan(0)); 21 | }); 22 | 23 | test('close', () { 24 | expect(labeler.close(), completes); 25 | }); 26 | }); 27 | } 28 | -------------------------------------------------------------------------------- /example/test_driver/text_recognizer.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | part of 'google_ml_vision.dart'; 8 | 9 | void textRecognizerTests() { 10 | GoogleVisionImage visionImage; 11 | 12 | setUp(() async { 13 | final tmpFilename = await _loadImage('assets/test_text.png'); 14 | visionImage = GoogleVisionImage.fromFilePath(tmpFilename); 15 | }); 16 | 17 | group('$TextRecognizer', () { 18 | final recognizer = GoogleVision.instance.textRecognizer(); 19 | 20 | test('processImage', () async { 21 | final text = await recognizer.processImage(visionImage); 22 | 23 | expect(text.text, 'TEXT'); 24 | }); 25 | 26 | test('close', () { 27 | expect(recognizer.close(), completes); 28 | }); 29 | }); 30 | } 31 | -------------------------------------------------------------------------------- /android/user-agent.gradle: -------------------------------------------------------------------------------- 1 | import java.util.regex.Matcher 2 | import java.util.regex.Pattern 3 | 4 | String libraryVersionName = "UNKNOWN" 5 | String libraryName = "google-ml-vis" 6 | File pubspec = new File(project.projectDir.parentFile, 'pubspec.yaml') 7 | 8 | if (pubspec.exists()) { 9 | String yaml = pubspec.text 10 | // Using \s*['|"]?([^\n|'|"]*)['|"]? to extract version number. 
11 | Matcher versionMatcher = Pattern.compile("^version:\\s*['|\"]?([^\\n|'|\"]*)['|\"]?\$", Pattern.MULTILINE).matcher(yaml) 12 | if (versionMatcher.find()) libraryVersionName = versionMatcher.group(1).replaceAll("\\+", "-") 13 | } 14 | 15 | android { 16 | defaultConfig { 17 | // BuildConfig.VERSION_NAME 18 | buildConfigField 'String', 'LIBRARY_VERSION', "\"${libraryVersionName}\"" 19 | // BuildConfig.LIBRARY_NAME 20 | buildConfigField 'String', 'LIBRARY_NAME', "\"${libraryName}\"" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /example/pubspec.yaml: -------------------------------------------------------------------------------- 1 | name: google_ml_vision_example 2 | description: Demonstrates how to use the google_ml_vision plugin. 3 | author: Brian Tully 4 | 5 | environment: 6 | sdk: ">=2.12.0 <3.0.0" 7 | flutter: ">=1.10.0" 8 | 9 | dependencies: 10 | camera: ^0.8.1 11 | cupertino_icons: ^1.0.2 12 | google_ml_vision: 13 | path: ../ 14 | flutter: 15 | sdk: flutter 16 | image_picker: ^0.6.0 17 | image: ^3.0.2 18 | 19 | dependency_overrides: 20 | archive: ^3.0.0 21 | crypto: ^3.0.0 22 | 23 | dev_dependencies: 24 | e2e: ^0.6.1 25 | flutter_driver: 26 | sdk: flutter 27 | flutter_test: 28 | sdk: flutter 29 | path: ^1.8.0 30 | path_provider: ^2.0.0 31 | pedantic: ^1.8.0 32 | test: any 33 | 34 | flutter: 35 | uses-material-design: true 36 | assets: 37 | - assets/test_face.jpg 38 | - assets/test_barcode.jpg 39 | - assets/test_contact_barcode.jpg 40 | - assets/test_text.png 41 | - assets/span_book.jpg 42 | -------------------------------------------------------------------------------- /example/test_driver/face_detector.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | part of 'google_ml_vision.dart'; 8 | 9 | void faceDetectorTests() { 10 | group('$FaceDetector', () { 11 | final FaceDetector detector = GoogleVision.instance.faceDetector( 12 | const FaceDetectorOptions( 13 | enableContours: true, mode: FaceDetectorMode.accurate), 14 | ); 15 | 16 | test('processImage', () async { 17 | final String tmpFilename = await _loadImage('assets/test_face.jpg'); 18 | final GoogleVisionImage visionImage = 19 | GoogleVisionImage.fromFilePath(tmpFilename); 20 | 21 | final List faces = await detector.processImage(visionImage); 22 | 23 | expect(faces.length, 1); 24 | expect( 25 | faces[0].getContour(FaceContourType.allPoints).positionsList, 26 | isNotEmpty, 27 | ); 28 | }); 29 | 30 | test('close', () { 31 | expect(detector.close(), completes); 32 | }); 33 | }); 34 | } 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .atom/ 3 | .idea/ 4 | .vscode/* 5 | .melos_tool/* 6 | !.vscode/tasks.json 7 | !.vscode/settings.json 8 | 9 | .packages 10 | .pub/ 11 | .dart_tool/ 12 | pubspec.lock 13 | flutter_export_environment.sh 14 | 15 | Podfile.lock 16 | Pods/ 17 | Podfile 18 | .symlinks/ 19 | **/Flutter/ephemeral/ 20 | **/Flutter/Flutter.podspec 21 | **/Flutter/App.framework/ 22 | **/Flutter/Flutter.framework/ 23 | **/Flutter/Generated.xcconfig 24 | **/Flutter/flutter_assets/ 25 | ServiceDefinitions.json 26 | xcuserdata/ 27 | 28 | local.properties 29 | keystore.properties 30 | .gradle/ 31 | gradlew 32 | gradlew.bat 33 | gradle-wrapper.jar 34 | *.iml 35 | 36 | GeneratedPluginRegistrant.h 37 | GeneratedPluginRegistrant.m 38 | GeneratedPluginRegistrant.java 39 | GeneratedPluginRegistrant.swift 40 | generated_plugin_registrant.dart 41 | 42 | build/ 43 | .flutter-plugins 44 | .flutter-plugins-dependencies 45 | 46 | .project 47 | .classpath 48 | .settings 49 | .last_build_id 50 | 51 | # Docs 
52 | 53 | # Dependencies 54 | node_modules 55 | 56 | # Production 57 | website/build 58 | 59 | # Generated files 60 | .docusaurus 61 | .cache-loader 62 | 63 | # Misc 64 | .env.local 65 | .env.development.local 66 | .env.test.local 67 | .env.production.local 68 | 69 | npm-debug.log* 70 | yarn-debug.log* 71 | yarn-error.log* 72 | 73 | firebase-debug.log 74 | firestore-debug.log 75 | ui-debug.log -------------------------------------------------------------------------------- /example/ios/Runner/GoogleService-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | AD_UNIT_ID_FOR_BANNER_TEST 6 | ca-app-pub-3940256099942544/2934735716 7 | AD_UNIT_ID_FOR_INTERSTITIAL_TEST 8 | ca-app-pub-3940256099942544/4411468910 9 | CLIENT_ID 10 | 479882132969-gjp4e63ogu2h6guttj2ie6t3f10ic7i8.apps.googleusercontent.com 11 | REVERSED_CLIENT_ID 12 | com.googleusercontent.apps.479882132969-gjp4e63ogu2h6guttj2ie6t3f10ic7i8 13 | API_KEY 14 | AIzaSyBECOwLTAN6PU4Aet1b2QLGIb3kRK8Xjew 15 | GCM_SENDER_ID 16 | 479882132969 17 | PLIST_VERSION 18 | 1 19 | BUNDLE_ID 20 | com.brianmtully.flutter.plugins.googleMlVisionExample 21 | PROJECT_ID 22 | my-flutter-proj 23 | STORAGE_BUCKET 24 | my-flutter-proj.appspot.com 25 | IS_ADS_ENABLED 26 | 27 | IS_ANALYTICS_ENABLED 28 | 29 | IS_APPINVITE_ENABLED 30 | 31 | IS_GCM_ENABLED 32 | 33 | IS_SIGNIN_ENABLED 34 | 35 | GOOGLE_APP_ID 36 | 1:479882132969:ios:249503bd2f4091a3 37 | DATABASE_URL 38 | https://my-flutter-proj.firebaseio.com 39 | 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Brian M Tully. All rights reserved. 
2 | // 3 | // Redistribution and use in source and binary forms, with or without 4 | // modification, are permitted provided that the following conditions are 5 | // met: 6 | // 7 | // * Redistributions of source code must retain the above copyright 8 | // notice, this list of conditions and the following disclaimer. 9 | // * Redistributions in binary form must reproduce the above 10 | // copyright notice, this list of conditions and the following disclaimer 11 | // in the documentation and/or other materials provided with the 12 | // distribution. 13 | // * Neither the name of Google Inc. nor the names of its 14 | // contributors may be used to endorse or promote products derived from 15 | // this software without specific prior written permission. 16 | // 17 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 18 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 19 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 20 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 21 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /ios/google_ml_vision.podspec: -------------------------------------------------------------------------------- 1 | # 2 | # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html 3 | # 4 | 5 | require 'yaml' 6 | pubspec = YAML.load_file(File.join('..', 'pubspec.yaml')) 7 | libraryVersion = pubspec['version'].gsub('+', '-') 8 | 9 | Pod::Spec.new do |s| 10 | s.name = 'google_ml_vision' 11 | s.version = libraryVersion 12 | s.summary = 'Flutter plugin for Google ML Kit' 13 | s.description = <<-DESC 14 | Plugin for Google ML Kit 15 | DESC 16 | s.homepage = 'https://github.com/brianmtully/flutter_google_ml_vision' 17 | s.license = { :file => '../LICENSE' } 18 | s.author = { 'Brian M Tully' => 'btully1@gmail.com' } 19 | s.source = { :path => '.' } 20 | s.source_files = 'Classes/**/*' 21 | s.public_header_files = 'Classes/**/*.h' 22 | s.dependency 'Flutter' 23 | s.platform = :ios, '11.0' 24 | s.dependency 'GoogleMLKit/BarcodeScanning', '~> 3.2.0' 25 | s.dependency 'GoogleMLKit/FaceDetection', '~> 3.2.0' 26 | s.dependency 'GoogleMLKit/ImageLabeling', '~> 3.2.0' 27 | s.dependency 'GoogleMLKit/TextRecognition', '~> 3.2.0' 28 | # Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported. 
29 | s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' } 30 | # Mobile vision doesn't support 32 bit ios 31 | s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphoneos*]' => 'arm64' } 32 | s.static_framework = true 33 | s.pod_target_xcconfig = { 'GCC_PREPROCESSOR_DEFINITIONS' => "LIBRARY_VERSION=\\@\\\"#{libraryVersion}\\\" LIBRARY_NAME=\\@\\\"google--ml-vis\\\"" } 34 | end 35 | -------------------------------------------------------------------------------- /android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/GoogleMlVisionPlugin.java: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | package com.brianmtully.flutter.plugins.googlemlvision; 5 | 6 | import androidx.annotation.NonNull; 7 | 8 | import io.flutter.embedding.engine.plugins.FlutterPlugin; 9 | import io.flutter.plugin.common.MethodCall; 10 | import io.flutter.plugin.common.MethodChannel; 11 | import io.flutter.plugin.common.MethodChannel.MethodCallHandler; 12 | import io.flutter.plugin.common.MethodChannel.Result; 13 | import io.flutter.plugin.common.PluginRegistry.Registrar; 14 | 15 | /** FlutterPlugindExamplePlugin */ 16 | public class GoogleMlVisionPlugin implements FlutterPlugin { 17 | /// The MethodChannel that will the communication between Flutter and native Android 18 | /// 19 | /// This local reference serves to register the plugin with the Flutter Engine and unregister it 20 | /// when the Flutter Engine is detached from the Activity 21 | private MethodChannel channel; 22 | 23 | @Override 24 | public void onAttachedToEngine(@NonNull FlutterPluginBinding flutterPluginBinding) { 25 | channel = new MethodChannel(flutterPluginBinding.getBinaryMessenger(), 
"plugins.flutter.brianmtully.com/google_ml_vision"); 26 | 27 | channel.setMethodCallHandler(new MlVisionHandler(flutterPluginBinding.getApplicationContext())); 28 | } 29 | 30 | 31 | @Override 32 | public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { 33 | channel.setMethodCallHandler(null); 34 | } 35 | } 36 | 37 | -------------------------------------------------------------------------------- /example/ios/Runner/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /example/test_driver/google_ml_vision.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | import 'dart:async'; 8 | import 'dart:io'; 9 | import 'dart:typed_data'; 10 | 11 | import 'package:flutter/services.dart'; 12 | import 'package:flutter_driver/driver_extension.dart'; 13 | import 'package:flutter_test/flutter_test.dart'; 14 | import 'package:google_ml_vision/google_ml_vision.dart'; 15 | import 'package:path/path.dart' as path; 16 | import 'package:path_provider/path_provider.dart'; 17 | 18 | part 'barcode_detector.dart'; 19 | part 'face_detector.dart'; 20 | part 'image_labeler.dart'; 21 | part 'text_recognizer.dart'; 22 | 23 | void main() { 24 | final Completer completer = Completer(); 25 | enableFlutterDriverExtension(handler: (_) => completer.future); 26 | tearDownAll(() => completer.complete(null)); 27 | 28 | group('$GoogleVision', () { 29 | barcodeDetectorTests(); 30 | faceDetectorTests(); 31 | imageLabelerTests(); 32 | textRecognizerTests(); 33 | }); 34 | } 35 | 36 | int nextHandle = 0; 37 | 38 | // Since there is no way to get the full asset filename, this method loads the 39 | // image into a temporary file. 
40 | Future _loadImage(String assetFilename) async { 41 | final Directory directory = await getTemporaryDirectory(); 42 | 43 | final String tmpFilename = path.join( 44 | directory.path, 45 | 'tmp${nextHandle++}.jpg', 46 | ); 47 | 48 | final ByteData data = await rootBundle.load(assetFilename); 49 | final Uint8List bytes = data.buffer.asUint8List( 50 | data.offsetInBytes, 51 | data.lengthInBytes, 52 | ); 53 | 54 | await File(tmpFilename).writeAsBytes(bytes); 55 | 56 | return tmpFilename; 57 | } 58 | -------------------------------------------------------------------------------- /example/android/app/build.gradle: -------------------------------------------------------------------------------- 1 | def localProperties = new Properties() 2 | def localPropertiesFile = rootProject.file('local.properties') 3 | if (localPropertiesFile.exists()) { 4 | localPropertiesFile.withReader('UTF-8') { reader -> 5 | localProperties.load(reader) 6 | } 7 | } 8 | 9 | def flutterRoot = localProperties.getProperty('flutter.sdk') 10 | if (flutterRoot == null) { 11 | throw new GradleException("Flutter SDK not found. 
Define location with flutter.sdk in the local.properties file.") 12 | } 13 | 14 | def flutterVersionCode = localProperties.getProperty('flutter.versionCode') 15 | if (flutterVersionCode == null) { 16 | flutterVersionCode = '1' 17 | } 18 | 19 | def flutterVersionName = localProperties.getProperty('flutter.versionName') 20 | if (flutterVersionName == null) { 21 | flutterVersionName = '1.0' 22 | } 23 | 24 | 25 | apply plugin: 'com.android.application' 26 | apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" 27 | 28 | android { 29 | compileSdkVersion 29 30 | 31 | lintOptions { 32 | disable 'InvalidPackage' 33 | } 34 | 35 | defaultConfig { 36 | applicationId "com.brianmtully.flutter.plugins.googlemlvisionexample" 37 | minSdkVersion 21 38 | targetSdkVersion 28 39 | versionCode flutterVersionCode.toInteger() 40 | versionName flutterVersionName 41 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" 42 | } 43 | 44 | buildTypes { 45 | release { 46 | signingConfig signingConfigs.debug 47 | } 48 | } 49 | 50 | dependencies { 51 | androidTestImplementation 'androidx.test:runner:1.2.0' 52 | androidTestImplementation 'androidx.test:rules:1.2.0' 53 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' 54 | } 55 | } 56 | 57 | flutter { 58 | source '../..' 59 | } 60 | -------------------------------------------------------------------------------- /ios/Classes/ImageLabeler.m: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | #import "FLTGoogleMlVisionPlugin.h" 6 | 7 | @import MLKitImageLabelingCommon; 8 | @import MLKitImageLabeling; 9 | 10 | @interface ImageLabeler () 11 | @property MLKImageLabeler *labeler; 12 | @end 13 | 14 | @implementation ImageLabeler 15 | - (instancetype)initWithOptions:(NSDictionary *)options { 16 | self = [super init]; 17 | if (self) { 18 | _labeler = [MLKImageLabeler imageLabelerWithOptions:[ImageLabeler parseOptions:options]]; 19 | } 20 | return self; 21 | } 22 | 23 | - (void)handleDetection:(MLKVisionImage *)image result:(FlutterResult)result { 24 | [_labeler processImage:image 25 | completion:^(NSArray *_Nullable labels, NSError *_Nullable error) { 26 | if (error) { 27 | [FLTGoogleMlVisionPlugin handleError:error result:result]; 28 | return; 29 | } else if (!labels) { 30 | result(@[]); 31 | } 32 | 33 | NSMutableArray *labelData = [NSMutableArray array]; 34 | for (MLKImageLabel *label in labels) { 35 | NSDictionary *data = @{ 36 | @"confidence" : @(label.confidence), 37 | @"entityID" : @(label.index), 38 | @"text" : label.text, 39 | }; 40 | [labelData addObject:data]; 41 | } 42 | 43 | result(labelData); 44 | }]; 45 | } 46 | 47 | + (MLKImageLabelerOptions *)parseOptions:(NSDictionary *)optionsData { 48 | NSNumber *conf = optionsData[@"confidenceThreshold"]; 49 | 50 | MLKImageLabelerOptions *options = [MLKImageLabelerOptions new]; 51 | options.confidenceThreshold = conf; 52 | 53 | return options; 54 | } 55 | 56 | @end -------------------------------------------------------------------------------- /example/ios/Runner/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | NSCameraUsageDescription 6 | Can I use the camera please? 
7 | NSPhotoLibraryUsageDescription 8 | 9 | LSApplicationCategoryType 10 | 11 | CFBundleDevelopmentRegion 12 | en 13 | CFBundleExecutable 14 | $(EXECUTABLE_NAME) 15 | CFBundleIdentifier 16 | $(PRODUCT_BUNDLE_IDENTIFIER) 17 | CFBundleInfoDictionaryVersion 18 | 6.0 19 | CFBundleName 20 | google_ml_vision_example 21 | CFBundlePackageType 22 | APPL 23 | CFBundleShortVersionString 24 | 1.0 25 | CFBundleSignature 26 | ???? 27 | CFBundleVersion 28 | 1 29 | LSRequiresIPhoneOS 30 | 31 | UILaunchStoryboardName 32 | LaunchScreen 33 | UIMainStoryboardFile 34 | Main 35 | UISupportedInterfaceOrientations 36 | 37 | UIInterfaceOrientationPortrait 38 | UIInterfaceOrientationLandscapeLeft 39 | UIInterfaceOrientationLandscapeRight 40 | 41 | UISupportedInterfaceOrientations~ipad 42 | 43 | UIInterfaceOrientationPortrait 44 | UIInterfaceOrientationPortraitUpsideDown 45 | UIInterfaceOrientationLandscapeLeft 46 | UIInterfaceOrientationLandscapeRight 47 | 48 | UIViewControllerBasedStatusBarAppearance 49 | 50 | NSBonjourServices 51 | 52 | _dartobservatory._tcp 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /example/lib/main.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | import 'package:flutter/material.dart'; 8 | 9 | import 'camera_preview_scanner.dart'; 10 | import 'material_barcode_scanner.dart'; 11 | import 'picture_scanner.dart'; 12 | 13 | void main() { 14 | runApp( 15 | MaterialApp( 16 | routes: { 17 | '/': (BuildContext context) => _ExampleList(), 18 | '/$PictureScanner': (BuildContext context) => const PictureScanner(), 19 | '/$CameraPreviewScanner': (BuildContext context) => 20 | const CameraPreviewScanner(), 21 | '/$MaterialBarcodeScanner': (BuildContext context) => 22 | const MaterialBarcodeScanner(), 23 | }, 24 | ), 25 | ); 26 | } 27 | 28 | class _ExampleList extends StatefulWidget { 29 | @override 30 | State createState() => _ExampleListState(); 31 | } 32 | 33 | class _ExampleListState extends State<_ExampleList> { 34 | static final List _exampleWidgetNames = [ 35 | '$PictureScanner', 36 | '$CameraPreviewScanner', 37 | '$MaterialBarcodeScanner', 38 | ]; 39 | 40 | @override 41 | Widget build(BuildContext context) { 42 | return Scaffold( 43 | appBar: AppBar( 44 | title: const Text('Example List'), 45 | ), 46 | body: ListView.builder( 47 | itemCount: _exampleWidgetNames.length, 48 | itemBuilder: (BuildContext context, int index) { 49 | final String widgetName = _exampleWidgetNames[index]; 50 | 51 | return Container( 52 | decoration: const BoxDecoration( 53 | border: Border(bottom: BorderSide(color: Colors.grey)), 54 | ), 55 | child: ListTile( 56 | title: Text(widgetName), 57 | onTap: () => Navigator.pushNamed(context, '/$widgetName'), 58 | ), 59 | ); 60 | }, 61 | ), 62 | ); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /example/android/app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | 8 | 9 | 10 | 15 | 18 | 25 | 26 | 27 | 28 | 29 | 30 | 36 | 37 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /android/build.gradle: 
-------------------------------------------------------------------------------- 1 | group 'com.brianmtully.plugins.googlemlvision' 2 | version '0.0.6' 3 | 4 | buildscript { 5 | repositories { 6 | google() 7 | jcenter() 8 | } 9 | 10 | dependencies { 11 | classpath 'com.android.tools.build:gradle:3.5.4' 12 | classpath 'com.google.android.gms:strict-version-matcher-plugin:1.2.1' 13 | } 14 | } 15 | 16 | rootProject.allprojects { 17 | repositories { 18 | google() 19 | jcenter() 20 | } 21 | } 22 | 23 | apply plugin: 'com.android.library' 24 | apply plugin: 'com.google.android.gms.strict-version-matcher-plugin' 25 | 26 | android { 27 | compileSdkVersion 29 28 | 29 | defaultConfig { 30 | minSdkVersion 19 31 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" 32 | } 33 | lintOptions { 34 | disable 'InvalidPackage' 35 | } 36 | dependencies { 37 | implementation 'com.google.mlkit:face-detection:16.1.2' 38 | implementation 'com.google.mlkit:barcode-scanning:16.2.0' 39 | implementation 'com.google.mlkit:image-labeling:17.0.5' 40 | implementation 'com.google.mlkit:object-detection:16.2.6' 41 | implementation 'com.google.android.gms:play-services-mlkit-text-recognition:16.3.0' 42 | } 43 | } 44 | 45 | // TODO(bparrishMines): Remove this hack once androidx.lifecycle is included on stable. 
https://github.com/flutter/flutter/issues/42348 46 | afterEvaluate { 47 | def containsEmbeddingDependencies = false 48 | for (def configuration : configurations.all) { 49 | for (def dependency : configuration.dependencies) { 50 | if (dependency.group == 'com.brianmtully' && 51 | dependency.name.startsWith('flutter_embedding') && 52 | dependency.isTransitive()) 53 | { 54 | containsEmbeddingDependencies = true 55 | break 56 | } 57 | } 58 | } 59 | if (!containsEmbeddingDependencies) { 60 | android { 61 | dependencies { 62 | def lifecycle_version = "1.1.1" 63 | compileOnly "android.arch.lifecycle:runtime:$lifecycle_version" 64 | compileOnly "android.arch.lifecycle:common:$lifecycle_version" 65 | compileOnly "android.arch.lifecycle:common-java8:$lifecycle_version" 66 | } 67 | } 68 | } 69 | } 70 | 71 | apply from: file("./user-agent.gradle") 72 | -------------------------------------------------------------------------------- /example/ios/Runner/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /example/lib/scanner_utils.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | import 'dart:async'; 8 | import 'dart:typed_data'; 9 | import 'dart:ui'; 10 | 11 | import 'package:camera/camera.dart'; 12 | import 'package:google_ml_vision/google_ml_vision.dart'; 13 | import 'package:flutter/foundation.dart'; 14 | 15 | class ScannerUtils { 16 | ScannerUtils._(); 17 | 18 | static Future getCamera(CameraLensDirection dir) async { 19 | return availableCameras().then( 20 | (List cameras) => cameras.firstWhere( 21 | (CameraDescription camera) => camera.lensDirection == dir, 22 | ), 23 | ); 24 | } 25 | 26 | static Future detect({ 27 | @required CameraImage image, 28 | @required Future Function(GoogleVisionImage image) detectInImage, 29 | @required int imageRotation, 30 | }) async { 31 | return detectInImage( 32 | GoogleVisionImage.fromBytes( 33 | _concatenatePlanes(image.planes), 34 | _buildMetaData(image, _rotationIntToImageRotation(imageRotation)), 35 | ), 36 | ); 37 | } 38 | 39 | static Uint8List _concatenatePlanes(List planes) { 40 | final WriteBuffer allBytes = WriteBuffer(); 41 | planes.forEach((Plane plane) => allBytes.putUint8List(plane.bytes)); 42 | return allBytes.done().buffer.asUint8List(); 43 | } 44 | 45 | static GoogleVisionImageMetadata _buildMetaData( 46 | CameraImage image, 47 | ImageRotation rotation, 48 | ) { 49 | return GoogleVisionImageMetadata( 50 | rawFormat: image.format.raw, 51 | size: Size(image.width.toDouble(), image.height.toDouble()), 52 | rotation: rotation, 53 | planeData: image.planes.map( 54 | (Plane plane) { 55 | return GoogleVisionImagePlaneMetadata( 56 | bytesPerRow: plane.bytesPerRow, 57 | height: plane.height, 58 | width: plane.width, 59 | ); 60 | }, 61 | ).toList(), 62 | ); 63 | } 64 | 65 | static ImageRotation _rotationIntToImageRotation(int rotation) { 66 | switch (rotation) { 67 | case 0: 68 | return ImageRotation.rotation0; 69 | case 90: 70 | return ImageRotation.rotation90; 71 | case 180: 72 | return ImageRotation.rotation180; 73 | default: 74 | assert(rotation == 270); 75 | 
return ImageRotation.rotation270; 76 | } 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/GMLKImageLabeler.java: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | package com.brianmtully.flutter.plugins.googlemlvision; 6 | 7 | import androidx.annotation.NonNull; 8 | import com.google.android.gms.tasks.OnFailureListener; 9 | import com.google.android.gms.tasks.OnSuccessListener; 10 | import com.google.mlkit.vision.common.InputImage; 11 | import com.google.mlkit.vision.label.ImageLabel; 12 | import com.google.mlkit.vision.label.ImageLabeling; 13 | import com.google.mlkit.vision.label.ImageLabeler; 14 | import com.google.mlkit.vision.label.defaults.ImageLabelerOptions; 15 | import io.flutter.plugin.common.MethodChannel; 16 | import java.io.IOException; 17 | import java.util.ArrayList; 18 | import java.util.HashMap; 19 | import java.util.List; 20 | import java.util.Map; 21 | import android.util.Log; 22 | 23 | class GMLKImageLabeler implements Detector { 24 | private final ImageLabeler labeler; 25 | 26 | GMLKImageLabeler(Map options) { 27 | labeler = ImageLabeling.getClient(ImageLabelerOptions.DEFAULT_OPTIONS); 28 | } 29 | 30 | @Override 31 | public void handleDetection(final InputImage image, final MethodChannel.Result result) { 32 | labeler 33 | .process(image) 34 | .addOnSuccessListener( 35 | new OnSuccessListener>() { 36 | @Override 37 | public void onSuccess(List visionLabels) { 38 | List> labels = new ArrayList<>(visionLabels.size()); 39 | for (ImageLabel label : visionLabels) { 40 | Map labelData = new HashMap<>(); 41 | labelData.put("confidence", (double) label.getConfidence()); 42 | labelData.put("entityId", 
String.valueOf(label.getIndex())); 43 | labelData.put("text", label.getText()); 44 | 45 | labels.add(labelData); 46 | } 47 | 48 | result.success(labels); 49 | } 50 | }) 51 | .addOnFailureListener( 52 | new OnFailureListener() { 53 | @Override 54 | public void onFailure(@NonNull Exception e) { 55 | result.error("imageLabelerError", e.getLocalizedMessage(), null); 56 | } 57 | }); 58 | } 59 | 60 | @Override 61 | public void close() throws IOException { 62 | labeler.close(); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "size" : "20x20", 5 | "idiom" : "iphone", 6 | "filename" : "Icon-App-20x20@2x.png", 7 | "scale" : "2x" 8 | }, 9 | { 10 | "size" : "20x20", 11 | "idiom" : "iphone", 12 | "filename" : "Icon-App-20x20@3x.png", 13 | "scale" : "3x" 14 | }, 15 | { 16 | "size" : "29x29", 17 | "idiom" : "iphone", 18 | "filename" : "Icon-App-29x29@1x.png", 19 | "scale" : "1x" 20 | }, 21 | { 22 | "size" : "29x29", 23 | "idiom" : "iphone", 24 | "filename" : "Icon-App-29x29@2x.png", 25 | "scale" : "2x" 26 | }, 27 | { 28 | "size" : "29x29", 29 | "idiom" : "iphone", 30 | "filename" : "Icon-App-29x29@3x.png", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "size" : "40x40", 35 | "idiom" : "iphone", 36 | "filename" : "Icon-App-40x40@2x.png", 37 | "scale" : "2x" 38 | }, 39 | { 40 | "size" : "40x40", 41 | "idiom" : "iphone", 42 | "filename" : "Icon-App-40x40@3x.png", 43 | "scale" : "3x" 44 | }, 45 | { 46 | "size" : "60x60", 47 | "idiom" : "iphone", 48 | "filename" : "Icon-App-60x60@2x.png", 49 | "scale" : "2x" 50 | }, 51 | { 52 | "size" : "60x60", 53 | "idiom" : "iphone", 54 | "filename" : "Icon-App-60x60@3x.png", 55 | "scale" : "3x" 56 | }, 57 | { 58 | "size" : "20x20", 59 | "idiom" : "ipad", 60 | "filename" : "Icon-App-20x20@1x.png", 61 | "scale" : "1x" 62 | 
}, 63 | { 64 | "size" : "20x20", 65 | "idiom" : "ipad", 66 | "filename" : "Icon-App-20x20@2x.png", 67 | "scale" : "2x" 68 | }, 69 | { 70 | "size" : "29x29", 71 | "idiom" : "ipad", 72 | "filename" : "Icon-App-29x29@1x.png", 73 | "scale" : "1x" 74 | }, 75 | { 76 | "size" : "29x29", 77 | "idiom" : "ipad", 78 | "filename" : "Icon-App-29x29@2x.png", 79 | "scale" : "2x" 80 | }, 81 | { 82 | "size" : "40x40", 83 | "idiom" : "ipad", 84 | "filename" : "Icon-App-40x40@1x.png", 85 | "scale" : "1x" 86 | }, 87 | { 88 | "size" : "40x40", 89 | "idiom" : "ipad", 90 | "filename" : "Icon-App-40x40@2x.png", 91 | "scale" : "2x" 92 | }, 93 | { 94 | "size" : "76x76", 95 | "idiom" : "ipad", 96 | "filename" : "Icon-App-76x76@1x.png", 97 | "scale" : "1x" 98 | }, 99 | { 100 | "size" : "76x76", 101 | "idiom" : "ipad", 102 | "filename" : "Icon-App-76x76@2x.png", 103 | "scale" : "2x" 104 | }, 105 | { 106 | "size" : "83.5x83.5", 107 | "idiom" : "ipad", 108 | "filename" : "Icon-App-83.5x83.5@2x.png", 109 | "scale" : "2x" 110 | }, 111 | { 112 | "size" : "1024x1024", 113 | "idiom" : "ios-marketing", 114 | "filename" : "Icon-App-1024x1024@1x.png", 115 | "scale" : "1x" 116 | } 117 | ], 118 | "info" : { 119 | "version" : 1, 120 | "author" : "xcode" 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /example/test_driver/barcode_detector.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | part of 'google_ml_vision.dart'; 8 | 9 | void barcodeDetectorTests() { 10 | group('$BarcodeDetector', () { 11 | final BarcodeDetector detector = GoogleVision.instance.barcodeDetector(); 12 | 13 | test('detectInImage with pdf417', () async { 14 | final String tmpFilename = 15 | await _loadImage('assets/test_driver_license_barcode.png'); 16 | final GoogleVisionImage visionImage = 17 | GoogleVisionImage.fromFilePath(tmpFilename); 18 | 19 | final List barcodes = await detector.detectInImage( 20 | visionImage, 21 | ); 22 | final driverLicense = barcodes.first.driverLicense; 23 | 24 | expect(barcodes.length, 1); 25 | expect(driverLicense, isNotNull); 26 | expect(driverLicense, isInstanceOf()); 27 | }); 28 | 29 | test('detectInImage', () async { 30 | final String tmpFilename = await _loadImage('assets/test_barcode.jpg'); 31 | final GoogleVisionImage visionImage = 32 | GoogleVisionImage.fromFilePath(tmpFilename); 33 | 34 | final List barcodes = await detector.detectInImage( 35 | visionImage, 36 | ); 37 | 38 | expect(barcodes.length, 1); 39 | }); 40 | 41 | test('detectInImage contactInfo', () async { 42 | final String tmpFilename = await _loadImage( 43 | 'assets/test_contact_barcode.jpg', 44 | ); 45 | 46 | final GoogleVisionImage visionImage = GoogleVisionImage.fromFilePath( 47 | tmpFilename, 48 | ); 49 | 50 | final BarcodeDetector detector = GoogleVision.instance.barcodeDetector(); 51 | final List barcodes = await detector.detectInImage( 52 | visionImage, 53 | ); 54 | 55 | expect(barcodes, hasLength(1)); 56 | final BarcodeContactInfo info = barcodes[0].contactInfo; 57 | 58 | final BarcodePersonName name = info.name; 59 | expect(name.first, 'John'); 60 | expect(name.last, 'Doe'); 61 | expect(name.formattedName, 'John Doe'); 62 | expect(name.middle, anyOf(isNull, isEmpty)); 63 | expect(name.prefix, anyOf(isNull, isEmpty)); 64 | expect(name.pronunciation, anyOf(isNull, isEmpty)); 65 | expect(name.suffix, anyOf(isNull, isEmpty)); 66 | 67 | 
expect(info.jobTitle, anyOf(isNull, isEmpty)); 68 | expect(info.organization, anyOf(isNull, isEmpty)); 69 | expect(info.urls, ['http://www.example.com']); 70 | expect(info.addresses, anyOf(isNull, isEmpty)); 71 | 72 | expect(info.emails, hasLength(1)); 73 | final BarcodeEmail email = info.emails[0]; 74 | expect(email.address, 'email@example.com'); 75 | expect(email.body, anyOf(isNull, isEmpty)); 76 | expect(email.subject, anyOf(isNull, isEmpty)); 77 | expect(email.type, BarcodeEmailType.unknown); 78 | 79 | expect(info.phones, hasLength(1)); 80 | final BarcodePhone phone = info.phones[0]; 81 | expect(phone.number, '555-555-5555'); 82 | expect(phone.type, BarcodePhoneType.unknown); 83 | }); 84 | 85 | test('close', () { 86 | expect(detector.close(), completes); 87 | }); 88 | }); 89 | } 90 | -------------------------------------------------------------------------------- /example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /test/image_labeler_test.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | 6 | 7 | import 'package:google_ml_vision/google_ml_vision.dart'; 8 | import 'package:flutter/services.dart'; 9 | import 'package:flutter_test/flutter_test.dart'; 10 | 11 | void main() { 12 | TestWidgetsFlutterBinding.ensureInitialized(); 13 | 14 | group('$GoogleVision', () { 15 | final List log = []; 16 | dynamic returnValue; 17 | 18 | setUp(() { 19 | GoogleVision.channel 20 | .setMockMethodCallHandler((MethodCall methodCall) async { 21 | log.add(methodCall); 22 | 23 | switch (methodCall.method) { 24 | case 'ImageLabeler#processImage': 25 | return returnValue; 26 | default: 27 | return null; 28 | } 29 | }); 30 | log.clear(); 31 | GoogleVision.nextHandle = 0; 32 | }); 33 | 34 | group('$ImageLabeler', () { 35 | test('processImage', () async { 36 | final List labelData = [ 37 | { 38 | 'confidence': 0.6, 39 | 'entityId': 'hello', 40 | 'text': 'friend', 41 | }, 42 | { 43 | 'confidence': 0.8, 44 | 'entityId': 'hi', 45 | 'text': 'brother', 46 | }, 47 | { 48 | 'confidence': 1, 49 | 'entityId': 'hey', 50 | 'text': 'sister', 51 | }, 52 | ]; 53 | 54 | returnValue = labelData; 55 | 56 | final ImageLabeler detector = GoogleVision.instance.imageLabeler( 57 | const ImageLabelerOptions(confidenceThreshold: 0.2), 58 | ); 59 | 60 | final GoogleVisionImage image = GoogleVisionImage.fromFilePath( 61 | 'empty', 62 | ); 63 | 64 | final List labels = await detector.processImage(image); 65 | 66 | expect(log, [ 67 | isMethodCall( 68 | 'ImageLabeler#processImage', 69 | arguments: { 70 | 'handle': 0, 71 | 'type': 'file', 72 | 'path': 'empty', 73 | 'bytes': null, 74 | 'metadata': null, 75 | 'options': { 76 | 'modelType': 'onDevice', 77 | 'confidenceThreshold': 0.2, 78 | }, 79 | }, 80 | ), 81 | ]); 82 | 83 | expect(labels[0].confidence, 0.6); 84 | expect(labels[0].entityId, 'hello'); 85 | expect(labels[0].text, 'friend'); 86 | 87 | expect(labels[1].confidence, 0.8); 88 | expect(labels[1].entityId, 'hi'); 89 | expect(labels[1].text, 'brother'); 90 | 91 | expect(labels[2].confidence, 
1.0); 92 | expect(labels[2].entityId, 'hey'); 93 | expect(labels[2].text, 'sister'); 94 | }); 95 | 96 | test('processImage no blocks', () async { 97 | returnValue = []; 98 | 99 | final ImageLabeler detector = GoogleVision.instance.imageLabeler( 100 | const ImageLabelerOptions(), 101 | ); 102 | final GoogleVisionImage image = GoogleVisionImage.fromFilePath('empty'); 103 | 104 | final List labels = await detector.processImage(image); 105 | 106 | expect(log, [ 107 | isMethodCall( 108 | 'ImageLabeler#processImage', 109 | arguments: { 110 | 'handle': 0, 111 | 'type': 'file', 112 | 'path': 'empty', 113 | 'bytes': null, 114 | 'metadata': null, 115 | 'options': { 116 | 'modelType': 'onDevice', 117 | 'confidenceThreshold': 0.5, 118 | }, 119 | }, 120 | ), 121 | ]); 122 | 123 | expect(labels, isEmpty); 124 | }); 125 | }); 126 | }); 127 | } 128 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Google ML Kit Vision Plugin 2 | 3 | (https://pub.dev/packages/google_ml_vision) 4 | 5 | A Flutter plugin to use the capabilities of on-device Google ML Kit Vision APIs 6 | 7 | ## Usage 8 | 9 | To use this plugin, add `google_ml_vision` as a [dependency in your pubspec.yaml file](https://flutter.io/platform-plugins/). 10 | 11 | 12 | ## Using an ML Vision Detector 13 | 14 | ### 1. Create a `GoogleVisionImage`. 15 | 16 | Create a `GoogleVisionImage` object from your image. To create a `GoogleVisionImage` from an image `File` object: 17 | 18 | ```dart 19 | final File imageFile = getImageFile(); 20 | final GoogleVisionImage visionImage = GoogleVisionImage.fromFile(imageFile); 21 | ``` 22 | 23 | ### 2. Create an instance of a detector. 
24 | 25 | ```dart 26 | final BarcodeDetector barcodeDetector = GoogleVision.instance.barcodeDetector(); 27 | final FaceDetector faceDetector = GoogleVision.instance.faceDetector(); 28 | final ImageLabeler labeler = GoogleVision.instance.imageLabeler(); 29 | final TextRecognizer textRecognizer = GoogleVision.instance.textRecognizer(); 30 | ``` 31 | 32 | You can also configure all detectors, except `TextRecognizer`, with desired options. 33 | 34 | ```dart 35 | final ImageLabeler labeler = GoogleVision.instance.imageLabeler( 36 | ImageLabelerOptions(confidenceThreshold: 0.75), 37 | ); 38 | ``` 39 | 40 | ### 3. Call `detectInImage()` or `processImage()` with `visionImage`. 41 | 42 | ```dart 43 | final List barcodes = await barcodeDetector.detectInImage(visionImage); 44 | final List faces = await faceDetector.processImage(visionImage); 45 | final List labels = await labeler.processImage(visionImage); 46 | final VisionText visionText = await textRecognizer.processImage(visionImage); 47 | ``` 48 | 49 | ### 4. Extract data. 50 | 51 | a. Extract barcodes. 52 | 53 | ```dart 54 | for (Barcode barcode in barcodes) { 55 | final Rectangle boundingBox = barcode.boundingBox; 56 | final List> cornerPoints = barcode.cornerPoints; 57 | 58 | final String rawValue = barcode.rawValue; 59 | 60 | final BarcodeValueType valueType = barcode.valueType; 61 | 62 | // See API reference for complete list of supported types 63 | switch (valueType) { 64 | case BarcodeValueType.wifi: 65 | final String ssid = barcode.wifi.ssid; 66 | final String password = barcode.wifi.password; 67 | final BarcodeWiFiEncryptionType type = barcode.wifi.encryptionType; 68 | break; 69 | case BarcodeValueType.url: 70 | final String title = barcode.url.title; 71 | final String url = barcode.url.url; 72 | break; 73 | } 74 | } 75 | ``` 76 | 77 | b. Extract faces. 
78 | 79 | ```dart 80 | for (Face face in faces) { 81 | final Rectangle boundingBox = face.boundingBox; 82 | 83 | final double rotY = face.headEulerAngleY; // Head is rotated to the right rotY degrees 84 | final double rotZ = face.headEulerAngleZ; // Head is tilted sideways rotZ degrees 85 | 86 | // If landmark detection was enabled with FaceDetectorOptions (mouth, ears, 87 | // eyes, cheeks, and nose available): 88 | final FaceLandmark leftEar = face.getLandmark(FaceLandmarkType.leftEar); 89 | if (leftEar != null) { 90 | final Point leftEarPos = leftEar.position; 91 | } 92 | 93 | // If classification was enabled with FaceDetectorOptions: 94 | if (face.smilingProbability != null) { 95 | final double smileProb = face.smilingProbability; 96 | } 97 | 98 | // If face tracking was enabled with FaceDetectorOptions: 99 | if (face.trackingId != null) { 100 | final int id = face.trackingId; 101 | } 102 | } 103 | ``` 104 | 105 | c. Extract labels. 106 | 107 | ```dart 108 | for (ImageLabel label in labels) { 109 | final String text = label.text; 110 | final String entityId = label.entityId; 111 | final double confidence = label.confidence; 112 | } 113 | ``` 114 | 115 | d. Extract text. 116 | 117 | ```dart 118 | String text = visionText.text; 119 | for (TextBlock block in visionText.blocks) { 120 | final Rect boundingBox = block.boundingBox; 121 | final List cornerPoints = block.cornerPoints; 122 | final String text = block.text; 123 | final List languages = block.recognizedLanguages; 124 | 125 | for (TextLine line in block.lines) { 126 | // Same getters as TextBlock 127 | for (TextElement element in line.elements) { 128 | // Same getters as TextBlock 129 | } 130 | } 131 | } 132 | ``` 133 | 134 | ### 5. Release resources with `close()`. 
135 | 136 | ```dart 137 | barcodeDetector.close(); 138 | faceDetector.close(); 139 | labeler.close(); 140 | textRecognizer.close(); 141 | ``` 142 | 143 | ## Getting Started 144 | 145 | See the `example` directory for a complete sample app using Google Machine Learning. 146 | -------------------------------------------------------------------------------- /ios/Classes/TextRecognizer.m: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #import "FLTGoogleMlVisionPlugin.h" 6 | 7 | @import MLKitTextRecognition; 8 | @import MLKitTextRecognitionCommon.MLKTextRecognizedLanguage; 9 | @import MLKitTextRecognitionCommon.MLKText; 10 | @import MLKitTextRecognitionCommon.MLKTextBlock; 11 | @import MLKitTextRecognitionCommon.MLKTextLine; 12 | @import MLKitTextRecognitionCommon.MLKTextElement; 13 | 14 | @interface TextRecognizer () 15 | @property MLKTextRecognizer *recognizer; 16 | @end 17 | 18 | @implementation TextRecognizer 19 | - (instancetype)initWithOptions:(NSDictionary *)options { 20 | self = [super init]; 21 | if (self) { 22 | _recognizer = [MLKTextRecognizer textRecognizer]; 23 | } 24 | return self; 25 | } 26 | 27 | - (void)handleDetection:(MLKVisionImage *)image result:(FlutterResult)result { 28 | [_recognizer processImage:image 29 | completion:^(MLKText *_Nullable visionText, NSError *_Nullable error) { 30 | if (error) { 31 | [FLTGoogleMlVisionPlugin handleError:error result:result]; 32 | return; 33 | } else if (!visionText) { 34 | result(@{@"text" : @"", @"blocks" : @[]}); 35 | return; 36 | } 37 | 38 | NSMutableDictionary *visionTextData = [NSMutableDictionary dictionary]; 39 | visionTextData[@"text"] = visionText.text; 40 | 41 | NSMutableArray *allBlockData = [NSMutableArray array]; 42 | for (MLKTextBlock *block in visionText.blocks) { 43 | 
NSMutableDictionary *blockData = [NSMutableDictionary dictionary]; 44 | 45 | [self addData:blockData 46 | cornerPoints:block.cornerPoints 47 | frame:block.frame 48 | languages:block.recognizedLanguages 49 | text:block.text]; 50 | 51 | NSMutableArray *allLineData = [NSMutableArray array]; 52 | for (MLKTextLine *line in block.lines) { 53 | NSMutableDictionary *lineData = [NSMutableDictionary dictionary]; 54 | 55 | [self addData:lineData 56 | cornerPoints:line.cornerPoints 57 | frame:line.frame 58 | languages:line.recognizedLanguages 59 | text:line.text]; 60 | 61 | NSMutableArray *allElementData = [NSMutableArray array]; 62 | for (MLKTextElement *element in line.elements) { 63 | NSMutableDictionary *elementData = [NSMutableDictionary dictionary]; 64 | 65 | [self addData:elementData 66 | cornerPoints:element.cornerPoints 67 | frame:element.frame 68 | languages:[NSArray new] 69 | text:element.text]; 70 | 71 | [allElementData addObject:elementData]; 72 | } 73 | 74 | lineData[@"elements"] = allElementData; 75 | [allLineData addObject:lineData]; 76 | } 77 | 78 | blockData[@"lines"] = allLineData; 79 | [allBlockData addObject:blockData]; 80 | } 81 | 82 | visionTextData[@"blocks"] = allBlockData; 83 | result(visionTextData); 84 | }]; 85 | } 86 | 87 | - (void)addData:(NSMutableDictionary *)addTo 88 | cornerPoints:(NSArray *)cornerPoints 89 | frame:(CGRect)frame 90 | languages:(NSArray *)languages 91 | text:(NSString *)text { 92 | __block NSMutableArray *points = [NSMutableArray array]; 93 | 94 | for (NSValue *point in cornerPoints) { 95 | [points addObject:@[ @(point.CGPointValue.x), @(point.CGPointValue.y) ]]; 96 | } 97 | 98 | __block NSMutableArray *allLanguageData = [NSMutableArray array]; 99 | for (MLKTextRecognizedLanguage *language in languages) { 100 | [allLanguageData addObject:@{ 101 | @"languageCode" : language.languageCode ? 
language.languageCode : [NSNull null] 102 | }]; 103 | } 104 | 105 | [addTo addEntriesFromDictionary:@{ 106 | @"points" : points, 107 | @"left" : @(frame.origin.x), 108 | @"top" : @(frame.origin.y), 109 | @"width" : @(frame.size.width), 110 | @"height" : @(frame.size.height), 111 | @"recognizedLanguages" : allLanguageData, 112 | @"text" : text, 113 | }]; 114 | } 115 | @end -------------------------------------------------------------------------------- /lib/src/image_labeler.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | part of google_ml_vision; 6 | 7 | /// Used for finding [ImageLabel]s in a supplied image. 8 | /// 9 | /// When you use the API, you get a list of the entities that were recognized: 10 | /// people, things, places, activities, and so on. Each label found comes with a 11 | /// score that indicates the confidence the ML model has in its relevance. With 12 | /// this information, you can perform tasks such as automatic metadata 13 | /// generation and content moderation. 
14 | /// 15 | /// A image labeler is created via 16 | /// `imageLabeler([ImageLabelerOptions options])`: 17 | /// 18 | /// ```dart 19 | /// final GoogleVisionImage image = 20 | /// GoogleVisionImage.fromFilePath('path/to/file'); 21 | /// 22 | /// final ImageLabeler imageLabeler = 23 | /// GoogleVision.instance.imageLabeler(options); 24 | /// 25 | /// final List labels = await imageLabeler.processImage(image); 26 | /// ``` 27 | class ImageLabeler { 28 | ImageLabeler._({ 29 | required ImageLabelerOptions options, 30 | required int handle, 31 | }) : _options = options, 32 | _handle = handle; 33 | 34 | final ImageLabelerOptions _options; 35 | final int _handle; 36 | bool _hasBeenOpened = false; 37 | bool _isClosed = false; 38 | 39 | /// Finds entities in the input image. 40 | Future> processImage(GoogleVisionImage visionImage) async { 41 | assert(!_isClosed); 42 | _hasBeenOpened = true; 43 | 44 | final reply = await GoogleVision.channel.invokeListMethod( 45 | 'ImageLabeler#processImage', 46 | { 47 | 'handle': _handle, 48 | 'options': { 49 | 'confidenceThreshold': _options.confidenceThreshold, 50 | }, 51 | }..addAll(visionImage._serialize()), 52 | ); 53 | 54 | final List labels = []; 55 | for (final dynamic data in reply!) { 56 | labels.add(ImageLabel._(data)); 57 | } 58 | 59 | return labels; 60 | } 61 | 62 | /// Release resources used by this labeler. 63 | Future close() { 64 | if (!_hasBeenOpened) _isClosed = true; 65 | if (_isClosed) return Future.value(); 66 | 67 | _isClosed = true; 68 | return GoogleVision.channel.invokeMethod( 69 | 'ImageLabeler#close', 70 | {'handle': _handle}, 71 | ); 72 | } 73 | } 74 | 75 | /// Options for on device image labeler. 76 | /// 77 | /// Confidence threshold could be provided for the label detection. For example, 78 | /// if the confidence threshold is set to 0.7, only labels with 79 | /// confidence >= 0.7 would be returned. The default threshold is 0.5. 
80 | class ImageLabelerOptions { 81 | /// Constructor for [ImageLabelerOptions]. 82 | /// 83 | /// Confidence threshold could be provided for the label detection. 84 | /// For example, if the confidence threshold is set to 0.7, only labels with 85 | /// confidence >= 0.7 would be returned. The default threshold is 0.5. 86 | const ImageLabelerOptions({this.confidenceThreshold = 0.5}) 87 | : assert(confidenceThreshold >= 0.0), 88 | assert(confidenceThreshold <= 1.0); 89 | 90 | /// The minimum confidence threshold of labels to be detected. 91 | /// 92 | /// Required to be in range [0.0, 1.0]. 93 | final double confidenceThreshold; 94 | } 95 | 96 | /// Options for cloud image labeler. 97 | /// 98 | /// Confidence threshold could be provided for the label detection. For example, 99 | /// if the confidence threshold is set to 0.7, only labels with 100 | /// confidence >= 0.7 would be returned. The default threshold is 0.5. 101 | class CloudImageLabelerOptions { 102 | /// Constructor for [CloudImageLabelerOptions]. 103 | /// 104 | /// Confidence threshold could be provided for the label detection. 105 | /// For example, if the confidence threshold is set to 0.7, only labels with 106 | /// confidence >= 0.7 would be returned. The default threshold is 0.5. 107 | const CloudImageLabelerOptions({this.confidenceThreshold = 0.5}) 108 | : assert(confidenceThreshold >= 0.0), 109 | assert(confidenceThreshold <= 1.0); 110 | 111 | /// The minimum confidence threshold of labels to be detected. 112 | /// 113 | /// Required to be in range [0.0, 1.0]. 114 | final double confidenceThreshold; 115 | } 116 | 117 | /// Represents an entity label detected by [ImageLabeler] and [CloudImageLabeler]. 118 | class ImageLabel { 119 | ImageLabel._(dynamic data) 120 | : confidence = data['confidence']?.toDouble(), 121 | entityId = data['entityId'], 122 | text = data['text']; 123 | 124 | /// The overall confidence of the result. Range [0.0, 1.0]. 125 | final double? 
confidence; 126 | 127 | /// The opaque entity ID. 128 | /// 129 | /// IDs are available in Google Knowledge Graph Search API 130 | /// https://developers.google.com/knowledge-graph/ 131 | final String? entityId; 132 | 133 | /// A detected label from the given image. 134 | /// 135 | /// The label returned here is in English only. The end developer should use 136 | /// [entityId] to retrieve unique id. 137 | final String? text; 138 | } 139 | -------------------------------------------------------------------------------- /android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/GMLKTextRecognizer.java: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | package com.brianmtully.flutter.plugins.googlemlvision; 6 | 7 | import android.graphics.Point; 8 | import android.graphics.Rect; 9 | import androidx.annotation.NonNull; 10 | import com.google.android.gms.tasks.OnFailureListener; 11 | import com.google.android.gms.tasks.OnSuccessListener; 12 | import com.google.mlkit.vision.common.InputImage; 13 | import com.google.mlkit.vision.text.Text; 14 | import com.google.mlkit.vision.text.TextRecognition; 15 | import com.google.mlkit.vision.text.TextRecognizer; 16 | import io.flutter.plugin.common.MethodChannel; 17 | import java.io.IOException; 18 | import java.util.ArrayList; 19 | import java.util.HashMap; 20 | import java.util.List; 21 | import java.util.Map; 22 | import android.util.Log; 23 | 24 | class GMLKTextRecognizer implements Detector { 25 | private final TextRecognizer recognizer; 26 | 27 | GMLKTextRecognizer(Map options) { 28 | recognizer = TextRecognition.getClient(); 29 | } 30 | 31 | @Override 32 | public void handleDetection(final InputImage image, final MethodChannel.Result result) { 33 | recognizer 34 | .process(image) 35 | 
.addOnSuccessListener( 36 | new OnSuccessListener() { 37 | @Override 38 | public void onSuccess(Text googleVisionText) { 39 | Map visionTextData = new HashMap<>(); 40 | visionTextData.put("text", googleVisionText.getText()); 41 | 42 | List> allBlockData = new ArrayList<>(); 43 | for (Text.TextBlock block : googleVisionText.getTextBlocks()) { 44 | Map blockData = new HashMap<>(); 45 | addData( 46 | blockData, 47 | block.getBoundingBox(), 48 | block.getCornerPoints(), 49 | block.getRecognizedLanguage(), 50 | block.getText()); 51 | 52 | List> allLineData = new ArrayList<>(); 53 | for (Text.Line line : block.getLines()) { 54 | Map lineData = new HashMap<>(); 55 | addData( 56 | lineData, 57 | line.getBoundingBox(), 58 | line.getCornerPoints(), 59 | line.getRecognizedLanguage(), 60 | line.getText()); 61 | 62 | List> allElementData = new ArrayList<>(); 63 | for (Text.Element element : line.getElements()) { 64 | Map elementData = new HashMap<>(); 65 | addData( 66 | elementData, 67 | element.getBoundingBox(), 68 | element.getCornerPoints(), 69 | element.getRecognizedLanguage(), 70 | element.getText()); 71 | 72 | allElementData.add(elementData); 73 | } 74 | lineData.put("elements", allElementData); 75 | allLineData.add(lineData); 76 | } 77 | blockData.put("lines", allLineData); 78 | allBlockData.add(blockData); 79 | } 80 | 81 | visionTextData.put("blocks", allBlockData); 82 | result.success(visionTextData); 83 | } 84 | }) 85 | .addOnFailureListener( 86 | new OnFailureListener() { 87 | @Override 88 | public void onFailure(@NonNull Exception exception) { 89 | result.error("textRecognizerError", exception.getLocalizedMessage(), null); 90 | } 91 | }); 92 | } 93 | 94 | private void addData( 95 | Map addTo, 96 | Rect boundingBox, 97 | Point[] cornerPoints, 98 | String language, 99 | String text) { 100 | 101 | if (boundingBox != null) { 102 | addTo.put("left", (double) boundingBox.left); 103 | addTo.put("top", (double) boundingBox.top); 104 | addTo.put("width", (double) 
boundingBox.width()); 105 | addTo.put("height", (double) boundingBox.height()); 106 | } 107 | 108 | List points = new ArrayList<>(); 109 | if (cornerPoints != null) { 110 | for (Point point : cornerPoints) { 111 | points.add(new double[] {(double) point.x, (double) point.y}); 112 | } 113 | } 114 | addTo.put("points", points); 115 | 116 | List> allLanguageData = new ArrayList<>(); 117 | // for (RecognizedLanguage language : languages) { 118 | Map languageData = new HashMap<>(); 119 | languageData.put("languageCode", language); 120 | allLanguageData.add(languageData); 121 | //} 122 | addTo.put("recognizedLanguages", allLanguageData); 123 | addTo.put("text", text); 124 | } 125 | 126 | @Override 127 | public void close() throws IOException { 128 | recognizer.close(); 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /lib/src/text_recognizer.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | part of google_ml_vision; 6 | 7 | /// Detector for performing optical character recognition(OCR) on an input image. 8 | /// 9 | /// A text recognizer is created via `textRecognizer()` in [GoogleVision]: 10 | /// 11 | /// ```dart 12 | /// final GoogleVisionImage image = 13 | /// GoogleVisionImage.fromFilePath('path/to/file'); 14 | /// 15 | /// final TextRecognizer textRecognizer = 16 | /// GoogleVision.instance.textRecognizer(); 17 | /// 18 | /// final List recognizedText = 19 | /// await textRecognizer.processImage(image); 20 | /// ``` 21 | class TextRecognizer { 22 | TextRecognizer._({ 23 | required int handle, 24 | }) : _handle = handle; 25 | 26 | final int _handle; 27 | 28 | bool _hasBeenOpened = false; 29 | bool _isClosed = false; 30 | 31 | /// Detects [VisionText] from a [GoogleVisionImage]. 
32 | Future processImage(GoogleVisionImage visionImage) async { 33 | assert(!_isClosed); 34 | 35 | _hasBeenOpened = true; 36 | 37 | final reply = await GoogleVision.channel.invokeMapMethod( 38 | 'TextRecognizer#processImage', 39 | { 40 | 'handle': _handle, 41 | 'options': {}, 42 | }..addAll(visionImage._serialize()), 43 | ); 44 | 45 | return VisionText._(reply!); 46 | } 47 | 48 | /// Releases resources used by this recognizer. 49 | Future close() { 50 | if (!_hasBeenOpened) _isClosed = true; 51 | if (_isClosed) return Future.value(); 52 | 53 | _isClosed = true; 54 | return GoogleVision.channel.invokeMethod( 55 | 'TextRecognizer#close', 56 | {'handle': _handle}, 57 | ); 58 | } 59 | } 60 | 61 | /// Recognized text in an image. 62 | class VisionText { 63 | VisionText._(Map data) 64 | : text = data['text'], 65 | blocks = List.unmodifiable(data['blocks'] 66 | .map((dynamic block) => TextBlock._(block))); 67 | 68 | /// String representation of the recognized text. 69 | final String? text; 70 | 71 | /// All recognized text broken down into individual blocks/paragraphs. 72 | final List blocks; 73 | } 74 | 75 | /// Abstract class representing dimensions of recognized text in an image. 76 | abstract class TextContainer { 77 | TextContainer._(Map data) 78 | : boundingBox = data['left'] != null 79 | ? Rect.fromLTWH( 80 | data['left'], 81 | data['top'], 82 | data['width'], 83 | data['height'], 84 | ) 85 | : null, 86 | confidence = data['confidence']?.toDouble(), 87 | cornerPoints = List.unmodifiable( 88 | data['points'].map((dynamic point) => Offset( 89 | point[0], 90 | point[1], 91 | ))), 92 | recognizedLanguages = List.unmodifiable( 93 | data['recognizedLanguages'].map( 94 | (dynamic language) => RecognizedLanguage._(language))), 95 | text = data['text']; 96 | 97 | /// Axis-aligned bounding rectangle of the detected text. 98 | /// 99 | /// The point (0, 0) is defined as the upper-left corner of the image. 100 | /// 101 | /// Could be null even if text is found. 
102 | final Rect? boundingBox; 103 | 104 | /// The confidence of the recognized text block. 105 | /// 106 | /// The value is null for onDevice text recognizer. 107 | final double? confidence; 108 | 109 | /// The four corner points in clockwise direction starting with top-left. 110 | /// 111 | /// Due to the possible perspective distortions, this is not necessarily a 112 | /// rectangle. Parts of the region could be outside of the image. 113 | /// 114 | /// Could be empty even if text is found. 115 | final List cornerPoints; 116 | 117 | /// All detected languages from recognized text. 118 | /// 119 | /// On-device text recognizers only detect Latin-based languages, while cloud 120 | /// text recognizers can detect multiple languages. If no languages are 121 | /// recognized, the list is empty. 122 | final List recognizedLanguages; 123 | 124 | /// The recognized text as a string. 125 | /// 126 | /// Returned in reading order for the language. For Latin, this is top to 127 | /// bottom within a Block, and left-to-right within a Line. 128 | final String? text; 129 | } 130 | 131 | /// A block of text (think of it as a paragraph) as deemed by the OCR engine. 132 | class TextBlock extends TextContainer { 133 | TextBlock._(Map block) 134 | : lines = List.unmodifiable( 135 | block['lines'].map((dynamic line) => TextLine._(line))), 136 | super._(block); 137 | 138 | /// The contents of the text block, broken down into individual lines. 139 | final List lines; 140 | } 141 | 142 | /// Represents a line of text. 143 | class TextLine extends TextContainer { 144 | TextLine._(Map line) 145 | : elements = List.unmodifiable(line['elements'] 146 | .map((dynamic element) => TextElement._(element))), 147 | super._(line); 148 | 149 | /// The contents of this line, broken down into individual elements. 150 | final List elements; 151 | } 152 | 153 | /// Roughly equivalent to a space-separated "word." 
154 | /// 155 | /// The API separates elements into words in most Latin languages, but could 156 | /// separate by characters in others. 157 | /// 158 | /// If a word is split between two lines by a hyphen, each part is encoded as a 159 | /// separate element. 160 | class TextElement extends TextContainer { 161 | TextElement._(Map element) : super._(element); 162 | } 163 | -------------------------------------------------------------------------------- /example/lib/picture_scanner.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | // @dart=2.9 6 | 7 | import 'dart:async'; 8 | import 'dart:io'; 9 | import 'dart:ui'; 10 | 11 | import 'package:google_ml_vision/google_ml_vision.dart'; 12 | import 'package:flutter/material.dart'; 13 | import 'package:image_picker/image_picker.dart'; 14 | 15 | import 'detector_painters.dart'; 16 | 17 | class PictureScanner extends StatefulWidget { 18 | const PictureScanner({Key key}) : super(key: key); 19 | 20 | @override 21 | State createState() => _PictureScannerState(); 22 | } 23 | 24 | class _PictureScannerState extends State { 25 | File _imageFile; 26 | Size _imageSize; 27 | dynamic _scanResults; 28 | Detector _currentDetector = Detector.text; 29 | final BarcodeDetector _barcodeDetector = 30 | GoogleVision.instance.barcodeDetector(); 31 | final FaceDetector _faceDetector = GoogleVision.instance.faceDetector(); 32 | final ImageLabeler _imageLabeler = GoogleVision.instance.imageLabeler(); 33 | final TextRecognizer _recognizer = GoogleVision.instance.textRecognizer(); 34 | 35 | Future _getAndScanImage() async { 36 | setState(() { 37 | _imageFile = null; 38 | _imageSize = null; 39 | }); 40 | 41 | final File pickedImage = 42 | await ImagePicker.pickImage(source: ImageSource.gallery); 43 | final File imageFile = 
File(pickedImage.path); 44 | 45 | setState(() { 46 | _imageFile = imageFile; 47 | }); 48 | 49 | if (imageFile != null) { 50 | await Future.wait([ 51 | _getImageSize(imageFile), 52 | _scanImage(imageFile), 53 | ]); 54 | } 55 | } 56 | 57 | Future _getImageSize(File imageFile) async { 58 | final Completer completer = Completer(); 59 | 60 | final Image image = Image.file(imageFile); 61 | image.image.resolve(const ImageConfiguration()).addListener( 62 | ImageStreamListener((ImageInfo info, bool _) { 63 | completer.complete(Size( 64 | info.image.width.toDouble(), 65 | info.image.height.toDouble(), 66 | )); 67 | }), 68 | ); 69 | 70 | final Size imageSize = await completer.future; 71 | setState(() { 72 | _imageSize = imageSize; 73 | }); 74 | } 75 | 76 | Future _scanImage(File imageFile) async { 77 | setState(() { 78 | _scanResults = null; 79 | }); 80 | 81 | final GoogleVisionImage visionImage = GoogleVisionImage.fromFile(imageFile); 82 | 83 | dynamic results; 84 | switch (_currentDetector) { 85 | case Detector.barcode: 86 | results = await _barcodeDetector.detectInImage(visionImage); 87 | break; 88 | case Detector.face: 89 | results = await _faceDetector.processImage(visionImage); 90 | break; 91 | case Detector.label: 92 | results = await _imageLabeler.processImage(visionImage); 93 | break; 94 | case Detector.text: 95 | results = await _recognizer.processImage(visionImage); 96 | print(results.blocks); 97 | for (final TextBlock block in results.blocks) { 98 | for (final TextLine line in block.lines) { 99 | for (final TextElement element in line.elements) { 100 | print(element.text); 101 | } 102 | } 103 | } 104 | 105 | break; 106 | default: 107 | return; 108 | } 109 | 110 | setState(() { 111 | _scanResults = results; 112 | }); 113 | } 114 | 115 | CustomPaint _buildResults(Size imageSize, dynamic results) { 116 | CustomPainter painter; 117 | 118 | switch (_currentDetector) { 119 | case Detector.barcode: 120 | painter = BarcodeDetectorPainter(_imageSize, results); 121 | break; 
122 | case Detector.face: 123 | painter = FaceDetectorPainter(_imageSize, results); 124 | break; 125 | case Detector.label: 126 | painter = LabelDetectorPainter(_imageSize, results); 127 | break; 128 | case Detector.text: 129 | painter = TextDetectorPainter(_imageSize, results); 130 | break; 131 | default: 132 | break; 133 | } 134 | 135 | return CustomPaint( 136 | painter: painter, 137 | ); 138 | } 139 | 140 | Widget _buildImage() { 141 | return Container( 142 | constraints: const BoxConstraints.expand(), 143 | decoration: BoxDecoration( 144 | image: DecorationImage( 145 | image: Image.file(_imageFile).image, 146 | fit: BoxFit.fitWidth, 147 | ), 148 | ), 149 | child: _imageSize == null || _scanResults == null 150 | ? const Center( 151 | child: Text( 152 | 'Scanning...', 153 | style: TextStyle( 154 | color: Colors.green, 155 | fontSize: 30, 156 | ), 157 | ), 158 | ) 159 | : _buildResults(_imageSize, _scanResults), 160 | ); 161 | } 162 | 163 | @override 164 | Widget build(BuildContext context) { 165 | return Scaffold( 166 | appBar: AppBar( 167 | title: const Text('Picture Scanner'), 168 | actions: [ 169 | PopupMenuButton( 170 | onSelected: (Detector result) { 171 | _currentDetector = result; 172 | if (_imageFile != null) _scanImage(_imageFile); 173 | }, 174 | itemBuilder: (BuildContext context) => >[ 175 | const PopupMenuItem( 176 | value: Detector.barcode, 177 | child: Text('Detect Barcode'), 178 | ), 179 | const PopupMenuItem( 180 | value: Detector.face, 181 | child: Text('Detect Face'), 182 | ), 183 | const PopupMenuItem( 184 | value: Detector.label, 185 | child: Text('Detect Label'), 186 | ), 187 | const PopupMenuItem( 188 | value: Detector.text, 189 | child: Text('Detect Text'), 190 | ), 191 | ], 192 | ), 193 | ], 194 | ), 195 | body: _imageFile == null 196 | ? 
const Center(child: Text('No image selected.')) 197 | : _buildImage(), 198 | floatingActionButton: FloatingActionButton( 199 | onPressed: _getAndScanImage, 200 | tooltip: 'Pick Image', 201 | child: const Icon(Icons.add_a_photo), 202 | ), 203 | ); 204 | } 205 | 206 | @override 207 | void dispose() { 208 | _barcodeDetector.close(); 209 | _faceDetector.close(); 210 | _imageLabeler.close(); 211 | _recognizer.close(); 212 | super.dispose(); 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/GoogleMlVisionHandler.java: -------------------------------------------------------------------------------- 1 | package com.brianmtully.flutter.plugins.googlemlvision; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.graphics.BitmapFactory; 6 | import android.graphics.Matrix; 7 | import android.net.Uri; 8 | import android.util.SparseArray; 9 | 10 | import androidx.annotation.NonNull; 11 | import androidx.exifinterface.media.ExifInterface; 12 | import com.google.mlkit.vision.common.InputImage; 13 | import io.flutter.plugin.common.MethodCall; 14 | import io.flutter.plugin.common.MethodChannel; 15 | import io.flutter.plugin.common.MethodChannel.MethodCallHandler; 16 | import io.flutter.plugin.common.MethodChannel.Result; 17 | import java.io.File; 18 | import java.io.IOException; 19 | import java.util.Map; 20 | import android.util.Log; 21 | 22 | class MlVisionHandler implements MethodCallHandler { 23 | private final SparseArray detectors = new SparseArray<>(); 24 | private final Context applicationContext; 25 | 26 | MlVisionHandler(Context applicationContext) { 27 | this.applicationContext = applicationContext; 28 | } 29 | 30 | @Override 31 | public void onMethodCall(@NonNull MethodCall call, @NonNull Result result) { 32 | switch (call.method) { 33 | case "BarcodeDetector#detectInImage": 34 | case 
"FaceDetector#processImage": 35 | case "ImageLabeler#processImage": 36 | case "TextRecognizer#processImage": 37 | handleDetection(call, result); 38 | break; 39 | case "BarcodeDetector#close": 40 | case "FaceDetector#close": 41 | case "ImageLabeler#close": 42 | case "TextRecognizer#close": 43 | closeDetector(call, result); 44 | break; 45 | default: 46 | result.notImplemented(); 47 | } 48 | } 49 | 50 | private void handleDetection(MethodCall call, MethodChannel.Result result) { 51 | Map options = call.argument("options"); 52 | 53 | InputImage image; 54 | Map imageData = call.arguments(); 55 | try { 56 | image = dataToVisionImage(imageData); 57 | } catch (IOException exception) { 58 | result.error("MLVisionDetectorIOError", exception.getLocalizedMessage(), null); 59 | return; 60 | } 61 | Detector detector = getDetector(call); 62 | if (detector == null) { 63 | switch (call.method.split("#")[0]) { 64 | case "BarcodeDetector": 65 | detector = new GMLKBarcodeDetector(options); 66 | break; 67 | case "FaceDetector": 68 | detector = new GMLKFaceDetector(options); 69 | break; 70 | case "ImageLabeler": 71 | detector = new GMLKImageLabeler(options); 72 | break; 73 | case "TextRecognizer": 74 | detector = new GMLKTextRecognizer(options); 75 | break; 76 | } 77 | 78 | final Integer handle = call.argument("handle"); 79 | addDetector(handle, detector); 80 | } 81 | 82 | detector.handleDetection(image, result); 83 | } 84 | 85 | private void closeDetector(final MethodCall call, final MethodChannel.Result result) { 86 | final Detector detector = getDetector(call); 87 | 88 | if (detector == null) { 89 | final Integer handle = call.argument("handle"); 90 | final String message = String.format("Object for handle does not exists: %s", handle); 91 | throw new IllegalArgumentException(message); 92 | } 93 | 94 | try { 95 | detector.close(); 96 | result.success(null); 97 | } catch (IOException e) { 98 | final String code = String.format("%sIOError", detector.getClass().getSimpleName()); 99 | 
result.error(code, e.getLocalizedMessage(), null); 100 | } finally { 101 | final Integer handle = call.argument("handle"); 102 | detectors.remove(handle); 103 | } 104 | } 105 | 106 | private InputImage dataToVisionImage(Map imageData) throws IOException { 107 | String imageType = (String) imageData.get("type"); 108 | assert imageType != null; 109 | 110 | switch (imageType) { 111 | case "file": 112 | final String imageFilePath = (String) imageData.get("path"); 113 | final int rotation = getImageExifOrientation(imageFilePath); 114 | 115 | if (rotation == 0) { 116 | File file = new File(imageFilePath); 117 | return InputImage.fromFilePath(this.applicationContext, Uri.fromFile(file)); 118 | } 119 | 120 | Matrix matrix = new Matrix(); 121 | matrix.postRotate(rotation); 122 | 123 | final Bitmap bitmap = BitmapFactory.decodeFile(imageFilePath); 124 | final Bitmap rotatedBitmap = 125 | Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true); 126 | 127 | return InputImage.fromBitmap(rotatedBitmap, 0); 128 | case "bytes": 129 | @SuppressWarnings("unchecked") 130 | Map metadata = (Map) imageData.get("metadata"); 131 | 132 | 133 | byte[] bytes = (byte[]) imageData.get("bytes"); 134 | assert bytes != null; 135 | Double width = (Double)metadata.get("width"); 136 | int intWidth = width.intValue(); 137 | 138 | Double height = (Double)metadata.get("height"); 139 | int intHeight = height.intValue(); 140 | try { 141 | InputImage inputImage = InputImage.fromByteArray(bytes,intWidth,intHeight,(int)metadata.get("rotation"), 17); //842094169 142 | return inputImage; 143 | } catch(IllegalArgumentException exception) { 144 | Log.e("GoogleMLVision ", "exception:", exception); 145 | return null; 146 | } 147 | default: 148 | throw new IllegalArgumentException(String.format("No image type for: %s", imageType)); 149 | } 150 | } 151 | 152 | private int getImageExifOrientation(String imageFilePath) throws IOException { 153 | ExifInterface exif = new 
ExifInterface(imageFilePath); 154 | int orientation = 155 | exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); 156 | 157 | switch (orientation) { 158 | case ExifInterface.ORIENTATION_ROTATE_90: 159 | return 90; 160 | case ExifInterface.ORIENTATION_ROTATE_180: 161 | return 180; 162 | case ExifInterface.ORIENTATION_ROTATE_270: 163 | return 270; 164 | default: 165 | return 0; 166 | } 167 | } 168 | 169 | private void addDetector(final int handle, final Detector detector) { 170 | if (detectors.get(handle) != null) { 171 | final String message = String.format("Object for handle already exists: %s", handle); 172 | throw new IllegalArgumentException(message); 173 | } 174 | detectors.put(handle, detector); 175 | } 176 | 177 | private Detector getDetector(final MethodCall call) { 178 | final Integer handle = call.argument("handle"); 179 | return detectors.get(handle); 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /example/lib/camera_preview_scanner.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | import 'package:camera/camera.dart'; 8 | import 'package:google_ml_vision/google_ml_vision.dart'; 9 | import 'package:flutter/foundation.dart'; 10 | import 'package:flutter/material.dart'; 11 | 12 | import 'detector_painters.dart'; 13 | import 'scanner_utils.dart'; 14 | 15 | class CameraPreviewScanner extends StatefulWidget { 16 | const CameraPreviewScanner({Key key}) : super(key: key); 17 | 18 | @override 19 | State createState() => _CameraPreviewScannerState(); 20 | } 21 | 22 | class _CameraPreviewScannerState extends State { 23 | dynamic _scanResults; 24 | CameraController _camera; 25 | Detector _currentDetector = Detector.text; 26 | bool _isDetecting = false; 27 | CameraLensDirection _direction = CameraLensDirection.back; 28 | 29 | final BarcodeDetector _barcodeDetector = 30 | GoogleVision.instance.barcodeDetector(); 31 | final FaceDetector _faceDetector = GoogleVision.instance 32 | .faceDetector(FaceDetectorOptions(enableContours: true)); 33 | final ImageLabeler _imageLabeler = GoogleVision.instance.imageLabeler(); 34 | final TextRecognizer _recognizer = GoogleVision.instance.textRecognizer(); 35 | 36 | @override 37 | void initState() { 38 | super.initState(); 39 | _initializeCamera(); 40 | } 41 | 42 | Future _initializeCamera() async { 43 | final CameraDescription description = 44 | await ScannerUtils.getCamera(_direction); 45 | 46 | _camera = CameraController( 47 | description, 48 | defaultTargetPlatform == TargetPlatform.iOS 49 | ? 
ResolutionPreset.high 50 | : ResolutionPreset.high, 51 | enableAudio: false, 52 | ); 53 | await _camera.initialize(); 54 | 55 | await _camera.startImageStream((CameraImage image) { 56 | if (_isDetecting) return; 57 | 58 | _isDetecting = true; 59 | 60 | ScannerUtils.detect( 61 | image: image, 62 | detectInImage: _getDetectionMethod(), 63 | imageRotation: description.sensorOrientation, 64 | ).then( 65 | (dynamic results) { 66 | if (_currentDetector == null) return; 67 | setState(() { 68 | _scanResults = results; 69 | }); 70 | }, 71 | ).whenComplete(() => Future.delayed( 72 | Duration( 73 | milliseconds: 100, 74 | ), 75 | () => {_isDetecting = false})); 76 | }); 77 | } 78 | 79 | Future Function(GoogleVisionImage image) _getDetectionMethod() { 80 | switch (_currentDetector) { 81 | case Detector.text: 82 | return _recognizer.processImage; 83 | case Detector.barcode: 84 | return _barcodeDetector.detectInImage; 85 | case Detector.label: 86 | return _imageLabeler.processImage; 87 | case Detector.face: 88 | return _faceDetector.processImage; 89 | } 90 | 91 | return null; 92 | } 93 | 94 | Widget _buildResults() { 95 | const Text noResultsText = Text('No results!'); 96 | 97 | if (_scanResults == null || 98 | _camera == null || 99 | !_camera.value.isInitialized) { 100 | return noResultsText; 101 | } 102 | 103 | CustomPainter painter; 104 | 105 | final Size imageSize = Size( 106 | _camera.value.previewSize.height, 107 | _camera.value.previewSize.width, 108 | ); 109 | 110 | switch (_currentDetector) { 111 | case Detector.barcode: 112 | if (_scanResults is! List) return noResultsText; 113 | 114 | painter = BarcodeDetectorPainter(imageSize, _scanResults); 115 | break; 116 | case Detector.face: 117 | if (_scanResults is! List) return noResultsText; 118 | painter = FaceDetectorPainter(imageSize, _scanResults); 119 | break; 120 | case Detector.label: 121 | if (_scanResults is! 
List) return noResultsText; 122 | painter = LabelDetectorPainter(imageSize, _scanResults); 123 | break; 124 | case Detector.cloudLabel: 125 | if (_scanResults is! List) return noResultsText; 126 | painter = LabelDetectorPainter(imageSize, _scanResults); 127 | break; 128 | default: 129 | assert(_currentDetector == Detector.text); 130 | if (_scanResults is! VisionText) return noResultsText; 131 | painter = TextDetectorPainter(imageSize, _scanResults); 132 | } 133 | 134 | return CustomPaint( 135 | painter: painter, 136 | ); 137 | } 138 | 139 | Widget _buildImage() { 140 | return Container( 141 | constraints: const BoxConstraints.expand(), 142 | child: _camera == null 143 | ? const Center( 144 | child: Text( 145 | 'Initializing Camera...', 146 | style: TextStyle( 147 | color: Colors.green, 148 | fontSize: 30, 149 | ), 150 | ), 151 | ) 152 | : Stack( 153 | fit: StackFit.expand, 154 | children: [ 155 | CameraPreview(_camera), 156 | _buildResults(), 157 | ], 158 | ), 159 | ); 160 | } 161 | 162 | Future _toggleCameraDirection() async { 163 | if (_direction == CameraLensDirection.back) { 164 | _direction = CameraLensDirection.front; 165 | } else { 166 | _direction = CameraLensDirection.back; 167 | } 168 | 169 | await _camera.stopImageStream(); 170 | await _camera.dispose(); 171 | 172 | setState(() { 173 | _camera = null; 174 | }); 175 | 176 | await _initializeCamera(); 177 | } 178 | 179 | @override 180 | Widget build(BuildContext context) { 181 | return Scaffold( 182 | appBar: AppBar( 183 | title: const Text('ML Vision Example'), 184 | actions: [ 185 | PopupMenuButton( 186 | onSelected: (Detector result) { 187 | _currentDetector = result; 188 | }, 189 | itemBuilder: (BuildContext context) => >[ 190 | const PopupMenuItem( 191 | value: Detector.barcode, 192 | child: Text('Detect Barcode'), 193 | ), 194 | const PopupMenuItem( 195 | value: Detector.face, 196 | child: Text('Detect Face'), 197 | ), 198 | const PopupMenuItem( 199 | value: Detector.label, 200 | child: Text('Detect 
Label'), 201 | ), 202 | const PopupMenuItem( 203 | value: Detector.text, 204 | child: Text('Detect Text'), 205 | ), 206 | ], 207 | ), 208 | ], 209 | ), 210 | body: _buildImage(), 211 | floatingActionButton: FloatingActionButton( 212 | onPressed: _toggleCameraDirection, 213 | child: _direction == CameraLensDirection.back 214 | ? const Icon(Icons.camera_front) 215 | : const Icon(Icons.camera_rear), 216 | ), 217 | ); 218 | } 219 | 220 | @override 221 | void dispose() { 222 | _camera.dispose().then((_) { 223 | _barcodeDetector.close(); 224 | _faceDetector.close(); 225 | _imageLabeler.close(); 226 | _recognizer.close(); 227 | }); 228 | 229 | _currentDetector = null; 230 | super.dispose(); 231 | } 232 | } 233 | -------------------------------------------------------------------------------- /example/lib/detector_painters.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | // @dart=2.9 6 | 7 | import 'dart:ui' as ui; 8 | import 'dart:math'; 9 | import 'package:google_ml_vision/google_ml_vision.dart'; 10 | import 'package:flutter/material.dart'; 11 | 12 | enum Detector { 13 | barcode, 14 | face, 15 | label, 16 | cloudLabel, 17 | text, 18 | } 19 | 20 | const List> faceMaskConnections = [ 21 | Point(0, 4), 22 | Point(0, 55), 23 | Point(4, 7), 24 | Point(4, 55), 25 | Point(4, 51), 26 | Point(7, 11), 27 | Point(7, 51), 28 | Point(7, 130), 29 | Point(51, 55), 30 | Point(51, 80), 31 | Point(55, 72), 32 | Point(72, 76), 33 | Point(76, 80), 34 | Point(80, 84), 35 | Point(84, 72), 36 | Point(72, 127), 37 | Point(72, 130), 38 | Point(130, 127), 39 | Point(117, 130), 40 | Point(11, 117), 41 | Point(11, 15), 42 | Point(15, 18), 43 | Point(18, 21), 44 | Point(21, 121), 45 | Point(15, 121), 46 | Point(21, 25), 47 | Point(25, 125), 48 | Point(125, 128), 49 | Point(128, 127), 50 | Point(128, 29), 51 | Point(25, 29), 52 | Point(29, 32), 53 | Point(32, 0), 54 | Point(0, 45), 55 | Point(32, 41), 56 | Point(41, 29), 57 | Point(41, 45), 58 | Point(45, 64), 59 | Point(45, 32), 60 | Point(64, 68), 61 | Point(68, 56), 62 | Point(56, 60), 63 | Point(60, 64), 64 | Point(56, 41), 65 | Point(64, 128), 66 | Point(64, 127), 67 | Point(125, 93), 68 | Point(93, 117), 69 | Point(117, 121), 70 | Point(121, 125), 71 | ]; 72 | 73 | class BarcodeDetectorPainter extends CustomPainter { 74 | BarcodeDetectorPainter(this.absoluteImageSize, this.barcodeLocations); 75 | 76 | final Size absoluteImageSize; 77 | final List barcodeLocations; 78 | 79 | @override 80 | void paint(Canvas canvas, Size size) { 81 | final double scaleX = size.width / absoluteImageSize.width; 82 | final double scaleY = size.height / absoluteImageSize.height; 83 | 84 | Rect scaleRect(Barcode barcode) { 85 | return Rect.fromLTRB( 86 | barcode.boundingBox.left * scaleX, 87 | barcode.boundingBox.top * scaleY, 88 | barcode.boundingBox.right * scaleX, 89 | barcode.boundingBox.bottom * scaleY, 90 | ); 91 
| } 92 | 93 | final Paint paint = Paint() 94 | ..style = PaintingStyle.stroke 95 | ..strokeWidth = 2.0; 96 | 97 | for (final Barcode barcode in barcodeLocations) { 98 | paint.color = Colors.green; 99 | canvas.drawRect(scaleRect(barcode), paint); 100 | } 101 | } 102 | 103 | @override 104 | bool shouldRepaint(BarcodeDetectorPainter oldDelegate) { 105 | return oldDelegate.absoluteImageSize != absoluteImageSize || 106 | oldDelegate.barcodeLocations != barcodeLocations; 107 | } 108 | } 109 | 110 | class FaceDetectorPainter extends CustomPainter { 111 | FaceDetectorPainter(this.absoluteImageSize, this.faces); 112 | 113 | final Size absoluteImageSize; 114 | final List faces; 115 | 116 | @override 117 | void paint(Canvas canvas, Size size) { 118 | final double scaleX = size.width / absoluteImageSize.width; 119 | final double scaleY = size.height / absoluteImageSize.height; 120 | 121 | final Paint paint = Paint() 122 | ..style = PaintingStyle.stroke 123 | ..strokeWidth = 2.0 124 | ..color = Colors.red; 125 | 126 | final Paint greenPaint = Paint() 127 | ..style = PaintingStyle.stroke 128 | ..strokeWidth = 2.0 129 | ..color = Colors.green; 130 | 131 | for (final Face face in faces) { 132 | final contour = face.getContour((FaceContourType.allPoints)); 133 | canvas.drawPoints( 134 | ui.PointMode.points, 135 | contour.positionsList 136 | .map((offset) => Offset((offset.dx * scaleX), offset.dy * scaleY)) 137 | .toList(), 138 | paint); 139 | /*for (int i = 0; i < contour.positionsList.length - 1; i++) { 140 | canvas.drawLine(contour.positionsList[i].scale(scaleX, scaleY), 141 | contour.positionsList[i + 1].scale(scaleX, scaleY), paint); 142 | }*/ 143 | for (final connection in faceMaskConnections) { 144 | canvas.drawLine( 145 | contour.positionsList[connection.x].scale(scaleX, scaleY), 146 | contour.positionsList[connection.y].scale(scaleX, scaleY), 147 | paint); 148 | } 149 | 150 | canvas.drawRect( 151 | Rect.fromLTRB( 152 | face.boundingBox.left * scaleX, 153 | 
face.boundingBox.top * scaleY, 154 | face.boundingBox.right * scaleX, 155 | face.boundingBox.bottom * scaleY, 156 | ), 157 | greenPaint, 158 | ); 159 | } 160 | } 161 | 162 | @override 163 | bool shouldRepaint(FaceDetectorPainter oldDelegate) { 164 | return oldDelegate.absoluteImageSize != absoluteImageSize || 165 | oldDelegate.faces != faces; 166 | } 167 | } 168 | 169 | class LabelDetectorPainter extends CustomPainter { 170 | LabelDetectorPainter(this.absoluteImageSize, this.labels); 171 | 172 | final Size absoluteImageSize; 173 | final List labels; 174 | 175 | @override 176 | void paint(Canvas canvas, Size size) { 177 | final ui.ParagraphBuilder builder = ui.ParagraphBuilder( 178 | ui.ParagraphStyle( 179 | textAlign: TextAlign.left, 180 | fontSize: 23, 181 | textDirection: TextDirection.ltr), 182 | ); 183 | 184 | builder.pushStyle(ui.TextStyle(color: Colors.green)); 185 | for (final ImageLabel label in labels) { 186 | builder.addText('Label: ${label.text}, ' 187 | 'Confidence: ${label.confidence.toStringAsFixed(2)}\n'); 188 | } 189 | builder.pop(); 190 | 191 | canvas.drawParagraph( 192 | builder.build() 193 | ..layout(ui.ParagraphConstraints( 194 | width: size.width, 195 | )), 196 | const Offset(0, 0), 197 | ); 198 | } 199 | 200 | @override 201 | bool shouldRepaint(LabelDetectorPainter oldDelegate) { 202 | return oldDelegate.absoluteImageSize != absoluteImageSize || 203 | oldDelegate.labels != labels; 204 | } 205 | } 206 | 207 | // Paints rectangles around all the text in the image. 
208 | class TextDetectorPainter extends CustomPainter { 209 | TextDetectorPainter(this.absoluteImageSize, this.visionText); 210 | 211 | final Size absoluteImageSize; 212 | final VisionText visionText; 213 | 214 | @override 215 | void paint(Canvas canvas, Size size) { 216 | final double scaleX = size.width / absoluteImageSize.width; 217 | final double scaleY = size.height / absoluteImageSize.height; 218 | 219 | Rect scaleRect(TextContainer container) { 220 | return Rect.fromLTRB( 221 | container.boundingBox.left * scaleX, 222 | container.boundingBox.top * scaleY, 223 | container.boundingBox.right * scaleX, 224 | container.boundingBox.bottom * scaleY, 225 | ); 226 | } 227 | 228 | final Paint paint = Paint() 229 | ..style = PaintingStyle.stroke 230 | ..strokeWidth = 2.0; 231 | 232 | for (final TextBlock block in visionText.blocks) { 233 | print(block.text); 234 | for (final TextLine line in block.lines) { 235 | for (final TextElement element in line.elements) { 236 | paint.color = Colors.green; 237 | canvas.drawRect(scaleRect(element), paint); 238 | } 239 | 240 | paint.color = Colors.yellow; 241 | canvas.drawRect(scaleRect(line), paint); 242 | } 243 | 244 | paint.color = Colors.red; 245 | canvas.drawRect(scaleRect(block), paint); 246 | } 247 | } 248 | 249 | @override 250 | bool shouldRepaint(TextDetectorPainter oldDelegate) { 251 | return oldDelegate.absoluteImageSize != absoluteImageSize || 252 | oldDelegate.visionText != visionText; 253 | } 254 | } 255 | -------------------------------------------------------------------------------- /lib/src/google_vision.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | part of google_ml_vision; 6 | 7 | enum _ImageType { file, bytes } 8 | 9 | /// Indicates the image rotation. 
10 | /// 11 | /// Rotation is counter-clockwise. 12 | enum ImageRotation { rotation0, rotation90, rotation180, rotation270 } 13 | 14 | /// Detected language from text recognition in regular and document images. 15 | class RecognizedLanguage { 16 | RecognizedLanguage._(dynamic data) : languageCode = data['languageCode']; 17 | 18 | /// The BCP-47 language code, such as, en-US or sr-Latn. For more information, 19 | /// see http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. 20 | final String? languageCode; 21 | } 22 | 23 | /// The Google machine learning vision API. 24 | /// 25 | /// You can get an instance by calling [GoogleVision.instance] and then get 26 | /// a detector from the instance: 27 | /// 28 | /// ```dart 29 | /// TextRecognizer textRecognizer = GoogleVision.instance.textRecognizer(); 30 | /// ``` 31 | class GoogleVision { 32 | GoogleVision._(); 33 | 34 | @visibleForTesting 35 | static const MethodChannel channel = 36 | MethodChannel('plugins.flutter.brianmtully.com/google_ml_vision'); 37 | 38 | @visibleForTesting 39 | static int nextHandle = 0; 40 | 41 | /// Singleton of [GoogleVision]. 42 | /// 43 | /// Use this get an instance of a detector: 44 | /// 45 | /// ```dart 46 | /// TextRecognizer textRecognizer = GoogleVision.instance.textRecognizer(); 47 | /// ``` 48 | static final GoogleVision instance = GoogleVision._(); 49 | 50 | /// Creates an instance of [BarcodeDetector]. 51 | BarcodeDetector barcodeDetector([BarcodeDetectorOptions? options]) { 52 | return BarcodeDetector._( 53 | options ?? const BarcodeDetectorOptions(), 54 | nextHandle++, 55 | ); 56 | } 57 | 58 | /// Creates an instance of [FaceDetector]. 59 | FaceDetector faceDetector([FaceDetectorOptions? options]) { 60 | return FaceDetector._( 61 | options ?? const FaceDetectorOptions(), 62 | nextHandle++, 63 | ); 64 | } 65 | 66 | /// Creates an on device instance of [ImageLabeler]. 67 | ImageLabeler imageLabeler([ImageLabelerOptions? 
options]) { 68 | return ImageLabeler._( 69 | options: options ?? const ImageLabelerOptions(), 70 | handle: nextHandle++, 71 | ); 72 | } 73 | 74 | /// Creates an instance of [TextRecognizer]. 75 | TextRecognizer textRecognizer() { 76 | return TextRecognizer._( 77 | handle: nextHandle++, 78 | ); 79 | } 80 | } 81 | 82 | /// Represents an image object used for both on-device and cloud API detectors. 83 | /// 84 | /// Create an instance by calling one of the factory constructors. 85 | class GoogleVisionImage { 86 | GoogleVisionImage._({ 87 | required _ImageType type, 88 | GoogleVisionImageMetadata? metadata, 89 | String? filePath, 90 | Uint8List? bytes, 91 | }) : _filePath = filePath, 92 | _metadata = metadata, 93 | _bytes = bytes, 94 | _type = type; 95 | 96 | /// Construct a [GoogleVisionImage] from a file. 97 | factory GoogleVisionImage.fromFile(File imageFile) { 98 | return GoogleVisionImage._( 99 | type: _ImageType.file, 100 | filePath: imageFile.path, 101 | ); 102 | } 103 | 104 | /// Construct a [GoogleVisionImage] from a file path. 105 | factory GoogleVisionImage.fromFilePath(String imagePath) { 106 | return GoogleVisionImage._( 107 | type: _ImageType.file, 108 | filePath: imagePath, 109 | ); 110 | } 111 | 112 | /// Construct a [GoogleVisionImage] from a list of bytes. 113 | /// 114 | /// On Android, expects `android.graphics.ImageFormat.NV21` format. Note: 115 | /// Concatenating the planes of `android.graphics.ImageFormat.YUV_420_888` 116 | /// into a single plane, converts it to `android.graphics.ImageFormat.NV21`. 117 | /// 118 | /// On iOS, expects `kCVPixelFormatType_32BGRA` format. However, this should 119 | /// work with most formats from `kCVPixelFormatType_*`. 120 | factory GoogleVisionImage.fromBytes( 121 | Uint8List bytes, 122 | GoogleVisionImageMetadata metadata, 123 | ) { 124 | return GoogleVisionImage._( 125 | type: _ImageType.bytes, 126 | bytes: bytes, 127 | metadata: metadata, 128 | ); 129 | } 130 | 131 | final Uint8List? 
_bytes; 132 | final String? _filePath; 133 | final GoogleVisionImageMetadata? _metadata; 134 | final _ImageType _type; 135 | 136 | Map _serialize() => { 137 | 'type': _enumToString(_type), 138 | 'bytes': _bytes, 139 | 'path': _filePath, 140 | 'metadata': _type == _ImageType.bytes ? _metadata!._serialize() : null, 141 | }; 142 | } 143 | 144 | /// Plane attributes to create the image buffer on iOS. 145 | /// 146 | /// When using iOS, [bytesPerRow], [height], and [width] throw [AssertionError] 147 | /// if `null`. 148 | class GoogleVisionImagePlaneMetadata { 149 | GoogleVisionImagePlaneMetadata({ 150 | required this.bytesPerRow, 151 | this.height, 152 | this.width, 153 | }) : assert(defaultTargetPlatform != TargetPlatform.iOS || height != null), 154 | assert(defaultTargetPlatform != TargetPlatform.iOS || width != null); 155 | 156 | /// The row stride for this color plane, in bytes. 157 | final int bytesPerRow; 158 | 159 | /// Height of the pixel buffer on iOS. 160 | final int? height; 161 | 162 | /// Width of the pixel buffer on iOS. 163 | final int? width; 164 | 165 | Map _serialize() => { 166 | 'bytesPerRow': bytesPerRow, 167 | 'height': height, 168 | 'width': width, 169 | }; 170 | } 171 | 172 | /// Image metadata used by [GoogleVision] detectors. 173 | /// 174 | /// [rotation] defaults to [ImageRotation.rotation0]. Currently only rotates on 175 | /// Android. 176 | /// 177 | /// When using iOS, [rawFormat] and [planeData] throw [AssertionError] if 178 | /// `null`. 179 | class GoogleVisionImageMetadata { 180 | GoogleVisionImageMetadata({ 181 | required this.size, 182 | this.rawFormat, 183 | this.planeData, 184 | this.rotation = ImageRotation.rotation0, 185 | }) : assert( 186 | defaultTargetPlatform != TargetPlatform.iOS || rawFormat != null, 187 | ), 188 | assert( 189 | defaultTargetPlatform != TargetPlatform.iOS || planeData != null, 190 | ); 191 | 192 | /// Size of the image in pixels. 193 | final Size size; 194 | 195 | /// Rotation of the image for Android. 
196 | /// 197 | /// Not currently used on iOS. 198 | final ImageRotation rotation; 199 | 200 | /// Raw version of the format from the iOS platform. 201 | /// 202 | /// Since iOS can use any planar format, this format will be used to create 203 | /// the image buffer on iOS. 204 | /// 205 | /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers. 206 | /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc 207 | /// 208 | /// Not used on Android. 209 | final Object? rawFormat; 210 | 211 | /// The plane attributes to create the image buffer on iOS. 212 | /// 213 | /// Not used on Android. 214 | final List? planeData; 215 | 216 | int _imageRotationToInt(ImageRotation rotation) { 217 | switch (rotation) { 218 | case ImageRotation.rotation90: 219 | return 90; 220 | case ImageRotation.rotation180: 221 | return 180; 222 | case ImageRotation.rotation270: 223 | return 270; 224 | default: 225 | assert(rotation == ImageRotation.rotation0); 226 | return 0; 227 | } 228 | } 229 | 230 | Map _serialize() => { 231 | 'width': size.width, 232 | 'height': size.height, 233 | 'rotation': _imageRotationToInt(rotation), 234 | 'rawFormat': rawFormat, 235 | 'planeData': planeData 236 | ?.map((GoogleVisionImagePlaneMetadata plane) => plane._serialize()) 237 | .toList(), 238 | }; 239 | } 240 | 241 | String _enumToString(dynamic enumValue) { 242 | final String enumString = enumValue.toString(); 243 | return enumString.substring(enumString.indexOf('.') + 1); 244 | } 245 | -------------------------------------------------------------------------------- /android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/GMLKFaceDetector.java: -------------------------------------------------------------------------------- 1 | // Copyright 2019 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | package com.brianmtully.flutter.plugins.googlemlvision; 6 | 7 | import android.graphics.PointF; 8 | 9 | import androidx.annotation.NonNull; 10 | import com.google.android.gms.tasks.OnFailureListener; 11 | import com.google.android.gms.tasks.OnSuccessListener; 12 | import com.google.mlkit.vision.common.InputImage; 13 | import com.google.mlkit.vision.face.Face; 14 | import com.google.mlkit.vision.face.FaceDetection; 15 | import com.google.mlkit.vision.face.FaceDetector; 16 | import com.google.mlkit.vision.face.FaceContour; 17 | import com.google.mlkit.vision.face.FaceDetectorOptions; 18 | import com.google.mlkit.vision.face.FaceLandmark; 19 | import io.flutter.plugin.common.MethodChannel; 20 | import java.io.IOException; 21 | import java.util.ArrayList; 22 | import java.util.HashMap; 23 | import java.util.List; 24 | import java.util.Map; 25 | 26 | class GMLKFaceDetector implements Detector { 27 | private final FaceDetector detector; 28 | 29 | GMLKFaceDetector(Map options) { 30 | detector = FaceDetection.getClient(parseOptions(options)); 31 | } 32 | 33 | @Override 34 | public void handleDetection(final InputImage image, final MethodChannel.Result result) { 35 | detector 36 | .process(image) 37 | .addOnSuccessListener( 38 | new OnSuccessListener>() { 39 | @Override 40 | public void onSuccess(List foundFaces) { 41 | List> faces = new ArrayList<>(foundFaces.size()); 42 | for (Face face : foundFaces) { 43 | Map faceData = new HashMap<>(); 44 | 45 | faceData.put("left", (double) face.getBoundingBox().left); 46 | faceData.put("top", (double) face.getBoundingBox().top); 47 | faceData.put("width", (double) face.getBoundingBox().width()); 48 | faceData.put("height", (double) face.getBoundingBox().height()); 49 | 50 | faceData.put("headEulerAngleY", face.getHeadEulerAngleY()); 51 | faceData.put("headEulerAngleZ", face.getHeadEulerAngleZ()); 52 | if (face.getSmilingProbability() != null) { 53 | faceData.put("smilingProbability", face.getSmilingProbability()); 54 | } 55 | 
56 | if (face.getLeftEyeOpenProbability() 57 | != null) { 58 | faceData.put("leftEyeOpenProbability", face.getLeftEyeOpenProbability()); 59 | } 60 | 61 | if (face.getRightEyeOpenProbability() 62 | != null) { 63 | faceData.put("rightEyeOpenProbability", face.getRightEyeOpenProbability()); 64 | } 65 | 66 | if (face.getTrackingId() != null) { 67 | faceData.put("trackingId", face.getTrackingId()); 68 | } 69 | 70 | faceData.put("landmarks", getLandmarkData(face)); 71 | 72 | faceData.put("contours", getContourData(face)); 73 | 74 | faces.add(faceData); 75 | } 76 | 77 | result.success(faces); 78 | } 79 | }) 80 | .addOnFailureListener( 81 | new OnFailureListener() { 82 | @Override 83 | public void onFailure(@NonNull Exception exception) { 84 | result.error("faceDetectorError", exception.getLocalizedMessage(), null); 85 | } 86 | }); 87 | } 88 | 89 | private Map getLandmarkData(Face face) { 90 | Map landmarks = new HashMap<>(); 91 | 92 | landmarks.put("bottomMouth", landmarkPosition(face, FaceLandmark.MOUTH_BOTTOM)); 93 | landmarks.put("leftCheek", landmarkPosition(face, FaceLandmark.LEFT_CHEEK)); 94 | landmarks.put("leftEar", landmarkPosition(face, FaceLandmark.LEFT_EAR)); 95 | landmarks.put("leftEye", landmarkPosition(face, FaceLandmark.LEFT_EYE)); 96 | landmarks.put("leftMouth", landmarkPosition(face, FaceLandmark.MOUTH_LEFT)); 97 | landmarks.put("noseBase", landmarkPosition(face, FaceLandmark.NOSE_BASE)); 98 | landmarks.put("rightCheek", landmarkPosition(face, FaceLandmark.RIGHT_CHEEK)); 99 | landmarks.put("rightEar", landmarkPosition(face, FaceLandmark.RIGHT_EAR)); 100 | landmarks.put("rightEye", landmarkPosition(face, FaceLandmark.RIGHT_EYE)); 101 | landmarks.put("rightMouth", landmarkPosition(face, FaceLandmark.MOUTH_RIGHT)); 102 | 103 | return landmarks; 104 | } 105 | 106 | private Map> getContourData(Face face) { 107 | Map> contours = new HashMap<>(); 108 | 109 | contours.put("allPoints", allContourPoints(face)); 110 | contours.put("face", contourPosition(face, 
FaceContour.FACE)); 111 | contours.put("leftEye", contourPosition(face, FaceContour.LEFT_EYE)); 112 | contours.put( 113 | "leftEyebrowBottom", contourPosition(face, FaceContour.LEFT_EYEBROW_BOTTOM)); 114 | contours.put( 115 | "leftEyebrowTop", contourPosition(face, FaceContour.LEFT_EYEBROW_TOP)); 116 | contours.put( 117 | "lowerLipBottom", contourPosition(face, FaceContour.LOWER_LIP_BOTTOM)); 118 | contours.put("lowerLipTop", contourPosition(face, FaceContour.LOWER_LIP_TOP)); 119 | contours.put("noseBottom", contourPosition(face, FaceContour.NOSE_BOTTOM)); 120 | contours.put("noseBridge", contourPosition(face, FaceContour.NOSE_BRIDGE)); 121 | contours.put("rightEye", contourPosition(face, FaceContour.RIGHT_EYE)); 122 | contours.put( 123 | "rightEyebrowBottom", 124 | contourPosition(face, FaceContour.RIGHT_EYEBROW_BOTTOM)); 125 | contours.put( 126 | "rightEyebrowTop", contourPosition(face, FaceContour.RIGHT_EYEBROW_TOP)); 127 | contours.put( 128 | "upperLipBottom", contourPosition(face, FaceContour.UPPER_LIP_BOTTOM)); 129 | contours.put("upperLipTop", contourPosition(face, FaceContour.UPPER_LIP_TOP)); 130 | 131 | return contours; 132 | } 133 | 134 | private double[] landmarkPosition(Face face, int landmarkInt) { 135 | FaceLandmark landmark = face.getLandmark(landmarkInt); 136 | if (landmark != null) { 137 | 138 | return new double[] {landmark.getPosition().x, landmark.getPosition().y}; 139 | } 140 | 141 | return null; 142 | } 143 | 144 | private List contourPosition(Face face, int contourInt) { 145 | FaceContour contour = face.getContour(contourInt); 146 | if (contour != null) { 147 | List contourPoints = contour.getPoints(); 148 | List result = new ArrayList(); 149 | 150 | for (int i = 0; i < contourPoints.size(); i++) { 151 | result.add(new double[] {contourPoints.get(i).x, contourPoints.get(i).y}); 152 | } 153 | 154 | return result; 155 | } 156 | 157 | return null; 158 | } 159 | 160 | private List allContourPoints(Face face) { 161 | List contours = 
face.getAllContours(); 162 | List result = new ArrayList(); 163 | for (int i = 0; i < contours.size(); i++) { 164 | List contourPoints = contours.get(i).getPoints(); 165 | for (int j = 0; j < contourPoints.size(); j++) { 166 | result.add(new double[]{contourPoints.get(j).x, contourPoints.get(j).y}); 167 | } 168 | 169 | } 170 | return result; 171 | } 172 | 173 | 174 | private FaceDetectorOptions parseOptions(Map options) { 175 | int classification = 176 | (boolean) options.get("enableClassification") 177 | ? FaceDetectorOptions.CLASSIFICATION_MODE_ALL 178 | : FaceDetectorOptions.CLASSIFICATION_MODE_NONE; 179 | 180 | int landmark = 181 | (boolean) options.get("enableLandmarks") 182 | ? FaceDetectorOptions.LANDMARK_MODE_ALL 183 | : FaceDetectorOptions.LANDMARK_MODE_NONE; 184 | 185 | int contours = 186 | (boolean) options.get("enableContours") 187 | ? FaceDetectorOptions.CONTOUR_MODE_ALL 188 | : FaceDetectorOptions.CONTOUR_MODE_NONE; 189 | 190 | int mode; 191 | switch ((String) options.get("mode")) { 192 | case "accurate": 193 | mode = FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE; 194 | break; 195 | case "fast": 196 | mode = FaceDetectorOptions.PERFORMANCE_MODE_FAST; 197 | break; 198 | default: 199 | throw new IllegalArgumentException("Not a mode:" + options.get("mode")); 200 | } 201 | 202 | FaceDetectorOptions.Builder builder = 203 | new FaceDetectorOptions.Builder() 204 | .setClassificationMode(classification) 205 | .setLandmarkMode(landmark) 206 | .setContourMode(contours) 207 | .setMinFaceSize((float) ((double) options.get("minFaceSize"))) 208 | .setPerformanceMode(mode); 209 | 210 | if ((boolean) options.get("enableTracking")) { 211 | builder.enableTracking(); 212 | } 213 | 214 | return builder.build(); 215 | } 216 | 217 | @Override 218 | public void close() throws IOException { 219 | detector.close(); 220 | } 221 | } 222 | -------------------------------------------------------------------------------- /lib/src/face_detector.dart: 
--------------------------------------------------------------------------------
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

part of google_ml_vision;

/// Option for controlling additional trade-offs in performing face detection.
///
/// Accurate tends to detect more faces and may be more precise in determining
/// values such as position, at the cost of speed.
enum FaceDetectorMode { accurate, fast }

/// Available face landmarks detected by [FaceDetector].
enum FaceLandmarkType {
  bottomMouth,
  leftCheek,
  leftEar,
  leftEye,
  leftMouth,
  noseBase,
  rightCheek,
  rightEar,
  rightEye,
  rightMouth,
}

/// Available face contour types detected by [FaceDetector].
enum FaceContourType {
  allPoints,
  face,
  leftEye,
  leftEyebrowBottom,
  leftEyebrowTop,
  lowerLipBottom,
  lowerLipTop,
  noseBottom,
  noseBridge,
  rightEye,
  rightEyebrowBottom,
  rightEyebrowTop,
  upperLipBottom,
  upperLipTop
}

/// Detector for detecting faces in an input image.
///
/// A face detector is created via
/// `faceDetector([FaceDetectorOptions options])` in [GoogleVision]:
///
/// ```dart
/// final GoogleVisionImage image =
///     GoogleVisionImage.fromFilePath('path/to/file');
///
/// final FaceDetector faceDetector = GoogleVision.instance.faceDetector();
///
/// final List<Face> faces = await faceDetector.processImage(image);
/// ```
class FaceDetector {
  FaceDetector._(this.options, this._handle);

  /// The options for the face detector.
  final FaceDetectorOptions options;
  final int _handle;
  bool _hasBeenOpened = false;
  bool _isClosed = false;

  /// Detects faces in the input image.
  Future<List<Face>> processImage(GoogleVisionImage visionImage) async {
    assert(!_isClosed);
    _hasBeenOpened = true;

    // Serialize the options alongside the image so the platform side can
    // lazily create the native detector for this handle.
    final reply = await GoogleVision.channel.invokeListMethod<dynamic>(
      'FaceDetector#processImage',
      <String, dynamic>{
        'handle': _handle,
        'options': <String, dynamic>{
          'enableClassification': options.enableClassification,
          'enableLandmarks': options.enableLandmarks,
          'enableContours': options.enableContours,
          'enableTracking': options.enableTracking,
          'minFaceSize': options.minFaceSize,
          'mode': _enumToString(options.mode),
        },
      }..addAll(visionImage._serialize()),
    );

    return <Face>[
      for (final dynamic data in reply!) Face._(data),
    ];
  }

  /// Release resources used by this detector.
  Future<void> close() {
    // A detector that was never used has no platform-side resources; just
    // mark it closed locally.
    if (!_hasBeenOpened) _isClosed = true;
    if (_isClosed) return Future<void>.value();

    _isClosed = true;
    return GoogleVision.channel.invokeMethod<void>(
      'FaceDetector#close',
      <String, dynamic>{'handle': _handle},
    );
  }
}

/// Immutable options for configuring features of [FaceDetector].
///
/// Used to configure features such as classification, face tracking, speed,
/// etc.
class FaceDetectorOptions {
  /// Constructor for [FaceDetectorOptions].
  ///
  /// The parameter [minFaceSize] must be between 0.0 and 1.0, inclusive.
  const FaceDetectorOptions({
    this.enableClassification = false,
    this.enableLandmarks = false,
    this.enableContours = false,
    this.enableTracking = false,
    this.minFaceSize = 0.1,
    this.mode = FaceDetectorMode.fast,
  })  : assert(minFaceSize >= 0.0),
        assert(minFaceSize <= 1.0);

  /// Whether to run additional classifiers for characterizing attributes.
  ///
  /// E.g. "smiling" and "eyes open".
  final bool enableClassification;

  /// Whether to detect [FaceLandmark]s.
  final bool enableLandmarks;

  /// Whether to detect [FaceContour]s.
  final bool enableContours;

  /// Whether to enable face tracking.
  ///
  /// If enabled, the detector will maintain a consistent ID for each face when
  /// processing consecutive frames.
  final bool enableTracking;

  /// The smallest desired face size.
  ///
  /// Expressed as a proportion of the width of the head to the image width.
  ///
  /// Must be a value between 0.0 and 1.0.
  final double minFaceSize;

  /// Option for controlling additional accuracy / speed trade-offs.
  final FaceDetectorMode mode;
}

/// Represents a face detected by [FaceDetector].
class Face {
  Face._(dynamic data)
      : boundingBox = Rect.fromLTWH(
          data['left'],
          data['top'],
          data['width'],
          data['height'],
        ),
        headEulerAngleY = data['headEulerAngleY'],
        headEulerAngleZ = data['headEulerAngleZ'],
        leftEyeOpenProbability = data['leftEyeOpenProbability'],
        rightEyeOpenProbability = data['rightEyeOpenProbability'],
        smilingProbability = data['smilingProbability'],
        trackingId = data['trackingId'],
        _landmarks = <FaceLandmarkType, FaceLandmark?>{
          for (final FaceLandmarkType type in FaceLandmarkType.values)
            type: data['landmarks'][_enumToString(type)] == null
                ? null
                : FaceLandmark._(
                    type,
                    Offset(
                      data['landmarks'][_enumToString(type)][0],
                      data['landmarks'][_enumToString(type)][1],
                    ),
                  ),
        },
        _contours = <FaceContourType, FaceContour?>{
          // 'contours' may be absent entirely, so fall back to an empty map
          // before looking the type up.
          for (final FaceContourType type in FaceContourType.values)
            type: (data['contours'] ?? {})[_enumToString(type)] == null
                ? null
                : FaceContour._(
                    type,
                    <Offset>[
                      for (final dynamic pos
                          in data['contours'][_enumToString(type)])
                        Offset(pos[0], pos[1]),
                    ],
                  ),
        };

  final Map<FaceLandmarkType, FaceLandmark?> _landmarks;
  final Map<FaceContourType, FaceContour?> _contours;

  /// The axis-aligned bounding rectangle of the detected face.
  ///
  /// The point (0, 0) is defined as the upper-left corner of the image.
  final Rect boundingBox;

  /// The rotation of the face about the vertical axis of the image.
  ///
  /// Represented in degrees.
  ///
  /// A face with a positive Euler Y angle is turned to the camera's right and
  /// to its left.
  ///
  /// The Euler Y angle is guaranteed only when using the "accurate" mode
  /// setting of the face detector (as opposed to the "fast" mode setting, which
  /// takes some shortcuts to make detection faster).
  final double? headEulerAngleY;

  /// The rotation of the face about the axis pointing out of the image.
  ///
  /// Represented in degrees.
  ///
  /// A face with a positive Euler Z angle is rotated counter-clockwise relative
  /// to the camera.
  ///
  /// ML Kit always reports the Euler Z angle of a detected face.
  final double? headEulerAngleZ;

  /// Probability that the face's left eye is open.
  ///
  /// A value between 0.0 and 1.0 inclusive, or null if probability was not
  /// computed.
  final double? leftEyeOpenProbability;

  /// Probability that the face's right eye is open.
  ///
  /// A value between 0.0 and 1.0 inclusive, or null if probability was not
  /// computed.
  final double? rightEyeOpenProbability;

  /// Probability that the face is smiling.
  ///
  /// A value between 0.0 and 1.0 inclusive, or null if probability was not
  /// computed.
  final double? smilingProbability;

  /// The tracking ID if the tracking is enabled.
  ///
  /// Null if tracking was not enabled.
  final int? trackingId;

  /// Gets the landmark based on the provided [FaceLandmarkType].
  ///
  /// Null if landmark was not detected.
  FaceLandmark? getLandmark(FaceLandmarkType landmark) => _landmarks[landmark];

  /// Gets the contour based on the provided [FaceContourType].
  ///
  /// Null if contour was not detected.
  FaceContour? getContour(FaceContourType contour) => _contours[contour];
}

/// Represent a face landmark.
///
/// A landmark is a point on a detected face, such as an eye, nose, or mouth.
class FaceLandmark {
  FaceLandmark._(this.type, this.position);

  /// The [FaceLandmarkType] of this landmark.
  final FaceLandmarkType type;

  /// Gets a 2D point for landmark position.
  ///
  /// The point (0, 0) is defined as the upper-left corner of the image.
  final Offset position;
}

/// Represent a face contour.
///
/// Contours of facial features.
class FaceContour {
  FaceContour._(this.type, this.positionsList);

  /// The [FaceContourType] of this contour.
  final FaceContourType type;

  /// Gets a 2D point [List] for contour positions.
  ///
  /// The point (0, 0) is defined as the upper-left corner of the image.
  final List<Offset> positionsList;
}
--------------------------------------------------------------------------------
/ios/Classes/BarcodeDetector.m:
--------------------------------------------------------------------------------
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "FLTGoogleMlVisionPlugin.h"

@import MLKitBarcodeScanning;

@interface BarcodeDetector ()
@property MLKBarcodeScanner *detector;
@end

@implementation BarcodeDetector
- (instancetype)initWithOptions:(NSDictionary *)options {
  self = [super init];
  if (self) {
    _detector =
        [MLKBarcodeScanner barcodeScannerWithOptions:[BarcodeDetector parseOptions:options]];
  }
  return self;
}

// Scans |image| and replies with an array of serialized barcode dictionaries.
- (void)handleDetection:(MLKVisionImage *)image result:(FlutterResult)result {
  [_detector processImage:image
               completion:^(NSArray<MLKBarcode *> *barcodes, NSError *error) {
                 if (error) {
                   [FLTGoogleMlVisionPlugin handleError:error result:result];
                   return;
                 } else if (!barcodes) {
                   result(@[]);
                   return;
                 }

                 NSMutableArray *ret = [NSMutableArray array];
                 for (MLKBarcode *barcode in barcodes) {
                   [ret addObject:visionBarcodeToDictionary(barcode)];
                 }
                 result(ret);
               }];
}

// Serializes one barcode. Every optional ML Kit property is nil-guarded:
// a nil value inside an Objective-C dictionary literal throws at runtime.
NSDictionary *visionBarcodeToDictionary(MLKBarcode *barcode) {
  NSMutableArray *points = [NSMutableArray array];

  for (NSValue *point in barcode.cornerPoints) {
    [points addObject:@[ @(point.CGPointValue.x), @(point.CGPointValue.y) ]];
  }

  return @{
    // rawValue can be nil (e.g. barcodes with binary-only payloads); guard it
    // so serialization cannot crash the dictionary literal.
    @"rawValue" : barcode.rawValue ? barcode.rawValue : [NSNull null],
    @"displayValue" : barcode.displayValue ? barcode.displayValue : [NSNull null],
    @"left" : @(barcode.frame.origin.x),
    @"top" : @(barcode.frame.origin.y),
    @"width" : @(barcode.frame.size.width),
    @"height" : @(barcode.frame.size.height),
    @"format" : @(barcode.format),
    @"valueType" : @(barcode.valueType),
    @"points" : points,
    @"wifi" : barcode.wifi ? visionBarcodeWiFiToDictionary(barcode.wifi) : [NSNull null],
    @"email" : barcode.email ? visionBarcodeEmailToDictionary(barcode.email) : [NSNull null],
    @"phone" : barcode.phone ? visionBarcodePhoneToDictionary(barcode.phone) : [NSNull null],
    @"sms" : barcode.sms ? visionBarcodeSMSToDictionary(barcode.sms) : [NSNull null],
    @"url" : barcode.URL ? visionBarcodeURLToDictionary(barcode.URL) : [NSNull null],
    @"geoPoint" : barcode.geoPoint ? visionBarcodeGeoPointToDictionary(barcode.geoPoint)
                                   : [NSNull null],
    @"contactInfo" : barcode.contactInfo ? barcodeContactInfoToDictionary(barcode.contactInfo)
                                         : [NSNull null],
    @"calendarEvent" : barcode.calendarEvent ? calendarEventToDictionary(barcode.calendarEvent)
                                             : [NSNull null],
    @"driverLicense" : barcode.driverLicense ? driverLicenseToDictionary(barcode.driverLicense)
                                             : [NSNull null],
  };
}

NSDictionary *visionBarcodeWiFiToDictionary(MLKBarcodeWiFi *wifi) {
  return @{
    @"ssid" : wifi.ssid ? wifi.ssid : [NSNull null],
    @"password" : wifi.password ? wifi.password : [NSNull null],
    @"encryptionType" : @(wifi.type),
  };
}

NSDictionary *visionBarcodeEmailToDictionary(MLKBarcodeEmail *email) {
  return @{
    @"address" : email.address ? email.address : [NSNull null],
    @"body" : email.body ? email.body : [NSNull null],
    @"subject" : email.subject ? email.subject : [NSNull null],
    @"type" : @(email.type),
  };
}

NSDictionary *visionBarcodePhoneToDictionary(MLKBarcodePhone *phone) {
  return @{
    @"number" : phone.number ? phone.number : [NSNull null],
    @"type" : @(phone.type),
  };
}

NSDictionary *visionBarcodeSMSToDictionary(MLKBarcodeSMS *sms) {
  return @{
    @"phoneNumber" : sms.phoneNumber ? sms.phoneNumber : [NSNull null],
    @"message" : sms.message ? sms.message : [NSNull null],
  };
}

NSDictionary *visionBarcodeURLToDictionary(MLKBarcodeURLBookmark *url) {
  return @{
    @"title" : url.title ? url.title : [NSNull null],
    @"url" : url.url ? url.url : [NSNull null],
  };
}

NSDictionary *visionBarcodeGeoPointToDictionary(MLKBarcodeGeoPoint *geo) {
  return @{
    @"longitude" : @(geo.longitude),
    @"latitude" : @(geo.latitude),
  };
}

NSDictionary *barcodeContactInfoToDictionary(MLKBarcodeContactInfo *contact) {
  NSMutableArray *addresses = [NSMutableArray array];
  [contact.addresses enumerateObjectsUsingBlock:^(MLKBarcodeAddress *_Nonnull address,
                                                  NSUInteger idx, BOOL *_Nonnull stop) {
    NSMutableArray *addressLines = [NSMutableArray array];
    [address.addressLines enumerateObjectsUsingBlock:^(NSString *_Nonnull addressLine,
                                                       NSUInteger idx, BOOL *_Nonnull stop) {
      [addressLines addObject:addressLine];
    }];
    [addresses addObject:@{
      @"addressLines" : addressLines,
      @"type" : @(address.type),
    }];
  }];

  NSMutableArray *emails = [NSMutableArray array];
  [contact.emails enumerateObjectsUsingBlock:^(MLKBarcodeEmail *_Nonnull email, NSUInteger idx,
                                               BOOL *_Nonnull stop) {
    [emails addObject:@{
      @"address" : email.address ? email.address : [NSNull null],
      @"body" : email.body ? email.body : [NSNull null],
      @"subject" : email.subject ? email.subject : [NSNull null],
      @"type" : @(email.type),
    }];
  }];

  NSMutableArray *phones = [NSMutableArray array];
  [contact.phones enumerateObjectsUsingBlock:^(MLKBarcodePhone *_Nonnull phone, NSUInteger idx,
                                               BOOL *_Nonnull stop) {
    [phones addObject:@{
      @"number" : phone.number ? phone.number : [NSNull null],
      @"type" : @(phone.type),
    }];
  }];

  NSMutableArray *urls = [NSMutableArray array];
  [contact.urls
      enumerateObjectsUsingBlock:^(NSString *_Nonnull url, NSUInteger idx, BOOL *_Nonnull stop) {
        [urls addObject:url];
      }];
  return @{
    @"addresses" : addresses,
    @"emails" : emails,
    @"phones" : phones,
    @"urls" : urls,
    @"name" : @{
      @"formattedName" : contact.name.formattedName ? contact.name.formattedName : [NSNull null],
      @"first" : contact.name.first ? contact.name.first : [NSNull null],
      @"last" : contact.name.last ? contact.name.last : [NSNull null],
      @"middle" : contact.name.middle ? contact.name.middle : [NSNull null],
      @"prefix" : contact.name.prefix ? contact.name.prefix : [NSNull null],
      @"pronunciation" : contact.name.pronunciation ? contact.name.pronunciation : [NSNull null],
      @"suffix" : contact.name.suffix ? contact.name.suffix : [NSNull null],
    },
    @"jobTitle" : contact.jobTitle ? contact.jobTitle : [NSNull null],
    // BUG FIX: previously serialized contact.jobTitle under "organization".
    @"organization" : contact.organization ? contact.organization : [NSNull null],
  };
}

NSDictionary *calendarEventToDictionary(MLKBarcodeCalendarEvent *calendar) {
  // Dates are rendered as fixed-format UTC ISO-8601 strings so the Dart side
  // can parse them deterministically regardless of device locale.
  NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
  dateFormatter.locale = [[NSLocale alloc] initWithLocaleIdentifier:@"en_US_POSIX"];
  dateFormatter.dateFormat = @"yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'";
  dateFormatter.timeZone = [NSTimeZone timeZoneForSecondsFromGMT:0];
  return @{
    @"eventDescription" : calendar.eventDescription ? calendar.eventDescription : [NSNull null],
    @"location" : calendar.location ? calendar.location : [NSNull null],
    @"organizer" : calendar.organizer ? calendar.organizer : [NSNull null],
    @"status" : calendar.status ? calendar.status : [NSNull null],
    @"summary" : calendar.summary ? calendar.summary : [NSNull null],
    @"start" : calendar.start ? [dateFormatter stringFromDate:calendar.start] : [NSNull null],
    @"end" : calendar.end ? [dateFormatter stringFromDate:calendar.end] : [NSNull null],
  };
}

NSDictionary *driverLicenseToDictionary(MLKBarcodeDriverLicense *license) {
  return @{
    @"firstName" : license.firstName ? license.firstName : [NSNull null],
    @"middleName" : license.middleName ? license.middleName : [NSNull null],
    @"lastName" : license.lastName ? license.lastName : [NSNull null],
    @"gender" : license.gender ? license.gender : [NSNull null],
    @"addressCity" : license.addressCity ? license.addressCity : [NSNull null],
    @"addressStreet" : license.addressStreet ? license.addressStreet : [NSNull null],
    @"addressState" : license.addressState ? license.addressState : [NSNull null],
    @"addressZip" : license.addressZip ? license.addressZip : [NSNull null],
    @"birthDate" : license.birthDate ? license.birthDate : [NSNull null],
    @"documentType" : license.documentType ? license.documentType : [NSNull null],
    @"licenseNumber" : license.licenseNumber ? license.licenseNumber : [NSNull null],
    @"expiryDate" : license.expiryDate ? license.expiryDate : [NSNull null],
    @"issuingDate" : license.issuingDate ? license.issuingDate : [NSNull null],
    @"issuingCountry" : license.issuingCountry ? license.issuingCountry : [NSNull null],
  };
}

// Translates the Dart-side "barcodeFormats" bitmask into scanner options.
+ (MLKBarcodeScannerOptions *)parseOptions:(NSDictionary *)optionsData {
  NSNumber *barcodeFormat = optionsData[@"barcodeFormats"];
  return
      [[MLKBarcodeScannerOptions alloc] initWithFormats:(MLKBarcodeFormat)barcodeFormat.intValue];
}
@end
--------------------------------------------------------------------------------
/ios/Classes/FLTGoogleMlVisionPlugin.m:
--------------------------------------------------------------------------------
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "FLTGoogleMlVisionPlugin.h"

// Converts an NSError into the FlutterError shape the Dart side expects.
static FlutterError *getFlutterError(NSError *error) {
  return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code]
                             message:error.domain
                             details:error.localizedDescription];
}

@implementation FLTGoogleMlVisionPlugin
// Live detector wrappers keyed by the integer handle allocated on the Dart side.
static NSMutableDictionary<NSNumber *, id<Detector>> *detectors;

+ (void)handleError:(NSError *)error result:(FlutterResult)result {
  result(getFlutterError(error));
}

+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
  detectors = [NSMutableDictionary new];
  FlutterMethodChannel *channel =
      [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.brianmtully.com/google_ml_vision"
                                  binaryMessenger:[registrar messenger]];
  FLTGoogleMlVisionPlugin *instance = [[FLTGoogleMlVisionPlugin alloc] init];
  [registrar addMethodCallDelegate:instance channel:channel];

  //SEL sel = NSSelectorFromString(@"registerLibrary:withVersion:");
  /* if ([FIRApp respondsToSelector:sel]) {
    [FIRApp performSelector:sel withObject:LIBRARY_NAME withObject:LIBRARY_VERSION];
  }*/
}

- (instancetype)init {
  self = [super init];
  if (self) {
  }
  return self;
}

- 
(void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
  // Every "<Detector>#processImage" (or legacy "#detectInImage") call funnels
  // through the shared detection path; "#close" drops the cached detector.
  if ([@"BarcodeDetector#detectInImage" isEqualToString:call.method] ||
      [@"FaceDetector#processImage" isEqualToString:call.method] ||
      [@"ImageLabeler#processImage" isEqualToString:call.method] ||
      [@"TextRecognizer#processImage" isEqualToString:call.method]) {
    [self handleDetection:call result:result];
  } else if ([@"BarcodeDetector#close" isEqualToString:call.method] ||
             [@"FaceDetector#close" isEqualToString:call.method] ||
             [@"ImageLabeler#close" isEqualToString:call.method] ||
             [@"TextRecognizer#close" isEqualToString:call.method]) {
    NSNumber *handle = call.arguments[@"handle"];
    // Removing the entry releases the wrapper (and with it its ML Kit detector).
    [detectors removeObjectForKey:handle];
    result(nil);
  } else {
    result(FlutterMethodNotImplemented);
  }
}

// Lazily creates and caches (by handle) the detector selected by the method
// name prefix, then runs it on the decoded image.
- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
  MLKVisionImage *image = [self dataToVisionImage:call.arguments];
  NSDictionary *options = call.arguments[@"options"];

  NSNumber *handle = call.arguments[@"handle"];
  id detector = detectors[handle];
  if (!detector) {
    if ([call.method hasPrefix:@"BarcodeDetector"]) {
      detector = [[BarcodeDetector alloc] initWithOptions:options];
    } else if ([call.method hasPrefix:@"FaceDetector"]) {
      detector = [[FaceDetector alloc] initWithOptions:options];
    } else if ([call.method hasPrefix:@"ImageLabeler"]) {
      detector = [[ImageLabeler alloc] initWithOptions:options];
    } else if ([call.method hasPrefix:@"TextRecognizer"]) {
      detector = [[TextRecognizer alloc] initWithOptions:options];
    }

    [FLTGoogleMlVisionPlugin addDetector:handle detector:detector];
  }

  [detectors[handle] handleDetection:image result:result];
}

// Decodes the Dart-serialized image payload; "type" selects file vs raw bytes.
- (MLKVisionImage *)dataToVisionImage:(NSDictionary *)imageData {
  NSString *imageType = imageData[@"type"];

  if ([@"file" isEqualToString:imageType]) {
    return [self filePathToVisionImage:imageData[@"path"]];
  } else if ([@"bytes" isEqualToString:imageType]) {
    return [self bytesToVisionImage:imageData];
  } else {
    NSString *errorReason = [NSString stringWithFormat:@"No image type for: %@", imageType];
    @throw [NSException exceptionWithName:NSInvalidArgumentException
                                   reason:errorReason
                                 userInfo:nil];
  }
}

- (MLKVisionImage *)filePathToVisionImage:(NSString *)filePath {
  UIImage *image = [UIImage imageWithContentsOfFile:filePath];

  // Bake any EXIF orientation into the pixels by redrawing the image upright,
  // so the detector sees the picture the way the user does.
  if (image.imageOrientation != UIImageOrientationUp) {
    CGImageRef imgRef = image.CGImage;
    CGRect bounds = CGRectMake(0, 0, CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    UIGraphicsBeginImageContext(bounds.size);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), bounds, imgRef);
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    image = newImage;
  }

  return [[MLKVisionImage alloc] initWithImage:image];
}

// Rebuilds a CVPixelBuffer from the raw bytes + per-plane metadata the Dart
// side serialized, then wraps it in an MLKVisionImage.
- (MLKVisionImage *)bytesToVisionImage:(NSDictionary *)imageData {
  FlutterStandardTypedData *byteData = imageData[@"bytes"];
  NSData *imageBytes = byteData.data;

  NSDictionary *metadata = imageData[@"metadata"];
  NSArray *planeData = metadata[@"planeData"];
  size_t planeCount = planeData.count;

  NSNumber *width = metadata[@"width"];
  NSNumber *height = metadata[@"height"];

  // "rawFormat" carries the CoreVideo pixel-format FourCC as an integer.
  NSNumber *rawFormat = metadata[@"rawFormat"];
  FourCharCode format = FOUR_CHAR_CODE(rawFormat.unsignedIntValue);

  CVPixelBufferRef pxBuffer = NULL;
  if (planeCount == 0) {
    @throw [NSException exceptionWithName:NSInvalidArgumentException
                                   reason:@"Can't create image buffer with 0 planes."
                                 userInfo:nil];
  } else if (planeCount == 1) {
    NSDictionary *plane = planeData[0];
    NSNumber *bytesPerRow = plane[@"bytesPerRow"];

    pxBuffer = [self bytesToPixelBuffer:width.unsignedLongValue
                                 height:height.unsignedLongValue
                                 format:format
                            baseAddress:(void *)imageBytes.bytes
                            bytesPerRow:bytesPerRow.unsignedLongValue];
  } else {
    pxBuffer = [self planarBytesToPixelBuffer:width.unsignedLongValue
                                       height:height.unsignedLongValue
                                       format:format
                                  baseAddress:(void *)imageBytes.bytes
                                     dataSize:imageBytes.length
                                   planeCount:planeCount
                                    planeData:planeData];
  }

  return [self pixelBufferToVisionImage:pxBuffer];
}

// Wraps a single contiguous plane in a CVPixelBuffer without copying.
// NOTE(review): no release callback is passed, so |baseAddress| must outlive
// the buffer — here it is backed by |imageBytes|, which lives until the
// resulting UIImage has been rendered in pixelBufferToVisionImage.
- (CVPixelBufferRef)bytesToPixelBuffer:(size_t)width
                                height:(size_t)height
                                format:(FourCharCode)format
                           baseAddress:(void *)baseAddress
                           bytesPerRow:(size_t)bytesPerRow {
  CVPixelBufferRef pxBuffer = NULL;
  CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, format, baseAddress, bytesPerRow,
                               NULL, NULL, NULL, &pxBuffer);
  return pxBuffer;
}

// Builds a planar CVPixelBuffer from one contiguous byte blob.
// NOTE(review): assumes the planes are packed back-to-back in |baseAddress|
// in planeData order (each plane occupying height * bytesPerRow bytes) —
// confirm against the Dart-side serializer.
- (CVPixelBufferRef)planarBytesToPixelBuffer:(size_t)width
                                      height:(size_t)height
                                      format:(FourCharCode)format
                                 baseAddress:(void *)baseAddress
                                    dataSize:(size_t)dataSize
                                  planeCount:(size_t)planeCount
                                   planeData:(NSArray *)planeData {
  size_t widths[planeCount];
  size_t heights[planeCount];
  size_t bytesPerRows[planeCount];

  void *baseAddresses[planeCount];
  baseAddresses[0] = baseAddress;

  size_t lastAddressIndex = 0;  // Used to get base address for each plane
  for (int i = 0; i < planeCount; i++) {
    NSDictionary *plane = planeData[i];

    NSNumber *width = plane[@"width"];
    NSNumber *height = plane[@"height"];
    NSNumber *bytesPerRow = plane[@"bytesPerRow"];

    widths[i] = width.unsignedLongValue;
    heights[i] = height.unsignedLongValue;
    bytesPerRows[i] = bytesPerRow.unsignedLongValue;

    // Plane i starts where plane i-1 ends (cumulative offset into the blob).
    if (i > 0) {
      size_t addressIndex = lastAddressIndex + heights[i - 1] * bytesPerRows[i - 1];
      baseAddresses[i] = baseAddress + addressIndex;
      lastAddressIndex = addressIndex;
    }
  }

  CVPixelBufferRef pxBuffer = NULL;
  CVPixelBufferCreateWithPlanarBytes(kCFAllocatorDefault, width, height, format, NULL, dataSize,
                                     planeCount, baseAddresses, widths, heights, bytesPerRows, NULL,
                                     NULL, NULL, &pxBuffer);

  return pxBuffer;
}

// Renders the pixel buffer to a CGImage/UIImage for ML Kit, releasing both the
// pixel buffer (ownership transferred in by the callers above) and the CGImage.
- (MLKVisionImage *)pixelBufferToVisionImage:(CVPixelBufferRef)pixelBufferRef {
  CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef];

  CIContext *temporaryContext = [CIContext contextWithOptions:nil];
  CGImageRef videoImage =
      [temporaryContext createCGImage:ciImage
                             fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBufferRef),
                                                 CVPixelBufferGetHeight(pixelBufferRef))];

  UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
  CVPixelBufferRelease(pixelBufferRef);
  CGImageRelease(videoImage);
  return [[MLKVisionImage alloc] initWithImage:uiImage];
}

// Registers a detector under |handle|; a duplicate handle is a programming
// error on the Dart side, so fail loudly rather than silently replace it.
+ (void)addDetector:(NSNumber *)handle detector:(id)detector {
  if (detectors[handle]) {
    NSString *reason =
        [[NSString alloc] initWithFormat:@"Object for handle already exists: %d", handle.intValue];
    @throw [[NSException alloc] initWithName:NSInvalidArgumentException reason:reason userInfo:nil];
  }

  detectors[handle] = detector;
}
@end
--------------------------------------------------------------------------------
/ios/Classes/FaceDetector.m:
--------------------------------------------------------------------------------
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "FLTGoogleMlVisionPlugin.h"

@import MLKitFaceDetection;

// Wraps an MLKFaceDetector and converts its results into the plain
// NSArray / NSDictionary structure the Dart side of the plugin decodes.
@interface FaceDetector ()
@property MLKFaceDetector *detector;
@end

@implementation FaceDetector

// Creates a detector configured from the options map received over the
// method channel. See +parseOptions: for the recognized keys.
- (instancetype)initWithOptions:(NSDictionary *)options {
  self = [super init];
  if (self) {
    _detector = [MLKFaceDetector faceDetectorWithOptions:[FaceDetector parseOptions:options]];
  }
  return self;
}

// Runs face detection on |image| and replies through |result| with an array
// of per-face dictionaries (frame, Euler angles, probabilities, landmarks,
// contours). Values that ML Kit did not compute are encoded as NSNull so the
// Dart side receives null.
- (void)handleDetection:(MLKVisionImage *)image result:(FlutterResult)result {
  [_detector
      processImage:image
        completion:^(NSArray *_Nullable faces, NSError *_Nullable error) {
          if (error) {
            [FLTGoogleMlVisionPlugin handleError:error result:result];
            return;
          } else if (!faces) {
            // No faces object at all: report an empty list, not an error.
            result(@[]);
            return;
          }
          NSMutableArray *faceData = [NSMutableArray array];
          for (MLKFace *face in faces) {
            // Probabilities are only meaningful when classification was
            // enabled in the options; otherwise send NSNull.
            id smileProb = face.hasSmilingProbability ? @(face.smilingProbability) : [NSNull null];
            id leftProb =
                face.hasLeftEyeOpenProbability ? @(face.leftEyeOpenProbability) : [NSNull null];
            id rightProb =
                face.hasRightEyeOpenProbability ? @(face.rightEyeOpenProbability) : [NSNull null];

            NSDictionary *data = @{
              @"left" : @(face.frame.origin.x),
              @"top" : @(face.frame.origin.y),
              @"width" : @(face.frame.size.width),
              @"height" : @(face.frame.size.height),
              @"headEulerAngleY" : face.hasHeadEulerAngleY ? @(face.headEulerAngleY)
                                                           : [NSNull null],
              @"headEulerAngleZ" : face.hasHeadEulerAngleZ ? @(face.headEulerAngleZ)
                                                           : [NSNull null],
              @"smilingProbability" : smileProb,
              @"leftEyeOpenProbability" : leftProb,
              @"rightEyeOpenProbability" : rightProb,
              @"trackingId" : face.hasTrackingID ? @(face.trackingID) : [NSNull null],
              @"landmarks" : @{
                @"bottomMouth" : [FaceDetector getLandmarkPosition:face
                                                          landmark:MLKFaceLandmarkTypeMouthBottom],
                @"leftCheek" : [FaceDetector getLandmarkPosition:face
                                                        landmark:MLKFaceLandmarkTypeLeftCheek],
                @"leftEar" : [FaceDetector getLandmarkPosition:face
                                                      landmark:MLKFaceLandmarkTypeLeftEar],
                @"leftEye" : [FaceDetector getLandmarkPosition:face
                                                      landmark:MLKFaceLandmarkTypeLeftEye],
                @"leftMouth" : [FaceDetector getLandmarkPosition:face
                                                        landmark:MLKFaceLandmarkTypeMouthLeft],
                @"noseBase" : [FaceDetector getLandmarkPosition:face
                                                       landmark:MLKFaceLandmarkTypeNoseBase],
                @"rightCheek" : [FaceDetector getLandmarkPosition:face
                                                         landmark:MLKFaceLandmarkTypeRightCheek],
                @"rightEar" : [FaceDetector getLandmarkPosition:face
                                                       landmark:MLKFaceLandmarkTypeRightEar],
                @"rightEye" : [FaceDetector getLandmarkPosition:face
                                                       landmark:MLKFaceLandmarkTypeRightEye],
                @"rightMouth" : [FaceDetector getLandmarkPosition:face
                                                         landmark:MLKFaceLandmarkTypeMouthRight],
              },
              @"contours" : @{
                @"allPoints" : [FaceDetector getAllContourPoints:face],
                @"face" : [FaceDetector getContourPoints:face contour:MLKFaceContourTypeFace],
                @"leftEye" : [FaceDetector getContourPoints:face contour:MLKFaceContourTypeLeftEye],
                @"leftEyebrowBottom" :
                    [FaceDetector getContourPoints:face
                                           contour:MLKFaceContourTypeLeftEyebrowBottom],
                @"leftEyebrowTop" :
                    [FaceDetector getContourPoints:face contour:MLKFaceContourTypeLeftEyebrowTop],
                @"lowerLipBottom" :
                    [FaceDetector getContourPoints:face contour:MLKFaceContourTypeLowerLipBottom],
                @"lowerLipTop" : [FaceDetector getContourPoints:face
                                                        contour:MLKFaceContourTypeLowerLipTop],
                @"noseBottom" : [FaceDetector getContourPoints:face
                                                       contour:MLKFaceContourTypeNoseBottom],
                @"noseBridge" : [FaceDetector getContourPoints:face
                                                       contour:MLKFaceContourTypeNoseBridge],
                @"rightEye" : [FaceDetector getContourPoints:face
                                                     contour:MLKFaceContourTypeRightEye],
                @"rightEyebrowBottom" :
                    [FaceDetector getContourPoints:face
                                           contour:MLKFaceContourTypeRightEyebrowBottom],
                @"rightEyebrowTop" :
                    [FaceDetector getContourPoints:face contour:MLKFaceContourTypeRightEyebrowTop],
                @"upperLipBottom" :
                    [FaceDetector getContourPoints:face contour:MLKFaceContourTypeUpperLipBottom],
                @"upperLipTop" : [FaceDetector getContourPoints:face
                                                        contour:MLKFaceContourTypeUpperLipTop],
              }
            };

            [faceData addObject:data];
          }

          result(faceData);
        }];
}

// Returns the [x, y] position of |landmarkType| on |face|, or NSNull when the
// landmark was not detected.
+ (id)getLandmarkPosition:(MLKFace *)face landmark:(MLKFaceLandmarkType)landmarkType {
  MLKFaceLandmark *landmark = [face landmarkOfType:landmarkType];
  if (landmark) {
    return @[ @(landmark.position.x), @(landmark.position.y) ];
  }

  return [NSNull null];
}

// Returns the contour's points as an array of [x, y] pairs. A missing contour
// yields an empty array (not NSNull) so that +getAllContourPoints: can safely
// concatenate results with addObjectsFromArray:.
+ (id)getContourPoints:(MLKFace *)face contour:(MLKFaceContourType)contourType {
  MLKFaceContour *contour = [face contourOfType:contourType];
  if (contour) {
    NSArray *contourPoints = contour.points;
    NSMutableArray *result = [[NSMutableArray alloc] initWithCapacity:[contourPoints count]];
    for (int i = 0; i < [contourPoints count]; i++) {
      MLKVisionPoint *point = [contourPoints objectAtIndex:i];
      [result insertObject:@[ @(point.x), @(point.y) ] atIndex:i];
    }
    return [result copy];
  }

  return [NSMutableArray new];
}

// Concatenates every contour's points into one flat array.
// Order based on ML Kit constant orders https://developers.google.com/android/reference/com/google/mlkit/vision/face/FaceContour
// Order from ML Kit documentation is not valid for lips https://developers.google.com/ml-kit/vision/face-detection/face-detection-concepts#contours
+ (id)getAllContourPoints:(MLKFace *)face {
  NSMutableArray *result = [[NSMutableArray alloc] init];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeFace]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeLeftEyebrowTop]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeLeftEyebrowBottom]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeRightEyebrowTop]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeRightEyebrowBottom]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeLeftEye]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeRightEye]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeUpperLipTop]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeUpperLipBottom]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeLowerLipTop]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeLowerLipBottom]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeNoseBridge]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeNoseBottom]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeLeftCheek]];
  [result addObjectsFromArray:[FaceDetector getContourPoints:face contour:MLKFaceContourTypeRightCheek]];

  return [result copy];
}

// Translates the Dart-side options map into MLKFaceDetectorOptions.
// Recognized keys: enableClassification, enableLandmarks, enableContours,
// enableTracking (bools), minFaceSize (double), mode ("accurate" | "fast").
+ (MLKFaceDetectorOptions *)parseOptions:(NSDictionary *)optionsData {
  MLKFaceDetectorOptions *options = [[MLKFaceDetectorOptions alloc] init];

  NSNumber *enableClassification = optionsData[@"enableClassification"];
  if (enableClassification.boolValue) {
    options.classificationMode = MLKFaceDetectorClassificationModeAll;
  } else {
    options.classificationMode = MLKFaceDetectorClassificationModeNone;
  }

  NSNumber *enableLandmarks = optionsData[@"enableLandmarks"];
  if (enableLandmarks.boolValue) {
    options.landmarkMode = MLKFaceDetectorLandmarkModeAll;
  } else {
    options.landmarkMode = MLKFaceDetectorLandmarkModeNone;
  }

  NSNumber *enableContours = optionsData[@"enableContours"];
  if (enableContours.boolValue) {
    options.contourMode = MLKFaceDetectorContourModeAll;
  } else {
    options.contourMode = MLKFaceDetectorContourModeNone;
  }

  NSNumber *enableTracking = optionsData[@"enableTracking"];
  options.trackingEnabled = enableTracking.boolValue;

  NSNumber *minFaceSize = optionsData[@"minFaceSize"];
  options.minFaceSize = [minFaceSize doubleValue];

  NSString *mode = optionsData[@"mode"];
  if ([mode isEqualToString:@"accurate"]) {
    options.performanceMode = MLKFaceDetectorPerformanceModeAccurate;
  } else if ([mode isEqualToString:@"fast"]) {
    options.performanceMode = MLKFaceDetectorPerformanceModeFast;
  }

  return options;
}
@end
--------------------------------------------------------------------------------
/android/src/main/java/com/brianmtully/flutter/plugins/googlemlvision/GMLKBarcodeDetector.java:
--------------------------------------------------------------------------------
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4 | 5 | package com.brianmtully.flutter.plugins.googlemlvision; 6 | 7 | import android.graphics.Point; 8 | import android.graphics.Rect; 9 | import androidx.annotation.NonNull; 10 | import com.google.android.gms.tasks.OnFailureListener; 11 | import com.google.android.gms.tasks.OnSuccessListener; 12 | import com.google.mlkit.vision.barcode.Barcode; 13 | import com.google.mlkit.vision.barcode.BarcodeScanning; 14 | import com.google.mlkit.vision.barcode.BarcodeScanner; 15 | import com.google.mlkit.vision.barcode.BarcodeScannerOptions; 16 | import com.google.mlkit.vision.common.InputImage; 17 | import io.flutter.plugin.common.MethodChannel; 18 | import java.io.IOException; 19 | import java.util.ArrayList; 20 | import java.util.Arrays; 21 | import java.util.HashMap; 22 | import java.util.List; 23 | import java.util.Map; 24 | 25 | class GMLKBarcodeDetector implements Detector { 26 | private final BarcodeScanner detector; 27 | 28 | GMLKBarcodeDetector(Map options) { 29 | detector = BarcodeScanning.getClient(parseOptions(options)); 30 | } 31 | 32 | @Override 33 | public void handleDetection(final InputImage image, final MethodChannel.Result result) { 34 | detector 35 | .process(image) 36 | .addOnSuccessListener( 37 | new OnSuccessListener>() { 38 | @Override 39 | public void onSuccess(List visionBarcodes) { 40 | List> barcodes = new ArrayList<>(); 41 | 42 | for (Barcode barcode : visionBarcodes) { 43 | Map barcodeMap = new HashMap<>(); 44 | 45 | Rect bounds = barcode.getBoundingBox(); 46 | if (bounds != null) { 47 | barcodeMap.put("left", (double) bounds.left); 48 | barcodeMap.put("top", (double) bounds.top); 49 | barcodeMap.put("width", (double) bounds.width()); 50 | barcodeMap.put("height", (double) bounds.height()); 51 | } 52 | 53 | List points = new ArrayList<>(); 54 | if (barcode.getCornerPoints() != null) { 55 | for (Point point : barcode.getCornerPoints()) { 56 | points.add(new double[] {(double) point.x, (double) point.y}); 57 | } 58 | } 59 | 
barcodeMap.put("points", points); 60 | 61 | barcodeMap.put("rawValue", barcode.getRawValue()); 62 | barcodeMap.put("displayValue", barcode.getDisplayValue()); 63 | barcodeMap.put("format", barcode.getFormat()); 64 | barcodeMap.put("valueType", barcode.getValueType()); 65 | 66 | Map typeValue = new HashMap<>(); 67 | switch (barcode.getValueType()) { 68 | case Barcode.TYPE_EMAIL: 69 | Barcode.Email email = barcode.getEmail(); 70 | 71 | typeValue.put("type", email.getType()); 72 | typeValue.put("address", email.getAddress()); 73 | typeValue.put("body", email.getBody()); 74 | typeValue.put("subject", email.getSubject()); 75 | 76 | barcodeMap.put("email", typeValue); 77 | break; 78 | case Barcode.TYPE_PHONE: 79 | Barcode.Phone phone = barcode.getPhone(); 80 | 81 | typeValue.put("number", phone.getNumber()); 82 | typeValue.put("type", phone.getType()); 83 | 84 | barcodeMap.put("phone", typeValue); 85 | break; 86 | case Barcode.TYPE_SMS: 87 | Barcode.Sms sms = barcode.getSms(); 88 | 89 | typeValue.put("message", sms.getMessage()); 90 | typeValue.put("phoneNumber", sms.getPhoneNumber()); 91 | 92 | barcodeMap.put("sms", typeValue); 93 | break; 94 | case Barcode.TYPE_URL: 95 | Barcode.UrlBookmark urlBookmark = barcode.getUrl(); 96 | 97 | typeValue.put("title", urlBookmark.getTitle()); 98 | typeValue.put("url", urlBookmark.getUrl()); 99 | 100 | barcodeMap.put("url", typeValue); 101 | break; 102 | case Barcode.TYPE_WIFI: 103 | Barcode.WiFi wifi = barcode.getWifi(); 104 | 105 | typeValue.put("ssid", wifi.getSsid()); 106 | typeValue.put("password", wifi.getPassword()); 107 | typeValue.put("encryptionType", wifi.getEncryptionType()); 108 | 109 | barcodeMap.put("wifi", typeValue); 110 | break; 111 | case Barcode.TYPE_GEO: 112 | Barcode.GeoPoint geoPoint = barcode.getGeoPoint(); 113 | 114 | typeValue.put("latitude", geoPoint.getLat()); 115 | typeValue.put("longitude", geoPoint.getLng()); 116 | 117 | barcodeMap.put("geoPoint", typeValue); 118 | break; 119 | case 
Barcode.TYPE_CONTACT_INFO: 120 | Barcode.ContactInfo contactInfo = barcode.getContactInfo(); 121 | 122 | List> addresses = new ArrayList<>(); 123 | for (Barcode.Address address : contactInfo.getAddresses()) { 124 | Map addressMap = new HashMap<>(); 125 | if (address.getAddressLines() != null) { 126 | addressMap.put("addressLines", Arrays.asList(address.getAddressLines())); 127 | } 128 | addressMap.put("type", address.getType()); 129 | 130 | addresses.add(addressMap); 131 | } 132 | typeValue.put("addresses", addresses); 133 | 134 | List> emails = new ArrayList<>(); 135 | for (Barcode.Email contactEmail : contactInfo.getEmails()) { 136 | Map emailMap = new HashMap<>(); 137 | emailMap.put("address", contactEmail.getAddress()); 138 | emailMap.put("type", contactEmail.getType()); 139 | emailMap.put("body", contactEmail.getBody()); 140 | emailMap.put("subject", contactEmail.getSubject()); 141 | 142 | emails.add(emailMap); 143 | } 144 | typeValue.put("emails", emails); 145 | 146 | Map nameMap = new HashMap<>(); 147 | Barcode.PersonName name = contactInfo.getName(); 148 | if (name != null) { 149 | nameMap.put("formattedName", name.getFormattedName()); 150 | nameMap.put("first", name.getFirst()); 151 | nameMap.put("last", name.getLast()); 152 | nameMap.put("middle", name.getMiddle()); 153 | nameMap.put("prefix", name.getPrefix()); 154 | nameMap.put("pronunciation", name.getPronunciation()); 155 | nameMap.put("suffix", name.getSuffix()); 156 | } 157 | typeValue.put("name", nameMap); 158 | 159 | List> phones = new ArrayList<>(); 160 | for (Barcode.Phone contactPhone : contactInfo.getPhones()) { 161 | Map phoneMap = new HashMap<>(); 162 | phoneMap.put("number", contactPhone.getNumber()); 163 | phoneMap.put("type", contactPhone.getType()); 164 | 165 | phones.add(phoneMap); 166 | } 167 | typeValue.put("phones", phones); 168 | 169 | if (contactInfo.getUrls() != null) { 170 | typeValue.put("urls", Arrays.asList(contactInfo.getUrls())); 171 | } 172 | typeValue.put("jobTitle", 
contactInfo.getTitle()); 173 | typeValue.put("organization", contactInfo.getOrganization()); 174 | 175 | barcodeMap.put("contactInfo", typeValue); 176 | break; 177 | case Barcode.TYPE_CALENDAR_EVENT: 178 | Barcode.CalendarEvent calendarEvent = 179 | barcode.getCalendarEvent(); 180 | 181 | typeValue.put("eventDescription", calendarEvent.getDescription()); 182 | typeValue.put("location", calendarEvent.getLocation()); 183 | typeValue.put("organizer", calendarEvent.getOrganizer()); 184 | typeValue.put("status", calendarEvent.getStatus()); 185 | typeValue.put("summary", calendarEvent.getSummary()); 186 | if (calendarEvent.getStart() != null) { 187 | typeValue.put("start", calendarEvent.getStart().getRawValue()); 188 | } 189 | if (calendarEvent.getEnd() != null) { 190 | typeValue.put("end", calendarEvent.getEnd().getRawValue()); 191 | } 192 | 193 | barcodeMap.put("calendarEvent", typeValue); 194 | break; 195 | case Barcode.TYPE_DRIVER_LICENSE: 196 | Barcode.DriverLicense driverLicense = 197 | barcode.getDriverLicense(); 198 | 199 | typeValue.put("firstName", driverLicense.getFirstName()); 200 | typeValue.put("middleName", driverLicense.getMiddleName()); 201 | typeValue.put("lastName", driverLicense.getLastName()); 202 | typeValue.put("gender", driverLicense.getGender()); 203 | typeValue.put("addressCity", driverLicense.getAddressCity()); 204 | typeValue.put("addressStreet", driverLicense.getAddressStreet()); 205 | typeValue.put("addressState", driverLicense.getAddressState()); 206 | typeValue.put("addressZip", driverLicense.getAddressZip()); 207 | typeValue.put("birthDate", driverLicense.getBirthDate()); 208 | typeValue.put("documentType", driverLicense.getDocumentType()); 209 | typeValue.put("licenseNumber", driverLicense.getLicenseNumber()); 210 | typeValue.put("expiryDate", driverLicense.getExpiryDate()); 211 | typeValue.put("issuingDate", driverLicense.getIssueDate()); 212 | typeValue.put("issuingCountry", driverLicense.getIssuingCountry()); 213 | 214 | 
barcodeMap.put("driverLicense", typeValue); 215 | break; 216 | } 217 | 218 | barcodes.add(barcodeMap); 219 | } 220 | result.success(barcodes); 221 | } 222 | }) 223 | .addOnFailureListener( 224 | new OnFailureListener() { 225 | @Override 226 | public void onFailure(@NonNull Exception exception) { 227 | result.error("barcodeDetectorError", exception.getLocalizedMessage(), null); 228 | } 229 | }); 230 | } 231 | 232 | private BarcodeScannerOptions parseOptions(Map optionsData) { 233 | Integer barcodeFormats = (Integer) optionsData.get("barcodeFormats"); 234 | return new BarcodeScannerOptions.Builder() 235 | .setBarcodeFormats(barcodeFormats) 236 | .build(); 237 | } 238 | 239 | @Override 240 | public void close() throws IOException { 241 | detector.close(); 242 | } 243 | } 244 | -------------------------------------------------------------------------------- /lib/src/barcode_detector.dart: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | part of google_ml_vision; 6 | 7 | /// Enumeration of supported barcode content value types for [Barcode.valueType]. 8 | /// 9 | /// Note that the built-in parsers only recognize a few popular value 10 | /// structures. For your specific use case, you may want to implement your own 11 | /// parsing logic. 12 | enum BarcodeValueType { 13 | /// Unknown Barcode value types. 14 | unknown, 15 | 16 | /// Barcode value type for contact info. 17 | contactInfo, 18 | 19 | /// Barcode value type for email addresses. 20 | email, 21 | 22 | /// Barcode value type for ISBNs. 23 | isbn, 24 | 25 | /// Barcode value type for phone numbers. 26 | phone, 27 | 28 | /// Barcode value type for product codes. 29 | product, 30 | 31 | /// Barcode value type for SMS details. 32 | sms, 33 | 34 | /// Barcode value type for plain text. 
  text,

  /// Barcode value type for URLs/bookmarks.
  url,

  /// Barcode value type for Wi-Fi access point details.
  wifi,

  /// Barcode value type for geographic coordinates.
  geographicCoordinates,

  /// Barcode value type for calendar events.
  calendarEvent,

  /// Barcode value type for driver's license data.
  driverLicense,
}

/// The type of email for [BarcodeEmail.type].
///
/// NOTE: declaration order must stay in sync with the platform's integer
/// constants — the decoder indexes into [BarcodeEmailType.values] with the
/// raw int sent over the channel.
enum BarcodeEmailType {
  /// Unknown email type.
  unknown,

  /// Barcode work email type.
  work,

  /// Barcode home email type.
  home,
}

/// The type of phone number for [BarcodePhone.type].
///
/// NOTE: declaration order must stay in sync with the platform's integer
/// constants (decoded via [BarcodePhoneType.values]).
enum BarcodePhoneType {
  /// Unknown phone type.
  unknown,

  /// Barcode work phone type.
  work,

  /// Barcode home phone type.
  home,

  /// Barcode fax phone type.
  fax,

  /// Barcode mobile phone type.
  mobile,
}

/// Wifi encryption type constants for [BarcodeWiFi.encryptionType].
///
/// NOTE: declaration order must stay in sync with the platform's integer
/// constants (decoded via [BarcodeWiFiEncryptionType.values]).
enum BarcodeWiFiEncryptionType {
  /// Barcode unknown Wi-Fi encryption type.
  unknown,

  /// Barcode open Wi-Fi encryption type.
  open,

  /// Barcode WPA Wi-Fi encryption type.
  wpa,

  /// Barcode WEP Wi-Fi encryption type.
  wep,
}

/// Address type constants for [BarcodeAddress.type]
///
/// NOTE: declaration order must stay in sync with the platform's integer
/// constants (decoded via [BarcodeAddressType.values]).
enum BarcodeAddressType {
  /// Barcode unknown address type.
  unknown,

  /// Barcode work address type.
  work,

  /// Barcode home address type.
  home,
}

/// Class containing supported barcode format constants for [BarcodeDetector].
///
/// Passed to [BarcodeDetectorOptions] to set which formats the detector should
/// detect.
///
/// Also, represents possible values for [Barcode.format].
class BarcodeFormat {
  const BarcodeFormat._(this.value);

  /// Barcode format constant representing the union of all supported formats.
  static const BarcodeFormat all = BarcodeFormat._(0xFFFF);

  /// Barcode format unknown to the current SDK.
  static const BarcodeFormat unknown = BarcodeFormat._(0);

  /// Barcode format constant for Code 128.
  static const BarcodeFormat code128 = BarcodeFormat._(0x0001);

  /// Barcode format constant for Code 39.
  static const BarcodeFormat code39 = BarcodeFormat._(0x0002);

  /// Barcode format constant for Code 93.
  static const BarcodeFormat code93 = BarcodeFormat._(0x0004);

  /// Barcode format constant for CodaBar.
  static const BarcodeFormat codabar = BarcodeFormat._(0x0008);

  /// Barcode format constant for Data Matrix.
  static const BarcodeFormat dataMatrix = BarcodeFormat._(0x0010);

  /// Barcode format constant for EAN-13.
  static const BarcodeFormat ean13 = BarcodeFormat._(0x0020);

  /// Barcode format constant for EAN-8.
  static const BarcodeFormat ean8 = BarcodeFormat._(0x0040);

  /// Barcode format constant for ITF (Interleaved Two-of-Five).
  static const BarcodeFormat itf = BarcodeFormat._(0x0080);

  /// Barcode format constant for QR Code.
  static const BarcodeFormat qrCode = BarcodeFormat._(0x0100);

  /// Barcode format constant for UPC-A.
  static const BarcodeFormat upca = BarcodeFormat._(0x0200);

  /// Barcode format constant for UPC-E.
  static const BarcodeFormat upce = BarcodeFormat._(0x0400);

  /// Barcode format constant for PDF-417.
  static const BarcodeFormat pdf417 = BarcodeFormat._(0x0800);

  /// Barcode format constant for AZTEC.
  static const BarcodeFormat aztec = BarcodeFormat._(0x1000);

  /// Raw BarcodeFormat value (a bit mask).
  final int value;

  /// Combines two formats into one that matches either, e.g.
  /// `BarcodeFormat.qrCode | BarcodeFormat.aztec`.
  BarcodeFormat operator |(BarcodeFormat other) =>
      BarcodeFormat._(value | other.value);

  /// Formats compare equal when their bit masks are equal, so a value
  /// produced by [|] equals an identically combined value.
  @override
  bool operator ==(Object other) =>
      other is BarcodeFormat && other.value == value;

  @override
  int get hashCode => value.hashCode;
}

/// Detector for performing barcode scanning on an input image.
///
/// A barcode detector is created via
/// `barcodeDetector([BarcodeDetectorOptions options])` in [GoogleVision]:
///
/// ```dart
/// final GoogleVisionImage image =
///     GoogleVisionImage.fromFilePath('path/to/file');
///
/// final BarcodeDetector barcodeDetector =
///     GoogleVision.instance.barcodeDetector();
///
/// final List<Barcode> barcodes = await barcodeDetector.detectInImage(image);
/// ```
class BarcodeDetector {
  BarcodeDetector._(this.options, this._handle);

  /// The options for configuring this detector.
  final BarcodeDetectorOptions options;

  // Identifies the platform-side detector instance this object talks to.
  final int _handle;
  bool _hasBeenOpened = false;
  bool _isClosed = false;

  /// Detects barcodes in the input image.
  ///
  /// Must not be called after [close].
  Future<List<Barcode>> detectInImage(GoogleVisionImage visionImage) async {
    assert(!_isClosed);
    _hasBeenOpened = true;

    final reply = await GoogleVision.channel.invokeListMethod<dynamic>(
      'BarcodeDetector#detectInImage',
      <String, dynamic>{
        'handle': _handle,
        'options': <String, dynamic>{
          'barcodeFormats': options.barcodeFormats.value,
        },
      }..addAll(visionImage._serialize()),
    );

    // A null reply from the platform means nothing was detected; return an
    // empty list instead of crashing on a null assertion.
    if (reply == null) return <Barcode>[];
    return reply.map((dynamic barcode) => Barcode._(barcode)).toList();
  }

  /// Release resources used by this detector.
  Future<void> close() {
    // A detector that was never used has no platform-side resources to free;
    // just mark it closed without a channel call.
    if (!_hasBeenOpened) _isClosed = true;
    if (_isClosed) return Future<void>.value();

    _isClosed = true;
    return GoogleVision.channel.invokeMethod<void>(
      'BarcodeDetector#close',
      <String, dynamic>{'handle': _handle},
    );
  }
}

/// Immutable options to configure [BarcodeDetector].
///
/// Sets which barcode formats the detector will detect. Defaults to
/// [BarcodeFormat.all].
///
/// Example usage:
/// ```dart
/// final BarcodeDetectorOptions options =
///     BarcodeDetectorOptions(barcodeFormats: BarcodeFormat.aztec | BarcodeFormat.ean8);
/// ```
class BarcodeDetectorOptions {
  const BarcodeDetectorOptions({this.barcodeFormats = BarcodeFormat.all});

  /// Bit mask of formats the detector should look for.
  final BarcodeFormat barcodeFormats;
}

// TODO(bparrishMines): Normalize default string values. Some values return null on iOS while Android returns empty string.
/// Represents a single recognized barcode and its value.
class Barcode {
  Barcode._(Map<dynamic, dynamic> _data)
      : boundingBox = _data['left'] != null
            ? Rect.fromLTWH(
                _data['left'],
                _data['top'],
                _data['width'],
                _data['height'],
              )
            : null,
        rawValue = _data['rawValue'],
        displayValue = _data['displayValue'],
        format = BarcodeFormat._(_data['format']),
        _cornerPoints = _data['points'] == null
            ? null
            : List<Offset>.unmodifiable((_data['points'] as List).map<Offset>(
                (dynamic item) => Offset(item[0], item[1]))),
        valueType = BarcodeValueType.values[_data['valueType']],
        email = _data['email'] == null ? null : BarcodeEmail._(_data['email']),
        phone = _data['phone'] == null ? null : BarcodePhone._(_data['phone']),
        sms = _data['sms'] == null ? null : BarcodeSMS._(_data['sms']),
        url = _data['url'] == null ? null : BarcodeURLBookmark._(_data['url']),
        wifi = _data['wifi'] == null ? null : BarcodeWiFi._(_data['wifi']),
        geoPoint = _data['geoPoint'] == null
            ? null
            : BarcodeGeoPoint._(_data['geoPoint']),
        contactInfo = _data['contactInfo'] == null
            ? null
            : BarcodeContactInfo._(_data['contactInfo']),
        calendarEvent = _data['calendarEvent'] == null
            ? null
            : BarcodeCalendarEvent._(_data['calendarEvent']),
        driverLicense = _data['driverLicense'] == null
            ? null
            : BarcodeDriverLicense._(_data['driverLicense']);

  // Null when the platform did not send corner points.
  final List<Offset>? _cornerPoints;

  /// The bounding rectangle of the detected barcode.
  ///
  /// Could be null if the bounding rectangle can not be determined.
  final Rect? boundingBox;

  /// Barcode value as it was encoded in the barcode.
  ///
  /// Structured values are not parsed, for example: 'MEBKM:TITLE:Google;URL://www.google.com;;'.
  ///
  /// Null if nothing found.
  final String? rawValue;

  /// Barcode value in a user-friendly format.
  ///
  /// May omit some of the information encoded in the barcode.
  /// For example, if rawValue is 'MEBKM:TITLE:Google;URL://www.google.com;;',
  /// the displayValue might be '//www.google.com'.
  /// If valueType = [BarcodeValueType.text], this field will be equal to rawValue.
  ///
  /// This value may be multiline, for example, when line breaks are encoded into the original TEXT barcode value.
  /// May include the supplement value.
  ///
  /// Null if nothing found.
  final String? displayValue;

  /// The barcode format, for example [BarcodeFormat.ean13].
  final BarcodeFormat format;

  /// The four corner points in clockwise direction starting with top-left.
  ///
  /// Due to the possible perspective distortions, this is not necessarily a rectangle.
  ///
  /// Returns a fresh copy; empty when no corner points were reported.
  List<Offset> get cornerPoints =>
      List<Offset>.from(_cornerPoints ?? const <Offset>[]);

  /// The format type of the barcode value.
  ///
  /// For example, [BarcodeValueType.text], [BarcodeValueType.product], [BarcodeValueType.url], etc.
  ///
  /// If the value structure cannot be parsed, [BarcodeValueType.text] will be returned.
  /// If the recognized structure type is not defined in your current version of SDK, [BarcodeValueType.unknown] will be returned.
  ///
  /// Note that the built-in parsers only recognize a few popular value structures.
  /// For your specific use case, you might want to directly consume rawValue
  /// and implement your own parsing logic.
  final BarcodeValueType valueType;

  /// Parsed email details. (set iff [valueType] is [BarcodeValueType.email]).
  final BarcodeEmail? email;

  /// Parsed phone details. (set iff [valueType] is [BarcodeValueType.phone]).
  final BarcodePhone? phone;

  /// Parsed SMS details. (set iff [valueType] is [BarcodeValueType.sms]).
  final BarcodeSMS? sms;

  /// Parsed URL bookmark details. (set iff [valueType] is [BarcodeValueType.url]).
  final BarcodeURLBookmark? url;

  /// Parsed WiFi AP details. (set iff [valueType] is [BarcodeValueType.wifi]).
  final BarcodeWiFi? wifi;

  /// Parsed geo coordinates. (set iff [valueType] is [BarcodeValueType.geographicCoordinates]).
  final BarcodeGeoPoint? geoPoint;

  /// Parsed contact details. (set iff [valueType] is [BarcodeValueType.contactInfo]).
  final BarcodeContactInfo? contactInfo;

  /// Parsed calendar event details. (set iff [valueType] is [BarcodeValueType.calendarEvent]).
  final BarcodeCalendarEvent? calendarEvent;

  /// Parsed driver's license details. (set iff [valueType] is [BarcodeValueType.driverLicense]).
  final BarcodeDriverLicense? driverLicense;
}

/// An email message from a 'MAILTO:' or similar QRCode type.
class BarcodeEmail {
  BarcodeEmail._(Map data)
      : type = BarcodeEmailType.values[data['type']],
        address = data['address'],
        body = data['body'],
        subject = data['subject'];

  /// The email's address.
  final String? address;

  /// The email's body.
  final String? body;

  /// The email's subject.
  final String? subject;

  /// The type of the email.
  final BarcodeEmailType type;
}

/// Phone number info.
class BarcodePhone {
  BarcodePhone._(Map data)
      : number = data['number'],
        // Raw int from the platform indexes into the enum's values; the
        // enum declaration order must match the native constants.
        type = BarcodePhoneType.values[data['type']];

  /// Phone number.
  final String? number;

  /// Type of the phone number.
  ///
  /// See also:
  ///
  /// * [BarcodePhoneType]
  final BarcodePhoneType type;
}

/// An sms message from an 'SMS:' or similar QRCode type.
class BarcodeSMS {
  BarcodeSMS._(Map data)
      : message = data['message'],
        phoneNumber = data['phoneNumber'];

  /// An SMS message body.
  final String? message;

  /// An SMS message phone number.
  final String? phoneNumber;
}

/// A URL and title from a 'MEBKM:' or similar QRCode type.
class BarcodeURLBookmark {
  BarcodeURLBookmark._(Map data)
      : title = data['title'],
        url = data['url'];

  /// A URL bookmark title.
  final String? title;

  /// A URL bookmark url.
  final String? url;
}

/// A wifi network parameters from a 'WIFI:' or similar QRCode type.
class BarcodeWiFi {
  BarcodeWiFi._(Map data)
      : ssid = data['ssid'],
        password = data['password'],
        // Raw int from the platform indexes into the enum's values.
        encryptionType =
            BarcodeWiFiEncryptionType.values[data['encryptionType']];

  /// A Wi-Fi access point SSID.
  final String? ssid;

  /// A Wi-Fi access point password.
  final String? password;

  /// The encryption type of the WIFI
  ///
  /// See all [BarcodeWiFiEncryptionType]
  final BarcodeWiFiEncryptionType encryptionType;
}

/// GPS coordinates from a 'GEO:' or similar QRCode type.
444 | class BarcodeGeoPoint { 445 | BarcodeGeoPoint._(Map data) 446 | : latitude = data['latitude'], 447 | longitude = data['longitude']; 448 | 449 | /// A location latitude. 450 | final double? latitude; 451 | 452 | /// A location longitude. 453 | final double? longitude; 454 | } 455 | 456 | /// A person's or organization's business card. 457 | class BarcodeContactInfo { 458 | BarcodeContactInfo._(Map data) 459 | : addresses = data['addresses'] == null 460 | ? null 461 | : List.unmodifiable(data['addresses'] 462 | .map((dynamic item) => BarcodeAddress._(item))), 463 | emails = data['emails'] == null 464 | ? null 465 | : List.unmodifiable(data['emails'] 466 | .map((dynamic item) => BarcodeEmail._(item))), 467 | name = data['name'] == null ? null : BarcodePersonName._(data['name']), 468 | phones = data['phones'] == null 469 | ? null 470 | : List.unmodifiable(data['phones'] 471 | .map((dynamic item) => BarcodePhone._(item))), 472 | urls = data['urls'] == null 473 | ? null 474 | : List.unmodifiable( 475 | data['urls'].map((dynamic item) { 476 | final String s = item; 477 | return s; 478 | })), 479 | jobTitle = data['jobTitle'], 480 | organization = data['organization']; 481 | 482 | /// Contact person's addresses. 483 | /// 484 | /// Could be an empty list if nothing found. 485 | final List? addresses; 486 | 487 | /// Contact person's emails. 488 | /// 489 | /// Could be an empty list if nothing found. 490 | final List? emails; 491 | 492 | /// Contact person's name. 493 | final BarcodePersonName? name; 494 | 495 | /// Contact person's phones. 496 | /// 497 | /// Could be an empty list if nothing found. 498 | final List? phones; 499 | 500 | /// Contact urls associated with this person. 501 | final List? urls; 502 | 503 | /// Contact person's title. 504 | final String? jobTitle; 505 | 506 | /// Contact person's organization. 507 | final String? organization; 508 | } 509 | 510 | /// An address. 
class BarcodeAddress {
  BarcodeAddress._(Map data)
      : type = BarcodeAddressType.values[data['type']],
        addressLines = List.unmodifiable(
            data['addressLines'].map((dynamic line) => line as String));

  /// Formatted address, multiple lines when appropriate.
  ///
  /// This field always contains at least one line.
  final List addressLines;

  /// Type of the address.
  ///
  /// See also:
  ///
  /// * [BarcodeAddressType]
  final BarcodeAddressType type;
}

/// A person's name, both formatted version and individual name components.
class BarcodePersonName {
  BarcodePersonName._(Map data)
      : first = data['first'],
        middle = data['middle'],
        last = data['last'],
        prefix = data['prefix'],
        suffix = data['suffix'],
        formattedName = data['formattedName'],
        pronunciation = data['pronunciation'];

  /// The properly formatted name.
  final String? formattedName;

  /// First name
  final String? first;

  /// Last name
  final String? last;

  /// Middle name
  final String? middle;

  /// Prefix of the name
  final String? prefix;

  /// Designates a text string to be set as the kana name in the phonebook. Used for Japanese contacts.
  final String? pronunciation;

  /// Suffix of the person's name
  final String? suffix;
}

/// DateTime data type used in calendar events.
class BarcodeCalendarEvent {
  // NOTE(review): 'start' and 'end' are assumed to always be non-null,
  // ISO-8601-parseable strings supplied by the platform side; DateTime.parse
  // throws a FormatException otherwise — confirm against the native handlers.
  BarcodeCalendarEvent._(Map data)
      : start = DateTime.parse(data['start']),
        end = DateTime.parse(data['end']),
        summary = data['summary'],
        status = data['status'],
        organizer = data['organizer'],
        location = data['location'],
        eventDescription = data['eventDescription'];

  /// The description of the calendar event.
  final String? eventDescription;

  /// The location of the calendar event.
  final String? location;

  /// The organizer of the calendar event.
  final String? organizer;

  /// The status of the calendar event.
  final String? status;

  /// The summary of the calendar event.
  final String? summary;

  /// The start date time of the calendar event.
  final DateTime start;

  /// The end date time of the calendar event.
  final DateTime end;
}

/// A driver license or ID card.
class BarcodeDriverLicense {
  BarcodeDriverLicense._(Map data)
      : documentType = data['documentType'],
        licenseNumber = data['licenseNumber'],
        firstName = data['firstName'],
        middleName = data['middleName'],
        lastName = data['lastName'],
        gender = data['gender'],
        birthDate = data['birthDate'],
        addressStreet = data['addressStreet'],
        addressCity = data['addressCity'],
        addressState = data['addressState'],
        addressZip = data['addressZip'],
        issuingDate = data['issuingDate'],
        expiryDate = data['expiryDate'],
        issuingCountry = data['issuingCountry'];

  /// Holder's first name.
  final String? firstName;

  /// Holder's middle name.
  final String? middleName;

  /// Holder's last name.
  final String? lastName;

  /// Holder's gender. 1 - male, 2 - female.
  final String? gender;

  /// City of holder's address.
  final String? addressCity;

  /// State of holder's address.
  final String? addressState;

  /// Holder's street address.
  final String? addressStreet;

  /// Zip code of holder's address.
  final String? addressZip;

  /// Birth date of the holder.
  final String? birthDate;

  /// "DL" for driver licenses, "ID" for ID cards.
  final String? documentType;

  /// Driver license ID number.
  final String? licenseNumber;

  /// Expiry date of the license.
  final String? expiryDate;

  /// Issue date of the license.
  ///
  /// The date format depends on the issuing country. MMDDYYYY for the US, YYYYMMDD for Canada.
  final String? issuingDate;

  /// Country in which DL/ID was issued. US = "USA", Canada = "CAN".
  final String? issuingCountry;
}