├── logo.png ├── mobilefacenet.tflite ├── FaceNetAuthentication-Logo.png ├── user.model.dart ├── main.dart ├── google-services.json ├── database.dart ├── app_button.dart ├── app_text_field.dart ├── camera_header.dart ├── FacePainter.dart ├── ml_kit_service.dart ├── camera.service.dart ├── pubspec.yaml ├── profile.dart ├── logo.svg ├── facenet.service.dart ├── sign-up.dart ├── auth-action-button.dart ├── sign-in.dart └── home.dart /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/The-Assembly/Authenticate-via-TensorFlow-Facial-Recognition-in-Flutter/HEAD/logo.png -------------------------------------------------------------------------------- /mobilefacenet.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/The-Assembly/Authenticate-via-TensorFlow-Facial-Recognition-in-Flutter/HEAD/mobilefacenet.tflite -------------------------------------------------------------------------------- /FaceNetAuthentication-Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/The-Assembly/Authenticate-via-TensorFlow-Facial-Recognition-in-Flutter/HEAD/FaceNetAuthentication-Logo.png -------------------------------------------------------------------------------- /user.model.dart: -------------------------------------------------------------------------------- 1 | import 'package:flutter/material.dart'; 2 | 3 | class User { 4 | String user; 5 | String password; 6 | 7 | User({@required this.user, @required this.password}); 8 | 9 | static User fromDB(String dbuser) { 10 | return new User(user: dbuser.split(':')[0], password: dbuser.split(':')[1]); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /main.dart: -------------------------------------------------------------------------------- 1 | import 'package:face_net_authentication/pages/home.dart'; 2 | import 'package:flutter/material.dart'; 3 | 4 | void main() { 5 | runApp(MyApp()); 6 | } 7 | 8 | class MyApp extends StatelessWidget { 9 | @override 10 | Widget build(BuildContext context) { 11 | return MaterialApp( 12 | theme: ThemeData( 13 | primarySwatch: Colors.blue, 14 | visualDensity: VisualDensity.adaptivePlatformDensity, 15 | ), 16 | home: MyHomePage(), 17 | ); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /google-services.json: -------------------------------------------------------------------------------- 1 | { 2 | "project_info": { 3 | "project_number": "908209109268", 4 | "firebase_url": "https://facerecognitionauth.firebaseio.com", 5 | "project_id": "facerecognitionauth", 6 | "storage_bucket": "facerecognitionauth.appspot.com" 7 | }, 8 | "client": [ 9 | { 10 | "client_info": { 11 | "mobilesdk_app_id": "1:908209109268:android:22aa62a69fde9e6a0e6a01", 12 | "android_client_info": { 13 | "package_name": "com.example.FaceNet" 14 | } 15 | }, 16 | "oauth_client": [ 17 | { 18 | "client_id": "908209109268-8833dnentno68sbhh1q3sbd5k0epm8a3.apps.googleusercontent.com", 19 | "client_type": 3 20 | } 21 | ], 22 | "api_key": [ 23 | { 24 | "current_key": "AIzaSyDzlmpvIAirzjtO_1TDasHocRk3UrTrkig" 25 | } 26 | ], 27 | "services": { 28 | "appinvite_service": { 29 | "other_platform_oauth_client": [ 30 | { 31 | "client_id": "908209109268-8833dnentno68sbhh1q3sbd5k0epm8a3.apps.googleusercontent.com", 32 | "client_type": 3 33 | } 34 | ] 35 | } 36 | } 37 | } 38 
| ], 39 | "configuration_version": "1" 40 | } -------------------------------------------------------------------------------- /database.dart: -------------------------------------------------------------------------------- 1 | import 'dart:convert'; 2 | import 'dart:io'; 3 | import 'package:path_provider/path_provider.dart'; 4 | 5 | class DataBaseService { 6 | // singleton boilerplate 7 | static final DataBaseService _cameraServiceService = 8 | DataBaseService._internal(); 9 | 10 | factory DataBaseService() { 11 | return _cameraServiceService; 12 | } 13 | // singleton boilerplate 14 | DataBaseService._internal(); 15 | 16 | /// file that stores the data on filesystem 17 | File jsonFile; 18 | 19 | /// Data learned on memory 20 | Map _db = Map(); 21 | Map get db => this._db; 22 | 23 | /// loads a simple json file. 24 | Future loadDB() async { 25 | var tempDir = await getApplicationDocumentsDirectory(); 26 | String _embPath = tempDir.path + '/emb.json'; 27 | 28 | jsonFile = new File(_embPath); 29 | 30 | if (jsonFile.existsSync()) { 31 | _db = json.decode(jsonFile.readAsStringSync()); 32 | } 33 | } 34 | 35 | /// [Name]: name of the new user 36 | /// [Data]: Face representation for Machine Learning model 37 | Future saveData(String user, String password, List modelData) async { 38 | String userAndPass = user + ':' + password; 39 | _db[userAndPass] = modelData; 40 | jsonFile.writeAsStringSync(json.encode(_db)); 41 | } 42 | 43 | /// deletes the created users 44 | cleanDB() { 45 | this._db = Map(); 46 | jsonFile.writeAsStringSync(json.encode({})); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /app_button.dart: -------------------------------------------------------------------------------- 1 | import 'package:flutter/material.dart'; 2 | 3 | class AppButton extends StatelessWidget { 4 | AppButton( 5 | {this.onPressed, 6 | this.text, 7 | this.color = const Color(0xFF0F0BDB), 8 | this.icon = const Icon( 9 | Icons.add, 10 | color: Colors.white, 11 | )}); 12 | final Function onPressed; 13 | final String text; 14 | final Icon icon; 15 | final Color color; 16 | @override 17 | Widget build(BuildContext context) { 18 | return InkWell( 19 | onTap: onPressed, 20 | child: Container( 21 | decoration: BoxDecoration( 22 | borderRadius: BorderRadius.circular(10), 23 | color: color, 24 | boxShadow: [ 25 | BoxShadow( 26 | color: Colors.blue.withOpacity(0.1), 27 | blurRadius: 1, 28 | offset: Offset(0, 2), 29 | ), 30 | ], 31 | ), 32 | alignment: Alignment.center, 33 | padding: EdgeInsets.symmetric(vertical: 14, horizontal: 16), 34 | width: MediaQuery.of(context).size.width * 0.8, 35 | child: Row( 36 | mainAxisAlignment: MainAxisAlignment.center, 37 | children: [ 38 | Text( 39 | text, 40 | style: TextStyle(color: Colors.white), 41 | ), 42 | SizedBox( 43 | width: 10, 44 | ), 45 | icon 46 | ], 47 | ), 48 | ), 49 | ); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /app_text_field.dart: -------------------------------------------------------------------------------- 1 | import 'package:flutter/material.dart'; 2 | 3 | class AppTextField extends StatelessWidget { 4 | AppTextField( 5 | {Key key, 6 | @required this.labelText, 7 | @required this.controller, 8 | this.keyboardType = TextInputType.text, 9 | this.autofocus = false, 10 | this.isPassword = false}) 11 | : super(key: key); 12 | 13 | final String labelText; 14 | final TextEditingController controller; 15 | final TextInputType keyboardType; 16 | final bool 
autofocus; 17 | final bool isPassword; 18 | 19 | @override 20 | Widget build(BuildContext context) { 21 | return TextField( 22 | controller: this.controller, 23 | autofocus: this.autofocus, 24 | cursorColor: Color(0xFF5BC8AA), 25 | decoration: InputDecoration( 26 | floatingLabelBehavior: FloatingLabelBehavior.never, 27 | labelText: labelText, 28 | border: InputBorder.none, 29 | filled: true, 30 | fillColor: Colors.grey[200], 31 | enabledBorder: new OutlineInputBorder( 32 | borderSide: BorderSide.none, 33 | borderRadius: const BorderRadius.all( 34 | const Radius.circular(10.0), 35 | ), 36 | ), 37 | focusedBorder: new OutlineInputBorder( 38 | borderSide: BorderSide.none, 39 | borderRadius: const BorderRadius.all( 40 | const Radius.circular(10.0), 41 | ), 42 | ), 43 | ), 44 | obscureText: isPassword, 45 | keyboardType: keyboardType, 46 | ); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /camera_header.dart: -------------------------------------------------------------------------------- 1 | import 'package:flutter/material.dart'; 2 | 3 | class CameraHeader extends StatelessWidget { 4 | CameraHeader(this.title, {this.onBackPressed}); 5 | final String title; 6 | final Function onBackPressed; 7 | 8 | @override 9 | Widget build(BuildContext context) { 10 | return Container( 11 | width: MediaQuery.of(context).size.width, 12 | child: Row( 13 | mainAxisAlignment: MainAxisAlignment.spaceBetween, 14 | children: [ 15 | InkWell( 16 | onTap: onBackPressed, 17 | child: Container( 18 | margin: EdgeInsets.all(20), 19 | decoration: BoxDecoration( 20 | color: Colors.white, 21 | borderRadius: BorderRadius.circular(10), 22 | ), 23 | height: 50, 24 | width: 50, 25 | child: Center(child: Icon(Icons.arrow_back)), 26 | ), 27 | ), 28 | Text( 29 | title, 30 | style: TextStyle( 31 | color: Colors.white, fontWeight: FontWeight.w600, fontSize: 20), 32 | textAlign: TextAlign.center, 33 | ), 34 | SizedBox( 35 | width: 90, 36 | ) 37 | ], 38 | ), 39 | height: 150, 40 | decoration: BoxDecoration( 41 | gradient: LinearGradient( 42 | begin: Alignment.topCenter, 43 | end: Alignment.bottomCenter, 44 | colors: [Colors.black, Colors.transparent], 45 | ), 46 | ), 47 | ); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /FacePainter.dart: -------------------------------------------------------------------------------- 1 | import 'dart:ui'; 2 | import 'package:google_ml_kit/google_ml_kit.dart'; 3 | import 'package:flutter/foundation.dart'; 4 | import 'package:flutter/material.dart'; 5 | 6 | class FacePainter extends CustomPainter { 7 | FacePainter({@required this.imageSize, @required this.face}); 8 | final Size imageSize; 9 | double scaleX, scaleY; 10 | Face face; 11 | @override 12 | void paint(Canvas canvas, Size size) { 13 | if (face == null) return; 14 | 15 | Paint paint; 16 | 17 | if (this.face.headEulerAngleY > 10 || this.face.headEulerAngleY < -10) { 18 | paint = Paint() 19 | ..style = PaintingStyle.stroke 20 | ..strokeWidth = 3.0 21 | ..color = Colors.red; 22 | } else { 23 | paint = Paint() 24 | ..style = PaintingStyle.stroke 25 | ..strokeWidth = 3.0 26 | ..color = Colors.green; 27 | } 28 | 29 | scaleX = size.width / imageSize.width; 30 | scaleY = size.height / imageSize.height; 31 | 32 | canvas.drawRRect( 33 | _scaleRect( 34 | rect: face.boundingBox, 35 | imageSize: imageSize, 36 | widgetSize: size, 37 | scaleX: scaleX, 38 | scaleY: scaleY), 39 | paint); 40 | } 41 | 42 | @override 43 | bool shouldRepaint(FacePainter 
oldDelegate) { 44 | return oldDelegate.imageSize != imageSize || oldDelegate.face != face; 45 | } 46 | } 47 | 48 | RRect _scaleRect( 49 | {@required Rect rect, 50 | @required Size imageSize, 51 | @required Size widgetSize, 52 | double scaleX, 53 | double scaleY}) { 54 | return RRect.fromLTRBR( 55 | (widgetSize.width - rect.left.toDouble() * scaleX), 56 | rect.top.toDouble() * scaleY, 57 | widgetSize.width - rect.right.toDouble() * scaleX, 58 | rect.bottom.toDouble() * scaleY, 59 | Radius.circular(10)); 60 | } 61 | -------------------------------------------------------------------------------- /ml_kit_service.dart: -------------------------------------------------------------------------------- 1 | import 'package:face_net_authentication/services/camera.service.dart'; 2 | import 'package:camera/camera.dart'; 3 | import 'package:google_ml_kit/google_ml_kit.dart'; 4 | import 'package:flutter/material.dart'; 5 | 6 | class MLKitService { 7 | // singleton boilerplate 8 | static final MLKitService _cameraServiceService = MLKitService._internal(); 9 | 10 | factory MLKitService() { 11 | return _cameraServiceService; 12 | } 13 | // singleton boilerplate 14 | MLKitService._internal(); 15 | 16 | // service injection 17 | CameraService _cameraService = CameraService(); 18 | 19 | FaceDetector _faceDetector; 20 | FaceDetector get faceDetector => this._faceDetector; 21 | 22 | void initialize() { 23 | this._faceDetector = GoogleMlKit.vision.faceDetector( 24 | FaceDetectorOptions( 25 | mode: FaceDetectorMode.accurate, 26 | ), 27 | ); 28 | } 29 | 30 | Future> getFacesFromImage(CameraImage image) async { 31 | /// preprocess the image 🧑🏻‍🔧 32 | InputImageData _firebaseImageMetadata = InputImageData( 33 | imageRotation: _cameraService.cameraRotation, 34 | inputImageFormat: InputImageFormatMethods.fromRawValue(image.format.raw), 35 | size: Size(image.width.toDouble(), image.height.toDouble()), 36 | planeData: image.planes.map( 37 | (Plane plane) { 38 | return InputImagePlaneMetadata( 39 | bytesPerRow: plane.bytesPerRow, 40 | height: plane.height, 41 | width: plane.width, 42 | ); 43 | }, 44 | ).toList(), 45 | ); 46 | 47 | /// Transform the image input for the _faceDetector 🎯 48 | InputImage _firebaseVisionImage = InputImage.fromBytes( 49 | bytes: image.planes[0].bytes, 50 | inputImageData: _firebaseImageMetadata, 51 | ); 52 | 53 | /// proces the image and makes inference 🤖 54 | List faces = 55 | await this._faceDetector.processImage(_firebaseVisionImage); 56 | return faces; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /camera.service.dart: -------------------------------------------------------------------------------- 1 | import 'dart:ui'; 2 | 3 | import 'package:camera/camera.dart'; 4 | import 'package:google_ml_kit/google_ml_kit.dart'; 5 | 6 | class CameraService { 7 | // singleton boilerplate 8 | static final CameraService _cameraServiceService = CameraService._internal(); 9 | 10 | factory CameraService() { 11 | return _cameraServiceService; 12 | } 13 | // singleton boilerplate 14 | CameraService._internal(); 15 | 16 | CameraController _cameraController; 17 | CameraController get cameraController => this._cameraController; 18 | 19 | CameraDescription _cameraDescription; 20 | 21 | InputImageRotation _cameraRotation; 22 | InputImageRotation get cameraRotation => this._cameraRotation; 23 | 24 | String _imagePath; 25 | String get imagePath => this._imagePath; 26 | 27 | Future startService(CameraDescription cameraDescription) async { 28 | 
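// Editor's note (added comment): startService keeps the chosen CameraDescription,
// creates a high-resolution CameraController with audio disabled, derives the
// ML Kit InputImageRotation from the sensor orientation, and returns the
// controller's initialize() Future so callers can await the preview being ready.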
this._cameraDescription = cameraDescription; 29 | this._cameraController = CameraController( 30 | this._cameraDescription, 31 | ResolutionPreset.high, 32 | enableAudio: false, 33 | ); 34 | 35 | // sets the rotation of the image 36 | this._cameraRotation = rotationIntToImageRotation( 37 | this._cameraDescription.sensorOrientation, 38 | ); 39 | 40 | // Next, initialize the controller. This returns a Future. 41 | return this._cameraController.initialize(); 42 | } 43 | 44 | InputImageRotation rotationIntToImageRotation(int rotation) { 45 | switch (rotation) { 46 | case 90: 47 | return InputImageRotation.Rotation_90deg; 48 | case 180: 49 | return InputImageRotation.Rotation_180deg; 50 | case 270: 51 | return InputImageRotation.Rotation_270deg; 52 | default: 53 | return InputImageRotation.Rotation_0deg; 54 | } 55 | } 56 | 57 | /// takes the picture and saves it in the given path 📸 58 | Future takePicture() async { 59 | XFile file = await _cameraController.takePicture(); 60 | this._imagePath = file.path; 61 | return file; 62 | } 63 | 64 | /// returns the image size 📏 65 | Size getImageSize() { 66 | return Size( 67 | _cameraController.value.previewSize.height, 68 | _cameraController.value.previewSize.width, 69 | ); 70 | } 71 | 72 | dispose() { 73 | this._cameraController.dispose(); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /pubspec.yaml: -------------------------------------------------------------------------------- 1 | name: face_net_authentication 2 | description: A new Flutter project. 3 | 4 | # The following line prevents the package from being accidentally published to 5 | # pub.dev using `pub publish`. This is preferred for private packages. 6 | publish_to: 'none' # Remove this line if you wish to publish to pub.dev 7 | 8 | # The following defines the version and build number for your application. 9 | # A version number is three numbers separated by dots, like 1.2.43 10 | # followed by an optional build number separated by a +. 11 | # Both the version and the builder number may be overridden in flutter 12 | # build by specifying --build-name and --build-number, respectively. 13 | # In Android, build-name is used as versionName while build-number used as versionCode. 14 | # Read more about Android versioning at https://developer.android.com/studio/publish/versioning 15 | # In iOS, build-name is used as CFBundleShortVersionString while build-number used as CFBundleVersion. 16 | # Read more about iOS versioning at 17 | # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html 18 | version: 1.0.0+1 19 | 20 | environment: 21 | sdk: ">=2.7.0 <3.0.0" 22 | 23 | dependencies: 24 | flutter: 25 | sdk: flutter 26 | 27 | 28 | # The following adds the Cupertino Icons font to your application. 29 | # Use with the CupertinoIcons class for iOS style icons. 30 | cupertino_icons: ^1.0.2 31 | tflite_flutter: ^0.8.0 32 | camera: ^0.8.1 33 | path_provider: ^2.0.1 34 | path: ^1.8.0 35 | google_ml_kit: ^0.5.0 36 | image: ^3.0.2 37 | font_awesome_flutter: ^9.0.0 38 | url_launcher: ^6.0.3 39 | 40 | dev_dependencies: 41 | flutter_test: 42 | sdk: flutter 43 | 44 | # For information on the generic Dart part of this file, see the 45 | # following page: https://dart.dev/tools/pub/pubspec 46 | 47 | # The following section is specific to Flutter. 
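# Note (added comment): the asset paths declared further down are resolved
# relative to the project root, so mobilefacenet.tflite and logo.png are
# expected to live in an assets/ folder matching those entries.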
48 | flutter: 49 | 50 | # The following line ensures that the Material Icons font is 51 | # included with your application, so that you can use the icons in 52 | # the material Icons class. 53 | uses-material-design: true 54 | 55 | # To add assets to your application, add an assets section, like this: 56 | assets: 57 | - assets/mobilefacenet.tflite 58 | - assets/logo.png 59 | # - images/a_dot_ham.jpeg 60 | 61 | # An image asset can refer to one or more resolution-specific "variants", see 62 | # https://flutter.dev/assets-and-images/#resolution-aware. 63 | 64 | # For details regarding adding assets from package dependencies, see 65 | # https://flutter.dev/assets-and-images/#from-packages 66 | 67 | # To add custom fonts to your application, add a fonts section here, 68 | # in this "flutter" section. Each entry in this list should have a 69 | # "family" key with the font family name, and a "fonts" key with a 70 | # list giving the asset and other descriptors for the font. For 71 | # example: 72 | # fonts: 73 | # - family: Schyler 74 | # fonts: 75 | # - asset: fonts/Schyler-Regular.ttf 76 | # - asset: fonts/Schyler-Italic.ttf 77 | # style: italic 78 | # - family: Trajan Pro 79 | # fonts: 80 | # - asset: fonts/TrajanPro.ttf 81 | # - asset: fonts/TrajanPro_Bold.ttf 82 | # weight: 700 83 | # 84 | # For details regarding fonts from package dependencies, 85 | # see https://flutter.dev/custom-fonts/#from-packages 86 | -------------------------------------------------------------------------------- /profile.dart: -------------------------------------------------------------------------------- 1 | import 'dart:io'; 2 | 3 | import 'package:face_net_authentication/pages/widgets/app_button.dart'; 4 | import 'package:flutter/material.dart'; 5 | 6 | import 'home.dart'; 7 | import 'dart:math' as math; 8 | 9 | class Profile extends StatelessWidget { 10 | const Profile(this.username, {Key key, this.imagePath}) : super(key: key); 11 | final String username; 12 | final String imagePath; 13 | 14 | 15 | 16 | @override 17 | Widget build(BuildContext context) { 18 | final double mirror = math.pi; 19 | return Scaffold( 20 | backgroundColor: Color(0XFFC7FFBE), 21 | body: SafeArea( 22 | child: Container( 23 | child: Column( 24 | children: [ 25 | Row( 26 | children: [ 27 | Container( 28 | decoration: BoxDecoration( 29 | borderRadius: BorderRadius.circular(10), 30 | color: Colors.black, 31 | image: DecorationImage( 32 | fit: BoxFit.cover, 33 | image: FileImage(File(imagePath)), 34 | ), 35 | ), 36 | margin: EdgeInsets.all(20), 37 | width: 50, 38 | height: 50, 39 | // child: Transform( 40 | // alignment: Alignment.center, 41 | // child: FittedBox( 42 | // fit: BoxFit.cover, 43 | // child: Image.file(File(imagePath)), 44 | // ), 45 | // transform: Matrix4.rotationY(mirror)), 46 | ), 47 | Text( 48 | 'Hi ' + username + '!', 49 | style: TextStyle(fontSize: 22, fontWeight: FontWeight.w600), 50 | ), 51 | ], 52 | ), 53 | Container( 54 | margin: EdgeInsets.all(20), 55 | padding: EdgeInsets.all(20), 56 | decoration: BoxDecoration( 57 | color: Color(0xFFFEFFC1), 58 | borderRadius: BorderRadius.circular(10), 59 | ), 60 | child: Column( 61 | children: [ 62 | Icon( 63 | Icons.warning_amber_outlined, 64 | size: 30, 65 | ), 66 | SizedBox( 67 | height: 10, 68 | ), 69 | Text( 70 | '''If you think this project seems interesting and need some help implementing it, dont hesitate and lets get in touch!''', 71 | style: TextStyle(fontSize: 16), 72 | textAlign: TextAlign.left, 73 | ), 74 | Divider( 75 | height: 30, 76 | ), 77 | 78 | ], 79 | ), 
80 | ), 81 | Spacer(), 82 | AppButton( 83 | text: "LOG OUT", 84 | onPressed: () { 85 | Navigator.push( 86 | context, 87 | MaterialPageRoute(builder: (context) => MyHomePage()), 88 | ); 89 | }, 90 | icon: Icon( 91 | Icons.logout, 92 | color: Colors.white, 93 | ), 94 | color: Color(0xFFFF6161), 95 | ), 96 | SizedBox( 97 | height: 20, 98 | ) 99 | ], 100 | ), 101 | ), 102 | ), 103 | ); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /logo.svg: -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- /facenet.service.dart: -------------------------------------------------------------------------------- 1 | import 'dart:math'; 2 | import 'dart:typed_data'; 3 | import 'package:face_net_authentication/pages/db/database.dart'; 4 | import 'package:camera/camera.dart'; 5 | import 'package:google_ml_kit/google_ml_kit.dart'; 6 | import 'package:tflite_flutter/tflite_flutter.dart' as tflite; 7 | import 'package:image/image.dart' as imglib; 8 | 9 | class FaceNetService { 10 | // singleton boilerplate 11 | static final FaceNetService _faceNetService = FaceNetService._internal(); 12 | 13 | factory FaceNetService() { 14 | return _faceNetService; 15 | } 16 | // singleton boilerplate 17 | FaceNetService._internal(); 18 | 19 | DataBaseService _dataBaseService = DataBaseService(); 20 | 21 | tflite.Interpreter _interpreter; 22 | 23 | double threshold = 1.0; 24 | 25 | List _predictedData; 26 | List get predictedData => this._predictedData; 27 | 28 | // saved users data 29 | dynamic data = {}; 30 | 31 | Future loadModel() async { 32 | try { 33 | final gpuDelegateV2 = tflite.GpuDelegateV2( 34 | options: tflite.GpuDelegateOptionsV2( 35 | false, 36 | tflite.TfLiteGpuInferenceUsage.fastSingleAnswer, 37 | tflite.TfLiteGpuInferencePriority.minLatency, 38 | tflite.TfLiteGpuInferencePriority.auto, 39 | tflite.TfLiteGpuInferencePriority.auto)); 40 | 41 | var interpreterOptions = tflite.InterpreterOptions() 42 | ..addDelegate(gpuDelegateV2); 43 | this._interpreter = await tflite.Interpreter.fromAsset( 44 | 'mobilefacenet.tflite', 45 | options: interpreterOptions); 46 | print('model loaded successfully'); 47 | } catch (e) { 48 | print('Failed to load model.'); 49 | print(e); 50 | } 51 | } 52 | 53 | setCurrentPrediction(CameraImage cameraImage, Face face) { 54 | /// crops the face from the image and transforms it to an array of data 55 | List input = _preProcess(cameraImage, face); 56 | 57 | /// then reshapes input and ouput to model format 🧑‍🔧 58 | input = input.reshape([1, 112, 112, 3]); 59 | List output = List.generate(1, (index) => List.filled(192, 0)); 60 | 61 | /// runs and transforms the data 🤖 62 | this._interpreter.run(input, output); 63 | output = output.reshape([192]); 64 | 65 | this._predictedData = List.from(output); 66 | } 67 | 68 | /// takes the predicted data previously saved and do inference 69 | String predict() { 70 | /// search closer user prediction if exists 71 | return _searchResult(this._predictedData); 72 | } 73 | 74 | /// _preProess: crops the image to be more easy 75 | /// to detect and transforms it to model input.
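/// (In practice: crop the detected face out of the camera frame, resize it to
/// the 112x112 input MobileFaceNet expects, then normalize every RGB channel
/// with mean 128 and std 128 before feeding the interpreter.)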
76 | /// [cameraImage]: current image 77 | /// [face]: face detected 78 | List _preProcess(CameraImage image, Face faceDetected) { 79 | // crops the face 💇 80 | imglib.Image croppedImage = _cropFace(image, faceDetected); 81 | imglib.Image img = imglib.copyResizeCropSquare(croppedImage, 112); 82 | 83 | // transforms the cropped face to array data 84 | Float32List imageAsList = imageToByteListFloat32(img); 85 | return imageAsList; 86 | } 87 | 88 | /// crops the face from the image 💇 89 | /// [cameraImage]: current image 90 | /// [face]: face detected 91 | _cropFace(CameraImage image, Face faceDetected) { 92 | imglib.Image convertedImage = _convertCameraImage(image); 93 | double x = faceDetected.boundingBox.left - 10.0; 94 | double y = faceDetected.boundingBox.top - 10.0; 95 | double w = faceDetected.boundingBox.width + 10.0; 96 | double h = faceDetected.boundingBox.height + 10.0; 97 | return imglib.copyCrop( 98 | convertedImage, x.round(), y.round(), w.round(), h.round()); 99 | } 100 | 101 | /// converts ___CameraImage___ type to ___Image___ type 102 | /// [image]: image to be converted 103 | imglib.Image _convertCameraImage(CameraImage image) { 104 | int width = image.width; 105 | int height = image.height; 106 | var img = imglib.Image(width, height); 107 | const int hexFF = 0xFF000000; 108 | final int uvyButtonStride = image.planes[1].bytesPerRow; 109 | final int uvPixelStride = image.planes[1].bytesPerPixel; 110 | for (int x = 0; x < width; x++) { 111 | for (int y = 0; y < height; y++) { 112 | final int uvIndex = 113 | uvPixelStride * (x / 2).floor() + uvyButtonStride * (y / 2).floor(); 114 | final int index = y * width + x; 115 | final yp = image.planes[0].bytes[index]; 116 | final up = image.planes[1].bytes[uvIndex]; 117 | final vp = image.planes[2].bytes[uvIndex]; 118 | int r = (yp + vp * 1436 / 1024 - 179).round().clamp(0, 255); 119 | int g = (yp - up * 46549 / 131072 + 44 - vp * 93604 / 131072 + 91) 120 | .round() 121 | .clamp(0, 255); 122 | int b = (yp + up * 1814 / 1024 - 227).round().clamp(0, 255); 123 | img.data[index] = hexFF | (b << 16) | (g << 8) | r; 124 | } 125 | } 126 | var img1 = imglib.copyRotate(img, -90); 127 | return img1; 128 | } 129 | 130 | Float32List imageToByteListFloat32(imglib.Image image) { 131 | /// input size = 112 132 | var convertedBytes = Float32List(1 * 112 * 112 * 3); 133 | var buffer = Float32List.view(convertedBytes.buffer); 134 | int pixelIndex = 0; 135 | 136 | for (var i = 0; i < 112; i++) { 137 | for (var j = 0; j < 112; j++) { 138 | var pixel = image.getPixel(j, i); 139 | 140 | /// mean: 128 141 | /// std: 128 142 | buffer[pixelIndex++] = (imglib.getRed(pixel) - 128) / 128; 143 | buffer[pixelIndex++] = (imglib.getGreen(pixel) - 128) / 128; 144 | buffer[pixelIndex++] = (imglib.getBlue(pixel) - 128) / 128; 145 | } 146 | } 147 | return convertedBytes.buffer.asFloat32List(); 148 | } 149 | 150 | /// searchs the result in the DDBB (this function should be performed by Backend) 151 | /// [predictedData]: Array that represents the face by the MobileFaceNet model 152 | String _searchResult(List predictedData) { 153 | Map data = _dataBaseService.db; 154 | 155 | /// if no faces saved 156 | if (data?.length == 0) return null; 157 | double minDist = 999; 158 | double currDist = 0.0; 159 | String predRes; 160 | 161 | /// search the closest result 👓 162 | for (String label in data.keys) { 163 | currDist = _euclideanDistance(data[label], predictedData); 164 | if (currDist <= threshold && currDist < minDist) { 165 | minDist = currDist; 166 | predRes = label; 167 | 
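// keep the closest embedding seen so far; only candidates whose euclidean
// distance stays under `threshold` (1.0) can be returned as a match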
} 168 | } 169 | return predRes; 170 | } 171 | 172 | /// Adds the power of the difference between each point 173 | /// then computes the sqrt of the result 📐 174 | double _euclideanDistance(List e1, List e2) { 175 | if (e1 == null || e2 == null) throw Exception("Null argument"); 176 | 177 | double sum = 0.0; 178 | for (int i = 0; i < e1.length; i++) { 179 | sum += pow((e1[i] - e2[i]), 2); 180 | } 181 | return sqrt(sum); 182 | } 183 | 184 | void setPredictedData(value) { 185 | this._predictedData = value; 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /sign-up.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:io'; 3 | import 'dart:math' as math; 4 | import 'package:face_net_authentication/pages/widgets/FacePainter.dart'; 5 | import 'package:face_net_authentication/pages/widgets/auth-action-button.dart'; 6 | import 'package:face_net_authentication/pages/widgets/camera_header.dart'; 7 | import 'package:face_net_authentication/services/camera.service.dart'; 8 | import 'package:face_net_authentication/services/facenet.service.dart'; 9 | import 'package:face_net_authentication/services/ml_kit_service.dart'; 10 | import 'package:camera/camera.dart'; 11 | import 'package:google_ml_kit/google_ml_kit.dart'; 12 | import 'package:flutter/material.dart'; 13 | 14 | class SignUp extends StatefulWidget { 15 | final CameraDescription cameraDescription; 16 | 17 | const SignUp({Key key, @required this.cameraDescription}) : super(key: key); 18 | 19 | @override 20 | SignUpState createState() => SignUpState(); 21 | } 22 | 23 | class SignUpState extends State { 24 | String imagePath; 25 | Face faceDetected; 26 | Size imageSize; 27 | 28 | bool _detectingFaces = false; 29 | bool pictureTaked = false; 30 | 31 | Future _initializeControllerFuture; 32 | bool cameraInitializated = false; 33 | 34 | // switchs when the user press the camera 35 | bool _saving = false; 36 | bool _bottomSheetVisible = false; 37 | 38 | // service injection 39 | MLKitService _mlKitService = MLKitService(); 40 | CameraService _cameraService = CameraService(); 41 | FaceNetService _faceNetService = FaceNetService(); 42 | 43 | @override 44 | void initState() { 45 | super.initState(); 46 | 47 | /// starts the camera & start framing faces 48 | _start(); 49 | } 50 | 51 | @override 52 | void dispose() { 53 | // Dispose of the controller when the widget is disposed. 
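// CameraService is a singleton, so this tears down the shared controller;
// whichever screen needs the camera next calls startService() again.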
54 | _cameraService.dispose(); 55 | super.dispose(); 56 | } 57 | 58 | /// starts the camera & start framing faces 59 | _start() async { 60 | _initializeControllerFuture = 61 | _cameraService.startService(widget.cameraDescription); 62 | await _initializeControllerFuture; 63 | 64 | setState(() { 65 | cameraInitializated = true; 66 | }); 67 | 68 | _frameFaces(); 69 | } 70 | 71 | /// handles the button pressed event 72 | Future onShot() async { 73 | if (faceDetected == null) { 74 | showDialog( 75 | context: context, 76 | builder: (context) { 77 | return AlertDialog( 78 | content: Text('No face detected!'), 79 | ); 80 | }, 81 | ); 82 | 83 | return false; 84 | } else { 85 | _saving = true; 86 | await Future.delayed(Duration(milliseconds: 500)); 87 | await _cameraService.cameraController.stopImageStream(); 88 | await Future.delayed(Duration(milliseconds: 200)); 89 | XFile file = await _cameraService.takePicture(); 90 | imagePath = file.path; 91 | 92 | setState(() { 93 | _bottomSheetVisible = true; 94 | pictureTaked = true; 95 | }); 96 | 97 | return true; 98 | } 99 | } 100 | 101 | /// draws rectangles when detects faces 102 | _frameFaces() { 103 | imageSize = _cameraService.getImageSize(); 104 | 105 | _cameraService.cameraController.startImageStream((image) async { 106 | if (_cameraService.cameraController != null) { 107 | // if its currently busy, avoids overprocessing 108 | if (_detectingFaces) return; 109 | 110 | _detectingFaces = true; 111 | 112 | try { 113 | List faces = await _mlKitService.getFacesFromImage(image); 114 | 115 | if (faces.length > 0) { 116 | setState(() { 117 | faceDetected = faces[0]; 118 | }); 119 | 120 | if (_saving) { 121 | _faceNetService.setCurrentPrediction(image, faceDetected); 122 | setState(() { 123 | _saving = false; 124 | }); 125 | } 126 | } else { 127 | setState(() { 128 | faceDetected = null; 129 | }); 130 | } 131 | 132 | _detectingFaces = false; 133 | } catch (e) { 134 | print(e); 135 | _detectingFaces = false; 136 | } 137 | } 138 | }); 139 | } 140 | 141 | _onBackPressed() { 142 | Navigator.of(context).pop(); 143 | } 144 | 145 | _reload() { 146 | setState(() { 147 | _bottomSheetVisible = false; 148 | cameraInitializated = false; 149 | pictureTaked = false; 150 | }); 151 | this._start(); 152 | } 153 | 154 | @override 155 | Widget build(BuildContext context) { 156 | final double mirror = math.pi; 157 | final width = MediaQuery.of(context).size.width; 158 | final height = MediaQuery.of(context).size.height; 159 | return Scaffold( 160 | body: Stack( 161 | children: [ 162 | FutureBuilder( 163 | future: _initializeControllerFuture, 164 | builder: (context, snapshot) { 165 | if (snapshot.connectionState == ConnectionState.done) { 166 | if (pictureTaked) { 167 | return Container( 168 | width: width, 169 | height: height, 170 | child: Transform( 171 | alignment: Alignment.center, 172 | child: FittedBox( 173 | fit: BoxFit.cover, 174 | child: Image.file(File(imagePath)), 175 | ), 176 | transform: Matrix4.rotationY(mirror)), 177 | ); 178 | } else { 179 | return Transform.scale( 180 | scale: 1.0, 181 | child: AspectRatio( 182 | aspectRatio: MediaQuery.of(context).size.aspectRatio, 183 | child: OverflowBox( 184 | alignment: Alignment.center, 185 | child: FittedBox( 186 | fit: BoxFit.fitHeight, 187 | child: Container( 188 | width: width, 189 | height: width * 190 | _cameraService 191 | .cameraController.value.aspectRatio, 192 | child: Stack( 193 | fit: StackFit.expand, 194 | children: [ 195 | CameraPreview( 196 | _cameraService.cameraController), 197 | CustomPaint( 198 | 
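// FacePainter overlays a rounded rectangle on the detected face: green when
// the head is roughly frontal, red when headEulerAngleY exceeds +/-10 degrees.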
painter: FacePainter( 199 | face: faceDetected, 200 | imageSize: imageSize), 201 | ), 202 | ], 203 | ), 204 | ), 205 | ), 206 | ), 207 | ), 208 | ); 209 | } 210 | } else { 211 | return Center(child: CircularProgressIndicator()); 212 | } 213 | }, 214 | ), 215 | CameraHeader( 216 | "SIGN UP", 217 | onBackPressed: _onBackPressed, 218 | ) 219 | ], 220 | ), 221 | floatingActionButtonLocation: FloatingActionButtonLocation.centerFloat, 222 | floatingActionButton: !_bottomSheetVisible 223 | ? AuthActionButton( 224 | _initializeControllerFuture, 225 | onPressed: onShot, 226 | isLogin: false, 227 | reload: _reload, 228 | ) 229 | : Container()); 230 | } 231 | } 232 | -------------------------------------------------------------------------------- /auth-action-button.dart: -------------------------------------------------------------------------------- 1 | import 'dart:io'; 2 | 3 | import 'package:face_net_authentication/pages/db/database.dart'; 4 | import 'package:face_net_authentication/pages/models/user.model.dart'; 5 | import 'package:face_net_authentication/pages/profile.dart'; 6 | import 'package:face_net_authentication/pages/widgets/app_button.dart'; 7 | import 'package:face_net_authentication/services/camera.service.dart'; 8 | import 'package:face_net_authentication/services/facenet.service.dart'; 9 | import 'package:flutter/material.dart'; 10 | import '../home.dart'; 11 | import 'app_text_field.dart'; 12 | 13 | class AuthActionButton extends StatefulWidget { 14 | AuthActionButton(this._initializeControllerFuture, 15 | {Key key, @required this.onPressed, @required this.isLogin, this.reload}); 16 | final Future _initializeControllerFuture; 17 | final Function onPressed; 18 | final bool isLogin; 19 | final Function reload; 20 | @override 21 | _AuthActionButtonState createState() => _AuthActionButtonState(); 22 | } 23 | 24 | class _AuthActionButtonState extends State { 25 | /// service injection 26 | final FaceNetService _faceNetService = FaceNetService(); 27 | final DataBaseService _dataBaseService = DataBaseService(); 28 | final CameraService _cameraService = CameraService(); 29 | 30 | final TextEditingController _userTextEditingController = 31 | TextEditingController(text: ''); 32 | final TextEditingController _passwordTextEditingController = 33 | TextEditingController(text: ''); 34 | 35 | User predictedUser; 36 | 37 | Future _signUp(context) async { 38 | /// gets predicted data from facenet service (user face detected) 39 | List predictedData = _faceNetService.predictedData; 40 | String user = _userTextEditingController.text; 41 | String password = _passwordTextEditingController.text; 42 | 43 | /// creates a new user in the 'database' 44 | await _dataBaseService.saveData(user, password, predictedData); 45 | 46 | /// resets the face stored in the face net sevice 47 | this._faceNetService.setPredictedData(null); 48 | Navigator.push(context, 49 | MaterialPageRoute(builder: (BuildContext context) => MyHomePage())); 50 | } 51 | 52 | Future _signIn(context) async { 53 | String password = _passwordTextEditingController.text; 54 | 55 | if (this.predictedUser.password == password) { 56 | Navigator.push( 57 | context, 58 | MaterialPageRoute( 59 | builder: (BuildContext context) => Profile( 60 | this.predictedUser.user, 61 | imagePath: _cameraService.imagePath, 62 | ))); 63 | } else { 64 | showDialog( 65 | context: context, 66 | builder: (context) { 67 | return AlertDialog( 68 | content: Text('Wrong password!'), 69 | ); 70 | }, 71 | ); 72 | } 73 | } 74 | 75 | String _predictUser() { 76 | String 
userAndPass = _faceNetService.predict(); 77 | return userAndPass ?? null; 78 | } 79 | 80 | @override 81 | Widget build(BuildContext context) { 82 | return InkWell( 83 | onTap: () async { 84 | try { 85 | // Ensure that the camera is initialized. 86 | await widget._initializeControllerFuture; 87 | // onShot event (takes the image and predict output) 88 | bool faceDetected = await widget.onPressed(); 89 | 90 | if (faceDetected) { 91 | if (widget.isLogin) { 92 | var userAndPass = _predictUser(); 93 | if (userAndPass != null) { 94 | this.predictedUser = User.fromDB(userAndPass); 95 | } 96 | } 97 | PersistentBottomSheetController bottomSheetController = 98 | Scaffold.of(context) 99 | .showBottomSheet((context) => signSheet(context)); 100 | 101 | bottomSheetController.closed.whenComplete(() => widget.reload()); 102 | } 103 | } catch (e) { 104 | // If an error occurs, log the error to the console. 105 | print(e); 106 | } 107 | }, 108 | child: Container( 109 | decoration: BoxDecoration( 110 | borderRadius: BorderRadius.circular(10), 111 | color: Color(0xFF0F0BDB), 112 | boxShadow: [ 113 | BoxShadow( 114 | color: Colors.blue.withOpacity(0.1), 115 | blurRadius: 1, 116 | offset: Offset(0, 2), 117 | ), 118 | ], 119 | ), 120 | alignment: Alignment.center, 121 | padding: EdgeInsets.symmetric(vertical: 14, horizontal: 16), 122 | width: MediaQuery.of(context).size.width * 0.8, 123 | height: 60, 124 | child: Row( 125 | mainAxisAlignment: MainAxisAlignment.center, 126 | children: [ 127 | Text( 128 | 'CAPTURE', 129 | style: TextStyle(color: Colors.white), 130 | ), 131 | SizedBox( 132 | width: 10, 133 | ), 134 | Icon(Icons.camera_alt, color: Colors.white) 135 | ], 136 | ), 137 | ), 138 | ); 139 | } 140 | 141 | signSheet(context) { 142 | return Container( 143 | padding: EdgeInsets.all(20), 144 | child: Column( 145 | mainAxisSize: MainAxisSize.min, 146 | mainAxisAlignment: MainAxisAlignment.spaceBetween, 147 | children: [ 148 | widget.isLogin && predictedUser != null 149 | ? Container( 150 | child: Text( 151 | 'Welcome back, ' + predictedUser.user + '.', 152 | style: TextStyle(fontSize: 20), 153 | ), 154 | ) 155 | : widget.isLogin 156 | ? Container( 157 | child: Text( 158 | 'User not found 😞', 159 | style: TextStyle(fontSize: 20), 160 | )) 161 | : Container(), 162 | Container( 163 | child: Column( 164 | children: [ 165 | !widget.isLogin 166 | ? AppTextField( 167 | controller: _userTextEditingController, 168 | labelText: "Your Name", 169 | ) 170 | : Container(), 171 | SizedBox(height: 10), 172 | widget.isLogin && predictedUser == null 173 | ? Container() 174 | : AppTextField( 175 | controller: _passwordTextEditingController, 176 | labelText: "Password", 177 | isPassword: true, 178 | ), 179 | SizedBox(height: 10), 180 | Divider(), 181 | SizedBox(height: 10), 182 | widget.isLogin && predictedUser != null 183 | ? AppButton( 184 | text: 'LOGIN', 185 | onPressed: () async { 186 | _signIn(context); 187 | }, 188 | icon: Icon( 189 | Icons.login, 190 | color: Colors.white, 191 | ), 192 | ) 193 | : !widget.isLogin 194 | ? 
AppButton( 195 | text: 'SIGN UP', 196 | onPressed: () async { 197 | await _signUp(context); 198 | }, 199 | icon: Icon( 200 | Icons.person_add, 201 | color: Colors.white, 202 | ), 203 | ) 204 | : Container(), 205 | ], 206 | ), 207 | ), 208 | ], 209 | ), 210 | ); 211 | } 212 | 213 | @override 214 | void dispose() { 215 | super.dispose(); 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /sign-in.dart: -------------------------------------------------------------------------------- 1 | // A screen that allows users to take a picture using a given camera. 2 | import 'dart:async'; 3 | import 'dart:io'; 4 | import 'package:face_net_authentication/pages/widgets/FacePainter.dart'; 5 | import 'package:face_net_authentication/pages/widgets/auth-action-button.dart'; 6 | import 'package:face_net_authentication/pages/widgets/camera_header.dart'; 7 | import 'package:face_net_authentication/services/camera.service.dart'; 8 | import 'package:face_net_authentication/services/facenet.service.dart'; 9 | import 'package:face_net_authentication/services/ml_kit_service.dart'; 10 | import 'package:camera/camera.dart'; 11 | import 'package:google_ml_kit/google_ml_kit.dart'; 12 | import 'package:flutter/material.dart'; 13 | import 'dart:math' as math; 14 | 15 | class SignIn extends StatefulWidget { 16 | final CameraDescription cameraDescription; 17 | 18 | const SignIn({ 19 | Key key, 20 | @required this.cameraDescription, 21 | }) : super(key: key); 22 | 23 | @override 24 | SignInState createState() => SignInState(); 25 | } 26 | 27 | class SignInState extends State { 28 | /// Service injection 29 | CameraService _cameraService = CameraService(); 30 | MLKitService _mlKitService = MLKitService(); 31 | FaceNetService _faceNetService = FaceNetService(); 32 | 33 | Future _initializeControllerFuture; 34 | 35 | bool cameraInitializated = false; 36 | bool _detectingFaces = false; 37 | bool pictureTaked = false; 38 | 39 | // switchs when the user press the camera 40 | bool _saving = false; 41 | bool _bottomSheetVisible = false; 42 | 43 | String imagePath; 44 | Size imageSize; 45 | Face faceDetected; 46 | 47 | @override 48 | void initState() { 49 | super.initState(); 50 | 51 | /// starts the camera & start framing faces 52 | _start(); 53 | } 54 | 55 | @override 56 | void dispose() { 57 | // Dispose of the controller when the widget is disposed. 
58 | _cameraService.dispose(); 59 | super.dispose(); 60 | } 61 | 62 | /// starts the camera & start framing faces 63 | _start() async { 64 | _initializeControllerFuture = 65 | _cameraService.startService(widget.cameraDescription); 66 | await _initializeControllerFuture; 67 | 68 | setState(() { 69 | cameraInitializated = true; 70 | }); 71 | 72 | _frameFaces(); 73 | } 74 | 75 | /// draws rectangles when detects faces 76 | _frameFaces() { 77 | imageSize = _cameraService.getImageSize(); 78 | 79 | _cameraService.cameraController.startImageStream((image) async { 80 | if (_cameraService.cameraController != null) { 81 | // if its currently busy, avoids overprocessing 82 | if (_detectingFaces) return; 83 | 84 | _detectingFaces = true; 85 | 86 | try { 87 | List faces = await _mlKitService.getFacesFromImage(image); 88 | 89 | if (faces != null) { 90 | if (faces.length > 0) { 91 | // preprocessing the image 92 | setState(() { 93 | faceDetected = faces[0]; 94 | }); 95 | 96 | if (_saving) { 97 | _saving = false; 98 | _faceNetService.setCurrentPrediction(image, faceDetected); 99 | } 100 | } else { 101 | setState(() { 102 | faceDetected = null; 103 | }); 104 | } 105 | } 106 | 107 | _detectingFaces = false; 108 | } catch (e) { 109 | print(e); 110 | _detectingFaces = false; 111 | } 112 | } 113 | }); 114 | } 115 | 116 | /// handles the button pressed event 117 | Future onShot() async { 118 | if (faceDetected == null) { 119 | showDialog( 120 | context: context, 121 | builder: (context) { 122 | return AlertDialog( 123 | content: Text('No face detected!'), 124 | ); 125 | }, 126 | ); 127 | 128 | return false; 129 | } else { 130 | _saving = true; 131 | 132 | await Future.delayed(Duration(milliseconds: 500)); 133 | await _cameraService.cameraController.stopImageStream(); 134 | await Future.delayed(Duration(milliseconds: 200)); 135 | XFile file = await _cameraService.takePicture(); 136 | 137 | setState(() { 138 | _bottomSheetVisible = true; 139 | pictureTaked = true; 140 | imagePath = file.path; 141 | }); 142 | 143 | return true; 144 | } 145 | } 146 | 147 | _onBackPressed() { 148 | Navigator.of(context).pop(); 149 | } 150 | 151 | _reload() { 152 | setState(() { 153 | _bottomSheetVisible = false; 154 | cameraInitializated = false; 155 | pictureTaked = false; 156 | }); 157 | this._start(); 158 | } 159 | 160 | @override 161 | Widget build(BuildContext context) { 162 | final double mirror = math.pi; 163 | final width = MediaQuery.of(context).size.width; 164 | final height = MediaQuery.of(context).size.height; 165 | return Scaffold( 166 | body: Stack( 167 | children: [ 168 | FutureBuilder( 169 | future: _initializeControllerFuture, 170 | builder: (context, snapshot) { 171 | if (snapshot.connectionState == ConnectionState.done) { 172 | if (pictureTaked) { 173 | return Container( 174 | width: width, 175 | height: height, 176 | child: Transform( 177 | alignment: Alignment.center, 178 | child: FittedBox( 179 | fit: BoxFit.cover, 180 | child: Image.file(File(imagePath)), 181 | ), 182 | transform: Matrix4.rotationY(mirror)), 183 | ); 184 | } else { 185 | return Transform.scale( 186 | scale: 1.0, 187 | child: AspectRatio( 188 | aspectRatio: MediaQuery.of(context).size.aspectRatio, 189 | child: OverflowBox( 190 | alignment: Alignment.center, 191 | child: FittedBox( 192 | fit: BoxFit.fitHeight, 193 | child: Container( 194 | width: width, 195 | height: width * 196 | _cameraService 197 | .cameraController.value.aspectRatio, 198 | child: Stack( 199 | fit: StackFit.expand, 200 | children: [ 201 | CameraPreview( 202 | 
_cameraService.cameraController), 203 | CustomPaint( 204 | painter: FacePainter( 205 | face: faceDetected, 206 | imageSize: imageSize), 207 | ) 208 | ], 209 | ), 210 | ), 211 | ), 212 | ), 213 | ), 214 | ); 215 | } 216 | } else { 217 | return Center(child: CircularProgressIndicator()); 218 | } 219 | }), 220 | CameraHeader( 221 | "LOGIN", 222 | onBackPressed: _onBackPressed, 223 | ) 224 | ], 225 | ), 226 | floatingActionButtonLocation: FloatingActionButtonLocation.centerFloat, 227 | floatingActionButton: !_bottomSheetVisible 228 | ? AuthActionButton( 229 | _initializeControllerFuture, 230 | onPressed: onShot, 231 | isLogin: true, 232 | reload: _reload, 233 | ) 234 | : Container(), 235 | ); 236 | } 237 | } 238 | -------------------------------------------------------------------------------- /home.dart: -------------------------------------------------------------------------------- 1 | import 'package:face_net_authentication/pages/db/database.dart'; 2 | import 'package:face_net_authentication/pages/sign-in.dart'; 3 | import 'package:face_net_authentication/pages/sign-up.dart'; 4 | import 'package:face_net_authentication/services/facenet.service.dart'; 5 | import 'package:face_net_authentication/services/ml_kit_service.dart'; 6 | import 'package:camera/camera.dart'; 7 | import 'package:flutter/material.dart'; 8 | 9 | 10 | class MyHomePage extends StatefulWidget { 11 | MyHomePage({Key key}) : super(key: key); 12 | @override 13 | _MyHomePageState createState() => _MyHomePageState(); 14 | } 15 | 16 | class _MyHomePageState extends State { 17 | // Services injection 18 | FaceNetService _faceNetService = FaceNetService(); 19 | MLKitService _mlKitService = MLKitService(); 20 | DataBaseService _dataBaseService = DataBaseService(); 21 | 22 | CameraDescription cameraDescription; 23 | bool loading = false; 24 | 25 | 26 | 27 | @override 28 | void initState() { 29 | super.initState(); 30 | _startUp(); 31 | } 32 | 33 | /// 1 Obtain a list of the available cameras on the device. 34 | /// 2 loads the face net model 35 | _startUp() async { 36 | _setLoading(true); 37 | 38 | List cameras = await availableCameras(); 39 | 40 | /// takes the front camera 41 | cameraDescription = cameras.firstWhere( 42 | (CameraDescription camera) => 43 | camera.lensDirection == CameraLensDirection.front, 44 | ); 45 | 46 | // start the services 47 | await _faceNetService.loadModel(); 48 | await _dataBaseService.loadDB(); 49 | _mlKitService.initialize(); 50 | 51 | _setLoading(false); 52 | } 53 | 54 | // shows or hides the circular progress indicator 55 | _setLoading(bool value) { 56 | setState(() { 57 | loading = value; 58 | }); 59 | } 60 | 61 | 62 | @override 63 | Widget build(BuildContext context) { 64 | return Scaffold( 65 | backgroundColor: Colors.red, 66 | appBar: AppBar( 67 | leading: Container(), 68 | elevation: 0, 69 | backgroundColor: Colors.transparent, 70 | actions: [ 71 | Padding( 72 | padding: EdgeInsets.only(right: 20, top: 20), 73 | child: PopupMenuButton( 74 | child: Icon( 75 | Icons.more_vert, 76 | color: Colors.black, 77 | ), 78 | onSelected: (value) { 79 | switch (value) { 80 | case 'Clear DB': 81 | _dataBaseService.cleanDB(); 82 | break; 83 | } 84 | }, 85 | itemBuilder: (BuildContext context) { 86 | return {'Clear DB'}.map((String choice) { 87 | return PopupMenuItem( 88 | value: choice, 89 | child: Text(choice), 90 | ); 91 | }).toList(); 92 | }, 93 | ), 94 | ), 95 | ], 96 | ), 97 | body: !loading 98 | ? 
SafeArea( 99 | child: Center( 100 | child: Column( 101 | mainAxisAlignment: MainAxisAlignment.spaceEvenly, 102 | children: [ 103 | Image(image: AssetImage('assets/logo.png')), 104 | Container( 105 | width: MediaQuery.of(context).size.width * 0.8, 106 | child: Column( 107 | children: [ 108 | Text( 109 | "FACE RECOGNITION AUTHENTICATION", 110 | style: TextStyle( 111 | fontSize: 25, fontWeight: FontWeight.bold), 112 | textAlign: TextAlign.center, 113 | ), 114 | SizedBox( 115 | height: 20, 116 | ), 117 | Text( 118 | "Demo application that uses Flutter and tensorflow to implement authentication with facial recognition", 119 | style: TextStyle( 120 | fontSize: 16, 121 | ), 122 | textAlign: TextAlign.center, 123 | ), 124 | ], 125 | ), 126 | ), 127 | Column( 128 | children: [ 129 | InkWell( 130 | onTap: () { 131 | Navigator.push( 132 | context, 133 | MaterialPageRoute( 134 | builder: (BuildContext context) => SignIn( 135 | cameraDescription: cameraDescription, 136 | ), 137 | ), 138 | ); 139 | }, 140 | child: Container( 141 | decoration: BoxDecoration( 142 | borderRadius: BorderRadius.circular(10), 143 | color: Colors.white, 144 | boxShadow: [ 145 | BoxShadow( 146 | color: Colors.blue.withOpacity(0.1), 147 | blurRadius: 1, 148 | offset: Offset(0, 2), 149 | ), 150 | ], 151 | ), 152 | alignment: Alignment.center, 153 | padding: EdgeInsets.symmetric( 154 | vertical: 14, horizontal: 16), 155 | width: MediaQuery.of(context).size.width * 0.8, 156 | child: Row( 157 | mainAxisAlignment: MainAxisAlignment.center, 158 | children: [ 159 | Text( 160 | 'LOGIN', 161 | style: TextStyle(color: Color(0xFF0F0BDB)), 162 | ), 163 | SizedBox( 164 | width: 10, 165 | ), 166 | Icon(Icons.login, color: Color(0xFF0F0BDB)) 167 | ], 168 | ), 169 | ), 170 | ), 171 | SizedBox( 172 | height: 10, 173 | ), 174 | InkWell( 175 | onTap: () { 176 | Navigator.push( 177 | context, 178 | MaterialPageRoute( 179 | builder: (BuildContext context) => SignUp( 180 | cameraDescription: cameraDescription, 181 | ), 182 | ), 183 | ); 184 | }, 185 | child: Container( 186 | decoration: BoxDecoration( 187 | borderRadius: BorderRadius.circular(10), 188 | color: Color(0xFF0F0BDB), 189 | boxShadow: [ 190 | BoxShadow( 191 | color: Colors.blue.withOpacity(0.1), 192 | blurRadius: 1, 193 | offset: Offset(0, 2), 194 | ), 195 | ], 196 | ), 197 | alignment: Alignment.center, 198 | padding: EdgeInsets.symmetric( 199 | vertical: 14, horizontal: 16), 200 | width: MediaQuery.of(context).size.width * 0.8, 201 | child: Row( 202 | mainAxisAlignment: MainAxisAlignment.center, 203 | children: [ 204 | Text( 205 | 'SIGN UP', 206 | style: TextStyle(color: Colors.white), 207 | ), 208 | SizedBox( 209 | width: 10, 210 | ), 211 | Icon(Icons.person_add, color: Colors.white) 212 | ], 213 | ), 214 | ), 215 | ), 216 | 217 | ], 218 | ) 219 | ], 220 | ), 221 | ), 222 | ) 223 | : Center( 224 | child: CircularProgressIndicator(), 225 | ), 226 | ); 227 | } 228 | } 229 | --------------------------------------------------------------------------------
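The services above compose into a single recognition pass: ML Kit finds a face in the camera frame, FaceNetService turns the cropped face into a 192-value embedding with MobileFaceNet, and that embedding is matched against the entries saved by DataBaseService. The sketch below is an editor's illustration, not a file from the repository; `recognizeOnce` is a hypothetical helper name, and it assumes the services were already initialized in home.dart (`loadModel`, `loadDB`, `initialize`) and that the same imports as sign-in.dart are in scope.

// Hypothetical helper (not part of the repository) showing how the existing
// services are combined by SignIn / AuthActionButton for one recognition pass.
Future<User> recognizeOnce(CameraImage frame) async {
  final MLKitService mlKit = MLKitService();       // singletons, already
  final FaceNetService faceNet = FaceNetService(); // initialized in home.dart

  // 1. Detect faces in the current camera frame with ML Kit.
  List<Face> faces = await mlKit.getFacesFromImage(frame);
  if (faces == null || faces.length == 0) return null;

  // 2. Crop, resize and normalize the face, then run MobileFaceNet to get a
  //    192-value embedding (FaceNetService.setCurrentPrediction).
  faceNet.setCurrentPrediction(frame, faces[0]);

  // 3. Compare the embedding with every saved user (euclidean distance,
  //    threshold 1.0) and recover the stored "user:password" key, if any.
  String userAndPass = faceNet.predict();
  return userAndPass == null ? null : User.fromDB(userAndPass);
}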