├── .gitignore ├── GETTING_STARTED.md ├── README.md ├── assets ├── brfv4_face_textures.js ├── brfv4_img_glasses.png ├── brfv4_lion.png ├── brfv4_logo.png ├── brfv4_model.json ├── brfv4_occlusion_head.json ├── brfv4_portrait_chris.jpg ├── brfv4_portrait_marcel.jpg ├── brfv4_two_faces.jpg ├── brfv4_woman_old.jpg └── brfv4_women_young.jpg ├── brfv4_landmarks.jpg ├── css └── brfv4_examples_theme.css ├── eula.txt ├── face_texture_overlay.html ├── index.html ├── js ├── BRFv4Demo.js ├── BRFv4DemoMinimalImage.js ├── BRFv4DemoMinimalWebcam.js ├── examples │ ├── face_detection │ │ ├── detect_in_center.js │ │ ├── detect_in_whole_image.js │ │ ├── detect_larger_faces.js │ │ └── detect_smaller_faces.js │ ├── face_tracking │ │ ├── ThreeJS_example.js │ │ ├── blink_detection.js │ │ ├── blink_detection_center.js │ │ ├── candide_overlay.js │ │ ├── color_libs.js │ │ ├── extended_face_shape.js │ │ ├── face_swap_two_faces.js │ │ ├── face_texture_overlay.js │ │ ├── png_mask_overlay.js │ │ ├── restrict_to_center.js │ │ ├── smile_detection.js │ │ ├── track_multiple_faces.js │ │ ├── track_single_face.js │ │ └── yawn_detection.js │ └── point_tracking │ │ ├── track_multiple_points.js │ │ └── track_points_and_face.js ├── libs │ ├── brf_asmjs │ │ ├── BRFv4_JS_TK210219_v4.2.0_trial.js │ │ └── BRFv4_JS_TK210219_v4.2.0_trial.js.mem │ ├── brf_wasm │ │ ├── BRFv4_JS_TK210219_v4.2.0_trial.js │ │ ├── BRFv4_JS_TK210219_v4.2.0_trial.wasm │ │ └── BRFv4_JS_TK210219_v4.2.0_trial.worker.js │ ├── createjs │ │ ├── BitmapData.js │ │ ├── easeljs.min.js │ │ └── preloadjs.min.js │ ├── highlight │ │ ├── highlight.pack.js │ │ └── highlight_tomorrow.css │ ├── quicksettings │ │ ├── quicksettings.js │ │ ├── quicksettings.min.css │ │ ├── quicksettings.min.js │ │ └── quicksettings_tiny.min.css │ └── threejs │ │ ├── Detector.js │ │ └── three.min.js └── utils │ ├── BRFv4DOMUtils.js │ ├── BRFv4DownloadChooser.js │ ├── BRFv4Drawing3DUtils_ThreeJS.js │ ├── BRFv4DrawingUtils_CreateJS.js │ ├── BRFv4ExampleChooser.js │ ├── 
BRFv4ExtendedFace.js │ ├── BRFv4PointUtils.js │ ├── BRFv4PublicAPI.js │ ├── BRFv4SetupChooser.js │ ├── BRFv4SetupExample.js │ ├── BRFv4SetupPicture.js │ ├── BRFv4SetupWebcam.js │ └── BRFv4Stats.js ├── minimalImage.html ├── minimalWebcam.html ├── minimalWebcamFaceSwap.html ├── minimalWebcamPNGOverlay.html ├── minimalWebcamWorker.html └── readme_img.jpg /.gitignore: -------------------------------------------------------------------------------- 1 | /.idea/ 2 | -------------------------------------------------------------------------------- /GETTING_STARTED.md: -------------------------------------------------------------------------------- 1 | # Beyond Reality Face SDK - v4.1.0 (BRFv4) - Getting Started ... 2 | 3 | ### ... with brfv4_javascript_examples 4 | 5 | + Download or clone this Github repository. 6 | + Make sure to run the index.html file on a local server, eg. if you open this repo in Webstorm and run the index.html, Webstorm 7 | will automatically create a localhost for you. You could also use MAMP or any other tool to create a local server. 8 | 9 | This repo has three entry points. 10 | 11 | BRFv4Demo.js loaded by index.html creates the same UI as our [online JS Demo](https://tastenkunst.github.io/brfv4_javascript_examples/). 12 | 13 | Tools used for this demo: 14 | 15 | + [CreateJS](http://createjs.com/) to draw everything interesting on a <canvas>, 16 | + [ThreeJS](https://threejs.org/) to put a 3D model on top of a face, 17 | + [Quicksettings](https://github.com/bit101/quicksettings) for building the UI and 18 | + [Highlight.js](https://github.com/isagalaev/highlight.js) for showing the code snippets. 
19 | 20 | There are also two minimal examples that don't have any dependencies apart from BRFv4: 21 | 22 | #### "Minimal Webcam" ([minimalWebcam.html](https://tastenkunst.github.io/brfv4_javascript_examples/minimalWebcam.html)) 23 | 24 | ##### DOM 25 | 26 | We need a <video> for the webcam playback and a <canvas> to draw the video data to and get the pixel array from. 27 | 28 | ##### JS 29 | 30 | ```javascript 31 | function startCamera() {} 32 | function waitForSDK() {} 33 | function initSDK() {} 34 | function trackFaces() {} 35 | ``` 36 | ##### startCamera: 37 | 38 | BRFv4 needs to know the dimensions of the image data. So the first thing to do is initializing the webcam. 39 | A camera may or may not be able to deliver the requested resolution. That's why we need to wait for the final video 40 | dimensions before initializing the SDK. 41 | 42 | ##### waitForSDK: 43 | 44 | The SDK itself also needs to initialize its data internally. So after obtaining the dimensions we need to 45 | wait for the SDK to be ready (this usually takes loading time for 9MB .mem file and 3 seconds to initialize) and then ... 46 | 47 | ##### initSDK: 48 | 49 | Dimensions are known and the SDK is ready. So now it's time to call BRFManager.init with the image dimensions. 50 | Then we setup an interval of 30FPS to ... 51 | 52 | ##### trackFaces: 53 | 54 | We want to look into a mirror. That's why the canvas gets transformed, the video gets drawn and the canvas 55 | gets transformed again to draw the results. In this example we draw the 68 landmarks of a face. 56 | 57 | #### "Minimal Image" ([minimalImage.html](https://tastenkunst.github.io/brfv4_javascript_examples/minimalImage.html)) 58 | 59 | ##### DOM 60 | 61 | We need an <img> and a <canvas> to draw the image data to and get the pixel array from. 
62 | 63 | ##### JS 64 | 65 | ```javascript 66 | function waitForSDK() {} 67 | function initSDK() {} 68 | function trackFaces() {} 69 | ``` 70 | 71 | Since the img tag gets loaded before JS is executed we don't need to wait for the image dimensions and can immediately ... 72 | 73 | ##### waitForSDK: 74 | 75 | The SDK itself needs to initialize its data internally. Once it is ready (this usually takes loading time for 9MB .mem 76 | file and 3 seconds to initialize) we can ... 77 | 78 | ##### initSDK: 79 | 80 | Dimensions are known and the SDK is ready. So now it's time to call BRFManager.init with the image dimensions. 81 | 82 | ##### trackFaces: 83 | 84 | BRFv4 is made to work on a camera stream. While calling BRFManager.update() once might be alright, it is best 85 | to make sure that we get a proper result by calling BRFManager.update() a few times in a row (maybe 10 times). 86 | 87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## BRFv5 Released! 2 | 3 | BRFv4 will be discontinued in 2020. 4 | 5 | Check out the new BRFv5 JavaScript SDK here: 6 | 7 | ### [https://github.com/tastenkunst/brfv5-browser](https://github.com/tastenkunst/brfv5-browser) 8 | 9 | ## Beyond Reality Face SDK - v4.1.0 (BRFv4) - Readme 10 | 11 | ### What is BRFv4? 12 | 13 | It is a real time face detection and tracking SDK. You put in image data (camera stream or single picture) and it outputs facial data. 14 | 15 | ![alt text](readme_img.jpg "BRFv4") 16 | 17 | ### Ready to try! 18 | 19 | Read the EULA (eula.txt) carefully before using the SDK. Once you decide to use BRFv4 commercially, you will get a 20 | separate license agreement, that you must agree to. You can try the SDK free of charge to evaluate if it fits your projects' 21 | needs. 
Once you decided to use BRFv4 in your project, please contact us for a commercial license: 22 | 23 | + http://www.tastenkunst.com/#/contact 24 | 25 | ### Visit us online. 26 | 27 | + [Github](https://github.com/Tastenkunst) 28 | + [Demo](https://tastenkunst.github.io/brfv4_javascript_examples/) 29 | + [Docs / API](https://tastenkunst.github.io/brfv4_docs/) 30 | + [What can I do with it?](https://tastenkunst.github.io/brfv4_docs/what_can_i_do_with_it.html) 31 | + [Website](https://www.beyond-reality-face.com) 32 | + [Facebook](https://www.facebook.com/BeyondRealityFace) 33 | + [Twitter](https://twitter.com/tastenkunst) 34 | 35 | ### Getting started. 36 | 37 | To test BRFv4 simply visit the Javascript demo site: 38 | 39 | + https://tastenkunst.github.io/brfv4_javascript_examples/ 40 | 41 | This page also includes all available packages for download. 42 | 43 | ### Which platforms does it support? 44 | 45 | #### HTML5/Browser – Javascript (works in Chrome/Firefox/Edge/Opera/Safari 11) 46 | Run the index.html on a local server. 47 | 48 | #### iOS - ObjectiveC/C++ 49 | Open the Xcode project. Attach your iOS device and run the example app on your device. 50 | 51 | #### Android - Java 52 | Open the Android Studio project. Attach your Android device and run the example app on your device. 53 | 54 | #### macOS - C++ utilizing OpenCV for camera access and drawing 55 | Have [OpenCV](http://opencv.org/) brewed (opencv3) on your system. Open the Xcode project and just run it on your Mac. 56 | 57 | #### Windows - C++ utilizing OpenCV for camera access and drawing 58 | Good luck in trying to compile [OpenCV](http://opencv.org/) for your Windows. Update the Visual Studio (2017) project properties that mention 59 | OpenCV. Then run the Release x64 target. Fingers crossed! 60 | 61 | #### Adobe AIR - Actionscript 3 on Windows, macOS, iOS and Android 62 | Use your preferred IDE. 
Add the src folder and the ANE itself to your class path and run the example class on your 63 | desired device (not in a simulator). Unfortunately we had to discontinue Flash Player (SWF in browser) support. 64 | 65 | ### Technical overview 66 | 67 | BRFv4 comes with the following components: 68 | 69 | + face detection - Finds faces (rectangles) in an image/camera stream 70 | + face tracking - Finds 68 facial landmarks/features 71 | + point tracking - Tracks points in a webcam stream 72 | 73 | All available packages have roughly the same content and come with a set of examples to show SDK use cases. 74 | 75 | ### What image size does BRFv4 need? 76 | 77 | You can input any image size. 78 | 79 | Internally BRFv4 uses a DYNx480 (landscape) or 480xDYN (portrait) image for the analysis. So 480px is the base size that every other input size gets scaled to, eg. 80 | 81 | landscape: 82 | 83 | + 640 x 480 -> 640 x 480 // fastest, no scaling 84 | + 1280 x 720 -> 854 x 480 85 | + 1920 x 1080 -> 854 x 480 86 | 87 | portrait: 88 | 89 | + 480 x 640 -> 480 x 640 // fastest, no scaling 90 | + 720 x 1280 -> 480 x 854 91 | + 1080 x 1920 -> 480 x 854 92 | 93 | BRFv4 scales the results up again, so you don't have to do that yourself. 94 | All parameters named *size or *width are pixel values based on the actual image size. 95 | eg. 
telling BRF what face sizes to initially detect: 96 | 97 | ```markdown 98 | brfManager.setFaceDetectionParams(int minFaceSize, int maxFaceSize, int stepSize, int minMergeNeighbors); 99 | ``` 100 | If you work with a 640x480 camera stream, it would be something like this: 101 | ```markdown 102 | brfManager.setFaceDetectionParams(144, 432, 12, 8); 103 | ``` 104 | Whereas if you work with a 1280x720 camera stream, you will need something like this: 105 | ```markdown 106 | brfManager.setFaceDetectionParams(216, 648, 12, 8); 107 | ``` 108 | In the examples we generalize that a bit: 109 | ```javascript 110 | // We have either a landscape area (desktop), then choose height or 111 | // we have a portrait area (mobile), then choose width as max face size. 112 | 113 | var maxFaceSize = _faceDetectionRoi.height; 114 | 115 | if(_faceDetectionRoi.width < _faceDetectionRoi.height) { 116 | maxFaceSize = _faceDetectionRoi.width; 117 | } 118 | 119 | brfManager.setFaceDetectionParams(maxFaceSize * 0.30, maxFaceSize * 0.90, 12, 8); 120 | ``` 121 | More on that in the API, see link above. 122 | 123 | ### FAQ 124 | 125 | Can I track other objects like hands or neck? 126 | + No, it is tracking faces only. 127 | 128 | Can you increase the performance? 129 | + We could remove some calculations in a commercial version, if you want to, but this comes at the price of reduced accuracy. 130 | 131 | Can you make the library smaller? 132 | + Usually the descriptor would be 80MB and more. It's already only 9MB for most platforms. So: We could go down in 1,5MB steps, but this will also massively decrease accuracy. 133 | Once you bought a license you can choose which size you want to go with. 134 | 135 | ### Release notes 136 | 137 | v4.1.0 - 11th July 2018 138 | 139 | + All: Changed 3D calculation model a bit. This might result in slightly different placement and rotationX. 140 | + Info: We started to work on BRFv5 (yeha!) 
141 | 142 | v4.0.1 - 09th November 2017 143 | 144 | + JS: Added: WASM export to Javascript SDK. 145 | + JS: Fix: Found a workaround for iOS 11 (in Safari) for starting the camera. 146 | + JS: Updated: CreateJS to v1.0.2 (easel) and v1.0.1 (preload). 147 | + JS: Updated: ThreeJS to r88. 148 | + Minor cleanups 149 | + Known issue: JS SDK is slow in Chrome 63 because of this bug: https://bugs.chromium.org/p/chromium/issues/detail?id=768775 150 | 151 | v4.0.0 - 20th June 2017 152 | 153 | It's done! After over a year of development Tastenkunst is proud to announce the release of BRFv4. 154 | 155 | + Changed: Completely rewritten the C++ core: image handling, face detection and tracking algorithms etc. 156 | + Changed: Image data can now be of any size. BRFv4 will handle the scaling internally. 157 | + Changed: Point tracking and face tracking can now be done simultaneously. 158 | + Changed: Face tracking algorithm changed from ASM to ERT. This comes with an increased file size though (For JS up from 2MB to 10MB) 159 | + Added: Multi face tracking. It is now possible to track more than one face. 160 | + Added: Example project for native Android (Java, Android Studio project) 161 | + Added: Example project for macOS (C++, Xcode project, needs brewed OpenCV for camera handling and drawing) 162 | + Added: Example project for Windows (C++, Visual Studio 2017 project, needs OpenCV for camera handling and drawing) 163 | + Added: Adobe AIR native extension now supports Windows, macOS, iOS and Android. 164 | + Removed: Support for Flash Player (SWF in Browser). 
165 | 166 | ### Licenses 167 | 168 | Used Haar Cascade: haarcascade_frontalface_default.xml 169 | ``` 170 | 213 | ``` 214 | -------------------------------------------------------------------------------- /assets/brfv4_img_glasses.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_img_glasses.png -------------------------------------------------------------------------------- /assets/brfv4_lion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_lion.png -------------------------------------------------------------------------------- /assets/brfv4_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_logo.png -------------------------------------------------------------------------------- /assets/brfv4_portrait_chris.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_portrait_chris.jpg -------------------------------------------------------------------------------- /assets/brfv4_portrait_marcel.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_portrait_marcel.jpg -------------------------------------------------------------------------------- /assets/brfv4_two_faces.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_two_faces.jpg -------------------------------------------------------------------------------- /assets/brfv4_woman_old.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_woman_old.jpg -------------------------------------------------------------------------------- /assets/brfv4_women_young.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/assets/brfv4_women_young.jpg -------------------------------------------------------------------------------- /brfv4_landmarks.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/brfv4_landmarks.jpg -------------------------------------------------------------------------------- /css/brfv4_examples_theme.css: -------------------------------------------------------------------------------- 1 | html, body { position: absolute; width: 100%; height: 100%; background-color: #ffffff; margin: 0; padding: 0; font-family: Arial, sans-serif; font-size: 16px; color: #005f9c; overflow: auto; } 2 | canvas { position: absolute; background-color: transparent; display: block; margin: 0; padding: 0; } 3 | video { position: absolute; background-color: transparent; display: none; margin: 0; padding: 0; } 4 | 5 | #_wrapper { position: absolute; width: 100%; height: 100%; } 6 | #_content { position: absolute; width: 640px; height: 480px; background-color: #f7f7f7; margin: auto; top: 0; left: 0; right: 0; } 7 | #_progressBar { position: absolute; width: 1px; height: 480px; 
background-color: #000000; margin: auto; top: 0; left: 0; } 8 | #_subline { position: absolute; width: 640px; height: 40px; background-color: transparent; margin: auto; left: 0; right: 0; top: 490px; } 9 | #_highlight { position: absolute; width: 640px; height: auto; background-color: transparent; margin: auto; left: 0; right: 0; top: 525px; font-size: 8pt; } 10 | #_brfv4_logo { position: absolute; width: 200px; left: 0; bottom: 0; opacity: 0.5; } 11 | #_stats { position: absolute; width: 80px; height: 48px; left: 0; top: 0; } 12 | 13 | #_settingsLeft { position: absolute; width: 160px; height: 480px; background-color: transparent; } 14 | #_settingsRight { position: absolute; width: 255px; height: 100%; background-color: transparent; right: 0; } 15 | #_msg { position: absolute; width: 100%; height: 40px; background-color: transparent; bottom: 0; } 16 | -------------------------------------------------------------------------------- /eula.txt: -------------------------------------------------------------------------------- 1 | EULA. 2 | End User License Agreement. 3 | 4 | Those are our terms for the trial license. 5 | 6 | END-USER LICENSE AGREEMENT FOR "Beyond Reality Face SDK - trial version" 7 | IMPORTANT PLEASE READ THE TERMS AND CONDITIONS OF THIS LICENSE AGREEMENT CAREFULLY BEFORE CONTINUING 8 | WITH THIS DOWNLOAD: Tastenkunst End-User License Agreement ("EULA") is a legal agreement between you 9 | (either an individual or a single entity) and Tastenkunst for the Tastenkunst software product(s) 10 | identified above which may include associated software components, media, printed materials, and 11 | "online" or electronic documentation ("SOFTWARE PRODUCT"). By downloading, copying, or otherwise 12 | using the SOFTWARE PRODUCT, you agree to be bound by the terms of this EULA. 
This license agreement 13 | represents the entire agreement concerning the program between you and Tastenkunst, (referred to as 14 | "licenser"), and it supersedes any prior proposal, representation, or understanding between the parties. 15 | If you do not agree to the terms of this EULA, do not download or use the SOFTWARE PRODUCT. The 16 | SOFTWARE PRODUCT is protected by copyright laws and international copyright treaties, as well as other 17 | intellectual property laws and treaties. The SOFTWARE PRODUCT is licensed, not sold. 18 | 19 | 1. GRANT OF LICENSE. 20 | The SOFTWARE PRODUCT is licensed as follows: 21 | 22 | (a) Download and Use. 23 | Tastenkunst grants you the right to download and use copies of the SOFTWARE PRODUCT on your computer. 24 | 25 | (b) Backup Copies. 26 | You may also make copies of the SOFTWARE PRODUCT as may be necessary for backup and archival purposes. 27 | 28 | 2. DESCRIPTION OF OTHER RIGHTS AND LIMITATIONS. 29 | (a) Maintenance of Copyright Notices. You must not remove or alter any copyright notices on any and 30 | all copies of the SOFTWARE PRODUCT. (b) Distribution. You may not distribute registered copies of 31 | the SOFTWARE PRODUCT to third parties. Evaluation versions available for download from Tastenkunst's 32 | websites may be freely distributed. (c) Prohibition on Reverse Engineering, Decompilation, and 33 | Disassembly. You may not reverse engineer, decompile, or disassemble the SOFTWARE PRODUCT, except 34 | and only to the extent that such activity is expressly permitted by applicable law notwithstanding 35 | this limitation. (d) Rental. You may not rent, lease, or lend the SOFTWARE PRODUCT. (e) Support 36 | Services. Tastenkunst may provide you with support services related to the SOFTWARE PRODUCT ("Support 37 | Services"). Any supplemental software code provided to you as part of the Support Services shall be 38 | considered part of the SOFTWARE PRODUCT and subject to the terms and conditions of this EULA. 
(f) 39 | Compliance with Applicable Laws. You must comply with all applicable laws regarding use of the 40 | SOFTWARE PRODUCT. 41 | 42 | 3. TERMINATION 43 | Without prejudice to any other rights, Tastenkunst may terminate this EULA if you fail to comply 44 | with the terms and conditions of this EULA. In such event, you must destroy all copies of the 45 | SOFTWARE PRODUCT in your possession. 46 | 47 | 4. COPYRIGHT 48 | All title, including but not limited to copyrights, in and to the SOFTWARE PRODUCT and any copies 49 | thereof are owned by Tastenkunst or its suppliers. All title and intellectual property rights in 50 | and to the content which may be accessed through use of the SOFTWARE PRODUCT is the property of 51 | the respective content owner and may be protected by applicable copyright or other intellectual 52 | property laws and treaties. This EULA grants you no rights to use such content. All rights not 53 | expressly granted are reserved by Tastenkunst. 54 | 55 | 5. NO WARRANTIES 56 | Tastenkunst expressly disclaims any warranty for the SOFTWARE PRODUCT. The SOFTWARE PRODUCT is 57 | provided 'As Is' without any express or implied warranty of any kind, including but not limited 58 | to any warranties of merchantability, noninfringement, or fitness of a particular purpose. 59 | Tastenkunst does not warrant or assume responsibility for the accuracy or completeness of any 60 | information, text, graphics, links or other items contained within the SOFTWARE PRODUCT. Tastenkunst 61 | makes no warranties respecting any harm that may be caused by the transmission of a computer 62 | virus, worm, time bomb, logic bomb, or other such computer program. Tastenkunst further expressly 63 | disclaims any warranty or representation to Authorized Users or to any third party. 64 | 65 | 6. 
LIMITATION OF LIABILITY 66 | In no event shall Tastenkunst be liable for any damages (including, without limitation, lost 67 | profits, business interruption, or lost information) rising out of 'Authorized Users' use of 68 | or inability to use the SOFTWARE PRODUCT, even if Tastenkunst has been advised of the possibility 69 | of such damages. In no event will Tastenkunst be liable for loss of data or for indirect, special, 70 | incidental, consequential (including lost profit), or other damages based in contract, tort or 71 | otherwise. Tastenkunst shall have no liability with respect to the content of the SOFTWARE PRODUCT 72 | or any part thereof, including but not limited to errors or omissions contained therein, libel, 73 | infringements of rights of publicity, privacy, trademark rights, business interruption, personal 74 | injury, loss of privacy, moral rights or the disclosure of confidential information. 75 | 76 | -------------------------------------------------------------------------------- /face_texture_overlay.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 7 | 8 | 9 | 10 | 11 | 15 | 16 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Beyond Reality Face - BRFv4 - HTML5/Javascript face tracking 9 | 10 | 11 | 12 | 13 | 16 | 17 | 18 | 19 | 20 | 21 |
22 | 23 |
24 | 25 | 26 | 27 | 28 | 29 | 30 |
31 |
32 | 33 | 34 | 35 |
36 | 37 |
38 |
39 |
40 | 41 |
42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /js/BRFv4Demo.js: -------------------------------------------------------------------------------- 1 | // 2 | // Namespace: brfv4Example structures these examples. 3 | // 4 | 5 | var brfv4Example = { 6 | 7 | appId: "com.tastenkunst.brfv4.js.examples", // Choose your own app id. 8 chars minimum. 8 | 9 | loader: { queuePreloader: null }, // preloading/example loading 10 | imageData: { // image data source handling 11 | webcam: { stream: null }, // either webcam ... 12 | picture: {} // ... or pictures/images 13 | }, 14 | dom: {}, // html dom stuff 15 | gui: {}, // QuickSettings elements 16 | drawing: {}, // drawing the results using createJS 17 | drawing3d: { // all 3D engine functions 18 | t3d: {}//, // ThreeJS stuff 19 | //f3d: {} // Flare3D stuff (coming later) 20 | }, 21 | stats: {} // fps meter 22 | }; 23 | 24 | var brfv4BaseURL = "js/libs/brf_wasm/"; 25 | 26 | (function() { 27 | 28 | // detect WebAssembly support and load either WASM or ASM version of BRFv4 29 | var support = (typeof WebAssembly === 'object'); 30 | 31 | if(support) { 32 | // from https://github.com/brion/min-wasm-fail/blob/master/min-wasm-fail.js 33 | function testSafariWebAssemblyBug() { 34 | var bin = new Uint8Array([0,97,115,109,1,0,0,0,1,6,1,96,1,127,1,127,3,2,1,0,5,3,1,0,1,7,8,1,4,116,101,115,116,0,0,10,16,1,14,0,32,0,65,1,54,2,0,32,0,40,2,0,11]); 35 | var mod = new WebAssembly.Module(bin); 36 | var inst = new WebAssembly.Instance(mod, {}); 37 | 38 | // test storing to and loading from a non-zero location via a parameter. 39 | // Safari on iOS 11.2.5 returns 0 unexpectedly at non-zero locations 40 | return (inst.exports.test(4) !== 0); 41 | } 42 | 43 | if (!testSafariWebAssemblyBug()) { 44 | support = false; 45 | } 46 | } 47 | 48 | if (!support) { brfv4BaseURL = "js/libs/brf_asmjs/"; } 49 | 50 | console.log("Checking support of WebAssembly: " + support + " " + (support ? 
"loading WASM (not ASM)." : "loading ASM (not WASM).")); 51 | 52 | })(); 53 | 54 | // 55 | // Namespace: brfv4 is the (mandatory) namespace for the BRFv4 library. 56 | // 57 | 58 | var brfv4 = {locateFile: function(fileName) { return brfv4BaseURL + fileName; }}; 59 | 60 | // 61 | // Demo entry point: preloading js files. 62 | // 63 | 64 | brfv4Example.start = function() { 65 | 66 | brfv4Example.loader.preload([ 67 | 68 | brfv4BaseURL + "BRFv4_JS_TK210219_v4.2.0_trial.js", // BRFv4 SDK 69 | 70 | "https://webrtc.github.io/adapter/adapter-latest.js", // webcam polyfill for older browsers 71 | 72 | "js/libs/quicksettings/quicksettings.min.css", // gui elements 73 | "js/libs/quicksettings/quicksettings.js", 74 | 75 | "js/libs/highlight/highlight_tomorrow.css", // code highlighter 76 | "js/libs/highlight/highlight.pack.js", 77 | 78 | "js/libs/createjs/easeljs.min.js", // canvas drawing lib 79 | "js/libs/threejs/three.min.js", // ThreeJS: a 3D engine 80 | 81 | "js/utils/BRFv4DOMUtils.js", // DOM handling 82 | "js/utils/BRFv4Stats.js", // FPS meter 83 | 84 | "js/utils/BRFv4DrawingUtils_CreateJS.js", // BRF result drawing 85 | "js/utils/BRFv4Drawing3DUtils_ThreeJS.js", // ThreeJS 3d object placement. 86 | 87 | "js/utils/BRFv4SetupWebcam.js", // webcam handling 88 | "js/utils/BRFv4SetupPicture.js", // picture/image handling 89 | "js/utils/BRFv4SetupExample.js", // overall example setup 90 | 91 | "js/utils/BRFv4PointUtils.js", // some calculation helpers 92 | 93 | "js/utils/BRFv4SetupChooser.js", // gui: choose either webcam or picture 94 | "js/utils/BRFv4ExampleChooser.js", // gui: choose an example 95 | "js/utils/BRFv4DownloadChooser.js", // gui: choose which package to download 96 | 97 | // example to load on startup, others can be chosen via the example chooser GUI. 
98 | 99 | "js/examples/face_tracking/track_single_face.js" // start with this example 100 | 101 | ], function() { 102 | 103 | brfv4Example.init("webcam"); 104 | 105 | }); 106 | }; 107 | 108 | // 109 | // Helper stuff: logging and loading 110 | // 111 | 112 | // Custom way to write to a log/error to console. 113 | 114 | brfv4Example.trace = function(msg, error) { 115 | if(typeof window !== 'undefined' && window.console) { 116 | var now = (window.performance.now() / 1000).toFixed(3); 117 | if(error) { window.console.error(now + ': ', msg); } 118 | else { window.console.log(now + ': ', msg); } 119 | } 120 | }; 121 | 122 | // loading of javascript files: 123 | // 124 | // preload(filesToLoad, callback) // filesToLoad (array) 125 | // loadExample(filesToLoad, callback) // filesToLoad (array) 126 | // setProgressBar(percent, visible) 127 | 128 | (function () { 129 | "use strict"; 130 | 131 | var loader = brfv4Example.loader; 132 | 133 | loader.preload = function (filesToLoad, callback) { 134 | 135 | if (loader.queuePreloader !== null || !filesToLoad) { 136 | return; 137 | } 138 | 139 | function onPreloadProgress(event) { 140 | loader.setProgressBar(event.loaded, true); 141 | } 142 | 143 | function onPreloadComplete(event) { 144 | loader.setProgressBar(1.0, false); 145 | if(callback) callback(); 146 | } 147 | 148 | var queue = loader.queuePreloader = new createjs.LoadQueue(true); 149 | queue.on("progress", onPreloadProgress); 150 | queue.on("complete", onPreloadComplete); 151 | queue.loadManifest(filesToLoad, true); 152 | }; 153 | 154 | loader.loadExample = function (filesToLoad, callback) { 155 | 156 | function onProgress(event) { 157 | loader.setProgressBar(event.loaded, true); 158 | } 159 | 160 | function onComplete(event) { 161 | loader.setProgressBar(1.0, false); 162 | if(callback) callback(); 163 | } 164 | 165 | var queue = loader.queueExamples = new createjs.LoadQueue(true); 166 | queue.on("progress", onProgress); 167 | queue.on("complete", onComplete); 168 | 
queue.loadManifest(filesToLoad, true); 169 | }; 170 | 171 | loader.setProgressBar = function(percent, visible) { 172 | 173 | var bar = document.getElementById("_progressBar"); 174 | if(!bar) return; 175 | 176 | if(percent < 0.0) percent = 0.0; 177 | if(percent > 1.0) percent = 1.0; 178 | 179 | var width = Math.round(percent * 640); 180 | var color = 0xe7e7e7; 181 | 182 | bar.style.width = width + "px"; 183 | bar.style.backgroundColor = "#" + color.toString(16); 184 | bar.style.display = visible ? "block" : "none"; 185 | }; 186 | })(); -------------------------------------------------------------------------------- /js/BRFv4DemoMinimalImage.js: -------------------------------------------------------------------------------- 1 | // Even for a minimal example there are several functions that are commonly used by all minimal examples, eg. adding 2 | // the correct script (wasm or asm.js), starting the webcam etc. 3 | 4 | // Once we know whether wasm is supported we add the correct library script and initialize the example. 5 | 6 | var _isWebAssemblySupported = (function() { 7 | 8 | function testSafariWebAssemblyBug() { 9 | 10 | var bin = new Uint8Array([0,97,115,109,1,0,0,0,1,6,1,96,1,127,1,127,3,2,1,0,5,3,1,0,1,7,8,1,4,116,101,115,116,0,0,10,16,1,14,0,32,0,65,1,54,2,0,32,0,40,2,0,11]); 11 | var mod = new WebAssembly.Module(bin); 12 | var inst = new WebAssembly.Instance(mod, {}); 13 | 14 | // test storing to and loading from a non-zero location via a parameter. 
15 | // Safari on iOS 11.2.5 returns 0 unexpectedly at non-zero locations 16 | 17 | return (inst.exports.test(4) !== 0); 18 | } 19 | 20 | var isWebAssemblySupported = (typeof WebAssembly === 'object'); 21 | 22 | if(isWebAssemblySupported && !testSafariWebAssemblyBug()) { 23 | isWebAssemblySupported = false; 24 | } 25 | 26 | return isWebAssemblySupported; 27 | })(); 28 | 29 | function readWASMBinary(url, onload, onerror, onprogress) { 30 | 31 | var xhr = new XMLHttpRequest(); 32 | 33 | xhr.open("GET", url, true); 34 | xhr.responseType = "arraybuffer"; 35 | xhr.onload = function xhr_onload() { 36 | if (xhr.status === 200 || xhr.status === 0 && xhr.response) { 37 | onload(xhr.response); 38 | return; 39 | } 40 | onerror() 41 | }; 42 | xhr.onerror = onerror; 43 | xhr.onprogress = onprogress; 44 | xhr.send(null); 45 | } 46 | 47 | function addBRFScript() { 48 | 49 | var script = document.createElement("script"); 50 | 51 | script.setAttribute("type", "text/javascript"); 52 | script.setAttribute("async", true); 53 | script.setAttribute("src", brfv4BaseURL + brfv4SDKName + ".js"); 54 | 55 | document.getElementsByTagName("head")[0].appendChild(script); 56 | } 57 | 58 | // Some necessary global vars... (will need to refactor Stats for BRFv5.) 59 | 60 | var brfv4Example = { stats: {} }; 61 | var brfv4BaseURL = _isWebAssemblySupported ? "js/libs/brf_wasm/" : "js/libs/brf_asmjs/"; 62 | var brfv4SDKName = "BRFv4_JS_TK210219_v4.2.0_trial"; // the currently available library 63 | var brfv4WASMBuffer = null; 64 | 65 | var handleTrackingResults = function(brfv4, faces, imageDataCtx) { 66 | 67 | // Overwrite this function in your minimal example HTML file. 
68 | 69 | for(var i = 0; i < faces.length; i++) { 70 | 71 | var face = faces[i]; 72 | 73 | if(face.state === brfv4.BRFState.FACE_TRACKING_START || 74 | face.state === brfv4.BRFState.FACE_TRACKING) { 75 | 76 | imageDataCtx.strokeStyle="#00a0ff"; 77 | 78 | for(var k = 0; k < face.vertices.length; k += 2) { 79 | imageDataCtx.beginPath(); 80 | imageDataCtx.arc(face.vertices[k], face.vertices[k + 1], 2, 0, 2 * Math.PI); 81 | imageDataCtx.stroke(); 82 | } 83 | } 84 | } 85 | }; 86 | 87 | var onResize = function() { 88 | // implement this function in your minimal example, eg. fill the whole browser. 89 | }; 90 | 91 | var onInitBRFv4 = function(brfManager, resolution) { 92 | // Will be called when BRFv4 was initialized. 93 | // implement this function in your minimal example. 94 | }; 95 | 96 | function initExample() { 97 | 98 | // This function is called after the BRFv4 script was added. 99 | 100 | // BRFv4 needs the correct input image data size for initialization. 101 | // That's why we need to get the img and its dimension first 102 | 103 | // Once the dimension of the img is known we need to wait for 104 | // BRFv4 to be ready to be initialized (waitForSDK, initSDK) 105 | 106 | // Once BRFv4 was initialized, we can track faces (trackFaces) 107 | 108 | var image = document.getElementById("_image"); // our analyzed image 109 | var imageData = document.getElementById("_imageData"); // image data for BRFv4 110 | var imageDataCtx = null; // only fetch the context once 111 | 112 | var brfv4 = null; // the library namespace 113 | var brfManager = null; // the API 114 | var resolution = null; // the image size 115 | var timeoutId = -1; 116 | 117 | handleImageInput(); 118 | 119 | function handleImageInput() { 120 | 121 | console.log("handleImageInput"); 122 | 123 | // Resize the canvas to match the img size. 
124 | imageData.width = image.width; 125 | imageData.height = image.height; 126 | imageDataCtx = imageData.getContext("2d"); 127 | 128 | window.addEventListener("resize", onResize); 129 | onResize(); 130 | 131 | waitForSDK(); 132 | } 133 | 134 | function waitForSDK() { 135 | 136 | if(brfv4 === null && window.hasOwnProperty("initializeBRF")) { 137 | 138 | // Set up the namespace and initialize BRFv4. 139 | // locateFile tells the asm.js version where to find the .mem file. 140 | // wasmBinary gets the preloaded .wasm file. 141 | 142 | brfv4 = { 143 | locateFile: function(fileName) { return brfv4BaseURL + fileName; }, 144 | wasmBinary: brfv4WASMBuffer // Add loaded WASM file to Module 145 | }; 146 | 147 | initializeBRF(brfv4); 148 | } 149 | 150 | if(brfv4 && brfv4.sdkReady) { 151 | 152 | initSDK(); 153 | 154 | } else { 155 | 156 | setTimeout(waitForSDK, 250); // wait a bit... 157 | } 158 | } 159 | 160 | function initSDK() { 161 | 162 | // The brfv4 namespace is now filled with the API classes and objects. 163 | // We can now initialize the BRFManager and the tracking API. 164 | 165 | resolution = new brfv4.Rectangle(0, 0, imageData.width, imageData.height); 166 | brfManager = new brfv4.BRFManager(); 167 | brfManager.init(resolution, resolution, "com.tastenkunst.brfv4.js.examples.minimal.image"); 168 | brfManager.setNumFacesToTrack(2); 169 | 170 | onInitBRFv4(brfManager, resolution); 171 | 172 | trackFaces(); 173 | } 174 | 175 | function trackFaces() { 176 | 177 | if(brfv4Example.stats.start) brfv4Example.stats.start(); 178 | 179 | var timeStart = window.performance.now(); 180 | 181 | imageDataCtx.drawImage(image, 0, 0, resolution.width, resolution.height); 182 | 183 | var data = imageDataCtx.getImageData(0, 0, resolution.width, resolution.height).data; 184 | 185 | // BRFv4 is meant to be used with a webcam stream. 186 | // A single image should be updated multiple times. 
187 | 188 | for(var i = 0; i < 1; i++) { 189 | brfManager.update(data); 190 | } 191 | 192 | handleTrackingResults(brfv4, brfManager.getFaces(), imageDataCtx); 193 | 194 | if(brfv4Example.stats.end) brfv4Example.stats.end(); 195 | 196 | if(timeoutId >= 0) { 197 | clearTimeout(timeoutId); 198 | } 199 | 200 | var elapstedMs = window.performance.now() - timeStart; 201 | 202 | // Choosing 1 FPS to show how the tracking converges. 203 | // Update brf 10 times and comment this out to have the same effect without waiting. 204 | // Sometimes it doesn't converge at all, so 10 updates should be enough for one/two faces. 205 | timeoutId = setTimeout(function() { trackFaces(); }, (1000 / 1) - elapstedMs); 206 | } 207 | } 208 | 209 | (function() { 210 | 211 | // detect WebAssembly support and load either WASM or ASM version of BRFv4 212 | 213 | console.log("Checking support of WebAssembly: " + 214 | _isWebAssemblySupported + " " + (_isWebAssemblySupported ? "loading WASM (not ASM)." : "loading ASM (not WASM).")); 215 | 216 | if(_isWebAssemblySupported) { 217 | 218 | readWASMBinary(brfv4BaseURL + brfv4SDKName + ".wasm", 219 | function(r) { 220 | 221 | brfv4WASMBuffer = r; // see function waitForSDK. The ArrayBuffer needs to be added to the module object. 222 | 223 | addBRFScript(); 224 | initExample(); 225 | 226 | }, 227 | function (e) { console.error(e); }, 228 | function (p) { console.log(p); } 229 | ); 230 | 231 | } else { 232 | 233 | addBRFScript(); 234 | initExample(); 235 | } 236 | 237 | })(); -------------------------------------------------------------------------------- /js/BRFv4DemoMinimalWebcam.js: -------------------------------------------------------------------------------- 1 | // Even for a minimal example there are several functions that are commonly used by all minimal examples, eg. adding 2 | // the correct script (wasm or asm.js), starting the webcam etc. 
3 | 4 | // Once we know whether wasm is supported we add the correct library script and initialize the example. 5 | 6 | var _isWebAssemblySupported = (function() { 7 | 8 | function testSafariWebAssemblyBug() { 9 | 10 | var bin = new Uint8Array([0,97,115,109,1,0,0,0,1,6,1,96,1,127,1,127,3,2,1,0,5,3,1,0,1,7,8,1,4,116,101,115,116,0,0,10,16,1,14,0,32,0,65,1,54,2,0,32,0,40,2,0,11]); 11 | var mod = new WebAssembly.Module(bin); 12 | var inst = new WebAssembly.Instance(mod, {}); 13 | 14 | // test storing to and loading from a non-zero location via a parameter. 15 | // Safari on iOS 11.2.5 returns 0 unexpectedly at non-zero locations 16 | 17 | return (inst.exports.test(4) !== 0); 18 | } 19 | 20 | var isWebAssemblySupported = (typeof WebAssembly === 'object'); 21 | 22 | if(isWebAssemblySupported && !testSafariWebAssemblyBug()) { 23 | isWebAssemblySupported = false; 24 | } 25 | 26 | return isWebAssemblySupported; 27 | })(); 28 | 29 | function readWASMBinary(url, onload, onerror, onprogress) { 30 | 31 | var xhr = new XMLHttpRequest(); 32 | 33 | xhr.open("GET", url, true); 34 | xhr.responseType = "arraybuffer"; 35 | xhr.onload = function xhr_onload() { 36 | if (xhr.status === 200 || xhr.status === 0 && xhr.response) { 37 | onload(xhr.response); 38 | return; 39 | } 40 | onerror() 41 | }; 42 | xhr.onerror = onerror; 43 | xhr.onprogress = onprogress; 44 | xhr.send(null); 45 | } 46 | 47 | function addBRFScript() { 48 | 49 | var script = document.createElement("script"); 50 | 51 | script.setAttribute("type", "text/javascript"); 52 | script.setAttribute("async", true); 53 | script.setAttribute("src", brfv4BaseURL + brfv4SDKName + ".js"); 54 | 55 | document.getElementsByTagName("head")[0].appendChild(script); 56 | } 57 | 58 | // Some necessary global vars... (will need to refactor Stats for BRFv5.) 59 | 60 | var brfv4Example = { stats: {} }; 61 | var brfv4BaseURL = _isWebAssemblySupported ? 
"js/libs/brf_wasm/" : "js/libs/brf_asmjs/"; 62 | var brfv4SDKName = "BRFv4_JS_TK210219_v4.2.0_trial"; // the currently available library 63 | var brfv4WASMBuffer = null; 64 | 65 | var handleTrackingResults = function(brfv4, faces, imageDataCtx) { 66 | 67 | // Overwrite this function in your minimal example HTML file. 68 | 69 | for(var i = 0; i < faces.length; i++) { 70 | 71 | var face = faces[i]; 72 | 73 | if(face.state === brfv4.BRFState.FACE_TRACKING_START || 74 | face.state === brfv4.BRFState.FACE_TRACKING) { 75 | 76 | imageDataCtx.strokeStyle="#00a0ff"; 77 | 78 | for(var k = 0; k < face.vertices.length; k += 2) { 79 | imageDataCtx.beginPath(); 80 | imageDataCtx.arc(face.vertices[k], face.vertices[k + 1], 2, 0, 2 * Math.PI); 81 | imageDataCtx.stroke(); 82 | } 83 | } 84 | } 85 | }; 86 | 87 | var onResize = function() { 88 | // implement this function in your minimal example, eg. fill the whole browser. 89 | }; 90 | 91 | var onInitBRFv4 = function(brfManager, resolution) { 92 | // Will be called when BRFv4 was initialized. 93 | // implement this function in your minimal example. 94 | }; 95 | 96 | function initExample() { 97 | 98 | // This function is called after the BRFv4 script was added. 99 | 100 | // BRFv4 needs the correct input image data size for initialization. 101 | // That's why we need to start the camera stream first and get the correct 102 | // video stream dimension. 
(startCamera, onStreamFetched, onStreamDimensionsAvailable) 103 | 104 | // Once the dimension of the video stream is known we need to wait for 105 | // BRFv4 to be ready to be initialized (waitForSDK, initSDK) 106 | 107 | // Once BRFv4 was initialized, we can track faces (trackFaces) 108 | 109 | var webcam = document.getElementById("_webcam"); // our webcam video 110 | var imageData = document.getElementById("_imageData"); // image data for BRFv4 111 | var imageDataCtx = null; // only fetch the context once 112 | 113 | var brfv4 = null; // the library namespace 114 | var brfManager = null; // the API 115 | var resolution = null; // the video stream resolution (usually 640x480) 116 | var timeoutId = -1; 117 | 118 | // iOS has this weird behavior that it freezes the camera stream, if the CPU get's 119 | // stressed too much, but it doesn't unfreeze the stream upon CPU relaxation. 120 | // A workaround is to get the video stream dimension and then turn the stream off 121 | // until BRFv4 was successfully initialized (takes about 3 seconds of heavy CPU work). 122 | 123 | var isIOS = (/iPad|iPhone|iPod/.test(window.navigator.userAgent) && !window.MSStream); 124 | 125 | startCamera(); 126 | 127 | function startCamera() { 128 | 129 | console.log("startCamera"); 130 | 131 | // Start video playback once the camera was fetched to get the actual stream dimension. 132 | function onStreamFetched (mediaStream) { 133 | 134 | console.log("onStreamFetched"); 135 | 136 | webcam.srcObject = mediaStream; 137 | webcam.play(); 138 | 139 | // Check whether we know the stream dimension yet, if so, start BRFv4. 140 | function onStreamDimensionsAvailable () { 141 | 142 | console.log("onStreamDimensionsAvailable: " + (webcam.videoWidth !== 0)); 143 | 144 | if(webcam.videoWidth === 0) { 145 | 146 | setTimeout(onStreamDimensionsAvailable, 100); 147 | 148 | } else { 149 | 150 | // Resize the canvas to match the webcam video size. 
151 | imageData.width = webcam.videoWidth; 152 | imageData.height = webcam.videoHeight; 153 | imageDataCtx = imageData.getContext("2d"); 154 | 155 | window.addEventListener("resize", onResize); 156 | onResize(); 157 | 158 | // on iOS we want to close the video stream first and 159 | // wait for the heavy BRFv4 initialization to finish. 160 | // Once that is done, we start the stream again. 161 | 162 | // as discussed above, close the stream on iOS and wait for BRFv4 to be initialized. 163 | 164 | if(isIOS) { 165 | 166 | webcam.pause(); 167 | webcam.srcObject.getTracks().forEach(function(track) { 168 | track.stop(); 169 | }); 170 | } 171 | 172 | waitForSDK(); 173 | } 174 | } 175 | 176 | // imageDataCtx is not null if we restart the camera stream on iOS. 177 | 178 | if(imageDataCtx === null) { 179 | 180 | onStreamDimensionsAvailable(); 181 | 182 | } else { 183 | 184 | trackFaces(); 185 | } 186 | } 187 | 188 | // start the camera stream... 189 | 190 | window.navigator.mediaDevices.getUserMedia({ video: { width: 640, height: 480, frameRate: 30 } }) 191 | .then(onStreamFetched).catch(function () { alert("No camera available."); }); 192 | } 193 | 194 | function waitForSDK() { 195 | 196 | if(brfv4 === null && window.hasOwnProperty("initializeBRF")) { 197 | 198 | // Set up the namespace and initialize BRFv4. 199 | // locateFile tells the asm.js version where to find the .mem file. 200 | // wasmBinary gets the preloaded .wasm file. 201 | 202 | brfv4 = { 203 | locateFile: function(fileName) { return brfv4BaseURL + fileName; }, 204 | wasmBinary: brfv4WASMBuffer // Add loaded WASM file to Module 205 | }; 206 | 207 | initializeBRF(brfv4); 208 | } 209 | 210 | if(brfv4 && brfv4.sdkReady) { 211 | 212 | initSDK(); 213 | 214 | } else { 215 | 216 | setTimeout(waitForSDK, 250); // wait a bit... 217 | } 218 | } 219 | 220 | function initSDK() { 221 | 222 | // The brfv4 namespace is now filled with the API classes and objects. 
223 | // We can now initialize the BRFManager and the tracking API. 224 | 225 | resolution = new brfv4.Rectangle(0, 0, imageData.width, imageData.height); 226 | brfManager = new brfv4.BRFManager(); 227 | brfManager.init(resolution, resolution, "com.tastenkunst.brfv4.js.examples.minimal.webcam"); 228 | 229 | onInitBRFv4(brfManager, resolution); 230 | 231 | if(isIOS) { 232 | 233 | // Start the camera stream again on iOS. 234 | 235 | setTimeout(function () { 236 | 237 | console.log('delayed camera restart for iOS'); 238 | 239 | startCamera(); 240 | 241 | }, 2000) 242 | 243 | } else { 244 | 245 | trackFaces(); 246 | } 247 | } 248 | 249 | function trackFaces() { 250 | 251 | if(brfv4Example.stats.start) brfv4Example.stats.start(); 252 | 253 | var timeStart = window.performance.now(); 254 | 255 | imageDataCtx.setTransform(-1.0, 0, 0, 1, resolution.width, 0); // A virtual mirror should be... mirrored 256 | imageDataCtx.drawImage(webcam, 0, 0, resolution.width, resolution.height); 257 | imageDataCtx.setTransform( 1.0, 0, 0, 1, 0, 0); // unmirrored for drawing the results 258 | 259 | brfManager.update(imageDataCtx.getImageData(0, 0, resolution.width, resolution.height).data); 260 | 261 | handleTrackingResults(brfv4, brfManager.getFaces(), imageDataCtx); 262 | 263 | if(brfv4Example.stats.end) brfv4Example.stats.end(); 264 | 265 | if(timeoutId >= 0) { 266 | clearTimeout(timeoutId); 267 | } 268 | 269 | var elapstedMs = window.performance.now() - timeStart; 270 | 271 | // We don't need 60 FPS, the camera will deliver at 30 FPS anyway. 272 | timeoutId = setTimeout(function() { trackFaces(); }, (1000 / 30) - elapstedMs); 273 | } 274 | } 275 | 276 | (function() { 277 | 278 | // detect WebAssembly support and load either WASM or ASM version of BRFv4 279 | 280 | console.log("Checking support of WebAssembly: " + 281 | _isWebAssemblySupported + " " + (_isWebAssemblySupported ? "loading WASM (not ASM)." 
: "loading ASM (not WASM).")); 282 | 283 | if(_isWebAssemblySupported) { 284 | 285 | readWASMBinary(brfv4BaseURL + brfv4SDKName + ".wasm", 286 | function(r) { 287 | 288 | brfv4WASMBuffer = r; // see function waitForSDK. The ArrayBuffer needs to be added to the module object. 289 | 290 | addBRFScript(); 291 | initExample(); 292 | 293 | }, 294 | function (e) { console.error(e); }, 295 | function (p) { console.log(p); } 296 | ); 297 | 298 | } else { 299 | 300 | addBRFScript(); 301 | initExample(); 302 | } 303 | 304 | })(); -------------------------------------------------------------------------------- /js/examples/face_detection/detect_in_center.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _faceDetectionRoi = new brfv4.Rectangle(); 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | brfManager.init(resolution, resolution, brfv4Example.appId); 9 | 10 | // We explicitly set the mode to run in: BRFMode.FACE_DETECTION. 11 | 12 | brfManager.setMode(brfv4.BRFMode.FACE_DETECTION); 13 | 14 | // Then we limit the face detection region of interest to be in the central 15 | // part of the overall analysed image (green rectangle). 16 | 17 | _faceDetectionRoi.setTo( 18 | resolution.width * 0.25, resolution.height * 0.10, 19 | resolution.width * 0.50, resolution.height * 0.80 20 | ); 21 | brfManager.setFaceDetectionRoi(_faceDetectionRoi); 22 | 23 | // We can have either a landscape area (desktop), then choose height or 24 | // we can have a portrait area (mobile), then choose width as max face size. 25 | 26 | var maxFaceSize = _faceDetectionRoi.height; 27 | 28 | if(_faceDetectionRoi.width < _faceDetectionRoi.height) { 29 | maxFaceSize = _faceDetectionRoi.width; 30 | } 31 | 32 | // Merged faces (yellow) will only show up if they are at least 30% of maxFaceSize. 33 | // Move away from the camera to see the merged detected faces (yellow) disappear. 
34 | 35 | // Btw. the following settings are the default settings set by BRFv4 on init. 36 | 37 | brfManager.setFaceDetectionParams(maxFaceSize * 0.30, maxFaceSize * 0.90, 12, 8); 38 | }; 39 | 40 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 41 | 42 | brfManager.update(imageData); 43 | 44 | // Drawing the results: 45 | 46 | draw.clear(); 47 | 48 | // Show the region of interest (green). 49 | 50 | draw.drawRect(_faceDetectionRoi, false, 2.0, 0x8aff00, 0.5); 51 | 52 | // Then draw all detected faces (blue). 53 | 54 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 55 | 56 | // In the end add the merged detected faces that have at least 12 detected faces 57 | // in a certain area (yellow). 58 | 59 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 60 | 61 | // Now print the face sizes: 62 | 63 | printSize(brfManager.getMergedDetectedFaces(), false); 64 | }; 65 | 66 | function printSize(rects, printAlwaysMinMax) { 67 | 68 | var maxWidth = 0; 69 | var minWidth = 9999; 70 | 71 | for(var i = 0, l = rects.length; i < l; i++) { 72 | 73 | if(rects[i].width < minWidth) { 74 | minWidth = rects[i].width; 75 | } 76 | 77 | if(rects[i].width > maxWidth) { 78 | maxWidth = rects[i].width; 79 | } 80 | } 81 | 82 | if(maxWidth > 0) { 83 | 84 | var str = ""; 85 | 86 | // One face or same size: name it size, otherwise name it min/max. 87 | 88 | if(minWidth === maxWidth && !printAlwaysMinMax) { 89 | str = "size: " + maxWidth.toFixed(0); 90 | } else { 91 | str = "min: " + minWidth.toFixed(0) + " max: " + maxWidth.toFixed(0); 92 | } 93 | 94 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect face in " + 95 | "center\nLimit detection area to the center of the image. 
" + str); 96 | } 97 | } 98 | 99 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect face in center\n" + 100 | "Limit detection area (region of interest) to the center of the image."); 101 | 102 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 103 | })(); -------------------------------------------------------------------------------- /js/examples/face_detection/detect_in_whole_image.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _faceDetectionRoi = new brfv4.Rectangle(); 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | brfManager.init(resolution, resolution, brfv4Example.appId); 9 | 10 | // We explicitly set the mode to run in: BRFMode.FACE_DETECTION. 11 | 12 | brfManager.setMode(brfv4.BRFMode.FACE_DETECTION); 13 | 14 | // Then we set the face detection region of interest to be 15 | // most/all of the overall analysed image (green rectangle, 100%). 16 | 17 | _faceDetectionRoi.setTo( 18 | resolution.width * 0.00, resolution.height * 0.00, 19 | resolution.width * 1.00, resolution.height * 1.00 20 | ); 21 | brfManager.setFaceDetectionRoi(_faceDetectionRoi); 22 | 23 | // We can have either a landscape area (desktop), then choose height or 24 | // we can have a portrait area (mobile), then choose width as max face size. 25 | 26 | var maxFaceSize = _faceDetectionRoi.height; 27 | 28 | if(_faceDetectionRoi.width < _faceDetectionRoi.height) { 29 | maxFaceSize = _faceDetectionRoi.width; 30 | } 31 | 32 | // Merged faces (yellow) will only show up if they are at least 30% of maxFaceSize. 33 | // Move away from the camera to see the merged detected faces (yellow) disappear. 34 | 35 | // Btw. the following settings are the default settings set by BRFv4 on init. 
36 | 37 | brfManager.setFaceDetectionParams(maxFaceSize * 0.30, maxFaceSize * 1.00, 12, 8); 38 | }; 39 | 40 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 41 | 42 | brfManager.update(imageData); 43 | 44 | // Drawing the results: 45 | 46 | draw.clear(); 47 | 48 | // Show the region of interest (green). 49 | 50 | draw.drawRect(_faceDetectionRoi, false, 4.0, 0x8aff00, 0.5); 51 | 52 | // Then draw all detected faces (blue). 53 | 54 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 55 | 56 | // In the end add the merged detected faces that have at least 12 detected faces 57 | // in a certain area (yellow). 58 | 59 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 60 | 61 | // Now print the face sizes: 62 | 63 | printSize(brfManager.getMergedDetectedFaces(), false); 64 | }; 65 | 66 | function printSize(rects, printAlwaysMinMax) { 67 | 68 | var maxWidth = 0; 69 | var minWidth = 9999; 70 | 71 | for(var i = 0, l = rects.length; i < l; i++) { 72 | 73 | if(rects[i].width < minWidth) { 74 | minWidth = rects[i].width; 75 | } 76 | 77 | if(rects[i].width > maxWidth) { 78 | maxWidth = rects[i].width; 79 | } 80 | } 81 | 82 | if(maxWidth > 0) { 83 | 84 | var str = ""; 85 | 86 | // One face or same size: name it size, otherwise name it min/max. 87 | 88 | if(minWidth === maxWidth && !printAlwaysMinMax) { 89 | str = "size: " + maxWidth.toFixed(0); 90 | } else { 91 | str = "min: " + minWidth.toFixed(0) + " max: " + maxWidth.toFixed(0); 92 | } 93 | 94 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect faces in " + 95 | "the whole image\nSet most of the/the whole image as detection area. 
" + str); 96 | } 97 | } 98 | 99 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect faces in the " + 100 | "whole image\nSet most of the/the whole image as detection area (region of interest)."); 101 | 102 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 103 | })(); -------------------------------------------------------------------------------- /js/examples/face_detection/detect_larger_faces.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _faceDetectionRoi = new brfv4.Rectangle(); 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | brfManager.init(resolution, resolution, brfv4Example.appId); 9 | 10 | // We explicitly set the mode to run in: BRFMode.FACE_DETECTION. 11 | 12 | brfManager.setMode(brfv4.BRFMode.FACE_DETECTION); 13 | 14 | // Then we set the face detection region of interest to be 15 | // most/all of the overall analysed image (green rectangle, 100%). 16 | 17 | _faceDetectionRoi.setTo( 18 | resolution.width * 0.00, resolution.height * 0.00, 19 | resolution.width * 1.00, resolution.height * 1.00 20 | ); 21 | brfManager.setFaceDetectionRoi(_faceDetectionRoi); 22 | 23 | // We can have either a landscape area (desktop), then choose height or 24 | // we can have a portrait area (mobile), then choose width as max face size. 25 | 26 | var maxFaceSize = _faceDetectionRoi.height; 27 | 28 | if(_faceDetectionRoi.width < _faceDetectionRoi.height) { 29 | maxFaceSize = _faceDetectionRoi.width; 30 | } 31 | 32 | // Merged faces (yellow) will only show up if they are at least 60% of maxFaceSize. 33 | // So: come really close the your webcam to see a detection result. 34 | 35 | // Default would be 30% of maxFaceSize as minimal value and 90% of maxFaceSize 36 | // as maximal value, but here we set larger desired sizes. 
37 | 38 | brfManager.setFaceDetectionParams(maxFaceSize * 0.60, maxFaceSize * 1.00, 12, 8); 39 | }; 40 | 41 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 42 | 43 | brfManager.update(imageData); 44 | 45 | // Drawing the results: 46 | 47 | draw.clear(); 48 | 49 | // Show the region of interest (green). 50 | 51 | draw.drawRect(_faceDetectionRoi, false, 2.0, 0x8aff00, 0.5); 52 | 53 | // Then draw all detected faces (blue). 54 | 55 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 56 | 57 | // In the end add the merged detected faces that have at least 12 detected faces 58 | // in a certain area (yellow). 59 | 60 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 61 | 62 | // Now print the face sizes: 63 | 64 | printSize(brfManager.getMergedDetectedFaces(), true); 65 | }; 66 | 67 | function printSize(rects, printAlwaysMinMax) { 68 | 69 | var maxWidth = 0; 70 | var minWidth = 9999; 71 | 72 | for(var i = 0, l = rects.length; i < l; i++) { 73 | 74 | if(rects[i].width < minWidth) { 75 | minWidth = rects[i].width; 76 | } 77 | 78 | if(rects[i].width > maxWidth) { 79 | maxWidth = rects[i].width; 80 | } 81 | } 82 | 83 | if(maxWidth > 0) { 84 | 85 | var str = ""; 86 | 87 | // One face or same size: name it size, otherwise name it min/max. 88 | 89 | if(minWidth === maxWidth && !printAlwaysMinMax) { 90 | str = "size: " + maxWidth.toFixed(0); 91 | } else { 92 | str = "min: " + minWidth.toFixed(0) + " max: " + maxWidth.toFixed(0); 93 | } 94 | 95 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect large " + 96 | "faces\nCome closer to the webcam to see detection results. 
" + str); 97 | } 98 | } 99 | 100 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect large faces\n" + 101 | "Come closer to the webcam to see detection results."); 102 | 103 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 104 | })(); -------------------------------------------------------------------------------- /js/examples/face_detection/detect_smaller_faces.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _faceDetectionRoi = new brfv4.Rectangle(); 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | brfManager.init(resolution, resolution, brfv4Example.appId); 9 | 10 | // We explicitly set the mode to run in: BRFMode.FACE_DETECTION. 11 | 12 | brfManager.setMode(brfv4.BRFMode.FACE_DETECTION); 13 | 14 | // Then we set the face detection region of interest to be 15 | // most/all of the overall analysed image (green rectangle, 100%). 16 | 17 | _faceDetectionRoi.setTo( 18 | resolution.width * 0.00, resolution.height * 0.00, 19 | resolution.width * 1.00, resolution.height * 1.00 20 | ); 21 | brfManager.setFaceDetectionRoi(_faceDetectionRoi); 22 | 23 | // Face detection: 24 | // 25 | // Internally BRFv4 uses a DYNx480 (landscape) or 480xDYN (portrait) 26 | // image for it's analysis. So 480px is the base size that every other 27 | // input size compares to (eg. 1280x720 -> 854x480). 28 | // 29 | // The minimum detectable face size for the following resolutions are: 30 | // 31 | // 640 x 480: 24px ( 480 / 480 = 1.00 * 24 = 24) (base) 32 | // 1280 x 720: 36px ( 720 / 480 = 1.50 * 24 = 36) 33 | // 1920 x 1080: 54px (1080 / 480 = 2.25 * 24 = 54) 34 | // 35 | // Also: faces (blue) are only detected at step sizes multiple of 12. 
36 | // So the actual face detection layers (sizes) are: 37 | // 38 | // 640 x 480: 24, 36, 48, 60, 72, ..., 456, 468, 480 39 | // 1280 x 720: 36, 54, 72, 90, 108, ..., 684, 702, 720 40 | // 1920 x 1080: 54, 81, 108, 135, 162, ..., 1026, 1053, 1080 41 | // 42 | // Detected faces (blue) get merged (yellow) if they are 43 | // + roughly placed in the same location, 44 | // + roughly the same size and 45 | // + have at least minMergeNeighbors of other rectangle in the same spot (4 in this case). 46 | 47 | // Let's set some small values. To see a result you need to be far away 48 | // from the webcam or hold an image with a small face in front of your webcam. 49 | 50 | var stepSize = 12; // multiple of 12, either: 12, 24, 36 etc. 51 | var minFaceSize = _faceDetectionRoi.height * 0.10; // 48 for 480, 72 for 720, 108 for 1080 52 | var maxFaceSize = minFaceSize + stepSize * 2; // 72 for 480, 108 for 720, 162 for 1080 53 | 54 | brfManager.setFaceDetectionParams(minFaceSize, maxFaceSize, stepSize, 4); 55 | }; 56 | 57 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 58 | 59 | brfManager.update(imageData); 60 | 61 | // Drawing the results: 62 | 63 | draw.clear(); 64 | 65 | // Show the region of interest (green). 66 | 67 | draw.drawRect(_faceDetectionRoi, false, 2.0, 0x8aff00, 0.5); 68 | 69 | // Then draw all detected faces (blue). 70 | 71 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 72 | 73 | // In the end add the merged detected faces that have at least 4 detected faces 74 | // in a certain area (yellow). 
75 | 76 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 77 | 78 | // Now print the face sizes: 79 | 80 | printSize(brfManager.getMergedDetectedFaces(), true); 81 | }; 82 | 83 | function printSize(rects, printAlwaysMinMax) { 84 | 85 | var maxWidth = 0; 86 | var minWidth = 9999; 87 | 88 | for(var i = 0, l = rects.length; i < l; i++) { 89 | 90 | if(rects[i].width < minWidth) { 91 | minWidth = rects[i].width; 92 | } 93 | 94 | if(rects[i].width > maxWidth) { 95 | maxWidth = rects[i].width; 96 | } 97 | } 98 | 99 | if(maxWidth > 0) { 100 | 101 | var str = ""; 102 | 103 | // One face or same size: name it size, otherwise name it min/max. 104 | 105 | if(minWidth === maxWidth && !printAlwaysMinMax) { 106 | str = "size: " + maxWidth.toFixed(0); 107 | } else { 108 | str = "min: " + minWidth.toFixed(0) + " max: " + maxWidth.toFixed(0); 109 | } 110 | 111 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect small faces\n" + 112 | "Limit the maxFaceSize and minFaceSize to detect small faces. " + str); 113 | } 114 | } 115 | 116 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face detection - detect small faces\n" + 117 | "Limit the maxFaceSize and minFaceSize to detect small faces. "); 118 | 119 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 120 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/ThreeJS_example.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var t3d = brfv4Example.drawing3d.t3d; 5 | var numFacesToTrack = 1; 6 | 7 | function loadModels() { 8 | 9 | if(t3d) { 10 | 11 | // Remove all models and load new ones. 
12 | 13 | t3d.removeAll(); 14 | t3d.loadOcclusionHead("assets/brfv4_occlusion_head.json", numFacesToTrack); 15 | t3d.loadModel("assets/brfv4_model.json", numFacesToTrack); 16 | } 17 | } 18 | 19 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 20 | 21 | brfManager.init(resolution, resolution, brfv4Example.appId); 22 | brfManager.setNumFacesToTrack(numFacesToTrack); 23 | 24 | // Relax starting conditions to eventually find more faces. 25 | 26 | var maxFaceSize = resolution.height; 27 | 28 | if(resolution.width < resolution.height) { 29 | maxFaceSize = resolution.width; 30 | } 31 | 32 | brfManager.setFaceDetectionParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 12, 8); 33 | brfManager.setFaceTrackingStartParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 32, 35, 32); 34 | brfManager.setFaceTrackingResetParams( maxFaceSize * 0.15, maxFaceSize * 1.00, 40, 55, 32); 35 | 36 | loadModels(); 37 | }; 38 | 39 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 40 | 41 | brfManager.update(imageData); 42 | 43 | if(t3d) t3d.hideAll(); // Hide 3d models. Only show them on top of tracked faces. 44 | 45 | draw.clear(); 46 | 47 | var faces = brfManager.getFaces(); 48 | 49 | for(var i = 0; i < faces.length; i++) { 50 | 51 | var face = faces[i]; 52 | 53 | if(face.state === brfv4.BRFState.FACE_TRACKING) { 54 | 55 | // Draw the 68 facial feature points as reference. 56 | 57 | draw.drawVertices(face.vertices, 2.0, false, 0x00a0ff, 0.4); 58 | 59 | // Set the 3D model according to the tracked results. 
60 | 61 | if(t3d) t3d.update(i, face, true); 62 | } 63 | } 64 | 65 | if(t3d) { t3d.render(); } 66 | }; 67 | 68 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 69 | 70 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face_tracking - ThreeJS example.\n" + 71 | "Tracks up to " + numFacesToTrack + " faces and puts glasses on them."); 72 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/blink_detection.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 5 | brfManager.init(resolution, resolution, brfv4Example.appId); 6 | }; 7 | 8 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 9 | 10 | brfManager.update(imageData); 11 | 12 | draw.clear(); 13 | 14 | // Face detection results: a rough rectangle used to start the face tracking. 15 | 16 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 17 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 18 | 19 | var faces = brfManager.getFaces(); // default: one face, only one element in that array. 20 | 21 | for(var i = 0; i < faces.length; i++) { 22 | 23 | var face = faces[i]; 24 | 25 | if(face.state === brfv4.BRFState.FACE_TRACKING) { 26 | 27 | // simple blink detection 28 | 29 | // A simple approach with quite a lot false positives. Fast movement can't be 30 | // handled properly. This code is quite good when it comes to 31 | // staring contest apps though. 32 | 33 | // It basically compares the old positions of the eye points to the current ones. 34 | // If rapid movement of the current points was detected it's considered a blink. 
35 | 36 | var v = face.vertices; 37 | 38 | if(_oldFaceShapeVertices.length === 0) storeFaceShapeVertices(v); 39 | 40 | var k, l, yLE, yRE; 41 | 42 | // Left eye movement (y) 43 | 44 | for(k = 36, l = 41, yLE = 0; k <= l; k++) { 45 | yLE += v[k * 2 + 1] - _oldFaceShapeVertices[k * 2 + 1]; 46 | } 47 | yLE /= 6; 48 | 49 | // Right eye movement (y) 50 | 51 | for(k = 42, l = 47, yRE = 0; k <= l; k++) { 52 | yRE += v[k * 2 + 1] - _oldFaceShapeVertices[k * 2 + 1]; 53 | } 54 | 55 | yRE /= 6; 56 | 57 | var yN = 0; 58 | 59 | // Compare to overall movement (nose y) 60 | 61 | yN += v[27 * 2 + 1] - _oldFaceShapeVertices[27 * 2 + 1]; 62 | yN += v[28 * 2 + 1] - _oldFaceShapeVertices[28 * 2 + 1]; 63 | yN += v[29 * 2 + 1] - _oldFaceShapeVertices[29 * 2 + 1]; 64 | yN += v[30 * 2 + 1] - _oldFaceShapeVertices[30 * 2 + 1]; 65 | yN /= 4; 66 | 67 | var blinkRatio = Math.abs((yLE + yRE) / yN); 68 | 69 | if((blinkRatio > 12 && (yLE > 0.4 || yRE > 0.4))) { 70 | console.log("blink " + blinkRatio.toFixed(2) + " " + yLE.toFixed(2) + " " + 71 | yRE.toFixed(2) + " " + yN.toFixed(2)); 72 | 73 | blink(); 74 | } 75 | 76 | // Let the color of the shape show whether you blinked. 77 | 78 | var color = 0x00a0ff; 79 | 80 | if(_blinked) { 81 | color = 0xffd200; 82 | } 83 | 84 | // Face Tracking results: 68 facial feature points. 85 | 86 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, color, 0.4); 87 | draw.drawVertices( face.vertices, 2.0, false, color, 0.4); 88 | 89 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face tracking - simple blink" + 90 | "detection.\nDetects an eye blink: " + (_blinked ? 
"Yes" : "No")); 91 | 92 | storeFaceShapeVertices(v); 93 | } 94 | } 95 | }; 96 | 97 | function blink() { 98 | _blinked = true; 99 | 100 | if(_timeOut > -1) { clearTimeout(_timeOut); } 101 | 102 | _timeOut = setTimeout(resetBlink, 150); 103 | } 104 | 105 | function resetBlink() { 106 | _blinked = false; 107 | } 108 | 109 | function storeFaceShapeVertices(vertices) { 110 | for(var i = 0, l = vertices.length; i < l; i++) { 111 | _oldFaceShapeVertices[i] = vertices[i]; 112 | } 113 | } 114 | 115 | var _oldFaceShapeVertices = []; 116 | var _blinked = false; 117 | var _timeOut = -1; 118 | 119 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face tracking - simple blink detection.\n" + 120 | "Detects a blink of the eyes: "); 121 | 122 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 123 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/blink_detection_center.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _faceDetectionRoi = new brfv4.Rectangle(); 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | // Then we limit the face detection region of interest to be in the central 9 | // part of the overall analysed image (green rectangle). 10 | 11 | _faceDetectionRoi.setTo( 12 | resolution.width * 0.25, resolution.height * 0.10, 13 | resolution.width * 0.50, resolution.height * 0.80 14 | ); 15 | 16 | brfManager.init(resolution, _faceDetectionRoi, brfv4Example.appId); 17 | 18 | brfManager.setFaceDetectionRoi(_faceDetectionRoi); 19 | 20 | // We can have either a landscape area (desktop), then choose height or 21 | // we can have a portrait area (mobile), then choose width as max face size. 
22 | 23 | var maxFaceSize = _faceDetectionRoi.height; 24 | 25 | if(_faceDetectionRoi.width < _faceDetectionRoi.height) { 26 | maxFaceSize = _faceDetectionRoi.width; 27 | } 28 | 29 | // Merged faces (yellow) will only show up if they are at least 30% of maxFaceSize. 30 | // Move away from the camera to see the merged detected faces (yellow) disappear. 31 | 32 | // Btw. the following settings are the default settings set by BRFv4 on init. 33 | 34 | brfManager.setFaceDetectionParams(maxFaceSize * 0.30, maxFaceSize * 0.90, 12, 8); 35 | }; 36 | 37 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 38 | 39 | brfManager.update(imageData); 40 | 41 | draw.clear(); 42 | 43 | // Face detection results: a rough rectangle used to start the face tracking. 44 | 45 | draw.drawRect(_faceDetectionRoi, false, 2.0, 0x8aff00, 0.5); 46 | 47 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 48 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 49 | 50 | var faces = brfManager.getFaces(); // default: one face, only one element in that array. 51 | 52 | for(var i = 0; i < faces.length; i++) { 53 | 54 | var face = faces[i]; 55 | 56 | if(face.state === brfv4.BRFState.FACE_TRACKING) { 57 | 58 | // simple blink detection 59 | 60 | // A simple approach with quite a lot false positives. Fast movement can't be 61 | // handled properly. This code is quite good when it comes to 62 | // staring contest apps though. 63 | 64 | // It basically compares the old positions of the eye points to the current ones. 65 | // If rapid movement of the current points was detected it's considered a blink. 
66 | 67 | var v = face.vertices; 68 | 69 | if(_oldFaceShapeVertices.length === 0) storeFaceShapeVertices(v); 70 | 71 | var k, l, yLE, yRE; 72 | 73 | // Left eye movement (y) 74 | 75 | for(k = 36, l = 41, yLE = 0; k <= l; k++) { 76 | yLE += v[k * 2 + 1] - _oldFaceShapeVertices[k * 2 + 1]; 77 | } 78 | yLE /= 6; 79 | 80 | // Right eye movement (y) 81 | 82 | for(k = 42, l = 47, yRE = 0; k <= l; k++) { 83 | yRE += v[k * 2 + 1] - _oldFaceShapeVertices[k * 2 + 1]; 84 | } 85 | 86 | yRE /= 6; 87 | 88 | var yN = 0; 89 | 90 | // Compare to overall movement (nose y) 91 | 92 | yN += v[27 * 2 + 1] - _oldFaceShapeVertices[27 * 2 + 1]; 93 | yN += v[28 * 2 + 1] - _oldFaceShapeVertices[28 * 2 + 1]; 94 | yN += v[29 * 2 + 1] - _oldFaceShapeVertices[29 * 2 + 1]; 95 | yN += v[30 * 2 + 1] - _oldFaceShapeVertices[30 * 2 + 1]; 96 | yN /= 4; 97 | 98 | var blinkRatio = Math.abs((yLE + yRE) / yN); 99 | 100 | if((blinkRatio > 12 && (yLE > 0.4 || yRE > 0.4))) { 101 | console.log("blink " + blinkRatio.toFixed(2) + " " + yLE.toFixed(2) + " " + 102 | yRE.toFixed(2) + " " + yN.toFixed(2)); 103 | 104 | blink(); 105 | } 106 | 107 | // Let the color of the shape show whether you blinked. 108 | 109 | var color = 0x00a0ff; 110 | 111 | if(_blinked) { 112 | color = 0xffd200; 113 | } 114 | 115 | // Face Tracking results: 68 facial feature points. 116 | 117 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, color, 0.4); 118 | draw.drawVertices( face.vertices, 2.0, false, color, 0.4); 119 | 120 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face tracking - simple blink" + 121 | "detection.\nDetects an eye blink: " + (_blinked ? 
"Yes" : "No")); 122 | 123 | storeFaceShapeVertices(v); 124 | } 125 | } 126 | }; 127 | 128 | function blink() { 129 | _blinked = true; 130 | 131 | if(_timeOut > -1) { clearTimeout(_timeOut); } 132 | 133 | _timeOut = setTimeout(resetBlink, 150); 134 | } 135 | 136 | function resetBlink() { 137 | _blinked = false; 138 | } 139 | 140 | function storeFaceShapeVertices(vertices) { 141 | for(var i = 0, l = vertices.length; i < l; i++) { 142 | _oldFaceShapeVertices[i] = vertices[i]; 143 | } 144 | } 145 | 146 | var _oldFaceShapeVertices = []; 147 | var _blinked = false; 148 | var _timeOut = -1; 149 | 150 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face tracking - simple blink detection.\n" + 151 | "Detects a blink of the eyes: "); 152 | 153 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 154 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/candide_overlay.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 5 | 6 | // By default everything necessary for a single face tracking app 7 | // is set up for you in brfManager.init. There is actually no 8 | // need to configure much more for a jump start. 9 | 10 | brfManager.init(resolution, resolution, brfv4Example.appId); 11 | }; 12 | 13 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 14 | 15 | // In a webcam example imageData is the mirrored webcam video feed. 16 | // In an image example imageData is the (not mirrored) image content. 17 | 18 | brfManager.update(imageData); 19 | 20 | // Drawing the results: 21 | 22 | draw.clear(); 23 | 24 | // Face detection results: a rough rectangle used to start the face tracking. 
25 | 26 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 27 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 28 | 29 | // Get all faces. The default setup only tracks one face. 30 | 31 | var faces = brfManager.getFaces(); 32 | 33 | for(var i = 0; i < faces.length; i++) { 34 | 35 | var face = faces[i]; 36 | 37 | if( face.state === brfv4.BRFState.FACE_TRACKING) { 38 | 39 | // Instead of drawing the 68 landmarks this time we draw the Candide3 model shape (yellow). 40 | 41 | draw.drawTriangles( face.candideVertices, face.candideTriangles, false, 1.0, 0xffd200, 0.4); 42 | draw.drawVertices( face.candideVertices, 2.0, false, 0xffd200, 0.4); 43 | 44 | // And for a reference also draw the 68 landmarks (blue). 45 | 46 | draw.drawVertices( face.vertices, 2.0, false, 0x00a1ff, 0.4); 47 | } 48 | } 49 | }; 50 | 51 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - candide shape overlay\n" + 52 | "The Candide 3 model is calculated from the 68 landmarks."); 53 | 54 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 55 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/color_libs.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 5 | brfManager.init(resolution, resolution, brfv4Example.appId); 6 | }; 7 | 8 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 9 | 10 | brfManager.update(imageData); 11 | 12 | draw.clear(); 13 | 14 | // Face detection results: a rough rectangle used to start the face tracking. 
15 | 16 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 17 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 18 | 19 | var faces = brfManager.getFaces(); // default: one face, only one element in that array. 20 | 21 | for(var i = 0; i < faces.length; i++) { 22 | 23 | var face = faces[i]; 24 | 25 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 26 | face.state === brfv4.BRFState.FACE_TRACKING) { 27 | 28 | // Face tracking results: 68 facial feature points. 29 | 30 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, 0x00a0ff, 0.4); 31 | draw.drawVertices( face.vertices, 2.0, false, 0x00a0ff, 0.4); 32 | 33 | // Now just draw all the triangles of the mouth in a certain color. 34 | 35 | draw.fillTriangles( face.vertices, libTriangles, false, 0xff7900, 0.8); 36 | } 37 | } 38 | }; 39 | 40 | var libTriangles = [ 41 | 48, 49, 60, 42 | 48, 59, 60, 43 | 49, 50, 61, 44 | 49, 60, 61, 45 | 50, 51, 62, 46 | 50, 61, 62, 47 | 51, 52, 62, 48 | 52, 53, 63, 49 | 52, 62, 63, 50 | 53, 54, 64, 51 | 53, 63, 64, 52 | 54, 55, 64, 53 | 55, 56, 65, 54 | 55, 64, 65, 55 | 56, 57, 66, 56 | 56, 65, 66, 57 | 57, 58, 66, 58 | 58, 59, 67, 59 | 58, 66, 67, 60 | 59, 60, 67 61 | //, // mouth whole 62 | // 60, 61, 67, 63 | // 61, 62, 66, 64 | // 61, 66, 67, 65 | // 62, 63, 66, 66 | // 63, 64, 65, 67 | // 63, 65, 66 68 | ]; 69 | 70 | brfv4Example.dom.updateHeadline("BRFv4 - intermediate - face tracking - color libs.\n" + 71 | "Draws triangles with a certain fill color."); 72 | 73 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 74 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/extended_face_shape.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _extendedShape = new brfv4.BRFv4ExtendedFace(); 5 | 6 | brfv4Example.initCurrentExample = 
function(brfManager, resolution) { 7 | brfManager.init(resolution, resolution, brfv4Example.appId); 8 | }; 9 | 10 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 11 | 12 | brfManager.update(imageData); 13 | 14 | draw.clear(); 15 | 16 | // Face detection results: a rough rectangle used to start the face tracking. 17 | 18 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 19 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 20 | 21 | // Get all faces. The default setup only tracks one face. 22 | 23 | var faces = brfManager.getFaces(); 24 | 25 | for(var i = 0; i < faces.length; i++) { 26 | 27 | var face = faces[i]; 28 | 29 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 30 | face.state === brfv4.BRFState.FACE_TRACKING) { 31 | 32 | // The extended face shape is calculated from the usual 68 facial features. 33 | // The additional landmarks are just estimated, they are not actually tracked. 34 | 35 | _extendedShape.update(face); 36 | 37 | // Then we draw all 74 landmarks of the _extendedShape. 38 | 39 | draw.drawTriangles( _extendedShape.vertices, _extendedShape.triangles, 40 | false, 1.0, 0x00a0ff, 0.4); 41 | draw.drawVertices( _extendedShape.vertices, 2.0, false, 0x00a0ff, 0.4); 42 | } 43 | } 44 | }; 45 | 46 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - extended face shape\n" + 47 | "There are 6 more landmarks for the forehead calculated from the 68 landmarks."); 48 | 49 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 50 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/face_swap_two_faces.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | // We need two face textures thus create two canvases that will hold the 5 | // extracted faces. 
6 | 7 | var _size = 256; // texture size 8 | 9 | var _extractedFace0 = brfv4Example.dom.createCanvas("_extractedFace0", _size, _size, null); 10 | var _extractedFace1 = brfv4Example.dom.createCanvas("_extractedFace1", _size, _size, null); 11 | 12 | var _ctxFace0 = _extractedFace0.getContext("2d"); 13 | var _ctxFace1 = _extractedFace1.getContext("2d"); 14 | 15 | // BRF analysis image data. 16 | 17 | var _imageData = brfv4Example.dom.getElement("_imageData"); 18 | var _resolution = null; 19 | 20 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 21 | 22 | _resolution = resolution; 23 | 24 | brfManager.init(resolution, resolution, brfv4Example.appId); 25 | brfManager.setNumFacesToTrack(2); // two faces 26 | 27 | // Relax starting conditions to eventually find more faces. 28 | 29 | var maxFaceSize = resolution.height; 30 | 31 | if(resolution.width < resolution.height) { 32 | maxFaceSize = resolution.width; 33 | } 34 | 35 | brfManager.setFaceDetectionParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 12, 8); 36 | brfManager.setFaceTrackingStartParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 32, 35, 32); 37 | brfManager.setFaceTrackingResetParams( maxFaceSize * 0.15, maxFaceSize * 1.00, 40, 55, 32); 38 | }; 39 | 40 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 41 | 42 | brfManager.update(imageData); 43 | 44 | if(!_resolution || !_imageData) return; 45 | 46 | draw.clear(); // also clears separate _faceSub canvas 47 | 48 | var faces = brfManager.getFaces(); 49 | 50 | if(faces.length < 2) { 51 | return; 52 | } 53 | 54 | var face0 = faces[0]; 55 | var face1 = faces[1]; 56 | 57 | // leave out the inner mouth, remove the last 6 triangles: 58 | 59 | var triangles = face0.triangles.concat(); 60 | triangles.splice(triangles.length - 3 * 6, 3 * 6); 61 | 62 | if( face0.state === brfv4.BRFState.FACE_TRACKING && 63 | face1.state === brfv4.BRFState.FACE_TRACKING) { 64 | 65 | _ctxFace0.clearRect(0, 0, _size, _size); 66 | 
_ctxFace1.clearRect(0, 0, _size, _size); 67 | 68 | var uvData0 = prepareFaceTexture(face0, _ctxFace0); 69 | var uvData1 = prepareFaceTexture(face1, _ctxFace1); 70 | 71 | draw.drawTexture(face0.vertices, triangles, uvData1, _extractedFace1); 72 | draw.drawTexture(face1.vertices, triangles, uvData0, _extractedFace0); 73 | } 74 | 75 | // optional visualize the tracking results as dots. 76 | 77 | if(face0.state === brfv4.BRFState.FACE_TRACKING) { 78 | draw.drawVertices(face0.vertices, 2.0, false, 0x00a0ff, 0.4); 79 | } 80 | 81 | if(face1.state === brfv4.BRFState.FACE_TRACKING) { 82 | draw.drawVertices(face1.vertices, 2.0, false, 0x00a0ff, 0.4); 83 | } 84 | }; 85 | 86 | function prepareFaceTexture(face, ctx) { 87 | 88 | var f = _size / face.bounds.width; 89 | 90 | if (face.bounds.height > face.bounds.width) { 91 | f = _size / face.bounds.height; 92 | } 93 | 94 | ctx.drawImage(_imageData, 95 | -face.bounds.x * f, -face.bounds.y * f, 96 | _resolution.width * f , _resolution.height * f); 97 | 98 | var uvData = []; 99 | 100 | for(var u = 0; u < face.vertices.length; u += 2) { 101 | var ux = (((face.vertices[u] - face.bounds.x) * f) / _size); 102 | var uy = (((face.vertices[u+1] - face.bounds.y) * f) / _size); 103 | uvData.push(ux); 104 | uvData.push(uy); 105 | } 106 | 107 | return uvData; 108 | } 109 | 110 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face swap of two faces.\n" + 111 | "Switch faces with a friend."); 112 | 113 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 114 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/face_texture_overlay.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | // assets/face_textures.js includes a texture jpg file as data url 5 | // This texture can be viewed by opening face_texture_overlay.html 6 | 7 | var faceTex = brfv4Example.faceTextures.marcel_0; 8 | var texture 
= new Image(); 9 | texture.src = faceTex.tex; 10 | 11 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 12 | brfManager.init(resolution, resolution, brfv4Example.appId); 13 | }; 14 | 15 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 16 | 17 | brfManager.update(imageData); 18 | 19 | draw.clear(); 20 | 21 | // Face detection results: a rough rectangle used to start the face tracking. 22 | 23 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 24 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 25 | 26 | // Get all faces. The default setup only tracks one face. 27 | 28 | var faces = brfManager.getFaces(); 29 | 30 | for(var i = 0; i < faces.length; i++) { 31 | 32 | var face = faces[i]; 33 | 34 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 35 | face.state === brfv4.BRFState.FACE_TRACKING) { 36 | 37 | //draw.drawTriangles(face.vertices, face.triangles, false, 1.0, 0x00a0ff, 0.4); 38 | draw.drawVertices(face.vertices, 2.0, false, 0x00a0ff, 0.4); 39 | 40 | // Now draw the texture onto the vertices/triangles using UV mapping. 41 | 42 | // draw.drawTexture(face.vertices, face.triangles, faceTex.uv, texture); 43 | 44 | // ... or if you want to leave out the inner mouth, remove the last 6 triangles: 45 | 46 | var triangles = face.triangles.concat(); 47 | 48 | triangles.splice(triangles.length - 3 * 6, 3 * 6); 49 | 50 | draw.drawTexture(face.vertices, triangles, faceTex.uv, texture); 51 | } 52 | } 53 | }; 54 | 55 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - a face texture overlay.\n" + 56 | "Have fun being Marcel! 
:D"); 57 | 58 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 59 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/png_mask_overlay.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var numFacesToTrack = 2; // Set the number of faces to detect and track. 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution, draw) { 7 | 8 | brfManager.init(resolution, resolution, brfv4Example.appId); 9 | brfManager.setNumFacesToTrack(numFacesToTrack); 10 | 11 | // Relax starting conditions to eventually find more faces. 12 | 13 | var maxFaceSize = resolution.height; 14 | 15 | if(resolution.width < resolution.height) { 16 | maxFaceSize = resolution.width; 17 | } 18 | 19 | brfManager.setFaceDetectionParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 12, 8); 20 | brfManager.setFaceTrackingStartParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 32, 35, 32); 21 | brfManager.setFaceTrackingResetParams( maxFaceSize * 0.15, maxFaceSize * 1.00, 40, 55, 32); 22 | 23 | // Load all image masks for quick switching. 24 | 25 | prepareImages(draw); 26 | 27 | // Add a click event to cycle through the image overlays. 28 | 29 | draw.clickArea.addEventListener("click", onClicked); 30 | draw.clickArea.mouseEnabled = true; 31 | }; 32 | 33 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 34 | 35 | brfManager.update(imageData); 36 | 37 | draw.clear(); 38 | 39 | // Face detection results: a rough rectangle used to start the face tracking. 40 | 41 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 42 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 43 | 44 | // Get all faces. The default setup only tracks one face. 45 | 46 | var faces = brfManager.getFaces(); 47 | 48 | // If no face was tracked: hide the image overlays. 
49 | 50 | for(var i = 0; i < faces.length; i++) { 51 | 52 | var face = faces[i]; // get face 53 | var baseNode = _baseNodes[i]; // get image container 54 | 55 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 56 | face.state === brfv4.BRFState.FACE_TRACKING) { 57 | 58 | // Face Tracking results: 68 facial feature points. 59 | 60 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, 0x00a0ff, 0.4); 61 | draw.drawVertices( face.vertices, 2.0, false, 0x00a0ff, 0.4); 62 | 63 | // Set position to be nose top and calculate rotation. 64 | 65 | baseNode.x = face.points[27].x; 66 | baseNode.y = face.points[27].y; 67 | 68 | baseNode.scaleX = (face.scale / 480) * (1 - toDegree(Math.abs(face.rotationY)) / 110.0); 69 | baseNode.scaleY = (face.scale / 480) * (1 - toDegree(Math.abs(face.rotationX)) / 110.0); 70 | baseNode.rotation = toDegree(face.rotationZ); 71 | 72 | baseNode.alpha = 1.0; 73 | 74 | } else { 75 | 76 | baseNode.alpha = 0.0; 77 | } 78 | } 79 | }; 80 | 81 | function onClicked(event) { 82 | var i = _images.indexOf(_image) + 1; 83 | 84 | if(i === _images.length) { 85 | i = 0; 86 | } 87 | 88 | _image = _images[i]; 89 | changeImage(_image, i); 90 | } 91 | 92 | function changeImage(bitmap, index) { 93 | 94 | bitmap.scaleX = _imageScales[index]; 95 | bitmap.scaleY = _imageScales[index]; 96 | 97 | bitmap.x = -parseInt(bitmap.getBounds().width * bitmap.scaleX * 0.50); 98 | bitmap.y = -parseInt(bitmap.getBounds().height * bitmap.scaleY * 0.45); 99 | 100 | for(var i = 0; i < numFacesToTrack; i++) { 101 | 102 | var baseNode = _baseNodes[i]; 103 | baseNode.removeAllChildren(); 104 | 105 | if(i === 0) { 106 | baseNode.addChild(bitmap); 107 | } else { 108 | baseNode.addChild(bitmap.clone()); 109 | } 110 | } 111 | } 112 | 113 | function prepareImages(draw) { 114 | 115 | draw.imageContainer.removeAllChildren(); 116 | 117 | var i = 0; 118 | var l = 0; 119 | 120 | for(i = 0, l = numFacesToTrack; i < l; i++) { 121 | var baseNode = new createjs.Container(); 122 | 
draw.imageContainer.addChild(baseNode); 123 | _baseNodes.push(baseNode); 124 | } 125 | 126 | for(i = 0, l = _imageURLs.length; i < l; i++) { 127 | _images[i] = new createjs.Bitmap(_imageURLs[i]); 128 | 129 | if(i === 0) { 130 | _image = _images[i]; 131 | _image.image.onload = function() { 132 | changeImage(_image, 0); 133 | } 134 | } 135 | } 136 | } 137 | 138 | var _imageURLs = ["assets/brfv4_lion.png", "assets/brfv4_img_glasses.png"]; 139 | var _imageScales = [3.3, 1.0]; 140 | 141 | var _images = []; 142 | var _image = null; 143 | 144 | var _baseNodes = []; 145 | 146 | var toDegree = brfv4.BRFv4PointUtils.toDegree; 147 | 148 | brfv4Example.dom.updateHeadline("BRFv4 - advanced - face tracking - PNG/mask image overlay.\n" + 149 | "Click to cycle through images."); 150 | 151 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 152 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/restrict_to_center.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _faceDetectionRoi = new brfv4.Rectangle(); 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | brfManager.init(resolution, resolution, brfv4Example.appId); 9 | 10 | // Sometimes you want to restrict the position and pose of a face. 11 | 12 | // In this setup we will restrict pick up of the face to the center of the image 13 | // and we will let BRFv4 reset if the user turns his head too much. 14 | 15 | // We limit the face detection region of interest to be in the central 16 | // part of the overall analysed image (green rectangle). 
17 | 18 | _faceDetectionRoi.setTo( 19 | resolution.width * 0.25, resolution.height * 0.10, 20 | resolution.width * 0.50, resolution.height * 0.80 21 | ); 22 | brfManager.setFaceDetectionRoi(_faceDetectionRoi); 23 | 24 | // We can have either a landscape area (desktop), then choose height or 25 | // we can have a portrait area (mobile), then choose width as max face size. 26 | 27 | var maxFaceSize = _faceDetectionRoi.height; 28 | 29 | if(_faceDetectionRoi.width < _faceDetectionRoi.height) { 30 | maxFaceSize = _faceDetectionRoi.width; 31 | } 32 | 33 | // Use the usual detection distances to be able to tell the user what to do. 34 | 35 | brfManager.setFaceDetectionParams(maxFaceSize * 0.30, maxFaceSize * 1.00, 12, 8); 36 | 37 | // Set up the pickup parameters for the face tracking: 38 | // startMinFaceSize, startMaxFaceSize, startRotationX/Y/Z 39 | 40 | // Faces will only get picked up, if they look straight into the camera 41 | // and have a certain size (distance to camera). 42 | 43 | brfManager.setFaceTrackingStartParams(maxFaceSize * 0.50, maxFaceSize * 0.70, 15, 15, 15); 44 | 45 | // Set up the reset conditions for the face tracking: 46 | // resetMinFaceSize, resetMaxFaceSize, resetRotationX/Y/Z 47 | 48 | // Face tracking will reset to face detection, if the face turns too much or leaves 49 | // the desired distance to the camera. 
50 | 51 | brfManager.setFaceTrackingResetParams(maxFaceSize * 0.45, maxFaceSize * 0.75, 25, 25, 25); 52 | }; 53 | 54 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 55 | 56 | brfManager.update(imageData); 57 | 58 | draw.clear(); 59 | 60 | draw.drawRect(_faceDetectionRoi, false, 2.0, 0x8aff00, 0.5); 61 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 62 | 63 | var mergedFaces = brfManager.getMergedDetectedFaces(); 64 | 65 | draw.drawRects(mergedFaces, false, 2.0, 0xffd200, 1.0); 66 | 67 | var faces = brfManager.getFaces(); 68 | var oneFaceTracked = false; 69 | 70 | for(var i = 0; i < faces.length; i++) { 71 | 72 | var face = faces[i]; 73 | 74 | if(face.state === brfv4.BRFState.FACE_TRACKING) { 75 | 76 | // Read the rotation of the face and draw it 77 | // green if the face is frontal or 78 | // red if the user turns the head too much. 79 | 80 | var maxRot = brfv4.BRFv4PointUtils.toDegree( 81 | Math.max( 82 | Math.abs(face.rotationX), 83 | Math.abs(face.rotationY), 84 | Math.abs(face.rotationZ) 85 | ) 86 | ); 87 | 88 | var percent = maxRot / 20.0; 89 | 90 | if(percent < 0.0) { percent = 0.0; } 91 | if(percent > 1.0) { percent = 1.0; } 92 | 93 | var color = 94 | (((0xff * percent) & 0xff) << 16) + 95 | (((0xff * (1.0 - percent) & 0xff) << 8)); 96 | 97 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, color, 0.4); 98 | draw.drawVertices( face.vertices, 2.0, false, color, 0.4); 99 | 100 | oneFaceTracked = true; 101 | } 102 | } 103 | 104 | // Check, if the face is too close or too far way and tell the user what to do. 105 | 106 | if(!oneFaceTracked && mergedFaces.length > 0) { 107 | 108 | var mergedFace = mergedFaces[0]; 109 | 110 | if(mergedFace.width < _faceDetectionRoi.width * 0.50) { // startMinFaceSize 111 | 112 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - restrict to frontal and center\n" + 113 | "Only track a face if it is in a certain distance. 
Come closer."); 114 | 115 | } else if(mergedFace.width > _faceDetectionRoi.width * 0.70) { // startMaxFaceSize 116 | 117 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - restrict to frontal and center\n" + 118 | "Only track a face if it is in a certain distance. Move further away."); 119 | } 120 | 121 | } else { 122 | 123 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - restrict to frontal and center\n" + 124 | "Only track a face if it is in a certain distance to the camera and is frontal."); 125 | } 126 | }; 127 | 128 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - restrict to frontal and center\n" + 129 | "Only track a face if it is in a certain distance to the camera and is frontal."); 130 | 131 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 132 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/smile_detection.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 5 | brfManager.init(resolution, resolution, brfv4Example.appId); 6 | }; 7 | 8 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 9 | 10 | brfManager.update(imageData); 11 | 12 | draw.clear(); 13 | 14 | // Face detection results: a rough rectangle used to start the face tracking. 15 | 16 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 17 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 18 | 19 | var faces = brfManager.getFaces(); // default: one face, only one element in that array. 
20 | 21 | for(var i = 0; i < faces.length; i++) { 22 | 23 | var face = faces[i]; 24 | 25 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 26 | face.state === brfv4.BRFState.FACE_TRACKING) { 27 | 28 | // Smile Detection 29 | 30 | setPoint(face.vertices, 48, p0); // mouth corner left 31 | setPoint(face.vertices, 54, p1); // mouth corner right 32 | 33 | var mouthWidth = calcDistance(p0, p1); 34 | 35 | setPoint(face.vertices, 39, p1); // left eye inner corner 36 | setPoint(face.vertices, 42, p0); // right eye outer corner 37 | 38 | var eyeDist = calcDistance(p0, p1); 39 | var smileFactor = mouthWidth / eyeDist; 40 | 41 | smileFactor -= 1.40; // 1.40 - neutral, 1.70 smiling 42 | 43 | if(smileFactor > 0.25) smileFactor = 0.25; 44 | if(smileFactor < 0.00) smileFactor = 0.00; 45 | 46 | smileFactor *= 4.0; 47 | 48 | if(smileFactor < 0.0) { smileFactor = 0.0; } 49 | if(smileFactor > 1.0) { smileFactor = 1.0; } 50 | 51 | // Let the color show you how much you are smiling. 52 | 53 | var color = 54 | (((0xff * (1.0 - smileFactor) & 0xff) << 16)) + 55 | (((0xff * smileFactor) & 0xff) << 8); 56 | 57 | // Face Tracking results: 68 facial feature points. 58 | 59 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, color, 0.4); 60 | draw.drawVertices( face.vertices, 2.0, false, color, 0.4); 61 | 62 | brfv4Example.dom.updateHeadline("BRFv4 - intermediate - face tracking - simple " + 63 | "smile detection.\nDetects how much someone is smiling. 
smile factor: " + 64 | (smileFactor * 100).toFixed(0) + "%"); 65 | } 66 | } 67 | }; 68 | 69 | var p0 = new brfv4.Point(); 70 | var p1 = new brfv4.Point(); 71 | 72 | var setPoint = brfv4.BRFv4PointUtils.setPoint; 73 | var calcDistance = brfv4.BRFv4PointUtils.calcDistance; 74 | 75 | brfv4Example.dom.updateHeadline("BRFv4 - intermediate - face tracking - simple smile " + 76 | "detection.\nDetects how much someone is smiling."); 77 | 78 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 79 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/track_multiple_faces.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var numFacesToTrack = 2; // Set the number of faces to detect and track. 5 | 6 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 7 | 8 | // By default everything necessary for a single face tracking app 9 | // is set up for you in brfManager.init. 10 | 11 | brfManager.init(resolution, resolution, brfv4Example.appId); 12 | 13 | // But here we tell BRFv4 to track multiple faces. In this case two. 14 | 15 | // While the first face is getting tracked the face detection 16 | // is performed in parallel and is looking for a second face. 17 | 18 | brfManager.setNumFacesToTrack(numFacesToTrack); 19 | 20 | // Relax starting conditions to eventually find more faces. 
21 | 22 | var maxFaceSize = resolution.height; 23 | 24 | if(resolution.width < resolution.height) { 25 | maxFaceSize = resolution.width; 26 | } 27 | 28 | brfManager.setFaceDetectionParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 12, 8); 29 | brfManager.setFaceTrackingStartParams( maxFaceSize * 0.20, maxFaceSize * 1.00, 32, 35, 32); 30 | brfManager.setFaceTrackingResetParams( maxFaceSize * 0.15, maxFaceSize * 1.00, 40, 55, 32); 31 | }; 32 | 33 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 34 | 35 | brfManager.update(imageData); 36 | 37 | // Drawing the results: 38 | 39 | draw.clear(); 40 | 41 | // Get all faces. We get numFacesToTrack faces in that array. 42 | 43 | var faces = brfManager.getFaces(); 44 | 45 | for(var i = 0; i < faces.length; i++) { 46 | 47 | var face = faces[i]; 48 | 49 | // Every face has it's own states. 50 | // While the first face might already be tracking, 51 | // the second face might just try to detect a face. 52 | 53 | if(face.state === brfv4.BRFState.FACE_DETECTION) { 54 | 55 | // Face detection results: a rough rectangle used to start the face tracking. 56 | 57 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 58 | 59 | } else if( face.state === brfv4.BRFState.FACE_TRACKING_START || 60 | face.state === brfv4.BRFState.FACE_TRACKING) { 61 | 62 | // Face tracking results: 68 facial feature points. 
63 | 64 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, 0x00a0ff, 0.4); 65 | draw.drawVertices( face.vertices, 2.0, false, 0x00a0ff, 0.4); 66 | } 67 | } 68 | }; 69 | 70 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - track multiple faces\n" + 71 | "Detect and track " + numFacesToTrack + " faces and draw their 68 facial landmarks."); 72 | 73 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 74 | 75 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/track_single_face.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 5 | 6 | // By default everything necessary for a single face tracking app 7 | // is set up for you in brfManager.init. There is actually no 8 | // need to configure much more for a jump start. 9 | 10 | brfManager.init(resolution, resolution, brfv4Example.appId); 11 | }; 12 | 13 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 14 | 15 | // In a webcam example imageData is the mirrored webcam video feed. 16 | // In an image example imageData is the (not mirrored) image content. 17 | 18 | brfManager.update(imageData); 19 | 20 | // Drawing the results: 21 | 22 | draw.clear(); 23 | 24 | // Face detection results: a rough rectangle used to start the face tracking. 25 | 26 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 27 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 28 | 29 | // Get all faces. The default setup only tracks one face. 
30 | 31 | var faces = brfManager.getFaces(); 32 | 33 | for(var i = 0; i < faces.length; i++) { 34 | 35 | var face = faces[i]; 36 | 37 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 38 | face.state === brfv4.BRFState.FACE_TRACKING) { 39 | 40 | // Face tracking results: 68 facial feature points. 41 | 42 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, 0x00a0ff, 0.4); 43 | draw.drawVertices( face.vertices, 2.0, false, 0x00a0ff, 0.4); 44 | } 45 | } 46 | }; 47 | 48 | brfv4Example.dom.updateHeadline("BRFv4 - basic - face tracking - track single face\n" + 49 | "Detect and track one face and draw the 68 facial landmarks."); 50 | 51 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 52 | })(); -------------------------------------------------------------------------------- /js/examples/face_tracking/yawn_detection.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | brfv4Example.initCurrentExample = function(brfManager, resolution) { 5 | brfManager.init(resolution, resolution, brfv4Example.appId); 6 | }; 7 | 8 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 9 | 10 | brfManager.update(imageData); 11 | 12 | draw.clear(); 13 | 14 | // Face detection results: a rough rectangle used to start the face tracking. 15 | 16 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 17 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 18 | 19 | var faces = brfManager.getFaces(); // default: one face, only one element in that array. 20 | 21 | for(var i = 0; i < faces.length; i++) { 22 | 23 | var face = faces[i]; 24 | 25 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 26 | face.state === brfv4.BRFState.FACE_TRACKING) { 27 | 28 | // Yawn Detection - Or: How wide open is the mouth? 
29 | 30 | setPoint(face.vertices, 39, p1); // left eye inner corner 31 | setPoint(face.vertices, 42, p0); // right eye outer corner 32 | 33 | var eyeDist = calcDistance(p0, p1); 34 | 35 | setPoint(face.vertices, 62, p0); // mouth upper inner lip 36 | setPoint(face.vertices, 66, p1); // mouth lower inner lip 37 | 38 | var mouthOpen = calcDistance(p0, p1); 39 | var yawnFactor = mouthOpen / eyeDist; 40 | 41 | yawnFactor -= 0.35; // remove smiling 42 | 43 | if(yawnFactor < 0) yawnFactor = 0; 44 | 45 | yawnFactor *= 2.0; // scale up a bit 46 | 47 | if(yawnFactor > 1.0) yawnFactor = 1.0; 48 | 49 | if(yawnFactor < 0.0) { yawnFactor = 0.0; } 50 | if(yawnFactor > 1.0) { yawnFactor = 1.0; } 51 | 52 | // Let the color show you how much you yawn. 53 | 54 | var color = 55 | (((0xff * (1.0 - yawnFactor) & 0xff) << 16)) + 56 | (((0xff * yawnFactor) & 0xff) << 8); 57 | 58 | // Face Tracking results: 68 facial feature points. 59 | 60 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, color, 0.4); 61 | draw.drawVertices( face.vertices, 2.0, false, color, 0.4); 62 | 63 | brfv4Example.dom.updateHeadline("BRFv4 - Advanced - A Simple Yawn Detection.\n" + 64 | "Detects how wide open the mouth is: " + (yawnFactor * 100).toFixed(0) + "%"); 65 | } 66 | } 67 | }; 68 | 69 | var p0 = new brfv4.Point(); 70 | var p1 = new brfv4.Point(); 71 | 72 | var setPoint = brfv4.BRFv4PointUtils.setPoint; 73 | var calcDistance = brfv4.BRFv4PointUtils.calcDistance; 74 | 75 | brfv4Example.dom.updateHeadline("BRFv4 - intermediate - face tracking - simple yawn detection.\n" + 76 | "Detects how wide open the mouth is."); 77 | 78 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 79 | })(); -------------------------------------------------------------------------------- /js/examples/point_tracking/track_multiple_points.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _pointsToAdd = []; 5 | var 
_numTrackedPoints = 0; 6 | 7 | brfv4Example.initCurrentExample = function(brfManager, resolution, draw) { 8 | 9 | brfManager.init(resolution, resolution, brfv4Example.appId); 10 | 11 | // BRFMode.POINT_TRACKING skips the face detection/tracking entirely. 12 | // You can do point tracking and face detection/tracking simultaneously 13 | // by settings BRFMode.FACE_TRACKING or BRFMode.FACE_DETECTION. 14 | 15 | brfManager.setMode(brfv4.BRFMode.POINT_TRACKING); 16 | 17 | // Default settings: a patch size of 21 (needs to be odd), 4 pyramid levels, 18 | // 50 iterations and a small error of 0.0006 19 | 20 | brfManager.setOpticalFlowParams(21, 4, 50, 0.0006); 21 | 22 | // true means: BRF will remove points if they are not valid anymore. 23 | // false means: developers handle point removal on their own. 24 | 25 | brfManager.setOpticalFlowCheckPointsValidBeforeTracking(true); 26 | 27 | draw.clickArea.addEventListener("click", onClicked); 28 | draw.clickArea.mouseEnabled = true; 29 | }; 30 | 31 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 32 | 33 | // We add the _pointsToAdd right before an update. 34 | // If you do that onclick, the tracking might not 35 | // handle the new points correctly. 36 | 37 | if(_pointsToAdd.length > 0) { 38 | brfManager.addOpticalFlowPoints(_pointsToAdd); 39 | _pointsToAdd.length = 0; 40 | } 41 | 42 | brfManager.update(imageData); 43 | 44 | draw.clear(); 45 | 46 | var points = brfManager.getOpticalFlowPoints(); 47 | var states = brfManager.getOpticalFlowPointStates(); 48 | 49 | // Draw points by state: green valid, red invalid 50 | 51 | for(var i = 0, l = points.length; i < l; i++) { 52 | if(states[i]) { 53 | draw.drawPoint(points[i], 2, false, 0x00ff00, 1.0); 54 | } else { 55 | draw.drawPoint(points[i], 2, false, 0xff0000, 1.0); 56 | } 57 | } 58 | 59 | // ... or just draw all points that got tracked. 
60 | //draw.drawPoints(points, 2, false, 0x00ff00, 1.0); 61 | 62 | if(points.length !== _numTrackedPoints) { 63 | _numTrackedPoints = points.length; 64 | brfv4Example.dom.updateHeadline("BRFv4 - Basic - Point Tracking\n" + 65 | "Tracking " + _numTrackedPoints + " points."); 66 | } 67 | }; 68 | 69 | function onClicked(event) { 70 | 71 | var x = event.localX; 72 | var y = event.localY; 73 | 74 | // Add 1 point: 75 | 76 | // _pointsToAdd.push(new brfv4.Point(x, y)); 77 | 78 | //Add 100 points 79 | 80 | var w = 60.0; 81 | var step = 6.0; 82 | var xStart = x - w * 0.5; 83 | var xEnd = x + w * 0.5; 84 | var yStart = y - w * 0.5; 85 | var yEnd = y + w * 0.5; 86 | var dy = yStart; 87 | var dx = xStart; 88 | 89 | for(; dy < yEnd; dy += step) { 90 | for(dx = xStart; dx < xEnd; dx += step) { 91 | _pointsToAdd.push(new brfv4.Point(dx, dy)); 92 | } 93 | } 94 | } 95 | 96 | brfv4Example.dom.updateHeadline("BRFv4 - basic - point tracking.\n" + 97 | "Click eg. on your face to add a bunch of points to track."); 98 | 99 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 100 | 101 | })(); -------------------------------------------------------------------------------- /js/examples/point_tracking/track_points_and_face.js: -------------------------------------------------------------------------------- 1 | (function exampleCode() { 2 | "use strict"; 3 | 4 | var _pointsToAdd = []; 5 | var _numTrackedPoints = 0; 6 | 7 | brfv4Example.initCurrentExample = function(brfManager, resolution, draw) { 8 | 9 | brfManager.init(resolution, resolution, brfv4Example.appId); 10 | 11 | // BRFMode.POINT_TRACKING skips the face detection/tracking entirely. 12 | // This examples shows that both can be done simultaneously by setting 13 | // the mode to BRFMode.FACE_TRACKING. 
14 | 15 | brfManager.setMode(brfv4.BRFMode.FACE_TRACKING); 16 | 17 | // Default settings: a patch size of 21 (needs to be odd), 4 pyramid levels, 18 | // 50 iterations and a small error of 0.0006 19 | 20 | brfManager.setOpticalFlowParams(21, 4, 50, 0.0006); 21 | 22 | // true means: BRF will remove points if they are not valid anymore. 23 | // false means: developers handle point removal on their own. 24 | 25 | brfManager.setOpticalFlowCheckPointsValidBeforeTracking(true); 26 | 27 | draw.clickArea.addEventListener("click", onClicked); 28 | draw.clickArea.mouseEnabled = true; 29 | }; 30 | 31 | brfv4Example.updateCurrentExample = function(brfManager, imageData, draw) { 32 | 33 | // We add the _pointsToAdd right before an update. 34 | // If you do that onclick, the tracking might not 35 | // handle the new points correctly. 36 | 37 | if(_pointsToAdd.length > 0) { 38 | brfManager.addOpticalFlowPoints(_pointsToAdd); 39 | _pointsToAdd.length = 0; 40 | } 41 | 42 | brfManager.update(imageData); 43 | 44 | draw.clear(); 45 | 46 | // Face detection results: a rough rectangle used to start the face tracking. 47 | 48 | draw.drawRects(brfManager.getAllDetectedFaces(), false, 1.0, 0x00a1ff, 0.5); 49 | draw.drawRects(brfManager.getMergedDetectedFaces(), false, 2.0, 0xffd200, 1.0); 50 | 51 | // Get all faces. The default setup only tracks one face. 52 | 53 | var faces = brfManager.getFaces(); 54 | var i = 0; 55 | 56 | for(i = 0; i < faces.length; i++) { 57 | 58 | var face = faces[i]; 59 | 60 | if( face.state === brfv4.BRFState.FACE_TRACKING_START || 61 | face.state === brfv4.BRFState.FACE_TRACKING) { 62 | 63 | // Face tracking results: 68 facial feature points. 
64 | 65 | draw.drawTriangles( face.vertices, face.triangles, false, 1.0, 0x00a0ff, 0.4); 66 | draw.drawVertices( face.vertices, 2.0, false, 0x00a0ff, 0.4); 67 | } 68 | } 69 | 70 | var points = brfManager.getOpticalFlowPoints(); 71 | var states = brfManager.getOpticalFlowPointStates(); 72 | 73 | // Draw points by state: green valid, red invalid 74 | 75 | for(i = 0; i < points.length; i++) { 76 | if(states[i]) { 77 | draw.drawPoint(points[i], 2, false, 0x00ff00, 1.0); 78 | } else { 79 | draw.drawPoint(points[i], 2, false, 0xff0000, 1.0); 80 | } 81 | } 82 | 83 | // ... or just draw all points that got tracked. 84 | //draw.drawPoints(points, 2, false, 0x00ff00, 1.0); 85 | 86 | if(points.length !== _numTrackedPoints) { 87 | _numTrackedPoints = points.length; 88 | brfv4Example.dom.updateHeadline("BRFv4 - Basic - Point Tracking\n" + 89 | "Tracking " + _numTrackedPoints + " points."); 90 | } 91 | }; 92 | 93 | function onClicked(event) { 94 | 95 | var x = event.localX; 96 | var y = event.localY; 97 | 98 | // Add 1 point: 99 | 100 | // _pointsToAdd.push(new brfv4.Point(x, y)); 101 | 102 | //Add 100 points 103 | 104 | var w = 60.0; 105 | var step = 6.0; 106 | var xStart = x - w * 0.5; 107 | var xEnd = x + w * 0.5; 108 | var yStart = y - w * 0.5; 109 | var yEnd = y + w * 0.5; 110 | var dy = yStart; 111 | var dx = xStart; 112 | 113 | for(; dy < yEnd; dy += step) { 114 | for(dx = xStart; dx < xEnd; dx += step) { 115 | _pointsToAdd.push(new brfv4.Point(dx, dy)); 116 | } 117 | } 118 | } 119 | 120 | brfv4Example.dom.updateHeadline("BRFv4 - basic - point tracking - track points and a face.\n" + 121 | "Click eg. 
on your face to add a bunch of points to track."); 122 | 123 | brfv4Example.dom.updateCodeSnippet(exampleCode + ""); 124 | 125 | })(); -------------------------------------------------------------------------------- /js/libs/brf_asmjs/BRFv4_JS_TK210219_v4.2.0_trial.js.mem: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/js/libs/brf_asmjs/BRFv4_JS_TK210219_v4.2.0_trial.js.mem -------------------------------------------------------------------------------- /js/libs/brf_wasm/BRFv4_JS_TK210219_v4.2.0_trial.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Tastenkunst/brfv4_javascript_examples/fe72a136e3eb2d3173fe16460a60dbeec7cbf089/js/libs/brf_wasm/BRFv4_JS_TK210219_v4.2.0_trial.wasm -------------------------------------------------------------------------------- /js/libs/brf_wasm/BRFv4_JS_TK210219_v4.2.0_trial.worker.js: -------------------------------------------------------------------------------- 1 | var brfv4 = null; 2 | var brfManager = null; 3 | var resolution = null; 4 | 5 | var window = window || self; 6 | var document = document || self; 7 | 8 | importScripts("BRFv4_JS_TK210219_v4.2.0_trial.js"); 9 | 10 | self.addEventListener('message', function(e) { 11 | 12 | if(e.data === "waitForSDK") { 13 | 14 | waitForSDK(); 15 | 16 | } else { 17 | 18 | var dataBuffer = new Int32Array(e.data); 19 | 20 | if(dataBuffer.length === 4) { 21 | 22 | // "initBRFv4Manager" 23 | 24 | resolution = new brfv4.Rectangle(dataBuffer[0], dataBuffer[1], dataBuffer[2], dataBuffer[3]); 25 | 26 | brfManager = new brfv4.BRFManager(); 27 | brfManager.init(resolution, resolution, "com.tastenkunst.brfv4.js.examples.minimal.webworker"); 28 | 29 | self.postMessage("onInitBRFv4Manager"); 30 | 31 | } else if(resolution !== null && dataBuffer.length === resolution.width * 
resolution.height) { 32 | 33 | // track 34 | 35 | dataBuffer = new Uint8ClampedArray(e.data); 36 | 37 | brfManager.update(dataBuffer); 38 | 39 | var faces = brfManager.getFaces(); 40 | var vertices = new Float32Array(68 * 2); 41 | 42 | if(faces.length > 0) { 43 | 44 | var face = faces[0]; 45 | 46 | for(var k = 0; k < face.vertices.length; k++) { 47 | 48 | vertices[k] = face.vertices[k]; 49 | } 50 | } 51 | 52 | self.postMessage(vertices); 53 | } 54 | } 55 | 56 | }, false); 57 | 58 | function initializeSDK() { 59 | 60 | if(brfv4 === null) { 61 | 62 | brfv4 = { 63 | 64 | locateFile: function(fileName) { 65 | 66 | return fileName; 67 | }, 68 | 69 | ENVIRONMENT: "WORKER" 70 | }; 71 | 72 | initializeBRF(brfv4); 73 | } 74 | } 75 | 76 | initializeSDK(); 77 | 78 | function waitForSDK() { 79 | 80 | if(!!brfv4 && brfv4.sdkReady) { 81 | 82 | self.postMessage("initSDK"); 83 | 84 | } else { 85 | 86 | setTimeout(waitForSDK, 100); 87 | } 88 | } -------------------------------------------------------------------------------- /js/libs/highlight/highlight_tomorrow.css: -------------------------------------------------------------------------------- 1 | /* http://jmblog.github.com/color-themes-for-google-code-highlightjs */ 2 | 3 | /* Tomorrow Comment */ 4 | .hljs-comment, 5 | .hljs-quote { 6 | color: #8e908c; 7 | } 8 | 9 | /* Tomorrow Red */ 10 | .hljs-variable, 11 | .hljs-template-variable, 12 | .hljs-tag, 13 | .hljs-name, 14 | .hljs-selector-id, 15 | .hljs-selector-class, 16 | .hljs-regexp, 17 | .hljs-deletion { 18 | color: #c82829; 19 | } 20 | 21 | /* Tomorrow Orange */ 22 | .hljs-number, 23 | .hljs-built_in, 24 | .hljs-builtin-name, 25 | .hljs-literal, 26 | .hljs-type, 27 | .hljs-params, 28 | .hljs-meta, 29 | .hljs-link { 30 | color: #f5871f; 31 | } 32 | 33 | /* Tomorrow Yellow */ 34 | .hljs-attribute { 35 | color: #eab700; 36 | } 37 | 38 | /* Tomorrow Green */ 39 | .hljs-string, 40 | .hljs-symbol, 41 | .hljs-bullet, 42 | .hljs-addition { 43 | color: #718c00; 44 | } 45 | 46 | /* 
Tomorrow Blue */ 47 | .hljs-title, 48 | .hljs-section { 49 | color: #4271ae; 50 | } 51 | 52 | /* Tomorrow Purple */ 53 | .hljs-keyword, 54 | .hljs-selector-tag { 55 | color: #8959a8; 56 | } 57 | 58 | .hljs { 59 | display: block; 60 | overflow-x: auto; 61 | background: #f7f7f7; 62 | color: #4d4d4c; 63 | padding: 0.5em; 64 | } 65 | 66 | .hljs-emphasis { 67 | font-style: italic; 68 | } 69 | 70 | .hljs-strong { 71 | font-weight: bold; 72 | } 73 | -------------------------------------------------------------------------------- /js/libs/quicksettings/quicksettings.min.css: -------------------------------------------------------------------------------- 1 | .qs_main{background-color:#dddddd;text-align:left;position:absolute;width:200px;font:12px sans-serif;box-shadow:5px 5px 8px rgba(0,0,0,0.35);user-select:none;-webkit-user-select:none;color:#000000;border:none}.qs_content{background-color:#cccccc;overflow-y:auto}.qs_title_bar{background-color:#eeeeee;user-select:none;-webkit-user-select:none;cursor:pointer;padding:5px;font-weight:bold;border:none;color:#000000}.qs_container{margin:5px;padding:5px;background-color:#eeeeee;border:none;position:relative}.qs_container_selected{border:none;background-color:#ffffff}.qs_range{-webkit-appearance:none;-moz-appearance:none;width:100%;height:17px;padding:0;margin:0;background-color:transparent;border:none;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.qs_range:focus{outline:none;border:none}.qs_range::-webkit-slider-runnable-track{width:100%;height:15px;cursor:pointer;background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range:focus::-webkit-slider-runnable-track{background:#cccccc}.qs_range::-webkit-slider-thumb{-webkit-appearance:none;height:15px;width:15px;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;background:#999999;cursor:pointer;margin-top:0}.qs_range::-moz-range-track{width:100%;height:15px;cursor:pointer;background:#cccccc;-webkit-border-radius
:0;-moz-border-radius:0;border-radius:0}.qs_range::-moz-range-thumb{height:15px;width:15px;border:none;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;background:#999999;cursor:pointer}.qs_range::-ms-track{width:100%;height:15px;cursor:pointer;visibility:hidden;background:transparent}.qs_range::-ms-thumb{height:15px;width:15px;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;background:#999999;cursor:pointer;border:none}.qs_range::-ms-fill-lower{background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range:focus::-ms-fill-lower{background:#cccccc}.qs_range::-ms-fill-upper{background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range:focus::-ms-fill-upper{background:#cccccc}.qs_button{background-color:#f6f6f6;color:#000000;height:30px;border:1px solid #aaaaaa;font:12px sans-serif}.qs_button:active{background-color:#ffffff;border:1px solid #aaaaaa}.qs_button:focus{border:1px solid #aaaaaa;outline:none}.qs_checkbox{cursor:pointer;display:inline}.qs_checkbox input{position:absolute;left:-99999px}.qs_checkbox span{height:16px;width:100%;display:block;text-indent:20px;background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAALklEQVQ4T2OcOXPmfwYKACPIgLS0NLKMmDVrFsOoAaNhMJoOGBioFwZkZUWoJgApdFaxjUM1YwAAAABJRU5ErkJggg==') no-repeat}.qs_checkbox input:checked+span{background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAvElEQVQ4T63Tyw2EIBAA0OFKBxBL40wDRovAUACcKc1IB1zZDAkG18GYZTmSmafzgTnnMgwchoDWGlJKheGcP3JtnPceCqCUAmttSZznuYtgchsXQrgC+77DNE0kUpPbmBOoJaBOIVQylnqWgAAeKhDve/AN+EaklJBzhhgjWRoJVGTbNjiOowAIret6a+4jYIwpX8aDwLIs74C2D0IIYIyVP6Gm898m9kbVm85ljHUTf16k4VUefkwDrxk+zoUEwCt0GbUAAAAASUVORK5CYII=') no-repeat}.qs_checkbox_label{position:absolute;top:7px;left:30px}.qs_label{margin-bottom:3px;user-select:none;-webkit-user-select:none;cursor:default;font:12px 
sans-serif}.qs_text_input{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;width:100%;padding:0 0 0 5px;height:24px;border:1px inset #ffffff;background-color:#ffffff;color:#000000;font-size:12px}.qs_text_input:focus{outline:none;background:#ffffff;border:1px inset #ffffff}.qs_select{background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAp0lEQVRIS+2SsQ3FIAwF7RVYhA5mgQFhFuhYhJKWL0eKxI8SGylKZ0p4+OBsHGNM+HChAiS7qkgyBKrovaLeOxhjbgtxZ+cFtgelFMg5QwgBvPd/EO5sDbKAlBLUWo/8CjmL075zDmKMj6rEKbpCqBL9aqc4ZUQAhVbInBMQUXz5Vg/WfxOktXZsWWtZLds9uIqlqaH1NFV3jdhSJA47E1CAaE8ViYp+wGiWMZ/T+cgAAAAASUVORK5CYII=') no-repeat right #f6f6f6;-webkit-appearance:none;-moz-appearance:none;appearance:none;color:#000000;width:100%;height:24px;border:1px solid #aaaaaa;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;padding:0 5px;-moz-outline:none;font-size:14px}.qs_select option{font-size:14px}.qs_select::-ms-expand{display:none}.qs_select:focus{outline:none}.qs_number{height:24px}.qs_image{width:100%}.qs_progress{width:100%;height:15px;background-color:#cccccc;border:none;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.qs_progress_value{height:100%;background-color:#999999}.qs_textarea{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;resize:vertical;width:100%;padding:3px 5px;border:1px inset #ffffff;background-color:#ffffff;color:#000000;font-size:12px}.qs_textarea:focus{outline:none;background:#ffffff;border:1px inset #ffffff}.qs_color{position:absolute;left:-999999px}.qs_color_label{width:100%;height:20px;display:block;border:1px solid #aaaaaa;cursor:pointer;padding:0 0 0 5px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.qs_file_chooser{position:absolute;left:-999999px}.qs_file_chooser_label{background-color:#f6f6f6;color:#000000;height:30px;border:1px solid #aaaaaa;font:12px 
sans-serif;width:100%;display:block;cursor:pointer;padding:7px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;white-space:nowrap;overflow:hidden;text-overflow:ellipsis} -------------------------------------------------------------------------------- /js/libs/quicksettings/quicksettings_tiny.min.css: -------------------------------------------------------------------------------- 1 | .qs_main{background-color:#dddddd;text-align:left;position:absolute;width:160px;font:10px sans-serif;box-shadow:5px 5px 8px rgba(0,0,0,0.35);user-select:none;-webkit-user-select:none;color:#000000;border:none}.qs_content{background-color:#cccccc;overflow-y:auto}.qs_title_bar{background-color:#eeeeee;user-select:none;-webkit-user-select:none;cursor:pointer;padding:5px;font-weight:bold;border:none;color:#000000}.qs_container{margin:2px;padding:2px;background-color:#eeeeee;border:none;position:relative}.qs_container_selected{border:none;background-color:#ffffff}.qs_range{-webkit-appearance:none;-moz-appearance:none;width:100%;height:14px;padding:0;margin:0;background-color:transparent;border:none;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.qs_range:focus{outline:none;border:none}.qs_range::-webkit-slider-runnable-track{width:100%;height:12px;cursor:pointer;background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range:focus::-webkit-slider-runnable-track{background:#cccccc}.qs_range::-webkit-slider-thumb{-webkit-appearance:none;height:12px;width:12px;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;background:#999999;cursor:pointer;margin-top:0}.qs_range::-moz-range-track{width:100%;height:12px;cursor:pointer;background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range::-moz-range-thumb{height:12px;width:12px;border:none;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;background:#999999;cursor:pointer}.qs_range::-ms-track{width:100%;height:12px
;cursor:pointer;visibility:hidden;background:transparent}.qs_range::-ms-thumb{height:12px;width:12px;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;background:#999999;cursor:pointer;border:none}.qs_range::-ms-fill-lower{background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range:focus::-ms-fill-lower{background:#cccccc}.qs_range::-ms-fill-upper{background:#cccccc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.qs_range:focus::-ms-fill-upper{background:#cccccc}.qs_button{background-color:#f6f6f6;color:#000000;height:20px;border:1px solid #aaaaaa;font:10px sans-serif}.qs_button:active{background-color:#ffffff;border:1px solid #aaaaaa}.qs_button:focus{border:1px solid #aaaaaa;outline:none}.qs_checkbox{cursor:pointer;display:inline}.qs_checkbox input{position:absolute;left:-99999px}.qs_checkbox span{height:16px;width:100%;display:block;text-indent:20px;background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAALklEQVQ4T2OcOXPmfwYKACPIgLS0NLKMmDVrFsOoAaNhMJoOGBioFwZkZUWoJgApdFaxjUM1YwAAAABJRU5ErkJggg==') no-repeat}.qs_checkbox input:checked+span{background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAvElEQVQ4T63Tyw2EIBAA0OFKBxBL40wDRovAUACcKc1IB1zZDAkG18GYZTmSmafzgTnnMgwchoDWGlJKheGcP3JtnPceCqCUAmttSZznuYtgchsXQrgC+77DNE0kUpPbmBOoJaBOIVQylnqWgAAeKhDve/AN+EaklJBzhhgjWRoJVGTbNjiOowAIret6a+4jYIwpX8aDwLIs74C2D0IIYIyVP6Gm898m9kbVm85ljHUTf16k4VUefkwDrxk+zoUEwCt0GbUAAAAASUVORK5CYII=') no-repeat}.qs_checkbox_label{position:absolute;top:5px;left:24px}.qs_label{margin-bottom:3px;user-select:none;-webkit-user-select:none;cursor:default;font:10px sans-serif}.qs_text_input{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;width:100%;padding:0 0 0 5px;height:18px;border:1px inset #ffffff;background-color:#ffffff;color:#000000;font-size:10px}.qs_text_input:focus{outline:none;background:#ffffff;border:1px inset 
#ffffff}.qs_select{background:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAp0lEQVRIS+2SsQ3FIAwF7RVYhA5mgQFhFuhYhJKWL0eKxI8SGylKZ0p4+OBsHGNM+HChAiS7qkgyBKrovaLeOxhjbgtxZ+cFtgelFMg5QwgBvPd/EO5sDbKAlBLUWo/8CjmL075zDmKMj6rEKbpCqBL9aqc4ZUQAhVbInBMQUXz5Vg/WfxOktXZsWWtZLds9uIqlqaH1NFV3jdhSJA47E1CAaE8ViYp+wGiWMZ/T+cgAAAAASUVORK5CYII=') no-repeat right #f6f6f6;-webkit-appearance:none;-moz-appearance:none;appearance:none;color:#000000;width:100%;height:18px;border:1px solid #aaaaaa;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;padding:0 5px;-moz-outline:none;font-size:12px}.qs_select option{font-size:12px}.qs_select::-ms-expand{display:none}.qs_select:focus{outline:none}.qs_number{height:18px}.qs_image{width:100%}.qs_progress{width:100%;height:12px;background-color:#cccccc;border:none;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.qs_progress_value{height:100%;background-color:#999999}.qs_textarea{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;resize:vertical;width:100%;padding:3px 5px;border:1px inset #ffffff;background-color:#ffffff;color:#000000;font-size:10px}.qs_textarea:focus{outline:none;background:#ffffff;border:1px inset #ffffff}.qs_color{position:absolute;left:-999999px}.qs_color_label{width:100%;height:20px;display:block;border:1px solid #aaaaaa;cursor:pointer;padding:0 0 0 5px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.qs_file_chooser{position:absolute;left:-999999px}.qs_file_chooser_label{background-color:#f6f6f6;color:#000000;height:20px;border:1px solid #aaaaaa;font:10px sans-serif;width:100%;display:block;cursor:pointer;padding:3px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;white-space:nowrap;overflow:hidden;text-overflow:ellipsis} -------------------------------------------------------------------------------- /js/libs/threejs/Detector.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | * @author mr.doob / http://mrdoob.com/ 4 | */ 5 | 6 | var Detector = { 7 | 8 | canvas: !! window.CanvasRenderingContext2D, 9 | webgl: ( function () { 10 | 11 | try { 12 | 13 | var canvas = document.createElement( 'canvas' ); return !! ( window.WebGLRenderingContext && ( canvas.getContext( 'webgl' ) || canvas.getContext( 'experimental-webgl' ) ) ); 14 | 15 | } catch ( e ) { 16 | 17 | return false; 18 | 19 | } 20 | 21 | } )(), 22 | workers: !! window.Worker, 23 | fileapi: window.File && window.FileReader && window.FileList && window.Blob, 24 | 25 | getWebGLErrorMessage: function () { 26 | 27 | var element = document.createElement( 'div' ); 28 | element.id = 'webgl-error-message'; 29 | element.style.fontFamily = 'monospace'; 30 | element.style.fontSize = '13px'; 31 | element.style.fontWeight = 'normal'; 32 | element.style.textAlign = 'center'; 33 | element.style.background = '#fff'; 34 | element.style.color = '#000'; 35 | element.style.padding = '1.5em'; 36 | element.style.width = '400px'; 37 | element.style.margin = '5em auto 0'; 38 | 39 | if ( ! this.webgl ) { 40 | 41 | element.innerHTML = window.WebGLRenderingContext ? [ 42 | 'Your graphics card does not seem to support WebGL.
', 43 | 'Find out how to get it here.' 44 | ].join( '\n' ) : [ 45 | 'Your browser does not seem to support WebGL.
', 46 | 'Find out how to get it here.' 47 | ].join( '\n' ); 48 | 49 | } 50 | 51 | return element; 52 | 53 | }, 54 | 55 | addGetWebGLMessage: function ( parameters ) { 56 | 57 | var parent, id, element; 58 | 59 | parameters = parameters || {}; 60 | 61 | parent = parameters.parent !== undefined ? parameters.parent : document.body; 62 | id = parameters.id !== undefined ? parameters.id : 'oldie'; 63 | 64 | element = Detector.getWebGLErrorMessage(); 65 | element.id = id; 66 | 67 | parent.appendChild( element ); 68 | 69 | } 70 | 71 | }; 72 | 73 | // browserify support 74 | if ( typeof module === 'object' ) { 75 | 76 | module.exports = Detector; 77 | 78 | } 79 | -------------------------------------------------------------------------------- /js/utils/BRFv4DOMUtils.js: -------------------------------------------------------------------------------- 1 | // Some helpers to handle the HTML DOM: 2 | // 3 | // updateLayout(width, height) 4 | // updateHeadline(text) 5 | // updateCodeSnippet(text) 6 | // 7 | // getElement(elementId) 8 | // updateElementSize(element, width, height, whatToUpdate) 9 | // addElement(element, parent) 10 | // 11 | // createElement(htmlTag, id, width, height, parent) 12 | // createDiv(id, parent); 13 | // createCanvas(id, width, height, parent) 14 | // createVideo(id, width, height, parent) 15 | 16 | (function () { 17 | "use strict"; 18 | 19 | var dom = brfv4Example.dom; 20 | 21 | dom.stageWidth = 640; 22 | dom.stageHeight = 480; 23 | 24 | dom.updateLayout = function(width, height) { 25 | 26 | // update resolution, video size, canvas sizes etc. 
27 | 28 | dom.stageWidth = width; 29 | dom.stageHeight = height; 30 | 31 | var getElement = dom.getElement; 32 | var updateElementSize = dom.updateElementSize; 33 | 34 | updateElementSize(getElement("_content"), width, height, 0); 35 | updateElementSize(getElement("_drawing"), width, height, 1); 36 | updateElementSize(getElement("_faceSub"), width, height, 1); 37 | updateElementSize(getElement("_t3d"), width, height, 1); 38 | updateElementSize(getElement("_f3d"), width, height, 1); 39 | updateElementSize(getElement("_webcam"), width, height, 1); 40 | updateElementSize(getElement("_imageData"), width, height, 1); 41 | 42 | var subline = getElement("_subline"); 43 | if(subline) subline.style.top = (height + 10) + "px"; 44 | 45 | var highlight = getElement("_highlight"); 46 | if(highlight) highlight.style.top = (height + 45) + "px"; 47 | }; 48 | 49 | dom.updateHeadline = function(text) { 50 | 51 | var subline = dom.getElement("_subline"); 52 | if(subline) { 53 | while(text.indexOf("\n") >= 0) { 54 | text = text.replace("\n", "
"); 55 | } 56 | subline.innerHTML = "" + text + ""; 57 | } 58 | }; 59 | 60 | dom.updateCodeSnippet = function(text) { 61 | 62 | var gist = dom.getElement("_gist"); 63 | if(gist && hljs) { 64 | 65 | var lines = text.split("\n"); 66 | 67 | var i = 0; 68 | var l = lines.length; 69 | 70 | for(; i < l; i++) { 71 | 72 | var line = lines[i]; 73 | 74 | while(line.indexOf(" ") >= 0) { 75 | 76 | var k = line.indexOf(" "); 77 | var repStr = ""; 78 | 79 | k = 4 - (k % 4); 80 | 81 | while(k-- > 0) { 82 | repStr += " "; 83 | } 84 | 85 | line = line.replace(" ", repStr); 86 | } 87 | 88 | lines[i] = line; 89 | } 90 | 91 | text = lines.join("\n"); 92 | 93 | gist.innerHTML = text; 94 | hljs.highlightBlock(gist); 95 | } 96 | }; 97 | 98 | dom.getElement = function(elementId) { 99 | 100 | var element = dom[elementId]; 101 | if(!element) { 102 | 103 | element = document.getElementById(elementId); 104 | if(element) { 105 | dom[elementId] = element; 106 | } 107 | } 108 | 109 | return element; 110 | }; 111 | 112 | dom.updateElementSize = function(element, width, height, whatToUpdate) { 113 | 114 | if(element) { 115 | 116 | if(whatToUpdate === 0) { // div 117 | element.style.width = width + "px"; 118 | element.style.height = height + "px"; 119 | } else if(whatToUpdate === 1) { // canvas, video 120 | element.width = width; 121 | element.height = height; 122 | } else if(whatToUpdate === 2) { // utility class instance 123 | element.updateLayout(width, height); 124 | } 125 | } 126 | }; 127 | 128 | dom.addElement = function(element, parent) { 129 | 130 | if(element) { 131 | 132 | var addToDom = true; 133 | 134 | if(parent) { 135 | 136 | var p = document.getElementById(parent); 137 | if(p) { 138 | p.appendChild(element); 139 | addToDom = false; 140 | } 141 | } 142 | 143 | if(addToDom) { 144 | document.body.appendChild(element); 145 | } 146 | } 147 | }; 148 | 149 | dom.createElement = function(htmlTag, id, width, height, parent) { 150 | 151 | var tag = null; 152 | 153 | if(htmlTag === "canvas" || 
htmlTag === "video" || htmlTag === "div") { 154 | 155 | tag = document.createElement(htmlTag); 156 | tag.id = id; 157 | 158 | if(width !== 0 && height !== 0) { 159 | dom.updateElementSize(tag, width, height, (htmlTag !== "div") ? 1 : 0); 160 | } 161 | 162 | dom.addElement(tag, parent); 163 | } 164 | 165 | return tag; 166 | }; 167 | 168 | dom.createDiv = function(id, parent) { 169 | 170 | var tag = document.getElementById(id); 171 | 172 | if(!tag) { // Not found? Create it. 173 | tag = dom.createElement("div", id, 0, 0, parent); 174 | } 175 | 176 | return tag; 177 | }; 178 | 179 | dom.createCanvas = function(id, width, height, parent) { 180 | 181 | var tag = document.getElementById(id); 182 | 183 | if(!tag) { // Not found? Create it. 184 | tag = dom.createElement("canvas", id, width, height, parent); 185 | } else { // Found? Then update size. 186 | dom.updateElementSize(tag, width, height, 1); 187 | } 188 | 189 | return tag; 190 | }; 191 | 192 | dom.createVideo = function(id, width, height, parent) { 193 | 194 | var tag = document.getElementById(id); 195 | 196 | if(!tag) { // Not found? Create it. 197 | tag = dom.createElement("video", id, width, height, parent); 198 | } 199 | 200 | return tag; 201 | }; 202 | })(); -------------------------------------------------------------------------------- /js/utils/BRFv4DownloadChooser.js: -------------------------------------------------------------------------------- 1 | (function() { 2 | "use strict"; 3 | 4 | if(typeof QuickSettings === "undefined") return; 5 | 6 | if(!brfv4Example.gui.downloadChooser) { 7 | 8 | QuickSettings.useExtStyleSheet(); 9 | 10 | brfv4Example.gui.downloadChooser = QuickSettings.create( 11 | 2, 270, "Useful links", brfv4Example.dom.createDiv("_settingsRight")) 12 | .setWidth(250) 13 | .addHTML("Github", "The BRFv4 example packages are available on our Github page:

" + 14 | "Github
" + 15 | "What can I do with it?
" + 16 | "Docs / API reference

" 17 | ) 18 | .addHTML("Contact", "" + 19 | "Email us for commercial license
" + 20 | "Twitter

" 21 | ) 22 | } 23 | })(); -------------------------------------------------------------------------------- /js/utils/BRFv4Drawing3DUtils_ThreeJS.js: -------------------------------------------------------------------------------- 1 | // Utils to put a 3D object on top of a face using ThreeJS (www.threejs.org) 2 | 3 | (function() { 4 | "use strict"; 5 | 6 | var t3d = brfv4Example.drawing3d.t3d; // ThreeJS namespace. 7 | var dom = brfv4Example.dom; // ... e.g. the DOM handling. 8 | 9 | if(!t3d) { 10 | brfv4Example.drawing3d.t3d = {}; 11 | } 12 | 13 | t3d.setup = function(canvas) { 14 | 15 | t3d.stage = canvas; 16 | t3d.scene = new THREE.Scene(); 17 | 18 | t3d.camera = new THREE.OrthographicCamera( 19 | t3d.stage.width / -2, t3d.stage.width / 2, 20 | t3d.stage.height / 2, t3d.stage.height / -2, 50, 10000 ); 21 | 22 | t3d.renderer = new THREE.WebGLRenderer( 23 | {alpha: true, canvas: t3d.stage, antialias: true}); 24 | 25 | t3d.pointLight = new THREE.PointLight(0xffffff, 0.75, 10000); 26 | t3d.baseNodes = []; 27 | t3d.modelZ = 2000; 28 | 29 | t3d.renderer.setClearColor(0x000000, 0); // the default 30 | t3d.renderer.setPixelRatio(window.devicePixelRatio); 31 | t3d.renderer.setSize(t3d.stage.width, t3d.stage.height, true); 32 | 33 | t3d.scene.add(new THREE.AmbientLight(0xffffff, 0.65)); 34 | t3d.scene.add(t3d.pointLight); 35 | 36 | t3d.occlusionObjects = []; 37 | 38 | t3d.renderWidth = 0; 39 | t3d.renderHeight = 0; 40 | 41 | t3d.updateLayout(dom.stageWidth, dom.stageHeight); 42 | }; 43 | 44 | t3d.updateLayout = function(width, height) { 45 | 46 | t3d.renderWidth = width; 47 | t3d.renderHeight = height; 48 | 49 | t3d.renderer.setSize(width, height, true); 50 | 51 | t3d.camera.left = width / -2; 52 | t3d.camera.right = width / 2; 53 | t3d.camera.top = height / 2; 54 | t3d.camera.bottom = height / -2; 55 | 56 | t3d.camera.position.set(0, 0, 0); 57 | t3d.camera.lookAt(new THREE.Vector3(0, 0, 1)); 58 | t3d.camera.updateProjectionMatrix(); 59 | }; 60 | 61 | t3d.update = 
function(index, face, show) { 62 | 63 | if(index >= t3d.baseNodes.length) { 64 | return; 65 | } 66 | 67 | var baseNode = t3d.baseNodes[index]; 68 | if(!baseNode) return; 69 | 70 | if (show) { 71 | 72 | var rx = (THREE.Math.radToDeg(-face.rotationX)); 73 | var ry = (THREE.Math.radToDeg(-face.rotationY)); 74 | var rz = (THREE.Math.radToDeg( face.rotationZ)); 75 | 76 | var s = (face.scale / 180); 77 | var x = -(face.points[27].x - (t3d.renderWidth * 0.5)); 78 | var y = -(face.points[27].y - (t3d.renderHeight * 0.5)) 79 | - ((Math.abs(ry) / 45.0) * -2.0) 80 | + ((rx < 0) ? (rx * 0.20) : 0.0); 81 | var z = t3d.modelZ; 82 | 83 | rx = rx - 4 * (Math.abs(ry) / 45.0); 84 | rz = rz - ry * 0.066 * (Math.abs(rx) / 20.0); 85 | ry *= 0.9; 86 | 87 | baseNode.visible = true; 88 | baseNode.position.set(x, y, z); 89 | baseNode.scale.set(s, s, s); 90 | baseNode.rotation.set( 91 | THREE.Math.degToRad(rx), 92 | THREE.Math.degToRad(ry), 93 | THREE.Math.degToRad(rz) 94 | ); 95 | } else { 96 | baseNode.visible = false; // Hide the 3d object, if no face was tracked. 97 | } 98 | }; 99 | 100 | t3d.render = function() { 101 | t3d.renderer.render(t3d.scene, t3d.camera); // Render the threejs scene. 
102 | }; 103 | 104 | t3d.addBaseNodes = function(maxFaces) { 105 | 106 | var containers = t3d.baseNodes; 107 | var i; 108 | var group; 109 | 110 | for(i = containers.length; i < maxFaces; i++) { 111 | group = new THREE.Group(); 112 | group.visible = false; 113 | containers.push(group); 114 | t3d.scene.add(group); 115 | } 116 | 117 | for(i = containers.length - 1; i > maxFaces; i--) { 118 | group = containers[k]; 119 | t3d.scene.remove(group); 120 | } 121 | }; 122 | 123 | t3d.loadOcclusionHead = function(url, maxFaces) { 124 | 125 | t3d.addBaseNodes(maxFaces); 126 | 127 | var containers = t3d.baseNodes; 128 | var loader = new THREE.ObjectLoader(); 129 | 130 | loader.load(url, (function(model) { 131 | // t3d.model = model; 132 | 133 | for(var k = 0; k < containers.length; k++) { 134 | var mesh = model.clone(); 135 | mesh.position.set(model.position.x, model.position.y, model.position.z); 136 | mesh.material.colorWrite = false; 137 | mesh.renderOrder = 0; 138 | 139 | t3d.occlusionObjects.push(mesh); 140 | containers[k].add(mesh); 141 | } 142 | 143 | t3d.render(); 144 | 145 | })); 146 | }; 147 | 148 | t3d.loadModel = function(url, maxFaces) { 149 | 150 | t3d.addBaseNodes(maxFaces); 151 | t3d.updateLayout(dom.stageWidth, dom.stageHeight); 152 | 153 | var containers = t3d.baseNodes; 154 | var loader = new THREE.ObjectLoader(); 155 | 156 | loader.load(url, (function(model) { 157 | // t3d.model = model; 158 | 159 | for(var k = 0; k < containers.length; k++) { 160 | var mesh = model.clone(); 161 | mesh.position.set(model.position.x, model.position.y, model.position.z); 162 | mesh.renderOrder = 2; 163 | containers[k].add(mesh); 164 | } 165 | 166 | t3d.render(); 167 | 168 | })); 169 | }; 170 | 171 | t3d.showOcclusionObjects = function(showThem) { 172 | 173 | for(var k = 0; k < t3d.occlusionObjects.length; k++) { 174 | var mesh = t3d.occlusionObjects[k]; 175 | mesh.material.colorWrite = showThem; 176 | } 177 | }; 178 | 179 | t3d.hideAll = function() { 180 | 181 | for(var k = 
0; k < t3d.baseNodes.length; k++) { 182 | var baseNode = t3d.baseNodes[k]; 183 | baseNode.visible = false; 184 | } 185 | t3d.render(); 186 | }; 187 | 188 | t3d.removeAll = function() { 189 | for(var k = 0; k < t3d.baseNodes.length; k++) { 190 | var baseNode = t3d.baseNodes[k]; 191 | for(var j = baseNode.children.length - 1; j >= 0; j--) { 192 | baseNode.remove(baseNode.children[j]); 193 | } 194 | } 195 | t3d.render(); 196 | }; 197 | 198 | if(t3d.setup && !t3d.stage) { 199 | t3d.setup(dom.getElement("_t3d")); 200 | } 201 | })(); 202 | -------------------------------------------------------------------------------- /js/utils/BRFv4DrawingUtils_CreateJS.js: -------------------------------------------------------------------------------- 1 | // DrawingUtils using CreateJS. This sets up a drawing stage 2 | // and provides function to draw rectangles, points and triangles, as 3 | // well as face textures. 4 | 5 | (function() { 6 | "use strict"; 7 | 8 | var drawing = brfv4Example.drawing; 9 | 10 | var defaultValue = function(arg, val) { 11 | return typeof arg !== 'undefined' ? arg : val; 12 | }; 13 | 14 | drawing.setup = function(canvas, faceTextures, fps) { 15 | 16 | drawing.stage = new createjs.Stage(canvas); 17 | drawing.faceTextures = faceTextures; 18 | 19 | drawing.container = new createjs.Container(); 20 | drawing.clickArea = new createjs.Shape(); 21 | drawing.drawSprite = new createjs.Shape(); 22 | drawing.imageContainer = new createjs.Container(); 23 | drawing.draw = drawing.drawSprite.graphics; 24 | drawing.onUpdateCallback = null; 25 | 26 | drawing.container.addChild(drawing.drawSprite); 27 | drawing.container.addChild(drawing.imageContainer); 28 | drawing.container.addChild(drawing.clickArea); 29 | drawing.stage.addChild(drawing.container); 30 | 31 | // Usually webcams deliver 30 FPS. 32 | // Or 15 FPS, when it is too dark. 33 | // So a stage FPS of 30 is just fine. 
34 | 35 | if(typeof fps === "undefined") { fps = 30; } 36 | 37 | createjs.Ticker.framerate = fps; 38 | createjs.Ticker.addEventListener("tick", drawing.stage); 39 | }; 40 | 41 | drawing.updateLayout = function(width, height) { 42 | 43 | drawing.clickArea.graphics.clear(); 44 | drawing.clickArea.graphics.beginFill("#ffffff"); 45 | drawing.clickArea.graphics.drawRect(0, 0, width, height); 46 | drawing.clickArea.graphics.endFill(); 47 | drawing.clickArea.alpha = 0.01; // will not be rendered if lower than 0.01 48 | drawing.clickArea.cursor = 'pointer'; 49 | 50 | drawing.stage.canvas.width = width; 51 | drawing.stage.canvas.height = height; 52 | drawing.faceTextures.width = width; 53 | drawing.faceTextures.height = height; 54 | 55 | drawing.stage.update(); 56 | }; 57 | 58 | drawing.setUpdateCallback = function(updateCallback) { 59 | 60 | // Once BRF and Camera are ready we need to setup an onEnterFrame event. 61 | // The Ticker helps to get 30 FPS. 62 | 63 | if(drawing.onUpdateCallback != null) { 64 | drawing.stage.removeEventListener("tick", drawing.onUpdateCallback); 65 | drawing.onUpdateCallback = null; 66 | } 67 | 68 | if(updateCallback != null) { 69 | drawing.onUpdateCallback = updateCallback; 70 | drawing.stage.addEventListener("tick", drawing.onUpdateCallback); 71 | } 72 | }; 73 | 74 | // The functions following below are drawing helpers 75 | // to draw points, rectangles, triangles, textures etc. 
76 | 77 | drawing.clear = function() { 78 | drawing.draw.clear(); 79 | 80 | if(drawing.faceTextures) { 81 | var ctx = drawing.faceTextures.getContext("2d"); 82 | ctx.clearRect(0, 0, drawing.faceTextures.width, drawing.faceTextures.height); 83 | } 84 | }; 85 | 86 | drawing.getColor = function(color, alpha) { 87 | return createjs.Graphics.getRGB((color >> 16) & 0xff, (color >> 8) & 0xff, (color) & 0xff, alpha); 88 | }; 89 | var getColor = drawing.getColor; 90 | 91 | drawing.drawVertices = function(vertices, radius, clear, fillColor, fillAlpha) { 92 | clear = defaultValue(clear, false); 93 | radius = defaultValue(radius, 2.0); 94 | fillColor = defaultValue(fillColor, 0x00a0ff); 95 | fillAlpha = defaultValue(fillAlpha, 1.00); 96 | 97 | fillColor = getColor(fillColor, fillAlpha); 98 | 99 | var g = drawing.draw; 100 | 101 | clear && g.clear(); 102 | 103 | var i = 0; 104 | var l = vertices.length; 105 | 106 | for(; i < l;) { 107 | var x = vertices[i++]; 108 | var y = vertices[i++]; 109 | 110 | g.beginFill(fillColor); 111 | g.drawCircle(x, y, radius); 112 | g.endFill(); 113 | } 114 | }; 115 | 116 | drawing.drawTriangles = function(vertices, triangles, clear, lineThickness, lineColor, lineAlpha) { 117 | clear = defaultValue(clear, false); 118 | lineThickness = defaultValue(lineThickness, 0.5); 119 | lineColor = defaultValue(lineColor, 0x00a0ff); 120 | lineAlpha = defaultValue(lineAlpha, 0.85); 121 | 122 | lineColor = getColor(lineColor, lineAlpha); 123 | 124 | var g = drawing.draw; 125 | 126 | clear && g.clear(); 127 | 128 | var i = 0; 129 | var l = triangles.length; 130 | 131 | while(i < l) { 132 | var ti0 = triangles[i]; 133 | var ti1 = triangles[i + 1]; 134 | var ti2 = triangles[i + 2]; 135 | 136 | var x0 = vertices[ti0 * 2]; 137 | var y0 = vertices[ti0 * 2 + 1]; 138 | var x1 = vertices[ti1 * 2]; 139 | var y1 = vertices[ti1 * 2 + 1]; 140 | var x2 = vertices[ti2 * 2]; 141 | var y2 = vertices[ti2 * 2 + 1]; 142 | 143 | g.setStrokeStyle(lineThickness); 144 | 
g.beginStroke(lineColor); 145 | 146 | g.moveTo(x0, y0); 147 | g.lineTo(x1, y1); 148 | g.lineTo(x2, y2); 149 | g.lineTo(x0, y0); 150 | 151 | g.endStroke(); 152 | 153 | i+=3; 154 | } 155 | }; 156 | 157 | drawing.fillTriangles = function(vertices, triangles, clear, fillColor, fillAlpha) { 158 | clear = defaultValue(clear, false); 159 | fillColor = defaultValue(fillColor, 0x00a0ff); 160 | fillAlpha = defaultValue(fillAlpha, 0.85); 161 | 162 | fillColor = getColor(fillColor, fillAlpha); 163 | 164 | var g = drawing.draw; 165 | 166 | clear && g.clear(); 167 | 168 | var i = 0; 169 | var l = triangles.length; 170 | 171 | while(i < l) { 172 | var ti0 = triangles[i]; 173 | var ti1 = triangles[i + 1]; 174 | var ti2 = triangles[i + 2]; 175 | 176 | var x0 = vertices[ti0 * 2]; 177 | var y0 = vertices[ti0 * 2 + 1]; 178 | var x1 = vertices[ti1 * 2]; 179 | var y1 = vertices[ti1 * 2 + 1]; 180 | var x2 = vertices[ti2 * 2]; 181 | var y2 = vertices[ti2 * 2 + 1]; 182 | 183 | g.beginFill(fillColor); 184 | 185 | g.moveTo(x0, y0); 186 | g.lineTo(x1, y1); 187 | g.lineTo(x2, y2); 188 | g.lineTo(x0, y0); 189 | 190 | g.endFill(); 191 | 192 | i+=3; 193 | } 194 | }; 195 | 196 | drawing.drawTexture = function(vertices, triangles, uvData, texture) { 197 | 198 | // Ported from: http://stackoverflow.com/questions/4774172/image-manipulation-and-texture-mapping-using-html5-canvas 199 | 200 | if(drawing.faceTextures) { 201 | 202 | var ctx = drawing.faceTextures.getContext("2d"); 203 | 204 | var i = 0; 205 | var l = triangles.length; 206 | 207 | for(; i < l; i += 3) { 208 | 209 | var i0 = triangles[i]; 210 | var i1 = triangles[i + 1]; 211 | var i2 = triangles[i + 2]; 212 | 213 | var x0 = vertices[i0 * 2]; 214 | var y0 = vertices[i0 * 2 + 1]; 215 | var x1 = vertices[i1 * 2]; 216 | var y1 = vertices[i1 * 2 + 1]; 217 | var x2 = vertices[i2 * 2]; 218 | var y2 = vertices[i2 * 2 + 1]; 219 | 220 | var u0 = uvData[i0 * 2] * texture.width; 221 | var v0 = uvData[i0 * 2 + 1] * texture.height; 222 | var u1 = 
uvData[i1 * 2] * texture.width; 223 | var v1 = uvData[i1 * 2 + 1] * texture.height; 224 | var u2 = uvData[i2 * 2] * texture.width; 225 | var v2 = uvData[i2 * 2 + 1] * texture.height; 226 | 227 | // Set clipping area so that only pixels inside the triangle will 228 | // be affected by the image drawing operation 229 | ctx.save(); ctx.beginPath(); ctx.moveTo(x0, y0); ctx.lineTo(x1, y1); 230 | ctx.lineTo(x2, y2); ctx.closePath(); ctx.clip(); 231 | 232 | // Compute matrix transform 233 | var delta = u0*v1 + v0*u2 + u1*v2 - v1*u2 - v0*u1 - u0*v2; 234 | var delta_a = x0*v1 + v0*x2 + x1*v2 - v1*x2 - v0*x1 - x0*v2; 235 | var delta_b = u0*x1 + x0*u2 + u1*x2 - x1*u2 - x0*u1 - u0*x2; 236 | var delta_c = u0*v1*x2 + v0*x1*u2 + x0*u1*v2 - x0*v1*u2 - v0*u1*x2 - u0*x1*v2; 237 | var delta_d = y0*v1 + v0*y2 + y1*v2 - v1*y2 - v0*y1 - y0*v2; 238 | var delta_e = u0*y1 + y0*u2 + u1*y2 - y1*u2 - y0*u1 - u0*y2; 239 | var delta_f = u0*v1*y2 + v0*y1*u2 + y0*u1*v2 - y0*v1*u2 - v0*u1*y2 - u0*y1*v2; 240 | 241 | // Draw the transformed image 242 | ctx.setTransform( 243 | delta_a/delta, delta_d/delta, 244 | delta_b/delta, delta_e/delta, 245 | delta_c/delta, delta_f/delta); 246 | 247 | ctx.drawImage(texture, 0, 0); 248 | ctx.restore(); 249 | } 250 | } 251 | }; 252 | 253 | drawing.drawRect = function(rect, clear, lineThickness, lineColor, lineAlpha) { 254 | clear = defaultValue(clear, false); 255 | lineThickness = defaultValue(lineThickness, 1.0); 256 | lineColor = defaultValue(lineColor, 0x00a0ff); 257 | lineAlpha = defaultValue(lineAlpha, 1.0); 258 | 259 | lineColor = getColor(lineColor, lineAlpha); 260 | 261 | var g = drawing.draw; 262 | 263 | clear && g.clear(); 264 | 265 | g.setStrokeStyle(lineThickness); 266 | g.beginStroke(lineColor); 267 | g.drawRect(rect.x, rect.y, rect.width, rect.height); 268 | g.endStroke(); 269 | }; 270 | 271 | drawing.drawRects = function(rects, clear, lineThickness, lineColor, lineAlpha) { 272 | clear = defaultValue(clear, false); 273 | lineThickness = 
defaultValue(lineThickness, 1.0); 274 | lineColor = defaultValue(lineColor, 0x00a0ff); 275 | lineAlpha = defaultValue(lineAlpha, 1.0); 276 | 277 | lineColor = getColor(lineColor, lineAlpha); 278 | 279 | var g = drawing.draw; 280 | 281 | clear && g.clear(); 282 | 283 | g.setStrokeStyle(lineThickness); 284 | g.beginStroke(lineColor); 285 | 286 | var i = 0; 287 | var l = rects.length; 288 | var rect; 289 | 290 | for(; i < l; i++) { 291 | rect = rects[i]; 292 | g.drawRect(rect.x, rect.y, rect.width, rect.height); 293 | } 294 | 295 | g.endStroke(); 296 | }; 297 | 298 | drawing.drawPoint = function(point, radius, clear, fillColor, fillAlpha) { 299 | clear = defaultValue(clear, false); 300 | radius = defaultValue(radius, 2.0); 301 | fillColor = defaultValue(fillColor, 0x00a0ff); 302 | fillAlpha = defaultValue(fillAlpha, 1.0); 303 | 304 | fillColor = getColor(fillColor, fillAlpha); 305 | 306 | var g = drawing.draw; 307 | 308 | clear && g.clear(); 309 | 310 | g.beginFill(fillColor); 311 | g.drawCircle(point.x, point.y, radius); 312 | g.endFill(); 313 | }; 314 | 315 | drawing.drawPoints = function(points, radius, clear, fillColor, fillAlpha) { 316 | clear = defaultValue(clear, false); 317 | radius = defaultValue(radius, 2.0); 318 | fillColor = defaultValue(fillColor, 0x00a0ff); 319 | fillAlpha = defaultValue(fillAlpha, 1.0); 320 | 321 | fillColor = getColor(fillColor, fillAlpha); 322 | 323 | var g = drawing.draw; 324 | 325 | clear && g.clear(); 326 | 327 | 328 | var i = 0; 329 | var l = points.length; 330 | var point; 331 | 332 | for(; i < l; i++) { 333 | point = points[i]; 334 | 335 | g.beginFill(fillColor); 336 | g.drawCircle(point.x, point.y, radius); 337 | g.endFill(); 338 | } 339 | }; 340 | })(); 341 | -------------------------------------------------------------------------------- /js/utils/BRFv4ExampleChooser.js: -------------------------------------------------------------------------------- 1 | (function() { 2 | "use strict"; 3 | 4 | if(typeof QuickSettings === 
"undefined") return; 5 | 6 | var urlMap = { 7 | 8 | "+++ basic - face detection +++": null, 9 | 10 | "basic - face detection - detect in whole image": "js/examples/face_detection/detect_in_whole_image.js", 11 | "basic - face detection - detect in center": "js/examples/face_detection/detect_in_center.js", 12 | "basic - face detection - detect smaller faces": "js/examples/face_detection/detect_smaller_faces.js", 13 | "basic - face detection - detect larger faces": "js/examples/face_detection/detect_larger_faces.js", 14 | 15 | "+++ basic - face tracking +++": null, 16 | 17 | "basic - face tracking - track single face": "js/examples/face_tracking/track_single_face.js", 18 | "basic - face tracking - track multiple faces": "js/examples/face_tracking/track_multiple_faces.js", 19 | "basic - face tracking - candide overlay": "js/examples/face_tracking/candide_overlay.js", 20 | 21 | "+++ basic - point tracking +++": null, 22 | 23 | "basic - point tracking - track multiple points": "js/examples/point_tracking/track_multiple_points.js", 24 | "basic - point tracking - track points and face": "js/examples/point_tracking/track_points_and_face.js", 25 | 26 | "+++ intermediate - face tracking +++": null, 27 | 28 | "intermediate - face tracking - restrict to center": "js/examples/face_tracking/restrict_to_center.js", 29 | "intermediate - face tracking - extended face": ["js/utils/BRFv4ExtendedFace.js", "js/examples/face_tracking/extended_face_shape.js"], 30 | "intermediate - face tracking - smile detection": "js/examples/face_tracking/smile_detection.js", 31 | "intermediate - face tracking - yawn detection": "js/examples/face_tracking/yawn_detection.js", 32 | "intermediate - face tracking - png/mask overlay": "js/examples/face_tracking/png_mask_overlay.js", 33 | "intermediate - face tracking - color libs": "js/examples/face_tracking/color_libs.js", 34 | 35 | "+++ advanced - face tracking +++": null, 36 | 37 | "advanced - face tracking - blink detection": 
"js/examples/face_tracking/blink_detection.js", 38 | "advanced - face tracking - blink detection center": "js/examples/face_tracking/blink_detection_center.js", 39 | "advanced - face tracking - ThreeJS example": "js/examples/face_tracking/ThreeJS_example.js", 40 | "advanced - face tracking - face texture overlay": ["assets/brfv4_face_textures.js", "js/examples/face_tracking/face_texture_overlay.js"], 41 | "advanced - face tracking - face swap (two faces)": "js/examples/face_tracking/face_swap_two_faces.js" 42 | }; 43 | var labels = []; 44 | for (var key in urlMap) { labels.push(key); } // Fill in the labels. 45 | 46 | function onExampleLoaded() { 47 | brfv4Example.reinit(); 48 | } 49 | 50 | var _isFirstSelect = true; 51 | function onExampleChosen(data) { 52 | 53 | if(_isFirstSelect) return; 54 | 55 | var url = urlMap[data.value]; 56 | 57 | if(url) { 58 | if(typeof url === "string") { 59 | brfv4Example.loader.loadExample([url], onExampleLoaded); 60 | } else { 61 | brfv4Example.loader.loadExample(url, onExampleLoaded); 62 | } 63 | } else { 64 | if(data.index >= 0) { 65 | brfv4Example.gui.exampleChooser.setValuesFromJSON({ "_example": data.index + 1}); 66 | } 67 | } 68 | } 69 | 70 | if(!brfv4Example.gui.exampleChooser) { 71 | 72 | QuickSettings.useExtStyleSheet(); 73 | 74 | brfv4Example.gui.exampleChooser = QuickSettings.create( 75 | 2, 2, "Example Chooser", brfv4Example.dom.createDiv("_settingsRight")) 76 | .setWidth(250) 77 | .addHTML("Switch between examples", "Which example do you want to try? 
Use the drop down to choose another example.").hideTitle("Switch between examples") 78 | .addDropDown("_example", labels, onExampleChosen) 79 | .hideTitle("_example") 80 | .setValuesFromJSON({ "_example": 6}); // "basic - face tracking - track single face" 81 | 82 | _isFirstSelect = false; 83 | } 84 | })(); -------------------------------------------------------------------------------- /js/utils/BRFv4ExtendedFace.js: -------------------------------------------------------------------------------- 1 | // This adds 6 points to the 68 landmarks. These points cover the forehead, 2 | // but are not actually tracked, they are just estimated depending on the 68 landmarks. 3 | 4 | (function(lib) { 5 | "use strict"; 6 | 7 | lib.BRFv4ExtendedFace = function() { 8 | 9 | this.vertices = []; 10 | this.triangles = []; 11 | this.points = []; 12 | this.bounds = new lib.Rectangle(0, 0, 0, 0); 13 | 14 | this._tmpPoint0 = new lib.Point(); 15 | this._tmpPoint1 = new lib.Point(); 16 | this._tmpPoint2 = new lib.Point(); 17 | this._tmpPoint3 = new lib.Point(); 18 | this._tmpPoint4 = new lib.Point(); 19 | this._tmpPoint5 = new lib.Point(); 20 | }; 21 | 22 | lib.BRFv4ExtendedFace.prototype.update = function(face) { 23 | 24 | var i, l; 25 | 26 | for(i = this.points.length, l = face.points.length + 6; i < l; ++i) { 27 | this.points[i] = new lib.Point(0.0, 0.0); 28 | } 29 | 30 | this.generateExtendedVertices(face); 31 | this.generateExtendedTriangles(face); 32 | this.updateBounds(); 33 | this.updatePoints(); 34 | }; 35 | 36 | lib.BRFv4ExtendedFace.prototype.generateExtendedVertices = function(face) { 37 | 38 | var v = face.vertices; 39 | var i, l; 40 | 41 | this.vertices.length = 0; 42 | 43 | for(i = 0, l = v.length; i < l; i++) { 44 | this.vertices[i] = v[i]; 45 | } 46 | 47 | this.addUpperForeheadPoints(this.vertices); 48 | }; 49 | 50 | lib.BRFv4ExtendedFace.prototype.generateExtendedTriangles = function(face) { 51 | if(this.triangles.length === 0) { 52 | this.triangles = 
face.triangles.concat(); 53 | this.triangles.push( 54 | 0, 17, 68, 55 | 17, 18, 68, 56 | 18, 19, 69, 57 | 18, 68, 69, 58 | 19, 20, 69, 59 | 20, 23, 71, 60 | 20, 69, 70, 61 | 20, 70, 71, 62 | 23, 24, 72, 63 | 23, 71, 72, 64 | 24, 25, 72, 65 | 25, 26, 73, 66 | 25, 72, 73, 67 | 16, 26, 73 68 | ); 69 | } 70 | }; 71 | 72 | lib.BRFv4ExtendedFace.prototype.updateBounds = function() { 73 | 74 | var minX = 0; 75 | var minY = 0; 76 | var maxX = 9999; 77 | var maxY = 9999; 78 | 79 | var i, l, value; 80 | 81 | for(i = 0, l = this.vertices.length; i < l; i++) { 82 | value = this.vertices[i]; 83 | 84 | if((i % 2) === 0) { 85 | if(value < minX) minX = value; 86 | if(value > maxX) maxX = value; 87 | } else { 88 | if(value < minY) minY = value; 89 | if(value > maxY) maxY = value; 90 | } 91 | } 92 | 93 | this.bounds.x = minX; 94 | this.bounds.y = minY; 95 | this.bounds.width = maxX - minX; 96 | this.bounds.height = maxY - minY; 97 | }; 98 | 99 | lib.BRFv4ExtendedFace.prototype.updatePoints = function() { 100 | 101 | var i, k, l, x, y; 102 | 103 | for(i = 0, k = 0, l = this.points.length; i < l; ++i) { 104 | x = this.vertices[k]; k++; 105 | y = this.vertices[k]; k++; 106 | 107 | this.points[i].x = x; 108 | this.points[i].y = y; 109 | } 110 | }; 111 | 112 | lib.BRFv4ExtendedFace.prototype.addUpperForeheadPoints = function(v) { 113 | 114 | var p0 = this._tmpPoint0; 115 | var p1 = this._tmpPoint1; 116 | var p2 = this._tmpPoint2; 117 | var p3 = this._tmpPoint3; 118 | var p4 = this._tmpPoint4; 119 | var p5 = this._tmpPoint5; 120 | 121 | // base distance 122 | 123 | this.setPoint(v, 33, p0); // nose base 124 | this.setPoint(v, 27, p1); // nose top 125 | var baseDist = this.calcDistance(p0, p1) * 1.5; 126 | 127 | // eyes as base line for orthogonal vector 128 | 129 | this.setPoint(v, 39, p0); // left eye inner corner 130 | this.setPoint(v, 42, p1); // right eye inner corner 131 | 132 | var distEyes = this.calcDistance(p0, p1); 133 | 134 | this.calcMovementVectorOrthogonalCCW(p4, p0, p1, 
baseDist / distEyes); 135 | 136 | // orthogonal line for intersection point calculation 137 | 138 | this.setPoint(v, 27, p2); // nose top 139 | this.applyMovementVector(p3, p2, p4, 10.95); 140 | this.applyMovementVector(p2, p2, p4, -10.95); 141 | 142 | this.calcIntersectionPoint(p5, p2, p3, p0, p1); 143 | 144 | // simple head rotation 145 | 146 | var f = 0.5-this.calcDistance(p0, p5) / distEyes; 147 | 148 | // outer left forehead point 149 | 150 | this.setPoint(v, 0, p5); // top left outline point 151 | var dist = this.calcDistance(p0, p5) * 0.75; 152 | 153 | this.interpolatePoint( p2, p0, p1, (dist / -distEyes)); 154 | this.applyMovementVector( p3, p2, p4, 0.75); 155 | this.addToExtendedVertices( p3); 156 | 157 | // upper four forehead points 158 | 159 | this.interpolatePoint( p2, p0, p1, f - 0.65); 160 | this.applyMovementVector( p3, p2, p4, 1.02); 161 | this.addToExtendedVertices( p3); 162 | 163 | this.interpolatePoint( p2, p0, p1, f/* + 0.0*/); 164 | this.applyMovementVector( p3, p2, p4, 1.10); 165 | this.addToExtendedVertices( p3); 166 | 167 | this.interpolatePoint( p2, p0, p1, f + 1.0); 168 | this.applyMovementVector( p3, p2, p4, 1.10); 169 | this.addToExtendedVertices( p3); 170 | 171 | this.interpolatePoint( p2, p0, p1, f + 1.65); 172 | this.applyMovementVector( p3, p2, p4, 1.02); 173 | this.addToExtendedVertices( p3); 174 | 175 | // outer right forehead point 176 | 177 | this.setPoint(v, 16, p5); // top right outline point 178 | dist = this.calcDistance(p1, p5) * 0.75; 179 | 180 | this.interpolatePoint( p2, p1, p0, (dist / -distEyes)); 181 | this.applyMovementVector( p3, p2, p4, 0.75); 182 | this.addToExtendedVertices( p3); 183 | }; 184 | 185 | lib.BRFv4ExtendedFace.prototype.addToExtendedVertices = function(p) { 186 | this.vertices.push(p.x); 187 | this.vertices.push(p.y); 188 | }; 189 | 190 | lib.BRFv4ExtendedFace.prototype.setPoint = lib.BRFv4PointUtils.setPoint; 191 | lib.BRFv4ExtendedFace.prototype.applyMovementVector = 
lib.BRFv4PointUtils.applyMovementVector; 192 | lib.BRFv4ExtendedFace.prototype.interpolatePoint = lib.BRFv4PointUtils.interpolatePoint; 193 | lib.BRFv4ExtendedFace.prototype.calcMovementVector = lib.BRFv4PointUtils.calcMovementVector; 194 | lib.BRFv4ExtendedFace.prototype.calcMovementVectorOrthogonalCW = lib.BRFv4PointUtils.calcMovementVectorOrthogonalCW; 195 | lib.BRFv4ExtendedFace.prototype.calcMovementVectorOrthogonalCCW = lib.BRFv4PointUtils.calcMovementVectorOrthogonalCCW; 196 | lib.BRFv4ExtendedFace.prototype.calcIntersectionPoint = lib.BRFv4PointUtils.calcIntersectionPoint; 197 | lib.BRFv4ExtendedFace.prototype.calcDistance = lib.BRFv4PointUtils.calcDistance; 198 | 199 | })(brfv4); -------------------------------------------------------------------------------- /js/utils/BRFv4PointUtils.js: -------------------------------------------------------------------------------- 1 | // Some helpers to make point, distance and angle calculations. 2 | 3 | (function(lib) { 4 | "use strict"; 5 | 6 | lib.BRFv4PointUtils = { 7 | 8 | setPoint: function(v, i, p) { 9 | p.x = v[i * 2]; p.y = v[i * 2 + 1]; 10 | }, 11 | applyMovementVector: function(p, p0, pmv, f) { 12 | p.x = p0.x + pmv.x * f; 13 | p.y = p0.y + pmv.y * f; 14 | }, 15 | interpolatePoint: function(p, p0, p1, f) { 16 | p.x = p0.x + f * (p1.x - p0.x); 17 | p.y = p0.y + f * (p1.y - p0.y); 18 | }, 19 | getAveragePoint: function(p, ar) { 20 | p.x = 0.0; p.y = 0.0; 21 | for(var i = 0, l = ar.length; i < l; i++) { 22 | p.x += ar[i].x; 23 | p.y += ar[i].y; 24 | } 25 | p.x /= l; p.y /= l; 26 | }, 27 | calcMovementVector: function(p, p0, p1, f) { 28 | p.x = f * (p1.x - p0.x); 29 | p.y = f * (p1.y - p0.y); 30 | }, 31 | calcMovementVectorOrthogonalCW: function(p, p0, p1, f) { 32 | lib.BRFv4PointUtils.calcMovementVector(p, p0, p1, f); 33 | var x = p.x; 34 | var y = p.y; 35 | p.x = -y; 36 | p.y = x; 37 | }, 38 | calcMovementVectorOrthogonalCCW: function(p, p0, p1, f) { 39 | lib.BRFv4PointUtils.calcMovementVector(p, p0, p1, f); 
40 | var x = p.x; 41 | var y = p.y; 42 | p.x = y; 43 | p.y = -x; 44 | }, 45 | calcIntersectionPoint: function(p, pk0, pk1, pg0, pg1) { 46 | 47 | //y1 = m1 * x1 + t1 ... y2 = m2 * x2 + t1 48 | //m1 * x + t1 = m2 * x + t2 49 | //m1 * x - m2 * x = (t2 - t1) 50 | //x * (m1 - m2) = (t2 - t1) 51 | 52 | var dx1 = (pk1.x - pk0.x); if(dx1 == 0) dx1 = 0.01; 53 | var dy1 = (pk1.y - pk0.y); if(dy1 == 0) dy1 = 0.01; 54 | 55 | var dx2 = (pg1.x - pg0.x); if(dx2 == 0) dx2 = 0.01; 56 | var dy2 = (pg1.y - pg0.y); if(dy2 == 0) dy2 = 0.01; 57 | 58 | var m1 = dy1 / dx1; 59 | var t1 = pk1.y - m1 * pk1.x; 60 | 61 | var m2 = dy2 / dx2; 62 | var t2 = pg1.y - m2 * pg1.x; 63 | 64 | var m1m2 = (m1 - m2); if(m1m2 == 0) m1m2 = 0.01; 65 | var t2t1 = (t2 - t1); if(t2t1 == 0) t2t1 = 0.01; 66 | var px = t2t1 / m1m2; 67 | var py = m1 * px + t1; 68 | 69 | p.x = px; 70 | p.y = py; 71 | }, 72 | calcDistance: function(p0, p1) { 73 | return Math.sqrt( 74 | (p1.x - p0.x) * (p1.x - p0.x) + 75 | (p1.y - p0.y) * (p1.y - p0.y)); 76 | }, 77 | calcAngle: function(p0, p1) { 78 | return Math.atan2((p1.y - p0.y), (p1.x - p0.x)); 79 | }, 80 | toDegree: function(x) { 81 | return x * 180.0 / Math.PI; 82 | }, 83 | toRadian: function(x) { 84 | return x * Math.PI / 180.0; 85 | } 86 | }; 87 | })(brfv4); -------------------------------------------------------------------------------- /js/utils/BRFv4PublicAPI.js: -------------------------------------------------------------------------------- 1 | // This public stuff of the API is already build into the SDK. 2 | // You don't need to load this js file additionally. 3 | // 4 | // This is here for your reference only. 5 | 6 | (function(lib) { 7 | "use strict"; 8 | 9 | // utility function 10 | 11 | lib.defaultValue = function(arg, val) { 12 | return typeof arg !== 'undefined' ? 
arg : val; 13 | }; 14 | 15 | // API 16 | 17 | lib.Point = function(_x, _y) { 18 | this.x = lib.defaultValue(_x, 0.0); 19 | this.y = lib.defaultValue(_y, 0.0); 20 | }; 21 | lib.Point.prototype.setTo = function(_x, _y) { 22 | this.x = lib.defaultValue(_x, 0.0); 23 | this.y = lib.defaultValue(_y, 0.0); 24 | }; 25 | 26 | lib.Rectangle = function(_x, _y, _width, _height) { 27 | this.x = lib.defaultValue(_x, 0.0); 28 | this.y = lib.defaultValue(_y, 0.0); 29 | this.width = lib.defaultValue(_width, 0.0); 30 | this.height = lib.defaultValue(_height, 0.0); 31 | }; 32 | lib.Rectangle.prototype.setTo = function(_x, _y, _width, _height) { 33 | this.x = lib.defaultValue(_x, 0.0); 34 | this.y = lib.defaultValue(_y, 0.0); 35 | this.width = lib.defaultValue(_width, 0.0); 36 | this.height = lib.defaultValue(_height, 0.0); 37 | }; 38 | 39 | lib.BRFMode = { 40 | FACE_DETECTION: "mode_face_detection", 41 | FACE_TRACKING: "mode_face_tracking", 42 | POINT_TRACKING: "mode_point_tracking" 43 | }; 44 | 45 | lib.BRFState = { 46 | FACE_DETECTION: "state_face_detection", 47 | FACE_TRACKING_START: "state_face_tracking_start", 48 | FACE_TRACKING: "state_face_tracking", 49 | RESET: "state_reset" 50 | }; 51 | 52 | lib.BRFFace = function() { 53 | 54 | this.lastState = lib.BRFState.RESET; 55 | this.state = lib.BRFState.RESET; 56 | this.nextState = lib.BRFState.RESET; 57 | 58 | this.vertices = []; 59 | this.triangles = []; 60 | this.points = []; 61 | this.bounds = new lib.Rectangle(0, 0, 0, 0); 62 | this.refRect = new lib.Rectangle(0, 0, 0, 0); 63 | 64 | this.candideVertices = []; 65 | this.candideTriangles = []; 66 | 67 | this.scale = 1.0; 68 | this.translationX = 0.0; 69 | this.translationY = 0.0; 70 | this.rotationX = 0.0; 71 | this.rotationY = 0.0; 72 | this.rotationZ = 0.0; 73 | }; 74 | 75 | })(brfv4); -------------------------------------------------------------------------------- /js/utils/BRFv4SetupChooser.js: -------------------------------------------------------------------------------- 1 
| (function() { 2 | "use strict"; 3 | 4 | if(typeof QuickSettings === "undefined") return; 5 | 6 | var urlMap = { 7 | "Webcam Setup": "webcam", 8 | "Picture Setup": "picture" 9 | }; 10 | var labels = []; 11 | for (var key in urlMap) { labels.push(key); } // Fill in the labels. 12 | 13 | function onSetupChosen(data) { 14 | brfv4Example.init(urlMap[data.value]); 15 | } 16 | 17 | if(!brfv4Example.gui.setupChooser) { 18 | 19 | QuickSettings.useExtStyleSheet(); 20 | 21 | brfv4Example.gui.setupChooser = QuickSettings.create( 22 | 2, 115, "Setup Chooser", brfv4Example.dom.createDiv("_settingsRight")) 23 | .setWidth(250) 24 | .addHTML("Switch between setups", "Choose either webcam or loaded picture.

For webcam make sure you opened the https:// URL, otherwise it may not start in Chrome.") 25 | .addDropDown("_setup", labels, onSetupChosen) 26 | .hideTitle("_setup").hideTitle("Switch between setups"); 27 | } 28 | })(); -------------------------------------------------------------------------------- /js/utils/BRFv4SetupExample.js: -------------------------------------------------------------------------------- 1 | // BRFv4 example setup and handling: 2 | // 3 | // Init everything: 4 | // + image data (either webcam or picture) 5 | // + BRFv4 SDK with the size of the image data 6 | // + Set all the parameters for BRFv4 according to the chosen example 7 | // + Reinit if image data size changes 8 | 9 | (function() { 10 | "use strict"; 11 | 12 | var example = brfv4Example; 13 | 14 | var imageData = example.imageData; 15 | var dom = example.dom; 16 | var stats = example.stats; 17 | var drawing = example.drawing; 18 | var t3d = example.drawing3d.t3d; 19 | var trace = example.trace; 20 | 21 | var brfManager = null; 22 | var resolution = null; 23 | 24 | var paused = true; 25 | 26 | // This call initializes the library and put's all necessary date in RAM. 27 | initializeBRF(brfv4); 28 | 29 | // Tell the DrawingUtils where to draw to. 30 | if(drawing.setup && !drawing.stage) { 31 | drawing.setup(dom.getElement("_drawing"), dom.getElement("_faceSub"), 30); 32 | } 33 | 34 | // FPS meter 35 | if(stats.init) { 36 | stats.init(30); 37 | } 38 | 39 | // On imageData switch (webcam/picture) BRFv4 needs to reinit with the correct image sizes. 40 | example.reinit = function() { 41 | example.init(imageData.type()); 42 | }; 43 | 44 | // imageData available? Then update layout and reinit BRFv4. 
45 | imageData.onAvailable = function(width, height) { 46 | 47 | trace("imageData.onAvailable: " + width + "x" + height); 48 | 49 | dom.updateLayout(width, height); 50 | resolution.setTo(0, 0, width, height); 51 | drawing.updateLayout(width, height); 52 | 53 | example.reinit(); 54 | }; 55 | 56 | // If the SDK didn't load yet (sdkReady is false) wait for it to do so. 57 | example.waitForSDK = function() { 58 | 59 | if(brfv4.sdkReady) { 60 | 61 | trace("waitForSDK: done."); 62 | example.init(); 63 | 64 | } else { 65 | 66 | trace("waitForSDK: still waiting."); 67 | clearTimeout(example.waitForSDK_timeout); 68 | example.waitForSDK_timeout = setTimeout(function() { 69 | example.waitForSDK(); 70 | }, 100); 71 | } 72 | }; 73 | 74 | // Setup BRF and the imageData by chosen type (webcam/picture). 75 | example.init = function(type) { 76 | 77 | paused = true; 78 | 79 | if(imageData.type && type !== imageData.type() && imageData.isAvailable()) { 80 | drawing.setUpdateCallback(null); 81 | trace("imageData.dispose: " + imageData.type()); 82 | imageData.dispose(); 83 | } 84 | 85 | trace("init: type: " + type); 86 | 87 | if(!brfv4.sdkReady) { 88 | 89 | example.waitForSDK(); 90 | 91 | } else { 92 | 93 | trace("-> brfv4.sdkReady: " + brfv4.sdkReady); 94 | 95 | if(brfv4.BRFManager && !brfManager) { 96 | brfManager = new brfv4.BRFManager(); 97 | } 98 | 99 | if(brfv4.Rectangle && !resolution) { 100 | resolution = new brfv4.Rectangle(0, 0, 640, 480); 101 | } 102 | 103 | if(brfManager === null || resolution === null) { 104 | trace("Init failed!", true); 105 | return; 106 | } 107 | 108 | if(type === "picture") { // Start either using an image ... 109 | 110 | imageData.picture.setup( 111 | dom.getElement("_imageData"), 112 | imageData.onAvailable 113 | ); 114 | 115 | } else { // ... or start using the webcam. 
116 | 117 | imageData.webcam.setup( 118 | dom.getElement("_webcam"), 119 | dom.getElement("_imageData"), 120 | resolution, 121 | imageData.onAvailable 122 | ); 123 | } 124 | 125 | trace("-> imageData.isAvailable (" + imageData.type() + "): " + imageData.isAvailable()); 126 | 127 | if(imageData.isAvailable()) { 128 | 129 | setupBRFExample(); 130 | 131 | } else { 132 | 133 | resolution.setTo(0, 0, 640, 480); // reset for webcam initialization 134 | imageData.init(); 135 | } 136 | } 137 | }; 138 | 139 | function setupBRFExample() { 140 | 141 | // Remove clicks and image overlay as well as 3d models. 142 | 143 | drawing.clickArea.mouseEnabled = false; 144 | drawing.imageContainer.removeAllChildren(); 145 | if(t3d && t3d.hideAll) t3d.hideAll(); 146 | 147 | // Reset BRFv4 to it's default parameters. 148 | // Every example may change these according to 149 | // its own needs in initCurrentExample(). 150 | 151 | var size = resolution.height; 152 | 153 | if(resolution.height > resolution.width) { 154 | size = resolution.width; 155 | } 156 | 157 | brfManager.setMode(brfv4.BRFMode.FACE_TRACKING); 158 | brfManager.setNumFacesToTrack(1); 159 | 160 | brfManager.setFaceDetectionRoi(resolution); 161 | 162 | // more strict 163 | 164 | brfManager.setFaceDetectionParams( size * 0.30, size * 1.00, 12, 8); 165 | brfManager.setFaceTrackingStartParams( size * 0.30, size * 1.00, 22, 26, 22); 166 | brfManager.setFaceTrackingResetParams( size * 0.25, size * 1.00, 40, 55, 32); 167 | 168 | // less strict 169 | 170 | // brfManager.setFaceDetectionParams( size * 0.20, size * 1.00, 12, 8); 171 | // brfManager.setFaceTrackingStartParams( size * 0.20, size * 1.00, 32, 46, 32); 172 | // brfManager.setFaceTrackingResetParams( size * 0.15, size * 1.00, 40, 55, 32); 173 | 174 | // Initialize the example. 
See the specific files in js/examples 175 | 176 | example.initCurrentExample(brfManager, resolution, drawing); 177 | 178 | paused = false; 179 | 180 | if(imageData.isStream()) { 181 | 182 | // webcam continuous update. 183 | 184 | drawing.setUpdateCallback(updateBRFExample); 185 | 186 | } else { 187 | 188 | // Simply update 10 times for loaded images. 189 | // This is not the most sophisticated approach, but 190 | // will most likely do the job. 191 | 192 | drawing.clear(); 193 | 194 | var imageDataCanvas = dom.getElement("_imageData"); 195 | 196 | imageData.update(); // depends on whether it is a webcam or image setup 197 | 198 | var data = imageDataCanvas.getContext("2d").getImageData(0, 0, resolution.width, resolution.height).data; 199 | 200 | for(var i = 0; i < 10; i++) { 201 | brfManager.update(data); 202 | } 203 | 204 | setTimeout(function() { 205 | example.updateCurrentExample( // depends on the chosen example 206 | brfManager, data, drawing 207 | ); 208 | }, 100); 209 | 210 | } 211 | } 212 | 213 | function updateBRFExample() { 214 | 215 | if(!paused) { 216 | 217 | if (stats.start) stats.start(); 218 | 219 | var imageDataCanvas = dom.getElement("_imageData"); 220 | 221 | imageData.update(); // depends on whether it is a webcam or image setup 222 | 223 | example.updateCurrentExample( // depends on the chosen example 224 | brfManager, 225 | imageDataCanvas.getContext("2d").getImageData(0, 0, resolution.width, resolution.height).data, 226 | drawing 227 | ); 228 | 229 | if (stats.end) stats.end(); 230 | } 231 | } 232 | })(); -------------------------------------------------------------------------------- /js/utils/BRFv4SetupPicture.js: -------------------------------------------------------------------------------- 1 | // Instead of a webcam stream you can choose a picture and analyze it. 2 | // 3 | // QuickSettings image chooser panel. 4 | // First image in list is picked automatically. 
5 | // 6 | // imageData handling using images: 7 | // 8 | // setupImage(imageDataCanvas, onImageDataReady) // canvas, callback 9 | 10 | (function(){ 11 | "use strict"; 12 | 13 | var urlMap = { 14 | "Marcel": "assets/brfv4_portrait_marcel.jpg", 15 | "Chris": "assets/brfv4_portrait_chris.jpg", 16 | "woman old": "assets/brfv4_woman_old.jpg", 17 | "women young": "assets/brfv4_women_young.jpg", 18 | "Two Faces": "assets/brfv4_two_faces.jpg" 19 | }; 20 | var labels = []; 21 | for (var key in urlMap) { labels.push(key); } // Fill in the labels. 22 | 23 | var example = brfv4Example; 24 | var trace = example.trace; 25 | var dom = example.dom; 26 | var imageData = example.imageData; 27 | var gui = example.gui; 28 | 29 | var picture = imageData.picture; 30 | 31 | var _picture = null; 32 | var _loader = null; 33 | var _onImageDataReady = null; 34 | 35 | picture.onImageLoaded = function(event) { 36 | _picture = event.result; 37 | 38 | if(_onImageDataReady) { 39 | _onImageDataReady(_picture.width, _picture.height); 40 | } 41 | }; 42 | 43 | picture.onImageLoadError = function(event) { 44 | trace("Error loading image.", true); 45 | }; 46 | 47 | picture.loadImage = function(url) { 48 | 49 | if(!url) return; 50 | 51 | _loader = new createjs.LoadQueue(true); 52 | _loader.on("fileload", picture.onImageLoaded); 53 | _loader.on("error", picture.onImageLoadError); 54 | _loader.loadFile(url); 55 | }; 56 | 57 | picture.setup = function(imageDataCanvas, onImageDataReady) { 58 | 59 | if(!imageDataCanvas) { 60 | trace("Please add a tag with id='_imageData' to the DOM.", true); 61 | return; 62 | } 63 | 64 | _onImageDataReady = onImageDataReady; 65 | 66 | imageData.type = function() { 67 | return "picture"; 68 | }; 69 | 70 | imageData.init = function() { 71 | if(labels.length > 0) { 72 | picture.loadImage(urlMap[labels[0]]); 73 | 74 | if(gui.pictureChooser) { 75 | gui.pictureChooser.show(); 76 | } 77 | } 78 | }; 79 | 80 | imageData.dispose = function() { 81 | _picture = null; 82 | 
if(gui.pictureChooser) { 83 | gui.pictureChooser.hide(); 84 | } 85 | }; 86 | 87 | imageData.isAvailable = function() { 88 | return _picture != null; 89 | }; 90 | 91 | imageData.isStream = function() { 92 | return false; 93 | }; 94 | 95 | imageData.update = function() { 96 | if(_picture != null) { 97 | var _imageDataCtx = imageDataCanvas.getContext("2d"); 98 | _imageDataCtx.drawImage(_picture, 0, 0, _picture.width, _picture.height); 99 | } 100 | }; 101 | }; 102 | 103 | if(typeof QuickSettings === "undefined") return; 104 | 105 | function onImageChosen(data) { 106 | picture.loadImage(urlMap[data.value]); 107 | } 108 | 109 | if(!gui.pictureChooser) { 110 | 111 | QuickSettings.useExtStyleSheet(); 112 | 113 | gui.pictureChooser = QuickSettings.create( 114 | 2, 505, "Picture Chooser", dom.createDiv("_settingsRight")) 115 | .setWidth(250) 116 | .addHTML("Choose a picture from the list", "") 117 | .addDropDown("_picture", labels, onImageChosen) 118 | .hideTitle("_picture") 119 | .hide(); 120 | } 121 | })(); -------------------------------------------------------------------------------- /js/utils/BRFv4SetupWebcam.js: -------------------------------------------------------------------------------- 1 | // webcam handling: 2 | // 3 | // setupStream(video, width, height, fps, callback) 4 | // startStream() 5 | // stopStream() 6 | // 7 | // onStreamFetched(mediaStream) 8 | // onStreamDimensionsAvailable() 9 | // onStreamError(event) 10 | // 11 | // imageData handling using webcam: 12 | // 13 | // setupWebcam(webcamVideo, imageDataCanvas, resolution, onImageDataReady) // video, canvas, Rectangle, callback 14 | 15 | (function(){ 16 | "use strict"; 17 | 18 | var example = brfv4Example; 19 | var imageData = example.imageData; 20 | var trace = example.trace; 21 | var webcam = imageData.webcam; 22 | 23 | var ua = window.navigator.userAgent; 24 | var isIOS11 = (ua.indexOf("iPad") > 0 || ua.indexOf("iPhone") > 0) && ua.indexOf("OS 11_") > 0; 25 | var stoppedOniOS = 0; 26 | 27 | 
webcam.setupStream = function(video, width, height, fps, callback) { 28 | 29 | trace("webcam.setupStream: isPlaying: " + webcam.isPlaying); 30 | 31 | webcam.video = video; 32 | webcam.constraints = {video: {width: width, height: height, frameRate: fps}}; 33 | webcam.onCameraReady = callback; 34 | 35 | webcam.startStream(); 36 | }; 37 | 38 | webcam.startStream = function() { 39 | 40 | webcam.stopStream(); 41 | 42 | trace("webcam.startStream: try: " + 43 | webcam.constraints.video.width + "x" + webcam.constraints.video.height); 44 | 45 | window.navigator.mediaDevices.getUserMedia(webcam.constraints) 46 | .then (webcam.onStreamFetched) 47 | .catch(webcam.onStreamError); 48 | }; 49 | 50 | webcam.stopStream = function() { 51 | 52 | if(webcam.isPlaying) { 53 | trace("webcam.stopStream: isPlaying: " + webcam.isPlaying); 54 | } 55 | 56 | webcam.isPlaying = false; 57 | 58 | if (webcam.stream !== null) { 59 | webcam.stream.getTracks().forEach(function(track) { 60 | track.stop(); 61 | }); 62 | webcam.stream = null; 63 | } 64 | if(webcam.video !== null && webcam.video.srcObject !== null) { 65 | webcam.video.srcObject = null; 66 | } 67 | }; 68 | 69 | webcam.onStreamFetched = function(mediaStream) { 70 | 71 | webcam.stream = mediaStream; 72 | 73 | if(webcam.video !== null) { 74 | webcam.video.srcObject = mediaStream; 75 | webcam.video.play(); 76 | webcam.onStreamDimensionsAvailable(); 77 | } 78 | }; 79 | 80 | webcam.onStreamDimensionsAvailable = function() { 81 | 82 | // As we can't be sure, what resolution we get, we need to read it 83 | // from the already playing stream to be sure. 
84 | 85 | if(webcam.video.videoWidth === 0) { 86 | 87 | trace("webcam.onStreamDimensionsAvailable: waiting"); 88 | 89 | clearTimeout(webcam.onStreamDimensionsAvailable_timeout); 90 | 91 | webcam.onStreamDimensionsAvailable_timeout = 92 | setTimeout(webcam.onStreamDimensionsAvailable, 100); 93 | 94 | } else { 95 | 96 | trace("webcam.onStreamDimensionsAvailable: " + webcam.video.videoWidth + "x" + webcam.video.videoHeight); 97 | 98 | // Now we know the dimensions of the stream. So tell the app, the camera is ready. 99 | webcam.isPlaying = true; 100 | 101 | if (webcam.stream !== null && isIOS11 && stoppedOniOS === 0) { 102 | console.log('Turn off camera on iOS 11 and restart it later.'); 103 | stoppedOniOS = 1; 104 | webcam.stream.getTracks().forEach(function(track) { 105 | track.stop(); 106 | }); 107 | } 108 | 109 | if(webcam.onCameraReady) { 110 | webcam.onCameraReady(true); 111 | } 112 | } 113 | }; 114 | 115 | webcam.onStreamError = function(event) { 116 | trace("webcam.onStreamError: " + event); 117 | 118 | webcam.isPlaying = false; 119 | 120 | if(webcam.onCameraReady) { 121 | webcam.onCameraReady(false); 122 | } 123 | }; 124 | 125 | webcam.setup = function(webcamVideo, imageDataCanvas, resolution, onImageDataReady) { 126 | 127 | if(!webcamVideo || !imageDataCanvas) { 128 | trace("setupWebcam: Please add a