├── .gitignore ├── LICENSE ├── README.md ├── camera_calibration ├── README.md ├── cameracalib.py ├── parameters.txt └── pattern.png ├── demo ├── EAR.png ├── Eye processing for gaze score estimation.png ├── Gaze Score demo .mp4 ├── Gaze Score.png ├── Gaze_Score estimation v2.png ├── Gaze_Score estimation.png ├── demo.mp4 ├── face_keypoints.jpg ├── new_mediapipe_dsd_demo.gif └── new_mediapipe_dsd_demo.mp4 ├── driver_state_detection ├── attention_scorer.py ├── camera_params.json ├── eye_detector.py ├── face_geometry.py ├── main.py ├── parser.py ├── pose_estimation.py └── utils.py ├── poetry.lock ├── pyproject.toml └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | #Other files 2 | *.ini 3 | camera_calibration/calib_photos/* 4 | #GITIGNORE TEMPLATE COPIED FROM GITHUB GITIGNORE REPO + INTELLIJ GITIGNORE REPO 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | cover/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | .pybuilder/ 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | # For a library or package, you might want to ignore these files since the code is 92 | # intended to run in multiple environments; otherwise, check them in: 93 | # .python-version 94 | 95 | # pipenv 96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 99 | # install all needed dependencies. 100 | #Pipfile.lock 101 | 102 | # poetry 103 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 104 | # This is especially recommended for binary packages to ensure reproducibility, and is more 105 | # commonly ignored for libraries. 106 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 107 | #poetry.lock 108 | 109 | # pdm 110 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 111 | #pdm.lock 112 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 113 | # in version control. 114 | # https://pdm.fming.dev/#use-with-ide 115 | .pdm.toml 116 | 117 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 118 | __pypackages__/ 119 | 120 | # Celery stuff 121 | celerybeat-schedule 122 | celerybeat.pid 123 | 124 | # SageMath parsed files 125 | *.sage.py 126 | 127 | # Environments 128 | .env 129 | .venv 130 | env/ 131 | venv/ 132 | ENV/ 133 | env.bak/ 134 | venv.bak/ 135 | 136 | # Spyder project settings 137 | .spyderproject 138 | .spyproject 139 | 140 | #VSCode project settings 141 | .vscode 142 | vscode/ 143 | *.vscode 144 | 145 | # Rope project settings 146 | .ropeproject 147 | 148 | # mkdocs documentation 149 | /site 150 | 151 | # mypy 152 | .mypy_cache/ 153 | .dmypy.json 154 | dmypy.json 155 | 156 | # Pyre type checker 157 | .pyre/ 158 | 159 | # pytype static type analyzer 160 | .pytype/ 161 | 162 | # Cython debug symbols 163 | cython_debug/ 164 | 165 | # PyCharm specifi gitignore 166 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 167 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 168 | 169 | # User-specific stuff 170 | .idea/**/workspace.xml 171 | .idea/**/tasks.xml 172 | .idea/**/usage.statistics.xml 173 | .idea/**/dictionaries 174 | .idea/**/shelf 175 | 176 | # AWS User-specific 177 | .idea/**/aws.xml 178 | 179 | # Generated files 180 | .idea/**/contentModel.xml 181 | 182 | # Sensitive or high-churn files 183 | .idea/**/dataSources/ 184 | .idea/**/dataSources.ids 185 | .idea/**/dataSources.local.xml 186 | .idea/**/sqlDataSources.xml 187 | .idea/**/dynamic.xml 188 | .idea/**/uiDesigner.xml 189 | .idea/**/dbnavigator.xml 190 | 191 | # Gradle 192 | .idea/**/gradle.xml 193 | .idea/**/libraries 194 | 195 | # Gradle and Maven with auto-import 196 | # When using Gradle or Maven with auto-import, you should exclude module files, 197 | # since they will be recreated, and may cause churn. Uncomment if using 198 | # auto-import. 199 | # .idea/artifacts 200 | # .idea/compiler.xml 201 | # .idea/jarRepositories.xml 202 | # .idea/modules.xml 203 | # .idea/*.iml 204 | # .idea/modules 205 | # *.iml 206 | # *.ipr 207 | 208 | # CMake 209 | cmake-build-*/ 210 | 211 | # Mongo Explorer plugin 212 | .idea/**/mongoSettings.xml 213 | 214 | # File-based project format 215 | *.iws 216 | 217 | # IntelliJ 218 | out/ 219 | 220 | # mpeltonen/sbt-idea plugin 221 | .idea_modules/ 222 | 223 | # JIRA plugin 224 | atlassian-ide-plugin.xml 225 | 226 | # Cursive Clojure plugin 227 | .idea/replstate.xml 228 | 229 | # SonarLint plugin 230 | .idea/sonarlint/ 231 | 232 | # Crashlytics plugin (for Android Studio and IntelliJ) 233 | com_crashlytics_export_strings.xml 234 | crashlytics.properties 235 | crashlytics-build.properties 236 | fabric.properties 237 | 238 | # Editor-based Rest Client 239 | .idea/httpRequests 240 | 241 | # Android studio 3.1+ serialized cache file 242 | .idea/caches/build_file_checksums.ser -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Ettore Candeloro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Real Time Driver State Detection 2 | ![Python](https://img.shields.io/badge/python-3670A0?style=for-the-badge&logo=python&logoColor=ffdd54) ![OpenCV](https://img.shields.io/badge/opencv-%23white.svg?style=for-the-badge&logo=opencv&logoColor=white) 3 | 4 | Real time, webcam based, driver attention state detection and monitoring using Python with the OpenCV and Mediapipe libraries. 5 | 6 | ![driver state detection demo](./demo/new_mediapipe_dsd_demo.gif) 7 | 8 | **Note**: 9 | This work is partially based on [this paper](https://www.researchgate.net/publication/327942674_Vision-Based_Driver%27s_Attention_Monitoring_System_for_Smart_Vehicles) for the scores and methods used. 10 | 11 | ## Mediapipe Update 12 | 13 | Thanks to the awesome contribution of [MustafaLotfi](https://github.com/MustafaLotfi), now the script uses the better performing and accurate face keypoints detection model from the [Google Mediapipe library](https://github.com/google/mediapipe). 14 | 15 | ## Last Features added: 16 | 17 | - Fast 478 face keypoints detection with Mediapipe 18 | - Direct iris keypoint detection with Mediapipe for gaze score estimation 19 | - Improved head pose estimation using the dynamical canonical face model 20 | - Fixed euler angles function and wrong returned values 21 | - Using time variables to make the code more modular and machine agnostic 22 | - Added rolling PERCLOS estimation and smoother driver state detection with decay factor 23 | - Added new demo video 24 | 25 | **NOTE**: the old mediapipe version can still be found in the "dlib-based" repository branch. 26 | 27 | ## How Does It Work? 28 | 29 | This script searches for the driver face, then use the mediapipe library to predict 478 face and iris keypoints. 30 | The enumeration and location of all the face keypoints/landmarks can be seen [here](./demo/face_keypoints.jpg). 31 | 32 | With those keypoints, the following scores are computed: 33 | 34 | - **EAR**: Eye Aspect Ratio, it's the normalized average eyes aperture, and it's used to see how much the eyes are opened or closed 35 | - **Gaze Score**: L2 Norm (Euclidean distance) between the center of the eye and the pupil, it's used to see if the driver is looking away or not 36 | - **Head Pose**: Roll, Pitch and Yaw of the head of the driver. The angles are used to see if the driver is not looking straight ahead or doesn't have a straight head pose (is probably unconscious) 37 | - **PERCLOS**: PERcentage of CLOSure eye time, used to see how much time the eyes are closed in a minute. A threshold of 0.2 is used in this case (20% of a minute) and the EAR score is used to estimate when the eyes are closed. 38 | 39 | The driver states can be classified as: 40 | 41 | - **Normal**: no messages are printed 42 | - **Tired**: when the PERCLOS score is > 0.2, a warning message is printed on screen 43 | - **Asleep**: when the eyes are closed (EAR < closure_threshold) for a certain amount of time, a warning message is printed on screen 44 | - **Looking Away**: when the gaze score is higher than a certain threshold for a certain amount of time, a warning message is printed on screen 45 | - **Distracted**: when the head pose score is higher than a certain threshold for a certain amount of time, a warning message is printed on screen 46 | 47 | ## Demo 48 | 49 | 52 | 53 | ## The Scores Explained 54 | 55 | ### EAR 56 | 57 | **Eye Aspect Ratio** is a normalized score that is useful to understand the rate of aperture of the eyes. 58 | Using the mediapipe face mesh keypoints for each eye (six for each), the eye lenght and width are estimated and using this data the EAR score is computed as explained in the image below: 59 | ![EAR](https://user-images.githubusercontent.com/67196406/121489162-18210900-c9d4-11eb-9d2e-765f5ac42286.png) 60 | 61 | **NOTE:** the average of the two eyes EAR score is computed 62 | 63 | ### Gaze Score Estimation 64 | 65 | The gaze score gives information about how much the driver is looking away without turning his head. 66 | 67 | To understand this, the distance between the eye center and the position of the pupil is computed. The result is then normalized by the eye width that can be different depending on the driver physionomy and distance from the camera. 68 | 69 | The below image explains graphically how the Gaze Score for a single eye is computed: 70 | ![Gaze Score](https://user-images.githubusercontent.com/67196406/121489746-ab5a3e80-c9d4-11eb-8f33-d34afd0947b4.png) 71 | **NOTE:** the average of the two eyes Gaze Score is computed 72 | 73 | ### Head Pose Estimation 74 | 75 | For the head pose estimation, a standard 3d head model in world coordinates was considered, in combination of the respective face mesh keypoints in the image plane. 76 | In this way, using the solvePnP function of OpenCV, estimating the rotation and translation vector of the head in respect to the camera is possible. 77 | Then the 3 Euler angles are computed. 78 | 79 | The partial snippets of code used for this task can be found in [this article](https://learnopencv.com/head-pose-estimation-using-opencv-and-dlib/). 80 | 81 | ## Installation 82 | 83 | This projects runs on Python with the following libraries: 84 | 85 | - numpy 86 | - OpenCV (opencv-python) 87 | - mediapipe 88 | 89 | Or you can use poetry to automatically create a virtualenv with all the required packages: 90 | 91 | ``` 92 | pip install poetry #a global install of poetry is required 93 | ``` 94 | 95 | Then inside the repo directory: 96 | 97 | ``` 98 | poetry install 99 | ``` 100 | 101 | To activate the env to execute command lines: 102 | 103 | ``` 104 | poetry shell 105 | ``` 106 | 107 | Alternatively (not recommended), you can use the requirements.txt file provided in the repository using: 108 | 109 | pip install -r requirements.txt 110 | 111 | 112 | ## Usage 113 | 114 | First navigate inside the driver state detection folder: 115 | 116 | cd driver_state_detection 117 | 118 | The scripts can be used with all default options and parameters by calling it via command line: 119 | 120 | python main.py 121 | 122 | For the list of possible arguments, write: 123 | 124 | python main.py --help 125 | 126 | Example of a possible use with parameters: 127 | 128 | python main.py --ear_time_tresh 5 129 | 130 | This will sets to 5 seconds the eye closure time before a warning message is shown on screen 131 | 132 | ## Why this project 133 | 134 | This project was developed as part for a final group project for the course of [Computer Vision and Cognitive Systems](https://international.unimore.it/singleins.html?ID=295) done at the [University of Modena and Reggio Emilia](https://international.unimore.it/) in the second semester of the academic year 2020/2021. 135 | Given the possible applications of Computer Vision, we wanted to focus mainly on the automotive field, developing a useful and potential life saving proof of concept project. 136 | In fact, sadly, many fatal accidents happens [because of the driver distraction](https://www.nhtsa.gov/risky-driving/distracted-driving). 137 | 138 | ## License and Contacts 139 | 140 | This project is freely available under the MIT license. You can use/modify this code as long as you include the original license present in this repository in it. 141 | 142 | For any question or if you want to contribute to this project, feel free to contact me or open a pull request. 143 | 144 | ## Improvements to make 145 | 146 | - [x] Reformat code in packages 147 | - [x] Add argparser to run the script with various settings using the command line 148 | - [x] Improve robustness of gaze detection (using mediapipe) 149 | - [x] Add argparser option for importing and using the camera matrix and dist. coefficients 150 | - [x] Reformat classes to follow design patterns and Python conventions 151 | - [ ] Debug new mediapipe methods and classes and adjust thresholds 152 | - [ ] Improve perfomances of the script by minimizing image processing steps 153 | -------------------------------------------------------------------------------- /camera_calibration/README.md: -------------------------------------------------------------------------------- 1 | # Camera Calibration Instructions 2 | 3 | 1. Create a folder named `calib_photos` inside the `camera_calibration` folder. 4 | 2. Print without any border adaptation, in an A4 paper sheet, the chessboard in the `pattern.png` file. 5 | 3. Attach the chessboard paper sheet to a planar/flat rigid surface, like a thick carboard piece or a clipboard. 6 | 4. With the desired camera/webcam, shoot various photos (20+) of the chessboard, with various angles. 7 | 5. Transfer all the photos to the `calib_photos` folder. 8 | 6. Ensure the `cameracalib.py` script has the correct path for the `calib_photos` folder. 9 | 7. The valid chessboard photos are visualized, after skipping them pressing a button, the camera coefficients are computed and printed out (may require some time). 10 | 8. Copy the camera parameters and save them to a file for late usage. 11 | 12 | For further explanations, follow [this guide](https://learnopencv.com/camera-calibration-using-opencv/). 13 | 14 | ## Example 15 | Camera parameters and distorsion coefficients in Python, initialized as numpy arrays 16 | 17 | camera_matrix = np.array([ 18 | [899.12150372, 0., 644.26261492], 19 | [0., 899.45280671, 372.28009436], 20 | [0, 0, 1] 21 | ], 22 | dtype="double") 23 | 24 | dist_coeffs = np.array([ 25 | [-0.03792548, 0.09233237, 0.00419088, 0.00317323, -0.15804257] 26 | ], 27 | dtype="double") -------------------------------------------------------------------------------- /camera_calibration/cameracalib.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import glob 4 | import os 5 | 6 | import cv2 7 | import numpy as np 8 | 9 | # Defining the dimensions of checkerboard 10 | CHECKERBOARD = (6, 9) 11 | criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001) 12 | 13 | # Creating vector to store vectors of 3D points for each checkerboard image 14 | objpoints = [] 15 | # Creating vector to store vectors of 2D points for each checkerboard image 16 | imgpoints = [] 17 | 18 | 19 | # Defining the world coordinates for 3D points 20 | objp = np.zeros((1, CHECKERBOARD[0] * CHECKERBOARD[1], 3), np.float32) 21 | objp[0, :, :2] = np.mgrid[0 : CHECKERBOARD[0], 0 : CHECKERBOARD[1]].T.reshape(-1, 2) 22 | prev_img_shape = None 23 | 24 | # Extracting path of individual image stored in a given directory 25 | images = glob.glob("Camera_Calibration/calib_photos/*.jpg") 26 | for fname in images: 27 | img = cv2.imread(fname) 28 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) 29 | # Find the chess board corners 30 | # If desired number of corners are found in the image then ret = true 31 | ret, corners = cv2.findChessboardCorners( 32 | gray, 33 | CHECKERBOARD, 34 | cv2.CALIB_CB_ADAPTIVE_THRESH 35 | + cv2.CALIB_CB_FAST_CHECK 36 | + cv2.CALIB_CB_NORMALIZE_IMAGE, 37 | ) 38 | 39 | """ 40 | If desired number of corner are detected, 41 | we refine the pixel coordinates and display 42 | them on the images of checker board 43 | """ 44 | if ret == True: 45 | objpoints.append(objp) 46 | # refining pixel coordinates for given 2d points. 47 | corners2 = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria) 48 | 49 | imgpoints.append(corners2) 50 | 51 | # Draw and display the corners 52 | img = cv2.drawChessboardCorners(img, CHECKERBOARD, corners2, ret) 53 | 54 | cv2.imshow("img", img) 55 | cv2.waitKey(0) 56 | 57 | cv2.destroyAllWindows() 58 | 59 | h, w = img.shape[:2] 60 | 61 | """ 62 | Performing camera calibration by 63 | passing the value of known 3D points (objpoints) 64 | and corresponding pixel coordinates of the 65 | detected corners (imgpoints) 66 | """ 67 | ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera( 68 | objpoints, imgpoints, gray.shape[::-1], None, None 69 | ) 70 | 71 | print("Camera matrix : \n") 72 | print(mtx) 73 | print("dist : \n") 74 | print(dist) 75 | # print("rvecs : \n") 76 | # print(rvecs) 77 | # print("tvecs : \n") 78 | # print(tvecs) 79 | -------------------------------------------------------------------------------- /camera_calibration/parameters.txt: -------------------------------------------------------------------------------- 1 | Parameters were computed using the cameracalib.py taken from OpenCV and using 30+ webcam photos of a planar 9x6 chessboard 2 | 3 | Attempt 1: 4 | 5 | 6 | Camera matrix : 7 | 8 | [[1.10481797e+03 0.00000000e+00 9.76862669e+02] 9 | [0.00000000e+00 1.11775382e+03 5.07678687e+02] 10 | [0.00000000e+00 0.00000000e+00 1.00000000e+00]] 11 | 12 | distorsion coefficients: 13 | 14 | [[ 0.13580439 -0.08770944 -0.01913275 -0.00170941 -0.0361747 ]] 15 | 16 | 17 | Attempt 2: 18 | 19 | Camera matrix : 20 | 21 | [[1.09520943e+03 0.00000000e+00 9.80688063e+02] 22 | [0.00000000e+00 1.10470495e+03 5.42055897e+02] 23 | [0.00000000e+00 0.00000000e+00 1.00000000e+00]] 24 | 25 | 26 | dist : 27 | 28 | [[ 1.41401053e-01 -2.12991544e-01 -8.88887657e-04 1.03893066e-04 29 | 9.54437692e-02]] 30 | 31 | 32 | Attempt 3 (laptop camera): 33 | 34 | Camera matrix : 35 | 36 | [[899.12150372 0. 644.26261492] 37 | [ 0. 899.45280671 372.28009436] 38 | [ 0. 0. 1. ]] 39 | 40 | 41 | dist : 42 | 43 | [[-0.03792548 0.09233237 0.00419088 0.00317323 -0.15804257]] -------------------------------------------------------------------------------- /camera_calibration/pattern.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/camera_calibration/pattern.png -------------------------------------------------------------------------------- /demo/EAR.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/EAR.png -------------------------------------------------------------------------------- /demo/Eye processing for gaze score estimation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/Eye processing for gaze score estimation.png -------------------------------------------------------------------------------- /demo/Gaze Score demo .mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/Gaze Score demo .mp4 -------------------------------------------------------------------------------- /demo/Gaze Score.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/Gaze Score.png -------------------------------------------------------------------------------- /demo/Gaze_Score estimation v2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/Gaze_Score estimation v2.png -------------------------------------------------------------------------------- /demo/Gaze_Score estimation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/Gaze_Score estimation.png -------------------------------------------------------------------------------- /demo/demo.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/demo.mp4 -------------------------------------------------------------------------------- /demo/face_keypoints.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/face_keypoints.jpg -------------------------------------------------------------------------------- /demo/new_mediapipe_dsd_demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/new_mediapipe_dsd_demo.gif -------------------------------------------------------------------------------- /demo/new_mediapipe_dsd_demo.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/e-candeloro/Driver-State-Detection/7a2a9b2b8ff46cee387876c5cbf188dbc9d1971d/demo/new_mediapipe_dsd_demo.mp4 -------------------------------------------------------------------------------- /driver_state_detection/attention_scorer.py: -------------------------------------------------------------------------------- 1 | import time 2 | import numpy as np 3 | 4 | 5 | class AttentionScorer: 6 | """ 7 | Attention Scorer class that contains methods for estimating EAR, Gaze_Score, PERCLOS and Head Pose over time, 8 | with the given thresholds (time thresholds and value thresholds) 9 | 10 | Methods 11 | ---------- 12 | - eval_scores: used to evaluate the driver's state of attention 13 | - get_PERCLOS: specifically used to evaluate the driver sleepiness 14 | """ 15 | 16 | def __init__( 17 | self, 18 | t_now, 19 | ear_thresh, 20 | gaze_thresh, 21 | perclos_thresh=0.2, 22 | roll_thresh=60, 23 | pitch_thresh=20, 24 | yaw_thresh=30, 25 | ear_time_thresh=4.0, 26 | gaze_time_thresh=2.0, 27 | pose_time_thresh=4.0, 28 | decay_factor=0.9, 29 | verbose=False, 30 | ): 31 | """ 32 | Initialize the AttentionScorer object with the given thresholds and parameters. 33 | 34 | Parameters 35 | ---------- 36 | t_now: float or int 37 | The current time in seconds. 38 | 39 | ear_thresh: float or int 40 | EAR score value threshold (if the EAR score is less than this value, eyes are considered closed!) 41 | 42 | gaze_thresh: float or int 43 | Gaze Score value threshold (if the Gaze Score is more than this value, the gaze is considered not centered) 44 | 45 | perclos_thresh: float (ranges from 0 to 1), optional 46 | PERCLOS threshold that indicates the maximum time allowed in 60 seconds of eye closure 47 | (default is 0.2 -> 20% of 1 minute) 48 | 49 | roll_thresh: int, optional 50 | The roll angle increases or decreases when you turn your head clockwise or counter clockwise. 51 | Threshold of the roll angle for considering the person distracted/unconscious (not straight neck) 52 | Default threshold is 20 degrees from the center position. 53 | 54 | pitch_thresh: int, optional 55 | The pitch angle increases or decreases when you move your head upwards or downwards. 56 | Threshold of the pitch angle for considering the person distracted (not looking in front) 57 | Default threshold is 20 degrees from the center position. 58 | 59 | yaw_thresh: int, optional 60 | The yaw angle increases or decreases when you turn your head to left or right. 61 | Threshold of the yaw angle for considering the person distracted/unconscious (not straight neck) 62 | It increase or decrease when you turn your head to left or right. default is 20 degrees from the center position. 63 | 64 | ear_time_thresh: float or int, optional 65 | Maximum time allowable for consecutive eye closure (given the EAR threshold considered) 66 | (default is 4.0 seconds) 67 | 68 | gaze_time_thresh: float or int, optional 69 | Maximum time allowable for consecutive gaze not centered (given the Gaze Score threshold considered) 70 | (default is 2.0 seconds) 71 | 72 | pose_time_thresh: float or int, optional 73 | Maximum time allowable for consecutive distracted head pose (given the pitch,yaw and roll thresholds) 74 | (default is 4.0 seconds) 75 | 76 | decay_factor: float, optional 77 | Decay factor for the attention scores. This value should be between 0 and 1. The decay factor is used to reduce the score over time when a distraction condition is not met, simulating a decay effect. A value of 0 means istant decay to 0, while a value of 1 means the score will not decay at all. (default is 0.9) 78 | 79 | verbose: bool, optional 80 | If set to True, print additional information about the scores (default is False) 81 | """ 82 | 83 | # Thresholds and configuration 84 | self.ear_thresh = ear_thresh 85 | self.gaze_thresh = gaze_thresh 86 | self.perclos_thresh = perclos_thresh 87 | self.roll_thresh = roll_thresh 88 | self.pitch_thresh = pitch_thresh 89 | self.yaw_thresh = yaw_thresh 90 | self.ear_time_thresh = ear_time_thresh 91 | self.gaze_time_thresh = gaze_time_thresh 92 | self.pose_time_thresh = pose_time_thresh 93 | self.decay_factor = decay_factor 94 | self.verbose = verbose 95 | 96 | # Initialize timers for smoothing the metrics 97 | self.last_eval_time = t_now 98 | self.closure_time = 0.0 99 | self.not_look_ahead_time = 0.0 100 | self.distracted_time = 0.0 101 | 102 | # PERCLOS parameters 103 | self.PERCLOS_TIME_PERIOD = 60 104 | self.timestamps = np.empty((0,), dtype=np.float64) 105 | self.closed_flags = np.empty((0,), dtype=bool) 106 | self.eye_closure_counter = 0 107 | self.prev_time = t_now 108 | 109 | def _update_metric(self, metric_value, condition, elapsed): 110 | """ 111 | Update a given metric timer based on the condition. 112 | 113 | If the condition is True, accumulate the elapsed time. 114 | Otherwise, apply exponential decay to the metric value. 115 | 116 | Parameters 117 | ---------- 118 | metric_value : float 119 | The current accumulated value of the metric. 120 | condition : bool 121 | True if the current measurement should accumulate more time. 122 | elapsed : float 123 | Time elapsed since the last update. 124 | 125 | Returns 126 | ------- 127 | float 128 | The updated metric value. 129 | """ 130 | if condition: 131 | return metric_value + elapsed 132 | else: 133 | return metric_value * self.decay_factor 134 | 135 | def eval_scores( 136 | self, t_now, ear_score, gaze_score, head_roll, head_pitch, head_yaw 137 | ): 138 | """ 139 | Evaluate the driver's state of attention using smoothed metrics. 140 | 141 | Instead of instantly resetting timers when conditions are not met, 142 | each timer is updated with accumulated elapsed time when active or decayed otherwise. 143 | 144 | Parameters 145 | ---------- 146 | t_now : float or int 147 | The current time in seconds. 148 | ear_score : float 149 | The Eye Aspect Ratio (EAR) score. 150 | gaze_score : float 151 | The gaze score. 152 | head_roll : float 153 | The roll angle of the head. 154 | head_pitch : float 155 | The pitch angle of the head. 156 | head_yaw : float 157 | The yaw angle of the head. 158 | 159 | Returns 160 | ------- 161 | asleep : bool 162 | True if the accumulated closure time exceeds the EAR threshold. 163 | looking_away : bool 164 | True if the accumulated gaze timer exceeds its threshold. 165 | distracted : bool 166 | True if the accumulated head pose timer exceeds its threshold. 167 | """ 168 | # Calculate the time elapsed since the last evaluation 169 | elapsed = t_now - self.last_eval_time 170 | self.last_eval_time = t_now 171 | 172 | # Update the eye closure metric 173 | self.closure_time = self._update_metric( 174 | self.closure_time, 175 | (ear_score is not None and ear_score <= self.ear_thresh), 176 | elapsed, 177 | ) 178 | 179 | # Update the gaze metric 180 | self.not_look_ahead_time = self._update_metric( 181 | self.not_look_ahead_time, 182 | (gaze_score is not None and gaze_score > self.gaze_thresh), 183 | elapsed, 184 | ) 185 | 186 | # Update the head pose metric: check if any head angle exceeds its threshold 187 | head_condition = ( 188 | (head_roll is not None and abs(head_roll) > self.roll_thresh) 189 | or (head_pitch is not None and abs(head_pitch) > self.pitch_thresh) 190 | or (head_yaw is not None and abs(head_yaw) > self.yaw_thresh) 191 | ) 192 | self.distracted_time = self._update_metric( 193 | self.distracted_time, head_condition, elapsed 194 | ) 195 | 196 | # Determine driver state based on thresholds 197 | asleep = self.closure_time >= self.ear_time_thresh 198 | looking_away = self.not_look_ahead_time >= self.gaze_time_thresh 199 | distracted = self.distracted_time >= self.pose_time_thresh 200 | 201 | if self.verbose: 202 | print( 203 | f"Closure Time: {self.closure_time:.2f}s | " 204 | f"Not Look Ahead Time: {self.not_look_ahead_time:.2f}s | " 205 | f"Distracted Time: {self.distracted_time:.2f}s" 206 | ) 207 | 208 | return asleep, looking_away, distracted 209 | 210 | # NOTE: This method uses a fixed window for the PERCLOS score - that is it resets every X seconds and don't consider the last X seconds as a rolling window! 211 | def get_PERCLOS(self, t_now, fps, ear_score): 212 | """ 213 | Compute the PERCLOS (Percentage of Eye Closure) score over a given time period. 214 | 215 | Parameters 216 | ---------- 217 | t_now: float or int 218 | The current time in seconds. 219 | 220 | fps: int 221 | The frames per second of the video. 222 | 223 | ear_score: float 224 | EAR (Eye Aspect Ratio) score obtained from the driver eye aperture. 225 | 226 | Returns 227 | ------- 228 | tired: bool 229 | Indicates if the driver is tired or not. 230 | 231 | perclos_score: float 232 | The PERCLOS score over a minute. 233 | """ 234 | 235 | delta = t_now - self.prev_time # set delta timer 236 | tired = False # set default value for the tired state of the driver 237 | 238 | all_frames_numbers_in_perclos_duration = int(self.PERCLOS_TIME_PERIOD * fps) 239 | 240 | # if the ear_score is lower or equal than the threshold, increase the eye_closure_counter 241 | if (ear_score is not None) and (ear_score <= self.ear_thresh): 242 | self.eye_closure_counter += 1 243 | 244 | # compute the PERCLOS over a given time period 245 | perclos_score = ( 246 | self.eye_closure_counter 247 | ) / all_frames_numbers_in_perclos_duration 248 | 249 | if ( 250 | perclos_score >= self.perclos_thresh 251 | ): # if the PERCLOS score is higher than a threshold, tired = True 252 | tired = True 253 | 254 | if ( 255 | delta >= self.PERCLOS_TIME_PERIOD 256 | ): # at every end of the given time period, reset the counter and the timer 257 | self.eye_closure_counter = 0 258 | self.prev_time = t_now 259 | 260 | return tired, perclos_score 261 | 262 | def get_rolling_PERCLOS(self, t_now, ear_score): 263 | """ 264 | Compute the rolling PERCLOS score using NumPy vectorized operations. 265 | 266 | Parameters 267 | ---------- 268 | t_now : float or int 269 | The current time in seconds. 270 | ear_score : float 271 | The EAR (Eye Aspect Ratio) score for the current frame. 272 | 273 | Returns 274 | ------- 275 | tired : bool 276 | Indicates if the driver is tired based on the PERCLOS score. 277 | perclos_score : float 278 | The rolling PERCLOS score calculated over the defined time period. 279 | """ 280 | # Determine if the current frame indicates closed eyes 281 | eye_closed = (ear_score is not None) and (ear_score <= self.ear_thresh) 282 | 283 | # Append new values to the NumPy arrays. (np.concatenate creates new arrays.) 284 | self.timestamps = np.concatenate((self.timestamps, [t_now])) 285 | self.closed_flags = np.concatenate((self.closed_flags, [eye_closed])) 286 | 287 | # Create a boolean mask of entries within the rolling window. 288 | valid_mask = self.timestamps >= (t_now - self.PERCLOS_TIME_PERIOD) 289 | self.timestamps = self.timestamps[valid_mask] 290 | self.closed_flags = self.closed_flags[valid_mask] 291 | 292 | total_frames = self.timestamps.size 293 | if total_frames > 0: 294 | perclos_score = np.sum(self.closed_flags) / total_frames 295 | else: 296 | perclos_score = 0.0 297 | 298 | tired = perclos_score >= self.perclos_thresh 299 | return tired, perclos_score 300 | -------------------------------------------------------------------------------- /driver_state_detection/camera_params.json: -------------------------------------------------------------------------------- 1 | { 2 | "camera_matrix": [ 3 | [ 4 | 899.12150372, 5 | 0.0, 6 | 644.26261492 7 | ], 8 | [ 9 | 0.0, 10 | 899.45280671, 11 | 372.28009436 12 | ], 13 | [ 14 | 0.0, 15 | 0.0, 16 | 1.0 17 | ] 18 | ], 19 | "dist_coeffs": [ 20 | [ 21 | -0.03792548, 22 | 0.09233237, 23 | 0.00419088, 24 | 0.00317323, 25 | -0.15804257 26 | ] 27 | ] 28 | } -------------------------------------------------------------------------------- /driver_state_detection/eye_detector.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | from numpy import linalg as LA 4 | from utils import resize 5 | 6 | 7 | class EyeDetector: 8 | def __init__(self, show_processing: bool = False): 9 | """ 10 | Eye dector class that contains various method for eye aperture rate estimation and gaze score estimation 11 | 12 | Parameters 13 | ---------- 14 | show_processing: bool 15 | If set to True, shows frame images during the processing in some steps (default is False) 16 | 17 | Methods 18 | ---------- 19 | - show_eye_keypoints: shows eye keypoints in the frame/image 20 | - get_EAR: computes EAR average score for the two eyes of the face 21 | - get_Gaze_Score: computes the Gaze_Score (normalized euclidean distance between center of eye and pupil) 22 | of the eyes of the face 23 | """ 24 | 25 | self.show_processing = show_processing 26 | # Eye landmarks numbers constants 27 | self.EYES_LMS_NUMS = [33, 133, 160, 144, 158, 153, 362, 263, 385, 380, 387, 373] 28 | self.LEFT_IRIS_NUM = 468 29 | self.RIGHT_IRIS_NUM = 473 30 | 31 | @staticmethod 32 | def _calc_EAR_eye(eye_pts): 33 | """ 34 | Computer the EAR score for a single eyes given it's keypoints 35 | :param eye_pts: numpy array of shape (6,2) containing the keypoints of an eye 36 | :return: ear_eye 37 | EAR of the eye 38 | """ 39 | ear_eye = ( 40 | LA.norm(eye_pts[2] - eye_pts[3]) + LA.norm(eye_pts[4] - eye_pts[5]) 41 | ) / (2 * LA.norm(eye_pts[0] - eye_pts[1])) 42 | """ 43 | EAR is computed as the mean of two measures of eye opening (see mediapipe face keypoints for the eye) 44 | divided by the eye lenght 45 | """ 46 | return ear_eye 47 | 48 | def show_eye_keypoints(self, color_frame, landmarks, frame_size): 49 | """ 50 | Shows eyes keypoints found in the face, drawing red circles in their position in the frame/image 51 | 52 | Parameters 53 | ---------- 54 | color_frame: numpy array 55 | Frame/image in which the eyes keypoints are found 56 | landmarks: landmarks: numpy array 57 | List of 478 mediapipe keypoints of the face 58 | """ 59 | 60 | cv2.circle( 61 | color_frame, 62 | (landmarks[self.LEFT_IRIS_NUM, :2] * frame_size).astype(np.uint32), 63 | 3, 64 | (255, 255, 255), 65 | cv2.FILLED, 66 | ) 67 | cv2.circle( 68 | color_frame, 69 | (landmarks[self.RIGHT_IRIS_NUM, :2] * frame_size).astype(np.uint32), 70 | 3, 71 | (255, 255, 255), 72 | cv2.FILLED, 73 | ) 74 | 75 | for n in self.EYES_LMS_NUMS: 76 | x = int(landmarks[n, 0] * frame_size[0]) 77 | y = int(landmarks[n, 1] * frame_size[1]) 78 | cv2.circle(color_frame, (x, y), 1, (0, 0, 255), -1) 79 | return 80 | 81 | def get_EAR(self, landmarks): 82 | """ 83 | Computes the average eye aperture rate of the face 84 | 85 | Parameters 86 | ---------- 87 | landmarks: landmarks: numpy array 88 | List of 478 mediapipe keypoints of the face 89 | 90 | Returns 91 | -------- 92 | ear_score: float 93 | EAR average score between the two eyes 94 | The EAR or Eye Aspect Ratio is computed as the eye opennes divided by the eye lenght 95 | Each eye has his scores and the two scores are averaged 96 | """ 97 | 98 | # numpy array for storing the keypoints positions of the left and right eyes 99 | eye_pts_l = np.zeros(shape=(6, 2)) 100 | eye_pts_r = eye_pts_l.copy() 101 | 102 | # get the face mesh keypoints 103 | for i in range(len(self.EYES_LMS_NUMS) // 2): 104 | # array of x,y coordinates for the left eye reference point 105 | eye_pts_l[i] = landmarks[self.EYES_LMS_NUMS[i], :2] 106 | # array of x,y coordinates for the right eye reference point 107 | eye_pts_r[i] = landmarks[self.EYES_LMS_NUMS[i + 6], :2] 108 | 109 | # computing the left eye EAR score 110 | ear_left = self._calc_EAR_eye(eye_pts_l) 111 | # computing the right eye EAR score 112 | ear_right = self._calc_EAR_eye(eye_pts_r) 113 | 114 | # computing the average EAR score 115 | ear_avg = (ear_left + ear_right) / 2 116 | 117 | return ear_avg 118 | 119 | @staticmethod 120 | def _calc_1eye_score(landmarks, eye_lms_nums, eye_iris_num, frame_size, frame): 121 | """Gets each eye score and its picture.""" 122 | iris = landmarks[eye_iris_num, :2] 123 | 124 | eye_x_min = landmarks[eye_lms_nums, 0].min() 125 | eye_y_min = landmarks[eye_lms_nums, 1].min() 126 | eye_x_max = landmarks[eye_lms_nums, 0].max() 127 | eye_y_max = landmarks[eye_lms_nums, 1].max() 128 | 129 | eye_center = np.array( 130 | ((eye_x_min + eye_x_max) / 2, (eye_y_min + eye_y_max) / 2) 131 | ) 132 | 133 | eye_gaze_score = LA.norm(iris - eye_center) / eye_center[0] 134 | 135 | eye_x_min_frame = int(eye_x_min * frame_size[0]) 136 | eye_y_min_frame = int(eye_y_min * frame_size[1]) 137 | eye_x_max_frame = int(eye_x_max * frame_size[0]) 138 | eye_y_max_frame = int(eye_y_max * frame_size[1]) 139 | 140 | eye = frame[eye_y_min_frame:eye_y_max_frame, eye_x_min_frame:eye_x_max_frame] 141 | 142 | return eye_gaze_score, eye 143 | 144 | def get_Gaze_Score(self, frame, landmarks, frame_size): 145 | """ 146 | Computes the average Gaze Score for the eyes 147 | The Gaze Score is the mean of the l2 norm (euclidean distance) between the center point of the Eye ROI 148 | (eye bounding box) and the center of the eye-pupil 149 | 150 | Parameters 151 | ---------- 152 | frame: numpy array 153 | Frame/image in which the eyes keypoints are found 154 | landmarks: numpy array 155 | List of 478 face mesh keypoints of the face 156 | 157 | Returns 158 | -------- 159 | avg_gaze_score: float 160 | If successful, returns the float gaze score 161 | If unsuccessful, returns None 162 | 163 | """ 164 | 165 | left_gaze_score, left_eye = self._calc_1eye_score( 166 | landmarks, self.EYES_LMS_NUMS[:6], self.LEFT_IRIS_NUM, frame_size, frame 167 | ) 168 | right_gaze_score, right_eye = self._calc_1eye_score( 169 | landmarks, self.EYES_LMS_NUMS[6:], self.RIGHT_IRIS_NUM, frame_size, frame 170 | ) 171 | 172 | # if show_processing is True, shows the eyes ROI 173 | # TODO: show iris and distance from the center of the eye 174 | 175 | # computes the average gaze score for the 2 eyes 176 | avg_gaze_score = (left_gaze_score + right_gaze_score) / 2 177 | 178 | if self.show_processing and (left_eye is not None) and (right_eye is not None): 179 | left_eye = resize(left_eye, 1000) 180 | right_eye = resize(right_eye, 1000) 181 | cv2.imshow("left eye", left_eye) 182 | cv2.imshow("right eye", right_eye) 183 | 184 | return avg_gaze_score 185 | -------------------------------------------------------------------------------- /driver_state_detection/face_geometry.py: -------------------------------------------------------------------------------- 1 | # Many parts taken from the cpp implementation from github.com/google/mediapipe 2 | # 3 | # Copyright 2020 The MediaPipe Authors. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import numpy as np 18 | 19 | canonical_metric_landmarks = np.array( 20 | [ 21 | 0.000000, 22 | -3.406404, 23 | 5.979507, 24 | 0.499977, 25 | 0.652534, 26 | 0.000000, 27 | -1.126865, 28 | 7.475604, 29 | 0.500026, 30 | 0.547487, 31 | 0.000000, 32 | -2.089024, 33 | 6.058267, 34 | 0.499974, 35 | 0.602372, 36 | -0.463928, 37 | 0.955357, 38 | 6.633583, 39 | 0.482113, 40 | 0.471979, 41 | 0.000000, 42 | -0.463170, 43 | 7.586580, 44 | 0.500151, 45 | 0.527156, 46 | 0.000000, 47 | 0.365669, 48 | 7.242870, 49 | 0.499910, 50 | 0.498253, 51 | 0.000000, 52 | 2.473255, 53 | 5.788627, 54 | 0.499523, 55 | 0.401062, 56 | -4.253081, 57 | 2.577646, 58 | 3.279702, 59 | 0.289712, 60 | 0.380764, 61 | 0.000000, 62 | 4.019042, 63 | 5.284764, 64 | 0.499955, 65 | 0.312398, 66 | 0.000000, 67 | 4.885979, 68 | 5.385258, 69 | 0.499987, 70 | 0.269919, 71 | 0.000000, 72 | 8.261778, 73 | 4.481535, 74 | 0.500023, 75 | 0.107050, 76 | 0.000000, 77 | -3.706811, 78 | 5.864924, 79 | 0.500023, 80 | 0.666234, 81 | 0.000000, 82 | -3.918301, 83 | 5.569430, 84 | 0.500016, 85 | 0.679224, 86 | 0.000000, 87 | -3.994436, 88 | 5.219482, 89 | 0.500023, 90 | 0.692348, 91 | 0.000000, 92 | -4.542400, 93 | 5.404754, 94 | 0.499977, 95 | 0.695278, 96 | 0.000000, 97 | -4.745577, 98 | 5.529457, 99 | 0.499977, 100 | 0.705934, 101 | 0.000000, 102 | -5.019567, 103 | 5.601448, 104 | 0.499977, 105 | 0.719385, 106 | 0.000000, 107 | -5.365123, 108 | 5.535441, 109 | 0.499977, 110 | 0.737019, 111 | 0.000000, 112 | -6.149624, 113 | 5.071372, 114 | 0.499968, 115 | 0.781371, 116 | 0.000000, 117 | -1.501095, 118 | 7.112196, 119 | 0.499816, 120 | 0.562981, 121 | -0.416106, 122 | -1.466449, 123 | 6.447657, 124 | 0.473773, 125 | 0.573910, 126 | -7.087960, 127 | 5.434801, 128 | 0.099620, 129 | 0.104907, 130 | 0.254141, 131 | -2.628639, 132 | 2.035898, 133 | 3.848121, 134 | 0.365930, 135 | 0.409576, 136 | -3.198363, 137 | 1.985815, 138 | 3.796952, 139 | 0.338758, 140 | 0.413025, 141 | -3.775151, 142 | 2.039402, 143 | 3.646194, 144 | 0.311120, 145 | 0.409460, 146 | -4.465819, 147 | 2.422950, 148 | 3.155168, 149 | 0.274658, 150 | 0.389131, 151 | -2.164289, 152 | 2.189867, 153 | 3.851822, 154 | 0.393362, 155 | 0.403706, 156 | -3.208229, 157 | 3.223926, 158 | 4.115822, 159 | 0.345234, 160 | 0.344011, 161 | -2.673803, 162 | 3.205337, 163 | 4.092203, 164 | 0.370094, 165 | 0.346076, 166 | -3.745193, 167 | 3.165286, 168 | 3.972409, 169 | 0.319322, 170 | 0.347265, 171 | -4.161018, 172 | 3.059069, 173 | 3.719554, 174 | 0.297903, 175 | 0.353591, 176 | -5.062006, 177 | 1.934418, 178 | 2.776093, 179 | 0.247792, 180 | 0.410810, 181 | -2.266659, 182 | -7.425768, 183 | 4.389812, 184 | 0.396889, 185 | 0.842755, 186 | -4.445859, 187 | 2.663991, 188 | 3.173422, 189 | 0.280098, 190 | 0.375600, 191 | -7.214530, 192 | 2.263009, 193 | 0.073150, 194 | 0.106310, 195 | 0.399956, 196 | -5.799793, 197 | 2.349546, 198 | 2.204059, 199 | 0.209925, 200 | 0.391353, 201 | -2.844939, 202 | -0.720868, 203 | 4.433130, 204 | 0.355808, 205 | 0.534406, 206 | -0.711452, 207 | -3.329355, 208 | 5.877044, 209 | 0.471751, 210 | 0.650404, 211 | -0.606033, 212 | -3.924562, 213 | 5.444923, 214 | 0.474155, 215 | 0.680192, 216 | -1.431615, 217 | -3.500953, 218 | 5.496189, 219 | 0.439785, 220 | 0.657229, 221 | -1.914910, 222 | -3.803146, 223 | 5.028930, 224 | 0.414617, 225 | 0.666541, 226 | -1.131043, 227 | -3.973937, 228 | 5.189648, 229 | 0.450374, 230 | 0.680861, 231 | -1.563548, 232 | -4.082763, 233 | 4.842263, 234 | 0.428771, 235 | 0.682691, 236 | -2.650112, 237 | -5.003649, 238 | 4.188483, 239 | 0.374971, 240 | 0.727805, 241 | -0.427049, 242 | -1.094134, 243 | 7.360529, 244 | 0.486717, 245 | 0.547629, 246 | -0.496396, 247 | -0.475659, 248 | 7.440358, 249 | 0.485301, 250 | 0.527395, 251 | -5.253307, 252 | 3.881582, 253 | 3.363159, 254 | 0.257765, 255 | 0.314490, 256 | -1.718698, 257 | 0.974609, 258 | 4.558359, 259 | 0.401223, 260 | 0.455172, 261 | -1.608635, 262 | -0.942516, 263 | 5.814193, 264 | 0.429819, 265 | 0.548615, 266 | -1.651267, 267 | -0.610868, 268 | 5.581319, 269 | 0.421352, 270 | 0.533741, 271 | -4.765501, 272 | -0.701554, 273 | 3.534632, 274 | 0.276896, 275 | 0.532057, 276 | -0.478306, 277 | 0.295766, 278 | 7.101013, 279 | 0.483370, 280 | 0.499587, 281 | -3.734964, 282 | 4.508230, 283 | 4.550454, 284 | 0.337212, 285 | 0.282883, 286 | -4.588603, 287 | 4.302037, 288 | 4.048484, 289 | 0.296392, 290 | 0.293243, 291 | -6.279331, 292 | 6.615427, 293 | 1.425850, 294 | 0.169295, 295 | 0.193814, 296 | -1.220941, 297 | 4.142165, 298 | 5.106035, 299 | 0.447580, 300 | 0.302610, 301 | -2.193489, 302 | 3.100317, 303 | 4.000575, 304 | 0.392390, 305 | 0.353888, 306 | -3.102642, 307 | -4.352984, 308 | 4.095905, 309 | 0.354490, 310 | 0.696784, 311 | -6.719682, 312 | -4.788645, 313 | -1.745401, 314 | 0.067305, 315 | 0.730105, 316 | -1.193824, 317 | -1.306795, 318 | 5.737747, 319 | 0.442739, 320 | 0.572826, 321 | -0.729766, 322 | -1.593712, 323 | 5.833208, 324 | 0.457098, 325 | 0.584792, 326 | -2.456206, 327 | -4.342621, 328 | 4.283884, 329 | 0.381974, 330 | 0.694711, 331 | -2.204823, 332 | -4.304508, 333 | 4.162499, 334 | 0.392389, 335 | 0.694203, 336 | -4.985894, 337 | 4.802461, 338 | 3.751977, 339 | 0.277076, 340 | 0.271932, 341 | -1.592294, 342 | -1.257709, 343 | 5.456949, 344 | 0.422552, 345 | 0.563233, 346 | -2.644548, 347 | 4.524654, 348 | 4.921559, 349 | 0.385919, 350 | 0.281364, 351 | -2.760292, 352 | 5.100971, 353 | 5.015990, 354 | 0.383103, 355 | 0.255840, 356 | -3.523964, 357 | 8.005976, 358 | 3.729163, 359 | 0.331431, 360 | 0.119714, 361 | -5.599763, 362 | 5.715470, 363 | 2.724259, 364 | 0.229924, 365 | 0.232003, 366 | -3.063932, 367 | 6.566144, 368 | 4.529981, 369 | 0.364501, 370 | 0.189114, 371 | -5.720968, 372 | 4.254584, 373 | 2.830852, 374 | 0.229622, 375 | 0.299541, 376 | -6.374393, 377 | 4.785590, 378 | 1.591691, 379 | 0.173287, 380 | 0.278748, 381 | -0.672728, 382 | -3.688016, 383 | 5.737804, 384 | 0.472879, 385 | 0.666198, 386 | -1.262560, 387 | -3.787691, 388 | 5.417779, 389 | 0.446828, 390 | 0.668527, 391 | -1.732553, 392 | -3.952767, 393 | 5.000579, 394 | 0.422762, 395 | 0.673890, 396 | -1.043625, 397 | -1.464973, 398 | 5.662455, 399 | 0.445308, 400 | 0.580066, 401 | -2.321234, 402 | -4.329069, 403 | 4.258156, 404 | 0.388103, 405 | 0.693961, 406 | -2.056846, 407 | -4.477671, 408 | 4.520883, 409 | 0.403039, 410 | 0.706540, 411 | -2.153084, 412 | -4.276322, 413 | 4.038093, 414 | 0.403629, 415 | 0.693953, 416 | -0.946874, 417 | -1.035249, 418 | 6.512274, 419 | 0.460042, 420 | 0.557139, 421 | -1.469132, 422 | -4.036351, 423 | 4.604908, 424 | 0.431158, 425 | 0.692366, 426 | -1.024340, 427 | -3.989851, 428 | 4.926693, 429 | 0.452182, 430 | 0.692366, 431 | -0.533422, 432 | -3.993222, 433 | 5.138202, 434 | 0.475387, 435 | 0.692366, 436 | -0.769720, 437 | -6.095394, 438 | 4.985883, 439 | 0.465828, 440 | 0.779190, 441 | -0.699606, 442 | -5.291850, 443 | 5.448304, 444 | 0.472329, 445 | 0.736226, 446 | -0.669687, 447 | -4.949770, 448 | 5.509612, 449 | 0.473087, 450 | 0.717857, 451 | -0.630947, 452 | -4.695101, 453 | 5.449371, 454 | 0.473122, 455 | 0.704626, 456 | -0.583218, 457 | -4.517982, 458 | 5.339869, 459 | 0.473033, 460 | 0.695278, 461 | -1.537170, 462 | -4.423206, 463 | 4.745470, 464 | 0.427942, 465 | 0.695278, 466 | -1.615600, 467 | -4.475942, 468 | 4.813632, 469 | 0.426479, 470 | 0.703540, 471 | -1.729053, 472 | -4.618680, 473 | 4.854463, 474 | 0.423162, 475 | 0.711846, 476 | -1.838624, 477 | -4.828746, 478 | 4.823737, 479 | 0.418309, 480 | 0.720063, 481 | -2.368250, 482 | -3.106237, 483 | 4.868096, 484 | 0.390095, 485 | 0.639573, 486 | -7.542244, 487 | -1.049282, 488 | -2.431321, 489 | 0.013954, 490 | 0.560034, 491 | 0.000000, 492 | -1.724003, 493 | 6.601390, 494 | 0.499914, 495 | 0.580147, 496 | -1.826614, 497 | -4.399531, 498 | 4.399021, 499 | 0.413200, 500 | 0.695400, 501 | -1.929558, 502 | -4.411831, 503 | 4.497052, 504 | 0.409626, 505 | 0.701823, 506 | -0.597442, 507 | -2.013686, 508 | 5.866456, 509 | 0.468080, 510 | 0.601535, 511 | -1.405627, 512 | -1.714196, 513 | 5.241087, 514 | 0.422729, 515 | 0.585985, 516 | -0.662449, 517 | -1.819321, 518 | 5.863759, 519 | 0.463080, 520 | 0.593784, 521 | -2.342340, 522 | 0.572222, 523 | 4.294303, 524 | 0.372120, 525 | 0.473414, 526 | -3.327324, 527 | 0.104863, 528 | 4.113860, 529 | 0.334562, 530 | 0.496073, 531 | -1.726175, 532 | -0.919165, 533 | 5.273355, 534 | 0.411671, 535 | 0.546965, 536 | -5.133204, 537 | 7.485602, 538 | 2.660442, 539 | 0.242176, 540 | 0.147676, 541 | -4.538641, 542 | 6.319907, 543 | 3.683424, 544 | 0.290777, 545 | 0.201446, 546 | -3.986562, 547 | 5.109487, 548 | 4.466315, 549 | 0.327338, 550 | 0.256527, 551 | -2.169681, 552 | -5.440433, 553 | 4.455874, 554 | 0.399510, 555 | 0.748921, 556 | -1.395634, 557 | 5.011963, 558 | 5.316032, 559 | 0.441728, 560 | 0.261676, 561 | -1.619500, 562 | 6.599217, 563 | 4.921106, 564 | 0.429765, 565 | 0.187834, 566 | -1.891399, 567 | 8.236377, 568 | 4.274997, 569 | 0.412198, 570 | 0.108901, 571 | -4.195832, 572 | 2.235205, 573 | 3.375099, 574 | 0.288955, 575 | 0.398952, 576 | -5.733342, 577 | 1.411738, 578 | 2.431726, 579 | 0.218937, 580 | 0.435411, 581 | -1.859887, 582 | 2.355757, 583 | 3.843181, 584 | 0.412782, 585 | 0.398970, 586 | -4.988612, 587 | 3.074654, 588 | 3.083858, 589 | 0.257135, 590 | 0.355440, 591 | -1.303263, 592 | 1.416453, 593 | 4.831091, 594 | 0.427685, 595 | 0.437961, 596 | -1.305757, 597 | -0.672779, 598 | 6.415959, 599 | 0.448340, 600 | 0.536936, 601 | -6.465170, 602 | 0.937119, 603 | 1.689873, 604 | 0.178560, 605 | 0.457554, 606 | -5.258659, 607 | 0.945811, 608 | 2.974312, 609 | 0.247308, 610 | 0.457194, 611 | -4.432338, 612 | 0.722096, 613 | 3.522615, 614 | 0.286267, 615 | 0.467675, 616 | -3.300681, 617 | 0.861641, 618 | 3.872784, 619 | 0.332828, 620 | 0.460712, 621 | -2.430178, 622 | 1.131492, 623 | 4.039035, 624 | 0.368756, 625 | 0.447207, 626 | -1.820731, 627 | 1.467954, 628 | 4.224124, 629 | 0.398964, 630 | 0.432655, 631 | -0.563221, 632 | 2.307693, 633 | 5.566789, 634 | 0.476410, 635 | 0.405806, 636 | -6.338145, 637 | -0.529279, 638 | 1.881175, 639 | 0.189241, 640 | 0.523924, 641 | -5.587698, 642 | 3.208071, 643 | 2.687839, 644 | 0.228962, 645 | 0.348951, 646 | -0.242624, 647 | -1.462857, 648 | 7.071491, 649 | 0.490726, 650 | 0.562401, 651 | -1.611251, 652 | 0.339326, 653 | 4.895421, 654 | 0.404670, 655 | 0.485133, 656 | -7.743095, 657 | 2.364999, 658 | -2.005167, 659 | 0.019469, 660 | 0.401564, 661 | -1.391142, 662 | 1.851048, 663 | 4.448999, 664 | 0.426243, 665 | 0.420431, 666 | -1.785794, 667 | -0.978284, 668 | 4.850470, 669 | 0.396993, 670 | 0.548797, 671 | -4.670959, 672 | 2.664461, 673 | 3.084075, 674 | 0.266470, 675 | 0.376977, 676 | -1.333970, 677 | -0.283761, 678 | 6.097047, 679 | 0.439121, 680 | 0.518958, 681 | -7.270895, 682 | -2.890917, 683 | -2.252455, 684 | 0.032314, 685 | 0.644357, 686 | -1.856432, 687 | 2.585245, 688 | 3.757904, 689 | 0.419054, 690 | 0.387155, 691 | -0.923388, 692 | 0.073076, 693 | 6.671944, 694 | 0.462783, 695 | 0.505747, 696 | -5.000589, 697 | -6.135128, 698 | 1.892523, 699 | 0.238979, 700 | 0.779745, 701 | -5.085276, 702 | -7.178590, 703 | 0.714711, 704 | 0.198221, 705 | 0.831938, 706 | -7.159291, 707 | -0.811820, 708 | -0.072044, 709 | 0.107550, 710 | 0.540755, 711 | -5.843051, 712 | -5.248023, 713 | 0.924091, 714 | 0.183610, 715 | 0.740257, 716 | -6.847258, 717 | 3.662916, 718 | 0.724695, 719 | 0.134410, 720 | 0.333683, 721 | -2.412942, 722 | -8.258853, 723 | 4.119213, 724 | 0.385764, 725 | 0.883154, 726 | -0.179909, 727 | -1.689864, 728 | 6.573301, 729 | 0.490967, 730 | 0.579378, 731 | -2.103655, 732 | -0.163946, 733 | 4.566119, 734 | 0.382385, 735 | 0.508573, 736 | -6.407571, 737 | 2.236021, 738 | 1.560843, 739 | 0.174399, 740 | 0.397671, 741 | -3.670075, 742 | 2.360153, 743 | 3.635230, 744 | 0.318785, 745 | 0.396235, 746 | -3.177186, 747 | 2.294265, 748 | 3.775704, 749 | 0.343364, 750 | 0.400597, 751 | -2.196121, 752 | -4.598322, 753 | 4.479786, 754 | 0.396100, 755 | 0.710217, 756 | -6.234883, 757 | -1.944430, 758 | 1.663542, 759 | 0.187885, 760 | 0.588538, 761 | -1.292924, 762 | -9.295920, 763 | 4.094063, 764 | 0.430987, 765 | 0.944065, 766 | -3.210651, 767 | -8.533278, 768 | 2.802001, 769 | 0.318993, 770 | 0.898285, 771 | -4.068926, 772 | -7.993109, 773 | 1.925119, 774 | 0.266248, 775 | 0.869701, 776 | 0.000000, 777 | 6.545390, 778 | 5.027311, 779 | 0.500023, 780 | 0.190576, 781 | 0.000000, 782 | -9.403378, 783 | 4.264492, 784 | 0.499977, 785 | 0.954453, 786 | -2.724032, 787 | 2.315802, 788 | 3.777151, 789 | 0.366170, 790 | 0.398822, 791 | -2.288460, 792 | 2.398891, 793 | 3.697603, 794 | 0.393207, 795 | 0.395537, 796 | -1.998311, 797 | 2.496547, 798 | 3.689148, 799 | 0.410373, 800 | 0.391080, 801 | -6.130040, 802 | 3.399261, 803 | 2.038516, 804 | 0.194993, 805 | 0.342102, 806 | -2.288460, 807 | 2.886504, 808 | 3.775031, 809 | 0.388665, 810 | 0.362284, 811 | -2.724032, 812 | 2.961810, 813 | 3.871767, 814 | 0.365962, 815 | 0.355971, 816 | -3.177186, 817 | 2.964136, 818 | 3.876973, 819 | 0.343364, 820 | 0.355357, 821 | -3.670075, 822 | 2.927714, 823 | 3.724325, 824 | 0.318785, 825 | 0.358340, 826 | -4.018389, 827 | 2.857357, 828 | 3.482983, 829 | 0.301415, 830 | 0.363156, 831 | -7.555811, 832 | 4.106811, 833 | -0.991917, 834 | 0.058133, 835 | 0.319076, 836 | -4.018389, 837 | 2.483695, 838 | 3.440898, 839 | 0.301415, 840 | 0.387449, 841 | 0.000000, 842 | -2.521945, 843 | 5.932265, 844 | 0.499988, 845 | 0.618434, 846 | -1.776217, 847 | -2.683946, 848 | 5.213116, 849 | 0.415838, 850 | 0.624196, 851 | -1.222237, 852 | -1.182444, 853 | 5.952465, 854 | 0.445682, 855 | 0.566077, 856 | -0.731493, 857 | -2.536683, 858 | 5.815343, 859 | 0.465844, 860 | 0.620641, 861 | 0.000000, 862 | 3.271027, 863 | 5.236015, 864 | 0.499923, 865 | 0.351524, 866 | -4.135272, 867 | -6.996638, 868 | 2.671970, 869 | 0.288719, 870 | 0.819946, 871 | -3.311811, 872 | -7.660815, 873 | 3.382963, 874 | 0.335279, 875 | 0.852820, 876 | -1.313701, 877 | -8.639995, 878 | 4.702456, 879 | 0.440512, 880 | 0.902419, 881 | -5.940524, 882 | -6.223629, 883 | -0.631468, 884 | 0.128294, 885 | 0.791941, 886 | -1.998311, 887 | 2.743838, 888 | 3.744030, 889 | 0.408772, 890 | 0.373894, 891 | -0.901447, 892 | 1.236992, 893 | 5.754256, 894 | 0.455607, 895 | 0.451801, 896 | 0.000000, 897 | -8.765243, 898 | 4.891441, 899 | 0.499877, 900 | 0.908990, 901 | -2.308977, 902 | -8.974196, 903 | 3.609070, 904 | 0.375437, 905 | 0.924192, 906 | -6.954154, 907 | -2.439843, 908 | -0.131163, 909 | 0.114210, 910 | 0.615022, 911 | -1.098819, 912 | -4.458788, 913 | 5.120727, 914 | 0.448662, 915 | 0.695278, 916 | -1.181124, 917 | -4.579996, 918 | 5.189564, 919 | 0.448020, 920 | 0.704632, 921 | -1.255818, 922 | -4.787901, 923 | 5.237051, 924 | 0.447112, 925 | 0.715808, 926 | -1.325085, 927 | -5.106507, 928 | 5.205010, 929 | 0.444832, 930 | 0.730794, 931 | -1.546388, 932 | -5.819392, 933 | 4.757893, 934 | 0.430012, 935 | 0.766809, 936 | -1.953754, 937 | -4.183892, 938 | 4.431713, 939 | 0.406787, 940 | 0.685673, 941 | -2.117802, 942 | -4.137093, 943 | 4.555096, 944 | 0.400738, 945 | 0.681069, 946 | -2.285339, 947 | -4.051196, 948 | 4.582438, 949 | 0.392400, 950 | 0.677703, 951 | -2.850160, 952 | -3.665720, 953 | 4.484994, 954 | 0.367856, 955 | 0.663919, 956 | -5.278538, 957 | -2.238942, 958 | 2.861224, 959 | 0.247923, 960 | 0.601333, 961 | -0.946709, 962 | 1.907628, 963 | 5.196779, 964 | 0.452770, 965 | 0.420850, 966 | -1.314173, 967 | 3.104912, 968 | 4.231404, 969 | 0.436392, 970 | 0.359887, 971 | -1.780000, 972 | 2.860000, 973 | 3.881555, 974 | 0.416164, 975 | 0.368714, 976 | -1.845110, 977 | -4.098880, 978 | 4.247264, 979 | 0.413386, 980 | 0.692366, 981 | -5.436187, 982 | -4.030482, 983 | 2.109852, 984 | 0.228018, 985 | 0.683572, 986 | -0.766444, 987 | 3.182131, 988 | 4.861453, 989 | 0.468268, 990 | 0.352671, 991 | -1.938616, 992 | -6.614410, 993 | 4.521085, 994 | 0.411362, 995 | 0.804327, 996 | 0.000000, 997 | 1.059413, 998 | 6.774605, 999 | 0.499989, 1000 | 0.469825, 1001 | -0.516573, 1002 | 1.583572, 1003 | 6.148363, 1004 | 0.479154, 1005 | 0.442654, 1006 | 0.000000, 1007 | 1.728369, 1008 | 6.316750, 1009 | 0.499974, 1010 | 0.439637, 1011 | -1.246815, 1012 | 0.230297, 1013 | 5.681036, 1014 | 0.432112, 1015 | 0.493589, 1016 | 0.000000, 1017 | -7.942194, 1018 | 5.181173, 1019 | 0.499886, 1020 | 0.866917, 1021 | 0.000000, 1022 | -6.991499, 1023 | 5.153478, 1024 | 0.499913, 1025 | 0.821729, 1026 | -0.997827, 1027 | -6.930921, 1028 | 4.979576, 1029 | 0.456549, 1030 | 0.819201, 1031 | -3.288807, 1032 | -5.382514, 1033 | 3.795752, 1034 | 0.344549, 1035 | 0.745439, 1036 | -2.311631, 1037 | -1.566237, 1038 | 4.590085, 1039 | 0.378909, 1040 | 0.574010, 1041 | -2.680250, 1042 | -6.111567, 1043 | 4.096152, 1044 | 0.374293, 1045 | 0.780185, 1046 | -3.832928, 1047 | -1.537326, 1048 | 4.137731, 1049 | 0.319688, 1050 | 0.570738, 1051 | -2.961860, 1052 | -2.274215, 1053 | 4.440943, 1054 | 0.357155, 1055 | 0.604270, 1056 | -4.386901, 1057 | -2.683286, 1058 | 3.643886, 1059 | 0.295284, 1060 | 0.621581, 1061 | -1.217295, 1062 | -7.834465, 1063 | 4.969286, 1064 | 0.447750, 1065 | 0.862477, 1066 | -1.542374, 1067 | -0.136843, 1068 | 5.201008, 1069 | 0.410986, 1070 | 0.508723, 1071 | -3.878377, 1072 | -6.041764, 1073 | 3.311079, 1074 | 0.313951, 1075 | 0.775308, 1076 | -3.084037, 1077 | -6.809842, 1078 | 3.814195, 1079 | 0.354128, 1080 | 0.812553, 1081 | -3.747321, 1082 | -4.503545, 1083 | 3.726453, 1084 | 0.324548, 1085 | 0.703993, 1086 | -6.094129, 1087 | -3.205991, 1088 | 1.473482, 1089 | 0.189096, 1090 | 0.646300, 1091 | -4.588995, 1092 | -4.728726, 1093 | 2.983221, 1094 | 0.279777, 1095 | 0.714658, 1096 | -6.583231, 1097 | -3.941269, 1098 | 0.070268, 1099 | 0.133823, 1100 | 0.682701, 1101 | -3.492580, 1102 | -3.195820, 1103 | 4.130198, 1104 | 0.336768, 1105 | 0.644733, 1106 | -1.255543, 1107 | 0.802341, 1108 | 5.307551, 1109 | 0.429884, 1110 | 0.466522, 1111 | -1.126122, 1112 | -0.933602, 1113 | 6.538785, 1114 | 0.455528, 1115 | 0.548623, 1116 | -1.443109, 1117 | -1.142774, 1118 | 5.905127, 1119 | 0.437114, 1120 | 0.558896, 1121 | -0.923043, 1122 | -0.529042, 1123 | 7.003423, 1124 | 0.467288, 1125 | 0.529925, 1126 | -1.755386, 1127 | 3.529117, 1128 | 4.327696, 1129 | 0.414712, 1130 | 0.335220, 1131 | -2.632589, 1132 | 3.713828, 1133 | 4.364629, 1134 | 0.377046, 1135 | 0.322778, 1136 | -3.388062, 1137 | 3.721976, 1138 | 4.309028, 1139 | 0.344108, 1140 | 0.320151, 1141 | -4.075766, 1142 | 3.675413, 1143 | 4.076063, 1144 | 0.312876, 1145 | 0.322332, 1146 | -4.622910, 1147 | 3.474691, 1148 | 3.646321, 1149 | 0.283526, 1150 | 0.333190, 1151 | -5.171755, 1152 | 2.535753, 1153 | 2.670867, 1154 | 0.241246, 1155 | 0.382786, 1156 | -7.297331, 1157 | 0.763172, 1158 | -0.048769, 1159 | 0.102986, 1160 | 0.468763, 1161 | -4.706828, 1162 | 1.651000, 1163 | 3.109532, 1164 | 0.267612, 1165 | 0.424560, 1166 | -4.071712, 1167 | 1.476821, 1168 | 3.476944, 1169 | 0.297879, 1170 | 0.433176, 1171 | -3.269817, 1172 | 1.470659, 1173 | 3.731945, 1174 | 0.333434, 1175 | 0.433878, 1176 | -2.527572, 1177 | 1.617311, 1178 | 3.865444, 1179 | 0.366427, 1180 | 0.426116, 1181 | -1.970894, 1182 | 1.858505, 1183 | 3.961782, 1184 | 0.396012, 1185 | 0.416696, 1186 | -1.579543, 1187 | 2.097941, 1188 | 4.084996, 1189 | 0.420121, 1190 | 0.410228, 1191 | -7.664182, 1192 | 0.673132, 1193 | -2.435867, 1194 | 0.007561, 1195 | 0.480777, 1196 | -1.397041, 1197 | -1.340139, 1198 | 5.630378, 1199 | 0.432949, 1200 | 0.569518, 1201 | -0.884838, 1202 | 0.658740, 1203 | 6.233232, 1204 | 0.458639, 1205 | 0.479089, 1206 | -0.767097, 1207 | -0.968035, 1208 | 7.077932, 1209 | 0.473466, 1210 | 0.545744, 1211 | -0.460213, 1212 | -1.334106, 1213 | 6.787447, 1214 | 0.476088, 1215 | 0.563830, 1216 | -0.748618, 1217 | -1.067994, 1218 | 6.798303, 1219 | 0.468472, 1220 | 0.555057, 1221 | -1.236408, 1222 | -1.585568, 1223 | 5.480490, 1224 | 0.433991, 1225 | 0.582362, 1226 | -0.387306, 1227 | -1.409990, 1228 | 6.957705, 1229 | 0.483518, 1230 | 0.562984, 1231 | -0.319925, 1232 | -1.607931, 1233 | 6.508676, 1234 | 0.482483, 1235 | 0.577849, 1236 | -1.639633, 1237 | 2.556298, 1238 | 3.863736, 1239 | 0.426450, 1240 | 0.389799, 1241 | -1.255645, 1242 | 2.467144, 1243 | 4.203800, 1244 | 0.438999, 1245 | 0.396495, 1246 | -1.031362, 1247 | 2.382663, 1248 | 4.615849, 1249 | 0.450067, 1250 | 0.400434, 1251 | -4.253081, 1252 | 2.772296, 1253 | 3.315305, 1254 | 0.289712, 1255 | 0.368253, 1256 | -4.530000, 1257 | 2.910000, 1258 | 3.339685, 1259 | 0.276670, 1260 | 0.363373, 1261 | 0.463928, 1262 | 0.955357, 1263 | 6.633583, 1264 | 0.517862, 1265 | 0.471948, 1266 | 4.253081, 1267 | 2.577646, 1268 | 3.279702, 1269 | 0.710288, 1270 | 0.380764, 1271 | 0.416106, 1272 | -1.466449, 1273 | 6.447657, 1274 | 0.526227, 1275 | 0.573910, 1276 | 7.087960, 1277 | 5.434801, 1278 | 0.099620, 1279 | 0.895093, 1280 | 0.254141, 1281 | 2.628639, 1282 | 2.035898, 1283 | 3.848121, 1284 | 0.634070, 1285 | 0.409576, 1286 | 3.198363, 1287 | 1.985815, 1288 | 3.796952, 1289 | 0.661242, 1290 | 0.413025, 1291 | 3.775151, 1292 | 2.039402, 1293 | 3.646194, 1294 | 0.688880, 1295 | 0.409460, 1296 | 4.465819, 1297 | 2.422950, 1298 | 3.155168, 1299 | 0.725342, 1300 | 0.389131, 1301 | 2.164289, 1302 | 2.189867, 1303 | 3.851822, 1304 | 0.606630, 1305 | 0.403705, 1306 | 3.208229, 1307 | 3.223926, 1308 | 4.115822, 1309 | 0.654766, 1310 | 0.344011, 1311 | 2.673803, 1312 | 3.205337, 1313 | 4.092203, 1314 | 0.629906, 1315 | 0.346076, 1316 | 3.745193, 1317 | 3.165286, 1318 | 3.972409, 1319 | 0.680678, 1320 | 0.347265, 1321 | 4.161018, 1322 | 3.059069, 1323 | 3.719554, 1324 | 0.702097, 1325 | 0.353591, 1326 | 5.062006, 1327 | 1.934418, 1328 | 2.776093, 1329 | 0.752212, 1330 | 0.410805, 1331 | 2.266659, 1332 | -7.425768, 1333 | 4.389812, 1334 | 0.602918, 1335 | 0.842863, 1336 | 4.445859, 1337 | 2.663991, 1338 | 3.173422, 1339 | 0.719902, 1340 | 0.375600, 1341 | 7.214530, 1342 | 2.263009, 1343 | 0.073150, 1344 | 0.893693, 1345 | 0.399960, 1346 | 5.799793, 1347 | 2.349546, 1348 | 2.204059, 1349 | 0.790082, 1350 | 0.391354, 1351 | 2.844939, 1352 | -0.720868, 1353 | 4.433130, 1354 | 0.643998, 1355 | 0.534488, 1356 | 0.711452, 1357 | -3.329355, 1358 | 5.877044, 1359 | 0.528249, 1360 | 0.650404, 1361 | 0.606033, 1362 | -3.924562, 1363 | 5.444923, 1364 | 0.525850, 1365 | 0.680191, 1366 | 1.431615, 1367 | -3.500953, 1368 | 5.496189, 1369 | 0.560215, 1370 | 0.657229, 1371 | 1.914910, 1372 | -3.803146, 1373 | 5.028930, 1374 | 0.585384, 1375 | 0.666541, 1376 | 1.131043, 1377 | -3.973937, 1378 | 5.189648, 1379 | 0.549626, 1380 | 0.680861, 1381 | 1.563548, 1382 | -4.082763, 1383 | 4.842263, 1384 | 0.571228, 1385 | 0.682692, 1386 | 2.650112, 1387 | -5.003649, 1388 | 4.188483, 1389 | 0.624852, 1390 | 0.728099, 1391 | 0.427049, 1392 | -1.094134, 1393 | 7.360529, 1394 | 0.513050, 1395 | 0.547282, 1396 | 0.496396, 1397 | -0.475659, 1398 | 7.440358, 1399 | 0.515097, 1400 | 0.527252, 1401 | 5.253307, 1402 | 3.881582, 1403 | 3.363159, 1404 | 0.742247, 1405 | 0.314507, 1406 | 1.718698, 1407 | 0.974609, 1408 | 4.558359, 1409 | 0.598631, 1410 | 0.454979, 1411 | 1.608635, 1412 | -0.942516, 1413 | 5.814193, 1414 | 0.570338, 1415 | 0.548575, 1416 | 1.651267, 1417 | -0.610868, 1418 | 5.581319, 1419 | 0.578632, 1420 | 0.533623, 1421 | 4.765501, 1422 | -0.701554, 1423 | 3.534632, 1424 | 0.723087, 1425 | 0.532054, 1426 | 0.478306, 1427 | 0.295766, 1428 | 7.101013, 1429 | 0.516446, 1430 | 0.499639, 1431 | 3.734964, 1432 | 4.508230, 1433 | 4.550454, 1434 | 0.662801, 1435 | 0.282918, 1436 | 4.588603, 1437 | 4.302037, 1438 | 4.048484, 1439 | 0.703624, 1440 | 0.293271, 1441 | 6.279331, 1442 | 6.615427, 1443 | 1.425850, 1444 | 0.830705, 1445 | 0.193814, 1446 | 1.220941, 1447 | 4.142165, 1448 | 5.106035, 1449 | 0.552386, 1450 | 0.302568, 1451 | 2.193489, 1452 | 3.100317, 1453 | 4.000575, 1454 | 0.607610, 1455 | 0.353888, 1456 | 3.102642, 1457 | -4.352984, 1458 | 4.095905, 1459 | 0.645429, 1460 | 0.696707, 1461 | 6.719682, 1462 | -4.788645, 1463 | -1.745401, 1464 | 0.932695, 1465 | 0.730105, 1466 | 1.193824, 1467 | -1.306795, 1468 | 5.737747, 1469 | 0.557261, 1470 | 0.572826, 1471 | 0.729766, 1472 | -1.593712, 1473 | 5.833208, 1474 | 0.542902, 1475 | 0.584792, 1476 | 2.456206, 1477 | -4.342621, 1478 | 4.283884, 1479 | 0.618026, 1480 | 0.694711, 1481 | 2.204823, 1482 | -4.304508, 1483 | 4.162499, 1484 | 0.607591, 1485 | 0.694203, 1486 | 4.985894, 1487 | 4.802461, 1488 | 3.751977, 1489 | 0.722943, 1490 | 0.271963, 1491 | 1.592294, 1492 | -1.257709, 1493 | 5.456949, 1494 | 0.577414, 1495 | 0.563167, 1496 | 2.644548, 1497 | 4.524654, 1498 | 4.921559, 1499 | 0.614083, 1500 | 0.281387, 1501 | 2.760292, 1502 | 5.100971, 1503 | 5.015990, 1504 | 0.616907, 1505 | 0.255886, 1506 | 3.523964, 1507 | 8.005976, 1508 | 3.729163, 1509 | 0.668509, 1510 | 0.119914, 1511 | 5.599763, 1512 | 5.715470, 1513 | 2.724259, 1514 | 0.770092, 1515 | 0.232021, 1516 | 3.063932, 1517 | 6.566144, 1518 | 4.529981, 1519 | 0.635536, 1520 | 0.189249, 1521 | 5.720968, 1522 | 4.254584, 1523 | 2.830852, 1524 | 0.770391, 1525 | 0.299556, 1526 | 6.374393, 1527 | 4.785590, 1528 | 1.591691, 1529 | 0.826722, 1530 | 0.278755, 1531 | 0.672728, 1532 | -3.688016, 1533 | 5.737804, 1534 | 0.527121, 1535 | 0.666198, 1536 | 1.262560, 1537 | -3.787691, 1538 | 5.417779, 1539 | 0.553172, 1540 | 0.668527, 1541 | 1.732553, 1542 | -3.952767, 1543 | 5.000579, 1544 | 0.577238, 1545 | 0.673890, 1546 | 1.043625, 1547 | -1.464973, 1548 | 5.662455, 1549 | 0.554692, 1550 | 0.580066, 1551 | 2.321234, 1552 | -4.329069, 1553 | 4.258156, 1554 | 0.611897, 1555 | 0.693961, 1556 | 2.056846, 1557 | -4.477671, 1558 | 4.520883, 1559 | 0.596961, 1560 | 0.706540, 1561 | 2.153084, 1562 | -4.276322, 1563 | 4.038093, 1564 | 0.596371, 1565 | 0.693953, 1566 | 0.946874, 1567 | -1.035249, 1568 | 6.512274, 1569 | 0.539958, 1570 | 0.557139, 1571 | 1.469132, 1572 | -4.036351, 1573 | 4.604908, 1574 | 0.568842, 1575 | 0.692366, 1576 | 1.024340, 1577 | -3.989851, 1578 | 4.926693, 1579 | 0.547818, 1580 | 0.692366, 1581 | 0.533422, 1582 | -3.993222, 1583 | 5.138202, 1584 | 0.524613, 1585 | 0.692366, 1586 | 0.769720, 1587 | -6.095394, 1588 | 4.985883, 1589 | 0.534090, 1590 | 0.779141, 1591 | 0.699606, 1592 | -5.291850, 1593 | 5.448304, 1594 | 0.527671, 1595 | 0.736226, 1596 | 0.669687, 1597 | -4.949770, 1598 | 5.509612, 1599 | 0.526913, 1600 | 0.717857, 1601 | 0.630947, 1602 | -4.695101, 1603 | 5.449371, 1604 | 0.526878, 1605 | 0.704626, 1606 | 0.583218, 1607 | -4.517982, 1608 | 5.339869, 1609 | 0.526967, 1610 | 0.695278, 1611 | 1.537170, 1612 | -4.423206, 1613 | 4.745470, 1614 | 0.572058, 1615 | 0.695278, 1616 | 1.615600, 1617 | -4.475942, 1618 | 4.813632, 1619 | 0.573521, 1620 | 0.703540, 1621 | 1.729053, 1622 | -4.618680, 1623 | 4.854463, 1624 | 0.576838, 1625 | 0.711846, 1626 | 1.838624, 1627 | -4.828746, 1628 | 4.823737, 1629 | 0.581691, 1630 | 0.720063, 1631 | 2.368250, 1632 | -3.106237, 1633 | 4.868096, 1634 | 0.609945, 1635 | 0.639910, 1636 | 7.542244, 1637 | -1.049282, 1638 | -2.431321, 1639 | 0.986046, 1640 | 0.560034, 1641 | 1.826614, 1642 | -4.399531, 1643 | 4.399021, 1644 | 0.586800, 1645 | 0.695400, 1646 | 1.929558, 1647 | -4.411831, 1648 | 4.497052, 1649 | 0.590372, 1650 | 0.701823, 1651 | 0.597442, 1652 | -2.013686, 1653 | 5.866456, 1654 | 0.531915, 1655 | 0.601537, 1656 | 1.405627, 1657 | -1.714196, 1658 | 5.241087, 1659 | 0.577268, 1660 | 0.585935, 1661 | 0.662449, 1662 | -1.819321, 1663 | 5.863759, 1664 | 0.536915, 1665 | 0.593786, 1666 | 2.342340, 1667 | 0.572222, 1668 | 4.294303, 1669 | 0.627543, 1670 | 0.473352, 1671 | 3.327324, 1672 | 0.104863, 1673 | 4.113860, 1674 | 0.665586, 1675 | 0.495951, 1676 | 1.726175, 1677 | -0.919165, 1678 | 5.273355, 1679 | 0.588354, 1680 | 0.546862, 1681 | 5.133204, 1682 | 7.485602, 1683 | 2.660442, 1684 | 0.757824, 1685 | 0.147676, 1686 | 4.538641, 1687 | 6.319907, 1688 | 3.683424, 1689 | 0.709250, 1690 | 0.201508, 1691 | 3.986562, 1692 | 5.109487, 1693 | 4.466315, 1694 | 0.672684, 1695 | 0.256581, 1696 | 2.169681, 1697 | -5.440433, 1698 | 4.455874, 1699 | 0.600409, 1700 | 0.749005, 1701 | 1.395634, 1702 | 5.011963, 1703 | 5.316032, 1704 | 0.558266, 1705 | 0.261672, 1706 | 1.619500, 1707 | 6.599217, 1708 | 4.921106, 1709 | 0.570304, 1710 | 0.187871, 1711 | 1.891399, 1712 | 8.236377, 1713 | 4.274997, 1714 | 0.588166, 1715 | 0.109044, 1716 | 4.195832, 1717 | 2.235205, 1718 | 3.375099, 1719 | 0.711045, 1720 | 0.398952, 1721 | 5.733342, 1722 | 1.411738, 1723 | 2.431726, 1724 | 0.781070, 1725 | 0.435405, 1726 | 1.859887, 1727 | 2.355757, 1728 | 3.843181, 1729 | 0.587247, 1730 | 0.398932, 1731 | 4.988612, 1732 | 3.074654, 1733 | 3.083858, 1734 | 0.742870, 1735 | 0.355446, 1736 | 1.303263, 1737 | 1.416453, 1738 | 4.831091, 1739 | 0.572156, 1740 | 0.437652, 1741 | 1.305757, 1742 | -0.672779, 1743 | 6.415959, 1744 | 0.551868, 1745 | 0.536570, 1746 | 6.465170, 1747 | 0.937119, 1748 | 1.689873, 1749 | 0.821442, 1750 | 0.457556, 1751 | 5.258659, 1752 | 0.945811, 1753 | 2.974312, 1754 | 0.752702, 1755 | 0.457182, 1756 | 4.432338, 1757 | 0.722096, 1758 | 3.522615, 1759 | 0.713757, 1760 | 0.467627, 1761 | 3.300681, 1762 | 0.861641, 1763 | 3.872784, 1764 | 0.667113, 1765 | 0.460673, 1766 | 2.430178, 1767 | 1.131492, 1768 | 4.039035, 1769 | 0.631101, 1770 | 0.447154, 1771 | 1.820731, 1772 | 1.467954, 1773 | 4.224124, 1774 | 0.600862, 1775 | 0.432473, 1776 | 0.563221, 1777 | 2.307693, 1778 | 5.566789, 1779 | 0.523481, 1780 | 0.405627, 1781 | 6.338145, 1782 | -0.529279, 1783 | 1.881175, 1784 | 0.810748, 1785 | 0.523926, 1786 | 5.587698, 1787 | 3.208071, 1788 | 2.687839, 1789 | 0.771046, 1790 | 0.348959, 1791 | 0.242624, 1792 | -1.462857, 1793 | 7.071491, 1794 | 0.509127, 1795 | 0.562718, 1796 | 1.611251, 1797 | 0.339326, 1798 | 4.895421, 1799 | 0.595293, 1800 | 0.485024, 1801 | 7.743095, 1802 | 2.364999, 1803 | -2.005167, 1804 | 0.980531, 1805 | 0.401564, 1806 | 1.391142, 1807 | 1.851048, 1808 | 4.448999, 1809 | 0.573500, 1810 | 0.420000, 1811 | 1.785794, 1812 | -0.978284, 1813 | 4.850470, 1814 | 0.602995, 1815 | 0.548688, 1816 | 4.670959, 1817 | 2.664461, 1818 | 3.084075, 1819 | 0.733530, 1820 | 0.376977, 1821 | 1.333970, 1822 | -0.283761, 1823 | 6.097047, 1824 | 0.560611, 1825 | 0.519017, 1826 | 7.270895, 1827 | -2.890917, 1828 | -2.252455, 1829 | 0.967686, 1830 | 0.644357, 1831 | 1.856432, 1832 | 2.585245, 1833 | 3.757904, 1834 | 0.580985, 1835 | 0.387160, 1836 | 0.923388, 1837 | 0.073076, 1838 | 6.671944, 1839 | 0.537728, 1840 | 0.505385, 1841 | 5.000589, 1842 | -6.135128, 1843 | 1.892523, 1844 | 0.760966, 1845 | 0.779753, 1846 | 5.085276, 1847 | -7.178590, 1848 | 0.714711, 1849 | 0.801779, 1850 | 0.831938, 1851 | 7.159291, 1852 | -0.811820, 1853 | -0.072044, 1854 | 0.892441, 1855 | 0.540761, 1856 | 5.843051, 1857 | -5.248023, 1858 | 0.924091, 1859 | 0.816351, 1860 | 0.740260, 1861 | 6.847258, 1862 | 3.662916, 1863 | 0.724695, 1864 | 0.865595, 1865 | 0.333687, 1866 | 2.412942, 1867 | -8.258853, 1868 | 4.119213, 1869 | 0.614074, 1870 | 0.883246, 1871 | 0.179909, 1872 | -1.689864, 1873 | 6.573301, 1874 | 0.508953, 1875 | 0.579438, 1876 | 2.103655, 1877 | -0.163946, 1878 | 4.566119, 1879 | 0.617942, 1880 | 0.508316, 1881 | 6.407571, 1882 | 2.236021, 1883 | 1.560843, 1884 | 0.825608, 1885 | 0.397675, 1886 | 3.670075, 1887 | 2.360153, 1888 | 3.635230, 1889 | 0.681215, 1890 | 0.396235, 1891 | 3.177186, 1892 | 2.294265, 1893 | 3.775704, 1894 | 0.656636, 1895 | 0.400597, 1896 | 2.196121, 1897 | -4.598322, 1898 | 4.479786, 1899 | 0.603900, 1900 | 0.710217, 1901 | 6.234883, 1902 | -1.944430, 1903 | 1.663542, 1904 | 0.812086, 1905 | 0.588539, 1906 | 1.292924, 1907 | -9.295920, 1908 | 4.094063, 1909 | 0.568013, 1910 | 0.944565, 1911 | 3.210651, 1912 | -8.533278, 1913 | 2.802001, 1914 | 0.681008, 1915 | 0.898285, 1916 | 4.068926, 1917 | -7.993109, 1918 | 1.925119, 1919 | 0.733752, 1920 | 0.869701, 1921 | 2.724032, 1922 | 2.315802, 1923 | 3.777151, 1924 | 0.633830, 1925 | 0.398822, 1926 | 2.288460, 1927 | 2.398891, 1928 | 3.697603, 1929 | 0.606793, 1930 | 0.395537, 1931 | 1.998311, 1932 | 2.496547, 1933 | 3.689148, 1934 | 0.589660, 1935 | 0.391062, 1936 | 6.130040, 1937 | 3.399261, 1938 | 2.038516, 1939 | 0.805016, 1940 | 0.342108, 1941 | 2.288460, 1942 | 2.886504, 1943 | 3.775031, 1944 | 0.611335, 1945 | 0.362284, 1946 | 2.724032, 1947 | 2.961810, 1948 | 3.871767, 1949 | 0.634038, 1950 | 0.355971, 1951 | 3.177186, 1952 | 2.964136, 1953 | 3.876973, 1954 | 0.656636, 1955 | 0.355357, 1956 | 3.670075, 1957 | 2.927714, 1958 | 3.724325, 1959 | 0.681215, 1960 | 0.358340, 1961 | 4.018389, 1962 | 2.857357, 1963 | 3.482983, 1964 | 0.698585, 1965 | 0.363156, 1966 | 7.555811, 1967 | 4.106811, 1968 | -0.991917, 1969 | 0.941867, 1970 | 0.319076, 1971 | 4.018389, 1972 | 2.483695, 1973 | 3.440898, 1974 | 0.698585, 1975 | 0.387449, 1976 | 1.776217, 1977 | -2.683946, 1978 | 5.213116, 1979 | 0.584177, 1980 | 0.624107, 1981 | 1.222237, 1982 | -1.182444, 1983 | 5.952465, 1984 | 0.554318, 1985 | 0.566077, 1986 | 0.731493, 1987 | -2.536683, 1988 | 5.815343, 1989 | 0.534154, 1990 | 0.620640, 1991 | 4.135272, 1992 | -6.996638, 1993 | 2.671970, 1994 | 0.711218, 1995 | 0.819975, 1996 | 3.311811, 1997 | -7.660815, 1998 | 3.382963, 1999 | 0.664630, 2000 | 0.852871, 2001 | 1.313701, 2002 | -8.639995, 2003 | 4.702456, 2004 | 0.559100, 2005 | 0.902632, 2006 | 5.940524, 2007 | -6.223629, 2008 | -0.631468, 2009 | 0.871706, 2010 | 0.791941, 2011 | 1.998311, 2012 | 2.743838, 2013 | 3.744030, 2014 | 0.591234, 2015 | 0.373894, 2016 | 0.901447, 2017 | 1.236992, 2018 | 5.754256, 2019 | 0.544341, 2020 | 0.451584, 2021 | 2.308977, 2022 | -8.974196, 2023 | 3.609070, 2024 | 0.624563, 2025 | 0.924192, 2026 | 6.954154, 2027 | -2.439843, 2028 | -0.131163, 2029 | 0.885770, 2030 | 0.615029, 2031 | 1.098819, 2032 | -4.458788, 2033 | 5.120727, 2034 | 0.551338, 2035 | 0.695278, 2036 | 1.181124, 2037 | -4.579996, 2038 | 5.189564, 2039 | 0.551980, 2040 | 0.704632, 2041 | 1.255818, 2042 | -4.787901, 2043 | 5.237051, 2044 | 0.552888, 2045 | 0.715808, 2046 | 1.325085, 2047 | -5.106507, 2048 | 5.205010, 2049 | 0.555168, 2050 | 0.730794, 2051 | 1.546388, 2052 | -5.819392, 2053 | 4.757893, 2054 | 0.569944, 2055 | 0.767035, 2056 | 1.953754, 2057 | -4.183892, 2058 | 4.431713, 2059 | 0.593203, 2060 | 0.685676, 2061 | 2.117802, 2062 | -4.137093, 2063 | 4.555096, 2064 | 0.599262, 2065 | 0.681069, 2066 | 2.285339, 2067 | -4.051196, 2068 | 4.582438, 2069 | 0.607600, 2070 | 0.677703, 2071 | 2.850160, 2072 | -3.665720, 2073 | 4.484994, 2074 | 0.631938, 2075 | 0.663500, 2076 | 5.278538, 2077 | -2.238942, 2078 | 2.861224, 2079 | 0.752033, 2080 | 0.601315, 2081 | 0.946709, 2082 | 1.907628, 2083 | 5.196779, 2084 | 0.547226, 2085 | 0.420395, 2086 | 1.314173, 2087 | 3.104912, 2088 | 4.231404, 2089 | 0.563544, 2090 | 0.359828, 2091 | 1.780000, 2092 | 2.860000, 2093 | 3.881555, 2094 | 0.583841, 2095 | 0.368714, 2096 | 1.845110, 2097 | -4.098880, 2098 | 4.247264, 2099 | 0.586614, 2100 | 0.692366, 2101 | 5.436187, 2102 | -4.030482, 2103 | 2.109852, 2104 | 0.771915, 2105 | 0.683578, 2106 | 0.766444, 2107 | 3.182131, 2108 | 4.861453, 2109 | 0.531597, 2110 | 0.352483, 2111 | 1.938616, 2112 | -6.614410, 2113 | 4.521085, 2114 | 0.588371, 2115 | 0.804441, 2116 | 0.516573, 2117 | 1.583572, 2118 | 6.148363, 2119 | 0.520797, 2120 | 0.442565, 2121 | 1.246815, 2122 | 0.230297, 2123 | 5.681036, 2124 | 0.567985, 2125 | 0.493479, 2126 | 0.997827, 2127 | -6.930921, 2128 | 4.979576, 2129 | 0.543283, 2130 | 0.819255, 2131 | 3.288807, 2132 | -5.382514, 2133 | 3.795752, 2134 | 0.655317, 2135 | 0.745515, 2136 | 2.311631, 2137 | -1.566237, 2138 | 4.590085, 2139 | 0.621009, 2140 | 0.574018, 2141 | 2.680250, 2142 | -6.111567, 2143 | 4.096152, 2144 | 0.625560, 2145 | 0.780312, 2146 | 3.832928, 2147 | -1.537326, 2148 | 4.137731, 2149 | 0.680198, 2150 | 0.570719, 2151 | 2.961860, 2152 | -2.274215, 2153 | 4.440943, 2154 | 0.642764, 2155 | 0.604338, 2156 | 4.386901, 2157 | -2.683286, 2158 | 3.643886, 2159 | 0.704663, 2160 | 0.621530, 2161 | 1.217295, 2162 | -7.834465, 2163 | 4.969286, 2164 | 0.552012, 2165 | 0.862592, 2166 | 1.542374, 2167 | -0.136843, 2168 | 5.201008, 2169 | 0.589072, 2170 | 0.508637, 2171 | 3.878377, 2172 | -6.041764, 2173 | 3.311079, 2174 | 0.685945, 2175 | 0.775357, 2176 | 3.084037, 2177 | -6.809842, 2178 | 3.814195, 2179 | 0.645735, 2180 | 0.812640, 2181 | 3.747321, 2182 | -4.503545, 2183 | 3.726453, 2184 | 0.675343, 2185 | 0.703978, 2186 | 6.094129, 2187 | -3.205991, 2188 | 1.473482, 2189 | 0.810858, 2190 | 0.646305, 2191 | 4.588995, 2192 | -4.728726, 2193 | 2.983221, 2194 | 0.720122, 2195 | 0.714667, 2196 | 6.583231, 2197 | -3.941269, 2198 | 0.070268, 2199 | 0.866152, 2200 | 0.682705, 2201 | 3.492580, 2202 | -3.195820, 2203 | 4.130198, 2204 | 0.663187, 2205 | 0.644597, 2206 | 1.255543, 2207 | 0.802341, 2208 | 5.307551, 2209 | 0.570082, 2210 | 0.466326, 2211 | 1.126122, 2212 | -0.933602, 2213 | 6.538785, 2214 | 0.544562, 2215 | 0.548376, 2216 | 1.443109, 2217 | -1.142774, 2218 | 5.905127, 2219 | 0.562759, 2220 | 0.558785, 2221 | 0.923043, 2222 | -0.529042, 2223 | 7.003423, 2224 | 0.531987, 2225 | 0.530140, 2226 | 1.755386, 2227 | 3.529117, 2228 | 4.327696, 2229 | 0.585271, 2230 | 0.335177, 2231 | 2.632589, 2232 | 3.713828, 2233 | 4.364629, 2234 | 0.622953, 2235 | 0.322779, 2236 | 3.388062, 2237 | 3.721976, 2238 | 4.309028, 2239 | 0.655896, 2240 | 0.320163, 2241 | 4.075766, 2242 | 3.675413, 2243 | 4.076063, 2244 | 0.687132, 2245 | 0.322346, 2246 | 4.622910, 2247 | 3.474691, 2248 | 3.646321, 2249 | 0.716482, 2250 | 0.333201, 2251 | 5.171755, 2252 | 2.535753, 2253 | 2.670867, 2254 | 0.758757, 2255 | 0.382787, 2256 | 7.297331, 2257 | 0.763172, 2258 | -0.048769, 2259 | 0.897013, 2260 | 0.468769, 2261 | 4.706828, 2262 | 1.651000, 2263 | 3.109532, 2264 | 0.732392, 2265 | 0.424547, 2266 | 4.071712, 2267 | 1.476821, 2268 | 3.476944, 2269 | 0.702114, 2270 | 0.433163, 2271 | 3.269817, 2272 | 1.470659, 2273 | 3.731945, 2274 | 0.666525, 2275 | 0.433866, 2276 | 2.527572, 2277 | 1.617311, 2278 | 3.865444, 2279 | 0.633505, 2280 | 0.426088, 2281 | 1.970894, 2282 | 1.858505, 2283 | 3.961782, 2284 | 0.603876, 2285 | 0.416587, 2286 | 1.579543, 2287 | 2.097941, 2288 | 4.084996, 2289 | 0.579658, 2290 | 0.409945, 2291 | 7.664182, 2292 | 0.673132, 2293 | -2.435867, 2294 | 0.992440, 2295 | 0.480777, 2296 | 1.397041, 2297 | -1.340139, 2298 | 5.630378, 2299 | 0.567192, 2300 | 0.569420, 2301 | 0.884838, 2302 | 0.658740, 2303 | 6.233232, 2304 | 0.541366, 2305 | 0.478899, 2306 | 0.767097, 2307 | -0.968035, 2308 | 7.077932, 2309 | 0.526564, 2310 | 0.546118, 2311 | 0.460213, 2312 | -1.334106, 2313 | 6.787447, 2314 | 0.523913, 2315 | 0.563830, 2316 | 0.748618, 2317 | -1.067994, 2318 | 6.798303, 2319 | 0.531529, 2320 | 0.555057, 2321 | 1.236408, 2322 | -1.585568, 2323 | 5.480490, 2324 | 0.566036, 2325 | 0.582329, 2326 | 0.387306, 2327 | -1.409990, 2328 | 6.957705, 2329 | 0.516311, 2330 | 0.563054, 2331 | 0.319925, 2332 | -1.607931, 2333 | 6.508676, 2334 | 0.517472, 2335 | 0.577877, 2336 | 1.639633, 2337 | 2.556298, 2338 | 3.863736, 2339 | 0.573595, 2340 | 0.389807, 2341 | 1.255645, 2342 | 2.467144, 2343 | 4.203800, 2344 | 0.560698, 2345 | 0.395332, 2346 | 1.031362, 2347 | 2.382663, 2348 | 4.615849, 2349 | 0.549756, 2350 | 0.399751, 2351 | 4.253081, 2352 | 2.772296, 2353 | 3.315305, 2354 | 0.710288, 2355 | 0.368253, 2356 | 4.530000, 2357 | 2.910000, 2358 | 3.339685, 2359 | 0.723330, 2360 | 0.363373, 2361 | ] 2362 | ) 2363 | canonical_metric_landmarks = np.reshape( 2364 | canonical_metric_landmarks, (canonical_metric_landmarks.shape[0] // 5, 5) 2365 | ).T 2366 | canonical_metric_landmarks = canonical_metric_landmarks[:3, :] 2367 | 2368 | procrustes_landmark_basis = [ 2369 | (4, 0.070909939706326), 2370 | (6, 0.032100144773722), 2371 | (10, 0.008446550928056), 2372 | (33, 0.058724168688059), 2373 | (54, 0.007667080033571), 2374 | (67, 0.009078059345484), 2375 | (117, 0.009791937656701), 2376 | (119, 0.014565368182957), 2377 | (121, 0.018591361120343), 2378 | (127, 0.005197994410992), 2379 | (129, 0.120625205338001), 2380 | (132, 0.005560018587857), 2381 | (133, 0.05328618362546), 2382 | (136, 0.066890455782413), 2383 | (143, 0.014816547743976), 2384 | (147, 0.014262833632529), 2385 | (198, 0.025462191551924), 2386 | (205, 0.047252278774977), 2387 | (263, 0.058724168688059), 2388 | (284, 0.007667080033571), 2389 | (297, 0.009078059345484), 2390 | (346, 0.009791937656701), 2391 | (348, 0.014565368182957), 2392 | (350, 0.018591361120343), 2393 | (356, 0.005197994410992), 2394 | (358, 0.120625205338001), 2395 | (361, 0.005560018587857), 2396 | (362, 0.05328618362546), 2397 | (365, 0.066890455782413), 2398 | (372, 0.014816547743976), 2399 | (376, 0.014262833632529), 2400 | (420, 0.025462191551924), 2401 | (425, 0.047252278774977), 2402 | ] 2403 | landmark_weights = np.zeros((canonical_metric_landmarks.shape[1],)) 2404 | for idx, weight in procrustes_landmark_basis: 2405 | landmark_weights[idx] = weight 2406 | 2407 | 2408 | class Singleton(type): 2409 | """ 2410 | This implements the Singleton design pattern using a metaclass. The Singleton class ensures that only one 2411 | instance of the class is created and returned whenever the class is called. The metaclass defines the __call__ 2412 | method which creates and returns an instance of the Singleton class if it does not already exist. The _instances 2413 | dictionary is used to keep track of existing instances of the class. Overall, this implementation ensures that 2414 | the Singleton class can only be instantiated once and that subsequent calls to the class return the same instance. 2415 | 2416 | """ 2417 | 2418 | _instances = {} 2419 | 2420 | def __call__(cls, *args, **kwargs): 2421 | """ 2422 | The `__call__` method is a special method in Python that allows an object to be called like a function. In 2423 | this case, it is used to create and return an instance of the Singleton class. It checks if the class is not 2424 | already in the _instances. If it isn't, it creates a new instance of the class and adds it to the _instances. 2425 | Overall, this implementation ensures that only one instance of the Singleton class is created and returned 2426 | whenever the class is called. 2427 | 2428 | Parameters 2429 | ---------- 2430 | args: Variable length argument list. 2431 | kwargs: Arbitrary keyword arguments. 2432 | 2433 | Returns 2434 | ------- 2435 | The instance of the class as a Singleton object. 2436 | 2437 | """ 2438 | if cls not in cls._instances: 2439 | cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) 2440 | return cls._instances[cls] 2441 | 2442 | 2443 | class Debugger(metaclass=Singleton): 2444 | """ 2445 | The Debugger class is a singleton class that allows an object to run in debug mode. 2446 | 2447 | Methods 2448 | ------- 2449 | set_debug: Used to set the debug mode. 2450 | toggle: Toggles the value of the "debug" attribute. 2451 | get_debug: Used to get the value of the "debug" attribute. 2452 | 2453 | """ 2454 | 2455 | def set_debug(self, debug): 2456 | """ 2457 | This method is used to set the debug mode for an object. The "debug" parameter is a boolean value that 2458 | determines whether the object should run in debug mode. By setting the "debug" attribute of the object 2459 | to the value of the "debug" parameter, the debug mode can be turned on or off. 2460 | 2461 | Parameters: 2462 | ----------- 2463 | debug: Debug value as a bool. 2464 | 2465 | Returns 2466 | ------- 2467 | 2468 | """ 2469 | self.debug = debug 2470 | 2471 | def toggle(self): 2472 | """ 2473 | This method toggles the value of the "debug" attribute of an object. If "debug" is currently True, 2474 | it will be set to False, and vice versa. The purpose of this function is to easily switch between debug mode 2475 | and normal mode for the object. 2476 | 2477 | Returns 2478 | ------- 2479 | 2480 | """ 2481 | self.debug = not self.debug 2482 | 2483 | def get_debug(self): 2484 | """ 2485 | This method is used to get the value of the "debug" attribute. 2486 | 2487 | Returns 2488 | ------- 2489 | 2490 | """ 2491 | return self.debug 2492 | 2493 | 2494 | DEBUG = Debugger() 2495 | DEBUG.set_debug(False) 2496 | 2497 | 2498 | class PCF: 2499 | def __init__( 2500 | self, 2501 | near=1, 2502 | far=10000, 2503 | frame_height=1920, 2504 | frame_width=1080, 2505 | fy=1074.520446598223, 2506 | ): 2507 | """ 2508 | This method sets the values of several attributes including the near and far clipping planes, frame height 2509 | and width, and the focal length in the y-direction (fy) for a Perspective Camera Frustum (PCF) object. Then 2510 | calculates the vertical field of view (fov_y) using the frame height and fy. It then calculates the height 2511 | and width at the near clipping plane based on the fov_y and frame dimensions. Finally, the method sets the 2512 | values for the left, right, bottom, and top attributes, which define the boundaries of the viewing frustum. 2513 | These values are derived from the width and height at the near clipping plane. In other words, this method 2514 | will define a rectangular region in the 3D space. 2515 | 2516 | Parameters: 2517 | ----------- 2518 | near: Near clipping plane value as an int. 2519 | far: Far clipping plane value as an int. 2520 | frame_height: Height of frame as an int or a float. 2521 | frame_width: Height of frame as an int or a float. 2522 | fy: Focal length in the y-direction as a float. 2523 | 2524 | """ 2525 | self.near = near 2526 | self.far = far 2527 | self.frame_height = frame_height 2528 | self.frame_width = frame_width 2529 | self.fy = fy 2530 | 2531 | fov_y = 2 * np.arctan(frame_height / (2 * fy)) 2532 | 2533 | # kDegreesToRadians = np.pi / 180.0 # never used 2534 | height_at_near = 2 * near * np.tan(0.5 * fov_y) 2535 | width_at_near = frame_width * height_at_near / frame_height 2536 | # print(height_at_near) 2537 | 2538 | self.fov_y = fov_y 2539 | self.left = -0.5 * width_at_near 2540 | self.right = 0.5 * width_at_near 2541 | self.bottom = -0.5 * height_at_near 2542 | self.top = 0.5 * height_at_near 2543 | 2544 | 2545 | def log(name, f): 2546 | """ 2547 | This function is used to log information. If debugging is enabled, the function prints the log message. The 2548 | purpose of this function is to provide a convenient way to log information during debugging. It allows developers 2549 | to easily track the flow of their code and inspect the values of variables at different points. 2550 | 2551 | Parameters: 2552 | ----------- 2553 | name: Represents the name of the log as a str. 2554 | f: Represents the content of the log as an object. 2555 | 2556 | Returns 2557 | ------- 2558 | 2559 | """ 2560 | if DEBUG.get_debug(): 2561 | print(f"{name} logged:", f) 2562 | print() 2563 | 2564 | 2565 | def cpp_compare(name, np_matrix): 2566 | """ 2567 | This function is used to compare a given matrix with a C++ matrix stored in a file. If using debug mode, 2568 | the function proceeds to compare the matrices. To ensure correct memory alignment, the C++ matrix is loaded from 2569 | the file and reshaped into a 2D array. Then, the function calculates the sum of squared differences between the 2570 | C++ matrix and the first rows and cols of the np_matrix. Finally, the function prints the result of the 2571 | comparison. 2572 | 2573 | Parameters: 2574 | ----------- 2575 | name: Name of the file as a str. 2576 | f: Represents the content of the log as an object. 2577 | 2578 | Returns 2579 | ------- 2580 | 2581 | """ 2582 | if DEBUG.get_debug(): 2583 | # reorder cpp matrix as memory alignment is not correct 2584 | cpp_matrix = np.load(f"{name}_cpp.npy") 2585 | rows, cols = cpp_matrix.shape 2586 | cpp_matrix = np.split(np.reshape(cpp_matrix, -1), cols) 2587 | cpp_matrix = np.stack(cpp_matrix, 1) 2588 | 2589 | print(f"{name}:", np.sum(np.abs(cpp_matrix - np_matrix[:rows, :cols]) ** 2)) 2590 | print() 2591 | 2592 | 2593 | def get_metric_landmarks(screen_landmarks, pcf): 2594 | """ 2595 | This function performs several steps to convert the screen landmarks into metric landmarks. 2596 | 2597 | First, the screen landmarks are projected onto the 3D space using the Perspective Camera Frustum (PCF). The depth 2598 | offset is calculated as the mean of the z-coordinates of the projected landmarks. 2599 | 2600 | Next, a copy of the projected landmarks is made and the handedness is changed to match the coordinate system used 2601 | in the subsequent steps. The scale of the first iteration is estimated using these intermediate landmarks. 2602 | 2603 | Another copy of the projected landmarks is made and the z-coordinates are moved and rescaled based on the pcf, 2604 | the depth offset, the first iteration scale, and the intermediate landmarks. The intermediate landmarks are then 2605 | unprojected back to the screen space using the pcf. The handedness is changed again to match the coordinate system. 2606 | 2607 | The scale of the second iteration is estimated using these intermediate landmarks. The metric landmarks are then 2608 | calculated by z-coordinates moving and rescaling the projected landmarks based on the pcf, the depth offset and 2609 | the total scale (product of first and second iteration scales). The metric landmarks are unprojected back to the 2610 | screen space and the handedness is changed again. 2611 | 2612 | A weighted orthogonal problem is solved using the canonical metric landmarks, the metric landmarks, and landmark 2613 | weights. The resulting pose transformation matrix is stored and compared using a C++ function. 2614 | 2615 | The inverse of the pose transformation matrix is calculated to obtain the inverse rotation and translation 2616 | components. The metric landmarks are transformed using the inverse rotation and translation. 2617 | 2618 | Parameters: 2619 | ----------- 2620 | screen_landmarks: Transposed face landmarks as a np.ndarray. 2621 | pcf: A Perspective Camera Frustum (PCF) object. 2622 | 2623 | Returns 2624 | ------- 2625 | metric_landmarks: Metric landmarks as a np.ndarray. 2626 | pose_transform_mat: Pose transformation matrix as a np.ndarray. 2627 | 2628 | """ 2629 | screen_landmarks = project_xy(screen_landmarks, pcf) 2630 | depth_offset = np.mean(screen_landmarks[2, :]) 2631 | 2632 | intermediate_landmarks = screen_landmarks.copy() 2633 | intermediate_landmarks = change_handedness(intermediate_landmarks) 2634 | first_iteration_scale = estimate_scale(intermediate_landmarks) 2635 | 2636 | intermediate_landmarks = screen_landmarks.copy() 2637 | intermediate_landmarks = move_and_rescale_z( 2638 | pcf, depth_offset, first_iteration_scale, intermediate_landmarks 2639 | ) 2640 | intermediate_landmarks = unproject_xy(pcf, intermediate_landmarks) 2641 | intermediate_landmarks = change_handedness(intermediate_landmarks) 2642 | second_iteration_scale = estimate_scale(intermediate_landmarks) 2643 | 2644 | metric_landmarks = screen_landmarks.copy() 2645 | total_scale = first_iteration_scale * second_iteration_scale 2646 | metric_landmarks = move_and_rescale_z( 2647 | pcf, depth_offset, total_scale, metric_landmarks 2648 | ) 2649 | metric_landmarks = unproject_xy(pcf, metric_landmarks) 2650 | metric_landmarks = change_handedness(metric_landmarks) 2651 | 2652 | pose_transform_mat = solve_weighted_orthogonal_problem( 2653 | canonical_metric_landmarks, metric_landmarks, landmark_weights 2654 | ) 2655 | cpp_compare("pose_transform_mat", pose_transform_mat) 2656 | 2657 | inv_pose_transform_mat = np.linalg.inv(pose_transform_mat) 2658 | inv_pose_rotation = inv_pose_transform_mat[:3, :3] 2659 | inv_pose_translation = inv_pose_transform_mat[:3, 3] 2660 | 2661 | metric_landmarks = ( 2662 | inv_pose_rotation @ metric_landmarks + inv_pose_translation[:, None] 2663 | ) 2664 | 2665 | return metric_landmarks, pose_transform_mat 2666 | 2667 | 2668 | def project_xy(landmarks, pcf): 2669 | """ 2670 | This function first calculates the scaling factors and translation values for the x and y axes based on the difference between the pcf 2671 | parameters. 2672 | 2673 | Next, the function modifies the landmarks by performing the following operations: 2674 | 1. It subtracts the y-coordinate values from 1.0. This is done to flip the y-axis so that the coordinates are 2675 | aligned with the conventional Cartesian coordinate system. 2676 | 2677 | 2. It multiplies the coordinates by a scaling factor array, which is created by stacking the x and y scaling 2678 | factors with an additional x scaling factor. This scales the coordinates to fit within the specified region. 2679 | 2680 | 3. It adds a translation array, which is created by stacking the x and y translation values with a 0 value. This 2681 | translates the coordinates to align them with the specified region. 2682 | 2683 | Overall, this function takes a set of 3D coordinates and projects them onto a 2D plane defined by a specified 2684 | region. The resulting coordinates are scaled and translated to fit within the region. 2685 | 2686 | Parameters: 2687 | ----------- 2688 | landmarks: Transposed face landmarks in a 3D space as a np.ndarray. 2689 | pcf: A Perspective Camera Frustum (PCF) object. 2690 | 2691 | Returns 2692 | ------- 2693 | landmarks: Modified landmarks as a np.ndarray. 2694 | 2695 | """ 2696 | x_scale = pcf.right - pcf.left 2697 | y_scale = pcf.top - pcf.bottom 2698 | x_translation = pcf.left 2699 | y_translation = pcf.bottom 2700 | 2701 | landmarks[1, :] = 1.0 - landmarks[1, :] 2702 | 2703 | landmarks = landmarks * np.array([[x_scale, y_scale, x_scale]]).T 2704 | landmarks = landmarks + np.array([[x_translation, y_translation, 0]]).T 2705 | 2706 | return landmarks 2707 | 2708 | 2709 | def change_handedness(landmarks): 2710 | """ 2711 | This function is first multiplies the second row of the "landmarks" array by -1.0, effectively changing the sign 2712 | of all the elements in that row. Finally, it returns the modified landmarks. By multiplying the second row by 2713 | -1.0, it is flipping the landmarks from left-handed coordinates to right-handed coordinates or vice versa. 2714 | 2715 | Parameters: 2716 | ----------- 2717 | landmarks: Landmarks as a np.ndarray. 2718 | 2719 | Returns 2720 | ------- 2721 | landmarks: Modified landmarks as a np.ndarray. 2722 | 2723 | """ 2724 | landmarks[2, :] *= -1.0 2725 | 2726 | return landmarks 2727 | 2728 | 2729 | def move_and_rescale_z(pcf, depth_offset, scale, landmarks): 2730 | """ 2731 | This function is used to move and rescale the z-coordinate of a set of landmarks. 2732 | 2733 | The function first subtracts the depth_offset from the z-coordinate of each landmark. Then, it adds the near clipping plane value from the pcf object to each z-coordinate. Finally, it divides each z-coordinate by the scaling factor. 2734 | 2735 | 2736 | Parameters: 2737 | ----------- 2738 | pcf: A Perspective Camera Frustum (PCF) object. 2739 | depth_offset: A value representing the offset of the depth values of the landmarks as a float. 2740 | scale: A value representing the scaling factor for the z-coordinate of the landmarks as a float. 2741 | landmarks: Landmarks in a 3D space as a np.ndarray. 2742 | 2743 | Returns 2744 | ------- 2745 | landmarks: Modified landmarks as a np.ndarray. 2746 | 2747 | """ 2748 | landmarks[2, :] = (landmarks[2, :] - depth_offset + pcf.near) / scale 2749 | 2750 | return landmarks 2751 | 2752 | 2753 | def unproject_xy(pcf, landmarks): 2754 | """ 2755 | This function unprojects the x and y coordinates of the landmarks based on the given Perspective Camera Frustum ( 2756 | PCF). To achieve this, the x and y coordinates of the landmarks are multiplied by their corresponding z 2757 | coordinates divided by the near clipping plane distance (pcf.near). This calculation is done element-wise for all 2758 | landmarks. Overall, this function helps in transforming the 2D image coordinates of the landmarks to their 2759 | original 3D coordinates in the camera's coordinate system. 2760 | 2761 | Parameters: 2762 | ----------- 2763 | pcf: A Perspective Camera Frustum (PCF) object. 2764 | landmarks: Landmarks in a 3D space as a np.ndarray. 2765 | 2766 | Returns 2767 | ------- 2768 | landmarks: Modified landmarks as a np.ndarray. 2769 | 2770 | """ 2771 | landmarks[0, :] = landmarks[0, :] * landmarks[2, :] / pcf.near 2772 | landmarks[1, :] = landmarks[1, :] * landmarks[2, :] / pcf.near 2773 | 2774 | return landmarks 2775 | 2776 | 2777 | def estimate_scale(landmarks): 2778 | """ 2779 | This function calculates the transformation matrix by solving a weighted orthogonal problem. Finally, 2780 | the function returns the Euclidean norm of the first column of the transformation matrix. 2781 | 2782 | Parameters: 2783 | ----------- 2784 | landmarks: Landmarks in a 3D space as a np.ndarray. 2785 | 2786 | Returns 2787 | ------- 2788 | landmarks: Modified landmarks as a np.ndarray. 2789 | 2790 | """ 2791 | transform_mat = solve_weighted_orthogonal_problem( 2792 | canonical_metric_landmarks, landmarks, landmark_weights 2793 | ) 2794 | 2795 | return np.linalg.norm(transform_mat[:, 0]) 2796 | 2797 | 2798 | def extract_square_root(point_weights): 2799 | """ 2800 | This function calculates the square root of the point weights and returns the result. 2801 | 2802 | Parameters: 2803 | ----------- 2804 | point_weights: Point weights as a np.ndarray. 2805 | 2806 | Returns 2807 | ------- 2808 | landmarks: Square root of point weights as a np.ndarray. 2809 | 2810 | """ 2811 | return np.sqrt(point_weights) 2812 | 2813 | 2814 | def solve_weighted_orthogonal_problem(source_points, target_points, point_weights): 2815 | """ 2816 | This function is used to solve a weighted orthogonal problem. 2817 | 2818 | Parameters: 2819 | ----------- 2820 | source_points: Source points as a np.ndarray. 2821 | target_points: Target points as a np.ndarray. 2822 | point_weights: Point weights as a np.ndarray. 2823 | 2824 | Returns 2825 | ------- 2826 | transform_mat: Transformation matrix as a np.ndarray. 2827 | 2828 | """ 2829 | sqrt_weights = extract_square_root(point_weights) 2830 | transform_mat = internal_solve_weighted_orthogonal_problem( 2831 | source_points, target_points, sqrt_weights 2832 | ) 2833 | return transform_mat 2834 | 2835 | 2836 | def internal_solve_weighted_orthogonal_problem(sources, targets, sqrt_weights): 2837 | """ 2838 | This function solves a weighted orthogonal problem. 2839 | 2840 | The function calculates the weighted sources and weighted targets matrices. The weighted sources matrix is 2841 | obtained by element-wise multiplication of the "sources" matrix with the square root of weights. The weighted 2842 | targets matrix is obtained by multiplying the first 468 columns of the "targets" matrix with the square root of 2843 | weights. Next, the function calculates the total weight by summing the product of the square root of weights with 2844 | itself. 2845 | 2846 | The function then calculates the source center of mass by summing the element-wise product of the weighted 2847 | sources matrix with the square root of weights and dividing by the total weight. 2848 | 2849 | Next, the function calculates the centered weighted sources matrix by subtracting the product of the source 2850 | center of mass and the square root of weights from the weighted sources matrix. 2851 | 2852 | The function then calculates the design matrix by multiplying the weighted targets matrix with the transposed 2853 | centered weighted sources matrix. 2854 | 2855 | The function then computes optimal rotation and optimal scale to calculate the optimal rotation and scale 2856 | parameters, respectively. The rotation and scale parameters are then combined into a transformation matrix. 2857 | 2858 | Parameters: 2859 | ----------- 2860 | sources: Source points as a np.ndarray. 2861 | targets: Target points as a np.ndarray. 2862 | sqrt_weights: Square root of weights as a np.ndarray. 2863 | 2864 | Returns 2865 | ------- 2866 | result: Transformation matrix as a np.ndarray. 2867 | 2868 | """ 2869 | cpp_compare("sources", sources) 2870 | cpp_compare("targets", targets) 2871 | 2872 | # tranposed(A_w). 2873 | weighted_sources = sources * sqrt_weights[None, :] 2874 | # tranposed(B_w). 2875 | weighted_targets = targets[:, :468] * sqrt_weights[None, :] 2876 | 2877 | cpp_compare("weighted_sources", weighted_sources) 2878 | cpp_compare("weighted_targets", weighted_targets) 2879 | 2880 | # w = tranposed(j_w) j_w. 2881 | total_weight = np.sum(sqrt_weights * sqrt_weights) 2882 | log("total_weight", total_weight) 2883 | 2884 | # Let C = (j_w tranposed(j_w)) / (tranposed(j_w) j_w). 2885 | # Note that C = tranposed(C), hence (I - C) = tranposed(I - C). 2886 | # 2887 | # tranposed(A_w) C = tranposed(A_w) j_w tranposed(j_w) / w = 2888 | # (tranposed(A_w) j_w) tranposed(j_w) / w = c_w tranposed(j_w), 2889 | # 2890 | # where c_w = tranposed(A_w) j_w / w is a k x 1 vector calculated here: 2891 | twice_weighted_sources = weighted_sources * sqrt_weights[None, :] 2892 | source_center_of_mass = np.sum(twice_weighted_sources, axis=1) / total_weight 2893 | log("source_center_of_mass", source_center_of_mass) 2894 | 2895 | # tranposed((I - C) A_w) = tranposed(A_w) (I - C) = 2896 | # tranposed(A_w) - tranposed(A_w) C = tranposed(A_w) - c_w tranposed(j_w). 2897 | centered_weighted_sources = weighted_sources - np.matmul( 2898 | source_center_of_mass[:, None], sqrt_weights[None, :] 2899 | ) 2900 | cpp_compare("centered_weighted_sources", centered_weighted_sources) 2901 | 2902 | design_matrix = np.matmul(weighted_targets, centered_weighted_sources.T) 2903 | cpp_compare("design_matrix", design_matrix) 2904 | log("design_matrix_norm", np.linalg.norm(design_matrix)) 2905 | 2906 | rotation = compute_optimal_rotation(design_matrix) 2907 | 2908 | scale = compute_optimal_scale( 2909 | centered_weighted_sources, weighted_sources, weighted_targets, rotation 2910 | ) 2911 | log("scale", scale) 2912 | 2913 | rotation_and_scale = scale * rotation 2914 | 2915 | pointwise_diffs = weighted_targets - np.matmul(rotation_and_scale, weighted_sources) 2916 | cpp_compare("pointwise_diffs", pointwise_diffs) 2917 | 2918 | weighted_pointwise_diffs = pointwise_diffs * sqrt_weights[None, :] 2919 | cpp_compare("weighted_pointwise_diffs", weighted_pointwise_diffs) 2920 | 2921 | translation = np.sum(weighted_pointwise_diffs, axis=1) / total_weight 2922 | log("translation", translation) 2923 | 2924 | transform_mat = combine_transform_matrix(rotation_and_scale, translation) 2925 | cpp_compare("transform_mat", transform_mat) 2926 | 2927 | return transform_mat 2928 | 2929 | 2930 | def compute_optimal_rotation(design_matrix): 2931 | """ 2932 | This function computes the optimal rotation matrix for a given design matrix. The function first checks if the 2933 | norm of the design matrix is too small (less than 1e-9). If it is, it prints a warning message stating that the 2934 | design matrix norm is too small. 2935 | 2936 | Next, it performs a singular value decomposition (SVD) on the design matrix. The SVD decomposes the design matrix 2937 | into three matrices: U, Σ, and V^T, where U and V are orthogonal matrices and Σ is a diagonal matrix containing 2938 | the singular values. 2939 | 2940 | The function then assigns the U matrix to the variable "postrotation" and the V^T matrix to the variable 2941 | "prerotation". If the determinant of only one of the postrotation and prerotation matrices is negative, 2942 | it flips the sign of the third column of the postrotation matrix. The function then calculates the rotation 2943 | matrix by multiplying the postrotation and prerotation matrices. 2944 | 2945 | Parameters: 2946 | ----------- 2947 | design_matrix: Design matrix as a np.ndarray. 2948 | 2949 | Returns 2950 | ------- 2951 | rotation: Optimal rotation as a np.ndarray. 2952 | 2953 | """ 2954 | if np.linalg.norm(design_matrix) < 1e-9: 2955 | print("Design matrix norm is too small!") 2956 | 2957 | u, _, vh = np.linalg.svd(design_matrix, full_matrices=True) 2958 | 2959 | postrotation = u 2960 | prerotation = vh 2961 | 2962 | if np.linalg.det(postrotation) * np.linalg.det(prerotation) < 0: 2963 | postrotation[:, 2] = -1 * postrotation[:, 2] 2964 | 2965 | cpp_compare("postrotation", postrotation) 2966 | cpp_compare("prerotation", prerotation) 2967 | 2968 | rotation = np.matmul(postrotation, prerotation) 2969 | 2970 | cpp_compare("rotation", rotation) 2971 | 2972 | return rotation 2973 | 2974 | 2975 | def compute_optimal_scale( 2976 | centered_weighted_sources, weighted_sources, weighted_targets, rotation 2977 | ): 2978 | """ 2979 | The given code computes the optimal scale for a transformation based on weighted sources and targets. 2980 | 2981 | First, the code applies the given rotation matrix to the centered weighted sources. This results in the 2982 | rotated_centered_weighted_sources. Next, the code calculates the numerator by taking the element-wise 2983 | multiplication of the rotated_centered_weighted_sources and weighted_targets, and then summing the resulting 2984 | array. Similarly, the denominator is calculated by taking the element-wise multiplication of the 2985 | centered_weighted_sources and weighted_sources, and then summing the resulting array. 2986 | 2987 | If the denominator is very small (less than 1e-9), a warning message is printed stating that the scale expression 2988 | denominator is too small. If the numerator divided by the denominator is very small (less than 1e-9), 2989 | a warning message is printed stating that the scale is too small. Finally, the code returns the result of the 2990 | numerator divided by the denominator, which represents the optimal scale for the transformation. 2991 | 2992 | Parameters: 2993 | ----------- 2994 | centered_weighted_sources: Centered weighted sources as a np.ndarray. 2995 | weighted_sources: Weighted sources as a np.ndarray. 2996 | weighted_targets: Weighted targets as a np.ndarray. 2997 | rotation: Rotation as a np.ndarray. 2998 | 2999 | Returns 3000 | ------- 3001 | result: Combination of rotation and scaling with translation matrixes as a np.ndarray. 3002 | 3003 | """ 3004 | rotated_centered_weighted_sources = np.matmul(rotation, centered_weighted_sources) 3005 | 3006 | numerator = np.sum(rotated_centered_weighted_sources * weighted_targets) 3007 | denominator = np.sum(centered_weighted_sources * weighted_sources) 3008 | 3009 | if denominator < 1e-9: 3010 | print("Scale expression denominator is too small!") 3011 | if numerator / denominator < 1e-9: 3012 | print("Scale is too small!") 3013 | 3014 | return numerator / denominator 3015 | 3016 | 3017 | def combine_transform_matrix(r_and_s, t): 3018 | """ 3019 | This function combines a rotation and scaling matrix (r_and_s) with a translation matrix (t) to create a single 3020 | transform matrix. The resulting matrix is a 4x4 matrix, where the top left 3x3 submatrix represents the rotation 3021 | and scaling, and the last column represents the translation. The function first initializes the result matrix as 3022 | the identity matrix. Then, it assigns the rotation and scaling matrix to the top left submatrix and the 3023 | translation vector to the last column of the result matrix. 3024 | 3025 | Parameters: 3026 | ----------- 3027 | r_and_s: Rotation and scaling matrix as a np.ndarray. 3028 | t: Translation matrix as a np.ndarray. 3029 | 3030 | Returns 3031 | ------- 3032 | result: Combination of rotation and scaling with translation matrixes as a np.ndarray. 3033 | 3034 | """ 3035 | result = np.eye(4) 3036 | result[:3, :3] = r_and_s 3037 | result[:3, 3] = t 3038 | return result 3039 | -------------------------------------------------------------------------------- /driver_state_detection/main.py: -------------------------------------------------------------------------------- 1 | import time 2 | import pprint 3 | 4 | import cv2 5 | import mediapipe as mp 6 | import numpy as np 7 | 8 | from attention_scorer import AttentionScorer as AttScorer 9 | from eye_detector import EyeDetector as EyeDet 10 | from parser import get_args 11 | from pose_estimation import HeadPoseEstimator as HeadPoseEst 12 | from utils import get_landmarks, load_camera_parameters 13 | 14 | 15 | def main(): 16 | args = get_args() 17 | 18 | if not cv2.useOptimized(): 19 | try: 20 | cv2.setUseOptimized(True) # set OpenCV optimization to True 21 | except Exception as e: 22 | print( 23 | f"OpenCV optimization could not be set to True, the script may be slower than expected.\nError: {e}" 24 | ) 25 | 26 | if args.camera_params: 27 | camera_matrix, dist_coeffs = load_camera_parameters(args.camera_params) 28 | else: 29 | camera_matrix, dist_coeffs = None, None 30 | 31 | if args.verbose: 32 | print("Arguments and Parameters used:\n") 33 | pprint.pp(vars(args), indent=4) 34 | print("\nCamera Matrix:") 35 | pprint.pp(camera_matrix, indent=4) 36 | print("\nDistortion Coefficients:") 37 | pprint.pp(dist_coeffs, indent=4) 38 | print("\n") 39 | 40 | """instantiation of mediapipe face mesh model. This model give back 478 landmarks 41 | if the rifine_landmarks parameter is set to True. 468 landmarks for the face and 42 | the last 10 landmarks for the irises 43 | """ 44 | Detector = mp.solutions.face_mesh.FaceMesh( 45 | static_image_mode=False, 46 | min_detection_confidence=0.5, 47 | min_tracking_confidence=0.5, 48 | refine_landmarks=True, 49 | ) 50 | 51 | # instantiation of the Eye Detector and Head Pose estimator objects 52 | Eye_det = EyeDet(show_processing=args.show_eye_proc) 53 | 54 | Head_pose = HeadPoseEst( 55 | show_axis=args.show_axis, camera_matrix=camera_matrix, dist_coeffs=dist_coeffs 56 | ) 57 | 58 | # timing variables 59 | prev_time = time.perf_counter() 60 | fps = 0.0 # Initial FPS value 61 | 62 | t_now = time.perf_counter() 63 | 64 | # instantiation of the attention scorer object, with the various thresholds 65 | # NOTE: set verbose to True for additional printed information about the scores 66 | Scorer = AttScorer( 67 | t_now=t_now, 68 | ear_thresh=args.ear_thresh, 69 | gaze_time_thresh=args.gaze_time_thresh, 70 | roll_thresh=args.roll_thresh, 71 | pitch_thresh=args.pitch_thresh, 72 | yaw_thresh=args.yaw_thresh, 73 | ear_time_thresh=args.ear_time_thresh, 74 | gaze_thresh=args.gaze_thresh, 75 | pose_time_thresh=args.pose_time_thresh, 76 | verbose=args.verbose, 77 | ) 78 | 79 | # capture the input from the default system camera (camera number 0) 80 | cap = cv2.VideoCapture(args.camera) 81 | if not cap.isOpened(): # if the camera can't be opened exit the program 82 | print("Cannot open camera") 83 | exit() 84 | 85 | # time.sleep(0.01) # To prevent zero division error when calculating the FPS 86 | 87 | while True: # infinite loop for webcam video capture 88 | # get current time in seconds 89 | t_now = time.perf_counter() 90 | 91 | # Calculate the time taken to process the previous frame 92 | elapsed_time = t_now - prev_time 93 | prev_time = t_now 94 | 95 | # calculate FPS 96 | if elapsed_time > 0: 97 | fps = np.round(1 / elapsed_time, 3) 98 | 99 | ret, frame = cap.read() # read a frame from the webcam 100 | 101 | if not ret: # if a frame can't be read, exit the program 102 | print("Can't receive frame from camera/stream end") 103 | break 104 | 105 | # if the frame comes from webcam, flip it so it looks like a mirror. 106 | if args.camera == 0: 107 | frame = cv2.flip(frame, 2) 108 | 109 | # start the tick counter for computing the processing time for each frame 110 | e1 = cv2.getTickCount() 111 | 112 | # transform the BGR frame in grayscale 113 | gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) 114 | 115 | # get the frame size 116 | frame_size = frame.shape[1], frame.shape[0] 117 | 118 | # apply a bilateral filter to lower noise but keep frame details. create a 3D matrix from gray image to give it to the model 119 | # gray = cv2.bilateralFilter(gray, 5, 10, 10) 120 | gray = np.expand_dims(gray, axis=2) 121 | gray = np.concatenate([gray, gray, gray], axis=2) 122 | 123 | # find the faces using the face mesh model 124 | lms = Detector.process(gray).multi_face_landmarks 125 | 126 | if lms: # process the frame only if at least a face is found 127 | # getting face landmarks and then take only the bounding box of the biggest face 128 | landmarks = get_landmarks(lms) 129 | 130 | # shows the eye keypoints (can be commented) 131 | Eye_det.show_eye_keypoints( 132 | color_frame=frame, landmarks=landmarks, frame_size=frame_size 133 | ) 134 | 135 | # compute the EAR score of the eyes 136 | ear = Eye_det.get_EAR(landmarks=landmarks) 137 | 138 | # compute the *rolling* PERCLOS score and state of tiredness 139 | # if you don't want to use the rolling PERCLOS, use the get_PERCLOS method instead 140 | tired, perclos_score = Scorer.get_rolling_PERCLOS(t_now, ear) 141 | 142 | # compute the Gaze Score 143 | gaze = Eye_det.get_Gaze_Score( 144 | frame=gray, landmarks=landmarks, frame_size=frame_size 145 | ) 146 | 147 | # compute the head pose 148 | frame_det, roll, pitch, yaw = Head_pose.get_pose( 149 | frame=frame, landmarks=landmarks, frame_size=frame_size 150 | ) 151 | 152 | # evaluate the scores for EAR, GAZE and HEAD POSE 153 | asleep, looking_away, distracted = Scorer.eval_scores( 154 | t_now=t_now, 155 | ear_score=ear, 156 | gaze_score=gaze, 157 | head_roll=roll, 158 | head_pitch=pitch, 159 | head_yaw=yaw, 160 | ) 161 | 162 | # if the head pose estimation is successful, show the results 163 | if frame_det is not None: 164 | frame = frame_det 165 | 166 | # show the real-time EAR score 167 | if ear is not None: 168 | cv2.putText( 169 | frame, 170 | "EAR:" + str(round(ear, 3)), 171 | (10, 50), 172 | cv2.FONT_HERSHEY_PLAIN, 173 | 2, 174 | (255, 255, 255), 175 | 1, 176 | cv2.LINE_AA, 177 | ) 178 | 179 | # show the real-time Gaze Score 180 | if gaze is not None: 181 | cv2.putText( 182 | frame, 183 | "Gaze Score:" + str(round(gaze, 3)), 184 | (10, 80), 185 | cv2.FONT_HERSHEY_PLAIN, 186 | 2, 187 | (255, 255, 255), 188 | 1, 189 | cv2.LINE_AA, 190 | ) 191 | 192 | # show the real-time PERCLOS score 193 | cv2.putText( 194 | frame, 195 | "PERCLOS:" + str(round(perclos_score, 3)), 196 | (10, 110), 197 | cv2.FONT_HERSHEY_PLAIN, 198 | 2, 199 | (255, 255, 255), 200 | 1, 201 | cv2.LINE_AA, 202 | ) 203 | 204 | if roll is not None: 205 | cv2.putText( 206 | frame, 207 | "roll:" + str(roll.round(1)[0]), 208 | (450, 40), 209 | cv2.FONT_HERSHEY_PLAIN, 210 | 1.5, 211 | (255, 0, 255), 212 | 1, 213 | cv2.LINE_AA, 214 | ) 215 | if pitch is not None: 216 | cv2.putText( 217 | frame, 218 | "pitch:" + str(pitch.round(1)[0]), 219 | (450, 70), 220 | cv2.FONT_HERSHEY_PLAIN, 221 | 1.5, 222 | (255, 0, 255), 223 | 1, 224 | cv2.LINE_AA, 225 | ) 226 | if yaw is not None: 227 | cv2.putText( 228 | frame, 229 | "yaw:" + str(yaw.round(1)[0]), 230 | (450, 100), 231 | cv2.FONT_HERSHEY_PLAIN, 232 | 1.5, 233 | (255, 0, 255), 234 | 1, 235 | cv2.LINE_AA, 236 | ) 237 | 238 | # if the driver is tired, show and alert on screen 239 | if tired: 240 | cv2.putText( 241 | frame, 242 | "TIRED!", 243 | (10, 280), 244 | cv2.FONT_HERSHEY_PLAIN, 245 | 1, 246 | (0, 0, 255), 247 | 1, 248 | cv2.LINE_AA, 249 | ) 250 | 251 | # if the state of attention of the driver is not normal, show an alert on screen 252 | if asleep: 253 | cv2.putText( 254 | frame, 255 | "ASLEEP!", 256 | (10, 300), 257 | cv2.FONT_HERSHEY_PLAIN, 258 | 1, 259 | (0, 0, 255), 260 | 1, 261 | cv2.LINE_AA, 262 | ) 263 | if looking_away: 264 | cv2.putText( 265 | frame, 266 | "LOOKING AWAY!", 267 | (10, 320), 268 | cv2.FONT_HERSHEY_PLAIN, 269 | 1, 270 | (0, 0, 255), 271 | 1, 272 | cv2.LINE_AA, 273 | ) 274 | if distracted: 275 | cv2.putText( 276 | frame, 277 | "DISTRACTED!", 278 | (10, 340), 279 | cv2.FONT_HERSHEY_PLAIN, 280 | 1, 281 | (0, 0, 255), 282 | 1, 283 | cv2.LINE_AA, 284 | ) 285 | 286 | # stop the tick counter for computing the processing time for each frame 287 | e2 = cv2.getTickCount() 288 | # processign time in milliseconds 289 | proc_time_frame_ms = ((e2 - e1) / cv2.getTickFrequency()) * 1000 290 | # print fps and processing time per frame on screen 291 | if args.show_fps: 292 | cv2.putText( 293 | frame, 294 | "FPS:" + str(round(fps)), 295 | (10, 400), 296 | cv2.FONT_HERSHEY_PLAIN, 297 | 2, 298 | (255, 0, 255), 299 | 1, 300 | ) 301 | if args.show_proc_time: 302 | cv2.putText( 303 | frame, 304 | "PROC. TIME FRAME:" + str(round(proc_time_frame_ms, 0)) + "ms", 305 | (10, 430), 306 | cv2.FONT_HERSHEY_PLAIN, 307 | 2, 308 | (255, 0, 255), 309 | 1, 310 | ) 311 | 312 | # show the frame on screen 313 | cv2.imshow("Press 'q' to terminate", frame) 314 | 315 | # if the key "q" is pressed on the keyboard, the program is terminated 316 | if cv2.waitKey(20) & 0xFF == ord("q"): 317 | break 318 | 319 | cap.release() 320 | cv2.destroyAllWindows() 321 | 322 | return 323 | 324 | 325 | if __name__ == "__main__": 326 | main() 327 | -------------------------------------------------------------------------------- /driver_state_detection/parser.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | 4 | def get_args(): 5 | parser = argparse.ArgumentParser(description="Driver State Detection") 6 | 7 | # selection the camera number, default is 0 (webcam) 8 | parser.add_argument( 9 | "-c", 10 | "--camera", 11 | type=int, 12 | default=0, 13 | metavar="", 14 | help="Camera number, default is 0 (webcam)", 15 | ) 16 | 17 | parser.add_argument( 18 | "--camera_params", 19 | type=str, 20 | help="Path to the camera parameters file (JSON or YAML).", 21 | ) 22 | 23 | # visualisation parameters 24 | parser.add_argument( 25 | "--show_fps", 26 | type=bool, 27 | default=True, 28 | metavar="", 29 | help="Show the actual FPS of the capture stream, default is true", 30 | ) 31 | parser.add_argument( 32 | "--show_proc_time", 33 | type=bool, 34 | default=True, 35 | metavar="", 36 | help="Show the processing time for a single frame, default is true", 37 | ) 38 | parser.add_argument( 39 | "--show_eye_proc", 40 | type=bool, 41 | default=False, 42 | metavar="", 43 | help="Show the eyes processing, deafult is false", 44 | ) 45 | parser.add_argument( 46 | "--show_axis", 47 | type=bool, 48 | default=True, 49 | metavar="", 50 | help="Show the head pose axis, default is true", 51 | ) 52 | parser.add_argument( 53 | "--verbose", 54 | type=bool, 55 | default=False, 56 | metavar="", 57 | help="Prints additional info, default is false", 58 | ) 59 | 60 | # Attention Scorer parameters (EAR, Gaze Score, Pose) 61 | parser.add_argument( 62 | "--smooth_factor", 63 | type=float, 64 | default=0.5, 65 | metavar="", 66 | help="Sets the smooth factor for the head pose estimation keypoint smoothing, default is 0.5", 67 | ) 68 | parser.add_argument( 69 | "--ear_thresh", 70 | type=float, 71 | default=0.15, 72 | metavar="", 73 | help="Sets the EAR threshold for the Attention Scorer, default is 0.15", 74 | ) 75 | parser.add_argument( 76 | "--ear_time_thresh", 77 | type=float, 78 | default=2, 79 | metavar="", 80 | help="Sets the EAR time (seconds) threshold for the Attention Scorer, default is 2 seconds", 81 | ) 82 | parser.add_argument( 83 | "--gaze_thresh", 84 | type=float, 85 | default=0.015, 86 | metavar="", 87 | help="Sets the Gaze Score threshold for the Attention Scorer, default is 0.2", 88 | ) 89 | parser.add_argument( 90 | "--gaze_time_thresh", 91 | type=float, 92 | default=2, 93 | metavar="", 94 | help="Sets the Gaze Score time (seconds) threshold for the Attention Scorer, default is 2. seconds", 95 | ) 96 | parser.add_argument( 97 | "--pitch_thresh", 98 | type=float, 99 | default=20, 100 | metavar="", 101 | help="Sets the PITCH threshold (degrees) for the Attention Scorer, default is 30 degrees", 102 | ) 103 | parser.add_argument( 104 | "--yaw_thresh", 105 | type=float, 106 | default=20, 107 | metavar="", 108 | help="Sets the YAW threshold (degrees) for the Attention Scorer, default is 20 degrees", 109 | ) 110 | parser.add_argument( 111 | "--roll_thresh", 112 | type=float, 113 | default=20, 114 | metavar="", 115 | help="Sets the ROLL threshold (degrees) for the Attention Scorer, default is 30 degrees", 116 | ) 117 | parser.add_argument( 118 | "--pose_time_thresh", 119 | type=float, 120 | default=2.5, 121 | metavar="", 122 | help="Sets the Pose time threshold (seconds) for the Attention Scorer, default is 2.5 seconds", 123 | ) 124 | 125 | # parse the arguments and store them in the args variable dictionary 126 | args, _ = parser.parse_known_args() 127 | 128 | return args 129 | -------------------------------------------------------------------------------- /driver_state_detection/pose_estimation.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | from face_geometry import PCF, get_metric_landmarks, procrustes_landmark_basis 4 | from utils import rot_mat_to_euler 5 | 6 | 7 | class HeadPoseEstimator: 8 | def __init__(self, camera_matrix=None, dist_coeffs=None, show_axis: bool = False): 9 | """ 10 | Class for estimating the head pose using the image/frame, face mesh landmarks, and camera parameters. 11 | 12 | Attributes 13 | ---------- 14 | show_axis : bool 15 | If set to True, shows the head pose axis projected from the nose keypoint and the face landmarks points 16 | used for pose estimation (default is False). 17 | camera_matrix : numpy array 18 | Camera matrix of the camera used to capture the image/frame. 19 | dist_coeffs : numpy array 20 | Distortion coefficients of the camera used to capture the image/frame. 21 | 22 | Methods 23 | ------- 24 | get_pose(frame, landmarks, frame_size) 25 | Estimate the head pose using the provided frame, landmarks, and frame size. 26 | _get_model_lms_ids() 27 | Get the model landmark IDs used for pose estimation. 28 | _draw_nose_axes(frame, rvec, tvec, model_img_lms) 29 | Draw the nose axes on the frame. 30 | _get_camera_parameters(frame_size) 31 | Get the camera parameters for pose estimation. 32 | """ 33 | 34 | self.NOSE_AXES_POINTS = np.array( 35 | [[7, 0, 10], [0, 7, 6], [0, 0, 14]], dtype=float 36 | ) 37 | self.JAW_LMS_NUMS = [61, 291, 199] 38 | 39 | self.show_axis = show_axis 40 | self.camera_matrix = camera_matrix 41 | self.dist_coeffs = dist_coeffs 42 | self.focal_length = None 43 | 44 | self.pcf_calculated = False 45 | 46 | self.model_lms_ids = self._get_model_lms_ids() 47 | 48 | def _get_model_lms_ids(self): 49 | model_lms_ids = self.JAW_LMS_NUMS + [ 50 | key for key, _ in procrustes_landmark_basis 51 | ] 52 | model_lms_ids.sort() 53 | 54 | return model_lms_ids 55 | 56 | def get_pose(self, frame, landmarks, frame_size): 57 | """ 58 | Estimate head pose using the head pose estimator object instantiated attribute 59 | 60 | Parameters 61 | ---------- 62 | frame: numpy array 63 | Image/frame captured by the camera 64 | landmarks: numpy array 65 | mediapiep face mesh detected 478 landmarks of the head 66 | 67 | Returns 68 | -------- 69 | - if successful: image_frame, yaw, pitch, roll (tuple) 70 | - if unsuccessful: None,None,None,None (tuple) 71 | 72 | """ 73 | 74 | rvec = None 75 | tvec = None 76 | model_img_lms = None 77 | eulers = None 78 | metric_lms = None 79 | 80 | if not self.pcf_calculated: 81 | self._get_camera_parameters(frame_size) 82 | 83 | model_img_lms = ( 84 | np.clip(landmarks[self.model_lms_ids, :2], 0.0, 1.0) * frame_size 85 | ) 86 | 87 | metric_lms = get_metric_landmarks(landmarks.T.copy(), self.pcf)[0].T 88 | 89 | model_metric_lms = metric_lms[self.model_lms_ids, :] 90 | 91 | (solve_pnp_success, rvec, tvec) = cv2.solvePnP( 92 | model_metric_lms, 93 | model_img_lms, 94 | self.camera_matrix, 95 | self.dist_coeffs, 96 | flags=cv2.SOLVEPNP_ITERATIVE, 97 | ) 98 | """ 99 | The OpenCV Solve PnP method computes the rotation and translation vectors with respect to the camera coordinate 100 | system of the image_points referred to the 3d head model_points. It takes into account the camera matrix and 101 | the distortion coefficients. 102 | The method used is iterative (cv2.SOLVEPNP_ITERATIVE) 103 | An alternative method can be the cv2.SOLVEPNP_SQPNP 104 | """ 105 | tvec = tvec.round(2) 106 | 107 | if solve_pnp_success: 108 | rvec, tvec = cv2.solvePnPRefineVVS( 109 | model_metric_lms, 110 | model_img_lms, 111 | self.camera_matrix, 112 | self.dist_coeffs, 113 | rvec, 114 | tvec, 115 | ) 116 | 117 | rvec1 = np.array([rvec[2, 0], rvec[0, 0], rvec[1, 0]]).reshape((3, 1)) 118 | 119 | # cv2.Rodrigues: convert a rotation vector to a rotation matrix (also known as a Rodrigues rotation matrix) 120 | rmat, _ = cv2.Rodrigues(rvec1) 121 | 122 | eulers = rot_mat_to_euler(rmat).reshape((-1, 1)) 123 | 124 | """ 125 | We use the rotationMatrixToEulerAngles function to compute the euler angles (roll, pitch, yaw) from the 126 | Rotation Matrix. This function also checks if we have a gymbal lock. 127 | The angles are converted from radians to degrees 128 | 129 | An alternative method to compute the euler angles is the following: 130 | 131 | P = np.hstack((Rmat,tvec)) -> computing the projection matrix 132 | euler_angles = -cv2.decomposeProjectionMatrix(P)[6] -> extracting euler angles for yaw pitch and roll from the projection matrix 133 | """ 134 | 135 | self._draw_nose_axes(frame, rvec, tvec, model_img_lms) 136 | 137 | return frame, eulers[0], eulers[1], eulers[2] 138 | 139 | else: 140 | return None, None, None, None 141 | 142 | def _draw_nose_axes(self, frame, rvec, tvec, model_img_lms): 143 | (nose_axes_point2D, _) = cv2.projectPoints( 144 | self.NOSE_AXES_POINTS, rvec, tvec, self.camera_matrix, self.dist_coeffs 145 | ) 146 | nose = tuple(model_img_lms[0, :2].astype(int)) 147 | 148 | nose_x = tuple(nose_axes_point2D[0, 0].astype(int)) 149 | nose_y = tuple(nose_axes_point2D[1, 0].astype(int)) 150 | nose_z = tuple(nose_axes_point2D[2, 0].astype(int)) 151 | 152 | cv2.line(frame, nose, nose_x, (255, 0, 0), 2) 153 | cv2.line(frame, nose, nose_y, (0, 255, 0), 2) 154 | cv2.line(frame, nose, nose_z, (0, 0, 255), 2) 155 | 156 | def _get_camera_parameters(self, frame_size): 157 | fr_w = frame_size[0] 158 | fr_h = frame_size[1] 159 | if self.camera_matrix is None: 160 | fr_center = (fr_w // 2, fr_h // 2) 161 | focal_length = fr_w 162 | self.camera_matrix = np.array( 163 | [ 164 | [focal_length, 0, fr_center[0]], 165 | [0, focal_length, fr_center[1]], 166 | [0, 0, 1], 167 | ], 168 | dtype="double", 169 | ) 170 | self.focal_length = focal_length 171 | else: 172 | self.focal_length = self.camera_matrix[0, 0] 173 | if self.dist_coeffs is None: 174 | self.dist_coeffs = np.zeros((5, 1)) 175 | 176 | self.pcf = PCF(frame_height=fr_h, frame_width=fr_w, fy=self.focal_length) 177 | 178 | self.pcf_calculated = True 179 | -------------------------------------------------------------------------------- /driver_state_detection/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import cv2 4 | import numpy as np 5 | 6 | 7 | def load_camera_parameters(file_path): 8 | try: 9 | with open(file_path, "r") as file: 10 | if file_path.endswith(".json"): 11 | data = json.load(file) 12 | else: 13 | raise ValueError("Unsupported file format. Use JSON or YAML.") 14 | return ( 15 | np.array(data["camera_matrix"], dtype="double"), 16 | np.array(data["dist_coeffs"], dtype="double"), 17 | ) 18 | except Exception as e: 19 | print(f"Failed to load camera parameters: {e}") 20 | return None, None 21 | 22 | 23 | def resize(frame, scale_percent): 24 | """ 25 | Resize the image maintaining the aspect ratio 26 | :param frame: opencv image/frame 27 | :param scale_percent: int 28 | scale factor for resizing the image 29 | :return: 30 | resized: rescaled opencv image/frame 31 | """ 32 | width = int(frame.shape[1] * scale_percent / 100) 33 | height = int(frame.shape[0] * scale_percent / 100) 34 | dim = (width, height) 35 | 36 | resized = cv2.resize(frame, dim, interpolation=cv2.INTER_LINEAR) 37 | return resized 38 | 39 | 40 | def get_landmarks(lms): 41 | surface = 0 42 | for lms0 in lms: 43 | landmarks = [np.array([point.x, point.y, point.z]) for point in lms0.landmark] 44 | 45 | landmarks = np.array(landmarks) 46 | 47 | landmarks[landmarks[:, 0] < 0.0, 0] = 0.0 48 | landmarks[landmarks[:, 0] > 1.0, 0] = 1.0 49 | landmarks[landmarks[:, 1] < 0.0, 1] = 0.0 50 | landmarks[landmarks[:, 1] > 1.0, 1] = 1.0 51 | 52 | dx = landmarks[:, 0].max() - landmarks[:, 0].min() 53 | dy = landmarks[:, 1].max() - landmarks[:, 1].min() 54 | new_surface = dx * dy 55 | if new_surface > surface: 56 | biggest_face = landmarks 57 | 58 | return biggest_face 59 | 60 | 61 | def get_face_area(face): 62 | """ 63 | Computes the area of the bounding box ROI of the face detected by the dlib face detector 64 | It's used to sort the detected faces by the box area 65 | 66 | :param face: dlib bounding box of a detected face in faces 67 | :return: area of the face bounding box 68 | """ 69 | return abs((face.left() - face.right()) * (face.bottom() - face.top())) 70 | 71 | 72 | def show_keypoints(keypoints, frame): 73 | """ 74 | Draw circles on the opencv frame over the face keypoints predicted by the dlib predictor 75 | 76 | :param keypoints: dlib iterable 68 keypoints object 77 | :param frame: opencv frame 78 | :return: frame 79 | Returns the frame with all the 68 dlib face keypoints drawn 80 | """ 81 | for n in range(0, 68): 82 | x = keypoints.part(n).x 83 | y = keypoints.part(n).y 84 | cv2.circle(frame, (x, y), 1, (0, 0, 255), -1) 85 | return frame 86 | 87 | 88 | def midpoint(p1, p2): 89 | """ 90 | Compute the midpoint between two dlib keypoints 91 | 92 | :param p1: dlib single keypoint 93 | :param p2: dlib single keypoint 94 | :return: array of x,y coordinated of the midpoint between p1 and p2 95 | """ 96 | return np.array([int((p1.x + p2.x) / 2), int((p1.y + p2.y) / 2)]) 97 | 98 | 99 | def get_array_keypoints(landmarks, dtype="int", verbose: bool = False): 100 | """ 101 | Converts all the iterable dlib 68 face keypoint in a numpy array of shape 68,2 102 | 103 | :param landmarks: dlib iterable 68 keypoints object 104 | :param dtype: dtype desired in output 105 | :param verbose: if set to True, prints array of keypoints (default is False) 106 | :return: points_array 107 | Numpy array containing all the 68 keypoints (x,y) coordinates 108 | The shape is 68,2 109 | """ 110 | points_array = np.zeros((68, 2), dtype=dtype) 111 | for i in range(0, 68): 112 | points_array[i] = (landmarks.part(i).x, landmarks.part(i).y) 113 | 114 | if verbose: 115 | print(points_array) 116 | 117 | return points_array 118 | 119 | 120 | def rot_mat_to_euler(rmat): 121 | """ 122 | This function converts a rotation matrix into Euler angles. It first checks if the given matrix is a valid 123 | rotation matrix by comparing its calculated identity matrix to the identity matrix. If it is a valid rotation 124 | matrix, it checks for the presence of a gimbal lock situation. If there is no gimbal lock, it calculates the 125 | Euler angles using the arctan2 function. If there is a gimbal lock, it uses a different formula for yaw, pitch, 126 | and roll. The function then checks the signs of the angles and adjusts them accordingly. Finally, it returns the 127 | Euler angles in degrees, rounded to two decimal places. 128 | 129 | Parameters 130 | ---------- 131 | rmat: A rotation matrix as a np.ndarray. 132 | 133 | Returns 134 | ------- 135 | Euler angles in degrees as a np.ndarray. 136 | 137 | """ 138 | rtr = np.transpose(rmat) 139 | r_identity = np.matmul(rtr, rmat) 140 | 141 | I = np.identity(3, dtype=rmat.dtype) 142 | if np.linalg.norm(r_identity - I) < 1e-6: 143 | sy = (rmat[:2, 0] ** 2).sum() ** 0.5 144 | singular = sy < 1e-6 145 | 146 | if not singular: # check if it's a gimbal lock situation 147 | x = np.arctan2(rmat[2, 1], rmat[2, 2]) 148 | y = np.arctan2(-rmat[2, 0], sy) 149 | z = np.arctan2(rmat[1, 0], rmat[0, 0]) 150 | 151 | else: # if in gimbal lock, use different formula for yaw, pitch roll 152 | x = np.arctan2(-rmat[1, 2], rmat[1, 1]) 153 | y = np.arctan2(-rmat[2, 0], sy) 154 | z = 0 155 | 156 | if x > 0: 157 | x = np.pi - x 158 | else: 159 | x = -(np.pi + x) 160 | 161 | if z > 0: 162 | z = np.pi - z 163 | else: 164 | z = -(np.pi + z) 165 | 166 | return (np.array([x, y, z]) * 180.0 / np.pi).round(2) 167 | else: 168 | print("Isn't rotation matrix") 169 | 170 | 171 | def draw_pose_info(frame, img_point, point_proj, roll=None, pitch=None, yaw=None): 172 | """ 173 | Draw 3d orthogonal axis given a frame, a point in the frame, the projection point array. 174 | Also prints the information about the roll, pitch and yaw if passed 175 | 176 | :param frame: opencv image/frame 177 | :param img_point: tuple 178 | x,y position in the image/frame for the 3d axis for the projection 179 | :param point_proj: np.array 180 | Projected point along 3 axis obtained from the cv2.projectPoints function 181 | :param roll: float, optional 182 | :param pitch: float, optional 183 | :param yaw: float, optional 184 | :return: frame: opencv image/frame 185 | Frame with 3d axis drawn and, optionally, the roll,pitch and yaw values drawn 186 | """ 187 | frame = cv2.line( 188 | frame, img_point, tuple(point_proj[0].ravel().astype(int)), (255, 0, 0), 3 189 | ) 190 | frame = cv2.line( 191 | frame, img_point, tuple(point_proj[1].ravel().astype(int)), (0, 255, 0), 3 192 | ) 193 | frame = cv2.line( 194 | frame, img_point, tuple(point_proj[2].ravel().astype(int)), (0, 0, 255), 3 195 | ) 196 | 197 | if roll is not None and pitch is not None and yaw is not None: 198 | cv2.putText( 199 | frame, 200 | "Roll:" + str(round(roll, 0)), 201 | (500, 50), 202 | cv2.FONT_HERSHEY_PLAIN, 203 | 1, 204 | (255, 255, 255), 205 | 1, 206 | cv2.LINE_AA, 207 | ) 208 | cv2.putText( 209 | frame, 210 | "Pitch:" + str(round(pitch, 0)), 211 | (500, 70), 212 | cv2.FONT_HERSHEY_PLAIN, 213 | 1, 214 | (255, 255, 255), 215 | 1, 216 | cv2.LINE_AA, 217 | ) 218 | cv2.putText( 219 | frame, 220 | "Yaw:" + str(round(yaw, 0)), 221 | (500, 90), 222 | cv2.FONT_HERSHEY_PLAIN, 223 | 1, 224 | (255, 255, 255), 225 | 1, 226 | cv2.LINE_AA, 227 | ) 228 | 229 | return frame 230 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "dsd" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Ettore Candeloro"] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.10" 10 | opencv-contrib-python = "^4.10.0.82" 11 | mediapipe = "^0.10.14" 12 | numpy = "^1.0.0" 13 | 14 | 15 | [tool.poetry.group.dev.dependencies] 16 | black = "^24.4.2" 17 | isort = "^5.13.2" 18 | 19 | [build-system] 20 | requires = ["poetry-core"] 21 | build-backend = "poetry.core.masonry.api" 22 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | opencv-python 3 | mediapipe 4 | --------------------------------------------------------------------------------