├── .gitignore
├── LICENSE
├── README.md
├── docs
│   └── gifs
│       ├── afterspell.gif
│       ├── itunes.gif
│       ├── python_annotation_large.gif
│       ├── python_annotation_small.gif
│       ├── python_demo.gif
│       ├── slides.gif
│       ├── snap-zoom.gif
│       ├── viz.gif
│       └── web.gif
├── jesture_sdk
│   ├── README.md
│   ├── install.sh
│   └── remove.sh
└── python
    ├── README.md
    ├── annotation.py
    ├── demo.py
    ├── fonts
    │   ├── Comfortaa-Light.ttf
    │   └── Quivira.otf
    ├── images
    │   └── jesture_logo.png
    └── src
        ├── __init__.py
        ├── runner.py
        ├── thread_camera.py
        ├── thread_camera_draw.py
        └── utils.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # -------------- Custom - Jesture SDK related --------------
2 |
3 | *.zip
4 | *.dylib
5 | **/err.txt
6 | **/out.txt
7 |
8 | **/jesture_sdk/dev
9 | **/jesture_sdk/misc
10 | **/jesture_sdk/releases
11 | **/jesture_sdk/mediapipe
12 | **/jesture_sdk/third_party
13 |
14 | **/python/emoji
15 | **/python/out_data
16 | **/python/annotation_tool
17 | **/python/netron
18 | **/python/notebooks
19 | **/python/mediapipe
20 | **/python/_private
21 |
22 | # ------------- Python-specific ignore -------------
23 |
24 | **/__pycache__
25 |
26 | # -------------- macOS-specific ignore --------------
27 |
28 | # General
29 | .DS_Store
30 | .AppleDouble
31 | .LSOverride
32 |
33 | # Icon must end with two \r
34 | Icon
35 |
36 | # Thumbnails
37 | ._*
38 |
39 | # Files that might appear in the root of a volume
40 | .DocumentRevisions-V100
41 | .fseventsd
42 | .Spotlight-V100
43 | .TemporaryItems
44 | .Trashes
45 | .VolumeIcon.icns
46 | .com.apple.timemachine.donotpresent
47 |
48 | # Directories potentially created on remote AFP share
49 | .AppleDB
50 | .AppleDesktop
51 | Network Trash Folder
52 | Temporary Items
53 | .apdisk
54 |
55 | # -------------- Common ignore --------------
56 |
57 | # Prerequisites
58 | *.d
59 |
60 | # Compiled Object files
61 | *.slo
62 | *.lo
63 | *.o
64 | *.obj
65 |
66 | # Precompiled Headers
67 | *.gch
68 | *.pch
69 |
70 | # Compiled Dynamic libraries
71 | *.so
72 | *.dylib
73 | *.dll
74 |
75 | # Fortran module files
76 | *.mod
77 | *.smod
78 |
79 | # Compiled Static libraries
80 | *.lai
81 | *.la
82 | *.a
83 | *.lib
84 |
85 | # Executables
86 | *.exe
87 | *.out
88 | *.app
89 |
90 | # Jupyter notebook
91 | **/.ipynb_checkpoints
92 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2021 Jesture AI
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # Jesture AI SDK
6 |
7 |
8 |
9 | **Real-time gesture control made accessible.**
10 |
11 | [YouTube](https://www.youtube.com/watch?v=sxaZ_yLgtLk&list=PL4Z9jysfxyfj9F1Kfdy_4oXKX9-MESCpP&index=1)
12 | [Twitter](https://twitter.com/jestureai)
13 | [Website](https://www.jesture.ai)
14 | [Instagram](https://www.instagram.com/jesture.ai)
15 |
16 |
17 |
18 | ---
19 |
20 | ## Use-cases with Jesture AI SDK
21 | Immersive Gaming | Web Apps
22 | :----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------:
23 | | 
24 |
25 | Slides Control | Visual Effects
26 | :-------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------:
27 | | 
28 |
29 | Music Control | Snap Masks
30 | :-------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------:
31 | | 
32 |
33 | ## Install
34 |
35 | ### macOS
36 |
37 | 1. Clone this repo: `git clone https://github.com/jesture-ai/jesture-sdk`
38 | 2. Download the latest version of the SDK from the [Releases](https://github.com/jesture-ai/jesture-sdk/releases) page
39 | 3. Place the downloaded archive into the `jesture_sdk/` folder and unzip it there:
40 | ```
41 | jesture_sdk/
42 | |-- install.sh
43 | |-- remove.sh
44 | |-- full_cpu.dylib
45 | |-- third_party/
46 | |-- mediapipe/
47 | |-- third_party/
48 | ```
49 | 4. Install the dependencies: `sudo bash install.sh`
50 | 5. You are all set! Check out the [Python demo](https://github.com/jesture-ai/jesture-sdk/tree/main/python) to learn how to run the SDK in Python.
51 |
52 | To remove all the dependencies, run `sudo bash remove.sh`.
53 |
54 | **Note:** The `install.sh` script simply copies the dependencies to `/usr/local/Cellar/jestureai/` and `/usr/local/opt/jestureai/`. Writing to these paths requires privileged access (`sudo`).
55 |
56 | ### Windows
57 |
58 | The SDK is not yet available on Windows. Stay tuned!
59 |
60 | ### Attribution
61 |
62 | See `LICENSE` for details on third-party software.
63 |
64 | Some of our SDK modules use features of the [MediaPipe](https://github.com/google/mediapipe) open-source framework.
65 |
66 |
67 |
--------------------------------------------------------------------------------
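
A quick post-install sanity check, runnable from the repo root, that mirrors how `python/src/runner.py` locates the library. This is a minimal sketch, not part of the SDK itself; the dylib path below assumes you unpacked the release archive into `jesture_sdk/` as described in the Install steps:

```python
import ctypes
import os

# dependency paths created by install.sh
for path in ["/usr/local/Cellar/jestureai", "/usr/local/opt/jestureai"]:
    print(f"{path}: {'ok' if os.path.isdir(path) else 'MISSING'}")

# the SDK library unpacked from the release archive
dylib = os.path.join("jesture_sdk", "full_cpu.dylib")
if os.path.isfile(dylib):
    ctypes.CDLL(dylib)  # raises OSError if a dependency cannot be resolved
    print(f"{dylib}: loads ok")
else:
    print(f"{dylib}: MISSING")
```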
/docs/gifs/afterspell.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/afterspell.gif
--------------------------------------------------------------------------------
/docs/gifs/itunes.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/itunes.gif
--------------------------------------------------------------------------------
/docs/gifs/python_annotation_large.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/python_annotation_large.gif
--------------------------------------------------------------------------------
/docs/gifs/python_annotation_small.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/python_annotation_small.gif
--------------------------------------------------------------------------------
/docs/gifs/python_demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/python_demo.gif
--------------------------------------------------------------------------------
/docs/gifs/slides.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/slides.gif
--------------------------------------------------------------------------------
/docs/gifs/snap-zoom.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/snap-zoom.gif
--------------------------------------------------------------------------------
/docs/gifs/viz.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/viz.gif
--------------------------------------------------------------------------------
/docs/gifs/web.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/docs/gifs/web.gif
--------------------------------------------------------------------------------
/jesture_sdk/README.md:
--------------------------------------------------------------------------------
1 | # jesture_sdk
2 |
3 | The SDK itself consists of the `full_cpu.dylib` file and the `mediapipe/` folder. Please check the [Install](https://github.com/jesture-ai/jesture-sdk#install) section for how to obtain them.
4 |
5 | ## Table of Available Gestures:
6 |
7 | | Dynamic | Static |
8 | | :-: | :-: |
9 | | Thumb Up :thumbsup: | OK :ok_hand: |
10 | | Thumb Down :thumbsdown: | YEAH :v: |
11 | | Swipe Up | ROCK :metal: |
12 | | Swipe Down | ONE |
13 | | Swipe Left | TWO |
14 | | Swipe Right | THREE |
15 | | Sliding Two Fingers Up | FOUR |
16 | | Sliding Two Fingers Down | FIVE |
17 | | Sliding Two Fingers Left | SPIDERMAN :love_you_gesture: |
18 | | Sliding Two Fingers Right | |
19 | | Zooming Out With Two Fingers | |
20 | | Zooming In With Two Fingers | |
21 | | Zooming Out With Full Hand | |
22 | | Zooming In With Full Hand | |
23 | | Stop Sign :hand: | |
24 | | Drumming Fingers | |
25 | | Shaking Hand | |
26 | | Pushing Hand Away | |
27 | | Pulling Hand In | |
28 | | Pushing Two Fingers Away | |
29 | | Pulling Two Fingers In | |
30 |
--------------------------------------------------------------------------------
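
A sketch of how an application might react to the gestures listed above, by polling the Python wrapper from `python/src/`. The exact gesture strings returned by the library are an assumption here; print a few to see the spellings your build uses:

```python
import time
from src.runner import JestureSdkRunner

runner = JestureSdkRunner(cam_id=0).start_recognition()
try:
    last = None
    while True:
        gesture = runner.get_gesture('dynamic')
        if gesture != last:  # react only when the gesture changes
            last = gesture
            print('dynamic gesture:', gesture)
            if gesture == 'Swipe Left':     # assumed spelling
                print('-> previous slide')
            elif gesture == 'Swipe Right':  # assumed spelling
                print('-> next slide')
        time.sleep(0.03)  # ~30 polls per second
except KeyboardInterrupt:
    runner.stop_recognition()
```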
/jesture_sdk/install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | INSTALL_DIR=/usr/local/Cellar/jestureai/
4 | LINK_DIR=/usr/local/opt/jestureai/
5 |
6 | if [ ! -d "$INSTALL_DIR" ]; then
7 | mkdir ${INSTALL_DIR}
8 | fi
9 | if [ ! -d "$LINK_DIR" ]; then
10 | mkdir ${LINK_DIR}
11 | fi
12 |
13 | # ------------ INSTALL OPENCV -------------
14 |
15 | # Note: Version 3.4.10 is well-tested. Newer versions will probably also work.
16 | OPENCV_ARCHIVE_PATH=third_party/opencv@3.zip
17 |
18 | tar -xvzf ${OPENCV_ARCHIVE_PATH} -C ${INSTALL_DIR}
19 |
20 | # Note: Mandatory step, this path is required by the SDK library (full_cpu.dylib).
21 | ln -s ${INSTALL_DIR}/opencv@3/3.4.10_4/ ${LINK_DIR}/opencv@3
22 |
23 | # ------------ INSTALL ONNXRUNTIME -------------
24 |
25 | # Note: Version 1.3 because newer releases have some issues with CPU load on macOS.
26 | ORT_ARCHIVE_PATH=third_party/onnxruntime@1.3.zip
27 |
28 | tar -xvzf ${ORT_ARCHIVE_PATH} -C ${INSTALL_DIR}
29 |
30 | # Note: Mandatory step, this path is required by the SDK library (full_cpu.dylib).
31 | ln -s ${INSTALL_DIR}/onnxruntime@1.3/ ${LINK_DIR}/onnxruntime@1.3
32 |
--------------------------------------------------------------------------------
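
The script's effect is easy to verify afterwards; a small sketch (paths taken from the script above) that checks the created symlinks resolve to real directories:

```python
import os

for link in ["/usr/local/opt/jestureai/opencv@3",
             "/usr/local/opt/jestureai/onnxruntime@1.3"]:
    target = os.path.realpath(link)
    status = "ok" if os.path.isdir(target) else "BROKEN"
    print(f"{link} -> {target} [{status}]")
```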
/jesture_sdk/remove.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | INSTALL_DIR=/usr/local/Cellar/jestureai/
4 | LINK_DIR=/usr/local/opt/jestureai/
5 |
6 | if [ -d "$INSTALL_DIR" ]; then
7 | rm -r ${INSTALL_DIR}
8 | fi
9 | if [ -d "$LINK_DIR" ]; then
10 | rm -r ${LINK_DIR}
11 | fi
12 |
13 |
--------------------------------------------------------------------------------
/python/README.md:
--------------------------------------------------------------------------------
1 | # Jesture SDK: Python
2 |
3 | Please check out the [Install](https://github.com/jesture-ai/jesture-sdk#install) section on the main page first.
4 |
5 | ## Demo
6 |
7 | To run the Python demo (tested with Python 3 only):
8 | 1. Copy the SDK library to `src/` (next to `runner.py`, which loads it): `cp ../jesture_sdk/full_cpu.dylib ./src/`
9 | 2. Copy the `mediapipe` folder to the current folder: `cp -r ../jesture_sdk/mediapipe ./`
10 | 3. Run the demo script from the current folder: `python demo.py`
11 |
12 | **Note:** The system may ask you to grant permission to execute the `full_cpu.dylib` file; this binary is used by the Python demo to recognize hand gestures in real time.
13 |
14 |
15 | ![Jesture SDK Python demo](../docs/gifs/python_demo.gif)
16 | The top box shows the current dynamic gesture; the two bottom boxes show the current static gestures for the left and right hands, respectively. Please refer to the [list of recognized gestures](https://github.com/jesture-ai/jesture-sdk/blob/main/jesture_sdk/README.md) for more details.
17 |
18 | ## Annotation tool
19 |
20 | You can use our SDK to collect hand keypoints for training your own gesture recognition model. We prepared a convenient script for this:
21 |
22 | ```
23 | python annotation.py --cam_id=0
24 | ```
25 | **Note:** You need to complete steps 1 and 2 from the Demo instructions before using the SDK Python wrapper.
26 |
27 |
28 |
29 | Each time a key ("0"-"9" in this case) is pressed, a record is appended to a list, which is saved to an `out_data/hand_keypoints_{datetime}.pkl` file.
30 |
31 | Feel free to change the corresponding keys (`key_to_idx`) and the class names (`idx_to_gesture`) in `annotation.py`.
32 |
--------------------------------------------------------------------------------
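
If you only need the recognition results without the demo UI, here is a minimal sketch distilled from `demo.py` and `src/runner.py`; run it from the `python/` folder so the dylib and the `mediapipe/` folder are found:

```python
import time
from src.runner import JestureSdkRunner

# start the recognition thread (camera 0, all recognizers enabled)
runner = JestureSdkRunner(cam_id=0,
                          use_tracking=True,
                          use_static_gestures=True,
                          use_dynamic_gestures=True)
runner.start_recognition()

try:
    for _ in range(100):  # poll for ~10 seconds
        print('dynamic:', runner.get_gesture('dynamic'),
              '| left:', runner.get_gesture('left_static'),
              '| right:', runner.get_gesture('right_static'))
        time.sleep(0.1)
finally:
    runner.stop_recognition()
```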
/python/annotation.py:
--------------------------------------------------------------------------------
1 | from PIL import Image, ImageDraw, ImageFont
2 | import matplotlib.pyplot as plt
3 | from skimage import io
4 | import numpy as np
5 | import argparse
6 | import datetime
7 | import pickle
8 | import time
9 | import cv2
10 | import sys
11 | import os
12 |
13 | sys.path.append(os.path.abspath(".."))
14 |
15 | from src.runner import JestureSdkRunner
16 | from src.utils import load_image_with_alpha, overlay_alpha
17 | from src.utils import draw_text, draw_multiline_text, draw_skeleton
18 | from src.thread_camera_draw import ThreadCameraDraw
19 |
20 | print('cv2.__version__:', cv2.__version__) # 4.1.2 recommended
21 |
22 |
23 | # parse args
24 | parser = argparse.ArgumentParser(description='Collect hand keypoints data for gesture recognition model training.')
25 | parser.add_argument('--cam_id', type=int, default=0)
26 | args = parser.parse_args()
27 |
28 |
29 | # create the application window
30 | name = 'JestureSDK: Annotation Tool'
31 | width, height = (640, 480)
32 | cv2.namedWindow(name)
33 | # cv2.resizeWindow(name, (width, height))
34 | cv2.startWindowThread()
35 |
36 | # set the data file
37 | data_dir = './out_data'
38 | os.makedirs(data_dir, exist_ok=True)
39 | now = datetime.datetime.now()
40 | dt = f'{now.day:02d}{now.month:02d}{now.year%100:02d}_{now.hour:02d}_{now.minute:02d}'
41 | data_file_name = f'{data_dir}/hand_keypoints_{dt}.pkl'
42 |
43 | # set the logo stuff
44 | logo_path = 'images/jesture_logo.png'
45 | logo_img, logo_alpha = load_image_with_alpha(logo_path, remove_borders=True)
46 | logo_loc = (10, 10)
47 |
48 | # set the gestures help stuff
49 | key_to_idx = {'0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5,
50 | '6': 6, '7': 7, '8': 8, '9': 9}
51 | key_ords = [ord(x) for x in key_to_idx]
52 | idx_to_gesture = {0: 'no_gesture', 1: 'one', 2: 'two', 3: 'three', 4: 'four',
53 | 5: 'five', 6: 'fist', 7: 'peace', 8: 'love', 9: 'ok'}
54 | idx_to_count = {k: 0 for k in idx_to_gesture}
55 | # help_textlist = [f'{k}: {idx_to_gesture[key_to_idx[k]]} {idx_to_count[key_to_idx[k]]}' for k in key_to_idx]
56 | # help_textlist_str = '\n'.join(help_textlist)
57 |
58 | help_box_width = 175
59 | help_box_tl = {'right': (10, height//5+10),
60 | 'left': (width-(help_box_width+10), height//5+10)}
61 | help_box_br = {'right': (20+help_box_width, len(key_to_idx)*35),
62 | 'left': (width, len(key_to_idx)*35)}
63 | help_text_loc = {'right': (help_box_tl['right'][0]+10, help_box_tl['right'][1]+10),
64 | 'left': (help_box_tl['left'][0]+10, help_box_tl['left'][1]+10)}
65 | help_font = ImageFont.truetype("fonts/Comfortaa-Light.ttf", 20)
66 |
67 | # set the scaled hands stuff
68 | mid_hand_box_tl = (width//3, height-height//5)
69 | mid_hand_box_br = (2*width//3, height)
70 | hand_box_tl = {'right': (2*width//3, height-height//5),
71 | 'left': (0, height-height//5)}
72 | hand_box_br = {'right': (width, height),
73 | 'left': (width//3, height)}
74 |
75 | # set the hand type stuff
76 | handtype_text = {"right": "Right hand capture (press L to change)",
77 | "left": "Left hand capture (press R to change)"}
78 | handtype_text_loc = (width//3, 25)
79 |
80 | # set the counter stuff
81 | count_text_loc = (width//3, 25)
82 |
83 | # set common font
84 | font = ImageFont.truetype("fonts/Comfortaa-Light.ttf", 24)
85 | handtype_font = ImageFont.truetype("fonts/Comfortaa-Light.ttf", 20)
86 |
87 | # variables used in the main loop
88 | pressed_duration = 0
89 | pressed_text = ''
90 |
91 | selfie_mode = True
92 | hand_type = 'right'
93 | data_list = []
94 | prev_k = ''
95 | i = 0
96 |
97 |
98 | if __name__ == "__main__":
99 | # start Jesture SDK Python runner
100 | jesture_runner = JestureSdkRunner(cam_id=args.cam_id,
101 | use_tracking=True,
102 | use_static_gestures=False,
103 | use_dynamic_gestures=False)
104 | jesture_runner.start_recognition()
105 |
106 | # start reading frames to display in the application window
107 | cap = ThreadCameraDraw(jesture_runner,
108 | cam_id=args.cam_id,
109 | width=width, height=height,
110 | hand_box_tl=mid_hand_box_tl, hand_box_br=mid_hand_box_br,
111 | draw_hand_box=False)
112 | cap.start()
113 |
114 | # start the main loop
115 | while(True):
116 | if cap.frame is None:
117 | continue
118 |
119 | # get current webcam image with drawn hand skeletons
120 | frame = cap.frame[:,::-1,:] if selfie_mode else cap.frame
121 |
122 | # draw logo
123 | frame = overlay_alpha(logo_img[:,:,::-1], logo_alpha, frame, loc=logo_loc, alpha=1.0)
124 |
125 | # draw ui elements
126 | frame = Image.fromarray(frame if type(np.array([])) == type(frame) else frame.get())
127 | draw = ImageDraw.Draw(frame, "RGBA")
128 | draw.rectangle((help_box_tl[hand_type], help_box_br[hand_type]),
129 | fill=(0, 0, 0, 127), outline=(235, 190, 63, 255))
130 | # draw.rectangle((hand_box_tl, hand_box_br), fill=(0, 0, 0, 127), outline=(235, 190, 63, 255))
131 |
132 | # draw text
133 | draw.multiline_text(handtype_text_loc, handtype_text[hand_type],
134 | font=handtype_font, fill=(255, 255, 255, 200))
135 |
136 | help_textlist = [f'{idx_to_count[key_to_idx[k]]} | {k}: {idx_to_gesture[key_to_idx[k]]}'
137 | for k in key_to_idx]
138 | help_textlist_str = '\n'.join(help_textlist)
139 | draw.multiline_text(help_text_loc[hand_type], help_textlist_str,
140 | font=help_font, fill=(255, 255, 255))
141 |
142 | # retrieve keyboard signal
143 | c = cv2.waitKey(1) % 256
144 | if c == ord('q'):
145 | break
146 |
147 | if c == ord('l'):
148 | hand_type = 'left'
149 | if c == ord('r'):
150 | hand_type = 'right'
151 |
152 | # retrieve if gesture key is pressed
153 | if chr(c) in key_to_idx:
154 | k, v = chr(c), idx_to_gesture[key_to_idx[chr(c)]]
155 | pressed_text = f'{idx_to_count[key_to_idx[k]]} | {k}: {v}'
156 | idx_to_count[key_to_idx[k]] += 1
157 | pressed_duration = 4
158 | print(f"pressed {pressed_text}, shape: {frame.size}")
159 | data_list.append({
160 | 'hand_type': hand_type,
161 | 'gesture_id': key_to_idx[k],
162 | 'gesture_name': v,
163 | 'pred_gesture_name': jesture_runner.get_gesture(
164 | f'{hand_type}_static'),
165 | 'keypoints': jesture_runner.get_hand_keypoints(
166 | f'{hand_type}_keypoints'),
167 | 'scaled_keypoints': jesture_runner.get_hand_keypoints(
168 | f'scaled_{hand_type}_keypoints'),
169 | })
170 |             # save the data collected so far, so it is not lost
171 |             # if the program exits unexpectedly
172 | if k != prev_k:
173 | with open(data_file_name, 'wb') as file:
174 | pickle.dump(data_list, file)
175 | prev_k = k
176 |
177 |         # draw the notification text for a few frames after a key press
178 | if pressed_duration > 0:
179 | notify_textlist_str = "\n".join(
180 | [x if x == pressed_text else "" for x in help_textlist])
181 | draw.multiline_text(help_text_loc[hand_type], notify_textlist_str,
182 | font=help_font, fill=(235, 190, 63))
183 | pressed_duration -= 1
184 |
185 | frame = np.array(frame).astype(np.uint8)
186 | cv2.imshow(name, frame)
187 |
188 | i += 1
189 |
190 | # save all the data collected
191 | with open(data_file_name, 'wb') as file:
192 | print(f'Dumping {len(data_list)} items to {data_file_name}...')
193 | pickle.dump(data_list, file)
194 | print(f'Dumped.')
195 |
196 | # finish and close all resources
197 | jesture_runner.stop_recognition()
198 | cap.stop()
199 |
200 | cv2.waitKey(1)
201 | cv2.destroyWindow(name)
202 | cv2.destroyAllWindows()
203 | cv2.waitKey(1)
204 |
--------------------------------------------------------------------------------
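
The file written by `annotation.py` is a pickled list of dicts, one per key press. A short sketch of loading it back for training; the filename below is only an example, use whichever timestamped file the tool produced:

```python
import pickle
import numpy as np

# example filename; annotation.py timestamps the real one
with open('out_data/hand_keypoints_010121_12_00.pkl', 'rb') as f:
    records = pickle.load(f)

print(f'{len(records)} records')
for rec in records[:3]:
    kps = np.asarray(rec['scaled_keypoints'])  # (21, 3) array per hand
    print(rec['hand_type'], rec['gesture_id'], rec['gesture_name'],
          '| predicted:', rec['pred_gesture_name'],
          '| keypoints shape:', kps.shape)
```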
/python/demo.py:
--------------------------------------------------------------------------------
1 | from PIL import Image, ImageDraw, ImageFont
2 | import matplotlib.pyplot as plt
3 | from skimage import io
4 | import numpy as np
5 | import argparse
6 | import cv2
7 | import sys
8 | import os
9 |
10 | sys.path.append(os.path.abspath(".."))
11 |
12 | from src.runner import JestureSdkRunner
13 | from src.utils import load_image_with_alpha, overlay_alpha
14 | from src.utils import draw_text, draw_multiline_text
15 | from src.thread_camera import ThreadCamera
16 |
17 | print('cv2.__version__:', cv2.__version__) # 4.1.2 recommended
18 |
19 |
20 | # parse args
21 | parser = argparse.ArgumentParser(description='Run the Jesture SDK Python demo with real-time hand gesture recognition.')
22 | parser.add_argument('--cam_id', type=int, default=0)
23 | args = parser.parse_args()
24 |
25 |
26 | # create the application window
27 | name = 'Jesture SDK: Python Demo'
28 | width, height = (640, 480)
29 | cv2.namedWindow(name)
30 | # cv2.resizeWindow(name, (width, height))
31 | cv2.startWindowThread()
32 |
33 | # load the logo image
34 | logo_path = f'images/jesture_logo.png'
35 | logo_img, logo_alpha = load_image_with_alpha(logo_path, remove_borders=True)
36 |
37 | # set the ui elements positions
38 | top_box_tl = (230, 10)
39 | top_box_br = (630, 70)
40 |
41 | left_box_tl = (70, 360)
42 | left_box_br = (230, 420)
43 |
44 | right_box_tl = (420, 360)
45 | right_box_br = (580, 420)
46 |
47 | # set the text positions
48 | logo_loc = (10, 10)
49 | left_text_loc = (80, 375)
50 | right_text_loc = (430, 375)
51 | dynamic_text_loc = (240, 25)
52 | font = ImageFont.truetype("fonts/Comfortaa-Light.ttf", 24)
53 |
54 | # start Jesture SDK Python runner
55 | jesture_runner = JestureSdkRunner(
56 | cam_id=args.cam_id,
57 | use_tracking=True,
58 | use_static_gestures=True,
59 | use_dynamic_gestures=True
60 | )
61 | jesture_runner.start_recognition()
62 |
63 | # start reading frames to display in the application window
64 | cap = ThreadCamera(cam_id=args.cam_id, width=width, height=height)
65 | cap.start()
66 |
67 |
68 | selfie_mode = True
69 | i = 0
70 | while(True):
71 | if cap.frame is None:
72 | continue
73 |
74 | # get current webcam image
75 | frame = cap.frame[:,::-1,:] if selfie_mode else cap.frame # TODO: read frames from dylib
76 |
77 | # get current hand gestures
78 | dynamic_gesture = jesture_runner.get_gesture('dynamic')
79 | left_gesture = jesture_runner.get_gesture('left_static')
80 | right_gesture = jesture_runner.get_gesture('right_static')
81 |
82 | # draw logo
83 | frame = overlay_alpha(logo_img[:,:,::-1], logo_alpha, frame, loc=logo_loc, alpha=1.0)
84 |
85 | # draw ui elements
86 | frame = Image.fromarray(frame)
87 | draw = ImageDraw.Draw(frame, "RGBA")
88 | draw.rectangle((top_box_tl, top_box_br), fill=(0, 0, 0, 127), outline=(235, 190, 63, 127))
89 | draw.rectangle((left_box_tl, left_box_br), fill=(0, 0, 0, 127), outline=(235, 190, 63, 127))
90 | draw.rectangle((right_box_tl, right_box_br), fill=(0, 0, 0, 127), outline=(235, 190, 63, 127))
91 |
92 | # draw text
93 | draw.text(dynamic_text_loc, dynamic_gesture, font=font)
94 | draw.text(left_text_loc, left_gesture, font=font)
95 | draw.text(right_text_loc, right_gesture, font=font)
96 | frame = np.array(frame).astype(np.uint8)
97 |
98 | cv2.imshow(name, frame)
99 |
100 | if cv2.waitKey(1) & 0xFF == ord('q'):
101 | break
102 |
103 | i += 1
104 |
105 | # finish all jobs
106 | jesture_runner.stop_recognition()
107 | cap.stop()
108 |
109 | cv2.waitKey(1)
110 | cv2.destroyWindow(name)
111 | cv2.destroyAllWindows()
112 | cv2.waitKey(1)
113 |
--------------------------------------------------------------------------------
/python/fonts/Comfortaa-Light.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/python/fonts/Comfortaa-Light.ttf
--------------------------------------------------------------------------------
/python/fonts/Quivira.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/python/fonts/Quivira.otf
--------------------------------------------------------------------------------
/python/images/jesture_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/python/images/jesture_logo.png
--------------------------------------------------------------------------------
/python/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jesture-ai/jesture-sdk/e9f240ace2a9bd642aa0d2d4344ae3d0ed8d4b20/python/src/__init__.py
--------------------------------------------------------------------------------
/python/src/runner.py:
--------------------------------------------------------------------------------
1 | from numpy.ctypeslib import ndpointer
2 | import ctypes, ctypes.util
3 | import sys, platform
4 | import numpy as np
5 | import shutil
6 |
7 | from threading import Thread
8 | import logging
9 | import time
10 | import os
11 |
12 | logging.basicConfig(level=logging.DEBUG, format='(%(threadName)-10s) %(message)s')
13 |
14 | # ------------ Mac OS X specific stuff ------------
15 | ctypes.util.find_library("libSystem.B.dylib")
16 | # print('shutil.which("libSystem.B.dylib"):', shutil.which("libSystem.B.dylib"))
17 | # print('ctypes.CDLL("libSystem.B.dylib")._name:', ctypes.CDLL("libSystem.B.dylib")._name)
18 | # print('ctypes.__version__:', ctypes.__version__)
19 | print('platform.mac_ver():', platform.mac_ver())
20 |
21 | # ------------ Jesture SDK setup ------------
22 |
23 | jesturesdk_lib_name = os.path.join(os.path.dirname(__file__), "full_cpu")
24 | jesturesdk_lib_path = ctypes.util.find_library(jesturesdk_lib_name)
25 | if not jesturesdk_lib_path:
26 | print("Unable to find the specified library: {}".format(jesturesdk_lib_name))
27 | sys.exit()
28 |
29 | jesture_lib = ctypes.CDLL(jesturesdk_lib_path)
30 |
31 | # -------------- COMMON --------------
32 |
33 | create_full_cpu = jesture_lib.CreateFullCpu
34 | create_full_cpu.argtypes = [ctypes.c_int, ctypes.c_bool, ctypes.c_bool, ctypes.c_bool]
35 | create_full_cpu.restype = ctypes.POINTER(ctypes.c_int)
36 |
37 | run_full_cpu = jesture_lib.RunFullCpu
38 | run_full_cpu.argtypes = [ctypes.POINTER(ctypes.c_int)]
39 | run_full_cpu.restype = None
40 |
41 | stop_full_cpu = jesture_lib.StopFullCpu
42 | stop_full_cpu.argtypes = [ctypes.POINTER(ctypes.c_int)]
43 | stop_full_cpu.restype = None
44 |
45 | dispose_full_cpu = jesture_lib.DisposeFullCpu
46 | dispose_full_cpu.argtypes = [ctypes.POINTER(ctypes.c_int)]
47 | dispose_full_cpu.restype = None
48 |
49 | get_camera_width = jesture_lib.GetCameraWidth
50 | get_camera_width.argtypes = [ctypes.POINTER(ctypes.c_int)]
51 | get_camera_width.restype = ctypes.c_int
52 |
53 | get_camera_height = jesture_lib.GetCameraHeight
54 | get_camera_height.argtypes = [ctypes.POINTER(ctypes.c_int)]
55 | get_camera_height.restype = ctypes.c_int
56 |
57 | # -------------- HANDS --------------
58 |
59 | # gestures
60 |
61 | get_dynamic_gesture = jesture_lib.GetCurrentDynamicGesture
62 | get_dynamic_gesture.argtypes = [ctypes.POINTER(ctypes.c_int)]
63 | get_dynamic_gesture.restype = ctypes.c_char_p
64 |
65 | get_dynamic_gesture_idx = jesture_lib.GetCurrentDynamicGestureIdx
66 | get_dynamic_gesture_idx.argtypes = [ctypes.POINTER(ctypes.c_int)]
67 | get_dynamic_gesture_idx.restype = ctypes.c_int
68 |
69 | get_static_left_gesture = jesture_lib.GetCurrentStaticLeftGesture
70 | get_static_left_gesture.argtypes = [ctypes.POINTER(ctypes.c_int)]
71 | get_static_left_gesture.restype = ctypes.c_char_p
72 |
73 | get_static_right_gesture = jesture_lib.GetCurrentStaticRightGesture
74 | get_static_right_gesture.argtypes = [ctypes.POINTER(ctypes.c_int)]
75 | get_static_right_gesture.restype = ctypes.c_char_p
76 |
77 | # screensize-relative keypoints
78 |
79 | get_hand_left_keypoints = jesture_lib.GetCurrentHandLeftKeypoints
80 | get_hand_left_keypoints.argtypes = [ctypes.POINTER(ctypes.c_int)]
81 | get_hand_left_keypoints.restype = ndpointer(dtype=ctypes.c_double, shape=(63,))
82 |
83 | get_hand_right_keypoints = jesture_lib.GetCurrentHandRightKeypoints
84 | get_hand_right_keypoints.argtypes = [ctypes.POINTER(ctypes.c_int)]
85 | get_hand_right_keypoints.restype = ndpointer(dtype=ctypes.c_double, shape=(63,))
86 |
87 | # screensize-independent keypoints
88 |
89 | get_scaled_left_keypoints = jesture_lib.GetCurrentScaledLeftKeypoints
90 | get_scaled_left_keypoints.argtypes = [ctypes.POINTER(ctypes.c_int)]
91 | get_scaled_left_keypoints.restype = ndpointer(dtype=ctypes.c_double, shape=(63,))
92 |
93 | get_scaled_right_keypoints = jesture_lib.GetCurrentScaledRightKeypoints
94 | get_scaled_right_keypoints.argtypes = [ctypes.POINTER(ctypes.c_int)]
95 | get_scaled_right_keypoints.restype = ndpointer(dtype=ctypes.c_double, shape=(63,))
96 |
97 |
98 | class JestureSdkRunner:
99 | HAND_KEYPOINTS_METHOD_DICT = {
100 | 'left_keypoints': get_hand_left_keypoints,
101 | 'right_keypoints': get_hand_right_keypoints,
102 | 'scaled_left_keypoints': get_scaled_left_keypoints,
103 | 'scaled_right_keypoints': get_scaled_right_keypoints,
104 | }
105 | GESTURE_METHOD_DICT = {
106 | 'dynamic': get_dynamic_gesture,
107 | 'left_static': get_static_left_gesture,
108 | 'right_static': get_static_right_gesture
109 | }
110 |
111 | def __init__(self, cam_id=0, use_tracking=True, use_static_gestures=True, use_dynamic_gestures=True):
112 | self.cam_id = cam_id
113 | self.use_tracking = use_tracking
114 | self.use_static_gestures = use_static_gestures
115 | self.use_dynamic_gestures = use_dynamic_gestures
116 | self.instance = create_full_cpu(cam_id, use_tracking, use_static_gestures, use_dynamic_gestures)
117 | logging.debug('[JestureSdkRunner] Instance created.')
118 |
119 | def start_recognition(self):
120 | self.thread = Thread(name='jesture_sdk_python_thread',
121 | target=self.run_recognition,
122 | args=())
123 | # d.setDaemon(True)
124 | self.thread.start()
125 | logging.debug('[JestureSdkRunner] Recognition thread started.')
126 | return self
127 |
128 | def run_recognition(self):
129 | logging.debug('[JestureSdkRunner] Starting recognition...')
130 | run_full_cpu(self.instance)
131 |
132 | def stop_recognition(self):
133 | logging.debug('[JestureSdkRunner] Stopping recognition...')
134 | stop_full_cpu(self.instance)
135 | logging.debug('[JestureSdkRunner] Recognition stopped.')
136 | self.thread.join()
137 | logging.debug('[JestureSdkRunner] Thread joined.')
138 |
139 | def get_camera_width(self):
140 | return get_camera_width(self.instance)
141 |
142 | def get_camera_height(self):
143 | return get_camera_height(self.instance)
144 |
145 | def get_gesture(self, gesture_type):
146 | """
147 | Get hand gesture by `gesture_type`.
148 | """
149 |
150 | method = JestureSdkRunner.GESTURE_METHOD_DICT[gesture_type]
151 | return method(self.instance).decode()
152 |
153 | def get_hand_keypoints(self, keypoints_type):
154 | """
155 | Get hand keypoints by `keypoints_type`.
156 | """
157 |
158 | method = JestureSdkRunner.HAND_KEYPOINTS_METHOD_DICT[keypoints_type]
159 | raw_keypoints = method(self.instance)
160 | keypoints = raw_keypoints.reshape(21, 3).copy()
161 | return keypoints
162 |
--------------------------------------------------------------------------------
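
Each keypoint getter above returns a `(21, 3)` array. A small sketch of turning that into a pixel-space bounding box, assuming (as `thread_camera_draw.py` does) that the coordinates are normalized to `[0, 1]`:

```python
import numpy as np

def hand_bbox(keypoints, frame_width, frame_height):
    """Pixel-space bounding box from normalized (21, 3) hand keypoints."""
    kps = np.clip(np.asarray(keypoints), 0.0, 1.0)
    xs = kps[:, 0] * frame_width
    ys = kps[:, 1] * frame_height
    return int(xs.min()), int(ys.min()), int(xs.max()), int(ys.max())

# usage with a running JestureSdkRunner instance `runner`:
# x1, y1, x2, y2 = hand_bbox(runner.get_hand_keypoints('right_keypoints'), 640, 480)
```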
/python/src/thread_camera.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | import logging
3 | import cv2
4 |
5 |
6 | logging.basicConfig(level=logging.DEBUG, format='(%(threadName)-10s) %(message)s')
7 |
8 |
9 | class ThreadCamera:
10 | def __init__(self, cam_id=0, width=640, height=480):
11 | self.stream = cv2.VideoCapture(cam_id)
12 | self.stream.set(cv2.CAP_PROP_FRAME_WIDTH, width)
13 | self.stream.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
14 | (self.grabbed, self.frame) = self.stream.read()
15 | self.stopped = False
16 |
17 | def start(self):
18 | self.thread = Thread(name='Camera Python Thread', target=self.update, args=())
19 | self.thread.start()
20 | return self
21 |
22 | def update(self):
23 | while not self.stopped:
24 | (self.grabbed, self.frame) = self.stream.read()
25 | logging.debug('[ThreadCamera] Frame loop finished.')
26 | self.stream.release()
27 | logging.debug('[ThreadCamera] Capture released.')
28 |
29 | def read(self):
30 | return self.frame
31 |
32 |     def stop(self):
33 | logging.debug('[ThreadCamera] Stopping...')
34 | self.stopped = True
35 | self.thread.join()
36 | logging.debug('[ThreadCamera] Camera thread joined.')
37 |
38 | def __exit__(self, exc_type, exc_value, traceback):
39 | self.stream.release()
40 |
--------------------------------------------------------------------------------
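
A sketch of using `ThreadCamera` on its own: frames are grabbed on a background thread, so the reader tolerates a `None` frame at startup just as `demo.py` does:

```python
import cv2
from src.thread_camera import ThreadCamera

cap = ThreadCamera(cam_id=0, width=640, height=480).start()
try:
    while True:
        frame = cap.read()
        if frame is None:  # the camera may not have produced a frame yet
            continue
        cv2.imshow('preview', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    cap.stop()
    cv2.destroyAllWindows()
```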
/python/src/thread_camera_draw.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | import logging
3 | import cv2
4 |
5 | from PIL import Image, ImageDraw, ImageFont
6 | import matplotlib.pyplot as plt
7 | from skimage import io
8 | import numpy as np
9 |
10 | from .utils import draw_skeleton
11 |
12 |
13 | logging.basicConfig(level=logging.DEBUG, format='(%(threadName)-10s) %(message)s')
14 |
15 |
16 | class ThreadCameraDraw:
17 | def __init__(self, jesture_runner, cam_id=0, width=640, height=480,
18 | hand_box_tl=None, hand_box_br=None, draw_hand_box=False, mirror=False):
19 | '''
20 | Args:
21 | hand_box_tl (tuple[2]): top-left corner of ui box with hands
22 | hand_box_br (tuple[2]): bottom-right corner of ui box with hands
23 | '''
24 |
25 | self.jesture_runner = jesture_runner
26 | self.cam_id = cam_id
27 | self.width = width
28 | self.height = height
29 |
30 | self.stream = cv2.VideoCapture(self.cam_id)
31 | self.stream.set(cv2.CAP_PROP_FRAME_WIDTH, self.width)
32 | self.stream.set(cv2.CAP_PROP_FRAME_HEIGHT, self.height)
33 | (self.grabbed, self.frame) = self.stream.read()
34 | self.stopped = False
35 |
36 | self.hand_box_tl = hand_box_tl
37 | self.hand_box_br = hand_box_br
38 | self.draw_hand_box = draw_hand_box
39 |
40 | self.mirror = mirror
41 |
42 | def _scale_and_shift(self, keypoints, scale, shift):
43 | keypoints = np.array([scale[0], scale[1], 1]) * keypoints + np.array([shift[0], shift[1], 0])
44 | return keypoints
45 |
46 | def start(self):
47 | logging.debug('[ThreadCameraDraw] Starting a thread...')
48 | self.thread = Thread(name='Camera-Draw Python Thread', target=self.update, args=())
49 | self.thread.start()
50 | logging.debug('[ThreadCameraDraw] Thread started.')
51 | return self
52 |
53 | def update(self):
54 | logged = False
55 | while not self.stopped:
56 | (self.grabbed, frame) = self.stream.read()
57 |
58 | if not self.grabbed:
59 | continue
60 |
61 | display_height, display_width = frame.shape[:2]
62 | if not logged:
63 |                 print('Camera params were set to:', self.width, self.height)
64 | print('Real params are:', display_width, display_height)
65 |
66 | frame = cv2.resize(frame, (self.width, self.height))
67 |
68 | # get current hand keypoints
69 | left_keypoints = self.jesture_runner.get_hand_keypoints('left_keypoints')
70 | right_keypoints = self.jesture_runner.get_hand_keypoints('right_keypoints')
71 |
72 |             left_keypoints = np.clip(left_keypoints, 0.0, 1.0)  # clip: values may fall slightly outside [0, 1]
73 |             right_keypoints = np.clip(right_keypoints, 0.0, 1.0)  # clip: values may fall slightly outside [0, 1]
74 |
75 | # scale absolute keypoints by the actual display image size
76 | left_keypoints = left_keypoints * np.array([display_width, display_height, 1.0])
77 | if not logged: print(left_keypoints)
78 | right_keypoints = right_keypoints * np.array([display_width, display_height, 1.0])
79 | if not logged: print(right_keypoints)
80 | if self.mirror:
81 | left_keypoints[:,0] = display_width - left_keypoints[:,0]
82 | right_keypoints[:,0] = display_width - right_keypoints[:,0]
83 |
84 | # draw skeletons using screen-sized hand keypoints
85 | frame = draw_skeleton(frame, left_keypoints)
86 | frame = draw_skeleton(frame, right_keypoints)
87 |
88 | # TODO: move all `ImageDraw` tasks to a separate thread or do it asynchronously
89 | # draw a special box for scaled keypoints
90 | if self.draw_hand_box:
91 | frame = Image.fromarray(frame if type(np.array([])) == type(frame) else frame.get())
92 | draw = ImageDraw.Draw(frame, "RGBA")
93 | draw.rectangle((self.hand_box_tl, self.hand_box_br), fill=(0, 0, 0, 127), outline=(235, 190, 63, 255))
94 | frame = np.array(frame).astype(np.uint8)
95 |
96 | # get the scaled hand keypoints
97 | scaled_left_keypoints = self.jesture_runner.get_hand_keypoints('scaled_left_keypoints')
98 | scaled_right_keypoints = self.jesture_runner.get_hand_keypoints('scaled_right_keypoints')
99 |
100 |             scaled_left_keypoints = np.clip(scaled_left_keypoints, 0.0, 1.0)  # clip to [0, 1] before box mapping
101 |             scaled_right_keypoints = np.clip(scaled_right_keypoints, 0.0, 1.0)  # clip to [0, 1] before box mapping
102 |
103 | # scale and shift them to be in a proper place on the display image
104 | scale_x = (self.hand_box_br[0] - self.hand_box_tl[0]) // 2
105 | scale_y = self.hand_box_br[1] - self.hand_box_tl[1]
106 | scale = (scale_x, scale_y)
107 | shift_left = (self.hand_box_tl[0], self.hand_box_tl[1])
108 | shift_right = (self.hand_box_tl[0] + scale_x, self.hand_box_tl[1])
109 | scaled_left_keypoints = self._scale_and_shift(
110 | scaled_left_keypoints, scale=scale, shift=shift_left if self.mirror else shift_right)
111 | scaled_right_keypoints = self._scale_and_shift(
112 | scaled_right_keypoints, scale=scale, shift=shift_right if self.mirror else shift_left)
113 |
114 | # draw scaled keypoints
115 | frame = draw_skeleton(frame, scaled_left_keypoints, indices=False)
116 | frame = draw_skeleton(frame, scaled_right_keypoints, indices=False)
117 |
118 | # save to the field
119 | self.frame = frame
120 |
121 | if not logged:
122 | logged = True
123 |
124 | logging.debug('[ThreadCameraDraw] Frame loop finished.')
125 | self.stream.release()
126 | logging.debug('[ThreadCameraDraw] Capture released.')
127 |
128 | def read(self):
129 | return self.frame
130 |
131 |     def stop(self):
132 | logging.debug('[ThreadCameraDraw] Stopping...')
133 | self.stopped = True
134 | self.thread.join()
135 | logging.debug('[ThreadCameraDraw] Camera thread joined.')
136 |
137 |
138 | def __exit__(self, exc_type, exc_value, traceback):
139 | self.stream.release()
140 |
--------------------------------------------------------------------------------
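
To make the hand-box math above concrete, a worked example of `_scale_and_shift` with the 640x480 geometry that `annotation.py` passes in (`mid_hand_box_tl`/`mid_hand_box_br`):

```python
import numpy as np

width, height = 640, 480
box_tl = (width // 3, height - height // 5)   # (213, 384)
box_br = (2 * width // 3, height)             # (426, 480)

scale_x = (box_br[0] - box_tl[0]) // 2        # 106: half the box per hand
scale_y = box_br[1] - box_tl[1]               # 96: full box height
shift_right = (box_tl[0] + scale_x, box_tl[1])

# a normalized keypoint (x, y, z) in [0, 1] lands inside the right half:
kp = np.array([0.5, 0.5, 0.0])
mapped = np.array([scale_x, scale_y, 1]) * kp + np.array([*shift_right, 0])
print(mapped)  # [372. 432.   0.] -- the center of the right half of the box
```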
/python/src/utils.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | import numpy as np
3 | import cv2
4 |
5 |
6 | def to_np(pil_img):
7 | return np.array(pil_img)[...,None]
8 |
9 |
10 | def load_image_with_alpha(img_path, resize_rate=10, remove_borders=False):
11 | img = Image.open(img_path, 'r')
12 | has_alpha = img.mode == 'RGBA'
13 | assert(has_alpha)
14 |
15 | red, green, blue, alpha = img.split()
16 |
17 | img_rgb = np.concatenate([to_np(red), to_np(green), to_np(blue)], axis=-1)
18 | img_alpha = to_np(alpha)
19 |
20 | if remove_borders:
21 | bsize = 250
22 | print(f'Removing {bsize} pixels from up and down borders')
23 | img_rgb = img_rgb[bsize:-bsize, :, :]
24 | img_alpha = img_alpha[bsize:-bsize, :, :]
25 |
26 | orig_size = (img_rgb.shape[1], img_rgb.shape[0])
27 | print('Original image size:', orig_size)
28 | target_size = (orig_size[0] // resize_rate, orig_size[1] // resize_rate)
29 | print('Target size:', target_size)
30 |
31 | img_rgb = cv2.resize(img_rgb, target_size)
32 | img_alpha = cv2.resize(img_alpha, target_size)
33 |
34 | return img_rgb, img_alpha
35 |
36 |
37 | def blur_image_patch(image, xmin, ymin, xmax, ymax, num_iter=5):
38 | patch = image[ymin:ymax, xmin:xmax, :]
39 | for i in range(num_iter):
40 | patch = cv2.blur(patch, (5, 5))
41 | image[ymin:ymax, xmin:xmax, :] = patch
42 | return image
43 |
44 |
45 | def overlay_alpha(src, src_alpha, dest, loc=(0, 0), alpha=1.0):
46 | src_h, src_w = src.shape[:2]
47 | dest_h, dest_w = dest.shape[:2]
48 | x1, y1 = max(0, loc[0]), max(0, loc[1])
49 | x2, y2 = min(x1 + src_w, dest_w), min(y1 + src_h, dest_h)
50 | if isinstance(src_alpha, type(src)):
51 | srca_h, srca_w = src_alpha.shape[:2]
52 | assert(srca_h == src_h and srca_w == src_w)
53 | src_alpha = src_alpha[:,:,None] // 255
54 | src_mask = src_alpha * alpha
55 | dest_mask = 1 - src_alpha
56 | dest_mask[dest_mask==0] = 1 - alpha
57 | dest[y1:y2,x1:x2,:] = dest_mask * dest[y1:y2,x1:x2,:] + src_mask * src
58 | return dest
59 |
60 |
61 | def draw_text(image, text, org=(0, 185), font=cv2.FONT_HERSHEY_SIMPLEX,
62 | fontScale=1, color=(255, 255, 255), thickness=2,
63 | lineType=cv2.LINE_AA, bottomLeftOrigin=False):
64 | image = cv2.putText(image, text, org, font, fontScale,
65 | color, thickness, lineType, bottomLeftOrigin)
66 | return image
67 |
68 |
69 | def draw_multiline_text(image, textlist, height=480, width=640, xloc=10,
70 | font=cv2.FONT_HERSHEY_SIMPLEX, font_size=1.0, color=(255,255,255),
71 | font_thickness=2, lineType=cv2.LINE_AA):
72 | for i, line in enumerate(textlist):
73 | textsize = cv2.getTextSize(line, font, font_size, font_thickness)[0]
74 | gap = textsize[1] + 5
75 | y = (height//2 + textsize[1]) // 2 + i * gap
76 | cv2.putText(image, line, (xloc, y), font, font_size,
77 | color, font_thickness, lineType)
78 | return image
79 |
80 |
81 | def draw_skeleton(image, landmarks, indices=True):
82 | HAND_SKELETON = {
83 | 'palm': {0:1,1:5,5:9,9:13,13:17,17:0},
84 | 'thumb': {1:2,2:3,3:4},
85 | 'forefinger': {5:6,6:7,7:8},
86 | 'middle_finger': {9:10,10:11,11:12},
87 | 'ring_finger': {13:14,14:15,15:16},
88 | 'pinkie': {17:18,18:19,19:20}
89 | }
90 | for hand_part_name in HAND_SKELETON:
91 | for curr_point in HAND_SKELETON[hand_part_name]:
92 | curr_point_coords = (int(landmarks[curr_point][0]), int(landmarks[curr_point][1]))
93 | next_point = landmarks[HAND_SKELETON[hand_part_name][curr_point]]
94 | next_point_coords = (int(next_point[0]), int(next_point[1]))
95 | cv2.line(
96 | image,
97 | pt1=curr_point_coords,
98 | pt2=next_point_coords,
99 | color=(255,255,255),
100 | thickness=2
101 | )
102 | for i, coords in enumerate(landmarks):
103 | if indices:
104 | image = cv2.putText(
105 | image,
106 | text=str(i),
107 | org=(int(coords[0]-5), int(coords[1])-10),
108 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,
109 | fontScale=0.5,
110 | color=(255,255,255)
111 | )
112 | image = cv2.circle(
113 | image,
114 | center=(int(coords[0]), int(coords[1])),
115 | radius=1,
116 | color=(235, 190, 63),
117 | thickness=5,
118 | lineType=8,
119 | shift=0
120 | )
121 | return image
122 |
--------------------------------------------------------------------------------