├── flask
├── recognition
│ ├── __init__.py
│ ├── not_in_use
│ │ ├── Emitter.py
│ │ ├── config.py
│ │ ├── MultiGesture.py
│ │ ├── mediapipe_hands.py
│ │ └── HandTracker.py
│ ├── Actions.py
│ └── detection.py
├── database.db
├── model.py
├── app.py
├── webcam.py
├── README.md
└── config.py
├── frontend
├── .babelrc
├── src
│ ├── assets
│ │ ├── home.png
│ │ ├── min.png
│ │ ├── about.png
│ │ ├── blank.png
│ │ ├── close.png
│ │ ├── loading.png
│ │ ├── profile.png
│ │ ├── switch.png
│ │ ├── webcam.png
│ │ ├── aboutlogo.png
│ │ ├── cambutton.png
│ │ ├── settings.png
│ │ ├── camera-off.png
│ │ ├── transparent.ico
│ │ └── gestures
│ │ │ ├── fist.png
│ │ │ ├── openhand.png
│ │ │ ├── thumbsup.png
│ │ │ ├── fourfinger.png
│ │ │ ├── onefinger.png
│ │ │ ├── thumbsdown.png
│ │ │ ├── twofinger.png
│ │ │ ├── rockandroll.png
│ │ │ └── threefinger.png
│ ├── index.js
│ ├── components
│ │ ├── Custom
│ │ │ ├── Sidebar.js
│ │ │ ├── AddSettings.js
│ │ │ ├── SettingName.js
│ │ │ └── CustomLayout.js
│ │ ├── Settings
│ │ │ ├── TrickGesturePairs.js
│ │ │ ├── GestureBox.js
│ │ │ ├── TrickGesturePair.js
│ │ │ ├── SideBar.js
│ │ │ ├── SettingsLayout.js
│ │ │ └── HandButtons.js
│ │ ├── Main
│ │ │ └── Main.js
│ │ ├── About
│ │ │ ├── SideBar.js
│ │ │ └── AboutLayout.js
│ │ ├── MenuBar
│ │ │ ├── MenuBar.css
│ │ │ └── MenuBar.js
│ │ └── MainScreen
│ │ │ ├── Options.js
│ │ │ ├── MainLayout.js
│ │ │ ├── SideBar.js
│ │ │ └── CameraOption.js
│ └── index.css
├── etc
│ ├── helpers.js
│ └── constants.js
├── public
│ └── index.html
├── README.md
├── package.json
├── webpack.build.config.js
├── webpack.dev.config.js
└── main.js
├── assets
└── readme
│ ├── blank.png
│ └── main.png
├── .gitignore
├── requirements.txt
└── README.md
/flask/recognition/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/frontend/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": ["react"]
3 | }
4 |
--------------------------------------------------------------------------------
/flask/database.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/flask/database.db
--------------------------------------------------------------------------------
/assets/readme/blank.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/assets/readme/blank.png
--------------------------------------------------------------------------------
/assets/readme/main.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/assets/readme/main.png
--------------------------------------------------------------------------------
/frontend/src/assets/home.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/home.png
--------------------------------------------------------------------------------
/frontend/src/assets/min.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/min.png
--------------------------------------------------------------------------------
/frontend/src/assets/about.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/about.png
--------------------------------------------------------------------------------
/frontend/src/assets/blank.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/blank.png
--------------------------------------------------------------------------------
/frontend/src/assets/close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/close.png
--------------------------------------------------------------------------------
/frontend/src/assets/loading.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/loading.png
--------------------------------------------------------------------------------
/frontend/src/assets/profile.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/profile.png
--------------------------------------------------------------------------------
/frontend/src/assets/switch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/switch.png
--------------------------------------------------------------------------------
/frontend/src/assets/webcam.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/webcam.png
--------------------------------------------------------------------------------
/frontend/src/assets/aboutlogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/aboutlogo.png
--------------------------------------------------------------------------------
/frontend/src/assets/cambutton.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/cambutton.png
--------------------------------------------------------------------------------
/frontend/src/assets/settings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/settings.png
--------------------------------------------------------------------------------
/frontend/src/assets/camera-off.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/camera-off.png
--------------------------------------------------------------------------------
/frontend/src/assets/transparent.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/transparent.ico
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/fist.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/fist.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/openhand.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/openhand.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/thumbsup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/thumbsup.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/fourfinger.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/fourfinger.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/onefinger.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/onefinger.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/thumbsdown.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/thumbsdown.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/twofinger.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/twofinger.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/rockandroll.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/rockandroll.png
--------------------------------------------------------------------------------
/frontend/src/assets/gestures/threefinger.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aggie-coding-club/Gesture/HEAD/frontend/src/assets/gestures/threefinger.png
--------------------------------------------------------------------------------
/flask/recognition/not_in_use/Emitter.py:
--------------------------------------------------------------------------------
"""Event-emitter wiring experiment (unused; lives under not_in_use/)."""
from pymitter import EventEmitter

import Actions
import MultiGesture

# Module-wide emitter; handlers elsewhere subscribe with @event.on(...).
event = EventEmitter()

# Example manual trigger:
# event.emit("start", hand="test", gest="test")
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .vscode
3 | __pycache__/
4 | frontend/.idea/
5 | frontend/node_modules/
6 | frontend/*npm-debug.log
7 | frontend/*.DS_Store
8 | frontend/dist/
9 | frontend/builds/
10 |
11 |
12 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | attrs==20.3.0
2 | mediapipe==0.8.3.1
3 | numpy==1.20.1
4 | opencv-python==4.5.1.48
5 | pyautogui==0.9.52
6 | pymitter==0.3.0
7 | flask==2.0.1
8 | flask-sqlalchemy==2.5.1
9 | flask-cors==3.0.10
10 | pycaw==20181226
--------------------------------------------------------------------------------
/frontend/etc/helpers.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 |
3 | // Helper functions
4 | function root(args) {
5 | args = Array.prototype.slice.call(arguments, 0);
6 | return path.join.apply(path, [__dirname].concat('../', ...args));
7 | }
8 |
9 | exports.root = root;
--------------------------------------------------------------------------------
/frontend/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { render } from 'react-dom';
3 | import './index.css';
4 | import Main from "./components/Main/Main"
5 |
6 |
7 | render((
8 |
9 |
10 | ), document.getElementById('app')
11 | );
12 |
13 |
--------------------------------------------------------------------------------
/flask/model.py:
--------------------------------------------------------------------------------
from app import db


class Configuration(db.Model):
    """One saved gesture-to-action binding.

    Each row describes which hand makes which gesture, the action to run
    when it is recognized, and a human-readable alias for the frontend.
    """

    id = db.Column(db.Integer, primary_key=True)  # surrogate key
    hand = db.Column(db.String(10))      # "Left" or "Right" (per recognition config)
    gesture = db.Column(db.String(20))   # gesture name, e.g. "Peace"
    action = db.Column(db.String(60))    # path / URL / special value to execute
    alias = db.Column(db.String(30))     # display name for this binding
--------------------------------------------------------------------------------
/frontend/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Gesture
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/frontend/etc/constants.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | CATCH_ON_MAIN: "catch-on-main",
3 | SEND_TO_RENDERER: "send-to-renderer",
4 | CREATE_FILE: "create-file",
5 | BUTTON_CLICK: "button-click",
6 | OPEN_FILE_EXPLORER: 'open-file-explorer',
7 | SEND_FILE_PATH: 'send-file-path',
8 | ADD_FILE_SETTING: 'add-file-setting'
9 | };
10 |
--------------------------------------------------------------------------------
/frontend/src/components/Custom/Sidebar.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export default function SideBar() {
4 | const sideBar = {
5 | flex: 1,
6 | backgroundColor: "#CBE3FF",
7 | };
8 |
9 | const spacer = {
10 | height: "10vh",
11 | position: "relative",
12 | };
13 |
14 | return (
15 |
18 | );
19 | }
20 |
--------------------------------------------------------------------------------
/frontend/src/components/Settings/TrickGesturePairs.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import TrickGesturePair from "./TrickGesturePair";
3 |
4 | export default function TrickGesturesPairs({ data, changeSettings }) {
5 | return (
6 |
7 | {data.map((trick, index) => (
8 |
9 | ))}
10 |
11 | );
12 | }
13 |
--------------------------------------------------------------------------------
/frontend/src/index.css:
--------------------------------------------------------------------------------
1 | @import url("https://fonts.googleapis.com/css2?family=Oxygen:wght@300;400;700&display=swap");
2 | @import url("https://fonts.googleapis.com/css2?family=Lobster+Two&display=swap");
3 |
4 | html,
5 | body {
6 | height: 100%;
7 | overflow: hidden;
8 | }
9 |
10 | body {
11 | margin: 0;
12 | -webkit-font-smoothing: antialiased;
13 | -moz-osx-font-smoothing: grayscale;
14 | font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
15 | sans-serif;
16 | }
17 |
--------------------------------------------------------------------------------
/flask/app.py:
--------------------------------------------------------------------------------
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS

# Shared SQLAlchemy handle; bound to the app inside create_app().
db = SQLAlchemy()


def create_app():
    """Application factory: build and configure the Flask app.

    Returns a Flask instance with CORS enabled, SQLite-backed storage,
    and the video-feed and configuration blueprints registered.
    """
    app = Flask(__name__)
    CORS(app)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///database.db'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    db.init_app(app)

    # Deferred imports — presumably to break a circular import (model.py
    # imports `db` from this module); confirm before hoisting to the top.
    from webcam import feed
    from config import cf
    app.register_blueprint(feed)
    app.register_blueprint(cf)

    return app


if __name__ == "__main__":
    create_app().run(port=5000)
--------------------------------------------------------------------------------
/frontend/src/components/Main/Main.js:
--------------------------------------------------------------------------------
1 | import MainLayout from "../MainScreen/MainLayout";
2 | import SettingsLayout from "../Settings/SettingsLayout";
3 | import AboutLayout from "../About/AboutLayout";
4 | import CustomLayout from "../Custom/CustomLayout";
5 | import React from "react";
6 | import { HashRouter, Route } from "react-router-dom";
7 |
8 | export default function Main() {
9 | return (
10 |
11 |
12 |
13 |
14 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/frontend/src/components/About/SideBar.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Options from "../MainScreen/Options";
3 | import { Link } from "react-router-dom";
4 | import homePic from "../../assets/home.png";
5 |
6 | export default function SideBar({ btnClick }) {
7 | const spacer = {
8 | height: "10vh",
9 | position: "relative",
10 | };
11 |
12 | const optionWrapper = {
13 | margin: "12vh 3vw",
14 | };
15 |
16 | return (
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 | );
28 | }
29 |
--------------------------------------------------------------------------------
/frontend/src/components/MenuBar/MenuBar.css:
--------------------------------------------------------------------------------
1 | #window-controls {
2 | display: grid;
3 | grid-template-columns: repeat(2, 46px);
4 | position: absolute;
5 | right: 0;
6 | height: 32px;
7 | -webkit-app-region: no-drag;
8 | text-align: center;
9 | }
10 |
11 | #window-controls .button:hover {
12 | background: rgba(77, 77, 77, 0.3);
13 | }
14 |
15 | #window-controls .button:active {
16 | background: rgba(26, 26, 26, 0.4);
17 | }
18 |
19 | #close-button:hover {
20 | background: #e81123 !important;
21 | }
22 |
23 | #close-button:active {
24 | background: #f1707a !important;
25 | }
26 |
27 | #close-button:hover .icon {
28 | filter: invert(1);
29 | }
30 |
31 | #close-button:active .icon {
32 | filter: invert(1);
33 | }
34 |
--------------------------------------------------------------------------------
/frontend/src/components/Custom/AddSettings.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react'
2 |
3 | export default class AddSettings extends Component {
4 | constructor(props) {
5 | super(props);
6 | this.state = {value: ''};
7 |
8 | this.handleChange = this.handleChange.bind(this);
9 | this.handleSubmit = this.handleSubmit.bind(this);
10 | }
11 |
12 | handleChange(event) {
13 | this.setState({value: event.target.value});
14 | }
15 |
16 | handleSubmit(event) {
17 | console.log('A setting was submitted: ' + this.state.value);
18 | event.preventDefault();
19 | }
20 |
21 | render() {
22 | return (
23 |
30 | );
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/frontend/src/components/Settings/GestureBox.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export default function GestureBox({ name, newSetting, selected }) {
4 | var theBackground;
5 | if (name.split("_").join("/").split("/")[2] === selected) {
6 | theBackground = "#967223";
7 | } else {
8 | theBackground = "#081a2d";
9 | }
10 |
11 | const innerGrid = {
12 | flex: 1,
13 | textAlign: "center",
14 | borderRight: "1px solid white",
15 | color: "white",
16 | background: theBackground,
17 | };
18 |
19 | const gestures = {
20 | padding: "4vh",
21 | };
22 |
23 | const imgFormat = {
24 | width: "auto",
25 | height: "4vh",
26 | };
27 |
28 | function handleClick() {
29 | newSetting(name.split("_").join("/").split("/")[2]);
30 | }
31 |
32 | return (
33 |
34 |
35 |
36 |
37 |
38 | );
39 | }
40 |
--------------------------------------------------------------------------------
/flask/webcam.py:
--------------------------------------------------------------------------------
from flask import Blueprint, Response
import recognition.detection as dt
from model import Configuration

feed = Blueprint('video', __name__, url_prefix="/video")


@feed.route('/feed')
def video_feed():
    """Stream the gesture-annotated webcam feed as an MJPEG response.

    Loads every saved Configuration row and hands a plain-dict form of it
    to the detection generator so recognized gestures can fire actions.
    """
    rows = Configuration.query.all()
    config_data = [
        {
            "hand": row.hand,
            "gesture": row.gesture,
            "action": row.action,
            "alias": row.alias,
        }
        for row in rows
    ]
    return Response(dt.gen_video(config_data),
                    mimetype='multipart/x-mixed-replace; boundary=frame')


@feed.route('/off')
def off():
    """Stream the camera-off placeholder frames."""
    return Response(dt.gen_off(),
                    mimetype='multipart/x-mixed-replace; boundary=frame')


@feed.route('/switch')
def switch():
    """Toggle which physical webcam the detector reads from."""
    dt.switchWebcam()
    return "Switched", 200
--------------------------------------------------------------------------------
/frontend/src/components/Settings/TrickGesturePair.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import HandButtons from "./HandButtons";
3 |
4 | export default function TrickGesturePair({ dat, changeSettings }) {
5 | const deleteStyle = {
6 | color: "red",
7 | marginLeft: "5vw",
8 | cursor: "pointer"
9 | }
10 |
11 | const contentStyle = {
12 | display: "flex",
13 | textAlign: "center",
14 | padding: "2vh 2vw",
15 | borderBottom: "1px solid gray",
16 | };
17 |
18 | const trickContainer = {
19 | flex: 1,
20 | margin: "0vh 0vh 0vh 5vw",
21 | WebkitAppRegion: "no-drag",
22 | };
23 |
24 | const handButtonsStyle = {
25 | flex: 1,
26 | margin: "0vh 0vh 0vh 0vh",
27 | };
28 |
29 | return (
30 |
31 |
X
32 |
{dat["alias"]}
33 |
34 |
35 |
36 |
37 | );
38 | }
39 |
--------------------------------------------------------------------------------
/frontend/src/components/MainScreen/Options.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export default function Options({ name, btnClick, imagePic }) {
4 | const anOptionStyle = {
5 | padding: "2.5vh",
6 | };
7 |
8 | const btnStyle = {
9 | color: "#111111",
10 | backgroundColor: "transparent",
11 | border: "none",
12 | cursor: "pointer",
13 | overflow: "hidden",
14 | outline: "none",
15 | };
16 |
17 | const imageStyle = {
18 | padding: "0 10px 0 0",
19 | };
20 |
21 | const tagNameStyle = {
22 | marginTop: "7px",
23 | float: "right",
24 | fontFamily: "Oxygen",
25 | fontWeight: "normal",
26 | fontSize: 14,
27 | };
28 |
29 | function toggleChange() {
30 | btnClick(name);
31 | }
32 | return (
33 |
34 |
35 |
36 | {name}
37 |
38 |
39 | );
40 | }
41 |
--------------------------------------------------------------------------------
/frontend/src/components/MainScreen/MainLayout.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import SideBar from "./SideBar";
3 | import MenuBar from "../MenuBar/MenuBar";
4 | import camOff from "../../assets/camera-off.png";
5 |
6 | export default function MainLayout() {
7 | const btnClick = (name) => {
8 | console.log("clicked", name);
9 | };
10 |
11 | const flexContainer = {
12 | display: "flex",
13 | backgroundColor: "grey",
14 | };
15 |
16 | const sideScreen = {
17 | backgroundColor: "#ececec",
18 | flex: 1,
19 | height: "100vh",
20 | };
21 |
22 | const cameraScreen = {
23 | flex: 3,
24 | height: "100vh",
25 | color: "#afb0b2",
26 | overflow: "hidden",
27 | };
28 |
29 | return (
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 | );
42 | }
43 |
--------------------------------------------------------------------------------
/flask/recognition/not_in_use/config.py:
--------------------------------------------------------------------------------
"""
### CONFIGURING ACTIONS ###
Each action is formatted:
    "action": ["hand", "gesture", "path_to_executable"]

Options for hands: Left, Right

Options for gestures:
    1 finger, Peace, 3 fingers, 4 fingers, Open Hand, Fist, Gig Em,
    Thumbs Down, Rock and Roll, Horns Down

There are some examples already written below.
"""

# Maps an action alias to [hand, gesture, target]; the target is a path,
# a URL, or the special value "x" (close the focused application).
actions = {
    "chrome": ["Right", "Peace", "C:/Program Files (x86)/Google/Chrome/Application/chrome.exe"],
    "Vision-Controls": ["Right", "Gig Em", "https://github.com/aggie-coding-club/Vision-Controls"],
    # Closes application that you are currently on.
    # (Caution: you can close out of this application with this too)
    "close": ["Right", "1 finger", "x"],
}

settings = {
    # 0 should be the default for built in cameras. If this doesn't work, try 1.
    "camera_index": 0,
}
--------------------------------------------------------------------------------
/frontend/src/components/Custom/SettingName.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 |
3 | export default class SettingName extends Component {
4 | constructor(props) {
5 | super(props);
6 |
7 | this.handleChange = this.handleChange.bind(this);
8 | }
9 |
10 | handleChange(event) {
11 | this.props.updateName(event.target.value);
12 | }
13 |
14 | render() {
15 | const containerStyle = {
16 | margin: "10vh 8vw 5vh 8vw",
17 | display: "flex",
18 | };
19 |
20 | const textInputStyle = {
21 | border: "1px black solid",
22 | marginLeft: "10px",
23 | borderRadius: "10px",
24 | textAlign: "center",
25 | flex: "4",
26 | overflow: "hidden",
27 | backgroundColor: "#ececec",
28 | };
29 |
30 | return (
31 |
32 |
33 | Setting Alias:
34 |
41 |
42 |
43 | );
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/frontend/src/components/Settings/SideBar.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Options from "../MainScreen/Options";
3 | import { Link } from "react-router-dom";
4 | import homePic from "../../assets/home.png";
5 |
6 | export default function SideBar({ btnClick }) {
7 | const spacer = {
8 | height: "10vh",
9 | position: "relative",
10 | };
11 |
12 | const optionWrapper = {
13 | margin: "12vh 3vw",
14 | };
15 |
16 | const customBtn = {
17 | background: "#1250a4",
18 | color: "white",
19 | border: "none",
20 | outline: "none",
21 | cursor: "pointer",
22 | overflow: "hidden",
23 | borderRadius: "10px",
24 | height: "8vh",
25 | padding: "0 15px",
26 | marginTop: "50px",
27 | marginLeft: "4px",
28 | };
29 |
30 | return (
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 | Custom Setting
42 |
43 |
44 |
45 |
46 | );
47 | }
48 |
--------------------------------------------------------------------------------
/frontend/README.md:
--------------------------------------------------------------------------------
1 | # Vision Controls Frontend
2 |
3 |
4 | _Thanks to Keith Weaver for creating the Electron-React boilerplate used in this project._
5 | https://github.com/keithweaver
6 |
7 | ### To get started:
8 | * Run `npm install` or `yarn install`
9 |
10 | ##### Development
11 | * Run `npm run dev` to start webpack-dev-server. Electron will launch automatically after compilation.
12 |
13 | ##### Production
14 | _You have two options, an automatic build or two manual steps_
15 |
16 | ###### One Shot
17 | * Run `npm run package` to have webpack compile your application into `dist/bundle.js` and `dist/index.html`, and then an electron-packager run will be triggered for the current platform/arch, outputting to `builds/`
18 |
19 | ###### Manual
20 | _Recommendation: Update the "postpackage" script call in package.json to specify parameters as you choose and use the `npm run package` command instead of running these steps manually_
21 | * Run `npm run build` to have webpack compile and output your bundle to `dist/bundle.js`
22 | * Then you can call electron-packager directly with any commands you choose
23 |
24 | If you want to test the production build (In case you think Babili might be breaking something) after running `npm run build` you can then call `npm run prod`. This will cause electron to load off of the `dist/` build instead of looking for the webpack-dev-server instance. Electron will launch automatically after compilation.
25 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Gesture
2 |
3 | > Gesture Recognition Software.
4 |
5 | ## Table of contents
6 |
7 | - [General info](#general-info)
8 | - [Tech Stack](#tech-stack)
9 | - [Setup](#setup)
10 | - [Features](#features)
11 | - [Extra](#extra)
12 |
13 | ## General info
14 |
15 |
16 |
17 |
18 |
19 |
20 | Gesture is a desktop application that allows the user to control various applications through hand gestures.
21 |
22 |
23 |
24 |
25 |
26 |
27 |
30 |
31 | ## Tech Stack
32 |
33 | **Frontend**
34 |
35 | - JS
36 | - React
37 | - Electron
38 |
39 | **Backend**
40 |
41 | - Python
42 | - Flask
43 | - OpenCV
44 |
45 | ## Setup
46 |
47 | Installation
48 |
49 | ```
50 | git clone https://github.com/aggie-coding-club/Gesture
51 | cd Gesture
52 | pip install -r requirements.txt
53 | cd frontend
54 | npm install
55 | ```
56 |
57 | Once ready, run **npm run dev** to launch the application.
58 |
59 | ## Features
60 |
61 | - Full Desktop UI Using React and Electron
62 | - Gesture Recognition / mouse movement through Python with OpenCV
63 | - Settings page to change application preference / gestures
64 |
65 | ## Extra
66 |
67 | This project is managed by Aggie Coding Club.
68 |
--------------------------------------------------------------------------------
/frontend/src/components/MenuBar/MenuBar.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import close from "../../assets/close.png";
3 | import minimize from "../../assets/min.png";
4 | import "./MenuBar.css";
5 |
6 | export default function MenuBar() {
7 | const remote = require("electron").remote;
8 |
9 | const closeScreen = () => {
10 | var window = remote.getCurrentWindow();
11 | window.close();
12 | };
13 |
14 | const minimizeScreen = () => {
15 | var window = remote.getCurrentWindow();
16 | window.minimize();
17 | };
18 |
19 | const barStyle = {
20 | display: "block",
21 | height: "32px",
22 | background: "#CCCCCC",
23 | };
24 |
25 | const dragStyle = {
26 | width: "100%",
27 | height: "32px",
28 | WebkitAppRegion: "drag",
29 | };
30 |
31 | const iconStyle = {
32 | position: "relative",
33 | top: "2px",
34 | height: "10px",
35 | };
36 |
37 | return (
38 |
50 | );
51 | }
52 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "new-frontend",
3 | "version": "0.7.0",
4 | "description": "Desktop Application that allows the user to control various applications through hand gestures",
5 | "author": "Aggie Coding Club",
6 | "homepage": "https://github.com/aggie-coding-club/Vision-Controls",
7 | "repository": {
8 | "type": "git",
9 | "url": "https://github.com/aggie-coding-club/Vision-Controls"
10 | },
11 | "main": "main.js",
12 | "scripts": {
13 | "prod": "webpack --config webpack.build.config.js && electron --noDevServer .",
14 | "dev": "webpack-dev-server --hot --host 127.0.0.1 --port 4000 --config=./webpack.dev.config.js",
15 | "build": "webpack --config webpack.build.config.js",
16 | "package": "webpack --config webpack.build.config.js",
17 | "postpackage": "electron-packager ./ --out=./builds"
18 | },
19 | "devDependencies": {
20 | "babel-core": "^6.24.1",
21 | "babel-loader": "^7.1.2",
22 | "babel-preset-react": "^6.24.1",
23 | "babili-webpack-plugin": "^0.1.2",
24 | "css-loader": "^0.28.1",
25 | "electron": "^1.7.8",
26 | "electron-packager": "^9.1.0",
27 | "extract-text-webpack-plugin": "^3.0.1",
28 | "file-loader": "^1.1.5",
29 | "html-webpack-plugin": "^2.28.0",
30 | "react": "^16.0.0",
31 | "react-dom": "^16.0.0",
32 | "react-modal": "^3.14.2",
33 | "style-loader": "^0.19.0",
34 | "webpack": "^3.6.0",
35 | "webpack-dev-server": "^2.4.5"
36 | },
37 | "dependencies": {
38 | "react-router-dom": "^5.2.0"
39 | },
40 | "proxy": "http://localhost:5000"
41 | }
42 |
--------------------------------------------------------------------------------
/flask/recognition/Actions.py:
--------------------------------------------------------------------------------
1 | from pymitter import EventEmitter
2 | import webbrowser
3 | import os, getpass
4 | from ctypes import cast, POINTER
5 | from comtypes import CLSCTX_ALL
6 | from pycaw.pycaw import AudioUtilities, IAudioEndpointVolume
7 |
# Shared event bus: detection code emits "start" events, handlers below react.
event = EventEmitter()
# Current Windows user name, used to build fallback paths under C://Users//.
username = getpass.getuser()

# Acquire the default audio output device via pycaw / Windows Core Audio.
devices = AudioUtilities.GetSpeakers()
interface = devices.Activate(
    IAudioEndpointVolume._iid_, CLSCTX_ALL, None)

# COM pointer used to mute/unmute the master volume.
volume = cast(interface, POINTER(IAudioEndpointVolume))
16 |
# start event, when the gesture is first made
# :hand: left or right
# :gest: the gesture
@event.on("start")
def openProject(configData, hand, gest):
    """Run the action configured for the detected (hand, gesture) pair.

    configData: iterable of dicts with keys "gesture", "hand", "action", "alias".
    hand: "left" or "right".
    gest: the detected gesture name.
    """
    for x in configData:
        if (x["gesture"] == gest and x["hand"] == hand):
            action = x["action"]
            alias = x["alias"]
            if (action == 'x'):
                # 'x' marks an unassigned slot — nothing to launch.
                return
            elif action.startswith("http"):
                webbrowser.open(action)
            elif alias == "Open Chrome":
                os.startfile(action)
            elif action.startswith("C://"):
                # Try the configured absolute path; fall back to the same
                # path under the current user's home directory.
                # (Previously a bare `except:` swallowed everything, the
                # "No program at path" message printed even when the
                # fallback succeeded, and a fallback failure propagated.)
                try:
                    os.startfile(action)
                except OSError:
                    fallback = "C://Users//" + username + action
                    try:
                        os.startfile(fallback)
                    except OSError:
                        print("No program at path:", fallback)
            elif alias.startswith("Volume"):
                # alias is e.g. "Volume Mute"/"Volume Unmute"; index 7 is the
                # first character after "Volume ".
                if (alias[7] == 'M'):
                    volume.SetMute(1, None)
                else:
                    volume.SetMute(0, None)
43 |
--------------------------------------------------------------------------------
/flask/README.md:
--------------------------------------------------------------------------------
1 | (TO BE UPDATED)
2 |
3 | # Hand Gesture Recognition
4 |
5 | ## Dependencies
6 |
7 | Dependencies can be installed using: `pip install -r requirements.txt`
8 |
9 | ## Using Gesture Recognition
10 |
11 | ### Setup
12 |
`camera_index` - Default is 0, but if your camera is not recognized, your index may be 1
14 |
15 | ### Commands
16 |
17 | `python HandTracker.py`
18 |
19 | - Uses computer's default camera to detect hand gestures
20 |
21 | `python HandTracker.py -m mouse`
22 |
23 | - Uses gesture recognition to control the mouse
24 |
25 | ## Using features
26 |
27 | ### Basics
28 |
29 | - Keep your palm facing the camera when making hand gestures
30 |
31 | ### Mouse Control
32 |
33 | - Start HandTracker.py in mouse control mode with the `-m mouse` flag
34 | - The mouse control currently works on an anchor system
35 | - When the camera sees a "Thumbs Up", it sets that position as the anchor
36 | - Keep a thumbs up and move your hand away from the anchor to see the mouse move.
37 | - Change to a "Fist" when you want to click
38 |
39 | ## Notes for Developers
40 |
41 | ### Averaging Frames
42 |
43 | - frames_until_change: number of frames to look at before determining a hand gesture (all must match). Note the following:
44 | - With a higher `frames_until_change`, the script may be slow to recognize a gesture.
45 | - With a lower `frames_until_change`, the script will be very fast, but may have mistakes.
46 |
47 | ### Adding / Editing Gestures
48 |
49 | - When adding or editing a gesture in the `gesture` function, keep the following in mind:
50 | - `f[0]` = thumb, `f[1]` = index, `f[2]` = middle, `f[3]` = ring, `f[4]` = pinky
51 |
--------------------------------------------------------------------------------
/flask/config.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, jsonify, request
2 | from app import db
3 | from model import Configuration
4 |
5 | cf = Blueprint("config", __name__, url_prefix="/config")
6 |
7 | @cf.route("add_configuration", methods=["POST"])
8 | def addConfiguration():
9 | configData = request.get_json()
10 |
11 | newConfiguration = Configuration(
12 | id=configData["id"],
13 | hand=configData["hand"],
14 | gesture=configData["gesture"],
15 | action=configData["action"],
16 | alias=configData["alias"],
17 | )
18 |
19 | db.session.add(newConfiguration)
20 | db.session.commit()
21 |
22 | return "Added", 201
23 |
24 |
25 | @cf.route("/retrieve")
26 | def retrieve():
27 | configQuery = Configuration.query.all()
28 | configData = []
29 |
30 | for configuration in configQuery:
31 | if not(configuration.id):
32 | configuration.id = "none"
33 | configData.append({ "hand" : configuration.hand,
34 | "gesture" : configuration.gesture,
35 | "action" : configuration.action,
36 | "alias" : configuration.alias,
37 | "id": configuration.id})
38 |
39 | return jsonify({"config" : configData})
40 |
41 |
42 | @cf.route("update_configuration", methods=["POST"])
43 | def updateConfiguration():
44 | configData = request.get_json()
45 |
46 | configuration = Configuration.query.filter_by(alias=configData["alias"]).first()
47 | configuration.gesture = configData["gesture"]
48 | db.session.commit()
49 |
50 | return "Updated", 201
51 |
52 |
53 |
54 |
55 |
56 |
--------------------------------------------------------------------------------
/frontend/src/components/MainScreen/SideBar.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Options from "./Options";
3 | import CameraOption from "./CameraOption";
4 | import { Link } from "react-router-dom";
5 | import settingsPic from "../../assets/settings.png";
6 | import aboutPic from "../../assets/about.png";
7 | import logo from "../../assets/blank.png";
8 | import cameraPic from "../../assets/webcam.png";
9 |
10 | export default function SideBar({ btnClick }) {
11 | const logoStyle = {
12 | height: "10vh",
13 | position: "relative",
14 | top: "1.5vh",
15 | left: "1vw",
16 | };
17 |
18 | const optionWrapper = {
19 | margin: "9vh 2.5vw",
20 | height: "67vh",
21 | display: "flex",
22 | flexDirection: "column",
23 | };
24 |
25 | const minorOptions = {
26 | flex: 2,
27 | display: "flex",
28 | flexDirection: "column",
29 | marginTop: "3vh",
30 | };
31 |
32 | const optionStyle = {
33 | flex: 1,
34 | };
35 |
36 | const cameraOptionStyle = {
37 | flex: 1,
38 | };
39 |
40 | return (
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 | );
64 | }
65 |
--------------------------------------------------------------------------------
/frontend/webpack.build.config.js:
--------------------------------------------------------------------------------
1 | const webpack = require("webpack");
2 | const path = require("path");
3 | const HtmlWebpackPlugin = require("html-webpack-plugin");
4 | const BabiliPlugin = require("babili-webpack-plugin");
5 | const ExtractTextPlugin = require("extract-text-webpack-plugin");
6 |
7 | // Config directories
8 | const SRC_DIR = path.resolve(__dirname, "src");
9 | const OUTPUT_DIR = path.resolve(__dirname, "dist");
10 |
11 | // Any directories you will be adding code/files into, need to be added to this array so webpack will pick them up
12 | const defaultInclude = [SRC_DIR];
13 |
// Production build configuration: bundles the renderer and minifies it.
module.exports = {
  entry: SRC_DIR + "/index.js",
  output: {
    path: OUTPUT_DIR,
    // Relative public path so assets resolve when loaded via file:// in Electron.
    publicPath: "./",
    filename: "bundle.js",
  },
  module: {
    rules: [
      {
        // Extract CSS into a separate bundle.css instead of injected <style> tags.
        test: /\.css$/,
        use: ExtractTextPlugin.extract({
          fallback: "style-loader",
          use: "css-loader",
        }),
        include: defaultInclude,
      },
      {
        // Transpile .js/.jsx through Babel.
        test: /\.jsx?$/,
        use: [{ loader: "babel-loader" }],
        include: defaultInclude,
      },
      {
        // Copy images into dist/img with content-hashed names.
        test: /\.(jpe?g|png|gif)$/,
        use: [{ loader: "file-loader?name=img/[name]__[hash:base64:5].[ext]" }],
        include: defaultInclude,
      },
      {
        // Copy fonts into dist/font with content-hashed names.
        test: /\.(eot|svg|ttf|woff|woff2)$/,
        use: [{ loader: "file-loader?name=font/[name]__[hash:base64:5].[ext]" }],
        include: defaultInclude,
      },
    ],
  },
  // Allow Node/Electron APIs (require, ipcRenderer) inside renderer code.
  target: "electron-renderer",
  plugins: [
    new HtmlWebpackPlugin(),
    new ExtractTextPlugin("bundle.css"),
    new webpack.DefinePlugin({
      "process.env.NODE_ENV": JSON.stringify("production"),
    }),
    // Minifies the production bundle.
    new BabiliPlugin(),
  ],
  stats: {
    colors: true,
    children: false,
    chunks: false,
    modules: false,
  },
};
64 |
--------------------------------------------------------------------------------
/frontend/src/components/About/AboutLayout.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import SideBar from "./SideBar.js";
3 | import MenuBar from "../MenuBar/MenuBar";
4 | import aboutImage from "../../assets/aboutlogo.png";
5 |
6 | export default function AboutLayout() {
7 | const btnClick = (name) => {
8 | console.log("clicked", name);
9 | };
10 |
11 | const flexContainer = {
12 | display: "flex",
13 | margin: 0,
14 | padding: 0,
15 | backgroundColor: "#ececec",
16 | };
17 |
18 | const sideScreen = {
19 | flex: 1,
20 | };
21 |
22 | const aboutStyle = {
23 | margin: "0vh 1vh 1vh 0",
24 | padding: 0,
25 | flex: 3,
26 | height: "100vh",
27 | };
28 |
29 | const titleStyle = {
30 | textAlign: "center",
31 | color: "#111111",
32 | fontFamily: "Lobster Two",
33 | };
34 |
35 | const textStyle = {
36 | textAlign: "center",
37 | letterSpacing: "0.1em",
38 | color: "#111111",
39 | fontFamily: "Oxygen",
40 | fontWeight: "normal",
41 | fontSize: 15,
42 | margin: "-3vh 4vw 0 3vw",
43 | };
44 |
45 | const imageStyle = {
46 | width: "110vh",
47 | display: "block",
48 | marginTop: "6vh",
49 | marginLeft: "5vw",
50 | };
51 |
52 | return (
53 |
54 |
55 |
56 |
57 |
58 |
About
59 |
60 |
61 | Vision Controls is a desktop application that allows the user to control various applications through hand gestures. This purpose of this
62 | project is to provide students with a way to work in a team setting and achieve something while doing it. This project is managed by the
63 | Aggie Coding Club.
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 | );
73 | }
74 |
--------------------------------------------------------------------------------
/frontend/webpack.dev.config.js:
--------------------------------------------------------------------------------
1 | const webpack = require("webpack");
2 | const path = require("path");
3 | const HtmlWebpackPlugin = require("html-webpack-plugin");
4 | const { spawn } = require("child_process");
5 | const helpers = require("./etc/helpers");
6 |
7 | // Config directories
8 | const SRC_DIR = path.resolve(__dirname, "src");
9 | const OUTPUT_DIR = path.resolve(__dirname, "dist");
10 |
11 | // Any directories you will be adding code/files into, need to be added to this array so webpack will pick them up
12 | const defaultInclude = [SRC_DIR];
13 |
// Development configuration: served by webpack-dev-server with hot reload,
// and spawns Electron pointed at the dev server (see devServer.setup).
module.exports = {
  entry: SRC_DIR + "/index.js",
  output: {
    path: OUTPUT_DIR,
    publicPath: "/",
    filename: "bundle.js",
  },
  module: {
    rules: [
      {
        // Inject CSS via <style> tags (no extraction in dev).
        test: /\.css$/,
        use: [{ loader: "style-loader" }, { loader: "css-loader" }],
        include: defaultInclude,
      },
      {
        // Transpile .js/.jsx through Babel.
        test: /\.jsx?$/,
        use: [{ loader: "babel-loader" }],
        include: defaultInclude,
      },
      {
        // Copy images into dist/img with content-hashed names.
        test: /\.(jpe?g|png|gif)$/,
        use: [{ loader: "file-loader?name=img/[name]__[hash:base64:5].[ext]" }],
        include: defaultInclude,
      },
      {
        // Copy fonts into dist/font with content-hashed names.
        test: /\.(eot|svg|ttf|woff|woff2)$/,
        use: [{ loader: "file-loader?name=font/[name]__[hash:base64:5].[ext]" }],
        include: defaultInclude,
      },
    ],
  },
  // Allow Node/Electron APIs (require, ipcRenderer) inside renderer code.
  target: "electron-renderer",
  plugins: [
    new HtmlWebpackPlugin({
      template: helpers.root("public/index.html"),
      inject: "body",
    }),
    new webpack.DefinePlugin({
      "process.env.NODE_ENV": JSON.stringify("development"),
    }),
  ],
  devtool: "cheap-source-map",
  devServer: {
    historyApiFallback: true,
    contentBase: OUTPUT_DIR,
    stats: {
      colors: true,
      chunks: false,
      children: false,
    },
    // Launch Electron against the dev server once it is up; exit the dev
    // server when the Electron window closes.
    setup() {
      spawn("electron", ["."], { shell: true, env: process.env, stdio: "inherit" })
        .on("close", (code) => process.exit(0))
        .on("error", (spawnError) => console.error(spawnError));
    },
  },
};
71 |
--------------------------------------------------------------------------------
/flask/recognition/not_in_use/MultiGesture.py:
--------------------------------------------------------------------------------
1 | from Emitter import event
class MultiGesture():
    """Recognizes an ordered sequence of gestures.

    Each entry of `gestures` is either a gesture name (either hand) or a
    (hand, gesture) tuple. When the whole sequence is seen in order, a
    "multigesture" event is emitted with the sequence's name.
    """

    def __init__(self, name, gestures):
        self.name = name
        self.gestures = gestures
        self.on = 0  # index of the next expected entry

    def on_start_gest(self, hand, gest):
        """Feed one detected gesture into the sequence matcher.

        Fixes two defects in the original:
        - "No Gesture" frames are now ignored based on the *detected*
          gesture (the original compared the expected entry instead).
        - A non-matching gesture now resets the sequence in the string
          case too (the original only reset in the tuple case, so an
          out-of-order string sequence could still complete).
        """
        if gest == "No Gesture":
            # Hand visible but nothing recognized: neither advance nor reset.
            return

        expected = self.gestures[self.on]
        if type(expected) == str:
            matched = expected == gest  # doesn't matter which hand
        else:
            matched = hand == expected[0] and gest == expected[1]

        if matched:
            self.on += 1
        else:
            self.on = 0  # wrong gesture: restart the sequence

        if self.on == len(self.gestures):
            event.emit("multigesture", gest=self.name)
            self.on = 0
27 |
28 | counting = ["1 finger", "Peace", "3 fingers", "4 fingers", "Open Hand"]
29 |
30 | countToFive = MultiGesture("Count to 5", counting)
31 | event.on("start", countToFive.on_start_gest)
32 |
33 | countDown = MultiGesture("Count Down from 5",list(reversed(counting)))
34 | event.on("start", countDown.on_start_gest)
35 | # %%
36 | # m = MultiGesture(["1 finger", "2 finger"], lambda: print("counting"))
37 | # m.on_keydown("left", "1 finger")
38 | # m.on_keydown("right", "2 finger")
39 |
40 | # m.on_keydown("right", "2 finger")
41 |
42 | # m.on_keydown("left", "1 finger")
43 | # m.on_keydown("right", "Thumbs Down")
44 | # m.on_keydown("right", "2 finger")
45 |
46 | # m.on_keydown("left", "1 finger")
47 | # m.on_keydown("right", "2 finger")
48 |
49 | # %%
50 | # m = MultiGesture([("right", "1 finger"), "2 finger"], lambda: print("counting"))
51 | # m.on_keydown("left", "1 finger")
52 | # m.on_keydown("right", "2 finger")
53 |
54 | # m.on_keydown("right", "1 finger")
55 | # m.on_keydown("right", "2 finger")
56 |
57 | # %%
58 | # m = MultiGesture([("right", "1 finger"), "2 finger"], lambda: print("counting"))
59 | # event.on("key down", m.on_keydown)
60 |
61 | # event.emit("key down", hand="right", gest="1 finger")
62 | # event.emit("key down", hand="right", gest="2 finger")
--------------------------------------------------------------------------------
/frontend/src/components/Settings/SettingsLayout.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import SideBar from "./SideBar.js";
3 | import MenuBar from "../MenuBar/MenuBar";
4 | import TrickGesturesPairs from "./TrickGesturePairs";
5 | import { ipcRenderer } from "electron";
6 | const { BUTTON_CLICK, SEND_TO_RENDERER, OPEN_CUSTOM_WINDOW } = require("../../../etc/constants.js");
7 |
8 | export default class SettingsLayout extends Component {
9 | constructor(props) {
10 | super(props);
11 | this.state = {
12 | data: [],
13 | };
14 | this.changeSettings = this.changeSettings.bind(this);
15 | this.click = this.click.bind(this);
16 | this.handleRenderer = this.handleRenderer.bind(this);
17 | }
18 |
19 | //............Example of Electron sending message to React..............
20 | componentDidMount() {
21 | ipcRenderer.on(SEND_TO_RENDERER, this.handleRenderer);
22 | fetch("http://localhost:5000/config/retrieve").then((response) =>
23 | response.json().then((data) => {
24 | this.setState({ data: data.config });
25 | })
26 | );
27 | }
28 | componentWillUnmount() {
29 | ipcRenderer.removeListener(SEND_TO_RENDERER, this.handleRenderer);
30 | }
31 | handleRenderer(event, data) {
32 | console.log("renderer msg:", data);
33 | }
34 |
35 | //.........Example of React sending message to Electron..............
36 | click(name) {
37 | console.log("click:", name);
38 | ipcRenderer.send(BUTTON_CLICK, name);
39 | }
40 | //...................................................................
41 |
  // Persist a gesture change for the setting whose alias is
  // `originalConfiguration`, then re-fetch the full config list to refresh
  // the UI state.
  // NOTE(review): hand/action are sent empty — the backend appears to only
  // update the gesture for the matching alias; confirm against flask/config.py.
  async changeSettings(originalConfiguration, newGesture) {
    const updatedConfiguration = { hand: "", gesture: newGesture, action: "", alias: originalConfiguration };
    await fetch("http://localhost:5000/config/update_configuration", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify(updatedConfiguration),
    });

    console.log("Settings Layout - changeSettings:");
    console.log("updatedConfiguration:", updatedConfiguration);

    // Reload the authoritative configuration list from the backend.
    fetch("http://localhost:5000/config/retrieve").then((response) =>
      response.json().then((data) => {
        this.setState({ data: data.config });
        console.log("data:", data);
      })
    );
  }
62 |
63 | render() {
64 | const flexContainer = {
65 | display: "flex",
66 | margin: 0,
67 | padding: 0,
68 | backgroundColor: "#ececec",
69 | };
70 |
71 | const sideScreen = {
72 | flex: 1,
73 | };
74 |
75 | const settingsStyle = {
76 | padding: 0,
77 | flex: 3,
78 | height: "100vh",
79 | marginTop: "-2vh",
80 | };
81 |
82 | const titleStyle = {
83 | textAlign: "center",
84 | color: "#111111",
85 | fontFamily: "Lobster Two",
86 | };
87 |
88 | const pairStyle = {
89 | margin: "-3vh 5vw 0 7vw",
90 | height: "70vh",
91 | overflow: "auto",
92 | };
93 |
94 | return (
95 |
96 |
97 |
98 |
99 |
100 |
Settings
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 | );
112 | }
113 | }
114 |
--------------------------------------------------------------------------------
/frontend/src/components/MainScreen/CameraOption.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import { ipcRenderer } from "electron";
3 | import icon from "../../assets/webcam.png";
4 | import switchIcon from "../../assets/switch.png";
5 | const { BUTTON_CLICK, SEND_TO_RENDERER } = require("../../../etc/constants.js");
6 |
7 | export default class CameraOption extends Component {
8 | constructor(props) {
9 | super(props);
10 | this.state = {
11 | camSource: "http://localhost:5000/video/off",
12 | };
13 | this.handleRenderer = this.handleRenderer.bind(this);
14 | this.toggleChange = this.toggleChange.bind(this);
15 | }
16 |
17 | //............Example of Electron sending message to React..............
18 | componentDidMount() {
19 | ipcRenderer.on(SEND_TO_RENDERER, this.handleRenderer);
20 | }
21 | componentWillUnmount() {
22 | ipcRenderer.removeListener(SEND_TO_RENDERER, this.handleRenderer);
23 | }
24 | handleRenderer(event, data) {
25 | console.log("renderer msg:", data);
26 | }
27 |
  // Toggle the webcam preview: swap the stream URL between the live feed
  // and the "off" endpoint, and show/hide the #camera-frame element.
  toggleChange() {
    if (this.state.camSource == "http://localhost:5000/video/feed") {
      this.setState({ camSource: "http://localhost:5000/video/off" });
      document.getElementById("camera-frame").style.display = "none";
    } else {
      document.getElementById("camera-frame").style.display = "block";
      this.setState({ camSource: "http://localhost:5000/video/feed" });
    }
  }
37 |
  // Ask the backend to switch camera devices (fire-and-forget; the response
  // is intentionally ignored).
  toggleSwitch() {
    fetch("http://localhost:5000/video/switch");
  }
41 |
42 | render() {
43 | const btnContainer = {
44 | margin: "50vh 0vh 0vh 3vh",
45 | };
46 |
47 | const switchContainer = {
48 | position: "absolute",
49 | right: "92vw",
50 | bottom: "4vh",
51 | };
52 |
53 | const btnStyle = {
54 | background: "#1250a4",
55 | color: "white",
56 | border: "none",
57 | outline: "none",
58 | cursor: "pointer",
59 | overflow: "hidden",
60 | borderRadius: "10px",
61 | display: "flex",
62 | flexDirection: "row",
63 | height: "8vh",
64 | };
65 |
66 | const switchStyle = {
67 | background: "#ececec",
68 | color: "white",
69 | border: "none",
70 | outline: "none",
71 | cursor: "pointer",
72 | borderRadius: "100px",
73 | height: "10vh",
74 | width: "6vw",
75 | };
76 |
77 | const switchImageStyle = {
78 | top: "-10vh",
79 | };
80 |
81 | const imageStyle = {
82 | backgroundColor: "#144586",
83 | padding: "1vh 1vw",
84 | marginLeft: "-1vh",
85 | };
86 |
87 | const wordStyle = {
88 | padding: "0.6vh 3vw",
89 | fontFamily: "Oxygen",
90 | fontWeight: "400",
91 | };
92 |
93 | const word = {
94 | marginTop: "1vh",
95 | };
96 |
97 | const videoStyle = {
98 | height: "480px",
99 | width: "640px",
100 | border: "none",
101 | position: "absolute",
102 | bottom: "0px",
103 | right: "25vw",
104 | display: "none",
105 | };
106 |
107 | return (
108 |
109 |
110 |
111 |
112 |
113 |
114 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 | );
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
/frontend/main.js:
--------------------------------------------------------------------------------
1 | "use strict";
2 |
3 | // Import parts of electron to use
4 | const { app, BrowserWindow, ipcMain, dialog } = require("electron");
5 | const path = require("path");
6 | const fs = require("fs");
7 | const url = require("url");
8 | const { SEND_TO_RENDERER, BUTTON_CLICK, OPEN_FILE_EXPLORER, SEND_FILE_PATH, ADD_FILE_SETTING } = require("./etc/constants");
9 |
10 | let mainWindow;
11 |
12 | // Dev mode
13 | let dev = false;
14 | if (process.defaultApp || /[\\/]electron-prebuilt[\\/]/.test(process.execPath) || /[\\/]electron[\\/]/.test(process.execPath)) {
15 | dev = true;
16 | }
17 |
// Create the main application window, start the Flask backend, and load the
// renderer either from the dev server or from the packaged bundle.
function createWindow() {
  // load flask webserver
  // NOTE(review): relative path assumes the process was launched from the
  // frontend/ directory — confirm the working directory at startup.
  require("child_process").spawn("python", ["../flask/app.py"]);

  mainWindow = new BrowserWindow({
    width: 840,
    height: 512,
    autoHideMenuBar: true,
    frame: false, // frameless; in-app MenuBar supplies window controls
    resizable: false,
    maximizable: false,
    fullscreenable: false,
    icon: "./src/assets/transparent.ico",
    webPreferences: {
      nodeIntegration: true,
    },
  });

  // Dev mode (without --noDevServer): load from webpack-dev-server on :4000.
  // Otherwise load the built dist/index.html from disk.
  let indexPath;
  if (dev && process.argv.indexOf("--noDevServer") === -1) {
    indexPath = url.format({
      protocol: "http:",
      host: "localhost:4000",
      pathname: "index.html",
      slashes: true,
    });
  } else {
    indexPath = url.format({
      protocol: "file:",
      pathname: path.join(__dirname, "dist", "index.html"),
      slashes: true,
    });
  }
  mainWindow.loadURL(indexPath);

  // Don't show until we are ready and loaded
  mainWindow.once("ready-to-show", () => {
    mainWindow.show();
    // Open the DevTools automatically if developing
    // if (dev) {
    //   mainWindow.webContents.openDevTools();
    // }
  });

  // Emitted when the window is closed.
  mainWindow.on("closed", function () {
    // Dereference the window object, usually you would store windows
    // in an array if your app supports multi windows, this is the time
    // when you should delete the corresponding element.
    mainWindow = null;
  });
}
70 |
//Catch home button being clicked and send message to console
//...Electron receiving message from React...
ipcMain.on(BUTTON_CLICK, (event, arg) => {
  console.log("This button was clicked", arg);
  //...Electron sending message to React...
  mainWindow.send(SEND_TO_RENDERER, "Button Click received by Electron");
});

//open file explorer
// NOTE(review): callback-style showOpenDialog matches the old Electron 1.x
// API pinned in package.json; newer Electron returns a Promise instead —
// confirm before upgrading.
ipcMain.on(OPEN_FILE_EXPLORER, (event, arg) => {
  dialog.showOpenDialog(function (filePaths) {
    if (filePaths) {
      // Forward only the first selected path to the renderer.
      mainWindow.send(SEND_FILE_PATH, filePaths[0]);
    }
  });
});

//add file setting (currently log-only)
ipcMain.on(ADD_FILE_SETTING, (event, arg) => {
  console.log("add file setting: ", arg);
});
92 |
93 | //...................EXAMPLES................................
94 | // ipcMain.on(CATCH_ON_MAIN, (event, arg) => {
95 | // console.log('this button was clicked', arg);
96 | // mainWindow.send(SEND_TO_RENDERER, 'pong');
97 | // })
98 | //
99 | // ipcMain.on(CREATE_FILE, (event, arg) => {
100 | // console.log("writing file...");
101 | // fs.writeFile('tmp.js', arg, function (err) {
102 | // console.log(err);
103 | // });
104 | // })
105 | //...........................................................
106 |
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on("ready", createWindow);

// Quit when all windows are closed.
app.on("window-all-closed", () => {
  // On macOS it is common for applications and their menu bar
  // to stay active until the user quits explicitly with Cmd + Q
  if (process.platform !== "darwin") {
    app.quit();
  }
});

app.on("activate", () => {
  // On macOS it's common to re-create a window in the app when the
  // dock icon is clicked and there are no other windows open.
  if (mainWindow === null) {
    createWindow();
  }
});
128 |
--------------------------------------------------------------------------------
/frontend/src/components/Settings/HandButtons.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import Modal from "react-modal";
3 | import GestureBox from "./GestureBox";
4 | import fist from "../../assets/gestures/fist.png";
5 | import onefinger from "../../assets/gestures/onefinger.png";
6 | import twofinger from "../../assets/gestures/twofinger.png";
7 | import threefingers from "../../assets/gestures/threefinger.png";
8 | import fourfingers from "../../assets/gestures/fourfinger.png";
9 | import openhand from "../../assets/gestures/openhand.png";
10 | import rockandroll from "../../assets/gestures/rockandroll.png";
11 | import thumbsup from "../../assets/gestures/thumbsup.png";
12 | import thumbsdown from "../../assets/gestures/thumbsdown.png";
13 |
14 | export default class HandButtons extends Component {
15 | constructor(props) {
16 | super(props);
17 | this.state = {
18 | borderClr: "none",
19 | showModal: false,
20 | selected: "",
21 | };
22 | this.handleClick = this.handleClick.bind(this);
23 | this.closeModal = this.closeModal.bind(this);
24 | this.newSetting = this.newSetting.bind(this);
25 | }
26 |
  // Open the gesture-picker modal; disable the window's drag region so
  // clicks inside the modal are not swallowed by WebkitAppRegion dragging.
  handleClick() {
    this.setState({ borderClr: "1px solid white" });
    this.setState({ showModal: true });
    document.getElementById("settings-back").style.webkitAppRegion = "no-drag";
  }
32 |
  // Close the gesture-picker modal and restore the draggable window region.
  closeModal() {
    this.setState({ borderClr: "none" });
    this.setState({ showModal: false });
    document.getElementById("settings-back").style.webkitAppRegion = "drag";
  }
38 |
  // Invoked by the modal with the gesture the user picked: persist it via
  // the parent's changeSettings (keyed by this setting's alias), then record
  // the selection locally for highlighting.
  newSetting(newGesture) {
    this.props.changeSettings(this.props.dat["alias"], newGesture);
    //console.log("newSetting props:", this.props.dat)
    this.setState({ selected: newGesture });
  }
44 |
45 | render() {
46 | const btnStyle = {
47 | cursor: "pointer",
48 | border: this.state.borderClr,
49 | backgroundColor: "#081a2d",
50 | padding: "2vh 3vw",
51 | outline: "none",
52 | borderRadius: "10px",
53 | };
54 |
55 | const modalContainer = {
56 | display: "flex",
57 | flexDirection: "column",
58 | padding: 0,
59 | margin: 0,
60 | height: "100%",
61 | };
62 |
63 | const row = {
64 | flex: 1,
65 | border: "1px solid white",
66 | display: "flex",
67 | flexDirection: "row",
68 | };
69 |
70 | const imgFormat = {
71 | width: "auto",
72 | height: "3vh",
73 | };
74 |
75 | const blankBox = {
76 | flex: 1,
77 | textAlign: "center",
78 | borderRight: "1px solid white",
79 | color: "white",
80 | };
81 |
82 | return (
83 |
84 |
85 | {" "}
86 | {" "}
87 |
88 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 | );
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/flask/recognition/not_in_use/mediapipe_hands.py:
--------------------------------------------------------------------------------
1 | # %%
2 | import cv2
3 | import mediapipe as mp
4 | from time import time
5 | import sys
6 | import pickle
7 | import math
8 |
9 | mp_drawing = mp.solutions.drawing_utils
10 | mp_hands = mp.solutions.hands
11 | target = sys.argv[1] if len(sys.argv) > 1 else 0 # use a file a input or the webcam
12 |
13 | # %%
def vec_sub(a, b):
    """Component-wise difference a - b of two point-like objects (.x/.y/.z)."""
    dx = a.x - b.x
    dy = a.y - b.y
    dz = a.z - b.z
    return (dx, dy, dz)
16 |
def vec_dot(a, b):
    """Dot product of two equal-length vectors (sequences of numbers)."""
    total = 0
    for x, y in zip(a, b):
        total += x * y
    return total
19 |
def vec_mag(a):
    """Euclidean length of vector a."""
    squared = sum(c * c for c in a)
    return math.sqrt(squared)
22 |
def finger_straightness(hand_landmarks, base_knuckle):
    '''Sum of the interior joint angles of one finger.

    Higher means straighter; values run roughly ~3.9 to ~6.
    '''
    # A finger is the 4 consecutive landmarks starting at its base knuckle.
    knuckles = hand_landmarks[base_knuckle:base_knuckle + 4]
    bendyness = 0
    # Angle at each interior knuckle B between segments BA and BC,
    # using cos(theta) = (a . b) / (|a| |b|).
    for prev, mid, nxt in zip(knuckles, knuckles[1:], knuckles[2:]):
        a = vec_sub(prev, mid)
        b = vec_sub(nxt, mid)
        cos_theta = vec_dot(a, b) / (vec_mag(a) * vec_mag(b))
        bendyness += math.acos(cos_theta)
    return bendyness
40 |
def is_finger_bent(hand_landmarks, base_knuckle):
    """True when the finger starting at base_knuckle is judged bent."""
    # Threshold 6 is empirical: straightness ranges roughly ~3.9 (bent)
    # to ~6 (fully straight) per finger_straightness's docstring.
    straightness = finger_straightness(hand_landmarks, base_knuckle)
    return straightness < 6
44 | # img_hand_detect(['../../test-vids/piece_sign.png'])
45 | # %%
def img_hand_detect(file_list):
    """Run MediaPipe hand detection over a list of image file paths.

    Prints handedness, index-finger bend state and fingertip coordinates
    for each detected hand, and writes annotated copies to /tmp/.
    """
    # For static images:
    with mp_hands.Hands(
        static_image_mode=True,
        max_num_hands=2,
        min_detection_confidence=0.5) as hands:
        for idx, file in enumerate(file_list):
            # Read an image, flip it around y-axis for correct handedness output (see
            # above).
            image = cv2.flip(cv2.imread(file), 1)
            # Convert the BGR image to RGB before processing.
            results = hands.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

            # Print handedness and draw hand landmarks on the image.
            print('Handedness:', results.multi_handedness)
            if not results.multi_hand_landmarks:
                continue
            image_height, image_width, _ = image.shape
            annotated_image = image.copy()
            for hand_landmarks in results.multi_hand_landmarks:
                print('hand_landmarks:', hand_landmarks)
                # print('Finger straightness: ', finger_straightness(hand_landmarks.landmark, mp_hands.HandLandmark.INDEX_FINGER_MCP))
                print('Index bent:', is_finger_bent(hand_landmarks.landmark, mp_hands.HandLandmark.INDEX_FINGER_MCP))
                print(
                    f'Index finger tip coordinates: (',
                    f'{hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].x * image_width}, '
                    f'{hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].y * image_height})'
                )
                mp_drawing.draw_landmarks(
                    annotated_image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
                # Flip back before saving so the output matches the source image.
                cv2.imwrite(
                    '/tmp/annotated_image' + str(idx) + '.png', cv2.flip(annotated_image, 1))
78 |
79 | # %%
# Maps a human-readable finger name to the MediaPipe landmark index of its
# base joint (MCP for fingers, CMC for the thumb) — the slice start used by
# finger_straightness.
hand_joints = {
    'Index': mp_hands.HandLandmark.INDEX_FINGER_MCP,
    'Middle': mp_hands.HandLandmark.MIDDLE_FINGER_MCP,
    'Ring': mp_hands.HandLandmark.RING_FINGER_MCP,
    'Pinky': mp_hands.HandLandmark.PINKY_MCP,
    'Thumb': mp_hands.HandLandmark.THUMB_CMC,
}
def vid_hand_detect(target):
    """Run live MediaPipe hand detection on a webcam index or video file.

    For each frame, prints the names of fingers judged straight, shows the
    annotated frame, and prints the average FPS when the loop ends. Press
    ESC to quit.
    """
    # For webcam input:
    cap = cv2.VideoCapture(target)

    frames = 0
    # NOTE(review): t stays 0 if the capture never opens, which makes the
    # final FPS computation negative — confirm whether that path matters.
    t = 0
    start_t = time()
    with mp_hands.Hands(
        min_detection_confidence=0.5,
        min_tracking_confidence=0.5) as hands:
        while cap.isOpened():
            success, image = cap.read()
            if not success:
                cv2.destroyAllWindows()
                t = time()
                # If loading a video, use 'break' instead of 'continue'.
                break

            # Flip the image horizontally for a later selfie-view display, and convert
            # the BGR image to RGB.
            image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
            # To improve performance, optionally mark the image as not writeable to
            # pass by reference.
            image.flags.writeable = False
            results = hands.process(image)

            # Draw the hand annotations on the image.
            image.flags.writeable = True
            image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
            if results.multi_hand_landmarks:
                for hand_landmarks in results.multi_hand_landmarks:
                    # Report which fingers are currently straight.
                    straight = []
                    for finger, joint in hand_joints.items():
                        if not is_finger_bent(hand_landmarks.landmark, joint):
                            straight.append(finger)
                    print(' '.join(straight))

                    mp_drawing.draw_landmarks(
                        image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
            cv2.imshow('MediaPipe Hands', image)
            # 27 == ESC key.
            if cv2.waitKey(5) & 0xFF == 27:
                cv2.destroyAllWindows()
                t = time()
                break
            frames += 1
    cap.release()

    fps = frames / (t-start_t)
    print('FPS: ', fps)
136 |
# %%
# Script entry point: run detection on `target` (defined in an earlier cell).
vid_hand_detect(target)
139 |
--------------------------------------------------------------------------------
/frontend/src/components/Custom/CustomLayout.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import SideBar from "./Sidebar";
3 | import AddSetting from "./AddSettings";
4 | import MenuBar from "../MenuBar/MenuBar";
5 | import SettingName from "./SettingName";
6 | import { Link } from "react-router-dom";
7 | import { ipcRenderer } from "electron";
8 | const { OPEN_FILE_EXPLORER, SEND_FILE_PATH } = require("../../../etc/constants");
9 |
10 | const activeBtn = {
11 | backgroundColor: "#045bb7",
12 | color: "white",
13 | };
14 |
15 | const disabledBtn = {
16 | backgroundColor: "#51595b",
17 | color: "#bdb49d",
18 | };
19 |
/**
 * Screen for creating a custom gesture -> action setting.
 *
 * The user picks either a URL or a local file as the action. The File
 * button asks the Electron main process (OPEN_FILE_EXPLORER) to open a
 * native picker and listens for SEND_FILE_PATH with the chosen path.
 * "Add" POSTs the new configuration to the local Flask backend.
 */
export default class CustomLayout extends Component {
  constructor() {
    super();

    this.state = {
      urlBtn: activeBtn,    // style of the Url toggle button
      fileBtn: disabledBtn, // style of the File toggle button
      customSettingName: "", // name entered via the SettingName child
      filePath: "Click on File Button to Choose File", // placeholder until a file is picked
      filePathVisible: "none", // CSS display value for the file-path row
      filePathColor: "#51595b", // grey placeholder color; black once a real path arrives
    };
    this.focusUrl = this.focusUrl.bind(this);
    this.focusFile = this.focusFile.bind(this);
    this.addSetting = this.addSetting.bind(this);
    this.addFileSetting = this.addFileSetting.bind(this);
    this.updateFilePath = this.updateFilePath.bind(this);
    this.updateName = this.updateName.bind(this);
  }

  //update File Path received from electron
  componentDidMount() {
    ipcRenderer.on(SEND_FILE_PATH, this.updateFilePath);
  }
  // Detach the IPC listener so unmounted instances are not called back.
  componentWillUnmount() {
    ipcRenderer.removeListener(SEND_FILE_PATH, this.updateFilePath);
  }
  // IPC callback: `data` is the path chosen in the native file dialog.
  updateFilePath(event, data) {
    this.setState({ filePath: data });
    this.setState({ filePathColor: "black" });
  }

  // Highlight the Url button and hide the file-path row.
  focusUrl() {
    this.setState({ urlBtn: activeBtn });
    this.setState({ fileBtn: disabledBtn });
    this.setState({ filePathVisible: "none" });
  }
  // Highlight the File button, show the path row, and open the file picker.
  focusFile() {
    this.setState({ urlBtn: disabledBtn });
    this.setState({ fileBtn: activeBtn });
    this.setState({ filePathVisible: "flex" });

    ipcRenderer.send(OPEN_FILE_EXPLORER);
  }

  // Controlled-input callback from the SettingName child component.
  updateName(newName) {
    this.setState({ customSettingName: newName });
  }

  // "Add" handler: only saves when a name was entered. The URL branch is
  // not implemented yet.
  addSetting() {
    if (this.state.customSettingName !== "") {
      if (this.state.filePath !== "Click on File Button to Choose File" && this.state.filePathVisible === "flex") {
        this.addFileSetting();
      } else {
        //add url
      }
    }
  }

  // POST the chosen file as a new configuration to the Flask backend.
  async addFileSetting() {
    this.setState({ filePathVisible: "none" });

    console.log("add file setting ");
    // NOTE(review): this turns each backslash into a DOUBLE forward slash
    // ("C:\a\b" -> "C://a//b") — confirm the backend expects "//", not "/".
    let action = this.state.filePath.split("\\").join("//");

    let fileData = {
      hand: "right",
      gesture: "fist",
      action: action,
      alias: this.state.customSettingName,
    };

    await fetch("http://localhost:5000/config/add_configuration", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify(fileData),
    });
  }

  render() {
    const flexContainer = {
      display: "flex",
      margin: 0,
      padding: 0,
      backgroundColor: "#ececec",
    };

    const mainContentStyle = {
      margin: "0vh 1vh 1vh 0",
      padding: 0,
      flex: 3,
      height: "100vh",
    };

    const titleStyle = {
      textAlign: "center",
      color: "#111111",
      fontFamily: "Lobster Two",
      margin: "8vh",
    };

    const pathTypeStyle = {
      paddingLeft: "23vw",
    };

    const btnStyle = {
      border: "none",
      outline: "none",
      cursor: "pointer",
      overflow: "hidden",
      borderRadius: "10px",
      height: "8vh",
      marginRight: "5vw",
      width: "100px",
      textAlign: "center",
    };

    const addCancelStyle = {
      marginTop: "10vh",
      marginLeft: "31.5vw",
    };

    const addBtn = {
      color: "#ffd9d9",
      backgroundColor: "#a31212",
      marginTop: "5vh",
    };

    const cancelBtn = {
      backgroundColor: "#8f9cb0",
      color: "#1d2430",
    };

    const filePathContainer = {
      display: this.state.filePathVisible,
      margin: "0 8vw 5vh 11.5vw",
    };

    const fileTextStyle = {
      border: "1px black solid",
      borderRadius: "10px",
      textAlign: "center",
      color: this.state.filePathColor,
      flex: "5",
      overflow: "hidden",
      height: "20px",
      fontSize: "0.8em",
      backgroundColor: "#ececec",
    };

    // NOTE(review): the JSX below was mangled when this file was extracted —
    // the element tags are missing and only text content survived. Restore
    // the markup from version control before editing this method.
    return (

Customize

      Url

      File

      {/*
 */}

File Path:

{this.state.filePath}

      Cancel

      Add

    );
  }
}
218 |
--------------------------------------------------------------------------------
/flask/recognition/detection.py:
--------------------------------------------------------------------------------
1 | # Using python version 3.7.9 for media-pipe
2 | import cv2
3 | import mediapipe as mp
4 | import numpy as np
5 | import pyautogui
6 | import math
7 |
8 | import recognition.Actions as Actions
9 |
10 |
# Module-level camera state shared by gen_video()/gen_off()/switchWebcam().
settings = {
    "camera_index": 0, # 0 should be the default for built in cameras. If this doesn't work, try 1.
}

# CAP_DSHOW (DirectShow) is only passed for the default camera.
# NOTE(review): CAP_DSHOW implies a Windows host — confirm for other platforms.
if (settings["camera_index"] == 0):
    cap = cv2.VideoCapture(0, cv2.CAP_DSHOW)
else:
    cap = cv2.VideoCapture(1)

# Camera-swap request flag; toggled by switchWebcam(), polled by gen_video().
switch = False

if cap is None or not cap.isOpened():
    pyautogui.alert('Your camera is unavailable. Try to fix this issue and try again!', 'Error')

# restricting webcam size / framerate
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
cap.set(cv2.CAP_PROP_FPS, 60)


# Number of consecutive frames a gesture has to be detected before it changes
# Lower the number the faster it changes, but the more jumpy it is
# Higher the number the slower it changes, but the less jumpy it is
frames_until_change = 10
prevGestures = [] # gestures calculated in previous frames

# Getting media-pipe ready
# Positional args: static_image_mode, max_num_hands, min detection/tracking
# confidence — NOTE(review): verify against the installed mediapipe version.
mpHands = mp.solutions.hands
hands = mpHands.Hands(False, 2, 0.5, 0.5)
mpDraw = mp.solutions.drawing_utils
42 |
43 |
def dotProduct(v1, v2):
    """Return the 2-D dot product of vectors v1 and v2."""
    ax, ay = v1[0], v1[1]
    bx, by = v2[0], v2[1]
    return ax * bx + ay * by
46 |
def normalize(v):
    """Scale the 2-D vector v to unit length in place and return it."""
    length = np.sqrt(v[0] ** 2 + v[1] ** 2)
    v[0] /= length
    v[1] /= length
    return v
52 |
def angle_between(a, b, c):
    """Return the angle ABC (radians) formed at vertex b.

    Points a, b, c expose .x/.y/.z; uses cos(theta) = (u*v) / (|u| |v|).
    """
    u = (a.x - b.x, a.y - b.y, a.z - b.z)
    w = (c.x - b.x, c.y - b.y, c.z - b.z)

    dot = sum(ui * wi for ui, wi in zip(u, w))
    u_mag = math.sqrt(sum(ui * ui for ui in u))
    w_mag = math.sqrt(sum(wi * wi for wi in w))

    return math.acos(dot / (u_mag * w_mag))
68 |
def gesture(f, hand):
    """Map per-finger openness scores to a named gesture.

    :param f: five scores (thumb..pinky); positive = extended, negative = folded
    :param hand: MediaPipe hand landmarks, used for orientation checks
    :return: gesture name string, or "No Gesture"
    """
    up = [v > 0 for v in f]
    down = [v < 0 for v in f]

    if up[1] and down[2] and down[3] and up[4]:
        # Index + pinky extended: horns family; orientation decides which.
        tip, base = hand.landmark[8], hand.landmark[5]
        if tip.y > base.y:  # image y grows downward
            return "Horns Down"
        return "rockandroll" if up[0] else "No Gesture"
    if up[0] and down[1] and down[2] and down[3] and down[4]:
        tip, base = hand.landmark[4], hand.landmark[2]
        return "thumbsup" if tip.y < base.y else "thumbsdown"
    if down[0] and up[1] and down[2] and down[3] and down[4]:
        return "onefinger"
    if down[0] and up[1] and up[2] and down[3] and down[4]:
        return "twofinger"
    if up[0] and up[1] and up[2] and up[3] and up[4]:
        # Spread between middle and ring fingers separates the salute.
        if angle_between(hand.landmark[12], hand.landmark[0], hand.landmark[16]) > 0.3:
            return 'Vulcan Salute'
        return "openhand"
    if down[0] and down[1] and down[2] and down[3] and down[4]:
        return "fist"
    if down[0] and up[1] and up[2] and up[3] and up[4]:
        return "fourfinger"
    if down[0] and up[1] and up[2] and up[3] and down[4]:
        return "threefinger"
    return "No Gesture"
113 |
def _joint_alignment(lms, tip_id):
    """Dot product between the unit (joint -> tip) and (wrist -> joint)
    vectors for the finger whose tip landmark id is ``tip_id``.
    Positive when the finger is straight, negative when it is folded back.
    """
    x2, y2 = lms[tip_id].x, lms[tip_id].y      # finger tip
    x1, y1 = lms[tip_id - 2].x, lms[tip_id - 2].y  # joint 2 points below the tip
    x0, y0 = lms[0].x, lms[0].y                # wrist
    fv = normalize([x2 - x1, y2 - y1])  # joint to finger tip vector
    pv = normalize([x1 - x0, y1 - y0])  # wrist to joint vector
    return dotProduct(fv, pv)

def straightFingers(hand, img):
    """
    Calculates which fingers are open and which fingers are closed.
    The identical per-finger vector math previously duplicated in both
    branches is factored out into _joint_alignment().
    :param hand: media-pipe object of the hand
    :param img: frame with the hand in it (landmarks are drawn onto it)
    :return: list of open (+ num) and closed (- num) fingers, thumb first
    """
    fingerTipIDs = [4, 8, 12, 16, 20]  # landmark ids of the five finger tips
    openFingers = []
    lms = hand.landmark  # all 21 landmarks with their x and y coordinates

    mpDraw.draw_landmarks(img, hand, connections=mpHands.HAND_CONNECTIONS, connection_drawing_spec=mpDraw.DrawingSpec(color=(255,0,0)))

    for id in fingerTipIDs:
        alignment = _joint_alignment(lms, id)
        if id == 4:
            # The thumb works differently than the other fingers: a value
            # greater than 0 but less than .65 is typically a thumb folded
            # across the hand, which should be treated as "down".
            openFingers.append(alignment if alignment > .65 else -1)
        else:
            openFingers.append(alignment)

    return openFingers
156 |
def getHand(handedness):
    """Return which of the user's hands this is: 'Left' or 'Right'.

    MediaPipe labels hands as they appear in the mirrored camera image,
    so the reported label is swapped before being returned.
    """
    label = handedness.classification[0].label
    return 'Right' if label == 'Left' else 'Left'
169 |
def gen_video(configData):
    """Frame generator for the Flask video-stream endpoint.

    Reads frames from the shared module-level camera, runs MediaPipe hand
    detection, emits a "start" action event whenever a hand's gesture
    changes (debounced over frames_until_change frames), and yields each
    frame as a JPEG chunk in multipart/x-mixed-replace format.

    :param configData: configuration forwarded to the Actions event handler.
    """
    global switch
    # reopens camera after release
    if (settings["camera_index"] == 0):
        cap.open(0, cv2.CAP_DSHOW);
    else:
        cap.open(1);

    # Per-hand gesture history and the currently latched gesture.
    prevGests = {
        "right": [],
        "left": [],
    }
    currGests = {
        "right": None,
        "left": None,
    }

    while True:
        """
        Main code loop
        """

        # Honor a pending camera-swap request from switchWebcam();
        # fall back to the original camera if the other one yields no frame.
        if switch:
            if (settings["camera_index"] == 0):
                cap.open(1)
                settings["camera_index"] = 1
                if cap.read()[1] is None:
                    cap.open(0, cv2.CAP_DSHOW)
                    settings["camera_index"] = 0
            else:
                cap.open(0, cv2.CAP_DSHOW)
                settings["camera_index"] = 0

            switch = False

        success, img = cap.read()

        if img is None:
            print("Video ended. Closing.")
            break

        # MediaPipe expects RGB; OpenCV delivers BGR.
        imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        results = hands.process(imgRGB)

        # if there are hands in frame, calculate which fingers are open and draw the landmarks for each hand
        if results.multi_hand_landmarks:
            gestures = {}

            for handLms, handedness in zip(results.multi_hand_landmarks, results.multi_handedness):
                fingers = straightFingers(handLms, img)
                hand = getHand(handedness)
                if hand == "Left":
                    gestures['left'] = gesture(fingers, handLms)
                else:
                    gestures['right'] = gesture(fingers, handLms)

            for hand in ['left', 'right']:
                if not hand in gestures:
                    continue
                # if gesture is diff from currGesture and the previous 3 gestures are the same as the current gesture
                # too much gesture, it is not a word anymore
                if(gestures[hand] != currGests[hand] and all(x == gestures[hand] for x in prevGests[hand])):
                    Actions.event.emit("start", configData=configData, hand=hand, gest=gestures[hand])
                    currGests[hand] = gestures[hand]

                # keep only the 3 previous Gestures
                prevGests[hand].append(gestures[hand])
                prevGests[hand] = prevGests[hand][-frames_until_change:]

        # Encode the (annotated) frame as one multipart JPEG chunk.
        ret, buffer = cv2.imencode('.jpg', img)
        frame = buffer.tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')


        # NOTE(review): no cv2 window is shown in this path, so this waitKey
        # likely never sees a key press — confirm it is still needed.
        if cv2.waitKey(1) == 27:
            break
247 |
def gen_off():
    """Single-frame generator used when the webcam is switched off.

    Yields the loading placeholder image once as a multipart JPEG chunk,
    then releases the shared camera.
    """
    img = cv2.imread("../frontend/src/assets/loading.png", 1)
    ret, buffer = cv2.imencode('.jpg', img)
    frame = buffer.tobytes()
    yield (b'--frame\r\n'
           b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
    # NOTE(review): this line only runs if the generator is resumed after
    # the yield; if the consumer close()s it instead, the camera is never
    # released — confirm the Flask response iterates it twice.
    cap.release()
255 |
256 |
def switchWebcam():
    """Request a camera swap.

    Toggles the module-level ``switch`` flag, which gen_video() checks once
    per frame before grabbing the next image. Replaces the verbose if/else
    toggle with the idiomatic boolean negation.
    """
    global switch
    switch = not switch
263 |
264 |
265 |
--------------------------------------------------------------------------------
/flask/recognition/not_in_use/HandTracker.py:
--------------------------------------------------------------------------------
1 | # Using python version 3.7.9 for media-pipe
2 | import cv2
3 | import mediapipe as mp
4 | import numpy as np
5 | import pyautogui
6 | import time
7 | import argparse
8 | import config
9 | import math
10 |
11 | from Emitter import event
12 |
# Getting openCV ready
cap = cv2.VideoCapture(config.settings["camera_index"])
#Camera detection
if cap is None or not cap.isOpened():
    pyautogui.alert('Your camera is unavailable. Try to fix this issue and try again!', 'Error')
# Dimensions of the camera output window
wCam = int(cap.get(3))  # property 3 = CAP_PROP_FRAME_WIDTH
hCam = int(cap.get(4))  # property 4 = CAP_PROP_FRAME_HEIGHT

# For testing, write output to video
#out = cv2.VideoWriter('output.mp4',cv2.VideoWriter_fourcc('M','J','P','G'), 30, (wCam,hCam))

# Number of consecutive frames a gesture has to be detected before it changes
# Lower the number the faster it changes, but the more jumpy it is
# Higher the number the slower it changes, but the less jumpy it is
frames_until_change = 3
prevGestures = [] # gestures calculated in previous frames

# Getting media-pipe ready
mpHands = mp.solutions.hands
hands = mpHands.Hands(min_detection_confidence=.7)
mpDraw = mp.solutions.drawing_utils

# Vars used to calculate avg fps
prevTime = 0
currTime = 0
fpsList = []

# Mouse movement anchor; [-1, -1] means "not in mouse-movement mode"
mouseAnchor = [-1,-1]
wristPositionHistory = []  # recent wrist positions, used for absoluteMouse smoothing
pyautogui.PAUSE = 0        # no artificial delay between pyautogui calls
pyautogui.FAILSAFE = False # don't abort when the cursor hits a screen corner

screenWidth, screenHeight = pyautogui.size()
48 |
def parse_arguments():
    """Parse the command-line options for hand tracking.

    Recognized flag:
        -m: mode that gestures will be recognized for (ie. mouse)
    """
    parser = argparse.ArgumentParser(description='Used to parse options for hand tracking')
    parser.add_argument('-m', type=str, help='The mode that the recognition will control for (ie. mouse)')
    return parser.parse_args()
60 |
def dotProduct(v1, v2):
    """Return the 2-D dot product of vectors v1 and v2."""
    ax, ay = v1[0], v1[1]
    bx, by = v2[0], v2[1]
    return ax * bx + ay * by
63 |
def normalize(v):
    """Rescale the 2-D vector v to unit length in place; returns v."""
    mag = np.sqrt(v[0] * v[0] + v[1] * v[1])
    for i in (0, 1):
        v[i] = v[i] / mag
    return v
69 |
def angle_between(a, b, c):
    """Return the angle ABC (radians) at vertex b.

    Points expose .x/.y/.z; uses cos(theta) = (u*v) / (|u| |v|).
    """
    ux, uy, uz = a.x - b.x, a.y - b.y, a.z - b.z
    vx, vy, vz = c.x - b.x, c.y - b.y, c.z - b.z

    dot = ux * vx + uy * vy + uz * vz
    u_len = math.sqrt(ux * ux + uy * uy + uz * uz)
    v_len = math.sqrt(vx * vx + vy * vy + vz * vz)

    return math.acos(dot / (u_len * v_len))
85 |
def gesture(f, hand):
    """Map per-finger openness scores to a named gesture.

    :param f: five scores (thumb..pinky); positive = extended, negative = folded
    :param hand: MediaPipe hand landmarks, used for orientation checks
    :return: gesture name string, or "No Gesture"
    """
    up = [v > 0 for v in f]
    down = [v < 0 for v in f]

    if up[1] and down[2] and down[3] and up[4]:
        # Index + pinky extended: horns family; orientation decides which.
        tip, base = hand.landmark[8], hand.landmark[5]
        if tip.y > base.y:  # image y grows downward
            return "Horns Down"
        return "Rock and Roll" if up[0] else "No Gesture"
    if up[0] and down[1] and down[2] and down[3] and down[4]:
        tip, base = hand.landmark[4], hand.landmark[2]
        return "Gig Em" if tip.y < base.y else "Thumbs Down"
    if down[0] and up[1] and down[2] and down[3] and down[4]:
        return "1 finger"
    if down[0] and up[1] and up[2] and down[3] and down[4]:
        return "Peace"
    if up[0] and up[1] and up[2] and up[3] and up[4]:
        # Spread between middle and ring fingers separates the salute.
        if angle_between(hand.landmark[12], hand.landmark[0], hand.landmark[16]) > 0.3:
            return 'Vulcan Salute'
        return "Open Hand"
    if down[0] and down[1] and down[2] and down[3] and down[4]:
        return "Fist"
    if down[0] and up[1] and up[2] and up[3] and up[4]:
        return "4 fingers"
    if down[0] and up[1] and up[2] and up[3] and down[4]:
        return "3 fingers"
    return "No Gesture"
130 |
def calcFPS(pt, ct, framelist):
    """Append the instantaneous FPS (1 / frame interval) to framelist,
    keeping at most the 30 most recent samples, and return the list.

    :param pt: previous frame timestamp
    :param ct: current frame timestamp
    :param framelist: running list of fps samples (mutated in place)
    """
    framelist.append(1 / (ct - pt))
    if len(framelist) > 30:
        framelist.pop(0)
    return framelist
139 |
def findLandMarks(img):
    """
    Draws the landmarks on the hand (not being used currently)
    :param img: frame with the hand in it
    :return:
    """
    imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    # NOTE(review): builds a fresh Hands() on every call (shadowing the
    # module-level `hands`), which is expensive — acceptable only while
    # this function stays unused.
    hands = mpHands.Hands()
    pHands = hands.process(imgRGB)

    if pHands.multi_hand_landmarks:
        for handlms in pHands.multi_hand_landmarks:
            # mpDraw.draw_landmarks(img, handlms, mpHands.HAND_CONNECTIONS)
            mpDraw.draw_landmarks(img, handlms)
154 |
def straightFingers(hand, img):
    """
    Calculates which fingers are open and which fingers are closed,
    drawing color-coded finger connections onto img as a side effect
    (default color = straight, red = folded).
    :param hand: media-pipe object of the hand
    :param img: frame with the hand in it
    :return: list of open (+ num) and closed (- num) fingers, thumb first
    """
    fingerTipIDs = [4, 8, 12, 16, 20] # list of the id's for the finger tip landmarks
    openFingers = []
    lms = hand.landmark # list of all 21 landmarks with their respective x and y coordinates

    # Draws the blue part (connections among the palm/base landmarks)
    palm_connections = filter(lambda x: x[1] in [0,1,2,5,6,9,10,13,14,17,18], mpHands.HAND_CONNECTIONS)
    mpDraw.draw_landmarks(img,hand, connections=palm_connections, connection_drawing_spec=mpDraw.DrawingSpec(color=(255,0,0)))

    for id in fingerTipIDs:
        if id == 4: # This is for the thumb calculation, because it works differently than the other fingers
            x2, y2 = lms[id].x, lms[id].y # x, and y of the finger tip
            x1, y1 = lms[id-2].x, lms[id-2].y # x, and y of the joint 2 points below the finger tip
            x0, y0 = lms[0].x, lms[0].y # x, and y of the wrist
            fv = [x2-x1, y2-y1] # joint to finger tip vector
            fv = normalize(fv)
            pv = [x1-x0, y1-y0] # wrist to joint vector
            pv = normalize(pv)

            thumb = dotProduct(fv, pv)
            # Thumb that is greater than 0, but less than .65 is typically
            # folded across the hand, which should be calculated as "down"
            if thumb > .65:
                openFingers.append(thumb) # Calculates if the finger is open or closed
            else:
                openFingers.append(-1)

            # Code below draws the two vectors from above
            # (pixel coordinates via module-level frame size wCam/hCam;
            #  NOTE(review): cx/cy/cx2/cy2/cx0/cy0 are computed but unused)
            cx, cy = int(lms[id].x * wCam), int(lms[id].y * hCam)
            cx2, cy2 = int(lms[id-2].x * wCam), int(lms[id-2].y * hCam)
            cx0, cy0 = int(lms[0].x * wCam), int(lms[0].y * hCam)
            finger_connections = filter(lambda x: id-2 <= x[0] and x[0] <= id, mpHands.HAND_CONNECTIONS) # gets the connections only for the thumb
            # Straight thumb drawn in the default color, folded thumb in red
            if dotProduct(fv, pv) >= .65:
                mpDraw.draw_landmarks(img,hand, connections=finger_connections)
            else:
                mpDraw.draw_landmarks(img,hand, connections=finger_connections, connection_drawing_spec=mpDraw.DrawingSpec(color=(0,0,255)))

        else: # for any other finger (not thumb)
            x2, y2 = lms[id].x, lms[id].y # x, and y of the finger tip
            x1, y1 = lms[id-2].x, lms[id-2].y # x, and y of the joint 2 points below the finger tip
            x0, y0 = lms[0].x, lms[0].y # x, and y of the wrist
            fv = [x2-x1, y2-y1] # joint to finger tip vector
            fv = normalize(fv)
            pv = [x1-x0, y1-y0] # wrist to joint vector
            pv = normalize(pv)
            openFingers.append(dotProduct(fv, pv)) # Calculates if the finger is open or closed

            # Code below draws the two vectors from above
            # (NOTE(review): these pixel coords are also unused here)
            cx, cy = int(lms[id].x * wCam), int(lms[id].y * hCam)
            cx2, cy2 = int(lms[id-2].x * wCam), int(lms[id-2].y * hCam)
            cx0, cy0 = int(lms[0].x * wCam), int(lms[0].y * hCam)

            # Connections from tip to first knuckle from base
            finger_connections = [(id-1, id),
                                  (id-2, id-1)]
            # Straight finger drawn in the default color, folded in red
            if dotProduct(fv, pv) >= 0:
                mpDraw.draw_landmarks(img,hand, connections=finger_connections)
            else:
                mpDraw.draw_landmarks(img,hand, connections=finger_connections, connection_drawing_spec=mpDraw.DrawingSpec(color=(0,0,255)))
            # cv2.circle(img, (cx, cy), 15, (255, 0, 255), cv2.FILLED)
    return openFingers
222 |
def getHand(handedness):
    """Return which of the user's hands this is: 'Left' or 'Right'.

    MediaPipe labels hands as they appear in the mirrored camera image,
    so the reported label is swapped before being returned.
    """
    label = handedness.classification[0].label
    return 'Right' if label == 'Left' else 'Left'
235 |
#Handles entering and exiting mouse-movement mode and also handles mouse clicks
def mouseModeHandler(detectedHand, currGests, gestures, results, mouseHand):
    """Handle entering/exiting mouse-movement mode and mouse clicks.

    :param detectedHand: 'left' or 'right' — the hand whose gesture changed
    :param currGests: dict of the currently latched gesture per hand
    :param gestures: dict of the gesture detected this frame per hand
    :param results: MediaPipe results; landmark[0] of hand 0 (the wrist) is
        used as the anchor position
    :param mouseHand: which hand controls the mouse
    :return: new anchor [x, y] when entering mouse mode, [-1, -1] when
        leaving it, otherwise the unchanged module-level ``mouseAnchor``
    """
    # Enters mouse movement mode on Gig Em gesture, setting a mouse anchor point at that position
    if(detectedHand == mouseHand and currGests[detectedHand] != "Gig Em" and currGests[detectedHand] != "Fist" and gestures[detectedHand] == "Gig Em"):
        print("Entering mouse mode at (" + str(results.multi_hand_landmarks[0].landmark[0].x) + ", " + str(results.multi_hand_landmarks[0].landmark[0].y) + ")")
        return [results.multi_hand_landmarks[0].landmark[0].x, results.multi_hand_landmarks[0].landmark[0].y]

    # Leave mouse mode when gesture isn't Gig Em or fist anymore.
    # BUG FIX: the last clause previously read gestures[hand], silently
    # depending on the global `hand` left over from the main loop; it now
    # uses the detectedHand parameter like every other clause.
    if (detectedHand == mouseHand and (currGests[detectedHand] == "Gig Em" or currGests[detectedHand] == "Fist") and gestures[detectedHand] != "Fist" and gestures[detectedHand] != "Gig Em"):
        print("Exiting mouse mode.")
        return [-1,-1]

    # Clicks the mouse upon a fist gesture while in mouse-movement mode
    if(detectedHand == mouseHand and currGests[detectedHand] == "Gig Em" and gestures[detectedHand] == "Fist"):
        pyautogui.click()
        print("Click!")

    return mouseAnchor
254 |
#Moves the mouse
#anchorMouse mode: While in mouse-movement mode (a.k.a. when mouseAnchor isn't [-1,-1]), when distance from mouse anchor point is far enough, start moving the mouse in that direction.
#absoluteMouse mode: Moves mouse proportionately to screen size.
def moveMouse(results):
    """Move the OS cursor from the wrist position (landmark 0 of hand 0).

    Reads module globals: args.m (mode), mouseAnchor, wristPositionHistory,
    screenWidth/screenHeight.
    """
    # anchorMouse: only move once the wrist is farther than 0.025
    # (normalized image units) from the anchor; cursor speed grows
    # quadratically with that distance.
    if(args.m == 'anchorMouse'):
        if(mouseAnchor != [-1,-1] and ((results.multi_hand_landmarks[0].landmark[0].x - mouseAnchor[0])**2 + (results.multi_hand_landmarks[0].landmark[0].y - mouseAnchor[1])**2)**0.5 > 0.025):
            pyautogui.moveTo(pyautogui.position()[0] - ((results.multi_hand_landmarks[0].landmark[0].x - mouseAnchor[0])*abs(results.multi_hand_landmarks[0].landmark[0].x - mouseAnchor[0])*1000), pyautogui.position()[1] + (((results.multi_hand_landmarks[0].landmark[0].y - mouseAnchor[1])*abs(results.multi_hand_landmarks[0].landmark[0].y - mouseAnchor[1]))*1000))

    if(args.m == 'absoluteMouse' and mouseAnchor != [-1,-1]):
        # Keep a sliding window of the 10 most recent wrist positions.
        if(len(wristPositionHistory) == 10):
            wristPositionHistory.pop(0)
            wristPositionHistory.append((results.multi_hand_landmarks[0].landmark[0].x, results.multi_hand_landmarks[0].landmark[0].y))
        else:
            wristPositionHistory.append((results.multi_hand_landmarks[0].landmark[0].x, results.multi_hand_landmarks[0].landmark[0].y))

        avgx = 0
        avgy = 0

        # Average the window to smooth out jitter.
        for i in wristPositionHistory:
            avgx += i[0]
            avgy += i[1]

        avgx /= len(wristPositionHistory)
        avgy /= len(wristPositionHistory)

        # Map the averaged position to screen coordinates: x mirrored,
        # both axes scaled 2x around the image center.
        pyautogui.moveTo(-(avgx - 0.5)*2*screenWidth + screenWidth/2, (avgy - 0.5)*2*screenHeight + screenHeight/2)
281 |
282 |
# Preparing arguments for main
args = parse_arguments() # parsing arguments

# Gesture history per hand; a gesture only "latches" after it has been seen
# for frames_until_change consecutive frames.
prevGests = {
    "right": [],
    "left": [],
}
currGests = {
    "right": None,
    "left": None,
}
frame_count = 0
while True:
    """
    Main code loop
    """
    # Gets the image from openCV and gets the hand data from media-pipe
    success, img = cap.read()

    # If there are no more frames, break loop
    if img is None:
        print("Video ended. Closing.")
        break

    # MediaPipe expects RGB; OpenCV delivers BGR.
    imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    results = hands.process(imgRGB)

    # NOTE(review): these two lists are re-created every frame and never read.
    leftPrevGestures = []
    rightPrevGestures = []
    # if there are hands in frame, calculate which fingers are open and draw the landmarks for each hand
    if results.multi_hand_landmarks:
        gestures = {}

        for handLms, handedness in zip(results.multi_hand_landmarks, results.multi_handedness):
            fingers = straightFingers(handLms, img)
            hand = getHand(handedness)
            if hand == "Left":
                gestures['left'] = gesture(fingers, handLms)
            else:
                gestures['right'] = gesture(fingers, handLms)
            frame_count += 1
            #mpDraw.draw_landmarks(img, handLms, mpHands.HAND_CONNECTIONS)
            mpDraw.draw_landmarks(img, handLms)

        # print(f"{frame_count}, {gestures}, {len(results.multi_hand_landmarks)}")
        for hand in ['left', 'right']:
            if not hand in gestures:
                continue

            #Moves mouse if in mouse mode
            if (args.m == 'anchorMouse' or args.m == 'absoluteMouse'):
                moveMouse(results)

            # if gesture is diff from currGesture and the previous 3 gestures are the same as the current gesture
            # too much gesture, it is not a word anymore
            if(gestures[hand] != currGests[hand] and all(x == gestures[hand] for x in prevGests[hand])):

                print(f'{hand} : {gestures[hand]}')

                if (args.m == 'anchorMouse' or args.m == 'absoluteMouse'):
                    # Handles mouse-movement mode through mouseModeHandler function
                    mouseAnchor = mouseModeHandler(hand, currGests, gestures, results, "right")
                else:
                    # event.emit("end", hand=hand, gest=currGests[hand]) ## doesn't do anything yet
                    event.emit("start", hand=hand, gest=gestures[hand])

                currGests[hand] = gestures[hand]

            # keep only the 3 previous Gestures
            prevGests[hand].append(gestures[hand])
            prevGests[hand] = prevGests[hand][-frames_until_change:]

    # Used for fps calculation
    currTime = time.time()
    fpsList = calcFPS(prevTime, currTime, fpsList)
    prevTime = currTime

    # Displays the fps
    cv2.putText(img, str(int(np.average(fpsList))), (10, 70),
                cv2.FONT_HERSHEY_PLAIN, 2, (0, 255, 0), 3)

    cv2.imshow("Video with Hand Detection", img)

    # Used for testing, writing video to output
    #out.write(img)

    # Esc exits the loop
    if cv2.waitKey(1) == 27:
        break
cap.release()
cv2.destroyAllWindows()
--------------------------------------------------------------------------------