├── README.md
├── alert.wav
├── index.html
├── style.css
└── video.js
/README.md:
--------------------------------------------------------------------------------
1 | # TF.JS CodeForCause Workshop : Posture Correction
2 |
3 | A workshop project built with TensorFlow.js, PoseNet, ml5.js, and p5.js. It tracks the user's body posture and, when the posture is wrong, blurs the user's screen and plays an alarm sound.
4 |
5 | ## Important Links
6 | **- [Live Demo](https://shivaylamba.me/TensorFlow.JS-Posture-Correction/)**
7 | **- [YouTube Tutorial](https://www.youtube.com/watch?v=Ir9FNhoPyYc&t=2s)**
8 | **- [Slides](https://docs.google.com/presentation/d/1Lj4Y07NRRtp8lWQmwQ2xXAbW1T-vIfTRbc6oQji8RwQ/edit?usp=sharing)**
9 |
10 | ## Resources
11 | - [Posenet](https://www.tensorflow.org/lite/models/pose_estimation/overview)
12 | - [TF.JS Github](https://github.com/tensorflow/tfjs)
13 | - [TF.JS Website](https://www.tensorflow.org/js)
14 | - [ML5.JS](https://ml5js.org/)
15 |
16 |
17 |
--------------------------------------------------------------------------------
/alert.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shivaylamba/TensorFlow.JS-Posture-Correction/db0c82ad335290f12ff633edbe2994a87a863f02/alert.wav
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
16 |
17 |
18 | Posture Correction
19 |
20 |
23 |
24 | GitHub Repo
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/style.css:
--------------------------------------------------------------------------------
/* Page background: teal-to-green diagonal gradient. */
body {
  /* Legacy -webkit- syntax kept as a fallback for old WebKit builds;
     the standard linear-gradient() below overrides it where supported.
     (old "top left" start point == standard "to bottom right" direction) */
  background: -webkit-linear-gradient(top left, #50a3a2 0%, #53e3a6 100%);
  background: linear-gradient(to bottom right, #50a3a2 0%, #53e3a6 100%);
}
4 |
/* Start/stop toggle button styling. */
button {
  cursor: pointer;
  width: 250px;
  padding: 10px 15px;
  border: 0;
  border-radius: 3px;
  outline: 0;
  appearance: none;
  background-color: white;
  color: #53e3a6;
  font-size: 18px;
  transition-duration: 0.25s;
}

/* Subtle grey tint on hover. */
button:hover {
  background-color: rgb(245, 247, 249);
}
21 |
--------------------------------------------------------------------------------
/video.js:
--------------------------------------------------------------------------------
// Global state shared across the p5.js callbacks below.
let videofeed; // p5 capture element wrapping the webcam stream
let posenet; // ml5 PoseNet model instance
let poses = []; // latest results pushed by the PoseNet "pose" event
let started = false; // toggled by start()/stop(); gates draw()
// NOTE(review): this lookup runs at script load and the variable is shadowed
// by the local `audio` inside blur()/removeblur(), so it appears unused —
// confirm before removing.
var audio = document.getElementById("audioElement");
6 |
7 | // p5.js setup() function to set up the canvas for the web cam video stream
// p5.js setup() function to set up the canvas for the web cam video stream.
// Runs once on page load; the draw() loop stays paused (noLoop) until the
// user presses start.
function setup() {
  //creating a canvas by giving the dimensions
  const canvas = createCanvas(500, 500);
  // mount the canvas under the element with id="video"
  canvas.parent("video");

  // open the webcam; the capture must exist before poseNet is created below
  videofeed = createCapture(VIDEO);
  videofeed.size(width, height);
  console.log("setup");

  // setting up the poseNet model to feed in the video feed.
  posenet = ml5.poseNet(videofeed);

  // "pose" fires on every detection; keep only the most recent results
  posenet.on("pose", function (results) {
    poses = results;
  });

  // hide the raw <video> element — frames are drawn onto the canvas instead
  videofeed.hide();
  // pause the draw() loop until start() calls loop()
  noLoop();
}
27 |
28 | // p5.js draw function() is called after the setup function
// p5.js draw() loop — renders the current webcam frame onto the canvas and
// runs the eye-drift check, but only once the user has pressed start.
function draw() {
  if (!started) return;
  image(videofeed, 0, 0, width, height);
  calEyes();
}
35 |
36 | // toggle button for starting the video feed
// Toggle handler: switches the button into "stop" mode and resumes the
// p5.js draw() loop.
// Bug fix: the original only ever called addEventListener, so each
// start/stop cycle stacked another listener and one click eventually fired
// both handlers. Remove this handler before attaching the opposite one.
// NOTE(review): if the very first binding is an inline onclick attribute in
// the HTML, removeEventListener cannot detach it — verify against index.html.
function start() {
  const btn = document.getElementById("startstop");
  select("#startstop").html("stop");
  btn.removeEventListener("click", start);
  btn.addEventListener("click", stop);
  started = true;
  loop(); // resume the p5.js draw() loop
}
43 |
44 | // toggle button for ending the video feed
// Toggle handler: switches the button back into "start" mode, clears the
// blur effect, and pauses the p5.js draw() loop.
// Bug fix: mirror of start() — detach this handler before attaching the
// opposite one so listeners do not accumulate across toggle cycles.
function stop() {
  const btn = document.getElementById("startstop");
  select("#startstop").html("start");
  btn.removeEventListener("click", stop);
  btn.addEventListener("click", start);
  removeblur();
  started = false;
  noLoop(); // pause the p5.js draw() loop
}
52 |
// defining the parameters used for the posenet : the tracking of the eyes
// rightEye/leftEye hold the most recent keypoint positions; the
// default*Position arrays capture the first observed y-value as the
// "good posture" baseline that calEyes() compares against.
var rightEye,
  leftEye,
  defaultRightEyePosition = [],
  defaultLeftEyePosition = [];
58 |
59 | //function to calculate the position of the various keypoints
// Compares the current eye position against the baseline captured on the
// first detection and blurs/unblurs the page accordingly.
//
// The first observed y-coordinate of each eye is stored as the "good
// posture" reference. If the right eye has since drifted more than 20px
// vertically, blur() fires; if it is back within 20px, removeblur() clears
// the effect.
// NOTE(review): only the right eye drives the decision — the left-eye
// baseline is captured but never compared. Confirm whether that is intended.
function calEyes() {
  for (let i = 0; i < poses.length; i++) {
    const keypoints = poses[i].pose.keypoints;
    // PoseNet keypoint order: index 1 = leftEye, index 2 = rightEye
    rightEye = keypoints[2].position;
    leftEye = keypoints[1].position;

    // Capture the baseline exactly once. (The original used `while` loops,
    // but each could only ever push a single element.)
    if (defaultRightEyePosition.length < 1) {
      defaultRightEyePosition.push(rightEye.y);
    }
    if (defaultLeftEyePosition.length < 1) {
      defaultLeftEyePosition.push(leftEye.y);
    }

    // The original repeated this identical check once per keypoint (~17x
    // per pose) inside an inner loop whose index was never used; doing it
    // once per pose is equivalent and avoids the redundant side effects.
    const drift = Math.abs(rightEye.y - defaultRightEyePosition[0]);
    if (drift > 20) {
      blur();
    }
    if (drift < 20) {
      removeblur();
    }
    // drift === 20 deliberately leaves the current state unchanged,
    // matching the original pair of strict comparisons.
  }
}
88 |
89 | //function to blur the background and add audio effect
// Blurs the whole page and starts the alert sound — called by calEyes()
// when the tracked eye position has drifted too far from the baseline.
function blur() {
  const { style } = document.body;
  style.filter = "blur(5px)";
  style.transition = "1s";
  console.log("change");
  const alertSound = document.getElementById("audioElement");
  alertSound.play();
}
97 |
98 | //function to remove the blur effect
// Restores the page to an unblurred state and silences the alert sound.
function removeblur() {
  const alertSound = document.getElementById("audioElement");
  alertSound.pause();
  document.body.style.filter = "blur(0px)";
}
105 |
--------------------------------------------------------------------------------