├── ax02_depth_range_limit
│   └── ax02_depth_range_limit.pde
├── chFAB_modelbuilder_hello
│   ├── .DS_Store
│   └── chFAB_modelbuilder_hello.pde
├── chFAB_scanner
│   ├── .DS_Store
│   └── chFAB_scanner.pde
├── chFAB_scanner_no_cleanup
│   └── chFAB_scanner_no_cleanup.pde
├── chPC_ex01_ellipse
│   └── chPC_ex01_ellipse.pde
├── chPC_ex02_ellipse_translate
│   └── chPC_ex02_ellipse_translate.pde
├── chPC_ex03_receding_ellipse
│   └── chPC_ex03_receding_ellipse.pde
├── chPC_ex04_first_point_cloud
│   └── chPC_ex04_first_point_cloud.pde
├── chPC_ex052_color_point_cloud
│   └── chPC_ex052_color_point_cloud.pde
├── chPC_ex05_rotating_point_cloud
│   └── chPC_ex05_rotating_point_cloud.pde
├── chPC_ex06_floating_cube
│   └── chPC_ex06_floating_cube.pde
├── chPC_ex07_hotpoint
│   └── chPC_ex07_hotpoint.pde
├── chPC_ex08_minim_hello
│   ├── .DS_Store
│   ├── chPC_ex08_minim_hello.pde
│   └── kick.wav
├── chPC_ex09_hotpoint_sound
│   ├── .DS_Store
│   ├── chPC_ex09_hotpoint_sound.pde
│   └── data
│       ├── hat.wav
│       └── kick.wav
├── chPC_ex10_multi_hotpoint
│   ├── .DS_Store
│   ├── Hotpoint.pde
│   ├── chPC_ex10_multi_hotpoint.pde
│   └── data
│       ├── hat.wav
│       └── kick.wav
├── chPC_ex11_obj_hello
│   ├── .DS_Store
│   ├── applet
│   │   ├── OBJLoader.jar
│   │   ├── chPC_ex11_obj_hello.jar
│   │   ├── chPC_ex11_obj_hello.java
│   │   ├── chPC_ex11_obj_hello.pde
│   │   ├── core.jar
│   │   ├── index.html
│   │   ├── loading.gif
│   │   └── opengl.jar
│   ├── chPC_ex11_obj_hello.pde
│   ├── kinect.obj
│   └── kinect.obj.mtl
├── chPC_ex12_obj_in_point_cloud
│   ├── chPC_ex12_obj_in_point_cloud.pde
│   └── kinect.obj
├── chPC_ex13_obj_with_lines
│   ├── chPC_ex13_obj_with_lines.pde
│   └── kinect.obj
├── chPC_ex14_obj_in_point_cloud_peasy
│   ├── chPC_ex14_obj_in_point_cloud_peasy.pde
│   └── kinect.obj
├── chPC_ex15_interactive_peasy
│   ├── Hotpoint.pde
│   ├── chPC_ex15_interactive_peasy.pde
│   └── kinect.obj
├── chSK_advanced_dance_pose
│   ├── .DS_Store
│   ├── SkeletonPoser.pde
│   └── chSK_advanced_dance_pose.pde
├── chSK_basic_dance_pose
│   ├── .DS_Store
│   └── chSK_basic_dance_pose.pde
├── chSK_ex01_one_joint
│   ├── .DS_Store
│   ├── chSK_ex01_one_joint.pde
│   ├── scaledEllipse_big.png
│   └── scaledEllipse_small.png
├── chSK_ex02_skel_anatomy
│   ├── .DS_Store
│   └── chSK_ex02_skel_anatomy.pde
├── chSK_ex03_joint_distance
│   ├── .DS_Store
│   ├── chSK_ex03_joint_distance.pde
│   └── distance_130.70207.png
├── chSK_ex04_joint_distance_art
│   ├── .DS_Store
│   ├── chSK_ex04_joint_distance_art.pde
│   └── joint_art_34.419777.png
├── chSK_exercise_measurement
│   ├── SkeletonRecorder.pde
│   └── chSK_exercise_measurement.pde
├── chSK_exercise_measurement_one_limb
│   ├── .DS_Store
│   ├── SkeletonRecorder.pde
│   ├── chSK_exercise_measurement_one_limb.pde
│   └── exercise_mesaurement.png
├── chSK_joint_orientation
│   ├── .DS_Store
│   ├── chSK_joint_orientation.pde
│   └── joint_orientation_axes_3.6722064.png
├── chSK_joint_orientation_w_model
│   ├── .DS_Store
│   ├── chSK_joint_orientation_w_model.pde
│   └── kinect.obj
├── chSK_orientation_from_vector
│   ├── .DS_Store
│   ├── chSK_orientation_from_vector.pde
│   ├── kinect.obj
│   └── screen-1731.tif
├── chSK_scene_image
│   ├── .DS_Store
│   ├── chSK_scene_image.pde
│   └── empire_state.jpg
├── chSK_scene_image_basic
│   ├── .DS_Store
│   └── chSK_scene_image_basic.pde
├── chSK_scene_map
│   ├── .DS_Store
│   └── chSK_scene_map.pde
├── chSK_scene_map_image
│   └── chSK_scene_map_image.pde
├── chSK_translate_model_to_corner
│   ├── chSK_translate_model_to_corner.pde
│   └── kinect.obj
├── ex01_basic_depth
│   └── ex01_basic_depth.pde
├── ex02_basic_depth_pimage
│   └── ex02_basic_depth_pimage.pde
├── ex03_basic_depth_plus_mouseclick
│   └── ex03_basic_depth_plus_mouseclick.pde
├── ex04_full_resolution_depth
│   └── ex04_full_resolution_depth.pde
├── ex05_real_world_measurement
│   └── ex05_real_world_measurement.pde
├── ex06_closest_pixel
│   └── ex06_closest_pixel.pde
├── ex07_closest_pixel_with_measurements
│   └── ex07_closest_pixel_with_measurements.pde
├── ex07_closest_pixel_with_running_average
│   └── ex07_closest_pixel_with_running_average.pde
├── ex08_basic_drawing
│   └── ex08_basic_drawing.pde
├── ex09_advanced_drawing
│   ├── drawing.png
│   └── ex09_advanced_drawing.pde
└── ex10_basic_minority_report
    ├── .DS_Store
    ├── data
    │   └── image1.jpg
    └── ex10_basic_minority_report.pde
/ax02_depth_range_limit/ax02_depth_range_limit.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | PImage depthImage;
5 |
6 | void setup()
7 | {
8 | size(640, 480);
9 | kinect = new SimpleOpenNI(this);
10 | kinect.enableDepth();
11 | }
12 |
13 | void draw()
14 | {
15 | kinect.update();
16 |
17 | int[] depthValues = kinect.depthMap();
18 | depthImage = kinect.depthImage();
19 | for (int x = 0; x < 640; x++) {
20 | for (int y = 0; y < 480; y++) {
21 | int i = x + y * 640;
22 | int currentDepthValue = depthValues[i];
23 | if (currentDepthValue < 610 || currentDepthValue > 1525) {
24 | depthImage.pixels[i] = 0;
25 | }
26 | }
27 | }
28 |
29 | image(depthImage, 0, 0);
30 | }
31 |
32 | void mousePressed(){
33 | save("depth_limit.png");
34 | }
35 |
36 |
--------------------------------------------------------------------------------
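The sketch above walks the 640 x 480 depth map with the row-major index i = x + y * 640 and blacks out any pixel whose depth in millimeters falls outside the 610-1525 window. A minimal standalone sketch illustrating that same index math and range test; the depth values here are made up (so no Kinect or SimpleOpenNI is needed), not real sensor readings.

// Standalone sketch: row-major indexing and a millimeter depth window,
// using a synthetic depth array instead of kinect.depthMap().
int w = 640;
int h = 480;
int[] depthValues = new int[w * h];

void setup() {
  // fill the array with fake depth readings between 0 and 3000 mm
  for (int i = 0; i < depthValues.length; i++) {
    depthValues[i] = int(random(3000));
  }
  // same index math as the Kinect example above
  int x = 320;
  int y = 240;
  int i = x + y * w;
  boolean inRange = depthValues[i] >= 610 && depthValues[i] <= 1525;
  println("depth at center: " + depthValues[i] + " mm, in range: " + inRange);
}
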
/chFAB_modelbuilder_hello/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chFAB_modelbuilder_hello/.DS_Store
--------------------------------------------------------------------------------
/chFAB_modelbuilder_hello/chFAB_modelbuilder_hello.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | // import both
3 | import unlekker.util.*;
4 | import unlekker.modelbuilder.*;
5 | // declare our model object
6 | UGeometry model;
7 |
8 | float x = 0;
9 |
10 | void setup() {
11 | size(400, 400, OPENGL);
12 | stroke(255, 0, 0);
13 | strokeWeight(3);
14 | fill(255);
15 |
16 | // initialize our model,
17 | model = new UGeometry();
18 | // set shape type to TRIANGLES
19 | // and begin adding geometry
20 | model.beginShape(TRIANGLES);
21 |
22 | // build a triangle out of three vectors
23 | model.addFace(
24 | new UVec3(150, 150, 0),
25 | new UVec3(300, 150, 0),
26 | new UVec3(150, 150, -150)
27 | );
28 |
29 | model.addFace(
30 | new UVec3(300, 150, 0),
31 | new UVec3(300, 150, -150),
32 | new UVec3(150, 150, -150)
33 | );
34 |
35 | model.addFace(
36 | new UVec3(300, 150, -150),
37 | new UVec3(300, 150, 0),
38 | new UVec3(300, 300, 0)
39 | );
40 |
41 | model.addFace(
42 | new UVec3(300, 300, -150),
43 | new UVec3(300, 150, -150),
44 | new UVec3(300, 300, 0)
45 | );
46 |
47 | model.endShape();
48 | }
49 |
50 | void draw() {
51 | background(255);
52 | lights();
53 |
54 | translate(150, 150, -75);
55 | rotateY(x);
56 | x+=0.01;
57 | translate(-150, -150, 75);
58 |
59 | model.draw(this);
60 | }
61 |
62 | void keyPressed() {
63 | model.writeSTL(this, "part_cube.stl");
64 | }
65 |
66 |
--------------------------------------------------------------------------------
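The model above is built entirely out of triangles handed to addFace() as three corner vectors. A small standalone sketch, using only core Processing PVector math (no unlekker classes), showing how a per-facet surface normal can be derived from those same three corners with a cross product; this is the quantity an STL file records for each facet. The corner values are copied from the first addFace() call above.

// Standalone sketch: compute a facet normal from three triangle corners.
void setup() {
  PVector a = new PVector(150, 150, 0);
  PVector b = new PVector(300, 150, 0);
  PVector c = new PVector(150, 150, -150);

  // two edge vectors leaving corner a
  PVector ab = PVector.sub(b, a);
  PVector ac = PVector.sub(c, a);

  // the normal is their cross product, scaled to unit length
  PVector normal = ab.cross(ac);
  normal.normalize();
  println("facet normal: " + normal);
}
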
/chFAB_scanner/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chFAB_scanner/.DS_Store
--------------------------------------------------------------------------------
/chFAB_scanner/chFAB_scanner.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import unlekker.util.*;
3 | import unlekker.modelbuilder.*;
4 | import SimpleOpenNI.*;
5 | SimpleOpenNI kinect;
6 |
7 | boolean scanning = false;
8 |
9 | int maxZ = 2000;
10 | int spacing = 3;
11 |
12 | UGeometry model;
13 | UVertexList vertexList;
14 |
15 | void setup() {
16 | size(1024, 768, OPENGL);
17 | kinect = new SimpleOpenNI(this);
18 | kinect.enableDepth();
19 |
20 | model = new UGeometry();
21 | vertexList = new UVertexList();
22 | }
23 |
24 | void draw() {
25 | background(0);
26 |
27 | kinect.update();
28 |
29 | translate(width/2, height/2, -1000);
30 | rotateX(radians(180));
31 |
32 | if (scanning) {
33 | model.beginShape(TRIANGLES);
34 | }
35 |
36 | PVector[] depthPoints = kinect.depthMapRealWorld();
37 |
38 | // cleanup pass
39 | for (int y = 0; y < 480; y+=spacing) {
40 | for (int x = 0; x < 640; x+= spacing) {
41 | int i = y * 640 + x;
42 | PVector p = depthPoints[i];
43 | // if the point is on the edge or if it has no depth
44 | if (p.z < 10 || p.z > maxZ || y == 0 || y == 480 - spacing || x == 0 || x == 640 - spacing) {
45 | // replace it with a point at the depth of the backplane (i.e. maxZ)
46 | PVector realWorld = new PVector();
47 | PVector projective = new PVector(x, y, maxZ);
48 | // to get the point in the right place, we need to translate
49 | // from x/y to realworld coordinates to match our other points:
50 | kinect.convertProjectiveToRealWorld(projective, realWorld);
51 |
52 | depthPoints[i] = realWorld;
53 | }
54 | }
55 | }
56 |
57 | for (int y = 0; y < 480 - spacing; y+=spacing) {
58 | for (int x = 0; x < 640 -spacing; x+= spacing) {
59 | int i = y * 640 + x;
60 |
61 | if (scanning) {
62 | int nw = i;
63 | int ne = nw + spacing;
64 | int sw = i + 640 * spacing;
65 | int se = sw + spacing;
66 |
67 | if (!allZero(depthPoints[nw]) && !allZero(depthPoints[ne]) && !allZero(depthPoints[sw]) && !allZero(depthPoints[se])) {
68 |
69 | model.addFace(new UVec3(depthPoints[nw].x, depthPoints[nw].y, depthPoints[nw].z),
70 | new UVec3(depthPoints[ne].x, depthPoints[ne].y, depthPoints[ne].z),
71 | new UVec3(depthPoints[sw].x, depthPoints[sw].y, depthPoints[sw].z));
72 |
73 | model.addFace(new UVec3(depthPoints[ne].x, depthPoints[ne].y, depthPoints[ne].z),
74 | new UVec3(depthPoints[se].x, depthPoints[se].y, depthPoints[se].z ),
75 | new UVec3(depthPoints[sw].x, depthPoints[sw].y, depthPoints[sw].z));
76 | }
77 | }
78 | else {
79 | stroke(255);
80 | PVector currentPoint = depthPoints[i];
81 | if (currentPoint.z < maxZ) {
82 | point(currentPoint.x, currentPoint.y, currentPoint.z);
83 | }
84 | }
85 | }
86 | }
87 |
88 |
89 | if (scanning) {
90 | model.calcBounds();
91 | model.translate(0, 0, -maxZ);
92 |
93 | float modelWidth = (model.bb.max.x - model.bb.min.x);
94 | float modelHeight = (model.bb.max.y - model.bb.min.y);
95 |
96 | UGeometry backing = Primitive.box(modelWidth/2, modelHeight/2, 10);
97 | model.add(backing);
98 |
99 | model.scale(0.01);
100 | model.rotateY(radians(180));
101 | model.toOrigin();
102 |
103 | model.endShape();
104 | model.writeSTL(this, "scan_"+random(1000)+".stl");
105 | scanning = false;
106 | }
107 | }
108 |
109 |
110 | boolean allZero(PVector p) {
111 | return (p.x == 0 && p.y == 0 && p.z == 0);
112 | }
113 |
114 | void keyPressed() {
115 | println(maxZ);
116 | if (keyCode == UP) {
117 | maxZ += 100;
118 | }
119 | if (keyCode == DOWN) {
120 | maxZ -= 100;
121 | }
122 | if (key == ' ') {
123 | scanning = true;
124 | model.reset();
125 | }
126 | }
127 |
128 |
--------------------------------------------------------------------------------
/chFAB_scanner_no_cleanup/chFAB_scanner_no_cleanup.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import unlekker.util.*;
3 | import unlekker.modelbuilder.*;
4 | import SimpleOpenNI.*;
5 | SimpleOpenNI kinect;
6 |
7 | boolean scanning = false;
8 |
9 | int maxZ = 2000;
10 | int spacing = 3;
11 |
12 | UGeometry model;
13 | UVertexList vertexList;
14 |
15 | void setup() {
16 | size(1024, 768, OPENGL);
17 | kinect = new SimpleOpenNI(this);
18 | kinect.enableDepth();
19 |
20 | model = new UGeometry();
21 | vertexList = new UVertexList();
22 | }
23 |
24 | void draw() {
25 | background(0);
26 |
27 | kinect.update();
28 |
29 | translate(width/2, height/2, -1000);
30 | rotateX(radians(180));
31 |
32 | PVector[] depthPoints = kinect.depthMapRealWorld();
33 |
34 | if (scanning) {
35 | model.beginShape(TRIANGLES);
36 | fill(255);
37 | text("PERFORMING SCAN...", 5, 10);
38 | }
39 |
40 | for (int y = 0; y < 480 -spacing; y+=spacing) {
41 | for (int x = 0; x < 640 -spacing; x+= spacing) {
42 | int i = y * 640 + x;
43 |
44 | int nw = i;
45 | int ne = nw + spacing;
46 | int sw = i + 640 * spacing;
47 | int se = sw + spacing;
48 |
49 | if (scanning) {
50 | model.addFace(new UVec3(depthPoints[nw].x, depthPoints[nw].y, depthPoints[nw].z),
51 | new UVec3(depthPoints[ne].x, depthPoints[ne].y, depthPoints[ne].z),
52 | new UVec3(depthPoints[sw].x, depthPoints[sw].y, depthPoints[sw].z));
53 |
54 | model.addFace(new UVec3(depthPoints[ne].x, depthPoints[ne].y, depthPoints[ne].z),
55 | new UVec3(depthPoints[se].x, depthPoints[se].y, depthPoints[se].z),
56 | new UVec3(depthPoints[sw].x, depthPoints[sw].y, depthPoints[sw].z));
57 |
58 |
59 |
60 | }
61 | else {
62 | stroke(255);
63 | PVector currentPoint = depthPoints[i];
64 | point(currentPoint.x, currentPoint.y, currentPoint.z);
65 | }
66 | }
67 | }
68 |
69 | if (scanning) {
70 | model.endShape();
71 | model.writeSTL(this, "scan_"+random(1000)+".stl");
72 | scanning = false;
73 | }
74 | }
75 |
76 | void keyPressed() {
77 | if (key == ' ') {
78 | scanning = true;
79 | }
80 | }
81 |
82 |
--------------------------------------------------------------------------------
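Both scanner sketches march across the depth map in steps of spacing pixels and stitch each point to its east, south, and southeast neighbors to form two triangles per grid cell. A standalone sketch showing just that index arithmetic for the 640-pixel-wide row-major array; the x and y values are arbitrary examples, and no Kinect is required.

// Standalone sketch: the nw/ne/sw/se index math used by the scanner examples.
int spacing = 3;

void setup() {
  int x = 100;
  int y = 200;

  int nw = y * 640 + x;          // this point
  int ne = nw + spacing;         // neighbor to the east
  int sw = nw + 640 * spacing;   // neighbor to the south
  int se = sw + spacing;         // neighbor to the south-east

  println("nw " + nw + ", ne " + ne + ", sw " + sw + ", se " + se);
}
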
/chPC_ex01_ellipse/chPC_ex01_ellipse.pde:
--------------------------------------------------------------------------------
1 | void setup(){
2 | size(640, 480, P3D);
3 | }
4 |
5 | void draw(){
6 | background(0);
7 | ellipse(width/2, height/2, 100, 100);
8 | }
9 |
--------------------------------------------------------------------------------
/chPC_ex02_ellipse_translate/chPC_ex02_ellipse_translate.pde:
--------------------------------------------------------------------------------
1 | void setup(){
2 | size(640, 480, P3D);
3 | }
4 |
5 | void draw(){
6 | background(0);
7 |
8 | // translate moves the position from which we draw
9 | // 100 in the third argument moves to z = 100
10 | translate(0,0,100);
11 | ellipse(width/2, height/2, 100, 100);
12 | }
13 |
--------------------------------------------------------------------------------
/chPC_ex03_receding_ellipse/chPC_ex03_receding_ellipse.pde:
--------------------------------------------------------------------------------
1 | int z = 200;
2 |
3 | void setup(){
4 | size(640, 480, P3D);
5 | }
6 |
7 | void draw(){
8 | background(0);
9 |
10 | // move to a further away z each time
11 | translate(0,0,z);
12 | ellipse(width/2, height/2, 100, 100);
13 |
14 | z = z - 1;
15 | }
16 |
--------------------------------------------------------------------------------
/chPC_ex04_first_point_cloud/chPC_ex04_first_point_cloud.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | SimpleOpenNI kinect;
4 |
5 | void setup() {
6 | size(1024, 768, OPENGL);
7 | kinect = new SimpleOpenNI(this);
8 | kinect.enableDepth();
9 | }
10 |
11 | void draw() {
12 | background(0);
13 |
14 | kinect.update();
15 |
16 | // prepare to draw centered in x-y
17 | // pull it 1000 pixels closer on z
18 | translate(width/2, height/2, -1000);
19 | rotateX(radians(180)); // flip y-axis from "realWorld"
20 |
21 |
22 | stroke(255);
23 |
24 | // get the depth data as 3D points
25 | PVector[] depthPoints = kinect.depthMapRealWorld();
26 | for(int i = 0; i < depthPoints.length; i++){
27 | // get the current point from the point array
28 | PVector currentPoint = depthPoints[i];
29 | // draw the current point
30 | point(currentPoint.x, currentPoint.y, currentPoint.z);
31 | }
32 | }
33 |
34 |
--------------------------------------------------------------------------------
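depthMapRealWorld() returns one PVector per depth pixel in row-major order, 640 points per row. A standalone sketch (pure index arithmetic, no Kinect) recovering the pixel column and row from a flat array index, which is handy when you want to relate a 3D point back to its location in the depth image; the index used here is just an example value.

// Standalone sketch: map a flat depth-array index back to (x, y) pixel coordinates.
void setup() {
  int i = 153923;          // any index into a 640 x 480 array
  int x = i % 640;         // column
  int y = i / 640;         // row (integer division)
  println("index " + i + " is pixel (" + x + ", " + y + ")");
}
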
/chPC_ex052_color_point_cloud/chPC_ex052_color_point_cloud.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 | float rotation = 0;
6 |
7 | void setup() {
8 | size(1024, 768, OPENGL);
9 | kinect = new SimpleOpenNI(this);
10 | kinect.enableDepth();
11 | // access the color camera
12 | kinect.enableRGB();
13 | // tell OpenNI to line-up the color pixels
14 | // with the depth data
15 | kinect.alternativeViewPointDepthToImage();
16 |
17 | }
18 |
19 | void draw() {
20 | background(0);
21 | kinect.update();
22 | // load the color image from the Kinect
23 | PImage rgbImage = kinect.rgbImage();
24 |
25 | translate(width/2, height/2, -250);
26 | rotateX(radians(180));
27 | translate(0, 0, 1000);
28 | rotateY(radians(rotation));
29 | rotation++;
30 |
31 | PVector[] depthPoints = kinect.depthMapRealWorld();
32 | // don't skip any depth points
33 | for (int i = 0; i < depthPoints.length; i+=1) {
34 | PVector currentPoint = depthPoints[i];
35 | // set the stroke color based on the color pixel
36 | stroke(rgbImage.pixels[i]);
37 | point(currentPoint.x, currentPoint.y, currentPoint.z);
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/chPC_ex05_rotating_point_cloud/chPC_ex05_rotating_point_cloud.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 |
6 | // variable to hold our current rotation
7 | // represented in degrees
8 | float rotation = 0;
9 |
10 | void setup() {
11 | size(1024, 768, OPENGL);
12 | kinect = new SimpleOpenNI(this);
13 | kinect.enableDepth();
14 | }
15 |
16 | void draw() {
17 | background(0);
18 | kinect.update();
19 |
20 | // prepare to draw centered in x-y
21 | // pull it 1000 pixels closer on z
22 | translate(width/2, height/2, -1000);
23 | // flip the point cloud vertically:
24 | rotateX(radians(180));
25 |
26 | // move the center of rotation
27 | // to inside the point cloud
28 | translate(0, 0, 1000);
29 |
30 | // rotate about the y-axis
31 | // and bump the rotation
32 | rotateY(radians(rotation));
33 | rotation++;
34 |
35 | stroke(255);
36 |
37 | PVector[] depthPoints = kinect.depthMapRealWorld();
38 |
39 | // notice: "i+=10"
40 | // only draw every 10th point to make things faster
41 | for (int i = 0; i < depthPoints.length; i+=10) {
42 | PVector currentPoint = depthPoints[i];
43 | point(currentPoint.x, currentPoint.y, currentPoint.z);
44 | }
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/chPC_ex06_floating_cube/chPC_ex06_floating_cube.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 |
6 | float rotation = 0;
7 |
8 | // set the box size
9 | int boxSize = 150;
10 | // a vector holding the center of the box
11 | PVector boxCenter = new PVector(0, 0, 600);
12 |
13 | void setup() {
14 | size(1024, 768, OPENGL);
15 | kinect = new SimpleOpenNI(this);
16 | kinect.enableDepth();
17 | }
18 |
19 | void draw() {
20 | background(0);
21 | kinect.update();
22 |
23 | translate(width/2, height/2, -1000);
24 | rotateX(radians(180));
25 |
26 | translate(0, 0, 1000);
27 |
28 | rotateY(radians(map(mouseX, 0, width, -180, 180)));
29 |
30 | stroke(255);
31 |
32 | PVector[] depthPoints = kinect.depthMapRealWorld();
33 |
34 | for (int i = 0; i < depthPoints.length; i+=10) {
35 | PVector currentPoint = depthPoints[i];
36 |
37 | point(currentPoint.x, currentPoint.y, currentPoint.z);
38 | }
39 |
40 | // move to the box center
41 | translate(boxCenter.x, boxCenter.y, boxCenter.z);
42 | // set line color to red
43 | stroke(255, 0, 0);
44 | // leave the box unfilled so we can see through it
45 | noFill();
46 | // draw the box
47 | box(boxSize);
48 | }
49 |
50 |
--------------------------------------------------------------------------------
/chPC_ex07_hotpoint/chPC_ex07_hotpoint.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 |
6 | float rotation = 0;
7 |
8 | int boxSize = 150;
9 | PVector boxCenter = new PVector(0, 0, 600);
10 |
11 | // this will be used for zooming
12 | // start at normal
13 | float s = 1;
14 |
15 | void setup() {
16 | size(1024, 768, OPENGL);
17 | kinect = new SimpleOpenNI(this);
18 | kinect.enableDepth();
19 | }
20 |
21 | void draw() {
22 | background(0);
23 | kinect.update();
24 |
25 | translate(width/2, height/2, -1000);
26 | rotateX(radians(180));
27 |
28 | // bumped up the translation
29 | // so that scale is better centered
30 | translate(0, 0, 1400);
31 | rotateY(radians(map(mouseX, 0, width, -180, 180)));
32 |
33 | // make everything bigger,
34 | // i.e. zoom in
35 | translate(0,0,s*-1000);
36 | scale(s);
37 |
38 | println(s);
39 |
40 | stroke(255);
41 |
42 | PVector[] depthPoints = kinect.depthMapRealWorld();
43 |
44 | // initialize a variable
45 | // for storing the total
46 | // points we find inside the box
47 | // on this frame
48 | int depthPointsInBox = 0;
49 |
50 | for (int i = 0; i < depthPoints.length; i+=10) {
51 | PVector currentPoint = depthPoints[i];
52 |
53 | if (currentPoint.x > boxCenter.x - boxSize/2 && currentPoint.x < boxCenter.x + boxSize/2) {
54 | if (currentPoint.y > boxCenter.y - boxSize/2 && currentPoint.y < boxCenter.y + boxSize/2) {
55 | if (currentPoint.z > boxCenter.z - boxSize/2 && currentPoint.z < boxCenter.z + boxSize/2) {
56 | depthPointsInBox++;
57 | }
58 | }
59 | }
60 |
61 | point(currentPoint.x, currentPoint.y, currentPoint.z);
62 | }
63 |
64 | println(depthPointsInBox);
65 |
66 | // set the box color's transparency
67 | // 0 is transparent, 1000 points is fully opaque red
68 | float boxAlpha = map(depthPointsInBox, 0, 1000, 0, 255);
69 |
70 | translate(boxCenter.x, boxCenter.y, boxCenter.z);
71 |
72 | // the fourth argument to fill() is "alpha"
73 | // it determines the color's opacity
74 | // we set it based on the number of points
75 | fill(255, 0, 0, boxAlpha);
76 | stroke(255, 0, 0);
77 | box(boxSize);
78 | }
79 |
80 | // use keys to control zoom
81 | // up-arrow zooms in
82 | // down arrow zooms out
83 | // s gets passed to scale() in draw()
84 | void keyPressed(){
85 | if(keyCode == 38){
86 | s = s + 0.01;
87 | }
88 | if(keyCode == 40){
89 | s = s - 0.01;
90 | }
91 | }
92 |
93 | void mousePressed(){
94 | save("touchedPoint.png");
95 | }
96 |
97 |
--------------------------------------------------------------------------------
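The nested if statements above test whether a depth point falls inside a cube of side boxSize centered on boxCenter. The same test written as a standalone boolean helper, with a hand-made point instead of Kinect data; insideBox is an illustrative name, not part of the original sketch.

// Standalone sketch: the axis-aligned box test used by the hotpoint example.
boolean insideBox(PVector p, PVector center, float size) {
  return p.x > center.x - size/2 && p.x < center.x + size/2 &&
         p.y > center.y - size/2 && p.y < center.y + size/2 &&
         p.z > center.z - size/2 && p.z < center.z + size/2;
}

void setup() {
  PVector boxCenter = new PVector(0, 0, 600);
  PVector testPoint = new PVector(10, -20, 650);
  println(insideBox(testPoint, boxCenter, 150)); // prints true
}
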
/chPC_ex08_minim_hello/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex08_minim_hello/.DS_Store
--------------------------------------------------------------------------------
/chPC_ex08_minim_hello/chPC_ex08_minim_hello.pde:
--------------------------------------------------------------------------------
1 | import ddf.minim.*;
2 |
3 | Minim minim;
4 | AudioPlayer player;
5 |
6 | void setup() {
7 | minim = new Minim(this);
8 | player = minim.loadFile("kick.wav");
9 | player.play();
10 | }
11 |
12 | void draw() {
13 | }
14 |
15 | void stop()
16 | {
17 | player.close();
18 | minim.stop();
19 |
20 | super.stop();
21 | }
22 |
23 |
--------------------------------------------------------------------------------
/chPC_ex08_minim_hello/kick.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex08_minim_hello/kick.wav
--------------------------------------------------------------------------------
/chPC_ex09_hotpoint_sound/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex09_hotpoint_sound/.DS_Store
--------------------------------------------------------------------------------
/chPC_ex09_hotpoint_sound/chPC_ex09_hotpoint_sound.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | import ddf.minim.*;
4 |
5 | // minim objects
6 | Minim minim;
7 | AudioPlayer player;
8 |
9 | SimpleOpenNI kinect;
10 |
11 | float rotation = 0;
12 |
13 | // used for edge detection
14 | boolean wasJustInBox = false;
15 |
16 | int boxSize = 150;
17 | PVector boxCenter = new PVector(0, 0, 600);
18 |
19 | float s = 1;
20 |
21 | void setup() {
22 | size(1024, 768, OPENGL);
23 | kinect = new SimpleOpenNI(this);
24 | kinect.enableDepth();
25 |
26 | // initialize Minim
27 | // and AudioPlayer
28 | minim = new Minim(this);
29 | player = minim.loadFile("kick.wav");
30 | }
31 |
32 | void draw() {
33 | background(0);
34 | kinect.update();
35 |
36 | translate(width/2, height/2, -1000);
37 | rotateX(radians(180));
38 |
39 | translate(0, 0, 1400);
40 | rotateY(radians(map(mouseX, 0, width, -180, 180)));
41 |
42 | translate(0, 0, s*-1000);
43 | scale(s);
44 |
45 | stroke(255);
46 |
47 | PVector[] depthPoints = kinect.depthMapRealWorld();
48 | int depthPointsInBox = 0;
49 |
50 | for (int i = 0; i < depthPoints.length; i+=10) {
51 | PVector currentPoint = depthPoints[i];
52 |
53 | if (currentPoint.x > boxCenter.x - boxSize/2 && currentPoint.x < boxCenter.x + boxSize/2) {
54 | if (currentPoint.y > boxCenter.y - boxSize/2 && currentPoint.y < boxCenter.y + boxSize/2) {
55 | if (currentPoint.z > boxCenter.z - boxSize/2 && currentPoint.z < boxCenter.z + boxSize/2) {
56 | depthPointsInBox++;
57 | }
58 | }
59 | }
60 |
61 | point(currentPoint.x, currentPoint.y, currentPoint.z);
62 | }
63 |
64 | float boxAlpha = map(depthPointsInBox, 0, 1000, 0, 255);
65 |
66 | // edge detection
67 | // are we in the box this time
68 | boolean isInBox = (depthPointsInBox > 0);
69 |
70 | // if we just moved in from outside
71 | // start it playing
72 | if (isInBox && !wasJustInBox) {
73 | player.play();
74 | }
75 |
76 | // if it's played all the way through
77 | // pause and rewind
78 | if (!player.isPlaying()) {
79 | player.rewind();
80 | player.pause();
81 | }
82 |
83 | // save current status
84 | // for next time
85 | wasJustInBox = isInBox;
86 |
87 | translate(boxCenter.x, boxCenter.y, boxCenter.z);
88 |
89 | fill(255, 0, 0, boxAlpha);
90 | stroke(255, 0, 0);
91 | box(boxSize);
92 | }
93 |
94 | void stop()
95 | {
96 | player.close();
97 | minim.stop();
98 | super.stop();
99 | }
100 |
101 | // use keys to control zoom
102 | // up-arrow zooms in
103 | // down arrow zooms out
104 | // s gets passed to scale() in draw()
105 | void keyPressed() {
106 | if (keyCode == 38) {
107 | s = s + 0.01;
108 | }
109 | if (keyCode == 40) {
110 | s = s - 0.01;
111 | }
112 | }
113 |
114 |
--------------------------------------------------------------------------------
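The sketch only triggers the sample when the box goes from empty to occupied, by comparing isInBox against wasJustInBox each frame. A standalone sketch isolating that rising-edge pattern with a simulated sequence of frames; the frames array is invented for illustration, and no Kinect or Minim is needed.

// Standalone sketch: rising-edge detection, as used to trigger the sound only once.
boolean wasJustInBox = false;

void setup() {
  boolean[] frames = { false, false, true, true, true, false, true };
  for (int i = 0; i < frames.length; i++) {
    boolean isInBox = frames[i];
    if (isInBox && !wasJustInBox) {
      println("frame " + i + ": trigger!"); // fires on frames 2 and 6 only
    }
    wasJustInBox = isInBox;
  }
}
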
/chPC_ex09_hotpoint_sound/data/hat.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex09_hotpoint_sound/data/hat.wav
--------------------------------------------------------------------------------
/chPC_ex09_hotpoint_sound/data/kick.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex09_hotpoint_sound/data/kick.wav
--------------------------------------------------------------------------------
/chPC_ex10_multi_hotpoint/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex10_multi_hotpoint/.DS_Store
--------------------------------------------------------------------------------
/chPC_ex10_multi_hotpoint/Hotpoint.pde:
--------------------------------------------------------------------------------
1 | class Hotpoint {
2 | PVector center;
3 | color fillColor;
4 | color strokeColor;
5 | int size;
6 | int pointsIncluded;
7 | int maxPoints;
8 | boolean wasJustHit;
9 | int threshold;
10 |
11 |
12 | Hotpoint(float centerX, float centerY, float centerZ, int boxSize) {
13 | center = new PVector(centerX, centerY, centerZ);
14 | size = boxSize;
15 | pointsIncluded = 0;
16 | maxPoints = 1000;
17 | threshold = 0;
18 |
19 | fillColor = strokeColor = color(random(255), random(255), random(255));
20 | }
21 |
22 | void setThreshold( int newThreshold ){
23 | threshold = newThreshold;
24 | }
25 |
26 | void setMaxPoints(int newMaxPoints) {
27 | maxPoints = newMaxPoints;
28 | }
29 |
30 | void setColor(float red, float blue, float green){
31 | fillColor = strokeColor = color(red, blue, green);
32 | }
33 |
34 | boolean check(PVector point) {
35 | boolean result = false;
36 |
37 | if (point.x > center.x - size/2 && point.x < center.x + size/2) {
38 | if (point.y > center.y - size/2 && point.y < center.y + size/2) {
39 | if (point.z > center.z - size/2 && point.z < center.z + size/2) {
40 | result = true;
41 | pointsIncluded++;
42 | }
43 | }
44 | }
45 |
46 | return result;
47 | }
48 |
49 | void draw() {
50 | pushMatrix();
51 | translate(center.x, center.y, center.z);
52 |
53 | fill(red(fillColor), blue(fillColor), green(fillColor), 255 * percentIncluded());
54 | stroke(red(strokeColor), blue(strokeColor), green(strokeColor), 255);
55 | box(size);
56 | popMatrix();
57 | }
58 |
59 | float percentIncluded() {
60 | return map(pointsIncluded, 0, maxPoints, 0, 1);
61 | }
62 |
63 |
64 | boolean currentlyHit() {
65 | return (pointsIncluded > threshold);
66 | }
67 |
68 |
69 | boolean isHit() {
70 | return currentlyHit() && !wasJustHit;
71 | }
72 |
73 | void clear() {
74 | wasJustHit = currentlyHit();
75 | pointsIncluded = 0;
76 | }
77 | }
--------------------------------------------------------------------------------
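A minimal usage sketch for the Hotpoint class above, assuming it is added as a second tab; the synthetic PVectors stand in for the Kinect point cloud that chPC_ex10_multi_hotpoint.pde (below) feeds it, so this runs without any hardware and shows the check / isHit / clear cycle on its own.

// Standalone usage sketch for the Hotpoint tab, fed with hand-made points.
Hotpoint trigger;

void setup() {
  trigger = new Hotpoint(0, 0, 600, 150);
  trigger.setThreshold(2); // require more than two points inside to count as a hit

  PVector[] fakeCloud = {
    new PVector(0, 0, 600), new PVector(10, 10, 610),
    new PVector(20, -10, 590), new PVector(500, 500, 2000)
  };
  for (int i = 0; i < fakeCloud.length; i++) {
    trigger.check(fakeCloud[i]);
  }
  println("points inside: " + trigger.pointsIncluded); // 3
  println("hit this frame: " + trigger.isHit());       // true
  trigger.clear();                                      // reset for the next frame
}
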
/chPC_ex10_multi_hotpoint/chPC_ex10_multi_hotpoint.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | import ddf.minim.*;
4 |
5 | SimpleOpenNI kinect;
6 |
7 | float rotation = 0;
8 |
9 | // two AudioPlayer objects this time
10 | Minim minim;
11 | AudioPlayer kick;
12 | AudioPlayer snare;
13 |
14 | // declare our two hotpoint objects
15 | Hotpoint snareTrigger;
16 | Hotpoint kickTrigger;
17 |
18 | float s = 1;
19 |
20 | void setup() {
21 | size(1024, 768, OPENGL);
22 | kinect = new SimpleOpenNI(this);
23 | kinect.enableDepth();
24 |
25 | minim = new Minim(this);
26 | // load both audio files
27 | snare = minim.loadFile("hat.wav");
28 | kick = minim.loadFile("kick.wav");
29 |
30 | // initialize hotpoints with their origins (x,y,z) and their size
31 | snareTrigger = new Hotpoint(200, 0, 600, 150);
32 | kickTrigger = new Hotpoint(-200, 0, 600, 150);
33 |
34 | }
35 |
36 | void draw() {
37 | background(0);
38 | kinect.update();
39 |
40 | translate(width/2, height/2, -1000);
41 | rotateX(radians(180));
42 |
43 | translate(0, 0, 1400);
44 | rotateY(radians(map(mouseX, 0, width, -180, 180)));
45 |
46 | translate(0, 0, s*-1000);
47 | scale(s);
48 |
49 |
50 | stroke(255);
51 |
52 | PVector[] depthPoints = kinect.depthMapRealWorld();
53 |
54 | for (int i = 0; i < depthPoints.length; i+=10) {
55 | PVector currentPoint = depthPoints[i];
56 |
57 | // have each hotpoint check to see
58 | // if it includes the currentPoint
59 | snareTrigger.check(currentPoint);
60 | kickTrigger.check(currentPoint);
61 |
62 | point(currentPoint.x, currentPoint.y, currentPoint.z);
63 | }
64 |
65 | println(snareTrigger.pointsIncluded);
66 |
67 | if(snareTrigger.isHit()) {
68 | snare.play();
69 | }
70 |
71 | if(!snare.isPlaying()) {
72 | snare.rewind();
73 | snare.pause();
74 | }
75 |
76 | if (kickTrigger.isHit()) {
77 | kick.play();
78 | }
79 |
80 | if(!kick.isPlaying()) {
81 | kick.rewind();
82 | kick.pause();
83 | }
84 |
85 | // display each hotpoint
86 | // and clear its points
87 | snareTrigger.draw();
88 | snareTrigger.clear();
89 |
90 | kickTrigger.draw();
91 | kickTrigger.clear();
92 | }
93 |
94 | void stop()
95 | {
96 | // make sure to close
97 | // both AudioPlayer objects
98 | kick.close();
99 | snare.close();
100 |
101 | minim.stop();
102 | super.stop();
103 | }
104 |
105 | void keyPressed() {
106 | if (keyCode == 38) {
107 | s = s + 0.01;
108 | }
109 | if (keyCode == 40) {
110 | s = s - 0.01;
111 | }
112 | }
113 |
114 |
--------------------------------------------------------------------------------
/chPC_ex10_multi_hotpoint/data/hat.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex10_multi_hotpoint/data/hat.wav
--------------------------------------------------------------------------------
/chPC_ex10_multi_hotpoint/data/kick.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex10_multi_hotpoint/data/kick.wav
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex11_obj_hello/.DS_Store
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/OBJLoader.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex11_obj_hello/applet/OBJLoader.jar
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/chPC_ex11_obj_hello.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex11_obj_hello/applet/chPC_ex11_obj_hello.jar
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/chPC_ex11_obj_hello.java:
--------------------------------------------------------------------------------
1 | import processing.core.*;
2 | import processing.xml.*;
3 |
4 | import processing.opengl.*;
5 | import saito.objloader.*;
6 |
7 | import java.applet.*;
8 | import java.awt.Dimension;
9 | import java.awt.Frame;
10 | import java.awt.event.MouseEvent;
11 | import java.awt.event.KeyEvent;
12 | import java.awt.event.FocusEvent;
13 | import java.awt.Image;
14 | import java.io.*;
15 | import java.net.*;
16 | import java.text.*;
17 | import java.util.*;
18 | import java.util.zip.*;
19 | import java.util.regex.*;
20 |
21 | public class chPC_ex11_obj_hello extends PApplet {
22 |
23 |
24 |
25 |
26 | // declare an OBJModel object
27 | OBJModel model;
28 |
29 | float rotateX;
30 | float rotateY;
31 |
32 | public void setup() {
33 | size(640, 480, OPENGL);
34 |
35 | // load the model file
36 | // use triangles as the basic geometry
37 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
38 |
39 | // tell the model to translate itself
40 | // to be centered at 0,0
41 | model.translateToCenter();
42 | noStroke();
43 | }
44 |
45 | public void draw() {
46 | background(255);
47 |
48 | // turn on the lights
49 | lights();
50 |
51 | translate(width/2, height/2, 0);
52 |
53 | rotateX(rotateY);
54 | rotateY(rotateX);
55 |
56 | // tell the model to draw itself
57 | model.draw();
58 | }
59 |
60 | public void mouseDragged() {
61 | rotateX += (mouseX - pmouseX) * 0.01f;
62 | rotateY -= (mouseY - pmouseY) * 0.01f;
63 | }
64 | static public void main(String args[]) {
65 | PApplet.main(new String[] { "--bgcolor=#c0c0c0", "chPC_ex11_obj_hello" });
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/chPC_ex11_obj_hello.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import saito.objloader.*;
3 |
4 | // declare an OBJModel object
5 | OBJModel model;
6 |
7 | float rotateX;
8 | float rotateY;
9 |
10 | void setup() {
11 | size(640, 480, OPENGL);
12 |
13 | // load the model file
14 | // use triangles as the basic geometry
15 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
16 |
17 | // tell the model to translate itself
18 | // to be centered at 0,0
19 | model.translateToCenter();
20 | noStroke();
21 | }
22 |
23 | void draw() {
24 | background(255);
25 |
26 | // turn on the lights
27 | lights();
28 |
29 | translate(width/2, height/2, 0);
30 |
31 | rotateX(rotateY);
32 | rotateY(rotateX);
33 |
34 | // tell the model to draw itself
35 | model.draw();
36 | }
37 |
38 | void mouseDragged() {
39 | rotateX += (mouseX - pmouseX) * 0.01;
40 | rotateY -= (mouseY - pmouseY) * 0.01;
41 | }
42 |
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/core.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex11_obj_hello/applet/core.jar
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/index.html:
--------------------------------------------------------------------------------
(Processing-exported applet loader page. The HTML markup did not survive this dump; the only recoverable text is the page title "chPC_ex11_obj_hello : Built with Processing", a "Source code: chPC_ex11_obj_hello" link, and a "Built with Processing" footer.)
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/loading.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex11_obj_hello/applet/loading.gif
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/applet/opengl.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chPC_ex11_obj_hello/applet/opengl.jar
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/chPC_ex11_obj_hello.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import saito.objloader.*;
3 |
4 | // declare an OBJModel object
5 | OBJModel model;
6 |
7 | float rotateX;
8 | float rotateY;
9 |
10 | void setup() {
11 | size(640, 480, OPENGL);
12 |
13 | // load the model file
14 | // use triangles as the basic geometry
15 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
16 |
17 | // tell the model to translate itself
18 | // to be centered at 0,0
19 | model.translateToCenter();
20 | noStroke();
21 | }
22 |
23 | void draw() {
24 | background(255);
25 |
26 | // turn on the lights
27 | lights();
28 |
29 | translate(width/2, height/2, 0);
30 |
31 | rotateX(rotateY);
32 | rotateY(rotateX);
33 |
34 | // tell the model to draw itself
35 | model.draw();
36 | }
37 |
38 | void mouseDragged() {
39 | rotateX += (mouseX - pmouseX) * 0.01;
40 | rotateY -= (mouseY - pmouseY) * 0.01;
41 | }
42 |
--------------------------------------------------------------------------------
/chPC_ex11_obj_hello/kinect.obj.mtl:
--------------------------------------------------------------------------------
1 | #
2 | # Wavefront material file
3 | # Converted by Meshlab Group
4 | #
5 |
6 | newmtl material_0
7 | Ka 0.200000 0.200000 0.200000
8 | Kd 1.000000 1.000000 1.000000
9 | Ks 1.000000 1.000000 1.000000
10 | Tr 1.000000
11 | illum 2
12 | Ns 0.000000
13 |
14 |
--------------------------------------------------------------------------------
/chPC_ex12_obj_in_point_cloud/chPC_ex12_obj_in_point_cloud.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | import saito.objloader.*;
4 |
5 | SimpleOpenNI kinect;
6 | OBJModel model;
7 |
8 | float s = 1;
9 |
10 | void setup() {
11 | size(1024, 768, OPENGL);
12 | kinect = new SimpleOpenNI(this);
13 | kinect.enableDepth();
14 |
15 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
16 | model.translateToCenter();
17 | noStroke();
18 | }
19 |
20 | void draw() {
21 | background(0);
22 | kinect.update();
23 |
24 | translate(width/2, height/2, -1000);
25 | rotateX(radians(180));
26 |
27 | translate(0, 0, 1400);
28 | rotateY(radians(map(mouseX, 0, width, -180, 180)));
29 |
30 | translate(0, 0, s*-1000);
31 | scale(s);
32 |
33 | lights();
34 | noStroke();
35 |
36 | // isolate model transformations
37 | pushMatrix();
38 | // adjust for default orientation
39 | // of the model
40 | rotateX(radians(-90));
41 | rotateZ(radians(180));
42 | model.draw();
43 | popMatrix();
44 |
45 | stroke(255);
46 |
47 | PVector[] depthPoints = kinect.depthMapRealWorld();
48 |
49 | for (int i = 0; i < depthPoints.length; i+=10) {
50 | PVector currentPoint = depthPoints[i];
51 | point(currentPoint.x, currentPoint.y, currentPoint.z);
52 | }
53 | }
54 |
55 | void keyPressed() {
56 | if (keyCode == 38) {
57 | s = s + 0.01;
58 | }
59 | if (keyCode == 40) {
60 | s = s - 0.01;
61 | }
62 | }
63 |
64 |
--------------------------------------------------------------------------------
/chPC_ex13_obj_with_lines/chPC_ex13_obj_with_lines.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | import saito.objloader.*;
4 |
5 | SimpleOpenNI kinect;
6 | OBJModel model;
7 |
8 | float s = 1;
9 |
10 | void setup() {
11 | size(1024, 768, OPENGL);
12 | kinect = new SimpleOpenNI(this);
13 | kinect.enableDepth();
14 |
15 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
16 | model.translateToCenter();
17 | noStroke();
18 | }
19 |
20 | void draw() {
21 | background(0);
22 | kinect.update();
23 |
24 | translate(width/2, height/2, -1000);
25 | rotateX(radians(180));
26 |
27 | translate(0, 0, 1400);
28 | rotateY(radians(map(mouseX, 0, width, -180, 180)));
29 |
30 | translate(0, 0, s*-1000);
31 | scale(s);
32 |
33 | lights();
34 | noStroke();
35 |
36 | // isolate model transformations
37 | pushMatrix();
38 | // adjust for default orientation
39 | // of the model
40 | rotateX(radians(-90));
41 | rotateZ(radians(180));
42 | model.draw();
43 | popMatrix();
44 |
45 |
46 | PVector[] depthPoints = kinect.depthMapRealWorld();
47 |
48 | for (int i = 0; i < depthPoints.length; i+=100) {
49 | PVector currentPoint = depthPoints[i];
50 | stroke(100, 30);
51 |
52 |
53 | line(0,0,0, currentPoint.x, currentPoint.y, currentPoint.z);
54 |
55 | stroke(0,255,0);
56 | point(currentPoint.x, currentPoint.y, currentPoint.z);
57 |
58 | }
59 | }
60 |
61 | void keyPressed() {
62 | if (keyCode == 38) {
63 | s = s + 0.01;
64 | }
65 | if (keyCode == 40) {
66 | s = s - 0.01;
67 | }
68 | }
69 |
70 |
71 |
--------------------------------------------------------------------------------
/chPC_ex14_obj_in_point_cloud_peasy/chPC_ex14_obj_in_point_cloud_peasy.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | import saito.objloader.*;
4 | import peasy.*;
5 |
6 | PeasyCam cam;
7 | SimpleOpenNI kinect;
8 | OBJModel model;
9 |
10 | void setup() {
11 | size(1024, 768, OPENGL);
12 | kinect = new SimpleOpenNI(this);
13 | kinect.enableDepth();
14 |
15 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
16 | model.translateToCenter();
17 | noStroke();
18 |
19 | // create a camera
20 | // arguments set point to look at and distance from that point
21 | cam = new PeasyCam(this, 0, 0, 0, 1000);
22 | }
23 |
24 | void draw() {
25 | background(0);
26 | kinect.update();
27 |
28 | rotateX(radians(180));
29 |
30 | // NB: there used to be a bunch of transformations here
31 |
32 | lights();
33 | noStroke();
34 |
35 | pushMatrix();
36 | rotateX(radians(-90));
37 | rotateZ(radians(180));
38 | model.draw();
39 | popMatrix();
40 |
41 | stroke(255);
42 |
43 | PVector[] depthPoints = kinect.depthMapRealWorld();
44 |
45 | for (int i = 0; i < depthPoints.length; i+=10) {
46 | PVector currentPoint = depthPoints[i];
47 | point(currentPoint.x, currentPoint.y, currentPoint.z);
48 | }
49 | }
50 |
51 |
--------------------------------------------------------------------------------
/chPC_ex15_interactive_peasy/Hotpoint.pde:
--------------------------------------------------------------------------------
1 | class Hotpoint {
2 | PVector center;
3 | color fillColor;
4 | color strokeColor;
5 | int size;
6 | int pointsIncluded;
7 | int maxPoints;
8 | boolean wasJustHit;
9 | int threshold;
10 |
11 |
12 | Hotpoint(float centerX, float centerY, float centerZ, int boxSize) {
13 | center = new PVector(centerX, centerY, centerZ);
14 | size = boxSize;
15 | pointsIncluded = 0;
16 | maxPoints = 1000;
17 | threshold = 0;
18 |
19 | fillColor = strokeColor = color(random(255), random(255), random(255));
20 | }
21 |
22 | void setThreshold( int newThreshold ){
23 | threshold = newThreshold;
24 | }
25 |
26 | void setMaxPoints(int newMaxPoints) {
27 | maxPoints = newMaxPoints;
28 | }
29 |
30 | void setColor(float red, float blue, float green){
31 | fillColor = strokeColor = color(red, blue, green);
32 | }
33 |
34 | boolean check(PVector point) {
35 | boolean result = false;
36 |
37 | if (point.x > center.x - size/2 && point.x < center.x + size/2) {
38 | if (point.y > center.y - size/2 && point.y < center.y + size/2) {
39 | if (point.z > center.z - size/2 && point.z < center.z + size/2) {
40 | result = true;
41 | pointsIncluded++;
42 | }
43 | }
44 | }
45 |
46 | return result;
47 | }
48 |
49 | void draw() {
50 | pushMatrix();
51 | translate(center.x, center.y, center.z);
52 |
53 | fill(red(fillColor), blue(fillColor), green(fillColor), 255 * percentIncluded());
54 | stroke(red(strokeColor), blue(strokeColor), green(strokeColor), 255);
55 | box(size);
56 | popMatrix();
57 | }
58 |
59 | float percentIncluded() {
60 | return map(pointsIncluded, 0, maxPoints, 0, 1);
61 | }
62 |
63 |
64 | boolean currentlyHit() {
65 | return (pointsIncluded > threshold);
66 | }
67 |
68 |
69 | boolean isHit() {
70 | return currentlyHit() && !wasJustHit;
71 | }
72 |
73 | void clear() {
74 | wasJustHit = currentlyHit();
75 | pointsIncluded = 0;
76 | }
77 | }
--------------------------------------------------------------------------------
/chPC_ex15_interactive_peasy/chPC_ex15_interactive_peasy.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | import saito.objloader.*;
4 | import peasy.*;
5 |
6 | PeasyCam cam;
7 | SimpleOpenNI kinect;
8 | OBJModel model;
9 | Hotpoint hotpoint1;
10 | Hotpoint hotpoint2;
11 |
12 | void setup() {
13 | size(1024, 768, OPENGL);
14 | kinect = new SimpleOpenNI(this);
15 | kinect.enableDepth();
16 |
17 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
18 | model.translateToCenter();
19 | noStroke();
20 |
21 | cam = new PeasyCam(this, 0, 0, 0, 1000);
22 |
23 | hotpoint1 = new Hotpoint(200, 200, 800, 150);
24 | hotpoint2 = new Hotpoint(-200, 200, 800, 150);
25 | }
26 |
27 | void draw() {
28 | background(0);
29 | kinect.update();
30 |
31 | rotateX(radians(180));
32 |
33 | lights();
34 | noStroke();
35 |
36 | pushMatrix();
37 | rotateX(radians(-90));
38 | rotateZ(radians(180));
39 | model.draw();
40 | popMatrix();
41 |
42 |
43 | stroke(255);
44 |
45 | PVector[] depthPoints = kinect.depthMapRealWorld();
46 |
47 | for (int i = 0; i < depthPoints.length; i+=10) {
48 | PVector currentPoint = depthPoints[i];
49 | point(currentPoint.x, currentPoint.y, currentPoint.z);
50 |
51 | hotpoint1.check(currentPoint);
52 | hotpoint2.check(currentPoint);
53 | }
54 |
55 | hotpoint1.draw();
56 | hotpoint2.draw();
57 |
58 | if (hotpoint1.isHit()) {
59 | cam.lookAt(hotpoint1.center.x, hotpoint1.center.y * -1, hotpoint1.center.z * -1, 500, 500);
60 | }
61 |
62 | if (hotpoint2.isHit()) {
63 | cam.lookAt(hotpoint2.center.x, hotpoint2.center.y * -1, hotpoint2.center.z * -1, 500, 500);
64 | }
65 |
66 | hotpoint1.clear();
67 | hotpoint2.clear();
68 | }
69 |
70 |
--------------------------------------------------------------------------------
/chSK_advanced_dance_pose/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_advanced_dance_pose/.DS_Store
--------------------------------------------------------------------------------
/chSK_advanced_dance_pose/SkeletonPoser.pde:
--------------------------------------------------------------------------------
1 | /*
2 | pose.addRule(SimpleOpenNI.SKEL_LEFT_HAND, PoseRule.ABOVE, SimpleOpenNI.SKEL_LEFT_ELBOW);
3 | pose.addRule(SimpleOpenNI.SKEL_LEFT_HAND, PoseRule.LEFT_OF, SimpleOpenNI.SKEL_LEFT_ELBOW);
4 |
5 | if(pose.check(userId)){
6 | // play the song
7 | // with debounce
8 | }*/
9 |
10 | class SkeletonPoser {
11 | SimpleOpenNI context;
12 | ArrayList rules;
13 |
14 |
15 | SkeletonPoser(SimpleOpenNI context){
16 | this.context = context;
17 | rules = new ArrayList();
18 | }
19 |
20 | void addRule(int fromJoint, int jointRelation, int toJoint){
21 | PoseRule rule = new PoseRule(context, fromJoint, jointRelation, toJoint);
22 | rules.add(rule);
23 | }
24 |
25 | boolean check(int userID){
26 | boolean result = true;
27 | for(int i = 0; i < rules.size(); i++){
28 | PoseRule rule = (PoseRule)rules.get(i);
29 | result = result && rule.check(userID);
30 | }
31 | return result;
32 | }
33 |
34 | }
35 |
36 | class PoseRule {
37 | int fromJoint;
38 | int toJoint;
39 | PVector fromJointVector;
40 | PVector toJointVector;
41 | SimpleOpenNI context;
42 |
43 | int jointRelation; // one of:
44 | static final int ABOVE = 1;
45 | static final int BELOW = 2;
46 | static final int LEFT_OF = 3;
47 | static final int RIGHT_OF = 4;
48 |
49 | PoseRule(SimpleOpenNI context, int fromJoint, int jointRelation, int toJoint){
50 | this.context = context;
51 | this.fromJoint = fromJoint;
52 | this.toJoint = toJoint;
53 | this.jointRelation = jointRelation;
54 |
55 | fromJointVector = new PVector();
56 | toJointVector = new PVector();
57 | }
58 |
59 | boolean check(int userID){
60 |
61 | // populate the joint vectors for the user we're checking
62 | context.getJointPositionSkeleton(userID, fromJoint, fromJointVector);
63 | context.getJointPositionSkeleton(userID, toJoint, toJointVector);
64 |
65 | boolean result = false; // initialize so result is definitely assigned (the switch has no default case)
66 |
67 | switch(jointRelation){
68 | case ABOVE:
69 | result = (fromJointVector.y > toJointVector.y);
70 | break;
71 | case BELOW:
72 | result = (fromJointVector.y < toJointVector.y);
73 | break;
74 | case LEFT_OF:
75 | result = (fromJointVector.x < toJointVector.x);
76 | break;
77 | case RIGHT_OF:
78 | result = (fromJointVector.x > toJointVector.x);
79 | break;
80 | }
81 |
82 | return result;
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/chSK_advanced_dance_pose/chSK_advanced_dance_pose.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | // import and declarations for minim:
3 | import ddf.minim.*;
4 | Minim minim;
5 | AudioPlayer player;
6 | // declare our poser object
7 | SkeletonPoser pose;
8 |
9 | SimpleOpenNI kinect;
10 |
11 | void setup() {
12 | size(640, 480);
13 | kinect = new SimpleOpenNI(this);
14 | kinect.enableDepth();
15 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
16 | kinect.setMirror(true);
17 |
18 | // initialize the minim object
19 | minim = new Minim(this);
20 | // and load the stayin alive mp3 file
21 | player = minim.loadFile("stayin_alive.mp3");
22 |
23 | // initialize the pose object
24 | pose = new SkeletonPoser(kinect);
25 | // rules for the right arm
26 | pose.addRule(SimpleOpenNI.SKEL_RIGHT_HAND, PoseRule.ABOVE, SimpleOpenNI.SKEL_RIGHT_ELBOW);
27 | pose.addRule(SimpleOpenNI.SKEL_RIGHT_HAND, PoseRule.RIGHT_OF, SimpleOpenNI.SKEL_RIGHT_ELBOW);
28 | pose.addRule(SimpleOpenNI.SKEL_RIGHT_ELBOW, PoseRule.ABOVE, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
29 | pose.addRule(SimpleOpenNI.SKEL_RIGHT_ELBOW, PoseRule.RIGHT_OF, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
30 | // rules for the left arm
31 | pose.addRule(SimpleOpenNI.SKEL_LEFT_ELBOW, PoseRule.BELOW, SimpleOpenNI.SKEL_LEFT_SHOULDER);
32 | pose.addRule(SimpleOpenNI.SKEL_LEFT_ELBOW, PoseRule.LEFT_OF, SimpleOpenNI.SKEL_LEFT_SHOULDER);
33 | pose.addRule(SimpleOpenNI.SKEL_LEFT_HAND, PoseRule.LEFT_OF, SimpleOpenNI.SKEL_LEFT_ELBOW);
34 | pose.addRule(SimpleOpenNI.SKEL_LEFT_HAND, PoseRule.BELOW, SimpleOpenNI.SKEL_LEFT_ELBOW);
35 | // rules for the right leg
36 | pose.addRule(SimpleOpenNI.SKEL_RIGHT_KNEE, PoseRule.BELOW, SimpleOpenNI.SKEL_RIGHT_HIP);
37 | pose.addRule(SimpleOpenNI.SKEL_RIGHT_KNEE, PoseRule.RIGHT_OF, SimpleOpenNI.SKEL_RIGHT_HIP);
38 | // rules for the left leg
39 | pose.addRule(SimpleOpenNI.SKEL_LEFT_KNEE, PoseRule.BELOW, SimpleOpenNI.SKEL_LEFT_HIP);
40 | pose.addRule(SimpleOpenNI.SKEL_LEFT_KNEE, PoseRule.LEFT_OF, SimpleOpenNI.SKEL_LEFT_HIP);
41 | pose.addRule(SimpleOpenNI.SKEL_LEFT_FOOT, PoseRule.BELOW, SimpleOpenNI.SKEL_LEFT_KNEE);
42 | pose.addRule(SimpleOpenNI.SKEL_LEFT_FOOT, PoseRule.LEFT_OF, SimpleOpenNI.SKEL_LEFT_KNEE);
43 | strokeWeight(5);
44 | }
45 |
46 | void draw() {
47 | background(0);
48 | kinect.update();
49 | image(kinect.depthImage(), 0, 0);
50 |
51 | IntVector userList = new IntVector();
52 | kinect.getUsers(userList);
53 | if (userList.size() > 0) {
54 | int userId = userList.get(0);
55 | if( kinect.isTrackingSkeleton(userId)) {
56 |
57 | // check to see if the user
58 | // is in the pose
59 | if(pose.check(userId)){
60 | //if they are, set the color white
61 | stroke(255);
62 | // and start the song playing
63 | if(!player.isPlaying()){
64 | player.play();
65 | }
66 | } else {
67 | // otherwise set the color to red
68 | // and don't start the song
69 | stroke(255,0,0);
70 | }
71 | // draw the skeleton in whatever color we chose
72 | drawSkeleton(userId);
73 | }
74 | }
75 | }
76 |
77 | void drawSkeleton(int userId) {
78 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_HEAD, SimpleOpenNI.SKEL_NECK);
79 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_LEFT_SHOULDER);
80 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_LEFT_ELBOW);
81 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_ELBOW, SimpleOpenNI.SKEL_LEFT_HAND);
82 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
83 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
84 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, SimpleOpenNI.SKEL_RIGHT_HAND);
85 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
86 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
87 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_LEFT_HIP);
88 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HIP, SimpleOpenNI.SKEL_LEFT_KNEE);
89 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_KNEE, SimpleOpenNI.SKEL_LEFT_FOOT);
90 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_RIGHT_HIP);
91 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_RIGHT_KNEE);
92 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_KNEE, SimpleOpenNI.SKEL_RIGHT_FOOT);
93 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_LEFT_HIP);
94 | }
95 |
96 | void drawLimb(int userId, int jointType1, int jointType2)
97 | {
98 | PVector jointPos1 = new PVector();
99 | PVector jointPos2 = new PVector();
100 | float confidence;
101 |
102 | // draw the joint position
103 | confidence = kinect.getJointPositionSkeleton(userId, jointType1, jointPos1);
104 | confidence = kinect.getJointPositionSkeleton(userId, jointType2, jointPos2);
105 |
106 | line(jointPos1.x, jointPos1.y, jointPos1.z,
107 | jointPos2.x, jointPos2.y, jointPos2.z);
108 | }
109 |
110 | void keyPressed(){
111 | saveFrame("stayin_alive_"+random(100)+".png");
112 | }
113 |
114 |
115 | // user-tracking callbacks!
116 | void onNewUser(int userId) {
117 | println("start pose detection");
118 | kinect.startPoseDetection("Psi", userId);
119 | }
120 |
121 | void onEndCalibration(int userId, boolean successful) {
122 | if (successful) {
123 | println(" User calibrated !!!");
124 | kinect.startTrackingSkeleton(userId);
125 | }
126 | else {
127 | println(" Failed to calibrate user !!!");
128 | kinect.startPoseDetection("Psi", userId);
129 | }
130 | }
131 |
132 | void onStartPose(String pose, int userId) {
133 | println("Started pose for user");
134 | kinect.stopPoseDetection(userId);
135 | kinect.requestCalibrationSkeleton(userId, true);
136 | }
137 |
138 |
--------------------------------------------------------------------------------
/chSK_basic_dance_pose/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_basic_dance_pose/.DS_Store
--------------------------------------------------------------------------------
/chSK_basic_dance_pose/chSK_basic_dance_pose.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup() {
5 | size(640, 480);
6 | kinect = new SimpleOpenNI(this);
7 | kinect.enableDepth();
8 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
9 | strokeWeight(5);
10 | }
11 |
12 | void draw() {
13 | background(0);
14 | kinect.update();
15 | image(kinect.depthImage(), 0, 0);
16 |
17 | IntVector userList = new IntVector();
18 | kinect.getUsers(userList);
19 | if (userList.size() > 0) {
20 | int userId = userList.get(0);
21 | if ( kinect.isTrackingSkeleton(userId)) {
22 | PVector rightHand = new PVector();
23 | PVector rightElbow = new PVector();
24 | PVector rightShoulder = new PVector();
25 |
26 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND, rightHand);
27 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, rightElbow);
28 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, rightShoulder);
29 |
30 | // right elbow above right shoulder
31 | // AND
32 | // right elbow right of right shoulder
33 | if(rightElbow.y > rightShoulder.y && rightElbow.x > rightShoulder.x){
34 | stroke(255);
35 | } else {
36 | stroke(255,0,0);
37 | }
38 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
39 |
40 | // right hand above right elbow
41 | // AND
42 | // right hand right of right elbow
43 | if(rightHand.y > rightElbow.y && rightHand.x > rightElbow.x){
44 | stroke(255);
45 | } else {
46 | stroke(255,0,0);
47 | }
48 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HAND, SimpleOpenNI.SKEL_RIGHT_ELBOW);
49 | }
50 | }
51 | }
52 |
53 | // user-tracking callbacks!
54 | void onNewUser(int userId) {
55 | println("start pose detection");
56 | kinect.startPoseDetection("Psi", userId);
57 | }
58 |
59 | void onEndCalibration(int userId, boolean successful) {
60 | if (successful) {
61 | println(" User calibrated !!!");
62 | kinect.startTrackingSkeleton(userId);
63 | }
64 | else {
65 | println(" Failed to calibrate user !!!");
66 | kinect.startPoseDetection("Psi", userId);
67 | }
68 | }
69 |
70 | void onStartPose(String pose, int userId) {
71 | println("Started pose for user");
72 | kinect.stopPoseDetection(userId);
73 | kinect.requestCalibrationSkeleton(userId, true);
74 | }
75 |
76 |
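77 | // Note: the comparisons above use SimpleOpenNI's real-world joint coordinates,
78 | // where larger y generally means higher up (the 3D sketches in this repo flip the
79 | // view with rotateX(radians(180)) for the same reason), so ".y >" reads as "above".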
--------------------------------------------------------------------------------
/chSK_ex01_one_joint/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex01_one_joint/.DS_Store
--------------------------------------------------------------------------------
/chSK_ex01_one_joint/chSK_ex01_one_joint.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup() {
5 | kinect = new SimpleOpenNI(this);
6 | kinect.enableDepth();
7 |
8 | // turn on user tracking
9 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
10 |
11 | size(640, 480);
12 | fill(255,0,0);
13 | }
14 |
15 | void draw() {
16 | kinect.update();
17 | PImage depth = kinect.depthImage();
18 | image(depth, 0, 0);
19 |
20 | // make a vector of ints to store the list of users
21 | IntVector userList = new IntVector();
22 | // write the list of detected users
23 | // into our vector
24 | kinect.getUsers(userList);
25 |
26 | // if we found any users
27 | if (userList.size() > 0) {
28 | // get the first user
29 | int userId = userList.get(0);
30 |
31 | // if we're successfully calibrated
32 | if ( kinect.isTrackingSkeleton(userId)) {
33 |       // make a vector to store the left hand
34 |       PVector leftHand = new PVector();
35 |       // put the position of the left hand into that vector
36 |       kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, leftHand);
37 | 
38 |       // convert the detected hand position
39 |       // to "projective" coordinates
40 |       // that will match the depth image
41 |       PVector convertedLeftHand = new PVector();
42 |       kinect.convertRealWorldToProjective(leftHand, convertedLeftHand);
43 |       // and display it
44 |       ellipse(convertedLeftHand.x, convertedLeftHand.y, 10, 10);
45 | }
46 | }
47 | }
48 |
49 | // user-tracking callbacks!
50 | void onNewUser(int userId) {
51 | println("start pose detection");
52 | kinect.startPoseDetection("Psi", userId);
53 | }
54 |
55 | void onEndCalibration(int userId, boolean successful) {
56 | if (successful) {
57 | println(" User calibrated !!!");
58 | kinect.startTrackingSkeleton(userId);
59 | } else {
60 | println(" Failed to calibrate user !!!");
61 | kinect.startPoseDetection("Psi", userId);
62 | }
63 | }
64 |
65 | void onStartPose(String pose, int userId) {
66 | println("Started pose for user");
67 | kinect.stopPoseDetection(userId);
68 | kinect.requestCalibrationSkeleton(userId, true);
69 | }
70 |
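71 | // Note: getJointPositionSkeleton() reports joints in real-world coordinates
72 | // (millimeters, like depthMap()), which is why convertRealWorldToProjective()
73 | // is needed to get pixel coordinates that line up with the 640x480 depth image.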
--------------------------------------------------------------------------------
/chSK_ex01_one_joint/scaledEllipse_big.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex01_one_joint/scaledEllipse_big.png
--------------------------------------------------------------------------------
/chSK_ex01_one_joint/scaledEllipse_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex01_one_joint/scaledEllipse_small.png
--------------------------------------------------------------------------------
/chSK_ex02_skel_anatomy/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex02_skel_anatomy/.DS_Store
--------------------------------------------------------------------------------
/chSK_ex02_skel_anatomy/chSK_ex02_skel_anatomy.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup() {
5 | kinect = new SimpleOpenNI(this);
6 | kinect.enableDepth();
7 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
8 |
9 | size(640, 480);
10 | fill(255, 0, 0);
11 | }
12 |
13 | void draw() {
14 | kinect.update();
15 | image(kinect.depthImage(), 0, 0);
16 |
17 | IntVector userList = new IntVector();
18 | kinect.getUsers(userList);
19 |
20 | if (userList.size() > 0) {
21 | int userId = userList.get(0);
22 |
23 | if ( kinect.isTrackingSkeleton(userId)) {
24 | drawSkeleton(userId);
25 | }
26 | }
27 | }
28 |
29 | void drawSkeleton(int userId) {
30 | stroke(0);
31 | strokeWeight(5);
32 |
33 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_HEAD, SimpleOpenNI.SKEL_NECK);
34 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_LEFT_SHOULDER);
35 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_LEFT_ELBOW);
36 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_ELBOW, SimpleOpenNI.SKEL_LEFT_HAND);
37 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
38 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
39 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, SimpleOpenNI.SKEL_RIGHT_HAND);
40 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
41 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
42 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_LEFT_HIP);
43 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HIP, SimpleOpenNI.SKEL_LEFT_KNEE);
44 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_KNEE, SimpleOpenNI.SKEL_LEFT_FOOT);
45 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_RIGHT_HIP);
46 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_RIGHT_KNEE);
47 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_KNEE, SimpleOpenNI.SKEL_RIGHT_FOOT);
48 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_LEFT_HIP);
49 |
50 | noStroke();
51 |
52 | fill(255,0,0);
53 | drawJoint(userId, SimpleOpenNI.SKEL_HEAD);
54 | drawJoint(userId, SimpleOpenNI.SKEL_NECK);
55 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER);
56 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_ELBOW);
57 | drawJoint(userId, SimpleOpenNI.SKEL_NECK);
58 | drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
59 | drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW);
60 | drawJoint(userId, SimpleOpenNI.SKEL_TORSO);
61 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_HIP);
62 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_KNEE);
63 | drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_HIP);
64 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_FOOT);
65 | drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_KNEE);
66 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_HIP);
67 | drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_FOOT);
68 | drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_HAND);
69 | drawJoint(userId, SimpleOpenNI.SKEL_LEFT_HAND);
70 | }
71 |
72 | void drawJoint(int userId, int jointID) {
73 | PVector joint = new PVector();
74 | float confidence = kinect.getJointPositionSkeleton(userId, jointID, joint);
75 | if(confidence < 0.5){
76 | return;
77 | }
78 | PVector convertedJoint = new PVector();
79 | kinect.convertRealWorldToProjective(joint, convertedJoint);
80 | ellipse(convertedJoint.x, convertedJoint.y, 5, 5);
81 | }
82 |
83 | // user-tracking callbacks!
84 | void onNewUser(int userId) {
85 | println("start pose detection");
86 | kinect.startPoseDetection("Psi", userId);
87 | }
88 |
89 | void onEndCalibration(int userId, boolean successful) {
90 | if (successful) {
91 | println(" User calibrated !!!");
92 | kinect.startTrackingSkeleton(userId);
93 | }
94 | else {
95 | println(" Failed to calibrate user !!!");
96 | kinect.startPoseDetection("Psi", userId);
97 | }
98 | }
99 |
100 | void onStartPose(String pose, int userId) {
101 | println("Started pose for user");
102 | kinect.stopPoseDetection(userId);
103 | kinect.requestCalibrationSkeleton(userId, true);
104 | }
105 |
--------------------------------------------------------------------------------
/chSK_ex03_joint_distance/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex03_joint_distance/.DS_Store
--------------------------------------------------------------------------------
/chSK_ex03_joint_distance/chSK_ex03_joint_distance.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup() {
5 | kinect = new SimpleOpenNI(this);
6 | kinect.enableDepth();
7 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
8 |
9 | size(640, 480);
10 | stroke(255,0,0);
11 | strokeWeight(5);
12 | }
13 |
14 | void draw() {
15 | kinect.update();
16 | image(kinect.depthImage(), 0, 0);
17 |
18 | IntVector userList = new IntVector();
19 | kinect.getUsers(userList);
20 |
21 | if (userList.size() > 0) {
22 | int userId = userList.get(0);
23 |
24 | if ( kinect.isTrackingSkeleton(userId)) {
25 | PVector leftHand = new PVector();
26 | PVector rightHand = new PVector();
27 |
28 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, leftHand);
29 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND, rightHand);
30 |
31 | // calculate difference by subtracting one vector from another
32 | PVector differenceVector = PVector.sub(leftHand, rightHand);
33 | // calculate the distance and direction
34 | // of the difference vector
35 | float magnitude = differenceVector.mag();
36 | differenceVector.normalize();
37 |       // draw a line between the two hands
38 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HAND, SimpleOpenNI.SKEL_RIGHT_HAND);
39 | // display
40 | pushMatrix();
41 | scale(4);
42 | fill(differenceVector.x * 255, differenceVector.y * 255, differenceVector.z * 255);
43 | text("m: " + magnitude, 5, 10);
44 | popMatrix();
45 | }
46 | }
47 | }
48 |
49 |
50 | // user-tracking callbacks!
51 | void onNewUser(int userId) {
52 | println("start pose detection");
53 | kinect.startPoseDetection("Psi", userId);
54 | }
55 |
56 | void onEndCalibration(int userId, boolean successful) {
57 | if (successful) {
58 | println(" User calibrated !!!");
59 | kinect.startTrackingSkeleton(userId);
60 | }
61 | else {
62 | println(" Failed to calibrate user !!!");
63 | kinect.startPoseDetection("Psi", userId);
64 | }
65 | }
66 |
67 | void onStartPose(String pose, int userId) {
68 | println("Started pose for user");
69 | kinect.stopPoseDetection(userId);
70 | kinect.requestCalibrationSkeleton(userId, true);
71 | }
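72 | 
73 | // Note: joint positions are in real-world millimeters, so the "m:" readout is
74 | // (roughly) the distance between the two hands in millimeters.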
--------------------------------------------------------------------------------
/chSK_ex03_joint_distance/distance_130.70207.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex03_joint_distance/distance_130.70207.png
--------------------------------------------------------------------------------
/chSK_ex04_joint_distance_art/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex04_joint_distance_art/.DS_Store
--------------------------------------------------------------------------------
/chSK_ex04_joint_distance_art/chSK_ex04_joint_distance_art.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup() {
5 | kinect = new SimpleOpenNI(this);
6 | kinect.enableDepth();
7 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
8 |
9 | size(640, 480);
10 | stroke(255,0,0);
11 | background(255);
12 | }
13 |
14 | void draw() {
15 | kinect.update();
16 |
17 | IntVector userList = new IntVector();
18 | kinect.getUsers(userList);
19 |
20 | if (userList.size() > 0) {
21 | int userId = userList.get(0);
22 |
23 | if ( kinect.isTrackingSkeleton(userId)) {
24 | PVector leftHand = new PVector();
25 | PVector rightHand = new PVector();
26 |
27 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, leftHand);
28 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND, rightHand);
29 |
30 | // calculate difference by subtracting one vector from another
31 | PVector differenceVector = PVector.sub(leftHand, rightHand);
32 | // calculate the distance and direction
33 | // of the difference vector
34 | float magnitude = differenceVector.mag();
35 | differenceVector.normalize();
36 | stroke(differenceVector.x * 255, differenceVector.y * 255, differenceVector.z * 255);
37 | strokeWeight(map(magnitude, 100, 1200, 1, 8));
38 | kinect.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HAND, SimpleOpenNI.SKEL_RIGHT_HAND);
39 | }
40 | }
41 | }
42 |
43 |
44 | // user-tracking callbacks!
45 | void onNewUser(int userId) {
46 | println("start pose detection");
47 | kinect.startPoseDetection("Psi", userId);
48 | }
49 |
50 | void onEndCalibration(int userId, boolean successful) {
51 | if (successful) {
52 | println(" User calibrated !!!");
53 | kinect.startTrackingSkeleton(userId);
54 | }
55 | else {
56 | println(" Failed to calibrate user !!!");
57 | kinect.startPoseDetection("Psi", userId);
58 | }
59 | }
60 |
61 | void onStartPose(String pose, int userId) {
62 | println("Started pose for user");
63 | kinect.stopPoseDetection(userId);
64 | kinect.requestCalibrationSkeleton(userId, true);
65 | }
66 |
67 | void keyPressed(){
68 | saveFrame("joint_art_" + random(1000) + ".png");
69 | }
70 |
--------------------------------------------------------------------------------
/chSK_ex04_joint_distance_art/joint_art_34.419777.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_ex04_joint_distance_art/joint_art_34.419777.png
--------------------------------------------------------------------------------
/chSK_exercise_measurement/SkeletonRecorder.pde:
--------------------------------------------------------------------------------
1 | class SkeletonRecorder {
2 | private SimpleOpenNI context;
3 | TrackedJoint[] trackedJoints;
4 | int userID;
5 | int currentFrame = 0;
6 | int[] jointIDsToTrack;
7 |
8 | SkeletonRecorder(SimpleOpenNI context, int[] jointIDsToTrack) {
9 | this.context = context;
10 |     // userID is set later via setUser()
11 | this.jointIDsToTrack = jointIDsToTrack;
12 | }
13 |
14 |   void setUser(int userID) {
15 |     this.userID = userID;
16 |     if (trackedJoints != null) return; // keep already-recorded frames on repeated calls
17 |     trackedJoints = new TrackedJoint[jointIDsToTrack.length];
18 |     for (int i = 0; i < trackedJoints.length; i++) {
19 |       trackedJoints[i] = new TrackedJoint(this, context, userID, jointIDsToTrack[i]);
20 |     }
21 |   }
22 |
23 | void recordFrame() {
24 | for (int i = 0; i < trackedJoints.length; i++) {
25 | trackedJoints[i].recordFrame();
26 | }
27 | }
28 |
29 | void nextFrame() {
30 | currentFrame++;
31 |     if (trackedJoints == null || currentFrame >= trackedJoints[0].frames.size()) {
32 | currentFrame = 0;
33 | }
34 | }
35 | }
36 |
37 | class TrackedJoint {
38 | int jointID;
39 | SimpleOpenNI context;
40 | ArrayList frames;
41 | int userID;
42 | SkeletonRecorder recorder;
43 |
44 | TrackedJoint(SkeletonRecorder recorder, SimpleOpenNI context, int userID, int jointID ) {
45 | this.recorder = recorder;
46 | this.context = context;
47 | this.userID = userID;
48 | this.jointID = jointID;
49 |
50 | frames = new ArrayList();
51 | }
52 |
53 | JointPosition getPosition() {
54 | return getPositionAtFrame(recorder.currentFrame);
55 | }
56 |
57 | JointPosition getPositionAtFrame(int frameNum) {
58 | return (JointPosition) frames.get(frameNum);
59 | }
60 |
61 | void recordFrame() {
62 | PVector position = new PVector();
63 | float confidence = context.getJointPositionSkeleton(userID, jointID, position);
64 |       JointPosition frame = new JointPosition(position, confidence);
65 | frames.add(frame);
66 | }
67 | }
68 |
69 |
70 | class JointPosition{
71 | PVector position;
72 |   float confidence;
73 |
74 |   JointPosition(PVector position, float confidence){
75 | this.position = position;
76 | this.confidence = confidence;
77 | }
78 | }
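79 | 
80 | // Rough usage (see chSK_exercise_measurement.pde): construct with the joint IDs to
81 | // track, call setUser() once a user is detected, call recordFrame() every draw()
82 | // while recording, then read trackedJoints[i].getPosition() and call nextFrame()
83 | // to step through the recording on playback.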
--------------------------------------------------------------------------------
/chSK_exercise_measurement/chSK_exercise_measurement.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | SimpleOpenNI kinect;
4 |
5 | SkeletonRecorder recorder;
6 |
7 | boolean recording = false;
8 | boolean playing = false;
9 |
10 | float offByDistance = 0.0;
11 | PFont font;
12 |
13 | void setup() {
14 | size(1028, 768, OPENGL);
15 | kinect = new SimpleOpenNI(this);
16 | kinect.enableDepth();
17 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
18 | kinect.setMirror(true);
19 |
20 | // initialize our recorder and
21 | // tell it to track left hand
22 | // it takes an array because it can track multiple joints
23 | int[] jointsToTrack = {SimpleOpenNI.SKEL_LEFT_HAND};
24 | recorder = new SkeletonRecorder(kinect, jointsToTrack);
25 |
26 | font = createFont("Verdana", 40);
27 | textFont(font);
28 | }
29 |
30 | void draw() {
31 | background(0);
32 | kinect.update();
33 | // display text information
34 | pushMatrix();
35 | // scale(4);
36 |
37 | fill(255);
38 | translate(0, 50, 0);
39 |   text("totalFrames: " + (recorder.trackedJoints == null ? 0 : recorder.trackedJoints[0].frames.size()), 5, 0);
40 | text("recording: " + recording, 5, 50);
41 | text("currentFrame: " + recorder.currentFrame, 5, 100 );
42 | float c = map(offByDistance, 0, 1000, 0, 255);
43 | fill(c, 255-c, 0);
44 | text("off by: " + offByDistance, 5, 150);
45 | popMatrix();
46 |
47 | translate(width/2, height/2, 0);
48 | rotateX(radians(180));
49 |
50 | IntVector userList = new IntVector();
51 | kinect.getUsers(userList);
52 | if (userList.size() > 0) {
53 | int userId = userList.get(0);
54 | recorder.setUser(userId);
55 | if ( kinect.isTrackingSkeleton(userId)) {
56 | PVector currentPosition = new PVector();
57 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, currentPosition);
58 |
59 | pushMatrix();
60 | stroke(255, 0, 0);
61 | strokeWeight(50);
62 | point(currentPosition.x, currentPosition.y, currentPosition.z);
63 | popMatrix();
64 |
65 | // if we're recording
66 | // tell the record to capture this frame
67 | if (recording) {
68 | recorder.recordFrame();
69 | }
70 | else if (playing) {
71 | // if we're playing
72 | // access the recorded joint position
73 | PVector recordedPosition = recorder.trackedJoints[0].getPosition().position;
74 |
75 | // display the recorded joint position
76 | pushMatrix();
77 | stroke(0, 255, 0);
78 | strokeWeight(30);
79 | point(recordedPosition.x, recordedPosition.y, recordedPosition.z);
80 | popMatrix();
81 |
82 | // draw a line between the current position and the recorded one
83 | // set its color based on the distance between the two
84 | stroke(c, 255-c, 0);
85 | strokeWeight(20);
86 | line(currentPosition.x, currentPosition.y, currentPosition.z, recordedPosition.x, recordedPosition.y, recordedPosition.z);
87 | // calculate the vector between the current and recorded positions
88 | // with vector subtraction
89 | currentPosition.sub(recordedPosition);
90 | // store the magnitude of that vector as the off-by distance
91 | // for display
92 | offByDistance = currentPosition.mag();
93 | // tell the recorder to load up
94 | // the next frame
95 | recorder.nextFrame();
96 | }
97 |
98 | }
99 | }
100 | }
101 |
102 | void keyPressed() {
103 | if (key == ' ') {
104 | recording = !recording;
105 | playing = !playing;
106 | }
107 | }
108 |
109 | // user-tracking callbacks!
110 | void onNewUser(int userId) {
111 | println("start pose detection");
112 | kinect.startPoseDetection("Psi", userId);
113 | }
114 |
115 | void onEndCalibration(int userId, boolean successful) {
116 | if (successful) {
117 | println(" User calibrated !!!");
118 | kinect.startTrackingSkeleton(userId);
119 | recording = true;
120 | }
121 | else {
122 | println(" Failed to calibrate user !!!");
123 | kinect.startPoseDetection("Psi", userId);
124 | }
125 | }
126 |
127 | void onStartPose(String pose, int userId) {
128 | println("Started pose for user");
129 | kinect.stopPoseDetection(userId);
130 | kinect.requestCalibrationSkeleton(userId, true);
131 | }
132 |
133 |
--------------------------------------------------------------------------------
/chSK_exercise_measurement_one_limb/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_exercise_measurement_one_limb/.DS_Store
--------------------------------------------------------------------------------
/chSK_exercise_measurement_one_limb/SkeletonRecorder.pde:
--------------------------------------------------------------------------------
1 | class SkeletonRecorder {
2 | SimpleOpenNI context;
3 | int jointID;
4 | int userID;
5 | ArrayList frames;
6 | int currentFrame = 0;
7 |
8 | SkeletonRecorder(SimpleOpenNI context, int jointID ) {
9 | this.context = context;
10 | this.jointID = jointID;
11 | frames = new ArrayList();
12 | }
13 |
14 | void setUser(int userID) {
15 | this.userID = userID;
16 | }
17 |
18 | void nextFrame() {
19 | currentFrame++;
20 | if (currentFrame == frames.size()) {
21 | currentFrame = 0;
22 | }
23 | }
24 |
25 | PVector getPosition() {
26 | return (PVector)frames.get(currentFrame);
27 | }
28 |
29 | void recordFrame() {
30 | PVector position = new PVector();
31 | context.getJointPositionSkeleton(userID, jointID, position);
32 | frames.add(position);
33 | }
34 | }
--------------------------------------------------------------------------------
/chSK_exercise_measurement_one_limb/chSK_exercise_measurement_one_limb.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | SimpleOpenNI kinect;
4 |
5 | SkeletonRecorder recorder;
6 | boolean recording = false;
7 | float offByDistance = 0.0;
8 |
9 | void setup() {
10 | size(1028, 768, OPENGL);
11 | kinect = new SimpleOpenNI(this);
12 | kinect.enableDepth();
13 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
14 | kinect.setMirror(true);
15 | // initialize our recorder and
16 | // tell it to track left hand
17 | recorder = new SkeletonRecorder(kinect, SimpleOpenNI.SKEL_LEFT_HAND);
18 | // load a font
19 | PFont font = createFont("Verdana", 40);
20 | textFont(font);
21 | }
22 |
23 | void draw() {
24 | background(0);
25 | kinect.update();
26 | // these are to make our spheres look nice
27 | lights();
28 | noStroke();
29 | // create heads-up display
30 | fill(255);
31 | text("totalFrames: " + recorder.frames.size(), 5, 50);
32 | text("recording: " + recording, 5, 100);
33 | text("currentFrame: " + recorder.currentFrame, 5, 150 );
34 | // set text color as a gradient from red to green
35 | // based on distance between hands
36 | float c = map(offByDistance, 0, 1000, 0, 255);
37 | fill(c, 255-c, 0);
38 | text("off by: " + offByDistance, 5, 200);
39 |
40 | translate(width/2, height/2, 0);
41 | rotateX(radians(180));
42 |
43 | IntVector userList = new IntVector();
44 | kinect.getUsers(userList);
45 | if (userList.size() > 0) {
46 | int userId = userList.get(0);
47 | recorder.setUser(userId);
48 | if ( kinect.isTrackingSkeleton(userId)) {
49 | PVector currentPosition = new PVector();
50 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, currentPosition);
51 | // display the sphere for the current limb position
52 | pushMatrix();
53 | fill(255,0,0);
54 | translate(currentPosition.x, currentPosition.y, currentPosition.z);
55 | sphere(80);
56 | popMatrix();
57 | // if we're recording tell the recorder to capture this frame
58 | if (recording) {
59 | recorder.recordFrame();
60 | }
61 | else {
62 | // if we're playing access the recorded joint position
63 | PVector recordedPosition = recorder.getPosition();
64 | // display the recorded joint position
65 | pushMatrix();
66 | fill(0, 255, 0);
67 | translate(recordedPosition.x, recordedPosition.y, recordedPosition.z);
68 | sphere(80);
69 | popMatrix();
70 | // draw a line between the current position and the recorded one
71 | // set its color based on the distance between the two
72 | stroke(c, 255-c, 0);
73 | strokeWeight(20);
74 | line(currentPosition.x, currentPosition.y, currentPosition.z, recordedPosition.x, recordedPosition.y, recordedPosition.z);
75 | // calculate the vector between the current and recorded positions
76 | // with vector subtraction
77 | currentPosition.sub(recordedPosition);
78 | // store the magnitude of that vector as the off-by distance for display
79 | offByDistance = currentPosition.mag();
80 | // tell the recorder to load up the next frame
81 | recorder.nextFrame();
82 | }
83 | }
84 | }
85 | }
86 |
87 | void keyPressed() {
88 | recording = false;
89 | }
90 |
91 | // user-tracking callbacks!
92 | void onNewUser(int userId) {
93 | println("start pose detection");
94 | kinect.startPoseDetection("Psi", userId);
95 | }
96 |
97 | void onEndCalibration(int userId, boolean successful) {
98 | if (successful) {
99 | println(" User calibrated !!!");
100 | kinect.startTrackingSkeleton(userId);
101 | recording = true;
102 | }
103 | else {
104 | println(" Failed to calibrate user !!!");
105 | kinect.startPoseDetection("Psi", userId);
106 | }
107 | }
108 |
109 | void onStartPose(String pose, int userId) {
110 | println("Started pose for user");
111 | kinect.stopPoseDetection(userId);
112 | kinect.requestCalibrationSkeleton(userId, true);
113 | }
114 |
115 |
--------------------------------------------------------------------------------
/chSK_exercise_measurement_one_limb/exercise_mesaurement.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_exercise_measurement_one_limb/exercise_mesaurement.png
--------------------------------------------------------------------------------
/chSK_joint_orientation/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_joint_orientation/.DS_Store
--------------------------------------------------------------------------------
/chSK_joint_orientation/chSK_joint_orientation.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | SimpleOpenNI kinect;
4 |
5 | void setup() {
6 | size(1028, 768, OPENGL);
7 |
8 | kinect = new SimpleOpenNI(this);
9 | kinect.enableDepth();
10 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
11 | kinect.setMirror(true);
12 |
13 | fill(255, 0, 0);
14 | }
15 |
16 | void draw() {
17 | kinect.update();
18 | background(255);
19 |
20 | translate(width/2, height/2, 0);
21 | rotateX(radians(180));
22 |
23 | IntVector userList = new IntVector();
24 | kinect.getUsers(userList);
25 | if (userList.size() > 0) {
26 | int userId = userList.get(0);
27 |
28 | if ( kinect.isTrackingSkeleton(userId)) {
29 | PVector position = new PVector();
30 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_TORSO, position);
31 |
32 | PMatrix3D orientation = new PMatrix3D();
33 | float confidence = kinect.getJointOrientationSkeleton(userId, SimpleOpenNI.SKEL_TORSO, orientation);
34 |
35 | println(confidence);
36 | drawSkeleton(userId);
37 |
38 | pushMatrix();
39 | // move to the position of the TORSO
40 | translate(position.x, position.y, position.z);
41 | // adopt the TORSO's orientation
42 | // to be our coordinate system
43 | applyMatrix(orientation);
44 | // draw x-axis in red
45 | stroke(255, 0, 0);
46 | strokeWeight(3);
47 | line(0, 0, 0, 150, 0, 0);
48 |       // draw y-axis in green
49 | stroke(0, 255, 0);
50 | line(0, 0, 0, 0, 150, 0);
51 |       // draw z-axis in blue
52 | stroke(0, 0, 255);
53 | line(0, 0, 0, 0, 0, 150);
54 |
55 | popMatrix();
56 |
57 | }
58 | }
59 | }
60 |
61 | void drawSkeleton(int userId) {
62 | drawLimb(userId, SimpleOpenNI.SKEL_HEAD, SimpleOpenNI.SKEL_NECK);
63 | drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_LEFT_SHOULDER);
64 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_LEFT_ELBOW);
65 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_ELBOW, SimpleOpenNI.SKEL_LEFT_HAND);
66 | drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
67 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
68 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, SimpleOpenNI.SKEL_RIGHT_HAND);
69 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
70 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
71 | drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_LEFT_HIP);
72 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HIP, SimpleOpenNI.SKEL_LEFT_KNEE);
73 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_KNEE, SimpleOpenNI.SKEL_LEFT_FOOT);
74 | drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_RIGHT_HIP);
75 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_RIGHT_KNEE);
76 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_KNEE, SimpleOpenNI.SKEL_RIGHT_FOOT);
77 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_LEFT_HIP);
78 | }
79 |
80 | void drawLimb(int userId,int jointType1,int jointType2)
81 | {
82 | PVector jointPos1 = new PVector();
83 | PVector jointPos2 = new PVector();
84 | float confidence;
85 |
86 | confidence = kinect.getJointPositionSkeleton(userId,jointType1,jointPos1);
87 | confidence += kinect.getJointPositionSkeleton(userId,jointType2,jointPos2);
88 | stroke(100);
89 | strokeWeight(5);
90 | if(confidence > 1){
91 | line(jointPos1.x,jointPos1.y,jointPos1.z, jointPos2.x,jointPos2.y,jointPos2.z);
92 | }
93 | }
94 |
95 | // user-tracking callbacks!
96 | void onNewUser(int userId) {
97 | println("start pose detection");
98 | kinect.startPoseDetection("Psi", userId);
99 | }
100 |
101 | void onEndCalibration(int userId, boolean successful) {
102 | if (successful) {
103 | println(" User calibrated !!!");
104 | kinect.startTrackingSkeleton(userId);
105 | }
106 | else {
107 | println(" Failed to calibrate user !!!");
108 | kinect.startPoseDetection("Psi", userId);
109 | }
110 | }
111 |
112 | void onStartPose(String pose, int userId) {
113 | println("Started pose for user");
114 | kinect.stopPoseDetection(userId);
115 | kinect.requestCalibrationSkeleton(userId, true);
116 | }
117 |
118 |
--------------------------------------------------------------------------------
/chSK_joint_orientation/joint_orientation_axes_3.6722064.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_joint_orientation/joint_orientation_axes_3.6722064.png
--------------------------------------------------------------------------------
/chSK_joint_orientation_w_model/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_joint_orientation_w_model/.DS_Store
--------------------------------------------------------------------------------
/chSK_joint_orientation_w_model/chSK_joint_orientation_w_model.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | SimpleOpenNI kinect;
4 | import saito.objloader.*;
5 |
6 | OBJModel model;
7 |
8 | void setup() {
9 | size(1028, 768, OPENGL);
10 |
11 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
12 | model.translateToCenter();
13 |
14 | kinect = new SimpleOpenNI(this);
15 | kinect.enableDepth();
16 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
17 | kinect.setMirror(true);
18 |
19 | fill(255, 0, 0);
20 | }
21 |
22 | PImage colorImage;
23 | boolean gotImage;
24 |
25 | void draw() {
26 | kinect.update();
27 | background(0);
28 |
29 | translate(width/2, height/2, 0);
30 | rotateX(radians(180));
31 |
32 | scale(0.9);
33 |
34 | IntVector userList = new IntVector();
35 | kinect.getUsers(userList);
36 | if (userList.size() > 0) {
37 | int userId = userList.get(0);
38 |
39 | if ( kinect.isTrackingSkeleton(userId)) {
40 | PVector position = new PVector();
41 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, position);
42 |
43 | PMatrix3D orientation = new PMatrix3D();
44 | float confidence = kinect.getJointOrientationSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, orientation);
45 |
46 | pushMatrix();
47 | translate(position.x, position.y, position.z);
48 | applyMatrix(orientation);
49 | model.draw();
50 | popMatrix();
51 | }
52 | }
53 | }
54 |
55 | // user-tracking callbacks!
56 | void onNewUser(int userId) {
57 | println("start pose detection");
58 | kinect.startPoseDetection("Psi", userId);
59 | }
60 |
61 | void onEndCalibration(int userId, boolean successful) {
62 | if (successful) {
63 | println(" User calibrated !!!");
64 | kinect.startTrackingSkeleton(userId);
65 | }
66 | else {
67 | println(" Failed to calibrate user !!!");
68 | kinect.startPoseDetection("Psi", userId);
69 | }
70 | }
71 |
72 | void onStartPose(String pose, int userId) {
73 | println("Started pose for user");
74 | kinect.stopPoseDetection(userId);
75 | kinect.requestCalibrationSkeleton(userId, true);
76 | }
77 |
78 |
--------------------------------------------------------------------------------
/chSK_orientation_from_vector/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_orientation_from_vector/.DS_Store
--------------------------------------------------------------------------------
/chSK_orientation_from_vector/chSK_orientation_from_vector.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 | SimpleOpenNI kinect;
4 | import saito.objloader.*;
5 |
6 | OBJModel model;
7 |
8 | void setup() {
9 | size(1028, 768, OPENGL);
10 |
11 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
12 | model.translateToCenter();
13 | // translate the model so its origin
14 | // is at its left side
15 | BoundingBox box = new BoundingBox(this, model);
16 | model.translate(box.getMin());
17 |
18 | kinect = new SimpleOpenNI(this);
19 | kinect.enableDepth();
20 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
21 | kinect.setMirror(true);
22 | }
23 |
24 | void draw() {
25 | kinect.update();
26 | background(255);
27 |
28 | translate(width/2, height/2, 0);
29 | rotateX(radians(180));
30 |
31 | IntVector userList = new IntVector();
32 | kinect.getUsers(userList);
33 | if (userList.size() > 0) {
34 | int userId = userList.get(0);
35 |
36 | if ( kinect.isTrackingSkeleton(userId)) {
37 | PVector leftHand = new PVector();
38 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, leftHand);
39 |
40 | PVector rightHand = new PVector();
41 | kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND, rightHand);
42 |
43 | // subtract right from left hand
44 | // to turn left into a vector representing
45 | // the difference between them
46 | leftHand.sub(rightHand);
47 |       // normalize the difference so it becomes
48 |       // a pure direction (unit vector)
49 |       leftHand.normalize();
50 | // model is rotated so "up" is the x-axis
51 | PVector modelOrientation = new PVector(1, 0, 0);
52 |       // the dot product of two unit vectors is cos(angle); the cross product gives the rotation axis
53 | float angle = acos(modelOrientation.dot(leftHand));
54 | PVector axis = modelOrientation.cross(leftHand);
55 |
56 | stroke(255, 0, 0);
57 | strokeWeight(5);
58 | drawSkeleton(userId);
59 |
60 | pushMatrix();
61 | lights();
62 | stroke(175);
63 | strokeWeight(1);
64 | fill(250);
65 | translate(rightHand.x, rightHand.y, rightHand.z);
66 | // rotate angle amount around axis
67 | rotate(angle, axis.x, axis.y, axis.z);
68 | model.draw();
69 | popMatrix();
70 | }
71 | }
72 | }
73 |
74 | void drawSkeleton(int userId) {
75 | drawLimb(userId, SimpleOpenNI.SKEL_HEAD, SimpleOpenNI.SKEL_NECK);
76 | drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_LEFT_SHOULDER);
77 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_LEFT_ELBOW);
78 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_ELBOW, SimpleOpenNI.SKEL_LEFT_HAND);
79 | drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
80 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
81 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, SimpleOpenNI.SKEL_RIGHT_HAND);
82 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
83 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
84 | drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_LEFT_HIP);
85 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HIP, SimpleOpenNI.SKEL_LEFT_KNEE);
86 | drawLimb(userId, SimpleOpenNI.SKEL_LEFT_KNEE, SimpleOpenNI.SKEL_LEFT_FOOT);
87 | drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_RIGHT_HIP);
88 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_RIGHT_KNEE);
89 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_KNEE, SimpleOpenNI.SKEL_RIGHT_FOOT);
90 | drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_LEFT_HIP);
91 | }
92 |
93 | void drawLimb(int userId, int jointType1, int jointType2)
94 | {
95 | PVector jointPos1 = new PVector();
96 | PVector jointPos2 = new PVector();
97 | float confidence;
98 |
99 | // draw the joint position
100 | confidence = kinect.getJointPositionSkeleton(userId, jointType1, jointPos1);
101 | confidence = kinect.getJointPositionSkeleton(userId, jointType2, jointPos2);
102 |
103 | line(jointPos1.x, jointPos1.y, jointPos1.z,
104 | jointPos2.x, jointPos2.y, jointPos2.z);
105 | }
106 |
107 | // user-tracking callbacks!
108 | void onNewUser(int userId) {
109 | println("start pose detection");
110 | kinect.startPoseDetection("Psi", userId);
111 | }
112 |
113 | void onEndCalibration(int userId, boolean successful) {
114 | if (successful) {
115 | println(" User calibrated !!!");
116 | kinect.startTrackingSkeleton(userId);
117 | }
118 | else {
119 | println(" Failed to calibrate user !!!");
120 | kinect.startPoseDetection("Psi", userId);
121 | }
122 | }
123 |
124 | void onStartPose(String pose, int userId) {
125 | println("Started pose for user");
126 | kinect.stopPoseDetection(userId);
127 | kinect.requestCalibrationSkeleton(userId, true);
128 | }
129 |
--------------------------------------------------------------------------------
/chSK_orientation_from_vector/screen-1731.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_orientation_from_vector/screen-1731.tif
--------------------------------------------------------------------------------
/chSK_scene_image/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_scene_image/.DS_Store
--------------------------------------------------------------------------------
/chSK_scene_image/chSK_scene_image.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 |
6 | boolean tracking = false;
7 | int userID;
8 | int[] userMap;
9 | // declare our background
10 | PImage backgroundImage;
11 |
12 | void setup() {
13 | size(640, 480, OPENGL);
14 |
15 | kinect = new SimpleOpenNI(this);
16 | kinect.enableDepth();
17 | // enable color image from the Kinect
18 | kinect.enableRGB();
19 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_NONE);
20 | // turn on depth-color alignment
21 | kinect.alternativeViewPointDepthToImage();
22 | // load the background image
23 | backgroundImage = loadImage("empire_state.jpg");
24 | }
25 |
26 | void draw() {
27 | // display the background image
28 | image(backgroundImage, 0, 0);
29 | kinect.update();
30 | if (tracking) {
31 | // get the Kinect color image
32 | PImage rgbImage = kinect.rgbImage();
33 | // prepare the color pixels
34 | rgbImage.loadPixels();
35 | loadPixels();
36 |
37 | userMap = kinect.getUsersPixels(SimpleOpenNI.USERS_ALL);
38 | for (int i =0; i < userMap.length; i++) {
39 | // if the pixel is part of the user
40 | if (userMap[i] != 0) {
41 | // set the sketch pixel to the color pixel
42 | pixels[i] = rgbImage.pixels[i];
43 | }
44 | }
45 | updatePixels();
46 | }
47 | }
48 |
49 | void onNewUser(int uID) {
50 | userID = uID;
51 | tracking = true;
52 | println("tracking");
53 | }
--------------------------------------------------------------------------------
/chSK_scene_image/empire_state.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_scene_image/empire_state.jpg
--------------------------------------------------------------------------------
/chSK_scene_image_basic/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_scene_image_basic/.DS_Store
--------------------------------------------------------------------------------
/chSK_scene_image_basic/chSK_scene_image_basic.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 |
6 | PImage userImage;
7 | int userID;
8 | int[] userMap;
9 |
10 | PImage rgbImage;
11 | void setup() {
12 | size(640, 480, OPENGL);
13 |
14 | kinect = new SimpleOpenNI(this);
15 | kinect.enableDepth();
16 | kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_NONE);
17 | }
18 |
19 | void draw() {
20 | background(0);
21 | kinect.update();
22 | // if we have detected any users
23 |   if (kinect.getNumberOfUsers() > 0) {
24 | // find out which pixels have users in them
25 | userMap = kinect.getUsersPixels(SimpleOpenNI.USERS_ALL);
26 | // populate the pixels array
27 | // from the sketch's current contents
28 | loadPixels();
29 | for (int i = 0; i < userMap.length; i++) {
30 | // if the current pixel is on a user
31 |       if (userMap[i] != 0) {
32 | // make it green
33 | pixels[i] = color(0, 255, 0);
34 | }
35 | }
36 | // display the changed pixel array
37 | updatePixels();
38 | }
39 | }
40 | void onNewUser(int uID) {
41 | userID = uID;
42 |   // no tracking flag is needed here; draw() checks getNumberOfUsers() directly
43 |   println("tracking user " + userID);
44 | }
45 |
46 |
--------------------------------------------------------------------------------
/chSK_scene_map/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/chSK_scene_map/.DS_Store
--------------------------------------------------------------------------------
/chSK_scene_map/chSK_scene_map.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 | int[] sceneMap;
6 | PImage depthImage;
7 |
8 | void setup() {
9 | size(640, 480, OPENGL);
10 | kinect = new SimpleOpenNI(this);
11 | kinect.enableDepth();
12 | // turn on access to the scene
13 | kinect.enableScene();
14 | }
15 |
16 | void draw() {
17 | background(0);
18 | kinect.update();
19 | image(kinect.depthImage(), 0, 0);
20 | loadPixels();
21 | // scene map is an array of ints
22 | // just like the user map
23 | sceneMap = kinect.sceneMap();
24 | for (int i =0; i < sceneMap.length; i++) {
25 | // each distinct value in the map
26 | // indicates a different object, wall, or person
27 | if(sceneMap[i] == 1){
28 | pixels[i] = color(0, 255, 0);
29 | }
30 | if(sceneMap[i] == 2){
31 | pixels[i] = color(255, 0, 0);
32 | }
33 | if(sceneMap[i] == 3){
34 | pixels[i] = color(0, 0, 255);
35 | }
36 | if(sceneMap[i] == 4){
37 | pixels[i] = color(255, 255, 0);
38 | }
39 | if(sceneMap[i] == 5){
40 | pixels[i] = color(0,255,255);
41 | }
42 | if(sceneMap[i] == 6){
43 | pixels[i] = color(255,0,255);
44 | }
45 | }
46 | updatePixels();
47 | }
48 |
49 | void keyPressed(){
50 | saveFrame("scene_image.png");
51 | }
52 |
--------------------------------------------------------------------------------
/chSK_scene_map_image/chSK_scene_map_image.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import SimpleOpenNI.*;
3 |
4 | SimpleOpenNI kinect;
5 |
6 | void setup() {
7 | size(640, 480, OPENGL);
8 | kinect = new SimpleOpenNI(this);
9 | kinect.enableDepth();
10 | // turn on access to the scene
11 | kinect.enableScene();
12 | }
13 |
14 | void draw() {
15 | background(0);
16 | kinect.update();
17 | // draw the scene image
18 | image(kinect.sceneImage(), 0, 0);
19 | }
20 |
--------------------------------------------------------------------------------
/chSK_translate_model_to_corner/chSK_translate_model_to_corner.pde:
--------------------------------------------------------------------------------
1 | import processing.opengl.*;
2 | import saito.objloader.*;
3 |
4 | // declare an OBJModel object
5 | OBJModel model;
6 | BoundingBox box;
7 |
8 | float rotateX;
9 | float rotateY;
10 |
11 | void setup() {
12 | size(640, 480, OPENGL);
13 |
14 | // load the model file
15 | // use triangles as the basic geometry
16 | model = new OBJModel(this, "kinect.obj", "relative", TRIANGLES);
17 |
18 | // tell the model to translate itself
19 | // to be centered at 0,0
20 | model.translateToCenter();
21 | box = new BoundingBox(this, model);
22 | model.translate(box.getMin());
23 |
24 | }
25 |
26 | void draw() {
27 | background(255);
28 |
29 | // turn on the lights
30 | lights();
31 |
32 | translate(width/2, height/2, 0);
33 |
34 | rotateX(rotateY);
35 | rotateY(rotateX);
36 |
37 | // tell the model to draw itself
38 | fill(100);
39 | noStroke();
40 | model.draw();
41 | stroke(255,0,0);
42 | noFill();
43 | box.draw();
44 | }
45 |
46 | void mouseDragged() {
47 | rotateX += (mouseX - pmouseX) * 0.01;
48 | rotateY -= (mouseY - pmouseY) * 0.01;
49 | }
50 |
--------------------------------------------------------------------------------
/ex01_basic_depth/ex01_basic_depth.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup()
5 | {
6 | size(640*2, 480);
7 | kinect = new SimpleOpenNI(this);
8 |
9 | kinect.enableDepth();
10 | kinect.enableRGB();
11 | }
12 |
13 | void draw()
14 | {
15 | kinect.update();
16 |
17 | image(kinect.depthImage(), 0, 0);
18 | image(kinect.rgbImage(), 640, 0);
19 | }
20 |
21 |
--------------------------------------------------------------------------------
/ex02_basic_depth_pimage/ex02_basic_depth_pimage.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup()
5 | {
6 | // double the width to display two images side by side
7 | size(640*2, 480);
8 | kinect = new SimpleOpenNI(this);
9 |
10 | kinect.enableDepth();
11 | kinect.enableRGB();
12 | }
13 |
14 | void draw()
15 | {
16 | kinect.update();
17 |
18 | PImage depthImage = kinect.depthImage();
19 | PImage rgbImage = kinect.rgbImage();
20 |
21 | image(depthImage, 0, 0);
22 | image(rgbImage, 640, 0);
23 | }
24 |
25 |
--------------------------------------------------------------------------------
/ex03_basic_depth_plus_mouseclick/ex03_basic_depth_plus_mouseclick.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup()
5 | {
6 | size(640*2, 480);
7 | kinect = new SimpleOpenNI(this);
8 |
9 | kinect.enableDepth();
10 | kinect.enableRGB();
11 | }
12 |
13 | void draw()
14 | {
15 | kinect.update();
16 |
17 | PImage depthImage = kinect.depthImage();
18 | PImage rgbImage = kinect.rgbImage();
19 |
20 | image(depthImage, 0, 0);
21 | image(rgbImage, 640, 0);
22 | }
23 |
24 | void mousePressed(){
25 | color c = get(mouseX, mouseY);
26 | println("r: " + red(c) + " g: " + green(c) + " b: " + blue(c));
27 | }
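28 | 
29 | // Note: get() reads the on-screen pixel color (a 0-255 gray over the depth image),
30 | // not a real distance; ex04/ex05 show how to read millimeter values with depthMap().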
--------------------------------------------------------------------------------
/ex04_full_resolution_depth/ex04_full_resolution_depth.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup()
5 | {
6 | size(640, 480);
7 | kinect = new SimpleOpenNI(this);
8 | kinect.enableDepth();
9 | }
10 |
11 | void draw()
12 | {
13 | kinect.update();
14 |
15 | PImage depthImage = kinect.depthImage();
16 | image(depthImage, 0, 0);
17 | }
18 |
19 | void mousePressed(){
20 | int[] depthValues = kinect.depthMap();
21 | int clickPosition = mouseX + (mouseY * 640);
22 | int clickedDepth = depthValues[clickPosition];
23 |
24 | float inches = clickedDepth / 25.4;
25 |
26 | println("inches: " + inches);
27 | }
28 |
29 |
--------------------------------------------------------------------------------
/ex05_real_world_measurement/ex05_real_world_measurement.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | void setup()
5 | {
6 | size(640, 480);
7 | kinect = new SimpleOpenNI(this);
8 |
9 | kinect.enableDepth();
10 | }
11 |
12 | void draw()
13 | {
14 | kinect.update();
15 |
16 | PImage depthImage = kinect.depthImage();
17 |
18 | image(depthImage, 0, 0);
19 | }
20 |
21 | void mousePressed(){
22 | int[] depthValues = kinect.depthMap();
23 | int clickPosition = mouseX + (mouseY * 640);
24 |
25 | int millimeters = depthValues[clickPosition];
26 | float inches = millimeters / 25.4;
27 |
28 | println("mm: " + millimeters + " in: " + inches);
29 | }
30 |
--------------------------------------------------------------------------------
/ex06_closest_pixel/ex06_closest_pixel.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | int closestValue;
5 | int closestX;
6 | int closestY;
7 |
8 | void setup()
9 | {
10 | size(640, 480);
11 | kinect = new SimpleOpenNI(this);
12 | kinect.enableDepth();
13 | }
14 |
15 | void draw()
16 | {
17 | closestValue = 8000;
18 |
19 | kinect.update();
20 |
21 | // get the depth array from the kinect
22 | int[] depthValues = kinect.depthMap();
23 |
24 | // for each row in the depth image
25 | for(int y = 0; y < 480; y++){
26 | // look at each pixel in the row
27 | for(int x = 0; x < 640; x++){
28 | // pull out the corresponding value from the depth array
29 | int i = x + y * 640;
30 | int currentDepthValue = depthValues[i];
31 |
32 | // if that pixel is the closest one we've seen so far
33 | if(currentDepthValue > 0 && currentDepthValue < closestValue){
34 | // save its value
35 | closestValue = currentDepthValue;
36 | // and save its position (both X and Y coordinates)
37 | closestX = x;
38 | closestY = y;
39 | }
40 | }
41 | }
42 |
43 | //draw the depth image on the screen
44 | image(kinect.depthImage(),0,0);
45 |
46 | // draw a red circle over it,
47 | // positioned at the X and Y coordinates
48 | // we saved of the closest pixel.
49 | fill(255,0,0);
50 | ellipse(closestX, closestY, 25, 25);
51 | }
52 |
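53 | // Note: depthMap() values are millimeters from the camera and 0 appears to mean
54 | // "no reading", which is why only values greater than 0 are considered for the minimum.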
--------------------------------------------------------------------------------
/ex07_closest_pixel_with_measurements/ex07_closest_pixel_with_measurements.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | int closestValue;
5 | int closestX;
6 | int closestY;
7 |
8 |
9 | void setup()
10 | {
11 | size(640, 480);
12 | kinect = new SimpleOpenNI(this);
13 | kinect.enableDepth();
14 | }
15 |
16 | void draw()
17 | {
18 |   closestValue = 8000;
19 | kinect.update();
20 |
21 | int[] depthValues = kinect.depthMap();
22 | for(int x = 0; x < 640; x++){
23 | for(int y = 0; y < 480; y++){
24 | int i = x + y * 640;
25 |
26 | int currentDepthValue = depthValues[i];
27 |
28 |       if(currentDepthValue > 0 && currentDepthValue < closestValue){
29 |         closestValue = currentDepthValue;
30 |         closestX = x;
31 |         closestY = y;
32 | }
33 | }
34 | }
35 |
36 | image(kinect.depthImage(),0,0);
37 |
38 |   float closestValueInInches = closestValue / 25.4;
39 | println("in: " + closestValueInInches);
40 |
41 | fill(255,0,0);
42 |   ellipse(closestX, closestY, 25, 25);
43 | }
44 |
45 |
--------------------------------------------------------------------------------
/ex07_closest_pixel_with_running_average/ex07_closest_pixel_with_running_average.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | // declare these here
5 | // so they persist over multiples
6 | // runs of draw()
7 | int closestX;
8 | int closestY;
9 |
10 | void setup()
11 | {
12 | size(640, 480);
13 | kinect = new SimpleOpenNI(this);
14 | kinect.enableDepth();
15 | }
16 |
17 | void draw()
18 | {
19 | // declare these within the draw loop
20 | // so they change every time
21 | int closestValue = 8000;
22 |   int currentX = 0; // initialized so the sketch compiles
23 |   int currentY = 0; // even when no pixel passes the depth test
24 |
25 | kinect.update();
26 |
27 | // get the depth array from the kinect
28 | int[] depthValues = kinect.depthMap();
29 |
30 | // for each row in the depth image
31 | for(int y = 0; y < 480; y++){
32 | // look at each pixel in the row
33 | for(int x = 0; x < 640; x++){
34 | // pull out the corresponding value from the depth array
35 | int i = x + y * 640;
36 | int currentDepthValue = depthValues[i];
37 |
38 | // if that pixel is the closest one we've seen so far
39 | if(currentDepthValue > 0 && currentDepthValue < closestValue){
40 | // save its value
41 | closestValue = currentDepthValue;
42 | // and save its position (both X and Y coordinates)
43 | currentX = x;
44 | currentY = y;
45 | }
46 | }
47 | }
48 |
49 |   // blend the previous closestX/closestY with the new
50 |   // currentX/currentY to smooth the tracked point
51 | closestX = (closestX + currentX) / 2;
52 | closestY = (closestY + currentY) / 2;
53 |
54 | //draw the depth image on the screen
55 | image(kinect.depthImage(),0,0);
56 |
57 | // draw a red circle over it,
58 | // positioned at the X and Y coordinates
59 |   // we saved for the closest pixel.
60 | fill(255,0,0);
61 | ellipse(closestX, closestY, 25, 25);
62 | }
63 |
64 |
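The two averaging lines weight the newest reading at 50 percent every frame. If that still jitters, one possible drop-in replacement (a sketch only, using the same variables as above) is an adjustable lerp() blend, the same idea ex09 uses:

// Possible replacement for the two averaging lines above.
// Smaller smoothing values are steadier but lag further behind the hand.
float smoothing = 0.3;
closestX = int(lerp(closestX, currentX, smoothing));
closestY = int(lerp(closestY, currentY, smoothing));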
--------------------------------------------------------------------------------
/ex08_basic_drawing/ex08_basic_drawing.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | int closestValue;
5 | int closestX;
6 | int closestY;
7 |
8 | // declare global variables for the
9 | // previous x and y coordinates
10 | int previousX;
11 | int previousY;
12 |
13 | void setup()
14 | {
15 | size(640, 480);
16 | kinect = new SimpleOpenNI(this);
17 | kinect.enableDepth();
18 | }
19 |
20 | void draw()
21 | {
22 | closestValue = 8000;
23 |
24 | kinect.update();
25 |
26 | int[] depthValues = kinect.depthMap();
27 |
28 | for(int y = 0; y < 480; y++){
29 | for(int x = 0; x < 640; x++){
30 | int i = x + y * 640;
31 | int currentDepthValue = depthValues[i];
32 |
33 | if(currentDepthValue > 0 && currentDepthValue < closestValue){
34 |
35 | closestValue = currentDepthValue;
36 | closestX = x;
37 | closestY = y;
38 | }
39 | }
40 | }
41 |
42 | image(kinect.depthImage(),0,0);
43 |
44 | // set the line drawing color to red
45 | stroke(255,0,0);
46 |
47 | // draw a line from the previous point
48 | // to the new closest one
49 | line(previousX, previousY, closestX, closestY);
50 |
51 | // save the closest point
52 | // as the new previous one
53 | previousX = closestX;
54 | previousY = closestY;
55 |
56 |
57 | }
58 |
59 | void mousePressed(){
60 | background(0);
61 | }
62 |
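Because draw() repaints the full depth image every frame, only the newest line segment stays visible. One way to keep the whole trail, sketched here as an illustration rather than part of the original example, is to accumulate the segments in an offscreen PGraphics buffer and composite it over the depth image:

// Illustrative: draw the trail into an offscreen buffer so it survives
// the depth image being redrawn every frame.
PGraphics trail;   // create once in setup(): trail = createGraphics(640, 480);

void addSegment(int x1, int y1, int x2, int y2) {
  trail.beginDraw();
  trail.stroke(255, 0, 0);
  trail.line(x1, y1, x2, y2);
  trail.endDraw();
}

void clearTrail() {
  trail.beginDraw();
  trail.clear();   // back to fully transparent
  trail.endDraw();
}

// In draw(), after image(kinect.depthImage(), 0, 0):
//   addSegment(previousX, previousY, closestX, closestY);
//   image(trail, 0, 0);
// and mousePressed() could call clearTrail() instead of background(0).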
--------------------------------------------------------------------------------
/ex09_advanced_drawing/drawing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/ex09_advanced_drawing/drawing.png
--------------------------------------------------------------------------------
/ex09_advanced_drawing/ex09_advanced_drawing.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | int closestValue;
5 | int closestX;
6 | int closestY;
7 |
8 | float lastX;
9 | float lastY;
10 |
11 | void setup()
12 | {
13 | size(640, 480);
14 | kinect = new SimpleOpenNI(this);
15 | kinect.enableDepth();
16 |
17 | // start out with a black background
18 | background(0);
19 | }
20 |
21 | void draw()
22 | {
23 | closestValue = 8000;
24 |
25 | kinect.update();
26 |
27 | int[] depthValues = kinect.depthMap();
28 |
29 | for(int y = 0; y < 480; y++){
30 | for(int x = 0; x < 640; x++){
31 |
32 | // reverse x by moving in from
33 | // the right side of the image
34 | int reversedX = 640-x-1;
35 |
36 | // use reversedX to calculate
37 | // the array index
38 | int i = reversedX + y * 640;
39 | int currentDepthValue = depthValues[i];
40 |
41 | // only look for the closestValue within a range
42 | // 610 (or 2 feet) is the minimum
43 | // 1525 (or 5 feet) is the maximum
44 | if(currentDepthValue > 610 && currentDepthValue < 1525 && currentDepthValue < closestValue){
45 |
46 | closestValue = currentDepthValue;
47 | closestX = x;
48 | closestY = y;
49 | }
50 | }
51 | }
52 |
53 | // "linear interpolation", i.e.
54 | // smooth transition between last point
55 | // and new closest point
56 | float interpolatedX = lerp(lastX, closestX, 0.3f);
57 | float interpolatedY = lerp(lastY, closestY, 0.3f);
58 |
59 | stroke(255,0,0);
60 |
61 | // make a thicker line, which looks nicer
62 | strokeWeight(3);
63 |
64 | line(lastX, lastY, interpolatedX, interpolatedY);
65 | lastX = interpolatedX;
66 | lastY = interpolatedY;
67 |
68 | }
69 |
70 | void mousePressed(){
71 | // save image to a file
72 | // then clear it on the screen
73 | save("drawing.png");
74 | background(0);
75 | }
76 |
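The 610 and 1525 millimeter thresholds above are just 2 feet and 5 feet converted to millimeters. A tiny helper (illustrative; feetToMm is not a library call) makes that conversion explicit, so the drawing range is easier to retune:

// Illustrative: convert a distance in feet to the millimeters
// used by depthMap() (12 inches per foot, 25.4 mm per inch).
int feetToMm(float feet) {
  return round(feet * 12 * 25.4);
}

// e.g. int minDepth = feetToMm(2);  // 610 mm, the minimum used above
//      int maxDepth = feetToMm(5);  // 1524 mm, close to the 1525 maximum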
--------------------------------------------------------------------------------
/ex10_basic_minority_report/data/image1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atduskgreg/Making-Things-See-Examples/80a6519a464097bc2bf4da1a893c6b52d81f739e/ex10_basic_minority_report/data/image1.jpg
--------------------------------------------------------------------------------
/ex10_basic_minority_report/ex10_basic_minority_report.pde:
--------------------------------------------------------------------------------
1 | import SimpleOpenNI.*;
2 | SimpleOpenNI kinect;
3 |
4 | int closestValue;
5 | int closestX;
6 | int closestY;
7 |
8 | float lastX;
9 | float lastY;
10 |
11 | // declare x-y coordinates
12 | // for the image
13 | float image1X;
14 | float image1Y;
15 | // declare a boolean to
16 | // store whether or not the
17 | // image is moving
18 | boolean imageMoving;
19 | // declare a variable
20 | // to store the image
21 | PImage image1;
22 |
23 | void setup()
24 | {
25 | size(640, 480);
26 | kinect = new SimpleOpenNI(this);
27 | kinect.enableDepth();
28 |
29 | // start the image out moving
30 | // so mouse press will drop it
31 | imageMoving = true;
32 |
33 | // load the image from a file
34 | image1 = loadImage("image1.jpg");
35 |
36 | background(0);
37 | }
38 |
39 | void draw()
40 | {
41 | closestValue = 8000;
42 |
43 | kinect.update();
44 |
45 | int[] depthValues = kinect.depthMap();
46 |
47 | for(int y = 0; y < 480; y++){
48 | for(int x = 0; x < 640; x++){
49 |
50 | int reversedX = 640-x-1;
51 | int i = reversedX + y * 640;
52 | int currentDepthValue = depthValues[i];
53 |
54 | if(currentDepthValue > 610 && currentDepthValue < 1525 && currentDepthValue < closestValue){
55 |
56 | closestValue = currentDepthValue;
57 | closestX = x;
58 | closestY = y;
59 | }
60 | }
61 | }
62 |
63 | float interpolatedX = lerp(lastX, closestX, 0.3);
64 | float interpolatedY = lerp(lastY, closestY, 0.3);
65 |
66 | // clear the previous drawing
67 | background(0);
68 |
69 | // only update image position
70 | // if image is in moving state
71 | if(imageMoving){
72 | image1X = interpolatedX;
73 | image1Y = interpolatedY;
74 | }
75 |
76 | //draw the image on the screen
77 | image(image1,image1X,image1Y);
78 |
79 | lastX = interpolatedX;
80 | lastY = interpolatedY;
81 | }
82 |
83 | void mousePressed(){
84 | // if the image is moving, drop it
85 | // if the image is dropped, pick it up
86 | imageMoving = !imageMoving;
87 | }
88 |
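As written, any mouse press toggles the grab regardless of where the tracked point is. A small hit test along these lines (illustrative only; it reuses the sketch's image1, image1X, and image1Y and assumes the default imageMode(CORNER)) would limit grabbing to when the point is actually over the image:

// Illustrative: is the tracked point currently over the image?
boolean overImage(float pointX, float pointY) {
  return pointX >= image1X && pointX <= image1X + image1.width &&
         pointY >= image1Y && pointY <= image1Y + image1.height;
}

// In mousePressed():
//   if (overImage(lastX, lastY)) {
//     imageMoving = !imageMoving;
//   }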
--------------------------------------------------------------------------------