├── openframeworks ├── FaceOSCRawReceiver │ ├── bin │ │ └── data │ │ │ └── .gitkeep │ ├── addons.make │ └── src │ │ ├── main.cpp │ │ ├── ofApp.h │ │ └── ofApp.cpp ├── FaceOSCReceiver │ ├── addons.make │ ├── bin │ │ └── data │ │ │ └── .gitignore │ ├── src │ │ ├── main.cpp │ │ ├── ofApp.h │ │ ├── Face.h │ │ ├── Face.cpp │ │ └── ofApp.cpp │ └── config.make └── .gitignore ├── .gitignore ├── processing ├── FaceOSCPose3D │ └── FaceOSCPose3D.pde ├── FaceOSCSmiley │ ├── FaceOSCSmiley.pde │ └── SmartRobot.pde ├── FaceOSCReceiverClass │ ├── FaceOSCReceiverClass.pde │ └── Face.pde ├── FaceOSCSine │ └── FaceOSCSine.pde ├── FaceOSCReceiverGrapher │ ├── FaceOSCReceiverGrapher.pde │ ├── Face.pde │ └── Graph.pde ├── FaceOSCCursor │ └── FaceOSCCursor.pde ├── FaceOSCSyphon │ ├── FaceOSCSyphon.pde │ └── Face.pde ├── FaceOSCRawReceiver │ └── FaceOSCRawReceiver.pde └── FaceOSCReceiver │ └── FaceOSCReceiver.pde ├── supercollider └── faceOSCReceiver.scd ├── puredata └── FaceOSCReceiver.pd ├── README.markdown └── max └── FaceOSCReceiver.maxpat /openframeworks/FaceOSCRawReceiver/bin/data/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/addons.make: -------------------------------------------------------------------------------- 1 | ofxOsc 2 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCRawReceiver/addons.make: -------------------------------------------------------------------------------- 1 | ofxOsc 2 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/bin/data/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore everything in here apart from the .gitignore file 2 | * 3 | !.gitignore -------------------------------------------------------------------------------- /openframeworks/.gitignore: -------------------------------------------------------------------------------- 1 | # OSX 2 | *.app 3 | .DS_Store 4 | *.xcodeproj 5 | *.xcconfig 6 | openFrameworks-Info.plist 7 | 8 | # Makefiles 9 | Makefile 10 | config.make 11 | 12 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/src/main.cpp: -------------------------------------------------------------------------------- 1 | #include "ofMain.h" 2 | #include "ofApp.h" 3 | 4 | int main(){ 5 | ofSetupOpenGL(640, 480, OF_WINDOW); 6 | ofRunApp(new ofApp()); 7 | } 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # xcode 3 | *.mode1v3 4 | *.pbxuser 5 | *.perspectivev3 6 | *xcuserdata 7 | *xcshareddata 8 | build 9 | *.app 10 | *.a 11 | 12 | # OSX 13 | .DS_Store 14 | 15 | # codeblocks 16 | *.layout 17 | example/obj 18 | libs 19 | 20 | # Win 21 | *.exe 22 | *.dll 23 | *.vcxproj.user 24 | *.exp 25 | *.ilk 26 | *.lib 27 | *.pdb 28 | obj 29 | *.sdf 30 | *.suo 31 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCRawReceiver/src/main.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving raw face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2017 Dan 
Wilcox danomatika.com 6 | // for the EDP Creative Coding class @ the University of Denver 7 | // 8 | // adapted from Golan Levin's FaceOSCRawReceiver Processing template 9 | // 10 | #include "ofMain.h" 11 | #include "ofApp.h" 12 | 13 | int main(){ 14 | ofSetupOpenGL(640, 480, OF_WINDOW); 15 | ofRunApp(new ofApp()); 16 | } 17 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCRawReceiver/src/ofApp.h: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving raw face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2017 Dan Wilcox danomatika.com 6 | // for the EDP Creative Coding class @ the University of Denver 7 | // 8 | // adapted from Golan Levin's FaceOSCRawReceiver Processing template 9 | // 10 | #pragma once 11 | 12 | #include "ofMain.h" 13 | #include "ofxOsc.h" 14 | 15 | class ofApp : public ofBaseApp{ 16 | 17 | public: 18 | void setup(); 19 | void update(); 20 | void draw(); 21 | 22 | void keyPressed(int key); 23 | 24 | void drawFacePoints(); 25 | void drawFacePolygons(); 26 | 27 | ofxOscReceiver receiver; 28 | 29 | bool found = false; 30 | vector points; 31 | 32 | int highlighted = 0; // which point is selected 33 | }; 34 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/config.make: -------------------------------------------------------------------------------- 1 | # add custom variables to this file 2 | 3 | # OF_ROOT allows to move projects outside apps/* just set this variable to the 4 | # absoulte path to the OF root folder 5 | 6 | OF_ROOT = ../../../.. 7 | 8 | 9 | # USER_CFLAGS allows to pass custom flags to the compiler 10 | # for example search paths like: 11 | # USER_CFLAGS = -I src/objects 12 | 13 | USER_CFLAGS = 14 | 15 | 16 | # USER_LDFLAGS allows to pass custom flags to the linker 17 | # for example libraries like: 18 | # USER_LD_FLAGS = libs/libawesomelib.a 19 | 20 | USER_LDFLAGS = 21 | 22 | 23 | # use this to add system libraries for example: 24 | # USER_LIBS = -lpango 25 | 26 | USER_LIBS = 27 | 28 | 29 | # change this to add different compiler optimizations to your project 30 | 31 | USER_COMPILER_OPTIMIZATION = -march=native -mtune=native -Os 32 | 33 | 34 | EXCLUDE_FROM_SOURCE="bin,.xcodeproj,obj" 35 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/src/ofApp.h: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2012 Dan Wilcox danomatika.com 6 | // for the IACD class at the CMU School of Art 7 | // 8 | // adapted from Greg Borenstein's Processing example 9 | // https://gist.github.com/1603230 10 | // 11 | #pragma once 12 | 13 | #include "ofMain.h" 14 | #include "ofxOsc.h" 15 | 16 | #include "Face.h" 17 | 18 | class ofApp : public ofBaseApp { 19 | 20 | public: 21 | 22 | void setup(); 23 | void update(); 24 | void draw(); 25 | 26 | void keyPressed(int key); 27 | void keyReleased(int key); 28 | void mouseMoved(int x, int y); 29 | void mouseDragged(int x, int y, int button); 30 | void mousePressed(int x, int y, int button); 31 | void mouseReleased(int x, int y, int button); 32 | void windowResized(int w, int h); 33 | void dragEvent(ofDragInfo 
dragInfo); 34 | void gotMessage(ofMessage msg); 35 | 36 | ofxOscReceiver receiver; 37 | Face face; 38 | }; 39 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/src/Face.h: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2012 Dan Wilcox danomatika.com 6 | // for the IACD class at the CMU School of Art 7 | // 8 | #pragma once 9 | 10 | #include "ofMain.h" 11 | 12 | class ofxOscMessage; 13 | 14 | // a single tracked face from FaceOSC 15 | class Face { 16 | 17 | public: 18 | 19 | int found = 0; 20 | 21 | // pose 22 | float poseScale = 0; 23 | ofVec2f posePosition; // FaceOSC win size: 640x680 24 | ofVec3f poseOrientation; 25 | 26 | // gesture 27 | float mouthWidth = 0; 28 | float mouthHeight = 0; 29 | float eyeLeft = 0; 30 | float eyeRight = 0; 31 | float eyebrowLeft = 0; 32 | float eyebrowRight = 0; 33 | float jaw = 0; 34 | float nostrils = 0; 35 | 36 | Face() {} 37 | 38 | // parse an OSC message from FaceOSC 39 | // returns true if a message was handled 40 | bool parseOSC(ofxOscMessage& m); 41 | 42 | // get the current face values as a string (includes end lines) 43 | string toString(); 44 | }; 45 | -------------------------------------------------------------------------------- /processing/FaceOSCPose3D/FaceOSCPose3D.pde: -------------------------------------------------------------------------------- 1 | // 3D Pose receiver for FaceOSC 2 | // Golan Levin, 2012-17 3 | 4 | import oscP5.*; 5 | OscP5 oscP5; 6 | 7 | int found; // global variable, indicates if a face is found 8 | PVector poseOrientation = new PVector(); // stores an (x,y,z) 9 | 10 | //---------------------------------- 11 | void setup() { 12 | size(640, 480, OPENGL); 13 | oscP5 = new OscP5(this, 8338); 14 | oscP5.plug(this, "found", "/found"); 15 | oscP5.plug(this, "poseOrientation", "/pose/orientation"); 16 | } 17 | 18 | //---------------------------------- 19 | void draw() { 20 | background (180); 21 | strokeWeight (3); 22 | noFill(); 23 | 24 | if (found != 0) { 25 | pushMatrix(); 26 | translate (width/2, height/2, 0); 27 | rotateY (0 - poseOrientation.y); 28 | rotateX (0 - poseOrientation.x); 29 | rotateZ ( poseOrientation.z); 30 | box (200, 250, 200); 31 | popMatrix(); 32 | } 33 | } 34 | 35 | //---------------------------------- 36 | // Event handlers for receiving FaceOSC data 37 | public void found (int i) { found = i; } 38 | public void poseOrientation(float x, float y, float z) { 39 | poseOrientation.set(x, y, z); 40 | } -------------------------------------------------------------------------------- /processing/FaceOSCSmiley/FaceOSCSmiley.pde: -------------------------------------------------------------------------------- 1 | // 2 | // a bar graph for overall smiliness 3 | // 4 | // derived from the FaceOSCReceiver demo 5 | 6 | import oscP5.*; 7 | OscP5 oscP5; 8 | 9 | SmartRobot robot; 10 | 11 | int found; 12 | float smileThreshold = 16; 13 | float mouthWidth, previousMouthWidth; 14 | 15 | PFont font; 16 | 17 | void setup() { 18 | size(400, 64); 19 | frameRate(30); 20 | oscP5 = new OscP5(this, 8338); 21 | oscP5.plug(this, "found", "/found"); 22 | oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width"); 23 | try { 24 | robot = new SmartRobot(); 25 | } catch (AWTException e) { 26 | } 27 | font = createFont("Helvetica", 64); 28 | textFont(font); 29 | 
textAlign(LEFT, TOP); 30 | } 31 | 32 | void draw() { 33 | background(255); 34 | if (found > 0) { 35 | noStroke(); 36 | fill(mouthWidth > smileThreshold ? color(255, 0, 0) : 0); 37 | float drawWidth = map(mouthWidth, 10, 25, 0, width); 38 | rect(0, 0, drawWidth, 64); 39 | text(nf(mouthWidth, 0, 1), drawWidth + 10, 0); 40 | if (previousMouthWidth < smileThreshold && mouthWidth > smileThreshold) { 41 | robot.type(":)\n"); 42 | } 43 | previousMouthWidth = mouthWidth; 44 | } 45 | } 46 | 47 | public void found(int i) { 48 | found = i; 49 | } 50 | 51 | public void mouthWidthReceived(float w) { 52 | mouthWidth = w; 53 | } 54 | 55 | // all other OSC messages end up here 56 | void oscEvent(OscMessage m) { 57 | if (m.isPlugged() == false) { 58 | } 59 | } 60 | 61 | -------------------------------------------------------------------------------- /processing/FaceOSCReceiverClass/FaceOSCReceiverClass.pde: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker 4 | // 5 | // this example includes a class to abstract the Face data 6 | // 7 | // 2012 Dan Wilcox danomatika.com 8 | // for the IACD Spring 2012 class at the CMU School of Art 9 | // 10 | // adapted from from Greg Borenstein's 2011 example 11 | // http://www.gregborenstein.com/ 12 | // https://gist.github.com/1603230 13 | // 14 | import oscP5.*; 15 | OscP5 oscP5; 16 | 17 | // our FaceOSC tracked face dat 18 | Face face = new Face(); 19 | 20 | void setup() { 21 | size(640, 480); 22 | frameRate(30); 23 | 24 | oscP5 = new OscP5(this, 8338); 25 | } 26 | 27 | void draw() { 28 | background(255); 29 | stroke(0); 30 | 31 | if(face.found > 0) { 32 | translate(face.posePosition.x, face.posePosition.y); 33 | scale(face.poseScale); 34 | noFill(); 35 | ellipse(-20, face.eyeLeft * -9, 20, 7); 36 | ellipse(20, face.eyeRight * -9, 20, 7); 37 | ellipse(0, 20, face.mouthWidth* 3, face.mouthHeight * 3); 38 | ellipse(-5, face.nostrils * -1, 7, 3); 39 | ellipse(5, face.nostrils * -1, 7, 3); 40 | rectMode(CENTER); 41 | fill(0); 42 | rect(-20, face.eyebrowLeft * -5, 25, 5); 43 | rect(20, face.eyebrowRight * -5, 25, 5); 44 | 45 | print(face.toString()); 46 | } 47 | } 48 | 49 | // OSC CALLBACK FUNCTIONS 50 | 51 | void oscEvent(OscMessage m) { 52 | face.parseOSC(m); 53 | } 54 | -------------------------------------------------------------------------------- /processing/FaceOSCSine/FaceOSCSine.pde: -------------------------------------------------------------------------------- 1 | // derived from the FaceOSCReceiver demo and minim's SineWaveSignal example 2 | 3 | import oscP5.*; 4 | OscP5 oscP5; 5 | 6 | import ddf.minim.*; 7 | import ddf.minim.signals.*; 8 | Minim minim; 9 | AudioOutput out; 10 | SineWave sine; 11 | 12 | PFont font; 13 | 14 | void setup() { 15 | size(512, 200); 16 | frameRate(30); 17 | oscP5 = new OscP5(this, 8338); 18 | oscP5.plug(this, "faceScale", "/pose/scale"); 19 | oscP5.plug(this, "posePosition", "/pose/position"); 20 | 21 | 22 | minim = new Minim(this); 23 | out = minim.getLineOut(Minim.STEREO); 24 | sine = new SineWave(440, 0.5, out.sampleRate()); 25 | sine.portamento(100); 26 | out.addSignal(sine); 27 | } 28 | 29 | void draw() { 30 | background(255); 31 | stroke(0); 32 | for(int i = 0; i < out.bufferSize() - 1; i++) { 33 | float x1 = map(i, 0, out.bufferSize(), 0, width); 34 | float x2 = map(i+1, 0, out.bufferSize(), 0, width); 35 | line(x1, 50 + out.left.get(i)*50, x2, 50 + 
out.left.get(i+1)*50); 36 | line(x1, 150 + out.right.get(i)*50, x2, 150 + out.right.get(i+1)*50); 37 | } 38 | } 39 | 40 | public void faceScale(float x) { 41 | float freq = map(x, 4, 6, 60, 1500); 42 | sine.setFreq(freq); 43 | } 44 | 45 | public void posePosition(float x, float y) { 46 | float pan = map(x, 0, 640, -1, +1); 47 | sine.setPan(pan); 48 | } 49 | 50 | // all other OSC messages end up here 51 | void oscEvent(OscMessage m) { 52 | if (m.isPlugged() == false) { 53 | } 54 | } 55 | 56 | void stop() { 57 | out.close(); 58 | minim.stop(); 59 | super.stop(); 60 | } 61 | -------------------------------------------------------------------------------- /processing/FaceOSCReceiverGrapher/FaceOSCReceiverGrapher.pde: -------------------------------------------------------------------------------- 1 | // 2 | // Can you pass the a face polygraph? 3 | // 4 | // copied and modified from the FaceOSCReceiverClass 5 | 6 | import oscP5.*; 7 | OscP5 oscP5; 8 | 9 | // our FaceOSC tracked face dat 10 | Face face = new Face(); 11 | PFont font; 12 | ArrayList graphs; 13 | int totalGraphs; 14 | 15 | void setup() { 16 | size(640, 480); 17 | frameRate(30); 18 | 19 | font = createFont("Helvetica", 10, false); 20 | textFont(font); 21 | 22 | oscP5 = new OscP5(this, 8338); 23 | 24 | totalGraphs = 12; 25 | reset(); 26 | } 27 | 28 | void reset() { 29 | graphs = new ArrayList(); 30 | graphs.add(new Graph("poseScale")); 31 | graphs.add(new Graph("mouthWidth")); 32 | graphs.add(new Graph("mouthHeight")); 33 | graphs.add(new Graph("eyeLeft/Right")); 34 | graphs.add(new Graph("eyebrowLeft/Right")); 35 | graphs.add(new Graph("jaw")); 36 | graphs.add(new Graph("nostrils")); 37 | graphs.add(new Graph("posePosition.x")); 38 | graphs.add(new Graph("posePosition.y")); 39 | graphs.add(new Graph("poseOrientation.x")); 40 | graphs.add(new Graph("poseOrientation.y")); 41 | graphs.add(new Graph("poseOrientation.z")); 42 | } 43 | 44 | void draw() { 45 | if(face.found > 0) { 46 | graphs.get(0).add(face.poseScale); 47 | graphs.get(1).add(face.mouthWidth); 48 | graphs.get(2).add(face.mouthHeight); 49 | graphs.get(3).add(face.eyeLeft + face.eyeRight); 50 | graphs.get(4).add(face.eyebrowLeft + face.eyebrowRight); 51 | graphs.get(5).add(face.jaw); 52 | graphs.get(6).add(face.nostrils); 53 | graphs.get(7).add(face.posePosition.x); 54 | graphs.get(8).add(face.posePosition.y); 55 | graphs.get(9).add(face.poseOrientation.x); 56 | graphs.get(10).add(face.poseOrientation.y); 57 | graphs.get(11).add(face.poseOrientation.z); 58 | } 59 | 60 | background(255); 61 | for(int i = 0; i < totalGraphs; i++) { 62 | graphs.get(i).draw(width, height / totalGraphs); 63 | translate(0, height / totalGraphs); 64 | } 65 | } 66 | 67 | // OSC CALLBACK FUNCTIONS 68 | 69 | void oscEvent(OscMessage m) { 70 | face.parseOSC(m); 71 | } 72 | -------------------------------------------------------------------------------- /processing/FaceOSCCursor/FaceOSCCursor.pde: -------------------------------------------------------------------------------- 1 | // 2 | // move the mouse cursor with your face 3 | // angle your face left, right, up, & down 4 | // 5 | // Hit escape to exit. If another app is becomes active, 6 | // use Alt-Tab/Cmd-Tab to select the running Processing Java app 7 | // and kill it via the keyboard. 
8 | // 9 | // derived from the FaceOSCReceiver demo 10 | 11 | import oscP5.*; 12 | OscP5 oscP5; 13 | 14 | import java.awt.*; 15 | import java.awt.event.*; 16 | Robot robot; 17 | 18 | int found; 19 | float speed = 100; 20 | PVector poseOrientation; 21 | PVector cursorPosition; 22 | 23 | void setup() { 24 | // 1.5+ screen.width 25 | // 2.05 screenWidth 26 | // 2.06 displayWidth 27 | size(displayWidth, displayHeight); 28 | frameRate(30); 29 | oscP5 = new OscP5(this, 8338); 30 | oscP5.plug(this, "found", "/found"); 31 | oscP5.plug(this, "poseOrientation", "/pose/orientation"); 32 | oscP5.plug(this, "mouthWidth", "/gesture/mouth/width"); 33 | try { 34 | robot = new Robot(); 35 | } catch (AWTException e) { 36 | } 37 | poseOrientation = new PVector(); 38 | cursorPosition = new PVector(); 39 | } 40 | 41 | void draw() { 42 | background(255); 43 | if (found > 0) { 44 | cursorPosition.x += speed * poseOrientation.y; 45 | cursorPosition.y += speed * poseOrientation.x; 46 | cursorPosition.x = constrain(cursorPosition.x, 0, screen.width); 47 | cursorPosition.y = constrain(cursorPosition.y, 0, screen.height); 48 | robot.mouseMove((int) cursorPosition.x, (int) cursorPosition.y); 49 | ellipseMode(CENTER); 50 | noStroke(); 51 | fill(0); 52 | ellipse( 53 | map(cursorPosition.x, 0, screen.width, 0, width), 54 | map(cursorPosition.y, 0, screen.height, 0, height), 55 | 10, 10); 56 | } 57 | } 58 | 59 | public void found(int i) { 60 | found = i; 61 | } 62 | 63 | public void poseOrientation(float x, float y, float z) { 64 | println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z); 65 | poseOrientation.set(x, y, z); 66 | } 67 | 68 | public void mouthWidth(float w) { 69 | println(w); 70 | if(w > 16) { 71 | robot.mousePress(InputEvent.BUTTON1_MASK); 72 | } else { 73 | robot.mouseRelease(InputEvent.BUTTON1_MASK); 74 | } 75 | } 76 | 77 | // all other OSC messages end up here 78 | void oscEvent(OscMessage m) { 79 | if (m.isPlugged() == false) { 80 | } 81 | } 82 | 83 | -------------------------------------------------------------------------------- /processing/FaceOSCSyphon/FaceOSCSyphon.pde: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker 4 | // 5 | // This example is similar to FaceOSCReceiverClass and utilizes Syphon to grab 6 | // the camera stream from FaceOSC+Syphon. Syphon is only available for Mac OSX. 
7 | // 8 | // You will need Processing 3.0, a Mac, and 9 | // - FaceOSC+Syphon: https://github.com/kylemcdonald/ofxFaceTracker/downloads 10 | // - Syphon for Processing: https://github.com/Syphon/Processing 11 | // (install via SKetch->Import Library->Add Library) 12 | // 13 | // 2013 Dan Wilcox danomatika.com 14 | // for the IACD Spring 2013 class at the CMU School of Art 15 | // 2016 updated for Processing 3 16 | // 17 | // adapted from from the Syphon ReceiveFrames example 18 | // 19 | 20 | import codeanticode.syphon.*; 21 | import oscP5.*; 22 | 23 | // for Syphon 24 | PImage frame; 25 | SyphonClient client; 26 | 27 | // for OSC 28 | OscP5 oscP5; 29 | 30 | // our FaceOSC tracked face data 31 | Face face = new Face(); 32 | 33 | void settings () { 34 | size(720, 480, P3D); 35 | PJOGL.profile = 1; 36 | } 37 | 38 | void setup() { 39 | println("Available Syphon servers:"); 40 | println(SyphonClient.listServers()); 41 | 42 | // create syhpon client to receive frames from FaceOSC 43 | client = new SyphonClient(this, "FaceOSC"); 44 | 45 | // stat listening on OSC 46 | oscP5 = new OscP5(this, 8338); 47 | } 48 | 49 | public void draw() { 50 | background(255); 51 | 52 | // grab syphon frame 53 | if(client.newFrame()) { 54 | frame = client.getImage(frame, false); 55 | } 56 | if(frame != null) { 57 | image(frame, 0, 0, width, height); 58 | } 59 | 60 | // draw face 61 | if(face.found > 0) { 62 | stroke(255, 100, 100); 63 | strokeWeight(4); 64 | translate(face.posePosition.x, face.posePosition.y); 65 | scale(face.poseScale); 66 | noFill(); 67 | ellipse(-20, face.eyeLeft * -9, 20, 7); 68 | ellipse(20, face.eyeRight * -9, 20, 7); 69 | ellipse(0, 20, face.mouthWidth* 3, face.mouthHeight * 3); 70 | ellipse(-5, face.nostrils * -1, 7, 3); 71 | ellipse(5, face.nostrils * -1, 7, 3); 72 | rectMode(CENTER); 73 | fill(0); 74 | rect(-20, face.eyebrowLeft * -5, 25, 5); 75 | rect(20, face.eyebrowRight * -5, 25, 5); 76 | 77 | //print(face.toString()); 78 | } 79 | } 80 | 81 | // OSC CALLBACK FUNCTIONS 82 | 83 | void oscEvent(OscMessage m) { 84 | face.parseOSC(m); 85 | } -------------------------------------------------------------------------------- /supercollider/faceOSCReceiver.scd: -------------------------------------------------------------------------------- 1 | // a template for receiving face tracking osc messages from 2 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 3 | // 4 | // 2013 Arthur Carabott arthurcarabott.com 5 | // 6 | 7 | ( 8 | //GUI for value posting 9 | ~faceKeys = [ 10 | '/found', 11 | '/gesture/eye/left', 12 | '/gesture/eye/right', 13 | '/gesture/eyebrow/left', 14 | '/gesture/eyebrow/right', 15 | '/gesture/jaw', 16 | '/gesture/mouth/height', 17 | '/gesture/mouth/width', 18 | '/gesture/nostrils', 19 | '/pose/orientation', 20 | '/pose/position', 21 | '/pose/scale', 22 | '/raw' 23 | ]; 24 | 25 | ~facePost = (); 26 | ~faceKeys.do {|key, i| 27 | ~facePost[key] = false; 28 | OSCdef((key ++ "_post").asSymbol, {|msg, time, addr, recvPort| 29 | if(~facePost[key]) { 30 | msg.postln; 31 | }; 32 | }, key); 33 | }; 34 | 35 | ~facePostWindow = Window("FaceOSC", Rect(150, 400, 210, 580)).front; 36 | ~facePostWindow.view.addFlowLayout; 37 | ~facePost.keys.asArray.sort.do {|item, i| 38 | Button(~facePostWindow, 200@40) 39 | .states_([ 40 | [item.asString, Color.black, Color.white], 41 | [item.asString, Color.black, Color.green] 42 | ]) 43 | .action_({|button| 44 | ~facePost[item] = ~facePost[item].not; 45 | }); 46 | }; 47 | ) 48 | 49 | //OSCdefs for your actions 50 | 
OSCdef('/found', {|msg, time, addr, recvPort| 51 | 52 | }, '/found'); 53 | 54 | OSCdef('/gesture/eye/left', {|msg, time, addr, recvPort| 55 | 56 | }, '/gesture/eye/left'); 57 | 58 | OSCdef('/gesture/eye/right', {|msg, time, addr, recvPort| 59 | 60 | }, '/gesture/eye/right'); 61 | 62 | OSCdef('/gesture/eyebrow/left', {|msg, time, addr, recvPort| 63 | 64 | }, '/gesture/eyebrow/left'); 65 | 66 | OSCdef('/gesture/eyebrow/right', {|msg, time, addr, recvPort| 67 | 68 | }, '/gesture/eyebrow/right'); 69 | 70 | OSCdef('/gesture/jaw', {|msg, time, addr, recvPort| 71 | 72 | }, '/gesture/jaw'); 73 | 74 | OSCdef('/gesture/mouth/height', {|msg, time, addr, recvPort| 75 | 76 | }, '/gesture/mouth/height'); 77 | 78 | OSCdef('/gesture/mouth/width', {|msg, time, addr, recvPort| 79 | 80 | }, '/gesture/mouth/width'); 81 | 82 | OSCdef('/gesture/nostrils', {|msg, time, addr, recvPort| 83 | 84 | }, '/gesture/nostrils'); 85 | 86 | OSCdef('/pose/orientation', {|msg, time, addr, recvPort| 87 | 88 | }, '/pose/orientation'); 89 | 90 | OSCdef('/pose/position', {|msg, time, addr, recvPort| 91 | 92 | }, '/pose/position'); 93 | 94 | OSCdef('/pose/scale', {|msg, time, addr, recvPort| 95 | 96 | }, '/pose/scale'); 97 | 98 | OSCdef('/raw', {|msg, time, addr, recvPort| 99 | 100 | }, '/raw'); -------------------------------------------------------------------------------- /puredata/FaceOSCReceiver.pd: -------------------------------------------------------------------------------- 1 | #N canvas 24 29 728 673 10; 2 | #X declare -lib mrpeach; 3 | #X obj 39 41 udpreceive 8338; 4 | #X obj 39 71 unpackOSC; 5 | #X obj 39 543 routeOSC /position /scale /orientation; 6 | #X obj 189 599 unpack f f f; 7 | #X obj 39 596 unpack f f; 8 | #X floatatom 114 598 5 0 0 1 scale - -; 9 | #X floatatom 39 639 5 0 0 1 x - -; 10 | #X floatatom 96 640 5 0 0 1 y - -; 11 | #X floatatom 189 640 5 0 0 1 x - -; 12 | #X floatatom 245 640 5 0 0 1 y - -; 13 | #X floatatom 300 641 5 0 0 1 z - -; 14 | #X floatatom 98 511 5 0 0 1 mouth_width - -; 15 | #X obj 98 450 routeOSC /width /height; 16 | #X floatatom 165 486 5 0 0 1 mouth_height - -; 17 | #X obj 98 180 routeOSC /mouth /eyebrow /eye /jaw /nostrils; 18 | #X floatatom 150 415 5 0 0 1 L_eyebrow - -; 19 | #X floatatom 211 391 5 0 0 1 R_eyebrow - -; 20 | #X obj 150 355 routeOSC /left /right; 21 | #X floatatom 254 235 5 0 0 1 jaw - -; 22 | #X floatatom 306 212 5 0 0 1 nostrils - -; 23 | #X floatatom 202 323 5 0 0 1 L_eye - -; 24 | #X floatatom 263 304 5 0 0 1 R_eye - -; 25 | #X obj 202 268 routeOSC /left /right; 26 | #X floatatom 157 151 5 0 0 1 num_faces_found - -; 27 | #X text 52 99 OSC message breakout; 28 | #X text 38 13 FaceOSC sends on port 8338 by default; 29 | #X obj 39 121 routeOSC /pose /gesture /found; 30 | #X text 366 53 2012 Dan Wilcox danomatika.com for the IACD class at 31 | the CMU School of Art; 32 | #X obj 367 528 maxlib/scale 0 10 150 800; 33 | #X obj 548 530 maxlib/scale 6 9 0 0.6; 34 | #X floatatom 367 557 5 0 0 0 - - -; 35 | #X floatatom 548 558 5 0 0 0 - - -; 36 | #X obj 367 585 osc~; 37 | #X obj 367 615 *~; 38 | #X obj 367 642 dac~; 39 | #X text 361 404 mouth height controls oscilator pitch \, raise left 40 | eyebrow for volume boost; 41 | #X text 367 95 based on the example max by Craig Fahner http://cmuems.com/2011/a/unit-60-pd/ 42 | ; 43 | #X obj 614 17 import mrpeach; 44 | #X connect 0 0 1 0; 45 | #X connect 1 0 26 0; 46 | #X connect 2 0 4 0; 47 | #X connect 2 1 5 0; 48 | #X connect 2 2 3 0; 49 | #X connect 3 0 8 0; 50 | #X connect 3 1 9 0; 51 | #X connect 3 2 10 0; 52 | #X connect 4 0 
6 0; 53 | #X connect 4 1 7 0; 54 | #X connect 12 0 11 0; 55 | #X connect 12 1 13 0; 56 | #X connect 13 0 28 0; 57 | #X connect 14 0 12 0; 58 | #X connect 14 1 17 0; 59 | #X connect 14 2 22 0; 60 | #X connect 14 3 18 0; 61 | #X connect 14 4 19 0; 62 | #X connect 15 0 29 0; 63 | #X connect 17 0 15 0; 64 | #X connect 17 1 16 0; 65 | #X connect 22 0 20 0; 66 | #X connect 22 1 21 0; 67 | #X connect 26 0 2 0; 68 | #X connect 26 1 14 0; 69 | #X connect 26 2 23 0; 70 | #X connect 28 0 30 0; 71 | #X connect 29 0 31 0; 72 | #X connect 30 0 32 0; 73 | #X connect 31 0 33 1; 74 | #X connect 32 0 33 0; 75 | #X connect 33 0 34 0; 76 | #X connect 33 0 34 1; 77 | -------------------------------------------------------------------------------- /processing/FaceOSCSmiley/SmartRobot.pde: -------------------------------------------------------------------------------- 1 | import java.awt.AWTException; 2 | import java.awt.Robot; 3 | import java.awt.event.KeyEvent; 4 | 5 | public class SmartRobot extends Robot { 6 | 7 | public SmartRobot() throws AWTException { 8 | super(); 9 | } 10 | 11 | public void keyType(int keyCode) { 12 | keyPress(keyCode); 13 | delay(50); 14 | keyRelease(keyCode); 15 | } 16 | 17 | public void keyType(int keyCode, int keyCodeModifier) { 18 | keyPress(keyCodeModifier); 19 | keyPress(keyCode); 20 | keyRelease(keyCode); 21 | keyRelease(keyCodeModifier); 22 | } 23 | 24 | 25 | public void type(String text) { 26 | String textUpper = text.toUpperCase(); 27 | 28 | for (int i=0; i': 96 | keyCode = (int)'.'; 97 | break; 98 | default: 99 | keyCode = (int)c; 100 | shift = false; 101 | } 102 | if (shift) 103 | keyType(keyCode, KeyEvent.VK_SHIFT); 104 | else 105 | keyType(keyCode); 106 | } 107 | 108 | private int charToKeyCode(char c) { 109 | switch (c) { 110 | case ':': 111 | return ';'; 112 | } 113 | return (int)c; 114 | } 115 | } 116 | 117 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/src/Face.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2012 Dan Wilcox danomatika.com 6 | // for the IACD class at the CMU School of Art 7 | // 8 | #include "Face.h" 9 | #include "ofxOscMessage.h" 10 | 11 | //-------------------------------------------------------------- 12 | bool Face::parseOSC(ofxOscMessage& m) { 13 | 14 | if(m.getAddress() == "/found") { 15 | found = m.getArgAsInt32(0); 16 | return true; 17 | } 18 | 19 | // pose 20 | else if(m.getAddress() == "/pose/scale") { 21 | poseScale = m.getArgAsFloat(0); 22 | return true; 23 | } 24 | else if(m.getAddress() == "/pose/position") { 25 | posePosition.x = m.getArgAsFloat(0); 26 | posePosition.y = m.getArgAsFloat(1); 27 | return true; 28 | } 29 | else if(m.getAddress() == "/pose/orientation") { 30 | poseOrientation.x = m.getArgAsFloat(0); 31 | poseOrientation.y = m.getArgAsFloat(1); 32 | poseOrientation.z = m.getArgAsFloat(2); 33 | return true; 34 | } 35 | 36 | // gesture 37 | else if(m.getAddress() == "/gesture/mouth/width") { 38 | mouthWidth = m.getArgAsFloat(0); 39 | return true; 40 | } 41 | else if(m.getAddress() == "/gesture/mouth/height") { 42 | mouthHeight = m.getArgAsFloat(0); 43 | return true; 44 | } 45 | else if(m.getAddress() == "/gesture/eye/left") { 46 | eyeLeft = m.getArgAsFloat(0); 47 | return true; 48 | } 49 | else if(m.getAddress() == "/gesture/eye/right") { 50 | eyeRight 
= m.getArgAsFloat(0); 51 | return true; 52 | } 53 | else if(m.getAddress() == "/gesture/eyebrow/left") { 54 | eyebrowLeft = m.getArgAsFloat(0); 55 | return true; 56 | } 57 | else if(m.getAddress() == "/gesture/eyebrow/right") { 58 | eyebrowRight = m.getArgAsFloat(0); 59 | return true; 60 | } 61 | else if(m.getAddress() == "/gesture/jaw") { 62 | jaw = m.getArgAsFloat(0); 63 | return true; 64 | } 65 | else if(m.getAddress() == "/gesture/nostrils") { 66 | nostrils = m.getArgAsFloat(0); 67 | return true; 68 | } 69 | 70 | return false; 71 | } 72 | 73 | //-------------------------------------------------------------- 74 | string Face::toString() { 75 | stringstream stream; 76 | stream << "found: " << found << endl 77 | << "pose" << endl 78 | << " scale: " << poseScale << endl 79 | << " position: " << posePosition << endl 80 | << " orientation: " << poseOrientation << endl 81 | << "gesture" << endl 82 | << " mouth: " << mouthWidth << " " << mouthHeight << endl 83 | << " eye: " << eyeLeft << " " << eyeRight << endl 84 | << " eyebrow: " << eyebrowLeft << " " << eyebrowRight << endl 85 | << " jaw: " << jaw << endl 86 | << " nostrils: " << nostrils << endl; 87 | return stream.str(); 88 | } 89 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCReceiver/src/ofApp.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2012 Dan Wilcox danomatika.com 6 | // for the IACD class at the CMU School of Art 7 | // 8 | // adapted from Greg Borenstein's Processing example 9 | // https://gist.github.com/1603230 10 | // 11 | #include "ofApp.h" 12 | 13 | //-------------------------------------------------------------- 14 | void ofApp::setup() { 15 | ofSetVerticalSync(true); 16 | ofSetFrameRate(60); 17 | 18 | // FaceOSC sends to port 8338 by default 19 | receiver.setup(8338); 20 | 21 | ofBackground(255); 22 | } 23 | 24 | //-------------------------------------------------------------- 25 | void ofApp::update() { 26 | 27 | // check for waiting osc messages 28 | while(receiver.hasWaitingMessages()) { 29 | 30 | // get the next message 31 | ofxOscMessage m; 32 | receiver.getNextMessage(m); 33 | 34 | // load face from any FaceOSC messages 35 | face.parseOSC(m); 36 | } 37 | 38 | // found face? 
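	// (editor's note, not part of the original template: once parseOSC() has
	// filled the Face object, update() is also a natural place to react to
	// individual values, e.g. something along the lines of
	//     if(face.found > 0 && face.mouthWidth > 16) { /* trigger something */ }
	// where the threshold 16 is only an illustrative guess.)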
39 | if(face.found > 0) { 40 | cout << "---------" << endl << face.toString(); 41 | } 42 | } 43 | 44 | 45 | //-------------------------------------------------------------- 46 | void ofApp::draw() { 47 | 48 | // draw a face 49 | if(face.found > 0) { 50 | ofPushMatrix(); 51 | ofTranslate(face.posePosition); 52 | ofScale(face.poseScale, face.poseScale); 53 | 54 | ofSetColor(0); 55 | ofNoFill(); 56 | ofDrawEllipse(-20, face.eyeLeft * -9, 20, 7); 57 | ofDrawEllipse(20, face.eyeRight * -9, 20, 7); 58 | ofDrawEllipse(0, 20, face.mouthWidth * 3, face.mouthHeight * 3); 59 | ofDrawEllipse(-5, face.nostrils * -1, 7, 3); 60 | ofDrawEllipse(5, face.nostrils * -1, 7, 3); 61 | 62 | ofSetRectMode(OF_RECTMODE_CENTER); 63 | ofFill(); 64 | ofDrawRectangle(-20, face.eyebrowLeft * -5, 25, 5); 65 | ofDrawRectangle(20, face.eyebrowRight * -5, 25, 5); 66 | ofSetRectMode(OF_RECTMODE_CORNER); 67 | ofPopMatrix(); 68 | } 69 | 70 | } 71 | 72 | //-------------------------------------------------------------- 73 | void ofApp::keyPressed (int key) { 74 | 75 | } 76 | 77 | //-------------------------------------------------------------- 78 | void ofApp::keyReleased(int key) { 79 | 80 | } 81 | 82 | //-------------------------------------------------------------- 83 | void ofApp::mouseMoved(int x, int y) { 84 | 85 | } 86 | 87 | //-------------------------------------------------------------- 88 | void ofApp::mouseDragged(int x, int y, int button) { 89 | 90 | } 91 | 92 | //-------------------------------------------------------------- 93 | void ofApp::mousePressed(int x, int y, int button) { 94 | 95 | } 96 | 97 | //-------------------------------------------------------------- 98 | void ofApp::mouseReleased(int x, int y, int button) { 99 | 100 | } 101 | 102 | //-------------------------------------------------------------- 103 | void ofApp::windowResized(int w, int h) { 104 | 105 | } 106 | 107 | //-------------------------------------------------------------- 108 | void ofApp::gotMessage(ofMessage msg) { 109 | 110 | } 111 | 112 | //-------------------------------------------------------------- 113 | void ofApp::dragEvent(ofDragInfo dragInfo) { 114 | 115 | } 116 | -------------------------------------------------------------------------------- /processing/FaceOSCSyphon/Face.pde: -------------------------------------------------------------------------------- 1 | 2 | import oscP5.*; 3 | 4 | // a single tracked face from FaceOSC 5 | class Face { 6 | 7 | // num faces found 8 | int found; 9 | 10 | // pose 11 | float poseScale; 12 | PVector posePosition = new PVector(); 13 | PVector poseOrientation = new PVector(); 14 | 15 | // gesture 16 | float mouthHeight, mouthWidth; 17 | float eyeLeft, eyeRight; 18 | float eyebrowLeft, eyebrowRight; 19 | float jaw; 20 | float nostrils; 21 | 22 | Face() {} 23 | 24 | // parse an OSC message from FaceOSC 25 | // returns true if a message was handled 26 | boolean parseOSC(OscMessage m) { 27 | 28 | if(m.checkAddrPattern("/found")) { 29 | found = m.get(0).intValue(); 30 | return true; 31 | } 32 | 33 | // pose 34 | else if(m.checkAddrPattern("/pose/scale")) { 35 | poseScale = m.get(0).floatValue(); 36 | return true; 37 | } 38 | else if(m.checkAddrPattern("/pose/position")) { 39 | posePosition.x = m.get(0).floatValue(); 40 | posePosition.y = m.get(1).floatValue(); 41 | return true; 42 | } 43 | else if(m.checkAddrPattern("/pose/orientation")) { 44 | poseOrientation.x = m.get(0).floatValue(); 45 | poseOrientation.y = m.get(1).floatValue(); 46 | poseOrientation.z = m.get(2).floatValue(); 47 | return 
true; 48 | } 49 | 50 | // gesture 51 | else if(m.checkAddrPattern("/gesture/mouth/width")) { 52 | mouthWidth = m.get(0).floatValue(); 53 | return true; 54 | } 55 | else if(m.checkAddrPattern("/gesture/mouth/height")) { 56 | mouthHeight = m.get(0).floatValue(); 57 | return true; 58 | } 59 | else if(m.checkAddrPattern("/gesture/eye/left")) { 60 | eyeLeft = m.get(0).floatValue(); 61 | return true; 62 | } 63 | else if(m.checkAddrPattern("/gesture/eye/right")) { 64 | eyeRight = m.get(0).floatValue(); 65 | return true; 66 | } 67 | else if(m.checkAddrPattern("/gesture/eyebrow/left")) { 68 | eyebrowLeft = m.get(0).floatValue(); 69 | return true; 70 | } 71 | else if(m.checkAddrPattern("/gesture/eyebrow/right")) { 72 | eyebrowRight = m.get(0).floatValue(); 73 | return true; 74 | } 75 | else if(m.checkAddrPattern("/gesture/jaw")) { 76 | jaw = m.get(0).floatValue(); 77 | return true; 78 | } 79 | else if(m.checkAddrPattern("/gesture/nostrils")) { 80 | nostrils = m.get(0).floatValue(); 81 | return true; 82 | } 83 | 84 | return false; 85 | } 86 | 87 | // get the current face values as a string (includes end lines) 88 | String toString() { 89 | return "found: " + found + "\n" 90 | + "pose" + "\n" 91 | + " scale: " + poseScale + "\n" 92 | + " position: " + posePosition.toString() + "\n" 93 | + " orientation: " + poseOrientation.toString() + "\n" 94 | + "gesture" + "\n" 95 | + " mouth: " + mouthWidth + " " + mouthHeight + "\n" 96 | + " eye: " + eyeLeft + " " + eyeRight + "\n" 97 | + " eyebrow: " + eyebrowLeft + " " + eyebrowRight + "\n" 98 | + " jaw: " + jaw + "\n" 99 | + " nostrils: " + nostrils + "\n"; 100 | } 101 | 102 | }; 103 | -------------------------------------------------------------------------------- /processing/FaceOSCReceiverGrapher/Face.pde: -------------------------------------------------------------------------------- 1 | import oscP5.*; 2 | 3 | // a single tracked face from FaceOSC 4 | class Face { 5 | 6 | // num faces found 7 | int found; 8 | 9 | // pose 10 | float poseScale; 11 | PVector posePosition = new PVector(); 12 | PVector poseOrientation = new PVector(); 13 | 14 | // gesture 15 | float mouthHeight, mouthWidth; 16 | float eyeLeft, eyeRight; 17 | float eyebrowLeft, eyebrowRight; 18 | float jaw; 19 | float nostrils; 20 | 21 | Face() {} 22 | 23 | // parse an OSC message from FaceOSC 24 | // returns true if a message was handled 25 | boolean parseOSC(OscMessage m) { 26 | 27 | if(m.checkAddrPattern("/found")) { 28 | found = m.get(0).intValue(); 29 | return true; 30 | } 31 | 32 | // pose 33 | else if(m.checkAddrPattern("/pose/scale")) { 34 | poseScale = m.get(0).floatValue(); 35 | return true; 36 | } 37 | else if(m.checkAddrPattern("/pose/position")) { 38 | posePosition.x = m.get(0).floatValue(); 39 | posePosition.y = m.get(1).floatValue(); 40 | return true; 41 | } 42 | else if(m.checkAddrPattern("/pose/orientation")) { 43 | poseOrientation.x = m.get(0).floatValue(); 44 | poseOrientation.y = m.get(1).floatValue(); 45 | poseOrientation.z = m.get(2).floatValue(); 46 | return true; 47 | } 48 | 49 | // gesture 50 | else if(m.checkAddrPattern("/gesture/mouth/width")) { 51 | mouthWidth = m.get(0).floatValue(); 52 | return true; 53 | } 54 | else if(m.checkAddrPattern("/gesture/mouth/height")) { 55 | mouthHeight = m.get(0).floatValue(); 56 | return true; 57 | } 58 | else if(m.checkAddrPattern("/gesture/eye/left")) { 59 | eyeLeft = m.get(0).floatValue(); 60 | return true; 61 | } 62 | else if(m.checkAddrPattern("/gesture/eye/right")) { 63 | eyeRight = m.get(0).floatValue(); 64 | return true; 65 
| } 66 | else if(m.checkAddrPattern("/gesture/eyebrow/left")) { 67 | eyebrowLeft = m.get(0).floatValue(); 68 | return true; 69 | } 70 | else if(m.checkAddrPattern("/gesture/eyebrow/right")) { 71 | eyebrowRight = m.get(0).floatValue(); 72 | return true; 73 | } 74 | else if(m.checkAddrPattern("/gesture/jaw")) { 75 | jaw = m.get(0).floatValue(); 76 | return true; 77 | } 78 | else if(m.checkAddrPattern("/gesture/nostrils")) { 79 | nostrils = m.get(0).floatValue(); 80 | return true; 81 | } 82 | 83 | return false; 84 | } 85 | 86 | // get the current face values as a string (includes end lines) 87 | String toString() { 88 | return "found: " + found + "\n" 89 | + "pose" + "\n" 90 | + " scale: " + poseScale + "\n" 91 | + " position: " + posePosition.toString() + "\n" 92 | + " orientation: " + poseOrientation.toString() + "\n" 93 | + "gesture" + "\n" 94 | + " mouth: " + mouthWidth + " " + mouthHeight + "\n" 95 | + " eye: " + eyeLeft + " " + eyeRight + "\n" 96 | + " eyebrow: " + eyebrowLeft + " " + eyebrowRight + "\n" 97 | + " jaw: " + jaw + "\n" 98 | + " nostrils: " + nostrils + "\n"; 99 | } 100 | 101 | }; 102 | -------------------------------------------------------------------------------- /processing/FaceOSCReceiverClass/Face.pde: -------------------------------------------------------------------------------- 1 | 2 | import oscP5.*; 3 | 4 | // a single tracked face from FaceOSC 5 | class Face { 6 | 7 | // num faces found 8 | int found; 9 | 10 | // pose 11 | float poseScale; 12 | PVector posePosition = new PVector(); 13 | PVector poseOrientation = new PVector(); 14 | 15 | // gesture 16 | float mouthHeight, mouthWidth; 17 | float eyeLeft, eyeRight; 18 | float eyebrowLeft, eyebrowRight; 19 | float jaw; 20 | float nostrils; 21 | 22 | Face() {} 23 | 24 | // parse an OSC message from FaceOSC 25 | // returns true if a message was handled 26 | boolean parseOSC(OscMessage m) { 27 | 28 | if(m.checkAddrPattern("/found")) { 29 | found = m.get(0).intValue(); 30 | return true; 31 | } 32 | 33 | // pose 34 | else if(m.checkAddrPattern("/pose/scale")) { 35 | poseScale = m.get(0).floatValue(); 36 | return true; 37 | } 38 | else if(m.checkAddrPattern("/pose/position")) { 39 | posePosition.x = m.get(0).floatValue(); 40 | posePosition.y = m.get(1).floatValue(); 41 | return true; 42 | } 43 | else if(m.checkAddrPattern("/pose/orientation")) { 44 | poseOrientation.x = m.get(0).floatValue(); 45 | poseOrientation.y = m.get(1).floatValue(); 46 | poseOrientation.z = m.get(2).floatValue(); 47 | return true; 48 | } 49 | 50 | // gesture 51 | else if(m.checkAddrPattern("/gesture/mouth/width")) { 52 | mouthWidth = m.get(0).floatValue(); 53 | return true; 54 | } 55 | else if(m.checkAddrPattern("/gesture/mouth/height")) { 56 | mouthHeight = m.get(0).floatValue(); 57 | return true; 58 | } 59 | else if(m.checkAddrPattern("/gesture/eye/left")) { 60 | eyeLeft = m.get(0).floatValue(); 61 | return true; 62 | } 63 | else if(m.checkAddrPattern("/gesture/eye/right")) { 64 | eyeRight = m.get(0).floatValue(); 65 | return true; 66 | } 67 | else if(m.checkAddrPattern("/gesture/eyebrow/left")) { 68 | eyebrowLeft = m.get(0).floatValue(); 69 | return true; 70 | } 71 | else if(m.checkAddrPattern("/gesture/eyebrow/right")) { 72 | eyebrowRight = m.get(0).floatValue(); 73 | return true; 74 | } 75 | else if(m.checkAddrPattern("/gesture/jaw")) { 76 | jaw = m.get(0).floatValue(); 77 | return true; 78 | } 79 | else if(m.checkAddrPattern("/gesture/nostrils")) { 80 | nostrils = m.get(0).floatValue(); 81 | return true; 82 | } 83 | 84 | return false; 85 | } 
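  // (editor's usage sketch, not part of the original template: a sketch
  // typically owns one Face instance and feeds it every incoming message,
  // as FaceOSCReceiverClass.pde does, e.g.
  //     Face face = new Face();
  //     void oscEvent(OscMessage m) { face.parseOSC(m); }
  // and then reads face.found, face.poseScale, face.mouthWidth, ... in draw().)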
86 | 87 | // get the current face values as a string (includes end lines) 88 | String toString() { 89 | return "found: " + found + "\n" 90 | + "pose" + "\n" 91 | + " scale: " + poseScale + "\n" 92 | + " position: " + posePosition.toString() + "\n" 93 | + " orientation: " + poseOrientation.toString() + "\n" 94 | + "gesture" + "\n" 95 | + " mouth: " + mouthWidth + " " + mouthHeight + "\n" 96 | + " eye: " + eyeLeft + " " + eyeRight + "\n" 97 | + " eyebrow: " + eyebrowLeft + " " + eyebrowRight + "\n" 98 | + " jaw: " + jaw + "\n" 99 | + " nostrils: " + nostrils + "\n"; 100 | } 101 | 102 | }; 103 | -------------------------------------------------------------------------------- /README.markdown: -------------------------------------------------------------------------------- 1 | ## FaceOSC Receiver Templates 2 | 3 | 2012-2017 by [Dan Wilcox](http://danomatika.com), et al. 4 | 5 | This repository contains templates for receiving face tracking OSC ([Open Sound Control](http://opensoundcontrol.org/introduction-osc)) messages from Kyle McDonald's [FaceOSC](https://github.com/kylemcdonald/ofxFaceTracker/downloads) wrapper around Jason Saragih’s [FaceTracker](http://web.mac.com/jsaragih/FaceTracker/FaceTracker.html). Templates are provided for a number of popular arts-engineering toolkits, including Processing, openFrameworks, Max/MSP, Pure Data (pd), and SuperCollider. 6 | 7 | Created for Prof. Golan Levin's [Spring 2012 IACD class](http://golancourses.net/2012spring/) at the [CMU School of Art](http://www.cmu.edu/art/). 8 | 9 | -- 10 | 11 | ### Instructions 12 | 13 | Download [FaceOSC](https://github.com/kylemcdonald/ofxFaceTracker/releases) and get started with a template project for one of the following creative coding environments: 14 | 15 | * [Processing](http://processing.org/) 16 | * Requires the [OscP5 library](http://www.sojamo.de/libraries/oscP5/) 17 | * Known to be compatible with Processing 3.0. 18 | * [OpenFrameworks](http://www.openframeworks.cc/) 19 | * Currently requires OF version 007+ 20 | * Make sure to copy the FaceOSCReceiver folder into the openframeworks/apps/myApps folder (it must be 3 levels deep) 21 | * [Max/MSP](http://cycling74.com/) 22 | * Requires the [CNMAT Everything for Max package](http://cnmat.berkeley.edu/downloads) for the (OSC-route) object 23 | * [Pure Data Extended](http://puredata.info/) 24 | * Requires *Pd-Extended* for the [OSCroute] and [udpreceive] objects (part of the *mrpeach* external included in Pd-Extended) 25 | * [SuperCollider](http://supercollider.github.io/) 26 | 27 | Make sure FaceOSC is running and a face is detected (i.e., a face mesh is drawn). The face detection may run very slowly (0.5 fps) while initially searching for a face. It speeds up to 30-60 fps once one is found. For best results, make sure your face is evenly lit. 28 | 29 | **Further info:** 30 | 31 | * FaceOSC uses port 8338 by default for OSC communication. 32 | * The FaceOSC window size is 640 x 480 pixels. 33 | 34 | -- 35 | 36 | ### OpenFrameworks 37 | 38 | #### Generating Project Files 39 | 40 | The OpenFrameworks template does not come with the project files pre-generated. You will need to use the OF ProjectGenerator to create them (you should only need to do this once). 41 | 42 | To (re)generate project files for an existing project: 43 | 44 | * click the "Import" button in the ProjectGenerator 45 | * navigate to the base folder for the OF project, i.e. 
"FaceOSCReceiver" 46 | * click the "Update" button 47 | 48 | If everything went OK, you should now be able to open the generated project and build/run. 49 | 50 | ### Choosing a Version 51 | 52 | If you are using an older version (007, ...) of OpenFrameworks then you'll want to use a git tag of this repo for that version. You can select the tag in the Github "Current Branch" menu or clone and check it out using git. 53 | 54 | For example, the following commands will clone and switch to the OF 007 tagged version: 55 |
56 | git clone git://github.com/CreativeInquiry/FaceOSC-Templates.git
57 | cd FaceOSC-Templates
58 | git checkout of-007
59 | 
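Whichever toolkit and OF version you end up with, every template in this repository reduces to the same pattern described under *Instructions* and *Further info* above: listen on UDP port 8338 and route the incoming OSC addresses. As a quick orientation, here is a minimal Processing sketch showing just that pattern; it is an illustrative reduction, not one of the shipped templates, and it assumes the oscP5 library is installed:

    import oscP5.*;

    OscP5 oscP5;
    int found;          // nonzero while FaceOSC sees a face
    float mouthHeight;  // one example gesture value

    void setup() {
      size(640, 480);                 // matches the FaceOSC window size
      oscP5 = new OscP5(this, 8338);  // FaceOSC sends to UDP port 8338 by default
      oscP5.plug(this, "found", "/found");
      oscP5.plug(this, "mouthHeight", "/gesture/mouth/height");
    }

    void draw() {
      background(255);
      fill(0);
      text(found > 0 ? "mouth height: " + mouthHeight : "no face found", 20, 20);
    }

    public void found(int i) { found = i; }
    public void mouthHeight(float h) { mouthHeight = h; }

The full templates flesh out the same idea with the complete set of pose and gesture addresses (and, in the raw receivers, the 132-float /raw point list).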
60 | 61 | The current master branch should work with the current OF and a tag will only be created when there is an appreciable change in OF, so there may not be a tag for *every* version of OF. 62 | -------------------------------------------------------------------------------- /processing/FaceOSCReceiverGrapher/Graph.pde: -------------------------------------------------------------------------------- 1 | int recent = 10; 2 | int minAdapt = 2; 3 | int maxAdapt = 100; 4 | 5 | class Graph extends ArrayList { 6 | float maxValue, minValue; 7 | boolean watching; 8 | String name; 9 | Graph(String name) { 10 | this.name = name; 11 | this.watching = true; 12 | this.maxValue = Float.NEGATIVE_INFINITY; 13 | this.minValue = Float.POSITIVE_INFINITY; 14 | } 15 | void add(float value) { 16 | if(watching) { 17 | if(value == Float.NEGATIVE_INFINITY || 18 | value == Float.POSITIVE_INFINITY || 19 | value != value) 20 | return; 21 | if(value > maxValue) 22 | maxValue = value; 23 | if(value < minValue) 24 | minValue = value; 25 | } 26 | super.add(value); 27 | } 28 | float getFloat(int i) { 29 | if(size() == 0) 30 | return 0; 31 | return ((Float) super.get(i)).floatValue(); 32 | } 33 | float getLastFloat() { 34 | return getFloat(size() - 1); 35 | } 36 | float normalize(float x) { 37 | return constrain(norm(x, minValue, maxValue), 0, 1); 38 | } 39 | float getNorm(int i) { 40 | return normalize(getFloat(i)); 41 | } 42 | float getLastNorm() { 43 | return getNorm(size() - 1); 44 | } 45 | float getLinear(int i) { 46 | return sqrt(1. / getNorm(i)); 47 | } 48 | float getLastLinear() { 49 | return getLinear(size() - 1); 50 | } 51 | float mean() { 52 | float sum = 0; 53 | for(int i = 0; i < size(); i++) 54 | sum += getFloat(i); 55 | return sum / size(); 56 | } 57 | float recentMean() { 58 | float mean = 0; 59 | int n = min(size(), recent); 60 | for(int i = 0; i < n; i++) 61 | mean += getFloat(size() - i - 1); 62 | return mean / n; 63 | } 64 | float recentVarianceWeighted() { 65 | float mean = recentMean(); 66 | float recentVariance = 0; 67 | int n = min(size(), recent); 68 | float weights = 0; 69 | for(int i = 0; i < n; i++) { 70 | float w = 1. - ((float) i / (float) n); 71 | recentVariance += abs(getFloat(size() - i - 1) - mean) * w; 72 | weights += w; 73 | } 74 | return recentVariance / weights; 75 | } 76 | float recentAdaptive(float adapt) { 77 | float sum = 0; 78 | float weights = 0; 79 | float curRecent = map(adapt, 0, 1, minAdapt, maxAdapt); 80 | int n = min(size(), 1 + (int) curRecent); 81 | for(int i = 0; i < n; i++) { 82 | float w = 1. 
- ((float) i / (float) n); 83 | sum += getFloat(size() - i - 1) * w; 84 | weights += w; 85 | } 86 | println(sum + " " + weights + " " + n); 87 | return sum / weights; 88 | } 89 | void draw(int width, int height) { 90 | fill(getNorm(size() - 1) * 255); 91 | //rect(0, 0, width, height); 92 | 93 | fill(0); 94 | stroke(0); 95 | 96 | textAlign(LEFT, CENTER); 97 | text(nf(getLastFloat(), 0, 0) + " " + name, 10, height - normalize(recentMean()) * height); 98 | 99 | textAlign(LEFT, TOP); 100 | text(nf(minValue, 0, 0), width - 20, height - 20); 101 | 102 | noFill(); 103 | beginShape(); 104 | vertex(0, height); 105 | for(int i = 0; i < width && i < size(); i++) { 106 | int position = size() - i - 1; 107 | vertex(i, height - getNorm(position) * height); 108 | } 109 | vertex(width, height); 110 | endShape(); 111 | 112 | fill(0); 113 | textAlign(LEFT, BOTTOM); 114 | text(nf(maxValue, 0, 0), width - 20, 20); 115 | } 116 | void save(String filename) { 117 | String[] out = new String[size()]; 118 | for(int i = 0; i < size(); i++) 119 | out[i] = nf(getFloat(i), 0, 0); 120 | saveStrings(filename + ".csv", out); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /processing/FaceOSCRawReceiver/FaceOSCRawReceiver.pde: -------------------------------------------------------------------------------- 1 | // Processing 3.0x template for receiving raw points from 2 | // Kyle McDonald's FaceOSC v.1.1 3 | // https://github.com/kylemcdonald/ofxFaceTracker 4 | // 5 | // Adapted by Kaleb Crawford and Golan Levin, 2016-7, after: 6 | // 2012 Dan Wilcox danomatika.com 7 | // for the IACD Spring 2012 class at the CMU School of Art 8 | // adapted from from Greg Borenstein's 2011 example 9 | // https://gist.github.com/1603230 10 | 11 | import oscP5.*; 12 | OscP5 oscP5; 13 | int found; 14 | float[] rawArray; 15 | int highlighted; //which point is selected 16 | 17 | //-------------------------------------------- 18 | void setup() { 19 | size(640, 480); 20 | frameRate(30); 21 | 22 | rawArray = new float[132]; 23 | oscP5 = new OscP5(this, 8338); 24 | oscP5.plug(this, "found", "/found"); 25 | oscP5.plug(this, "rawData", "/raw"); 26 | } 27 | 28 | //-------------------------------------------- 29 | void draw() { 30 | background(255); 31 | noStroke(); 32 | 33 | if (found != 0) { 34 | drawFacePoints(); 35 | drawFacePolygons(); 36 | } 37 | 38 | fill(0); 39 | text("Be certain FaceOSC is sending 'raw' data!", 12, 20); 40 | text("Use Left and Right arrow keys to cycle points", 12, 40); 41 | text("current index = [" + highlighted + "," + (highlighted + 1) + "]", 12, 60); 42 | } 43 | 44 | //-------------------------------------------- 45 | void drawFacePoints() { 46 | int nData = rawArray.length; 47 | for (int val=0; val32; i-=2) { 69 | vertex(rawArray[i], rawArray[i+1]); 70 | } 71 | endShape(CLOSE); 72 | 73 | // Eyes 74 | beginShape(); 75 | for (int i=72; i<84; i+=2) { 76 | vertex(rawArray[i], rawArray[i+1]); 77 | } 78 | endShape(CLOSE); 79 | beginShape(); 80 | for (int i=84; i<96; i+=2) { 81 | vertex(rawArray[i], rawArray[i+1]); 82 | } 83 | endShape(CLOSE); 84 | 85 | // Upper lip 86 | beginShape(); 87 | for (int i=96; i<110; i+=2) { 88 | vertex(rawArray[i], rawArray[i+1]); 89 | } 90 | for (int i=124; i>118; i-=2) { 91 | vertex(rawArray[i], rawArray[i+1]); 92 | } 93 | endShape(CLOSE); 94 | 95 | // Lower lip 96 | beginShape(); 97 | for (int i=108; i<120; i+=2) { 98 | vertex(rawArray[i], rawArray[i+1]); 99 | } 100 | vertex(rawArray[96], rawArray[97]); 101 | for (int i=130; i>124; i-=2) { 
102 | vertex(rawArray[i], rawArray[i+1]); 103 | } 104 | endShape(CLOSE); 105 | 106 | // Nose bridge 107 | beginShape(); 108 | for (int i=54; i<62; i+=2) { 109 | vertex(rawArray[i], rawArray[i+1]); 110 | } 111 | endShape(); 112 | 113 | // Nose bottom 114 | beginShape(); 115 | for (int i=62; i<72; i+=2) { 116 | vertex(rawArray[i], rawArray[i+1]); 117 | } 118 | endShape(); 119 | } 120 | 121 | 122 | //-------------------------------------------- 123 | public void found(int i) { 124 | found = i; 125 | } 126 | public void rawData(float[] raw) { 127 | rawArray = raw; // stash data in array 128 | } 129 | 130 | //-------------------------------------------- 131 | void keyPressed() { 132 | int len = rawArray.length; 133 | if (keyCode == RIGHT) { 134 | highlighted = (highlighted + 2) % len; 135 | } 136 | if (keyCode == LEFT) { 137 | highlighted = (highlighted - 2 + len) % len; 138 | } 139 | } -------------------------------------------------------------------------------- /processing/FaceOSCReceiver/FaceOSCReceiver.pde: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker 4 | // 5 | // 2012 Dan Wilcox danomatika.com 6 | // for the IACD Spring 2012 class at the CMU School of Art 7 | // 8 | // adapted from from Greg Borenstein's 2011 example 9 | // http://www.gregborenstein.com/ 10 | // https://gist.github.com/1603230 11 | // 12 | import oscP5.*; 13 | OscP5 oscP5; 14 | 15 | // num faces found 16 | int found; 17 | 18 | // pose 19 | float poseScale; 20 | PVector posePosition = new PVector(); 21 | PVector poseOrientation = new PVector(); 22 | 23 | // gesture 24 | float mouthHeight; 25 | float mouthWidth; 26 | float eyeLeft; 27 | float eyeRight; 28 | float eyebrowLeft; 29 | float eyebrowRight; 30 | float jaw; 31 | float nostrils; 32 | 33 | void setup() { 34 | size(640, 480); 35 | frameRate(30); 36 | 37 | oscP5 = new OscP5(this, 8338); 38 | oscP5.plug(this, "found", "/found"); 39 | oscP5.plug(this, "poseScale", "/pose/scale"); 40 | oscP5.plug(this, "posePosition", "/pose/position"); 41 | oscP5.plug(this, "poseOrientation", "/pose/orientation"); 42 | oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width"); 43 | oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height"); 44 | oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left"); 45 | oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right"); 46 | oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left"); 47 | oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right"); 48 | oscP5.plug(this, "jawReceived", "/gesture/jaw"); 49 | oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils"); 50 | } 51 | 52 | void draw() { 53 | background(255); 54 | stroke(0); 55 | 56 | if(found > 0) { 57 | translate(posePosition.x, posePosition.y); 58 | scale(poseScale); 59 | noFill(); 60 | ellipse(-20, eyeLeft * -9, 20, 7); 61 | ellipse(20, eyeRight * -9, 20, 7); 62 | ellipse(0, 20, mouthWidth* 3, mouthHeight * 3); 63 | ellipse(-5, nostrils * -1, 7, 3); 64 | ellipse(5, nostrils * -1, 7, 3); 65 | rectMode(CENTER); 66 | fill(0); 67 | rect(-20, eyebrowLeft * -5, 25, 5); 68 | rect(20, eyebrowRight * -5, 25, 5); 69 | } 70 | } 71 | 72 | // OSC CALLBACK FUNCTIONS 73 | 74 | public void found(int i) { 75 | println("found: " + i); 76 | found = i; 77 | } 78 | 79 | public void poseScale(float s) { 80 | println("scale: " + s); 81 | poseScale = s; 82 | } 83 | 84 | public void 
posePosition(float x, float y) { 85 | println("pose position\tX: " + x + " Y: " + y ); 86 | posePosition.set(x, y, 0); 87 | } 88 | 89 | public void poseOrientation(float x, float y, float z) { 90 | println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z); 91 | poseOrientation.set(x, y, z); 92 | } 93 | 94 | public void mouthWidthReceived(float w) { 95 | println("mouth Width: " + w); 96 | mouthWidth = w; 97 | } 98 | 99 | public void mouthHeightReceived(float h) { 100 | println("mouth height: " + h); 101 | mouthHeight = h; 102 | } 103 | 104 | public void eyeLeftReceived(float f) { 105 | println("eye left: " + f); 106 | eyeLeft = f; 107 | } 108 | 109 | public void eyeRightReceived(float f) { 110 | println("eye right: " + f); 111 | eyeRight = f; 112 | } 113 | 114 | public void eyebrowLeftReceived(float f) { 115 | println("eyebrow left: " + f); 116 | eyebrowLeft = f; 117 | } 118 | 119 | public void eyebrowRightReceived(float f) { 120 | println("eyebrow right: " + f); 121 | eyebrowRight = f; 122 | } 123 | 124 | public void jawReceived(float f) { 125 | println("jaw: " + f); 126 | jaw = f; 127 | } 128 | 129 | public void nostrilsReceived(float f) { 130 | println("nostrils: " + f); 131 | nostrils = f; 132 | } 133 | 134 | // all other OSC messages end up here 135 | void oscEvent(OscMessage m) { 136 | if(m.isPlugged() == false) { 137 | println("UNPLUGGED: " + m); 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /openframeworks/FaceOSCRawReceiver/src/ofApp.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // a template for receiving raw face tracking osc messages from 3 | // Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker/downloads 4 | // 5 | // 2017 Dan Wilcox danomatika.com 6 | // for the EDP Creative Coding class @ the University of Denver 7 | // 8 | // adapted from Golan Levin's FaceOSCRawReceiver Processing template 9 | // 10 | #include "ofApp.h" 11 | 12 | //-------------------------------------------------------------- 13 | void ofApp::setup(){ 14 | ofSetVerticalSync(true); 15 | ofSetFrameRate(60); 16 | 17 | // FaceOSC sends to port 8338 by default 18 | receiver.setup(8338); 19 | 20 | // FaceOSC raw message is 66 xy pairs 21 | for(int i = 0; i < 66; ++i) { 22 | points.push_back(ofVec2f()); 23 | } 24 | 25 | ofBackground(255); 26 | } 27 | 28 | //-------------------------------------------------------------- 29 | void ofApp::update(){ 30 | 31 | // check for waiting osc messages 32 | while(receiver.hasWaitingMessages()) { 33 | 34 | // get the next message 35 | ofxOscMessage m; 36 | receiver.getNextMessage(m); 37 | 38 | // found face? 
39 | if(m.getAddress() == "/found") { 40 | found = (bool) m.getArgAsInt32(0); 41 | } 42 | 43 | // raw mesh points: 132 floats for 66 xy pairs 44 | else if(m.getAddress() == "/raw" && m.getNumArgs() == 132) { 45 | int p = 0; 46 | for(int i = 0; i < 132; i = i+2) { 47 | points[p].x = m.getArgAsFloat(i); 48 | points[p].y = m.getArgAsFloat(i+1); 49 | p++; 50 | } 51 | } 52 | } 53 | } 54 | 55 | //-------------------------------------------------------------- 56 | void ofApp::draw(){ 57 | 58 | if(found) { 59 | drawFacePoints(); 60 | drawFacePolygons(); 61 | } 62 | 63 | ofFill(); 64 | ofSetColor(0); 65 | ofDrawBitmapString("Be certain FaceOSC is sending 'raw' data!", 12, 20); 66 | ofDrawBitmapString("Use Left and Right arrow keys to cycle points", 12, 40); 67 | ofDrawBitmapString("current index = [" + ofToString(highlighted) + "]", 12, 60); 68 | } 69 | 70 | //-------------------------------------------------------------- 71 | void ofApp::keyPressed(int key){ 72 | if(key == OF_KEY_RIGHT) { 73 | highlighted++; 74 | if(highlighted >= points.size()) { 75 | highlighted = 0; 76 | } 77 | } 78 | else if(key == OF_KEY_LEFT) { 79 | highlighted--; 80 | if(highlighted < 0) { 81 | highlighted = points.size()-1; 82 | } 83 | } 84 | } 85 | 86 | //-------------------------------------------------------------- 87 | void ofApp::drawFacePoints() { 88 | ofFill(); 89 | for(int i = 0; i < points.size(); i++) { 90 | if(i == highlighted) { 91 | ofSetColor(255, 0, 0); 92 | ofDrawCircle(points[i], 11); 93 | } else { 94 | ofSetColor(100); 95 | ofDrawCircle(points[i], 8); 96 | } 97 | } 98 | } 99 | 100 | //-------------------------------------------------------------- 101 | void ofApp::drawFacePolygons() { 102 | ofNoFill(); 103 | ofSetColor(100); 104 | 105 | // Face outline 106 | ofBeginShape(); 107 | for(int i = 0; i < 17; i++) { 108 | ofVertex(points[i].x, points[i].y); 109 | } 110 | for(int i = 26; i > 16; i--) { 111 | ofVertex(points[i].x, points[i].y); 112 | } 113 | ofEndShape(OF_CLOSE); 114 | 115 | // Eyes 116 | ofBeginShape(); 117 | for(int i = 36; i < 42; i++) { 118 | ofVertex(points[i].x, points[i].y); 119 | } 120 | ofEndShape(OF_CLOSE); 121 | ofBeginShape(); 122 | for(int i = 42; i < 48; i++) { 123 | ofVertex(points[i].x, points[i].y); 124 | } 125 | ofEndShape(OF_CLOSE); 126 | 127 | // Upper lip 128 | ofBeginShape(); 129 | for(int i = 48; i < 55; i++) { 130 | ofVertex(points[i].x, points[i].y); 131 | } 132 | for(int i = 62; i > 59; i--) { 133 | ofVertex(points[i].x, points[i].y); 134 | } 135 | ofEndShape(OF_CLOSE); 136 | 137 | // Lower lip 138 | ofBeginShape(); 139 | for(int i = 54; i < 60; i++) { 140 | ofVertex(points[i].x, points[i].y); 141 | } 142 | ofVertex(points[48].x, points[48].y); 143 | for(int i = 65; i > 62; i--) { 144 | ofVertex(points[i].x, points[i].y); 145 | } 146 | ofEndShape(OF_CLOSE); 147 | 148 | // Nose bridge 149 | ofBeginShape(); 150 | for(int i = 27; i < 31; i++) { 151 | ofVertex(points[i].x, points[i].y); 152 | } 153 | ofEndShape(); 154 | 155 | // Nose bottom 156 | ofBeginShape(); 157 | for(int i = 31; i < 36; i++) { 158 | ofVertex(points[i].x, points[i].y); 159 | } 160 | ofEndShape(); 161 | } 162 | -------------------------------------------------------------------------------- /max/FaceOSCReceiver.maxpat: -------------------------------------------------------------------------------- 1 | { 2 | "patcher" : { 3 | "fileversion" : 1, 4 | "appversion" : { 5 | "major" : 6, 6 | "minor" : 0, 7 | "revision" : 3 8 | } 9 | , 10 | "rect" : [ 660.0, 44.0, 546.0, 706.0 ], 11 | "bglocked" : 0, 12 | 
"openinpresentation" : 0, 13 | "default_fontsize" : 12.0, 14 | "default_fontface" : 0, 15 | "default_fontname" : "Arial", 16 | "gridonopen" : 0, 17 | "gridsize" : [ 15.0, 15.0 ], 18 | "gridsnaponopen" : 0, 19 | "statusbarvisible" : 2, 20 | "toolbarvisible" : 1, 21 | "boxanimatetime" : 200, 22 | "imprint" : 0, 23 | "enablehscroll" : 1, 24 | "enablevscroll" : 1, 25 | "devicewidth" : 0.0, 26 | "description" : "", 27 | "digest" : "", 28 | "tags" : "", 29 | "boxes" : [ { 30 | "box" : { 31 | "fontname" : "Arial", 32 | "fontsize" : 12.0, 33 | "id" : "obj-43", 34 | "linecount" : 2, 35 | "maxclass" : "comment", 36 | "numinlets" : 1, 37 | "numoutlets" : 0, 38 | "patching_rect" : [ 281.299988, 76.5, 241.0, 34.0 ], 39 | "text" : "based on the example by Craig Fahner on http://cmuems.com/2011/a/unit-60-pd/" 40 | } 41 | 42 | } 43 | , { 44 | "box" : { 45 | "fontname" : "Arial", 46 | "fontsize" : 12.0, 47 | "id" : "obj-42", 48 | "linecount" : 2, 49 | "maxclass" : "comment", 50 | "numinlets" : 1, 51 | "numoutlets" : 0, 52 | "patching_rect" : [ 303.0, 390.0, 212.0, 34.0 ], 53 | "text" : "mouth height controls oscilator pitch, raise left eyebrow for volume boost" 54 | } 55 | 56 | } 57 | , { 58 | "box" : { 59 | "fontname" : "Arial", 60 | "fontsize" : 12.0, 61 | "id" : "obj-41", 62 | "linecount" : 2, 63 | "maxclass" : "comment", 64 | "numinlets" : 1, 65 | "numoutlets" : 0, 66 | "patching_rect" : [ 281.299988, 26.5, 255.399994, 35.0 ], 67 | "text" : "2012 Dan Wilcox danomatika.com\nfor the IACD class at the CMU School of Art" 68 | } 69 | 70 | } 71 | , { 72 | "box" : { 73 | "fontname" : "Arial", 74 | "fontsize" : 12.0, 75 | "id" : "obj-40", 76 | "maxclass" : "comment", 77 | "numinlets" : 1, 78 | "numoutlets" : 0, 79 | "patching_rect" : [ 22.0, 3.0, 229.600006, 20.0 ], 80 | "text" : "FaceOSC sends on port 8338 by default" 81 | } 82 | 83 | } 84 | , { 85 | "box" : { 86 | "id" : "obj-39", 87 | "maxclass" : "meter~", 88 | "numinlets" : 1, 89 | "numoutlets" : 1, 90 | "outlettype" : [ "float" ], 91 | "patching_rect" : [ 366.0, 622.5, 80.0, 13.0 ] 92 | } 93 | 94 | } 95 | , { 96 | "box" : { 97 | "fontname" : "Arial", 98 | "fontsize" : 12.0, 99 | "id" : "obj-29", 100 | "maxclass" : "flonum", 101 | "numinlets" : 1, 102 | "numoutlets" : 2, 103 | "outlettype" : [ "float", "bang" ], 104 | "parameter_enable" : 0, 105 | "patching_rect" : [ 138.5, 458.0, 50.0, 20.0 ] 106 | } 107 | 108 | } 109 | , { 110 | "box" : { 111 | "fontname" : "Arial", 112 | "fontsize" : 12.0, 113 | "id" : "obj-30", 114 | "maxclass" : "flonum", 115 | "numinlets" : 1, 116 | "numoutlets" : 2, 117 | "outlettype" : [ "float", "bang" ], 118 | "parameter_enable" : 0, 119 | "patching_rect" : [ 77.0, 484.0, 50.0, 20.0 ] 120 | } 121 | 122 | } 123 | , { 124 | "box" : { 125 | "fontname" : "Arial", 126 | "fontsize" : 12.0, 127 | "id" : "obj-31", 128 | "maxclass" : "newobj", 129 | "numinlets" : 1, 130 | "numoutlets" : 3, 131 | "outlettype" : [ "", "", "" ], 132 | "patching_rect" : [ 77.0, 418.0, 142.0, 20.0 ], 133 | "text" : "OSC-route /width /height" 134 | } 135 | 136 | } 137 | , { 138 | "box" : { 139 | "fontname" : "Arial", 140 | "fontsize" : 12.0, 141 | "id" : "obj-26", 142 | "maxclass" : "flonum", 143 | "numinlets" : 1, 144 | "numoutlets" : 2, 145 | "outlettype" : [ "float", "bang" ], 146 | "parameter_enable" : 0, 147 | "patching_rect" : [ 175.199997, 354.0, 50.0, 20.0 ] 148 | } 149 | 150 | } 151 | , { 152 | "box" : { 153 | "fontname" : "Arial", 154 | "fontsize" : 12.0, 155 | "id" : "obj-27", 156 | "maxclass" : "flonum", 157 | "numinlets" : 1, 158 | "numoutlets" : 
2, 159 | "outlettype" : [ "float", "bang" ], 160 | "parameter_enable" : 0, 161 | "patching_rect" : [ 124.199997, 383.0, 50.0, 20.0 ] 162 | } 163 | 164 | } 165 | , { 166 | "box" : { 167 | "fontname" : "Arial", 168 | "fontsize" : 12.0, 169 | "id" : "obj-28", 170 | "maxclass" : "newobj", 171 | "numinlets" : 1, 172 | "numoutlets" : 3, 173 | "outlettype" : [ "", "", "" ], 174 | "patching_rect" : [ 124.199997, 316.0, 121.0, 20.0 ], 175 | "text" : "OSC-route /left /right" 176 | } 177 | 178 | } 179 | , { 180 | "box" : { 181 | "fontname" : "Arial", 182 | "fontsize" : 12.0, 183 | "id" : "obj-25", 184 | "maxclass" : "flonum", 185 | "numinlets" : 1, 186 | "numoutlets" : 2, 187 | "outlettype" : [ "float", "bang" ], 188 | "parameter_enable" : 0, 189 | "patching_rect" : [ 218.600006, 192.0, 50.0, 20.0 ] 190 | } 191 | 192 | } 193 | , { 194 | "box" : { 195 | "fontname" : "Arial", 196 | "fontsize" : 12.0, 197 | "id" : "obj-24", 198 | "maxclass" : "flonum", 199 | "numinlets" : 1, 200 | "numoutlets" : 2, 201 | "outlettype" : [ "float", "bang" ], 202 | "parameter_enable" : 0, 203 | "patching_rect" : [ 265.799988, 164.0, 50.0, 20.0 ] 204 | } 205 | 206 | } 207 | , { 208 | "box" : { 209 | "fontname" : "Arial", 210 | "fontsize" : 12.0, 211 | "id" : "obj-21", 212 | "maxclass" : "flonum", 213 | "numinlets" : 1, 214 | "numoutlets" : 2, 215 | "outlettype" : [ "float", "bang" ], 216 | "parameter_enable" : 0, 217 | "patching_rect" : [ 222.399994, 258.0, 50.0, 20.0 ] 218 | } 219 | 220 | } 221 | , { 222 | "box" : { 223 | "fontname" : "Arial", 224 | "fontsize" : 12.0, 225 | "id" : "obj-22", 226 | "maxclass" : "flonum", 227 | "numinlets" : 1, 228 | "numoutlets" : 2, 229 | "outlettype" : [ "float", "bang" ], 230 | "parameter_enable" : 0, 231 | "patching_rect" : [ 171.399994, 287.0, 50.0, 20.0 ] 232 | } 233 | 234 | } 235 | , { 236 | "box" : { 237 | "fontname" : "Arial", 238 | "fontsize" : 12.0, 239 | "id" : "obj-23", 240 | "maxclass" : "newobj", 241 | "numinlets" : 1, 242 | "numoutlets" : 3, 243 | "outlettype" : [ "", "", "" ], 244 | "patching_rect" : [ 171.399994, 222.0, 121.0, 20.0 ], 245 | "text" : "OSC-route /left /right" 246 | } 247 | 248 | } 249 | , { 250 | "box" : { 251 | "fontname" : "Arial", 252 | "fontsize" : 12.0, 253 | "id" : "obj-20", 254 | "maxclass" : "flonum", 255 | "numinlets" : 1, 256 | "numoutlets" : 2, 257 | "outlettype" : [ "float", "bang" ], 258 | "parameter_enable" : 0, 259 | "patching_rect" : [ 201.333328, 606.0, 50.0, 20.0 ] 260 | } 261 | 262 | } 263 | , { 264 | "box" : { 265 | "fontname" : "Arial", 266 | "fontsize" : 12.0, 267 | "id" : "obj-17", 268 | "maxclass" : "flonum", 269 | "numinlets" : 1, 270 | "numoutlets" : 2, 271 | "outlettype" : [ "float", "bang" ], 272 | "parameter_enable" : 0, 273 | "patching_rect" : [ 176.333328, 646.0, 50.0, 20.0 ] 274 | } 275 | 276 | } 277 | , { 278 | "box" : { 279 | "fontname" : "Arial", 280 | "fontsize" : 12.0, 281 | "id" : "obj-18", 282 | "maxclass" : "flonum", 283 | "numinlets" : 1, 284 | "numoutlets" : 2, 285 | "outlettype" : [ "float", "bang" ], 286 | "parameter_enable" : 0, 287 | "patching_rect" : [ 151.333328, 676.0, 50.0, 20.0 ] 288 | } 289 | 290 | } 291 | , { 292 | "box" : { 293 | "fontname" : "Arial", 294 | "fontsize" : 12.0, 295 | "id" : "obj-19", 296 | "maxclass" : "newobj", 297 | "numinlets" : 1, 298 | "numoutlets" : 3, 299 | "outlettype" : [ "float", "float", "float" ], 300 | "patching_rect" : [ 151.333328, 565.0, 69.0, 20.0 ], 301 | "text" : "unpack f f f" 302 | } 303 | 304 | } 305 | , { 306 | "box" : { 307 | "fontname" : "Arial", 308 | "fontsize" : 
12.0, 309 | "id" : "obj-16", 310 | "maxclass" : "flonum", 311 | "numinlets" : 1, 312 | "numoutlets" : 2, 313 | "outlettype" : [ "float", "bang" ], 314 | "parameter_enable" : 0, 315 | "patching_rect" : [ 86.666664, 565.0, 50.0, 20.0 ] 316 | } 317 | 318 | } 319 | , { 320 | "box" : { 321 | "fontname" : "Arial", 322 | "fontsize" : 12.0, 323 | "id" : "obj-15", 324 | "maxclass" : "flonum", 325 | "numinlets" : 1, 326 | "numoutlets" : 2, 327 | "outlettype" : [ "float", "bang" ], 328 | "parameter_enable" : 0, 329 | "patching_rect" : [ 66.0, 646.0, 50.0, 20.0 ] 330 | } 331 | 332 | } 333 | , { 334 | "box" : { 335 | "fontname" : "Arial", 336 | "fontsize" : 12.0, 337 | "id" : "obj-14", 338 | "maxclass" : "flonum", 339 | "numinlets" : 1, 340 | "numoutlets" : 2, 341 | "outlettype" : [ "float", "bang" ], 342 | "parameter_enable" : 0, 343 | "patching_rect" : [ 22.0, 676.0, 50.0, 20.0 ] 344 | } 345 | 346 | } 347 | , { 348 | "box" : { 349 | "annotation" : "x", 350 | "fontname" : "Arial", 351 | "fontsize" : 12.0, 352 | "id" : "obj-6", 353 | "maxclass" : "number", 354 | "numinlets" : 1, 355 | "numoutlets" : 2, 356 | "outlettype" : [ "int", "bang" ], 357 | "parameter_enable" : 0, 358 | "patching_rect" : [ 132.0, 104.0, 50.0, 20.0 ] 359 | } 360 | 361 | } 362 | , { 363 | "box" : { 364 | "fontname" : "Arial", 365 | "fontsize" : 12.0, 366 | "id" : "obj-4", 367 | "maxclass" : "newobj", 368 | "numinlets" : 1, 369 | "numoutlets" : 2, 370 | "outlettype" : [ "float", "float" ], 371 | "patching_rect" : [ 22.0, 606.0, 63.0, 20.0 ], 372 | "text" : "unpack f f" 373 | } 374 | 375 | } 376 | , { 377 | "box" : { 378 | "fontname" : "Arial", 379 | "fontsize" : 12.0, 380 | "id" : "obj-2", 381 | "maxclass" : "newobj", 382 | "numinlets" : 1, 383 | "numoutlets" : 4, 384 | "outlettype" : [ "", "", "", "" ], 385 | "patching_rect" : [ 22.0, 524.0, 213.0, 20.0 ], 386 | "text" : "OSC-route /position /scale /orientation" 387 | } 388 | 389 | } 390 | , { 391 | "box" : { 392 | "fontname" : "Arial", 393 | "fontsize" : 12.0, 394 | "id" : "obj-1", 395 | "maxclass" : "newobj", 396 | "numinlets" : 1, 397 | "numoutlets" : 6, 398 | "outlettype" : [ "", "", "", "", "", "" ], 399 | "patching_rect" : [ 77.0, 134.0, 255.0, 20.0 ], 400 | "text" : "OSC-route /mouth /eyebrow /eye /jaw /nostrils" 401 | } 402 | 403 | } 404 | , { 405 | "box" : { 406 | "id" : "obj-13", 407 | "maxclass" : "ezdac~", 408 | "numinlets" : 2, 409 | "numoutlets" : 0, 410 | "patching_rect" : [ 303.0, 622.5, 45.0, 45.0 ] 411 | } 412 | 413 | } 414 | , { 415 | "box" : { 416 | "fontname" : "Arial", 417 | "fontsize" : 12.0, 418 | "id" : "obj-12", 419 | "maxclass" : "newobj", 420 | "numinlets" : 2, 421 | "numoutlets" : 1, 422 | "outlettype" : [ "signal" ], 423 | "patching_rect" : [ 303.0, 551.0, 45.0, 20.0 ], 424 | "text" : "cycle~" 425 | } 426 | 427 | } 428 | , { 429 | "box" : { 430 | "fontname" : "Arial", 431 | "fontsize" : 12.0, 432 | "id" : "obj-11", 433 | "maxclass" : "newobj", 434 | "numinlets" : 2, 435 | "numoutlets" : 1, 436 | "outlettype" : [ "signal" ], 437 | "patching_rect" : [ 303.0, 589.0, 32.5, 20.0 ], 438 | "text" : "*~" 439 | } 440 | 441 | } 442 | , { 443 | "box" : { 444 | "fontname" : "Arial", 445 | "fontsize" : 12.0, 446 | "id" : "obj-9", 447 | "maxclass" : "flonum", 448 | "numinlets" : 1, 449 | "numoutlets" : 2, 450 | "outlettype" : [ "float", "bang" ], 451 | "parameter_enable" : 0, 452 | "patching_rect" : [ 434.0, 518.0, 50.0, 20.0 ] 453 | } 454 | 455 | } 456 | , { 457 | "box" : { 458 | "fontname" : "Arial", 459 | "fontsize" : 12.0, 460 | "id" : "obj-10", 461 | 
"maxclass" : "newobj", 462 | "numinlets" : 6, 463 | "numoutlets" : 1, 464 | "outlettype" : [ "" ], 465 | "patching_rect" : [ 434.0, 492.0, 99.0, 20.0 ], 466 | "text" : "scale 6. 9. 0. 0.6" 467 | } 468 | 469 | } 470 | , { 471 | "box" : { 472 | "fontname" : "Arial", 473 | "fontsize" : 12.0, 474 | "id" : "obj-8", 475 | "maxclass" : "flonum", 476 | "numinlets" : 1, 477 | "numoutlets" : 2, 478 | "outlettype" : [ "float", "bang" ], 479 | "parameter_enable" : 0, 480 | "patching_rect" : [ 303.0, 518.0, 50.0, 20.0 ] 481 | } 482 | 483 | } 484 | , { 485 | "box" : { 486 | "fontname" : "Arial", 487 | "fontsize" : 12.0, 488 | "id" : "obj-7", 489 | "maxclass" : "newobj", 490 | "numinlets" : 6, 491 | "numoutlets" : 1, 492 | "outlettype" : [ "" ], 493 | "patching_rect" : [ 303.0, 492.0, 125.0, 20.0 ], 494 | "text" : "scale 0. 10. 150. 800." 495 | } 496 | 497 | } 498 | , { 499 | "box" : { 500 | "fontname" : "Arial", 501 | "fontsize" : 12.0, 502 | "id" : "obj-58", 503 | "maxclass" : "newobj", 504 | "numinlets" : 1, 505 | "numoutlets" : 4, 506 | "outlettype" : [ "", "", "", "" ], 507 | "patching_rect" : [ 22.0, 63.0, 184.0, 20.0 ], 508 | "text" : "OSC-route /pose /gesture /found" 509 | } 510 | 511 | } 512 | , { 513 | "box" : { 514 | "fontname" : "Arial", 515 | "fontsize" : 12.0, 516 | "id" : "obj-56", 517 | "maxclass" : "newobj", 518 | "numinlets" : 1, 519 | "numoutlets" : 1, 520 | "outlettype" : [ "" ], 521 | "patching_rect" : [ 22.0, 34.0, 99.0, 20.0 ], 522 | "text" : "udpreceive 8338" 523 | } 524 | 525 | } 526 | ], 527 | "lines" : [ { 528 | "patchline" : { 529 | "destination" : [ "obj-23", 0 ], 530 | "disabled" : 0, 531 | "hidden" : 0, 532 | "source" : [ "obj-1", 2 ] 533 | } 534 | 535 | } 536 | , { 537 | "patchline" : { 538 | "destination" : [ "obj-24", 0 ], 539 | "disabled" : 0, 540 | "hidden" : 0, 541 | "source" : [ "obj-1", 4 ] 542 | } 543 | 544 | } 545 | , { 546 | "patchline" : { 547 | "destination" : [ "obj-25", 0 ], 548 | "disabled" : 0, 549 | "hidden" : 0, 550 | "source" : [ "obj-1", 3 ] 551 | } 552 | 553 | } 554 | , { 555 | "patchline" : { 556 | "destination" : [ "obj-28", 0 ], 557 | "disabled" : 0, 558 | "hidden" : 0, 559 | "source" : [ "obj-1", 1 ] 560 | } 561 | 562 | } 563 | , { 564 | "patchline" : { 565 | "destination" : [ "obj-31", 0 ], 566 | "disabled" : 0, 567 | "hidden" : 0, 568 | "source" : [ "obj-1", 0 ] 569 | } 570 | 571 | } 572 | , { 573 | "patchline" : { 574 | "destination" : [ "obj-9", 0 ], 575 | "disabled" : 0, 576 | "hidden" : 0, 577 | "source" : [ "obj-10", 0 ] 578 | } 579 | 580 | } 581 | , { 582 | "patchline" : { 583 | "destination" : [ "obj-13", 1 ], 584 | "disabled" : 0, 585 | "hidden" : 0, 586 | "source" : [ "obj-11", 0 ] 587 | } 588 | 589 | } 590 | , { 591 | "patchline" : { 592 | "destination" : [ "obj-13", 0 ], 593 | "disabled" : 0, 594 | "hidden" : 0, 595 | "source" : [ "obj-11", 0 ] 596 | } 597 | 598 | } 599 | , { 600 | "patchline" : { 601 | "destination" : [ "obj-39", 0 ], 602 | "disabled" : 0, 603 | "hidden" : 0, 604 | "source" : [ "obj-11", 0 ] 605 | } 606 | 607 | } 608 | , { 609 | "patchline" : { 610 | "destination" : [ "obj-11", 0 ], 611 | "disabled" : 0, 612 | "hidden" : 0, 613 | "source" : [ "obj-12", 0 ] 614 | } 615 | 616 | } 617 | , { 618 | "patchline" : { 619 | "destination" : [ "obj-17", 0 ], 620 | "disabled" : 0, 621 | "hidden" : 0, 622 | "source" : [ "obj-19", 1 ] 623 | } 624 | 625 | } 626 | , { 627 | "patchline" : { 628 | "destination" : [ "obj-18", 0 ], 629 | "disabled" : 0, 630 | "hidden" : 0, 631 | "source" : [ "obj-19", 0 ] 632 | } 633 | 634 | } 635 | , { 
636 | "patchline" : { 637 | "destination" : [ "obj-20", 0 ], 638 | "disabled" : 0, 639 | "hidden" : 0, 640 | "source" : [ "obj-19", 2 ] 641 | } 642 | 643 | } 644 | , { 645 | "patchline" : { 646 | "destination" : [ "obj-16", 0 ], 647 | "disabled" : 0, 648 | "hidden" : 0, 649 | "source" : [ "obj-2", 1 ] 650 | } 651 | 652 | } 653 | , { 654 | "patchline" : { 655 | "destination" : [ "obj-19", 0 ], 656 | "disabled" : 0, 657 | "hidden" : 0, 658 | "source" : [ "obj-2", 2 ] 659 | } 660 | 661 | } 662 | , { 663 | "patchline" : { 664 | "destination" : [ "obj-4", 0 ], 665 | "disabled" : 0, 666 | "hidden" : 0, 667 | "source" : [ "obj-2", 0 ] 668 | } 669 | 670 | } 671 | , { 672 | "patchline" : { 673 | "destination" : [ "obj-21", 0 ], 674 | "disabled" : 0, 675 | "hidden" : 0, 676 | "source" : [ "obj-23", 1 ] 677 | } 678 | 679 | } 680 | , { 681 | "patchline" : { 682 | "destination" : [ "obj-22", 0 ], 683 | "disabled" : 0, 684 | "hidden" : 0, 685 | "source" : [ "obj-23", 0 ] 686 | } 687 | 688 | } 689 | , { 690 | "patchline" : { 691 | "destination" : [ "obj-10", 0 ], 692 | "disabled" : 0, 693 | "hidden" : 0, 694 | "source" : [ "obj-27", 0 ] 695 | } 696 | 697 | } 698 | , { 699 | "patchline" : { 700 | "destination" : [ "obj-26", 0 ], 701 | "disabled" : 0, 702 | "hidden" : 0, 703 | "source" : [ "obj-28", 1 ] 704 | } 705 | 706 | } 707 | , { 708 | "patchline" : { 709 | "destination" : [ "obj-27", 0 ], 710 | "disabled" : 0, 711 | "hidden" : 0, 712 | "source" : [ "obj-28", 0 ] 713 | } 714 | 715 | } 716 | , { 717 | "patchline" : { 718 | "destination" : [ "obj-7", 0 ], 719 | "disabled" : 0, 720 | "hidden" : 0, 721 | "source" : [ "obj-29", 0 ] 722 | } 723 | 724 | } 725 | , { 726 | "patchline" : { 727 | "destination" : [ "obj-29", 0 ], 728 | "disabled" : 0, 729 | "hidden" : 0, 730 | "source" : [ "obj-31", 1 ] 731 | } 732 | 733 | } 734 | , { 735 | "patchline" : { 736 | "destination" : [ "obj-30", 0 ], 737 | "disabled" : 0, 738 | "hidden" : 0, 739 | "source" : [ "obj-31", 0 ] 740 | } 741 | 742 | } 743 | , { 744 | "patchline" : { 745 | "destination" : [ "obj-14", 0 ], 746 | "disabled" : 0, 747 | "hidden" : 0, 748 | "source" : [ "obj-4", 0 ] 749 | } 750 | 751 | } 752 | , { 753 | "patchline" : { 754 | "destination" : [ "obj-15", 0 ], 755 | "disabled" : 0, 756 | "hidden" : 0, 757 | "source" : [ "obj-4", 1 ] 758 | } 759 | 760 | } 761 | , { 762 | "patchline" : { 763 | "destination" : [ "obj-58", 0 ], 764 | "disabled" : 0, 765 | "hidden" : 0, 766 | "source" : [ "obj-56", 0 ] 767 | } 768 | 769 | } 770 | , { 771 | "patchline" : { 772 | "destination" : [ "obj-1", 0 ], 773 | "disabled" : 0, 774 | "hidden" : 0, 775 | "source" : [ "obj-58", 1 ] 776 | } 777 | 778 | } 779 | , { 780 | "patchline" : { 781 | "destination" : [ "obj-2", 0 ], 782 | "disabled" : 0, 783 | "hidden" : 0, 784 | "source" : [ "obj-58", 0 ] 785 | } 786 | 787 | } 788 | , { 789 | "patchline" : { 790 | "destination" : [ "obj-6", 0 ], 791 | "disabled" : 0, 792 | "hidden" : 0, 793 | "source" : [ "obj-58", 2 ] 794 | } 795 | 796 | } 797 | , { 798 | "patchline" : { 799 | "destination" : [ "obj-8", 0 ], 800 | "disabled" : 0, 801 | "hidden" : 0, 802 | "source" : [ "obj-7", 0 ] 803 | } 804 | 805 | } 806 | , { 807 | "patchline" : { 808 | "destination" : [ "obj-12", 0 ], 809 | "disabled" : 0, 810 | "hidden" : 0, 811 | "source" : [ "obj-8", 0 ] 812 | } 813 | 814 | } 815 | , { 816 | "patchline" : { 817 | "destination" : [ "obj-11", 1 ], 818 | "disabled" : 0, 819 | "hidden" : 0, 820 | "source" : [ "obj-9", 0 ] 821 | } 822 | 823 | } 824 | ], 825 | "dependency_cache" : [ { 826 | 
"name" : "OSC-route.mxo", 827 | "type" : "iLaX" 828 | } 829 | ] 830 | } 831 | 832 | } 833 | --------------------------------------------------------------------------------