├── .gitignore ├── LICENSE ├── README.md ├── dub.sdl └── source └── ft ├── adaptor.d ├── adaptors ├── facemotion3d.d ├── ifacialmocap.d ├── jinsmemelogger.d ├── llf.d ├── openseeface.d ├── package.d ├── phiz.d ├── phizosc.d ├── vmc.d ├── vtsproto.d └── webhook.d ├── data.d └── package.d /.gitignore: -------------------------------------------------------------------------------- 1 | .dub 2 | docs.json 3 | __dummy.html 4 | docs/ 5 | /facetrack-d 6 | facetrack-d.so 7 | facetrack-d.dylib 8 | facetrack-d.dll 9 | facetrack-d.a 10 | facetrack-d.lib 11 | facetrack-d-test-* 12 | *.exe 13 | *.o 14 | *.obj 15 | *.lst 16 | *.a 17 | dub.selections.json 18 | out/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 2-Clause License 2 | 3 | Copyright 2022, Inochi2D Project 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Facetrack-d 2 | 3 | A library for interfacing with face tracking systems. 
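## Usage

A minimal sketch of the polling API, assuming the top-level `ft` package module publicly imports the adaptor API (the registered factory tags and each adaptor's option names live in `source/ft/adaptors/package.d`):

```d
import ft;
import core.thread : Thread;
import core.time : msecs;

void main() {
    // Create and start an adaptor by its registered factory tag
    Adaptor adaptor = ftCreateAdaptor("OpenSeeFace", [
        "osf_bind_ip": "0.0.0.0",
        "osf_bind_port": "11573"
    ]);
    if (adaptor is null) return; // unknown tag

    while (adaptor.isRunning) {
        adaptor.poll(); // copy the latest thread-safe tracking snapshot into the adaptor
        if (adaptor.isReceivingData) {
            float[string] shapes = adaptor.getBlendshapes();
            // ... drive your model from shapes and adaptor.getBones() here ...
        }
        Thread.sleep(16.msecs); // poll at roughly 60 Hz
    }
    adaptor.stop();
}
```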
4 | 5 | ## Features to be implemented 6 | - Improved mapping scheme 7 | - Ability to record tracking 8 | - Ability to replay tracking 9 | - Ability for trackers to be natively executed 10 | 11 | ## Supported Adaptors 12 | - VMC 13 | - VTubeStudio protocol 14 | - OpenSeeFace 15 | - J!NS MEME Logger 16 | - Webhooks 17 | - iFacialMocap 18 | - Facemotion3D 19 | - LiveLinkFace/MeFaMo 20 | 21 | ## Adaptors to be supported 22 | - NVIDIA Maxine 23 | -------------------------------------------------------------------------------- /dub.sdl: -------------------------------------------------------------------------------- 1 | name "facetrack-d" 2 | description "Face tracking integration for D" 3 | authors "Luna" 4 | copyright "Copyright © 2022, Luna" 5 | license "BSD 2-clause" 6 | targetPath "out/" 7 | 8 | dependency "inmath" version="~>1.3.0" 9 | dependency "vmc-d" version="~>1.1.3" 10 | dependency "fghj" version="~>1.0.0" 11 | 12 | configuration "default" { 13 | targetType "library" 14 | } 15 | 16 | configuration "web-adaptors" { 17 | versions "JML" "WebHookAdaptor" "Phiz" 18 | dependency "vibe-http" version="~>1.1.2" 19 | } -------------------------------------------------------------------------------- /source/ft/adaptor.d: -------------------------------------------------------------------------------- 1 | module ft.adaptor; 2 | import ft.data; 3 | 4 | abstract class Adaptor { 5 | protected: 6 | float[string] blendshapes; 7 | Bone[string] bones; 8 | string[string] options; 9 | 10 | int dataLossCounter; 11 | enum RECV_TIMEOUT = 16; 12 | 13 | public: 14 | ~this() { 15 | if (this.isRunning) this.stop(); 16 | } 17 | 18 | abstract void start(); 19 | abstract void stop(); 20 | abstract void poll(); 21 | abstract string[] getOptionNames(); 22 | abstract bool isRunning(); 23 | abstract bool isReceivingData(); 24 | abstract string getAdaptorName(); 25 | 26 | final 27 | void start(string[string] options) { 28 | this.setOptions(options); 29 | this.start(); 30 | } 31 | 32 | final 33 | void setOptions(string[string] options = string[string].init) { this.options = options; } 34 | 35 | final 36 | ref string[string] getOptions() { return options; } 37 | 38 | final 39 | ref float[string] getBlendshapes() { return blendshapes; } 40 | 41 | final 42 | ref Bone[string] getBones() { return bones; } 43 | } -------------------------------------------------------------------------------- /source/ft/adaptors/facemotion3d.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.facemotion3d; 2 | import ft.adaptor; 3 | import ft.data; 4 | import std.conv : to; 5 | import std.socket; 6 | import fghj.serialization; 7 | import fghj; 8 | import inmath.linalg; 9 | import core.thread; 10 | import core.sync.mutex; 11 | import std.exception; 12 | import inmath.math; 13 | import std.stdio : writeln, write; 14 | import std.array; 15 | 16 | /** 17 | Represents the raw blendshape tracking data to be sent to facetrack-d via UDP.
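    The wire format, as recovered from the parser below, is a sequence of '|'-terminated
    segments: "name&value" pairs for blendshapes (value in 0..100),
    "leftEye#x,y,z" / "rightEye#x,y,z" for eye angles, and
    "=head#rotX,rotY,rotZ,posX,posY,posZ" for the head bone.
    An illustrative (not captured) packet:

        eyeBlinkLeft&12.5|leftEye#3.1,-1.2,0.0|=head#-5.2,3.1,0.4,0.01,0.02,0.45|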
18 | */ 19 | struct FM3DTrackingData { 20 | 21 | this(string datastr) { 22 | this.indata = datastr; 23 | 24 | // writeln(datastr); 25 | 26 | size_t i = 0; 27 | size_t nBufStart; 28 | size_t nBufEnd; 29 | size_t nBufSplit; 30 | try { 31 | while(i < datastr.length) { 32 | 33 | // Skip whitespace 34 | while(datastr[i] == ' ') i++; 35 | 36 | if (datastr[i] != '|') nBufEnd = i; 37 | else { 38 | nBufEnd++; 39 | // writeln(datastr[nBufStart..nBufEnd]); 40 | // Head bone mode 41 | if (datastr[nBufStart] == '=') { 42 | nBufStart += 5; // Skip "head#" part 43 | 44 | // Fetch values 45 | float[6] values; 46 | size_t rStart = nBufStart+1; 47 | size_t rEnd = rStart; 48 | size_t aIdx = 0; 49 | while (rEnd < nBufEnd) { 50 | rEnd++; 51 | if (datastr[rEnd] == ',' || datastr[rEnd] == '|') { 52 | values[aIdx++] = datastr[rStart..rEnd].to!float; 53 | rStart = rEnd+1; 54 | } 55 | } 56 | 57 | headRot.x = values[0]; 58 | headRot.y = values[1]; 59 | headRot.z = values[2]; 60 | headPos.x = values[3]; 61 | headPos.y = values[4]; 62 | headPos.z = values[5]; 63 | } else { 64 | 65 | nBufSplit = nBufStart; 66 | while(nBufSplit++ < nBufEnd) { 67 | if (datastr[nBufSplit] == '&') { 68 | 69 | // Blendshape mode 70 | blendshapes[datastr[nBufStart..nBufSplit].idup] = datastr[nBufSplit+1..nBufEnd].to!float/100.0; 71 | break; 72 | } else if (datastr[nBufSplit] == '#') { 73 | 74 | // Bone mode 75 | float[3] values; 76 | size_t rStart = nBufSplit+1; 77 | size_t rEnd = rStart; 78 | size_t aIdx = 0; 79 | while (rEnd < nBufEnd) { 80 | rEnd++; 81 | if (datastr[rEnd] == ',' || datastr[rEnd] == '|') { 82 | values[aIdx++] = datastr[rStart..rEnd].to!float; 83 | rStart = rEnd+1; 84 | } 85 | } 86 | 87 | // Load data in to leftEye or rightEye 88 | if (datastr[nBufStart..nBufSplit] == "leftEye") { 89 | leftEye.x = values[0]; 90 | leftEye.y = values[1]; 91 | leftEye.z = values[2]; 92 | } else if (datastr[nBufStart..nBufSplit] == "rightEye") { 93 | rightEye.x = values[0]; 94 | rightEye.y = values[1]; 95 | rightEye.z = values[2]; 96 | } 97 | break; 98 | } 99 | } 100 | } 101 | 102 | // Next iteration 103 | nBufStart = ++i; 104 | nBufEnd = nBufStart; 105 | } 106 | 107 | 108 | // Next iter 109 | i++; 110 | } 111 | } catch(Exception ex) { 112 | writeln(ex.msg, " ", nBufStart, " ", nBufSplit, " ", nBufEnd, " ", datastr[nBufStart..nBufEnd]); 113 | } 114 | } 115 | 116 | /** 117 | Current iOS blendshapes. 
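    Keyed by ARKit-style names (e.g. "eyeBlinkLeft", "mouthSmileLeft");
    the constructor above normalizes the raw 0..100 values to 0..1.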
118 | */ 119 | float[string] blendshapes; 120 | 121 | vec3 headPos = vec3(0, 0, 0); 122 | vec3 headRot = vec3(0, 0, 0); 123 | vec3 leftEye = vec3(0, 0, 0); 124 | vec3 rightEye = vec3(0, 0, 0); 125 | 126 | string indata; 127 | } 128 | 129 | /** 130 | Thread-safe queue for FM3D tracking data 131 | */ 132 | struct FM3DThreadSafeData { 133 | private: 134 | FM3DTrackingData data; 135 | Mutex mtx; 136 | bool updated_; 137 | 138 | public: 139 | this(Mutex mutex) { 140 | this.mtx = mutex; 141 | } 142 | 143 | bool updated() { 144 | mtx.lock(); 145 | scope(exit) mtx.unlock(); 146 | return updated_; 147 | } 148 | 149 | void set(FM3DTrackingData data) { 150 | mtx.lock(); 151 | updated_ = true; 152 | this.data = data; 153 | mtx.unlock(); 154 | } 155 | 156 | FM3DTrackingData get() { 157 | mtx.lock(); 158 | updated_ = false; 159 | scope(exit) mtx.unlock(); 160 | return data; 161 | } 162 | } 163 | 164 | /** 165 | Adaptor to receive Facemotion3D tracking data 166 | */ 167 | class FM3DAdaptor : Adaptor { 168 | private: 169 | // Constant enums 170 | enum fm3dSendPort = 49993; 171 | enum fm3dListenPort = 49983; 172 | enum fm3dPollRate = 8; 173 | 174 | // Data 175 | size_t dataPacketsReceivedTotal; 176 | FM3DThreadSafeData tsdata; 177 | 178 | // Settings 179 | string phoneIP; 180 | 181 | // Sockets 182 | Socket sender; 183 | Socket dataIn; 184 | 185 | // Threading 186 | bool isCloseRequested; 187 | Thread listeningThread; 188 | 189 | bool gotDataFromFetch; 190 | 191 | void listenThread() { 192 | ubyte[ushort.max] buff; 193 | Address addr = new InternetAddress(InternetAddress.ADDR_ANY, fm3dListenPort); 194 | 195 | int failed = 0; 196 | while (!isCloseRequested) { 197 | try { 198 | ptrdiff_t recvBytes = dataIn.receiveFrom(buff, SocketFlags.NONE, addr); 199 | if (recvBytes != Socket.ERROR && recvBytes <= buff.length) { 200 | dataPacketsReceivedTotal++; 201 | string recvString = cast(string)buff[0..recvBytes]; 202 | auto trackingData = FM3DTrackingData(recvString); 203 | failed = 0; 204 | tsdata.set(trackingData); 205 | } 206 | Thread.sleep(fm3dPollRate.msecs); 207 | } catch (Exception ex) { 208 | writeln(ex.msg); 209 | failed++; 210 | Thread.sleep(fm3dPollRate.msecs); 211 | 212 | if (failed > 100) { 213 | 214 | // try connecting again 215 | sender.sendTo("FACEMOTION3D_OtherStreaming", SocketFlags.NONE, new InternetAddress(phoneIP, fm3dSendPort)); 216 | Thread.sleep(1.seconds); 217 | failed = 0; 218 | dataLossCounter = RECV_TIMEOUT; 219 | } 220 | } 221 | } 222 | } 223 | 224 | public: 225 | ~this() { 226 | this.stop(); 227 | } 228 | 229 | override 230 | string getAdaptorName() { 231 | return "Facemotion3D"; 232 | } 233 | 234 | override 235 | void start() { 236 | if ("phoneIP" in options) { 237 | phoneIP = options["phoneIP"]; 238 | } else return; 239 | 240 | if (isRunning) this.stop(); 241 | 242 | // Start our new threading 243 | isCloseRequested = false; 244 | tsdata = FM3DThreadSafeData(new Mutex()); 245 | 246 | try { 247 | sender = new UdpSocket(); 248 | sender.sendTo("FACEMOTION3D_OtherStreaming", SocketFlags.NONE, new InternetAddress(phoneIP, fm3dSendPort)); 249 | 250 | dataIn = new UdpSocket(); 251 | dataIn.setOption(SocketOptionLevel.SOCKET, SocketOption.RCVTIMEO, 5.msecs); 252 | dataIn.bind(new InternetAddress("0.0.0.0", fm3dListenPort)); 253 | } catch (Exception ex) { 254 | dataIn.close(); 255 | dataIn = null; 256 | return; 257 | } 258 | 259 | // Reset PPS counter 260 | dataPacketsReceivedTotal = 0; 261 | 262 | // Start threads 263 | if (dataIn.isAlive) { 264 | listeningThread = new Thread(&listenThread);
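// The blocking receive loop runs on this dedicated thread; poll() never touches the socket directly and only reads the mutex-guarded snapshot in tsdata.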
265 | listeningThread.start(); 266 | } 267 | } 268 | 269 | override 270 | void stop() { 271 | if (isRunning) { 272 | // Stop threads 273 | isCloseRequested = true; 274 | listeningThread.join(false); 275 | 276 | // Close UDP sockets 277 | dataIn.close(); 278 | 279 | // Set everything to null 280 | listeningThread = null; 281 | dataIn = null; 282 | } 283 | } 284 | 285 | override 286 | void poll() { 287 | if (!isRunning) return; 288 | 289 | if (tsdata.updated) { 290 | FM3DTrackingData data = tsdata.get(); 291 | dataLossCounter = 0; 292 | gotDataFromFetch = true; 293 | 294 | bones[BoneNames.ftHead] = Bone( 295 | vec3(data.headPos.x*-1, data.headPos.y, data.headPos.z), 296 | quat.eulerRotation(radians(data.headRot.z), radians(data.headRot.y), radians(data.headRot.x)) 297 | ); 298 | 299 | bones[BoneNames.vmcLeftEye] = Bone( 300 | vec3(0, 0, 0), 301 | quat.eulerRotation(radians(data.leftEye.z), radians(data.leftEye.y), radians(data.leftEye.x)) 302 | ); 303 | 304 | bones[BoneNames.vmcRightEye] = Bone( 305 | vec3(0, 0, 0), // rotation only, as with the left eye 306 | quat.eulerRotation(radians(data.rightEye.z), radians(data.rightEye.y), radians(data.rightEye.x)) 307 | ); 308 | 309 | // Duplicate blendshapes in 310 | this.blendshapes = data.blendshapes.dup; 311 | 312 | try { 313 | if (this.blendshapes.length > 0) { 314 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = this.blendshapes["eyeBlinkLeft"]; 315 | this.blendshapes[BlendshapeNames.ftEyeXLeft] = this.blendshapes["eyeLookOutLeft"]-this.blendshapes["eyeLookInLeft"]; 316 | this.blendshapes[BlendshapeNames.ftEyeYLeft] = this.blendshapes["eyeLookUpLeft"]-this.blendshapes["eyeLookDownLeft"]; 317 | this.blendshapes[BlendshapeNames.ftEyeSquintLeft] = this.blendshapes["eyeSquintLeft"]; 318 | this.blendshapes[BlendshapeNames.ftEyeWidenLeft] = this.blendshapes["eyeWideLeft"]; 319 | 320 | // RIGHT EYE 321 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = this.blendshapes["eyeBlinkRight"]; 322 | this.blendshapes[BlendshapeNames.ftEyeXRight] = this.blendshapes["eyeLookInRight"]-this.blendshapes["eyeLookOutRight"]; 323 | this.blendshapes[BlendshapeNames.ftEyeYRight] = this.blendshapes["eyeLookUpRight"]-this.blendshapes["eyeLookDownRight"]; 324 | this.blendshapes[BlendshapeNames.ftEyeSquintRight] = this.blendshapes["eyeSquintRight"]; 325 | this.blendshapes[BlendshapeNames.ftEyeWidenRight] = this.blendshapes["eyeWideRight"]; 326 | 327 | // MOUTH 328 | this.blendshapes[BlendshapeNames.ftMouthOpen] = clamp( 329 | 330 | // Avg out the different ways of opening the mouth 331 | ( 332 | ((this.blendshapes["mouthLowerDownLeft"]+this.blendshapes["mouthUpperUpLeft"])/2) + 333 | ((this.blendshapes["mouthLowerDownRight"]+this.blendshapes["mouthUpperUpRight"])/2) 334 | ), 335 | 0, 336 | 1 337 | ); 338 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + this.blendshapes["mouthLeft"]-this.blendshapes["mouthRight"]) / 2.0; 339 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 340 | clamp( 341 | 1 + 342 | ((this.blendshapes["mouthSmileLeft"]+this.blendshapes["mouthSmileRight"])/2.0) - 343 | ((this.blendshapes["mouthFrownLeft"]+this.blendshapes["mouthFrownRight"])/2.0), 344 | 0, 2 345 | ) 346 | ) / 2.0; 347 | } 348 | } catch (Exception ex) { } // Some unknown format, drop creating ft blendshapes 349 | } else { 350 | if (dataLossCounter > RECV_TIMEOUT*10) gotDataFromFetch = false; 351 | dataLossCounter++; 352 | } 353 | } 354 | 355 | override 356 | bool isRunning() { 357 | return dataIn !is null; 358 | } 359 | 360 | override 361 | bool isReceivingData() { 362 |
return gotDataFromFetch; 363 | } 364 | 365 | override 366 | string[] getOptionNames() { 367 | return [ 368 | "phoneIP" 369 | ]; 370 | } 371 | } 372 | -------------------------------------------------------------------------------- /source/ft/adaptors/ifacialmocap.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.ifacialmocap; 2 | import ft.adaptor; 3 | import ft.data; 4 | import std.conv : to; 5 | import std.socket; 6 | import fghj.serialization; 7 | import fghj; 8 | import inmath.linalg; 9 | import core.thread; 10 | import core.sync.mutex; 11 | import std.exception; 12 | import inmath.math; 13 | import std.stdio : writeln, write; 14 | import std.array; 15 | 16 | /** 17 | Represents the raw blendshape tracking data to be sent to facetrack-d via UDP. 18 | */ 19 | struct IFMTrackingData { 20 | 21 | this(string datastr) { 22 | this.indata = datastr; 23 | 24 | // writeln(datastr); 25 | 26 | size_t i = 0; 27 | size_t nBufStart; 28 | size_t nBufEnd; 29 | size_t nBufSplit; 30 | try { 31 | while(i < datastr.length) { 32 | 33 | // Skip whitespace 34 | while(datastr[i] == ' ') i++; 35 | 36 | if (datastr[i] != '|') nBufEnd = i; 37 | else { 38 | nBufEnd++; 39 | // writeln(datastr[nBufStart..nBufEnd]); 40 | // Head bone mode 41 | if (datastr[nBufStart] == '=') { 42 | nBufStart += 5; // Skip "head#" part 43 | 44 | // Fetch values 45 | float[6] values; 46 | size_t rStart = nBufStart+1; 47 | size_t rEnd = rStart; 48 | size_t aIdx = 0; 49 | while (rEnd < nBufEnd) { 50 | rEnd++; 51 | if (datastr[rEnd] == ',' || datastr[rEnd] == '|') { 52 | values[aIdx++] = datastr[rStart..rEnd].to!float; 53 | rStart = rEnd+1; 54 | } 55 | } 56 | 57 | headRot.x = values[0]; 58 | headRot.y = values[1]; 59 | headRot.z = values[2]; 60 | headPos.x = values[3]; 61 | headPos.y = values[4]; 62 | headPos.z = values[5]; 63 | } else { 64 | 65 | nBufSplit = nBufStart; 66 | while(nBufSplit++ < nBufEnd) { 67 | if (datastr[nBufSplit] == '-') { 68 | 69 | // Blendshape mode 70 | blendshapes[datastr[nBufStart..nBufSplit].idup] = datastr[nBufSplit+1..nBufEnd].to!float/100.0; 71 | break; 72 | } else if (datastr[nBufSplit] == '#') { 73 | 74 | // Bone mode 75 | float[3] values; 76 | size_t rStart = nBufSplit+1; 77 | size_t rEnd = rStart; 78 | size_t aIdx = 0; 79 | while (rEnd < nBufEnd) { 80 | rEnd++; 81 | if (datastr[rEnd] == ',' || datastr[rEnd] == '|') { 82 | values[aIdx++] = datastr[rStart..rEnd].to!float; 83 | rStart = rEnd+1; 84 | } 85 | } 86 | 87 | // Load data in to leftEye or rightEye 88 | if (datastr[nBufStart..nBufSplit] == "leftEye") { 89 | leftEye.x = values[0]; 90 | leftEye.y = values[1]; 91 | leftEye.z = values[2]; 92 | } else if (datastr[nBufStart..nBufSplit] == "rightEye") { 93 | rightEye.x = values[0]; 94 | rightEye.y = values[1]; 95 | rightEye.z = values[2]; 96 | } 97 | break; 98 | } 99 | } 100 | } 101 | 102 | // Next iteration 103 | nBufStart = ++i; 104 | nBufEnd = nBufStart; 105 | } 106 | 107 | 108 | // Next iter 109 | i++; 110 | } 111 | } catch(Exception ex) { 112 | writeln(ex.msg, " ", nBufStart, " ", nBufSplit, " ", nBufEnd, " ", datastr[nBufStart..nBufEnd]); 113 | } 114 | } 115 | 116 | /** 117 | Current iOS blendshapes. 
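    Keyed by the app's underscore-suffixed ARKit names (e.g. "eyeBlink_L", "mouthSmile_R");
    the constructor above normalizes the raw 0..100 values to 0..1.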
118 | */ 119 | float[string] blendshapes; 120 | 121 | vec3 headPos = vec3(0, 0, 0); 122 | vec3 headRot = vec3(0, 0, 0); 123 | vec3 leftEye = vec3(0, 0, 0); 124 | vec3 rightEye = vec3(0, 0, 0); 125 | 126 | string indata; 127 | } 128 | 129 | /** 130 | Thread-safe queue for IFM tracking data 131 | */ 132 | struct IFMThreadSafeData { 133 | private: 134 | IFMTrackingData data; 135 | Mutex mtx; 136 | bool updated_; 137 | 138 | public: 139 | this(Mutex mutex) { 140 | this.mtx = mutex; 141 | } 142 | 143 | bool updated() { 144 | mtx.lock(); 145 | scope(exit) mtx.unlock(); 146 | return updated_; 147 | } 148 | 149 | void set(IFMTrackingData data) { 150 | mtx.lock(); 151 | updated_ = true; 152 | this.data = data; 153 | mtx.unlock(); 154 | } 155 | 156 | IFMTrackingData get() { 157 | mtx.lock(); 158 | updated_ = false; 159 | scope(exit) mtx.unlock(); 160 | return data; 161 | } 162 | } 163 | 164 | /** 165 | Adaptor to receive iFacialMocap tracking data 166 | */ 167 | class IFMAdaptor : Adaptor { 168 | private: 169 | // Constant enums 170 | enum ifmPort = 49983; 171 | enum ifmPollRate = 8; 172 | enum magicString = "iFacialMocap_sahuasouryya9218sauhuiayeta91555dy3719"; 173 | 174 | // Data 175 | size_t dataPacketsReceivedTotal; 176 | IFMThreadSafeData tsdata; 177 | 178 | // Settings 179 | string phoneIP; 180 | 181 | // Sockets 182 | Socket sender; 183 | Socket dataIn; 184 | 185 | // Threading 186 | bool isCloseRequested; 187 | Thread listeningThread; 188 | 189 | bool gotDataFromFetch; 190 | 191 | void listenThread() { 192 | ubyte[ushort.max] buff; 193 | Address addr = new InternetAddress(InternetAddress.ADDR_ANY, ifmPort); 194 | 195 | int failed = 0; 196 | while (!isCloseRequested) { 197 | try { 198 | ptrdiff_t recvBytes = dataIn.receiveFrom(buff, SocketFlags.NONE, addr); 199 | if (recvBytes != Socket.ERROR && recvBytes <= buff.length) { 200 | dataPacketsReceivedTotal++; 201 | string recvString = cast(string)buff[0..recvBytes]; 202 | auto trackingData = IFMTrackingData(recvString); 203 | failed = 0; 204 | tsdata.set(trackingData); 205 | } 206 | Thread.sleep(ifmPollRate.msecs); 207 | } catch (Exception ex) { 208 | writeln(ex.msg); 209 | failed++; 210 | Thread.sleep(ifmPollRate.msecs); 211 | 212 | if (failed > 100) { 213 | 214 | // try connecting again 215 | auto addrPort = new InternetAddress(phoneIP, ifmPort); 216 | sender.sendTo(magicString, SocketFlags.NONE, addrPort); // re-send the handshake token so the app resumes streaming to us 217 | Thread.sleep(1.seconds); 218 | failed = 0; 219 | dataLossCounter = RECV_TIMEOUT; 220 | } 221 | } 222 | } 223 | } 224 | 225 | public: 226 | ~this() { 227 | this.stop(); 228 | } 229 | 230 | override 231 | string getAdaptorName() { 232 | return "iFacialMocap"; 233 | } 234 | 235 | override 236 | void start() { 237 | if ("phoneIP" in options) { 238 | phoneIP = options["phoneIP"]; 239 | import std.socket : parseAddress; 240 | // Let parseAddress throw on a malformed IP here instead of hard-crashing later 241 | parseAddress(phoneIP); 242 | } else return; 243 | 244 | if (isRunning) this.stop(); 245 | 246 | // Start our new threading 247 | isCloseRequested = false; 248 | tsdata = IFMThreadSafeData(new Mutex()); 249 | 250 | try { 251 | sender = new UdpSocket(); 252 | auto addrPort = new InternetAddress(phoneIP, ifmPort); 253 | sender.sendTo(magicString, SocketFlags.NONE, addrPort); // the app starts streaming to this machine once it receives the magic token
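// Note: the 5 ms receive timeout set below keeps listenThread's receiveFrom() from blocking indefinitely, so isCloseRequested is honored promptly.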
254 | 255 | dataIn = new UdpSocket(); 256 | dataIn.setOption(SocketOptionLevel.SOCKET, SocketOption.RCVTIMEO, 5.msecs); 257 | dataIn.bind(new InternetAddress("0.0.0.0", ifmPort)); 258 | } catch (Exception ex) { 259 | dataIn.close(); 260 | dataIn = null; 261 | return; 262 | } 263 | 264 | // Reset PPS counter 265 | dataPacketsReceivedTotal = 0; 266 | 267 | // Start threads 268 | if (dataIn.isAlive) { 269 | listeningThread = new Thread(&listenThread); 270 | listeningThread.start(); 271 | } 272 | } 273 | 274 | override 275 | void stop() { 276 | if (isRunning) { 277 | // Stop threads 278 | isCloseRequested = true; 279 | listeningThread.join(false); 280 | 281 | // Close UDP sockets 282 | dataIn.close(); 283 | 284 | // Set everything to null 285 | listeningThread = null; 286 | dataIn = null; 287 | } 288 | } 289 | 290 | override 291 | void poll() { 292 | if (!isRunning) return; 293 | 294 | if (tsdata.updated) { 295 | IFMTrackingData data = tsdata.get(); 296 | dataLossCounter = 0; 297 | gotDataFromFetch = true; 298 | 299 | bones[BoneNames.ftHead] = Bone( 300 | vec3(data.headPos.x*-1, data.headPos.y, data.headPos.z), 301 | quat.eulerRotation(radians(data.headRot.z), radians(data.headRot.y), radians(data.headRot.x)) 302 | ); 303 | 304 | bones[BoneNames.vmcLeftEye] = Bone( 305 | vec3(0, 0, 0), 306 | quat.eulerRotation(radians(data.leftEye.z), radians(data.leftEye.y), radians(data.leftEye.x)) 307 | ); 308 | 309 | bones[BoneNames.vmcRightEye] = Bone( 310 | vec3(0, 0, 0), // rotation only, as with the left eye 311 | quat.eulerRotation(radians(data.rightEye.z), radians(data.rightEye.y), radians(data.rightEye.x)) 312 | ); 313 | 314 | // Duplicate blendshapes in 315 | this.blendshapes = data.blendshapes.dup; 316 | 317 | try { 318 | if (this.blendshapes.length > 0) { 319 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = this.blendshapes["eyeBlink_L"]; 320 | this.blendshapes[BlendshapeNames.ftEyeXLeft] = this.blendshapes["eyeLookOut_L"]-this.blendshapes["eyeLookIn_L"]; 321 | this.blendshapes[BlendshapeNames.ftEyeYLeft] = this.blendshapes["eyeLookUp_L"]-this.blendshapes["eyeLookDown_L"]; 322 | this.blendshapes[BlendshapeNames.ftEyeSquintLeft] = this.blendshapes["eyeSquint_L"]; 323 | this.blendshapes[BlendshapeNames.ftEyeWidenLeft] = this.blendshapes["eyeWide_L"]; 324 | 325 | // RIGHT EYE 326 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = this.blendshapes["eyeBlink_R"]; 327 | this.blendshapes[BlendshapeNames.ftEyeXRight] = this.blendshapes["eyeLookIn_R"]-this.blendshapes["eyeLookOut_R"]; 328 | this.blendshapes[BlendshapeNames.ftEyeYRight] = this.blendshapes["eyeLookUp_R"]-this.blendshapes["eyeLookDown_R"]; 329 | this.blendshapes[BlendshapeNames.ftEyeSquintRight] = this.blendshapes["eyeSquint_R"]; 330 | this.blendshapes[BlendshapeNames.ftEyeWidenRight] = this.blendshapes["eyeWide_R"]; 331 | 332 | // MOUTH 333 | this.blendshapes[BlendshapeNames.ftMouthOpen] = clamp( 334 | 335 | // Avg out the different ways of opening the mouth 336 | ( 337 | ((this.blendshapes["mouthLowerDown_L"]+this.blendshapes["mouthUpperUp_L"])/2) + 338 | ((this.blendshapes["mouthLowerDown_R"]+this.blendshapes["mouthUpperUp_R"])/2) 339 | ), 340 | 0, 341 | 1 342 | ); 343 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + this.blendshapes["mouthLeft"]-this.blendshapes["mouthRight"]) / 2.0; 344 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 345 | clamp( 346 | 1 + 347 | ((this.blendshapes["mouthSmile_L"]+this.blendshapes["mouthSmile_R"])/2.0) - 348 | ((this.blendshapes["mouthFrown_L"]+this.blendshapes["mouthFrown_R"])/2.0), 349
| 0, 2 350 | ) 351 | ) / 2.0; 352 | } 353 | } catch (Exception ex) { } // Some unknown format, drop creating ft blendshapes 354 | } else { 355 | if (dataLossCounter > RECV_TIMEOUT*10) gotDataFromFetch = false; 356 | dataLossCounter++; 357 | } 358 | } 359 | 360 | override 361 | bool isRunning() { 362 | return dataIn !is null; 363 | } 364 | 365 | override 366 | bool isReceivingData() { 367 | return gotDataFromFetch; 368 | } 369 | 370 | override 371 | string[] getOptionNames() { 372 | return [ 373 | "phoneIP" 374 | ]; 375 | } 376 | } 377 | -------------------------------------------------------------------------------- /source/ft/adaptors/jinsmemelogger.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.jinsmemelogger; 2 | version (JML) { 3 | import ft.adaptor; 4 | import ft.data; 5 | 6 | import vibe.http.websockets; 7 | import vibe.http.server; 8 | import vibe.http.router; 9 | import vibe.data.json; 10 | import vibe.core.sync; 11 | import core.thread; 12 | import core.sync.mutex; 13 | import std.conv; 14 | 15 | 16 | struct JMLData { 17 | double time; 18 | float[string] data; 19 | } 20 | 21 | struct JMLThreadSafeData { 22 | private: 23 | JMLData data; 24 | Mutex mtx; 25 | bool updated_; 26 | 27 | public: 28 | this(Mutex mutex) { 29 | this.mtx = mutex; 30 | } 31 | 32 | bool updated() { 33 | if (mtx is null) 34 | return false; 35 | mtx.lock(); 36 | scope(exit) mtx.unlock(); 37 | return updated_; 38 | } 39 | 40 | void set(JMLData data) { 41 | if (mtx is null) 42 | return; 43 | mtx.lock(); 44 | updated_ = true; 45 | this.data = data; 46 | mtx.unlock(); 47 | } 48 | 49 | JMLData get() { 50 | if (mtx is null) 51 | return data; 52 | mtx.lock(); 53 | updated_ = false; 54 | scope(exit) mtx.unlock(); 55 | return data; 56 | } 57 | } 58 | 59 | class JMLAdaptor : Adaptor { 60 | private: 61 | ushort port = 23456; 62 | string bind = "0.0.0.0"; 63 | 64 | bool isCloseRequested; 65 | Thread receivingThread; 66 | Mutex mutex; 67 | TaskCondition condition; 68 | 69 | int dataLossCounter; 70 | int sequenceNumber; 71 | enum RECV_TIMEOUT = 30; 72 | enum CALIBRATION_TRIGGER_INTERVAL = 20 * 30; 73 | 74 | bool gotDataFromFetch = false; 75 | 76 | JMLThreadSafeData tsdata; 77 | 78 | float initYaw = 0; 79 | float nextYaw = 0; 80 | int numInitYaw; 81 | int lastSequenceNumber; 82 | int onBootup = true; 83 | float[CALIBRATION_TRIGGER_INTERVAL] yawHistory = [0]; 84 | 85 | public: 86 | ~this() { 87 | this.stop(); 88 | } 89 | 90 | void handleConnection(scope WebSocket socket) { 91 | 92 | while (!isCloseRequested && socket.connected) { 93 | try { 94 | ptrdiff_t received = socket.waitForData(16.msecs); 95 | if (received < 0) { 96 | continue; 97 | } 98 | JMLData data; 99 | 100 | auto text = socket.receiveText; 101 | foreach (string key, value; parseJson(text)) { 102 | data.data[key] = value.to!float; 103 | } 104 | 105 | tsdata.set(data); 106 | } catch (Exception ex) { 107 | Thread.sleep(100.msecs); 108 | } 109 | } 110 | 111 | } 112 | 113 | void receiveThread() { 114 | isCloseRequested = false; 115 | tsdata = JMLThreadSafeData(new Mutex()); 116 | 117 | HTTPServerSettings settings = new HTTPServerSettings(); 118 | settings.port = port; 119 | settings.bindAddresses = [bind]; 120 | 121 | auto router = new URLRouter; 122 | router.get("/", handleWebSockets(&this.handleConnection)); 123 | 124 | HTTPListener listener = listenHTTP(settings, router); 125 | synchronized (mutex) { 126 | condition.wait(); 127 | } 128 | listener.stopListening(); 129 | } 130 | 131 | override 132 | void 
start() { 133 | calibrate(); 134 | if ("jml_bind_port" in this.options) { 135 | string port_str = options["jml_bind_port"]; 136 | if (port_str !is null && port_str != "") 137 | port = to!ushort(this.options["jml_bind_port"]); 138 | } 139 | 140 | if ("jml_bind_ip" in this.options) { 141 | string addr_str = options["jml_bind_ip"]; 142 | if (addr_str !is null && addr_str != "") 143 | bind = this.options["jml_bind_ip"]; 144 | } 145 | 146 | this.stop(); 147 | mutex = new Mutex; 148 | condition = new TaskCondition(mutex); 149 | receivingThread = new Thread(&receiveThread); 150 | receivingThread.start(); 151 | } 152 | 153 | override 154 | void stop() { 155 | if (isRunning) { 156 | isCloseRequested = true; 157 | condition.notify(); 158 | receivingThread.join(false); 159 | mutex = null; 160 | condition = null; 161 | receivingThread = null; 162 | } 163 | } 164 | 165 | override 166 | void poll() { 167 | if (tsdata.updated) { 168 | dataLossCounter = 0; 169 | gotDataFromFetch = true; 170 | JMLData data = tsdata.get(); 171 | 172 | blendshapes = data.data.dup; 173 | 174 | if (lastSequenceNumber < 0) { 175 | lastSequenceNumber = cast(int)blendshapes["sequenceNumber"]; 176 | sequenceNumber = 0; 177 | } 178 | 179 | int sequenceDiff = cast(int)blendshapes["sequenceNumber"] - lastSequenceNumber; 180 | if (sequenceDiff < 0) 181 | sequenceDiff += 256; 182 | 183 | if (sequenceDiff > 0) { 184 | sequenceNumber += sequenceDiff; 185 | if (sequenceNumber >= CALIBRATION_TRIGGER_INTERVAL) { 186 | sequenceNumber = 0; 187 | onBootup = false; 188 | } 189 | nextYaw -= yawHistory[sequenceNumber]; 190 | yawHistory[sequenceNumber] = cast(int)blendshapes["yaw"]; 191 | nextYaw += yawHistory[sequenceNumber]; 192 | if (onBootup) 193 | numInitYaw += sequenceDiff; 194 | } 195 | 196 | lastSequenceNumber = cast(int)blendshapes["sequenceNumber"]; 197 | if (numInitYaw > 0) 198 | initYaw = nextYaw / numInitYaw; 199 | 200 | float headYaw; 201 | headYaw = blendshapes["yaw"] - initYaw; 202 | headYaw = headYaw > 180? -360 + headYaw: headYaw; 203 | headYaw = headYaw < -180? 
360 + headYaw: headYaw; 204 | blendshapes["jmlYaw"] = headYaw; 205 | 206 | } else { 207 | dataLossCounter ++; 208 | if (dataLossCounter > RECV_TIMEOUT) 209 | gotDataFromFetch = false; 210 | } 211 | } 212 | 213 | void calibrate() { 214 | lastSequenceNumber = -1; 215 | numInitYaw = 0; 216 | if (onBootup) 217 | foreach (i; 0..CALIBRATION_TRIGGER_INTERVAL) 218 | yawHistory[i] = 0; 219 | } 220 | 221 | override 222 | bool isRunning() { 223 | return receivingThread !is null; 224 | } 225 | 226 | override 227 | string[] getOptionNames() { 228 | return [ 229 | "jml_bind_port", 230 | "jml_bind_ip" 231 | ]; 232 | } 233 | 234 | override string getAdaptorName() { 235 | return "JINS MEME Logger"; 236 | } 237 | 238 | override 239 | bool isReceivingData() { 240 | return gotDataFromFetch; 241 | } 242 | } 243 | 244 | 245 | } 246 | -------------------------------------------------------------------------------- /source/ft/adaptors/llf.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.llf; 2 | import ft.adaptor; 3 | import ft.data; 4 | import std.conv : to; 5 | import std.bitmanip; 6 | import std.socket; 7 | import fghj.serialization; 8 | import fghj; 9 | import inmath.linalg; 10 | import core.thread; 11 | import core.sync.mutex; 12 | import std.exception; 13 | import inmath.math; 14 | import std.stdio : writeln; 15 | 16 | const ushort llfBlendshapes = 61; 17 | // canonical 61 IDs mapping 18 | const string[] llfBlendshapeNames = [ 19 | BlendshapeNames.eyeBlinkLeft, 20 | BlendshapeNames.eyeLookDownLeft, 21 | BlendshapeNames.eyeLookInLeft, 22 | BlendshapeNames.eyeLookOutLeft, 23 | BlendshapeNames.eyeLookUpLeft, 24 | BlendshapeNames.eyeSquintLeft, 25 | BlendshapeNames.eyeWideLeft, 26 | 27 | BlendshapeNames.eyeBlinkRight, 28 | BlendshapeNames.eyeLookDownRight, 29 | BlendshapeNames.eyeLookInRight, 30 | BlendshapeNames.eyeLookOutRight, 31 | BlendshapeNames.eyeLookUpRight, 32 | BlendshapeNames.eyeSquintRight, 33 | BlendshapeNames.eyeWideRight, 34 | 35 | BlendshapeNames.jawForward, 36 | BlendshapeNames.jawLeft, 37 | BlendshapeNames.jawRight, 38 | BlendshapeNames.jawOpen, 39 | 40 | BlendshapeNames.mouthClose, 41 | BlendshapeNames.mouthFunnel, 42 | BlendshapeNames.mouthPucker, 43 | BlendshapeNames.mouthLeft, 44 | BlendshapeNames.mouthRight, 45 | BlendshapeNames.mouthSmileLeft, 46 | BlendshapeNames.mouthSmileRight, 47 | BlendshapeNames.mouthFrownLeft, 48 | BlendshapeNames.mouthFrownRight, 49 | BlendshapeNames.mouthDimpleLeft, 50 | BlendshapeNames.mouthDimpleRight, 51 | BlendshapeNames.mouthStretchLeft, 52 | BlendshapeNames.mouthStretchRight, 53 | BlendshapeNames.mouthRollLower, 54 | BlendshapeNames.mouthRollUpper, 55 | BlendshapeNames.mouthShrugLower, 56 | BlendshapeNames.mouthShrugUpper, 57 | BlendshapeNames.mouthPressLeft, 58 | BlendshapeNames.mouthPressRight, 59 | BlendshapeNames.mouthLowerDownLeft, 60 | BlendshapeNames.mouthLowerDownRight, 61 | BlendshapeNames.mouthUpperUpLeft, 62 | BlendshapeNames.mouthUpperUpRight, 63 | BlendshapeNames.browDownLeft, 64 | BlendshapeNames.browDownRight, 65 | BlendshapeNames.browInnerUp, 66 | BlendshapeNames.browOuterUpLeft, 67 | BlendshapeNames.browOuterUpRight, 68 | BlendshapeNames.cheekPuff, 69 | BlendshapeNames.cheekSquintLeft, 70 | BlendshapeNames.cheekSquintRight, 71 | BlendshapeNames.noseSneerLeft, 72 | BlendshapeNames.noseSneerRight, 73 | BlendshapeNames.tongueOut, 74 | "headYaw", 75 | "headPitch", 76 | "headRoll", 77 | "leftEyeYaw", 78 | "leftEyePitch", 79 | "leftEyeRoll", 80 | "rightEyeYaw", 81 | "rightEyePitch", 82 | 
"rightEyeRoll" 83 | ]; 84 | 85 | /** 86 | Represents the raw blendshape tracking data to be sent to facetrack-d via UDP. 87 | */ 88 | struct LLFRawTrackingData { 89 | /** 90 | Current blendshapes. 91 | */ 92 | float[llfBlendshapes] blendshapes; 93 | } 94 | 95 | /** 96 | Thread-safe queue for LLF tracking data 97 | */ 98 | struct LLFThreadSafeData { 99 | private: 100 | LLFRawTrackingData data; 101 | Mutex mtx; 102 | bool updated_; 103 | 104 | public: 105 | this(Mutex mutex) { 106 | this.mtx = mutex; 107 | } 108 | 109 | bool updated() { 110 | mtx.lock(); 111 | scope(exit) mtx.unlock(); 112 | return updated_; 113 | } 114 | 115 | void set(LLFRawTrackingData data) { 116 | mtx.lock(); 117 | updated_ = true; 118 | this.data = data; 119 | mtx.unlock(); 120 | } 121 | 122 | LLFRawTrackingData get() { 123 | mtx.lock(); 124 | updated_ = false; 125 | scope(exit) mtx.unlock(); 126 | return data; 127 | } 128 | } 129 | 130 | /** 131 | Adaptor to recieve LiveLinkFace/MeFaMo tracking data 132 | */ 133 | class LLFAdaptor : Adaptor { 134 | private: 135 | // Constant enums 136 | enum llfPort = 11111; 137 | enum llfBind = "0.0.0.0"; 138 | enum vtsRequestDataFramesForSeconds = 1; 139 | 140 | // Data 141 | LLFThreadSafeData tsdata; 142 | 143 | // Settings 144 | 145 | // Sockets 146 | Socket llfIn; 147 | 148 | // Threading 149 | bool isCloseRequested; 150 | Thread listeningThread; 151 | 152 | bool gotDataFromFetch; 153 | 154 | void listenThread() { 155 | ubyte[ushort.max] buff; 156 | Address addr = new InternetAddress(InternetAddress.ADDR_ANY, 0); 157 | 158 | while (!isCloseRequested) { 159 | try { 160 | ptrdiff_t recvBytes = llfIn.receiveFrom(buff, SocketFlags.NONE, addr); 161 | if (recvBytes != Socket.ERROR && recvBytes <= buff.length) { 162 | // need to actually decode here 163 | if (recvBytes < 46) 164 | continue; 165 | // this is a uint, but let's not invite overflows, so decode as a ushort and cast up 166 | uint nameLen = bigEndianToNative!ushort(buff[43 .. 45]); 167 | if (recvBytes < (45 + nameLen + 17)) 168 | continue; 169 | ubyte[] mainBody = buff[(45 + nameLen + 17) .. 
recvBytes]; 170 | 171 | auto trackingData = LLFRawTrackingData(); 172 | foreach (i; 0..llfBlendshapeNames.length) { 173 | if (mainBody.length >= 4) { 174 | trackingData.blendshapes[i] = mainBody.read!(float, Endian.bigEndian); 175 | } else { 176 | trackingData.blendshapes[i] = 0.0f; 177 | } 178 | } 179 | 180 | tsdata.set(trackingData); 181 | } 182 | } catch (Exception ex) { 183 | Thread.sleep(100.msecs); 184 | } 185 | } 186 | } 187 | 188 | public: 189 | ~this() { 190 | this.stop(); 191 | } 192 | 193 | override 194 | string getAdaptorName() { 195 | return "LiveLinkFace/MeFaMo Receiver"; 196 | } 197 | 198 | override 199 | void start() { 200 | 201 | // Do not create zombie threads please 202 | if (isRunning) this.stop(); 203 | 204 | // Start our new threading 205 | isCloseRequested = false; 206 | tsdata = LLFThreadSafeData(new Mutex()); 207 | 208 | llfIn = new UdpSocket(); 209 | llfIn.setOption(SocketOptionLevel.SOCKET, SocketOption.RCVTIMEO, 16.msecs); 210 | llfIn.bind(new InternetAddress(llfBind, llfPort)); 211 | 212 | // Start threads 213 | if (llfIn.isAlive) { 214 | listeningThread = new Thread(&listenThread); 215 | listeningThread.start(); 216 | } 217 | } 218 | 219 | override 220 | void stop() { 221 | if (isRunning) { 222 | // Stop threads 223 | isCloseRequested = true; 224 | 225 | listeningThread.join(false); 226 | 227 | // Close UDP sockets 228 | llfIn.close(); 229 | 230 | // Set everything to null 231 | listeningThread = null; 232 | llfIn = null; 233 | } 234 | } 235 | 236 | override 237 | void poll() { 238 | if (!isRunning) return; 239 | 240 | if (tsdata.updated) { 241 | LLFRawTrackingData data = tsdata.get(); 242 | dataLossCounter = 0; 243 | gotDataFromFetch = data.blendshapes.length > 0; 244 | 245 | // Write in blendshapes 246 | foreach (i; 0..llfBlendshapeNames.length) { 247 | this.blendshapes[llfBlendshapeNames[i]] = i < data.blendshapes.length ? 
data.blendshapes[i] : 0; 248 | } 249 | } else { 250 | dataLossCounter++; 251 | if (dataLossCounter > RECV_TIMEOUT) gotDataFromFetch = false; 252 | } 253 | } 254 | 255 | override 256 | bool isRunning() { 257 | return llfIn !is null; 258 | } 259 | 260 | override 261 | bool isReceivingData() { 262 | return gotDataFromFetch; 263 | } 264 | 265 | override 266 | string[] getOptionNames() { 267 | return [ 268 | ]; 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /source/ft/adaptors/openseeface.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.openseeface; 2 | import ft.adaptor; 3 | import ft.data; 4 | 5 | import std.socket; 6 | import std.conv : to; 7 | import std.range.primitives; 8 | import std.bitmanip; 9 | import inmath.linalg; 10 | import core.thread; 11 | import core.sync.mutex; 12 | import std.traits; 13 | import std.string; 14 | import std.math : PI; 15 | import inmath.math; 16 | 17 | const ushort trackingPoints = 68; 18 | enum OSFFeatureName { 19 | eyeLeft = "eyeLeft", 20 | eyeRight = "eyeRight", 21 | eyebrowSteepnessLeft = "eyebrowSteppnessLeft", 22 | eyebrowUpDownLeft = "eyebrowUpDownLeft", 23 | eyebrowQuirkLeft = "eyebrowQuirkLeft", 24 | eyebrowSteepnessRight = "eyebrowSteppnessRight", 25 | eyebrowUpDownRight = "eyebrowUpDownRight", 26 | eyebrowQuirkRight = "eyebrowQuirkRight", 27 | mouthCornerUpDownLeft = "mouthCornerUpDownLeft", 28 | mouthCornerInOutLeft = "mouthCornerInOutLeft", 29 | mouthCornerUpDownRight = "mouthCornerUpDownRight", 30 | mouthCornerInOutRight = "mouthCornerInOutRight", 31 | mouthOpen = "mouthOpen", 32 | mouthWide = "mouthWide" 33 | } 34 | 35 | const ushort packetFrameSize = 8 36 | + 4 37 | + 2 * 4 38 | + 2 * 4 39 | + 1 40 | + 4 41 | + 3 * 4 42 | + 3 * 4 43 | + 4 * 4 44 | + 4 * (trackingPoints) 45 | + 4 * 2 * (trackingPoints) 46 | + 4 * 3 * (trackingPoints + 2) 47 | + 4 * 14; 48 | 49 | struct OSFData { 50 | double time; 51 | int id; 52 | vec2 cameraResolution; 53 | 54 | float rightEyeOpen; 55 | float leftEyeOpen; 56 | quat rightGaze; 57 | quat leftGaze; 58 | bool got3dPoints; 59 | float fit3dError; 60 | 61 | vec3 translation; 62 | quat rawQuaternion; 63 | vec3 rawEuler; 64 | 65 | float[trackingPoints] confidence; 66 | vec2[trackingPoints] points; 67 | vec3[trackingPoints + 2] points3d; 68 | 69 | float[string] features; 70 | } 71 | 72 | struct OSFThreadSafeData { 73 | private: 74 | OSFData data; 75 | Mutex mtx; 76 | bool updated_; 77 | 78 | public: 79 | this(Mutex mutex) { 80 | this.mtx = mutex; 81 | } 82 | 83 | bool updated() { 84 | mtx.lock(); 85 | scope(exit) mtx.unlock(); 86 | return updated_; 87 | } 88 | 89 | void set(OSFData data) { 90 | mtx.lock(); 91 | updated_ = true; 92 | this.data = data; 93 | mtx.unlock(); 94 | } 95 | 96 | OSFData get() { 97 | mtx.lock(); 98 | updated_ = false; 99 | scope(exit) mtx.unlock(); 100 | return data; 101 | } 102 | } 103 | 104 | class OSFAdaptor : Adaptor { 105 | private: 106 | ushort port = 11573; 107 | string bind = "0.0.0.0"; 108 | 109 | Socket osf; 110 | 111 | bool isCloseRequested; 112 | Thread receivingThread; 113 | 114 | OSFThreadSafeData tsdata; 115 | 116 | 117 | int dataLossCounter; 118 | enum RECV_TIMEOUT = 16; 119 | bool gotDataFromFetch; 120 | 121 | vec3 swapX(vec3 v) { 122 | v.x = -v.x; 123 | return v; 124 | } 125 | 126 | float degreesAngleWrap(float af) { 127 | return ((af + 180) % 360) - 180; 128 | } 129 | 130 | void receiveThread() { 131 | ubyte[packetFrameSize] buffer; 132 | 133 | while 
(!isCloseRequested) { 134 | try { 135 | // Data must always match the expected amount of bytes 136 | ptrdiff_t recvBytes = osf.receive(buffer); 137 | ubyte[] bytes = buffer; 138 | if (recvBytes < packetFrameSize) { 139 | // Ignore short packets and read the next one 140 | continue; 141 | } 142 | 143 | OSFData data; 144 | 145 | data.time = bytes.read!(double, Endian.littleEndian)(); 146 | data.id = bytes.read!(int, Endian.littleEndian)(); 147 | data.cameraResolution = vec2(bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)()); 148 | 149 | data.rightEyeOpen = bytes.read!(float, Endian.littleEndian)(); 150 | data.leftEyeOpen = bytes.read!(float, Endian.littleEndian)(); 151 | 152 | data.got3dPoints = bytes.read!(bool, Endian.littleEndian)(); 153 | data.fit3dError = bytes.read!(float, Endian.littleEndian)(); 154 | 155 | float qx = bytes.read!(float, Endian.littleEndian)(); 156 | float qy = bytes.read!(float, Endian.littleEndian)(); 157 | float qz = bytes.read!(float, Endian.littleEndian)(); 158 | float qw = bytes.read!(float, Endian.littleEndian)(); 159 | // (-qw, qx, qy, qz) corresponds to `rawEuler` below in `ZXY` convention 160 | 161 | data.rawQuaternion = quat(-qw, qx, qy, qz); 162 | data.rawEuler = vec3(bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)()); 163 | data.translation = vec3(bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)()); 164 | 165 | for (int i = 0; i < trackingPoints; i++) { 166 | data.confidence[i] = bytes.read!(float, Endian.littleEndian)(); 167 | } 168 | for (int i = 0; i < trackingPoints; i++) { 169 | data.points[i] = vec2(bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)()); 170 | } 171 | for (int i = 0; i < trackingPoints + 2; i++) { 172 | // OSF C# code negates y 173 | data.points3d[i] = vec3(bytes.read!(float, Endian.littleEndian)(), -bytes.read!(float, Endian.littleEndian)(), bytes.read!(float, Endian.littleEndian)()); 174 | } 175 | 176 | data.rightGaze = quat.lookRotation(swapX(data.points3d[66]) - swapX(data.points3d[68]), vec3(0, 1, 0)) * quat.axisRotation(PI, vec3(1, 0, 0)) * quat.axisRotation(PI, vec3(0, 0, 1)); 177 | data.leftGaze = quat.lookRotation(swapX(data.points3d[67]) - swapX(data.points3d[69]), vec3(0, 1, 0)) * quat.axisRotation(PI, vec3(1, 0, 0)) * quat.axisRotation(PI, vec3(0, 0, 1)); 178 | 179 | foreach(name; EnumMembers!OSFFeatureName) { 180 | data.features[name] = bytes.read!(float, Endian.littleEndian)(); 181 | } 182 | tsdata.set(data); 183 | } catch (Exception ex) { 184 | Thread.sleep(100.msecs); 185 | } 186 | } 187 | } 188 | 189 | public: 190 | ~this() { 191 | this.stop(); 192 | } 193 | 194 | override 195 | string getAdaptorName() { 196 | return "OpenSeeFace"; 197 | } 198 | 199 | override 200 | void start() { 201 | 202 | if ("osf_bind_port" in options) { 203 | port = to!ushort(options["osf_bind_port"]); 204 | } 205 | 206 | if ("osf_bind_ip" in options) { 207 | bind = options["osf_bind_ip"]; 208 | } 209 | if (isRunning) { 210 | this.stop(); 211 | } 212 | 213 | isCloseRequested = false; 214 | tsdata = OSFThreadSafeData(new Mutex()); 215 | 216 | osf = new UdpSocket(); 217 | osf.bind(new InternetAddress(bind, port)); 218 | osf.setOption(SocketOptionLevel.SOCKET, SocketOption.RCVTIMEO, 16.msecs); 219 | 220 | if (osf.isAlive) { 221 | receivingThread = new Thread(&receiveThread); 222 | receivingThread.start(); 223 | } 224 | } 225 |
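// Note: start() may be called again while running; it stops the previous socket/thread pair first and rebinds with the current osf_bind_* options.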
226 | override 227 | void stop() { 228 | if (isRunning) { 229 | isCloseRequested = true; 230 | if (receivingThread !is null) 231 | receivingThread.join(false); 232 | osf.close(); 233 | 234 | receivingThread = null; 235 | osf = null; 236 | } 237 | } 238 | 239 | override 240 | void poll() { 241 | if (!isRunning) return; 242 | 243 | if (tsdata.updated) { 244 | dataLossCounter = 0; 245 | gotDataFromFetch = true; 246 | OSFData data = tsdata.get(); 247 | 248 | if (data.got3dPoints) { 249 | // convert OpenCV coordinate system to Unity 250 | quat toRotate = quat.eulerRotation(radians(180), 0, radians(90)); 251 | quat temp = toRotate * data.rawQuaternion; 252 | 253 | // convert from Unity to Inochi2d convention 254 | quat converted = quat(temp.w, temp.z, temp.x, temp.y); 255 | bones[BoneNames.ftHead] = Bone( 256 | data.translation, 257 | converted 258 | ); 259 | 260 | // convert from Unity to Inochi2d convention 261 | auto convertedLeft = quat(data.leftGaze.w, data.leftGaze.z, data.leftGaze.x, data.leftGaze.y); 262 | auto convertedRight = quat(data.rightGaze.w, data.rightGaze.z, data.rightGaze.x, data.rightGaze.y); 263 | 264 | bones["LeftGaze"] = Bone(vec3(0,0,0), convertedLeft); 265 | bones["RightGaze"] = Bone(vec3(0,0,0), convertedRight); 266 | 267 | blendshapes = data.features.dup; 268 | 269 | blendshapes["EyeOpenRight"] = data.rightEyeOpen; 270 | blendshapes["EyeOpenLeft"] = data.leftEyeOpen; 271 | 272 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = 1-data.leftEyeOpen; 273 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = 1-data.rightEyeOpen; 274 | this.blendshapes[BlendshapeNames.ftMouthOpen] = data.features[OSFFeatureName.mouthOpen]; 275 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + data.features[OSFFeatureName.mouthCornerInOutLeft]-data.features[OSFFeatureName.mouthCornerInOutRight]) / 2.0; 276 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 277 | clamp( 278 | 1 + 279 | ((data.features[OSFFeatureName.mouthCornerUpDownRight]*2)-1) - 280 | ((data.features[OSFFeatureName.mouthCornerUpDownLeft]*2)-1), 281 | 0, 2 282 | ) 283 | ) / 2.0; 284 | } 285 | 286 | } else { 287 | dataLossCounter++; 288 | if (dataLossCounter > RECV_TIMEOUT) gotDataFromFetch = false; 289 | } 290 | } 291 | 292 | override 293 | bool isRunning() { 294 | return osf !is null; 295 | } 296 | 297 | override 298 | bool isReceivingData() { 299 | return gotDataFromFetch; 300 | } 301 | 302 | override 303 | string[] getOptionNames() { 304 | return [ 305 | "osf_bind_port", 306 | "osf_bind_ip" 307 | ]; 308 | } 309 | 310 | } 311 | -------------------------------------------------------------------------------- /source/ft/adaptors/package.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors; 2 | import ft.adaptor; 3 | public import ft.adaptors.vmc : VMCAdaptor; 4 | public import ft.adaptors.phizosc : PhizOSCAdaptor; 5 | public import ft.adaptors.vtsproto : VTSAdaptor; 6 | public import ft.adaptors.openseeface : OSFAdaptor; 7 | public import ft.adaptors.ifacialmocap : IFMAdaptor; 8 | public import ft.adaptors.facemotion3d : FM3DAdaptor; 9 | public import ft.adaptors.llf : LLFAdaptor; 10 | 11 | version (WebHookAdaptor){ 12 | public import ft.adaptors.webhook : WebHookAdaptor; 13 | } 14 | version (JML) { 15 | public import ft.adaptors.jinsmemelogger : JMLAdaptor; 16 | } 17 | version (Phiz) { 18 | public import ft.adaptors.phiz : PhizAdaptor; 19 | } 20 | 21 | private { 22 | Adaptor function()[string] adaptorFactories; 23 | } 24 | 25 | /** 26 | Adds an adaptor factory to 
the factory handler 27 | */ 28 | void ftRegisterAdaptorFactory(string name, Adaptor function() func) { 29 | adaptorFactories[name] = func; 30 | } 31 | 32 | /** 33 | Creates a new adaptor from an adaptor factory tag 34 | */ 35 | Adaptor ftCreateAdaptor(string name) { 36 | if (name in adaptorFactories) return adaptorFactories[name](); 37 | return null; 38 | } 39 | 40 | /** 41 | Creates a new adaptor from an adaptor factory tag, 42 | this adaptor will have the specified start options 43 | */ 44 | Adaptor ftCreateAdaptor(string name, string[string] options) { 45 | if (name in adaptorFactories) { 46 | auto adaptor = adaptorFactories[name](); 47 | adaptor.start(options); 48 | return adaptor; 49 | } 50 | return null; 51 | } 52 | 53 | shared static this() { 54 | ftRegisterAdaptorFactory("VTubeStudio", () { return new VTSAdaptor(); }); 55 | ftRegisterAdaptorFactory("OpenSeeFace", () { return new OSFAdaptor(); }); 56 | ftRegisterAdaptorFactory("VMC Receiver", () { return new VMCAdaptor(); }); 57 | ftRegisterAdaptorFactory("Phiz OSC Receiver", () { return new PhizOSCAdaptor(); }); 58 | ftRegisterAdaptorFactory("iFacialMocap", () { return new IFMAdaptor(); }); 59 | ftRegisterAdaptorFactory("Facemotion3D", () { return new FM3DAdaptor(); }); 60 | ftRegisterAdaptorFactory("LiveLinkFace/MeFaMo Receiver", () { return new LLFAdaptor(); }); 61 | version (Phiz){ 62 | ftRegisterAdaptorFactory("Phiz Receiver", () { return new PhizAdaptor(); }); 63 | } 64 | version (WebHookAdaptor){ 65 | ftRegisterAdaptorFactory("Web Hook Receiver", () { return new WebHookAdaptor(); }); 66 | } 67 | version (JML) { 68 | ftRegisterAdaptorFactory("JINS MEME Logger", () { return new JMLAdaptor(); }); 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /source/ft/adaptors/phiz.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.phiz; 2 | version (Phiz) { 3 | import ft.adaptor; 4 | import ft.data; 5 | 6 | import vibe.http.websockets; 7 | import vibe.http.server; 8 | import vibe.http.router; 9 | import vibe.core.sync; 10 | import core.thread; 11 | import core.sync.mutex; 12 | import std.conv; 13 | 14 | import std.array; 15 | import inmath.linalg; 16 | import std.traits; 17 | import inmath.math; 18 | 19 | 20 | enum PhizBlendshapes { 21 | browInnerUp = "browInnerUp", 22 | browDownLeft = "browDownLeft", 23 | browDownRight = "browDownRight", 24 | browOuterUpLeft = "browOuterUpLeft", 25 | browOuterUpRight = "browOuterUpRight", 26 | eyeLookUpLeft = "eyeLookUpLeft", 27 | eyeLookUpRight = "eyeLookUpRight", 28 | eyeLookDownLeft = "eyeLookDownLeft", 29 | eyeLookDownRight = "eyeLookDownRight", 30 | eyeLookInLeft = "eyeLookInLeft", 31 | eyeLookInRight = "eyeLookInRight", 32 | eyeLookOutLeft = "eyeLookOutLeft", 33 | eyeLookOutRight = "eyeLookOutRight", 34 | eyeBlinkLeft = "eyeBlinkLeft", 35 | eyeBlinkRight = "eyeBlinkRight", 36 | eyeSquintLeft = "eyeSquintLeft", 37 | eyeSquintRight = "eyeSquintRight", 38 | eyeWideLeft = "eyeWideLeft", 39 | eyeWideRight = "eyeWideRight", 40 | cheekPuff = "cheekPuff", 41 | cheekSquintLeft = "cheekSquintLeft", 42 | cheekSquintRight = "cheekSquintRight", 43 | noseSneerLeft = "noseSneerLeft", 44 | noseSneerRight = "noseSneerRight", 45 | mouthFunnel = "mouthFunnel", 46 | mouthPucker = "mouthPucker", 47 | mouthRollUpper = "mouthRollUpper", 48 | mouthRollLower = "mouthRollLower", 49 | mouthShrugUpper = "mouthShrugUpper", 50 | mouthShrugLower = "mouthShrugLower", 51 | mouthClose = "mouthClose", 52 | mouthSmileLeft = 
"mouthSmileLeft", 53 | mouthSmileRight = "mouthSmileRight", 54 | mouthFrownLeft = "mouthFrownLeft", 55 | mouthFrownRight = "mouthFrownRight", 56 | mouthDimpleLeft = "mouthDimpleLeft", 57 | mouthDimpleRight = "mouthDimpleRight", 58 | mouthUpperUpLeft = "mouthUpperUpLeft", 59 | mouthUpperUpRight = "mouthUpperUpRight", 60 | mouthLowerDownLeft = "mouthLowerDownLeft", 61 | mouthLowerDownRight = "mouthLowerDownRight", 62 | mouthPressLeft = "mouthPressLeft", 63 | mouthPressRight = "mouthPressRight", 64 | mouthStretchLeft = "mouthStretchLeft", 65 | mouthStretchRight = "mouthStretchRight", 66 | mouthLeft = "mouthLeft", 67 | mouthRight = "mouthRight", 68 | jawOpen = "jawOpen", 69 | jawForward = "jawForward", 70 | jawLeft = "jawLeft", 71 | jawRight = "jawRight", 72 | tongueOut = "tongueOut" 73 | } 74 | 75 | struct PhizBSData { 76 | float[52] data; 77 | } 78 | 79 | struct PhizQData { 80 | float[4] data; 81 | } 82 | 83 | struct PhizBSThreadSafeData { 84 | private: 85 | PhizBSData data; 86 | Mutex mtx; 87 | bool updated_; 88 | 89 | public: 90 | this(Mutex mutex) { 91 | this.mtx = mutex; 92 | } 93 | 94 | bool updated() { 95 | if (mtx is null) 96 | return false; 97 | mtx.lock(); 98 | scope(exit) mtx.unlock(); 99 | return updated_; 100 | } 101 | 102 | void set(PhizBSData data) { 103 | if (mtx is null) 104 | return; 105 | mtx.lock(); 106 | updated_ = true; 107 | this.data = data; 108 | mtx.unlock(); 109 | } 110 | 111 | PhizBSData get() { 112 | if (mtx is null) 113 | return data; 114 | mtx.lock(); 115 | updated_ = false; 116 | scope(exit) mtx.unlock(); 117 | return data; 118 | } 119 | } 120 | 121 | struct PhizQThreadSafeData { 122 | private: 123 | PhizQData data; 124 | Mutex mtx; 125 | bool updated_; 126 | 127 | public: 128 | this(Mutex mutex) { 129 | this.mtx = mutex; 130 | } 131 | 132 | bool updated() { 133 | if (mtx is null) 134 | return false; 135 | mtx.lock(); 136 | scope(exit) mtx.unlock(); 137 | return updated_; 138 | } 139 | 140 | void set(PhizQData data) { 141 | if (mtx is null) 142 | return; 143 | mtx.lock(); 144 | updated_ = true; 145 | this.data = data; 146 | mtx.unlock(); 147 | } 148 | 149 | PhizQData get() { 150 | if (mtx is null) 151 | return data; 152 | mtx.lock(); 153 | updated_ = false; 154 | scope(exit) mtx.unlock(); 155 | return data; 156 | } 157 | } 158 | 159 | class PhizAdaptor : Adaptor { 160 | private: 161 | ushort port = 9912; 162 | string bind = "0.0.0.0"; 163 | 164 | bool isCloseRequested; 165 | Thread receivingThread; 166 | Mutex mutex; 167 | TaskCondition condition; 168 | 169 | bool gotDataFromFetch = false; 170 | 171 | PhizBSThreadSafeData tsblendshapes; 172 | PhizQThreadSafeData tshead; 173 | PhizQThreadSafeData tsleftgaze; 174 | PhizQThreadSafeData tsrightgaze; 175 | 176 | public: 177 | ~this() { 178 | this.stop(); 179 | } 180 | 181 | void handleConnection(scope WebSocket socket) { 182 | 183 | while (!isCloseRequested && socket.connected) { 184 | try { 185 | ptrdiff_t received = socket.waitForData(16.msecs); 186 | if (received < 0) { 187 | continue; 188 | } 189 | 190 | auto text = socket.receiveText.split(","); 191 | auto addressPattern = text[0].split("/"); 192 | if (addressPattern[1] != "phiz") continue; 193 | switch(addressPattern[2]) { 194 | case "headRotation": 195 | { 196 | PhizQData data; 197 | for(size_t i = 1; i < text.length ; i++) { 198 | data.data[i-1] = text[i].to!float; 199 | } 200 | tshead.set(data); 201 | } 202 | break; 203 | case "leftEyeRotation": 204 | { 205 | PhizQData data; 206 | for(size_t i = 1; i < text.length ; i++) { 207 | data.data[i-1] = 
text[i].to!float; 208 | } 209 | tsleftgaze.set(data); 210 | } 211 | break; 212 | case "rightEyeRotation": 213 | { 214 | PhizQData data; 215 | for(size_t i = 1; i < text.length ; i++) { 216 | data.data[i-1] = text[i].to!float; 217 | } 218 | tsrightgaze.set(data); 219 | } 220 | break; 221 | case "blendshapes": 222 | { 223 | PhizBSData data; 224 | for(size_t i = 1; i < text.length ; i++) { 225 | data.data[i-1] = text[i].to!float; 226 | } 227 | tsblendshapes.set(data); 228 | } 229 | break; 230 | default: break; 231 | } 232 | } catch (Exception ex) { 233 | Thread.sleep(100.msecs); 234 | } 235 | } 236 | 237 | } 238 | 239 | void receiveThread() { 240 | isCloseRequested = false; 241 | tsblendshapes = PhizBSThreadSafeData(new Mutex()); 242 | tshead = PhizQThreadSafeData(new Mutex()); 243 | tsleftgaze = PhizQThreadSafeData(new Mutex()); 244 | tsrightgaze = PhizQThreadSafeData(new Mutex()); 245 | 246 | HTTPServerSettings settings = new HTTPServerSettings(); 247 | settings.port = port; 248 | settings.bindAddresses = [bind]; 249 | 250 | auto router = new URLRouter; 251 | router.get("/", handleWebSockets(&this.handleConnection)); 252 | 253 | HTTPListener listener = listenHTTP(settings, router); 254 | synchronized (mutex) { 255 | condition.wait(); 256 | } 257 | listener.stopListening(); 258 | } 259 | 260 | override 261 | void start() { 262 | if ("phiz_bind_port" in this.options) { 263 | string port_str = options["phiz_bind_port"]; 264 | if (port_str !is null && port_str != "") 265 | port = to!ushort(this.options["phiz_bind_port"]); 266 | } 267 | 268 | if ("phiz_bind_ip" in this.options) { 269 | string addr_str = options["phiz_bind_ip"]; 270 | if (addr_str !is null && addr_str != "") 271 | bind = this.options["phiz_bind_ip"]; 272 | } 273 | 274 | this.stop(); 275 | mutex = new Mutex; 276 | condition = new TaskCondition(mutex); 277 | receivingThread = new Thread(&receiveThread); 278 | receivingThread.start(); 279 | } 280 | 281 | override 282 | void stop() { 283 | if (isRunning) { 284 | isCloseRequested = true; 285 | condition.notify(); 286 | receivingThread.join(false); 287 | mutex = null; 288 | condition = null; 289 | receivingThread = null; 290 | } 291 | } 292 | 293 | override 294 | void poll() { 295 | if (tsblendshapes.updated) { 296 | gotDataFromFetch = true; 297 | PhizBSData data = tsblendshapes.get(); 298 | 299 | int i = 0; 300 | foreach(name; EnumMembers!PhizBlendshapes) { 301 | this.blendshapes[name] = data.data[i]; 302 | i++; 303 | } 304 | 305 | // LEFT EYE 306 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = this.blendshapes["eyeBlinkLeft"]; 307 | this.blendshapes[BlendshapeNames.ftEyeXLeft] = this.blendshapes["eyeLookOutLeft"]-this.blendshapes["eyeLookInLeft"]; 308 | this.blendshapes[BlendshapeNames.ftEyeYLeft] = this.blendshapes["eyeLookUpLeft"]-this.blendshapes["eyeLookDownLeft"]; 309 | this.blendshapes[BlendshapeNames.ftEyeSquintLeft] = this.blendshapes["eyeSquintLeft"]; 310 | this.blendshapes[BlendshapeNames.ftEyeWidenLeft] = this.blendshapes["eyeWideLeft"]; 311 | 312 | // RIGHT EYE 313 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = this.blendshapes["eyeBlinkRight"]; 314 | this.blendshapes[BlendshapeNames.ftEyeXRight] = this.blendshapes["eyeLookInRight"]-this.blendshapes["eyeLookOutRight"]; 315 | this.blendshapes[BlendshapeNames.ftEyeYRight] = this.blendshapes["eyeLookUpRight"]-this.blendshapes["eyeLookDownRight"]; 316 | this.blendshapes[BlendshapeNames.ftEyeSquintRight] = this.blendshapes["eyeSquintRight"]; 317 | this.blendshapes[BlendshapeNames.ftEyeWidenRight] = 
this.blendshapes["eyeWideRight"]; 318 | 319 | // MOUTH 320 | this.blendshapes[BlendshapeNames.ftMouthOpen] = clamp( 321 | 322 | // Avg out the different ways of opening the mouth 323 | ( 324 | ((this.blendshapes["mouthLowerDownLeft"]+this.blendshapes["mouthUpperUpLeft"])/2) + 325 | ((this.blendshapes["mouthLowerDownRight"]+this.blendshapes["mouthUpperUpRight"])/2) 326 | ), 327 | 0, 328 | 1 329 | ); 330 | 331 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + this.blendshapes["mouthLeft"]-this.blendshapes["mouthRight"]) / 2.0; 332 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 333 | clamp( 334 | 1 + 335 | (this.blendshapes["mouthSmileLeft"]+this.blendshapes["mouthSmileRight"]/2.0) - 336 | (this.blendshapes["mouthFrownLeft"]+this.blendshapes["mouthFrownRight"]/2.0), 337 | 0, 2 338 | ) 339 | ) / 2.0; 340 | } 341 | 342 | if(tshead.updated) { 343 | gotDataFromFetch = true; 344 | PhizQData data = tshead.get(); 345 | 346 | if ("Head" !in bones) { 347 | bones["Head"] = Bone( 348 | vec3.init, 349 | quat.identity 350 | ); 351 | } 352 | this.bones["Head"].rotation = quat( 353 | data.data[3], 354 | -data.data[2], 355 | data.data[0], 356 | -data.data[1], 357 | ); 358 | } 359 | 360 | if(tsleftgaze.updated) { 361 | gotDataFromFetch = true; 362 | PhizQData data = tsleftgaze.get(); 363 | 364 | if ("LeftGaze" !in bones) { 365 | bones["LeftGaze"] = Bone( 366 | vec3.init, 367 | quat.identity 368 | ); 369 | } 370 | 371 | this.bones["LeftGaze"].rotation = quat( 372 | data.data[3], 373 | -data.data[2], 374 | data.data[0], 375 | -data.data[1], 376 | ); 377 | } 378 | 379 | if(tsrightgaze.updated) { 380 | gotDataFromFetch = true; 381 | PhizQData data = tsrightgaze.get(); 382 | 383 | if ("RightGaze" !in bones) { 384 | bones["RightGaze"] = Bone( 385 | vec3.init, 386 | quat.identity 387 | ); 388 | } 389 | 390 | this.bones["RightGaze"].rotation = quat( 391 | data.data[3], 392 | -data.data[2], 393 | data.data[0], 394 | -data.data[1], 395 | ); 396 | } 397 | } 398 | 399 | override 400 | bool isRunning() { 401 | return receivingThread !is null; 402 | } 403 | 404 | override 405 | string[] getOptionNames() { 406 | return [ 407 | "phiz_bind_port", 408 | "phiz_bind_ip" 409 | ]; 410 | } 411 | 412 | override string getAdaptorName() { 413 | return "Phiz Receiver"; 414 | } 415 | 416 | override 417 | bool isReceivingData() { 418 | return gotDataFromFetch; 419 | } 420 | } 421 | 422 | } 423 | -------------------------------------------------------------------------------- /source/ft/adaptors/phizosc.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.phizosc; 2 | import ft.adaptor; 3 | import ft.data; 4 | 5 | import osc; 6 | import std.conv : to; 7 | import std.socket; 8 | import inmath.linalg; 9 | import std.traits; 10 | 11 | enum PhizOSCBlendshapes { 12 | browInnerUp = "browInnerUp", 13 | browDownLeft = "browDownLeft", 14 | browDownRight = "browDownRight", 15 | browOuterUpLeft = "browOuterUpLeft", 16 | browOuterUpRight = "browOuterUpRight", 17 | eyeLookUpLeft = "eyeLookUpLeft", 18 | eyeLookUpRight = "eyeLookUpRight", 19 | eyeLookDownLeft = "eyeLookDownLeft", 20 | eyeLookDownRight = "eyeLookDownRight", 21 | eyeLookInLeft = "eyeLookInLeft", 22 | eyeLookInRight = "eyeLookInRight", 23 | eyeLookOutLeft = "eyeLookOutLeft", 24 | eyeLookOutRight = "eyeLookOutRight", 25 | eyeBlinkLeft = "eyeBlinkLeft", 26 | eyeBlinkRight = "eyeBlinkRight", 27 | eyeSquintLeft = "eyeSquintLeft", 28 | eyeSquintRight = "eyeSquintRight", 29 | eyeWideLeft = "eyeWideLeft", 30 | eyeWideRight = 
"eyeWideRight", 31 | cheekPuff = "cheekPuff", 32 | cheekSquintLeft = "cheekSquintLeft", 33 | cheekSquintRight = "cheekSquintRight", 34 | noseSneerLeft = "noseSneerLeft", 35 | noseSneerRight = "noseSneerRight", 36 | mouthFunnel = "mouthFunnel", 37 | mouthPucker = "mouthPucker", 38 | mouthRollUpper = "mouthRollUpper", 39 | mouthRollLower = "mouthRollLower", 40 | mouthShrugUpper = "mouthShrugUpper", 41 | mouthShrugLower = "mouthShrugLower", 42 | mouthClose = "mouthClose", 43 | mouthSmileLeft = "mouthSmileLeft", 44 | mouthSmileRight = "mouthSmileRight", 45 | mouthFrownLeft = "mouthFrownLeft", 46 | mouthFrownRight = "mouthFrownRight", 47 | mouthDimpleLeft = "mouthDimpleLeft", 48 | mouthDimpleRight = "mouthDimpleRight", 49 | mouthUpperUpLeft = "mouthUpperUpLeft", 50 | mouthUpperUpRight = "mouthUpperUpRight", 51 | mouthLowerDownLeft = "mouthLowerDownLeft", 52 | mouthLowerDownRight = "mouthLowerDownRight", 53 | mouthPressLeft = "mouthPressLeft", 54 | mouthPressRight = "mouthPressRight", 55 | mouthStretchLeft = "mouthStretchLeft", 56 | mouthStretchRight = "mouthStretchRight", 57 | mouthLeft = "mouthLeft", 58 | mouthRight = "mouthRight", 59 | jawOpen = "jawOpen", 60 | jawForward = "jawForward", 61 | jawLeft = "jawLeft", 62 | jawRight = "jawRight", 63 | tongueOut = "tongueOut" 64 | } 65 | 66 | class PhizOSCAdaptor : Adaptor { 67 | private: 68 | Server server; 69 | ushort port = 41235; 70 | string bind = "0.0.0.0"; 71 | 72 | bool gotDataFromFetch; 73 | 74 | public: 75 | 76 | override 77 | string getAdaptorName() { 78 | return "Phiz OSC Receiver"; 79 | } 80 | 81 | override 82 | void start() { 83 | if ("port" in options) { 84 | port = to!ushort(options["port"]); 85 | } 86 | 87 | if ("address" in options) { 88 | bind = options["address"]; 89 | } 90 | 91 | server = new Server(new InternetAddress(bind, port)); 92 | } 93 | 94 | override 95 | bool isRunning() { 96 | return server !is null; 97 | } 98 | 99 | override 100 | void stop() { 101 | if (server) { 102 | server.close(); 103 | server = null; 104 | } 105 | } 106 | 107 | override 108 | void poll() { 109 | if (!isRunning) return; 110 | 111 | const(Message)[] msgs = server.popMessages(); 112 | if (msgs.length > 0) { 113 | dataLossCounter = 0; 114 | gotDataFromFetch = true; 115 | 116 | foreach(const(Message) msg; msgs) { 117 | if (msg.addressPattern.length < 2) continue; 118 | if (msg.addressPattern[0].toString != "/phiz") continue; 119 | switch(msg.addressPattern[1].toString) { 120 | case "/headRotation": 121 | if (msg.arg!string(0) !in bones) { 122 | bones["Head"] = Bone( 123 | vec3.init, 124 | quat.identity 125 | ); 126 | } 127 | 128 | this.bones["Head"].rotation = quat( 129 | msg.arg!float(3), 130 | -msg.arg!float(2), 131 | msg.arg!float(0), 132 | -msg.arg!float(1), 133 | ); 134 | break; 135 | case "/leftEyeRotation": 136 | if (msg.arg!string(0) !in bones) { 137 | bones["LeftGaze"] = Bone( 138 | vec3.init, 139 | quat.identity 140 | ); 141 | } 142 | 143 | this.bones["LeftGaze"].rotation = quat( 144 | msg.arg!float(3), 145 | -msg.arg!float(2), 146 | msg.arg!float(0), 147 | -msg.arg!float(1), 148 | ); 149 | break; 150 | case "/rightEyeRotation": 151 | if (msg.arg!string(0) !in bones) { 152 | bones["RightGaze"] = Bone( 153 | vec3.init, 154 | quat.identity 155 | ); 156 | } 157 | 158 | this.bones["RightGaze"].rotation = quat( 159 | msg.arg!float(3), 160 | -msg.arg!float(2), 161 | msg.arg!float(0), 162 | -msg.arg!float(1), 163 | ); 164 | break; 165 | case "/blendshapes": 166 | int i = 0; 167 | foreach(name; EnumMembers!PhizOSCBlendshapes) { 168 | 
/source/ft/adaptors/vmc.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.vmc; 2 | import ft.adaptor; 3 | import ft.data; 4 | import osc; 5 | import std.conv : to; 6 | import std.socket; 7 | import inmath.linalg; 8 | 
9 | class VMCAdaptor : Adaptor { 10 | private: 11 | Server server; 12 | ushort port = 39540; 13 | string bind = "0.0.0.0"; 14 | 15 | bool gotDataFromFetch; 16 | 17 | public: 18 | 19 | override 20 | string getAdaptorName() { 21 | return "VMC Receiver"; 22 | } 23 | 24 | override 25 | void start() { 26 | if ("port" in options) { 27 | port = to!ushort(options["port"]); 28 | } 29 | 30 | if ("address" in options) { 31 | bind = options["address"]; 32 | } 33 | 34 | server = new Server(new InternetAddress(bind, port)); 35 | } 36 | 37 | override 38 | bool isRunning() { 39 | return server !is null; 40 | } 41 | 42 | override 43 | void stop() { 44 | if (server) { 45 | server.close(); 46 | server = null; 47 | } 48 | } 49 | 
50 | override 51 | void poll() { 52 | if (!isRunning) return; 53 | 54 | const(Message)[] msgs = server.popMessages(); 55 | if (msgs.length > 0) { 56 | dataLossCounter = 0; 57 | gotDataFromFetch = true; 58 | 59 | foreach(const(Message) msg; msgs) { 60 | if (msg.addressPattern.length < 3) continue; 61 | if (msg.addressPattern[0].toString != "/VMC" || msg.addressPattern[1].toString != "/Ext") continue; 62 | switch(msg.addressPattern[2].toString) { 63 | case "/Bone": 64 | if (msg.addressPattern.length < 4) break; 65 | if (msg.addressPattern[3].toString != "/Pos") break; 66 | // msg form: /VMC/Ext/Bone/Pos/<bone name> = [float x 7] 67 | if (msg.addressPattern.length > 4) { 68 | 69 | string pattern = msg.addressPattern[4].toString(); 70 | if (pattern.length > 1) { 71 | 72 | // Early escape for invalid bone seq length 73 | if (msg.typeTags.length != 7) break; 74 | 75 | string boneName = pattern[1..$]; 76 | if (boneName !in bones) bones[boneName] = Bone(vec3.init, quat.identity); 77 | this.bones[boneName].position = vec3( 78 | msg.arg!float(0), 79 | msg.arg!float(1), 80 | msg.arg!float(2) 81 | ); 82 | 83 | // NOTE: the bones quaternion is modified here to match the output of the VTS Protocol 84 | this.bones[boneName].rotation = quat( 85 | msg.arg!float(6), 86 | -msg.arg!float(5), 87 | msg.arg!float(3), 88 | -msg.arg!float(4), 89 | ); 90 | } 91 | // msg form: /VMC/Ext/Bone/Pos = [<bone name>, float x 7] 92 | } else { 93 | 94 | // Early escape for invalid bone seq length 95 | if (msg.typeTags.length != 8) break; 96 | 97 | string boneName = msg.arg!string(0); 98 | if (boneName !in bones) { 99 | bones[boneName] = Bone( 100 | vec3.init, 101 | quat.identity 102 | ); 103 | } 104 | 105 | this.bones[boneName].position = vec3( 106 | msg.arg!float(1), 107 | msg.arg!float(2), 108 | msg.arg!float(3) 109 | ); 110 | 111 | // NOTE: the bones quaternion is modified here to match the output of the VTS Protocol 112 | this.bones[boneName].rotation = quat( 113 | msg.arg!float(7), 114 | -msg.arg!float(6), 115 | msg.arg!float(4), 116 | -msg.arg!float(5), 117 | ); 118 | } 119 | break; 120 | case "/Blend":
121 | if (msg.addressPattern.length > 3) { 122 | string pattern = msg.addressPattern[3].toString(); 123 | switch (pattern) { 124 | 125 | // We don't use /Apply, so we just break out. 126 | case "/Apply": break; 127 | 128 | case "/Val": 129 | // msg form: /VMC/Ext/Blend/Val = [<blendshape name>, float] 130 | // Expected VMC protocol case 131 | if (msg.typeTags.length == 2) { 132 | if(msg.arg!string(0).length > 0){ 133 | this.blendshapes[msg.arg!string(0)] = msg.arg!float(1); 134 | } 135 | } 136 | // msg form: /VMC/Ext/Blend/Val/<blendshape name> = [float] 137 | else if (msg.typeTags.length == 1) { 138 | if (msg.addressPattern.length < 5) break; 139 | pattern = msg.addressPattern[4].toString(); 140 | // Avoid an invalid name when the pattern is just "/". 141 | if (pattern.length > 1) { 142 | // Extension; the blendshape name is taken from the address pattern itself. 143 | this.blendshapes[pattern[1..$]] = msg.arg!float(0); 144 | } 145 | } 146 | 147 | break; 148 | default: break; 149 | } 150 | } 151 | break; 152 | default: break; 153 | } 154 | } 155 | } else { 156 | dataLossCounter++; 157 | if (dataLossCounter > RECV_TIMEOUT) gotDataFromFetch = false; 158 | } 159 | } 160 | 
161 | override 162 | bool isReceivingData() { 163 | return gotDataFromFetch; 164 | } 165 | 166 | override 167 | string[] getOptionNames() { 168 | return [ 169 | "port", 170 | "address" 171 | ]; 172 | } 173 | } --------------------------------------------------------------------------------
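Concretely, the /Blend/Val handling above accepts either addressing style; both of these example messages (the blendshape name "Joy" and the value are invented) result in blendshapes["Joy"] = 0.8:

    /VMC/Ext/Blend/Val       with two arguments: the string "Joy" and the float 0.8
    /VMC/Ext/Blend/Val/Joy   with a single float argument 0.8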
/source/ft/adaptors/vtsproto.d: -------------------------------------------------------------------------------- 1 | module ft.adaptors.vtsproto; 2 | import ft.adaptor; 3 | import ft.data; 4 | import std.conv : to; 5 | import std.socket; 6 | import fghj.serialization; 7 | import fghj; 8 | import inmath.linalg; 9 | import core.thread; 10 | import core.sync.mutex; 11 | import std.exception; 12 | import inmath.math; 13 | import std.stdio : writeln; 14 | 
15 | struct VTSUDPDataRequest { 16 | string messageType = "iOSTrackingDataRequest"; 17 | float time; 18 | string sentBy; 19 | int[] ports; 20 | 21 | this(string appName, float sendTime, int[] receiverPorts) { 22 | this.sentBy = appName; 23 | this.time = sendTime; 24 | this.ports = receiverPorts; 25 | } 26 | } 27 | 28 | 
29 | /** 30 | Wrapper for VTubeStudio Vectors 31 | */ 32 | struct VTSVector { 33 | union { 34 | struct { 35 | float x; 36 | float y; 37 | float z; 38 | } 39 | 40 | @serdeIgnore 41 | vec3 vec; 42 | } 43 | } 44 | 
45 | /** 46 | Represents the raw blendshape tracking data to be sent to facetrack-d via UDP. 47 | */ 48 | struct VTSRawTrackingData { 49 | struct VTSTrackingDataEntry { 50 | 51 | @serdeKeys("k") 52 | string key; 53 | 54 | @serdeKeys("v") 55 | float value; 56 | } 57 | 58 | /** 59 | Current UNIX millisecond timestamp. 60 | */ 61 | @serdeKeys("Timestamp") 62 | long timestamp = 0; 63 | 64 | /** 65 | Last pressed on-screen hotkey. 66 | */ 67 | @serdeKeys("Hotkey") 68 | int hotkey = -1; 69 | 70 | /** 71 | Whether or not face has been found 72 | */ 73 | @serdeKeys("FaceFound") 74 | bool faceFound = false; 75 | 76 | /** 77 | Current face rotation. 78 | */ 79 | @serdeKeys("Rotation") 80 | VTSVector rotation; 81 | 82 | /** 83 | Current face position. 84 | */ 85 | @serdeKeys("Position") 86 | VTSVector position; 87 | 88 | /** 89 | Current iOS blendshapes. 90 | */ 91 | @serdeKeys("BlendShapes") 92 | VTSTrackingDataEntry[] blendShapes; 93 | 94 | /** 95 | Current iOS blendshapes, keyed by blendshape name. 96 | */ 97 | @serdeIgnore 98 | float[string] blendShapesDict; 99 | } 100 | 
101 | /** 102 | Thread-safe container for the most recent VTS tracking data 103 | */ 104 | struct VTSThreadSafeData { 105 | private: 106 | VTSRawTrackingData data; 107 | Mutex mtx; 108 | bool updated_; 109 | 110 | public: 111 | this(Mutex mutex) { 112 | this.mtx = mutex; 113 | } 114 | 115 | bool updated() { 116 | mtx.lock(); 117 | scope(exit) mtx.unlock(); 118 | return updated_; 119 | } 120 | 121 | void set(VTSRawTrackingData data) { 122 | mtx.lock(); 123 | updated_ = true; 124 | this.data = data; 125 | mtx.unlock(); 126 | } 127 | 128 | VTSRawTrackingData get() { 129 | mtx.lock(); 130 | updated_ = false; 131 | scope(exit) mtx.unlock(); 132 | return data; 133 | } 134 | } 135 | 
136 | /** 137 | Adaptor to receive VTubeStudio tracking data 138 | 139 | DO NOTE: The VTubeStudio tracking API is not stable yet, 140 | this Adaptor may break at any point due to updates to the API. 141 | */ 142 | class VTSAdaptor : Adaptor { 143 | private: 144 | // Constant enums 145 | enum vtsPort = 21412; 146 | enum vtsBind = "0.0.0.0"; 147 | enum vtsKeepAlivePerSecond = 5; 148 | enum vtsRequestDataFramesForSeconds = 1; 149 | 150 | // Data 151 | size_t dataPacketsReceivedTotal; 152 | size_t dataPacketsReceivedInLastSecond; 153 | VTSThreadSafeData tsdata; 154 | 155 | // Settings 156 | string appName = "facetrack-d"; 157 | string phoneIP; 158 | float pollingFactor = 1; 159 | 160 | // Sockets 161 | Socket vtsIn; 162 | Socket vtsOut; 163 | 164 | // Threading 165 | bool isCloseRequested; 166 | Thread sendingThread; 167 | Thread listeningThread; 168 | 169 | bool gotDataFromFetch; 170 | 
171 | void listenThread() { 172 | ubyte[ushort.max] buff; 173 | Address addr = new InternetAddress(InternetAddress.ADDR_ANY, 0); 174 | 175 | while (!isCloseRequested) { 176 | try { 177 | ptrdiff_t recvBytes = vtsIn.receiveFrom(buff, SocketFlags.NONE, addr); 178 | if (recvBytes != Socket.ERROR && recvBytes <= buff.length) { 179 | string recvString = cast(string)buff[0..recvBytes]; 180 | auto trackingData = deserialize!VTSRawTrackingData(parseJson(recvString)); 181 | 182 | // copy blendshape data in to an easy spot 183 | foreach(blendshapeKV; trackingData.blendShapes) { 184 | trackingData.blendShapesDict[blendshapeKV.key] = blendshapeKV.value; 185 | } 186 | 187 | tsdata.set(trackingData); 188 | } 189 | } catch (Exception ex) { 190 | Thread.sleep(100.msecs); 191 | } 192 | } 193 | } 194 | 
195 | void sendThread() { 196 | float clampedFactor = clamp(pollingFactor, 1, 5); 197 | int senderThreadSleepTimeMs = clamp(cast(int)(1000.0 / (cast(float)vtsKeepAlivePerSecond*clampedFactor)), 10, 5000); 198 | 199 | VTSUDPDataRequest req = VTSUDPDataRequest(appName, cast(float)vtsRequestDataFramesForSeconds/clampedFactor, [vtsPort]); 200 | string serializedDataReq = req.serializeToJson(); 201 | InternetAddress addr = new InternetAddress(phoneIP, vtsPort); 202 | while(!isCloseRequested) { 203 | try { 204 | vtsOut.sendTo(serializedDataReq, SocketFlags.NONE, addr); 205 | } catch(Exception ex) { 206 | // Do nothing :) 207 | } 208 | 209 | Thread.sleep(senderThreadSleepTimeMs.msecs); 210 | } 211 | } 212 | 
213 | public: 214 | ~this() { 215 | this.stop(); 216 | } 217 | 218 | override 219 | string getAdaptorName() { 220 | return "VTubeStudio"; 221 | } 222 | 223 | override 224 | void start() { 225 | // VTubeStudio wants an app name to be known by 226 | if ("appName" in options) { 227 | appName = options["appName"]; 228 | enforce(appName.length > 0, "App Name can't be empty."); 229 | enforce(appName.length <= 32,
"App Name can't be longer than 32 characters."); 230 | } 231 | 232 | if ("phoneIP" in options) { 233 | phoneIP = options["phoneIP"]; 234 | } else return; 235 | 236 | if ("pollingFactor" in options) { 237 | try { 238 | pollingFactor = options["pollingFactor"].to!float; 239 | } catch (Exception ex) { 240 | return; 241 | } 242 | } 243 | 244 | // Do not create zombie threads please 245 | if (isRunning) this.stop(); 246 | 247 | // Start our new threading 248 | isCloseRequested = false; 249 | tsdata = VTSThreadSafeData(new Mutex()); 250 | 251 | vtsOut = new UdpSocket(); 252 | vtsOut.setOption(SocketOptionLevel.SOCKET, SocketOption.SNDTIMEO, 16.msecs); 253 | vtsIn = new UdpSocket(); 254 | vtsIn.setOption(SocketOptionLevel.SOCKET, SocketOption.RCVTIMEO, 16.msecs); 255 | vtsIn.bind(new InternetAddress(vtsBind, vtsPort)); 256 | 257 | // Reset PPS counter 258 | dataPacketsReceivedTotal = 0; 259 | dataPacketsReceivedInLastSecond = 0; 260 | 261 | // Start threads 262 | if (vtsIn.isAlive) { 263 | sendingThread = new Thread(&sendThread); 264 | sendingThread.start(); 265 | 266 | listeningThread = new Thread(&listenThread); 267 | listeningThread.start(); 268 | } 269 | } 270 | 271 | override 272 | void stop() { 273 | if (isRunning) { 274 | // Stop threads 275 | isCloseRequested = true; 276 | 277 | sendingThread.join(false); 278 | listeningThread.join(false); 279 | 280 | // Close UDP sockets 281 | vtsIn.close(); 282 | vtsOut.close(); 283 | 284 | // Set everything to null 285 | sendingThread = null; 286 | listeningThread = null; 287 | vtsIn = null; 288 | vtsOut = null; 289 | } 290 | } 291 | 292 | override 293 | void poll() { 294 | if (!isRunning) return; 295 | 296 | if (tsdata.updated) { 297 | VTSRawTrackingData data = tsdata.get(); 298 | dataLossCounter = 0; 299 | gotDataFromFetch = data.faceFound; 300 | 301 | bones[BoneNames.ftHead] = Bone( 302 | vec3(data.position.x*-1, data.position.y, data.position.z), 303 | quat.eulerRotation(radians(data.rotation.z), radians(data.rotation.y), radians(data.rotation.x)) 304 | ); 305 | 306 | // Duplicate blendshapes in 307 | this.blendshapes = data.blendShapesDict.dup; 308 | 309 | try { 310 | if (this.blendshapes.length > 0) { // CHECK FOR ANDROID 311 | 312 | 313 | if ("jawOpen" in this.blendshapes) { 314 | 315 | if ("eyeLookOut_L" in this.blendshapes) { // VTUBE STUDIO ANDROID 316 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = this.blendshapes["EyeBlinkLeft"]; 317 | this.blendshapes[BlendshapeNames.ftEyeXLeft] = this.blendshapes["eyeLookOut_L"]-this.blendshapes["eyeLookIn_L"]; 318 | this.blendshapes[BlendshapeNames.ftEyeYLeft] = this.blendshapes["eyeLookUp_L"]-this.blendshapes["eyeLookDown_L"]; 319 | this.blendshapes[BlendshapeNames.ftEyeSquintLeft] = this.blendshapes["eyeSquint_L"]; 320 | this.blendshapes[BlendshapeNames.ftEyeWidenLeft] = this.blendshapes["eyeSquint_L"]; 321 | 322 | // RIGHT EYE 323 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = this.blendshapes["EyeBlinkRight"]; 324 | this.blendshapes[BlendshapeNames.ftEyeXRight] = this.blendshapes["eyeLookIn_R"]-this.blendshapes["eyeLookOut_R"]; 325 | this.blendshapes[BlendshapeNames.ftEyeYRight] = this.blendshapes["eyeLookUp_R"]-this.blendshapes["eyeLookDown_R"]; 326 | this.blendshapes[BlendshapeNames.ftEyeSquintRight] = this.blendshapes["eyeSquint_R"]; 327 | this.blendshapes[BlendshapeNames.ftEyeWidenRight] = this.blendshapes["eyeSquint_R"]; 328 | 329 | // MOUTH 330 | this.blendshapes[BlendshapeNames.ftMouthOpen] = this.blendshapes["jawOpen"]; 331 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + 
this.blendshapes["mouthLeft"]-this.blendshapes["mouthRight"]) / 2.0; 332 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 333 | clamp( 334 | 1 + 335 | (this.blendshapes["mouthSmile_L"]+this.blendshapes["mouthSmile_R"]/2.0) - 336 | (this.blendshapes["mouthFrown_L"]+this.blendshapes["mouthFrown_R"]/2.0), 337 | 0, 2 338 | ) 339 | ) / 2.0; 340 | } else if ("eyeLookOutLeft" in this.blendshapes) { // MEOWFACE 341 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = this.blendshapes["eyeBlinkLeft"]; 342 | this.blendshapes[BlendshapeNames.ftEyeXLeft] = this.blendshapes["eyeLookOutLeft"]-this.blendshapes["eyeLookInLeft"]; 343 | this.blendshapes[BlendshapeNames.ftEyeYLeft] = this.blendshapes["eyeLookUpLeft"]-this.blendshapes["eyeLookDownLeft"]; 344 | this.blendshapes[BlendshapeNames.ftEyeSquintLeft] = this.blendshapes["eyeSquintLeft"]; 345 | this.blendshapes[BlendshapeNames.ftEyeWidenLeft] = this.blendshapes["eyeSquintLeft"]; 346 | 347 | // RIGHT EYE 348 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = this.blendshapes["eyeBlinkRight"]; 349 | this.blendshapes[BlendshapeNames.ftEyeXRight] = this.blendshapes["eyeLookInRight"]-this.blendshapes["eyeLookOutRight"]; 350 | this.blendshapes[BlendshapeNames.ftEyeYRight] = this.blendshapes["eyeLookUpRight"]-this.blendshapes["eyeLookDownRight"]; 351 | this.blendshapes[BlendshapeNames.ftEyeSquintRight] = this.blendshapes["eyeSquintRight"]; 352 | this.blendshapes[BlendshapeNames.ftEyeWidenRight] = this.blendshapes["eyeSquintRight"]; 353 | 354 | // MOUTH 355 | this.blendshapes[BlendshapeNames.ftMouthOpen] = this.blendshapes["jawOpen"]; 356 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + this.blendshapes["mouthLeft"]-this.blendshapes["mouthRight"]) / 2.0; 357 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 358 | clamp( 359 | 1 + 360 | (this.blendshapes["mouthSmileLeft"]+this.blendshapes["mouthSmileRight"]/2.0) - 361 | (this.blendshapes["mouthFrownLeft"]+this.blendshapes["mouthFrownRight"]/2.0), 362 | 0, 2 363 | ) 364 | ) / 2.0; 365 | } 366 | 367 | } else if ("JawOpen" in this.blendshapes) { // WE'RE ON IOS 368 | 369 | 370 | // LEFT EYE 371 | this.blendshapes[BlendshapeNames.ftEyeBlinkLeft] = this.blendshapes["EyeBlinkLeft"]; 372 | this.blendshapes[BlendshapeNames.ftEyeXLeft] = this.blendshapes["EyeLookOutLeft"]-this.blendshapes["EyeLookInLeft"]; 373 | this.blendshapes[BlendshapeNames.ftEyeYLeft] = this.blendshapes["EyeLookUpLeft"]-this.blendshapes["EyeLookDownLeft"]; 374 | this.blendshapes[BlendshapeNames.ftEyeSquintLeft] = this.blendshapes["EyeSquintLeft"]; 375 | this.blendshapes[BlendshapeNames.ftEyeWidenLeft] = this.blendshapes["EyeWideLeft"]; 376 | 377 | // RIGHT EYE 378 | this.blendshapes[BlendshapeNames.ftEyeBlinkRight] = this.blendshapes["EyeBlinkRight"]; 379 | this.blendshapes[BlendshapeNames.ftEyeXRight] = this.blendshapes["EyeLookInRight"]-this.blendshapes["EyeLookOutRight"]; 380 | this.blendshapes[BlendshapeNames.ftEyeYRight] = this.blendshapes["EyeLookUpRight"]-this.blendshapes["EyeLookDownRight"]; 381 | this.blendshapes[BlendshapeNames.ftEyeSquintRight] = this.blendshapes["EyeSquintRight"]; 382 | this.blendshapes[BlendshapeNames.ftEyeWidenRight] = this.blendshapes["EyeWideRight"]; 383 | 384 | // MOUTH 385 | this.blendshapes[BlendshapeNames.ftMouthOpen] = clamp( 386 | 387 | // Avg out the different ways of opening the mouth 388 | ( 389 | ((this.blendshapes["MouthLowerDownLeft"]+this.blendshapes["MouthUpperUpLeft"])/2) + 390 | ((this.blendshapes["MouthLowerDownRight"]+this.blendshapes["MouthUpperUpRight"])/2) 391 | ), 392 | 0, 393 
393 | 1 394 | ); 395 | 396 | this.blendshapes[BlendshapeNames.ftMouthX] = (1 + this.blendshapes["MouthLeft"]-this.blendshapes["MouthRight"]) / 2.0; 397 | this.blendshapes[BlendshapeNames.ftMouthEmotion] = ( 398 | clamp( 399 | 1 + 400 | ((this.blendshapes["MouthSmileLeft"]+this.blendshapes["MouthSmileRight"])/2.0) - 401 | ((this.blendshapes["MouthFrownLeft"]+this.blendshapes["MouthFrownRight"])/2.0), 402 | 0, 2 403 | ) 404 | ) / 2.0; 405 | } 406 | 407 | // If neither branch matched, the tracker sent an unknown key set! 408 | // TODO: make some logs that can be sent to devs? 409 | } 410 | } catch (Exception ex) { } // Some unknown format, drop creating ft blendshapes 411 | } else { 412 | dataLossCounter++; 413 | if (dataLossCounter > RECV_TIMEOUT) gotDataFromFetch = false; 414 | } 415 | } 416 | 
417 | override 418 | bool isRunning() { 419 | return vtsOut !is null; 420 | } 421 | 422 | override 423 | bool isReceivingData() { 424 | return gotDataFromFetch; 425 | } 426 | 427 | override 428 | string[] getOptionNames() { 429 | return [ 430 | "phoneIP", 431 | "appName", 432 | "pollingFactor" 433 | ]; 434 | } 435 | } 436 | --------------------------------------------------------------------------------
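For reference, the keep-alive packet built in sendThread() above is a VTSUDPDataRequest serialized with fghj; with the defaults (appName "facetrack-d", pollingFactor 1) the UDP payload is JSON along these lines (exact field order depends on the serializer):

    {"messageType": "iOSTrackingDataRequest", "time": 1.0, "sentBy": "facetrack-d", "ports": [21412]}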
"~e_result.msg); 82 | enforceHTTP( 83 | req.json.type == Json.Type.object, 84 | HTTPStatus.badRequest, 85 | "No json object in data."); 86 | WebHookData data; 87 | foreach (string key, value; req.json) { 88 | try { 89 | data.data[key] = value.to!float; 90 | } 91 | catch (Exception) { 92 | // Ignore malformed data 93 | } 94 | } 95 | 96 | tsdata.set(data); 97 | res.writeBody(""); 98 | } 99 | 100 | void receiveThread() { 101 | tsdata = WebHookThreadSafeData(new Mutex()); 102 | 103 | HTTPListener listener; 104 | HTTPServerSettings settings = new HTTPServerSettings(); 105 | settings.port = port; 106 | settings.bindAddresses = [bind]; 107 | 108 | auto router = new URLRouter; 109 | router.post("/blendshapes", &this.recvData); 110 | 111 | listener = listenHTTP(settings, router); 112 | synchronized (mutex) { 113 | condition.wait(); 114 | } 115 | listener.stopListening(); 116 | } 117 | 118 | override 119 | void start() { 120 | 121 | if ("port" in this.options) { 122 | string port_str = options["port"]; 123 | if (port_str !is null && port_str != "") 124 | port = this.options["port"].to!ushort; 125 | } 126 | 127 | if ("address" in this.options) { 128 | string addr_str = options["address"]; 129 | if (addr_str !is null && addr_str != "") 130 | bind = this.options["address"]; 131 | } 132 | if (isRunning) { 133 | this.stop(); 134 | } 135 | mutex = new Mutex; 136 | condition = new TaskCondition(mutex); 137 | receivingThread = new Thread(&receiveThread); 138 | receivingThread.start(); 139 | } 140 | 141 | override 142 | void stop() { 143 | if (isRunning) { 144 | condition.notify(); 145 | receivingThread.join(false); 146 | mutex = null; 147 | condition = null; 148 | receivingThread = null; 149 | gotDataFromFetch = false; 150 | } 151 | } 152 | 153 | override 154 | void poll() { 155 | if (tsdata.updated) { 156 | WebHookData data = tsdata.get(); 157 | gotDataFromFetch = data.data.length > 0; 158 | foreach(string key, float value; data.data) { 159 | blendshapes[key] = value; 160 | } 161 | } 162 | } 163 | 164 | override 165 | bool isRunning() { 166 | return receivingThread !is null; 167 | } 168 | 169 | override 170 | string[] getOptionNames() { 171 | return [ 172 | "address", 173 | "port", 174 | ]; 175 | } 176 | 177 | override string getAdaptorName() { 178 | return "Web Hook Receiver"; 179 | } 180 | 181 | override 182 | bool isReceivingData() { 183 | return gotDataFromFetch; 184 | } 185 | } 186 | } 187 | -------------------------------------------------------------------------------- /source/ft/data.d: -------------------------------------------------------------------------------- 1 | module ft.data; 2 | import inmath.linalg; 3 | 4 | enum BlendshapeNames : string { 5 | eyeBlinkLeft = "EyeBlinkLeft", 6 | eyeLookDownLeft = "EyeLookDownLeft", 7 | eyeLookInLeft = "EyeLookInLeft", 8 | eyeLookOutLeft = "EyeLookOutLeft", 9 | eyeLookUpLeft = "EyeLookUpLeft", 10 | eyeSquintLeft = "EyeSquintLeft", 11 | eyeWideLeft = "EyeWideLeft", 12 | 13 | eyeBlinkRight = "EyeBlinkRight", 14 | eyeLookDownRight = "EyeLookDownRight", 15 | eyeLookInRight = "EyeLookInRight", 16 | eyeLookOutRight = "EyeLookOutRight", 17 | eyeLookUpRight = "EyeLookUpRight", 18 | eyeSquintRight = "EyeSquintRight", 19 | eyeWideRight = "EyeWideRight", 20 | 21 | jawForward = "JawForward", 22 | jawLeft = "JawLeft", 23 | jawRight = "JawRight", 24 | jawOpen = "JawOpen", 25 | mouthClose = "MouthClose", 26 | mouthFunnel = "MouthFunnel", 27 | mouthPucker = "MouthPucker", 28 | mouthLeft = "MouthLeft", 29 | mouthRight = "MouthRight", 30 | mouthSmileLeft = "MouthSmileLeft", 
/source/ft/data.d: -------------------------------------------------------------------------------- 1 | module ft.data; 2 | import inmath.linalg; 3 | 4 | enum BlendshapeNames : string { 5 | eyeBlinkLeft = "EyeBlinkLeft", 6 | eyeLookDownLeft = "EyeLookDownLeft", 7 | eyeLookInLeft = "EyeLookInLeft", 8 | eyeLookOutLeft = "EyeLookOutLeft", 9 | eyeLookUpLeft = "EyeLookUpLeft", 10 | eyeSquintLeft = "EyeSquintLeft", 11 | eyeWideLeft = "EyeWideLeft", 12 | 13 | eyeBlinkRight = "EyeBlinkRight", 14 | eyeLookDownRight = "EyeLookDownRight", 15 | eyeLookInRight = "EyeLookInRight", 16 | eyeLookOutRight = "EyeLookOutRight", 17 | eyeLookUpRight = "EyeLookUpRight", 18 | eyeSquintRight = "EyeSquintRight", 19 | eyeWideRight = "EyeWideRight", 20 | 21 | jawForward = "JawForward", 22 | jawLeft = "JawLeft", 23 | jawRight = "JawRight", 24 | jawOpen = "JawOpen", 25 | mouthClose = "MouthClose", 26 | mouthFunnel = "MouthFunnel", 27 | mouthPucker = "MouthPucker", 28 | mouthLeft = "MouthLeft", 29 | mouthRight = "MouthRight", 30 | mouthSmileLeft = "MouthSmileLeft", 31 | mouthSmileRight = "MouthSmileRight", 32 | mouthFrownLeft = "MouthFrownLeft", 33 | mouthFrownRight = "MouthFrownRight", 34 | mouthDimpleLeft = "MouthDimpleLeft", 35 | mouthDimpleRight = "MouthDimpleRight", 36 | mouthStretchLeft = "MouthStretchLeft", 37 | mouthStretchRight = "MouthStretchRight", 38 | mouthRollLower = "MouthRollLower", 39 | mouthRollUpper = "MouthRollUpper", 40 | mouthShrugLower = "MouthShrugLower", 41 | mouthShrugUpper = "MouthShrugUpper", 42 | mouthPressLeft = "MouthPressLeft", 43 | mouthPressRight = "MouthPressRight", 44 | mouthLowerDownLeft = "MouthLowerDownLeft", 45 | mouthLowerDownRight = "MouthLowerDownRight", 46 | mouthUpperUpLeft = "MouthUpperUpLeft", 47 | mouthUpperUpRight = "MouthUpperUpRight", 48 | 49 | browDownLeft = "BrowDownLeft", 50 | browDownRight = "BrowDownRight", 51 | browInnerUp = "BrowInnerUp", 52 | browOuterUpLeft = "BrowOuterUpLeft", 53 | browOuterUpRight = "BrowOuterUpRight", 54 | cheekPuff = "CheekPuff", 55 | cheekSquintLeft = "CheekSquintLeft", 56 | cheekSquintRight = "CheekSquintRight", 57 | noseSneerLeft = "NoseSneerLeft", 58 | noseSneerRight = "NoseSneerRight", 59 | 60 | tongueOut = "TongueOut", 61 | 
62 | vrmNeutral = "NEUTRAL", 63 | vrmA = "A", 64 | vrmI = "I", 65 | vrmU = "U", 66 | vrmE = "E", 67 | vrmO = "O", 68 | vrmBlink = "BLINK", 69 | vrmJoy = "JOY", 70 | vrmAngry = "ANGRY", 71 | vrmSorrow = "SORROW", 72 | vrmFun = "FUN", 73 | vrmLookUp = "LOOKUP", 74 | vrmLookLeft = "LOOKLEFT", 75 | vrmLookRight = "LOOKRIGHT", 76 | vrmBlinkLeft = "BLINK_L", 77 | vrmBlinkRight = "BLINK_R", 78 | 
79 | // LEFT EYE 80 | ftEyeBlinkLeft = "ftEyeBlinkLeft", 81 | ftEyeYLeft = "ftEyeYLeft", 82 | ftEyeXLeft = "ftEyeXLeft", 83 | ftEyeSquintLeft = "ftEyeSquintLeft", 84 | ftEyeWidenLeft = "ftEyeWidenLeft", 85 | 86 | // RIGHT EYE 87 | ftEyeBlinkRight = "ftEyeBlinkRight", 88 | ftEyeYRight = "ftEyeYRight", 89 | ftEyeXRight = "ftEyeXRight", 90 | ftEyeSquintRight = "ftEyeSquintRight", 91 | ftEyeWidenRight = "ftEyeWidenRight", 92 | 93 | // MOUTH 94 | ftMouthOpen = "ftMouthOpen", 95 | ftMouthX = "ftMouthX", 96 | ftMouthEmotion = "ftMouthEmotion", 97 | 98 | // VOWELS 99 | ftA = "ftA", 100 | ftI = "ftI", 101 | ftU = "ftU", 102 | ftE = "ftE", 103 | ftO = "ftO", 104 | } 105 | 
106 | /** 107 | Names of humanoid bones according to Unity 108 | and the VMC protocol 109 | */ 110 | enum BoneNames { 111 | vmcHips = "Hips", 112 | vmcLeftUpperLeg = "LeftUpperLeg", 113 | vmcRightUpperLeg = "RightUpperLeg", 114 | vmcLeftLowerLeg = "LeftLowerLeg", 115 | vmcRightLowerLeg = "RightLowerLeg", 116 | vmcLeftFoot = "LeftFoot", 117 | vmcRightFoot = "RightFoot", 118 | vmcSpine = "Spine", 119 | vmcChest = "Chest", 120 | vmcUpperChest = "UpperChest", 121 | vmcNeck = "Neck", 122 | vmcHead = "Head", 123 | vmcLeftShoulder = "LeftShoulder", 124 | vmcRightShoulder = "RightShoulder", 125 | vmcLeftUpperArm = "LeftUpperArm", 126 | vmcRightUpperArm = "RightUpperArm", 127 | vmcLeftLowerArm = "LeftLowerArm", 128 | vmcRightLowerArm = "RightLowerArm", 129 | vmcLeftHand = "LeftHand", 130 | vmcRightHand = "RightHand", 131 | vmcLeftToes = "LeftToes", 132 | vmcRightToes = "RightToes", 133 | vmcLeftEye = "LeftEye", 134 | vmcRightEye = "RightEye", 135 | vmcJaw = "Jaw", 136 | vmcLeftThumbProximal = "LeftThumbProximal", 137 | vmcLeftThumbIntermediate = "LeftThumbIntermediate", 138 | vmcLeftThumbDistal = "LeftThumbDistal", 139 | vmcLeftIndexProximal = "LeftIndexProximal", 140 | vmcLeftIndexIntermediate = "LeftIndexIntermediate", 141 | vmcLeftIndexDistal = "LeftIndexDistal", 142 | vmcLeftMiddleProximal =
"LeftMiddleProximal", 143 | vmcLeftMiddleIntermediate = "LeftMiddleIntermediate", 144 | vmcLeftMiddleDistal = "LeftMiddleDistal", 145 | vmcLeftRingProximal = "LeftRingProximal", 146 | vmcLeftRingIntermediate = "LeftRingIntermediate", 147 | vmcLeftRingDistal = "LeftRingDistal", 148 | vmcLeftLittleProximal = "LeftLittleProximal", 149 | vmcLeftLittleIntermediate = "LeftLittleIntermediate", 150 | vmcLeftLittleDistal = "LeftLittleDistal", 151 | vmcRightThumbProximal = "RightThumbProximal", 152 | vmcRightThumbIntermediate = "RightThumbIntermediate", 153 | vmcRightThumbDistal = "RightThumbDistal", 154 | vmcRightIndexProximal = "RightIndexProximal", 155 | vmcRightIndexIntermediate = "RightIndexIntermediate", 156 | vmcRightIndexDistal = "RightIndexDistal", 157 | vmcRightMiddleProximal = "RightMiddleProximal", 158 | vmcRightMiddleIntermediate = "RightMiddleIntermediate", 159 | vmcRightMiddleDistal = "RightMiddleDistal", 160 | vmcRightRingProximal = "RightRingProximal", 161 | vmcRightRingIntermediate = "RightRingIntermediate", 162 | vmcRightRingDistal = "RightRingDistal", 163 | vmcRightLittleProximal = "RightLittleProximal", 164 | vmcRightLittleIntermediate = "RightLittleIntermediate", 165 | vmcRightLittleDistal = "RightLittleDistal", 166 | vmcLastBone = "LastBone", 167 | 168 | 169 | ftHead = "Head" 170 | } 171 | 172 | struct Bone { 173 | /** 174 | Position of the bone 175 | */ 176 | vec3 position = vec3(0); 177 | 178 | /** 179 | Rotation of the bone 180 | */ 181 | quat rotation = quat.identity; 182 | } -------------------------------------------------------------------------------- /source/ft/package.d: -------------------------------------------------------------------------------- 1 | module ft; 2 | public import ft.adaptor; 3 | public import ft.data; 4 | public import ft.adaptors; --------------------------------------------------------------------------------