├── .github
└── workflows
│ └── build.yml
├── .gitignore
├── .gitmodules
├── LICENSE
├── README.md
├── VRCFT Module Example
├── ExternalTrackingModule.cs
└── VRCFT Module Example.csproj
├── VRCFaceTracking.sln
└── VRCFaceTracking
├── ArgsHandler.cs
├── Assets
├── Images
│ ├── ColorBars.png
│ ├── LogoIndicators
│ │ ├── Active
│ │ │ ├── Bottom.png
│ │ │ └── Top.png
│ │ ├── Idle
│ │ │ ├── Bottom.png
│ │ │ └── Top.png
│ │ └── Uninitialized
│ │ │ ├── Bottom.png
│ │ │ └── Top.png
│ └── VRCFT.ico
└── UI
│ ├── App.xaml
│ ├── App.xaml.cs
│ ├── MainWindow.xaml
│ └── MainWindow.xaml.cs
├── ConfigParser.cs
├── DependencyManager.cs
├── Logger.cs
├── MainStandalone.cs
├── OSC
├── OSCBundle.cs
├── OSCMain.cs
├── OSCMessage.cs
└── OSCParams.cs
├── Params
├── Eye
│ └── EyeTrackingParams.cs
├── IParameter.cs
├── Lip
│ ├── LipShapeConversion.cs
│ └── LipShapeMerger.cs
├── ParamContainers.cs
├── Vector2.cs
├── Vector3.cs
└── XYParam.cs
├── SDK
└── ExtTrackingModule.cs
├── TrackingLibs
└── SRanipal
│ ├── Eye
│ ├── SRanipal_EyeData.cs
│ ├── SRanipal_EyeDataType_v2.cs
│ ├── SRanipal_EyeShapeTableDrawer.cs
│ ├── SRanipal_EyeShapeTableDrawer_v2.cs
│ ├── SRanipal_Eye_API.cs
│ ├── SRanipal_Eye_Enums.cs
│ └── SRanipal_Eye_v2.cs
│ ├── Lip
│ ├── SRanipal_LipData_v2.cs
│ ├── SRanipal_Lip_API.cs
│ └── SRanipal_Lip_v2.cs
│ ├── SRWorks_Enums.cs
│ ├── SRWorks_Log.dll
│ ├── SRanipal.dll
│ ├── SRanipalTrackingInterface.cs
│ ├── SRanipal_API.cs
│ ├── SRanipal_Enums.cs
│ ├── ViveSR_Client.dll
│ ├── libHTC_License.dll
│ └── nanomsg.dll
├── UnifiedLibManager.cs
├── UnifiedTrackingData.cs
├── Utils.cs
├── VRCFaceTracking.csproj
├── VRChat.cs
└── packages.config
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
name: Build

on:
  push:
  pull_request:
    types: [ opened, edited ]

jobs:
  build-dotnet-framework:
    runs-on: windows-latest
    name: .NET Framework 4.7.2
    steps:

      - uses: actions/checkout@v2

      - name: Set up NuGet
        uses: NuGet/setup-nuget@v1.0.5

      - name: Restore NuGet packages
        run: |
          nuget restore VRCFaceTracking.sln

      - name: Build solution
        run: |
          # Locate MSBuild via vswhere and prepend it to PATH for this step.
          $MS_BUILD_PATH = Split-Path (& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -requires Microsoft.Component.MSBuild -find MSBuild\Current\Bin\amd64\MSBuild.exe | Select-Object -First 1) -Parent
          $env:PATH = $MS_BUILD_PATH + ';' + $env:PATH

          foreach ($config in 'Debug', 'Release') {
            Write-Host "Building $config";
            msbuild -v:m -m -restore -t:Build -p:Configuration=$config -p:TargetFramework=net472 VRCFaceTracking/VRCFaceTracking.csproj
          }

      - name: Capture version
        id: versioning
        run: |
          # '::set-output' is deprecated by GitHub Actions; write the step
          # output to $GITHUB_OUTPUT instead (same key, same downstream usage).
          "VERSION=$(Get-Date -Format 'yyyy.MM.dd').$(git rev-parse --short HEAD)" >> $env:GITHUB_OUTPUT

      - name: Upload artifacts
        uses: actions/upload-artifact@v2
        with:
          name: VRCFaceTracking_dev_${{ steps.versioning.outputs.VERSION }}
          path: VRCFaceTracking/bin/*
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | bin/
2 | obj/
3 | /packages/
4 | riderModule.iml
5 | /_ReSharper.Caches/
6 | VRCFaceTracking/ParamLib
7 | VRCFaceTracking/ParamLib/*
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "VRCFaceTracking/ParamLib"]
2 | path = VRCFaceTracking/ParamLib
3 | url = https://github.com/benaclejames/ParamLib.git
4 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 👀 VRCFaceTracking
2 |
3 | Provides real eye tracking and lip tracking in VRChat via the HTC Vive Pro Eye's SRanipal SDK.
4 |
5 | [](https://discord.gg/Fh4FNehzKn)
6 |
7 | ## 🎥 Demo
8 |
9 | [](https://www.youtube.com/watch?v=5h4_mYDcgzM)
10 |
11 | https://www.youtube.com/watch?v=KbbfYW-hnMk
12 |
13 | ## 🛠 Avatar Setup
14 |
15 | For this app to work, you'll need to be using an avatar with the correct parameters or an avatar config file with the correct mappings. The system is designed to control your avatar's eyes and lips via simple blend states but what the parameters control is completely up to you.
16 |
17 | ### [List of Parameters](https://github.com/benaclejames/VRCFaceTracking/wiki/Parameters)
18 |
19 | ## 👀 [Eye Parameters](https://github.com/benaclejames/VRCFaceTracking/wiki/Parameters#eye-tracking-parameters)
20 |
21 | ### [Eye Tracking Setup Guide](https://github.com/benaclejames/VRCFaceTracking/wiki/Eye-Tracking-Setup)
22 |
23 | It's not required to use all of these parameters. Similar to the setup of parameters with Unity Animation Controllers, these are all case-sensitive and must be copied **EXACTLY** as shown into your Avatar's base parameters. A typical setup might look something like this:
24 | 
25 |
26 | **Please make sure you disable the built-in simulated eye tracking in your avatar descriptor**. This will almost certainly mess with things if left on. Personally, I've also had some issues with blink blendshapes being overridden by my gesture layer so if you can see your eyes fine but others see them half closed, I would recommend removing your Additive layer so the default is not applied. It should say "None (Runtime Animator Controller)" if it's removed correctly.
27 |
28 | Feel free to [consult the wiki](https://github.com/benaclejames/VRCFaceTracking/wiki/Eye-Tracking-Setup) for a setup guide and more info as to what each parameter does
29 |
30 | ## :lips: [Lip Parameters](https://github.com/benaclejames/VRCFaceTracking/wiki/Parameters#lip-tracking-parameters)
31 |
32 | There are a large number of parameters you can use for lip tracking.
33 |
34 | ### [Lip Tracking Setup Guide](https://github.com/benaclejames/VRCFaceTracking/wiki/Lip-Tracking-Setup) - Basic setup guide
35 |
36 | ### [Combined Lip Parameters](https://github.com/benaclejames/VRCFaceTracking/wiki/Parameters#combined-lip-parameters) - Combined parameters to group mutually exclusive face shapes.
37 |
38 | ### [Blend Shape Setup](https://github.com/benaclejames/VRCFaceTracking/wiki/Blend-Shapes-Setup) - Reference of standard blend shapes to be used with face tracking
39 |
40 | You can also refer to this visual guide from NeosVR on what the following API parameters do: https://casuallydotcat.wordpress.com/2020/02/10/the-ultimate-neos-blend-shape-guide-february-2020/
41 |
42 | ## ⛓ External Modules
43 |
44 | Use the following modules to add support for other hardware:
45 |
46 | * [VRCFTVarjoModule](https://github.com/m3gagluk/VRCFTVarjoModule) - Adds support for Varjo eye tracking (Varjo Aero)
47 | * [LiveLink](https://github.com/Dazbme/VRCFaceTracking-LiveLink) - Adds support for LiveLink face tracking (iPhone)
48 | * [PimaxEyeTracking](https://github.com/Dazbme/VRCFaceTracking-LiveLink/tree/PimaxEyeTracking) - Adds support for Pimax eye tracking
49 | * [VRCFTOmniceptModule](https://github.com/200Tigersbloxed/VRCFTOmniceptModule) - Adds support for HP Omnicept eye tracking
50 | * [NoVRCFT](https://github.com/dfgHiatus/NoVRCFT) - Adds support for webcam based eye and face tracking using NeosWCFaceTrack and OpenSeeFace
51 |
52 | ## 👋 Credits
53 |
54 | * [VIVE](https://www.vive.com/) for the SRanipal SDK and their awesome hardware! ❤
55 |
56 | 
57 |
--------------------------------------------------------------------------------
/VRCFT Module Example/ExternalTrackingModule.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Threading;
3 | using VRCFaceTracking;
4 | using VRCFaceTracking.Params;
5 |
6 | namespace VRCFT_Module_Example
7 | {
// Example "single-eye" data response.
public struct ExampleExternalTrackingDataEye
{
    // Lid openness; copied straight into Eye.Openness by TrackingData.Update.
    // Presumably 0 = closed, 1 = fully open — confirm with the tracker's docs.
    public float eye_lid_openness;
    // Gaze components; fed into Eye.Look as a Vector2(eye_x, eye_y).
    public float eye_x;
    public float eye_y;
}
15 |
// Example "full-data" response from the external tracking system:
// one single-eye sample per eye.
public struct ExampleExternalTrackingDataStruct
{
    public ExampleExternalTrackingDataEye left_eye;
    public ExampleExternalTrackingDataEye right_eye;
}
22 |
23 |
// Conversion routines that translate the example module's native data
// structures into VRCFT's tracking structs.
public static class TrackingData
{
    // Copies one external single-eye sample (gaze + lid openness) into a
    // VRCFT Eye struct.
    public static void Update(ref Eye data, ExampleExternalTrackingDataEye external)
    {
        data.Openness = external.eye_lid_openness;
        data.Look = new Vector2(external.eye_x, external.eye_y);
    }

    // Splits a full external response into VRCFT's per-eye structs.
    // NOTE(review): the external left eye feeds data.Right and vice versa —
    // this looks like deliberate mirroring, but confirm against the consumer.
    public static void Update(ref EyeTrackingData data, ExampleExternalTrackingDataStruct external)
    {
        Update(ref data.Right, external.left_eye);
        Update(ref data.Left, external.right_eye);
    }
}
41 |
// Minimal reference implementation of a VRCFT external tracking module.
public class ExternalExtTrackingModule : ExtTrackingModule
{
    // Advertise support for both eye and lip tracking.
    public override (bool SupportsEye, bool SupportsLip) Supported => (true, true);

    // Synchronous module initialization; runs on the init thread, so it may
    // take as long as it needs. Reports eye tracking as initialized and lip
    // tracking as unavailable.
    public override (bool eyeSuccess, bool lipSuccess) Initialize(bool eye, bool lip)
    {
        Console.WriteLine("Initializing inside external module");
        return (true, false);
    }

    // Supplies the loop executed on the tracking thread, giving the module
    // full control over when tracking data is refreshed. Polls every 10 ms.
    public override Action GetUpdateThreadFunc() =>
        () =>
        {
            for (;;)
            {
                Update();
                Thread.Sleep(10);
            }
        };

    // Kept public and separate from the thread func so the host can drive it
    // directly when launched with --vrcft-nothread.
    public void Update()
    {
        Console.WriteLine("Updating inside external module.");

        if (Status.EyeState == ModuleState.Active)
        {
            Console.WriteLine("Eye data is being utilized.");
        }

        if (Status.LipState == ModuleState.Active)
        {
            Console.WriteLine("Lip data is being utilized.");
        }
    }

    // Synchronous cleanup, run on the main game thread; must not touch any
    // Unity objects here.
    public override void Teardown() => Console.WriteLine("Teardown");
}
83 | }
--------------------------------------------------------------------------------
/VRCFT Module Example/VRCFT Module Example.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Debug
6 | AnyCPU
7 | {296110E5-3AE7-4D9F-BE09-890194CDD76E}
8 | Library
9 | Properties
10 | VRCFT_Module_Example
11 | VRCFT_Module_Example
12 | v4.7.2
13 | 512
14 | false
15 |
16 |
17 | AnyCPU
18 | true
19 | full
20 | false
21 | bin\Debug\
22 | DEBUG;TRACE
23 | prompt
24 | 4
25 |
26 |
27 | AnyCPU
28 | pdbonly
29 | true
30 | bin\Release\
31 | TRACE
32 | prompt
33 | 4
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 | {0767c09e-d536-464b-b0a1-46d5bfa19e98}
47 | VRCFaceTracking
48 |
49 |
50 |
51 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/VRCFaceTracking.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 16
4 | VisualStudioVersion = 16.0.32126.315
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VRCFaceTracking", "VRCFaceTracking\VRCFaceTracking.csproj", "{0767C09E-D536-464B-B0A1-46D5BFA19E98}"
7 | EndProject
8 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VRCFT Module Example", "VRCFT Module Example\VRCFT Module Example.csproj", "{296110E5-3AE7-4D9F-BE09-890194CDD76E}"
9 | EndProject
10 | Global
11 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
12 | Debug|Any CPU = Debug|Any CPU
13 | Release|Any CPU = Release|Any CPU
14 | EndGlobalSection
15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
16 | {0767C09E-D536-464B-B0A1-46D5BFA19E98}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
17 | {0767C09E-D536-464B-B0A1-46D5BFA19E98}.Debug|Any CPU.Build.0 = Debug|Any CPU
18 | {0767C09E-D536-464B-B0A1-46D5BFA19E98}.Release|Any CPU.ActiveCfg = Release|Any CPU
19 | {0767C09E-D536-464B-B0A1-46D5BFA19E98}.Release|Any CPU.Build.0 = Release|Any CPU
20 | {296110E5-3AE7-4D9F-BE09-890194CDD76E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
21 | {296110E5-3AE7-4D9F-BE09-890194CDD76E}.Debug|Any CPU.Build.0 = Debug|Any CPU
22 | {296110E5-3AE7-4D9F-BE09-890194CDD76E}.Release|Any CPU.ActiveCfg = Release|Any CPU
23 | {296110E5-3AE7-4D9F-BE09-890194CDD76E}.Release|Any CPU.Build.0 = Release|Any CPU
24 | EndGlobalSection
25 | GlobalSection(SolutionProperties) = preSolution
26 | HideSolutionNode = FALSE
27 | EndGlobalSection
28 | GlobalSection(ExtensibilityGlobals) = postSolution
29 | SolutionGuid = {7160120F-762F-4906-B9F5-F0B1AB71E928}
30 | EndGlobalSection
31 | EndGlobal
32 |
--------------------------------------------------------------------------------
/VRCFaceTracking/ArgsHandler.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Text.RegularExpressions;
3 |
4 | namespace VRCFaceTracking
5 | {
public static class ArgsHandler
{
    // IPv4 dotted-quad validator; built once instead of once per argument.
    private static readonly Regex Ipv4Regex = new Regex(
        "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
        RegexOptions.Compiled);

    // Parses the optional --osc=<SendPort>:<IP>:<ReceivePort> command line
    // argument. Returns (9000, "127.0.0.1", 9001) when the argument is absent;
    // a malformed argument is reported to the console and the defaults are
    // kept intact. (Tuple element name "RecievePort" is preserved as-is —
    // callers bind to it.)
    public static (int SendPort, string IP, int RecievePort) HandleArgs()
    {
        (int SendPort, string IP, int RecievePort) = (9000, "127.0.0.1", 9001);

        foreach (var arg in Environment.GetCommandLineArgs())
        {
            if (!arg.StartsWith("--osc="))
                continue;

            var oscConfig = arg.Remove(0, 6).Split(':');
            if (oscConfig.Length < 3)
            {
                Console.WriteLine("Invalid OSC config: " + arg + "\nExpected format: --osc=<SendPort>:<IP>:<ReceivePort>");
                break;
            }

            // Parse into temporaries: int.TryParse zeroes its out parameter on
            // failure, so parsing directly into the result would replace the
            // defaults with 0 when the argument is malformed.
            if (!int.TryParse(oscConfig[0], out var sendPort))
            {
                Console.WriteLine("Invalid OSC OutPort: " + oscConfig[0]);
                break;
            }

            if (!Ipv4Regex.IsMatch(oscConfig[1]))
            {
                Console.WriteLine("Invalid OSC IP: " + oscConfig[1]);
                break;
            }

            if (!int.TryParse(oscConfig[2], out var recievePort))
            {
                Console.WriteLine("Invalid OSC InPort: " + oscConfig[2]);
                break;
            }

            // All three components validated — commit them together.
            (SendPort, IP, RecievePort) = (sendPort, oscConfig[1], recievePort);
        }

        return (SendPort, IP, RecievePort);
    }
}
47 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/ColorBars.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/ColorBars.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/LogoIndicators/Active/Bottom.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/LogoIndicators/Active/Bottom.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/LogoIndicators/Active/Top.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/LogoIndicators/Active/Top.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/LogoIndicators/Idle/Bottom.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/LogoIndicators/Idle/Bottom.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/LogoIndicators/Idle/Top.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/LogoIndicators/Idle/Top.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/LogoIndicators/Uninitialized/Bottom.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/LogoIndicators/Uninitialized/Bottom.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/LogoIndicators/Uninitialized/Top.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/LogoIndicators/Uninitialized/Top.png
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/Images/VRCFT.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/Assets/Images/VRCFT.ico
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/UI/App.xaml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/UI/App.xaml.cs:
--------------------------------------------------------------------------------
1 | using System.Threading;
2 | using System.Windows;
3 | using System.Windows.Data;
4 |
5 | namespace VRCFaceTracking.Assets.UI
6 | {
// WPF application shell. The real work lives in MainStandalone, which is
// started on a dedicated background thread from the constructor.
public partial class App
{
    // Runs MainStandalone.Initialize off the UI thread so startup work
    // doesn't block WPF.
    private readonly Thread _oscThread = new Thread(MainStandalone.Initialize);

    public App()
    {
        // Let the UI thread data-bind to Logger.ConsoleOutput while other
        // threads append to it, synchronized on Logger.ConsoleLock.
        BindingOperations.EnableCollectionSynchronization(Logger.ConsoleOutput, Logger.ConsoleLock);
        _oscThread.Start();
    }

    // NOTE(review): finalizers are not guaranteed to run at process exit, and
    // this duplicates the App_OnExit teardown below — confirm it is needed.
    ~App() => MainStandalone.Teardown();

    private void App_OnExit(object sender, ExitEventArgs e) => MainStandalone.Teardown();
}
21 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/UI/MainWindow.xaml:
--------------------------------------------------------------------------------
1 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
60 |
61 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
--------------------------------------------------------------------------------
/VRCFaceTracking/Assets/UI/MainWindow.xaml.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.ObjectModel;
3 | using System.Drawing;
4 | using System.Linq;
5 | using System.Reflection;
6 | using System.Threading;
7 | using System.Windows;
8 | using System.Windows.Controls;
9 | using System.Windows.Forms;
10 | using System.Windows.Media;
11 | using System.Windows.Media.Imaging;
12 | using ContextMenu = System.Windows.Forms.ContextMenu;
13 | using MenuItem = System.Windows.Forms.MenuItem;
14 |
15 | namespace VRCFaceTracking.Assets.UI
16 | {
17 | public partial class MainWindow
18 | {
// Page-visibility flags polled by the image-update loop (UpdateLipImage
// bails out when its page is hidden). Presumably set when the user switches
// tabs — the setters are not visible in this chunk; confirm against the XAML
// code-behind handlers.
public static bool IsLipPageVisible { get; private set; }
public static bool IsEyePageVisible { get; private set; }

// System-tray icon; the .ico is loaded from the assembly's embedded resources.
public static readonly NotifyIcon TrayIcon = new NotifyIcon
{
    Icon = new Icon(Assembly.GetExecutingAssembly().GetManifestResourceStream("VRCFaceTracking.Assets.Images.VRCFT.ico")),
    Text = "VRCFaceTracking",
    Visible = true,
};
28 |
29 | private void ShowWindow(object sender, EventArgs args)
30 | {
31 | Show();
32 | WindowState = WindowState.Normal;
33 | }
34 |
35 | public MainWindow()
36 | {
37 | InitializeComponent();
38 | // If --min is passed as a command line arg, hide the window immediately
39 | if (Environment.GetCommandLineArgs().Length > 1 && Environment.GetCommandLineArgs().Any(arg => arg == "--min"))
40 | {
41 | WindowState = WindowState.Minimized;
42 | Hide();
43 | }
44 |
45 | DataContext = this;
46 |
47 | ConsoleOutput.CollectionChanged += (sender, args) =>
48 | Dispatcher.BeginInvoke(new ThreadStart(() => Scroller.ScrollToVerticalOffset(Scroller.ExtentHeight)));
49 |
50 |
51 | // use the application icon as the icon for the tray
52 | TrayIcon.DoubleClick += ShowWindow;
53 | TrayIcon.ContextMenu = new ContextMenu(new[]
54 | {
55 | new MenuItem("Exit", (sender, args) => MainStandalone.Teardown()),
56 | new MenuItem("Show", ShowWindow)
57 | });
58 |
59 | // Is this running as admin?
60 | // If not, disable the re-int button
61 | if (!Utils.HasAdmin)
62 | {
63 | // Apparently, windows form buttons don't allow for text wrapping
64 | ReinitializeButton.Content =
65 | "Reinitialization is enabled \n only when the application \n is running as administrator.";
66 | ReinitializeButton.FontSize = 10f;
67 | ReinitializeButton.IsEnabled = false;
68 | }
69 |
70 | UnifiedLibManager.OnTrackingStateUpdate += (eye, lip) =>
71 | Dispatcher.BeginInvoke(new ThreadStart(() => UpdateLogo(eye, lip)));
72 |
73 | // Start a new thread to update the lip image
74 | new Thread(() =>
75 | {
76 | while (!MainStandalone.MasterCancellationTokenSource.IsCancellationRequested)
77 | {
78 | Thread.Sleep(10);
79 | Dispatcher.BeginInvoke(new ThreadStart(() =>
80 | {
81 | UpdateEyeImage();
82 | UpdateLipImage();
83 | }));
84 | }
85 | }).Start();
86 | }
87 |
88 | void UpdateLipImage()
89 | {
90 | if (!IsLipPageVisible || UnifiedTrackingData.LatestLipData.ImageData == null) // If the image is not initialized
91 | return;
92 |
93 | var bitmap = LipImage.Source;
94 | if (bitmap == null || bitmap.GetType() != typeof(WriteableBitmap))
95 | {
96 | bitmap = new WriteableBitmap(UnifiedTrackingData.LatestLipData.ImageSize.x,
97 | UnifiedTrackingData.LatestLipData.ImageSize.y, 96, 96, PixelFormats.Gray8, null);
98 | }
99 | ((WriteableBitmap)bitmap).WritePixels(new Int32Rect(0, 0, UnifiedTrackingData.LatestLipData.ImageSize.x,
100 | UnifiedTrackingData.LatestLipData.ImageSize.y), UnifiedTrackingData.LatestLipData.ImageData, 800, 0);
101 |
102 | // Set the WPF image name LipImage
103 | LipImage.Source = bitmap;
104 | }
105 |
106 | void UpdateEyeImage()
107 | {
108 | if (!IsEyePageVisible || UnifiedTrackingData.LatestEyeData.ImageData == null) // If the image is not initialized
109 | return;
110 |
111 | var bitmap = EyeImage.Source;
112 | if (bitmap == null || bitmap.GetType() != typeof(WriteableBitmap))
113 | {
114 | bitmap = new WriteableBitmap(UnifiedTrackingData.LatestEyeData.ImageSize.x,
115 | UnifiedTrackingData.LatestEyeData.ImageSize.y, 96, 96, PixelFormats.Gray8, null);
116 | }
117 |
118 | ((WriteableBitmap)bitmap).WritePixels(new Int32Rect(0, 0, UnifiedTrackingData.LatestEyeData.ImageSize.x,
119 | UnifiedTrackingData.LatestEyeData.ImageSize.y), UnifiedTrackingData.LatestEyeData.ImageData, UnifiedTrackingData.LatestEyeData.ImageSize.x, 0);
120 |
121 | // Set the WPF image name EyeImage
122 | EyeImage.Source = bitmap;
123 | }
124 |
125 | public ObservableCollection> ConsoleOutput => Logger.ConsoleOutput;
126 |
127 | private void AvatarInfoUpdate(object sender, System.Windows.Controls.SelectionChangedEventArgs e)
128 | {
129 | // Nothing should go here AFAIK
130 | }
131 |
132 | private void ReinitializeClick(object sender, RoutedEventArgs e)
133 | {
134 | Logger.Msg("Reinitializing...");
135 | UnifiedLibManager.Initialize();
136 | }
137 |
138 | private void PauseClickEyes(object sender, RoutedEventArgs e)
139 | {
140 | if (UnifiedLibManager.EyeStatus == ModuleState.Uninitialized) // We don't wanna change states of an inactive module
141 | return;
142 |
143 | UnifiedLibManager.EyeStatus = UnifiedLibManager.EyeStatus == ModuleState.Idle ? ModuleState.Active : ModuleState.Idle;
144 | }
145 |
146 | private void PauseClickMouth(object sender, RoutedEventArgs e)
147 | {
148 | if (UnifiedLibManager.LipStatus == ModuleState.Uninitialized) // We don't wanna change states of an inactive module
149 | return;
150 |
151 | UnifiedLibManager.LipStatus = UnifiedLibManager.LipStatus == ModuleState.Idle ? ModuleState.Active : ModuleState.Idle;
152 | }
153 |
154 | private void UpdateLogo(ModuleState eyeState, ModuleState lipState)
155 | {
156 | VRCFTLogoTop.Source =
157 | new BitmapImage(new Uri(@"../Images/LogoIndicators/" + eyeState + "/Top.png",
158 | UriKind.Relative));
159 | VRCFTLogoBottom.Source =
160 | new BitmapImage(new Uri(@"../Images/LogoIndicators/" + lipState + "/Bottom.png",
161 | UriKind.Relative));
162 | }
163 |
164 | private void MainWindow_OnSizeChanged(object sender, EventArgs eventArgs)
165 | {
166 | if (this.WindowState == WindowState.Minimized)
167 | {
168 | Hide();
169 | }
170 | }
171 |
172 | private void TabController_OnSelectionChanged(object sender, SelectionChangedEventArgs e)
173 | {
174 | IsEyePageVisible = TabController.SelectedIndex == 1;
175 | IsLipPageVisible = TabController.SelectedIndex == 2;
176 | }
177 | }
178 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/ConfigParser.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using System.Text.Json;
6 | using System.Text.Json.Serialization;
7 |
8 | namespace VRCFaceTracking
9 | {
public static class ConfigParser
{
    // One input/output endpoint from a VRChat avatar OSC config file.
    // Property names are lowercase to match the JSON produced by VRChat.
    public class InputOutputDef
    {
        public string address { get; set; }
        public string type { get; set; }

        // Maps the config file's type string back to a CLR type via the Utils table;
        // null when the string is unknown.
        [JsonIgnore]
        public Type Type => Utils.TypeConversions.Where(conversion => conversion.Value.configType == type).Select(conversion => conversion.Key).FirstOrDefault();
    }

    // A single avatar parameter with optional input/output endpoints.
    public class Parameter
    {
        public string name { get; set; }
        public InputOutputDef input { get; set; }
        public InputOutputDef output { get; set; }
    }

    // Root of an avatar OSC config file.
    public class AvatarConfigSpec
    {
        public string id { get; set; }
        public string name { get; set; }
        public List<Parameter> parameters { get; set; }
    }

    // Fired after a new avatar config has been applied to all tracked parameters.
    public static Action OnConfigLoaded = () => { };

    /// <summary>
    /// Locates the OSC config file for the given avatar id under every user folder,
    /// resets all tracked parameters against it, and raises <see cref="OnConfigLoaded"/>.
    /// Logs an error and returns if no matching config is found.
    /// </summary>
    public static void ParseNewAvatar(string newId)
    {
        AvatarConfigSpec avatarConfig = null;
        foreach (var userFolder in Directory.GetDirectories(VRChat.VRCOSCDirectory))
        {
            var avatarsDir = Path.Combine(userFolder, "Avatars");
            if (!Directory.Exists(avatarsDir))
                continue;

            foreach (var avatarFile in Directory.GetFiles(avatarsDir))
            {
                AvatarConfigSpec tempConfig;
                try
                {
                    tempConfig = JsonSerializer.Deserialize<AvatarConfigSpec>(File.ReadAllText(avatarFile));
                }
                catch (Exception e) when (e is IOException || e is JsonException)
                {
                    // A locked or malformed config file shouldn't abort the whole scan.
                    Logger.Warning("Skipping unreadable avatar config " + avatarFile + ": " + e.Message);
                    continue;
                }

                if (tempConfig == null || tempConfig.id != newId)
                    continue;

                avatarConfig = tempConfig;
                break;
            }

            if (avatarConfig != null)
                break; // Found it; no need to scan remaining user folders.
        }

        if (avatarConfig == null)
        {
            Logger.Error("Avatar config file for " + newId + " not found");
            return;
        }

        Logger.Msg("Parsing config file for avatar: " + avatarConfig.name);
        // Only parameters with an input endpoint can be driven by us.
        var parameters = (avatarConfig.parameters ?? new List<Parameter>())
            .Where(param => param.input != null).ToArray();
        foreach (var parameter in UnifiedTrackingData.AllParameters)
            parameter.ResetParam(parameters);

        OnConfigLoaded();
    }
}
69 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/DependencyManager.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using System.Reflection;
6 | using System.Runtime.InteropServices;
7 |
8 | namespace VRCFaceTracking
9 | {
public static class DependencyManager
{
    // Because SRanipal.dll needs to be loaded last.. Too lazy to automate moving it to back of load queue
    private static readonly string[] AssembliesToLoad = {
        "SRanipal.libHTC_License.dll",
        "SRanipal.nanomsg.dll",
        "SRanipal.SRWorks_Log.dll",
        "SRanipal.ViveSR_Client.dll",
        "SRanipal.SRanipal.dll"
    };

    /// <summary>
    /// Hooks assembly resolution, extracts the embedded native libraries to disk
    /// (in declared order), and loads each one with LoadLibrary.
    /// </summary>
    public static void Load()
    {
        AppDomain.CurrentDomain.AssemblyResolve += OnResolveAssembly;

        var dllPaths = ExtractAssemblies(AssembliesToLoad);
        foreach (var path in dllPaths)
            LoadAssembly(path);
    }

    // Resolves managed assemblies from embedded resources. This handler is not
    // invoked when the assembly already exists next to the executable.
    private static Assembly OnResolveAssembly(object sender, ResolveEventArgs e)
    {
        var thisAssembly = Assembly.GetExecutingAssembly();

        var assemblyName = new AssemblyName(e.Name);
        var dllName = assemblyName.Name + ".dll";

        var resources = thisAssembly.GetManifestResourceNames().Where(s => s.EndsWith(dllName));
        if (!resources.Any())
            return null; // Not embedded; let the runtime keep searching.

        // 99% of cases will only have one matching item.
        var resourceName = resources.First();
        using (var stream = thisAssembly.GetManifestResourceStream(resourceName))
        {
            if (stream == null) return null;

            try
            {
                // CopyTo guarantees the whole stream is consumed; a single
                // Stream.Read call is allowed to return fewer bytes than asked.
                using (var buffer = new MemoryStream())
                {
                    stream.CopyTo(buffer);
                    return Assembly.Load(buffer.ToArray());
                }
            }
            catch (IOException)
            {
                return null;
            }
            catch (BadImageFormatException)
            {
                return null;
            }
        }
    }

    // Writes each embedded resource to the persistent StockLibs folder and returns
    // the paths of the files that were successfully extracted.
    private static IEnumerable<string> ExtractAssemblies(IEnumerable<string> resourceNames)
    {
        var extractedPaths = new List<string>();

        var dirName = Path.Combine(Utils.PersistentDataDirectory, "StockLibs");
        if (!Directory.Exists(dirName))
            Directory.CreateDirectory(dirName);

        foreach (var dll in resourceNames)
        {
            var dllPath = Path.Combine(dirName, GetAssemblyNameFromPath(dll));

            using (var stm = Assembly.GetExecutingAssembly().GetManifestResourceStream("VRCFaceTracking.TrackingLibs." + dll))
            {
                if (stm == null)
                {
                    // BUGFIX: the old copy loop did `if (stm == null) continue;` inside
                    // while(true), spinning forever when a resource was missing.
                    Logger.Error("Embedded resource not found: " + dll);
                    continue;
                }

                try
                {
                    using (Stream outFile = File.Create(dllPath))
                        stm.CopyTo(outFile);

                    extractedPaths.Add(dllPath);
                }
                catch (Exception e)
                {
                    Logger.Error("Failed to get DLL: " + e.Message);
                }
            }
        }
        return extractedPaths;
    }

    // "SRanipal.nanomsg.dll" -> "nanomsg.dll": second-to-last dot-separated token + ".dll".
    private static string GetAssemblyNameFromPath(string path)
    {
        var splitPath = path.Split('.').ToList();
        splitPath.Reverse();
        return splitPath[1] + ".dll";
    }

    [DllImport("kernel32", SetLastError = true, CharSet = CharSet.Unicode)]
    private static extern IntPtr LoadLibrary(string lpFileName);

    // Loads a native DLL; failures are logged rather than thrown so one bad
    // library doesn't prevent the rest from loading.
    private static void LoadAssembly(string path)
    {
        if (LoadLibrary(path) == IntPtr.Zero)
            Logger.Error("Unable to load library " + path);
        else
            Logger.Msg("Loaded library " + path);
    }
}
131 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Logger.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.ObjectModel;
3 |
4 | namespace VRCFaceTracking
5 | {
public static class Logger
{
    // UI console backing store: (formatted line, WPF brush color name) pairs.
    public static readonly ObservableCollection<Tuple<string, string>> ConsoleOutput =
        new ObservableCollection<Tuple<string, string>> { new Tuple<string, string>("Logger Initialized...", "White") };

    // Shared with BindingOperations.EnableCollectionSynchronization (see App ctor).
    public static readonly object ConsoleLock = new object();

    /// <summary>Logs an informational message in white.</summary>
    public static void Msg(string msgStr) => Log(msgStr, "", "White", null);

    /// <summary>Logs a warning in yellow with a [WARNING] prefix.</summary>
    public static void Warning(string warningStr) => Log(warningStr, "[WARNING] ", "Yellow", ConsoleColor.Yellow);

    /// <summary>Logs an error in red with an [ERROR] prefix.</summary>
    public static void Error(string errorStr) => Log(errorStr, "[ERROR] ", "Red", ConsoleColor.Red);

    // Shared implementation: timestamps the line, appends it to the UI collection,
    // and echoes it to the console (optionally colored).
    private static void Log(string message, string prefix, string uiColor, ConsoleColor? consoleColor)
    {
        var formattedStr = "[" + DateTime.Now.ToString("HH:mm:ss") + "] " + prefix + message;

        // BUGFIX: EnableCollectionSynchronization requires every writer to hold the
        // same lock the binding engine uses; the adds were previously unsynchronized.
        lock (ConsoleLock)
            ConsoleOutput.Add(new Tuple<string, string>(formattedStr, uiColor));

        if (consoleColor.HasValue)
            Console.ForegroundColor = consoleColor.Value;
        Console.WriteLine(formattedStr);
        if (consoleColor.HasValue)
            Console.ResetColor();
    }
}
42 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/MainStandalone.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Reflection;
5 | using System.Resources;
6 | using System.Runtime.InteropServices;
7 | using System.Threading;
8 | using System.Windows;
9 | using VRCFaceTracking.Assets.UI;
10 | using VRCFaceTracking.OSC;
11 |
12 | [assembly: AssemblyTitle("VRCFaceTracking")]
13 | [assembly: AssemblyDescription("Application to enable Face Tracking from within VRChat using OSC")]
14 | [assembly: AssemblyCompany("benaclejames")]
15 | [assembly: AssemblyProduct("VRCFaceTracking")]
16 | [assembly: AssemblyCopyright("Copyright © benaclejames 2022")]
17 | [assembly: ComVisible(false)]
18 | [assembly: AssemblyVersion("3.0.1")]
19 | [assembly: AssemblyFileVersion("3.0.1")]
20 | [assembly: NeutralResourcesLanguage("en")]
21 | [assembly: ThemeInfo(
22 | ResourceDictionaryLocation.None,
23 | ResourceDictionaryLocation.SourceAssembly
24 | )]
25 |
26 | namespace VRCFaceTracking
27 | {
public static class MainStandalone
{
    public static OscMain OscMain;

    // Drains every parameter flagged NeedsSend into OSC messages, clearing the flag
    // as it goes (deliberate side effect inside the Select).
    private static List ConstructMessages(IEnumerable parameters) =>
        parameters.Where(p => p.NeedsSend).Select(param =>
        {
            param.NeedsSend = false;
            return new OscMessage(param.OutputInfo.address, param.OscType, param.ParamValue);
        }).ToList();

    // Deferred LINQ query: re-evaluates Relevant on each enumeration, so toggles
    // are picked up without rebuilding the list.
    private static IEnumerable _relevantParams;
    private static int _relevantParamsCount = 416;

    private static string _ip = "127.0.0.1";
    private static int _inPort = 9001, _outPort = 9000;

    // Cooperative shutdown signal observed by the OSC loop, receive thread and UI loop.
    public static readonly CancellationTokenSource MasterCancellationTokenSource = new CancellationTokenSource();

    // Full shutdown: cancel workers, restore timer resolution, drop tracking
    // runtimes, hide the tray icon, and close the WPF application.
    public static void Teardown()
    {
        // Kill our threads
        MasterCancellationTokenSource.Cancel();

        Utils.TimeEndPeriod(1);
        Logger.Msg("VRCFT Standalone Exiting!");
        UnifiedLibManager.TeardownAllAndReset();
        Console.WriteLine("Shutting down");
        MainWindow.TrayIcon.Visible = false;
        Application.Current?.Shutdown();
    }

    // Entry point for the worker thread: sets everything up, then runs the OSC
    // send loop until cancellation. Does not return until shutdown.
    public static void Initialize()
    {
        Logger.Msg("VRCFT Initializing!");

        // Parse Arguments
        (_outPort, _ip, _inPort) = ArgsHandler.HandleArgs();

        // Load dependencies
        DependencyManager.Load();

        // Ensure OSC is enabled
        if (VRChat.ForceEnableOsc()) // If osc was previously not enabled
        {
            Logger.Warning("VRCFT detected OSC was disabled and automatically enabled it.");
            // If we were launched after VRChat
            if (VRChat.IsVRChatRunning())
                Logger.Error(
                    "However, VRChat was running while this change was made.\n" +
                    "If parameters do not update, please restart VRChat or manually enable OSC yourself in your avatar's expressions menu.");
        }

        // Initialize Tracking Runtimes
        UnifiedLibManager.Initialize();

        // Initialize Locals
        OscMain = new OscMain();
        var bindResults = OscMain.Bind(_ip, _outPort, _inPort);
        if (!bindResults.receiverSuccess)
            Logger.Error("Socket failed to bind to receiver port, please ensure it's not already in use by another program or specify a different one instead.");

        if (!bindResults.senderSuccess)
            Logger.Error("Socket failed to bind to sender port, please ensure it's not already in use by another program or specify a different one instead.");

        _relevantParams = UnifiedTrackingData.AllParameters.SelectMany(p => p.GetBase()).Where(param => param.Relevant);

        // Rebuild the relevant-parameter query whenever a new avatar config lands.
        ConfigParser.OnConfigLoaded += () =>
        {
            _relevantParams = UnifiedTrackingData.AllParameters.SelectMany(p => p.GetBase())
                .Where(param => param.Relevant);
            UnifiedTrackingData.LatestEyeData.ResetThresholds();
            _relevantParamsCount = _relevantParams.Count();
            Logger.Msg("Config file parsed successfully! " + _relevantParamsCount + " parameters loaded");
        };

        // Begin main OSC update loop
        Utils.TimeBeginPeriod(1); // 1ms timer resolution so the 10ms cadence is accurate
        while (!MasterCancellationTokenSource.IsCancellationRequested)
        {
            Thread.Sleep(10);

            if (_relevantParamsCount <= 0)
                continue;

            UnifiedTrackingData.OnUnifiedDataUpdated.Invoke(UnifiedTrackingData.LatestEyeData,
                UnifiedTrackingData.LatestLipData);

            // Pack dirty parameters into bundles kept under 4096 bytes each
            // (16-byte bundle header + per-message 4-byte size prefix).
            var messages = ConstructMessages(_relevantParams);
            while (messages.Count > 0)
            {
                var msgCount = 16;
                var msgList = new List();
                while (messages.Count > 0 && msgCount+messages[0].Data.Length+4 < 4096)
                {
                    msgList.Add(messages[0]);
                    msgCount += messages[0].Data.Length+4;
                    messages.RemoveAt(0);
                }
                var bundle = new OscBundle(msgList);
                OscMain.Send(bundle.Data);
            }
        }
    }
}
133 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/OSC/OSCBundle.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 |
5 | namespace VRCFaceTracking.OSC
6 | {
public class OscBundle
{
    // Fully serialized #bundle packet, ready to hand to the socket.
    public readonly byte[] Data;

    /// <summary>
    /// Serializes the given messages into a single OSC bundle:
    /// 8-byte "#bundle\0" header, 8-byte timetag, then each message prefixed
    /// with its big-endian 4-byte length.
    /// </summary>
    public OscBundle(IEnumerable<OscMessage> messages)
    {
        // Materialize once: the sequence is consumed twice (size pass + copy pass),
        // and re-enumerating a deferred query could change between passes.
        var messageList = messages.ToList();

        int size = messageList.Sum(param => param.Data.Length + 4);
        Data = new byte[16 + size]; // Include #bundle header and timetag
        Array.Copy(new byte[] {35, 98, 117, 110, 100, 108, 101, 0}, Data, 8);

        // Timetag: microseconds since the NTP epoch (1900-01-01), big-endian.
        // NOTE(review): the OSC spec defines timetags as 32.32 fixed-point NTP,
        // not microseconds — kept as-is since the receiver appears to ignore it.
        Int64 time = (Int64) (DateTime.UtcNow - new DateTime(1900, 1, 1)).TotalMilliseconds * 1000;
        var timeBytes = BitConverter.GetBytes(time);
        Array.Reverse(timeBytes);
        Array.Copy(timeBytes, 0, Data, 8, 8);

        // Now add bundle elements: big-endian length prefix, then message bytes.
        int ix = 16;
        foreach (var message in messageList)
        {
            var length = BitConverter.GetBytes(message.Data.Length);
            Array.Reverse(length);
            Array.Copy(length, 0, Data, ix, 4);
            ix += 4;

            Array.Copy(message.Data, 0, Data, ix, message.Data.Length);
            ix += message.Data.Length;
        }
    }
}
36 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/OSC/OSCMain.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Net;
4 | using System.Net.Sockets;
5 | using System.Text;
6 | using System.Threading;
7 |
8 | namespace VRCFaceTracking.OSC
9 | {
public static class OSCUtils
{
    /// <summary>
    /// Returns a copy of <paramref name="inputArr"/> padded to OSC spec:
    /// a null terminator is appended, then the buffer is zero-padded so the
    /// total length is the next multiple of four.
    /// </summary>
    public static byte[] EnsureCompliance(this byte[] inputArr)
    {
        // Smallest multiple of 4 that can hold the data plus its terminator.
        var paddedLength = (inputArr.Length / 4 + 1) * 4;

        // A fresh byte[] is zero-filled, so the terminator and padding come free.
        var compliant = new byte[paddedLength];
        Array.Copy(inputArr, compliant, inputArr.Length);
        return compliant;
    }
}
28 |
public class OscMain
{
    // One UDP socket pair for the process lifetime: sender -> VRChat, receiver <- VRChat.
    private static readonly Socket SenderClient = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
    private static readonly Socket ReceiverClient = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
    private static Thread _receiveThread;

    // Connects the sender, binds the receiver and starts the receive loop thread.
    // Returns per-socket flags so the caller can report exactly which bind failed.
    public (bool senderSuccess, bool receiverSuccess) Bind(string address, int outPort, int inPort)
    {
        (bool senderSuccess, bool receiverSuccess) = (false, false);
        try
        {
            SenderClient.Connect(new IPEndPoint(IPAddress.Parse(address), outPort));
            senderSuccess = true;
            ReceiverClient.Bind(new IPEndPoint(IPAddress.Parse(address), inPort));
            receiverSuccess = true;
            // 1s receive timeout so the loop can observe cancellation between packets.
            ReceiverClient.ReceiveTimeout = 1000;

            _receiveThread = new Thread(() =>
            {
                while (!MainStandalone.MasterCancellationTokenSource.IsCancellationRequested)
                    Recv();
            });
            _receiveThread.Start();
        }
        catch (Exception)
        {
            // Flags reflect how far we got before the exception.
            return (senderSuccess, receiverSuccess);
        }
        return (true, true);
    }

    // Blocks (up to ReceiveTimeout) for one datagram; only /avatar/change is acted on,
    // triggering a reparse of the new avatar's OSC config.
    private void Recv()
    {
        byte[] buffer = new byte[2048];
        try
        {
            ReceiverClient.Receive(buffer, buffer.Length, SocketFlags.None);
        }
        catch (SocketException)
        {
            // Ignore as this is most likely a timeout exception
            return;
        }
        var newMsg = new OscMessage(buffer);
        if (newMsg.Address == "/avatar/change")
            ConfigParser.ParseNewAvatar((string) newMsg.Value);
    }

    // Sends an already-encoded OSC packet (message or bundle) to the connected endpoint.
    public void Send(byte[] data) => SenderClient.Send(data, data.Length, SocketFlags.None);
}
79 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/OSC/OSCMessage.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 |
6 | namespace VRCFaceTracking.OSC
7 | {
public class OscMessage
{
    // Fully encoded message bytes (address + typetag + payload).
    public readonly byte[] Data;
    public readonly string Address;
    // Decoded payload when this message was parsed from bytes; null for built messages.
    public readonly object Value;

    // Encodes the address and ",<typeIdentifier>" typetag, both null-terminated
    // and padded to multiples of 4 per OSC spec. Payload is appended by callers.
    private OscMessage(string name, char typeIdentifier)
    {
        Address = name;

        var nameBytes = Encoding.ASCII.GetBytes(name).EnsureCompliance();
        var valueIdentBytes = Encoding.ASCII.GetBytes("," + typeIdentifier).EnsureCompliance();

        Data = Concat(nameBytes, valueIdentBytes);
    }

    // Returns a new array holding a followed by b.
    private static byte[] Concat(byte[] a, byte[] b)
    {
        var joined = new byte[a.Length + b.Length];
        Array.Copy(a, joined, a.Length);
        Array.Copy(b, 0, joined, a.Length, b.Length);
        return joined;
    }

    /// <summary>Int32 message ('i'); payload is big-endian per OSC spec.</summary>
    public OscMessage(string name, int value) : this(name, 'i')
    {
        var valueArr = BitConverter.GetBytes(value);
        Array.Reverse(valueArr);
        Data = Concat(Data, valueArr);
    }

    /// <summary>Float message ('f'); the double is narrowed to float before encoding.</summary>
    public OscMessage(string name, double value) : this(name, 'f')
    {
        var valueArr = BitConverter.GetBytes((float)value);
        Array.Reverse(valueArr);
        Data = Concat(Data, valueArr);
    }

    /// <summary>Bool message: encoded purely in the typetag ('T'/'F'), no payload.</summary>
    public OscMessage(string name, bool value) : this(name, value ? 'T' : 'F') {}

    /// <summary>Message with a pre-encoded payload; null payload means typetag only.</summary>
    public OscMessage(string name, char type, byte[] valueBytes) : this(name, type)
    {
        if (valueBytes == null) return;
        Data = Concat(Data, valueBytes);
    }

    /// <summary>
    /// Parses a received OSC message: extracts the address, the first typetag
    /// character, and decodes the payload into <see cref="Value"/>.
    /// Throws on unknown type identifiers.
    /// </summary>
    public OscMessage(byte[] bytes)
    {
        int iter = 0;

        // Address runs from the start to the first null byte.
        var addressBytes = new List<byte>();
        for (; iter < bytes.Length; iter++)
        {
            if (bytes[iter] == 0)
                break;

            addressBytes.Add(bytes[iter]);
        }

        Address = Encoding.ASCII.GetString(addressBytes.ToArray());

        // Skip padding until the ',' that opens the typetag string.
        for (; iter < bytes.Length; iter++)
        {
            if (bytes[iter] == ',')
            {
                iter++;
                break;
            }
        }

        byte type = bytes[iter];
        // The typetag block ",t\0\0" is 4 bytes starting one byte before `iter`,
        // so the payload begins at iter + 3.
        // BUGFIX: this was `iter += 2`, which read int/float payloads one byte
        // early (the string case silently compensated with an extra iter++).
        iter += 3;

        switch (type)
        {
            case 105: // 'i' — big-endian int32
                var intBytes = new byte[4];
                Array.Copy(bytes, iter, intBytes, 0, 4);
                Array.Reverse(intBytes);
                Value = BitConverter.ToInt32(intBytes, 0);
                break;
            case 102: // 'f' — big-endian float32
                var floatBytes = new byte[4];
                Array.Copy(bytes, iter, floatBytes, 0, 4);
                Array.Reverse(floatBytes);
                Value = BitConverter.ToSingle(floatBytes, 0);
                break;
            case 115: // 's' — null-terminated ASCII string
                var stringBytes = new List<byte>();
                for (; iter < bytes.Length; iter++)
                {
                    if (bytes[iter] == 0)
                        break;

                    stringBytes.Add(bytes[iter]);
                }

                Value = Encoding.ASCII.GetString(stringBytes.ToArray());
                break;
            case 70: // 'F' — false, no payload
                Value = false;
                break;
            case 84: // 'T' — true, no payload
                Value = true;
                break;
            default:
                throw new Exception("Unknown type identifier: " + type + " for name " + Address);
        }
    }
}
126 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/OSC/OSCParams.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 |
5 | namespace VRCFaceTracking.OSC
6 | {
public class OSCParams
{
    // A single OSC-addressable avatar parameter holding its encoded value bytes.
    public class BaseParam
    {
        private readonly string _paramName;

        // Encoded value. Setting is a no-op when the param is irrelevant to the
        // current avatar or the bytes are unchanged; otherwise flags for sending.
        public byte[] ParamValue
        {
            get => _paramValue;
            set
            {
                if (!Relevant || _paramValue.SequenceEqual(value)) return;

                _paramValue = value;
                NeedsSend = true;
            }
        }

        private byte[] _paramValue = new byte[4];
        private readonly Type _paramType;
        public char OscType;
        // Relevant: present on the current avatar. NeedsSend: dirty flag drained
        // by the OSC send loop.
        public bool Relevant, NeedsSend = true;
        public ConfigParser.InputOutputDef OutputInfo;

        public BaseParam(string name, Type type)
        {
            _paramName = name;
            _paramType = type;
            OscType = Utils.TypeConversions[type].oscType;
        }

        // Re-binds this param against a freshly parsed avatar config: relevant only
        // if the avatar declares an input with the same name and CLR type.
        public virtual void ResetParam(ConfigParser.Parameter[] newParams)
        {
            var compatibleParam =
                newParams.FirstOrDefault(param => param.name == _paramName && param.input.Type == _paramType);
            if (compatibleParam != null)
            {
                Relevant = true;
                OutputInfo = compatibleParam.input;
            }
            else
            {
                Relevant = false;
                OutputInfo = null;
            }
        }
    }

    // Float param: encodes the value big-endian into the base byte buffer.
    public class FloatBaseParam : BaseParam
    {
        public FloatBaseParam(string name) : base(name, typeof(float))
        {
        }

        public new float ParamValue
        {
            set
            {
                var valueArr = BitConverter.GetBytes(value);
                Array.Reverse(valueArr);
                base.ParamValue = valueArr;
            }
        }
    }

    // Bool param: the value lives entirely in the OSC typetag ('T'/'F'), no payload.
    public class BoolBaseParam : BaseParam
    {
        public BoolBaseParam(string name) : base(name, typeof(bool))
        {
        }

        public new bool ParamValue
        {
            set
            {
                OscType = value ? 'T' : 'F';
                // NOTE(review): unlike BaseParam's setter, this does not check
                // Relevant or skip unchanged values — confirm that is intentional.
                NeedsSend = true;
            }
        }
    }

    // Encodes a float in [0,1] (optionally signed via a companion "Negative" bool)
    // across a set of bool params forming a binary number.
    public class BinaryBaseParameter : FloatBaseParam
    {
        public new double ParamValue
        {
            set
            {
                // If the value is negative, make it positive
                if (!_negativeParam.Relevant &&
                    value < 0) // If the negative parameter isn't set, cut the negative values
                    return;

                // Ensure value going into the bitwise shifts is between 0 and 1
                var adjustedValue = Math.Abs(value);

                // Scale into the integer range, then fan each bit out to its bool param.
                var bigValue = (int) (adjustedValue * (_maxPossibleBinaryInt - 1));

                foreach (var boolChild in _params)
                    boolChild.Value.ParamValue = ((bigValue >> boolChild.Key) & 1) == 1;

                _negativeParam.ParamValue = value < 0;
            }
        }

        protected readonly Dictionary
            _params = new Dictionary(); // Int represents binary steps

        // Companion bool carrying the sign of the encoded value.
        protected readonly BoolBaseParam _negativeParam;
        private int _maxPossibleBinaryInt;
        private readonly string _paramName;

        /* Pretty complicated, but let me try to explain...
         * As with other ResetParam functions, the purpose of this function is to reset all the parameters.
         * Since we don't actually know what parameters we'll be needing for this new avatar, nor do we know if the parameters we currently have are valid
         * it's just easier to just reset everything.
         *
         * Step 1) Find all valid parameters on the new avatar that start with the name of this binary param, and end with a number.
         *
         * Step 2) Find the binary steps for that number. That's the number of shifts we need to do. That number could be 8, and it's steps would be 3 as it's 3 steps away from zero in binary
         * This also makes sure the number is a valid base2-compatible number
         *
         * Step 3) Calculate the maximum possible value for the discovered binary steps, then subtract 1 since we count from 0.
         *
         * Step 4) Create each parameter literal that'll be responsible for actually changing parameters. It's output data will be multiplied by the highest possible
         * binary number since we can safely assume the highest possible input float will be 1.0. Then we bitwise shift by the binary steps discovered in step 2.
         * Finally, we use a combination of bitwise AND to get whether the designated index for this param is 1 or 0.
         */
        public override void ResetParam(ConfigParser.Parameter[] newParams)
        {
            _params.Clear();
            _negativeParam.ResetParam(newParams);

            // Get all parameters starting with this parameter's name, and of type bool
            var boolParams = newParams.Where(p => p.input.Type == typeof(bool) && p.name.StartsWith(_paramName));

            var paramsToCreate = new Dictionary();
            foreach (var param in boolParams)
            {
                // Cut the parameter name to get the index
                if (!int.TryParse(param.name.Substring(_paramName.Length), out var index)) continue;
                // Get the shift steps
                var binaryIndex = GetBinarySteps(index);
                // If this index has a shift step, create the parameter
                if (binaryIndex.HasValue)
                    paramsToCreate.Add(param.name, binaryIndex.Value);
            }

            if (paramsToCreate.Count == 0) return;

            // Calculate the highest possible binary number
            _maxPossibleBinaryInt = (int) Math.Pow(2, paramsToCreate.Values.Count);
            foreach (var param in paramsToCreate)
            {
                var newBool = new BoolBaseParam(param.Key);
                newBool.ResetParam(newParams);
                _params.Add(param.Value, newBool);
            }
        }

        // This serves both as a test to make sure this index is in the binary sequence, but also returns how many bits we need to shift to find it
        private static int? GetBinarySteps(int index)
        {
            var currSeqItem = 1;
            for (var i = 0; i < index; i++)
            {
                if (currSeqItem == index)
                    return i;
                currSeqItem *= 2;
            }

            return null;
        }

        public BinaryBaseParameter(string paramName) : base(paramName)
        {
            _paramName = paramName;
            _negativeParam = new BoolBaseParam(paramName + "Negative");
        }
    }
}
187 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/Eye/EyeTrackingParams.cs:
--------------------------------------------------------------------------------
1 | namespace VRCFaceTracking.Params.Eye
2 | {
    /// <summary>
    /// Static registry of every eye-tracking parameter that can be driven on an avatar:
    /// XY gaze, widen/squeeze, dilation and eyelid parameters, plus status flags.
    /// </summary>
    public static class EyeTrackingParams
    {
        public static readonly IParameter[] ParameterList = {
            #region XYParams

            new XYParameter(v2 => v2.Combined.Look, "EyesX", "EyesY"),
            new XYParameter(v2 => v2.Left.Look, "LeftEyeX", "LeftEyeY"),
            new XYParameter(v2 => v2.Right.Look, "RightEyeX", "RightEyeY"),

            #endregion

            #region Widen

            // Combined widen takes the larger of the two per-eye widen values.
            new EParam(v2 => v2.Left.Widen > v2.Right.Widen ? v2.Left.Widen : v2.Right.Widen, "EyesWiden"),
            new EParam(v2 => v2.Left.Widen, "LeftEyeWiden"),
            new EParam(v2 => v2.Right.Widen, "RightEyeWiden"),

            #endregion

            #region Squeeze

            new EParam(v2 => v2.Combined.Squeeze, "EyesSqueeze"),
            new EParam(v2 => v2.Left.Squeeze, "LeftEyeSqueeze"),
            new EParam(v2 => v2.Right.Squeeze, "RightEyeSqueeze"),

            #endregion

            #region Dilation

            new EParam(v2 => v2.EyesDilation, "EyesDilation"),
            new EParam(v2 => v2.EyesPupilDiameter, "EyesPupilDiameter"),

            #endregion

            #region EyeLid

            new EParam(v2 => v2.Left.Openness, "LeftEyeLid"),
            new EParam(v2 => v2.Right.Openness, "RightEyeLid"),
            new EParam(v2 => (v2.Left.Openness + v2.Right.Openness)/2, "CombinedEyeLid"),

            #endregion

            #region EyeLidExpanded

            // These pack two signals into one float: openness is remapped to 0..0.8 and,
            // when the eye is widening, widen is remapped to 0.8..1. Binary variant
            // creation is skipped (last arg true) because dedicated BinaryParameters
            // with their own packing are registered further below.
            new EParam((v2, eye) =>
            {
                if (v2.Left.Widen > 0)
                    return NormalizeFloat(0, 1, 0.8f, 1, v2.Left.Widen);
                return NormalizeFloat(0, 1, 0, 0.8f, v2.Left.Openness);
            }, "LeftEyeLidExpanded", 0.5f, true),

            new EParam((v2, eye) =>
            {
                if (v2.Right.Widen > 0)
                    return NormalizeFloat(0, 1, 0.8f, 1, v2.Right.Widen);
                return NormalizeFloat(0, 1, 0, 0.8f, v2.Right.Openness);
            }, "RightEyeLidExpanded", 0.5f, true),

            new EParam((v2, eye) =>
            {
                if (v2.Combined.Widen > 0)
                    return NormalizeFloat(0, 1, 0.8f, 1, v2.Combined.Widen);
                return NormalizeFloat(0, 1, 0, 0.8f, v2.Combined.Openness);
            }, "CombinedEyeLidExpanded", 0.5f, true),

            #endregion

            #region EyeLidExpandedSqueeze

            // Same packing as EyeLidExpanded, with squeeze additionally mapped onto
            // the negative range (0..-1), so: -1 squeeze .. 0..0.8 openness .. 0.8..1 widen.
            new EParam((v2, eye) =>
            {
                if (v2.Left.Widen > 0)
                    return NormalizeFloat(0, 1, 0.8f, 1, v2.Left.Widen);
                if (v2.Left.Squeeze > 0)
                    return v2.Left.Squeeze * -1;
                return NormalizeFloat(0, 1, 0, 0.8f, v2.Left.Openness);
            } ,"LeftEyeLidExpandedSqueeze", 0.5f, true),

            new EParam((v2, eye) =>
            {
                if (v2.Right.Widen > 0)
                    return NormalizeFloat(0, 1, 0.8f, 1, v2.Right.Widen);
                if (v2.Right.Squeeze > 0)
                    return v2.Right.Squeeze * -1;
                return NormalizeFloat(0, 1, 0, 0.8f, v2.Right.Openness);
            } ,"RightEyeLidExpandedSqueeze", 0.5f, true),

            new EParam((v2, eye) =>
            {
                if (v2.Combined.Widen > 0)
                    return NormalizeFloat(0, 1, 0.8f, 1, v2.Combined.Widen);
                if (v2.Combined.Squeeze > 0)
                    return v2.Combined.Squeeze * -1;
                return NormalizeFloat(0, 1, 0, 0.8f, v2.Combined.Openness);
            } ,"CombinedEyeLidExpandedSqueeze", 0.5f, true),

            #endregion

            #region EyeLidExpanded Binary

            // Binary (multi-bit) variants: the raw widen or openness value is sent
            // unpacked; the WidenToggle params below disambiguate which one it is.
            new BinaryParameter((v2, eye) =>
            {
                if (v2.Left.Widen > 0)
                    return v2.Left.Widen;
                return v2.Left.Openness;
            }, "LeftEyeLidExpanded"),

            new BinaryParameter((v2, eye) =>
            {
                if (v2.Right.Widen > 0)
                    return v2.Right.Widen;
                return v2.Right.Openness;
            }, "RightEyeLidExpanded"),

            new BinaryParameter((v2, eye) =>
            {
                if (v2.Combined.Widen > 0)
                    return v2.Combined.Widen;
                return v2.Combined.Openness;
            }, "CombinedEyeLidExpanded"),

            #endregion

            #region EyeLidExpandedSqueeze Binary

            new BinaryParameter(v2 =>
            {
                if (v2.Left.Widen > 0)
                    return v2.Left.Widen;
                if (v2.Left.Squeeze > 0)
                    return v2.Left.Squeeze;
                return v2.Left.Openness;
            }, "LeftEyeLidExpandedSqueeze"),

            new BinaryParameter(v2 =>
            {
                if (v2.Right.Widen > 0)
                    return v2.Right.Widen;
                if (v2.Right.Squeeze > 0)
                    return v2.Right.Squeeze;
                return v2.Right.Openness;
            }, "RightEyeLidExpandedSqueeze"),

            new BinaryParameter((v2, eye) =>
            {
                if (v2.Combined.Widen > 0)
                    return v2.Combined.Widen;
                if (v2.Combined.Squeeze > 0)
                    return v2.Combined.Squeeze;
                return v2.Combined.Openness;
            }, "CombinedEyeLidExpandedSqueeze"),

            #endregion

            #region EyeLidExpandedSupplemental

            // These parameters are used to distinguish when EyeLidExpanded / EyeLidExpandedSqueeze
            // is returning a value as a Widen or Squeeze. Intended for the Bool or Binary param variant.
            new BoolParameter(v2 => v2.Left.Widen > 0, "LeftEyeWidenToggle"),
            new BoolParameter(v2 => v2.Right.Widen > 0, "RightEyeWidenToggle"),
            new BoolParameter(v2 => v2.Combined.Widen > 0, "EyesWidenToggle"),

            new BoolParameter(v2 => v2.Left.Squeeze > 0, "LeftEyeSqueezeToggle"),
            new BoolParameter(v2 => v2.Right.Squeeze > 0, "RightEyeSqueezeToggle"),
            new BoolParameter(v2 => v2.Combined.Squeeze > 0, "EyesSqueezeToggle"),

            #endregion

            #region Status

            // True while the eye tracking module reports itself active.
            new BoolParameter(v2 => UnifiedLibManager.EyeStatus.Equals(ModuleState.Active), "EyeTrackingActive"),

            #endregion
        };

        // Linearly remaps `value` from [minInput, maxInput] onto [minOutput, maxOutput].
        // Anchored at the max end: value == maxInput yields exactly maxOutput,
        // value == minInput yields minOutput.
        private static float NormalizeFloat(float minInput, float maxInput, float minOutput, float maxOutput,
            float value) => (maxOutput - minOutput) / (maxInput - minInput) * (value - maxInput) + maxOutput;
    }
182 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/IParameter.cs:
--------------------------------------------------------------------------------
1 | using VRCFaceTracking.OSC;
2 |
3 | namespace VRCFaceTracking.Params
4 | {
    /// <summary>
    /// Contract for any avatar parameter container that can be re-bound against a
    /// parsed avatar config and expose its underlying OSC parameters.
    /// </summary>
    public interface IParameter
    {
        /// <summary>Re-binds this parameter against the freshly parsed avatar parameter list.</summary>
        void ResetParam(ConfigParser.Parameter[] newParams);

        /// <summary>Returns every low-level OSC parameter this container manages.</summary>
        OSCParams.BaseParam[] GetBase();
    }
11 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/Lip/LipShapeConversion.cs:
--------------------------------------------------------------------------------
1 | using System.Collections.Generic;
2 | using ViveSR.anipal.Lip;
3 | using System.Linq;
4 |
5 | namespace VRCFaceTracking.Params.Lip
6 | {
    /// <summary>
    /// A virtual lip shape computed from one or more raw SRanipal shape weightings.
    /// </summary>
    public interface ICombinedShape
    {
        /// <summary>Computes the blended value from the full array of raw shape weightings (indexed by LipShape_v2).</summary>
        float GetBlendedLipShape(float[] inputMap);
    }
11 |
12 | public class PositiveNegativeShape : ICombinedShape
13 | {
14 | private readonly int _positiveShape, _negativeShape;
15 | private float _positiveCache, _negativeCache;
16 | private bool _steps;
17 |
18 | public PositiveNegativeShape(LipShape_v2 positiveShape, LipShape_v2 negativeShape, bool steps = false)
19 | {
20 | _positiveShape = (int)positiveShape;
21 | _negativeShape = (int)negativeShape;
22 | _steps = steps;
23 | }
24 |
25 | public float GetBlendedLipShape(float[] inputMap)
26 | {
27 | _positiveCache = inputMap[_positiveShape];
28 | _negativeCache = inputMap[_negativeShape] * -1;
29 | return _steps ? (_positiveCache - _negativeCache) - 1 : _positiveCache + _negativeCache;
30 | }
31 | }
32 |
33 | public class PositiveNegativeAveragedShape : ICombinedShape
34 | {
35 | private readonly int[] _positiveShapes, _negativeShapes;
36 | private readonly float[] _positiveCache, _negativeCache;
37 | private readonly int _positiveCount, _negativeCount;
38 | private readonly bool _useMax;
39 |
40 | public PositiveNegativeAveragedShape(LipShape_v2[] positiveShapes, LipShape_v2[] negativeShapes)
41 | {
42 | _positiveShapes = positiveShapes.Select(s => (int)s).ToArray();
43 | _negativeShapes = negativeShapes.Select(s => (int)s).ToArray();
44 | _positiveCache = new float[positiveShapes.Length];
45 | _negativeCache = new float[negativeShapes.Length];
46 | _positiveCount = positiveShapes.Length;
47 | _negativeCount = negativeShapes.Length;
48 | }
49 |
50 | public PositiveNegativeAveragedShape(LipShape_v2[] positiveShapes, LipShape_v2[] negativeShapes, bool useMax)
51 | {
52 | _positiveShapes = positiveShapes.Select(s => (int)s).ToArray();
53 | _negativeShapes = negativeShapes.Select(s => (int)s).ToArray();
54 | _positiveCache = new float[positiveShapes.Length];
55 | _negativeCache = new float[negativeShapes.Length];
56 | _positiveCount = positiveShapes.Length;
57 | _negativeCount = negativeShapes.Length;
58 | _useMax = useMax;
59 | }
60 |
61 | public float GetBlendedLipShape(float[] inputMap)
62 | {
63 | if (!_useMax)
64 | {
65 | float positive = 0;
66 | float negative = 0;
67 |
68 | for (int i = 0; i < _positiveCount; i++) {
69 | _positiveCache[i] = inputMap[_positiveShapes[i]];
70 | positive += _positiveCache[i];
71 | }
72 |
73 | for (int i = 0; i < _negativeCount; i++) {
74 | _negativeCache[i] = inputMap[_negativeShapes[i]] * -1;
75 | negative += _negativeCache[i];
76 | }
77 |
78 | return (positive / _positiveCount) + (negative / _negativeCount);
79 | }
80 |
81 | for (int i = 0; i < _positiveCount; i++) {
82 | _positiveCache[i] = inputMap[_positiveShapes[i]];
83 | }
84 |
85 | for (int i = 0; i < _negativeCount; i++) {
86 | _negativeCache[i] = inputMap[_negativeShapes[i]];
87 | }
88 |
89 | return _positiveCache.Max() + (-1) * _negativeCache.Max();
90 | }
91 | }
92 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/Lip/LipShapeMerger.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using ViveSR.anipal.Lip;
5 | using VRCFaceTracking.Params.Lip;
6 |
7 | namespace VRCFaceTracking.Params.LipMerging
8 | {
9 | public static class LipShapeMerger
10 | {
11 | private static readonly Dictionary MergedShapes =
12 | new Dictionary
13 | {
14 | {"JawX", new PositiveNegativeShape(LipShape_v2.JawRight, LipShape_v2.JawLeft)},
15 | {"MouthUpper", new PositiveNegativeShape(LipShape_v2.MouthUpperRight, LipShape_v2.MouthUpperLeft)},
16 | {"MouthLower", new PositiveNegativeShape(LipShape_v2.MouthLowerRight, LipShape_v2.MouthLowerLeft)},
17 | {"MouthX", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperRight, LipShape_v2.MouthLowerRight}, new LipShape_v2[]{LipShape_v2.MouthUpperLeft, LipShape_v2.MouthLowerLeft}, true)},
18 | {"SmileSadRight", new PositiveNegativeShape(LipShape_v2.MouthSmileRight, LipShape_v2.MouthSadRight)},
19 | {"SmileSadLeft", new PositiveNegativeShape(LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSadLeft)},
20 | {"SmileSad", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthSadLeft, LipShape_v2.MouthSadRight})},
21 | {"TongueY", new PositiveNegativeShape(LipShape_v2.TongueUp, LipShape_v2.TongueDown)},
22 | {"TongueX", new PositiveNegativeShape(LipShape_v2.TongueRight, LipShape_v2.TongueLeft)},
23 | {"PuffSuckRight", new PositiveNegativeShape(LipShape_v2.CheekPuffRight, LipShape_v2.CheekSuck)},
24 | {"PuffSuckLeft", new PositiveNegativeShape(LipShape_v2.CheekPuffLeft, LipShape_v2.CheekSuck)},
25 | {"PuffSuck", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.CheekPuffLeft, LipShape_v2.CheekPuffRight}, new LipShape_v2[]{LipShape_v2.CheekSuck}, true)},
26 |
27 | //Additional combined shapes created with the help of the VRCFT Discord!
28 |
29 | //JawOpen based params
30 | {"JawOpenApe", new PositiveNegativeShape(LipShape_v2.JawOpen, LipShape_v2.MouthApeShape)},
31 | {"JawOpenPuff", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.JawOpen}, new LipShape_v2[]{LipShape_v2.CheekPuffLeft, LipShape_v2.CheekPuffRight})},
32 | {"JawOpenPuffRight", new PositiveNegativeShape(LipShape_v2.JawOpen, LipShape_v2.CheekPuffRight)},
33 | {"JawOpenPuffLeft", new PositiveNegativeShape(LipShape_v2.JawOpen, LipShape_v2.CheekPuffLeft)},
34 | {"JawOpenSuck", new PositiveNegativeShape(LipShape_v2.JawOpen, LipShape_v2.CheekSuck)},
35 | {"JawOpenForward", new PositiveNegativeShape(LipShape_v2.JawOpen, LipShape_v2.JawForward)},
36 | {"JawOpenOverlay", new PositiveNegativeShape(LipShape_v2.JawOpen, LipShape_v2.MouthLowerOverlay)},
37 |
38 | //MouthUpperUpRight based params
39 | {"MouthUpperUpRightUpperInside", new PositiveNegativeShape(LipShape_v2.MouthUpperUpRight, LipShape_v2.MouthUpperInside)},
40 | {"MouthUpperUpRightPuffRight", new PositiveNegativeShape(LipShape_v2.MouthUpperUpRight, LipShape_v2.CheekPuffRight)},
41 | {"MouthUpperUpRightApe", new PositiveNegativeShape(LipShape_v2.MouthUpperUpRight, LipShape_v2.MouthApeShape)},
42 | {"MouthUpperUpRightPout", new PositiveNegativeShape(LipShape_v2.MouthUpperUpRight, LipShape_v2.MouthPout)},
43 | {"MouthUpperUpRightOverlay", new PositiveNegativeShape(LipShape_v2.MouthUpperUpRight, LipShape_v2.MouthLowerOverlay)},
44 | {"MouthUpperUpRightSuck", new PositiveNegativeShape(LipShape_v2.MouthUpperUpRight, LipShape_v2.CheekSuck)},
45 |
46 | //MouthUpperUpLeft based params
47 | {"MouthUpperUpLeftUpperInside", new PositiveNegativeShape(LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperInside)},
48 | {"MouthUpperUpLeftPuffLeft", new PositiveNegativeShape(LipShape_v2.MouthUpperUpLeft, LipShape_v2.CheekPuffLeft)},
49 | {"MouthUpperUpLeftApe", new PositiveNegativeShape(LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthApeShape)},
50 | {"MouthUpperUpLeftPout", new PositiveNegativeShape(LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthPout)},
51 | {"MouthUpperUpLeftOverlay", new PositiveNegativeShape(LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthLowerOverlay)},
52 | {"MouthUpperUpLeftSuck", new PositiveNegativeShape(LipShape_v2.MouthUpperUpLeft, LipShape_v2.CheekSuck)},
53 |
54 | // MouthUpperUp Left+Right base params
55 | {"MouthUpperUpUpperInside", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.MouthUpperInside })},
56 | {"MouthUpperUpInside", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.MouthUpperInside, LipShape_v2.MouthLowerInside}, true)},
57 | {"MouthUpperUpPuff", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.CheekPuffLeft, LipShape_v2.CheekPuffRight})},
58 | {"MouthUpperUpPuffLeft", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.CheekPuffLeft})},
59 | {"MouthUpperUpPuffRight", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.CheekPuffRight})},
60 | {"MouthUpperUpApe", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.MouthApeShape})},
61 | {"MouthUpperUpPout", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.MouthPout})},
62 | {"MouthUpperUpOverlay", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.MouthLowerOverlay})},
63 | {"MouthUpperUpSuck", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthUpperUpLeft, LipShape_v2.MouthUpperUpRight}, new LipShape_v2[]{LipShape_v2.CheekSuck})},
64 |
65 | //MouthLowerDownRight based params
66 | {"MouthLowerDownRightLowerInside", new PositiveNegativeShape(LipShape_v2.MouthLowerDownRight, LipShape_v2.MouthLowerInside)},
67 | {"MouthLowerDownRightPuffRight", new PositiveNegativeShape(LipShape_v2.MouthLowerDownRight, LipShape_v2.CheekPuffRight)},
68 | {"MouthLowerDownRightApe", new PositiveNegativeShape(LipShape_v2.MouthLowerDownRight, LipShape_v2.MouthApeShape)},
69 | {"MouthLowerDownRightPout", new PositiveNegativeShape(LipShape_v2.MouthLowerDownRight, LipShape_v2.MouthPout)},
70 | {"MouthLowerDownRightOverlay", new PositiveNegativeShape(LipShape_v2.MouthLowerDownRight, LipShape_v2.MouthLowerOverlay)},
71 | {"MouthLowerDownRightSuck", new PositiveNegativeShape(LipShape_v2.MouthLowerDownRight, LipShape_v2.CheekSuck)},
72 |
73 | //MouthLowerDownLeft based params
74 | {"MouthLowerDownLeftLowerInside", new PositiveNegativeShape(LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerInside)},
75 | {"MouthLowerDownLeftPuffLeft", new PositiveNegativeShape(LipShape_v2.MouthLowerDownLeft, LipShape_v2.CheekPuffLeft)},
76 | {"MouthLowerDownLeftApe", new PositiveNegativeShape(LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthApeShape)},
77 | {"MouthLowerDownLeftPout", new PositiveNegativeShape(LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthPout)},
78 | {"MouthLowerDownLeftOverlay", new PositiveNegativeShape(LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerOverlay)},
79 | {"MouthLowerDownLeftSuck", new PositiveNegativeShape(LipShape_v2.MouthLowerDownLeft, LipShape_v2.CheekSuck)},
80 |
81 | // MouthLowerDown Left+Right base params
82 | {"MouthLowerDownLowerInside", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.MouthLowerInside})},
83 | {"MouthLowerDownInside", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.MouthUpperInside, LipShape_v2.MouthLowerInside}, true)},
84 | {"MouthLowerDownPuff", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.CheekPuffLeft, LipShape_v2.CheekPuffRight})},
85 | {"MouthLowerDownPuffLeft", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.CheekPuffLeft})},
86 | {"MouthLowerDownPuffRight", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.CheekPuffRight})},
87 | {"MouthLowerDownApe", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.MouthApeShape})},
88 | {"MouthLowerDownPout", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.MouthPout})},
89 | {"MouthLowerDownOverlay", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.MouthLowerOverlay})},
90 | {"MouthLowerDownSuck", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthLowerDownLeft, LipShape_v2.MouthLowerDownRight}, new LipShape_v2[]{LipShape_v2.CheekSuck})},
91 |
92 | // MouthInsideOverturn based params
93 | {"MouthUpperInsideOverturn", new PositiveNegativeShape(LipShape_v2.MouthUpperInside, LipShape_v2.MouthUpperOverturn)},
94 | {"MouthLowerInsideOverturn", new PositiveNegativeShape(LipShape_v2.MouthLowerInside, LipShape_v2.MouthLowerOverturn)},
95 |
96 | //SmileRight based params; Recommend using these if you already have SmileSadLeft setup!
97 | {"SmileRightUpperOverturn", new PositiveNegativeShape(LipShape_v2.MouthSmileRight, LipShape_v2.MouthUpperOverturn)},
98 | {"SmileRightLowerOverturn", new PositiveNegativeShape(LipShape_v2.MouthSmileRight, LipShape_v2.MouthLowerOverturn)},
99 | {"SmileRightOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn, LipShape_v2.MouthLowerOverturn})},
100 | {"SmileRightApe", new PositiveNegativeShape(LipShape_v2.MouthSmileRight, LipShape_v2.MouthApeShape)},
101 | {"SmileRightOverlay", new PositiveNegativeShape(LipShape_v2.MouthSmileRight, LipShape_v2.MouthLowerOverlay)},
102 | {"SmileRightPout", new PositiveNegativeShape(LipShape_v2.MouthSmileRight, LipShape_v2.MouthPout)},
103 |
104 | //SmileLeft based params; Recommend using these if you already have SmileSadRight setup!
105 | {"SmileLeftUpperOverturn", new PositiveNegativeShape(LipShape_v2.MouthSmileLeft, LipShape_v2.MouthUpperOverturn)},
106 | {"SmileLeftLowerOverturn", new PositiveNegativeShape(LipShape_v2.MouthSmileLeft, LipShape_v2.MouthLowerOverturn)},
107 | {"SmileLeftOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn, LipShape_v2.MouthLowerOverturn})},
108 | {"SmileLeftApe", new PositiveNegativeShape(LipShape_v2.MouthSmileLeft, LipShape_v2.MouthApeShape)},
109 | {"SmileLeftOverlay", new PositiveNegativeShape(LipShape_v2.MouthSmileLeft, LipShape_v2.MouthLowerOverlay)},
110 | {"SmileLeftPout", new PositiveNegativeShape(LipShape_v2.MouthSmileLeft, LipShape_v2.MouthPout)},
111 |
112 | //Smile Left+Right based params
113 | {"SmileUpperOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn})},
114 | {"SmileLowerOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthLowerOverturn})},
115 | {"SmileOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn, LipShape_v2.MouthLowerOverturn})},
116 | {"SmileApe", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthApeShape})},
117 | {"SmileOverlay", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthLowerOverlay})},
118 | {"SmilePout", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.MouthSmileLeft, LipShape_v2.MouthSmileRight}, new LipShape_v2[]{LipShape_v2.MouthPout})},
119 |
120 | //CheekPuffRight based params
121 | {"PuffRightUpperOverturn", new PositiveNegativeShape(LipShape_v2.CheekPuffRight, LipShape_v2.MouthUpperOverturn)},
122 | {"PuffRightLowerOverturn", new PositiveNegativeShape(LipShape_v2.CheekPuffRight, LipShape_v2.MouthLowerOverturn)},
123 | {"PuffRightOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.CheekPuffRight}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn, LipShape_v2.MouthLowerOverturn}, true)},
124 |
125 | //CheekPuffLeft based params
126 | {"PuffLeftUpperOverturn", new PositiveNegativeShape(LipShape_v2.CheekPuffLeft, LipShape_v2.MouthUpperOverturn)},
127 | {"PuffLeftLowerOverturn", new PositiveNegativeShape(LipShape_v2.CheekPuffLeft, LipShape_v2.MouthLowerOverturn)},
128 | {"PuffLeftOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.CheekPuffLeft}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn, LipShape_v2.MouthLowerOverturn}, true)},
129 |
130 | //CheekPuff Left+Right based params
131 | {"PuffUpperOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.CheekPuffRight, LipShape_v2.CheekPuffLeft}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn})},
132 | {"PuffLowerOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.CheekPuffRight, LipShape_v2.CheekPuffLeft}, new LipShape_v2[]{LipShape_v2.MouthLowerOverturn})},
133 | {"PuffOverturn", new PositiveNegativeAveragedShape(new LipShape_v2[]{LipShape_v2.CheekPuffRight, LipShape_v2.CheekPuffLeft}, new LipShape_v2[]{LipShape_v2.MouthUpperOverturn, LipShape_v2.MouthLowerOverturn}, true)},
134 |
135 | //Combine both TongueSteps (-1 fully in, 0 on edge, 1 fully out)
136 | {"TongueSteps", new PositiveNegativeShape(LipShape_v2.TongueLongStep1, LipShape_v2.TongueLongStep2, true)},
137 | };
138 |
139 | // Make a list called LipParameters containing the results from both GetOptimizedLipParameters and GetAllLipParameters, and add GetLipActivatedStatus
140 | public static readonly IParameter[] AllLipParameters =
141 | GetAllLipShapes().Union(GetOptimizedLipParameters()).Union(GetLipActivatedStatus()).ToArray();
142 |
143 | public static bool IsLipShapeName(string name) => MergedShapes.ContainsKey(name) || Enum.TryParse(name, out LipShape_v2 shape);
144 |
145 | private static IEnumerable GetOptimizedLipParameters() => MergedShapes
146 | .Select(shape => new EParam((eye, lip) =>
147 | shape.Value.GetBlendedLipShape(lip.LatestShapes), shape.Key, 0.0f));
148 |
149 | private static IEnumerable GetAllLipShapes() =>
150 | ((LipShape_v2[]) Enum.GetValues(typeof(LipShape_v2))).ToList().Select(shape =>
151 | new EParam((eye, lip) => lip.LatestShapes[(int)shape],
152 | shape.ToString(), 0.0f));
153 |
154 | private static IEnumerable GetLipActivatedStatus() => new List
155 | {
156 | new BoolParameter(v2 => UnifiedLibManager.LipStatus.Equals(ModuleState.Active), "LipTrackingActive"),
157 | };
158 | }
159 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/ParamContainers.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using ViveSR.anipal.Lip;
5 | using VRCFaceTracking.OSC;
6 |
7 | namespace VRCFaceTracking.Params
8 | {
    /// <summary>
    /// A single float OSC parameter whose value is recomputed from the latest
    /// eye/lip tracking data on every unified data update.
    /// </summary>
    // NOTE(review): bare "Func" — the delegate's generic type arguments appear to have
    // been stripped in this copy of the source; restore them before building.
    public class FloatParameter : OSCParams.FloatBaseParam, IParameter
    {
        public FloatParameter(Func getValueFunc,
            string paramName)
            : base(paramName) =>
            UnifiedTrackingData.OnUnifiedDataUpdated += (eye, lip) =>
            {
                //if (!UnifiedLibManager.EyeEnabled && !UnifiedLibManager.LipEnabled) return;
                // A null result means "no update" — the previous value is kept.
                var value = getValueFunc.Invoke(eye, lip);
                if (value.HasValue)
                    ParamValue = value.Value;
            };

        /// <summary>This container manages exactly one underlying OSC parameter: itself.</summary>
        public OSCParams.BaseParam[] GetBase() => new OSCParams.BaseParam[] {this};
    }
24 |
    /// <summary>
    /// An X/Y pair of float OSC parameters driven together from a single vector selector.
    /// </summary>
    // NOTE(review): bare "Func" — generic type arguments appear stripped in this copy.
    public class XYParameter : XYParam, IParameter
    {
        public XYParameter(Func getValueFunc, string xParamName, string yParamName)
            : base(new OSCParams.FloatBaseParam(xParamName), new OSCParams.FloatBaseParam(yParamName)) =>
            UnifiedTrackingData.OnUnifiedDataUpdated += (eye, lip) =>
            {
                // A null result means "no update" — the previous value is kept.
                var value = getValueFunc.Invoke(eye, lip);
                if (value.HasValue)
                    ParamValue = value.Value;
            };

        /// <summary>Convenience overload for selectors that only need eye data.</summary>
        public XYParameter(Func getValueFunc, string xParamName, string yParamName)
            : this((eye, lip) => getValueFunc.Invoke(eye), xParamName, yParamName)
        {
        }

        public void ResetParam(ConfigParser.Parameter[] newParams) => ResetParams(newParams);

        public OSCParams.BaseParam[] GetBase() => new OSCParams.BaseParam[] {X, Y};
    }
45 |
    /// <summary>
    /// A single bool OSC parameter recomputed from the latest tracking data on
    /// every unified data update.
    /// </summary>
    // NOTE(review): bare "Func" — generic type arguments appear stripped in this copy.
    public class BoolParameter : OSCParams.BoolBaseParam, IParameter
    {
        public BoolParameter(Func getValueFunc,
            string paramName) : base(paramName) =>
            UnifiedTrackingData.OnUnifiedDataUpdated += (eye, lip) =>
            {
                // A null result means "no update" — the previous value is kept.
                var value = getValueFunc.Invoke(eye, lip);
                if (value.HasValue)
                    ParamValue = value.Value;
            };

        /// <summary>Convenience overload for selectors that only need eye data.</summary>
        public BoolParameter(Func getValueFunc, string paramName) : this(
            (eye, lip) => getValueFunc.Invoke(eye), paramName)
        {
        }

        public OSCParams.BaseParam[] GetBase()
        {
            return new OSCParams.BaseParam[] {this};
        }
    }
67 |
    /// <summary>
    /// Feeds a float selector into the base class's binary encoding, which spreads
    /// the value across the bool bit-parameters the avatar config exposes.
    /// </summary>
    // NOTE(review): bare "Func" — generic type arguments appear stripped in this copy.
    public class BinaryParameter : OSCParams.BinaryBaseParameter, IParameter
    {
        public BinaryParameter(Func getValueFunc,
            string paramName) : base(paramName)
        {
            UnifiedTrackingData.OnUnifiedDataUpdated += (eye, lip) =>
            {
                // A null result means "no update" — the previous value is kept.
                var value = getValueFunc.Invoke(eye, lip);
                if (value.HasValue)
                    ParamValue = value.Value;
            };
        }

        /// <summary>Convenience overload for selectors that only need eye data.</summary>
        public BinaryParameter(Func getValueFunc, string paramName) : this((eye, lip) => getValueFunc.Invoke(eye), paramName)
        {
        }

        public OSCParams.BaseParam[] GetBase()
        {
            // Flatten the base class's per-bit bool params plus the sign ("Negative")
            // param into a single array.
            OSCParams.BaseParam[] retParams = new OSCParams.BaseParam[_params.Count + 1];
            // Merge _params.Values and _negativeParam
            Array.Copy(_params.Values.ToArray(), retParams, _params.Count);
            retParams[_params.Count] = _negativeParam;
            return retParams;
        }
    }
94 |
    // EverythingParam, or EpicParam. You choose!
    // Contains a bool, float and binary parameter, all in one class with IParameter implemented.
    // NOTE(review): bare "Func"/"List" — generic type arguments appear stripped in this copy.
    public class EParam : IParameter
    {
        private readonly IParameter[] _parameter;
        private readonly string Name;

        /// <param name="minBoolThreshold">Threshold used to derive the bool variant from the float value.</param>
        /// <param name="skipBinaryParamCreation">Set when the caller registers its own BinaryParameter separately.</param>
        public EParam(Func getValueFunc, string paramName, float minBoolThreshold = 0.5f, bool skipBinaryParamCreation = false)
        {
            var paramLiterals = new List
            {
                // NOTE(review): bool variant is true when the value is BELOW the
                // threshold — confirm this polarity is intended.
                new BoolParameter((eye, lip) => getValueFunc.Invoke(eye, lip) < minBoolThreshold, paramName),
                new FloatParameter(getValueFunc, paramName),
            };

            if (!skipBinaryParamCreation)
                paramLiterals.Add(new BinaryParameter(getValueFunc, paramName));

            Name = paramName;
            _parameter = paramLiterals.ToArray();
        }

        /// <summary>Convenience overload for selectors that only need eye data.</summary>
        public EParam(Func getValueFunc, string paramName,
            float minBoolThreshold = 0.5f) : this((eye, lip) => getValueFunc.Invoke(eye), paramName, minBoolThreshold)
        {
        }

        OSCParams.BaseParam[] IParameter.GetBase() =>
            _parameter.SelectMany(p => p.GetBase()).ToArray();

        public void ResetParam(ConfigParser.Parameter[] newParams)
        {
            foreach (var param in _parameter)
                param.ResetParam(newParams);
        }
    }
131 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/Vector2.cs:
--------------------------------------------------------------------------------
1 | using System.Runtime.InteropServices;
2 |
3 | namespace VRCFaceTracking.Params
4 | {
    // Plain-data 2D vector (x, y) used for gaze / XY parameters.
    [StructLayout(LayoutKind.Sequential)]
    public struct Vector2
    {
        public float x;
        public float y;

        public Vector2(float x, float y)
        {
            this.x = x;
            this.y = y;
        }

        // Implicit conversion from Vector3, dropping the z component.
        public static implicit operator Vector2(Vector3 v) => new Vector2(v.x, v.y);

        // Component-wise scaling by a scalar.
        public static Vector2 operator *(Vector2 a, float d)
            => new Vector2(a.x * d, a.y * d);

        // The (0, 0) vector.
        public static Vector2 zero => new Vector2(0, 0);
    }
26 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/Vector3.cs:
--------------------------------------------------------------------------------
1 | using System.Runtime.InteropServices;
2 |
3 | namespace VRCFaceTracking.Params
4 | {
    // Plain-data 3D vector (x, y, z); sequential layout so it can be filled from native data.
    [StructLayout(LayoutKind.Sequential)]
    public struct Vector3
    {
        public float x;
        public float y;
        public float z;

        public Vector3(float x, float y, float z)
        {
            this.x = x;
            this.y = y;
            this.z = z;
        }

        // Flips the sign of the x component ONLY (despite the name suggesting a full
        // inversion) and returns the modified value. Note this mutates the instance
        // it is called on as well as returning a copy (struct value semantics).
        public Vector3 Invert()
        {
            x *= -1;

            return this;
        }
    }
26 | }
27 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Params/XYParam.cs:
--------------------------------------------------------------------------------
1 | using VRCFaceTracking.OSC;
2 |
3 | namespace VRCFaceTracking.Params
4 | {
5 | public class XYParam
6 | {
7 | public OSCParams.FloatBaseParam X, Y;
8 |
9 | protected Vector2 ParamValue
10 | {
11 | set
12 | {
13 | X.ParamValue = value.x;
14 | Y.ParamValue = value.y;
15 | }
16 | }
17 |
18 | protected XYParam(OSCParams.FloatBaseParam x, OSCParams.FloatBaseParam y)
19 | {
20 | X = x;
21 | Y = y;
22 | }
23 |
24 | protected void ResetParams(ConfigParser.Parameter[] newParams)
25 | {
26 | X.ResetParam(newParams);
27 | Y.ResetParam(newParams);
28 | }
29 | }
30 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/SDK/ExtTrackingModule.cs:
--------------------------------------------------------------------------------
using System;

namespace VRCFaceTracking
{
    /// <summary>
    /// Base contract for an external tracking module. Implementations report what
    /// they support, initialise their native backends, hand back an update-loop
    /// action, and tear themselves down on shutdown.
    /// </summary>
    public abstract class ExtTrackingModule
    {
        // Should UnifiedLibManager try to initialize this module if it's looking for a module that supports eye or lip.
        public virtual (bool SupportsEye, bool SupportsLip) Supported => (false, false);

        // Should the module be writing to UnifiedTrackingData for eye or lip tracking updates.
        public (ModuleState EyeState, ModuleState LipState) Status = (ModuleState.Uninitialized,
            ModuleState.Uninitialized);

        /// <summary>Attempts to bring up eye and/or lip tracking; returns which of the two succeeded.</summary>
        public abstract (bool eyeSuccess, bool lipSuccess) Initialize(bool eye, bool lip);

        /// <summary>Returns the loop body to be executed for this module's updates.</summary>
        public abstract Action GetUpdateThreadFunc();

        /// <summary>Shuts the module down and releases any native resources it holds.</summary>
        public abstract void Teardown();
    }
}
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Eye/SRanipal_EyeData.cs:
--------------------------------------------------------------------------------
//========= Copyright 2018, HTC Corporation. All rights reserved. ===========
using System.Runtime.InteropServices;
using Vector2 = VRCFaceTracking.Params.Vector2;
using Vector3 = VRCFaceTracking.Params.Vector3;

namespace ViveSR
{
    namespace anipal
    {
        namespace Eye
        {
            #region VerboseData
            // NOTE(review): every struct in this file is marshalled directly from the
            // native SRanipal runtime — field order, types and (misspelled) names are
            // part of the ABI and must not be changed.
            public enum EyeIndex { LEFT, RIGHT, }
            public enum GazeIndex { LEFT, RIGHT, COMBINE }

            /** @enum SingleEyeDataValidity
            An enum type for getting validity from the structure: eye data's bitmask
            */
            public enum SingleEyeDataValidity : int
            {
                /** The validity of the origin of gaze of the eye data */
                SINGLE_EYE_DATA_GAZE_ORIGIN_VALIDITY,
                /** The validity of the direction of gaze of the eye data */
                SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY,
                /** The validity of the diameter of gaze of the eye data */
                SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY,
                /** The validity of the openness of the eye data */
                SINGLE_EYE_DATA_EYE_OPENNESS_VALIDITY,
                /** The validity of normalized position of pupil */
                SINGLE_EYE_DATA_PUPIL_POSITION_IN_SENSOR_AREA_VALIDITY
            };

            public enum TrackingImprovement : int
            {
                TRACKING_IMPROVEMENT_USER_POSITION_HMD,
                TRACKING_IMPROVEMENT_CALIBRATION_CONTAINS_POOR_DATA,
                TRACKING_IMPROVEMENT_CALIBRATION_DIFFERENT_BRIGHTNESS,
                TRACKING_IMPROVEMENT_IMAGE_QUALITY,
                TRACKING_IMPROVEMENT_INCREASE_EYE_RELIEF,
            };

            [StructLayout(LayoutKind.Sequential)]
            public struct TrackingImprovements
            {
                // Number of valid entries in `items` (TrackingImprovement values).
                public int count;
                public unsafe fixed int items[10];
            };

            /** @struct SingleEyeData
             * A struct containing status related an eye.
             * @image html EyeData.png width=1040px height=880px
             */
            [StructLayout(LayoutKind.Sequential)]
            public struct SingleEyeData
            {
                /** The bits containing all validity for this frame.*/
                // Field name [sic] — kept as-is for native layout/source compatibility.
                public System.UInt64 eye_data_validata_bit_mask;
                /** The point in the eye from which the gaze ray originates in millimeter.(right-handed coordinate system)*/
                public Vector3 gaze_origin_mm;
                /** The normalized gaze direction of the eye in [0,1].(right-handed coordinate system)*/
                public Vector3 gaze_direction_normalized;
                /** The diameter of the pupil in millimeter*/
                public float pupil_diameter_mm;
                /** A value representing how open the eye is.*/
                public float eye_openness;
                /** The normalized position of a pupil in [0,1]*/
                public Vector2 pupil_position_in_sensor_area;

                // Tests the bit for the given validity flag; the int shift is safe
                // because SingleEyeDataValidity only defines 5 flag indices (< 32).
                public bool GetValidity(SingleEyeDataValidity validity) => (eye_data_validata_bit_mask & (ulong)(1 << (int)validity)) > 0;
            }

            [StructLayout(LayoutKind.Sequential)]
            public struct CombinedEyeData
            {
                public SingleEyeData eye_data;
                // Non-zero when convergence_distance_mm is meaningful.
                public byte convergence_distance_validity;
                public float convergence_distance_mm;
            }

            [StructLayout(LayoutKind.Sequential)]
            /** @struct VerboseData
             * A struct containing all data listed below.
             */
            public struct VerboseData
            {
                /** A instance of the struct as @ref EyeData related to the left eye*/
                public SingleEyeData left;
                /** A instance of the struct as @ref EyeData related to the right eye*/
                public SingleEyeData right;
                /** A instance of the struct as @ref EyeData related to the combined eye*/
                public CombinedEyeData combined;
                public TrackingImprovements tracking_improvements;
            }
            #endregion

            #region EyeParameter
            [StructLayout(LayoutKind.Sequential)]
            /** @struct GazeRayParameter
             * A struct containing all data listed below.
             */
            public struct GazeRayParameter
            {
                /** The sensitive factor of gaze ray in [0,1]. The bigger factor is, the more sensitive the gaze ray is.*/
                public double sensitive_factor;
            };

            [StructLayout(LayoutKind.Sequential)]
            /** @struct EyeParameter
             * A struct containing all data listed below.
             */
            public struct EyeParameter
            {
                public GazeRayParameter gaze_ray_parameter;
            };
            #endregion

            #region CalibrationResult
            // Outcome of the eye calibration overlay.
            public enum CalibrationResult
            {
                SUCCESS,
                FAIL,
                BUSY,
            }
            #endregion
        }
    }
}
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Eye/SRanipal_EyeDataType_v2.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2018, HTC Corporation. All rights reserved. ===========
2 | using System;
3 | using System.Runtime.InteropServices;
4 |
5 | namespace ViveSR
6 | {
7 | namespace anipal
8 | {
9 | namespace Eye
10 | {
11 | #region EyeShape_v2
            /// <summary>
            /// Eye blend-shape indices produced by the SRanipal v2 eye runtime.
            /// Numeric values match the native weighting ordering (see
            /// SRanipal_Eye_v2.WeightingCount) — do not reorder or renumber.
            /// </summary>
            public enum EyeShape_v2
            {
                None = -1,
                Eye_Left_Blink = 0,
                Eye_Left_Wide,
                Eye_Left_Right,
                Eye_Left_Left,
                Eye_Left_Up,
                Eye_Left_Down,
                Eye_Right_Blink = 6,
                Eye_Right_Wide,
                Eye_Right_Right,
                Eye_Right_Left,
                Eye_Right_Up,
                Eye_Right_Down,
                Eye_Frown = 12,
                Eye_Left_Squeeze,
                Eye_Right_Squeeze,
                Max = 15,
            }
32 |
33 | #endregion
34 |
35 | [StructLayout(LayoutKind.Sequential)]
36 | public struct SingleEyeExpression
37 | {
38 | public float eye_wide; /*! 0)
32 | {
33 | if (propEyeShapes.arraySize != skinnedMesh.sharedMesh.blendShapeCount)
34 | {
35 | propEyeShapes.arraySize = skinnedMesh.sharedMesh.blendShapeCount;
36 | for (int i = 0; i < skinnedMesh.sharedMesh.blendShapeCount; ++i)
37 | {
38 | SerializedProperty propEyeShape = propEyeShapes.GetArrayElementAtIndex(i);
39 | string elementName = skinnedMesh.sharedMesh.GetBlendShapeName(i);
40 |
41 | propEyeShape.intValue = (int)EyeShape.None;
42 | foreach (EyeShape EyeShape in (EyeShape[])Enum.GetValues(typeof(EyeShape)))
43 | {
44 | if (elementName == EyeShape.ToString())
45 | propEyeShape.intValue = (int)EyeShape;
46 | }
47 | }
48 | }
49 | for (int i = 0; i < skinnedMesh.sharedMesh.blendShapeCount; ++i)
50 | {
51 | SerializedProperty propEyeShape = propEyeShapes.GetArrayElementAtIndex(i);
52 | newLabelPosition.y = newFieldPosition.y;
53 | string elementName = skinnedMesh.sharedMesh.GetBlendShapeName(i);
54 | EditorGUI.LabelField(newLabelPosition, " " + elementName);
55 | EditorGUI.PropertyField(newFieldPosition, propEyeShape, GUIContent.none);
56 | newFieldPosition.y += EditorGUIUtility.singleLineHeight;
57 | }
58 | }
59 | EditorGUI.EndProperty();
60 | }
61 |
62 | public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
63 | {
64 | int LineCount = 1;
65 | SerializedProperty propSkinedMesh = property.FindPropertyRelative("skinnedMeshRenderer");
66 | SkinnedMeshRenderer skinnedMesh = propSkinedMesh.objectReferenceValue as SkinnedMeshRenderer;
67 | if (skinnedMesh != null) LineCount += skinnedMesh.sharedMesh.blendShapeCount;
68 | return EditorGUIUtility.singleLineHeight * LineCount;
69 | }
70 | }
71 | }
72 | }
73 | }
74 | #endif
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Eye/SRanipal_EyeShapeTableDrawer_v2.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2018, HTC Corporation. All rights reserved. ===========
2 | #if UNITY_EDITOR
3 | using UnityEngine;
4 | using UnityEditor;
5 | using System;
6 |
7 | namespace ViveSR
8 | {
9 | namespace anipal
10 | {
11 | namespace Eye
12 | {
            /// <summary>
            /// Unity inspector drawer for EyeShapeTable_v2: shows the renderer field,
            /// then one EyeShape_v2 dropdown per blend shape on the assigned mesh,
            /// auto-mapping shapes whose blend-shape name exactly matches an enum name.
            /// </summary>
            [CustomPropertyDrawer(typeof(EyeShapeTable_v2))]
            public class SRanipal_EyeShapeTableDrawer_v2 : PropertyDrawer
            {
                public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
                {
                    EditorGUI.BeginProperty(position, label, property);

                    // Field column starts after the prefix label; label column takes the rest.
                    Rect newFieldPosition = EditorGUI.PrefixLabel(position, GUIUtility.GetControlID(FocusType.Passive), label);
                    newFieldPosition.height = EditorGUIUtility.singleLineHeight;
                    Rect newLabelPosition = position;
                    newLabelPosition.width -= newFieldPosition.width;

                    SerializedProperty propSkinedMesh = property.FindPropertyRelative("skinnedMeshRenderer");
                    SerializedProperty propEyeShapes = property.FindPropertyRelative("eyeShapes");
                    EditorGUI.PropertyField(newFieldPosition, propSkinedMesh, GUIContent.none);
                    newFieldPosition.y += EditorGUIUtility.singleLineHeight;

                    SkinnedMeshRenderer skinnedMesh = propSkinedMesh.objectReferenceValue as SkinnedMeshRenderer;
                    if (skinnedMesh != null && skinnedMesh.sharedMesh.blendShapeCount > 0)
                    {
                        // Mesh changed (or first assignment): resize the mapping array and
                        // auto-assign shapes by exact blend-shape-name match.
                        if (propEyeShapes.arraySize != skinnedMesh.sharedMesh.blendShapeCount)
                        {
                            propEyeShapes.arraySize = skinnedMesh.sharedMesh.blendShapeCount;
                            for (int i = 0; i < skinnedMesh.sharedMesh.blendShapeCount; ++i)
                            {
                                SerializedProperty propEyeShape = propEyeShapes.GetArrayElementAtIndex(i);
                                string elementName = skinnedMesh.sharedMesh.GetBlendShapeName(i);

                                propEyeShape.intValue = (int)EyeShape_v2.None;
                                foreach (EyeShape_v2 EyeShape in (EyeShape_v2[])Enum.GetValues(typeof(EyeShape_v2)))
                                {
                                    if (elementName == EyeShape.ToString())
                                        propEyeShape.intValue = (int)EyeShape;
                                }
                            }
                        }
                        // Draw one name label + enum popup row per blend shape.
                        for (int i = 0; i < skinnedMesh.sharedMesh.blendShapeCount; ++i)
                        {
                            SerializedProperty propEyeShape = propEyeShapes.GetArrayElementAtIndex(i);
                            newLabelPosition.y = newFieldPosition.y;
                            string elementName = skinnedMesh.sharedMesh.GetBlendShapeName(i);
                            EditorGUI.LabelField(newLabelPosition, " " + elementName);
                            EditorGUI.PropertyField(newFieldPosition, propEyeShape, GUIContent.none);
                            newFieldPosition.y += EditorGUIUtility.singleLineHeight;
                        }
                    }
                    EditorGUI.EndProperty();
                }

                // One line for the renderer field plus one per blend shape, matching OnGUI.
                public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
                {
                    int LineCount = 1;
                    SerializedProperty propSkinedMesh = property.FindPropertyRelative("skinnedMeshRenderer");
                    SkinnedMeshRenderer skinnedMesh = propSkinedMesh.objectReferenceValue as SkinnedMeshRenderer;
                    if (skinnedMesh != null) LineCount += skinnedMesh.sharedMesh.blendShapeCount;
                    return EditorGUIUtility.singleLineHeight * LineCount;
                }
            }
71 | }
72 | }
73 | }
74 | #endif
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Eye/SRanipal_Eye_API.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | using System;
3 | using System.Runtime.InteropServices;
4 |
5 | namespace ViveSR
6 | {
7 | namespace anipal
8 | {
9 | namespace Eye
10 | {
            /// <summary>
            /// P/Invoke surface of the native SRanipal eye module ("SRanipal" DLL).
            /// Signatures mirror the native exports and must not be altered.
            /// </summary>
            public static class SRanipal_Eye_API
            {
                /// <summary>Check HMD device is ViveProEye or not.</summary>
                /// <returns>true : ViveProEye, false : other HMD.</returns>
                [DllImport("SRanipal")]
                public static extern bool IsViveProEye();

                /// <summary>Gets data from anipal's Eye module.</summary>
                /// <param name="data">ViveSR.anipal.Eye.EyeData_v2 buffer filled in place by the runtime.</param>
                /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
                [DllImport("SRanipal")]
                public static extern Error GetEyeData_v2(ref EyeData_v2 data);

                /// <summary>Sets the parameter of anipal's Eye module.</summary>
                /// <param name="parameter">ViveSR.anipal.Eye.EyeParameter to apply.</param>
                /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
                [DllImport("SRanipal")]
                public static extern Error SetEyeParameter(EyeParameter parameter);

                /// <summary>Gets the parameter of anipal's Eye module.</summary>
                /// <param name="parameter">Receives the current ViveSR.anipal.Eye.EyeParameter.</param>
                /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
                [DllImport("SRanipal")]
                public static extern Error GetEyeParameter(ref EyeParameter parameter);

                /// <summary>Indicate if user need to do eye calibration now.</summary>
                /// <param name="need">If need calibration, it will be true, otherwise it will be false.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern int IsUserNeedCalibration(ref bool need);

                /// <summary>Launches anipal's Eye Calibration tool (an overlay program).</summary>
                /// <param name="callback">(Upcoming feature) A callback method invoked at the end of the calibration process.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern int LaunchEyeCalibration(IntPtr callback);

                /// <summary>Register a callback function to receive eye camera related data when the module has new outputs.</summary>
                /// <param name="callback">Function pointer of the callback.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern int RegisterEyeDataCallback(IntPtr callback);

                /// <summary>Unregister a callback function to stop receiving eye camera related data.</summary>
                /// <param name="callback">Function pointer of the callback.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern int UnregisterEyeDataCallback(IntPtr callback);

                /// <summary>Register a callback function to receive v2 eye data when the module has new outputs.</summary>
                /// <param name="callback">Function pointer of the callback.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern int RegisterEyeDataCallback_v2(IntPtr callback);

                /// <summary>Unregister a callback function to stop receiving v2 eye data.</summary>
                /// <param name="callback">Function pointer of the callback.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern int UnregisterEyeDataCallback_v2(IntPtr callback);

                /// <summary>Synchronizes the clock on the device and the clock on the system.</summary>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern Error SRanipal_UpdateTimeSync();

                /// <summary>Gets the system timestamp.</summary>
                /// <param name="time">Receives the value of the system timestamp.</param>
                /// <returns>Error code; please refer to Error in ViveSR_Enums.h.</returns>
                [DllImport("SRanipal")]
                public static extern Error SRanipal_GetSystemTime(ref Int64 time);
            }
102 | }
103 | }
104 | }
105 |
106 |
107 |
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Eye/SRanipal_Eye_Enums.cs:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Eye/SRanipal_Eye_v2.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | using System;
3 | using System.Collections.Generic;
4 |
5 | namespace ViveSR
6 | {
7 | namespace anipal
8 | {
9 | namespace Eye
10 | {
11 | public static class SRanipal_Eye_v2
12 | {
13 | public const int ANIPAL_TYPE_EYE_V2 = 2;
14 |
15 | public const int WeightingCount = (int)EyeShape_v2.Max;
16 | private static Dictionary Weightings;
17 |
18 | static SRanipal_Eye_v2()
19 | {
20 | Weightings = new Dictionary();
21 | for (int i = 0; i < WeightingCount; ++i) Weightings.Add((EyeShape_v2)i, 0.0f);
22 | }
23 |
24 | ///
25 | /// Launches anipal's Eye Calibration feature (an overlay program).
26 | ///
27 | /// Indicates the resulting ViveSR.Error status of this method.
28 | public static bool LaunchEyeCalibration()
29 | {
30 | int result = SRanipal_Eye_API.LaunchEyeCalibration(IntPtr.Zero);
31 | return result == (int)Error.WORK;
32 | }
33 | }
34 | }
35 | }
36 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Lip/SRanipal_LipData_v2.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | using System;
3 | using System.Runtime.InteropServices;
4 |
5 | namespace ViveSR
6 | {
7 | namespace anipal
8 | {
9 | namespace Lip
10 | {
            /// <summary>
            /// Lip blend-shape indices produced by the SRanipal v2 lip runtime.
            /// Numeric values index the native blend_shape_weight array — do not
            /// reorder or renumber (note the deliberately non-monotonic tongue block).
            /// </summary>
            public enum LipShape_v2
            {
                //None = -1,
                JawRight = 0, // +JawX
                JawLeft = 1, // -JawX
                JawForward = 2,
                JawOpen = 3,
                MouthApeShape = 4,
                MouthUpperRight = 5, // +MouthUpper
                MouthUpperLeft = 6, // -MouthUpper
                MouthLowerRight = 7, // +MouthLower
                MouthLowerLeft = 8, // -MouthLower
                MouthUpperOverturn = 9,
                MouthLowerOverturn = 10,
                MouthPout = 11,
                MouthSmileRight = 12, // +SmileSadRight
                MouthSmileLeft = 13, // +SmileSadLeft
                MouthSadRight = 14, // -SmileSadRight
                MouthSadLeft = 15, // -SmileSadLeft
                CheekPuffRight = 16,
                CheekPuffLeft = 17,
                CheekSuck = 18,
                MouthUpperUpRight = 19,
                MouthUpperUpLeft = 20,
                MouthLowerDownRight = 21,
                MouthLowerDownLeft = 22,
                MouthUpperInside = 23,
                MouthLowerInside = 24,
                MouthLowerOverlay = 25,
                TongueLongStep1 = 26,
                TongueLongStep2 = 32,
                TongueDown = 30, // -TongueY
                TongueUp = 29, // +TongueY
                TongueRight = 28, // +TongueX
                TongueLeft = 27, // -TongueX
                TongueRoll = 31,
                TongueUpLeftMorph = 34,
                TongueUpRightMorph = 33,
                TongueDownLeftMorph = 36,
                TongueDownRightMorph = 35,
                //Max = 37,
            }

            // Marshalled directly from the native runtime; layout must not change.
            [StructLayout(LayoutKind.Sequential)]
            public struct PredictionData_v2
            {
                // Native array is 60 wide; only the first 37 entries (WeightingCount)
                // are consumed by SRanipal_Lip_v2.
                public unsafe fixed float blend_shape_weight[60];
            };

            // One lip-tracking frame as delivered by GetLipData_v2.
            [StructLayout(LayoutKind.Sequential)]
            public struct LipData_v2
            {
                public int frame;
                public int time;
                // Caller-allocated buffer the runtime copies the camera image into.
                public IntPtr image;
                public PredictionData_v2 prediction_data;
            };
68 | }
69 | }
70 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Lip/SRanipal_Lip_API.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | using System.Runtime.InteropServices;
3 |
4 | namespace ViveSR
5 | {
6 | namespace anipal
7 | {
8 | namespace Lip
9 | {
            /// <summary>
            /// P/Invoke surface of the native SRanipal lip module ("SRanipal" DLL).
            /// </summary>
            public static class SRanipal_Lip_API
            {
                /// <summary>Gets version 2 lip data from anipal's Lip module.</summary>
                /// <param name="data">ViveSR.anipal.Lip.LipData_v2 buffer filled in place by the runtime.</param>
                /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
                [DllImport("SRanipal")]
                public static extern unsafe Error GetLipData_v2(ref LipData_v2 data);
            }
20 | }
21 | }
22 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/Lip/SRanipal_Lip_v2.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | using System;
3 | using System.Collections.Generic;
4 | using System.Runtime.InteropServices;
5 |
6 | namespace ViveSR
7 | {
8 | namespace anipal
9 | {
10 | namespace Lip
11 | {
12 | public class SRanipal_Lip_v2
13 | {
14 | public const int ANIPAL_TYPE_LIP_V2 = 3;
15 |
16 | public const int ImageWidth = 800, ImageHeight = 400, ImageChannel = 1;
17 | public const int WeightingCount = 37;
18 | private static Error LastUpdateResult = Error.FAILED;
19 | public static LipData_v2 LipData;
20 | private static Dictionary Weightings;
21 |
22 | static SRanipal_Lip_v2()
23 | {
24 | LipData.image = Marshal.AllocCoTaskMem(ImageWidth * ImageHeight * ImageChannel);
25 | Weightings = new Dictionary();
26 | for (int i = 0; i < WeightingCount; ++i) Weightings.Add((LipShape_v2)i, 0.0f);
27 | }
28 |
29 | private static unsafe bool UpdateData()
30 | {
31 | LastUpdateResult = SRanipal_Lip_API.GetLipData_v2(ref LipData);
32 | if (LastUpdateResult == Error.WORK)
33 | {
34 | for (int i = 0; i < WeightingCount; ++i)
35 | {
36 | Weightings[(LipShape_v2)i] = LipData.prediction_data.blend_shape_weight[i];
37 | }
38 | }
39 | return LastUpdateResult == Error.WORK;
40 | }
41 |
42 | ///
43 | /// Gets weighting values from anipal's Lip module.
44 | ///
45 | /// Weighting values obtained from anipal's Lip module.
46 | /// Indicates whether the values received are new.
47 | public static bool GetLipWeightingsAndImage(out Dictionary shapes, out IntPtr image)
48 | {
49 | bool update = UpdateData();
50 | shapes = Weightings;
51 | image = LipData.image;
52 | return update;
53 | }
54 | }
55 | }
56 | }
57 | }
58 |
59 |
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/SRWorks_Enums.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | namespace ViveSR
3 | {
4 | ///
5 | /// error code of ViveSR
6 | ///
    /// <summary>
    /// error code of ViveSR
    /// NOTE(review): values and identifiers (including the SDK's own typos such as
    /// UNKONW_MODULE / UNKNOW_TYPE) mirror the native ViveSR_Enums.h and must not
    /// be renamed or renumbered.
    /// </summary>
    public enum Error : int
    {
        RUNTIME_NOT_FOUND = -3,
        NOT_INITIAL = -2,
        FAILED = -1,
        // Success — the operation completed normally.
        WORK = 0,
        INVALID_INPUT = 1,
        FILE_NOT_FOUND = 2,
        DATA_NOT_FOUND = 13,
        UNDEFINED = 319,
        INITIAL_FAILED = 1001,
        NOT_IMPLEMENTED = 1003,
        NULL_POINTER = 1004,
        OVER_MAX_LENGTH = 1005,
        FILE_INVALID = 1006,
        UNINSTALL_STEAM = 1007,
        MEMCPY_FAIL = 1008,
        NOT_MATCH = 1009,
        NODE_NOT_EXIST = 1010,
        UNKONW_MODULE = 1011,
        MODULE_FULL = 1012,
        UNKNOW_TYPE = 1013,
        INVALID_MODULE = 1014,
        INVALID_TYPE = 1015,
        MEMORY_NOT_ENOUGH = 1016,
        BUSY = 1017,
        NOT_SUPPORTED = 1018,
        INVALID_VALUE = 1019,
        COMING_SOON = 1020,
        INVALID_CHANGE = 1021,
        TIMEOUT = 1022,
        DEVICE_NOT_FOUND = 1023,
        INVALID_DEVICE = 1024,
        NOT_AUTHORIZED = 1025,
        ALREADY = 1026,
        INTERNAL = 1027,
        CONNECTION_FAILED = 1028,
        ALLOCATION_FAILED = 1029,
        OPERATION_FAILED = 1030,
        NOT_AVAILABLE = 1031,
        CALLBACK_IN_PROGRESS= 1032,
        SERVICE_NOT_FOUND = 1033,
        DISABLED_BY_USER = 1034,
        EULA_NOT_ACCEPT = 1035,
        RUNTIME_NO_RESPONSE = 1036,
        OPENCL_NOT_SUPPORT = 1037,
        NOT_SUPPORT_EYE_TRACKING = 1038,
        FOXIP_SO = 1051 // Weird wireless issue
    };
56 | }
57 |
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/SRWorks_Log.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/TrackingLibs/SRanipal/SRWorks_Log.dll
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/SRanipal.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/TrackingLibs/SRanipal/SRanipal.dll
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/SRanipalTrackingInterface.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using System.Runtime.InteropServices;
4 | using System.Threading;
5 | using ViveSR;
6 | using ViveSR.anipal;
7 | using ViveSR.anipal.Eye;
8 | using ViveSR.anipal.Lip;
9 | using VRCFaceTracking.Assets.UI;
10 |
11 | namespace VRCFaceTracking.SRanipal
12 | {
13 | public class SRanipalExtTrackingInterface : ExtTrackingModule
14 | {
15 | LipData_v2 lipData = default;
16 | EyeData_v2 eyeData = default;
17 | private static CancellationTokenSource _cancellationToken;
18 |
19 | public override (bool SupportsEye, bool SupportsLip) Supported => (true, true);
20 |
21 | public override (bool eyeSuccess, bool lipSuccess) Initialize(bool eye, bool lip)
22 | {
23 | Error eyeError = Error.UNDEFINED, lipError = Error.UNDEFINED;
24 |
25 | if (eye)
26 | // Only try to init if we're actually using the only headset that supports SRanipal eye tracking
27 | eyeError = SRanipal_API.Initial(SRanipal_Eye_v2.ANIPAL_TYPE_EYE_V2, IntPtr.Zero);
28 |
29 | if (lip)
30 | lipError = SRanipal_API.Initial(SRanipal_Lip_v2.ANIPAL_TYPE_LIP_V2, IntPtr.Zero);
31 |
32 | var (eyeEnabled, lipEnabled) = HandleSrErrors(eyeError, lipError);
33 |
34 | if (eyeEnabled && Utils.HasAdmin)
35 | {
36 | var found = false;
37 | int tries = 0;
38 | while (!found && tries < 15)
39 | {
40 | tries++;
41 | found = Attach();
42 | Thread.Sleep(250);
43 | }
44 |
45 | if (found)
46 | {
47 | // Find the EyeCameraDevice.dll module inside sr_runtime, get it's offset and add hex 19190 to it for the image stream.
48 | foreach (ProcessModule module in _process.Modules)
49 | if (module.ModuleName == "EyeCameraDevice.dll")
50 | _offset = module.BaseAddress + (_process.MainModule.FileVersionInfo.FileVersion == "1.3.2.0" ? 0x19190 : 0x19100);
51 |
52 | UnifiedTrackingData.LatestEyeData.SupportsImage = true;
53 | UnifiedTrackingData.LatestEyeData.ImageSize = (200, 100);
54 | }
55 | }
56 |
57 | if (lipEnabled)
58 | {
59 | UnifiedTrackingData.LatestLipData.SupportsImage = true;
60 | UnifiedTrackingData.LatestLipData.ImageSize = (SRanipal_Lip_v2.ImageWidth, SRanipal_Lip_v2.ImageHeight);
61 | UnifiedTrackingData.LatestLipData.ImageData = new byte[UnifiedTrackingData.LatestLipData.ImageSize.x *
62 | UnifiedTrackingData.LatestLipData.ImageSize.y];
63 | lipData.image = Marshal.AllocCoTaskMem(UnifiedTrackingData.LatestLipData.ImageSize.x *
64 | UnifiedTrackingData.LatestLipData.ImageSize.y);
65 | }
66 |
67 | return (eyeEnabled, lipEnabled);
68 | }
69 |
70 | private static (bool eyeSuccess, bool lipSuccess) HandleSrErrors(Error eyeError, Error lipError)
71 | {
72 | bool eyeEnabled = false, lipEnabled = false;
73 |
74 | if (eyeError == Error.WORK)
75 | eyeEnabled = true;
76 |
77 | if (lipError == Error.FOXIP_SO)
78 | while (lipError == Error.FOXIP_SO)
79 | lipError = SRanipal_API.Initial(SRanipal_Lip_v2.ANIPAL_TYPE_LIP_V2, IntPtr.Zero);
80 |
81 | if (lipError == Error.WORK)
82 | lipEnabled = true;
83 |
84 | return (eyeEnabled, lipEnabled);
85 | }
86 |
87 | public override void Teardown()
88 | {
89 | _cancellationToken.Cancel();
90 | _cancellationToken.Dispose();
91 |
92 | Thread.Sleep(2000);
93 |
94 | if (Status.EyeState > ModuleState.Uninitialized)
95 | {
96 | Logger.Msg("Teardown: Releasing Eye");
97 | // Attempt to release this module and give up after 10 seconds because Vive Moment
98 | var killThread = new Thread(() => SRanipal_API.Release(SRanipal_Eye_v2.ANIPAL_TYPE_EYE_V2));
99 | killThread.Start();
100 | if (!killThread.Join(new TimeSpan(0, 0, 5)))
101 | {
102 | killThread.Abort();
103 | if (killThread.IsAlive)
104 | killThread.Interrupt();
105 | }
106 | }
107 |
108 | if (Status.LipState > ModuleState.Uninitialized)
109 | {
110 | Logger.Msg("Teardown: Releasing Lip");
111 | // Same for lips
112 | var killThread = new Thread(() => SRanipal_API.Release(SRanipal_Lip_v2.ANIPAL_TYPE_LIP_V2));
113 | killThread.Start();
114 | if (!killThread.Join(new TimeSpan(0,0,5)))
115 | killThread.Abort();
116 | }
117 | }
118 |
119 | #region Update
120 |
121 |
122 | public override Action GetUpdateThreadFunc()
123 | {
124 | _cancellationToken = new CancellationTokenSource();
125 | return () =>
126 | {
127 | while (!_cancellationToken.IsCancellationRequested)
128 | {
129 | if (Status.LipState == ModuleState.Active && UpdateMouth() != Error.WORK)
130 | {
131 | Logger.Msg("An error occured while getting lip data. This might be a wireless crash.");
132 | Logger.Msg("Waiting 30 seconds before reinitializing to account for wireless users.");
133 | Thread.Sleep(30000);
134 | UnifiedLibManager.Initialize();
135 | return;
136 | }
137 |
138 | if (Status.EyeState == ModuleState.Active && UpdateEye() != Error.WORK)
139 | {
140 | Logger.Msg("An error occured while getting eye data. This might be a wireless crash.");
141 | Logger.Msg("Waiting 30 seconds before reinitializing to account for wireless users.");
142 | Thread.Sleep(30000);
143 | UnifiedLibManager.Initialize();
144 | return;
145 | }
146 |
147 | Thread.Sleep(10);
148 | }
149 | };
150 | }
151 |
152 | private static Process _process;
153 | private static IntPtr _processHandle;
154 | private IntPtr _offset;
155 |
156 | private static bool Attach()
157 | {
158 | if (Process.GetProcessesByName("sr_runtime").Length <= 0) return false;
159 | _process = Process.GetProcessesByName("sr_runtime")[0];
160 | _processHandle =
161 | Utils.OpenProcess(Utils.PROCESS_VM_READ,
162 | false, _process.Id);
163 | return true;
164 | }
165 |
166 | private static byte[] ReadMemory(IntPtr offset, int size) {
167 | var buffer = new byte[size];
168 |
169 | var bytesRead = 0;
170 | Utils.ReadProcessMemory((int) _processHandle, offset, buffer, size, ref bytesRead);
171 |
172 | return bytesRead != size ? null : buffer;
173 | }
174 |
175 | private Error UpdateEye()
176 | {
177 | var updateResult = SRanipal_Eye_API.GetEyeData_v2(ref eyeData);
178 | UnifiedTrackingData.LatestEyeData.UpdateData(eyeData);
179 |
180 | if (!MainWindow.IsEyePageVisible || _processHandle == IntPtr.Zero || !UnifiedTrackingData.LatestEyeData.SupportsImage) return updateResult;
181 |
182 | // Read 20000 image bytes from the predefined offset. 10000 bytes per eye.
183 | var imageBytes = ReadMemory(_offset, 20000);
184 |
185 | // Concatenate the two images side by side instead of one after the other
186 | byte[] leftEye = new byte[10000];
187 | Array.Copy(imageBytes, 0, leftEye, 0, 10000);
188 | byte[] rightEye = new byte[10000];
189 | Array.Copy(imageBytes, 10000, rightEye, 0, 10000);
190 |
191 | for (var i = 0; i < 100; i++) // 100 lines of 200 bytes
192 | {
193 | // Add 100 bytes from the left eye to the left side of the image
194 | int leftIndex = i * 100 * 2;
195 | Array.Copy(leftEye,i*100, imageBytes, leftIndex, 100);
196 |
197 | // Add 100 bytes from the right eye to the right side of the image
198 | Array.Copy(rightEye, i*100, imageBytes, leftIndex + 100, 100);
199 | }
200 |
201 | // Write the image to the latest eye data
202 | UnifiedTrackingData.LatestEyeData.ImageData = imageBytes;
203 |
204 | return updateResult;
205 | }
206 |
207 | private Error UpdateMouth()
208 | {
209 | var updateResult = SRanipal_Lip_API.GetLipData_v2(ref lipData);
210 | UnifiedTrackingData.LatestLipData.UpdateData(lipData);
211 |
212 | if (!MainWindow.IsLipPageVisible || lipData.image == IntPtr.Zero || !UnifiedTrackingData.LatestLipData.SupportsImage) return updateResult;
213 |
214 | Marshal.Copy(lipData.image, UnifiedTrackingData.LatestLipData.ImageData, 0, UnifiedTrackingData.LatestLipData.ImageSize.x *
215 | UnifiedTrackingData.LatestLipData.ImageSize.y);
216 |
217 | return updateResult;
218 | }
219 |
220 | #endregion
221 | }
222 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/SRanipal_API.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | using System;
3 | using System.Runtime.InteropServices;
4 |
5 | namespace ViveSR
6 | {
7 | namespace anipal
8 | {
        public class SRanipal_API
        {
            /// <summary>
            /// Invokes an anipal module.
            /// </summary>
            /// <param name="anipalType">The index of an anipal module.</param>
            /// <param name="config">Native configuration pointer for the module. NOTE(review): expected layout not visible here — confirm against the SRanipal SDK headers.</param>
            /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
            [DllImport("SRanipal")]
            public static extern Error Initial(int anipalType, IntPtr config);

            /// <summary>
            /// Terminates an anipal module.
            /// </summary>
            /// <param name="anipalType">The index of an anipal module.</param>
            /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
            [DllImport("SRanipal")]
            public static extern Error Release(int anipalType);

            /// <summary>
            /// Gets the status of an anipal module.
            /// </summary>
            /// <param name="anipalType">The index of an anipal module.</param>
            /// <param name="status">The status of an anipal module.</param>
            /// <returns>Indicates the resulting ViveSR.Error status of this method.</returns>
            [DllImport("SRanipal")]
            public static extern Error GetStatus(int anipalType, out AnipalStatus status);

        }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/SRanipal_Enums.cs:
--------------------------------------------------------------------------------
1 | //========= Copyright 2019, HTC Corporation. All rights reserved. ===========
2 | namespace ViveSR
3 | {
4 | namespace anipal
5 | {
        /// <summary>
        /// Status of an anipal module, as reported by SRanipal_API.GetStatus.
        /// </summary>
        public enum AnipalStatus : int
        {
            ERROR,   // 0
            IDLE,    // 1
            WORKING, // 2
        }
12 | }
13 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/ViveSR_Client.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/TrackingLibs/SRanipal/ViveSR_Client.dll
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/libHTC_License.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/TrackingLibs/SRanipal/libHTC_License.dll
--------------------------------------------------------------------------------
/VRCFaceTracking/TrackingLibs/SRanipal/nanomsg.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ULemon/VRCFaceTracking/bdbd54efea58883458cbbae743b5bc144416d955/VRCFaceTracking/TrackingLibs/SRanipal/nanomsg.dll
--------------------------------------------------------------------------------
/VRCFaceTracking/UnifiedLibManager.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using System.Reflection;
6 | using System.Threading;
7 |
8 | namespace VRCFaceTracking
9 | {
    /// <summary>
    /// Lifecycle state of a tracking module. Uninitialized means the module is not
    /// in use; Idle and Active mean it is or has been running.
    /// </summary>
    public enum ModuleState
    {
        Uninitialized = -1, // If the module is not initialized, we can assume it's not being used
        Idle = 0, // Idle and above we can assume the module in question is or has been in use
        Active = 1 // We're actively getting tracking data from the module
    }
16 |
17 | public static class UnifiedLibManager
18 | {
19 | #region Delegates
20 | public static Action OnTrackingStateUpdate = (b, b1) => { };
21 | #endregion
22 |
23 | #region Statuses
24 | public static ModuleState EyeStatus
25 | {
26 | get => _eyeModule?.Status.EyeState ?? ModuleState.Uninitialized;
27 | set
28 | {
29 | if (_eyeModule != null)
30 | _eyeModule.Status.EyeState = value;
31 | OnTrackingStateUpdate.Invoke(value, LipStatus);
32 | }
33 | }
34 |
35 | public static ModuleState LipStatus
36 | {
37 | get => _lipModule?.Status.LipState ?? ModuleState.Uninitialized;
38 | set
39 | {
40 | if (_lipModule != null)
41 | _lipModule.Status.LipState = value;
42 | OnTrackingStateUpdate.Invoke(EyeStatus, value);
43 | }
44 | }
45 | #endregion
46 |
47 | #region Modules
48 | private static ExtTrackingModule _eyeModule, _lipModule;
49 | private static readonly Dictionary UsefulThreads =
50 | new Dictionary();
51 | #endregion
52 |
53 | private static Thread _initializeWorker;
54 |
55 | public static void Initialize(bool eye = true, bool lip = true)
56 | {
57 | if (_initializeWorker != null && _initializeWorker.IsAlive) _initializeWorker.Abort();
58 |
59 | // Start Initialization
60 | _initializeWorker = new Thread(() =>
61 | {
62 | // Kill lingering threads
63 | TeardownAllAndReset();
64 |
65 | // Init
66 | FindAndInitRuntimes(eye, lip);
67 | });
68 | Logger.Msg("Starting initialization thread");
69 | _initializeWorker.Start();
70 | }
71 |
72 | private static List LoadExternalModules()
73 | {
74 | var returnList = new List();
75 | var customLibsPath = Path.Combine(Utils.PersistentDataDirectory, "CustomLibs");
76 |
77 | if (!Directory.Exists(customLibsPath))
78 | Directory.CreateDirectory(customLibsPath);
79 |
80 | Logger.Msg("Loading External Modules...");
81 |
82 | // Load dotnet dlls from the VRCFTLibs folder
83 | foreach (var dll in Directory.GetFiles(customLibsPath, "*.dll"))
84 | {
85 | Logger.Msg("Loading " + dll);
86 |
87 | Type module;
88 | try
89 | {
90 | var loadedModule = Assembly.LoadFrom(dll);
91 | // Get the first class that implements ExtTrackingModule
92 | module = loadedModule.GetTypes().FirstOrDefault(t => t.IsSubclassOf(typeof(ExtTrackingModule)));
93 | }
94 | catch (ReflectionTypeLoadException e)
95 | {
96 | foreach (var loaderException in e.LoaderExceptions)
97 | {
98 | Logger.Error("LoaderException: " + loaderException.Message);
99 | }
100 | Logger.Error("Exception loading " + dll + ". Skipping.");
101 | continue;
102 | }
103 | catch (BadImageFormatException e)
104 | {
105 | Logger.Error("Encountered a .dll with an invalid format: " + e.Message+". Skipping...");
106 | continue;
107 | }
108 |
109 | if (module != null)
110 | {
111 | returnList.Add(module);
112 | Logger.Msg("Loaded external tracking module: " + module.Name);
113 | continue;
114 | }
115 |
116 | Logger.Warning("Module " + dll + " does not implement ExtTrackingModule");
117 | }
118 |
119 | return returnList;
120 | }
121 |
122 | private static void EnsureModuleThreadStarted(ExtTrackingModule module)
123 | {
124 | if (UsefulThreads.ContainsKey(module))
125 | return;
126 |
127 | var thread = new Thread(module.GetUpdateThreadFunc().Invoke);
128 | UsefulThreads.Add(module, thread);
129 | thread.Start();
130 | }
131 |
132 | private static void FindAndInitRuntimes(bool eye = true, bool lip = true)
133 | {
134 | Logger.Msg("Finding and initializing runtimes...");
135 |
136 | // Get a list of our own built-in modules
137 | var trackingModules = Assembly.GetExecutingAssembly().GetTypes()
138 | .Where(type => type.IsSubclassOf(typeof(ExtTrackingModule)));
139 |
140 | // Concat both our own modules and the external ones
141 | trackingModules = trackingModules.Union(LoadExternalModules());
142 |
143 | foreach (var module in trackingModules)
144 | {
145 | Logger.Msg("Initializing module: " + module.Name);
146 | // Create module
147 | var moduleObj = (ExtTrackingModule) Activator.CreateInstance(module);
148 |
149 | // If there is still a need for a module with eye or lip tracking and this module supports the current need, try initialize it
150 | if (EyeStatus == ModuleState.Uninitialized && moduleObj.Supported.SupportsEye ||
151 | LipStatus == ModuleState.Uninitialized && moduleObj.Supported.SupportsLip)
152 | {
153 | bool eyeSuccess, lipSuccess;
154 | try
155 | {
156 | (eyeSuccess, lipSuccess) = moduleObj.Initialize(eye, lip);
157 | }
158 | catch(MissingMethodException)
159 | {
160 | Logger.Error(moduleObj.GetType().Name+ " does not properly implement ExtTrackingModule. Skipping.");
161 | continue;
162 | }
163 | catch (Exception e)
164 | {
165 | Logger.Error("Exception initializing " + moduleObj.GetType().Name + ". Skipping.");
166 | Logger.Error(e.Message);
167 | continue;
168 | }
169 |
170 | // If eyeSuccess or lipSuccess was true, set the status to active
171 | if (eyeSuccess && _eyeModule == null)
172 | {
173 | _eyeModule = moduleObj;
174 | EyeStatus = ModuleState.Active;
175 | EnsureModuleThreadStarted(moduleObj);
176 | }
177 |
178 | if (lipSuccess && _lipModule == null)
179 | {
180 | _lipModule = moduleObj;
181 | LipStatus = ModuleState.Active;
182 | EnsureModuleThreadStarted(moduleObj);
183 | }
184 | }
185 |
186 | if (EyeStatus > ModuleState.Uninitialized && LipStatus > ModuleState.Uninitialized)
187 | break; // Keep enumerating over all modules until we find ones we can use
188 | }
189 |
190 | if (eye)
191 | {
192 | if (EyeStatus != ModuleState.Uninitialized) Logger.Msg("Eye Tracking Initialized via " + _eyeModule);
193 | else Logger.Warning("Eye Tracking will be unavailable for this session.");
194 | }
195 |
196 | if (lip)
197 | {
198 | if (LipStatus != ModuleState.Uninitialized) Logger.Msg("Lip Tracking Initialized via " + _lipModule);
199 | else Logger.Warning("Lip Tracking will be unavailable for this session.");
200 | }
201 | }
202 |
203 | // Signal all active modules to gracefully shut down their respective runtimes
204 | public static void TeardownAllAndReset()
205 | {
206 | foreach (var module in UsefulThreads)
207 | {
208 | Logger.Msg("Teardown: " + module.Key.GetType().Name);
209 | module.Key.Teardown();
210 | module.Value.Abort();
211 | Logger.Msg("Teardown complete: " + module.Key.GetType().Name);
212 | }
213 | UsefulThreads.Clear();
214 |
215 | EyeStatus = ModuleState.Uninitialized;
216 | LipStatus = ModuleState.Uninitialized;
217 |
218 | _eyeModule = null;
219 | _lipModule = null;
220 | }
221 | }
222 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/UnifiedTrackingData.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using ViveSR.anipal.Eye;
5 | using ViveSR.anipal.Lip;
6 | using VRCFaceTracking.Params;
7 | using VRCFaceTracking.Params.Eye;
8 | using VRCFaceTracking.Params.LipMerging;
9 | using Vector2 = VRCFaceTracking.Params.Vector2;
10 |
11 | namespace VRCFaceTracking
12 | {
13 | // Represents a single eye, can also be used as a combined eye
14 | public struct Eye
15 | {
16 | public Vector2 Look;
17 | public float Openness;
18 | public float Widen, Squeeze;
19 |
20 |
21 | public void Update(SingleEyeData eyeData, SingleEyeExpression? expression = null)
22 | {
23 | if (eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY))
24 | Look = eyeData.gaze_direction_normalized.Invert();
25 |
26 | Openness = eyeData.eye_openness;
27 |
28 | if (expression == null) return; // This is null when we use this as a combined eye, so don't try read data from it
29 |
30 | Widen = expression.Value.eye_wide;
31 | Squeeze = expression.Value.eye_squeeze;
32 | }
33 | }
34 |
35 | public class EyeTrackingData
36 | {
37 | // Camera Data
38 | public (int x, int y) ImageSize;
39 | public byte[] ImageData;
40 | public bool SupportsImage;
41 |
42 | public Eye Left, Right, Combined;
43 |
44 | // SRanipal Exclusive
45 | public float EyesDilation;
46 | private float _maxDilation, _minDilation;
47 |
48 | // Custom parameter
49 | public float EyesPupilDiameter;
50 |
51 | public void UpdateData(EyeData_v2 eyeData)
52 | {
53 | float dilation = 0;
54 |
55 | if (eyeData.verbose_data.right.GetValidity(SingleEyeDataValidity
56 | .SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY))
57 | {
58 | dilation = eyeData.verbose_data.right.pupil_diameter_mm;
59 | UpdateMinMaxDilation(eyeData.verbose_data.right.pupil_diameter_mm);
60 | }
61 | else if (eyeData.verbose_data.left.GetValidity(SingleEyeDataValidity
62 | .SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY))
63 | {
64 | dilation = eyeData.verbose_data.left.pupil_diameter_mm;
65 | UpdateMinMaxDilation(eyeData.verbose_data.left.pupil_diameter_mm);
66 | }
67 |
68 | Left.Update(eyeData.verbose_data.left, eyeData.expression_data.left);
69 | Right.Update(eyeData.verbose_data.right, eyeData.expression_data.right);
70 |
71 | Combined.Update(eyeData.verbose_data.combined.eye_data);
72 | // Fabricate missing combined eye data
73 | Combined.Widen = (Left.Widen + Right.Widen) / 2;
74 | Combined.Squeeze = (Left.Squeeze + Right.Squeeze) / 2;
75 |
76 | if (dilation != 0)
77 | {
78 | EyesDilation = (dilation - _minDilation) / (_maxDilation - _minDilation);
79 | EyesPupilDiameter = dilation > 10 ? 1 : dilation / 10;
80 | }
81 | }
82 |
83 | private void UpdateMinMaxDilation(float readDilation)
84 | {
85 | if (readDilation > _maxDilation)
86 | _maxDilation = readDilation;
87 | if (readDilation < _minDilation)
88 | _minDilation = readDilation;
89 | }
90 |
91 | public void ResetThresholds()
92 | {
93 | _maxDilation = 0;
94 | _minDilation = 999;
95 | }
96 | }
97 |
    /// <summary>
    /// Unified per-frame lip tracking state: the latest blendshape weights and
    /// an optional camera image.
    /// </summary>
    public class LipTrackingData
    {
        // Camera Data
        public (int x, int y) ImageSize;
        public byte[] ImageData;
        public bool SupportsImage;

        // Latest per-shape blendshape weights copied out of the native prediction.
        public float[] LatestShapes = new float[SRanipal_Lip_v2.WeightingCount];

        /// <summary>
        /// Copies the blendshape weights from the native SRanipal lip prediction
        /// into LatestShapes.
        /// </summary>
        public void UpdateData(LipData_v2 lipData)
        {
            unsafe
            {
                // blend_shape_weight is a fixed-size buffer in a native struct,
                // hence the unsafe element-by-element copy.
                for (int i = 0; i < SRanipal_Lip_v2.WeightingCount; i++)
                    LatestShapes[i] = lipData.prediction_data.blend_shape_weight[i];
            }
        }
    }
116 |
    /// <summary>
    /// Static hub holding the latest eye/lip tracking data and the full set of
    /// OSC parameters built from them.
    /// </summary>
    public class UnifiedTrackingData
    {
        // Every parameter the app can drive: eye parameters plus merged lip parameters.
        public static readonly IParameter[] AllParameters = EyeTrackingParams.ParameterList.Union(LipShapeMerger.AllLipParameters).ToArray();

        // Central update action for all parameters to subscribe to
        // NOTE(review): the delegate's generic type arguments appear to have been
        // stripped in this dump (a source line is also missing here) — confirm the
        // intended signature against upstream before relying on it.
        public static Action OnUnifiedDataUpdated;

        public static EyeTrackingData LatestEyeData = new EyeTrackingData();
        public static LipTrackingData LatestLipData = new LipTrackingData();
    }
128 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/Utils.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Runtime.InteropServices;
5 | using System.Security;
6 | using System.Security.Principal;
7 |
8 | namespace VRCFaceTracking
9 | {
10 | public static class Utils
11 | {
12 | // Timer resolution helpers
13 | [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Interoperability", "CA1401:PInvokesShouldNotBeVisible"), System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2118:ReviewSuppressUnmanagedCodeSecurityUsage"), SuppressUnmanagedCodeSecurity]
14 | [DllImport("winmm.dll", EntryPoint = "timeBeginPeriod", SetLastError = true)]
15 | public static extern uint TimeBeginPeriod(uint uMilliseconds);
16 |
17 | [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Interoperability", "CA1401:PInvokesShouldNotBeVisible"), System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2118:ReviewSuppressUnmanagedCodeSecurityUsage"), SuppressUnmanagedCodeSecurity]
18 | [DllImport("winmm.dll", EntryPoint = "timeEndPeriod", SetLastError = true)]
19 | public static extern uint TimeEndPeriod(uint uMilliseconds);
20 |
21 | // Proc memory read helpers
22 | public const int PROCESS_VM_READ = 0x0010;
23 |
24 | [DllImport("kernel32.dll")]
25 | public static extern IntPtr OpenProcess(int dwDesiredAccess, bool bInheritHandle, int dwProcessId);
26 |
27 | [DllImport("kernel32.dll")]
28 | public static extern bool ReadProcessMemory(int hProcess, IntPtr lpBaseAddress, byte[] lpBuffer, int dwSize, ref int lpNumberOfBytesRead);
29 |
30 | public static readonly bool HasAdmin =
31 | new WindowsPrincipal(WindowsIdentity.GetCurrent()).IsInRole(WindowsBuiltInRole.Administrator);
32 |
33 | public static readonly string PersistentDataDirectory = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "VRCFaceTracking");
34 |
35 | public static readonly Dictionary TypeConversions =
36 | new Dictionary
37 | {
38 | {typeof(bool), ('F', "Bool")},
39 | {typeof(float), ('f', "Float")},
40 | {typeof(int), ('i', "Int")},
41 | };
42 | }
43 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/VRCFaceTracking.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Debug
6 | AnyCPU
7 | {0767C09E-D536-464B-B0A1-46D5BFA19E98}
8 | WinExe
9 | Properties
10 | VRCFaceTracking
11 | VRCFaceTracking
12 | v4.7.2
13 | {60dc8134-eba5-43b8-bcc9-bb4bc16c2548};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
14 | 512
15 |
16 |
17 | x64
18 | true
19 | full
20 | false
21 | bin\Debug\
22 | DEBUG;TRACE
23 | prompt
24 | 4
25 | true
26 | false
27 |
28 |
29 | pdbonly
30 | true
31 | TRACE
32 | prompt
33 | 4
34 | true
35 | bin\Release\
36 | x64
37 | false
38 |
39 |
40 | Assets/Images/VRCFT.ico
41 |
42 |
43 |
44 |
45 | App.xaml
46 | Code
47 |
48 |
49 | MainWindow.xaml
50 | Code
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 | ..\packages\Microsoft.Bcl.AsyncInterfaces.7.0.0-preview.1.22076.8\lib\net462\Microsoft.Bcl.AsyncInterfaces.dll
97 | True
98 |
99 |
100 |
101 |
102 |
103 |
104 | ..\packages\System.Buffers.4.5.1\lib\net461\System.Buffers.dll
105 | True
106 |
107 |
108 |
109 |
110 | ..\packages\System.Memory.4.5.4\lib\net461\System.Memory.dll
111 | True
112 |
113 |
114 | ..\..\..\..\..\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.2\System.Numerics.dll
115 |
116 |
117 | ..\packages\System.Numerics.Vectors.4.5.0\lib\net46\System.Numerics.Vectors.dll
118 | True
119 |
120 |
121 | ..\packages\System.Runtime.CompilerServices.Unsafe.7.0.0-preview.1.22076.8\lib\net462\System.Runtime.CompilerServices.Unsafe.dll
122 | True
123 |
124 |
125 | ..\packages\System.Text.Encodings.Web.7.0.0-preview.1.22076.8\lib\net462\System.Text.Encodings.Web.dll
126 | True
127 |
128 |
129 | ..\packages\System.Text.Json.7.0.0-preview.1.22076.8\lib\net462\System.Text.Json.dll
130 | True
131 |
132 |
133 | ..\packages\System.Threading.Tasks.Extensions.4.5.4\lib\net461\System.Threading.Tasks.Extensions.dll
134 | True
135 |
136 |
137 | ..\packages\System.ValueTuple.4.5.0\lib\net47\System.ValueTuple.dll
138 | True
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 | MSBuild:Compile
157 | Designer
158 |
159 |
160 |
161 |
162 | MSBuild:Compile
163 | Designer
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 | This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105.The missing file is {0}.
174 |
175 |
176 |
177 |
178 |
179 |
180 | %(ReferenceCopyLocalPaths.DestinationSubDirectory)%(ReferenceCopyLocalPaths.Filename)%(ReferenceCopyLocalPaths.Extension)
181 |
182 |
183 |
184 |
191 |
--------------------------------------------------------------------------------
/VRCFaceTracking/VRChat.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using System.IO;
4 | using System.Linq;
5 | using Microsoft.Win32;
6 |
7 | namespace VRCFaceTracking
8 | {
9 | public static class VRChat
10 | {
11 | public static readonly string VRCData = Path.Combine(Environment
12 | .GetFolderPath(Environment.SpecialFolder.ApplicationData).Replace("Roaming", "LocalLow"), "VRChat\\VRChat");
13 |
14 | public static readonly string VRCOSCDirectory = Path.Combine(VRCData, "OSC");
15 |
16 | public static bool ForceEnableOsc()
17 | {
18 | // Set all registry keys containing osc in the name to 1 in Computer\HKEY_CURRENT_USER\Software\VRChat\VRChat
19 | var regKey = Registry.CurrentUser.OpenSubKey("Software\\VRChat\\VRChat", true);
20 | if (regKey == null)
21 | return true; // Assume we already have osc enabled
22 |
23 | var keys = regKey.GetValueNames().Where(x => x.ToLower().Contains("osc"));
24 |
25 | bool wasOscForced = false;
26 | foreach (var key in keys)
27 | {
28 | if ((int) regKey.GetValue(key) == 0)
29 | {
30 | // Osc is likely not enabled
31 | regKey.SetValue(key, 1);
32 | wasOscForced = true;
33 | }
34 | }
35 |
36 | return wasOscForced;
37 | }
38 |
39 | public static bool IsVRChatRunning() => Process.GetProcesses().Any(x => x.ProcessName == "VRChat");
40 | }
41 | }
--------------------------------------------------------------------------------
/VRCFaceTracking/packages.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------