├── .DS_Store ├── LICENSE ├── README.md ├── data ├── .DS_Store ├── generic_blendShapes │ ├── Neutral.obj │ ├── browDown_L.obj │ ├── browDown_R.obj │ ├── browInnerUp.obj │ ├── browOuterUp_L.obj │ ├── browOuterUp_R.obj │ ├── cheekPuff.obj │ ├── cheekSquint_L.obj │ ├── cheekSquint_R.obj │ ├── eyeBlink_L.obj │ ├── eyeBlink_R.obj │ ├── eyeLookDown_L.obj │ ├── eyeLookDown_R.obj │ ├── eyeLookIn_L.obj │ ├── eyeLookIn_R.obj │ ├── eyeLookOut_L.obj │ ├── eyeLookOut_R.obj │ ├── eyeLookUp_L.obj │ ├── eyeLookUp_R.obj │ ├── eyeSquint_L.obj │ ├── eyeSquint_R.obj │ ├── eyeWide_L.obj │ ├── eyeWide_R.obj │ ├── jawForward.obj │ ├── jawLeft.obj │ ├── jawOpen.obj │ ├── jawRight.obj │ ├── mouthClose.obj │ ├── mouthDimple_L.obj │ ├── mouthDimple_R.obj │ ├── mouthFrown_L.obj │ ├── mouthFrown_R.obj │ ├── mouthFunnel.obj │ ├── mouthLeft.obj │ ├── mouthLowerDown_L.obj │ ├── mouthLowerDown_R.obj │ ├── mouthPress_L.obj │ ├── mouthPress_R.obj │ ├── mouthPucker.obj │ ├── mouthRight.obj │ ├── mouthRollLower.obj │ ├── mouthRollUpper.obj │ ├── mouthShrugLower.obj │ ├── mouthShrugUpper.obj │ ├── mouthSmile_L.obj │ ├── mouthSmile_R.obj │ ├── mouthStretch_L.obj │ ├── mouthStretch_R.obj │ ├── mouthUpperUp_L.obj │ ├── mouthUpperUp_R.obj │ ├── noseSneer_L.obj │ └── noseSneer_R.obj └── training_poses │ ├── .DS_Store │ ├── 0.obj │ ├── 1.obj │ ├── 2.obj │ └── 3.obj ├── ebr.ipynb ├── images └── ebr_flow.png ├── landmarks ├── .DS_Store ├── LICT_narrow_r.py ├── __init__.py └── __pycache__ │ ├── LICT_narrow_r.cpython-37.pyc │ └── __init__.cpython-37.pyc └── local_packages ├── .DS_Store ├── ExampleBasedRigging.py ├── __init__.py ├── __pycache__ ├── ExampleBasedRigging.cpython-37.pyc ├── __init__.cpython-37.pyc └── tools3d_.cpython-37.pyc └── tools3d_.py /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/.DS_Store -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 vasiliskatr 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # example_based_facial_rigging_ARkit_blendshapes
2 | 
3 | 
4 | Implementation of the Example-based facial rigging paper (https://lgg.epfl.ch/publications/2010/siggraph2010EBFR.pdf).
5 | Given a set of generic blend shapes and a new face with a small number of scanned training poses (unconstrained expressions), the algorithm progressively deforms the generic blend shapes so that they can reproduce the training poses optimally during facial animation. In other words, the algorithm personalises the generic blend shapes to match the real expressions of any given face.
6 | 
7 | 
8 | * All input data must share the same topology (same mesh). If you have a target face in a different topology from the generic blend shapes, use deformation transfer (https://github.com/vasiliskatr/deformation_transfer_ARkit_blendshapes) to create a set of generic blend shapes in the target topology.
9 | 
10 | The meshes in the data folder are for demonstration purposes only and originate from open-source projects (target face mesh taken from https://github.com/ICT-VGL/ICT-FaceKit, source ARkit blend shapes and meshes taken from http://blog.kiteandlightning.la/iphone-x-facial-capture-apple-blendshapes/).
11 | 
12 | 
13 | ![Example-based rigging pipeline](https://github.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/blob/main/images/ebr_flow.png?raw=true)
14 | 
15 | 
16 | ## Dependencies
17 | * numpy
18 | * scipy
19 | * numba
20 | * qpsolvers
21 | * plotly
22 | * pickle
23 | * tqdm
24 | 
--------------------------------------------------------------------------------
/data/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/data/.DS_Store
--------------------------------------------------------------------------------
/data/training_poses/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/data/training_poses/.DS_Store
--------------------------------------------------------------------------------
/ebr.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "code",
5 |    "execution_count": null,
6 |    "metadata": {},
7 |    "outputs": [],
8 |    "source": [
9 |     "import time\n",
10 |     "import os\n",
11 |     "import local_packages.ExampleBasedRigging as ebr\n",
12 |     "import local_packages.tools3d_ as t3d\n",
13 |     "\n",
14 |     "import landmarks.LICT_narrow_r as LICT_narrow\n",
15 |     "import numpy as np\n",
16 |     "import scipy.sparse as sp\n",
17 |     "from qpsolvers import solve_qp"
18 |    ]
19 |   },
20 |   {
21 |    "cell_type": "markdown",
22 |    "metadata": {},
23 |    "source": [
24 |     "# Implementation of Example-based facial rigging\n",
25 |     "(https://lgg.epfl.ch/publications/2010/siggraph2010EBFR.pdf)\n",
26 |     "\n",
27 |     "Given a set of generic blend shapes and a new face with a small number of scanned training poses (unconstrained expressions), the algorithm progressively deforms the generic blend shapes so that they can reproduce the training poses optimally during facial animation. In other words, the algorithm personalises the generic blend shapes to match the real expressions of any given face. \n",
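    "\n",
    "The notebook works with a delta blend shape representation: `B_BS_model` holds per-shape vertex offsets (deltas) and `B_0` is the personalised neutral, so a pose with weights $\\alpha$ is reconstructed as\n",
    "\n",
    "$$B(\\alpha) = B_0 + \\sum_{i=1}^{n} \\alpha_i \\, \\Delta B_i, \\qquad 0 \\le \\alpha_i \\le 1 .$$\n",
    "\n",
    "The optimisation below alternates between solving for the blend shape deltas $\\Delta B_i$ with the weights held fixed (Part A, per-triangle local frames) and solving for the weights $\\alpha$ with the blend shapes held fixed (Part B, a bound-constrained quadratic programme).\n",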
28 |     "\n",
29 |     "\n",
30 |     "* All input data must share the same topology (same mesh). If you have a target face in a different topology from the generic blend shapes, use deformation transfer (https://github.com/vasiliskatr/deformation_transfer_ARkit_blendshapes) to create a set of generic blend shapes in the target topology.\n"
31 |    ]
32 |   },
33 |   {
34 |    "cell_type": "code",
35 |    "execution_count": null,
36 |    "metadata": {
37 |     "scrolled": false
38 |    },
39 |    "outputs": [],
40 |    "source": [
41 |     "# # # # # # # Example Based Rigging # # # # # # # \n",
42 |     " \n",
43 |     "# Get the landmarks which remain unaffected across different facial expressions.\n",
44 |     "# This will enable the best alignment between meshes with variable expressions. \n",
45 |     "skull_landmaks_target = LICT_narrow.LM[13::]\n",
46 |     " \n",
47 |     "\n",
48 |     "print('Starting example based rigging')\n",
49 |     "start_ebr = time.time()\n",
50 |     " \n",
51 |     "# Parameters chosen according to the original paper - refer to it for more details\n",
52 |     "kappa = 0.1\n",
53 |     "theta = 2\n",
54 |     "n_iterations = 3\n",
55 |     "distribution = np.flip(np.logspace(0.1, 1, n_iterations, endpoint=True))\n",
56 |     "beta = t3d.normalise (distribution, 0.02, 0.09)\n",
57 |     "gamma = t3d.normalise (distribution, 80, 1000)\n",
58 |     " \n",
59 |     " \n",
60 |     "# # # # # # # \n",
61 |     "objpath_training_poses = 'data/training_poses/'\n",
62 |     "objpath_target_neutral = 'data/training_poses/0.obj'\n",
63 |     "objpath_personalised_bs = 'personalised_blendshapes/' \n",
64 |     "\n",
65 |     "# # # # # # # # # # # # # # Create matrices and structures before the main loop\n",
66 |     "B_0, _, _, _ = t3d.Read(objpath_target_neutral, QuadMode = True)\n",
67 |     "A_BS_model, B_BS_model, A_0, faces, n, bs_names = ebr.reading_generic_bs('data/generic_blendShapes/')\n",
68 |     "n_vertices = A_0.shape[1]\n",
69 |     "tri = faces.T \n",
70 |     "num_triangles = tri.shape[0]\n",
71 |     "S_training_poses, m = ebr.reading_training_data(objpath_training_poses)\n",
72 |     "# Align all training poses to the neutral pose using 'skull' landmarks \n",
73 |     "for i in range (len(S_training_poses)):\n",
74 |     "    S_training_poses[i] = t3d.align_target_to_source(S_training_poses[i], faces, skull_landmaks_target, B_0, faces, skull_landmaks_target) \n",
75 |     " \n",
76 |     "Alpha_star = ebr.blend_shape_weights(A_0, B_0, A_BS_model, S_training_poses)\n",
77 |     "A_0 = A_0.T\n",
78 |     "B_0 = B_0.T\n",
79 |     "\n",
80 |     "A_BS_model = ebr.columnise(A_BS_model)\n",
81 |     "A_BS_model = np.asarray(A_BS_model)\n",
82 |     "\n",
83 |     "S_training_poses = ebr.columnise(S_training_poses) \n",
84 |     "M_A_star_f = ebr.make_M_A_star_fast(tri, A_0, B_0, A_BS_model)\n",
85 |     "W_seed_f = ebr.make_W_seed_fast(tri, A_BS_model, kappa, theta)\n",
86 |     "M_S_minus_M_B_0_f, M_B_0_f, M_S_f = ebr.make_M_S_minus_M_B_0_fast(S_training_poses, B_0, tri)\n",
87 |     "A_sparse_recon = ebr.make_A_sparse_reconstruction(tri, n_vertices)\n",
88 |     " \n",
89 |     "Alpha_optimum = Alpha_star.copy()\n",
90 |     "\n",
91 |     "# Main loop\n",
92 |     "for opt_iteration in range(n_iterations):\n",
93 |     "    \n",
94 |     "    print('\\nOptimization Step: ' + str(opt_iteration))\n",
95 |     "    print('Part A:')\n",
96 |     "    print('Calculating new triangle local frames...')\n",
97 |     "    start_temp = time.time()\n",
98 |     "    I = np.eye(3)\n",
99 |     "    A = sp.kron(Alpha_optimum, I)\n",
100 |     "    M_B = np.zeros((n*3, 2*num_triangles))\n",
101 |     "    M_B = ebr.lf_optimisation
(num_triangles, A.A, M_S_minus_M_B_0_f, M_B, M_A_star_f, beta, gamma, W_seed_f, opt_iteration, n, m)\n", 102 | " print (\"...done in \",(time.time() - start_temp), \"sec\") \n", 103 | " print('\\nReconstructing vertex positions of unknown blendshapes from M_B...')\n", 104 | " #RECONSTRUCTION\n", 105 | " start = time.time()\n", 106 | " reconstruction = [ebr.recon(M_B, A_sparse_recon, n_vertices, num_triangles, i) for i in range(n)]\n", 107 | " for f in reconstruction:\n", 108 | " idx = f[3]\n", 109 | " B_BS_model[idx][0, :] = f[0]\n", 110 | " B_BS_model[idx][1, :] = f[1]\n", 111 | " B_BS_model[idx][2, :] = f[2]\n", 112 | " \n", 113 | " print (\"done in \",(time.time() - start), \"sec\") \n", 114 | " print('\\nPart B:')\n", 115 | " print('Optimising blend shape weights...')\n", 116 | " # Step B - Hold blendshapes constant and solve for optimum weights\n", 117 | " start_temp = time.time()\n", 118 | " B_BS_model = np.asarray(B_BS_model)\n", 119 | " B_All = B_BS_model.reshape(n, n_vertices*3)\n", 120 | " B_All = B_All.T\n", 121 | " for i in range(m): \n", 122 | " Sj_minus_B0 = (S_training_poses[i]-B_0).T.reshape(n_vertices*3, 1)\n", 123 | " qp_P = 2 * (B_All.T @ B_All + gamma[opt_iteration] * np.identity(n))\n", 124 | " qp_q = -2 * (Sj_minus_B0.T @ B_All+ gamma[opt_iteration] * Alpha_star[i, :]).flatten()\n", 125 | " qp_lb = np.zeros(n)\n", 126 | " qp_ub = np.ones(n)\n", 127 | " Alpha_optimum_temp = solve_qp(P=qp_P, q=qp_q, lb=qp_lb, ub=qp_ub) \n", 128 | " Alpha_optimum[i, :] = Alpha_optimum_temp\n", 129 | " print (\"...done in \",(time.time() - start_temp), \"sec\") \n", 130 | " Exp1 = B_0 + B_BS_model[13].T\n", 131 | " t3d.ShowDeltaGrad(B_0.T,Exp1.T, faces)\n", 132 | "end = time.time()\n", 133 | "print(end-start)\n", 134 | "\n", 135 | "# save generated bs\n", 136 | "if not os.path.exists(objpath_personalised_bs):\n", 137 | " os.makedirs(objpath_personalised_bs)\n", 138 | "i =0\n", 139 | "for delta in B_BS_model:\n", 140 | " blend_shape = delta + B_0.T\n", 141 | " blend_shape = t3d.align_target_to_source(blend_shape, faces, skull_landmaks_target,B_0.T, faces, skull_landmaks_target)\n", 142 | " t3d.SaveObj(blend_shape, faces, objpath_target_neutral, save_destination = objpath_personalised_bs + bs_names[i] +'.obj' , CM=True)\n", 143 | " i = i+1\n", 144 | " \n", 145 | "print (\" All done in \",(time.time() - start_ebr), \" sec\") \n", 146 | "\n" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [] 155 | } 156 | ], 157 | "metadata": { 158 | "kernelspec": { 159 | "display_name": "carv3d1", 160 | "language": "python", 161 | "name": "carv3d1" 162 | }, 163 | "language_info": { 164 | "codemirror_mode": { 165 | "name": "ipython", 166 | "version": 3 167 | }, 168 | "file_extension": ".py", 169 | "mimetype": "text/x-python", 170 | "name": "python", 171 | "nbconvert_exporter": "python", 172 | "pygments_lexer": "ipython3", 173 | "version": "3.7.9" 174 | } 175 | }, 176 | "nbformat": 4, 177 | "nbformat_minor": 4 178 | } 179 | -------------------------------------------------------------------------------- /images/ebr_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/images/ebr_flow.png -------------------------------------------------------------------------------- /landmarks/.DS_Store: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/landmarks/.DS_Store -------------------------------------------------------------------------------- /landmarks/LICT_narrow_r.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | LM = np.zeros(18).astype(int) 3 | ### Indices for key landmarks 4 | LM[0] = 5069 #lm_nose_tip 5 | 6 | LM[1] = 3888#lm_right_eye_outer 3888 7 | LM[2] = 3621 #lm_right_eye_inner (tears) 3621 8 | 9 | LM[3] = 1244 #lm_left_eye_inner (tears) 1244 10 | LM[4] = 2023 #lm_left_eye_outer 2023 11 | 12 | LM[5] = 6128#lm_mouth_R 13 | LM[6] = 5567 #lm_mouth_L 5567 14 | 15 | LM[7] = 6323#lm_mouth_middle_Top_T 16 | LM[8] = 6248 #lm_mouth_middle_Top_B 17 | 18 | LM[9] = 6399 #lm_mouth_middle_Bottom_T 19 | LM[10] = 6414 #lm_mouth_middle_Bottom_B 20 | 21 | LM[11] = 3138#center_chin 22 | 23 | # Use edge vertices for reigid alignment (the less the better) 24 | LM[12] = 2141 # top_of_mask_mid (edge vertice) 25 | LM[13] = 1844 # mask_edge_Right_eyeheight (edge vertice) 26 | LM[14] = 4063 # mask_edge_left_eyeheight (edge vertice) 27 | LM[15] = 829# mask_edge_right_beloweyeheight (edge vertice) 28 | LM[16] = 3083# mask_edge_left_beloweyeheight (edge vertice) 29 | LM[17] = 2003# bottom_mask_neck)mid (edge vertice) 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /landmarks/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /landmarks/__pycache__/LICT_narrow_r.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/landmarks/__pycache__/LICT_narrow_r.cpython-37.pyc -------------------------------------------------------------------------------- /landmarks/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/landmarks/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /local_packages/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/local_packages/.DS_Store -------------------------------------------------------------------------------- /local_packages/ExampleBasedRigging.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import os 3 | import time 4 | import local_packages.tools3d_ as t3d 5 | from scipy.optimize import lsq_linear 6 | from IPython.display import clear_output 7 | from scipy.sparse import csr_matrix 8 | import scipy.sparse as sp 9 | from scipy.sparse.linalg import spsolve 10 | from qpsolvers import solve_qp 11 | import numba 12 | from numba import jit 13 | from numba.core.extending import overload 14 | from numba.np.linalg import norm_impl 15 | from numba.core.errors import NumbaDeprecationWarning, 
NumbaPendingDeprecationWarning, NumbaPerformanceWarning
16 | import warnings
17 | warnings.simplefilter('ignore', category=NumbaDeprecationWarning)
18 | warnings.simplefilter('ignore', category=NumbaPendingDeprecationWarning)
19 | warnings.simplefilter('ignore', category=NumbaPerformanceWarning)
20 | from tqdm import tqdm
21 | 
22 | 
23 | 
24 | def reading_generic_bs (objpath_generic_bs):
25 |     A_bs_model = []    # delta blend shapes of the generic (template) model
26 |     B_bs_model = []    # delta blend shapes of the target (actor) model, filled in later
27 |     bs_name_list = []  # names of the imported blend shapes
28 |     A_0, faces, _, _ = t3d.Read(objpath_generic_bs + 'Neutral.obj', QuadMode = True)
29 |     n_vertices = A_0.shape[1]
30 |     #A_0 = tools_3d.center(A_0)
31 |     generic_bs_data = os.scandir(objpath_generic_bs)
32 |     for generic_bs in tqdm(generic_bs_data, unit=' files', desc="Loading generic blend shapes"):
33 |         name, ext = os.path.splitext(generic_bs)
34 |         name_s = name.split("/")
35 | 
36 |         if ext == '.obj' and 'neutral' not in name.lower():  # read only the .obj files and skip the neutral pose ('Neutral.obj', already read as A_0)
37 |             temp_vertices, _, _, _ = t3d.Read(name+ext, QuadMode = True)
38 |             A_bs_vertices = temp_vertices - A_0
39 |             A_bs_model.append(A_bs_vertices)
40 |             B_bs_model.append(np.zeros((3, n_vertices)))
41 |             bs_name_list.append(name_s[-1])
42 | 
43 |     n = len(A_bs_model)
44 |     print ('Generic model of n = ' + str(len(A_bs_model)) + ' blend shapes imported (+1 neutral pose)')
45 |     return A_bs_model, B_bs_model, A_0, faces, n, bs_name_list
46 | 
47 | 
48 | 
49 | def reading_training_data(objpath_training_poses):
50 |     S_training_poses = []  # variable to store the training poses
51 |     training_pose_data = os.scandir(objpath_training_poses)
52 | 
53 |     for training_pose in tqdm(training_pose_data, unit=' files', desc="Loading poses"):
54 |         name, ext = os.path.splitext(training_pose)
55 | 
56 |         if ext == '.obj':  # read only the .obj files from the source directory
57 |             temp_vertices, _, _, _ = t3d.Read(name+ext, QuadMode = True)
58 | 
59 |             S_training_poses.append(temp_vertices)
60 | 
61 |     m = len(S_training_poses)
62 |     print ('m = ' + str(m)+' training poses in total (+ 1 neutral)')
63 |     return S_training_poses, m
64 | 
65 | 
66 | 
67 | def blend_shape_weights(A_0, B_0, A_BS_model, S_training_poses):
68 |     # initial blend shape weight guess for each training pose
69 | 
70 |     start_time = time.time()
71 |     print ('Computing initial blend-shape weight guess for the training poses')
72 |     n = len(A_BS_model)
73 |     m = len(S_training_poses)
74 |     n_vertices = A_0.shape[1]
75 | 
76 |     Alpha_star = np.zeros((m, n))  # initial guess of the blend shape weights
77 | 
78 |     A_All = A_BS_model[0].T.flatten().reshape(n_vertices*3, 1)
79 | 
80 |     for i in range(1,n):
81 |         A_All = np.concatenate((A_All, A_BS_model[i].T.flatten().reshape(n_vertices*3, 1)), axis=1)
82 | 
83 |     for i in tqdm(range(m), unit=' pose', desc='Guessing weights'):
84 |         B_temp = (S_training_poses[i] - B_0).T.flatten().reshape(n_vertices*3)
85 |         weights_temp = lsq_linear(A_All, B_temp, bounds = (0, 1), lsmr_tol='auto', verbose=0)
86 |         Alpha_star[i, :] = weights_temp.x.reshape(1, n)
87 | 
88 |     print ("done in ",(time.time() - start_time), "sec")
89 |     return Alpha_star
90 | 
91 | 
92 | 
93 | 
94 | def columnise(model):
95 |     for i in range(0, len(model)):
96 |         model[i] = model[i].T
97 | 
98 |     return model
99 | 
100 | 
101 | @jit(nopython=True, parallel=True)
102 | def local_tri_frame_fast(vertices, triangles, tri_index):
103 | 
104 |     tri_vertices = vertices[triangles[tri_index, :], :]
105 | 
106 |     LF = np.zeros((3,3))
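    # Local frame of triangle `tri_index` (3x3): the columns are the two in-plane
    # edge vectors (v3 - v1 and v2 - v1) and their cross product, an unnormalised
    # triangle normal. Comparing these per-triangle frames between meshes of the
    # same topology is what the deformation-gradient style products in
    # make_M_A_star_fast (e.g. M_A_sum @ M_A_0_inv @ M_B_0) are built from.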
107 | 108 | v1 = tri_vertices[0, :] 109 | v2 = tri_vertices[1, :] 110 | v3 = tri_vertices[2, :] 111 | 112 | LF[:,0] = (v3-v1) # v3-v1 113 | LF[:,1] = (v2-v1) # v2-v1 114 | LF[:,2] = (np.cross((v3-v1),(v2-v1))) # n 115 | 116 | return LF 117 | 118 | 119 | @jit(nopython=True, parallel=True) 120 | def compute_lf_fast (vertices, triangles): 121 | 122 | lf = np.zeros((len(triangles)*3, 3)) 123 | for i in numba.prange(len(triangles)): 124 | lf[i*3:i*3+3]= local_tri_frame_fast(vertices, triangles, i) 125 | 126 | return lf 127 | 128 | 129 | @jit(nopython=True, parallel=True) 130 | def compute_lf_inverse_fast(vertices, triangles): 131 | 132 | lf_inv = np.zeros((len(triangles)*3, 3)) 133 | for i in numba.prange(len(triangles)): 134 | lf_inv[i*3:i*3+3] = np.linalg.inv(local_tri_frame_fast(vertices, triangles, i)) 135 | return lf_inv 136 | 137 | 138 | @jit(nopython=True, parallel=True) 139 | def make_M_S_minus_M_B_0_fast(S_training_poses, B_0, triangles): 140 | 141 | m = len(S_training_poses) 142 | M_B_0 = compute_lf_fast(B_0, triangles) 143 | M_S_minus_M_B_0 = np.empty((m, len(triangles)*3, 3)) 144 | M_S = np.empty((m, len(triangles)*3, 3)) 145 | 146 | for s in numba.prange(m): 147 | M_S_temp = compute_lf_fast(S_training_poses[s], triangles) 148 | M_S_minus_M_B_0[s] = M_S_temp - M_B_0 149 | M_S[s] = M_S_temp 150 | 151 | return M_S_minus_M_B_0 , M_B_0, M_S 152 | 153 | 154 | @jit(nopython=True, parallel=True) 155 | def make_W_seed_fast(triangles, A_BS_model, kappa, theta): 156 | n = len(A_BS_model) 157 | W_seed = np.empty((n, len(triangles))) 158 | for i in numba.prange(n): 159 | M_A_i = compute_lf_fast(A_BS_model[i], triangles) 160 | 161 | for j in numba.prange(len(triangles)): 162 | lf_tri_norm = np.linalg.norm(M_A_i[j*3:j*3+3,:]) 163 | W_seed[i,j] = (1 + lf_tri_norm)/np.power((kappa + lf_tri_norm), theta) 164 | 165 | return W_seed 166 | 167 | 168 | @jit(nopython=True, parallel=True) 169 | def make_M_A_star_fast(triangles, A_0, B_0, A_BS_model): 170 | n = len(A_BS_model) 171 | M_A_star = np.empty((n, len(triangles)*3, 3)) 172 | M_A_0_inv = compute_lf_inverse_fast(A_0, triangles) 173 | M_A_0 = compute_lf_fast(A_0, triangles) 174 | M_B_0 = compute_lf_fast(B_0, triangles) 175 | 176 | for i in numba.prange(n): 177 | M_A_i = compute_lf_fast(A_BS_model[i], triangles) 178 | M_A_sum = M_A_0 + M_A_i 179 | 180 | for j in numba.prange(len(triangles)): 181 | M_A_star[i][j*3:j*3+3] = ((M_A_sum[j*3:j*3+3] @ M_A_0_inv[j*3:j*3+3]) @ M_B_0[j*3:j*3+3]) - M_B_0[j*3:j*3+3] 182 | 183 | return M_A_star 184 | 185 | 186 | 187 | # Parallel version lf optimisation 188 | @jit(nopython=True, parallel=True) 189 | def lf_optimisation (num_triangles, A, M_S_minus_M_B_0, M_B, M_A_star, beta, gamma, W_seed, opt_iteration, n, m): 190 | 191 | for tri_index in numba.prange(num_triangles): 192 | # Constructing Bfit 193 | B_fit = np.zeros((n*3,3)) 194 | B_fit = A.T @ M_S_minus_M_B_0[:,tri_index*3:tri_index*3+3,:].copy().reshape(m*3,3) 195 | 196 | # Constructing W 197 | dia = [[i,i,i] for i in W_seed[:,tri_index]] 198 | dia = np.asarray(dia) 199 | dia = dia.flatten() 200 | W = np.diag(dia, 0) 201 | M_A_starr = M_A_star[:,tri_index*3:tri_index*3+3,:].copy().reshape(n*3,3) 202 | A_sum = A.T @ A + beta[opt_iteration] * (W.T @ W) 203 | B_sum = B_fit + beta[opt_iteration] * (W.T @ (W @ M_A_starr)) 204 | M_B_tri = np.linalg.solve(A_sum, B_sum[:,0:2]) #.copy() 205 | M_B[:, tri_index*2:tri_index*2+2] = M_B_tri #.copy() 206 | 207 | return M_B 208 | 209 | def make_A_sparse_reconstruction (triangles, n_vertices): 210 | row = [] 211 | col = [] 212 | 
data = [] 213 | 214 | for j in range(len(triangles)): 215 | tri_indices = triangles[j] 216 | 217 | row.append(j*2) 218 | col.append(tri_indices[2]) 219 | data.append(1) 220 | 221 | row.append(j*2) 222 | col.append(tri_indices[0]) 223 | data.append(-1) 224 | 225 | row.append(j*2+1) 226 | col.append(tri_indices[1]) 227 | data.append(1) 228 | 229 | row.append(j*2+1) 230 | col.append(tri_indices[0]) 231 | data.append(-1) 232 | 233 | row = np.asarray(row) 234 | col = np.asarray(col) 235 | data = np.asarray(data) 236 | 237 | ########### I removed the consideration of zero-deformation vertices. 238 | ########### There is no drifting in the reconstruction even without it. 239 | A_sparse = csr_matrix((data, (row, col)), shape=(triangles.shape[0]*2, n_vertices)) 240 | return A_sparse 241 | 242 | 243 | def recon(M_B, A_sparse_recon, n_vertices, num_triangles, i): 244 | # reconstruction of vertices 245 | 246 | B_temp_X = M_B[i*3,:].reshape(num_triangles*2, 1) 247 | X_vals = sp.linalg.lsqr(A_sparse_recon, B_temp_X)[0] 248 | B_temp_Y = M_B[i*3+1,:].reshape(num_triangles*2, 1) 249 | Y_vals = sp.linalg.lsqr(A_sparse_recon, B_temp_Y)[0] 250 | B_temp_Z = M_B[i*3+2,:].reshape(num_triangles*2, 1) 251 | Z_vals = sp.linalg.lsqr(A_sparse_recon, B_temp_Z)[0] 252 | 253 | return X_vals.reshape(1, n_vertices), Y_vals.reshape(1, n_vertices), Z_vals.reshape(1, n_vertices), i 254 | 255 | 256 | 257 | -------------------------------------------------------------------------------- /local_packages/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /local_packages/__pycache__/ExampleBasedRigging.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/local_packages/__pycache__/ExampleBasedRigging.cpython-37.pyc -------------------------------------------------------------------------------- /local_packages/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/local_packages/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /local_packages/__pycache__/tools3d_.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vasiliskatr/example_based_facial_rigging_ARkit_blendshapes/39db32431f3bab2ebb91a74560a61da1ab40af05/local_packages/__pycache__/tools3d_.cpython-37.pyc -------------------------------------------------------------------------------- /local_packages/tools3d_.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | #import plotly.express as px 4 | import plotly.graph_objects as go 5 | from plotly.subplots import make_subplots 6 | import plotly 7 | #from sklearn.neighbors import NearestNeighbors 8 | 9 | from plotly.io import write_image 10 | 11 | 12 | # Reads an obj file and returns vertex coordinates [x,y,z], number of vertices n and mesh triangle coordinates [i,j,k] 13 | def Read(filename, flipZY = False, QuadMode = False): 14 | 15 | # Vertices 16 | x = [] 17 | y = [] 18 | z = [] 19 | 20 | # Triangle 
Indices 21 | i = [] 22 | j = [] 23 | k = [] 24 | 25 | # Texture coordinates 26 | u = [] 27 | v = [] 28 | 29 | if QuadMode==True: 30 | # Quad Indices 31 | A = [] 32 | B = [] 33 | C = [] 34 | D = [] 35 | 36 | 37 | vertex_read_done = 0 38 | 39 | with open(filename) as f: 40 | content = f.readlines() 41 | 42 | content = [x.strip() for x in content] 43 | 44 | 45 | for line in content: 46 | temp = line.split() 47 | 48 | if len(temp)>0: 49 | if temp[0] == 'v': 50 | 51 | if flipZY==False: 52 | x.append(float(temp[1])) 53 | y.append(float(temp[2])) 54 | z.append(float(temp[3])) 55 | else: 56 | x.append(float(temp[1])) 57 | y.append(float(temp[3])) 58 | z.append(float(temp[2])) 59 | 60 | 61 | elif temp[0] == 'f': 62 | 63 | if vertex_read_done == 0: 64 | vertex_texture_correspondence = np.zeros(len(x)) 65 | vertex_read_done = 1 66 | 67 | i.append(int(temp[1].split('/')[0]) - 1) 68 | j.append(int(temp[2].split('/')[0]) - 1) 69 | k.append(int(temp[3].split('/')[0]) - 1) 70 | 71 | vertex_texture_correspondence[i[-1]] = int(temp[1].split('/')[1]) - 1 72 | vertex_texture_correspondence[j[-1]] = int(temp[2].split('/')[1]) - 1 73 | vertex_texture_correspondence[k[-1]] = int(temp[3].split('/')[1]) - 1 74 | 75 | if len(temp) == 5: 76 | i.append(int(temp[1].split('/')[0]) - 1) 77 | j.append(int(temp[3].split('/')[0]) - 1) 78 | k.append(int(temp[4].split('/')[0]) - 1) 79 | 80 | vertex_texture_correspondence[k[-1]] = int(temp[4].split('/')[1]) - 1 81 | 82 | if QuadMode==True: 83 | A.append(int(temp[1].split('/')[0]) - 1) 84 | B.append(int(temp[2].split('/')[0]) - 1) 85 | C.append(int(temp[3].split('/')[0]) - 1) 86 | D.append(int(temp[4].split('/')[0]) - 1) 87 | 88 | elif temp[0] == 'vt': 89 | u.append(float(temp[1])) 90 | v.append(float(temp[2])) 91 | 92 | 93 | x = np.asarray(x) 94 | y = np.asarray(y) 95 | z = np.asarray(z) 96 | 97 | i = np.asarray(i) 98 | j = np.asarray(j) 99 | k = np.asarray(k) 100 | 101 | u = np.asarray(u) 102 | v = np.asarray(v) 103 | 104 | if QuadMode == True: 105 | A = np.asarray(A) 106 | B = np.asarray(B) 107 | C = np.asarray(C) 108 | D = np.asarray(D) 109 | nq = A.shape[0] 110 | A = A.reshape(1, nq) 111 | B = B.reshape(1, nq) 112 | C = C.reshape(1, nq) 113 | D = D.reshape(1, nq) 114 | quad = np.concatenate((A, B, C, D), axis=0) 115 | 116 | 117 | 118 | n = x.shape[0] 119 | nt = i.shape[0] 120 | nuv = u.shape[0] 121 | 122 | x = x.reshape(1, n) 123 | y = y.reshape(1, n) 124 | z = z.reshape(1, n) 125 | 126 | vertices = np.concatenate((x, y, z), axis=0) 127 | 128 | i = i.reshape(1, nt) 129 | j = j.reshape(1, nt) 130 | k = k.reshape(1, nt) 131 | 132 | tri = np.concatenate((i, j, k), axis=0) 133 | 134 | u = u.reshape(nuv, 1) 135 | v = v.reshape(nuv, 1) 136 | uv_coord = np.concatenate((u, v), axis=1) 137 | 138 | if QuadMode == False: 139 | return vertices, tri, uv_coord, vertex_texture_correspondence.astype(int), n 140 | else: 141 | return vertices, tri, quad, n 142 | 143 | # Plots a 3D scatter plot of the mesh using x,y,z 144 | def ShowScatter(vertices): 145 | 146 | x = vertices[0, :].reshape(vertices.shape[1]) 147 | y = vertices[1, :].reshape(vertices.shape[1]) 148 | z = vertices[2, :].reshape(vertices.shape[1]) 149 | 150 | df = pd.DataFrame({'x':x, 'y':y, 'z':z}) 151 | n = x.shape[0] 152 | index = np.linspace(0,n-1,n) 153 | index= index.astype(int) 154 | index= index.astype(str) 155 | 156 | #fig = px.scatter_3d(df, x='x', y='y', z='z') 157 | fig=go.Figure(data=[go.Scatter3d(x=df['x'],y=df['y'],z=df['z'],mode='markers',marker=dict(size=2), hovertext=index)]) 158 | fig.show() 159 | 160 | # 
Plots a 3D mesh using x,y,z and i,j,k 161 | def ShowMesh(vertices, triangles): 162 | 163 | x = vertices[0, :].reshape(vertices.shape[1]) 164 | y = vertices[1, :].reshape(vertices.shape[1]) 165 | z = vertices[2, :].reshape(vertices.shape[1]) 166 | 167 | i = triangles[0, :].reshape(triangles.shape[1]) 168 | j = triangles[1, :].reshape(triangles.shape[1]) 169 | k = triangles[2, :].reshape(triangles.shape[1]) 170 | 171 | n = x.shape[0] 172 | index = np.linspace(0,n-1,n) 173 | index= index.astype(int) 174 | index= index.astype(str) 175 | 176 | fig = go.Figure(data=[ 177 | go.Mesh3d( 178 | x=x,y=y,z=z,i=i,j=j,k=k,showscale=True, hovertext = index)]) 179 | 180 | fig.show() 181 | 182 | 183 | # Show two meshes on the same figure with 2 different colours 184 | def Show2Meshes(vertices1, triangles1, vertices2, triangles2): 185 | 186 | x = np.concatenate( (vertices1[0, :], vertices2[0, :]), axis=0 ).reshape(vertices1.shape[1] + vertices2.shape[1]) 187 | y = np.concatenate( (vertices1[1, :], vertices2[1, :]), axis=0 ).reshape(vertices1.shape[1] + vertices2.shape[1]) 188 | z = np.concatenate( (vertices1[2, :], vertices2[2, :]), axis=0 ).reshape(vertices1.shape[1] + vertices2.shape[1]) 189 | 190 | triangles2 = triangles2 + vertices1.shape[1] 191 | 192 | i = np.concatenate( (triangles1[0, :], triangles2[0, :]), axis=0 ).reshape(triangles1.shape[1] + triangles2.shape[1]) 193 | j = np.concatenate( (triangles1[1, :], triangles2[1, :]), axis=0 ).reshape(triangles1.shape[1] + triangles2.shape[1]) 194 | k = np.concatenate( (triangles1[2, :], triangles2[2, :]), axis=0 ).reshape(triangles1.shape[1] + triangles2.shape[1]) 195 | 196 | n = x.shape[0] 197 | index = np.linspace(0,n-1,n) 198 | index= index.astype(int) 199 | index= index.astype(str) 200 | 201 | fig = go.Figure(data=[ 202 | go.Mesh3d( 203 | x=x,y=y,z=z,i=i,j=j,k=k,showscale=True, hovertext = index, 204 | colorscale=[[0, 'gold'], 205 | [1, 'cyan']], 206 | intensity = np.concatenate( (np.zeros(triangles1.shape[1]), np.ones(triangles2.shape[1])), axis =0), 207 | opacity = 0.7, 208 | intensitymode='cell')]) 209 | 210 | fig.show() 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | def Rotate_Mesh_X(Template_vert, theta): 219 | 220 | R = np.zeros((3, 3)) 221 | R[1,1] = np.cos(theta) 222 | R[1,2] = np.sin(theta) 223 | R[2,1] = -np.sin(theta) 224 | R[2,2] = np.cos(theta) 225 | R[0,0] = 1 226 | 227 | rotated_vertices = np.matmul(R, Template_vert) 228 | 229 | return rotated_vertices 230 | 231 | def Rotate_Mesh_Y(Template_vert, theta): 232 | 233 | R = np.zeros((3, 3)) 234 | R[0,0] = np.cos(theta) 235 | R[0,2] = -np.sin(theta) 236 | R[2,0] = np.sin(theta) 237 | R[2,2] = np.cos(theta) 238 | R[1,1] = 1 239 | 240 | rotated_vertices = np.matmul(R, Template_vert) 241 | 242 | return rotated_vertices 243 | 244 | def Rotate_Mesh_Z(Template_vert, theta): 245 | 246 | R = np.zeros((3, 3)) 247 | R[0,0] = np.cos(theta) 248 | R[0,1] = np.sin(theta) 249 | R[1,0] = -np.sin(theta) 250 | R[1,1] = np.cos(theta) 251 | R[2,2] = 1 252 | 253 | rotated_vertices = np.matmul(R, Template_vert) 254 | 255 | return rotated_vertices 256 | 257 | def WireFrameMesh(vertices, triangles, quadMode = True): 258 | 259 | vertices = vertices.transpose() 260 | triangles = triangles.transpose() 261 | 262 | trace1 = go.Scatter3d(x=vertices[:, 0].flatten(), y=vertices[:, 1].flatten(), z=vertices[:, 2].flatten(), mode='markers', marker=dict(size=0.0001), name='markers') 263 | 264 | x_lines = list() 265 | y_lines = list() 266 | z_lines = list() 267 | 268 | if quadMode == False: 269 | 270 | 271 | for i in 
range(triangles.shape[0]): 272 | 273 | A_index = triangles[i][0] 274 | B_index = triangles[i][1] 275 | C_index = triangles[i][2] 276 | 277 | A = vertices[A_index] 278 | B = vertices[B_index] 279 | C = vertices[C_index] 280 | 281 | ## Line AB ## 282 | # Point A 283 | x_lines.append(A[0]) 284 | y_lines.append(A[1]) 285 | z_lines.append(A[2]) 286 | 287 | # Point B 288 | x_lines.append(B[0]) 289 | y_lines.append(B[1]) 290 | z_lines.append(B[2]) 291 | 292 | # Point C 293 | x_lines.append(C[0]) 294 | y_lines.append(C[1]) 295 | z_lines.append(C[2]) 296 | 297 | # Point A 298 | x_lines.append(A[0]) 299 | y_lines.append(A[1]) 300 | z_lines.append(A[2]) 301 | 302 | x_lines.append(None) 303 | y_lines.append(None) 304 | z_lines.append(None) 305 | 306 | else: 307 | 308 | for i in range(triangles.shape[0]): 309 | 310 | A_index = triangles[i][0] 311 | B_index = triangles[i][1] 312 | C_index = triangles[i][2] 313 | D_index = triangles[i][3] 314 | 315 | A = vertices[A_index] 316 | B = vertices[B_index] 317 | C = vertices[C_index] 318 | D = vertices[D_index] 319 | 320 | ## Line AB ## 321 | # Point A 322 | x_lines.append(A[0]) 323 | y_lines.append(A[1]) 324 | z_lines.append(A[2]) 325 | 326 | # Point B 327 | x_lines.append(B[0]) 328 | y_lines.append(B[1]) 329 | z_lines.append(B[2]) 330 | 331 | # Point C 332 | x_lines.append(C[0]) 333 | y_lines.append(C[1]) 334 | z_lines.append(C[2]) 335 | 336 | # Point D 337 | x_lines.append(D[0]) 338 | y_lines.append(D[1]) 339 | z_lines.append(D[2]) 340 | 341 | # Point A 342 | x_lines.append(A[0]) 343 | y_lines.append(A[1]) 344 | z_lines.append(A[2]) 345 | 346 | x_lines.append(None) 347 | y_lines.append(None) 348 | z_lines.append(None) 349 | 350 | 351 | trace2 = go.Scatter3d(x=x_lines,y=y_lines,z=z_lines,mode='lines',name='lines') 352 | fig = go.Figure(data=[trace1, trace2]) 353 | fig.show() 354 | 355 | 356 | def ShowMeshAndWireFrame(vertices, triangles, quad): 357 | 358 | x = vertices[0, :].reshape(vertices.shape[1]) 359 | y = vertices[1, :].reshape(vertices.shape[1]) 360 | z = vertices[2, :].reshape(vertices.shape[1]) 361 | 362 | i = triangles[0, :].reshape(triangles.shape[1]) 363 | j = triangles[1, :].reshape(triangles.shape[1]) 364 | k = triangles[2, :].reshape(triangles.shape[1]) 365 | 366 | n = x.shape[0] 367 | index = np.linspace(0,n-1,n) 368 | index= index.astype(int) 369 | index= index.astype(str) 370 | 371 | trace1 = go.Mesh3d(x=x,y=y,z=z,i=i,j=j,k=k,showscale=True, hovertext = index) 372 | 373 | ## Plotting the wireframe 374 | x_lines = list() 375 | y_lines = list() 376 | z_lines = list() 377 | vertices = vertices.transpose() 378 | quad = quad.transpose() 379 | 380 | for i in range(quad.shape[0]): 381 | 382 | A_index = quad[i][0] 383 | B_index = quad[i][1] 384 | C_index = quad[i][2] 385 | D_index = quad[i][3] 386 | 387 | A = vertices[A_index] 388 | B = vertices[B_index] 389 | C = vertices[C_index] 390 | D = vertices[D_index] 391 | 392 | ## Line AB ## 393 | # Point A 394 | x_lines.append(A[0]) 395 | y_lines.append(A[1]) 396 | z_lines.append(A[2]) 397 | 398 | # Point B 399 | x_lines.append(B[0]) 400 | y_lines.append(B[1]) 401 | z_lines.append(B[2]) 402 | 403 | # Point C 404 | x_lines.append(C[0]) 405 | y_lines.append(C[1]) 406 | z_lines.append(C[2]) 407 | 408 | # Point D 409 | x_lines.append(D[0]) 410 | y_lines.append(D[1]) 411 | z_lines.append(D[2]) 412 | 413 | # Point A 414 | x_lines.append(A[0]) 415 | y_lines.append(A[1]) 416 | z_lines.append(A[2]) 417 | 418 | x_lines.append(None) 419 | y_lines.append(None) 420 | z_lines.append(None) 421 | 422 | 423 | trace2 = 
go.Scatter3d(x=x_lines,y=y_lines,z=z_lines,mode='lines',name='lines')
424 |     fig = go.Figure(data=[trace1, trace2])
425 | 
426 |     fig.show()
427 | 
428 | 
429 | 
430 | 
431 | 
432 | 
433 | 
434 | 
435 | 
436 | 
437 | 
438 | 
439 | 
440 | 
441 | 
442 | 
443 | 
444 | 
445 | 
446 | 
447 | 
448 | 
449 | 
450 | 
451 | # Plots a 3D mesh together with a set of landmark vertices
452 | def ShowMeshAndLandMarks(vertices, triangles, Landmark_vertices, marker_size = 4):
453 | 
454 |     x = vertices[0, :].reshape(vertices.shape[1])
455 |     y = vertices[1, :].reshape(vertices.shape[1])
456 |     z = vertices[2, :].reshape(vertices.shape[1])
457 | 
458 |     i = triangles[0, :].reshape(triangles.shape[1])
459 |     j = triangles[1, :].reshape(triangles.shape[1])
460 |     k = triangles[2, :].reshape(triangles.shape[1])
461 | 
462 |     n = x.shape[0]
463 |     index = np.linspace(0,n-1,n)
464 |     index= index.astype(int)
465 |     index= index.astype(str)
466 | 
467 |     trace1 = go.Mesh3d(x=x,y=y,z=z,i=i,j=j,k=k,showscale=True, hovertext = index)
468 | 
469 |     vertices = Landmark_vertices
470 |     x = vertices[0, :].reshape(vertices.shape[1])
471 |     y = vertices[1, :].reshape(vertices.shape[1])
472 |     z = vertices[2, :].reshape(vertices.shape[1])
473 | 
474 |     df = pd.DataFrame({'x':x, 'y':y, 'z':z})
475 | 
476 |     #fig = px.scatter_3d(df, x='x', y='y', z='z')
477 |     trace2 = go.Scatter3d(x=df['x'],y=df['y'],z=df['z'],mode='markers',marker=dict(size=marker_size))
478 | 
479 |     fig = go.Figure(data=[trace1, trace2])
480 | 
481 |     fig.show()
482 | 
483 | def SaveObj(vert, tri, template_destination, save_destination, save_normals = False, Head_Mode = True, CM = False):
484 | 
485 |     vertices = vert.copy()
486 |     vertices = vertices.transpose()
487 |     triangles = tri.copy()
488 |     triangles = triangles.transpose()
489 |     #normals = create_nomrals_for_vertices(vertices, triangles)
490 |     #normals = -normals
491 |     ## Reference to output file
492 |     output = open(save_destination ,"w+")
493 | 
494 |     ## Reference to template obj file
495 |     with open(template_destination) as f:
496 |         content = f.readlines()
497 | 
498 |     content = [x.strip() for x in content]
499 | 
500 | 
501 |     vertex_counter = 0
502 |     vn_counter = 0
503 | 
504 |     decimal_points = 6
505 | 
506 |     output.write('# Generated by CARV3D. It is illegal to use this identity without the approval of CARV3D.' + '\n')
507 | 
508 |     for line in content:
509 |         temp = line.split()
510 | 
511 |         if len(temp)>0:
512 | 
513 |             if temp[0] == 'v':
514 |                 new_line = 'v ' + str(round(vertices[vertex_counter][0], decimal_points)) + ' ' + str(round(vertices[vertex_counter][1], decimal_points)) + ' ' + str(round(vertices[vertex_counter][2], decimal_points)) + '\n'
515 |                 output.write(new_line)
516 |                 vertex_counter = vertex_counter + 1
517 | 
518 |             elif temp[0] == 'vn' and vn_counter