├── MANIFEST.in ├── mesh ├── resources │ └── Arial.ttf ├── geometry │ ├── __init__.py │ ├── triangle_area.py │ ├── cross_product.py │ ├── vert_normals.py │ ├── barycentric_coordinates_of_projection.py │ ├── tri_normals.py │ └── rodrigues.py ├── serialization │ ├── __init__.py │ └── serialization.py ├── version.py ├── errors.py ├── src │ ├── plyutils.h │ ├── hijack_python_headers.hpp │ ├── py_loadobj.cpp │ ├── plyutils.c │ └── rply.h ├── utils.py ├── __init__.py ├── lines.py ├── fonts.py ├── texture.py ├── sphere.py ├── landmarks.py ├── processing.py ├── arcball.py ├── mesh.py └── colors.py ├── download ├── psbody_meshlite-0.1-cp27-cp27mu-linux_x86_64.whl └── psbody_meshlite-0.1-cp27-cp27m-macosx_10_12_intel.whl ├── psbody-meshlite-namespace └── __init__.py ├── sample └── hello_world.py ├── Makefile ├── LICENSE.txt ├── setup.py └── README.md /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include mesh/src * 2 | recursive-include mesh/resources * -------------------------------------------------------------------------------- /mesh/resources/Arial.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MPI-IS/meshlite/HEAD/mesh/resources/Arial.ttf -------------------------------------------------------------------------------- /download/psbody_meshlite-0.1-cp27-cp27mu-linux_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MPI-IS/meshlite/HEAD/download/psbody_meshlite-0.1-cp27-cp27mu-linux_x86_64.whl -------------------------------------------------------------------------------- /mesh/geometry/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | -------------------------------------------------------------------------------- /download/psbody_meshlite-0.1-cp27-cp27m-macosx_10_12_intel.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MPI-IS/meshlite/HEAD/download/psbody_meshlite-0.1-cp27-cp27m-macosx_10_12_intel.whl -------------------------------------------------------------------------------- /mesh/serialization/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | -------------------------------------------------------------------------------- /mesh/version.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 
2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | __version__ = '0.1' 5 | -------------------------------------------------------------------------------- /psbody-meshlite-namespace/__init__.py: -------------------------------------------------------------------------------- 1 | # this is the setup tools way 2 | __import__('pkg_resources').declare_namespace(__name__) 3 | 4 | # this is the distutils way, but does not work with setuptools 5 | #from pkgutil import extend_path 6 | #__path__ = extend_path(__path__, __name__) 7 | -------------------------------------------------------------------------------- /mesh/errors.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | """ 5 | Error hierarchy for the Mesh class 6 | """ 7 | 8 | 9 | class MeshError(Exception): 10 | """Base error class for Mesh-related errors""" 11 | pass 12 | 13 | 14 | class SerializationError(MeshError): 15 | """Mesh reading or writing errors""" 16 | pass 17 | -------------------------------------------------------------------------------- /mesh/geometry/triangle_area.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | from .tri_normals import TriToScaledNormal 6 | import numpy as np 7 | 8 | 9 | def triangle_area(v, f): 10 | """Computes the area associated with each triangle in a set""" 11 | return (np.sqrt(np.sum(TriToScaledNormal(v, f) ** 2, axis=1)) / 2.).flatten() 12 | -------------------------------------------------------------------------------- /mesh/src/plyutils.h: -------------------------------------------------------------------------------- 1 | #ifndef PLYUTILS_H__ 2 | #define PLYUTILS_H__ 3 | 4 | // needed to avoid the link to debug "_d.lib" libraries 5 | #include "hijack_python_headers.hpp" 6 | #include "rply.h" 7 | 8 | static PyObject * plyutils_read(PyObject *self, PyObject *args); 9 | static PyObject * plyutils_write(PyObject *self, PyObject *args); 10 | void error_cb(const char *message); 11 | int vertex_cb(p_ply_argument argument); 12 | int face_cb(p_ply_argument argument); 13 | 14 | #endif /* PLYUTILS_H__ */ -------------------------------------------------------------------------------- /mesh/utils.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved.
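# The three helpers below are small numpy shims used throughout the package:
# row()/col() reshape any array to 1xN / Nx1, and sparse() mimics MATLAB's
# sparse(i, j, s, m, n) triplet constructor on top of scipy's csc_matrix.
# A minimal illustrative sketch (not part of the package API):
#
#   import numpy as np
#   # entries are M[i[k], j[k]] = data[k]:
#   # sparse(np.array([0, 1]), np.array([1, 0]), np.array([5., 7.]), 2, 2).toarray()
#   # -> array([[ 0.,  5.],
#   #           [ 7.,  0.]])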
2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | def row(A): 6 | return A.reshape((1, -1)) 7 | 8 | 9 | def col(A): 10 | return A.reshape((-1, 1)) 11 | 12 | 13 | def sparse(i, j, data, m=None, n=None): 14 | import numpy as np 15 | from scipy.sparse import csc_matrix 16 | ij = np.vstack((i.flatten().reshape(1, -1), j.flatten().reshape(1, -1))) 17 | 18 | if m is None: 19 | return csc_matrix((data, ij)) 20 | else: 21 | return csc_matrix((data, ij), shape=(m, n)) 22 | -------------------------------------------------------------------------------- /mesh/src/hijack_python_headers.hpp: -------------------------------------------------------------------------------- 1 | #ifndef MESH_INCLUDE_PYTHON_HEADER_HPP__ 2 | #define MESH_INCLUDE_PYTHON_HEADER_HPP__ 3 | 4 | /*!@file 5 | * This file hijacks the inclusion of the Python libraries on Windows to 6 | * prevent the linking with the debug version of python.lib (that is named 7 | * python_d.lib and that is not provided by default). 8 | */ 9 | 10 | #undef MESH_HIJACK_AUTO_LINK 11 | 12 | #if defined(_WIN32) && defined(_DEBUG) 13 | #define MESH_HIJACK_AUTO_LINK 14 | #undef _DEBUG 15 | #endif 16 | 17 | #include <Python.h> 18 | 19 | #if defined(MESH_HIJACK_AUTO_LINK) 20 | #define _DEBUG 21 | #undef MESH_HIJACK_AUTO_LINK 22 | #endif 23 | 24 | 25 | #endif /* MESH_INCLUDE_PYTHON_HEADER_HPP__ */ -------------------------------------------------------------------------------- /sample/hello_world.py: -------------------------------------------------------------------------------- 1 | from psbody.meshlite import Mesh, MeshViewer 2 | from os.path import dirname 3 | 4 | def main(): 5 | fdir = dirname(__file__) 6 | mesh = Mesh(filename='%s/teapot.obj' % fdir) 7 | mv = MeshViewer() 8 | mv.dynamic_meshes = [mesh] 9 | 10 | print "~~\nThis should display a MeshViewer window with a teapot" 11 | print "You can click and drag on the window to rotate around the mesh." 12 | print "Press enter to continue. This will save the mesh as an obj file to your /tmp folder.\n~~\n" 13 | raw_input() 14 | 15 | outname = '/tmp/teapot.obj' 16 | mesh.write_obj(outname) 17 | print 'Saved file: ', outname 18 | 19 | 20 | if __name__ == '__main__': 21 | main() 22 | 23 | 24 | -------------------------------------------------------------------------------- /mesh/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | import os 6 | from os.path import abspath, dirname, expanduser, join 7 | 8 | from .mesh import Mesh 9 | from .meshviewer import MeshViewer, MeshViewers 10 | 11 | texture_path = abspath(join(dirname(__file__), '..', 'data', 'template', 'texture_coordinates')) 12 | 13 | if 'PSBODY_MESH_CACHE' in os.environ: 14 | mesh_package_cache_folder = expanduser(os.environ['PSBODY_MESH_CACHE']) 15 | else: 16 | mesh_package_cache_folder = expanduser('~/.psbody/meshlite_package_cache') 17 | 18 | if not os.path.exists(mesh_package_cache_folder): 19 | os.makedirs(mesh_package_cache_folder) 20 | -------------------------------------------------------------------------------- /mesh/geometry/cross_product.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | # Created by Matthew Loper on 2012-07-20.
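# CrossProduct below vectorizes the cross product over rows: for each row a_i
# it builds the skew-symmetric matrix [a_i]_x and applies it to b_i via
# einsum, so the result matches np.cross row by row. A minimal illustrative
# check (not part of the module):
#
#   import numpy as np
#   a = np.array([1., 0., 0., 0., 1., 0.])  # two stacked 3-vectors
#   b = np.array([0., 1., 0., 0., 0., 1.])
#   # CrossProduct(a, b) -> [0., 0., 1., 1., 0., 0.]
#   # i.e. x cross y = z, and y cross z = x, row by row;
#   # same as np.cross(a.reshape(-1, 3), b.reshape(-1, 3)).flatten()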
4 | 5 | import numpy as np 6 | 7 | 8 | def CrossProduct(a, b): 9 | """Computes the cross product of 2 vectors""" 10 | a = a.reshape(-1, 3) 11 | b = b.reshape(-1, 3) 12 | 13 | a1 = a[:, 0] 14 | a2 = a[:, 1] 15 | a3 = a[:, 2] 16 | 17 | Ax = np.zeros((len(a1), 3, 3)) 18 | Ax[:, 0, 1] = -a3 19 | Ax[:, 0, 2] = +a2 20 | Ax[:, 1, 0] = +a3 21 | Ax[:, 1, 2] = -a1 22 | Ax[:, 2, 0] = -a2 23 | Ax[:, 2, 1] = +a1 24 | 25 | return _call_einsum_matvec(Ax, b) 26 | 27 | 28 | def _call_einsum_matvec(m, righthand): 29 | r = righthand.reshape(m.shape[0], 3) 30 | return np.einsum('ijk,ik->ij', m, r).flatten() 31 | 32 | 33 | def _call_einsum_matmat(m, righthand): 34 | r = righthand.reshape(m.shape[0], 3, -1) 35 | return np.einsum('ijk,ikm->ijm', m, r).reshape(-1, r.shape[2]) 36 | -------------------------------------------------------------------------------- /mesh/geometry/vert_normals.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | # Created by Matthew Loper on 2013-03-12. 4 | 5 | 6 | import scipy.sparse as sp 7 | import numpy as np 8 | from .tri_normals import NormalizedNx3, TriNormalsScaled 9 | from ..utils import col 10 | 11 | 12 | def MatVecMult(mtx, vec): 13 | return mtx.dot(col(vec)).flatten() 14 | 15 | 16 | def VertNormals(v, f): 17 | return NormalizedNx3(VertNormalsScaled(v, f)) 18 | 19 | 20 | def VertNormalsScaled(v, f): 21 | IS = f.flatten() 22 | JS = np.array([range(f.shape[0])] * 3).T.flatten() 23 | data = np.ones(len(JS)) 24 | 25 | IS = np.concatenate((IS * 3, IS * 3 + 1, IS * 3 + 2)) 26 | JS = np.concatenate((JS * 3, JS * 3 + 1, JS * 3 + 2)) # is this right? 27 | data = np.concatenate((data, data, data)) 28 | 29 | faces_by_vertex = sp.csc_matrix((data, (IS, JS)), shape=(v.size, f.size)) 30 | 31 | # faces_by_vertex should be 3 x wider...? 32 | return NormalizedNx3(MatVecMult(faces_by_vertex, TriNormalsScaled(v, f))) 33 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | tmpdirbuild := temporary_test 3 | venv_dir := $(tmpdirbuild)/venv 4 | activate := $(venv_dir)/bin/activate 5 | package_name := psbody_meshlite 6 | 7 | .DEFAULT_GOAL := all 8 | 9 | $(tmpdirbuild): 10 | mkdir -p $(tmpdirbuild) 11 | 12 | $(tmpdirbuild)/package_creation: $(tmpdirbuild) 13 | @echo "********" $(package_name) ": Building the virtualenv for installation" 14 | @virtualenv --system-site-packages $(venv_dir) 15 | @ . $(activate) && pip install --upgrade pip virtualenv setuptools wheel 16 | @ . $(activate) && pip install numpy scipy pyopengl pyzmq 17 | 18 | @echo "******** [" ${package_name} "] Creating the source distribution" 19 | @ . $(activate) && python setup.py sdist 20 | 21 | @echo "******** [" ${package_name} "] Creating the wheel distribution" 22 | @ . $(activate) && python setup.py --verbose bdist_wheel 23 | 24 | ####### Cleaning some artifacts 25 | @rm -rf psbody_meshlite.egg-info 26 | ####### Touching the result 27 | @touch $@ 28 | 29 | all: $(tmpdirbuild)/package_creation 30 | 31 | install: 32 | @echo "********" $(package_name) ": installation" 33 | @echo "** installing " $(package_name) && cd dist && pip install --verbose --no-cache-dir *.whl ; 34 | 35 | clean: 36 | @rm -rf $(tmpdirbuild) 37 | @find . 
-name "*.pyc" -delete 38 | @rm -rf build 39 | @rm -rf dist 40 | @rm -rf psbody_meshlite.egg-info 41 | -------------------------------------------------------------------------------- /mesh/geometry/barycentric_coordinates_of_projection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | from numpy import cross, sum, isscalar, spacing, vstack 6 | 7 | 8 | def barycentric_coordinates_of_projection(p, q, u, v): 9 | """Given a point, gives projected coords of that point to a triangle 10 | in barycentric coordinates. 11 | 12 | See 13 | 14 | **Heidrich**, Computing the Barycentric Coordinates of a Projected Point, JGT 05 15 | at http://www.cs.ubc.ca/~heidrich/Papers/JGT.05.pdf 16 | 17 | :param p: point to project 18 | :param q: a vertex of the triangle to project into 19 | :param u,v: edges of the the triangle such that it has vertices ``q``, ``q+u``, ``q+v`` 20 | 21 | :returns: barycentric coordinates of ``p``'s projection in triangle defined by ``q``, ``u``, ``v`` 22 | vectorized so ``p``, ``q``, ``u``, ``v`` can all be ``3xN`` 23 | """ 24 | 25 | p = p.T 26 | q = q.T 27 | u = u.T 28 | v = v.T 29 | 30 | n = cross(u, v, axis=0) 31 | s = sum(n * n, axis=0) 32 | 33 | # If the triangle edges are collinear, cross-product is zero, 34 | # which makes "s" 0, which gives us divide by zero. So we 35 | # make the arbitrary choice to set s to epsv (=numpy.spacing(1)), 36 | # the closest thing to zero 37 | if isscalar(s): 38 | s = s if s else spacing(1) 39 | else: 40 | s[s == 0] = spacing(1) 41 | 42 | oneOver4ASquared = 1.0 / s 43 | w = p - q 44 | b2 = sum(cross(u, w, axis=0) * n, axis=0) * oneOver4ASquared 45 | b1 = sum(cross(w, v, axis=0) * n, axis=0) * oneOver4ASquared 46 | b = vstack((1 - b1 - b2, b1, b2)) 47 | 48 | return b.T 49 | -------------------------------------------------------------------------------- /mesh/geometry/tri_normals.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | # Created by Matthew Loper on 2012-07-22. 
4 | 5 | 6 | """ 7 | tri_normals.py 8 | 9 | """ 10 | 11 | from ..utils import col 12 | from .cross_product import CrossProduct 13 | 14 | import numpy as np 15 | 16 | 17 | def TriNormals(v, f): 18 | return NormalizedNx3(TriNormalsScaled(v, f)) 19 | 20 | 21 | def TriNormalsScaled(v, f): 22 | return CrossProduct(TriEdges(v, f, 1, 0), TriEdges(v, f, 2, 0)) 23 | 24 | 25 | def NormalizedNx3(v): 26 | ss = np.sum(v.reshape(-1, 3) ** 2, axis=1) 27 | ss[ss == 0] = 1 28 | s = np.sqrt(ss) 29 | 30 | return (v.reshape(-1, 3) / col(s)).flatten() 31 | 32 | 33 | def TriEdges(v, f, cplus, cminus): 34 | assert(cplus >= 0 and cplus <= 2 and cminus >= 0 and cminus <= 2) 35 | return _edges_for(v, f, cplus, cminus) 36 | 37 | 38 | def _edges_for(v, f, cplus, cminus): 39 | return ( 40 | v.reshape(-1, 3)[f[:, cplus], :] - 41 | v.reshape(-1, 3)[f[:, cminus], :]).ravel() 42 | 43 | 44 | def TriToScaledNormal(x, tri): 45 | 46 | v = x.reshape(-1, 3) 47 | 48 | def v_xyz(iV): 49 | return v[tri[:, iV], :] 50 | 51 | return np.cross(v_xyz(1) - v_xyz(0), v_xyz(2) - v_xyz(0)) 52 | 53 | 54 | def _bsxfun(oper, a, b): 55 | if a.shape[0] == b.shape[0] or a.shape[1] == b.shape[1]: 56 | return oper(a, b) 57 | elif min(a.shape) == 1 and min(b.shape) == 1: 58 | if a.shape[0] == 1: 59 | return oper(np.tile(a, (b.shape[0], 1)), b) 60 | else: 61 | return oper(np.tile(a, (1, b.shape[1], b))) 62 | else: 63 | raise '_bsxfun failure' 64 | 65 | 66 | def NormalizeRows(x): 67 | 68 | s = (np.sqrt(np.sum(x ** 2, axis=1))).flatten() 69 | s[s == 0] = 1 70 | return _bsxfun(np.divide, x, col(s)) 71 | -------------------------------------------------------------------------------- /mesh/lines.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 
2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | import numpy as np 5 | import colors 6 | 7 | 8 | class Lines(object): 9 | """Collection of 3D lines 10 | 11 | Attributes: 12 | v: Vx3 array of vertices 13 | e: Ex2 array of edges 14 | """ 15 | 16 | def __init__(self, v, e, vc=None, ec=None): 17 | 18 | self.v = np.array(v) 19 | self.e = np.array(e) 20 | 21 | if vc is not None: 22 | self.set_vertex_colors(vc) 23 | 24 | if ec is not None: 25 | self.set_edge_colors(ec) 26 | 27 | def colors_like(self, color, arr): 28 | from .utils import row, col 29 | if isinstance(color, str): 30 | color = colors.name_to_rgb[color] 31 | elif isinstance(color, list): 32 | color = np.array(color) 33 | 34 | if color.shape == (arr.shape[0],): 35 | def jet(v): 36 | fourValue = 4 * v 37 | red = min(fourValue - 1.5, -fourValue + 4.5) 38 | green = min(fourValue - 0.5, -fourValue + 3.5) 39 | blue = min(fourValue + 0.5, -fourValue + 2.5) 40 | result = np.array([red, green, blue]) 41 | result[result > 1.0] = 1.0 42 | result[result < 0.0] = 0.0 43 | return row(result) 44 | color = col(color) 45 | color = np.concatenate([jet(color[i]) for i in xrange(color.size)], axis=0) 46 | 47 | return np.ones((arr.shape[0], 3)) * color 48 | 49 | def set_vertex_colors(self, vc): 50 | self.vc = self.colors_like(vc, self.v) 51 | 52 | def set_edge_colors(self, ec): 53 | self.ec = self.colors_like(ec, self.e) 54 | 55 | def write_obj(self, filename): 56 | with open(filename, 'w') as fi: 57 | for r in self.v: 58 | fi.write('v %f %f %f\n' % (r[0], r[1], r[2])) 59 | for e in self.e: 60 | fi.write('l %d %d\n' % (e[0] + 1, e[1] + 1)) 61 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017-2018 Max Planck Society. All rights reserved. 2 | This software is provided for research purposes only. 3 | 4 | Contact: 5 | -------- 6 | email: ps-admin@tuebingen.mpg.de 7 | website: https://ps.is.tuebingen.mpg.de/ 8 | 9 | 10 | License: 11 | -------- 12 | 13 | Max-Planck grants you a non-exclusive, non-transferable, free of charge right to 14 | use the *psbody-meshlite package* on computers owned, leased or otherwise controlled 15 | by you and/or your organization for the sole purpose of performing non-commercial 16 | scientific research. 17 | 18 | Any other use, in particular any use for commercial purposes, is prohibited. 19 | This includes, without limitation, incorporation in a commercial product, use in 20 | a commercial service, or production of other artifacts for commercial purposes 21 | including, for example, web services, movies, television programs, or video 22 | games. The package may not be reproduced, modified and/or made available in any 23 | form to any third party without MPG’s prior written permission. By using the 24 | package, you agree not to reverse engineer it. 25 | 26 | You expressly acknowledge and agree that the Model is provided “AS IS”, may 27 | contain errors, and that any use of the Model is at your sole risk. MAX PLANCK 28 | MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE DATA, NEITHER 29 | EXPRESS NOR IMPLIED, AND THE ABSENCE OF ANY LEGAL OR ACTUAL DEFECTS, WHETHER 30 | DISCOVERABLE OR NOT. 
Specifically, and not to limit the foregoing, Max-Planck 31 | makes no representations or warranties (i) regarding the merchantability or 32 | fitness for a particular purpose of the Model, (ii) that the use of the package 33 | will not infringe any patents, copyrights or other intellectual property rights 34 | of a third party, and (iii) that the use of the package will not cause any damage 35 | of any kind to you or a third party. 36 | 37 | Under no circumstances shall Max-Planck be liable for any incidental, special, 38 | indirect or consequential damages arising out of or relating to this license, 39 | including but not limited to, any lost profits, business interruption, loss of 40 | programs or other data, or all other commercial damages or losses, even if 41 | advised of the possibility thereof. 42 | 43 | -------------------------------------------------------------------------------- /mesh/fonts.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | import os 5 | import numpy as np 6 | from OpenGL.GL import glPixelStorei, \ 7 | glGenTextures, \ 8 | glBindTexture, \ 9 | glGenerateMipmap, \ 10 | glHint, \ 11 | glTexImage2D 12 | from OpenGL.GL import GL_UNPACK_ALIGNMENT, \ 13 | GL_TEXTURE_2D, \ 14 | GL_RGB, \ 15 | GL_BGR, \ 16 | GL_GENERATE_MIPMAP_HINT, \ 17 | GL_NICEST, \ 18 | GL_UNSIGNED_BYTE 19 | 20 | 21 | def get_image_with_text(text, fgcolor, bgcolor): 22 | if not hasattr(get_image_with_text, 'cache'): 23 | get_image_with_text.cache = {} 24 | 25 | import zlib 26 | uid = str(zlib.crc32(text)) + str(zlib.crc32(np.array(fgcolor))) + str(zlib.crc32(np.array(bgcolor))) 27 | if uid not in get_image_with_text.cache: 28 | from PIL import ImageFont 29 | from PIL import Image 30 | from PIL import ImageDraw 31 | 32 | font = ImageFont.truetype("/Library/Fonts/Courier New.ttf", 30) 33 | 34 | imsize = (256, 256) 35 | 36 | bgarray = np.asarray(np.zeros((imsize[0], imsize[1], 3)), np.uint8) 37 | bgarray[:, :, 0] += bgcolor[0] 38 | bgarray[:, :, 1] += bgcolor[1] 39 | bgarray[:, :, 2] += bgcolor[2] 40 | img = Image.fromarray(bgarray) 41 | draw = ImageDraw.Draw(img) 42 | w, h = draw.textsize(text, font=font) 43 | text_pos = ((imsize[0] - w) / 2, (imsize[1] - h) / 2) 44 | draw.text(text_pos, text, fill=fgcolor, font=font) 45 | get_image_with_text.cache[uid] = np.array(img.getdata()).reshape(img.size[0], img.size[1], 3) * 255 46 | return get_image_with_text.cache[uid] 47 | 48 | 49 | def get_textureid_with_text(text, fgcolor, bgcolor): 50 | if not hasattr(get_textureid_with_text, 'cache'): 51 | get_textureid_with_text.cache = {} 52 | 53 | import zlib 54 | uid = str(zlib.crc32(text)) + str(zlib.crc32(np.array(fgcolor))) + str(zlib.crc32(np.array(bgcolor))) 55 | if uid not in get_textureid_with_text.cache: 56 | from PIL import ImageFont 57 | from PIL import Image 58 | from PIL import ImageDraw 59 | 60 | font = ImageFont.truetype(os.path.join(os.path.dirname(__file__), 61 | "resources", 62 | "Arial.ttf"), 63 | 100) 64 | 65 | imsize = (128, 128) 66 | 67 | bgarray = np.asarray(np.zeros((imsize[0], imsize[1], 3)), np.uint8) 68 | bgarray[:, :, 0] += bgcolor[0] 69 | bgarray[:, :, 1] += bgcolor[1] 70 | bgarray[:, :, 2] += bgcolor[2] 71 | img = Image.fromarray(bgarray) 72 | draw = ImageDraw.Draw(img) 73 | w, h = draw.textsize(text, font=font) 74 | text_pos = ((imsize[0] - w) / 2, (imsize[1] - h) / 2) 75 | draw.text(text_pos, text, 
fill=tuple(np.asarray(fgcolor, np.uint8)), font=font) 76 | texture_data = np.asarray(np.array(img.getdata()).reshape(img.size[0], img.size[1], 3) * 255, np.uint8) 77 | 78 | textureID = glGenTextures(1) 79 | glPixelStorei(GL_UNPACK_ALIGNMENT, 1) 80 | glBindTexture(GL_TEXTURE_2D, textureID) 81 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texture_data.shape[1], texture_data.shape[0], 0, GL_BGR, GL_UNSIGNED_BYTE, texture_data.flatten()) 82 | glHint(GL_GENERATE_MIPMAP_HINT, GL_NICEST) # must be GL_FASTEST, GL_NICEST or GL_DONT_CARE 83 | glGenerateMipmap(GL_TEXTURE_2D) 84 | get_textureid_with_text.cache[uid] = textureID 85 | 86 | return get_textureid_with_text.cache[uid] 87 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # (c) 2015-2016 Max Planck Society 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | try: 5 | # setuptools is required 6 | from setuptools import setup, Extension 7 | from setuptools.command.build_ext import build_ext 8 | from setuptools.command.install import install 9 | has_setup_tools = True 10 | except ImportError: 11 | from distutils.core import setup, Extension 12 | from distutils.command.install import install 13 | from distutils.command.build_ext import build_ext 14 | has_setup_tools = False 15 | 16 | from distutils.util import convert_path 17 | from distutils import log 18 | from distutils.command.sdist import sdist 19 | import os 20 | 21 | # this package will go to the following namespace 22 | namespace_package = 'psbody' 23 | 24 | 25 | def _get_version(): 26 | """Convenient function returning the version of this package""" 27 | 28 | ns = {} 29 | version_path = convert_path('mesh/version.py') 30 | if not os.path.exists(version_path): 31 | return None 32 | with open(version_path) as version_file: 33 | exec(version_file.read(), ns) 34 | 35 | log.warn('[VERSION] read version is %s', ns['__version__']) 36 | return ns['__version__'] 37 | 38 | 39 | def _get_all_extensions(): 40 | try: 41 | import numpy 42 | except: 43 | return [] 44 | 45 | # valid only for gcc/clang 46 | extra_args = ['-O3'] 47 | 48 | import sys 49 | if sys.platform.find('linux') > -1: 50 | extra_args += ['-fopenmp'] # openmp not supported on OSX 51 | 52 | define_macros = [('NDEBUG', '1')] 53 | undef_macros = [] 54 | package_name_and_srcs = [('serialization.plyutils', ['mesh/src/plyutils.c', 'mesh/src/rply.c'], []), 55 | ('serialization.loadobj', ['mesh/src/py_loadobj.cpp'], []), 56 | ] 57 | 58 | out = [] 59 | for current_package_name, src_list, additional_defines in package_name_and_srcs: 60 | ext = Extension("%s.meshlite.%s" % (namespace_package, current_package_name), 61 | src_list, 62 | language="c++", 63 | include_dirs=['mesh/src', numpy.get_include()], 64 | libraries=[], 65 | define_macros=define_macros + additional_defines, 66 | undef_macros=undef_macros, 67 | extra_compile_args=extra_args, 68 | extra_link_args=extra_args) 69 | 70 | out += [ext] 71 | return out 72 | 73 | all_extensions = _get_all_extensions() 74 | 75 | additional_kwargs = {} 76 | if has_setup_tools: 77 | # setup tools required for the 'setup_requires' ... 
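    # Assuming the namespace install works as intended, a quick sanity check
    # after 'make install' would be the (hypothetical) one-liner
    #
    #   python -c "from psbody.meshlite import Mesh, MeshViewer"
    #
    # which must resolve through the 'psbody' namespace package declared via
    # the namespace_packages argument below.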
78 | additional_kwargs['setup_requires'] = ['setuptools', 'numpy'] 79 | additional_kwargs['install_requires'] = ['numpy >= 1.8', 'scipy', 'pyopengl', 'pyzmq'] 80 | additional_kwargs['zip_safe'] = not all_extensions 81 | additional_kwargs['test_suite'] = "tests" 82 | additional_kwargs['namespace_packages'] = [namespace_package] 83 | 84 | cmdclass = {'build_ext': build_ext, 85 | 'sdist': sdist, 86 | 'install': install} 87 | 88 | # check if the namespace works for python >= 3.3 89 | packages = [namespace_package, 90 | '%s.meshlite' % namespace_package, 91 | '%s.meshlite.geometry' % namespace_package, 92 | '%s.meshlite.serialization' % namespace_package 93 | ] # actual subpackage described here 94 | 95 | package_dir = {namespace_package: '%s-meshlite-namespace' % namespace_package, 96 | '%s.meshlite' % namespace_package: 'mesh', # actual subpackage described here 97 | '%s.meshlite.geometry' % namespace_package: 'mesh/geometry', 98 | '%s.meshlite.serialization' % namespace_package: 'mesh/serialization', 99 | } 100 | 101 | setup(name='%s-meshlite' % namespace_package, 102 | version=_get_version(), 103 | packages=packages, 104 | package_dir=package_dir, 105 | ext_modules=all_extensions, 106 | author='Max Planck Perceiving Systems - Body Group', 107 | maintainer='Naureen Mahmood', 108 | maintainer_email='naureen.mahmood@tuebingen.mpg.de', 109 | url='http://ps.is.tuebingen.mpg.de', 110 | description='Mesh and MeshViewer utilities', 111 | license='Unlicensed', 112 | cmdclass=cmdclass, 113 | ** additional_kwargs 114 | ) 115 | -------------------------------------------------------------------------------- /mesh/texture.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | import numpy as np 5 | __all__ = ['texture_coordinates_by_vertex', ] 6 | 7 | 8 | def texture_coordinates_by_vertex(self): 9 | texture_coordinates_by_vertex = [[] for i in range(len(self.v))] 10 | for i, face in enumerate(self.f): 11 | for j in [0, 1, 2]: 12 | texture_coordinates_by_vertex[face[j]].append(self.vt[self.ft[i][j]]) 13 | return texture_coordinates_by_vertex 14 | 15 | 16 | def reload_texture_image(self): 17 | import cv2 18 | # image is loaded as image_height-by-image_width-by-3 array in BGR color order. 19 | self._texture_image = cv2.imread(self.texture_filepath) if self.texture_filepath else None 20 | texture_sizes = [32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384] 21 | if self._texture_image is not None and (self._texture_image.shape[0] != self._texture_image.shape[1] or 22 | self._texture_image.shape[0] not in texture_sizes or 23 | self._texture_image.shape[0] not in texture_sizes): 24 | closest_texture_size_idx = (np.abs(np.array(texture_sizes) - max(self._texture_image.shape))).argmin() 25 | sz = texture_sizes[closest_texture_size_idx] 26 | self._texture_image = cv2.resize(self._texture_image, (sz, sz)) 27 | 28 | 29 | def load_texture(self, texture_version): 30 | ''' 31 | Expect a texture version number as an integer, load the texture version from 'texture_path' (global variable to the 32 | package). 33 | Currently there are versions [0,1,2,3] available. 34 | ''' 35 | import os 36 | from . 
import texture_path 37 | 38 | lowres_tex_template = os.path.join(texture_path, 'textured_template_low_v%d.obj' % texture_version) 39 | highres_tex_template = os.path.join(texture_path, 'textured_template_high_v%d.obj' % texture_version) 40 | from .mesh import Mesh 41 | 42 | mesh_with_texture = Mesh(filename=lowres_tex_template) 43 | if not np.all(mesh_with_texture.f.shape == self.f.shape): 44 | mesh_with_texture = Mesh(filename=highres_tex_template) 45 | self.transfer_texture(mesh_with_texture) 46 | 47 | 48 | def transfer_texture(self, mesh_with_texture): 49 | if not np.all(mesh_with_texture.f.shape == self.f.shape): 50 | raise Exception('Mesh topology mismatch') 51 | 52 | self.vt = mesh_with_texture.vt.copy() 53 | self.ft = mesh_with_texture.ft.copy() 54 | 55 | if not np.all(mesh_with_texture.f == self.f): 56 | if np.all(mesh_with_texture.f == np.fliplr(self.f)): 57 | self.ft = np.fliplr(self.ft) 58 | else: 59 | # Same shape; let's see if it's face ordering; this could be a bit faster... 60 | face_mapping = {} 61 | for f, ii in zip(self.f, range(len(self.f))): 62 | face_mapping[" ".join([str(x) for x in sorted(f)])] = ii 63 | self.ft = np.zeros(self.f.shape, dtype=np.uint32) 64 | 65 | for f, ft in zip(mesh_with_texture.f, mesh_with_texture.ft): 66 | k = " ".join([str(x) for x in sorted(f)]) 67 | if k not in face_mapping: 68 | raise Exception('Mesh topology mismatch') 69 | # the vertex order can be arbitrary... 70 | ids = [] 71 | for f_id in f: 72 | ids.append(np.where(self.f[face_mapping[k]] == f_id)[0][0]) 73 | ids = np.array(ids) 74 | self.ft[face_mapping[k]] = np.array(ft[ids]) 75 | 76 | self.texture_filepath = mesh_with_texture.texture_filepath 77 | self._texture_image = None 78 | 79 | 80 | def set_texture_image(self, path_to_texture): 81 | self.texture_filepath = path_to_texture 82 | 83 | 84 | def texture_rgb(self, texture_coordinate): 85 | h, w = np.array(self.texture_image.shape[:2]) - 1 86 | return np.double(self.texture_image[int(h * (1.0 - texture_coordinate[1]))][int(w * (texture_coordinate[0]))])[::-1] 87 | 88 | 89 | def texture_rgb_vec(self, texture_coordinates): 90 | h, w = np.array(self.texture_image.shape[:2]) - 1 91 | n_ch = self.texture_image.shape[2] 92 | # XXX texture_coordinates can be lower than 0! clip needed! 93 | d1 = (h * (1.0 - np.clip(texture_coordinates[:, 1], 0, 1))).astype(np.int) 94 | d0 = (w * (np.clip(texture_coordinates[:, 0], 0, 1))).astype(np.int) 95 | flat_texture = self.texture_image.flatten() 96 | indices = np.hstack([((d1 * (w + 1) * n_ch) + (d0 * n_ch) + (2 - i)).reshape(-1, 1) for i in range(n_ch)]) 97 | return flat_texture[indices] 98 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | MeshLite 2 | ======== 3 | This package was created by the Perceiving Systems Department at the Max Planck 4 | Institute for Intelligent Systems as an accessory for visualizing, editing and 5 | saving meshes. 6 | 7 | It is provided specifically for use with our body, hands and face models, namely 8 | SMPL, MANO and FLAME. To learn more about these models, 9 | please visit the model websites: 10 | 11 | - http://smpl.is.tue.mpg.de 12 | - http://mano.is.tue.mpg.de 13 | - http://flame.is.tue.mpg.de 14 | 15 | Please see the accompanying LICENSE.txt file for licensing and contact information.
You must agree to the 16 | license terms before using this package. 17 | 18 | 19 | System Requirements 20 | =================== 21 | Currently this package has only been tested with the following: 22 | 23 | Python version: 24 | - Python 2.7 25 | 26 | Operating systems: 27 | - OSX (10.12) 28 | - Linux (Ubuntu 14.04.1 LTS) 29 | 30 | Installation Guide 31 | ================== 32 | 33 | ## Install from compiled binaries 34 | --------------------------------- 35 | You can install the package directly from the .whl binary files instead of compiling & installing from 36 | source. The wheels (.whl files) are located in the download folder of the repository: 37 | 38 | ``` 39 | pip install --find-links=download psbody-meshlite 40 | ``` 41 | 42 | ## Install from source 43 | ----------------------- 44 | 45 | ### 1. Clone the repository 46 | --------------------------- 47 | Get the repository as follows: 48 | 49 | ``` 50 | git clone git@github.com:naureenm/meshlite.git 51 | ``` 52 | 53 | This will download the repository into a directory named `meshlite` by default. 54 | 55 | 56 | ### 2. Install pip 57 | ----------------- 58 | 59 | For LINUX: 60 | ``` 61 | sudo apt-get install python-pip 62 | ``` 63 | 64 | 65 | For OSX: 66 | Get the script get-pip.py from its official website (https://pip.pypa.io/en/stable/installing/), 67 | then run the following commands in a terminal window: 68 | 69 | ``` 70 | sudo python get-pip.py 71 | pip install --upgrade pip 72 | ``` 73 | 74 | 75 | ### 3. Install VirtualEnv 76 | ------------------------ 77 | It is good practice to install Python packages in a virtualenv (https://virtualenv.pypa.io/en/stable/). 78 | 79 | Your system can have its own versions of the packages, while meshlite can use additional or updated ones. 80 | You do not need administrative privileges on your computer to install additional packages in a `virtualenv`, which 81 | lowers the risk of destabilizing your system and does not require the intervention of a sys-admin. 82 | 83 | 84 | For LINUX: 85 | ``` 86 | sudo apt-get install python-virtualenv 87 | ``` 88 | 89 | For OSX: 90 | With pip already installed (see step 2 above), 91 | run the following command in a terminal window: 92 | 93 | ``` 94 | pip install virtualenv 95 | ``` 96 | 97 | 98 | ### 4. Setup VirtualEnv 99 | ---------------------- 100 | It's convenient to have a single location for all of your project virtualenvs, so we'll create one. 101 | 102 | ``` 103 | # Make a virtual env container directory, and create a venv named 'meshlite' 104 | export WORKON_HOME="$HOME/.virtualenvs" 105 | mkdir $WORKON_HOME 106 | virtualenv --system-site-packages $WORKON_HOME/meshlite 107 | 108 | # Now activate the new virtualenv 109 | source $WORKON_HOME/meshlite/bin/activate 110 | ``` 111 | 112 | *Note*: Remember to always activate your virtualenv before working with meshlite. 113 | 114 | 115 | 116 | ### 5. Install MeshLite 117 | ---------------------- 118 | Navigate to the meshlite directory, and run the following two commands: 119 | 120 | ``` 121 | make 122 | make install 123 | ``` 124 | 125 | 126 | ### 6. Clean up 127 | -------------- 128 | During the build or the use of the repository, some intermediate files are generated.
It is possible to remove those intermediate 129 | files by running the following: 130 | 131 | ``` 132 | make clean 133 | ``` 134 | 135 | 136 | Run Hello-World Script 137 | ====================== 138 | If all goes well, you are now ready to try the hello_world.py script. Simple run the following in a terminal window: 139 | 140 | ``` 141 | python sample/hello_world.py 142 | ``` 143 | 144 | - This should display a MeshViewer window with a teapot. 145 | - You can click and drag on the window to rotate around the mesh. 146 | - Press 'c' to continue. This will save the mesh as an obj file to your /tmp folder. 147 | -------------------------------------------------------------------------------- /mesh/sphere.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | import numpy as np 5 | from .mesh import Mesh 6 | from .colors import name_to_rgb 7 | 8 | 9 | __all__ = ['Sphere'] 10 | 11 | 12 | class Sphere(object): 13 | def __init__(self, center, radius): 14 | if(center.flatten().shape != (3,)): 15 | raise Exception("Center should have size(1,3) instead of %s" % center.shape) 16 | self.center = center.flatten() 17 | self.radius = radius 18 | 19 | def __str__(self): 20 | return "%s:%s" % (self.center, self.radius) 21 | 22 | def to_mesh(self, color=name_to_rgb['red']): 23 | v = np.array([[0.0000, -1.000, 0.0000], [0.7236, -0.447, 0.5257], 24 | [-0.278, -0.447, 0.8506], [-0.894, -0.447, 0.0000], 25 | [-0.278, -0.447, -0.850], [0.7236, -0.447, -0.525], 26 | [0.2765, 0.4472, 0.8506], [-0.723, 0.4472, 0.5257], 27 | [-0.720, 0.4472, -0.525], [0.2763, 0.4472, -0.850], 28 | [0.8945, 0.4472, 0.0000], [0.0000, 1.0000, 0.0000], 29 | [-0.165, -0.850, 0.4999], [0.4253, -0.850, 0.3090], 30 | [0.2629, -0.525, 0.8090], [0.4253, -0.850, -0.309], 31 | [0.8508, -0.525, 0.0000], [-0.525, -0.850, 0.0000], 32 | [-0.688, -0.525, 0.4999], [-0.162, -0.850, -0.499], 33 | [-0.688, -0.525, -0.499], [0.2628, -0.525, -0.809], 34 | [0.9518, 0.0000, -0.309], [0.9510, 0.0000, 0.3090], 35 | [0.5876, 0.0000, 0.8090], [0.0000, 0.0000, 1.0000], 36 | [-0.588, 0.0000, 0.8090], [-0.951, 0.0000, 0.3090], 37 | [-0.955, 0.0000, -0.309], [-0.587, 0.0000, -0.809], 38 | [0.0000, 0.0000, -1.000], [0.5877, 0.0000, -0.809], 39 | [0.6889, 0.5257, 0.4999], [-0.262, 0.5257, 0.8090], 40 | [-0.854, 0.5257, 0.0000], [-0.262, 0.5257, -0.809], 41 | [0.6889, 0.5257, -0.499], [0.5257, 0.8506, 0.0000], 42 | [0.1626, 0.8506, 0.4999], [-0.425, 0.8506, 0.3090], 43 | [-0.422, 0.8506, -0.309], [0.1624, 0.8506, -0.499]]) 44 | 45 | f = np.array([[15, 3, 13], [13, 14, 15], [2, 15, 14], [13, 1, 14], [17, 2, 14], [14, 16, 17], 46 | [6, 17, 16], [14, 1, 16], [19, 4, 18], [18, 13, 19], [3, 19, 13], [18, 1, 13], 47 | [21, 5, 20], [20, 18, 21], [4, 21, 18], [20, 1, 18], [22, 6, 16], [16, 20, 22], 48 | [5, 22, 20], [16, 1, 20], [24, 2, 17], [17, 23, 24], [11, 24, 23], [23, 17, 6], 49 | [26, 3, 15], [15, 25, 26], [7, 26, 25], [25, 15, 2], [28, 4, 19], [19, 27, 28], 50 | [8, 28, 27], [27, 19, 3], [30, 5, 21], [21, 29, 30], [9, 30, 29], [29, 21, 4], 51 | [32, 6, 22], [22, 31, 32], [10, 32, 31], [31, 22, 5], [33, 7, 25], [25, 24, 33], 52 | [11, 33, 24], [24, 25, 2], [34, 8, 27], [27, 26, 34], [7, 34, 26], [26, 27, 3], 53 | [35, 9, 29], [29, 28, 35], [8, 35, 28], [28, 29, 4], [36, 10, 31], [31, 30, 36], 54 | [9, 36, 30], [30, 31, 5], [37, 11, 23], [23, 32, 37], [10, 37, 32], [32, 23, 6], 55 | [39, 7, 
33], [33, 38, 39], [12, 39, 38], [38, 33, 11], [40, 8, 34], [34, 39, 40], 56 | [12, 40, 39], [39, 34, 7], [41, 9, 35], [35, 40, 41], [12, 41, 40], [40, 35, 8], 57 | [42, 10, 36], [36, 41, 42], [12, 42, 41], [41, 36, 9], [38, 11, 37], [37, 42, 38], 58 | [12, 38, 42], [42, 37, 10]]) - 1 59 | 60 | return Mesh(v=v * self.radius + self.center, f=f, vc=np.tile(color, (v.shape[0], 1))) 61 | 62 | def has_inside(self, point): 63 | return np.linalg.norm(point - self.center) <= self.radius 64 | 65 | def intersects(self, sphere): 66 | return np.linalg.norm(sphere.center - self.center) < (self.radius + sphere.radius) 67 | 68 | def intersection_vol(self, sphere): 69 | if not self.intersects(sphere): 70 | return 0 71 | d = np.linalg.norm(sphere.center - self.center) 72 | R, r = (self.radius, sphere.radius) if (self.radius > sphere.radius) else (sphere.radius, self.radius) 73 | if R >= (d + r): 74 | return (4 * np.pi * (r ** 3)) / 3 75 | 76 | # http://mathworld.wolfram.com/Sphere-SphereIntersection.html 77 | return (np.pi * (R + r - d) ** 2 * (d ** 2 + 2 * d * r - 3 * r * r + 2 * d * R + 6 * r * R - 3 * R * R)) / (12 * d) 78 | -------------------------------------------------------------------------------- /mesh/landmarks.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | # Created by Matthew Loper on 2013-02-20. 4 | 5 | 6 | import numpy as np 7 | 8 | 9 | def landm_xyz_linear_transform(self, ordering=None): 10 | from .utils import col, sparse 11 | 12 | landmark_order = ordering if ordering else self.landm_names 13 | # construct a sparse matrix that converts between the landmark pts and all vertices, with height (# landmarks * 3) and width (# vertices * 3) 14 | if hasattr(self, 'landm_regressors'): 15 | landmark_coefficients = np.hstack([self.landm_regressors[name][1] for name in landmark_order]) 16 | landmark_indices = np.hstack([self.landm_regressors[name][0] for name in landmark_order]) 17 | column_indices = np.hstack([col(3 * landmark_indices + i) for i in range(3)]).flatten() 18 | row_indices = np.hstack([[3 * index, 3 * index + 1, 3 * index + 2] * len(self.landm_regressors[landmark_order[index]][0]) for index in np.arange(len(landmark_order))]) 19 | values = np.hstack([col(landmark_coefficients) for i in range(3)]).flatten() 20 | return sparse(row_indices, column_indices, values, 3 * len(landmark_order), 3 * self.v.shape[0]) 21 | elif hasattr(self, 'landm'): 22 | landmark_indices = np.array([self.landm[name] for name in landmark_order]) 23 | column_indices = np.hstack(([col(3 * landmark_indices + i) for i in range(3)])).flatten() 24 | row_indices = np.arange(3 * len(landmark_order)) 25 | return sparse(row_indices, column_indices, np.ones(len(column_indices)), 3 * len(landmark_order), 3 * self.v.shape[0]) 26 | else: 27 | return np.zeros((0, 0)) 28 | 29 | 30 | @property 31 | def landm_xyz(self, ordering=None): 32 | landmark_order = ordering if ordering else self.landm_names 33 | landmark_vertex_locations = (self.landm_xyz_linear_transform(landmark_order) * self.v.flatten()).reshape(-1, 3) if landmark_order else np.zeros((0, 0)) 34 | if landmark_order: 35 | return dict([(landmark_order[i], xyz) for i, xyz in enumerate(landmark_vertex_locations)]) 36 | return {} 37 | 38 | 39 | def recompute_landmark_indices(self, landmark_fname=None, safe_mode=True): 40 | filtered_landmarks = dict(filter(lambda e, : e[1] != [0.0, 0.0, 0.0], 
self.landm_raw_xyz.items()) if (landmark_fname and safe_mode) else self.landm_raw_xyz.items()) 41 | if len(filtered_landmarks) != len(self.landm_raw_xyz): 42 | print "WARNING: %d landmarks in file %s are positioned at (0.0, 0.0, 0.0) and were ignored" % (len(self.landm_raw_xyz) - len(filtered_landmarks), landmark_fname) 43 | 44 | self.landm = {} 45 | self.landm_regressors = {} 46 | if filtered_landmarks: 47 | landmark_names = filtered_landmarks.keys() 48 | closest_vertices, _ = self.closest_vertices(np.array(filtered_landmarks.values())) 49 | self.landm = dict(zip(landmark_names, closest_vertices)) 50 | if len(self.f): 51 | face_indices, closest_points = self.closest_faces_and_points(np.array(filtered_landmarks.values())) 52 | vertex_indices, coefficients = self.barycentric_coordinates_for_points(closest_points, face_indices) 53 | self.landm_regressors = dict([(name, (vertex_indices[i], coefficients[i])) for i, name in enumerate(landmark_names)]) 54 | else: 55 | self.landm_regressors = dict([(name, (np.array([closest_vertices[i]]), np.array([1.0]))) for i, name in enumerate(landmark_names)]) 56 | 57 | 58 | def set_landmarks_from_xyz(self, landm_raw_xyz): 59 | self.landm_raw_xyz = landm_raw_xyz if hasattr(landm_raw_xyz, 'keys') else dict((str(i), l) for i, l in enumerate(landm_raw_xyz)) 60 | self.recompute_landmark_indices() 61 | 62 | 63 | def set_landmarks_from_raw(self, landmarks): 64 | ''' 65 | can accept: 66 | {'name1': [float, float, float], 'name2': [float, float, float], ...} 67 | {'name1': np.array([float, float, float]), 'name2': np.array([float, float, float]), ...} 68 | [[float,float,float],[float,float,float], ...] 69 | np.array([[float,float,float],[float,float,float], ...]) 70 | [np.array([float,float,float]),np.array([float,float,float]), ...] 71 | {'name1': int, 'name2': int, ...} 72 | [int,int,int] 73 | np.array([int,int,int]) 74 | ''' 75 | landmarks = landmarks if hasattr(landmarks, 'keys') else dict((str(i), l) for i, l in enumerate(landmarks)) 76 | 77 | if np.all(map(lambda x: hasattr(x, "__iter__") and len(x) == 3, landmarks.values())): 78 | landmarks = dict((i, np.array(l)) for i, l in landmarks.items()) 79 | self.set_landmarks_from_xyz(landmarks) 80 | elif np.all(map(lambda x: isinstance(x, (int, long)), landmarks.values())): 81 | self.landm = landmarks 82 | self.recompute_landmark_xyz() 83 | else: 84 | raise Exception("Can't parse landmarks") 85 | -------------------------------------------------------------------------------- /mesh/geometry/rodrigues.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | # Created by Matthew Loper on 2012-07-22. 4 | 5 | import numpy as np 6 | 7 | 8 | def rodrigues(r, calculate_jacobian=True): 9 | """Computes the Rodrigues transform and its derivative 10 | 11 | :param r: either a 3-vector representing the rotation parameter, or a full rotation matrix 12 | :param calculate_jacobian: indicates if the Jacobian of the transform is also required 13 | :returns: If `calculate_jacobian` is `True`, the Jacobian is given as the second element of the returned tuple. 
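    For the vector-to-matrix direction this is the classic Rodrigues formula:
    with theta = ||r|| and unit axis u = r / theta,

        R = cos(theta) * I + (1 - cos(theta)) * u u^T + sin(theta) * [u]_x

    A minimal usage sketch (illustrative only), a quarter turn about z:

        R, jac = rodrigues(np.array([0., 0., np.pi / 2]))
        # R is approximately [[0, -1, 0], [1, 0, 0], [0, 0, 1]]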
14 | """ 15 | 16 | r = np.array(r, dtype=np.double) 17 | eps = np.finfo(np.double).eps 18 | 19 | if np.all(r.shape == (3, 1)) or np.all(r.shape == (1, 3)) or np.all(r.shape == (3,)): 20 | r = r.flatten() 21 | theta = np.linalg.norm(r) 22 | if theta < eps: 23 | r_out = np.eye(3) 24 | if calculate_jacobian: 25 | jac = np.zeros((3, 9)) 26 | jac[0, 5] = jac[1, 6] = jac[2, 1] = -1 27 | jac[0, 7] = jac[1, 2] = jac[2, 3] = 1 28 | 29 | else: 30 | c = np.cos(theta) 31 | s = np.sin(theta) 32 | c1 = 1. - c 33 | itheta = 1.0 if theta == 0.0 else 1.0 / theta 34 | r *= itheta 35 | I = np.eye(3) 36 | rrt = np.array([r * r[0], r * r[1], r * r[2]]) 37 | _r_x_ = np.array([[0, -r[2], r[1]], [r[2], 0, -r[0]], [-r[1], r[0], 0]]) 38 | r_out = c * I + c1 * rrt + s * _r_x_ 39 | if calculate_jacobian: 40 | drrt = np.array([[r[0] + r[0], r[1], r[2], r[1], 0, 0, r[2], 0, 0], 41 | [0, r[0], 0, r[0], r[1] + r[1], r[2], 0, r[2], 0], 42 | [0, 0, r[0], 0, 0, r[1], r[0], r[1], r[2] + r[2]]]) 43 | d_r_x_ = np.array([[0, 0, 0, 0, 0, -1, 0, 1, 0], 44 | [0, 0, 1, 0, 0, 0, -1, 0, 0], 45 | [0, -1, 0, 1, 0, 0, 0, 0, 0]]) 46 | I = np.array([I.flatten(), I.flatten(), I.flatten()]) 47 | ri = np.array([[r[0]], [r[1]], [r[2]]]) 48 | a0 = -s * ri 49 | a1 = (s - 2 * c1 * itheta) * ri 50 | a2 = np.ones((3, 1)) * c1 * itheta 51 | a3 = (c - s * itheta) * ri 52 | a4 = np.ones((3, 1)) * s * itheta 53 | jac = a0 * I + a1 * rrt.flatten() + a2 * drrt + a3 * _r_x_.flatten() + a4 * d_r_x_ 54 | elif np.all(r.shape == (3, 3)): 55 | u, d, v = np.linalg.svd(r) 56 | r = np.dot(u, v) 57 | rx = r[2, 1] - r[1, 2] 58 | ry = r[0, 2] - r[2, 0] 59 | rz = r[1, 0] - r[0, 1] 60 | s = np.linalg.norm(np.array([rx, ry, rz])) * np.sqrt(0.25) 61 | c = np.clip((np.sum(np.diag(r)) - 1) * 0.5, -1, 1) 62 | theta = np.arccos(c) 63 | if s < 1e-5: 64 | if c > 0: 65 | r_out = np.zeros((3, 1)) 66 | else: 67 | rx, ry, rz = np.clip(np.sqrt((np.diag(r) + 1) * 0.5), 0, np.inf) 68 | if r[0, 1] < 0: 69 | ry = -ry 70 | if r[0, 2] < 0: 71 | rz = -rz 72 | if np.abs(rx) < np.abs(ry) and np.abs(rx) < np.abs(rz) and ((r[1, 2] > 0) != (ry * rz > 0)): 73 | rz = -rz 74 | 75 | r_out = np.array([[rx, ry, rz]]).T 76 | theta /= np.linalg.norm(r_out) 77 | r_out *= theta 78 | if calculate_jacobian: 79 | jac = np.zeros((9, 3)) 80 | if c > 0: 81 | jac[1, 2] = jac[5, 0] = jac[6, 1] = -0.5 82 | jac[2, 1] = jac[3, 2] = jac[7, 0] = 0.5 83 | else: 84 | vth = 1.0 / (2.0 * s) 85 | if calculate_jacobian: 86 | dtheta_dtr = -1. 
/ s 87 | dvth_dtheta = -vth * c / s 88 | d1 = 0.5 * dvth_dtheta * dtheta_dtr 89 | d2 = 0.5 * dtheta_dtr 90 | dvardR = np.array([ 91 | [0, 0, 0, 0, 0, 1, 0, -1, 0], 92 | [0, 0, -1, 0, 0, 0, 1, 0, 0], 93 | [0, 1, 0, -1, 0, 0, 0, 0, 0], 94 | [d1, 0, 0, 0, d1, 0, 0, 0, d1], 95 | [d2, 0, 0, 0, d2, 0, 0, 0, d2]]) 96 | dvar2dvar = np.array([ 97 | [vth, 0, 0, rx, 0], 98 | [0, vth, 0, ry, 0], 99 | [0, 0, vth, rz, 0], 100 | [0, 0, 0, 0, 1]]) 101 | domegadvar2 = np.array([ 102 | [theta, 0, 0, rx * vth], 103 | [0, theta, 0, ry * vth], 104 | [0, 0, theta, rz * vth]]) 105 | jac = np.dot(np.dot(domegadvar2, dvar2dvar), dvardR) 106 | for ii in range(3): 107 | jac[ii] = jac[ii].reshape((3, 3)).T.flatten() 108 | jac = jac.T 109 | vth *= theta 110 | r_out = np.array([[rx, ry, rz]]).T * vth 111 | else: 112 | raise Exception("rodrigues: input matrix must be 1x3, 3x1 or 3x3.") 113 | if calculate_jacobian: 114 | return r_out, jac 115 | else: 116 | return r_out 117 | 118 | 119 | def rodrigues2rotmat(r): 120 | # R = np.zeros((3, 3)) 121 | r_skew = np.array([[0, -r[2], r[1]], [r[2], 0, -r[0]], [-r[1], r[0], 0]]) 122 | theta = np.linalg.norm(r) 123 | return np.identity(3) + np.sin(theta) * r_skew + (1 - np.cos(theta)) * r_skew.dot(r_skew) 124 | -------------------------------------------------------------------------------- /mesh/processing.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | """ 6 | Mesh processing backend 7 | ======================= 8 | 9 | """ 10 | 11 | import numpy as np 12 | 13 | 14 | def reset_normals(self, face_to_verts_sparse_matrix=None, reset_face_normals=False): 15 | self.vn = self.estimate_vertex_normals(face_to_verts_sparse_matrix=None) 16 | if reset_face_normals: 17 | self.fn = self.f.copy() 18 | return self 19 | 20 | 21 | def reset_face_normals(self): 22 | if not hasattr(self, 'vn'): 23 | self.reset_normals() 24 | self.fn = self.f 25 | return self 26 | 27 | 28 | def uniquified_mesh(self): 29 | """This function returns a copy of the mesh in which vertices are copied such that 30 | each vertex appears in only one face, and hence has only one texture""" 31 | from mesh import Mesh 32 | new_mesh = Mesh(v=self.v[self.f.flatten()], f=np.array(range(len(self.f.flatten()))).reshape(-1, 3)) 33 | 34 | if not hasattr(self, 'vn'): 35 | self.reset_normals() 36 | new_mesh.vn = self.vn[self.f.flatten()] 37 | 38 | if hasattr(self, 'vt'): 39 | new_mesh.vt = self.vt[self.ft.flatten()] 40 | new_mesh.ft = new_mesh.f.copy() 41 | return new_mesh 42 | 43 | 44 | def keep_vertices(self, keep_list): 45 | trans = dict((v, i) for i, v in enumerate(keep_list)) 46 | trans_f = np.array([trans[v] if v in trans else -1 for row in self.f for v in row], dtype=np.uint32).reshape(-1, 3) 47 | if hasattr(self, 'vn') and self.vn.shape[0] == self.vn.shape[0]: 48 | self.vn = self.vn.reshape(-1, 3)[keep_list] 49 | if hasattr(self, 'vc') and self.vc.shape[0] == self.v.shape[0]: 50 | self.vc = self.vc.reshape(-1, 3)[keep_list] 51 | if hasattr(self, 'landm_raw_xyz'): 52 | self.recompute_landmark_indices() 53 | 54 | self.v = self.v.reshape(-1, 3)[keep_list] 55 | self.f = trans_f[(trans_f != np.uint32(-1)).all(axis=1)] 56 | return self 57 | 58 | 59 | def point_cloud(self): 60 | from .mesh import Mesh 61 | return Mesh(v=self.v, f=[], vc=self.vc) if hasattr(self, 'vc') else Mesh(v=self.v, f=[]) 62 | 63 | 64 | def remove_faces(self, 
face_indices_to_remove): 65 | 66 | def arr_replace(arr_in, lookup_dict): 67 | arr_out = arr_in.copy() 68 | for k, v in lookup_dict.iteritems(): 69 | arr_out[arr_in == k] = v 70 | return arr_out 71 | 72 | f = np.delete(self.f, face_indices_to_remove, 0) 73 | v2keep = np.unique(f) 74 | self.v = self.v[v2keep] 75 | self.f = arr_replace(f, dict((v, i) for i, v in enumerate(v2keep))) 76 | 77 | if hasattr(self, 'fc'): 78 | self.fc = np.delete(self.fc, face_indices_to_remove, 0) 79 | if hasattr(self, 'vn') and self.vn.shape[0] == self.vn.shape[0]: 80 | self.vn = self.vn.reshape(-1, 3)[v2keep] 81 | if hasattr(self, 'vc') and self.vc.shape[0] == self.v.shape[0]: 82 | self.vc = self.vc.reshape(-1, 3)[v2keep] 83 | if hasattr(self, 'landm_raw_xyz'): 84 | self.recompute_landmark_indices() 85 | 86 | if hasattr(self, 'ft'): 87 | ft = np.delete(self.ft, face_indices_to_remove, 0) 88 | vt2keep = np.unique(ft) 89 | self.vt = self.vt[vt2keep] 90 | self.ft = arr_replace(ft, dict((v, i) for i, v in enumerate(vt2keep))) 91 | 92 | return self 93 | 94 | 95 | def flip_faces(self): 96 | self.f = self.f.copy() 97 | for i in range(len(self.f)): 98 | self.f[i] = self.f[i][::-1] 99 | if hasattr(self, 'ft'): 100 | for i in range(len(self.f)): 101 | self.ft[i] = self.ft[i][::-1] 102 | return self 103 | 104 | 105 | def scale_vertices(self, scale_factor): 106 | self.v *= scale_factor 107 | return self 108 | 109 | 110 | def rotate_vertices(self, rotation_matrix): 111 | import cv2 112 | rotation_matrix = np.matrix(cv2.Rodrigues(np.array(rotation_matrix))[0] if (np.array(rotation_matrix).shape != (3, 3)) else rotation_matrix) 113 | self.v = np.array(self.v * rotation_matrix.T) 114 | return self 115 | 116 | 117 | def translate_vertices(self, translation): 118 | self.v += translation 119 | return self 120 | 121 | 122 | def subdivide_triangles(self): 123 | new_faces = [] 124 | new_vertices = self.v.copy() 125 | for face in self.f: 126 | face_vertices = np.array([self.v[face[0], :], self.v[face[1], :], self.v[face[2], :]]) 127 | new_vertex = np.mean(face_vertices, axis=0) 128 | new_vertices = np.vstack([new_vertices, new_vertex]) 129 | new_vertex_index = len(new_vertices) - 1 130 | if len(new_faces): 131 | new_faces = np.vstack([new_faces, [face[0], face[1], new_vertex_index], [face[1], face[2], new_vertex_index], [face[2], face[0], new_vertex_index]]) 132 | else: 133 | new_faces = np.array([[face[0], face[1], new_vertex_index], [face[1], face[2], new_vertex_index], [face[2], face[0], new_vertex_index]]) 134 | self.v = new_vertices 135 | self.f = new_faces 136 | 137 | if hasattr(self, 'vt'): 138 | new_ft = [] 139 | new_texture_coordinates = self.vt.copy() 140 | for face_texture in self.ft: 141 | face_texture_coordinates = np.array([self.vt[face_texture[0], :], self.vt[face_texture[1], :], self.vt[face_texture[2], :]]) 142 | new_texture_coordinate = np.mean(face_texture_coordinates, axis=0) 143 | new_texture_coordinates = np.vstack([new_texture_coordinates, new_texture_coordinate]) 144 | new_texture_index = len(new_texture_coordinates) - 1 145 | if len(new_ft): 146 | new_ft = np.vstack([new_ft, [face_texture[0], face_texture[1], new_texture_index], [face_texture[1], face_texture[2], new_texture_index], [face_texture[2], face_texture[0], new_texture_index]]) 147 | else: 148 | new_ft = np.array([[face_texture[0], face_texture[1], new_texture_index], [face_texture[1], face_texture[2], new_texture_index], [face_texture[2], face_texture[0], new_texture_index]]) 149 | self.vt = new_texture_coordinates 150 | self.ft = new_ft 151 | return 
self 152 | 153 | 154 | def concatenate_mesh(self, mesh): 155 | if len(self.v) == 0: 156 | self.f = mesh.f.copy() 157 | self.v = mesh.v.copy() 158 | self.vc = mesh.vc.copy() if hasattr(mesh, 'vc') else None 159 | elif len(mesh.v): 160 | self.f = np.concatenate([self.f, mesh.f.copy() + len(self.v)]) 161 | self.v = np.concatenate([self.v, mesh.v]) 162 | self.vc = np.concatenate([self.vc, mesh.vc]) if (hasattr(mesh, 'vc') and hasattr(self, 'vc')) else None 163 | return self 164 | 165 | 166 | # new_ordering specifies the new index of each vertex. If new_ordering[i] = j, 167 | # vertex i should now be the j^th vertex. As such, each entry in new_ordering should be unique. 168 | def reorder_vertices(self, new_ordering, new_normal_ordering=None): 169 | if new_normal_ordering is None: 170 | new_normal_ordering = new_ordering 171 | inverse_ordering = np.zeros(len(new_ordering), dtype=int) 172 | for i, j in enumerate(new_ordering): 173 | inverse_ordering[j] = i 174 | inverse_normal_ordering = np.zeros(len(new_normal_ordering), dtype=int) 175 | for i, j in enumerate(new_normal_ordering): 176 | inverse_normal_ordering[j] = i 177 | self.v = self.v[inverse_ordering] 178 | if hasattr(self, 'vn'): 179 | self.vn = self.vn[inverse_normal_ordering] 180 | for i in range(len(self.f)): 181 | self.f[i] = np.array([new_ordering[vertex_index] for vertex_index in self.f[i]]) 182 | if hasattr(self, 'fn'): 183 | self.fn[i] = np.array([new_normal_ordering[normal_index] for normal_index in self.fn[i]]) 184 | -------------------------------------------------------------------------------- /mesh/arcball.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | """ 6 | Math utilities, vector, matrix types and ArcBall quaternion rotation class 7 | ========================================================================== 8 | 9 | """ 10 | 11 | import numpy as Numeric 12 | import copy 13 | from math import sqrt 14 | 15 | # //assuming IEEE-754(GLfloat), which i believe has max precision of 7 bits 16 | Epsilon = 1.0e-5 17 | 18 | 19 | class ArcBallT(object): 20 | def __init__(self, NewWidth, NewHeight): 21 | self.m_StVec = Vector3fT() 22 | self.m_EnVec = Vector3fT() 23 | self.m_AdjustWidth = 1.0 24 | self.m_AdjustHeight = 1.0 25 | self.setBounds(NewWidth, NewHeight) 26 | 27 | def __str__(self): 28 | str_rep = "" 29 | str_rep += "StVec = " + str(self.m_StVec) 30 | str_rep += "\nEnVec = " + str(self.m_EnVec) 31 | str_rep += "\n scale coords %f %f" % (self.m_AdjustWidth, self.m_AdjustHeight) 32 | return str_rep 33 | 34 | def setBounds(self, NewWidth, NewHeight): 35 | # //Set new bounds 36 | assert (NewWidth > 1.0 and NewHeight > 1.0), "Invalid width or height for bounds." 37 | # //Set adjustment factor for width/height 38 | self.m_AdjustWidth = 1.0 / ((NewWidth - 1.0) * 0.5) 39 | self.m_AdjustHeight = 1.0 / ((NewHeight - 1.0) * 0.5) 40 | 41 | def _mapToSphere(self, NewPt): 42 | # Given a new window coordinate, will modify NewVec in place 43 | X = 0 44 | Y = 1 45 | Z = 2 46 | 47 | NewVec = Vector3fT() 48 | # //Copy paramter into temp point 49 | TempPt = copy.copy(NewPt) 50 | # //Adjust point coords and scale down to range of [-1 ... 
1] 51 | TempPt[X] = (NewPt[X] * self.m_AdjustWidth) - 1.0 52 | TempPt[Y] = 1.0 - (NewPt[Y] * self.m_AdjustHeight) 53 | # //Compute the square of the length of the vector to the point from the center 54 | length = Numeric.sum(Numeric.dot(TempPt, TempPt)) 55 | # //If the point is mapped outside of the sphere... (length > radius squared) 56 | if (length > 1.0): 57 | # //Compute a normalizing factor (radius / sqrt(length)) 58 | norm = 1.0 / sqrt(length) 59 | 60 | # //Return the "normalized" vector, a point on the sphere 61 | NewVec[X] = TempPt[X] * norm 62 | NewVec[Y] = TempPt[Y] * norm 63 | NewVec[Z] = 0.0 64 | else: # //Else it's on the inside 65 | # //Return a vector to a point mapped inside the sphere sqrt(radius squared - length) 66 | NewVec[X] = TempPt[X] 67 | NewVec[Y] = TempPt[Y] 68 | NewVec[Z] = sqrt(1.0 - length) 69 | 70 | return NewVec 71 | 72 | def click(self, NewPt): 73 | # Mouse down (Point2fT 74 | self.m_StVec = self._mapToSphere(NewPt) 75 | return 76 | 77 | def drag(self, NewPt): 78 | # Mouse drag, calculate rotation (Point2fT Quat4fT) 79 | """ drag (Point2fT mouse_coord) -> new_quaternion_rotation_vec 80 | """ 81 | X = 0 82 | Y = 1 83 | Z = 2 84 | W = 3 85 | 86 | self.m_EnVec = self._mapToSphere(NewPt) 87 | 88 | # //Compute the vector perpendicular to the begin and end vectors 89 | # Perp = Vector3fT () 90 | Perp = Vector3fCross(self.m_StVec, self.m_EnVec) 91 | 92 | NewRot = Quat4fT() 93 | # Compute the length of the perpendicular vector 94 | if (Vector3fLength(Perp) > Epsilon): 95 | # if its non-zero 96 | # We're ok, so return the perpendicular vector as the transform after all 97 | NewRot[X] = Perp[X] 98 | NewRot[Y] = Perp[Y] 99 | NewRot[Z] = Perp[Z] 100 | # //In the quaternion values, w is cosine (theta / 2), where theta is rotation angle 101 | NewRot[W] = Vector3fDot(self.m_StVec, self.m_EnVec) 102 | else: 103 | # if its zero 104 | # The begin and end vectors coincide, so return a quaternion of zero matrix (no rotation) 105 | NewRot[X] = NewRot[Y] = NewRot[Z] = NewRot[W] = 0.0 106 | 107 | return NewRot 108 | 109 | 110 | def Matrix4fT(): 111 | return Numeric.identity(4, 'f') 112 | 113 | 114 | def Matrix3fT(): 115 | return Numeric.identity(3, 'f') 116 | 117 | 118 | def Quat4fT(): 119 | return Numeric.zeros(4, 'f') 120 | 121 | 122 | def Vector3fT(): 123 | return Numeric.zeros(3, 'f') 124 | 125 | 126 | def Point2fT(x=0.0, y=0.0): 127 | pt = Numeric.zeros(2, 'f') 128 | pt[0] = x 129 | pt[1] = y 130 | return pt 131 | 132 | 133 | def Vector3fDot(u, v): 134 | # Dot product of two 3f vectors 135 | dotprod = Numeric.dot(u, v) 136 | return dotprod 137 | 138 | 139 | def Vector3fCross(u, v): 140 | # Cross product of two 3f vectors 141 | X = 0 142 | Y = 1 143 | Z = 2 144 | cross = Numeric.zeros(3, 'f') 145 | cross[X] = (u[Y] * v[Z]) - (u[Z] * v[Y]) 146 | cross[Y] = (u[Z] * v[X]) - (u[X] * v[Z]) 147 | cross[Z] = (u[X] * v[Y]) - (u[Y] * v[X]) 148 | return cross 149 | 150 | 151 | def Vector3fLength(u): 152 | mag_squared = Numeric.sum(Numeric.dot(u, u)) 153 | mag = sqrt(mag_squared) 154 | return mag 155 | 156 | 157 | def Matrix3fSetIdentity(): 158 | return Numeric.identity(3, 'f') 159 | 160 | 161 | def Matrix3fMulMatrix3f(matrix_a, matrix_b): 162 | return matrix_a.dot(matrix_b) 163 | 164 | 165 | def Matrix4fSVD(NewObj): 166 | X = 0 167 | Y = 1 168 | Z = 2 169 | s = sqrt(((NewObj[X][X] * NewObj[X][X]) + (NewObj[X][Y] * NewObj[X][Y]) + (NewObj[X][Z] * NewObj[X][Z]) + 170 | (NewObj[Y][X] * NewObj[Y][X]) + (NewObj[Y][Y] * NewObj[Y][Y]) + (NewObj[Y][Z] * NewObj[Y][Z]) + 171 | (NewObj[Z][X] * 
NewObj[Z][X]) + (NewObj[Z][Y] * NewObj[Z][Y]) + (NewObj[Z][Z] * NewObj[Z][Z])) / 3.0) 172 | return s 173 | 174 | 175 | def Matrix4fSetRotationScaleFromMatrix3f(NewObj, three_by_three_matrix): 176 | """Modifies NewObj in-place by replacing its upper 3x3 portion from the 177 | passed in 3x3 matrix. 178 | 179 | :param NewObj: a `Matrix4fT` 180 | """ 181 | NewObj[0:3, 0:3] = three_by_three_matrix 182 | return NewObj 183 | 184 | 185 | def Matrix4fSetRotationFromMatrix3f(NewObj, three_by_three_matrix): 186 | """ 187 | Sets the rotational component (upper 3x3) of this matrix to the matrix 188 | values in the T precision Matrix3d argument; the other elements of 189 | this matrix are unchanged; a singular value decomposition is performed 190 | on this object's upper 3x3 matrix to factor out the scale, then this 191 | object's upper 3x3 matrix components are replaced by the passed rotation 192 | components, and then the scale is reapplied to the rotational 193 | components. 194 | 195 | :param three_by_three_matrix: T precision 3x3 matrix 196 | """ 197 | scale = Matrix4fSVD(NewObj) 198 | 199 | NewObj = Matrix4fSetRotationScaleFromMatrix3f(NewObj, three_by_three_matrix) 200 | scaled_NewObj = NewObj * scale # Matrix4fMulRotationScale(NewObj, scale); 201 | return scaled_NewObj 202 | 203 | 204 | def Matrix3fSetRotationFromQuat4f(q1): 205 | """Converts the H quaternion q1 into a new equivalent 3x3 rotation matrix.""" 206 | X = 0 207 | Y = 1 208 | Z = 2 209 | W = 3 210 | 211 | NewObj = Matrix3fT() 212 | n = Numeric.sum(Numeric.dot(q1, q1)) 213 | s = 0.0 214 | if (n > 0.0): 215 | s = 2.0 / n 216 | 217 | xs = q1[X] * s 218 | ys = q1[Y] * s 219 | zs = q1[Z] * s 220 | 221 | wx = q1[W] * xs 222 | wy = q1[W] * ys 223 | wz = q1[W] * zs 224 | 225 | xx = q1[X] * xs 226 | xy = q1[X] * ys 227 | xz = q1[X] * zs 228 | 229 | yy = q1[Y] * ys 230 | yz = q1[Y] * zs 231 | zz = q1[Z] * zs 232 | 233 | # This math all comes about by way of algebra, complex math, and trig identities. 
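# For a (not necessarily unit) quaternion q = (x, y, z, w), with n = q.q and
# s = 2/n as computed above, the assignments below are the standard
# quaternion-to-rotation-matrix identity written out with the shared products
# (xs = x*s, wx = w*xs, ...): e.g. R[X][X] = 1 - s*(y*y + z*z) and
# R[Y][X] = s*(x*y - w*z). For a unit quaternion, n == 1 and s == 2.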
234 | # See Lengyel pages 88-92
235 | NewObj[X][X] = 1.0 - (yy + zz)
236 | NewObj[Y][X] = xy - wz
237 | NewObj[Z][X] = xz + wy
238 |
239 | NewObj[X][Y] = xy + wz
240 | NewObj[Y][Y] = 1.0 - (xx + zz)
241 | NewObj[Z][Y] = yz - wx
242 |
243 | NewObj[X][Z] = xz - wy
244 | NewObj[Y][Z] = yz + wx
245 | NewObj[Z][Z] = 1.0 - (xx + yy)
246 |
247 | return NewObj
248 |
-------------------------------------------------------------------------------- /mesh/src/py_loadobj.cpp: --------------------------------------------------------------------------------
1 |
2 | // needed to avoid the link to debug "_d.lib" libraries
3 | #include "hijack_python_headers.hpp"
4 |
5 | #include <boost/cstdint.hpp>
6 | #include <boost/array.hpp>
7 | #include <exception>
8 | #include <string>
9 | using boost::uint32_t;
10 | using boost::array;
11 |
12 | #include <fstream>
13 | #include <sstream>
14 | #include <vector>
15 | #include <map>
16 | #include <iterator>
17 | #include <numpy/arrayobject.h>
18 |
19 | class LoadObjException: public std::exception {
20 | public:
21 |     LoadObjException(std::string m="loadObjException!"):msg(m) {}
22 |     ~LoadObjException() throw() {}
23 |     const char* what() const throw() { return msg.c_str(); }
24 | private:
25 |     std::string msg;
26 | };
27 |
28 | static PyObject *
29 | loadobj(PyObject *self, PyObject *args, PyObject *keywds);
30 |
31 | static PyObject *LoadObjError;
32 |
33 | static PyMethodDef loadobj_methods[] = {
34 |     {"loadobj", (PyCFunction) loadobj,
35 |      METH_VARARGS | METH_KEYWORDS, "loadobj."},
36 |     {NULL, NULL, 0, NULL}  /* Sentinel */
37 | };
38 |
39 | PyMODINIT_FUNC
40 | initloadobj(void)
41 | {
42 |     PyObject *m = Py_InitModule("loadobj", loadobj_methods);
43 |     if (m == NULL)
44 |         return;
45 |
46 |     import_array();
47 |     LoadObjError = PyErr_NewException(const_cast<char*>("loadobj.LoadObjError"), NULL, NULL);
48 |     Py_INCREF(LoadObjError);
49 |     PyModule_AddObject(m, "LoadObjError", LoadObjError);
50 | }
51 |
52 | static PyObject *
53 | loadobj(PyObject *self, PyObject *args, PyObject *keywds)
54 | {
55 |     try {
56 |         char py_objpatharr[256];
57 |         char *py_objpath = static_cast<char*>(py_objpatharr);
58 |
59 |         // a copy of the literal string is done into a (non const) char
60 |         char key1[] = "obj_path";
61 |         static char* kwlist[] = {key1, NULL};
62 |
63 |         if (!PyArg_ParseTupleAndKeywords(args, keywds, "s", kwlist, &py_objpath))
64 |             return NULL;
65 |
66 |         std::ifstream obj_is(py_objpath,std::ios_base::binary | std::ios_base::in);
67 |         if (!obj_is) {
68 |             PyErr_SetString(PyExc_ValueError, "Could not load file");
69 |             return NULL;
70 |         }
71 |
72 |         std::vector<double> v;
73 |         std::vector<double> vt;
74 |         std::vector<double> vn;
75 |         std::vector<uint32_t> f;
76 |         std::vector<uint32_t> ft;
77 |         std::vector<uint32_t> fn;
78 |         v.reserve(30000);
79 |         vt.reserve(30000);
80 |         vn.reserve(30000);
81 |         f.reserve(100000);
82 |         ft.reserve(100000);
83 |         fn.reserve(100000);
84 |         std::map<std::string, std::vector<uint32_t> > segm;
85 |
86 |         bool next_v_is_land = false;
87 |         std::string land_name("");
88 |         std::map<std::string, uint32_t> landm;
89 |
90 |         std::string line;
91 |         std::string curr_segm("");
92 |         std::string mtl_path("");
93 |         unsigned len_vt = 3;
94 |         while (getline(obj_is, line)) {
95 |             if (line.substr(0,6) == "mtllib") {
96 |                 mtl_path = line.substr(6);
97 |             }
98 |
99 |             if (line.substr(0,1) == "g"){
100 |                 curr_segm = line.substr(2);
101 |                 if (segm.find(curr_segm) == segm.end())
102 |                     segm[curr_segm] = std::vector<uint32_t>();
103 |             }
104 |             if (line.substr(0,2) == "vt"){
105 |                 std::istringstream is(line.substr(2));
106 |                 unsigned orig_vt_len = vt.size();
107 |                 std::copy(std::istream_iterator<double>(is),
108 |                           std::istream_iterator<double>(),
109 |                           std::back_inserter(vt));
110 |                 len_vt = vt.size() - orig_vt_len;
111 |             }
112 |             else if (line.substr(0,2) == "vn"){
113 |                 std::istringstream is(line.substr(2));
114 |                 std::copy(std::istream_iterator<double>(is),
115 |                           std::istream_iterator<double>(),
116 |                           std::back_inserter(vn));
117 |             }
118 |             else if (line.substr(0,1) == "f"){
119 |                 std::istringstream is(line.substr(1));
120 |                 std::istream_iterator<std::string> it(is);
121 |                 const std::string delims("/");
122 |                 std::vector<uint32_t> localf, localfn, localft;
123 |                 for(;it!=std::istream_iterator<std::string>();++it){
124 |                     // valid: v v/vt v/vt/vn v//vn
125 |                     unsigned counter=0;
126 |                     std::istringstream unparsed_face(*it);
127 |                     std::string el;
128 |                     while(std::getline(unparsed_face, el, '/')) {
129 |                         if (el.size() > 0) { // if the element has contents
130 |                             if (counter == 0)
131 |                                 localf.push_back(atoi(el.c_str()));
132 |                             if (counter == 1)
133 |                                 localft.push_back(atoi(el.c_str()));
134 |                             if (counter == 2)
135 |                                 localfn.push_back(atoi(el.c_str()));
136 |                         }
137 |                         counter++;
138 |                     }
139 |                 }
140 |                 if (localf.size() > 0) {
141 |                     for (int i=1; i<(localf.size()-1); ++i) {
142 |                         f.push_back(localf[0] - 1);
143 |                         f.push_back(localf[i] - 1);
144 |                         f.push_back(localf[i+1] - 1);
145 |                         if (curr_segm != "")
146 |                             segm.find(curr_segm)->second.push_back((f.size()/3)-1);
147 |                     }
148 |                 }
149 |                 if (localft.size() > 0) {
150 |                     for (int i=1; i<(localft.size()-1); ++i){
151 |                         ft.push_back(localft[0] - 1);
152 |                         ft.push_back(localft[i] - 1);
153 |                         ft.push_back(localft[i+1] - 1);
154 |                     }
155 |                 }
156 |                 if (localfn.size() > 0) {
157 |                     for (int i=1; i<(localfn.size()-1); ++i){
158 |                         fn.push_back(localfn[0] - 1);
159 |                         fn.push_back(localfn[i] - 1);
160 |                         fn.push_back(localfn[i+1] - 1);
161 |                     }
162 |                 }
163 |             }
164 |             else if (line.substr(0,1) == "v"){
165 |                 std::istringstream is(line.substr(1));
166 |                 std::copy(std::istream_iterator<double>(is),
167 |                           std::istream_iterator<double>(),
168 |                           std::back_inserter(v));
169 |                 if (next_v_is_land) {
170 |                     next_v_is_land = false;
171 |                     landm[land_name.c_str()] = v.size()/3-1;
172 |                 }
173 |             }
174 |             else if (line.substr(0,9) == "#landmark"){
175 |                 next_v_is_land = true;
176 |                 land_name = line.substr(10);
177 |             }
178 |         }
179 |
180 |         unsigned n_v = v.size()/3;
181 |         unsigned n_vt = vt.size()/len_vt;
182 |         unsigned n_vn = vn.size()/3;
183 |         unsigned n_f = f.size()/3;
184 |         unsigned n_ft = ft.size()/3;
185 |         unsigned n_fn = fn.size()/3;
186 |         npy_intp v_dims[] = {n_v,3};
187 |         npy_intp vn_dims[] = {n_vn,3};
188 |         npy_intp vt_dims[] = {n_vt,len_vt};
189 |         npy_intp f_dims[] = {n_f,3};
190 |         npy_intp ft_dims[] = {n_ft,3};
191 |         npy_intp fn_dims[] = {n_fn,3};
192 |         /*
193 |         // XXX Memory from vectors get deallocated!
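        // PyArray_SimpleNewFromData wraps an existing buffer without copying
        // it or taking ownership, so the six arrays below would keep pointing
        // into std::vector storage that is freed when this function returns.
        // That is why the live code after this commented-out block allocates
        // with PyArray_SimpleNew and copies the values element by element.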
194 |         PyObject *py_v = PyArray_SimpleNewFromData(2, v_dims, NPY_DOUBLE, v.data());
195 |         PyObject *py_vt = PyArray_SimpleNewFromData(2, vt_dims, NPY_DOUBLE, vt.data());
196 |         PyObject *py_vn = PyArray_SimpleNewFromData(2, vn_dims, NPY_DOUBLE, vn.data());
197 |         PyObject *py_f = PyArray_SimpleNewFromData(2, f_dims, NPY_UINT32, f.data());
198 |         PyObject *py_ft = PyArray_SimpleNewFromData(2, ft_dims, NPY_UINT32, ft.data());
199 |         PyObject *py_fn = PyArray_SimpleNewFromData(2, fn_dims, NPY_UINT32, fn.data());
200 |         */
201 |         // The following copy would be faster in C++11 with move semantics
202 |         PyObject *py_v = PyArray_SimpleNew(2, v_dims, NPY_DOUBLE);
203 |         std::copy(v.begin(), v.end(), reinterpret_cast<double*>(PyArray_DATA(py_v)));
204 |         PyObject *py_vt = PyArray_SimpleNew(2, vt_dims, NPY_DOUBLE);
205 |         std::copy(vt.begin(), vt.end(), reinterpret_cast<double*>(PyArray_DATA(py_vt)));
206 |         PyObject *py_vn = PyArray_SimpleNew(2, vn_dims, NPY_DOUBLE);
207 |         std::copy(vn.begin(), vn.end(), reinterpret_cast<double*>(PyArray_DATA(py_vn)));
208 |         PyObject *py_f = PyArray_SimpleNew(2, f_dims, NPY_UINT32);
209 |         std::copy(f.begin(), f.end(), reinterpret_cast<uint32_t*>(PyArray_DATA(py_f)));
210 |         PyObject *py_ft = PyArray_SimpleNew(2, ft_dims, NPY_UINT32);
211 |         std::copy(ft.begin(), ft.end(), reinterpret_cast<uint32_t*>(PyArray_DATA(py_ft)));
212 |         PyObject *py_fn = PyArray_SimpleNew(2, fn_dims, NPY_UINT32);
213 |         std::copy(fn.begin(), fn.end(), reinterpret_cast<uint32_t*>(PyArray_DATA(py_fn)));
214 |
215 |         PyObject *py_landm = PyDict_New();
216 |         for (std::map<std::string, uint32_t>::iterator it=landm.begin(); it!=landm.end(); ++it)
217 |             PyDict_SetItemString(py_landm, it->first.c_str(), Py_BuildValue("l", it->second));
218 |
219 |         PyObject *py_segm = PyDict_New();
220 |         for (std::map<std::string, std::vector<uint32_t> >::iterator it=segm.begin(); it!=segm.end(); ++it) {
221 |             unsigned n = it->second.size();
222 |             npy_intp dims[] = {n};
223 |             PyObject *temp = PyArray_SimpleNew(1, dims, NPY_UINT32);
224 |             std::copy(it->second.begin(), it->second.end(), reinterpret_cast<uint32_t*>(PyArray_DATA(temp)));
225 |             PyDict_SetItemString(py_segm, it->first.c_str(), Py_BuildValue("N", temp));
226 |         }
227 |
228 |         return Py_BuildValue("NNNNNNsNN",py_v,py_vt,py_vn,py_f,py_ft,py_fn,mtl_path.c_str(),py_landm,py_segm);
229 |     } catch (LoadObjException& e) {
230 |         PyErr_SetString(LoadObjError, e.what());
231 |         return NULL;
232 |     }
233 | }
234 |
-------------------------------------------------------------------------------- /mesh/src/plyutils.c: --------------------------------------------------------------------------------
1 | #include "plyutils.h"
2 |
3 | static PyMethodDef PlyutilsMethods[] = {
4 |     {"read", plyutils_read, METH_VARARGS, "Read a PLY file."},
5 |     {"write", plyutils_write, METH_VARARGS, "Write a PLY file."},
6 |     {NULL, NULL, 0, NULL}
7 | };
8 |
9 | static PyObject *PlyutilsError;
10 |
11 | PyMODINIT_FUNC initplyutils(void) {
12 |     PyObject *m;
13 |     m = Py_InitModule("plyutils", PlyutilsMethods);
14 |     if (m == NULL)
15 |         return;
16 |
17 |     PlyutilsError = PyErr_NewException("plyutils.error", NULL, NULL);
18 |     Py_INCREF(PlyutilsError);
19 |     PyModule_AddObject(m, "error", PlyutilsError);
20 | }
21 |
22 | int has_color(p_ply ply) {
23 |     p_ply_element el = NULL;
24 |     p_ply_property p = NULL;
25 |     const char *name;
26 |     while ((el = ply_get_next_element(ply, el))) {
27 |         if (ply_get_element_info(el, &name, NULL) && !strcmp(name, "vertex")) {
28 |             while ((p = ply_get_next_property(el, p))) {
29 |                 if (ply_get_property_info(p, &name, NULL, NULL, NULL)
30 |                     && (!strcmp(name, "red") || !strcmp(name, "green") || !strcmp(name, "blue")))
31 |
return 1; 32 | } 33 | } 34 | } 35 | return 0; 36 | } 37 | 38 | int has_normals(p_ply ply) { 39 | p_ply_element el = NULL; 40 | p_ply_property p = NULL; 41 | const char *name; 42 | while ((el = ply_get_next_element(ply, el))) { 43 | if (ply_get_element_info(el, &name, NULL) && !strcmp(name, "vertex")) { 44 | while ((p = ply_get_next_property(el, p))) { 45 | if (ply_get_property_info(p, &name, NULL, NULL, NULL) 46 | && (!strcmp(name, "nx") || !strcmp(name, "ny") || !strcmp(name, "nz"))) 47 | return 1; 48 | } 49 | } 50 | } 51 | return 0; 52 | } 53 | 54 | static PyObject * plyutils_read(PyObject *self, PyObject *args) 55 | { 56 | const char *filename; 57 | p_ply ply = NULL; 58 | int use_color, use_normals; 59 | long n_verts, n_faces; 60 | PyObject *x, *y, *z, *r, *g, *b; 61 | PyObject *nx, *ny, *nz; 62 | PyObject *tri; 63 | 64 | if (!PyArg_ParseTuple(args, "s", &filename)) { 65 | PyErr_SetString(PlyutilsError, "plyutils.read doesn't know what to do without a filename."); 66 | return NULL; 67 | } 68 | ply = ply_open(filename, error_cb); 69 | if (!ply) { 70 | PyErr_SetString(PlyutilsError, "Failed to open PLY file."); 71 | return NULL; 72 | } 73 | if (!ply_read_header(ply)) { 74 | PyErr_SetString(PlyutilsError, "plyread_mex: Bad raw header."); 75 | return NULL; 76 | } 77 | 78 | use_color = has_color(ply); 79 | use_normals = has_normals(ply); 80 | 81 | n_verts = ply_set_read_cb(ply, "vertex", "x", vertex_cb, (void*)&x, 0); 82 | ply_set_read_cb(ply, "vertex", "y", vertex_cb, (void*)&y, 0); 83 | ply_set_read_cb(ply, "vertex", "z", vertex_cb, (void*)&z, 0); 84 | if (use_color) { 85 | ply_set_read_cb(ply, "vertex", "red", vertex_cb, (void*)&r, 0); 86 | ply_set_read_cb(ply, "vertex", "green", vertex_cb, (void*)&g, 0); 87 | ply_set_read_cb(ply, "vertex", "blue", vertex_cb, (void*)&b, 0); 88 | } 89 | if (use_normals) { 90 | ply_set_read_cb(ply, "vertex", "nx", vertex_cb, (void*)&nx, 0); 91 | ply_set_read_cb(ply, "vertex", "ny", vertex_cb, (void*)&ny, 0); 92 | ply_set_read_cb(ply, "vertex", "nz", vertex_cb, (void*)&nz, 0); 93 | } 94 | n_faces = ply_set_read_cb(ply, "face", "vertex_indices", face_cb, (void*)&tri, 0); 95 | if (n_faces==0) 96 | n_faces = ply_set_read_cb(ply, "face", "vertex_index", face_cb, (void*)&tri, 0); 97 | 98 | x = PyList_New(n_verts); y = PyList_New(n_verts); z = PyList_New(n_verts); 99 | if (use_color) { 100 | r = PyList_New(n_verts); g = PyList_New(n_verts); b = PyList_New(n_verts); 101 | } 102 | if (use_normals) { 103 | nx = PyList_New(n_verts); ny = PyList_New(n_verts); nz = PyList_New(n_verts); 104 | } 105 | tri = Py_BuildValue("[N,N,N]", PyList_New(n_faces), PyList_New(n_faces), PyList_New(n_faces)); 106 | 107 | if (!ply_read(ply)) { 108 | PyErr_SetString(PlyutilsError, "Read failed."); 109 | return NULL; 110 | } 111 | ply_close(ply); 112 | 113 | if (use_color && !use_normals) 114 | return Py_BuildValue("{s:[N,N,N],s:N,s:[N,N,N]}", "pts", x, y, z, "tri", tri, "color", r, g, b); 115 | if (!use_color && use_normals) 116 | return Py_BuildValue("{s:[N,N,N],s:N,s:[N,N,N]}", "pts", x, y, z, "tri", tri, "normals", nx, ny, nz); 117 | if (use_color && use_normals) 118 | return Py_BuildValue("{s:[N,N,N],s:N,s:[N,N,N],s:[N,N,N]}", "pts", x, y, z, "tri", tri, "color", r, g, b, "normals", nx, ny, nz); 119 | else 120 | return Py_BuildValue("{s:[N,N,N],s:N}", "pts", x, y, z, "tri", tri); 121 | } 122 | 123 | static PyObject * plyutils_write(PyObject *self, PyObject *args) 124 | { 125 | const char *filename; 126 | PyObject *pts, *tri, *color, *ascii, *little_endian, *comments; 127 | PyObject 
*normals = NULL; 128 | int use_color, use_normals, res; 129 | p_ply ply = NULL; 130 | PyObject *row; 131 | long ii; 132 | const char *comment; 133 | 134 | if (!PyArg_ParseTuple(args, "OOOsO|O|OO", &pts, &tri, &color, &filename, &ascii, &little_endian, &comments, &normals)) 135 | return NULL; 136 | 137 | use_color = (PyList_Size(pts) == PyList_Size(color)); 138 | use_normals = 0; 139 | if (normals!=NULL) 140 | use_normals = (PyList_Size(pts) == PyList_Size(normals)); 141 | 142 | if (ascii == Py_True) 143 | ply = ply_create(filename, PLY_ASCII, error_cb); 144 | else { 145 | if (little_endian == Py_True) 146 | ply = ply_create(filename, PLY_LITTLE_ENDIAN, error_cb); 147 | else 148 | ply = ply_create(filename, PLY_BIG_ENDIAN, error_cb); 149 | } 150 | 151 | if (!ply) { 152 | PyErr_SetString(PlyutilsError, "Failed to create PLY file."); 153 | return NULL; 154 | } 155 | 156 | res = 1; 157 | 158 | for (ii = 0; ii < PyList_Size(comments); ++ii) { 159 | comment = PyString_AsString(PyObject_Str(PyList_GetItem(comments, ii))); 160 | res &= ply_add_comment(ply, comment); 161 | } 162 | 163 | res &= ply_add_element(ply, "vertex", PyList_Size(pts)); 164 | res &= ply_add_scalar_property(ply, "x", PLY_FLOAT); 165 | res &= ply_add_scalar_property(ply, "y", PLY_FLOAT); 166 | res &= ply_add_scalar_property(ply, "z", PLY_FLOAT); 167 | 168 | if(use_normals){ 169 | res &= ply_add_scalar_property(ply, "nx", PLY_FLOAT); 170 | res &= ply_add_scalar_property(ply, "ny", PLY_FLOAT); 171 | res &= ply_add_scalar_property(ply, "nz", PLY_FLOAT); 172 | } 173 | 174 | if(use_color){ 175 | res &= ply_add_scalar_property(ply, "red", PLY_UCHAR); 176 | res &= ply_add_scalar_property(ply, "green", PLY_UCHAR); 177 | res &= ply_add_scalar_property(ply, "blue", PLY_UCHAR); 178 | } 179 | 180 | res &= ply_add_element(ply, "face", PyList_Size(tri)); 181 | res &= ply_add_list_property(ply, "vertex_indices", PLY_UCHAR, PLY_INT); 182 | 183 | res &= ply_write_header(ply); 184 | if (!res) { 185 | PyErr_SetString(PlyutilsError, "Failed to write header."); 186 | return NULL; 187 | } 188 | 189 | 190 | 191 | for (ii = 0; ii < PyList_Size(pts); ++ii) { 192 | row = PyList_GetItem(pts, ii); 193 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 0))); 194 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 1))); 195 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 2))); 196 | if(use_normals){ 197 | row = PyList_GetItem(normals, ii); 198 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 0))); 199 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 1))); 200 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 2))); 201 | } 202 | if(use_color){ 203 | row = PyList_GetItem(color, ii); 204 | res &= ply_write(ply, (unsigned char)PyInt_AsUnsignedLongMask(PyList_GetItem(row, 0))); 205 | res &= ply_write(ply, (unsigned char)PyInt_AsUnsignedLongMask(PyList_GetItem(row, 1))); 206 | res &= ply_write(ply, (unsigned char)PyInt_AsUnsignedLongMask(PyList_GetItem(row, 2))); 207 | } 208 | } 209 | if (!res) { 210 | PyErr_SetString(PlyutilsError, "Error writing points."); 211 | return NULL; 212 | } 213 | 214 | for (ii = 0; ii < PyList_Size(tri); ++ii) { 215 | row = PyList_GetItem(tri, ii); 216 | res &= ply_write(ply, 3); 217 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 0))); 218 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 1))); 219 | res &= ply_write(ply, PyFloat_AsDouble(PyList_GetItem(row, 2))); 220 | } 221 | if (!res) { 222 | PyErr_SetString(PlyutilsError, "Error writing 
faces."); 223 | return NULL; 224 | } 225 | 226 | ply_close(ply); 227 | Py_INCREF(Py_None); 228 | return Py_None; 229 | } 230 | 231 | int vertex_cb(p_ply_argument argument) { 232 | void *p; PyObject* list; 233 | long ii; 234 | PyObject *val; 235 | 236 | ply_get_argument_element(argument, NULL, &ii); 237 | ply_get_argument_user_data(argument, &p, NULL); 238 | list = (PyObject*)(*(void**)p); 239 | 240 | val = PyFloat_FromDouble(ply_get_argument_value(argument)); 241 | // PyList_Append(list, val); 242 | // Py_DECREF(val); 243 | PyList_SET_ITEM(list, ii, val); 244 | 245 | return 1; 246 | } 247 | 248 | int face_cb(p_ply_argument argument) { 249 | void *p; PyObject* tri; 250 | long ii; 251 | long length, value_index; 252 | PyObject *val; 253 | 254 | ply_get_argument_element(argument, NULL, &ii); 255 | ply_get_argument_user_data(argument, &p, NULL); 256 | tri = (PyObject*)(*(void**)p); 257 | 258 | ply_get_argument_property(argument, NULL, &length, &value_index); 259 | if (value_index >= 0 && value_index < PyList_Size(tri)) { 260 | PyObject* slice = PyList_GetItem(tri, value_index); 261 | 262 | val = PyFloat_FromDouble(ply_get_argument_value(argument)); 263 | // PyList_Append(slice, val); 264 | // Py_DECREF(val); 265 | PyList_SET_ITEM(slice, ii, val); 266 | } 267 | 268 | return 1; 269 | } 270 | 271 | void error_cb(const char *message) { 272 | PyErr_SetString(PlyutilsError, message); 273 | } 274 | -------------------------------------------------------------------------------- /mesh/src/rply.h: -------------------------------------------------------------------------------- 1 | #ifndef PLY_H 2 | #define PLY_H 3 | /* ---------------------------------------------------------------------- 4 | * RPly library, read/write PLY files 5 | * Diego Nehab, Princeton University 6 | * http://www.cs.princeton.edu/~diego/professional/rply 7 | * 8 | * This library is distributed under the MIT License. See notice 9 | * at the end of this file. 
10 | * ---------------------------------------------------------------------- */ 11 | 12 | #ifdef __cplusplus 13 | extern "C" { 14 | #endif 15 | 16 | #define RPLY_VERSION "RPly 1.01" 17 | #define RPLY_COPYRIGHT "Copyright (C) 2003-2005 Diego Nehab" 18 | #define RPLY_AUTHORS "Diego Nehab" 19 | 20 | /* ---------------------------------------------------------------------- 21 | * Types 22 | * ---------------------------------------------------------------------- */ 23 | /* structures are opaque */ 24 | typedef struct t_ply_ *p_ply; 25 | typedef struct t_ply_element_ *p_ply_element; 26 | typedef struct t_ply_property_ *p_ply_property; 27 | typedef struct t_ply_argument_ *p_ply_argument; 28 | 29 | /* ply format mode type */ 30 | typedef enum e_ply_storage_mode_ { 31 | PLY_BIG_ENDIAN, 32 | PLY_LITTLE_ENDIAN, 33 | PLY_ASCII, 34 | PLY_DEFAULT /* has to be the last in enum */ 35 | } e_ply_storage_mode; /* order matches ply_storage_mode_list */ 36 | 37 | /* ply data type */ 38 | typedef enum e_ply_type { 39 | PLY_INT8, PLY_UINT8, PLY_INT16, PLY_UINT16, 40 | PLY_INT32, PLY_UIN32, PLY_FLOAT32, PLY_FLOAT64, 41 | PLY_CHAR, PLY_UCHAR, PLY_SHORT, PLY_USHORT, 42 | PLY_INT, PLY_UINT, PLY_FLOAT, PLY_DOUBLE, 43 | PLY_LIST /* has to be the last in enum */ 44 | } e_ply_type; /* order matches ply_type_list */ 45 | 46 | /* ---------------------------------------------------------------------- 47 | * Property reading callback prototype 48 | * 49 | * message: error message 50 | * ---------------------------------------------------------------------- */ 51 | typedef void (*p_ply_error_cb)(const char *message); 52 | 53 | /* ---------------------------------------------------------------------- 54 | * Opens a ply file for reading (fails if file is not a ply file) 55 | * 56 | * error_cb: error callback function 57 | * name: file name 58 | * 59 | * Returns 1 if successful, 0 otherwise 60 | * ---------------------------------------------------------------------- */ 61 | p_ply ply_open(const char *name, p_ply_error_cb error_cb); 62 | 63 | /* ---------------------------------------------------------------------- 64 | * Reads and parses the header of a ply file returned by ply_open 65 | * 66 | * ply: handle returned by ply_open 67 | * 68 | * Returns 1 if successfull, 0 otherwise 69 | * ---------------------------------------------------------------------- */ 70 | int ply_read_header(p_ply ply); 71 | 72 | /* ---------------------------------------------------------------------- 73 | * Property reading callback prototype 74 | * 75 | * argument: parameters for property being processed when callback is called 76 | * 77 | * Returns 1 if should continue processing file, 0 if should abort. 78 | * ---------------------------------------------------------------------- */ 79 | typedef int (*p_ply_read_cb)(p_ply_argument argument); 80 | 81 | /* ---------------------------------------------------------------------- 82 | * Sets up callbacks for property reading after header was parsed 83 | * 84 | * ply: handle returned by ply_open 85 | * element_name: element where property is 86 | * property_name: property to associate element with 87 | * read_cb: function to be called for each property value 88 | * pdata/idata: user data that will be passed to callback 89 | * 90 | * Returns 0 if no element or no property in element, returns the 91 | * number of element instances otherwise. 
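 *
 * A minimal read-side usage sketch (hypothetical caller code; error
 * handling omitted, "mesh.ply" and vertex_x_cb are placeholder names):
 *
 *     static int vertex_x_cb(p_ply_argument argument) {
 *         printf("%g\n", ply_get_argument_value(argument));
 *         return 1;
 *     }
 *
 *     p_ply ply = ply_open("mesh.ply", NULL);
 *     if (ply_read_header(ply)) {
 *         long n_verts = ply_set_read_cb(ply, "vertex", "x", vertex_x_cb, NULL, 0);
 *         ply_read(ply);    (invokes vertex_x_cb once per vertex "x" value)
 *     }
 *     ply_close(ply);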
92 | * ---------------------------------------------------------------------- */ 93 | long ply_set_read_cb(p_ply ply, const char *element_name, 94 | const char *property_name, p_ply_read_cb read_cb, 95 | void *pdata, long idata); 96 | 97 | /* ---------------------------------------------------------------------- 98 | * Returns information about the element originating a callback 99 | * 100 | * argument: handle to argument 101 | * element: receives a the element handle (if non-null) 102 | * instance_index: receives the index of the current element instance 103 | * (if non-null) 104 | * 105 | * Returns 1 if successfull, 0 otherwise 106 | * ---------------------------------------------------------------------- */ 107 | int ply_get_argument_element(p_ply_argument argument, 108 | p_ply_element *element, long *instance_index); 109 | 110 | /* ---------------------------------------------------------------------- 111 | * Returns information about the property originating a callback 112 | * 113 | * argument: handle to argument 114 | * property: receives the property handle (if non-null) 115 | * length: receives the number of values in this property (if non-null) 116 | * value_index: receives the index of current property value (if non-null) 117 | * 118 | * Returns 1 if successfull, 0 otherwise 119 | * ---------------------------------------------------------------------- */ 120 | int ply_get_argument_property(p_ply_argument argument, 121 | p_ply_property *property, long *length, long *value_index); 122 | 123 | /* ---------------------------------------------------------------------- 124 | * Returns user data associated with callback 125 | * 126 | * pdata: receives a copy of user custom data pointer (if non-null) 127 | * idata: receives a copy of user custom data integer (if non-null) 128 | * 129 | * Returns 1 if successfull, 0 otherwise 130 | * ---------------------------------------------------------------------- */ 131 | int ply_get_argument_user_data(p_ply_argument argument, void **pdata, 132 | long *idata); 133 | 134 | /* ---------------------------------------------------------------------- 135 | * Returns the value associated with a callback 136 | * 137 | * argument: handle to argument 138 | * 139 | * Returns the current data item 140 | * ---------------------------------------------------------------------- */ 141 | double ply_get_argument_value(p_ply_argument argument); 142 | 143 | /* ---------------------------------------------------------------------- 144 | * Reads all elements and properties calling the callbacks defined with 145 | * calls to ply_set_read_cb 146 | * 147 | * ply: handle returned by ply_open 148 | * 149 | * Returns 1 if successfull, 0 otherwise 150 | * ---------------------------------------------------------------------- */ 151 | int ply_read(p_ply ply); 152 | 153 | /* ---------------------------------------------------------------------- 154 | * Iterates over all elements by returning the next element. 155 | * Call with NULL to return handle to first element. 156 | * 157 | * ply: handle returned by ply_open 158 | * last: handle of last element returned (NULL for first element) 159 | * 160 | * Returns element if successfull or NULL if no more elements 161 | * ---------------------------------------------------------------------- */ 162 | p_ply_element ply_get_next_element(p_ply ply, p_ply_element last); 163 | 164 | /* ---------------------------------------------------------------------- 165 | * Iterates over all comments by returning the next comment. 
166 | * Call with NULL to return pointer to first comment. 167 | * 168 | * ply: handle returned by ply_open 169 | * last: pointer to last comment returned (NULL for first comment) 170 | * 171 | * Returns comment if successfull or NULL if no more comments 172 | * ---------------------------------------------------------------------- */ 173 | const char *ply_get_next_comment(p_ply ply, const char *last); 174 | 175 | /* ---------------------------------------------------------------------- 176 | * Iterates over all obj_infos by returning the next obj_info. 177 | * Call with NULL to return pointer to first obj_info. 178 | * 179 | * ply: handle returned by ply_open 180 | * last: pointer to last obj_info returned (NULL for first obj_info) 181 | * 182 | * Returns obj_info if successfull or NULL if no more obj_infos 183 | * ---------------------------------------------------------------------- */ 184 | const char *ply_get_next_obj_info(p_ply ply, const char *last); 185 | 186 | /* ---------------------------------------------------------------------- 187 | * Returns information about an element 188 | * 189 | * element: element of interest 190 | * name: receives a pointer to internal copy of element name (if non-null) 191 | * ninstances: receives the number of instances of this element (if non-null) 192 | * 193 | * Returns 1 if successfull or 0 otherwise 194 | * ---------------------------------------------------------------------- */ 195 | int ply_get_element_info(p_ply_element element, const char** name, 196 | long *ninstances); 197 | 198 | /* ---------------------------------------------------------------------- 199 | * Iterates over all properties by returning the next property. 200 | * Call with NULL to return handle to first property. 201 | * 202 | * element: handle of element with the properties of interest 203 | * last: handle of last property returned (NULL for first property) 204 | * 205 | * Returns element if successfull or NULL if no more properties 206 | * ---------------------------------------------------------------------- */ 207 | p_ply_property ply_get_next_property(p_ply_element element, 208 | p_ply_property last); 209 | 210 | /* ---------------------------------------------------------------------- 211 | * Returns information about a property 212 | * 213 | * property: handle to property of interest 214 | * name: receives a pointer to internal copy of property name (if non-null) 215 | * type: receives the property type (if non-null) 216 | * length_type: for list properties, receives the scalar type of 217 | * the length field (if non-null) 218 | * value_type: for list properties, receives the scalar type of the value 219 | * fields (if non-null) 220 | * 221 | * Returns 1 if successfull or 0 otherwise 222 | * ---------------------------------------------------------------------- */ 223 | int ply_get_property_info(p_ply_property property, const char** name, 224 | e_ply_type *type, e_ply_type *length_type, e_ply_type *value_type); 225 | 226 | /* ---------------------------------------------------------------------- 227 | * Creates new ply file 228 | * 229 | * name: file name 230 | * storage_mode: file format mode 231 | * 232 | * Returns handle to ply file if successfull, NULL otherwise 233 | * ---------------------------------------------------------------------- */ 234 | p_ply ply_create(const char *name, e_ply_storage_mode storage_mode, 235 | p_ply_error_cb error_cb); 236 | 237 | /* ---------------------------------------------------------------------- 238 | * Adds a new element to 
the ply file created by ply_create 239 | * 240 | * ply: handle returned by ply_create 241 | * name: name of new element 242 | * ninstances: number of element of this time in file 243 | * 244 | * Returns 1 if successfull, 0 otherwise 245 | * ---------------------------------------------------------------------- */ 246 | int ply_add_element(p_ply ply, const char *name, long ninstances); 247 | 248 | /* ---------------------------------------------------------------------- 249 | * Adds a new property to the last element added by ply_add_element 250 | * 251 | * ply: handle returned by ply_create 252 | * name: name of new property 253 | * type: property type 254 | * length_type: scalar type of length field of a list property 255 | * value_type: scalar type of value fields of a list property 256 | * 257 | * Returns 1 if successfull, 0 otherwise 258 | * ---------------------------------------------------------------------- */ 259 | int ply_add_property(p_ply ply, const char *name, e_ply_type type, 260 | e_ply_type length_type, e_ply_type value_type); 261 | 262 | /* ---------------------------------------------------------------------- 263 | * Adds a new list property to the last element added by ply_add_element 264 | * 265 | * ply: handle returned by ply_create 266 | * name: name of new property 267 | * length_type: scalar type of length field of a list property 268 | * value_type: scalar type of value fields of a list property 269 | * 270 | * Returns 1 if successfull, 0 otherwise 271 | * ---------------------------------------------------------------------- */ 272 | int ply_add_list_property(p_ply ply, const char *name, 273 | e_ply_type length_type, e_ply_type value_type); 274 | 275 | /* ---------------------------------------------------------------------- 276 | * Adds a new property to the last element added by ply_add_element 277 | * 278 | * ply: handle returned by ply_create 279 | * name: name of new property 280 | * type: property type 281 | * 282 | * Returns 1 if successfull, 0 otherwise 283 | * ---------------------------------------------------------------------- */ 284 | int ply_add_scalar_property(p_ply ply, const char *name, e_ply_type type); 285 | 286 | /* ---------------------------------------------------------------------- 287 | * Adds a new comment item 288 | * 289 | * ply: handle returned by ply_create 290 | * comment: pointer to string with comment text 291 | * 292 | * Returns 1 if successfull, 0 otherwise 293 | * ---------------------------------------------------------------------- */ 294 | int ply_add_comment(p_ply ply, const char *comment); 295 | 296 | /* ---------------------------------------------------------------------- 297 | * Adds a new obj_info item 298 | * 299 | * ply: handle returned by ply_create 300 | * comment: pointer to string with obj_info data 301 | * 302 | * Returns 1 if successfull, 0 otherwise 303 | * ---------------------------------------------------------------------- */ 304 | int ply_add_obj_info(p_ply ply, const char *obj_info); 305 | 306 | /* ---------------------------------------------------------------------- 307 | * Writes the ply file header after all element and properties have been 308 | * defined by calls to ply_add_element and ply_add_property 309 | * 310 | * ply: handle returned by ply_create 311 | * 312 | * Returns 1 if successfull, 0 otherwise 313 | * ---------------------------------------------------------------------- */ 314 | int ply_write_header(p_ply ply); 315 | 316 | /* 
---------------------------------------------------------------------- 317 | * Writes one property value, in the order they should be written to the 318 | * file. For each element type, write all elements of that type in order. 319 | * For each element, write all its properties in order. For scalar 320 | * properties, just write the value. For list properties, write the length 321 | * and then each of the values. 322 | * 323 | * ply: handle returned by ply_create 324 | * 325 | * Returns 1 if successfull, 0 otherwise 326 | * ---------------------------------------------------------------------- */ 327 | int ply_write(p_ply ply, double value); 328 | 329 | /* ---------------------------------------------------------------------- 330 | * Closes a ply file handle. Releases all memory used by handle 331 | * 332 | * ply: handle to be closed. 333 | * 334 | * Returns 1 if successfull, 0 otherwise 335 | * ---------------------------------------------------------------------- */ 336 | int ply_close(p_ply ply); 337 | 338 | #ifdef __cplusplus 339 | } 340 | #endif 341 | 342 | #endif /* RPLY_H */ 343 | 344 | /* ---------------------------------------------------------------------- 345 | * Copyright (C) 2003-2005 Diego Nehab. All rights reserved. 346 | * 347 | * Permission is hereby granted, free of charge, to any person obtaining 348 | * a copy of this software and associated documentation files (the 349 | * "Software"), to deal in the Software without restriction, including 350 | * without limitation the rights to use, copy, modify, merge, publish, 351 | * distribute, sublicense, and/or sell copies of the Software, and to 352 | * permit persons to whom the Software is furnished to do so, subject to 353 | * the following conditions: 354 | * 355 | * The above copyright notice and this permission notice shall be 356 | * included in all copies or substantial portions of the Software. 357 | * 358 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 359 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 360 | * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 361 | * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 362 | * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 363 | * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 364 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 365 | * ---------------------------------------------------------------------- */ 366 | -------------------------------------------------------------------------------- /mesh/serialization/serialization.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | # Created by Matthew Loper on 2013-02-20. 
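# The functions in this module take `self` as first argument and are bound
# onto the Mesh class, so callers normally go through Mesh. A minimal
# round-trip sketch (file names are hypothetical; the import path is assumed
# from this package's layout):
#
#     from psbody.meshlite.mesh import Mesh
#     m = Mesh(filename='example.obj')   # dispatches through load_from_file
#     m.write_ply('example.ply')         # binary little-endian PLY by default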
4 | 5 | 6 | import re 7 | import os 8 | import sys 9 | import numpy as np 10 | 11 | from ..errors import SerializationError 12 | 13 | 14 | """ 15 | serialization.py 16 | 17 | 18 | """ 19 | 20 | __all__ = ['load_from_obj', 'load_from_obj_cpp', 'write_obj', 'write_mtl', 21 | 'write_json', 'write_three_json', 22 | 'set_landmark_indices_from_ppfile', 'set_landmark_indices_from_lmrkfile', 23 | 'load_from_ply', 'load_from_file'] 24 | 25 | # import os.path 26 | 27 | 28 | def load_from_obj(self, filename): 29 | v = [] 30 | f = [] 31 | ft = [] 32 | fn = [] 33 | vt = [] 34 | vn = [] 35 | vc = [] 36 | segm = dict() 37 | landm_raw_xyz = dict() 38 | currSegm = '' 39 | currLandm = '' 40 | with open(filename, 'r', buffering=2 ** 10) as fp: 41 | for line in fp: 42 | line = line.split() 43 | if len(line) > 0: 44 | if line[0] == 'v': 45 | v.append([float(x) for x in line[1:4]]) 46 | if len(line) == 7: 47 | vc.append([float(x) for x in line[4:]]) 48 | if currLandm: 49 | landm_raw_xyz[currLandm] = v[-1] 50 | currLandm = '' 51 | elif line[0] == 'vt': 52 | vt.append([float(x) for x in line[1:]]) 53 | elif line[0] == 'vn': 54 | vn.append([float(x) for x in line[1:]]) 55 | elif line[0] == 'f': 56 | faces = [x.split('/') for x in line[1:]] 57 | for iV in range(1, len(faces) - 1): # trivially triangulate faces 58 | f.append([int(faces[0][0]), int(faces[iV][0]), int(faces[iV + 1][0])]) 59 | if (len(faces[0]) > 1) and faces[0][1]: 60 | ft.append([int(faces[0][1]), int(faces[iV][1]), int(faces[iV + 1][1])]) 61 | if (len(faces[0]) > 2) and faces[0][2]: 62 | fn.append([int(faces[0][2]), int(faces[iV][2]), int(faces[iV + 1][2])]) 63 | if currSegm: 64 | segm[currSegm].append(len(f) - 1) 65 | elif line[0] == 'g': 66 | currSegm = line[1] 67 | if currSegm not in segm.keys(): 68 | segm[currSegm] = [] 69 | elif line[0] == '#landmark': 70 | currLandm = line[1] 71 | elif line[0] == 'mtllib': 72 | self.materials_filepath = os.path.join(os.path.dirname(filename), line[1]) 73 | self.materials_file = file(self.materials_filepath, 'r').readlines() 74 | 75 | self.v = np.array(v) 76 | self.f = np.array(f) - 1 77 | if vt: 78 | self.vt = np.array(vt) 79 | if vn: 80 | self.vn = np.array(vn) 81 | if vc: 82 | self.vc = np.array(vc) 83 | if ft: 84 | self.ft = np.array(ft) - 1 85 | if fn: 86 | self.fn = np.array(fn) - 1 87 | self.segm = segm 88 | self.landm_raw_xyz = landm_raw_xyz 89 | self.recompute_landmark_indices() 90 | 91 | if hasattr(self, 'materials_file'): 92 | for line in self.materials_file: 93 | if line and line.split() and line.split()[0] == 'map_Ka': 94 | self.texture_filepath = os.path.abspath(os.path.join(os.path.dirname(filename), line.split()[1])) 95 | 96 | 97 | def load_from_obj_cpp(self, filename): 98 | from .loadobj import loadobj 99 | from collections import OrderedDict 100 | 101 | v, vt, vn, f, ft, fn, mtl_path, landm, segm = loadobj(filename) 102 | if v.size != 0: 103 | self.v = v 104 | if f.size != 0: 105 | self.f = f 106 | if vn.size != 0: 107 | self.vn = vn 108 | if vt.size != 0: 109 | self.vt = vt 110 | if fn.size != 0: 111 | self.fn = fn 112 | if ft.size != 0: 113 | self.ft = ft 114 | if segm: 115 | self.segm = OrderedDict([(k, v if type(v) is list else v.tolist()) for k, v in segm.items()]) 116 | if mtl_path: 117 | try: 118 | self.materials_filepath = os.path.join(os.path.dirname(filename), mtl_path.strip()) 119 | self.materials_file = file(self.materials_filepath, 'r').readlines() 120 | except: 121 | self.materials_filepath = None 122 | if hasattr(self, 'materials_file'): 123 | for line in self.materials_file: 
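            # a 'map_Ka' entry in the material file names the texture image,
            # which is resolved relative to the mesh file below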
124 | if line and line.split() and line.split()[0] == 'map_Ka': 125 | self.texture_filepath = os.path.abspath(os.path.join(os.path.dirname(filename), line.split()[1])) 126 | if landm: 127 | self.landm = landm 128 | self.recompute_landmark_xyz() 129 | 130 | 131 | def write_obj(self, filename, flip_faces=False, group=False, comments=None): 132 | if os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)): 133 | os.makedirs(os.path.dirname(filename)) 134 | 135 | ff = -1 if flip_faces else 1 136 | 137 | def write_face_to_obj_file(face_index, obj_file): 138 | vertex_indices = self.f[face_index][::ff] + 1 139 | 140 | if hasattr(self, 'ft'): 141 | texture_indices = self.ft[face_index][::ff] + 1 142 | if not hasattr(self, 'fn'): 143 | self.reset_face_normals() 144 | normal_indices = self.fn[face_index][::ff] + 1 145 | obj_file.write('f %d/%d/%d %d/%d/%d %d/%d/%d\n' % tuple(np.array([vertex_indices, texture_indices, normal_indices]).T.flatten())) 146 | elif hasattr(self, 'fn'): 147 | normal_indices = self.fn[face_index][::ff] + 1 148 | obj_file.write('f %d//%d %d//%d %d//%d\n' % tuple(np.array([vertex_indices, normal_indices]).T.flatten())) 149 | else: 150 | obj_file.write('f %d %d %d\n' % tuple(vertex_indices)) 151 | 152 | with open(filename, 'w') as fi: 153 | if comments is not None: 154 | if isinstance(comments, basestring): 155 | comments = [comments] 156 | for comment in comments: 157 | for line in comment.split("\n"): 158 | fi.write("# %s\n" % line) 159 | 160 | if hasattr(self, 'texture_filepath'): 161 | outfolder = os.path.dirname(filename) 162 | outbase = os.path.splitext(os.path.basename(filename))[0] 163 | mtlpath = outbase + '.mtl' 164 | fi.write('mtllib %s\n' % mtlpath) 165 | from shutil import copyfile 166 | texture_name = outbase + os.path.splitext(self.texture_filepath)[1] 167 | if os.path.abspath(self.texture_filepath) != os.path.abspath(os.path.join(outfolder, texture_name)): 168 | copyfile(self.texture_filepath, os.path.join(outfolder, texture_name)) 169 | self.write_mtl(os.path.join(outfolder, mtlpath), outbase, texture_name) 170 | 171 | for r in self.v: 172 | fi.write('v %f %f %f\n' % (r[0], r[1], r[2])) 173 | 174 | if hasattr(self, 'fn') and hasattr(self, 'vn'): 175 | for r in self.vn: 176 | fi.write('vn %f %f %f\n' % (r[0], r[1], r[2])) 177 | 178 | if hasattr(self, 'ft'): 179 | for r in self.vt: 180 | if len(r) == 3: 181 | fi.write('vt %f %f %f\n' % (r[0], r[1], r[2])) 182 | else: 183 | fi.write('vt %f %f\n' % (r[0], r[1])) 184 | if hasattr(self, 'segm') and self.segm and not group: 185 | for p in self.segm.keys(): 186 | fi.write('g %s\n' % p) 187 | for face_index in self.segm[p]: 188 | write_face_to_obj_file(face_index, fi) 189 | else: 190 | if hasattr(self, 'f'): 191 | for face_index in range(len(self.f)): 192 | write_face_to_obj_file(face_index, fi) 193 | 194 | 195 | def write_mtl(self, path, material_name, texture_name): 196 | """Material attribute file serialization""" 197 | with open(path, 'w') as f: 198 | f.write('newmtl %s\n' % material_name) 199 | # copied from another obj, no idea about what it does 200 | f.write('ka 0.329412 0.223529 0.027451\n') 201 | f.write('kd 0.780392 0.568627 0.113725\n') 202 | f.write('ks 0.992157 0.941176 0.807843\n') 203 | f.write('illum 0\n') 204 | f.write('map_Ka %s\n' % texture_name) 205 | f.write('map_Kd %s\n' % texture_name) 206 | f.write('map_Ks %s\n' % texture_name) 207 | 208 | 209 | def write_ply(self, filename, flip_faces=False, ascii=False, little_endian=True, comments=[]): 210 | import plyutils 211 | 212 | if 
os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)):
213 |         os.makedirs(os.path.dirname(filename))
214 |
215 |     ff = -1 if flip_faces else 1
216 |
217 |     if isinstance(comments, basestring):
218 |         comments = [comments]
219 |     comments = filter(lambda c: len(c) > 0, sum(map(lambda c: c.split("\n"), comments), []))
220 |
221 |     plyutils.write(list([list(x) for x in self.v]),
222 |                    list([list(x[::ff]) for x in self.f] if hasattr(self, 'f') else []),
223 |                    list([list((x * 255).astype(int)) for x in ([] if not hasattr(self, 'vc') else self.vc)]),
224 |                    filename, ascii, little_endian, list(comments),
225 |                    list([list(x) for x in ([] if not hasattr(self, 'vn') else self.vn)]))
226 |
227 |
228 | def write_three_json(self, filename, name=""):
229 |     import json
230 |
231 |     if os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)):
232 |         os.makedirs(os.path.dirname(filename))
233 |
234 |     name = name if name else self.basename
235 |     name = name if name else os.path.splitext(os.path.basename(filename))[0]
236 |
237 |     metadata = {"formatVersion": 3.1,
238 |                 "sourceFile": "%s.obj" % name,
239 |                 "generatedBy": "korper",
240 |                 "vertices": len(self.v),
241 |                 "faces": len(self.f),
242 |                 "normals": len(self.vn),
243 |                 "colors": 0,
244 |                 "uvs": len(self.vt),
245 |                 "materials": 1
246 |                 }
247 |     materials = [{"DbgColor": 15658734,
248 |                   "DbgIndex": 0,
249 |                   "DbgName": "defaultMat",
250 |                   "colorAmbient": [0.0, 0.0, 0.0],
251 |                   "colorDiffuse": [0.64, 0.64, 0.64],
252 |                   "colorSpecular": [0.5, 0.5, 0.5],
253 |                   "illumination": 2,
254 |                   "opticalDensity": 1.0,
255 |                   "specularCoef": 96.078431,
256 |                   "transparency": 1.0
257 |                   }]
258 |
259 |     mesh_data = {"metadata": metadata,
260 |                  'scale': 0.35,
261 |                  "materials": materials,
262 |                  "morphTargets": [],
263 |                  "morphColors": [],
264 |                  "colors": []}
265 |     mesh_data["vertices"] = self.v.flatten().tolist()
266 |     mesh_data["normals"] = self.vn.flatten().tolist()
267 |     mesh_data["uvs"] = [np.array([[vt[0], vt[1]] for vt in self.vt]).flatten().tolist()]
268 |     mesh_data["faces"] = np.array([[42, self.f[i][0], self.f[i][1], self.f[i][2], 0, self.ft[i][0], self.ft[i][1], self.ft[i][2], self.fn[i][0], self.fn[i][1], self.fn[i][2]] for i in range(len(self.f))]).flatten().tolist()
269 |
270 |     json_or_js_file = open(filename, 'w')
271 |     json_or_js_file.write(json.dumps(mesh_data, indent=4))
272 |
273 |
274 | def write_json(self, filename, header="", footer="", name="", include_faces=True, texture_mode=True):
275 |     import json
276 |
277 |     if os.path.dirname(filename) and not os.path.exists(os.path.dirname(filename)):
278 |         os.makedirs(os.path.dirname(filename))
279 |
280 |     name = name if name else self.basename
281 |     name = name if name else os.path.splitext(os.path.basename(filename))[0]
282 |
283 |     if texture_mode:
284 |         vertex_texture_pairs = {}
285 |         for face_index in range(len(self.f)):
286 |             for i in [0, 1, 2]:
287 |                 v_index = self.f[face_index][i]
288 |                 t_index = self.ft[face_index][i]
289 |                 vertex_texture_pairs[(v_index, t_index)] = []
290 |         for face_index in range(len(self.f)):
291 |             for i in [0, 1, 2]:
292 |                 v_index = self.f[face_index][i]
293 |                 t_index = self.ft[face_index][i]
294 |                 vertex_texture_pairs[(v_index, t_index)].append((face_index, i))
295 |         mesh_data = {'name': name,
296 |                      'vertices': [],
297 |                      'textures': []
298 |                      }
299 |         for (v_index, t_index), faces_entries in vertex_texture_pairs.items():  # one entry per unique (vertex, texture) pair
300 |             mesh_data['vertices'].append(list(self.v[v_index]))
301 |             mesh_data['textures'].append(list(self.vt[t_index]))
302 |         if include_faces:
303 |             mesh_data['faces'] = list([[int(np.asscalar(i)) for i in list(x)]
for x in self.f]) 304 | 305 | else: 306 | mesh_data = {'name': name, 307 | 'vertices': list([list(x) for x in self.v]) 308 | } 309 | if include_faces: 310 | mesh_data['faces'] = list([[int(np.asscalar(i)) for i in list(x)] for x in self.f]) 311 | 312 | json_or_js_file = open(filename, 'w') 313 | if os.path.basename(filename).endswith('js'): 314 | json_or_js_file.write(header + '\nmesh = ') if header else json_or_js_file.write('var mesh = ') 315 | json_or_js_file.write(json.dumps(mesh_data, indent=4)) 316 | json_or_js_file.write(footer) 317 | else: 318 | json_or_js_file.write(json.dumps(mesh_data, indent=4)) 319 | 320 | 321 | def set_landmark_indices_from_ppfile(self, ppfilename): 322 | from xml.etree import ElementTree 323 | tree = ElementTree.parse(ppfilename) 324 | 325 | def get_xyz(e): 326 | try: 327 | return [float(e.attrib['x']), float(e.attrib['y']), float(e.attrib['z'])] 328 | except: # may happen if landmarks are just spaces 329 | return [0, 0, 0] 330 | 331 | self.landm_raw_xyz = dict((e.attrib['name'], get_xyz(e)) for e in tree.iter() if e.tag == 'point') 332 | self.recompute_landmark_indices(ppfilename) 333 | 334 | 335 | def set_landmark_indices_from_lmrkfile(self, lmrkfilename): 336 | lmrkfile = open(lmrkfilename, 'rb') 337 | self.landm_raw_xyz = {} 338 | for line in lmrkfile.readlines(): 339 | if not line.strip(): 340 | continue 341 | command = line.split()[0] 342 | data = map(lambda x: float(x), line.split()[1:]) 343 | 344 | if command == '_scale': 345 | selfscale_factor = np.matrix(data) 346 | elif command == '_translate': 347 | self.caesar_translation_vector = np.matrix(data) 348 | elif command == '_rotation': 349 | self.caesar_rotation_matrix = np.matrix(data).reshape(3, 3) 350 | else: 351 | self.landm_raw_xyz[command] = [data[1], data[2], data[0]] 352 | self.recompute_landmark_indices(lmrkfilename) 353 | 354 | 355 | def _is_lmrkfile(filename): 356 | is_lmrk = re.compile('^_scale\s[-\d\.]+\s+_translate(\s[-\d\.]+){3}\s+_rotation(\s[-\d\.]+){9}\s+') 357 | with open(filename) as f: 358 | data = f.read() 359 | res = is_lmrk.match(data) 360 | return res 361 | 362 | 363 | def set_landmark_indices_from_any(self, landmarks): 364 | ''' 365 | Sets landmarks given any of: 366 | - ppfile 367 | - ldmk file 368 | - dict of {name:inds} (i.e. mesh.landm) 369 | - dict of {name:xyz} (i.e. 
mesh.landm_xyz) 370 | - pkl, json, yaml file containing either of the above dicts 371 | ''' 372 | import json 373 | import pickle 374 | 375 | try: 376 | path_exists = os.path.exists(landmarks) 377 | except: 378 | path_exists = False 379 | if path_exists: 380 | if re.search(".ya{0,1}ml$", landmarks): 381 | import yaml 382 | with open(landmarks) as f: 383 | self.set_landmarks_from_raw(yaml.load(f)) 384 | elif re.search(".json$", landmarks): 385 | with open(landmarks) as f: 386 | self.set_landmarks_from_raw(json.load(f)) 387 | elif re.search(".pkl$", landmarks): 388 | with open(landmarks) as f: 389 | self.set_landmarks_from_raw(pickle.load(f)) 390 | elif _is_lmrkfile(landmarks): 391 | self.set_landmark_indices_from_lmrkfile(landmarks) 392 | else: 393 | try: 394 | self.set_landmark_indices_from_ppfile(landmarks) 395 | except: 396 | raise Exception("Landmark file %s is of unknown format" % landmarks) 397 | else: 398 | self.set_landmarks_from_raw(landmarks) 399 | 400 | 401 | def load_from_file(self, filename, use_cpp=True): 402 | if re.search(".ply$", filename): 403 | self.load_from_ply(filename) 404 | elif re.search(".obj$", filename): 405 | # XXX experimental cpp obj loader, if problems, switch back to 406 | if use_cpp: 407 | self.load_from_obj_cpp(filename) 408 | else: 409 | self.load_from_obj(filename) 410 | 411 | elif re.search(".bsf$", filename): 412 | self.load_from_bsf(filename) 413 | else: 414 | raise NotImplementedError("Unknown mesh file format.") 415 | 416 | 417 | def load_from_ply(self, filename): 418 | import plyutils 419 | try: 420 | res = plyutils.read(filename) 421 | except plyutils.error, e: 422 | raise SerializationError(e.message) 423 | self.v = np.array(res['pts']).T.copy() 424 | self.f = np.array(res['tri']).T.copy() 425 | if 'color' in res: 426 | self.set_vertex_colors(np.array(res['color']).T.copy() / 255) 427 | if 'normals' in res: 428 | self.vn = np.array(res['normals']).T.copy() 429 | -------------------------------------------------------------------------------- /mesh/mesh.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Max Planck Society. All rights reserved. 2 | # see accompanying LICENSE.txt file for licensing and contact information 3 | 4 | 5 | """ 6 | Mesh module 7 | ----------- 8 | 9 | """ 10 | 11 | 12 | import os 13 | import numpy as np 14 | 15 | import colors 16 | 17 | try: 18 | from .serialization import serialization 19 | except ImportError: 20 | pass 21 | 22 | from . import landmarks 23 | from . import texture 24 | from . 
import processing 25 | 26 | 27 | __all__ = ["Mesh"] 28 | 29 | 30 | class Mesh(object): 31 | """3d Triangulated Mesh class 32 | 33 | Attributes: 34 | v: Vx3 array of vertices 35 | f: Fx3 array of faces 36 | 37 | Optional attributes: 38 | fc: Fx3 array of face colors 39 | vc: Vx3 array of vertex colors 40 | vn: Vx3 array of vertex normals 41 | segm: dictionary of part names to triangle indices 42 | 43 | """ 44 | def __init__(self, 45 | v=None, 46 | f=None, 47 | segm=None, 48 | filename=None, 49 | ppfilename=None, 50 | lmrkfilename=None, 51 | basename=None, 52 | vc=None, 53 | fc=None, 54 | vscale=None, 55 | landmarks=None): 56 | """ 57 | :param v: vertices 58 | :param f: faces 59 | :param filename: a filename from which a mesh is loaded 60 | """ 61 | 62 | if filename is not None: 63 | self.load_from_file(filename) 64 | if hasattr(self, 'f'): 65 | self.f = np.require(self.f, dtype=np.uint32) 66 | self.v = np.require(self.v, dtype=np.float64) 67 | self.filename = filename 68 | if vscale is not None: 69 | self.v *= vscale 70 | if v is not None: 71 | self.v = np.array(v, dtype=np.float64) 72 | if vscale is not None: 73 | self.v *= vscale 74 | if f is not None: 75 | self.f = np.require(f, dtype=np.uint32) 76 | 77 | self.basename = basename 78 | if self.basename is None and filename is not None: 79 | self.basename = os.path.splitext(os.path.basename(filename))[0] 80 | 81 | if segm is not None: 82 | self.segm = segm 83 | if landmarks is not None: 84 | self.set_landmark_indices_from_any(landmarks) 85 | if ppfilename is not None: 86 | self.set_landmark_indices_from_ppfile(ppfilename) 87 | if lmrkfilename is not None: 88 | self.set_landmark_indices_from_lmrkfile(lmrkfilename) 89 | 90 | if vc is not None: 91 | self.set_vertex_colors(vc) 92 | 93 | if fc is not None: 94 | self.set_face_colors(fc) 95 | 96 | def __del__(self): 97 | if hasattr(self, 'textureID'): 98 | from OpenGL.GL import glDeleteTextures 99 | glDeleteTextures([self.textureID]) 100 | 101 | def edges_as_lines(self, copy_vertices=False): 102 | from .lines import Lines 103 | edges = self.f[:, [0, 1, 1, 2, 2, 0]].flatten().reshape(-1, 2) 104 | verts = self.v.copy() if copy_vertices else self.v 105 | return Lines(v=verts, e=edges) 106 | 107 | def show(self, mv=None, meshes=[], lines=[]): 108 | from .meshviewer import MeshViewer 109 | from .utils import row 110 | 111 | if mv is None: 112 | mv = MeshViewer(keepalive=True) 113 | 114 | if hasattr(self, 'landm'): 115 | from .sphere import Sphere 116 | sphere = Sphere(np.zeros((3)), 1.).to_mesh() 117 | scalefactor = 1e-2 * np.max(np.max(self.v) - np.min(self.v)) / np.max(np.max(sphere.v) - np.min(sphere.v)) 118 | sphere.v = sphere.v * scalefactor 119 | spheres = [Mesh(vc='SteelBlue', f=sphere.f, v=sphere.v + row(np.array(self.landm_raw_xyz[k]))) for k in self.landm.keys()] 120 | mv.set_dynamic_meshes([self] + spheres + meshes, blocking=True) 121 | else: 122 | mv.set_dynamic_meshes([self] + meshes, blocking=True) 123 | mv.set_dynamic_lines(lines) 124 | return mv 125 | 126 | def colors_like(self, color, arr=None): 127 | from .utils import row, col 128 | 129 | if arr is None: 130 | arr = np.zeros(self.v.shape) 131 | 132 | # if arr is single-dim, reshape it 133 | if arr.ndim == 1 or arr.shape[1] == 1: 134 | arr = arr.reshape(-1, 3) 135 | 136 | if isinstance(color, str): 137 | color = colors.name_to_rgb[color] 138 | elif isinstance(color, list): 139 | color = np.array(color) 140 | 141 | if color.shape[0] == arr.shape[0] and color.shape[0] == color.size: 142 | def jet(v): 143 | fourValue = 4 * v 144 | red = 
min(fourValue - 1.5, -fourValue + 4.5)
145 |                 green = min(fourValue - 0.5, -fourValue + 3.5)
146 |                 blue = min(fourValue + 0.5, -fourValue + 2.5)
147 |                 result = np.array([red, green, blue])
148 |                 result[result > 1.0] = 1.0
149 |                 result[result < 0.0] = 0.0
150 |                 return row(result)
151 |             color = col(color)
152 |             color = np.concatenate([jet(color[i]) for i in xrange(color.size)], axis=0)
153 | 
154 |         return np.ones_like(arr) * color
155 | 
156 |     def set_vertex_colors(self, vc, vertex_indices=None):
157 |         if vertex_indices is not None:
158 |             self.vc[vertex_indices] = self.colors_like(vc, self.v[vertex_indices])
159 |         else:
160 |             self.vc = self.colors_like(vc, self.v)
161 |         return self
162 | 
163 |     def set_vertex_colors_from_weights(self, weights, scale_to_range_1=True, color=True):
164 |         # from numpy import ones_like
165 |         if weights is None:
166 |             return self
167 |         if scale_to_range_1:
168 |             weights = weights - np.min(weights)
169 |             weights = 1.0 * weights / np.max(weights)
170 |         if color:
171 |             from matplotlib import cm
172 |             self.vc = cm.jet(weights)[:, :3]
173 |         else:
174 |             self.vc = np.tile(np.reshape(weights, (len(weights), 1)), (1, 3))  # *ones_like(self.v)
175 |         return self
176 | 
177 |     def scale_vertex_colors(self, weights, w_min=0.0, w_max=1.0):
178 |         if weights is None:
179 |             return self
180 |         weights = weights - np.min(weights)
181 |         weights = (w_max - w_min) * weights / np.max(weights) + w_min
182 |         self.vc = (weights * self.vc.T).T
183 |         return self
184 | 
185 |     def set_face_colors(self, fc):
186 |         self.fc = self.colors_like(fc, self.f)
187 |         return self
188 | 
189 |     def faces_by_vertex(self, as_sparse_matrix=False):
190 |         import scipy.sparse as sp
191 |         if not as_sparse_matrix:
192 |             faces_by_vertex = [[] for i in range(len(self.v))]
193 |             for i, face in enumerate(self.f):
194 |                 faces_by_vertex[face[0]].append(i)
195 |                 faces_by_vertex[face[1]].append(i)
196 |                 faces_by_vertex[face[2]].append(i)
197 |         else:
198 |             row = self.f.flatten()
199 |             col = np.array([range(self.f.shape[0])] * 3).T.flatten()
200 |             data = np.ones(len(col))
201 |             faces_by_vertex = sp.csr_matrix((data, (row, col)), shape=(self.v.shape[0], self.f.shape[0]))
202 |         return faces_by_vertex
203 | 
204 |     def estimate_vertex_normals(self, face_to_verts_sparse_matrix=None):
205 |         from .geometry.tri_normals import TriNormalsScaled
206 | 
207 |         face_normals = TriNormalsScaled(self.v, self.f).reshape(-1, 3)
208 |         ftov = face_to_verts_sparse_matrix if face_to_verts_sparse_matrix is not None else self.faces_by_vertex(as_sparse_matrix=True)  # "is not None": truth-testing a scipy sparse matrix raises ValueError
209 |         non_scaled_normals = ftov * face_normals
210 |         norms = (np.sum(non_scaled_normals ** 2.0, axis=1) ** 0.5).T
211 |         norms[norms == 0] = 1.0
212 |         return (non_scaled_normals.T / norms).T
213 | 
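    # Editor's note -- a minimal usage sketch for estimate_vertex_normals()
    # above (illustrative only, not shipped code; it assumes nothing beyond
    # numpy and the Mesh class in this file):
    #
    #     >>> m = Mesh(v=np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]]),
    #     ...          f=np.array([[0, 1, 2]]))
    #     >>> m.estimate_vertex_normals()
    #     array([[0., 0., 1.],
    #            [0., 0., 1.],
    #            [0., 0., 1.]])
    #
    # A single counter-clockwise triangle in the z=0 plane gives every vertex
    # the face normal (0, 0, 1); with several incident faces, each face's
    # contribution is area-weighted through the sparse face-to-vertex matrix.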
214 |     def barycentric_coordinates_for_points(self, points, face_indices):
215 |         from .geometry.barycentric_coordinates_of_projection import barycentric_coordinates_of_projection
216 |         vertex_indices = self.f[face_indices.flatten(), :]
217 |         tri_vertices = np.array([self.v[vertex_indices[:, 0]], self.v[vertex_indices[:, 1]], self.v[vertex_indices[:, 2]]])
218 |         return vertex_indices, barycentric_coordinates_of_projection(points, tri_vertices[0, :], tri_vertices[1, :] - tri_vertices[0, :], tri_vertices[2, :] - tri_vertices[0, :])
219 | 
220 |     def transfer_segm(self, mesh, exclude_empty_parts=True):
221 |         self.segm = {}
222 |         if hasattr(mesh, 'segm'):
223 |             face_centers = np.array([self.v[face, :].mean(axis=0) for face in self.f])
224 |             (closest_faces, closest_points) = mesh.closest_faces_and_points(face_centers)
225 |             mesh_parts_by_face = mesh.parts_by_face()
226 |             parts_by_face = [mesh_parts_by_face[face] for face in closest_faces.flatten()]
227 |             self.segm = dict([(part, []) for part in mesh.segm.keys()])
228 |             for face, part in enumerate(parts_by_face):
229 |                 self.segm[part].append(face)
230 |             for part in self.segm.keys():
231 |                 self.segm[part].sort()
232 |                 if exclude_empty_parts and not self.segm[part]:
233 |                     del self.segm[part]
234 | 
235 |     @property
236 |     def verts_by_segm(self):
237 |         return dict((segment, sorted(set(self.f[indices].flatten()))) for segment, indices in self.segm.items())
238 | 
239 |     def parts_by_face(self):
240 |         segments_by_face = [''] * len(self.f)
241 |         for part in self.segm.keys():
242 |             for face in self.segm[part]:
243 |                 segments_by_face[face] = part
244 |         return segments_by_face
245 | 
246 |     def verts_in_common(self, segments):
247 |         """
248 |         Returns an array of the vertex indices common to every segment in segments."""
249 |         return sorted(reduce(lambda s0, s1: s0.intersection(s1),
250 |                              [set(self.verts_by_segm[segm]) for segm in segments]))
251 |         # # indices of vertices in the faces of the first segment
252 |         # indices = self.verts_by_segm[segments[0]]
253 |         # for segment in segments[1:] :
254 |         #     indices = sorted([index for index in self.verts_by_segm[segment] if index in indices])  # Intersect current segment with current indices
255 |         # return sorted(set(indices))
256 | 
257 |     @property
258 |     def joint_names(self):
259 |         return self.joint_regressors.keys()
260 | 
261 |     @property
262 |     def joint_xyz(self):
263 |         joint_locations = {}
264 |         for name in self.joint_names:
265 |             joint_locations[name] = self.joint_regressors[name]['offset'] + \
266 |                 np.sum(self.v[self.joint_regressors[name]['v_indices']].T * self.joint_regressors[name]['coeff'], axis=1)
267 |         return joint_locations
268 | 
269 |     # Creates joint_regressors from a list of joint names and a per-joint list of vertex indices (e.g. a ring of vertices).
270 |     # For the regression coefficients, all vertices of a given joint are given equal weight.
271 |     def set_joints(self, joint_names, vertex_indices):
272 |         self.joint_regressors = {}
273 |         for name, indices in zip(joint_names, vertex_indices):
274 |             self.joint_regressors[name] = {'v_indices': indices,
275 |                                            'coeff': [1.0 / len(indices)] * len(indices),
276 |                                            'offset': np.array([0., 0., 0.])}
277 | 
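    # Editor's note -- the regressor mechanics described in the comment above
    # set_joints(), in miniature (an illustrative sketch; 'wrist' and the
    # vertex indices are made-up example values):
    #
    #     >>> m = Mesh(v=np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]]),
    #     ...          f=np.array([[0, 1, 2]]))
    #     >>> m.set_joints(['wrist'], [[0, 1]])
    #     >>> m.joint_xyz['wrist']            # equal-weight mean of vertices 0 and 1
    #     array([0.5, 0. , 0. ])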
278 |     def estimate_circumference(self, plane_normal, plane_distance, partNamesAllowed=None, want_edges=False):
279 |         raise Exception('estimate_circumference function has moved to body.mesh.metrics.circumferences')
280 | 
281 |     # ######################################################
282 |     # Processing
283 |     def reset_normals(self, face_to_verts_sparse_matrix=None, reset_face_normals=False):
284 |         return processing.reset_normals(self, face_to_verts_sparse_matrix, reset_face_normals)
285 | 
286 |     def reset_face_normals(self):
287 |         return processing.reset_face_normals(self)
288 | 
289 |     def uniquified_mesh(self):
290 |         """This function returns a copy of the mesh in which vertices are copied such that
291 |         each vertex appears in only one face, and hence has only one texture coordinate"""
292 |         return processing.uniquified_mesh(self)
293 | 
294 |     def keep_vertices(self, keep_list):
295 |         return processing.keep_vertices(self, keep_list)
296 | 
297 |     def remove_vertices(self, v_list):
298 |         return self.keep_vertices(np.setdiff1d(np.arange(self.v.shape[0]), v_list))
299 | 
300 |     def point_cloud(self):
301 |         return Mesh(v=self.v, f=[], vc=self.vc) if hasattr(self, 'vc') else Mesh(v=self.v, f=[])
302 | 
303 |     def remove_faces(self, face_indices_to_remove):
304 |         return processing.remove_faces(self, face_indices_to_remove)
305 | 
306 |     def scale_vertices(self, scale_factor):
307 |         return processing.scale_vertices(self, scale_factor)
308 | 
309 |     def rotate_vertices(self, rotation):
310 |         return processing.rotate_vertices(self, rotation)
311 | 
312 |     def translate_vertices(self, translation):
313 |         return processing.translate_vertices(self, translation)
314 | 
315 |     def flip_faces(self):
316 |         return processing.flip_faces(self)
317 | 
318 |     def subdivide_triangles(self):
319 |         return processing.subdivide_triangles(self)
320 | 
321 |     def concatenate_mesh(self, mesh):
322 |         return processing.concatenate_mesh(self, mesh)
323 | 
324 |     # new_ordering specifies the new index of each vertex. If new_ordering[i] = j,
325 |     # vertex i should now be the j^th vertex. As such, each entry in new_ordering should be unique.
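    # Editor's note -- a concrete instance of the rule above (illustrative):
    # with new_ordering = [2, 0, 1] on a 3-vertex mesh, vertex 0 moves to
    # slot 2, vertex 1 to slot 0 and vertex 2 to slot 1, so that
    # new_v[new_ordering] == old_v, and a face [0, 1, 2] is rewritten as [2, 0, 1].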
326 |     def reorder_vertices(self, new_ordering, new_normal_ordering=None):
327 |         processing.reorder_vertices(self, new_ordering, new_normal_ordering)
328 | 
329 |     # ######################################################
330 |     # Landmark methods
331 | 
332 |     @property
333 |     def landm_names(self):
334 |         if hasattr(self, 'landm_regressors') or hasattr(self, 'landm'):
335 |             return self.landm_regressors.keys() if hasattr(self, 'landm_regressors') else self.landm.keys()
336 |         else:
337 |             return []
338 | 
339 |     @property
340 |     def landm_xyz(self, ordering=None):  # NB: accessed as a property, so callers cannot actually pass 'ordering'
341 |         landmark_order = ordering if ordering else self.landm_names
342 |         landmark_vertex_locations = (self.landm_xyz_linear_transform(landmark_order) * self.v.flatten()).reshape(-1, 3) if landmark_order else np.zeros((0, 0))
343 |         return dict([(landmark_order[i], xyz) for i, xyz in enumerate(landmark_vertex_locations)]) if landmark_order else {}
344 | 
345 |     def set_landmarks_from_xyz(self, landm_raw_xyz):
346 |         self.landm_raw_xyz = landm_raw_xyz if hasattr(landm_raw_xyz, 'keys') else dict((str(i), l) for i, l in enumerate(landm_raw_xyz))
347 |         self.recompute_landmark_indices()
348 | 
349 |     def landm_xyz_linear_transform(self, ordering=None):
350 |         return landmarks.landm_xyz_linear_transform(self, ordering)
351 | 
352 |     def recompute_landmark_xyz(self):
353 |         self.landm_raw_xyz = dict((name, self.v[ind]) for name, ind in self.landm.items())
354 | 
355 |     def recompute_landmark_indices(self, landmark_fname=None, safe_mode=True):
356 |         landmarks.recompute_landmark_indices(self, landmark_fname, safe_mode)
357 | 
358 |     def set_landmarks_from_regressors(self, regressors):
359 |         self.landm_regressors = regressors
360 | 
361 |     def set_landmark_indices_from_any(self, landmark_file_or_values):
362 |         serialization.set_landmark_indices_from_any(self, landmark_file_or_values)
363 | 
364 |     def set_landmarks_from_raw(self, landmark_file_or_values):
365 |         landmarks.set_landmarks_from_raw(self, landmark_file_or_values)
366 | 
367 |     #######################################################
368 |     # Texture methods
369 | 
370 |     @property
371 |     def texture_image(self):
372 |         if not hasattr(self, '_texture_image'):
373 |             self.reload_texture_image()
374 |         return self._texture_image
375 | 
376 |     def set_texture_image(self, path_to_texture):
377 |         self.texture_filepath = path_to_texture
378 | 
379 |     def texture_coordinates_by_vertex(self):
380 |         return texture.texture_coordinates_by_vertex(self)
381 | 
382 |     def reload_texture_image(self):
383 |         texture.reload_texture_image(self)
384 | 
385 |     def transfer_texture(self, mesh_with_texture):
386 |         texture.transfer_texture(self, mesh_with_texture)
387 | 
388 |     def load_texture(self, texture_version):
389 |         texture.load_texture(self, texture_version)
390 | 
391 |     def texture_rgb(self, texture_coordinate):
392 |         return texture.texture_rgb(self, texture_coordinate)
393 | 
394 |     def texture_rgb_vec(self, texture_coordinates):
395 |         return texture.texture_rgb_vec(self, texture_coordinates)
396 | 
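    # Editor's note -- a small sketch of the landmark flow above (illustrative;
    # it assumes recompute_landmark_indices() snaps each raw xyz position to a
    # nearby vertex, which is what recompute_landmark_xyz() inverts; 'nose_tip'
    # and the vertex index 7 are made-up example values):
    #
    #     >>> m.set_landmarks_from_xyz({'nose_tip': m.v[7]})
    #     >>> m.landm_xyz['nose_tip']         # round-trips through the stored indices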
397 |     #######################################################
398 |     # Serialization methods
399 | 
400 |     def load_from_file(self, filename):
401 |         serialization.load_from_file(self, filename)
402 | 
403 |     def load_from_ply(self, filename):
404 |         serialization.load_from_ply(self, filename)
405 | 
406 |     def load_from_obj(self, filename):
407 |         serialization.load_from_obj(self, filename)
408 | 
409 |     def write_json(self, filename, header="", footer="", name="", include_faces=True, texture_mode=True):
410 |         serialization.write_json(self, filename, header, footer, name, include_faces, texture_mode)
411 | 
412 |     def write_three_json(self, filename, name=""):
413 |         serialization.write_three_json(self, filename, name)
414 | 
415 |     def write_ply(self, filename, flip_faces=False, ascii=False, little_endian=True, comments=[]):
416 |         serialization.write_ply(self, filename, flip_faces, ascii, little_endian, comments)
417 | 
418 |     def write_mtl(self, path, material_name, texture_name):
419 |         """Serializes a material attributes file"""
420 |         serialization.write_mtl(self, path, material_name, texture_name)
421 | 
422 |     def write_obj(self, filename, flip_faces=False, group=False, comments=None):
423 |         serialization.write_obj(self, filename, flip_faces, group, comments)
424 | 
425 |     def load_from_obj_cpp(self, filename):
426 |         serialization.load_from_obj_cpp(self, filename)
427 | 
428 |     def set_landmark_indices_from_ppfile(self, ppfilename):
429 |         serialization.set_landmark_indices_from_ppfile(self, ppfilename)
430 | 
431 |     def set_landmark_indices_from_lmrkfile(self, lmrkfilename):
432 |         serialization.set_landmark_indices_from_lmrkfile(self, lmrkfilename)
433 | 
--------------------------------------------------------------------------------
/mesh/colors.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2017 Max Planck Society. All rights reserved.
2 | # see accompanying LICENSE.txt file for licensing and contact information
3 | # Created by Matthew Loper on 2012-05-12.
4 | 
5 | """
6 | Colors utilities
7 | ================
8 | 
9 | 
10 | """
11 | 
12 | import re
13 | import numpy as np
14 | 
15 | 
16 | def main():
17 |     """Generates code for the name_to_rgb dict below from an rgb.txt file in X11 format (lines like "255 250 250  snow")."""
18 |     with open('rgb.txt') as fp:
19 | 
20 |         line = fp.readline()
21 |         while line:
22 |             reg = re.match(r'\s*(\d+)\s*(\d+)\s*(\d+)\s*(\w.*\w).*', line)
23 |             if reg:
24 |                 r = int(reg.group(1)) / 255.
25 |                 g = int(reg.group(2)) / 255.
26 |                 b = int(reg.group(3)) / 255.
27 | d = reg.group(4) 28 | print "'%s': np.array([%.2f, %.2f, %.2f])," % (d, r, g, b) 29 | line = fp.readline() 30 | 31 | 32 | name_to_rgb = { 33 | 'snow': np.array([1.00, 0.98, 0.98]), 34 | 'ghost white': np.array([0.97, 0.97, 1.00]), 35 | 'GhostWhite': np.array([0.97, 0.97, 1.00]), 36 | 'white smoke': np.array([0.96, 0.96, 0.96]), 37 | 'WhiteSmoke': np.array([0.96, 0.96, 0.96]), 38 | 'gainsboro': np.array([0.86, 0.86, 0.86]), 39 | 'floral white': np.array([1.00, 0.98, 0.94]), 40 | 'FloralWhite': np.array([1.00, 0.98, 0.94]), 41 | 'old lace': np.array([0.99, 0.96, 0.90]), 42 | 'OldLace': np.array([0.99, 0.96, 0.90]), 43 | 'linen': np.array([0.98, 0.94, 0.90]), 44 | 'antique white': np.array([0.98, 0.92, 0.84]), 45 | 'AntiqueWhite': np.array([0.98, 0.92, 0.84]), 46 | 'papaya whip': np.array([1.00, 0.94, 0.84]), 47 | 'PapayaWhip': np.array([1.00, 0.94, 0.84]), 48 | 'blanched almond': np.array([1.00, 0.92, 0.80]), 49 | 'BlanchedAlmond': np.array([1.00, 0.92, 0.80]), 50 | 'bisque': np.array([1.00, 0.89, 0.77]), 51 | 'peach puff': np.array([1.00, 0.85, 0.73]), 52 | 'PeachPuff': np.array([1.00, 0.85, 0.73]), 53 | 'navajo white': np.array([1.00, 0.87, 0.68]), 54 | 'NavajoWhite': np.array([1.00, 0.87, 0.68]), 55 | 'moccasin': np.array([1.00, 0.89, 0.71]), 56 | 'cornsilk': np.array([1.00, 0.97, 0.86]), 57 | 'ivory': np.array([1.00, 1.00, 0.94]), 58 | 'lemon chiffon': np.array([1.00, 0.98, 0.80]), 59 | 'LemonChiffon': np.array([1.00, 0.98, 0.80]), 60 | 'seashell': np.array([1.00, 0.96, 0.93]), 61 | 'honeydew': np.array([0.94, 1.00, 0.94]), 62 | 'mint cream': np.array([0.96, 1.00, 0.98]), 63 | 'MintCream': np.array([0.96, 1.00, 0.98]), 64 | 'azure': np.array([0.94, 1.00, 1.00]), 65 | 'alice blue': np.array([0.94, 0.97, 1.00]), 66 | 'AliceBlue': np.array([0.94, 0.97, 1.00]), 67 | 'lavender': np.array([0.90, 0.90, 0.98]), 68 | 'lavender blush': np.array([1.00, 0.94, 0.96]), 69 | 'LavenderBlush': np.array([1.00, 0.94, 0.96]), 70 | 'misty rose': np.array([1.00, 0.89, 0.88]), 71 | 'MistyRose': np.array([1.00, 0.89, 0.88]), 72 | 'white': np.array([1.00, 1.00, 1.00]), 73 | 'black': np.array([0.00, 0.00, 0.00]), 74 | 'dark slate gray': np.array([0.18, 0.31, 0.31]), 75 | 'DarkSlateGray': np.array([0.18, 0.31, 0.31]), 76 | 'dark slate grey': np.array([0.18, 0.31, 0.31]), 77 | 'DarkSlateGrey': np.array([0.18, 0.31, 0.31]), 78 | 'dim gray': np.array([0.41, 0.41, 0.41]), 79 | 'DimGray': np.array([0.41, 0.41, 0.41]), 80 | 'dim grey': np.array([0.41, 0.41, 0.41]), 81 | 'DimGrey': np.array([0.41, 0.41, 0.41]), 82 | 'slate gray': np.array([0.44, 0.50, 0.56]), 83 | 'SlateGray': np.array([0.44, 0.50, 0.56]), 84 | 'slate grey': np.array([0.44, 0.50, 0.56]), 85 | 'SlateGrey': np.array([0.44, 0.50, 0.56]), 86 | 'light slate gray': np.array([0.47, 0.53, 0.60]), 87 | 'LightSlateGray': np.array([0.47, 0.53, 0.60]), 88 | 'light slate grey': np.array([0.47, 0.53, 0.60]), 89 | 'LightSlateGrey': np.array([0.47, 0.53, 0.60]), 90 | 'gray': np.array([0.75, 0.75, 0.75]), 91 | 'grey': np.array([0.75, 0.75, 0.75]), 92 | 'light grey': np.array([0.83, 0.83, 0.83]), 93 | 'LightGrey': np.array([0.83, 0.83, 0.83]), 94 | 'light gray': np.array([0.83, 0.83, 0.83]), 95 | 'LightGray': np.array([0.83, 0.83, 0.83]), 96 | 'midnight blue': np.array([0.10, 0.10, 0.44]), 97 | 'MidnightBlue': np.array([0.10, 0.10, 0.44]), 98 | 'navy': np.array([0.00, 0.00, 0.50]), 99 | 'navy blue': np.array([0.00, 0.00, 0.50]), 100 | 'NavyBlue': np.array([0.00, 0.00, 0.50]), 101 | 'cornflower blue': np.array([0.39, 0.58, 0.93]), 102 | 'CornflowerBlue': 
np.array([0.39, 0.58, 0.93]), 103 | 'dark slate blue': np.array([0.28, 0.24, 0.55]), 104 | 'DarkSlateBlue': np.array([0.28, 0.24, 0.55]), 105 | 'slate blue': np.array([0.42, 0.35, 0.80]), 106 | 'SlateBlue': np.array([0.42, 0.35, 0.80]), 107 | 'medium slate blue': np.array([0.48, 0.41, 0.93]), 108 | 'MediumSlateBlue': np.array([0.48, 0.41, 0.93]), 109 | 'light slate blue': np.array([0.52, 0.44, 1.00]), 110 | 'LightSlateBlue': np.array([0.52, 0.44, 1.00]), 111 | 'medium blue': np.array([0.00, 0.00, 0.80]), 112 | 'MediumBlue': np.array([0.00, 0.00, 0.80]), 113 | 'royal blue': np.array([0.25, 0.41, 0.88]), 114 | 'RoyalBlue': np.array([0.25, 0.41, 0.88]), 115 | 'blue': np.array([0.00, 0.00, 1.00]), 116 | 'dodger blue': np.array([0.12, 0.56, 1.00]), 117 | 'DodgerBlue': np.array([0.12, 0.56, 1.00]), 118 | 'deep sky blue': np.array([0.00, 0.75, 1.00]), 119 | 'DeepSkyBlue': np.array([0.00, 0.75, 1.00]), 120 | 'sky blue': np.array([0.53, 0.81, 0.92]), 121 | 'SkyBlue': np.array([0.53, 0.81, 0.92]), 122 | 'light sky blue': np.array([0.53, 0.81, 0.98]), 123 | 'LightSkyBlue': np.array([0.53, 0.81, 0.98]), 124 | 'steel blue': np.array([0.27, 0.51, 0.71]), 125 | 'SteelBlue': np.array([0.27, 0.51, 0.71]), 126 | 'light steel blue': np.array([0.69, 0.77, 0.87]), 127 | 'LightSteelBlue': np.array([0.69, 0.77, 0.87]), 128 | 'light blue': np.array([0.68, 0.85, 0.90]), 129 | 'LightBlue': np.array([0.68, 0.85, 0.90]), 130 | 'powder blue': np.array([0.69, 0.88, 0.90]), 131 | 'PowderBlue': np.array([0.69, 0.88, 0.90]), 132 | 'pale turquoise': np.array([0.69, 0.93, 0.93]), 133 | 'PaleTurquoise': np.array([0.69, 0.93, 0.93]), 134 | 'dark turquoise': np.array([0.00, 0.81, 0.82]), 135 | 'DarkTurquoise': np.array([0.00, 0.81, 0.82]), 136 | 'medium turquoise': np.array([0.28, 0.82, 0.80]), 137 | 'MediumTurquoise': np.array([0.28, 0.82, 0.80]), 138 | 'turquoise': np.array([0.25, 0.88, 0.82]), 139 | 'cyan': np.array([0.00, 1.00, 1.00]), 140 | 'light cyan': np.array([0.88, 1.00, 1.00]), 141 | 'LightCyan': np.array([0.88, 1.00, 1.00]), 142 | 'cadet blue': np.array([0.37, 0.62, 0.63]), 143 | 'CadetBlue': np.array([0.37, 0.62, 0.63]), 144 | 'medium aquamarine': np.array([0.40, 0.80, 0.67]), 145 | 'MediumAquamarine': np.array([0.40, 0.80, 0.67]), 146 | 'aquamarine': np.array([0.50, 1.00, 0.83]), 147 | 'dark green': np.array([0.00, 0.39, 0.00]), 148 | 'DarkGreen': np.array([0.00, 0.39, 0.00]), 149 | 'dark olive green': np.array([0.33, 0.42, 0.18]), 150 | 'DarkOliveGreen': np.array([0.33, 0.42, 0.18]), 151 | 'dark sea green': np.array([0.56, 0.74, 0.56]), 152 | 'DarkSeaGreen': np.array([0.56, 0.74, 0.56]), 153 | 'sea green': np.array([0.18, 0.55, 0.34]), 154 | 'SeaGreen': np.array([0.18, 0.55, 0.34]), 155 | 'medium sea green': np.array([0.24, 0.70, 0.44]), 156 | 'MediumSeaGreen': np.array([0.24, 0.70, 0.44]), 157 | 'light sea green': np.array([0.13, 0.70, 0.67]), 158 | 'LightSeaGreen': np.array([0.13, 0.70, 0.67]), 159 | 'pale green': np.array([0.60, 0.98, 0.60]), 160 | 'PaleGreen': np.array([0.60, 0.98, 0.60]), 161 | 'spring green': np.array([0.00, 1.00, 0.50]), 162 | 'SpringGreen': np.array([0.00, 1.00, 0.50]), 163 | 'lawn green': np.array([0.49, 0.99, 0.00]), 164 | 'LawnGreen': np.array([0.49, 0.99, 0.00]), 165 | 'green': np.array([0.00, 1.00, 0.00]), 166 | 'chartreuse': np.array([0.50, 1.00, 0.00]), 167 | 'medium spring green': np.array([0.00, 0.98, 0.60]), 168 | 'MediumSpringGreen': np.array([0.00, 0.98, 0.60]), 169 | 'green yellow': np.array([0.68, 1.00, 0.18]), 170 | 'GreenYellow': np.array([0.68, 1.00, 0.18]), 171 | 'lime 
green': np.array([0.20, 0.80, 0.20]), 172 | 'LimeGreen': np.array([0.20, 0.80, 0.20]), 173 | 'yellow green': np.array([0.60, 0.80, 0.20]), 174 | 'YellowGreen': np.array([0.60, 0.80, 0.20]), 175 | 'forest green': np.array([0.13, 0.55, 0.13]), 176 | 'ForestGreen': np.array([0.13, 0.55, 0.13]), 177 | 'olive drab': np.array([0.42, 0.56, 0.14]), 178 | 'OliveDrab': np.array([0.42, 0.56, 0.14]), 179 | 'dark khaki': np.array([0.74, 0.72, 0.42]), 180 | 'DarkKhaki': np.array([0.74, 0.72, 0.42]), 181 | 'khaki': np.array([0.94, 0.90, 0.55]), 182 | 'pale goldenrod': np.array([0.93, 0.91, 0.67]), 183 | 'PaleGoldenrod': np.array([0.93, 0.91, 0.67]), 184 | 'light goldenrod yellow': np.array([0.98, 0.98, 0.82]), 185 | 'LightGoldenrodYellow': np.array([0.98, 0.98, 0.82]), 186 | 'light yellow': np.array([1.00, 1.00, 0.88]), 187 | 'LightYellow': np.array([1.00, 1.00, 0.88]), 188 | 'yellow': np.array([1.00, 1.00, 0.00]), 189 | 'gold': np.array([1.00, 0.84, 0.00]), 190 | 'light goldenrod': np.array([0.93, 0.87, 0.51]), 191 | 'LightGoldenrod': np.array([0.93, 0.87, 0.51]), 192 | 'goldenrod': np.array([0.85, 0.65, 0.13]), 193 | 'dark goldenrod': np.array([0.72, 0.53, 0.04]), 194 | 'DarkGoldenrod': np.array([0.72, 0.53, 0.04]), 195 | 'rosy brown': np.array([0.74, 0.56, 0.56]), 196 | 'RosyBrown': np.array([0.74, 0.56, 0.56]), 197 | 'indian red': np.array([0.80, 0.36, 0.36]), 198 | 'IndianRed': np.array([0.80, 0.36, 0.36]), 199 | 'saddle brown': np.array([0.55, 0.27, 0.07]), 200 | 'SaddleBrown': np.array([0.55, 0.27, 0.07]), 201 | 'sienna': np.array([0.63, 0.32, 0.18]), 202 | 'peru': np.array([0.80, 0.52, 0.25]), 203 | 'burlywood': np.array([0.87, 0.72, 0.53]), 204 | 'beige': np.array([0.96, 0.96, 0.86]), 205 | 'wheat': np.array([0.96, 0.87, 0.70]), 206 | 'sandy brown': np.array([0.96, 0.64, 0.38]), 207 | 'SandyBrown': np.array([0.96, 0.64, 0.38]), 208 | 'tan': np.array([0.82, 0.71, 0.55]), 209 | 'chocolate': np.array([0.82, 0.41, 0.12]), 210 | 'firebrick': np.array([0.70, 0.13, 0.13]), 211 | 'brown': np.array([0.65, 0.16, 0.16]), 212 | 'dark salmon': np.array([0.91, 0.59, 0.48]), 213 | 'DarkSalmon': np.array([0.91, 0.59, 0.48]), 214 | 'salmon': np.array([0.98, 0.50, 0.45]), 215 | 'light salmon': np.array([1.00, 0.63, 0.48]), 216 | 'LightSalmon': np.array([1.00, 0.63, 0.48]), 217 | 'orange': np.array([1.00, 0.65, 0.00]), 218 | 'dark orange': np.array([1.00, 0.55, 0.00]), 219 | 'DarkOrange': np.array([1.00, 0.55, 0.00]), 220 | 'coral': np.array([1.00, 0.50, 0.31]), 221 | 'light coral': np.array([0.94, 0.50, 0.50]), 222 | 'LightCoral': np.array([0.94, 0.50, 0.50]), 223 | 'tomato': np.array([1.00, 0.39, 0.28]), 224 | 'orange red': np.array([1.00, 0.27, 0.00]), 225 | 'OrangeRed': np.array([1.00, 0.27, 0.00]), 226 | 'red': np.array([1.00, 0.00, 0.00]), 227 | 'hot pink': np.array([1.00, 0.41, 0.71]), 228 | 'HotPink': np.array([1.00, 0.41, 0.71]), 229 | 'deep pink': np.array([1.00, 0.08, 0.58]), 230 | 'DeepPink': np.array([1.00, 0.08, 0.58]), 231 | 'pink': np.array([1.00, 0.75, 0.80]), 232 | 'light pink': np.array([1.00, 0.71, 0.76]), 233 | 'LightPink': np.array([1.00, 0.71, 0.76]), 234 | 'pale violet red': np.array([0.86, 0.44, 0.58]), 235 | 'PaleVioletRed': np.array([0.86, 0.44, 0.58]), 236 | 'maroon': np.array([0.69, 0.19, 0.38]), 237 | 'medium violet red': np.array([0.78, 0.08, 0.52]), 238 | 'MediumVioletRed': np.array([0.78, 0.08, 0.52]), 239 | 'violet red': np.array([0.82, 0.13, 0.56]), 240 | 'VioletRed': np.array([0.82, 0.13, 0.56]), 241 | 'magenta': np.array([1.00, 0.00, 1.00]), 242 | 'violet': np.array([0.93, 
0.51, 0.93]), 243 | 'plum': np.array([0.87, 0.63, 0.87]), 244 | 'orchid': np.array([0.85, 0.44, 0.84]), 245 | 'medium orchid': np.array([0.73, 0.33, 0.83]), 246 | 'MediumOrchid': np.array([0.73, 0.33, 0.83]), 247 | 'dark orchid': np.array([0.60, 0.20, 0.80]), 248 | 'DarkOrchid': np.array([0.60, 0.20, 0.80]), 249 | 'dark violet': np.array([0.58, 0.00, 0.83]), 250 | 'DarkViolet': np.array([0.58, 0.00, 0.83]), 251 | 'blue violet': np.array([0.54, 0.17, 0.89]), 252 | 'BlueViolet': np.array([0.54, 0.17, 0.89]), 253 | 'purple': np.array([0.63, 0.13, 0.94]), 254 | 'medium purple': np.array([0.58, 0.44, 0.86]), 255 | 'MediumPurple': np.array([0.58, 0.44, 0.86]), 256 | 'thistle': np.array([0.85, 0.75, 0.85]), 257 | 'snow1': np.array([1.00, 0.98, 0.98]), 258 | 'snow2': np.array([0.93, 0.91, 0.91]), 259 | 'snow3': np.array([0.80, 0.79, 0.79]), 260 | 'snow4': np.array([0.55, 0.54, 0.54]), 261 | 'seashell1': np.array([1.00, 0.96, 0.93]), 262 | 'seashell2': np.array([0.93, 0.90, 0.87]), 263 | 'seashell3': np.array([0.80, 0.77, 0.75]), 264 | 'seashell4': np.array([0.55, 0.53, 0.51]), 265 | 'AntiqueWhite1': np.array([1.00, 0.94, 0.86]), 266 | 'AntiqueWhite2': np.array([0.93, 0.87, 0.80]), 267 | 'AntiqueWhite3': np.array([0.80, 0.75, 0.69]), 268 | 'AntiqueWhite4': np.array([0.55, 0.51, 0.47]), 269 | 'bisque1': np.array([1.00, 0.89, 0.77]), 270 | 'bisque2': np.array([0.93, 0.84, 0.72]), 271 | 'bisque3': np.array([0.80, 0.72, 0.62]), 272 | 'bisque4': np.array([0.55, 0.49, 0.42]), 273 | 'PeachPuff1': np.array([1.00, 0.85, 0.73]), 274 | 'PeachPuff2': np.array([0.93, 0.80, 0.68]), 275 | 'PeachPuff3': np.array([0.80, 0.69, 0.58]), 276 | 'PeachPuff4': np.array([0.55, 0.47, 0.40]), 277 | 'NavajoWhite1': np.array([1.00, 0.87, 0.68]), 278 | 'NavajoWhite2': np.array([0.93, 0.81, 0.63]), 279 | 'NavajoWhite3': np.array([0.80, 0.70, 0.55]), 280 | 'NavajoWhite4': np.array([0.55, 0.47, 0.37]), 281 | 'LemonChiffon1': np.array([1.00, 0.98, 0.80]), 282 | 'LemonChiffon2': np.array([0.93, 0.91, 0.75]), 283 | 'LemonChiffon3': np.array([0.80, 0.79, 0.65]), 284 | 'LemonChiffon4': np.array([0.55, 0.54, 0.44]), 285 | 'cornsilk1': np.array([1.00, 0.97, 0.86]), 286 | 'cornsilk2': np.array([0.93, 0.91, 0.80]), 287 | 'cornsilk3': np.array([0.80, 0.78, 0.69]), 288 | 'cornsilk4': np.array([0.55, 0.53, 0.47]), 289 | 'ivory1': np.array([1.00, 1.00, 0.94]), 290 | 'ivory2': np.array([0.93, 0.93, 0.88]), 291 | 'ivory3': np.array([0.80, 0.80, 0.76]), 292 | 'ivory4': np.array([0.55, 0.55, 0.51]), 293 | 'honeydew1': np.array([0.94, 1.00, 0.94]), 294 | 'honeydew2': np.array([0.88, 0.93, 0.88]), 295 | 'honeydew3': np.array([0.76, 0.80, 0.76]), 296 | 'honeydew4': np.array([0.51, 0.55, 0.51]), 297 | 'LavenderBlush1': np.array([1.00, 0.94, 0.96]), 298 | 'LavenderBlush2': np.array([0.93, 0.88, 0.90]), 299 | 'LavenderBlush3': np.array([0.80, 0.76, 0.77]), 300 | 'LavenderBlush4': np.array([0.55, 0.51, 0.53]), 301 | 'MistyRose1': np.array([1.00, 0.89, 0.88]), 302 | 'MistyRose2': np.array([0.93, 0.84, 0.82]), 303 | 'MistyRose3': np.array([0.80, 0.72, 0.71]), 304 | 'MistyRose4': np.array([0.55, 0.49, 0.48]), 305 | 'azure1': np.array([0.94, 1.00, 1.00]), 306 | 'azure2': np.array([0.88, 0.93, 0.93]), 307 | 'azure3': np.array([0.76, 0.80, 0.80]), 308 | 'azure4': np.array([0.51, 0.55, 0.55]), 309 | 'SlateBlue1': np.array([0.51, 0.44, 1.00]), 310 | 'SlateBlue2': np.array([0.48, 0.40, 0.93]), 311 | 'SlateBlue3': np.array([0.41, 0.35, 0.80]), 312 | 'SlateBlue4': np.array([0.28, 0.24, 0.55]), 313 | 'RoyalBlue1': np.array([0.28, 0.46, 1.00]), 314 | 'RoyalBlue2': 
np.array([0.26, 0.43, 0.93]), 315 | 'RoyalBlue3': np.array([0.23, 0.37, 0.80]), 316 | 'RoyalBlue4': np.array([0.15, 0.25, 0.55]), 317 | 'blue1': np.array([0.00, 0.00, 1.00]), 318 | 'blue2': np.array([0.00, 0.00, 0.93]), 319 | 'blue3': np.array([0.00, 0.00, 0.80]), 320 | 'blue4': np.array([0.00, 0.00, 0.55]), 321 | 'DodgerBlue1': np.array([0.12, 0.56, 1.00]), 322 | 'DodgerBlue2': np.array([0.11, 0.53, 0.93]), 323 | 'DodgerBlue3': np.array([0.09, 0.45, 0.80]), 324 | 'DodgerBlue4': np.array([0.06, 0.31, 0.55]), 325 | 'SteelBlue1': np.array([0.39, 0.72, 1.00]), 326 | 'SteelBlue2': np.array([0.36, 0.67, 0.93]), 327 | 'SteelBlue3': np.array([0.31, 0.58, 0.80]), 328 | 'SteelBlue4': np.array([0.21, 0.39, 0.55]), 329 | 'DeepSkyBlue1': np.array([0.00, 0.75, 1.00]), 330 | 'DeepSkyBlue2': np.array([0.00, 0.70, 0.93]), 331 | 'DeepSkyBlue3': np.array([0.00, 0.60, 0.80]), 332 | 'DeepSkyBlue4': np.array([0.00, 0.41, 0.55]), 333 | 'SkyBlue1': np.array([0.53, 0.81, 1.00]), 334 | 'SkyBlue2': np.array([0.49, 0.75, 0.93]), 335 | 'SkyBlue3': np.array([0.42, 0.65, 0.80]), 336 | 'SkyBlue4': np.array([0.29, 0.44, 0.55]), 337 | 'LightSkyBlue1': np.array([0.69, 0.89, 1.00]), 338 | 'LightSkyBlue2': np.array([0.64, 0.83, 0.93]), 339 | 'LightSkyBlue3': np.array([0.55, 0.71, 0.80]), 340 | 'LightSkyBlue4': np.array([0.38, 0.48, 0.55]), 341 | 'SlateGray1': np.array([0.78, 0.89, 1.00]), 342 | 'SlateGray2': np.array([0.73, 0.83, 0.93]), 343 | 'SlateGray3': np.array([0.62, 0.71, 0.80]), 344 | 'SlateGray4': np.array([0.42, 0.48, 0.55]), 345 | 'LightSteelBlue1': np.array([0.79, 0.88, 1.00]), 346 | 'LightSteelBlue2': np.array([0.74, 0.82, 0.93]), 347 | 'LightSteelBlue3': np.array([0.64, 0.71, 0.80]), 348 | 'LightSteelBlue4': np.array([0.43, 0.48, 0.55]), 349 | 'LightBlue1': np.array([0.75, 0.94, 1.00]), 350 | 'LightBlue2': np.array([0.70, 0.87, 0.93]), 351 | 'LightBlue3': np.array([0.60, 0.75, 0.80]), 352 | 'LightBlue4': np.array([0.41, 0.51, 0.55]), 353 | 'LightCyan1': np.array([0.88, 1.00, 1.00]), 354 | 'LightCyan2': np.array([0.82, 0.93, 0.93]), 355 | 'LightCyan3': np.array([0.71, 0.80, 0.80]), 356 | 'LightCyan4': np.array([0.48, 0.55, 0.55]), 357 | 'PaleTurquoise1': np.array([0.73, 1.00, 1.00]), 358 | 'PaleTurquoise2': np.array([0.68, 0.93, 0.93]), 359 | 'PaleTurquoise3': np.array([0.59, 0.80, 0.80]), 360 | 'PaleTurquoise4': np.array([0.40, 0.55, 0.55]), 361 | 'CadetBlue1': np.array([0.60, 0.96, 1.00]), 362 | 'CadetBlue2': np.array([0.56, 0.90, 0.93]), 363 | 'CadetBlue3': np.array([0.48, 0.77, 0.80]), 364 | 'CadetBlue4': np.array([0.33, 0.53, 0.55]), 365 | 'turquoise1': np.array([0.00, 0.96, 1.00]), 366 | 'turquoise2': np.array([0.00, 0.90, 0.93]), 367 | 'turquoise3': np.array([0.00, 0.77, 0.80]), 368 | 'turquoise4': np.array([0.00, 0.53, 0.55]), 369 | 'cyan1': np.array([0.00, 1.00, 1.00]), 370 | 'cyan2': np.array([0.00, 0.93, 0.93]), 371 | 'cyan3': np.array([0.00, 0.80, 0.80]), 372 | 'cyan4': np.array([0.00, 0.55, 0.55]), 373 | 'DarkSlateGray1': np.array([0.59, 1.00, 1.00]), 374 | 'DarkSlateGray2': np.array([0.55, 0.93, 0.93]), 375 | 'DarkSlateGray3': np.array([0.47, 0.80, 0.80]), 376 | 'DarkSlateGray4': np.array([0.32, 0.55, 0.55]), 377 | 'aquamarine1': np.array([0.50, 1.00, 0.83]), 378 | 'aquamarine2': np.array([0.46, 0.93, 0.78]), 379 | 'aquamarine3': np.array([0.40, 0.80, 0.67]), 380 | 'aquamarine4': np.array([0.27, 0.55, 0.45]), 381 | 'DarkSeaGreen1': np.array([0.76, 1.00, 0.76]), 382 | 'DarkSeaGreen2': np.array([0.71, 0.93, 0.71]), 383 | 'DarkSeaGreen3': np.array([0.61, 0.80, 0.61]), 384 | 'DarkSeaGreen4': 
np.array([0.41, 0.55, 0.41]), 385 | 'SeaGreen1': np.array([0.33, 1.00, 0.62]), 386 | 'SeaGreen2': np.array([0.31, 0.93, 0.58]), 387 | 'SeaGreen3': np.array([0.26, 0.80, 0.50]), 388 | 'SeaGreen4': np.array([0.18, 0.55, 0.34]), 389 | 'PaleGreen1': np.array([0.60, 1.00, 0.60]), 390 | 'PaleGreen2': np.array([0.56, 0.93, 0.56]), 391 | 'PaleGreen3': np.array([0.49, 0.80, 0.49]), 392 | 'PaleGreen4': np.array([0.33, 0.55, 0.33]), 393 | 'SpringGreen1': np.array([0.00, 1.00, 0.50]), 394 | 'SpringGreen2': np.array([0.00, 0.93, 0.46]), 395 | 'SpringGreen3': np.array([0.00, 0.80, 0.40]), 396 | 'SpringGreen4': np.array([0.00, 0.55, 0.27]), 397 | 'green1': np.array([0.00, 1.00, 0.00]), 398 | 'green2': np.array([0.00, 0.93, 0.00]), 399 | 'green3': np.array([0.00, 0.80, 0.00]), 400 | 'green4': np.array([0.00, 0.55, 0.00]), 401 | 'chartreuse1': np.array([0.50, 1.00, 0.00]), 402 | 'chartreuse2': np.array([0.46, 0.93, 0.00]), 403 | 'chartreuse3': np.array([0.40, 0.80, 0.00]), 404 | 'chartreuse4': np.array([0.27, 0.55, 0.00]), 405 | 'OliveDrab1': np.array([0.75, 1.00, 0.24]), 406 | 'OliveDrab2': np.array([0.70, 0.93, 0.23]), 407 | 'OliveDrab3': np.array([0.60, 0.80, 0.20]), 408 | 'OliveDrab4': np.array([0.41, 0.55, 0.13]), 409 | 'DarkOliveGreen1': np.array([0.79, 1.00, 0.44]), 410 | 'DarkOliveGreen2': np.array([0.74, 0.93, 0.41]), 411 | 'DarkOliveGreen3': np.array([0.64, 0.80, 0.35]), 412 | 'DarkOliveGreen4': np.array([0.43, 0.55, 0.24]), 413 | 'khaki1': np.array([1.00, 0.96, 0.56]), 414 | 'khaki2': np.array([0.93, 0.90, 0.52]), 415 | 'khaki3': np.array([0.80, 0.78, 0.45]), 416 | 'khaki4': np.array([0.55, 0.53, 0.31]), 417 | 'LightGoldenrod1': np.array([1.00, 0.93, 0.55]), 418 | 'LightGoldenrod2': np.array([0.93, 0.86, 0.51]), 419 | 'LightGoldenrod3': np.array([0.80, 0.75, 0.44]), 420 | 'LightGoldenrod4': np.array([0.55, 0.51, 0.30]), 421 | 'LightYellow1': np.array([1.00, 1.00, 0.88]), 422 | 'LightYellow2': np.array([0.93, 0.93, 0.82]), 423 | 'LightYellow3': np.array([0.80, 0.80, 0.71]), 424 | 'LightYellow4': np.array([0.55, 0.55, 0.48]), 425 | 'yellow1': np.array([1.00, 1.00, 0.00]), 426 | 'yellow2': np.array([0.93, 0.93, 0.00]), 427 | 'yellow3': np.array([0.80, 0.80, 0.00]), 428 | 'yellow4': np.array([0.55, 0.55, 0.00]), 429 | 'gold1': np.array([1.00, 0.84, 0.00]), 430 | 'gold2': np.array([0.93, 0.79, 0.00]), 431 | 'gold3': np.array([0.80, 0.68, 0.00]), 432 | 'gold4': np.array([0.55, 0.46, 0.00]), 433 | 'goldenrod1': np.array([1.00, 0.76, 0.15]), 434 | 'goldenrod2': np.array([0.93, 0.71, 0.13]), 435 | 'goldenrod3': np.array([0.80, 0.61, 0.11]), 436 | 'goldenrod4': np.array([0.55, 0.41, 0.08]), 437 | 'DarkGoldenrod1': np.array([1.00, 0.73, 0.06]), 438 | 'DarkGoldenrod2': np.array([0.93, 0.68, 0.05]), 439 | 'DarkGoldenrod3': np.array([0.80, 0.58, 0.05]), 440 | 'DarkGoldenrod4': np.array([0.55, 0.40, 0.03]), 441 | 'RosyBrown1': np.array([1.00, 0.76, 0.76]), 442 | 'RosyBrown2': np.array([0.93, 0.71, 0.71]), 443 | 'RosyBrown3': np.array([0.80, 0.61, 0.61]), 444 | 'RosyBrown4': np.array([0.55, 0.41, 0.41]), 445 | 'IndianRed1': np.array([1.00, 0.42, 0.42]), 446 | 'IndianRed2': np.array([0.93, 0.39, 0.39]), 447 | 'IndianRed3': np.array([0.80, 0.33, 0.33]), 448 | 'IndianRed4': np.array([0.55, 0.23, 0.23]), 449 | 'sienna1': np.array([1.00, 0.51, 0.28]), 450 | 'sienna2': np.array([0.93, 0.47, 0.26]), 451 | 'sienna3': np.array([0.80, 0.41, 0.22]), 452 | 'sienna4': np.array([0.55, 0.28, 0.15]), 453 | 'burlywood1': np.array([1.00, 0.83, 0.61]), 454 | 'burlywood2': np.array([0.93, 0.77, 0.57]), 455 | 'burlywood3': 
np.array([0.80, 0.67, 0.49]), 456 | 'burlywood4': np.array([0.55, 0.45, 0.33]), 457 | 'wheat1': np.array([1.00, 0.91, 0.73]), 458 | 'wheat2': np.array([0.93, 0.85, 0.68]), 459 | 'wheat3': np.array([0.80, 0.73, 0.59]), 460 | 'wheat4': np.array([0.55, 0.49, 0.40]), 461 | 'tan1': np.array([1.00, 0.65, 0.31]), 462 | 'tan2': np.array([0.93, 0.60, 0.29]), 463 | 'tan3': np.array([0.80, 0.52, 0.25]), 464 | 'tan4': np.array([0.55, 0.35, 0.17]), 465 | 'chocolate1': np.array([1.00, 0.50, 0.14]), 466 | 'chocolate2': np.array([0.93, 0.46, 0.13]), 467 | 'chocolate3': np.array([0.80, 0.40, 0.11]), 468 | 'chocolate4': np.array([0.55, 0.27, 0.07]), 469 | 'firebrick1': np.array([1.00, 0.19, 0.19]), 470 | 'firebrick2': np.array([0.93, 0.17, 0.17]), 471 | 'firebrick3': np.array([0.80, 0.15, 0.15]), 472 | 'firebrick4': np.array([0.55, 0.10, 0.10]), 473 | 'brown1': np.array([1.00, 0.25, 0.25]), 474 | 'brown2': np.array([0.93, 0.23, 0.23]), 475 | 'brown3': np.array([0.80, 0.20, 0.20]), 476 | 'brown4': np.array([0.55, 0.14, 0.14]), 477 | 'salmon1': np.array([1.00, 0.55, 0.41]), 478 | 'salmon2': np.array([0.93, 0.51, 0.38]), 479 | 'salmon3': np.array([0.80, 0.44, 0.33]), 480 | 'salmon4': np.array([0.55, 0.30, 0.22]), 481 | 'LightSalmon1': np.array([1.00, 0.63, 0.48]), 482 | 'LightSalmon2': np.array([0.93, 0.58, 0.45]), 483 | 'LightSalmon3': np.array([0.80, 0.51, 0.38]), 484 | 'LightSalmon4': np.array([0.55, 0.34, 0.26]), 485 | 'orange1': np.array([1.00, 0.65, 0.00]), 486 | 'orange2': np.array([0.93, 0.60, 0.00]), 487 | 'orange3': np.array([0.80, 0.52, 0.00]), 488 | 'orange4': np.array([0.55, 0.35, 0.00]), 489 | 'DarkOrange1': np.array([1.00, 0.50, 0.00]), 490 | 'DarkOrange2': np.array([0.93, 0.46, 0.00]), 491 | 'DarkOrange3': np.array([0.80, 0.40, 0.00]), 492 | 'DarkOrange4': np.array([0.55, 0.27, 0.00]), 493 | 'coral1': np.array([1.00, 0.45, 0.34]), 494 | 'coral2': np.array([0.93, 0.42, 0.31]), 495 | 'coral3': np.array([0.80, 0.36, 0.27]), 496 | 'coral4': np.array([0.55, 0.24, 0.18]), 497 | 'tomato1': np.array([1.00, 0.39, 0.28]), 498 | 'tomato2': np.array([0.93, 0.36, 0.26]), 499 | 'tomato3': np.array([0.80, 0.31, 0.22]), 500 | 'tomato4': np.array([0.55, 0.21, 0.15]), 501 | 'OrangeRed1': np.array([1.00, 0.27, 0.00]), 502 | 'OrangeRed2': np.array([0.93, 0.25, 0.00]), 503 | 'OrangeRed3': np.array([0.80, 0.22, 0.00]), 504 | 'OrangeRed4': np.array([0.55, 0.15, 0.00]), 505 | 'red1': np.array([1.00, 0.00, 0.00]), 506 | 'red2': np.array([0.93, 0.00, 0.00]), 507 | 'red3': np.array([0.80, 0.00, 0.00]), 508 | 'red4': np.array([0.55, 0.00, 0.00]), 509 | 'DeepPink1': np.array([1.00, 0.08, 0.58]), 510 | 'DeepPink2': np.array([0.93, 0.07, 0.54]), 511 | 'DeepPink3': np.array([0.80, 0.06, 0.46]), 512 | 'DeepPink4': np.array([0.55, 0.04, 0.31]), 513 | 'HotPink1': np.array([1.00, 0.43, 0.71]), 514 | 'HotPink2': np.array([0.93, 0.42, 0.65]), 515 | 'HotPink3': np.array([0.80, 0.38, 0.56]), 516 | 'HotPink4': np.array([0.55, 0.23, 0.38]), 517 | 'pink1': np.array([1.00, 0.71, 0.77]), 518 | 'pink2': np.array([0.93, 0.66, 0.72]), 519 | 'pink3': np.array([0.80, 0.57, 0.62]), 520 | 'pink4': np.array([0.55, 0.39, 0.42]), 521 | 'LightPink1': np.array([1.00, 0.68, 0.73]), 522 | 'LightPink2': np.array([0.93, 0.64, 0.68]), 523 | 'LightPink3': np.array([0.80, 0.55, 0.58]), 524 | 'LightPink4': np.array([0.55, 0.37, 0.40]), 525 | 'PaleVioletRed1': np.array([1.00, 0.51, 0.67]), 526 | 'PaleVioletRed2': np.array([0.93, 0.47, 0.62]), 527 | 'PaleVioletRed3': np.array([0.80, 0.41, 0.54]), 528 | 'PaleVioletRed4': np.array([0.55, 0.28, 0.36]), 529 | 
'maroon1': np.array([1.00, 0.20, 0.70]), 530 | 'maroon2': np.array([0.93, 0.19, 0.65]), 531 | 'maroon3': np.array([0.80, 0.16, 0.56]), 532 | 'maroon4': np.array([0.55, 0.11, 0.38]), 533 | 'VioletRed1': np.array([1.00, 0.24, 0.59]), 534 | 'VioletRed2': np.array([0.93, 0.23, 0.55]), 535 | 'VioletRed3': np.array([0.80, 0.20, 0.47]), 536 | 'VioletRed4': np.array([0.55, 0.13, 0.32]), 537 | 'magenta1': np.array([1.00, 0.00, 1.00]), 538 | 'magenta2': np.array([0.93, 0.00, 0.93]), 539 | 'magenta3': np.array([0.80, 0.00, 0.80]), 540 | 'magenta4': np.array([0.55, 0.00, 0.55]), 541 | 'orchid1': np.array([1.00, 0.51, 0.98]), 542 | 'orchid2': np.array([0.93, 0.48, 0.91]), 543 | 'orchid3': np.array([0.80, 0.41, 0.79]), 544 | 'orchid4': np.array([0.55, 0.28, 0.54]), 545 | 'plum1': np.array([1.00, 0.73, 1.00]), 546 | 'plum2': np.array([0.93, 0.68, 0.93]), 547 | 'plum3': np.array([0.80, 0.59, 0.80]), 548 | 'plum4': np.array([0.55, 0.40, 0.55]), 549 | 'MediumOrchid1': np.array([0.88, 0.40, 1.00]), 550 | 'MediumOrchid2': np.array([0.82, 0.37, 0.93]), 551 | 'MediumOrchid3': np.array([0.71, 0.32, 0.80]), 552 | 'MediumOrchid4': np.array([0.48, 0.22, 0.55]), 553 | 'DarkOrchid1': np.array([0.75, 0.24, 1.00]), 554 | 'DarkOrchid2': np.array([0.70, 0.23, 0.93]), 555 | 'DarkOrchid3': np.array([0.60, 0.20, 0.80]), 556 | 'DarkOrchid4': np.array([0.41, 0.13, 0.55]), 557 | 'purple1': np.array([0.61, 0.19, 1.00]), 558 | 'purple2': np.array([0.57, 0.17, 0.93]), 559 | 'purple3': np.array([0.49, 0.15, 0.80]), 560 | 'purple4': np.array([0.33, 0.10, 0.55]), 561 | 'MediumPurple1': np.array([0.67, 0.51, 1.00]), 562 | 'MediumPurple2': np.array([0.62, 0.47, 0.93]), 563 | 'MediumPurple3': np.array([0.54, 0.41, 0.80]), 564 | 'MediumPurple4': np.array([0.36, 0.28, 0.55]), 565 | 'thistle1': np.array([1.00, 0.88, 1.00]), 566 | 'thistle2': np.array([0.93, 0.82, 0.93]), 567 | 'thistle3': np.array([0.80, 0.71, 0.80]), 568 | 'thistle4': np.array([0.55, 0.48, 0.55]), 569 | 'gray0': np.array([0.00, 0.00, 0.00]), 570 | 'grey0': np.array([0.00, 0.00, 0.00]), 571 | 'gray1': np.array([0.01, 0.01, 0.01]), 572 | 'grey1': np.array([0.01, 0.01, 0.01]), 573 | 'gray2': np.array([0.02, 0.02, 0.02]), 574 | 'grey2': np.array([0.02, 0.02, 0.02]), 575 | 'gray3': np.array([0.03, 0.03, 0.03]), 576 | 'grey3': np.array([0.03, 0.03, 0.03]), 577 | 'gray4': np.array([0.04, 0.04, 0.04]), 578 | 'grey4': np.array([0.04, 0.04, 0.04]), 579 | 'gray5': np.array([0.05, 0.05, 0.05]), 580 | 'grey5': np.array([0.05, 0.05, 0.05]), 581 | 'gray6': np.array([0.06, 0.06, 0.06]), 582 | 'grey6': np.array([0.06, 0.06, 0.06]), 583 | 'gray7': np.array([0.07, 0.07, 0.07]), 584 | 'grey7': np.array([0.07, 0.07, 0.07]), 585 | 'gray8': np.array([0.08, 0.08, 0.08]), 586 | 'grey8': np.array([0.08, 0.08, 0.08]), 587 | 'gray9': np.array([0.09, 0.09, 0.09]), 588 | 'grey9': np.array([0.09, 0.09, 0.09]), 589 | 'gray10': np.array([0.10, 0.10, 0.10]), 590 | 'grey10': np.array([0.10, 0.10, 0.10]), 591 | 'gray11': np.array([0.11, 0.11, 0.11]), 592 | 'grey11': np.array([0.11, 0.11, 0.11]), 593 | 'gray12': np.array([0.12, 0.12, 0.12]), 594 | 'grey12': np.array([0.12, 0.12, 0.12]), 595 | 'gray13': np.array([0.13, 0.13, 0.13]), 596 | 'grey13': np.array([0.13, 0.13, 0.13]), 597 | 'gray14': np.array([0.14, 0.14, 0.14]), 598 | 'grey14': np.array([0.14, 0.14, 0.14]), 599 | 'gray15': np.array([0.15, 0.15, 0.15]), 600 | 'grey15': np.array([0.15, 0.15, 0.15]), 601 | 'gray16': np.array([0.16, 0.16, 0.16]), 602 | 'grey16': np.array([0.16, 0.16, 0.16]), 603 | 'gray17': np.array([0.17, 0.17, 0.17]), 604 | 
'grey17': np.array([0.17, 0.17, 0.17]), 605 | 'gray18': np.array([0.18, 0.18, 0.18]), 606 | 'grey18': np.array([0.18, 0.18, 0.18]), 607 | 'gray19': np.array([0.19, 0.19, 0.19]), 608 | 'grey19': np.array([0.19, 0.19, 0.19]), 609 | 'gray20': np.array([0.20, 0.20, 0.20]), 610 | 'grey20': np.array([0.20, 0.20, 0.20]), 611 | 'gray21': np.array([0.21, 0.21, 0.21]), 612 | 'grey21': np.array([0.21, 0.21, 0.21]), 613 | 'gray22': np.array([0.22, 0.22, 0.22]), 614 | 'grey22': np.array([0.22, 0.22, 0.22]), 615 | 'gray23': np.array([0.23, 0.23, 0.23]), 616 | 'grey23': np.array([0.23, 0.23, 0.23]), 617 | 'gray24': np.array([0.24, 0.24, 0.24]), 618 | 'grey24': np.array([0.24, 0.24, 0.24]), 619 | 'gray25': np.array([0.25, 0.25, 0.25]), 620 | 'grey25': np.array([0.25, 0.25, 0.25]), 621 | 'gray26': np.array([0.26, 0.26, 0.26]), 622 | 'grey26': np.array([0.26, 0.26, 0.26]), 623 | 'gray27': np.array([0.27, 0.27, 0.27]), 624 | 'grey27': np.array([0.27, 0.27, 0.27]), 625 | 'gray28': np.array([0.28, 0.28, 0.28]), 626 | 'grey28': np.array([0.28, 0.28, 0.28]), 627 | 'gray29': np.array([0.29, 0.29, 0.29]), 628 | 'grey29': np.array([0.29, 0.29, 0.29]), 629 | 'gray30': np.array([0.30, 0.30, 0.30]), 630 | 'grey30': np.array([0.30, 0.30, 0.30]), 631 | 'gray31': np.array([0.31, 0.31, 0.31]), 632 | 'grey31': np.array([0.31, 0.31, 0.31]), 633 | 'gray32': np.array([0.32, 0.32, 0.32]), 634 | 'grey32': np.array([0.32, 0.32, 0.32]), 635 | 'gray33': np.array([0.33, 0.33, 0.33]), 636 | 'grey33': np.array([0.33, 0.33, 0.33]), 637 | 'gray34': np.array([0.34, 0.34, 0.34]), 638 | 'grey34': np.array([0.34, 0.34, 0.34]), 639 | 'gray35': np.array([0.35, 0.35, 0.35]), 640 | 'grey35': np.array([0.35, 0.35, 0.35]), 641 | 'gray36': np.array([0.36, 0.36, 0.36]), 642 | 'grey36': np.array([0.36, 0.36, 0.36]), 643 | 'gray37': np.array([0.37, 0.37, 0.37]), 644 | 'grey37': np.array([0.37, 0.37, 0.37]), 645 | 'gray38': np.array([0.38, 0.38, 0.38]), 646 | 'grey38': np.array([0.38, 0.38, 0.38]), 647 | 'gray39': np.array([0.39, 0.39, 0.39]), 648 | 'grey39': np.array([0.39, 0.39, 0.39]), 649 | 'gray40': np.array([0.40, 0.40, 0.40]), 650 | 'grey40': np.array([0.40, 0.40, 0.40]), 651 | 'gray41': np.array([0.41, 0.41, 0.41]), 652 | 'grey41': np.array([0.41, 0.41, 0.41]), 653 | 'gray42': np.array([0.42, 0.42, 0.42]), 654 | 'grey42': np.array([0.42, 0.42, 0.42]), 655 | 'gray43': np.array([0.43, 0.43, 0.43]), 656 | 'grey43': np.array([0.43, 0.43, 0.43]), 657 | 'gray44': np.array([0.44, 0.44, 0.44]), 658 | 'grey44': np.array([0.44, 0.44, 0.44]), 659 | 'gray45': np.array([0.45, 0.45, 0.45]), 660 | 'grey45': np.array([0.45, 0.45, 0.45]), 661 | 'gray46': np.array([0.46, 0.46, 0.46]), 662 | 'grey46': np.array([0.46, 0.46, 0.46]), 663 | 'gray47': np.array([0.47, 0.47, 0.47]), 664 | 'grey47': np.array([0.47, 0.47, 0.47]), 665 | 'gray48': np.array([0.48, 0.48, 0.48]), 666 | 'grey48': np.array([0.48, 0.48, 0.48]), 667 | 'gray49': np.array([0.49, 0.49, 0.49]), 668 | 'grey49': np.array([0.49, 0.49, 0.49]), 669 | 'gray50': np.array([0.50, 0.50, 0.50]), 670 | 'grey50': np.array([0.50, 0.50, 0.50]), 671 | 'gray51': np.array([0.51, 0.51, 0.51]), 672 | 'grey51': np.array([0.51, 0.51, 0.51]), 673 | 'gray52': np.array([0.52, 0.52, 0.52]), 674 | 'grey52': np.array([0.52, 0.52, 0.52]), 675 | 'gray53': np.array([0.53, 0.53, 0.53]), 676 | 'grey53': np.array([0.53, 0.53, 0.53]), 677 | 'gray54': np.array([0.54, 0.54, 0.54]), 678 | 'grey54': np.array([0.54, 0.54, 0.54]), 679 | 'gray55': np.array([0.55, 0.55, 0.55]), 680 | 'grey55': np.array([0.55, 0.55, 0.55]), 681 | 'gray56': 
np.array([0.56, 0.56, 0.56]), 682 | 'grey56': np.array([0.56, 0.56, 0.56]), 683 | 'gray57': np.array([0.57, 0.57, 0.57]), 684 | 'grey57': np.array([0.57, 0.57, 0.57]), 685 | 'gray58': np.array([0.58, 0.58, 0.58]), 686 | 'grey58': np.array([0.58, 0.58, 0.58]), 687 | 'gray59': np.array([0.59, 0.59, 0.59]), 688 | 'grey59': np.array([0.59, 0.59, 0.59]), 689 | 'gray60': np.array([0.60, 0.60, 0.60]), 690 | 'grey60': np.array([0.60, 0.60, 0.60]), 691 | 'gray61': np.array([0.61, 0.61, 0.61]), 692 | 'grey61': np.array([0.61, 0.61, 0.61]), 693 | 'gray62': np.array([0.62, 0.62, 0.62]), 694 | 'grey62': np.array([0.62, 0.62, 0.62]), 695 | 'gray63': np.array([0.63, 0.63, 0.63]), 696 | 'grey63': np.array([0.63, 0.63, 0.63]), 697 | 'gray64': np.array([0.64, 0.64, 0.64]), 698 | 'grey64': np.array([0.64, 0.64, 0.64]), 699 | 'gray65': np.array([0.65, 0.65, 0.65]), 700 | 'grey65': np.array([0.65, 0.65, 0.65]), 701 | 'gray66': np.array([0.66, 0.66, 0.66]), 702 | 'grey66': np.array([0.66, 0.66, 0.66]), 703 | 'gray67': np.array([0.67, 0.67, 0.67]), 704 | 'grey67': np.array([0.67, 0.67, 0.67]), 705 | 'gray68': np.array([0.68, 0.68, 0.68]), 706 | 'grey68': np.array([0.68, 0.68, 0.68]), 707 | 'gray69': np.array([0.69, 0.69, 0.69]), 708 | 'grey69': np.array([0.69, 0.69, 0.69]), 709 | 'gray70': np.array([0.70, 0.70, 0.70]), 710 | 'grey70': np.array([0.70, 0.70, 0.70]), 711 | 'gray71': np.array([0.71, 0.71, 0.71]), 712 | 'grey71': np.array([0.71, 0.71, 0.71]), 713 | 'gray72': np.array([0.72, 0.72, 0.72]), 714 | 'grey72': np.array([0.72, 0.72, 0.72]), 715 | 'gray73': np.array([0.73, 0.73, 0.73]), 716 | 'grey73': np.array([0.73, 0.73, 0.73]), 717 | 'gray74': np.array([0.74, 0.74, 0.74]), 718 | 'grey74': np.array([0.74, 0.74, 0.74]), 719 | 'gray75': np.array([0.75, 0.75, 0.75]), 720 | 'grey75': np.array([0.75, 0.75, 0.75]), 721 | 'gray76': np.array([0.76, 0.76, 0.76]), 722 | 'grey76': np.array([0.76, 0.76, 0.76]), 723 | 'gray77': np.array([0.77, 0.77, 0.77]), 724 | 'grey77': np.array([0.77, 0.77, 0.77]), 725 | 'gray78': np.array([0.78, 0.78, 0.78]), 726 | 'grey78': np.array([0.78, 0.78, 0.78]), 727 | 'gray79': np.array([0.79, 0.79, 0.79]), 728 | 'grey79': np.array([0.79, 0.79, 0.79]), 729 | 'gray80': np.array([0.80, 0.80, 0.80]), 730 | 'grey80': np.array([0.80, 0.80, 0.80]), 731 | 'gray81': np.array([0.81, 0.81, 0.81]), 732 | 'grey81': np.array([0.81, 0.81, 0.81]), 733 | 'gray82': np.array([0.82, 0.82, 0.82]), 734 | 'grey82': np.array([0.82, 0.82, 0.82]), 735 | 'gray83': np.array([0.83, 0.83, 0.83]), 736 | 'grey83': np.array([0.83, 0.83, 0.83]), 737 | 'gray84': np.array([0.84, 0.84, 0.84]), 738 | 'grey84': np.array([0.84, 0.84, 0.84]), 739 | 'gray85': np.array([0.85, 0.85, 0.85]), 740 | 'grey85': np.array([0.85, 0.85, 0.85]), 741 | 'gray86': np.array([0.86, 0.86, 0.86]), 742 | 'grey86': np.array([0.86, 0.86, 0.86]), 743 | 'gray87': np.array([0.87, 0.87, 0.87]), 744 | 'grey87': np.array([0.87, 0.87, 0.87]), 745 | 'gray88': np.array([0.88, 0.88, 0.88]), 746 | 'grey88': np.array([0.88, 0.88, 0.88]), 747 | 'gray89': np.array([0.89, 0.89, 0.89]), 748 | 'grey89': np.array([0.89, 0.89, 0.89]), 749 | 'gray90': np.array([0.90, 0.90, 0.90]), 750 | 'grey90': np.array([0.90, 0.90, 0.90]), 751 | 'gray91': np.array([0.91, 0.91, 0.91]), 752 | 'grey91': np.array([0.91, 0.91, 0.91]), 753 | 'gray92': np.array([0.92, 0.92, 0.92]), 754 | 'grey92': np.array([0.92, 0.92, 0.92]), 755 | 'gray93': np.array([0.93, 0.93, 0.93]), 756 | 'grey93': np.array([0.93, 0.93, 0.93]), 757 | 'gray94': np.array([0.94, 0.94, 0.94]), 758 | 'grey94': 
np.array([0.94, 0.94, 0.94]), 759 | 'gray95': np.array([0.95, 0.95, 0.95]), 760 | 'grey95': np.array([0.95, 0.95, 0.95]), 761 | 'gray96': np.array([0.96, 0.96, 0.96]), 762 | 'grey96': np.array([0.96, 0.96, 0.96]), 763 | 'gray97': np.array([0.97, 0.97, 0.97]), 764 | 'grey97': np.array([0.97, 0.97, 0.97]), 765 | 'gray98': np.array([0.98, 0.98, 0.98]), 766 | 'grey98': np.array([0.98, 0.98, 0.98]), 767 | 'gray99': np.array([0.99, 0.99, 0.99]), 768 | 'grey99': np.array([0.99, 0.99, 0.99]), 769 | 'gray100': np.array([1.00, 1.00, 1.00]), 770 | 'grey100': np.array([1.00, 1.00, 1.00]), 771 | 'dark grey': np.array([0.66, 0.66, 0.66]), 772 | 'DarkGrey': np.array([0.66, 0.66, 0.66]), 773 | 'dark gray': np.array([0.66, 0.66, 0.66]), 774 | 'DarkGray': np.array([0.66, 0.66, 0.66]), 775 | 'dark blue': np.array([0.00, 0.00, 0.55]), 776 | 'DarkBlue': np.array([0.00, 0.00, 0.55]), 777 | 'dark cyan': np.array([0.00, 0.55, 0.55]), 778 | 'DarkCyan': np.array([0.00, 0.55, 0.55]), 779 | 'dark magenta': np.array([0.55, 0.00, 0.55]), 780 | 'DarkMagenta': np.array([0.55, 0.00, 0.55]), 781 | 'dark red': np.array([0.55, 0.00, 0.00]), 782 | 'DarkRed': np.array([0.55, 0.00, 0.00]), 783 | 'light green': np.array([0.56, 0.93, 0.56]), 784 | 'LightGreen': np.array([0.56, 0.93, 0.56]) 785 | } 786 | 787 | 788 | if __name__ == '__main__': 789 | main() 790 | --------------------------------------------------------------------------------