├── .gitignore ├── LICENSE ├── README.md ├── convert_to_torch.lua ├── decimate_labels.py ├── decimate_mesh.py ├── labelsIdx.txt ├── labels_to_y.py ├── msh_to_graph.py ├── npy_to_mat.py ├── plot_graph.m ├── seg_to_signal.py ├── subsample_signals.py ├── utils.py └── visualize.sh /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DeepLearningOnGraph 2 | Use a CNN to segment and classify 3d meshes. 3 | 4 | The meshes used for this task are the one provided by the Princeton Segmentation Benchmark available here: 5 | http://segeval.cs.princeton.edu/ 6 | 7 | The labels are extracted from: 8 | http://people.cs.umass.edu/~kalo/papers/LabelMeshes/ 9 | 10 | The visualization of the mesh is done with the tools provided with the Princeton Dataset. The training is done with the help of spectral-lib (https://github.com/mbhenaff/spectral-lib). 11 | -------------------------------------------------------------------------------- /convert_to_torch.lua: -------------------------------------------------------------------------------- 1 | --[[ 2 | Convert mat training and testing files and merge them into a unique .th file (for spectral-lib) 3 | --]] 4 | 5 | matio = require 'matio' -- For loading .mat files 6 | 7 | -- Parametters 8 | 9 | directoryIn = '../Data/Test_mesh_01/samples/' 10 | directoryOut = '../Data/Test_mesh_01/' 11 | 12 | numFeature = 1000 13 | 14 | -- Utils functions 15 | 16 | function alphanumSort(o) 17 | local function padnum(d) return ("%03d%s"):format(#d, d) end 18 | table.sort(o, function(a,b) 19 | return tostring(a):gsub("%d+",padnum) < tostring(b):gsub("%d+",padnum) end) 20 | return o 21 | end 22 | 23 | function stringEndsWith(String, End) 24 | return End=='' or string.sub(String,-string.len(End))==End 25 | end 26 | 27 | function loadListFiles(directoryName) 28 | -- Load, filter, sort and return the list of the files in the given directory 29 | 30 | --Open directory look for files, save data in p. By giving '-type f' as parameter, it returns all files 31 | local p = io.popen('find "'..directoryName..'" -type f | sort') 32 | 33 | filelines = {} 34 | i=1 35 | for file in p:lines() do 36 | if stringEndsWith(file, '.mat') then -- Filter rights files 37 | filelines[i] = file:sub(0, file:len() - 4) 38 | i=i+1 39 | end 40 | end 41 | filelines = alphanumSort(filelines) 42 | return filelines 43 | end 44 | 45 | 46 | -- Main Script 47 | 48 | function getSamplesTensor(mode) 49 | if mode == 'Training' then 50 | listFiles = loadListFiles(directoryIn..'tr/') 51 | nameMode = 'tr' 52 | elseif mode == 'Testing' then 53 | listFiles = loadListFiles(directoryIn..'te/') 54 | nameMode = 'te' 55 | end 56 | 57 | numSample = table.getn(listFiles) 58 | 59 | tensorSamples = torch.Tensor(numSample, 1, numFeature) -- [nSamples x nChannels x nFeatures] 60 | tensorLabels = torch.Tensor(numSample) 61 | 62 | print(numSample..' samples for '..mode) 63 | 64 | -- Load the samples 65 | for i, filename in pairs(listFiles) do --Loop through all files 66 | -- Load the file to convert 67 | print('Loading '..i..': '..filename) 68 | x = matio.load(filename..'.mat') 69 | 70 | -- Conversion and adding it to the tensor 71 | tensorSamples[i][1] = x.y 72 | 73 | -- Debug message (to check correctness) 74 | print('Debug for '..i..':') 75 | for j=1,10 do 76 | print(tensorSamples[i][1][j]) 77 | end 78 | end 79 | 80 | -- Load the labels 81 | x = matio.load(directoryIn..nameMode..'labels.mat') 82 | for i = 1, numSample do 83 | tensorLabels[i] = x.labels[1][i] 84 | end 85 | 86 | print(tensorLabels:size()) 87 | 88 | print('Debug labels:') 89 | for i=1,600 do 90 | print(tensorLabels[i]) 91 | end 92 | 93 | -- Return all 94 | return tensorSamples, tensorLabels 95 | end 96 | 97 | print('----------------------------------------------------') 98 | print('------------------TRAINING------------------------') 99 | print('----------------------------------------------------') 100 | tensTrSamp, tensTrLab = getSamplesTensor('Training') 101 | print('----------------------------------------------------') 102 | print('------------------TESTING-------------------------') 103 | print('----------------------------------------------------') 104 | tensTeSamp, tensTeLab = getSamplesTensor('Testing') 105 | 106 | print('Try saving the dataset...') 107 | torch.save(directoryOut..'meshData.th',{trdata=tensTrSamp , trlabels=tensTrLab , tedata=tensTeSamp , telabels=tensTeLab }) 108 | -------------------------------------------------------------------------------- /decimate_labels.py: -------------------------------------------------------------------------------- 1 | """ 2 | Match the labels (body parts) of each original mesh to its equivalent in decimated mesh 3 | Use 1-NN to match each face of the mesh 4 | By inverting dirLabels and dirLabelsDecimated, it should be possible to do the inverse 5 | mapping 6 | 7 | Just indicate the parameters and target directories. 8 | 9 | Use python 3 (but should be working with python 2) 10 | """ 11 | 12 | import os, sys 13 | import numpy as np 14 | import utils 15 | 16 | # Set directories 17 | root = os.getcwd() 18 | dirMesh = root + '/../MeshsegBenchmark-1.0/data/off/' 19 | dirMeshDecimated = root + '/../MeshsegBenchmark-1.0/data/off_decimated/' # Should contain the sames number and filename as above 20 | dirLabels = root + '/../MeshsegBenchmark-1.0/data/seg/Bench/' 21 | dirLabelsDecimated = root + '/../MeshsegBenchmark-1.0/data/seg/Bench_decimated/' 22 | 23 | 24 | def main(): 25 | 26 | # Global check 27 | assert(os.path.exists(dirMesh)) 28 | assert(os.path.exists(dirMeshDecimated)) 29 | assert(os.path.exists(dirLabels)) 30 | assert(os.path.exists(dirLabelsDecimated)) 31 | 32 | # For each mesh 33 | filesList = utils.sortFiles(os.listdir(dirMesh)) 34 | for filename in filesList: 35 | if filename.endswith('.off'): # Candidate 36 | print('Try matching ', filename) 37 | idMesh = filename.split('.')[0] 38 | 39 | # Loading the original and reduced meshes and compute the center of each face 40 | vertices, faces = utils.extractMesh(dirMesh + filename) 41 | verticesDecimated, facesDecimated = utils.extractMesh(dirMeshDecimated + filename) 42 | 43 | pointCloud = utils.meshToPointCloud(vertices, faces) 44 | pointCloudDecimated = utils.meshToPointCloud(verticesDecimated, facesDecimated) 45 | 46 | # Extract the label list 47 | labelList = utils.loadLabelList(dirLabels + idMesh + '.seg') 48 | labelListDecimated = [] 49 | 50 | # Use K-NN on each face of the decimated mesh 51 | for pointDecimated in pointCloudDecimated: 52 | # Search the closest point index in the original point cloud 53 | minIndex = 0 54 | minDist = -1 # Initialize 55 | for i, point in enumerate(pointCloud): 56 | distance = np.linalg.norm(pointDecimated - point) 57 | if distance < minDist or minDist == -1: 58 | minDist = distance 59 | minIndex = i 60 | 61 | labelListDecimated.append(labelList[minIndex]) 62 | if len(labelListDecimated) % 100 == 0: 63 | print(len(labelListDecimated) / len(pointCloudDecimated) * 100, '%') 64 | 65 | # Save the values of the labels of the closest match indexes 66 | saveName = dirLabelsDecimated + idMesh + '.seg' 67 | print('Saving ', saveName) 68 | utils.saveLabelList(labelListDecimated, saveName) 69 | 70 | 71 | if __name__ == "__main__": 72 | main() 73 | -------------------------------------------------------------------------------- /decimate_mesh.py: -------------------------------------------------------------------------------- 1 | """ 2 | Reduce the mesh number of vertices and faces (compress the mesh) 3 | Load and save in .off format 4 | Use a modify version of Khaled Mamou program for the mesh reduction (BSD Licence) 5 | https://github.com/kmammou/MeshDecimation 6 | 7 | The modifications are just to allow the program to save in .off instead 8 | of .obj (Warning: the starting index for the face are not the same) 9 | 10 | 11 | Just indicate the parameters and target directories. 12 | 13 | Use python 3 (but should be working with python 2) 14 | """ 15 | 16 | import os, sys 17 | 18 | # Set directories 19 | root = os.getcwd() 20 | pathReductionTool = root + '/../Tools/MeshDecimation/src-build/MeshSimplification' 21 | dirDataIn = root + '/../MeshsegBenchmark-1.0/data/off/' 22 | dirDataOut = root + '/../MeshsegBenchmark-1.0/data/off_decimated/' 23 | 24 | # Program parameters 25 | maxDecimationError = 1.0 # Float 26 | targetVertices = 1000 # Integer 27 | targetFaces = 500 28 | 29 | 30 | def main(): 31 | 32 | # Global check 33 | if not os.path.exists(dirDataIn): 34 | raise IOError('Cannot find dirDataIn: ', dirDataIn) 35 | if not os.path.exists(dirDataOut): 36 | raise IOError('Cannot find dirDataOut: ', dirDataOut) 37 | 38 | # For each file 39 | filesList = os.listdir(dirDataIn) 40 | for filename in filesList: 41 | if filename.endswith('.off'): # Candidate 42 | print('Try reducing ', filename) 43 | fileIn = dirDataIn + filename 44 | fileOut = dirDataOut + filename 45 | cmd = "%s %s %d %d %f %s" % (pathReductionTool, fileIn, targetVertices, targetFaces, maxDecimationError, fileOut) 46 | print(cmd) 47 | os.system(cmd) 48 | 49 | 50 | if __name__ == "__main__": 51 | main() 52 | -------------------------------------------------------------------------------- /labelsIdx.txt: -------------------------------------------------------------------------------- 1 | ---Human--- 1 20 2 | lowerleg 3 | torso 4 | upperleg 5 | foot 6 | head 7 | lowerarm 8 | upperarm 9 | hand 10 | ---Cup--- 21 40 11 | body 12 | handle 13 | ---Glasses--- 41 60 14 | lens 15 | skeleton 16 | middle 17 | ---Airplane--- 61 80 18 | body 19 | wing 20 | stabilizer 21 | rudder 22 | engine 23 | ---Ant--- 81 100 24 | torso 25 | head 26 | antenna 27 | back 28 | leg 29 | ---Chair--- 101 120 30 | support 31 | leg 32 | arm 33 | backsupport 34 | ---Octopus--- 121 140 35 | body 36 | leg 37 | ---Table--- 141 160 38 | support 39 | leg 40 | ---Teddy--- 161 180 41 | torso 42 | hand 43 | leg 44 | head 45 | ear 46 | ---Hand--- 181 200 47 | hand 48 | thumb 49 | finger1 50 | finger2 51 | finger3 52 | finger4 53 | ---Plier--- 201 220 54 | center 55 | handle 56 | edge 57 | ---Fish--- 221 240 58 | body 59 | fin 60 | tail 61 | ---Bird--- 241 260 62 | body 63 | head 64 | wing 65 | tail 66 | leg 67 | ---Armadillo--- 281 300 68 | torso 69 | foot 70 | lowerLeg 71 | upperLeg 72 | tail 73 | hand 74 | lowerArm 75 | upperArm 76 | ear 77 | head 78 | back 79 | ---Bust--- 301 320 80 | hair 81 | background 82 | nose 83 | neck 84 | ear 85 | eye 86 | face 87 | mouth 88 | ---Mesh--- 321 340 89 | bigbox 90 | smallbox 91 | smallcylinder 92 | mediumcylinder 93 | bigcylinder 94 | ---Bearing--- 341 360 95 | bigroller 96 | smallroller 97 | tinyroller 98 | mediumroller 99 | base1 100 | base2 101 | ---Vase--- 361 380 102 | body 103 | spout 104 | handle 105 | top 106 | base 107 | ---FouLeg--- 381 400 108 | torso 109 | head 110 | earhorn 111 | leg 112 | neck 113 | tail 114 | -------------------------------------------------------------------------------- /labels_to_y.py: -------------------------------------------------------------------------------- 1 | """ 2 | DEPRECATED: Use seg_to_signal instead !! 3 | 4 | Generate training signals and the segmentation files (used for 5 | visualisation) from the labels 6 | 7 | Just indicate the input and output directory. 8 | 9 | Use python 3 (but should be working with python 2) 10 | """ 11 | 12 | import os, sys 13 | import numpy as np 14 | 15 | # Set directories 16 | root = os.getcwd() 17 | dirLabelData = root + '/../MeshsegBenchmark-1.0/data/labels/' 18 | dirOutSegmentation = root + '/../MeshsegBenchmark-1.0/data/seg/Benchmark2/' 19 | dirOutSignal = root + '/../Data/Signals/' 20 | 21 | 22 | class Category(): 23 | """ 24 | Contain informations of a mesh category (plane, human, ant,...) 25 | """ 26 | def __init__(self): 27 | self.name = "" # Name of the categories 28 | self.min = 0 29 | self.max = 0 # Idx of the mesh 30 | self.labels = [] # List of the labels 31 | 32 | def main(): 33 | 34 | # Global check 35 | if not os.path.exists(dirLabelData): 36 | raise IOError('Cannot find dirLabelData: ', dirLabelData) 37 | if not os.path.exists(dirOutSegmentation): 38 | raise IOError('Cannot find dirOutSegmentation: ', dirOutSegmentation) 39 | if not os.path.exists(dirOutSignal): 40 | raise IOError('Cannot find dirOutSignal: ', dirOutSignal) 41 | 42 | # First, extract the labels idx for each category 43 | labelsFile = open(root + '/labelsIdx.txt', 'r') 44 | lines = labelsFile.readlines() 45 | labelsFile.close() 46 | 47 | categories = [] 48 | i = 0 # Current category 49 | for line in lines: 50 | words = line.split() 51 | if len(words) == 3: # New category 52 | print('Category: ', words[0]) 53 | newCategory = Category() 54 | newCategory.name = words[0] 55 | newCategory.min = int(words[1]) 56 | newCategory.max = int(words[2]) 57 | categories.append(newCategory) # Add a new categories 58 | i = i+1 59 | if len(words) == 1: # Label name 60 | print('Label: ', words[0]) 61 | categories[i-1].labels.append(words[0]) 62 | else: 63 | pass 64 | print(len(categories), ' categories detected.') 65 | 66 | # For each labels 67 | labelsFilesList = os.listdir(dirLabelData) 68 | for labelsFilename in labelsFilesList: 69 | idMesh = int(labelsFilename.split('_')[0]) # Little hack to get the id 70 | idCategory = -1 71 | for i, category in enumerate(categories): 72 | if idMesh <= category.max and idMesh >= category.min: 73 | idCategory = i 74 | assert idCategory != -1, 'Error: no corresponding category for the mesh %d' % idMesh 75 | 76 | # Extract 77 | labelList = readLabels(dirLabelData + labelsFilename, categories[idCategory]) 78 | 79 | # Save result 80 | saveLabels(labelList, idMesh) 81 | 82 | 83 | def readLabels(labelsFilename, category): 84 | """ 85 | Extract the labels informations from the file 86 | 87 | Args: 88 | labelsFilename: path of the label file 89 | Returns: 90 | Array of the index of each vertex 91 | """ 92 | 93 | # Open the file 94 | print('Open file ', labelsFilename) 95 | print('Category: ', category.name) 96 | labelsFile = open(labelsFilename, 'r') 97 | lines = labelsFile.readlines() 98 | labelsFile.close() 99 | 100 | # Extract rows 101 | labelIdx = -1 102 | #labelsArray = [[] for i in range(len(category.labels))] 103 | labelsArray = len(category.labels) * [[]] 104 | for line in lines: 105 | words = line.split() 106 | if len(words) == 1: # Label name 107 | labelIdx = category.labels.index(words[0]) 108 | else: 109 | row = [] 110 | for word in words: 111 | row.append(int(word)) 112 | labelsArray[labelIdx] = row 113 | 114 | ## Sort the rows (same for everyone) 115 | ## TODO: Manually associate labels with name for each of the category (get category online, 116 | ## reacord the category on a text file and compare it to have the right id) !!!!!!! 117 | #points = zip(labelsNameArray, labelsArray) 118 | #sorted_points = sorted(points) 119 | #labelsNameArray = [point[0] for point in sorted_points] 120 | #labelsArray = [point[1] for point in sorted_points] 121 | 122 | #print('Extracted labels: ', labelsNameArray) 123 | 124 | # Merge the rows 125 | totalLen = 0 126 | for subList in labelsArray: 127 | for i in subList: 128 | if i > totalLen: 129 | totalLen = i 130 | print ('Nb total of element', totalLen) 131 | labelList = np.zeros(totalLen, dtype=np.uint8) # Prealocate 132 | # WARNING: Due to the dtype=np.uint8, there cannot be more than 255 label categories 133 | 134 | for i in range(len(labelsArray)): 135 | for j in range(len(labelsArray[i])): 136 | labelList[labelsArray[i][j] - 1] = i 137 | 138 | return labelList 139 | 140 | def saveLabels(labelList, idMesh): 141 | """ 142 | Generate the input files 143 | Multiples files can be generated (signals and seg files) 144 | 145 | Args: 146 | labelList: list of the labels 147 | idMesh: name of the mesh filename to save 148 | Returns: 149 | Array of the index of each vertex 150 | """ 151 | # Options 152 | writeSegmentation = True 153 | writeSignal = True 154 | subsample = True 155 | 156 | if writeSegmentation: 157 | # Write the segmentations files (for visualisation) 158 | segFile = open(dirOutSegmentation + '%d.seg' % idMesh , "w") 159 | for label in labelList: 160 | segFile.write('%d\n' % label) 161 | segFile.close() 162 | 163 | if writeSignal: 164 | # Generate some signal files 165 | numberCategory = max(labelList) + 1 # Number of labels 166 | for i in range(numberCategory): 167 | print('Generate signal for label ', i) 168 | 169 | # Generate the signal (1 for current label, 0 otherwise) 170 | currentSignal = np.zeros(labelList.shape) # Copy the signal we will modify (no need for deepcopy here) 171 | currentSignal[labelList == i] = 1 172 | currentSignal[labelList != i] = 0 173 | 174 | # Check if at least 1 of the current label is present (ex: mesh with no arm), skip if otherwise 175 | skip = False 176 | if max(currentSignal) != 1: 177 | print('Warning: skip idx ', i, ' for the mesh ', idMesh, ' (Not existant)') 178 | skip = True 179 | 180 | # Record the signal 181 | if skip == False: 182 | # TODO: Subsample the signal to generate more training sample 183 | signalFile = open(dirOutSignal + '%d_%d.txt' % (idMesh, i) , "w") 184 | for signal in currentSignal: 185 | signalFile.write('%d\n' % signal) 186 | signalFile.close() 187 | 188 | if __name__ == "__main__": 189 | main() 190 | -------------------------------------------------------------------------------- /msh_to_graph.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generate dataset from mesh meshFilesList 3 | 4 | Just indicate the input and output folder by modifying dirMeshData and dirOut. 5 | The mesh have to be from ".off" format 6 | The laplacian generated is normalized 7 | 8 | Use python 3 (but should be working with python 2) 9 | """ 10 | 11 | import os, sys 12 | import pygsp 13 | import numpy as np 14 | import utils 15 | 16 | # Set directories 17 | root = os.getcwd() 18 | dirMeshData = root + '/../MeshsegBenchmark-1.0/data/off_decimated/' 19 | dirOut = root + '/../Data/Meshes_decimated/' 20 | dirOutPlot = root + '/../Data/Plot_decimated/' 21 | 22 | startFromMesh = '1.off' # Avoid recomputing from scratch (1.off to restart from begining) 23 | 24 | def main(): 25 | 26 | # Global check 27 | assert(os.path.exists(dirMeshData)) 28 | assert(os.path.exists(dirOut)) 29 | assert(os.path.exists(dirOutPlot)) 30 | 31 | # For each mesh 32 | meshFilesList = utils.sortFiles(os.listdir(dirMeshData), startFromMesh) 33 | for meshFilename in meshFilesList: 34 | # Generate the graph data 35 | idMesh = meshFilename.split('.')[0] 36 | print('Extract mesh ', idMesh) 37 | meshFilename = dirMeshData + '%s.off' % idMesh 38 | meshVertices, meshFaces = utils.extractMesh(meshFilename) 39 | print('Compute point cloud...') 40 | pointCloud = utils.meshToPointCloud(meshVertices, meshFaces) 41 | print('Create graph...') 42 | #graph = pygsp.graphs.NNGraph(pointCloud, NNtype='radius', center=True, rescale=True, epsilon=0.2) 43 | graph = pygsp.graphs.NNGraph(pointCloud, center=True, rescale=True)# > Use KNN, much faster 44 | print('Generate laplacian...') 45 | graph.create_laplacian('normalized') 46 | 47 | # Save result 48 | print('Saving...') 49 | baseSaveName = dirOut + idMesh 50 | np.save(baseSaveName + '_cloud.npy', pointCloud, fix_imports=True) 51 | np.save(baseSaveName + '_graph.npy', graph.W, fix_imports=True) 52 | np.save(baseSaveName + '_laplacian.npy', graph.L, fix_imports=True) # fix_imports set to allow retrocompatibility with python 2 53 | 54 | # Save plot 55 | print('Plotting ', graph.N, ' nodes, ', graph.Ne, ' edges') 56 | plotSavedName = dirOutPlot + idMesh + '_' 57 | graph.plot (default_qtg=False, savefig=True, plot_name=plotSavedName + 'graph') 58 | #input("Press Enter to continue...") 59 | 60 | if __name__ == "__main__": 61 | main() 62 | -------------------------------------------------------------------------------- /npy_to_mat.py: -------------------------------------------------------------------------------- 1 | """ 2 | Convert the .npy into .mat (for plotting into matlab) 3 | 4 | Just indicate the input and output directory. 5 | 6 | Use python 3 (but should be working with python 2) 7 | """ 8 | 9 | import os, sys 10 | import numpy as np 11 | import scipy.io as sio 12 | import utils 13 | 14 | # Set directories 15 | root = os.getcwd() 16 | #dirDataIn = root + '/../Data/Meshs/' 17 | dirDataIn = root + '/../Data/Test_mesh_01/samples/' 18 | dirDataOut = root + '/../Data/Test_mesh_01/samples_mat/' 19 | savedVariableName = 'y' 20 | 21 | 22 | def main(): 23 | 24 | # Global check 25 | assert(os.path.exists(dirDataIn)) 26 | assert(os.path.exists(dirDataOut)) 27 | 28 | # For each file 29 | filesList = os.listdir(dirDataIn) 30 | for filename in filesList: 31 | print('Try converting ', filename) 32 | name = filename.split('.')[0] # Little hack to get the filename 33 | if filename.endswith('.npy'): # Candidate 34 | matrix = np.load(dirDataIn + filename, fix_imports = True) 35 | elif filename.endswith('.txt'): # Candidate 36 | matrix = utils.loadLabelList(dirDataIn + filename) 37 | else: 38 | print('Wrong format, skiped') 39 | continue 40 | sio.savemat(dirDataOut + name + '.mat', {savedVariableName:matrix}) 41 | 42 | if __name__ == "__main__": 43 | main() 44 | -------------------------------------------------------------------------------- /plot_graph.m: -------------------------------------------------------------------------------- 1 | clearvars; 2 | 3 | % Initialisation 4 | idMesh = '213'; 5 | folderName = '../Data/Meshs/'; 6 | 7 | addpath(genpath('../Tools/gspbox/')); 8 | gsp_start 9 | 10 | % Loading the coordinates and crating the graph 11 | coords = load([folderName, idMesh, '_cloud.mat']); 12 | weights = load([folderName, idMesh, '_graph.mat']); 13 | 14 | G = gsp_graph(weights.M{1,1}); 15 | G.coords = coords.M; 16 | 17 | disp(['Plot ', num2str(size(coords.M, 1)), ' vertices']) 18 | 19 | % Plotting the graph 20 | param = struct; 21 | %param.show_edges = 1; % Warning: Really slow !!! 22 | gsp_plot_graph(G, param); -------------------------------------------------------------------------------- /seg_to_signal.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generate training signals and the segmentation files (used for 3 | visualisation) from the labels 4 | 5 | Just indicate the input and output directory. 6 | 7 | Use python 3 (but should be working with python 2) 8 | """ 9 | 10 | import os, sys 11 | import numpy as np 12 | import utils 13 | 14 | # Set directories 15 | root = os.getcwd() 16 | dirInSegmentation = root + '/../MeshsegBenchmark-1.0/data/seg/Bench_decimated/' 17 | dirOutSignal = root + '/../Data/Signals_decimated/' 18 | 19 | 20 | def main(): 21 | 22 | # Global check 23 | assert(os.path.exists(dirInSegmentation)) 24 | assert(os.path.exists(dirOutSignal)) 25 | 26 | # For each mesh 27 | labelsFilesList = utils.sortFiles(os.listdir(dirInSegmentation)) 28 | for labelsFilename in labelsFilesList: 29 | print('Generate signal for ', labelsFilename) 30 | idMesh = labelsFilename.split('.')[0] # Little hack to get the id 31 | 32 | # Eventually create the subfolder where to put the signal 33 | dirOutCurrentSignal = dirOutSignal + idMesh + '/' 34 | if not os.path.exists(dirOutCurrentSignal): 35 | os.makedirs(dirOutCurrentSignal) 36 | 37 | # Extract 38 | labelList = utils.loadLabelList(dirInSegmentation + labelsFilename) 39 | 40 | # Generate some signal files 41 | generateSignal(labelList, idMesh) 42 | 43 | 44 | def generateSignal(labelList, idMesh): 45 | """ 46 | Generate the input files 47 | Multiples files can be generated (signals and seg files) 48 | 49 | Warning: The subfolder name has to be consistent with the one describe in 50 | the main function 51 | 52 | Args: 53 | labelList: list of the labels 54 | idMesh: name of the mesh filename to save 55 | Returns: 56 | Array of the index of each vertex 57 | """ 58 | 59 | # Generate some signal files 60 | numberCategory = max(labelList) + 1 # Number of labels 61 | for i in range(numberCategory): 62 | # Check if at least 1 of the current label is present (ex: mesh with no arm), skip if otherwise 63 | if i in labelList: 64 | print('Generate signal for label ', i) 65 | 66 | # Generate the signal (1 for current label, 0 otherwise) 67 | currentSignal = np.zeros((len(labelList), 1)) # Copy the signal we will modify (no need for deepcopy here) 68 | currentSignal[labelList == i] = 1 69 | currentSignal[labelList != i] = 0 70 | 71 | # Record the signal (On the subfolder) 72 | # We do not subsample the signal here (to generate more training sample) but we do it when loading the training sample 73 | utils.saveLabelList(currentSignal, dirOutSignal + '%s/%s_%d.txt' % (idMesh, idMesh, i)) 74 | else: 75 | print ('Warning: skip idx ', i, ' for the mesh ', idMesh, ' (Not existant)') 76 | 77 | 78 | if __name__ == "__main__": 79 | main() 80 | -------------------------------------------------------------------------------- /subsample_signals.py: -------------------------------------------------------------------------------- 1 | """ 2 | Subsample the signals files to demultipliate the number of 3 | training samples. 4 | This is the script which generate the dataset training and 5 | testing samples (before convertion) 6 | 7 | Warning: Be careful to the natural orders of the files (the labels 8 | must be on the same order than the file lists). Same for the conversion 9 | to lua (orders must correspond too) 10 | Warning: In the current version, all categories must be present (there 11 | cannot be a model without an arm for instance). No skipped allowed 12 | 13 | Just indicate the input and output directory. 14 | 15 | Use python 3 (but should be working with python 2) 16 | """ 17 | 18 | import os, sys 19 | import random 20 | import numpy as np 21 | import scipy.io as sio 22 | import utils 23 | 24 | # Set directories 25 | root = os.getcwd() 26 | dirInSignals = root + '/../Data/Test_mesh_01/signals/' 27 | dirOutSamples = root + '/../Data/Test_mesh_01/samples/' 28 | dirOutSamplesTr = dirOutSamples + 'tr/' 29 | dirOutSamplesTe = dirOutSamples + 'te/' 30 | 31 | ratioSubsampling = 80 # 0 > no subsampling, 100 > full subsampling 32 | 33 | 34 | def main(): 35 | 36 | # Global check 37 | assert(os.path.exists(dirInSignals)) 38 | assert(os.path.exists(dirOutSamples)) 39 | assert(os.path.exists(dirOutSamplesTr)) 40 | assert(os.path.exists(dirOutSamplesTe)) 41 | 42 | # Training and testing labels 43 | labelsListTr = [] 44 | labelsListTe = [] 45 | 46 | # For each mesh 47 | signalsFilesList = utils.sortFiles(os.listdir(dirInSignals)) # Warning: Natural order important 48 | categoryId = 0 # TODO: Should be extracted from the filename 49 | for signalFilename in signalsFilesList: # One file correspond to one category (class) 50 | # Load signal 51 | print('Subsample ', signalFilename) 52 | idSignal = signalFilename.split('.')[0] # Little hack to get the id 53 | completeSignal = utils.loadLabelList(dirInSignals + signalFilename) 54 | 55 | # Training subsampling 56 | # Randomly remove signals from categories 57 | # For each signal, we generate multples samples 58 | print('Training...') 59 | for i in range(500): # TODO: Tune this variable (dynamically depend of the signal ?) 60 | decimatedSignal = np.copy(completeSignal) 61 | for j in range(len(completeSignal)): # Iterate over 62 | if completeSignal[j] == 1: # Candidate for subsampling 63 | if random.randrange(100) < ratioSubsampling: # x% chance of removal 64 | decimatedSignal[j] = 0 # Subsample 65 | utils.saveLabelList(decimatedSignal, dirOutSamplesTr + idSignal + '_' + str(i) + '.txt') # Save (Warning: the natural order must correspond to the label list order) 66 | sio.savemat(dirOutSamplesTr + idSignal + '_' + str(i) + '.mat', {'y':decimatedSignal}) 67 | labelsListTr.append(categoryId) # Add the category of the label (Warning: No categories skippables !!!) 68 | 69 | # Testing subsampling 70 | # Only one vertex active, the rest of zeroes 71 | print('Testing...') 72 | for j in range(len(completeSignal)): # Iterate over the signal 73 | if completeSignal[j] == 1: # Candidate for subsampling 74 | decimatedSignal = np.zeros(completeSignal.shape) # Only zeroes 75 | decimatedSignal[j] = 1 # Except for the current signal 76 | utils.saveLabelList(decimatedSignal, dirOutSamplesTe + idSignal + '_' + str(j) + '.txt') 77 | sio.savemat(dirOutSamplesTe + idSignal + '_' + str(j) + '.mat', {'y':decimatedSignal}) 78 | labelsListTe.append(categoryId) # Add the category of the label (Warning: No categories skippables !!!) 79 | 80 | categoryId += 1 # Next categories 81 | 82 | # Save labels 83 | utils.saveLabelList(labelsListTr, dirOutSamples + 'trlabels.txt') 84 | utils.saveLabelList(labelsListTe, dirOutSamples + 'telabels.txt') 85 | sio.savemat(dirOutSamples + 'trlabels.mat', {'labels':np.array(labelsListTr)}) 86 | sio.savemat(dirOutSamples + 'telabels.mat', {'labels':np.array(labelsListTe)}) 87 | 88 | 89 | if __name__ == "__main__": 90 | main() 91 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Some utilities functions used in multiple scripts 3 | 4 | Use python 3 (but should be working with python 2) 5 | """ 6 | 7 | import os, sys 8 | import re 9 | import numpy as np 10 | 11 | def extractMesh(meshFilename): 12 | """ 13 | Extract the mesh informations from the file 14 | 15 | Args: 16 | meshFilename: path of the mesh file (.off format only !!) 17 | Returns: 18 | vertices: Array of the x,y,z position of each mesh points 19 | faces: Array of the vertex indexes of each triangle 20 | """ 21 | 22 | # Open the file 23 | print('Open file ', meshFilename) 24 | meshFile = open(meshFilename, 'r') 25 | lines = meshFile.readlines() 26 | meshFile.close() 27 | 28 | # Initialisation and global information 29 | meshCount = lines[1].split() 30 | vertexCount = int(meshCount[0]) 31 | faceCount = int(meshCount[1]) 32 | edgeCount = int(meshCount[2]) 33 | print('Mesh: ', vertexCount, ' vertices, ', faceCount, ' faces, ', edgeCount, ' edges') 34 | 35 | vertices = [] 36 | faces = [] 37 | 38 | # For each line of the file 39 | for line in lines[2:]: # Skip the first two lines (OFF and number of vertices) 40 | words = line.split() 41 | 42 | if len(words) == 3: # Read points 43 | # Save each point coordinates in an array 44 | vertices.append([float(words[0]), float(words[1]), float(words[2])]) 45 | elif len(words) == 4: # Read triangles >> vertex 46 | faces.append([int(words[1]), int(words[2]), int(words[3])]) 47 | 48 | if len(vertices) != vertexCount: 49 | print('Error: Number of vertices does not matches') 50 | if len(faces) != faceCount: 51 | print('Error: Number of faces does not matches') 52 | 53 | return vertices, faces 54 | 55 | def meshToPointCloud(meshVertices, meshFaces): 56 | """ 57 | Compute the point clouds informations from the mesh informations 58 | 59 | Args: 60 | vertices: Array of the x,y,z position of each mesh points 61 | faces: Array of the vertex indexes of each triangle 62 | Returns: 63 | A point cloud (list of coordinates) 64 | """ 65 | 66 | Xin = np.zeros((len(meshFaces), 3)) 67 | 68 | for i, face in enumerate(meshFaces): 69 | # Compute the centroid of the triangle 70 | 71 | A = np.array(meshVertices[face[0]]) 72 | B = np.array(meshVertices[face[1]]) 73 | C = np.array(meshVertices[face[2]]) # Triangle coordinates 74 | 75 | center = (A + B + C)/3 76 | Xin[i,:] = center 77 | 78 | return Xin 79 | 80 | def sortFiles(filesList, startFrom=None): 81 | """ 82 | Generate an ordered list of files to compute 83 | 84 | Args: 85 | filesList: The file list to sort (will be modified) 86 | startFrom: To start from a particular file (avoid recomputing the beginning each times) 87 | Returns: 88 | The sorted list 89 | """ 90 | 91 | filesList = naturalSort(filesList) # Alphabetical order 92 | startIndex = 0 93 | if startFrom != None: 94 | startIndex = filesList.index(startFrom) # Raise an exception if not found 95 | return filesList[startIndex:] 96 | 97 | def naturalSort(l): 98 | """ 99 | Return the list sorted correctly (take into account the numerical values 100 | instead of just the alphabetical order) 101 | 102 | Args: 103 | l: The file list to sort (will be modified) 104 | Returns: 105 | The sorted list 106 | """ 107 | convert = lambda text: int(text) if text.isdigit() else text.lower() 108 | alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ] 109 | return sorted(l, key = alphanum_key) 110 | 111 | def loadLabelList(filename): 112 | """ 113 | Load the labels informations from the segmentation file 114 | 115 | Args: 116 | filename: path of the label file 117 | Returns: 118 | Array of the label of each node 119 | """ 120 | assert filename.endswith('.seg') or filename.endswith('.txt'), 'Wrong file format' 121 | 122 | labelList = [] 123 | 124 | labelsFile = open(filename, 'r') 125 | lines = labelsFile.readlines() 126 | labelsFile.close() 127 | 128 | # Extract rows 129 | for line in lines: 130 | labelList.append(int(line)) 131 | 132 | return np.asarray(labelList) 133 | 134 | def saveLabelList(labelList, filename): 135 | """ 136 | Save the labels informations on a seg file (or signal file) 137 | 138 | Args: 139 | labelsFilename: Array of the label of each node 140 | filename: path of the label file 141 | """ 142 | assert filename.endswith('.seg') or filename.endswith('.txt'), 'Wrong file format' 143 | 144 | segFile = open(filename, "w") 145 | for label in labelList: 146 | segFile.write('%d\n' % label) 147 | segFile.close() 148 | -------------------------------------------------------------------------------- /visualize.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Visualize one mesh without segmentation 4 | #./mshview ../data/off/1.off 5 | 6 | # s: View segmentation 7 | # S: Toogle segment number 8 | 9 | # Visualize one mesh with segmentation 10 | 11 | ./mshview ../../Tools/MeshDecimation/data/1_decimated.off 12 | #./../MeshsegBenchmark-1.0/exe/mshview ../data/off/1.off -seg ../data/seg/Bench/1.seg -segments 13 | #./mshview ../data/off_decimated/343.off -seg ../data/seg/Bench_decimated/343.seg -segments --------------------------------------------------------------------------------