├── data
│   ├── ball_mask.png
│   ├── bear_mask.png
│   ├── cat_mask.png
│   ├── cow_mask.png
│   ├── pot1_mask.png
│   ├── pot2_mask.png
│   ├── ball_normal.mat
│   ├── bear_normal.mat
│   ├── buddha_mask.png
│   ├── bunny_mask.png
│   ├── bunny_normal.npy
│   ├── cat_normal.mat
│   ├── cow_normal.mat
│   ├── goblet_mask.png
│   ├── harvest_mask.png
│   ├── pot1_normal.mat
│   ├── pot2_normal.mat
│   ├── reading_mask.png
│   ├── buddha_normal.mat
│   ├── goblet_normal.mat
│   ├── harvest_normal.mat
│   └── reading_normal.mat
├── README.md
├── .gitignore
└── DGP.py

/data/ball_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/ball_mask.png
--------------------------------------------------------------------------------
/data/bear_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/bear_mask.png
--------------------------------------------------------------------------------
/data/cat_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/cat_mask.png
--------------------------------------------------------------------------------
/data/cow_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/cow_mask.png
--------------------------------------------------------------------------------
/data/pot1_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/pot1_mask.png
--------------------------------------------------------------------------------
/data/pot2_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/pot2_mask.png
--------------------------------------------------------------------------------
/data/ball_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/ball_normal.mat
--------------------------------------------------------------------------------
/data/bear_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/bear_normal.mat
--------------------------------------------------------------------------------
/data/buddha_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/buddha_mask.png
--------------------------------------------------------------------------------
/data/bunny_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/bunny_mask.png
--------------------------------------------------------------------------------
/data/bunny_normal.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/bunny_normal.npy
--------------------------------------------------------------------------------
/data/cat_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/cat_normal.mat
--------------------------------------------------------------------------------
/data/cow_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/cow_normal.mat
--------------------------------------------------------------------------------
/data/goblet_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/goblet_mask.png
--------------------------------------------------------------------------------
/data/harvest_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/harvest_mask.png
--------------------------------------------------------------------------------
/data/pot1_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/pot1_normal.mat
--------------------------------------------------------------------------------
/data/pot2_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/pot2_normal.mat
--------------------------------------------------------------------------------
/data/reading_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/reading_mask.png
--------------------------------------------------------------------------------
/data/buddha_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/buddha_normal.mat
--------------------------------------------------------------------------------
/data/goblet_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/goblet_normal.mat
--------------------------------------------------------------------------------
/data/harvest_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/harvest_normal.mat
--------------------------------------------------------------------------------
/data/reading_normal.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kwong292521/DGP/HEAD/data/reading_normal.mat
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# DGP
A Python implementation of the paper [Surface-from-Gradients: An Approach Based on Discrete Geometry Processing](https://www.cv-foundation.org/openaccess/content_cvpr_2014/html/Xie_Surface-from-Gradients_An_Approach_2014_CVPR_paper.html)

This method reconstructs a surface from the normal map computed by photometric stereo.

In this Python implementation, Cholesky factorization of the sparse system cannot be used, since scipy does not provide a sparse Cholesky factorization (the implementation described in the paper is written in C++ with the help of the TAUCS library, and I will try to implement the C++ version in the future).
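
If you do need a sparse Cholesky solve in Python, a minimal sketch is shown below. It is not used by this repository and assumes the optional `scikit-sparse` package (a wrapper around CHOLMOD) is installed; it factorizes the normal equations `A.T @ A` and then solves them by triangular substitution:
```
from scipy.sparse import csc_matrix
from sksparse.cholmod import cholesky  # assumption: scikit-sparse is installed

def solve_normal_equations(A, b):
    # solve min ||Ax - b||^2 via a sparse Cholesky factorization of A.T @ A
    factor = cholesky(csc_matrix(A.T @ A))  # CHOLMOD factorization of the SPD matrix
    return factor(A.T @ b)                  # triangular solves for (A.T @ A) x = A.T @ b
```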

I wrote an explanation of the paper [here](https://blog.csdn.net/SZU_Kwong/article/details/123013606).

## Run the code
Clone the repository and run DGP.py.

If you use your own data:
```
python DGP.py -n [path-to-normal-file] -m [path-to-mask-file] -o [path-to-output-obj-file] -i 1
```
The default is the bunny object.

If you want to test an object from the DiLiGenT dataset, such as cat:
```
python DGP.py --obj cat_mat_png -i 1
```
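
You can also call the solver from Python directly. A minimal sketch using the functions defined in DGP.py and the bundled bunny data:
```
import numpy as np
from DGP import load_normal_mask, DGP_solver, write_obj

normal, mask = load_normal_mask('./data/bunny_normal.npy', './data/bunny_mask.png')
depth_map = DGP_solver(normal, mask, h_value=1, iters=5, solver='cg')

# export the reconstructed mesh, mirroring what the __main__ block does
r, c = np.where(depth_map != 0)
write_obj(np.array([r, c]).T, mask, depth_map, 1, './data/bunny.obj')
```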
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
--------------------------------------------------------------------------------
/DGP.py:
--------------------------------------------------------------------------------
import numpy as np
from scipy.sparse import coo_matrix, csr_matrix, csc_matrix, vstack
from scipy.sparse.linalg import cg, spsolve_triangular, spsolve, splu
from scipy.linalg import cholesky
import scipy.io as sio
import cv2
import time
import argparse


def load_normal_mask(normal_path, mask_path):
    # load the normal map (.npy, .mat with key 'normal', or an RGB image)
    if normal_path[-4:] == '.npy':
        normal = np.load(normal_path)
    elif normal_path[-4:] == '.mat':
        normal = sio.loadmat(normal_path)
        normal = normal['normal']
    else:
        normal = cv2.cvtColor(cv2.imread(normal_path), cv2.COLOR_BGR2RGB)

    # load the mask (.npy or an image file)
    if mask_path[-4:] == '.npy':
        mask = np.load(mask_path)
    else:
        mask = cv2.imread(mask_path)

    # collapse a multi-channel mask to a single channel and binarize it
    if mask.ndim == 2:
        pass
    elif mask.shape[-1] == 3:
        mask = np.linalg.norm(mask, axis=-1)
    else:
        mask = np.linalg.norm(mask, axis=0)
    mask = (mask != 0)

    return normal, mask

def write_obj(points, mask, depth, h_value, save_path):
    """
    Write the 3D object into an .obj file.
    Params:
        points -- (r, c) pixel indices of the vertices
        mask -- mask of the object; should be a boolean image
        depth -- depth map of the object
        h_value -- the actual width of one pixel, 1 is recommended
        save_path -- path of the saved .obj file
    The triangulation procedure is from https://github.com/gray0018/Discrete-normal-integration/blob/master/main.py
    """
    f = open(save_path, 'w')

    # vertices
    for (r, c) in points:
        seq = 'v' + ' ' + str(float(r)*h_value) + ' ' + str(float(c)*h_value) + ' ' + str(depth[r, c]) + '\n'
        f.writelines(seq)

    # assign a 1-based index to every vertex inside the mask
    vidx = np.zeros_like(mask, dtype=np.uint32)
    vidx[mask] = np.arange(np.sum(mask)) + 1

    # cyclic shift to the left by 1 pixel, so right[i, j] holds the index of the pixel to the right of (i, j)
    right = np.roll(vidx, -1, axis=1)
    right[:, -1] = 0
    right_mask = right > 0

    # cyclic shift up by 1 pixel, so down[i, j] holds the index of the pixel below (i, j)
    down = np.roll(vidx, -1, axis=0)
    down[-1, :] = 0
    down_mask = down > 0

    # cyclic shift left and then up, both by 1 pixel, so rd[i, j] holds the index of the pixel to the lower right of (i, j)
    rd = np.roll(vidx, -1, axis=1)
    rd = np.roll(rd, -1, axis=0)
    rd[-1, :] = 0
    rd[:, -1] = 0
    rd_mask = rd > 0

    up_tri = mask & rd_mask & right_mask
    low_tri = mask & down_mask & rd_mask

    # triangles formed by a point together with its lower-right and right neighbours
    mesh = np.vstack((vidx[up_tri], rd[up_tri], right[up_tri])).T
    for i, j, k in mesh:
        seq = 'f' + ' ' + str(i) + ' ' + str(j) + ' ' + str(k) + '\n'
        f.writelines(seq)
    # triangles formed by a point together with its lower and lower-right neighbours
    mesh = np.vstack((vidx[low_tri], down[low_tri], rd[low_tri])).T
    for i, j, k in mesh:
        seq = 'f' + ' ' + str(i) + ' ' + str(j) + ' ' + str(k) + '\n'
        f.writelines(seq)

    f.close()


def DGP_solver(normal, mask, h_value=1, iters=5, eps=0.0871557, solver='Cholesky'):
    """
    Reconstruct depth from a normal map
    using the method in "Surface-from-Gradients: An Approach Based on Discrete Geometry Processing".
    Params:
        normal -- normal map with shape (H, W, 3)
        mask -- ROI mask of the object, shape (H, W), boolean image
        h_value -- the parameter h in the paper, default is 1
        iters -- number of iterations, default is 5
        eps -- outlier threshold on the z component of a normal, default is 0.0871557, i.e. sin(5°);
               facets whose normal has n_z below eps are treated as outliers; if eps is None, the
               outlier handling procedure is not executed
        solver -- method to solve the least-squares equation, options: Cholesky, LU, direct, cg.
                  Cholesky is not implemented
    Rets:
        depth_map -- reconstructed depth map

    coordinate system:                 facet:
         y|
          |                   v{i,j+1} _______ v{i+1,j+1}
          |_______x                   |       |
         /                            |       |
        /                      v{i,j} |_______| v{i+1,j}
      z/

    Notice: the given normal map should be in the same coordinate system.

    Thanks to the code at "https://github.com/hoshino042/NormalIntegration/blob/main/methods/orthographic_DGP.py"
    for some inspiration, which can be seen in this code; however, that code does not reproduce the paper correctly,
    so this code implements a version closer to the paper.
    """
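    # Note on the local shaping step used later in this function: the active projection is the centred variant
    # (the paper's original variant is kept in the comments below). A facet with centre depth c and unit normal
    # (nx, ny, nz) projects the vertex offset by (dx, dy) from its centre, with dx, dy in {-h/2, +h/2}, to
    #     z = c - (nx*dx + ny*dy) / nz
    # e.g. with n = (0, 0, 1) all four projected vertices keep the centre depth c, so the facet stays flat.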
    # image size
    img_H, img_W = mask.shape

    # get facet indices
    facet_idx = np.zeros_like(mask, dtype=np.int32)
    facet_idx[mask] = np.arange(np.sum(mask))

    # get the masks of the four vertices of all facets
    # top_left <-> v{i, j+1}, top_right <-> v{i+1, j+1}, bottom_left <-> v{i,j}, bottom_right <-> v{i+1,j}
    top_left_mask = np.pad(mask, ((0, 1), (0, 1)), "constant", constant_values=0)
    top_right_mask = np.pad(mask, ((0, 1), (1, 0)), "constant", constant_values=0)
    bottom_left_mask = np.pad(mask, ((1, 0), (0, 1)), "constant", constant_values=0)
    bottom_right_mask = np.pad(mask, ((1, 0), (1, 0)), "constant", constant_values=0)

    # get the indices of the vertices
    vertex_mask = np.logical_or.reduce((top_right_mask, top_left_mask, bottom_right_mask, bottom_left_mask))
    vertex_idx = np.zeros((img_H + 1, img_W + 1), dtype=np.int32)
    vertex_idx[vertex_mask] = np.arange(np.sum(vertex_mask))

    # get the numbers of facets and vertices
    num_facet = np.sum(mask)
    num_vertex = np.sum(vertex_mask)

    # get the indices of the four vertices of every facet
    top_left_vertex = vertex_idx[top_left_mask].flatten()
    top_right_vertex = vertex_idx[top_right_mask].flatten()
    bottom_left_vertex = vertex_idx[bottom_left_mask].flatten()
    bottom_right_vertex = vertex_idx[bottom_right_mask].flatten()
    # in facet_id_vertice_id, the row index corresponds to the facet index and the four elements of each row
    # are the indices of that facet's four vertices
    facet_id_vertice_id = np.hstack((bottom_left_vertex[:, None],
                                     bottom_right_vertex[:, None],
                                     top_right_vertex[:, None],
                                     top_left_vertex[:, None]))

    # initialize the depth of every facet centre c{i,j} to zero
    depth = np.zeros(num_facet, dtype=normal.dtype)

    # get the x, y, z components of the normal of every facet
    nx = normal[mask, 0]
    ny = normal[mask, 1]
    nz = normal[mask, 2]

    # detect abnormal normals (outliers) and substitute them with the current facet normal; before the first
    # iteration, every facet normal is (0, 0, 1)
    if eps is not None:
        outlier_mask = nz < eps
        outlier_idx = np.where(outlier_mask)[0]
        nx[outlier_mask] = 0
        ny[outlier_mask] = 0
        nz[outlier_mask] = 1

    ############### local shaping -> global blending #############

    ############### Construct A and b of the linear system Ax=b ###############
    ### A is built only from the matrix N in the paper, which is a sparse matrix
    ### x is the depth of each vertex after global blending
    ### b is a column vector built from the relative vectors N p{i,j}, where p{i,j} is the projected facet after local shaping
    ### m and n are the numbers of facets and vertices, respectively
    ### shapes: A (4m x n), x (n x 1), b (4m x 1)
    ###############################################
    ### A can be constructed beforehand because it is the same in every iteration; b has to be updated during the iterations
    ###############################################

    # A and b are assembled in 4 blocks (four m x n matrices); block i is filled with the products of row i of N and z(f_ij)
    # (f_ij here is the facet after global blending). Each block has four non-zero values per row (the elements of row i of N);
    # a row stands for a facet, and the column indices of the four values are the indices of that facet's four vertices.
    # Multiplying block i with x yields component i of the relative vectors of all facets; the counterpart in b is the
    # product of row i of N with p(f_ij).
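    # Equivalently, N = I_4 - (1/4) * ones((4, 4)): applying row i of N to the four vertex depths of a facet gives
    # "depth of vertex i minus the mean depth of that facet", so A @ x stacks these centred vertex depths for all
    # facets, and the least-squares fit matches them to the centred depths of the locally shaped facets stored in b.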
    row_idx = np.arange(num_facet)
    row_idx = np.repeat(row_idx, 4)
    col_idx = facet_id_vertice_id.flatten()

    N1 = [0.75, -0.25, -0.25, -0.25] * num_facet
    A1 = coo_matrix((N1, (row_idx, col_idx)), shape=(num_facet, num_vertex))

    N2 = [-0.25, 0.75, -0.25, -0.25] * num_facet
    A2 = coo_matrix((N2, (row_idx, col_idx)), shape=(num_facet, num_vertex))

    N3 = [-0.25, -0.25, 0.75, -0.25] * num_facet
    A3 = coo_matrix((N3, (row_idx, col_idx)), shape=(num_facet, num_vertex))

    N4 = [-0.25, -0.25, -0.25, 0.75] * num_facet
    A4 = coo_matrix((N4, (row_idx, col_idx)), shape=(num_facet, num_vertex))

    A = vstack([A1, A2, A3, A4])

    if solver == 'Cholesky':
        # perform a Cholesky factorization of the symmetric positive definite matrix A.T@A -> A.T@A = L@L.T
        # P.S. scipy cannot perform a Cholesky factorization of a sparse matrix, so another solver has to be used
        raise NotImplementedError
        L = cholesky(A.T@A, lower=True)
        L = csr_matrix(L)
    elif solver == 'LU':
        # A = csc_matrix(A)
        LU = splu(csc_matrix(A.T@A))

    for _ in range(iters):
        ############## Step 1. local shaping #############
        # get the projected vertices according to eq. (2) in the paper. Two projection variants are given here: the
        # latter (commented out) is the one from the paper, and the former is a substitute that is also effective.
        projection_bottom_left = depth - (-0.5*nx*h_value - 0.5*ny*h_value) / nz
        projection_bottom_right = depth - (0.5*nx*h_value - 0.5*ny*h_value) / nz
        projection_top_right = depth - (0.5*nx*h_value + 0.5*ny*h_value) / nz
        projection_top_left = depth - (-0.5*nx*h_value + 0.5*ny*h_value) / nz

        # projection_bottom_left = depth
        # projection_bottom_right = depth - nx*h_value / nz
        # projection_top_right = depth - (nx*h_value + ny*h_value) / nz
        # projection_top_left = depth - ny*h_value / nz

        ############# Step 2. global blending ############
        projection = np.array([
            projection_bottom_left, projection_bottom_right, projection_top_right, projection_top_left
        ])

        b1 = (np.array([[0.75, -0.25, -0.25, -0.25]]) @ projection).T
        b2 = (np.array([[-0.25, 0.75, -0.25, -0.25]]) @ projection).T
        b3 = (np.array([[-0.25, -0.25, 0.75, -0.25]]) @ projection).T
        b4 = (np.array([[-0.25, -0.25, -0.25, 0.75]]) @ projection).T

        b = np.concatenate((b1, b2, b3, b4))

        if solver == 'Cholesky':
            # Ax=b -> L@L.T@x=b -> L@y=b -> L.T@x=y
            raise NotImplementedError
            y = spsolve_triangular(L, b, lower=True)
            x = spsolve_triangular(L.T, y, lower=False)
        elif solver == 'cg':
            x, _ = cg(A.T @ A, A.T @ b, maxiter=1000, tol=1e-9)
        elif solver == 'direct':
            x = spsolve(A.T @ A, A.T @ b)
        elif solver == 'LU':
            x = LU.solve(A.T @ b)
        # drop the trailing singleton dimension that the LU/direct solvers may return
        x = np.squeeze(x)

        # get the depth of c{i,j} by computing the mean of the facet's four vertex depths
        depth_vertex_per_facet = x[facet_id_vertice_id]
        depth = np.mean(depth_vertex_per_facet, axis=-1)

        # Update the abnormal normals of the outlier facets.
        # Method: a facet has four vertices, and any three of them determine a plane (i.e. a plane determined by two
        # vectors); we take two groups of three points each, obtain two normals, and average them to get
        # the new normal of the outlier facet
        if eps is not None:
            outlier_facet_vertex_depth = depth_vertex_per_facet[outlier_mask, :]
            for i in range(outlier_facet_vertex_depth.shape[0]):
                v1 = np.array([h_value, 0, outlier_facet_vertex_depth[i, 0] - outlier_facet_vertex_depth[i, 1]])
                v2 = np.array([0, h_value, outlier_facet_vertex_depth[i, 2] - outlier_facet_vertex_depth[i, 1]])
                v3 = np.array([h_value, h_value, outlier_facet_vertex_depth[i, 3] - outlier_facet_vertex_depth[i, 1]])
                # [v1, v3]n = 0 for general solution n1, [v2, v3]n = 0 for general solution n2;
                # the eigenvector of V.T@V with the smallest eigenvalue is the direction closest to orthogonal to both rows of V
                V = np.array([v1, v3])
                eval, evec = np.linalg.eig(V.T@V)
                n1 = evec[:, np.argmin(eval)]
                V = np.array([v2, v3])
                eval, evec = np.linalg.eig(V.T@V)
                n2 = evec[:, np.argmin(eval)]
                n = (n1 + n2) / 2
                # update
                nx[outlier_idx[i]], ny[outlier_idx[i]], nz[outlier_idx[i]] = n[0], n[1], n[2]

    depth_map = np.zeros(mask.shape)
    depth_map[mask] = depth

    return depth_map


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Surface-from-Gradients: An Approach Based on Discrete Geometry Processing.")
    parser.add_argument('--obj', default=None, help='the object name in the data folder; the format should be objName_objNormalSuffix_objMaskSuffix')
    parser.add_argument('-n', '--normal_path', default='./data/bunny_normal.npy', help='the path of the normal map; npy, mat and image files are supported')
    parser.add_argument('-m', '--mask_path', default='./data/bunny_mask.png', help='the path of the object mask; npy and image files are supported')
    parser.add_argument('-o', '--output_path', default='./data/bunny.obj', help='the path of the output obj file')
    parser.add_argument('-w', '--h_value', default=1, help='the width of one pixel')
    parser.add_argument('-i', '--iters', default=5, help='the number of iterations of the DGP algorithm')
    parser.add_argument('-s', '--solver', default='cg', help='the sparse linear system solver', choices=['cg', 'Cholesky', 'direct', 'LU'])
    par = parser.parse_args()

    if par.obj is not None:
        items = par.obj.split('_')
        normal_path = './data/' + items[0] + '_normal' + '.' + items[1]
        mask_path = './data/' + items[0] + '_mask' + '.' + items[2]
        output_path = './data/' + items[0] + '.obj'
    else:
        normal_path = par.normal_path
        mask_path = par.mask_path
        output_path = par.output_path

    normal, mask = load_normal_mask(normal_path, mask_path)

    start_time = time.time()
    depth_map = DGP_solver(normal, mask, h_value=float(par.h_value), iters=int(par.iters), solver=par.solver)
    end_time = time.time()

    r, c = np.where(depth_map != 0)
    points = np.array([r, c]).T
    write_obj(points, mask, depth_map, float(par.h_value), output_path)

    print("Reconstruction finished! Time used: {:.5f}s".format(end_time - start_time))
--------------------------------------------------------------------------------