--------------------------------------------------------------------------------
/docs/build/html/ex_nerf.html:
--------------------------------------------------------------------------------

In this example, we load a stored PlenOctree npz model file and render it using VolumeRenderer. You can download the npz from: https://drive.google.com/file/d/1XRi_YqdcDrfwqOqV3QPoEOZynuHAGI5M/view?usp=sharing
import svox
import torch
import matplotlib.pyplot as plt

device = 'cuda:0'

# Load the stored PlenOctree and attach a volume renderer to it
t = svox.N3Tree.load("lego.npz", device=device)
r = svox.VolumeRenderer(t)

# Camera-to-world matrix copied from lego test set image 0
c2w = torch.tensor([[ -0.9999999403953552, 0.0, 0.0, 0.0 ],
                    [ 0.0, -0.7341099977493286, 0.6790305972099304, 2.737260103225708 ],
                    [ 0.0, 0.6790306568145752, 0.7341098785400391, 2.959291696548462 ],
                    [ 0.0, 0.0, 0.0, 1.0 ],
                    ], device=device)

# Render an 800x800 view (fx is the focal length in pixels) and display it
with torch.no_grad():
    im = r.render_persp(c2w, height=800, width=800, fx=1111.111).clamp_(0.0, 1.0)

plt.imshow(im.cpu())
plt.show()
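The pose above corresponds to a camera at roughly (0, 2.74, 2.96) looking at the origin, with +z as the world up direction. If you want other views, you can assemble the camera-to-world matrix yourself and reuse the same render_persp call. The sketch below is only an illustration under those assumptions (OpenGL/NeRF-style convention: the matrix columns are the camera's right, up and backward axes, so the camera looks along its own -z axis); look_at and the orbit radius/height are made-up helper names and values, not part of the svox API, and r and device are reused from the example above.

import math
import torch

def look_at(eye, target, up):
    # Build a camera-to-world matrix whose camera sits at `eye` and looks toward
    # `target`, with -z as the viewing direction (the convention assumed above).
    forward = target - eye
    forward = forward / forward.norm()
    right = torch.cross(forward, up, dim=-1)
    right = right / right.norm()
    true_up = torch.cross(right, forward, dim=-1)
    c2w = torch.eye(4)
    c2w[:3, 0] = right
    c2w[:3, 1] = true_up
    c2w[:3, 2] = -forward   # camera z axis points away from the scene
    c2w[:3, 3] = eye
    return c2w

target = torch.zeros(3)                # orbit around the scene origin
up = torch.tensor([0.0, 0.0, 1.0])     # assumed world up for the lego scene
xy_radius, z_height = 3.0, 2.7         # roughly the same camera distance as above

frames = []
with torch.no_grad():
    for azimuth_deg in range(0, 360, 45):
        a = math.radians(azimuth_deg)
        eye = torch.tensor([xy_radius * math.cos(a), xy_radius * math.sin(a), z_height])
        c2w = look_at(eye, target, up).to(device)
        # Half the resolution and half the focal length keep the field of view unchanged
        im = r.render_persp(c2w, height=400, width=400, fx=555.555).clamp_(0.0, 1.0)
        frames.append(im.cpu())

Each entry of frames is an (H, W, 3) image like im above, so the same plt.imshow call can display them, or they can be written out to build a turntable video.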
This is a PlenOctree volume rendering implementation as a PyTorch extension with CUDA acceleration.

Note that this only implements the octree (or, more generally, N^3 tree) operations and differentiable volume rendering. It does not include the NeRF training part of the project and does not involve a neural network.
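Because the rendering is differentiable, gradients from an image-space loss can flow back into the values stored at the tree's leaves. The following is a minimal sketch of that idea, not a training recipe: it assumes the tree's per-leaf data is registered as an ordinary PyTorch parameter (so that tree.parameters() picks it up), and the camera pose, intrinsics, and target image are placeholder values chosen only for illustration.

import torch
import svox

device = 'cuda:0'

# data_dim=4 as in the quick example below; the exact channel interpretation
# is an assumption and nothing in this sketch depends on it.
tree = svox.N3Tree(data_dim=4).to(device)
renderer = svox.VolumeRenderer(tree)

# Placeholder camera: identity rotation, pulled back along +z.
c2w = torch.eye(4, device=device)
c2w[2, 3] = 3.0

target = torch.rand(200, 200, 3, device=device)   # stand-in for a real training image

# Assumes N3Tree behaves like a torch.nn.Module and exposes its leaf data as a parameter.
optimizer = torch.optim.SGD(tree.parameters(), lr=1e-2)

for _ in range(10):
    optimizer.zero_grad()
    im = renderer.render_persp(c2w, height=200, width=200, fx=300.0)
    loss = ((im - target) ** 2).mean()
    loss.backward()    # gradients reach the per-leaf values through the renderer
    optimizer.step()

In practice the poses and target images would come from a real dataset; the loop above only shows where the gradient enters.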
The code is available in this repo. This is part of the code release of the PlenOctrees project.
Install with pip install svox.
>>> import svox
>>> tree = svox.N3Tree(data_dim=4)
>>> print(tree)
svox.N3Tree(N=2, data_dim=4, depth_limit=10; capacity:1/1 max_depth:0)
>>> tree.to('cuda:0')

data_dim is the size of the data vector stored at each leaf.
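The printed summary is also a convenient way to inspect a tree loaded from disk. As a small illustration, reusing the lego.npz file from the rendering example above (the concrete numbers in the repr depend on that stored model, and exposing data_dim as an attribute is an assumption on our part):

>>> import svox
>>> t = svox.N3Tree.load("lego.npz", device='cuda:0')
>>> print(t)           # reports N, data_dim, depth_limit, capacity and max_depth for the loaded tree
>>> print(t.data_dim)  # assumed attribute; otherwise read the value from the repr line above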
Please see Reference for detailed per-method documentation and Quick Guide for a quick overview of features. This is the documentation of svox version: 0.2.32-dev.

Contents: