├── .gitignore ├── LICENSE ├── README.md ├── binary-to-json.js ├── decode-binary.js ├── decode-json.js ├── encode-binary.js ├── encode-json.js ├── json-to-binary.js ├── lib ├── decoder.js ├── encoder.js ├── heap.js └── neighbors.js ├── package.json └── test ├── binary-convert.js ├── decode-I.js ├── decode-mC.js ├── decoder.js ├── encode-I.js ├── encode-mC.js ├── encoder.js ├── heap.js ├── test-I.js ├── test-bunny.js ├── test-codec.js ├── test-dragon.js └── test-mC.js /.gitignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | 10 | pids 11 | logs 12 | results 13 | 14 | npm-debug.log 15 | node_modules/* 16 | *.DS_Store -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | The MIT License (MIT) 3 | 4 | Copyright (c) 2014 Mikola Lysenko 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 3p: Progressive Triangle Streams 2 | ================================ 3 | 4 | Progressive triangle streams are an implementation of [Hugues Hoppe's progressive meshes](http://research.microsoft.com/en-us/um/people/hoppe/proj/pm/) with minor modifications favoring fast decoding over visual fidelity. The format is flexible and different codecs can choose different strategies for splitting vertices. This module documents progressive triangle streams and implements reference codecs for the binary and JSON formats. The intention of this file format is to provide a basic container format for experimenting with different strategies for performing edge collapses on meshes, and to provide a common language for processing progressive mesh data. 5 | 6 | ## Why use 3p? 7 | 8 | Progressive meshes have two advantages over standard mesh representations like indexed face lists: 9 | 10 | 1. They are typically much smaller. In a binary 3P file, the topology data of the mesh uses 1/4 as much space as in a binary indexed triangle mesh and up to 1/10 as much space as an ASCII encoded equivalent. 11 | 2. They can be loaded incrementally. It is possible to process a truncated 3P file and recover an approximate geometry immediately. 
This decreases the amount of time spent waiting for geometry to load. 12 | 13 | Like the PLY file format, 3P files can specify arbitrary vertex and face data. 3P is also a lossless encoding, so attributes like vertex positions are not truncated in intermediate representations. 3P can be combined with standard HTTP compression schemes like gzip for further size reductions. 14 | 15 | ## Other implementations 16 | 17 | # Reference Codec API 18 | 19 | These reference codecs are installable via npm: 20 | 21 | ``` 22 | npm install 3p 23 | ``` 24 | 25 | Once installed, they can be required and used as CommonJS modules. 26 | 27 | **Note** The reference codecs are not optimized for performance. 28 | 29 | ## Encoder 30 | 31 | ### JSON 32 | 33 | ##### `require('3p/encode-json')(vertexCount, cells[, vertexAttributes, cellAttributes, vertexTypes, cellTypes])` 34 | 35 | Compresses a triangulated mesh with `vertexCount` vertices into a JSON formatted progressive triangle stream. 36 | 37 | * `cells` is a list of triangles, each encoded as a list of 3 vertex indices 38 | * `vertexAttributes` is an optional array of vertex attributes 39 | * `cellAttributes` is an optional array of per-face attributes; the optional `vertexTypes` and `cellTypes` declare attribute types and are inferred from the attribute data when omitted 40 | 41 | **Returns** A 3PJ encoded mesh object 42 | 43 | ### Binary 44 | 45 | ##### `require('3p/encode-binary')(vertexCount, cells[, vertexAttributes, cellAttributes, vertexTypes, cellTypes])` 46 | 47 | Same interface as above, except it returns a Node.js `Buffer` object storing a binary 3PB file. 48 | 49 | ## Decoder 50 | 51 | ### JSON 52 | 53 | ##### `require('3p/decode-json')(json)` 54 | 55 | Decodes a JSON formatted 3PJ object. 56 | 57 | * `json` is a plain old JavaScript object storing the parsed 3PJ data 58 | 59 | **Returns** An object representing the mesh with the following properties: 60 | 61 | * `cells` is an array storing the faces of the mesh 62 | * `vertexAttributes` is an array of vertex attributes 63 | * `cellAttributes` is an array of cell attributes 64 | 65 | ### Binary 66 | 67 | ##### `require('3p/decode-binary')(buffer)` 68 | 69 | Same as above, except it takes a binary 3PB buffer instead of a JSON object. 70 | 71 | ## JSON and binary conversion 72 | 73 | ##### `require('3p/json-to-binary')(json)` 74 | Converts a JSON 3PJ object to a binary 3PB buffer. 75 | 76 | * `json` is a 3PJ JavaScript object 77 | 78 | **Returns** A `Buffer` representing a binary `3PB` file 79 | 80 | ##### `require('3p/binary-to-json')(buffer)` 81 | Converts a binary 3PB buffer to a JSON 3PJ object. 82 | 83 | * `buffer` is a `Buffer` encoding a 3PB object 84 | 85 | **Returns** A JSON 3PJ object 86 | 87 | # Format description 88 | 89 | Progressive triangle streams encode 3D triangulated meshes as an initial mesh followed by a sequence of vertex split operations. They can carry any number of vertex and/or face attributes, and can be truncated to produce approximations of the initial geometry. Progressive triangle streams support two distinct formats: a reference JSON format for debugging and a binary format. Both formats store equivalent information. 90 | 91 | ## JSON format (.3PJ) 92 | 93 | For debugging purposes, 3P supports a JSON format. A JSON formatted progressive triangle stream contains the same data as its binary counterpart; a rough sketch of the layout is shown below, followed by a description of each field. 
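Concretely, a 3PJ object is laid out roughly as in the sketch below. The values are illustrative placeholders rather than data from a real mesh (the attribute name `positions` is just an example), and each field is described in detail after the sketch:

```
{
  "header": {
    "version": "1.0.0",
    "vertexCount": 8,
    "cellCount": 12,
    "vertexAttributeTypes": [ { "name": "positions", "count": 3, "type": "float32" } ],
    "cellAttributeTypes": []
  },
  "initialComplex": {
    "vertexCount": 4,
    "cellCount": 4,
    "cells": [ [0, 1, 2], [0, 2, 3], ... ],
    "vertexAttributes": [ [ [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], ... ] ],
    "cellAttributes": []
  },
  "vertexSplits": [
    {
      "baseVertex": 2,
      "attributes": [ [0.5, 0.5, 1.0] ],
      "left": 1,
      "leftOrientation": true,
      "leftAttributes": [],
      "right": 3,
      "rightOrientation": false,
      "rightAttributes": []
    },
    ...
  ]
}
```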
Each `3P` JSON object has 3 fields with the following data: 94 | 95 | * The file header, storing: 96 | + `version` - a string representing the version of the 3P file in semver format 97 | + `vertexCount` - the number of vertices in the stream 98 | + `cellCount` - the number of cells in the stream 99 | + `vertexAttributeTypes` - an array of types for each vertex attribute 100 | + `cellAttributeTypes` - an array of types for each cell attribute 101 | * An initial triangulated mesh, with the following arrays: 102 | + `cells` - an array of 3 tuples of integers representing the vertex indices for each triangle 103 | + `vertexAttributes` - an array of arrays of vertex attributes 104 | + `cellAttributes` - an array of arrays of cell attributes 105 | * An array of vertex split operations, each having the following properties: 106 | + `baseVertex` - the vertex to split 107 | + `attributes` - attributes for the newly created vertex 108 | + `left` - index of left vertex in 1-ring around base vertex 109 | + `leftOrientation` - orientation of left face 110 | + `leftAttributes` - attributes for left face 111 | + `right` - index of right vertex in 1-ring around base vertex 112 | + `rightOrientation` - orientation of right face 113 | + `rightAttributes` - attributes for right face 114 | 115 | Each type declaration should have the following data: 116 | 117 | * `name` which is an ASCII string storing the name of the attribute 118 | * `count` which is the number of components in each attribute value 119 | * `type` which is a string encoding the scalar type of the attribute 120 | 121 | The possible values for `type` are as follows: 122 | 123 | * `uint8` an unsigned 8 bit integer 124 | * `uint16` an unsigned 16 bit integer 125 | * `uint32` an unsigned 32 bit integer 126 | * `int8` a signed 8 bit integer 127 | * `int16` a signed 16 bit integer 128 | * `int32` a signed 32 bit integer 129 | * `float32` a 32 bit IEEE floating point number 130 | * `float64` a 64 bit IEEE floating point number 131 | 132 | JSON formatted progressive triangle streams should use the file extension .3PJ. 133 | 134 | ## Binary format (.3PB) 135 | 136 | * All multi-byte values are stored in network (big endian) byte order 137 | * The .3PB file extension is preferred 138 | 139 | ``` 140 | struct S3PBFile { 141 | uint8[4] "3PB\n" 142 | S3PBHeader header 143 | S3PBComplex initialComplex 144 | S3PBVertexSplit[] vertexSplits 145 | } 146 | 147 | struct S3PBHeader { 148 | uint32 splitOffset 149 | uint32 majorVersion 150 | uint32 minorVersion 151 | uint32 patchVersion 152 | uint32 vertexCount 153 | uint32 cellCount 154 | uint32 vertexAttributeCount 155 | uint32 cellAttributeCount 156 | S3PBAttribute[] vertexAttributeTypes 157 | S3PBAttribute[] cellAttributeTypes 158 | } 159 | 160 | struct S3PBAttribute { 161 | uint32 count 162 | S3PBAttributeType type 163 | uint32 nameLength 164 | char[] name 165 | } 166 | 167 | enum S3PBAttributeType: uint32 { 168 | uint8: 0 169 | uint16: 1 170 | uint32: 2 171 | int8: 3 172 | int16: 4 173 | int32: 5 174 | float32: 6 175 | float64: 7 176 | } 177 | 178 | struct S3PBComplex { 179 | uint32 initialVertexCount 180 | uint32 initialCellCount 181 | VertexAttribute[] vertexAttributes 182 | uint32[3][] cells 183 | CellAttribute[] cellAttributes 184 | } 185 | 186 | struct S3PBVertexSplit { 187 | uint32 baseVertex 188 | uint8 left 189 | uint8 right 190 | VertexAttribute attributes 191 | CellAttribute leftAttributes 192 | CellAttribute rightAttributes 193 | } 194 | ``` 195 | 196 | ## Notes 197 | 198 | * Non-manifold vertices must be stored in the initial complex, since non-manifold vertices and their neighbors can't be split 199 | * In `S3PBVertexSplit`, the upper bit of `left` and `right` stores the orientation of the corresponding face 200 | * The lower 7 bits of `left` and `right` are an index into the neighbors of the base vertex 201 | * `splitOffset` 
is the byte offset of the start of the vertex split section 202 | * Attribute names are stored as ASCII text 203 | * Encoders must not collapse edges incident to non-manifold or boundary vertices 204 | * Vertices with more than 15 neighbors must not be split 205 | * Encoders should prioritize edge collapses with minimal visual impact on the rendered image 206 | * Binary decoders should gracefully handle truncated input 207 | * Encoders are not required to preserve the index of each vertex; encoding/decoding may permute the order of cells/vertices in the mesh. 208 | * Encoding must preserve topology and all attributes 209 | * Codecs may collapse vertices in any order; the exact order is implementation-defined 210 | * If a decoder receives more vertices or cells than are specified in the header, then it should terminate 211 | * `cellCount` and `vertexCount` should describe the total number of cells and vertices in the stream. If more vertices are encountered in the stream, the decoder may choose to continue processing additional splits 212 | * For each vertex split, the `baseVertex` must refer to a previously decoded vertex in the stream 213 | 214 | # Benchmarks and comparisons 215 | 216 | Sizes are in bytes. 217 | 218 | | Mesh | JSON | 3p | 3p + gzip | 219 | | --------------- | ---- | --- | --------- | 220 | | Stanford bunny | 110361 | 33190 | 27531 | 221 | | Stanford dragon | 2227889 | 643000 | 422663 | 222 | 223 | 224 | 225 | # References 226 | 227 | **TODO** 228 | 229 | # License 230 | 231 | Copyright 2014 Mikola Lysenko. MIT license -------------------------------------------------------------------------------- /binary-to-json.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = convertBinaryToJSON 4 | 5 | var MAGIC = '3PB\n' 6 | 7 | var TYPE_CODES = [ 8 | 'uint8', 9 | 'uint16', 10 | 'uint32', 11 | 'int8', 12 | 'int16', 13 | 'int32', 14 | 'float32', 15 | 'float64' 16 | ] 17 | 18 | var TYPE_SIZES = { 19 | 'uint8': 1, 20 | 'uint16': 2, 21 | 'uint32': 4, 22 | 'int8': 1, 23 | 'int16': 2, 24 | 'int32': 4, 25 | 'float32': 4, 26 | 'float64': 8 27 | } 28 | 29 | function computeTypeSize(typeInfo) { 30 | var size = 0 31 | for(var i=0; i= TYPE_CODES.length) { 61 | throw new Error('3p: invalid type number') 62 | } 63 | 64 | ptr += 12 65 | if(buffer.length < ptr + nameLength) { 66 | throw new Error('3p: error reading type name') 67 | } 68 | var name = buffer.slice(ptr, ptr+nameLength).toString('ascii') 69 | ptr += nameLength 70 | 71 | typeInfo.push({ 72 | count: count, 73 | type: TYPE_CODES[typeNo], 74 | name: name 75 | }) 76 | } 77 | buffer = buffer.slice(ptr) 78 | return typeInfo 79 | } 80 | 81 | function parseHeader() { 82 | if(buffer.length < 32) { 83 | throw new Error('3p: invalid header, too small') 84 | } 85 | 86 | //Skip splitOffset for now 87 | 88 | //Read in properties 89 | var majorVersion = buffer.readUInt32BE(4) 90 | var minorVersion = buffer.readUInt32BE(8) 91 | var patchVersion = buffer.readUInt32BE(12) 92 | var vertexCount = buffer.readUInt32BE(16) 93 | var cellCount = buffer.readUInt32BE(20) 94 | 95 | //Read attribute type info 96 | var vertexAttributeCount = buffer.readUInt32BE(24) 97 | var cellAttributeCount = buffer.readUInt32BE(28) 98 | buffer = buffer.slice(32) 99 | var vertexAttributeTypes = readAttributeTypes(vertexAttributeCount) 100 | var cellAttributeTypes = readAttributeTypes(cellAttributeCount) 101 | 102 | return { 103 | version: [ majorVersion, minorVersion, patchVersion ].join('.'), 104 | vertexCount: vertexCount, 105 | cellCount: cellCount, 106 | 
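// The attribute type descriptors below were produced by readAttributeTypes() above, which
// consumes the variable-length portion of the header that follows the fixed 32-byte prefix.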
vertexAttributeTypes: vertexAttributeTypes, 107 | cellAttributeTypes: cellAttributeTypes 108 | } 109 | } 110 | 111 | var header = parseHeader() 112 | 113 | //Compute vertex attribute size and cell attribute size 114 | var vtypes = header.vertexAttributeTypes 115 | var vertexSize = computeTypeSize(vtypes) 116 | 117 | var ctypes = header.cellAttributeTypes 118 | var cellSize = computeTypeSize(ctypes) 119 | 120 | function readAttributeValue(ptr, type) { 121 | var ftype = type.type 122 | var result = new Array(type.count) 123 | for(var i=0; i= 128 262 | var rightOrient = rightV >= 128 263 | var left = leftV & 0x7f 264 | var right = rightV & 0x7f 265 | 266 | result.push({ 267 | baseVertex: baseVertex, 268 | vertexAttributes: battr, 269 | left: left, 270 | leftOrientation: leftOrient, 271 | leftAttributes: lattr, 272 | right: right, 273 | rightOrientation: rightOrient, 274 | rightAttributes: rattr 275 | }) 276 | } 277 | 278 | return result 279 | } 280 | 281 | var vertexSplits = parseVertexSplits() 282 | 283 | return { 284 | header: header, 285 | initialComplex: initialComplex, 286 | vertexSplits: vertexSplits 287 | } 288 | } -------------------------------------------------------------------------------- /decode-binary.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = decodeBinary 4 | 5 | var decodeJSON = require('./decode-json') 6 | var toJSON = require('./binary-to-json') 7 | 8 | function decodeBinary(buffer) { 9 | return decodeJSON(toJSON(buffer)) 10 | } -------------------------------------------------------------------------------- /decode-json.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = decodeJSON 4 | 5 | var MeshDecoder = require('./lib/decoder') 6 | 7 | function decodeJSON(json) { 8 | var initial = json.initialComplex 9 | var decoder = new MeshDecoder( 10 | initial.vertexCount, 11 | initial.vertexAttributes, 12 | initial.cells, 13 | initial.cellAttributes) 14 | 15 | var vsplits = json.vertexSplits 16 | for(var i=0; i 0) { 99 | numVerts = vattributes[0].length 100 | } else { 101 | numVerts = maxVertex(cells) 102 | } 103 | 104 | vtypes = convertTypes(vtypes || guessTypes(vattributes)) 105 | ftypes = convertTypes(ftypes || guessTypes(fattributes)) 106 | 107 | checkTypes(numVerts, vtypes, vattributes) 108 | checkTypes(numCells, ftypes, fattributes) 109 | 110 | var mesh = new MeshEncoder( 111 | numVerts, 112 | numCells, 113 | cells, 114 | vattributes, 115 | fattributes) 116 | 117 | var nbhdList = [] 118 | 119 | var order = new Array(numVerts) 120 | var ecollapse = [] 121 | var counter = numVerts-1 122 | while(true) { 123 | var c = mesh.pop() 124 | if(!c) { 125 | break 126 | } 127 | ecollapse.push(c) 128 | nbhdList.push(mesh.neighbors[c.s].slice()) 129 | order[c.t] = counter-- 130 | } 131 | 132 | var header = { 133 | version: "1.0.0", 134 | vertexCount: numVerts, 135 | cellCount: numCells, 136 | vertexAttributeTypes: vtypes, 137 | cellAttributeTypes: ftypes 138 | } 139 | 140 | //Get base mesh 141 | var base = mesh.base() 142 | 143 | //Convert into JSON format 144 | var initial = { 145 | vertexCount: base.verts.length, 146 | cellCount: base.cells.length, 147 | cells: new Array(base.cells.length), 148 | cellAttributes: new Array(ftypes.length), 149 | vertexAttributes: new Array(vtypes.length) 150 | } 151 | for(var i=0; i=0; --i) { 185 | var e = ecollapse[i] 186 | var vattr = [] 187 | for(var j=0; j>>0 110 | var minorVersion = 
semverParts[1]>>>0 111 | var patchVersion = semverParts[2]>>>0 112 | 113 | buffer.writeUInt32BE(splitSectionOffset, 0) 114 | buffer.writeUInt32BE(majorVersion, 4) 115 | buffer.writeUInt32BE(minorVersion, 8) 116 | buffer.writeUInt32BE(patchVersion, 12) 117 | buffer.writeUInt32BE(header.vertexCount, 16) 118 | buffer.writeUInt32BE(header.cellCount, 20) 119 | buffer.writeUInt32BE(vertexTypes.types.length, 24) 120 | buffer.writeUInt32BE(cellTypes.types.length, 28) 121 | 122 | var ptr = 32 123 | 124 | var vtypes = vertexTypes.types 125 | for(var i=0; i leftI) { 144 | leftI = leftI + nn 145 | } 146 | var snbhd = new Array(leftI-rightI+2) 147 | var sstar = new Array(leftI-rightI+2) 148 | snbhd[0] = t 149 | sstar[0] = leftF 150 | for(var i=rightI; i<=leftI; ++i) { 151 | snbhd[i-rightI+1] = nbhd[i%nn] 152 | sstar[i-rightI+1] = star[i%nn] 153 | } 154 | sstar[1] = rightF 155 | shiftMin(snbhd, sstar) 156 | this.neighbors[s] = snbhd 157 | this.stars[s] = sstar 158 | 159 | //Split t neighborhood 160 | leftI = leftI % nn 161 | if(leftI > rightI) { 162 | rightI += nn 163 | } 164 | var tstar = new Array(rightI-leftI+2) 165 | var tnbhd = new Array(rightI-leftI+2) 166 | tnbhd[0] = s 167 | tstar[0] = rightF 168 | for(var i=leftI; i<=rightI; ++i) { 169 | tnbhd[i-leftI+1] = nbhd[i%nn] 170 | tstar[i-leftI+1] = star[i%nn] 171 | } 172 | tstar[1] = leftF 173 | shiftMin(tnbhd, tstar) 174 | this.neighbors.push(tnbhd) 175 | this.stars.push(tstar) 176 | 177 | //Fix up neighborhoods topology around t 178 | for(var i=0; i Triangle incidence 80 | this.stars = sc.dual(this.cells, numVertices) 81 | this.stars.forEach(function(s) { 82 | s.sort(compareInt) 83 | }) 84 | 85 | //Topology: Vertex neighbors 86 | this.neighbors = new Array(numVertices) 87 | for(var i=0; i 15) { 137 | return Infinity 138 | } 139 | 140 | //Vertices must have exactly 2 common neighbors 141 | var common = 0 142 | for(var i=0; i= 0) { 144 | common += 1 145 | } 146 | if(!vertexManifold[nbhd0[i]]) { 147 | return Infinity 148 | } 149 | } 150 | if(common !== 2) { 151 | return Infinity 152 | } 153 | if(nbhd0.length === 3 && nbhd1.length === 3) { 154 | return Infinity 155 | } 156 | for(var i=0; i= 0) { 174 | array.splice(idx, 1) 175 | } 176 | } 177 | 178 | //Recomputes an edge priority 179 | proto.updateEdge = function(i,j) { 180 | var a = Math.min(i,j) 181 | var b = Math.max(i,j) 182 | 183 | //Compute edge error 184 | var error = this.edgeError(a,b) 185 | 186 | //Update priority queue and table 187 | var tok = a + ',' + b 188 | var event = this.edgeTable[tok] 189 | if(event) { 190 | heap.change(this.pendingCollapse, event, error) 191 | } else { 192 | event = new CollapseEvent(a, b, error) 193 | this.edgeTable[tok] = event 194 | heap.push(this.pendingCollapse, event) 195 | } 196 | } 197 | 198 | //Removes an old edge from the table 199 | proto.removeEdge = function(i,j) { 200 | var a = Math.min(i,j) 201 | var b = Math.max(i,j) 202 | 203 | //Look up edge in table 204 | var tok = a + ',' + b 205 | var event = this.edgeTable[tok] 206 | if(event) { 207 | //Remove from priority queue and table 208 | heap.remove(this.pendingCollapse, event) 209 | delete this.edgeTable[tok] 210 | } 211 | } 212 | 213 | function checkOrientation(cell, v0, v1) { 214 | var i0 = cell.indexOf(v0) 215 | var i1 = cell.indexOf(v1) 216 | return ((i0+1) % 3) === i1 217 | } 218 | 219 | //Collapse an edge and pop out a vertex split operation 220 | proto.pop = function() { 221 | while(this.pendingCollapse.length > 0) { 222 | //Get lowest error edge from heap 223 | var next = 
heap.pop(this.pendingCollapse) 224 | if(!next) { 225 | return null 226 | } 227 | 228 | 229 | //Check that edge is valid 230 | var v0 = Math.min(next.v0, next.v1) 231 | var v1 = Math.max(next.v0, next.v1) 232 | 233 | 234 | if(!this.vertexLive[v0] || 235 | !this.vertexLive[v1] || 236 | !this.vertexManifold[v0] || 237 | !this.vertexManifold[v1] || 238 | next.error >= Infinity || 239 | this.neighbors[v0].indexOf(v1) < 0) { 240 | this.removeEdge(v0, v1) 241 | continue 242 | } 243 | 244 | //Check that event is valid 245 | var error = this.edgeError(v0, v1) 246 | if(error === Infinity) { 247 | this.removeEdge(v0, v1) 248 | continue 249 | } 250 | 251 | //If error updated, then requeue edge 252 | if(error > next.error) { 253 | next.error = error 254 | heap.push(this.pendingCollapse, next) 255 | continue 256 | } 257 | 258 | //Read in topology data for edge 259 | var star0 = this.stars[v0] 260 | var star1 = this.stars[v1] 261 | var nbhd0 = this.neighbors[v0] 262 | var nbhd1 = this.neighbors[v1] 263 | 264 | //Glue neighborhood of v0 into v1, store result 265 | var i0 = nbhd0.indexOf(v1) 266 | var n0 = nbhd0.length 267 | var i1 = nbhd1.indexOf(v0) 268 | var n1 = nbhd1.length 269 | 270 | var left = (i0 + n0 - 1) % n0 271 | var leftV = nbhd0[left] 272 | var leftF = star0[left] 273 | var leftC = this.cells[leftF] 274 | 275 | var right = (i0 + 1)%n0 276 | var rightV = nbhd0[right] 277 | var rightF = star0[right] 278 | var rightC = this.cells[rightF] 279 | 280 | if(rightC.indexOf(v1) >= 0) { 281 | leftF = star0[(left+1)%n0] 282 | leftC = this.cells[leftF] 283 | } else if(leftC.indexOf(v1) >= 0) { 284 | rightF = star1[(right+n0-1)%n0] 285 | rightC = this.cells[rightF] 286 | } 287 | 288 | var leftOrientation = checkOrientation(leftC, v0, leftV) 289 | var rightOrientation = checkOrientation(rightC, v0, rightV) 290 | 291 | //Clear out properties 292 | leftC[0] = leftC[1] = leftC[2] = 293 | rightC[0] = rightC[1] = rightC[2] = -1 294 | 295 | //Collapse v1 into v0 296 | for(var i=0; i> 1 12 | } 13 | return (i >> 1) - 1 14 | } 15 | 16 | function heapUp(heap, i) { 17 | var node = heap[i] 18 | while(true) { 19 | var parentIndex = heapParent(i) 20 | if(parentIndex >= 0) { 21 | var parent = heap[parentIndex] 22 | if(node.error < parent.error) { 23 | heap[i] = parent 24 | parent.index = i 25 | i = parentIndex 26 | continue 27 | } 28 | } 29 | heap[i] = node 30 | node.index = i 31 | break 32 | } 33 | } 34 | 35 | function heapDown(heap, i) { 36 | var node = heap[i] 37 | var count = heap.length 38 | while(true) { 39 | var left = 2*i + 1 40 | var right = 2*(i + 1) 41 | var next = i 42 | var w = node.error 43 | if(left < count) { 44 | var lw = heap[left].error 45 | if(lw < w) { 46 | w = lw 47 | next = left 48 | } 49 | } 50 | if(right < count) { 51 | var rw = heap[right].error 52 | if(rw < w) { 53 | w = rw 54 | next = right 55 | } 56 | } 57 | if(next === i) { 58 | heap[i] = node 59 | node.index = i 60 | break 61 | } 62 | heap[i] = heap[next] 63 | heap[i].index = i 64 | i = next 65 | } 66 | } 67 | 68 | function heapPop(heap) { 69 | if(heap.length <= 0) { 70 | return null 71 | } 72 | var head = heap[0] 73 | if(heap.length > 1) { 74 | var head = heap[0] 75 | heap[0] = heap.pop() 76 | heapDown(heap, 0) 77 | } else { 78 | heap.pop() 79 | } 80 | head.index = -1 81 | return head 82 | } 83 | 84 | function heapPush(heap, item) { 85 | var count = heap.length 86 | heap.push(item) 87 | heapUp(heap, count) 88 | } 89 | 90 | function heapRemove(heap, item) { 91 | if(item.index < 0) { 92 | return 93 | } 94 | item.error = -Infinity 95 | 
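// Removal uses a decrease-key trick: with its error forced to -Infinity the item floats
// to the root in heapUp, and heapPop then removes it and resets its index to -1.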
heapUp(heap, item.index) 96 | return heapPop(heap) 97 | } 98 | 99 | function heapChange(heap, item, nerror) { 100 | if(item.error === nerror) { 101 | return 102 | } 103 | heapRemove(heap, item) 104 | item.error = nerror 105 | heapPush(heap, item) 106 | } 107 | 108 | function heapify(heap) { 109 | for(var i=1; i= 0 && 22 | cell.indexOf(b) >= 0) { 23 | result[i] = star[j] 24 | } 25 | } 26 | } 27 | 28 | for(var i=0; i= 0 && cu >= 0 && cw >= 0, 'cell valid: ' + c) 71 | t.equals((cv+1)%3, cu, 'check neighborhood incoming edge') 72 | t.equals((cv+2)%3, cw, 'check neighborhood outgoing edge') 73 | } 74 | } 75 | 76 | function verifyMesh() { 77 | var numVerts = decoder.numVerts 78 | var numCells = decoder.cells.length 79 | 80 | /* 81 | t.equals(decoder.vertexAttributes.length, geometry.vertexAttributes.length, 'vattributes ok') 82 | t.equals(decoder.cellAttributes.length, geometry.cellAttributes.length, 'vattributes ok') 83 | */ 84 | 85 | var stars = new Array(numVerts) 86 | for(var i=0; i> 1 13 | } 14 | return (i >> 1) - 1 15 | } 16 | 17 | function testEncoder(name, data, step) { 18 | tape('encode: ' + name, function(t) { 19 | var mesh = new MeshEncoder( 20 | data.positions.length, 21 | data.cells.length, 22 | data.cells, 23 | [data.positions], 24 | []) 25 | 26 | t.equals(mesh.numVertices, data.positions.length) 27 | t.equals(mesh.numCells, data.cells.length) 28 | 29 | step = step||0 30 | 31 | function computeEdges() { 32 | var edges = [] 33 | for(var i=0; i0 && edges[i] === edges[i-1]) { 56 | continue 57 | } 58 | edges[ptr++] = edges[i] 59 | } 60 | edges.length = ptr 61 | return edges 62 | } 63 | 64 | var numVertices = data.positions.length 65 | var count = 0 66 | function checkTopology() { 67 | if(--count > 0) { 68 | return 69 | } 70 | 71 | count = step 72 | var stars = new Array(numVertices) 73 | for(var i=0; i= 0) { 85 | stars[c[i]].push(j) 86 | } 87 | } 88 | } 89 | 90 | for(var i=0; i= 0, 'linked') 107 | } 108 | } 109 | 110 | var edges = computeEdges() 111 | var table = {} 112 | for(var i=0; i= mesh.pendingCollapse[heapParent(i)].error, 'heap invariant ' + i + ' vs ' + heapParent(i)) 136 | } 137 | } 138 | 139 | checkTopology(mesh) 140 | 141 | var fcount = mesh.cells.length 142 | while(true) { 143 | var prevNeighbors = mesh.neighbors.map(function(nbhd) { 144 | return nbhd.slice() 145 | }) 146 | var prevCells = mesh.cells.map(function(cell) { 147 | return cell.slice() 148 | }) 149 | var prevStars = mesh.stars.map(function(star) { 150 | return star.slice() 151 | }) 152 | var ecol = mesh.pop() 153 | if(!ecol) { 154 | break 155 | } 156 | 157 | var es = ecol.s 158 | t.ok(0 <= es && prevNeighbors[es].length > 0, 'check s valid') 159 | 160 | var et = ecol.t 161 | t.ok(0 <= et && prevNeighbors[et].length > 0, 'check t valid') 162 | 163 | t.ok(prevNeighbors[es].indexOf(et) >= 0, 'check linked s->t') 164 | t.ok(prevNeighbors[et].indexOf(es) >= 0, 'check linked t->s') 165 | 166 | var el = ecol.left 167 | t.ok(0 <= el && el < mesh.stars[es].length, 'check left valid') 168 | 169 | var er = ecol.right 170 | t.ok(0 <= er && er < mesh.stars[es].length, 'check left valid') 171 | 172 | var estar = prevStars[es] 173 | var enbhd = mesh.neighbors[es] 174 | 175 | var lv = enbhd[el] 176 | var rv = enbhd[er] 177 | var rf = -1 178 | var lf = -1 179 | for(var i=0; i= 0) { 183 | if(cell.indexOf(lv) >= 0) { 184 | lf = f 185 | } else if(cell.indexOf(rv) >= 0) { 186 | rf = f 187 | } 188 | } 189 | } 190 | if(lf < 0) { 191 | t.fail('missing left face') 192 | } else { 193 | t.pass('left face ok') 194 | } 195 | if(rf < 0) { 
196 | t.fail('missing right face: ' + JSON.stringify(ecol) + ' -- ' + rv) 197 | } else { 198 | t.pass('right face ok') 199 | } 200 | 201 | checkTopology(mesh) 202 | } 203 | 204 | count = 0 205 | checkTopology(mesh) 206 | 207 | t.end() 208 | }) 209 | } -------------------------------------------------------------------------------- /test/heap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | var heap = require('../lib/heap') 4 | var tape = require('tape') 5 | 6 | function heapParent(i) { 7 | if(i & 1) { 8 | return (i - 1) >> 1 9 | } 10 | return (i >> 1) - 1 11 | } 12 | 13 | 14 | tape('heap', function(t) { 15 | var items = [0, 5, 1, 3, 4, 2, 8, 7, 6, 9].map(function(i) { 16 | return { error: i } 17 | }) 18 | 19 | function checkHeapInvariant(items) { 20 | for(var i=1; i items[heapParent(i)].error, 'heap invariant ' + i + ' vs ' + heapParent(i)) 23 | } 24 | } 25 | 26 | heap(items) 27 | checkHeapInvariant(items) 28 | 29 | t.equals(items.length, 10) 30 | for(var i=0; i<10; ++i) { 31 | t.equals(heap.pop(items).error, i) 32 | t.equals(items.length, 9-i, 'heap capacity ' + i) 33 | checkHeapInvariant(items) 34 | } 35 | 36 | var pending = [] 37 | for(var i=0; i<10; ++i) { 38 | pending[i] = { error: Math.random() } 39 | } 40 | var r = pending.slice() 41 | heap(pending) 42 | for(var i=9; i>=0; --i) { 43 | var x = heap.remove(pending, r[i]) 44 | t.equals(x.index, -1, 'index cleared') 45 | t.equals(pending.length, i, 'heap length') 46 | checkHeapInvariant(pending) 47 | } 48 | 49 | var nitems = new Array(10) 50 | for(var i=0; i<10; ++i) { 51 | nitems[i] = { error: 10*Math.random() } 52 | } 53 | heap(nitems) 54 | checkHeapInvariant(nitems) 55 | var q = nitems.slice() 56 | for(var i=0; i<10; ++i) { 57 | heap.change(nitems, q[i], i) 58 | t.equals(nitems.length, 10, 'heap length unchanged') 59 | checkHeapInvariant(nitems) 60 | } 61 | 62 | t.equals(nitems.length, 10) 63 | for(var i=0; i<10; ++i) { 64 | t.equals(heap.pop(nitems).error, i) 65 | t.equals(nitems.length, 9-i, 'heap capacity ' + i) 66 | checkHeapInvariant(nitems) 67 | } 68 | 69 | t.end() 70 | }) -------------------------------------------------------------------------------- /test/test-I.js: -------------------------------------------------------------------------------- 1 | var testCodec = require('./test-codec') 2 | var icos = require('conway-hart')('I') 3 | 4 | testCodec('icosahedron', 20, icos.cells) -------------------------------------------------------------------------------- /test/test-bunny.js: -------------------------------------------------------------------------------- 1 | var testCodec = require('./test-codec') 2 | var bunny = require('bunny') 3 | 4 | testCodec('bunny', bunny.positions.length, bunny.cells) -------------------------------------------------------------------------------- /test/test-codec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = testCodec 4 | 5 | var tape = require('tape') 6 | var encodeJSON = require('../encode-json') 7 | var decodeJSON = require('../decode-json') 8 | 9 | function getPermutation(tags) { 10 | var zipped = tags.map(function(t,i) { 11 | return [t, i] 12 | }) 13 | zipped.sort(function(a,b) { 14 | return a.t - b.t 15 | }) 16 | return zipped.map(function(pair) { 17 | return pair[1] 18 | }) 19 | } 20 | 21 | function canonicalizeCells(cells, tags) { 22 | 23 | var perm = getPermutation(tags) 24 | var ncells = cells.map(function(cell) { 25 | var c = [ 26 | 
perm[cell[0]], 27 | perm[cell[1]], 28 | perm[cell[2]] 29 | ] 30 | var k=0 31 | for(var i=0; i<3; ++i) { 32 | if(cell[i] < cell[k]) { 33 | k = i 34 | } 35 | } 36 | return [ 37 | cell[k], 38 | cell[(k+1)%3], 39 | cell[(k+2)%3] 40 | ] 41 | }) 42 | } 43 | 44 | function testCodec(name, numVerts, cells) { 45 | tape('test codec: ' + name, function(t) { 46 | 47 | var vertexLabels = new Array(numVerts) 48 | for(var i=0; i