├── README.md
├── blue_noise_particles.py
└── fibonacci_heap_mod.py

/README.md:
--------------------------------------------------------------------------------
 1 | Blue Noise Particles
 2 | =====================
 3 | 
 4 | **Important: Since Blender 2.92, the majority of functionality in the plugin is better achieved with the [Point Distribute Geometry Node][2] in Poisson Disk mode.**
 5 | 
 6 | This Blender plugin generates a random arrangement of particles with a blue noise distribution.
 7 | This is also known as Poisson Disk Sampling.
 8 | 
 9 | This distribution of particles guarantees that no two particles are very close to each other. It's often considered a higher
10 | quality particle arrangement than Blender's default uniform sampling. It's particularly useful for organic
11 | arrangements, and for randomly arranging meshes without collisions.
12 | 
13 | The particular method of noise generation is called [Sample Elimination for Poisson Disk Sample Sets][1]. Thanks to
14 | Cem Yuksel for the research and the clear statement of the method.
15 | 
16 | [1]: http://www.cemyuksel.com/research/sampleelimination/
17 | 
18 | [2]: https://docs.blender.org/manual/en/latest/modeling/geometry_nodes/point/point_distribute.html
19 | 
20 | Installation
21 | ------------
22 | Download the [zip][3], then go to `Edit > Preferences > Addons`.
23 | Select *Install from File* and pick the zip. Then tick the checkbox to enable the addon.
24 | 
25 | [3]: https://github.com/BorisTheBrave/blue-noise-particles/releases
26 | 
27 | Alternatively, copy the Python files from this repository into your Blender addons directory, then enable the addon.
28 | 
29 | Usage
30 | -----
31 | 
32 | Select any mesh, then run the plugin from the `Add > Mesh` menu, or the toolbox.
33 | The parameters work very similarly to the normal particle options.
34 | 
35 | The plugin creates a new mesh with a particle system attached. You can then customize the particle system as
36 | normal, for example changing the render type to Object to draw a mesh at the location of each particle.
37 | 
38 | Further explanation and examples can be found in the wiki on GitHub:
39 | <https://github.com/BorisTheBrave/blue-noise-particles/wiki>
40 | 
41 | Further external reading can be found at:
42 | 
43 | *
44 | 
45 | *
46 | 
47 | 
48 | License
49 | -------
50 | This code is licensed under the GPL, copyright Adam Newgas 2017.
51 | 
52 | fibonacci_heap_mod has been included for convenience. It is available under the Apache v2 License.
53 | 
54 | 
--------------------------------------------------------------------------------
/blue_noise_particles.py:
--------------------------------------------------------------------------------
 1 | # This program is free software: you can redistribute it and/or modify
 2 | # it under the terms of the GNU General Public License as published by
 3 | # the Free Software Foundation, either version 3 of the License, or
 4 | # (at your option) any later version.
 5 | 
 6 | # This program is distributed in the hope that it will be useful,
 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 9 | # GNU General Public License for more details.
10 | 
11 | # You should have received a copy of the GNU General Public License
12 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
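# Example (untested sketch): with a mesh object active in Object Mode, the
# operator registered at the bottom of this file can also be invoked from
# Blender's Python console:
#
#     bpy.ops.object.blue_noise_particles_operator(count=500, emit_from='FACE',
#                                                  quality='2')
#
# The keyword names match the operator properties defined further down.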
13 | 14 | 15 | import math 16 | import fibonacci_heap_mod 17 | import bpy 18 | import bpy.props 19 | import bpy.utils 20 | import bmesh 21 | import mathutils 22 | import mathutils.geometry 23 | import mathutils.kdtree 24 | 25 | bl_info = { 26 | "name": "Blue Noise Particles", 27 | "description": "", 28 | "author": "Adam Newgas", 29 | "version": (1, 0, 1), 30 | "blender": (2, 80, 0), 31 | "location": "View3D > Add > Mesh > Blue Noise Particles", 32 | "warning": "", 33 | "wiki_url": "https://github.com/BorisTheBrave/blue-noise-particles/wiki", 34 | "category": "Add Mesh"} 35 | 36 | BLUE = 'BLUE' 37 | MAGENTA = 'MAGENTA' 38 | 39 | 40 | class SampleEliminator: 41 | def __init__(self, locations, densities, target_samples, is_volume, mesh_area, 42 | noise_type, patchiness): 43 | self.locations = locations 44 | self.noise_type = noise_type 45 | self.patchiness = patchiness 46 | 47 | # Setup a KD Tree of all locations 48 | self.tree = mathutils.kdtree.KDTree(len(locations)) 49 | for index, location in enumerate(locations): 50 | self.tree.insert(location, index) 51 | self.tree.balance() 52 | 53 | self.alpha = 8 54 | self.target_samples = target_samples 55 | self.current_samples = len(self.locations) 56 | 57 | M = self.current_samples 58 | N = self.target_samples 59 | 60 | # Choose rmax via heuristic 61 | bounds = [max(p[i] for p in locations) - min(p[i] for p in locations) 62 | for i in range(3)] 63 | 64 | # Volume based constraint 65 | A = bounds[0] * bounds[1] * bounds[2] 66 | rmax3 = (A / 4 / math.sqrt(2) / N) ** (1 / 3) 67 | 68 | # Volume estimate only valid for reasonably squarish things 69 | is_thin = rmax3 <= min(bounds) 70 | if is_thin: 71 | rmax3 = float('inf') 72 | 73 | if is_thin or not is_volume: 74 | # If we are constrained to 2d surface, then it is possible to 75 | # get a better bound for rmax. Depends on the mesh geometry. 76 | rmax2 = math.sqrt(mesh_area / 2 / math.sqrt(3) / N) 77 | else: 78 | rmax2 = float('inf') 79 | 80 | self.rmax = min(rmax2, rmax3) 81 | 82 | if densities is not None: 83 | # Need to be a bit more conservative if the faces are imbalanced. 84 | # This could still go wrong with extreme vertex weights... 
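            # Over-estimating rmax is safe in the sense that it only widens the
            # neighbourhoods considered when weighting samples; the number of
            # surviving samples is still fixed by target_samples.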
85 | self.rmax *= 3 86 | dmax = max(densities) 87 | self.densities = [d / dmax for d in densities] 88 | else: 89 | self.densities = [1] * len(locations) 90 | 91 | # Choose rmin via heuristic 92 | gamma = 1.5 93 | beta = 0.65 94 | self.rmin = self.rmax * (1 - (N / M) ** gamma) * beta 95 | 96 | # Build initial heap 97 | self.heap = fibonacci_heap_mod.Fibonacci_heap() 98 | self.heap_items = {} 99 | for index, location in enumerate(locations): 100 | tot_weight = 0 101 | for location2, index2, d in self.tree.find_range(location, 2 * self.rmax): 102 | tot_weight += self.w(d, index, index2) 103 | item = self.heap.enqueue(index, -tot_weight) 104 | self.heap_items[index] = item 105 | 106 | def eliminate_one(self): 107 | # Extract highest priority item 108 | item = self.heap.dequeue_min() 109 | index = item.get_value() 110 | del self.heap_items[index] 111 | location = self.locations[index] 112 | 113 | # Update all adjacent items 114 | for location2, index2, d in self.tree.find_range(location, 2 * self.rmax): 115 | item2 = self.heap_items.get(index2) 116 | if item2: 117 | new_weight = item2.get_priority() + self.w(d, index2, index) 118 | self.heap.delete(item2) 119 | item2 = self.heap.enqueue(index2, new_weight) 120 | self.heap_items[index2] = item2 121 | self.current_samples -= 1 122 | 123 | def eliminate(self): 124 | while self.current_samples > self.target_samples: 125 | self.eliminate_one() 126 | 127 | def get_indices(self): 128 | return list(self.heap_items.keys()) 129 | 130 | def d(self, i, j): 131 | li = self.locations[i] 132 | lj = self.locations[j] 133 | return math.sqrt((li[0] - lj[0]) ** 2 + 134 | (li[1] - lj[1]) ** 2 + 135 | (li[2] - lj[2]) ** 2) 136 | 137 | def w(self, d, i, j): 138 | # This sqrt is important as it ensures our distance scale (a length) 139 | # is consistent with the density scale (an area^-1) 140 | # If they are not consistent, then the distribution that generated 141 | # the points won't be close to what we are eliminating down to 142 | # leading to poor quality results. 143 | d *= math.sqrt(self.densities[i]) 144 | adj_d = min(d, 2 * self.rmax) 145 | if self.noise_type == BLUE: 146 | return (1 - adj_d / 2 / self.rmax) ** self.alpha 147 | else: 148 | if adj_d == 0: 149 | return 1e10 150 | return 2 * self.rmax / adj_d - self.patchiness 151 | 152 | 153 | def get_mesh_area(obj): 154 | bm = bmesh.new() 155 | bm.from_mesh(obj.data) 156 | area = sum(f.calc_area() for f in bm.faces) 157 | bm.free() 158 | return area 159 | 160 | def particle_distribute(obj, particle_count, emit_from, scene): 161 | # Uses blender's built in particle system. 
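    # Returns (locations, normals, densities) like weighted_particle_distribute
    # below, except that densities is always None: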
162 |     # Sadly doesn't work with density weighting as it is not possible
163 |     # to extract vertex densities
164 |     bpy.ops.object.particle_system_add()
165 |     psys = obj.particle_systems[-1]  # type: bpy.types.ParticleSystem
166 |     pset = psys.settings
167 |     pset.count = particle_count
168 |     pset.emit_from = emit_from
169 |     pset.distribution = 'RAND'
170 |     pset.use_even_distribution = True
171 | 
172 |     # Force a scene update (generates particle locations)
173 |     bpy.context.view_layer.update()
174 | 
175 |     # Force depsgraph evaluation (https://developer.blender.org/T58792);
176 |     # particle locations are only available on the evaluated copy of the object.
177 | 
178 |     dp = bpy.context.evaluated_depsgraph_get()
179 |     ob = bpy.context.active_object
180 |     eval_obj = dp.objects.get(ob.name, None)
181 | 
182 |     # Extract locations
183 |     particles = eval_obj.particle_systems[-1].particles
184 |     locations = [mathutils.Vector(particle.location) for (index, particle) in particles.items()]
185 |     normals = [mathutils.Vector(particle.velocity) for (index, particle) in particles.items()]
186 | 
187 |     # Delete particle system
188 |     bpy.ops.object.particle_system_remove()
189 | 
190 |     return locations, normals, None
191 | 
192 | V1 = mathutils.Vector([0, 0, 0])
193 | V2 = mathutils.Vector([0, 0, 1])
194 | V3 = mathutils.Vector([0, 1, 0])
195 | 
196 | 
197 | def weighted_particle_distribute(obj, particle_count, weight_group):
198 |     # Distributes points similarly to blender's built in particle system
199 |     # for emit_from=FACES, random type=RAND, even_distribution=True
200 |     # and with the given vertex weight group for density.
201 |     # It returns the locations of the particles, and the density of the
202 |     # area where each particle was found.
203 |     import numpy as np
204 |     np.random.seed(0)
205 | 
206 |     # This is a rough port of what the C code does for random particles
207 |     # See particle_distribute.c, distribute_from_faces_exec
208 | 
209 |     bm = bmesh.new()
210 |     bm.from_mesh(obj.data)
211 |     # TODO: The original code handles quads.
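    # Triangulating everything first keeps the sampling area-weighted; the
    # result just won't match Blender's own random stream exactly.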
212 | bmesh.ops.triangulate(bm, faces=bm.faces) 213 | bm.faces.ensure_lookup_table() 214 | areas = np.array([f.calc_area() for f in bm.faces]) 215 | 216 | # Compute face relative densities 217 | group_index = obj.vertex_groups[weight_group].index 218 | layer = bm.verts.layers.deform[0] 219 | face_densities = np.zeros(len(bm.faces)) 220 | for face in bm.faces: 221 | w = np.mean([v[layer].get(group_index, 0) for v in face.verts]) 222 | face_densities[face.index] = w 223 | areas *= face_densities 224 | 225 | 226 | # Precompute distribution amongst faces 227 | careas = areas.cumsum() 228 | total_area = careas[-1] 229 | 230 | # Randomly pick which face each particle goes to 231 | rand_index = np.random.uniform(0, total_area, particle_count) 232 | face_indices = np.searchsorted(careas, rand_index) 233 | 234 | # Randomly pick where in each face the particle is 235 | rand_u = np.random.uniform(size=particle_count) 236 | rand_v = np.random.uniform(size=particle_count) 237 | 238 | V = mathutils.Vector 239 | locations = [] 240 | normals = [] 241 | densities = [] 242 | for i in range(particle_count): 243 | face_index = face_indices[i] 244 | face = bm.faces[face_index] 245 | vc = len(face.verts) 246 | assert vc == 3 247 | u = rand_u[i] 248 | v = rand_v[i] 249 | if u + v > 1: 250 | u = 1 - u 251 | v = 1 - v 252 | loc = mathutils.geometry.barycentric_transform( 253 | V([0, u, v]), 254 | V1, 255 | V2, 256 | V3, 257 | face.verts[0].co, 258 | face.verts[1].co, 259 | face.verts[2].co, 260 | ) 261 | loc = obj.matrix_world @ loc 262 | locations.append(mathutils.Vector(loc)) 263 | normals.append(mathutils.Vector(face.normal)) 264 | densities.append(face_densities[face_index]) 265 | 266 | 267 | bm.free() 268 | return locations, normals, densities 269 | 270 | def set_face_cloud(me, locations, normals): 271 | """Fills a mesh with tiny planes, one for each location/normals""" 272 | up = mathutils.Vector([0, 1, 0]) 273 | 274 | def get_tangent(v): 275 | t = up.cross(v) 276 | if t.length_squared < 1e-12: t = mathutils.Vector([0, 0, 1]) 277 | # Need to make these reasonably large or they are invisible in the blender UI 278 | return t * 1e-2 279 | 280 | tangents = list(map(get_tangent, normals)) 281 | tangents2 = list(map(lambda a, b: a.cross(b), tangents, normals)) 282 | 283 | vertices = ([v + t for (v, t) in zip(locations, tangents2)] + 284 | [v + t for (v, t) in zip(locations, tangents)] + 285 | [v - t for (v, t) in zip(locations, tangents2)] + 286 | [v - t for (v, t) in zip(locations, tangents)]) 287 | 288 | n = len(locations) 289 | faces = [(i, i+n, i+2*n, i+3*n) for i in range(n)] 290 | 291 | me.from_pydata(vertices, [] , faces) 292 | 293 | class BlueNoiseParticles(bpy.types.Operator): 294 | bl_idname = "object.blue_noise_particles_operator" 295 | bl_label = "Blue Noise Particles" 296 | bl_options = {'REGISTER', 'UNDO'} 297 | 298 | emit_from_types = [("VERT", "Verts", "Emit from vertices"), 299 | ("FACE", "Faces", "Emit from faces"), 300 | ("VOLUME", "Volume", "Emit from volume")] 301 | emit_from: bpy.props.EnumProperty(items=emit_from_types, 302 | name="Emit From", 303 | description="Controls where particles are generated", 304 | default="FACE") 305 | 306 | quality_types = [("1", "None", ""), 307 | ("1.5", "Low", ""), 308 | ("2", "Medium", ""), 309 | ("5", "High", "")] 310 | quality: bpy.props.EnumProperty(items=quality_types, 311 | name="Quality", 312 | description="Controls how much oversampling is done", 313 | default="2") 314 | 315 | count: bpy.props.IntProperty(name="Count", 316 | description="Number of 
particles to emit", 317 | default=1000, 318 | min=0) 319 | 320 | vertex_group_density: bpy.props.StringProperty(name="Density", 321 | description="Vertex group to control density") 322 | 323 | noise_types = [(BLUE, "Even", "Spreads particles out with no two near each other"), 324 | (MAGENTA, "Patchy", "Clumps particles while still keeping a minimum distance")] 325 | noise_type: bpy.props.EnumProperty(items=noise_types, 326 | name="Noise Type", 327 | description="Controls distribution of particles", 328 | default=BLUE) 329 | 330 | patchiness: bpy.props.FloatProperty(name="Patchiness", 331 | description="Controls how strongly particles clump together", 332 | default=3, 333 | soft_min=0, 334 | soft_max=10) 335 | 336 | generate_types = [("FACE", "Faces", ""), 337 | ("VERT", "Vertices", "")] 338 | generate_type: bpy.props.EnumProperty(items=generate_types, 339 | name="Generate", 340 | description="Use faces or vertices for each particle", 341 | default="FACE") 342 | 343 | @classmethod 344 | def poll(cls, context): 345 | ob = context.active_object 346 | return ((ob is not None) and 347 | (ob.mode == "OBJECT") and 348 | (ob.type == "MESH") and 349 | (context.mode == "OBJECT")) 350 | 351 | def draw(self, context): 352 | layout = self.layout 353 | layout.prop(self, "emit_from") 354 | layout.prop(self, "quality") 355 | layout.prop(self, "count") 356 | obj = bpy.data.objects[self.obj_name] 357 | if self.emit_from == "FACE": 358 | layout.prop_search(self, "vertex_group_density", obj, "vertex_groups", text="Density") 359 | layout.prop(self, "noise_type") 360 | if self.noise_type == MAGENTA: 361 | layout.prop(self, "patchiness") 362 | layout.prop(self, "generate_type") 363 | 364 | def check(self, context): 365 | return True 366 | 367 | def execute(self, context): 368 | obj = context.active_object # type: bpy.types.Object 369 | scene = bpy.context.scene 370 | 371 | initial_particle_count = int(self.count * float(self.quality)) 372 | 373 | is_volume = self.emit_from == 'VOLUME' 374 | mesh_area = get_mesh_area(obj) 375 | 376 | if not self.vertex_group_density or self.emit_from != "FACE": 377 | locations, normals, densities = particle_distribute(obj, initial_particle_count, self.emit_from, scene) 378 | else: 379 | locations, normals, densities = weighted_particle_distribute(obj, initial_particle_count, self.vertex_group_density) 380 | 381 | # Run sample elimination 382 | se = SampleEliminator(locations, densities, self.count, is_volume, mesh_area, self.noise_type, self.patchiness) 383 | se.eliminate() 384 | alive_indices = se.get_indices() 385 | alive_locations = [locations[i] for i in alive_indices] 386 | alive_normals = [normals[i] for i in alive_indices] 387 | 388 | 389 | # Create a new object, with vertices according the the alive locations 390 | me = bpy.data.meshes.new(obj.name + " ParticleMesh") 391 | ob = bpy.data.objects.new(obj.name + " Particles", me) 392 | # Sigh, it doesn't seem possible to create vertices with custom normals, nor does the particle system respect it 393 | if self.generate_type == "FACE": 394 | #me.from_pydata(alive_locations, [], []) 395 | #me.vertices.foreach_set("normal", [v for n in alive_normals for v in n]) 396 | #me.normals_split_custom_set_from_vertices(alive_normals) 397 | # So instead we create a mesh with lots of tiny faces 398 | set_face_cloud(me, alive_locations, alive_normals) 399 | else: 400 | me.from_pydata(alive_locations, [], []) 401 | 402 | scene.collection.objects.link(ob) 403 | me.update() 404 | 405 | # Select new object 406 | 
context.view_layer.objects.active = ob 407 | obj.select_set(False) 408 | ob.select_set(True) 409 | 410 | # Add a particle system to the new object 411 | bpy.ops.object.particle_system_add() 412 | psys = ob.particle_systems[-1] # type: bpy.types.ParticleSystem 413 | pset = psys.settings 414 | pset.count = self.count 415 | pset.emit_from = self.generate_type 416 | pset.use_emit_random = False 417 | pset.frame_start = 0 418 | pset.frame_end = 0 419 | pset.physics_type = 'NO' 420 | 421 | return {'FINISHED'} 422 | 423 | def invoke(self, context, event): 424 | self.obj_name = context.active_object.name 425 | return context.window_manager.invoke_props_dialog(self) 426 | 427 | def menu_func(self, context): 428 | self.layout.operator(BlueNoiseParticles.bl_idname, 429 | text="Blue Noise Particles", 430 | icon='PLUGIN') 431 | 432 | 433 | def register(): 434 | bpy.utils.register_class(BlueNoiseParticles) 435 | bpy.types.VIEW3D_MT_mesh_add.append(menu_func) 436 | 437 | 438 | def unregister(): 439 | bpy.types.VIEW3D_MT_mesh_add.remove(menu_func) 440 | bpy.utils.unregister_class(BlueNoiseParticles) 441 | 442 | if __name__ == "__main__": 443 | register() 444 | -------------------------------------------------------------------------------- /fibonacci_heap_mod.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Fibonacci heap. 4 | 5 | File: fibonacci_heap_mod.py 6 | Author: Keith Schwarz (htiek@cs.stanford.edu) 7 | Ported to Python by Dan Stromberg (strombrg@gmail.com) 8 | 9 | An implementation of a priority queue backed by a Fibonacci heap, as described 10 | by Fredman and Tarjan. Fibonacci heaps are interesting theoretically because 11 | they have asymptotically good runtime guarantees for many operations. In 12 | particular, insert, peek, and decrease-key all run in amortized O(1) time. 13 | dequeue_min and delete each run in amortized O(lg n) time. This allows 14 | algorithms that rely heavily on decrease-key to gain significant performance 15 | boosts. For example, Dijkstra's algorithm for single-source shortest paths can 16 | be shown to run in O(m + n lg n) using a Fibonacci heap, compared to O(m lg n) 17 | using a standard binary or binomial heap. 18 | 19 | Internally, a Fibonacci heap is represented as a circular, doubly-linked list 20 | of trees obeying the min-heap property. Each node stores pointers to its 21 | parent (if any) and some arbitrary child. Additionally, every node stores its 22 | degree (the number of children it has) and whether it is a "marked" node. 23 | Finally, each Fibonacci heap stores a pointer to the tree with the minimum 24 | value. 25 | 26 | To insert a node into a Fibonacci heap, a singleton tree is created and merged 27 | into the rest of the trees. The merge operation works by simply splicing 28 | together the doubly-linked lists of the two trees, then updating the min 29 | pointer to be the smaller of the minima of the two heaps. Peeking at the 30 | smallest element can therefore be accomplished by just looking at the min 31 | element. All of these operations complete in O(1) time. 32 | 33 | The tricky operations are dequeue_min and decrease_key. dequeue_min works by 34 | removing the root of the tree containing the smallest element, then merging its 35 | children with the topmost roots. Then, the roots are scanned and merged so 36 | that there is only one tree of each degree in the root list. 
This works by 37 | maintaining a dynamic array of trees, each initially null, pointing to the 38 | roots of trees of each dimension. The list is then scanned and this array is 39 | populated. Whenever a conflict is discovered, the appropriate trees are merged 40 | together until no more conflicts exist. The resulting trees are then put into 41 | the root list. A clever analysis using the potential method can be used to 42 | show that the amortized cost of this operation is O(lg n), see "Introduction to 43 | Algorithms, Second Edition" by Cormen, Rivest, Leiserson, and Stein for more 44 | details. 45 | 46 | The other hard operation is decrease_key, which works as follows. First, we 47 | update the key of the node to be the new value. If this leaves the node 48 | smaller than its parent, we're done. Otherwise, we cut the node from its 49 | parent, add it as a root, and then mark its parent. If the parent was already 50 | marked, we cut that node as well, recursively mark its parent, and continue 51 | this process. This can be shown to run in O(1) amortized time using yet 52 | another clever potential function. Finally, given this function, we can 53 | implement delete by decreasing a key to -infinity, then calling dequeue_min to 54 | extract it. 55 | """ 56 | 57 | import math 58 | import collections 59 | 60 | 61 | def merge_lists(one, two): 62 | """ 63 | Merge 2 lists. 64 | 65 | Utility function which, given two pointers into disjoint circularly- 66 | linked lists, merges the two lists together into one circularly-linked 67 | list in O(1) time. Because the lists may be empty, the return value 68 | is the only pointer that's guaranteed to be to an element of the 69 | resulting list. 70 | 71 | This function assumes that one and two are the minimum elements of the 72 | lists they are in, and returns a pointer to whichever is smaller. If 73 | this condition does not hold, the return value is some arbitrary pointer 74 | into the doubly-linked list. 75 | 76 | @param one A reference to one of the two deques. 77 | @param two A reference to the other of the two deques. 78 | @return A reference to the smallest element of the resulting list. 79 | """ 80 | # There are four cases depending on whether the lists are None or not. 81 | # We consider each separately. 82 | if one is None and two is None: 83 | # Both None, resulting list is None. 84 | return None 85 | elif one is not None and two is None: 86 | # Two is None, result is one. 87 | return one 88 | elif one is None and two is not None: 89 | # One is None, result is two. 90 | return two 91 | else: 92 | # Both non-None; actually do the splice. 93 | 94 | # This is actually not as easy as it seems. The idea is that we'll 95 | # have two lists that look like this: 96 | # 97 | # +----+ +----+ +----+ 98 | # | |--N->|one |--N->| | 99 | # | |<-P--| |<-P--| | 100 | # +----+ +----+ +----+ 101 | # 102 | # 103 | # +----+ +----+ +----+ 104 | # | |--N->|two |--N->| | 105 | # | |<-P--| |<-P--| | 106 | # +----+ +----+ +----+ 107 | # 108 | # And we want to relink everything to get 109 | # 110 | # +----+ +----+ +----+---+ 111 | # | |--N->|one | | | | 112 | # | |<-P--| | | |<+ | 113 | # +----+ +----+<-\ +----+ | | 114 | # \ P | | 115 | # N \ N | 116 | # +----+ +----+ \->+----+ | | 117 | # | |--N->|two | | | | | 118 | # | |<-P--| | | | | P 119 | # +----+ +----+ +----+ | | 120 | # ^ | | | 121 | # | +-------------+ | 122 | # +-----------------+ 123 | 124 | # Cache this since we're about to overwrite it. 
125 | one_next = one.m_next 126 | 127 | one.m_next = two.m_next 128 | one.m_next.m_prev = one 129 | two.m_next = one_next 130 | two.m_next.m_prev = two 131 | 132 | # Return a pointer to whichever's smaller. 133 | if one.m_priority < two.m_priority: 134 | return one 135 | else: 136 | return two 137 | 138 | 139 | def merge(one, two): 140 | """ 141 | Merge 2 Fibonacci heaps. 142 | 143 | Given two Fibonacci heaps, returns a new Fibonacci heap that contains 144 | all of the elements of the two heaps. Each of the input heaps is 145 | destructively modified by having all its elements removed. You can 146 | continue to use those heaps, but be aware that they will be empty 147 | after this call completes. 148 | 149 | @param one The first Fibonacci heap to merge. 150 | @param two The second Fibonacci heap to merge. 151 | @return A new Fibonacci_heap containing all of the elements of both 152 | heaps. 153 | """ 154 | # Create a new Fibonacci_heap to hold the result. 155 | result = Fibonacci_heap() 156 | 157 | # Merge the two Fibonacci heap root lists together. This helper function 158 | # also computes the min of the two lists, so we can store the result in 159 | # the m_min field of the new heap. 160 | result.m_min = merge_lists(one.m_min, two.m_min) 161 | 162 | # The size of the new heap is the sum of the sizes of the input heaps. 163 | result.m_size = one.m_size + two.m_size 164 | 165 | # Clear the old heaps. 166 | one.m_size = two.m_size = 0 167 | one.m_min = None 168 | two.m_min = None 169 | 170 | # Return the newly-merged heap. 171 | return result 172 | 173 | 174 | # In order for all of the Fibonacci heap operations to complete in O(1), 175 | # clients need to have O(1) access to any element in the heap. We make 176 | # this work by having each insertion operation produce a handle to the 177 | # node in the tree. In actuality, this handle is the node itself. 178 | class Entry(object): 179 | # pylint: disable=too-many-instance-attributes 180 | 181 | """Hold an entry in the heap.""" 182 | 183 | __slots__ = ['m_degree', 'm_is_marked', 'm_parent', 'm_child', 'm_next', 'm_prev', 'm_elem', 'm_priority'] 184 | 185 | def __init__(self, elem, priority): 186 | """Initialize an Entry in the heap.""" 187 | # Number of children 188 | self.m_degree = 0 189 | # Whether this node is marked 190 | self.m_is_marked = False 191 | 192 | # Parent in the tree, if any. 193 | self.m_parent = None 194 | 195 | # Child node, if any. 196 | self.m_child = None 197 | 198 | self.m_next = self.m_prev = self 199 | self.m_elem = elem 200 | self.m_priority = priority 201 | 202 | def __lt__(self, other): 203 | """Return True iff self's priority is less than other's.""" 204 | return self.m_priority < other.m_priority 205 | 206 | def __eq__(self, other): 207 | """Return True iff ==.""" 208 | if self.m_priority == other.m_priority: 209 | return True 210 | else: 211 | return self.m_elem == other.m_elem 212 | 213 | def __gt__(self, other): 214 | """Return True iff >.""" 215 | if self.m_priority > other.m_priority: 216 | return True 217 | else: 218 | return self.m_elem > other.m_elem 219 | 220 | def __cmp__(self, other): 221 | """Python 2.x-style comparison.""" 222 | if self.__lt__(other): 223 | return -1 224 | elif self.__gt__(other): 225 | return 1 226 | else: 227 | return 0 228 | 229 | # def __cmp__(self, other): 230 | # """ 231 | # Comparison method, 2.x style. 
232 | # We compare object identity, rather than priority and value 233 | # """ 234 | # if id(self) == id(other): 235 | # return 0 236 | # elif id(self) < id(other): 237 | # return -1 238 | # else: 239 | # return 1 240 | # 241 | # def __lt__(self, other): 242 | # """Comparison method, 3.x style""" 243 | # if self.__cmp__(other) == -1: 244 | # return True 245 | # else: 246 | # return False 247 | # 248 | # def __eq__(self, other): 249 | # """Comparison method, 3.x style""" 250 | # if self.__cmp__(other) == 0: 251 | # return True 252 | # else: 253 | # return False 254 | 255 | def get_value(self): 256 | """ 257 | Return the element represented by this heap entry. 258 | 259 | @return The element represented by this heap entry. 260 | """ 261 | return self.m_elem 262 | 263 | def set_value(self, value): 264 | """ 265 | Set the element associated with this heap entry. 266 | 267 | @param value The element to associate with this heap entry. 268 | """ 269 | self.m_elem = value 270 | 271 | def get_priority(self): 272 | """ 273 | Return the priority of this element. 274 | 275 | @return The priority of this element. 276 | """ 277 | return self.m_priority 278 | 279 | def _entry(self, elem, priority): 280 | """ 281 | Construct a new Entry that holds the given element with the indicated priority. 282 | 283 | @param elem The element stored in this node. 284 | @param priority The priority of this element. 285 | """ 286 | self.m_next = self.m_prev = self 287 | self.m_elem = elem 288 | self.m_priority = priority 289 | 290 | 291 | class Fibonacci_heap(object): 292 | 293 | """ 294 | A class representing a Fibonacci heap. 295 | 296 | @author Keith Schwarz (htiek@cs.stanford.edu) 297 | """ 298 | 299 | def __init__(self): 300 | """Initialize the fibonacci heap.""" 301 | # Pointer to the minimum element in the heap. 302 | self.m_min = None 303 | 304 | # Cached size of the heap, so we don't have to recompute this explicitly. 305 | self.m_size = 0 306 | 307 | def enqueue(self, value, priority): 308 | """ 309 | Insert the specified element into the Fibonacci heap with the specified priority. 310 | 311 | Its priority must be a valid double, so you cannot set the priority to NaN. 312 | 313 | @param value The value to insert. 314 | @param priority Its priority, which must be valid. 315 | @return An Entry representing that element in the tree. 316 | """ 317 | self._check_priority(priority) 318 | 319 | # Create the entry object, which is a circularly-linked list of length 320 | # one. 321 | result = Entry(value, priority) 322 | 323 | # Merge this singleton list with the tree list. 324 | self.m_min = merge_lists(self.m_min, result) 325 | 326 | # Increase the size of the heap; we just added something. 327 | self.m_size += 1 328 | 329 | # Return the reference to the new element. 330 | return result 331 | 332 | def min(self): 333 | """ 334 | Return an Entry object corresponding to the minimum element of the Fibonacci heap. 335 | 336 | Raise an IndexError if the heap is empty. 337 | 338 | @return The smallest element of the heap. 339 | @raises IndexError If the heap is empty. 340 | """ 341 | if not bool(self): 342 | raise IndexError("Heap is empty.") 343 | return self.m_min 344 | 345 | def __bool__(self): 346 | """ 347 | Return whether the heap is nonempty. 348 | 349 | @return Whether the heap is nonempty. 350 | """ 351 | return self.m_min is not None 352 | 353 | __nonzero__ = __bool__ 354 | 355 | def __len__(self): 356 | """ 357 | Return the number of elements in the heap. 358 | 359 | @return The number of elements in the heap. 
360 | """ 361 | return self.m_size 362 | 363 | def dequeue_min(self): 364 | # pylint: disable=too-many-branches 365 | """ 366 | Dequeue and return the minimum element of the Fibonacci heap. 367 | 368 | If the heap is empty, this throws an IndexError. 369 | 370 | @return The smallest element of the Fibonacci heap. 371 | @raises IndexError if the heap is empty. 372 | """ 373 | # Check for whether we're empty. 374 | if not bool(self): 375 | raise IndexError("Heap is empty.") 376 | 377 | # Otherwise, we're about to lose an element, so decrement the number of 378 | # entries in this heap. 379 | self.m_size -= 1 380 | 381 | # Grab the minimum element so we know what to return. 382 | min_elem = self.m_min 383 | 384 | # Now, we need to get rid of this element from the list of roots. There 385 | # are two cases to consider. First, if this is the only element in the 386 | # list of roots, we set the list of roots to be None by clearing m_min. 387 | # Otherwise, if it's not None, then we write the elements next to the 388 | # min element around the min element to remove it, then arbitrarily 389 | # reassign the min. 390 | if self.m_min.m_next is self.m_min: 391 | # Case one 392 | self.m_min = None 393 | else: 394 | # Case two 395 | self.m_min.m_prev.m_next = self.m_min.m_next 396 | self.m_min.m_next.m_prev = self.m_min.m_prev 397 | # Arbitrary element of the root list. 398 | self.m_min = self.m_min.m_next 399 | 400 | # Next, clear the parent fields of all of the min element's children, 401 | # since they're about to become roots. Because the elements are 402 | # stored in a circular list, the traversal is a bit complex. 403 | if min_elem.m_child is not None: 404 | # Keep track of the first visited node. 405 | curr = min_elem.m_child 406 | while True: 407 | curr.m_parent = None 408 | 409 | # Walk to the next node, then stop if this is the node we 410 | # started at. 411 | curr = curr.m_next 412 | if curr is min_elem.m_child: 413 | # This was a do-while (curr != minElem.mChild); 414 | break 415 | 416 | # Next, splice the children of the root node into the topmost list, 417 | # then set self.m_min to point somewhere in that list. 418 | self.m_min = merge_lists(self.m_min, min_elem.m_child) 419 | 420 | # If there are no entries left, we're done. 421 | if self.m_min is None: 422 | return min_elem 423 | 424 | # Next, we need to coalesce all of the roots so that there is only one 425 | # tree of each degree. To track trees of each size, we allocate an 426 | # ArrayList where the entry at position i is either None or the 427 | # unique tree of degree i. 428 | tree_table = collections.deque() 429 | 430 | # We need to traverse the entire list, but since we're going to be 431 | # messing around with it we have to be careful not to break our 432 | # traversal order mid-stream. One major challenge is how to detect 433 | # whether we're visiting the same node twice. To do this, we'll 434 | # spent a bit of overhead adding all of the nodes to a list, and 435 | # then will visit each element of this list in order. 436 | to_visit = collections.deque() 437 | 438 | # To add everything, we'll iterate across the elements until we 439 | # find the first element twice. We check this by looping while the 440 | # list is empty or while the current element isn't the first element 441 | # of that list. 
442 | 443 | # for (Entry curr = self.m_min; toVisit.isEmpty() || toVisit.get(0) != curr; curr = curr.m_next) 444 | curr = self.m_min 445 | while not to_visit or to_visit[0] is not curr: 446 | to_visit.append(curr) 447 | curr = curr.m_next 448 | 449 | # Traverse this list and perform the appropriate unioning steps. 450 | for curr in to_visit: 451 | # Keep merging until a match arises. 452 | while True: 453 | # Ensure that the list is long enough to hold an element of this 454 | # degree. 455 | while curr.m_degree >= len(tree_table): 456 | tree_table.append(None) 457 | 458 | # If nothing's here, we can record that this tree has this size 459 | # and are done processing. 460 | if tree_table[curr.m_degree] is None: 461 | tree_table[curr.m_degree] = curr 462 | break 463 | 464 | # Otherwise, merge with what's there. 465 | other = tree_table[curr.m_degree] 466 | # Clear the slot 467 | tree_table[curr.m_degree] = None 468 | 469 | # Determine which of the two trees has the smaller root, storing 470 | # the two trees accordingly. 471 | # minimum = (other.m_priority < curr.m_priority)? other : curr 472 | if other.m_priority < curr.m_priority: 473 | minimum = other 474 | else: 475 | minimum = curr 476 | # maximum = (other.m_priority < curr.m_priority)? curr : other 477 | if other.m_priority < curr.m_priority: 478 | maximum = curr 479 | else: 480 | maximum = other 481 | 482 | # Break max out of the root list, then merge it into min's child 483 | # list. 484 | maximum.m_next.m_prev = maximum.m_prev 485 | maximum.m_prev.m_next = maximum.m_next 486 | 487 | # Make it a singleton so that we can merge it. 488 | maximum.m_next = maximum.m_prev = maximum 489 | minimum.m_child = merge_lists(minimum.m_child, maximum) 490 | 491 | # Reparent maximum appropriately. 492 | maximum.m_parent = minimum 493 | 494 | # Clear maximum's mark, since it can now lose another child. 495 | maximum.m_is_marked = False 496 | 497 | # Increase minimum's degree; it now has another child. 498 | minimum.m_degree += 1 499 | 500 | # Continue merging this tree. 501 | curr = minimum 502 | 503 | # Update the global min based on this node. Note that we compare 504 | # for <= instead of < here. That's because if we just did a 505 | # reparent operation that merged two different trees of equal 506 | # priority, we need to make sure that the min pointer points to 507 | # the root-level one. 508 | if curr.m_priority <= self.m_min.m_priority: 509 | self.m_min = curr 510 | return min_elem 511 | 512 | def decrease_key(self, entry, new_priority): 513 | """ 514 | Decrease the key of the specified element to the new priority. 515 | 516 | If the new priority is greater than the old priority, this function raises an ValueError. The new priority must 517 | be a finite double, so you cannot set the priority to be NaN, or +/- infinity. Doing so also raises an 518 | ValueError. 519 | 520 | It is assumed that the entry belongs in this heap. For efficiency reasons, this is not checked at runtime. 521 | 522 | @param entry The element whose priority should be decreased. 523 | @param new_priority The new priority to associate with this entry. 524 | @raises ValueError If the new priority exceeds the old 525 | priority, or if the argument is not a finite double. 526 | """ 527 | self._check_priority(new_priority) 528 | if new_priority > entry.m_priority: 529 | raise ValueError("New priority exceeds old.") 530 | 531 | # Forward this to a helper function. 
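        # delete() reuses the same helper, passing a priority of -infinity.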
532 | self.decrease_key_unchecked(entry, new_priority) 533 | 534 | def delete(self, entry): 535 | """ 536 | Delete this Entry from the Fibonacci heap that contains it. 537 | 538 | It is assumed that the entry belongs in this heap. For efficiency 539 | reasons, this is not checked at runtime. 540 | 541 | @param entry The entry to delete. 542 | """ 543 | # Use decreaseKey to drop the entry's key to -infinity. This will 544 | # guarantee that the node is cut and set to the global minimum. 545 | self.decrease_key_unchecked(entry, float("-inf")) 546 | 547 | # Call dequeue_min to remove it. 548 | self.dequeue_min() 549 | 550 | @staticmethod 551 | def _check_priority(priority): 552 | """ 553 | Utility function: given a user-specified priority, check whether it's a valid double and throw a ValueError otherwise. 554 | 555 | @param priority The user's specified priority. 556 | @raises ValueError if it is not valid. 557 | """ 558 | if math.isnan(priority) or math.isinf(priority): 559 | raise ValueError("Priority {} is invalid.".format(priority)) 560 | 561 | def decrease_key_unchecked(self, entry, priority): 562 | """ 563 | Decrease the key of a node in the tree without doing any checking to ensure that the new priority is valid. 564 | 565 | @param entry The node whose key should be decreased. 566 | @param priority The node's new priority. 567 | """ 568 | # First, change the node's priority. 569 | entry.m_priority = priority 570 | 571 | # If the node no longer has a higher priority than its parent, cut it. 572 | # Note that this also means that if we try to run a delete operation 573 | # that decreases the key to -infinity, it's guaranteed to cut the node 574 | # from its parent. 575 | if entry.m_parent is not None and entry.m_priority <= entry.m_parent.m_priority: 576 | self.cut_node(entry) 577 | 578 | # If our new value is the new min, mark it as such. Note that if we 579 | # ended up decreasing the key in a way that ties the current minimum 580 | # priority, this will change the min accordingly. 581 | if entry.m_priority <= self.m_min.m_priority: 582 | self.m_min = entry 583 | 584 | def cut_node(self, entry): 585 | """ 586 | Cut a node from its parent. 587 | 588 | If the parent was already marked, recursively cuts that node from its parent as well. 589 | 590 | @param entry The node to cut from its parent. 591 | """ 592 | # Begin by clearing the node's mark, since we just cut it. 593 | entry.m_is_marked = False 594 | 595 | # Base case: If the node has no parent, we're done. 596 | if entry.m_parent is None: 597 | return 598 | 599 | # Rewire the node's siblings around it, if it has any siblings. 600 | if entry.m_next is not entry: 601 | # Has siblings 602 | entry.m_next.m_prev = entry.m_prev 603 | entry.m_prev.m_next = entry.m_next 604 | 605 | # If the node is the one identified by its parent as its child, 606 | # we need to rewrite that pointer to point to some arbitrary other 607 | # child. 608 | if entry.m_parent.m_child is entry: 609 | if entry.m_next is not entry: 610 | # If there are any other children, pick one of them arbitrarily. 611 | entry.m_parent.m_child = entry.m_next 612 | else: 613 | # Otherwise, there aren't any children left and we should clear the 614 | # pointer and drop the node's degree. 615 | entry.m_parent.m_child = None 616 | 617 | # Decrease the degree of the parent, since it just lost a child. 618 | entry.m_parent.m_degree -= 1 619 | 620 | # Splice this tree into the root list by converting it to a singleton 621 | # and invoking the merge subroutine. 
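        # merge_lists below also repoints m_min at this entry if it now holds
        # the smallest priority in the heap.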
622 | entry.m_prev = entry.m_next = entry 623 | self.m_min = merge_lists(self.m_min, entry) 624 | 625 | # Mark the parent and recursively cut it if it's already been 626 | # marked. 627 | if entry.m_parent.m_is_marked: 628 | self.cut_node(entry.m_parent) 629 | else: 630 | entry.m_parent.m_is_marked = True 631 | 632 | # Clear the relocated node's parent; it's now a root. 633 | entry.m_parent = None 634 | --------------------------------------------------------------------------------
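blue_noise_particles.py drives this heap with negated weights: the heaviest sample has the smallest priority, and because `decrease_key` can only lower a priority, `eliminate_one` raises one by deleting the entry and re-enqueueing it. Below is a minimal sketch of that usage pattern, runnable on its own since fibonacci_heap_mod depends only on the standard library; the element names and weights are invented for illustration.

```python
import fibonacci_heap_mod

# Priorities are negated weights, as in SampleEliminator, so the heaviest
# element is the heap's minimum.
weights = {"a": 3.0, "b": 5.0, "c": 1.0}
heap = fibonacci_heap_mod.Fibonacci_heap()
entries = {name: heap.enqueue(name, -w) for name, w in weights.items()}

assert heap.min().get_value() == "b"  # largest weight, smallest priority

# decrease_key can only lower a priority.  To raise one (a weight just went
# down, as in eliminate_one), read the old priority, delete the entry, and
# re-enqueue it with the new priority.
old_priority = entries["a"].get_priority()  # read before delete() changes it
heap.delete(entries["a"])
entries["a"] = heap.enqueue("a", old_priority + 1.5)  # weight 3.0 -> 1.5

while heap:
    entry = heap.dequeue_min()
    print(entry.get_value(), -entry.get_priority())
# Prints b 5.0, then a 1.5, then c 1.0
```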