├── .gitignore ├── .project ├── .pydevproject ├── Breadth-First-Search ├── .gitignore ├── BFS.dis ├── BFS.dis.py ├── ReadMe.md ├── graph_gen.py ├── run.py └── test_run_10_100_200.txt ├── ConcurrentMutex ├── ReadMe.md ├── auxiliary.py ├── bakery.py ├── fast.py └── main.py ├── DistributedMutex ├── .gitignore ├── RAtoken.dis ├── ReadMe.md ├── SKtoken.dis ├── lamport.dis ├── main.py └── mutex2n.dis ├── Maximal-Independent-Set ├── .gitignore ├── InputGraph.py ├── MIS-sequential.png ├── MIS.dis ├── MIS.dis.py ├── ReadMe.md ├── graph-1 ├── graph-1.png ├── graph-2 ├── graph-2a.png ├── graph-2b.png ├── graph-2c.png ├── graph-2d.png └── run.py ├── Minimum-Spanning-Tree ├── .gitignore ├── 1000edge-100node-graph ├── 1000edge-100node-graph-output.txt ├── Kruskal.py ├── MST.dis ├── MST.dis.py ├── ReadMe.md ├── graph-1 ├── graph-2 ├── graph-3 ├── graph-3-output.txt ├── graph_gen.py ├── img │ ├── 1000edge-100node-graph.png │ ├── MST-figure.png │ ├── MST_algorithm.png │ ├── graph-3-segment1sol.png │ ├── graph-3-segment2sol.png │ ├── graph-3-sketch.png │ ├── graph-3-sol.png │ ├── test_case_1.png │ └── test_case_2.png ├── old │ ├── .gitignore │ ├── mst_attempt_1.dis │ ├── mst_attempt_2.dis │ └── sequential_messaging_test.dis ├── papers │ ├── GHS_enhanced.pdf │ └── GHS_original.pdf ├── run.py └── tools.py ├── ReadMe.md ├── ShortestPath ├── .gitignore ├── InputGraph.py ├── ReadMe.md ├── ShortestPath.dis ├── ShortestPath.dis.py ├── graph-1 ├── graph-2 └── run.py ├── distalgo ├── PKG-INFO ├── README ├── __init__.py ├── compiler │ ├── __init__.py │ ├── __main__.py │ ├── await.py │ ├── base.py │ ├── codegen.py │ ├── compiler.py │ ├── consts.py │ ├── dist.py │ ├── distast.py │ ├── event.py │ ├── exceptions.py │ ├── info.py │ ├── label.py │ ├── mesgcomp.py │ └── send.py └── runtime │ ├── __init__.py │ ├── __main__.py │ ├── endpoint.py │ ├── event.py │ ├── proc.py │ ├── sim.py │ ├── tcp.py │ ├── udp.py │ └── util.py ├── draw.py ├── graph_gen.py ├── networkx ├── __init__.py ├── algorithms │ 
├── __init__.py │ ├── approximation │ │ ├── __init__.py │ │ ├── clique.py │ │ ├── dominating_set.py │ │ ├── independent_set.py │ │ ├── matching.py │ │ ├── ramsey.py │ │ ├── tests │ │ │ ├── test_clique.py │ │ │ ├── test_dominating_set.py │ │ │ ├── test_independent_set.py │ │ │ ├── test_matching.py │ │ │ ├── test_ramsey.py │ │ │ └── test_vertex_cover.py │ │ └── vertex_cover.py │ ├── assortativity │ │ ├── __init__.py │ │ ├── connectivity.py │ │ ├── correlation.py │ │ ├── mixing.py │ │ ├── neighbor_degree.py │ │ ├── pairs.py │ │ └── tests │ │ │ ├── base_test.py │ │ │ ├── test_connectivity.py │ │ │ ├── test_correlation.py │ │ │ ├── test_mixing.py │ │ │ ├── test_neighbor_degree.py │ │ │ └── test_pairs.py │ ├── bipartite │ │ ├── __init__.py │ │ ├── basic.py │ │ ├── centrality.py │ │ ├── cluster.py │ │ ├── projection.py │ │ ├── redundancy.py │ │ ├── spectral.py │ │ └── tests │ │ │ ├── test_basic.py │ │ │ ├── test_centrality.py │ │ │ ├── test_cluster.py │ │ │ ├── test_project.py │ │ │ └── test_spectral_bipartivity.py │ ├── block.py │ ├── boundary.py │ ├── centrality │ │ ├── __init__.py │ │ ├── betweenness.py │ │ ├── betweenness_subset.py │ │ ├── closeness.py │ │ ├── communicability_alg.py │ │ ├── current_flow_betweenness.py │ │ ├── current_flow_betweenness_subset.py │ │ ├── current_flow_closeness.py │ │ ├── degree_alg.py │ │ ├── eigenvector.py │ │ ├── flow_matrix.py │ │ ├── load.py │ │ └── tests │ │ │ ├── test_betweenness_centrality.py │ │ │ ├── test_betweenness_centrality_subset.py │ │ │ ├── test_closeness_centrality.py │ │ │ ├── test_communicability.py │ │ │ ├── test_current_flow_betweenness_centrality.py │ │ │ ├── test_current_flow_betweenness_centrality_subset.py │ │ │ ├── test_current_flow_closeness.py │ │ │ ├── test_degree_centrality.py │ │ │ ├── test_eigenvector_centrality.py │ │ │ └── test_load_centrality.py │ ├── chordal │ │ ├── __init__.py │ │ ├── chordal_alg.py │ │ └── tests │ │ │ └── test_chordal.py │ ├── clique.py │ ├── cluster.py │ ├── community │ │ ├── 
__init__.py │ │ ├── kclique.py │ │ └── tests │ │ │ └── test_kclique.py │ ├── components │ │ ├── __init__.py │ │ ├── attracting.py │ │ ├── biconnected.py │ │ ├── connected.py │ │ ├── strongly_connected.py │ │ ├── tests │ │ │ ├── test_attracting.py │ │ │ ├── test_biconnected.py │ │ │ ├── test_connected.py │ │ │ ├── test_strongly_connected.py │ │ │ └── test_weakly_connected.py │ │ └── weakly_connected.py │ ├── core.py │ ├── cycles.py │ ├── dag.py │ ├── distance_measures.py │ ├── distance_regular.py │ ├── euler.py │ ├── flow │ │ ├── __init__.py │ │ ├── maxflow.py │ │ ├── mincost.py │ │ └── tests │ │ │ ├── test_maxflow.py │ │ │ ├── test_maxflow_large_graph.py │ │ │ └── test_mincost.py │ ├── graphical.py │ ├── hierarchy.py │ ├── isolate.py │ ├── isomorphism │ │ ├── __init__.py │ │ ├── isomorph.py │ │ ├── isomorphvf2.py │ │ ├── matchhelpers.py │ │ ├── tests │ │ │ ├── iso_r01_s80.A99 │ │ │ ├── iso_r01_s80.B99 │ │ │ ├── si2_b06_m200.A99 │ │ │ ├── si2_b06_m200.B99 │ │ │ ├── test_isomorphism.py │ │ │ ├── test_isomorphvf2.py │ │ │ └── test_vf2userfunc.py │ │ └── vf2userfunc.py │ ├── link_analysis │ │ ├── __init__.py │ │ ├── hits_alg.py │ │ ├── pagerank_alg.py │ │ └── tests │ │ │ ├── test_hits.py │ │ │ └── test_pagerank.py │ ├── matching.py │ ├── mis.py │ ├── mst.py │ ├── operators │ │ ├── __init__.py │ │ ├── all.py │ │ ├── binary.py │ │ ├── product.py │ │ ├── tests │ │ │ ├── test_all.py │ │ │ ├── test_binary.py │ │ │ ├── test_product.py │ │ │ └── test_unary.py │ │ └── unary.py │ ├── richclub.py │ ├── shortest_paths │ │ ├── __init__.py │ │ ├── astar.py │ │ ├── dense.py │ │ ├── generic.py │ │ ├── tests │ │ │ ├── test_astar.py │ │ │ ├── test_dense.py │ │ │ ├── test_dense_numpy.py │ │ │ ├── test_generic.py │ │ │ ├── test_unweighted.py │ │ │ └── test_weighted.py │ │ ├── unweighted.py │ │ └── weighted.py │ ├── simple_paths.py │ ├── smetric.py │ ├── swap.py │ ├── tests │ │ ├── test_block.py │ │ ├── test_boundary.py │ │ ├── test_clique.py │ │ ├── test_cluster.py │ │ ├── test_core.py │ 
│ ├── test_cycles.py │ │ ├── test_dag.py │ │ ├── test_distance_measures.py │ │ ├── test_distance_regular.py │ │ ├── test_euler.py │ │ ├── test_graphical.py │ │ ├── test_hierarchy.py │ │ ├── test_matching.py │ │ ├── test_mis.py │ │ ├── test_mst.py │ │ ├── test_richclub.py │ │ ├── test_simple_paths.py │ │ ├── test_smetric.py │ │ ├── test_swap.py │ │ └── test_vitality.py │ ├── traversal │ │ ├── __init__.py │ │ ├── breadth_first_search.py │ │ ├── depth_first_search.py │ │ └── tests │ │ │ ├── test_bfs.py │ │ │ └── test_dfs.py │ └── vitality.py ├── classes │ ├── __init__.py │ ├── digraph.py │ ├── function.py │ ├── graph.py │ ├── multidigraph.py │ ├── multigraph.py │ └── tests │ │ ├── historical_tests.py │ │ ├── test_digraph.py │ │ ├── test_digraph_historical.py │ │ ├── test_function.py │ │ ├── test_graph.py │ │ ├── test_graph_historical.py │ │ ├── test_multidigraph.py │ │ └── test_multigraph.py ├── convert.py ├── drawing │ ├── __init__.py │ ├── layout.py │ ├── nx_agraph.py │ ├── nx_pydot.py │ ├── nx_pylab.py │ └── tests │ │ ├── test_agraph.py │ │ ├── test_layout.py │ │ ├── test_pydot.py │ │ └── test_pylab.py ├── exception.py ├── external │ ├── __init__.py │ └── decorator │ │ ├── __init__.py │ │ ├── _decorator.py │ │ └── _decorator3.py ├── generators │ ├── __init__.py │ ├── atlas.py │ ├── bipartite.py │ ├── classic.py │ ├── degree_seq.py │ ├── directed.py │ ├── ego.py │ ├── geometric.py │ ├── hybrid.py │ ├── intersection.py │ ├── line.py │ ├── random_clustered.py │ ├── random_graphs.py │ ├── small.py │ ├── social.py │ ├── stochastic.py │ ├── tests │ │ ├── test_atlas.py │ │ ├── test_bipartite.py │ │ ├── test_classic.py │ │ ├── test_degree_seq.py │ │ ├── test_directed.py │ │ ├── test_ego.py │ │ ├── test_geometric.py │ │ ├── test_hybrid.py │ │ ├── test_intersection.py │ │ ├── test_line.py │ │ ├── test_random_clustered.py │ │ ├── test_random_graphs.py │ │ ├── test_small.py │ │ ├── test_stochastic.py │ │ └── test_threshold.py │ └── threshold.py ├── linalg │ ├── __init__.py │ 
├── attrmatrix.py │ ├── graphmatrix.py │ ├── laplacianmatrix.py │ ├── spectrum.py │ └── tests │ │ ├── test_graphmatrix.py │ │ ├── test_laplaican.py │ │ └── test_spectrum.py ├── readwrite │ ├── __init__.py │ ├── adjlist.py │ ├── edgelist.py │ ├── gexf.py │ ├── gml.py │ ├── gpickle.py │ ├── graphml.py │ ├── json_graph │ │ ├── __init__.py │ │ ├── adjacency.py │ │ ├── node_link.py │ │ ├── serialize.py │ │ ├── tests │ │ │ ├── test_adjacency.py │ │ │ ├── test_node_link.py │ │ │ ├── test_serialize.py │ │ │ └── test_tree.py │ │ └── tree.py │ ├── leda.py │ ├── multiline_adjlist.py │ ├── nx_shp.py │ ├── nx_yaml.py │ ├── p2g.py │ ├── pajek.py │ ├── sparsegraph6.py │ └── tests │ │ ├── test_adjlist.py │ │ ├── test_edgelist.py │ │ ├── test_gexf.py │ │ ├── test_gml.py │ │ ├── test_gpickle.py │ │ ├── test_graphml.py │ │ ├── test_leda.py │ │ ├── test_p2g.py │ │ ├── test_pajek.py │ │ ├── test_shp.py │ │ ├── test_sparsegraph6.py │ │ └── test_yaml.py ├── relabel.py ├── release.py ├── testing │ ├── __init__.py │ ├── tests │ │ └── test_utils.py │ └── utils.py ├── tests │ ├── __init__.py │ ├── benchmark.py │ ├── test.py │ ├── test_convert.py │ ├── test_convert_numpy.py │ ├── test_convert_scipy.py │ ├── test_exceptions.py │ └── test_relabel.py ├── utils │ ├── __init__.py │ ├── decorators.py │ ├── misc.py │ ├── random_sequence.py │ ├── rcm.py │ ├── tests │ │ ├── test_decorators.py │ │ ├── test_misc.py │ │ ├── test_random_sequence.py │ │ └── test_rcm.py │ └── union_find.py └── version.py ├── nx_test.py └── pympler ├── __init__.py ├── asizeof.py ├── charts.py ├── classtracker.py ├── classtracker_stats.py ├── garbagegraph.py ├── metadata.py ├── mprofile.py ├── muppy.py ├── process.py ├── refbrowser.py ├── refgraph.py ├── summary.py ├── tracker.py └── web.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[co] 2 | 3 | # Packages 4 | *.egg 5 | *.egg-info 6 | dist 7 | build 8 | eggs 9 | parts 10 | bin 11 | var 12 | sdist 13 | develop-eggs 14 
| .installed.cfg 15 | 16 | # Installer logs 17 | pip-log.txt 18 | 19 | # Unit test / coverage reports 20 | .coverage 21 | .tox 22 | 23 | #Translations 24 | *.mo 25 | 26 | #Mr Developer 27 | .mr.developer.cfg 28 | 29 | #Text editor backup files 30 | *~ 31 | 32 | random_graph 33 | 34 | ignore 35 | -------------------------------------------------------------------------------- /.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | DistAlgo 4 | 5 | 6 | 7 | 8 | 9 | org.python.pydev.PyDevBuilder 10 | 11 | 12 | 13 | 14 | 15 | org.python.pydev.pythonNature 16 | 17 | 18 | -------------------------------------------------------------------------------- /.pydevproject: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | /DistAlgo 7 | 8 | python 3.0 9 | Default 10 | 11 | -------------------------------------------------------------------------------- /Breadth-First-Search/.gitignore: -------------------------------------------------------------------------------- 1 | # DistAlgo compiled .py files & runtime log files 2 | BFS.py 3 | BFS.log 4 | BFS_full.py 5 | -------------------------------------------------------------------------------- /Breadth-First-Search/BFS.dis: -------------------------------------------------------------------------------- 1 | BFS.dis.py -------------------------------------------------------------------------------- /Breadth-First-Search/graph_gen.py: -------------------------------------------------------------------------------- 1 | ../graph_gen.py -------------------------------------------------------------------------------- /Breadth-First-Search/run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | sys.path.append("..") # if DistAlgo is not installed, use the one in parent directory 5 | 6 | from distalgo.runtime import * 7 | 8 | sys.argv = [sys.argv[0], "BFS.dis"] 
+ sys.argv[1:] 9 | 10 | libmain() 11 | -------------------------------------------------------------------------------- /ConcurrentMutex/ReadMe.md: -------------------------------------------------------------------------------- 1 | Concurrent Mutex algorithms 2 | --------------------------- 3 | This is an implementation of Lamport's fast mutual exclusion and bakery 4 | algorithms for atomizing access to crucial resources. The purpose of 5 | both algorithms are to ensure that a segment of code known as the 6 | **critical section** does _not_ get executed concurrently. 7 | 8 | The module `fast.py` implements Lamport's fast mutex algorithm and 9 | the module `bakery.py` implements Lamport's bakery algorithm. 10 | 11 | The module `auxiliary.py` defines three important functions: `random_distribution`, `await` and `default_task`. 12 | 13 | `random_distribution` is a function that takes a number of threads, 14 | and a total number of requests amd returns a list L where L[i] 15 | epresents a number of requests (randomly assigned) to thread i. 16 | 17 | `await(func)` takes a function as argument, busy waits until 18 | the return value of `func()` becomes True. 19 | 20 | `default_task()` defines the default task to be executed while 21 | inside the critical section. Currently it is a CPU hog that 22 | computes prime numbers up to an "nth" value specified inside the 23 | module. The prime number calculator was lifted from Stack Overflow. 24 | 25 | `main.py` starts up n threads and m requests per thread, n and m being 26 | passed as command line arguments. It runs both tests & terminates. 27 | -------------------------------------------------------------------------------- /ConcurrentMutex/auxiliary.py: -------------------------------------------------------------------------------- 1 | 2 | import random 3 | 4 | def random_distribution(total_requests, num_threads): 5 | """ 6 | Return a list with a random distribution of 7 | requests per thread. 
For returned list L and 8 | thread index i, L[i] represents the number 9 | of requests (randomly assigned) to thread i. 10 | """ 11 | 12 | requests_per_thread = [0] * num_threads 13 | 14 | while total_requests > 0: 15 | for i in range(num_threads): 16 | if random.choice( (True, False) ): 17 | requests_per_thread[i] += 1 18 | total_requests -= 1 19 | return requests_per_thread 20 | 21 | def await(func): 22 | while not func(): 23 | pass 24 | 25 | def default_task(): 26 | """ Compute primes up to a specified value """ 27 | 28 | def primes(n): 29 | # http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188 30 | """ Returns a list of primes < n """ 31 | sieve = [True] * n 32 | for i in range(3,int(n**0.5)+1,2): 33 | if sieve[i]: 34 | sieve[i*i::2*i]=[False]* int((n-i*i-1)/(2*i)+1) 35 | return [2] + [i for i in range(3,n,2) if sieve[i]] 36 | 37 | # change this to a lower value to speed things up: 38 | primes(185 * 1000) 39 | -------------------------------------------------------------------------------- /ConcurrentMutex/bakery.py: -------------------------------------------------------------------------------- 1 | 2 | import threading 3 | from auxiliary import * 4 | 5 | class Bakery(threading.Thread): 6 | """ Lamport's bakery algorithm """ 7 | 8 | # These are shared (static class) variables: 9 | 10 | threads = None 11 | thread_count = 0 12 | req_count = 0 13 | 14 | go = False # used to delay run() until all threads have started. 15 | 16 | choosing = [] 17 | num = [] 18 | 19 | x, y = 0, 0 20 | 21 | def __init__(self, i): 22 | super().__init__() 23 | self.i = i 24 | #print('Constructed Process', i, 'Thread object (%s).' 
% self.getName()) 25 | 26 | def cs(self, task = default_task): 27 | 28 | print('Process', self.i, 'requesting CS') 29 | 30 | Bakery.choosing[self.i] = 1 31 | 32 | Bakery.num[self.i] = 1 + max(Bakery.num) 33 | 34 | Bakery.choosing[self.i] = 0 35 | 36 | for j in range(1, Bakery.thread_count + 1): 37 | # await choosing[j] == 0 38 | await(lambda: Bakery.choosing[j] == 0) 39 | 40 | # await num[j] == 0 or (num[j],j) >= (num[i],i) 41 | await( lambda: Bakery.num[j] == 0 or (Bakery.num[j], j) >= (Bakery.num[self.i], self.i) ) 42 | 43 | print('Process', self.i, 'entering CS') 44 | 45 | task() 46 | 47 | print('Process', self.i, 'exiting CS') 48 | 49 | Bakery.num[self.i] = 0 50 | 51 | def run(self): 52 | # wait until all threads have started: 53 | while not Bakery.go: 54 | pass 55 | 56 | # call cs() req_count times: 57 | for _ in range(Bakery.req_count[self.i]): 58 | self.cs() 59 | 60 | 61 | def setup(threads, req_count): 62 | Bakery.threads = threads 63 | Bakery.thread_count = len(threads) 64 | Bakery.req_count = random_distribution(req_count, Bakery.thread_count) 65 | 66 | Bakery.choosing = [0] * (Bakery.thread_count + 1) 67 | Bakery.num = [0] * (Bakery.thread_count + 1) 68 | 69 | def start(): 70 | for thread in Bakery.threads: 71 | thread.start() 72 | Bakery.go = True 73 | -------------------------------------------------------------------------------- /ConcurrentMutex/fast.py: -------------------------------------------------------------------------------- 1 | 2 | import threading 3 | from auxiliary import * 4 | 5 | class Fast(threading.Thread): 6 | """ Lamport's fast mutual exclusion algorithm """ 7 | 8 | # These are shared (static class) variables: 9 | 10 | threads = None 11 | thread_count = 0 12 | req_count = 0 13 | 14 | go = False # used to delay run() until all threads have started. 15 | 16 | choosing = [] 17 | x, y = 0, 0 18 | 19 | def __init__(self, i): 20 | super().__init__() 21 | self.i = i 22 | #print('Constructed Process', i, 'Thread object (%s).' 
% self.getName()) 23 | 24 | def cs(self, task = default_task): 25 | 26 | print('Process', self.i, 'requesting CS') 27 | 28 | def can_i_enter_cs(): 29 | Fast.choosing[self.i] = 1 30 | Fast.x = self.i 31 | 32 | if Fast.y != 0: 33 | Fast.choosing[self.i] = 0 34 | 35 | # await y == 0: 36 | await(lambda: Fast.y == 0) 37 | 38 | return False 39 | 40 | Fast.y = self.i 41 | 42 | if Fast.x != self.i: 43 | Fast.choosing[self.i] = 0 44 | 45 | # for j:=1..thread_count+1: await b[j] == 0: 46 | [await(lambda: Fast.choosing[j] == 0) for j in range(1, Fast.thread_count + 1)] 47 | 48 | if Fast.y != self.i: 49 | # await y == 0: 50 | await(lambda: Fast.y == 0) 51 | return False 52 | 53 | return True 54 | 55 | while not can_i_enter_cs(): 56 | pass 57 | 58 | print('Process', self.i, 'entering CS') 59 | 60 | task() 61 | 62 | print('Process', self.i, 'exiting CS') 63 | 64 | Fast.y = 0 65 | Fast.choosing[self.i] = 0 66 | 67 | def run(self): 68 | # wait until all threads have started: 69 | while not Fast.go: 70 | pass 71 | 72 | # call cs() req_count times: 73 | for _ in range(Fast.req_count[self.i]): 74 | self.cs() 75 | 76 | 77 | def setup(threads, req_count): 78 | Fast.threads = threads 79 | Fast.thread_count = len(threads) 80 | Fast.req_count = random_distribution(req_count, Fast.thread_count) 81 | 82 | Fast.choosing = [0] * (Fast.thread_count + 1) 83 | 84 | def start(): 85 | for thread in Fast.threads: 86 | thread.start() 87 | Fast.go = True 88 | -------------------------------------------------------------------------------- /ConcurrentMutex/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys, fast, bakery 4 | 5 | if __name__ == "__main__": 6 | if len(sys.argv) == 3: 7 | num_of_threads, num_of_reqs = int(sys.argv[1]), int(sys.argv[2]) 8 | else: 9 | num_of_threads, num_of_reqs = 3, 15 # defaults 10 | 11 | print("\nRunning Lamport's fast mutual exclusion algorithm") 12 | threads = [fast.Fast(i) for i in 
range(num_of_threads)] 13 | fast.setup(threads, num_of_reqs) 14 | fast.start() 15 | 16 | # wait for all threads to die.. 17 | for thread in threads: 18 | thread.join() 19 | 20 | print("\n\nRunning Lamport's bakery algorithm") 21 | threads = [bakery.Bakery(i) for i in range(num_of_threads)] 22 | bakery.setup(threads, num_of_reqs ) 23 | bakery.start() 24 | 25 | # wait for all threads to die.. 26 | for thread in threads: 27 | thread.join() 28 | 29 | print() 30 | -------------------------------------------------------------------------------- /DistributedMutex/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | old 3 | lamport.py 4 | mutex2n.py 5 | RAtoken.py 6 | SKtoken.py 7 | test* 8 | *.log 9 | -------------------------------------------------------------------------------- /DistributedMutex/ReadMe.md: -------------------------------------------------------------------------------- 1 | Distributed Mutex (DMX) algorithms 2 | ---------------------------------- 3 | This is an implementation of two token-based DMX algorithms in DistAlgo: Ricart-Agrawala's token-based algorithm and Suzuki-Kasami's token-based algorithm. 4 | 5 | * `RAtoken.dis` contains Ricart-Agrawala's algorithm. For this algorithm, I followed the pseudocode which can be found in the top-level comment in `RAtoken.dis`. 6 | 7 | * `SKtoken.dis` contains Suzuki-Kasami's algorithm. For this algorithm, I followed [a description of the algorithm by Mikhail Nesterenko](http://vega.cs.kent.edu/~mikhail/classes/aos.f01/l17tokenDMX.pdf) of Kent State. 8 | 9 | Both DistAlgo programs accept a single integer command-line argument specifying the number of processes to start. The default value for this for both is `5`. 10 | 11 | You can ignore `lamport.dis`, `mutex2n.dis` and `main.dis`. 
12 | -------------------------------------------------------------------------------- /DistributedMutex/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | ''' 4 | This was a file used during development. You can ignore this file. 5 | 6 | To run the DistAlgo programs, just use: 7 | 8 | python3 -m distalgo.runtime RAtoken.dis 9 | 10 | python3 -m distalgo.runtime SKtoken.dis 11 | 12 | Disclaimer: For some strange reaosn this file doesn't work properly when run from command line. I used 13 | Eclipse during development, and it worked fine in it. Might have something to do with PYTHONPATH... 14 | ''' 15 | 16 | import sys 17 | from distalgo.runtime import * 18 | 19 | RA, SK = 'RA', 'SK' 20 | 21 | prog = SK 22 | 23 | if len(sys.argv) > 1: 24 | prog = sys.argv[1] 25 | 26 | if prog == RA: 27 | sys.argv = [ sys.argv[0], "RAtoken.dis" ] 28 | libmain() 29 | 30 | elif prog == SK: 31 | sys.argv = [ sys.argv[0], "SKtoken.dis" ] 32 | libmain() 33 | 34 | else: 35 | print("Command-line argument must be 'RA' or 'SK'. (Not %s)" % sys.argv[1]) 36 | sys.exit(1) 37 | -------------------------------------------------------------------------------- /Maximal-Independent-Set/.gitignore: -------------------------------------------------------------------------------- 1 | # DistAlgo compiled .py files & runtime log files 2 | MIS.py 3 | MIS.log 4 | -------------------------------------------------------------------------------- /Maximal-Independent-Set/InputGraph.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | sys.path.append('..') 4 | import networkx as nx 5 | 6 | def get_graph(): 7 | "Process command-line arguments and build the graph." 
8 | 9 | sys.argv = sys.argv[1:] 10 | 11 | def construct_graph(file): 12 | def edge(n1, n2, w): 13 | return (n1, n2, {'weight':w}) 14 | 15 | edge_list = list() 16 | 17 | with open(file, 'r') as f: 18 | edge_list = list( edge(ed.split()[0], ed.split()[1], int(ed.split()[2])) 19 | for ed in 20 | (e.strip() for e in f.readlines() if e.strip() != "") 21 | if len(ed.split()) == 3 ) 22 | 23 | G = nx.Graph() 24 | G.add_edges_from(edge_list) 25 | return G 26 | 27 | import argparse 28 | parser = argparse.ArgumentParser(description='Finds the vertices of the Maximal Independent Set (MST) of a given graph.') 29 | parser.add_argument('graph', nargs='?', type=construct_graph, default='graph-1', help= 30 | 'File listing the edges of a graph line-by-line in the following style: "A B 2", where "A" and "B" are node names and "2" is the weight of the edge connecting them.') 31 | 32 | args = parser.parse_args() 33 | return args.graph 34 | -------------------------------------------------------------------------------- /Maximal-Independent-Set/MIS-sequential.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/MIS-sequential.png -------------------------------------------------------------------------------- /Maximal-Independent-Set/MIS.dis: -------------------------------------------------------------------------------- 1 | MIS.dis.py -------------------------------------------------------------------------------- /Maximal-Independent-Set/graph-1: -------------------------------------------------------------------------------- 1 | 2 | A F 2 3 | F G 7 4 | G H 15 5 | H J 13 6 | J I 9 7 | I C 18 8 | C B 17 9 | B A 3 10 | 11 | E F 1 12 | E G 6 13 | E H 5 14 | E I 10 15 | E D 11 16 | 17 | I H 12 18 | D I 4 19 | D C 8 20 | D B 16 21 | -------------------------------------------------------------------------------- 
/Maximal-Independent-Set/graph-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-1.png -------------------------------------------------------------------------------- /Maximal-Independent-Set/graph-2: -------------------------------------------------------------------------------- 1 | 2 | A F 2 3 | F G 7 4 | G H 15 5 | H J 13 6 | J I 9 7 | I C 18 8 | C B 17 9 | B A 3 10 | 11 | E F 1 12 | E G 6 13 | E H 5 14 | E I 10 15 | E D 11 16 | 17 | I H 12 18 | D I 4 19 | D C 8 20 | D B 16 21 | 22 | L M 20 23 | K L 21 24 | K M 22 25 | J K 23 26 | -------------------------------------------------------------------------------- /Maximal-Independent-Set/graph-2a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2a.png -------------------------------------------------------------------------------- /Maximal-Independent-Set/graph-2b.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2b.png -------------------------------------------------------------------------------- /Maximal-Independent-Set/graph-2c.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2c.png -------------------------------------------------------------------------------- /Maximal-Independent-Set/graph-2d.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2d.png -------------------------------------------------------------------------------- /Maximal-Independent-Set/run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | sys.path.append("..") # if DistAlgo is not installed, use the one in parent directory 5 | 6 | from distalgo.runtime import * 7 | 8 | sys.argv = [sys.argv[0], "MIS.dis"] + sys.argv[1:] 9 | 10 | libmain() 11 | -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/.gitignore: -------------------------------------------------------------------------------- 1 | # DistAlgo compiled .py files & runtime log files 2 | MST.py 3 | MST.log 4 | sol 5 | -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/MST.dis: -------------------------------------------------------------------------------- 1 | MST.dis.py -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/graph-1: -------------------------------------------------------------------------------- 1 | 2 | A F 2 3 | F G 7 4 | G H 15 5 | H J 13 6 | J I 9 7 | I C 18 8 | C B 17 9 | B A 3 10 | 11 | E F 1 12 | E G 6 13 | E H 5 14 | E I 10 15 | E D 11 16 | 17 | I H 12 18 | D I 4 19 | D C 8 20 | D B 16 21 | -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/graph-2: -------------------------------------------------------------------------------- 1 | 2 | A F 2 3 | F G 7 4 | G H 15 5 | H J 13 6 | J I 9 7 | I C 18 8 | C B 17 9 | B A 3 10 | 11 | E F 1 12 | E G 6 13 | E H 5 14 | E I 10 15 | E D 11 16 | 17 | I H 12 18 | D I 4 19 | D C 8 20 | D B 16 21 | 22 | L M 20 23 | K L 21 24 | K M 22 25 | J K 23 26 | 
-------------------------------------------------------------------------------- /Minimum-Spanning-Tree/graph-3: -------------------------------------------------------------------------------- 1 | 2 | A B 1 3 | B C 10 4 | C D 14 5 | D E 15 6 | E F 13 7 | F A 12 8 | E B 5 9 | 10 | A G 20 11 | 12 | G I 3 13 | I H 17 14 | H G 16 15 | 16 | I J 57 17 | 18 | J K 22 19 | K L 23 20 | L M 21 21 | M J 2 22 | 23 | M N 54 24 | 25 | N O 36 26 | O P 34 27 | P U 42 28 | U V 48 29 | V S 41 30 | S R 35 31 | R N 47 32 | 33 | S Q 49 34 | R Q 53 35 | N Q 33 36 | O Q 45 37 | P Q 43 38 | 39 | Q T 50 40 | P T 38 41 | U T 44 42 | V T 37 43 | S T 52 44 | -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/graph_gen.py: -------------------------------------------------------------------------------- 1 | ../graph_gen.py -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/1000edge-100node-graph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/1000edge-100node-graph.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/MST-figure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/MST-figure.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/MST_algorithm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/MST_algorithm.png 
-------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/graph-3-segment1sol.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-segment1sol.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/graph-3-segment2sol.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-segment2sol.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/graph-3-sketch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-sketch.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/graph-3-sol.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-sol.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/test_case_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/test_case_1.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/img/test_case_2.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/test_case_2.png -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/old/.gitignore: -------------------------------------------------------------------------------- 1 | # DistAlgo compiled .py files & runtime log files 2 | test.py 3 | test.log 4 | -------------------------------------------------------------------------------- /Minimum-Spanning-Tree/old/sequential_messaging_test.dis: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Test to see if DistAlgo sends messages sequentially regardless of message size (and not out of order) 4 | 5 | This is a necessary precondition for the GHS MST algorithm. 6 | 7 | Result: YES, it does! 8 | """ 9 | 10 | def l(n): 11 | n = 101 - n 12 | return [i for i in range(n)] 13 | 14 | class Spark(DistProcess): 15 | def setup(ps): 16 | ps = ps 17 | 18 | def main(): 19 | random_node = ps.pop() 20 | ps.update( {random_node} ) 21 | 22 | for i in range(1, 101): 23 | send( Msg( l(i) ), random_node ) 24 | 25 | class Node(DistProcess): 26 | def setup(): 27 | pass 28 | 29 | def OnMsg(m): 30 | output(len(m)) 31 | 32 | def main(): 33 | await(False) 34 | 35 | def main(): 36 | use_channel("tcp") 37 | 38 | # Setup the nodes 39 | # =============== 40 | node_ps = createprocs(Node, 10) 41 | 42 | for p in node_ps: 43 | setupprocs([p], []) 44 | 45 | # Setup up spark 46 | # =============== 47 | spark = createprocs(Spark, set(['Spark'])) 48 | spark_p = spark['Spark'] 49 | setupprocs([spark_p], [node_ps]) 50 | 51 | startprocs(node_ps) 52 | startprocs([spark_p]) 53 | 54 | # Wait for all processes to die... 
import sys
sys.path.append('..')  # fall back to the bundled networkx in the parent dir
import networkx as nx

def graph_source_target():
    """Process command-line arguments and build the graph.

    Returns:
        (graph, source, target): the parsed ``networkx.Graph`` plus the
        validated source and target node names.

    Exits with status 1 if the source or target node is absent from the graph.
    """

    # Drop the runtime wrapper's own argv[0] so argparse sees only our args.
    sys.argv = sys.argv[1:]

    def construct_graph(file):
        """Build an undirected weighted graph from an edge-list file.

        Each meaningful line has the form "A B 2" (two node names and an
        integer edge weight); blank or malformed lines are ignored.
        """
        edge_list = []
        with open(file, 'r') as f:
            for line in f:
                parts = line.split()
                if len(parts) == 3:
                    n1, n2, w = parts
                    edge_list.append((n1, n2, {'weight': int(w)}))

        G = nx.Graph()
        G.add_edges_from(edge_list)
        return G

    import argparse
    parser = argparse.ArgumentParser(description='Finds the shortest path.')
    parser.add_argument('graph', nargs='?', type=construct_graph, default='graph-1', help=
        'File listing the edges of a graph line-by-line in the following style: "A B 2", where "A" and "B" are node names and "2" is the weight of the edge connecting them.')
    # NOTE: with nargs=1 argparse stores a one-element list, so the defaults
    # must be lists too.  The old plain-string defaults only worked by
    # accident because 'G'[0] == 'G' for single-character names.
    parser.add_argument('-s', '--source', nargs=1, type=str, default=['G'], help='The source node.')
    parser.add_argument('-t', '--target', nargs=1, type=str, default=['C'], help='The target node.')

    args = parser.parse_args()

    s = args.source[0]
    t = args.target[0]
    g = args.graph
    n = g.nodes()

    if s not in n:
        print("%s not in %r" % (s, n))
        sys.exit(1)

    if t not in n:
        print("%s not in %r" % (t, n))
        sys.exit(1)

    return g, s, t
class P(DistProcess): 12 | 13 | def setup(ps, edges): 14 | edges = edges 15 | weight = INFINITY 16 | path = "" 17 | 18 | def OnNewWeight(new_weight, new_path): 19 | newWeight(new_weight, new_path) 20 | 21 | def newWeight(new_weight, new_path): 22 | if new_weight < weight: 23 | weight = new_weight 24 | path = new_path 25 | if str(self) == T: 26 | output("New shortest path of weight %i: %s" 27 | % (weight, ' -> '.join(path))) 28 | propogate() 29 | 30 | def propogate(): 31 | for p, e_w in edges.items(): 32 | send(NewWeight(weight + e_w, path+str(p)), p) 33 | 34 | def main(): 35 | if str(self) == S: 36 | newWeight(0, str(self)) 37 | await(False) 38 | 39 | def main(): 40 | use_channel("tcp") 41 | 42 | procs_names = set(G.nodes()) 43 | #procs_names.update({'0'})# control process 44 | 45 | global procs 46 | procs = createprocs(P, procs_names) 47 | 48 | # setup the processes 49 | ps = set(procs.values()) 50 | 51 | for p in ps: 52 | p_edges = { procs[node] : data['weight'] 53 | for (node, data) in G[repr(p)].items() } 54 | setupprocs([p], [ps-{p}, p_edges]) 55 | 56 | startprocs(ps) 57 | 58 | for p in (ps): 59 | p.join() 60 | -------------------------------------------------------------------------------- /ShortestPath/graph-1: -------------------------------------------------------------------------------- 1 | 2 | A F 2 3 | F G 7 4 | G H 15 5 | H J 13 6 | J I 9 7 | I C 18 8 | C B 17 9 | B A 3 10 | 11 | E F 1 12 | E G 6 13 | E H 5 14 | E I 10 15 | E D 11 16 | 17 | I H 12 18 | D I 4 19 | D C 8 20 | D B 16 21 | -------------------------------------------------------------------------------- /ShortestPath/graph-2: -------------------------------------------------------------------------------- 1 | 2 | A F 2 3 | F G 7 4 | G H 15 5 | H J 13 6 | J I 9 7 | I C 18 8 | C B 17 9 | B A 3 10 | 11 | E F 1 12 | E G 6 13 | E H 5 14 | E I 10 15 | E D 11 16 | 17 | I H 12 18 | D I 4 19 | D C 8 20 | D B 16 21 | 22 | L M 20 23 | K L 21 24 | K M 22 25 | J K 23 26 | 
-------------------------------------------------------------------------------- /ShortestPath/run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | sys.path.append("..") # if DistAlgo is not installed, use the one in parent directory 5 | 6 | from distalgo.runtime import * 7 | 8 | sys.argv = [sys.argv[0], "ShortestPath.dis"] + sys.argv[1:] 9 | 10 | libmain() 11 | -------------------------------------------------------------------------------- /distalgo/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 1.0 2 | Name: DistAlgo 3 | Version: 0.2 4 | Summary: UNKNOWN 5 | Home-page: UNKNOWN 6 | Author: bolin 7 | Author-email: bolin@cs.stonybrook.edu 8 | License: UNKNOWN 9 | Description: UNKNOWN 10 | Platform: UNKNOWN 11 | -------------------------------------------------------------------------------- /distalgo/__init__.py: -------------------------------------------------------------------------------- 1 | # main package 2 | 3 | from . 
"""Main entry point"""

import sys, os
import time
if sys.argv[0].endswith("__main__.py"):
    sys.argv[0] = "python -m distalgo"

RUNTIMEPKG = "runtime"
RUNTIMEFILES = ["event.py", "endpoint.py", "udp.py", "tcp.py", "sim.py", "util.py"]

def parseArgs(argv):
    """Parse compiler command-line options.

    Returns the (options, args) pair produced by optparse.
    """
    import optparse
    p = optparse.OptionParser()

    p.add_option("-p", action="store_true", dest='printsource')
    p.add_option("-F", action="store_true", dest='genfull')
    p.add_option("--full", action="store_true", dest='genfull')
    p.add_option("-O", action="store_true", dest='optimize')
    p.add_option("-D", action="store", dest='rootdir')
    p.add_option("-o", action="store", dest="outfile")

    p.set_defaults(printsource=False,
                   genfull=False,
                   optimize=False,
                   outfile=None,
                   rootdir=os.getcwd())

    return p.parse_args()


def printUsage(name):
    """Write a short usage message for the compiler to stderr."""
    # The placeholder for the source file was missing from the usage text.
    usage = """
Usage: %s [-p] [-o outfile] <file.dis>
where <file.dis> is the file name of the distalgo source
"""
    sys.stderr.write(usage % name)

from .codegen import to_source
from .compiler import dist_compile

def main():
    """Compile each DistAlgo source file given on the command line.

    With -F/--full, the runtime support files are prepended and a
    ``__main__`` postamble appended so the output is self-contained.
    """
    opts, args = parseArgs(sys.argv)
    print("rootdir is %s" % opts.rootdir)

    start = time.time()
    runtime = []
    if opts.genfull:
        for f in RUNTIMEFILES:
            p = os.path.join(opts.rootdir, RUNTIMEPKG, f)
            if not os.path.isfile(p):
                sys.stderr.write("File %s not found. Please specify root directory using -D.\n" % p)
                sys.exit(1)
            else:
                # Use a context manager so the file is closed even on error.
                with open(p, "r") as pfd:
                    runtime.extend(pfd.readlines())
    postamble = ["\nif __name__ == \"__main__\":\n",
                 "    main()\n"]

    for f in args:
        with open(f, 'r') as infd:
            pytree = dist_compile(infd)

        pysource = to_source(pytree)

        if opts.printsource:
            sys.stdout.write(pysource)
        else:
            # splitext is robust for any extension; the old f[:-4] silently
            # mangled names whose extension wasn't exactly 4 characters.
            outfile = os.path.splitext(f)[0] + ".py"
            with open(outfile, 'w') as outfd:
                if opts.genfull:
                    outfd.writelines(runtime)
                outfd.write(pysource)
                if opts.genfull:
                    outfd.writelines(postamble)
            sys.stderr.write("Written %s.\n" % outfile)

    elapsed = time.time() - start
    sys.stderr.write("\nTotal compilation time: %f second(s).\n" % elapsed)
    return 0

if __name__ == '__main__':
    main()
import ast


class Label(ast.stmt):
    """AST statement node representing a DistAlgo label.

    Attributes:
        name: The label's name (str).
        body: List of statements contained in the labelled block.
    """
    _fields = ['name', 'body']

    def __init__(self, name, body):
        self.name = name
        self.body = body


class Event(ast.stmt):
    """AST statement node representing a DistAlgo event handler.

    Attributes:
        name: The handler's message-type name (str).
        arg: The handler's argument pattern.
        at: Optional source qualifier (defaults to None).
        body: List of statements forming the handler body.
    """
    _fields = ['name', 'arg', 'at', 'body']

    # The original signature ``(self, name, arg, at=None, body)`` was a
    # SyntaxError (non-default parameter after a default); 'body' now also
    # defaults so the parameter order is preserved for positional callers.
    # The base class was also written as bare ``stmt`` which is never
    # imported here -- it must be qualified as ast.stmt.
    def __init__(self, name, arg, at=None, body=None):
        self.name = name
        self.arg = arg
        self.at = at
        self.body = body if body is not None else []
6 | """ 7 | def __init__(self, name, isp = True): 8 | self.name = name # Obvious 9 | self.isp = isp # Is this class a process class? 10 | self.membervars = set() # Set of member variables names 11 | self.memberfuncs = set() # Set of member function names 12 | self.labels = set() # Set of label names 13 | self.events = [] 14 | self.sent_patterns = [] 15 | self.newstmts = [] # Stmts that need to be added to __init__ 16 | self.newdefs = [] # New func defs that need to be added to the 17 | # class 18 | 19 | self.memberfuncs.add(EVENT_PROC_FUNNAME) 20 | self.membervars.add(EVENT_PATTERN_VARNAME) 21 | 22 | 23 | def genSentPatternStmt(self): 24 | left = Attribute(Name("self", Load()), SENT_PATTERN_VARNAME, Store()) 25 | right = List([p.toNode() for p in self.sent_patterns], Load()) 26 | return Assign([left], right) 27 | 28 | def genEventPatternStmt(self): 29 | left = Attribute(Name("self", Load()), EVENT_PATTERN_VARNAME, Store()) 30 | right = List([e.toNode() for e in self.events], Load()) 31 | return Assign([left], right) 32 | 33 | def genLabelEventsStmt(self): 34 | left = Attribute(Name("self", Load()), LABEL_EVENTS_VARNAME, Store()) 35 | right = Dict([Str(l) for l in self.labels], 36 | [Attribute(Name("self", Load()), EVENT_PATTERN_VARNAME, 37 | Load()) 38 | for l in self.labels]) 39 | return Assign([left], right) 40 | -------------------------------------------------------------------------------- /distalgo/compiler/label.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from ast import * 3 | from .exceptions import InvalidLabelException 4 | 5 | LABEL_FUNC = "_label_" 6 | 7 | 8 | # This class generates unique names for all labels, aggregates all the label 9 | # names, and at the same time transforms the labels into function calls 10 | class LabelTransformer(NodeTransformer): 11 | """ Generate unique names for all labels in class scope. Flattens Label 12 | blocks and insert self._label_ function calls. 
from ast import *
from .exceptions import InvalidSendException
from .consts import SENDMSG_FUNNAME

class SendTransformer(NodeTransformer):
    """Translates 'send' arguments into Tuples.

    Rewrites statement-level calls of the form ``send(Msg(a, b), dest)``
    so the message argument becomes the tuple ``('Msg', a, b)``.

    Raises:
        InvalidSendException: if a 'send' call does not have exactly
            two arguments (message, destination).
    """

    def __init__(self, info):
        self.info = info  # ClassInfo of the enclosing process class

    def visit_Expr(self, node):
        # Only statement-level calls to the 'send' builtin are rewritten.
        if (not (isinstance(node.value, Call) and
                 isinstance(node.value.func, Name) and
                 (node.value.func.id == SENDMSG_FUNNAME))):
            return node

        # 'send' takes exactly (message, destination).
        if (len(node.value.args) != 2):
            raise InvalidSendException()

        # The message must be a constructor-style call, e.g. Msg(1, 2).
        if (not isinstance(node.value.args[0], Call)):
            return node

        messCall = node.value.args[0]
        # Guard against non-Name callees (e.g. ``send(obj.make(x), p)``):
        # Attribute nodes have no '.id', which previously raised an
        # AttributeError here.  Leave such calls untouched.
        if (not isinstance(messCall.func, Name)):
            return node

        # Msg(1, 2)  -->  ('Msg', 1, 2)
        messTuple = Tuple([Str(messCall.func.id)] + messCall.args, Load())
        node.value.args[0] = messTuple
        return node
p.parse_args(argv) 29 | 30 | 31 | def cut_cmdline(): 32 | for i, a in enumerate(sys.argv): 33 | if a.endswith(".dis") or a.endswith(".run"): 34 | return (sys.argv[1:i+1], sys.argv[i:]) 35 | die("No DistAlgo source file specified.") 36 | 37 | def libmain(): 38 | """ 39 | Main program entry point. Parses command line options, sets up global 40 | variables, and calls the 'main' function of the DistAlgo program. 41 | """ 42 | libcmdl, distcmdl = cut_cmdline() 43 | 44 | cmdline_options, args = parseArgs(libcmdl) 45 | 46 | entrypoint(cmdline_options, args, distcmdl) 47 | 48 | def die(mesg = None): 49 | if mesg != None: 50 | sys.stderr.write(mesg + "\n") 51 | sys.exit(1) 52 | 53 | if __name__ == '__main__': 54 | libmain() 55 | -------------------------------------------------------------------------------- /distalgo/runtime/endpoint.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | class EndPoint: 4 | def __init__(self, name=None): 5 | self._name = name 6 | self._proc = None 7 | self._log = logging.getLogger("runtime.EndPoint") 8 | self._address = ('localhost', 0) 9 | 10 | def send(self, data, src, timestamp = 0): 11 | pass 12 | 13 | def recv(self, block, timeout = None): 14 | pass 15 | 16 | def setname(self, name): 17 | self._name = name 18 | 19 | def getlogname(self): 20 | if self._name is None: 21 | return "%s_%s" % (self._address[0], str(self._address[1])) 22 | else: 23 | return self._name 24 | 25 | ################################################### 26 | # Make the EndPoint behave like a Process object: 27 | 28 | def is_alive(self): 29 | if self._proc is not None: 30 | return self._proc.is_alive() 31 | else: 32 | self._log.warn("is_alive can only be called from parent process.") 33 | return self 34 | 35 | def join(self): 36 | if self._proc is not None: 37 | return self._proc.join() 38 | else: 39 | self._log.warn("join can only be called from parent process.") 40 | return self 41 | 42 | def terminate(self): 
class EventPattern:
    """ Describes an event "pattern" that can be used to match against Event
    instances.
    """
    def __init__(self, etype, mtype, consts, var, handlers=None):
        self.etype = etype   # Event type (one of the Event.* constants)
        self.mtype = mtype   # Message type name
        self.consts = consts # (index, value) pairs that must match exactly
        self.var = var       # (index, name) pairs that must exist in the data
        # A literal [] default would be shared by every EventPattern
        # instance (the mutable-default-argument pitfall); use None and
        # create a fresh list per instance instead.
        self.handlers = handlers if handlers is not None else []

    def match(self, event):
        """Return True if 'event' matches this pattern, False otherwise."""
        if (not ((self.etype == event.etype) and
                 (self.mtype == event.mtype))):
            return False

        # Every constant position must exist in the data and compare equal.
        for (index, value) in self.consts:
            if (index >= len(event.data) or
                event.data[index] != value):
                return False
        # Every variable position must at least exist in the data.
        for (index, name) in self.var:
            if (index >= len(event.data)):
                return False

        return True

class Event:
    """ Describes a single event.

    Instances of Event are created by the backend thread and passed to the
    front end.
    """
    # Event types:
    receive = 0   # A message was received
    send = 1      # A message was sent
    user = 2      # User defined
    peerjoin = 3  # A new peer joined the network
    peerdown = 4  # Connection to a peer is lost

    def __init__(self, etype, source, timestamp, message):
        self.etype = etype
        self.source = source
        self.timestamp = timestamp
        self.mtype = message[0]  # message type is the first tuple element
        self.data = message
socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 24 | if port is None: 25 | while True: 26 | self._address = (host, 27 | random.randint(MIN_UDP_PORT, MAX_UDP_PORT)) 28 | try: 29 | self._conn.bind(self._address) 30 | break 31 | except socket.error: 32 | pass 33 | else: 34 | self._address = (host, port) 35 | self._conn.bind(self._address) 36 | 37 | self._log.debug("UdpEndPoint %s initialization complete", 38 | str(self._address)) 39 | 40 | 41 | def send(self, data, src, timestamp = 0): 42 | if UdpEndPoint.sender is None: 43 | UdpEndPoint.sender = socket.socket(socket.AF_INET, 44 | socket.SOCK_DGRAM) 45 | 46 | bytedata = pickle.dumps((src, timestamp, data)) 47 | if len(bytedata) > MAX_UDP_BUFSIZE: 48 | self._log.warn("Data size exceeded maximum buffer size!" + 49 | " Outgoing packet dropped.") 50 | self._log.debug("Dropped packet: %s", str((src, timestamp, data))) 51 | 52 | elif UdpEndPoint.sender.sendto(bytedata, self._address) != len(bytedata): 53 | raise socket.error() 54 | 55 | def recvmesgs(self): 56 | flags = 0 57 | 58 | try: 59 | while True: 60 | bytedata = self._conn.recv(MAX_UDP_BUFSIZE, flags) 61 | src, tstamp, data = pickle.loads(bytedata) 62 | if not isinstance(src, UdpEndPoint): 63 | raise TypeError() 64 | else: 65 | yield (src, tstamp, data) 66 | except socket.error as e: 67 | self._log.debug("socket.error occured, terminating receive loop.") 68 | 69 | def __getstate__(self): 70 | return ("UDP", self._address, self._name) 71 | 72 | def __setstate__(self, value): 73 | proto, self._address, self._name = value 74 | self._conn = None 75 | self._log = logging.getLogger("runtime.UdpEndPoint") 76 | -------------------------------------------------------------------------------- /draw.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | file = sys.argv[1] 5 | 6 | def edge(n1, n2, w): 7 | return (n1, n2, {'weight':w}) 8 | 9 | edge_list = list() 10 | 11 | with open(file, 'r') as f: 12 | 
edge_list = list( edge(ed.split()[0], ed.split()[1], int(ed.split()[2])) 13 | for ed in 14 | (e.strip() for e in f.readlines() if e.strip() != "") 15 | if len(ed.split()) == 3 ) 16 | 17 | sys.path.append('..') 18 | import networkx as nx 19 | G = nx.Graph() 20 | G.add_edges_from(edge_list) 21 | 22 | "Draw graph using_matplotlib" 23 | import matplotlib 24 | if matplotlib.rcParams['backend'] == 'agg': 25 | matplotlib.rcParams['backend'] = 'Qt4Agg' 26 | 27 | import matplotlib.pyplot as plt 28 | pos=nx.spring_layout(G, weight = None) 29 | nx.draw_networkx_nodes(G,pos, node_size=330) 30 | nx.draw_networkx_edges(G,pos, set(G.edges()), width=2) 31 | nx.draw_networkx_labels(G,pos, font_size=12, font_family='sans-serif') 32 | 33 | plt.draw() 34 | plt.show() 35 | -------------------------------------------------------------------------------- /networkx/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | NetworkX 3 | ======== 4 | 5 | NetworkX (NX) is a Python package for the creation, manipulation, and 6 | study of the structure, dynamics, and functions of complex networks. 7 | 8 | https://networkx.lanl.gov/ 9 | 10 | Using 11 | ----- 12 | 13 | Just write in Python 14 | 15 | >>> import networkx as nx 16 | >>> G=nx.Graph() 17 | >>> G.add_edge(1,2) 18 | >>> G.add_node("spam") 19 | >>> print(G.nodes()) 20 | [1, 2, 'spam'] 21 | >>> print(G.edges()) 22 | [(1, 2)] 23 | """ 24 | # Copyright (C) 2004-2010 by 25 | # Aric Hagberg 26 | # Dan Schult 27 | # Pieter Swart 28 | # All rights reserved. 29 | # BSD license. 30 | # 31 | # Add platform dependent shared library path to sys.path 32 | # 33 | 34 | from __future__ import absolute_import 35 | 36 | import sys 37 | if sys.version_info[:2] < (2, 6): 38 | m = "Python version 2.6 or later is required for NetworkX (%d.%d detected)." 
39 | raise ImportError(m % sys.version_info[:2]) 40 | del sys 41 | 42 | # Release data 43 | from networkx import release 44 | 45 | __author__ = '%s <%s>\n%s <%s>\n%s <%s>' % \ 46 | ( release.authors['Hagberg'] + release.authors['Schult'] + \ 47 | release.authors['Swart'] ) 48 | __license__ = release.license 49 | 50 | __date__ = release.date 51 | __version__ = release.version 52 | 53 | #These are import orderwise 54 | from networkx.exception import * 55 | import networkx.external 56 | import networkx.utils 57 | # these packages work with Python >= 2.6 58 | 59 | import networkx.classes 60 | from networkx.classes import * 61 | 62 | 63 | import networkx.convert 64 | from networkx.convert import * 65 | 66 | import networkx.relabel 67 | from networkx.relabel import * 68 | 69 | import networkx.generators 70 | from networkx.generators import * 71 | 72 | import networkx.readwrite 73 | from networkx.readwrite import * 74 | 75 | #Need to test with SciPy, when available 76 | import networkx.algorithms 77 | from networkx.algorithms import * 78 | import networkx.linalg 79 | 80 | from networkx.linalg import * 81 | from networkx.tests.test import run as test 82 | 83 | import networkx.drawing 84 | from networkx.drawing import * 85 | 86 | -------------------------------------------------------------------------------- /networkx/algorithms/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.algorithms.assortativity import * 2 | from networkx.algorithms.block import * 3 | from networkx.algorithms.boundary import * 4 | from networkx.algorithms.centrality import * 5 | from networkx.algorithms.cluster import * 6 | from networkx.algorithms.clique import * 7 | from networkx.algorithms.community import * 8 | from networkx.algorithms.components import * 9 | from networkx.algorithms.core import * 10 | from networkx.algorithms.cycles import * 11 | from networkx.algorithms.dag import * 12 | from networkx.algorithms.distance_measures 
import * 13 | from networkx.algorithms.flow import * 14 | from networkx.algorithms.hierarchy import * 15 | from networkx.algorithms.matching import * 16 | from networkx.algorithms.mis import * 17 | from networkx.algorithms.mst import * 18 | from networkx.algorithms.link_analysis import * 19 | from networkx.algorithms.operators import * 20 | from networkx.algorithms.shortest_paths import * 21 | from networkx.algorithms.smetric import * 22 | from networkx.algorithms.traversal import * 23 | from networkx.algorithms.isolate import * 24 | from networkx.algorithms.euler import * 25 | from networkx.algorithms.vitality import * 26 | from networkx.algorithms.chordal import * 27 | from networkx.algorithms.richclub import * 28 | from networkx.algorithms.distance_regular import * 29 | from networkx.algorithms.swap import * 30 | from networkx.algorithms.graphical import * 31 | from networkx.algorithms.simple_paths import * 32 | 33 | import networkx.algorithms.assortativity 34 | import networkx.algorithms.bipartite 35 | import networkx.algorithms.centrality 36 | import networkx.algorithms.cluster 37 | import networkx.algorithms.clique 38 | import networkx.algorithms.components 39 | import networkx.algorithms.flow 40 | import networkx.algorithms.isomorphism 41 | import networkx.algorithms.link_analysis 42 | import networkx.algorithms.shortest_paths 43 | import networkx.algorithms.traversal 44 | import networkx.algorithms.chordal 45 | import networkx.algorithms.operators 46 | 47 | from networkx.algorithms.bipartite import projected_graph,project,is_bipartite 48 | from networkx.algorithms.isomorphism import is_isomorphic,could_be_isomorphic,\ 49 | fast_could_be_isomorphic,faster_could_be_isomorphic 50 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.algorithms.approximation.clique import * 2 | from 
networkx.algorithms.approximation.dominating_set import * 3 | from networkx.algorithms.approximation.independent_set import * 4 | from networkx.algorithms.approximation.matching import * 5 | from networkx.algorithms.approximation.ramsey import * 6 | from networkx.algorithms.approximation.vertex_cover import * 7 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/independent_set.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Independent Set 4 | 5 | Independent set or stable set is a set of vertices in a graph, no two of 6 | which are adjacent. That is, it is a set I of vertices such that for every 7 | two vertices in I, there is no edge connecting the two. Equivalently, each 8 | edge in the graph has at most one endpoint in I. The size of an independent 9 | set is the number of vertices it contains. 10 | 11 | A maximum independent set is a largest independent set for a given graph G 12 | and its size is denoted α(G). The problem of finding such a set is called 13 | the maximum independent set problem and is an NP-hard optimization problem. 14 | As such, it is unlikely that there exists an efficient algorithm for finding 15 | a maximum independent set of a graph. 16 | 17 | http://en.wikipedia.org/wiki/Independent_set_(graph_theory) 18 | 19 | Independent set algorithm is based on the following paper: 20 | 21 | `O(|V|/(log|V|)^2)` apx of maximum clique/independent set. 22 | 23 | Boppana, R., & Halldórsson, M. M. (1992). 24 | Approximating maximum independent sets by excluding subgraphs. 25 | BIT Numerical Mathematics, 32(2), 180–196. Springer. 26 | doi:10.1007/BF01994876 27 | 28 | """ 29 | # Copyright (C) 2011-2012 by 30 | # Nicholas Mancuso 31 | # All rights reserved. 32 | # BSD license. 
def maximum_independent_set(graph):
    """Return an approximate maximum independent set.

    Parameters
    ----------
    graph : NetworkX graph
        Undirected graph

    Returns
    -------
    iset : Set
        The apx-maximum independent set

    Notes
    -----
    Finds the `O(|V|/(log|V|)^2)` apx of independent set in the worst case.


    References
    ----------
    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
        Approximating maximum independent sets by excluding subgraphs.
        BIT Numerical Mathematics, 32(2), 180–196. Springer.
    """
    # clique_removal returns (independent set, cliques); only the
    # independent set is of interest here.
    independent_nodes, _cliques = clique_removal(graph)
    return independent_nodes
def ramsey_R2(graph):
    r"""Approximately computes the Ramsey number `R(2;s,t)` for graph.

    Recursively partitions the graph around an arbitrary pivot node into
    its neighborhood and non-neighborhood, growing a clique through the
    former and an independent set through the latter.

    Parameters
    ----------
    graph : NetworkX graph
        Undirected graph

    Returns
    -------
    max_pair : (set, set) tuple
        Maximum clique, Maximum independent set.
    """
    if not graph:
        return (set([]), set([]))

    node = next(graph.nodes_iter())
    nbrs = nx.all_neighbors(graph, node)
    nnbrs = nx.non_neighbors(graph, node)
    c_1, i_1 = ramsey_R2(graph.subgraph(nbrs))
    c_2, i_2 = ramsey_R2(graph.subgraph(nnbrs))

    c_1.add(node)
    i_2.add(node)
    # Pick the larger candidate *by cardinality*.  The previous
    # max([c_1, c_2]) compared the sets directly, but set comparison is
    # the subset relation -- a partial order -- so for incomparable sets
    # max() just returned the first element, even when it was smaller.
    return (max(c_1, c_2, key=len), max(i_1, i_2, key=len))
19 | for clique in cs: 20 | cdens = nx.density(graph.subgraph(clique)) 21 | eq_(cdens, 0.0, "clique not found by clique_removal!") 22 | 23 | graph = nx.barbell_graph(10, 5, nx.Graph()) 24 | i, cs = apxa.clique_removal(graph) 25 | idens = nx.density(graph.subgraph(i)) 26 | eq_(idens, 0.0, "i-set not found by ramsey!") 27 | for clique in cs: 28 | cdens = nx.density(graph.subgraph(clique)) 29 | eq_(cdens, 1.0, "clique not found by clique_removal!") 30 | 31 | def test_max_clique_smoke(): 32 | # smoke test 33 | G = nx.Graph() 34 | assert_equal(len(apxa.max_clique(G)),0) 35 | 36 | def test_max_clique(): 37 | # create a complete graph 38 | graph = nx.complete_graph(30) 39 | # this should return the entire graph 40 | mc = apxa.max_clique(graph) 41 | assert_equals(30, len(mc)) 42 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/tests/test_dominating_set.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | import networkx.algorithms.approximation as apxa 5 | 6 | 7 | class TestMinWeightDominatingSet: 8 | 9 | def test_min_weighted_dominating_set(self): 10 | graph = nx.Graph() 11 | graph.add_edge(1, 2) 12 | graph.add_edge(1, 5) 13 | graph.add_edge(2, 3) 14 | graph.add_edge(2, 5) 15 | graph.add_edge(3, 4) 16 | graph.add_edge(3, 6) 17 | graph.add_edge(5, 6) 18 | 19 | vertices = set([1, 2, 3, 4, 5, 6]) 20 | # due to ties, this might be hard to test tight bounds 21 | dom_set = apxa.min_weighted_dominating_set(graph) 22 | for vertex in vertices - dom_set: 23 | neighbors = set(graph.neighbors(vertex)) 24 | ok_(len(neighbors & dom_set) > 0, "Non dominating set found!") 25 | 26 | def test_min_edge_dominating_set(self): 27 | graph = nx.path_graph(5) 28 | dom_set = apxa.min_edge_dominating_set(graph) 29 | 30 | # this is a crappy way to test, but good enough for now. 
31 | for edge in graph.edges_iter(): 32 | if edge in dom_set: 33 | continue 34 | else: 35 | u, v = edge 36 | found = False 37 | for dom_edge in dom_set: 38 | found |= u == dom_edge[0] or u == dom_edge[1] 39 | ok_(found, "Non adjacent edge found!") 40 | 41 | graph = nx.complete_graph(10) 42 | dom_set = apxa.min_edge_dominating_set(graph) 43 | 44 | # this is a crappy way to test, but good enough for now. 45 | for edge in graph.edges_iter(): 46 | if edge in dom_set: 47 | continue 48 | else: 49 | u, v = edge 50 | found = False 51 | for dom_edge in dom_set: 52 | found |= u == dom_edge[0] or u == dom_edge[1] 53 | ok_(found, "Non adjacent edge found!") 54 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/tests/test_independent_set.py: -------------------------------------------------------------------------------- 1 | from nose.tools import * 2 | import networkx as nx 3 | import networkx.algorithms.approximation as a 4 | 5 | def test_independent_set(): 6 | # smoke test 7 | G = nx.Graph() 8 | assert_equal(len(a.maximum_independent_set(G)),0) 9 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/tests/test_matching.py: -------------------------------------------------------------------------------- 1 | from nose.tools import * 2 | import networkx as nx 3 | import networkx.algorithms.approximation as a 4 | 5 | def test_min_maximal_matching(): 6 | # smoke test 7 | G = nx.Graph() 8 | assert_equal(len(a.min_maximal_matching(G)),0) 9 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/tests/test_ramsey.py: -------------------------------------------------------------------------------- 1 | from nose.tools import * 2 | import networkx as nx 3 | import networkx.algorithms.approximation as apxa 4 | 5 | def test_ramsey(): 6 | # this should only find the complete graph 7 | graph = 
nx.complete_graph(10) 8 | c, i = apxa.ramsey_R2(graph) 9 | cdens = nx.density(graph.subgraph(c)) 10 | eq_(cdens, 1.0, "clique not found by ramsey!") 11 | idens = nx.density(graph.subgraph(i)) 12 | eq_(idens, 0.0, "i-set not found by ramsey!") 13 | 14 | # this trival graph has no cliques. should just find i-sets 15 | graph = nx.trivial_graph(nx.Graph()) 16 | c, i = apxa.ramsey_R2(graph) 17 | cdens = nx.density(graph.subgraph(c)) 18 | eq_(cdens, 0.0, "clique not found by ramsey!") 19 | idens = nx.density(graph.subgraph(i)) 20 | eq_(idens, 0.0, "i-set not found by ramsey!") 21 | 22 | graph = nx.barbell_graph(10, 5, nx.Graph()) 23 | c, i = apxa.ramsey_R2(graph) 24 | cdens = nx.density(graph.subgraph(c)) 25 | eq_(cdens, 1.0, "clique not found by ramsey!") 26 | idens = nx.density(graph.subgraph(i)) 27 | eq_(idens, 0.0, "i-set not found by ramsey!") 28 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/tests/test_vertex_cover.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | from networkx.algorithms import approximation as a 5 | 6 | class TestMWVC: 7 | 8 | def test_min_vertex_cover(self): 9 | # create a simple star graph 10 | size = 50 11 | sg = nx.star_graph(size) 12 | cover = a.min_weighted_vertex_cover(sg) 13 | assert_equals(2, len(cover)) 14 | for u, v in sg.edges_iter(): 15 | ok_((u in cover or v in cover), "Node node covered!") 16 | 17 | wg = nx.Graph() 18 | wg.add_node(0, weight=10) 19 | wg.add_node(1, weight=1) 20 | wg.add_node(2, weight=1) 21 | wg.add_node(3, weight=1) 22 | wg.add_node(4, weight=1) 23 | 24 | wg.add_edge(0, 1) 25 | wg.add_edge(0, 2) 26 | wg.add_edge(0, 3) 27 | wg.add_edge(0, 4) 28 | 29 | wg.add_edge(1,2) 30 | wg.add_edge(2,3) 31 | wg.add_edge(3,4) 32 | wg.add_edge(4,1) 33 | 34 | cover = a.min_weighted_vertex_cover(wg, weight="weight") 35 | csum = 
sum(wg.node[node]["weight"] for node in cover) 36 | assert_equals(4, csum) 37 | 38 | for u, v in wg.edges_iter(): 39 | ok_((u in cover or v in cover), "Node node covered!") 40 | -------------------------------------------------------------------------------- /networkx/algorithms/approximation/vertex_cover.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | ************ 4 | Vertex Cover 5 | ************ 6 | 7 | Given an undirected graph `G = (V, E)` and a function w assigning nonnegative 8 | weights to its vertices, find a minimum weight subset of V such that each edge 9 | in E is incident to at least one vertex in the subset. 10 | 11 | http://en.wikipedia.org/wiki/Vertex_cover 12 | """ 13 | # Copyright (C) 2011-2012 by 14 | # Nicholas Mancuso 15 | # All rights reserved. 16 | # BSD license. 17 | from networkx.utils import * 18 | __all__ = ["min_weighted_vertex_cover"] 19 | __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" 20 | 21 | @not_implemented_for('directed') 22 | def min_weighted_vertex_cover(graph, weight=None): 23 | """2-OPT Local Ratio for Minimum Weighted Vertex Cover 24 | 25 | Find an approximate minimum weighted vertex cover of a graph. 26 | 27 | Parameters 28 | ---------- 29 | graph : NetworkX graph 30 | Undirected graph 31 | 32 | weight : None or string, optional (default = None) 33 | If None, every edge has weight/distance/cost 1. If a string, use this 34 | edge attribute as the edge weight. Any edge attribute not present 35 | defaults to 1. 36 | 37 | Returns 38 | ------- 39 | min_weighted_cover : set 40 | Returns a set of vertices whose weight sum is no more than 2 * OPT. 41 | 42 | References 43 | ---------- 44 | .. [1] Bar-Yehuda, R., & Even, S. (1985). A local-ratio theorem for 45 | approximating the weighted vertex cover problem. 
46 | Annals of Discrete Mathematics, 25, 27–46 47 | http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf 48 | """ 49 | weight_func = lambda nd: nd.get(weight, 1) 50 | cost = dict((n, weight_func(nd)) for n, nd in graph.nodes(data=True)) 51 | 52 | # while there are edges uncovered, continue 53 | for u,v in graph.edges_iter(): 54 | # select some uncovered edge 55 | min_cost = min([cost[u], cost[v]]) 56 | cost[u] -= min_cost 57 | cost[v] -= min_cost 58 | 59 | return set(u for u in cost if cost[u] == 0) 60 | -------------------------------------------------------------------------------- /networkx/algorithms/assortativity/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.algorithms.assortativity.connectivity import * 2 | from networkx.algorithms.assortativity.correlation import * 3 | from networkx.algorithms.assortativity.mixing import * 4 | from networkx.algorithms.assortativity.neighbor_degree import * 5 | from networkx.algorithms.assortativity.pairs import * 6 | -------------------------------------------------------------------------------- /networkx/algorithms/assortativity/tests/base_test.py: -------------------------------------------------------------------------------- 1 | import networkx as nx 2 | 3 | class BaseTestAttributeMixing(object): 4 | 5 | def setUp(self): 6 | G=nx.Graph() 7 | G.add_nodes_from([0,1],fish='one') 8 | G.add_nodes_from([2,3],fish='two') 9 | G.add_nodes_from([4],fish='red') 10 | G.add_nodes_from([5],fish='blue') 11 | G.add_edges_from([(0,1),(2,3),(0,4),(2,5)]) 12 | self.G=G 13 | 14 | D=nx.DiGraph() 15 | D.add_nodes_from([0,1],fish='one') 16 | D.add_nodes_from([2,3],fish='two') 17 | D.add_nodes_from([4],fish='red') 18 | D.add_nodes_from([5],fish='blue') 19 | D.add_edges_from([(0,1),(2,3),(0,4),(2,5)]) 20 | self.D=D 21 | 22 | M=nx.MultiGraph() 23 | M.add_nodes_from([0,1],fish='one') 24 | M.add_nodes_from([2,3],fish='two') 25 | M.add_nodes_from([4],fish='red') 26 | 
M.add_nodes_from([5],fish='blue') 27 | M.add_edges_from([(0,1),(0,1),(2,3)]) 28 | self.M=M 29 | 30 | S=nx.Graph() 31 | S.add_nodes_from([0,1],fish='one') 32 | S.add_nodes_from([2,3],fish='two') 33 | S.add_nodes_from([4],fish='red') 34 | S.add_nodes_from([5],fish='blue') 35 | S.add_edge(0,0) 36 | S.add_edge(2,2) 37 | self.S=S 38 | 39 | class BaseTestDegreeMixing(object): 40 | 41 | def setUp(self): 42 | self.P4=nx.path_graph(4) 43 | self.D=nx.DiGraph() 44 | self.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) 45 | self.M=nx.MultiGraph() 46 | self.M.add_path(list(range(4))) 47 | self.M.add_edge(0,1) 48 | self.S=nx.Graph() 49 | self.S.add_edges_from([(0,0),(1,1)]) 50 | 51 | -------------------------------------------------------------------------------- /networkx/algorithms/assortativity/tests/test_neighbor_degree.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | class TestAverageNeighbor(object): 6 | 7 | def test_degree_p4(self): 8 | G=nx.path_graph(4) 9 | answer={0:2,1:1.5,2:1.5,3:2} 10 | nd = nx.average_neighbor_degree(G) 11 | assert_equal(nd,answer) 12 | 13 | D=G.to_directed() 14 | nd = nx.average_neighbor_degree(D) 15 | assert_equal(nd,answer) 16 | 17 | D=G.to_directed() 18 | nd = nx.average_neighbor_degree(D) 19 | assert_equal(nd,answer) 20 | 21 | D=G.to_directed() 22 | nd = nx.average_neighbor_degree(D, source='in', target='in') 23 | assert_equal(nd,answer) 24 | 25 | def test_degree_p4_weighted(self): 26 | G=nx.path_graph(4) 27 | G[1][2]['weight']=4 28 | answer={0:2,1:1.8,2:1.8,3:2} 29 | nd = nx.average_neighbor_degree(G,weight='weight') 30 | assert_equal(nd,answer) 31 | 32 | D=G.to_directed() 33 | nd = nx.average_neighbor_degree(D,weight='weight') 34 | assert_equal(nd,answer) 35 | 36 | D=G.to_directed() 37 | nd = nx.average_neighbor_degree(D,weight='weight') 38 | assert_equal(nd,answer) 39 | nd = 
def node_redundancy(G, nodes=None):
    r"""Compute bipartite node redundancy coefficient.

    The redundancy coefficient of a node `v` is the fraction of pairs of
    neighbors of `v` that are both linked to other nodes. In a one-mode
    projection these nodes would be linked together even if `v` were
    not there.

    .. math::

        rc(v) = \frac{|\{\{u,w\} \subseteq N(v),
        \: \exists v' \neq  v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}}

    where `N(v)` are the neighbors of `v` in `G`.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.cycle_graph(4)
    >>> rc = bipartite.node_redundancy(G)
    >>> rc[0]
    1.0

    Compute the average redundancy for the graph:

    >>> sum(rc.values())/len(G)
    1.0

    Compute the average redundancy for a set of nodes:

    >>> nodes = [0, 2]
    >>> sum(rc[n] for n in nodes)/len(nodes)
    1.0

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if nodes is None:
        nodes = G
    redundancy = {}
    for v in nodes:
        # Count neighbor pairs of v that share at least one other
        # common neighbor besides v itself.
        shared_pairs = 0.0
        for u, w in combinations(G[v], 2):
            other_common = (set(G[u]) & set(G[w])) - set([v])
            if len(other_common) > 0:
                shared_pairs += 1
        if shared_pairs > 0:
            # Normalize by the number of neighbor pairs, |N(v)| choose 2.
            degree = len(G[v])
            scale = 2.0 / (degree * (degree - 1))
        else:
            # No qualifying pairs (including degree < 2): coefficient is 0.
            scale = 1.0
        redundancy[v] = shared_pairs * scale
    return redundancy
Rodríguez-Velázquez, "Spectral measures of 54 | bipartivity in complex networks", PhysRev E 72, 046105 (2005) 55 | """ 56 | try: 57 | import scipy.linalg 58 | except ImportError: 59 | raise ImportError('spectral_bipartivity() requires SciPy: ', 60 | 'http://scipy.org/') 61 | nodelist = G.nodes() # ordering of nodes in matrix 62 | A = nx.to_numpy_matrix(G, nodelist, weight=weight) 63 | expA = scipy.linalg.expm(A) 64 | expmA = scipy.linalg.expm(-A) 65 | coshA = 0.5 * (expA + expmA) 66 | if nodes is None: 67 | # return single number for entire graph 68 | return coshA.diagonal().sum() / expA.diagonal().sum() 69 | else: 70 | # contribution for individual nodes 71 | index = dict(zip(nodelist, range(len(nodelist)))) 72 | sb = {} 73 | for n in nodes: 74 | i = index[n] 75 | sb[n] = coshA[i, i] / expA[i, i] 76 | return sb 77 | 78 | def setup_module(module): 79 | """Fixture for nose tests.""" 80 | from nose import SkipTest 81 | try: 82 | import numpy 83 | except: 84 | raise SkipTest("NumPy not available") 85 | try: 86 | import scipy 87 | except: 88 | raise SkipTest("SciPy not available") 89 | -------------------------------------------------------------------------------- /networkx/algorithms/bipartite/tests/test_cluster.py: -------------------------------------------------------------------------------- 1 | import networkx as nx 2 | from nose.tools import * 3 | from networkx.algorithms.bipartite.cluster import cc_dot,cc_min,cc_max 4 | import networkx.algorithms.bipartite as bipartite 5 | 6 | def test_pairwise_bipartite_cc_functions(): 7 | # Test functions for different kinds of bipartite clustering coefficients 8 | # between pairs of nodes using 3 example graphs from figure 5 p. 
40 9 | # Latapy et al (2008) 10 | G1 = nx.Graph([(0,2),(0,3),(0,4),(0,5),(0,6),(1,5),(1,6),(1,7)]) 11 | G2 = nx.Graph([(0,2),(0,3),(0,4),(1,3),(1,4),(1,5)]) 12 | G3 = nx.Graph([(0,2),(0,3),(0,4),(0,5),(0,6),(1,5),(1,6),(1,7),(1,8),(1,9)]) 13 | result = {0:[1/3.0, 2/3.0, 2/5.0], 1:[1/2.0, 2/3.0, 2/3.0], 2:[2/8.0, 2/5.0, 2/5.0]} 14 | for i, G in enumerate([G1, G2, G3]): 15 | assert(bipartite.is_bipartite(G)) 16 | assert(cc_dot(set(G[0]), set(G[1])) == result[i][0]) 17 | assert(cc_min(set(G[0]), set(G[1])) == result[i][1]) 18 | assert(cc_max(set(G[0]), set(G[1])) == result[i][2]) 19 | 20 | def test_star_graph(): 21 | G=nx.star_graph(3) 22 | # all modes are the same 23 | answer={0:0,1:1,2:1,3:1} 24 | assert_equal(bipartite.clustering(G,mode='dot'),answer) 25 | assert_equal(bipartite.clustering(G,mode='min'),answer) 26 | assert_equal(bipartite.clustering(G,mode='max'),answer) 27 | 28 | @raises(nx.NetworkXError) 29 | def test_not_bipartite(): 30 | bipartite.clustering(nx.complete_graph(4)) 31 | 32 | @raises(nx.NetworkXError) 33 | def test_bad_mode(): 34 | bipartite.clustering(nx.path_graph(4),mode='foo') 35 | 36 | def test_path_graph(): 37 | G=nx.path_graph(4) 38 | answer={0:0.5,1:0.5,2:0.5,3:0.5} 39 | assert_equal(bipartite.clustering(G,mode='dot'),answer) 40 | assert_equal(bipartite.clustering(G,mode='max'),answer) 41 | answer={0:1,1:1,2:1,3:1} 42 | assert_equal(bipartite.clustering(G,mode='min'),answer) 43 | 44 | 45 | def test_average_path_graph(): 46 | G=nx.path_graph(4) 47 | assert_equal(bipartite.average_clustering(G,mode='dot'),0.5) 48 | assert_equal(bipartite.average_clustering(G,mode='max'),0.5) 49 | assert_equal(bipartite.average_clustering(G,mode='min'),1) 50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /networkx/algorithms/centrality/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.algorithms.centrality.betweenness import * 2 | from 
def closeness_centrality(G, v=None, distance=None, normalized=True):
    """Compute closeness centrality for nodes.

    Closeness centrality at a node is 1/average distance to all other nodes.

    Parameters
    ----------
    G : graph
      A networkx graph
    v : node, optional
      Return only the value for node v
    distance : string key, optional (default=None)
      Use specified edge key as edge distance.
      If True, use 'weight' as the edge key.
    normalized : bool, optional
      If True (default) normalize by the graph size.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with closeness centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality

    Notes
    -----
    The closeness centrality is normalized to to n-1 / size(G)-1 where
    n is the number of nodes in the connected part of graph containing
    the node.  If the graph is not completely connected, this
    algorithm computes the closeness centrality for each connected
    part separately.
    """
    # Choose the path-length routine: Dijkstra when an edge attribute is
    # given as the distance, plain BFS otherwise.
    if distance is None:
        path_length = nx.single_source_shortest_path_length
    else:
        if distance is True:
            distance = 'weight'
        path_length = functools.partial(nx.single_source_dijkstra_path_length,
                                        weight=distance)

    targets = G.nodes() if v is None else [v]
    scores = {}

    for node in targets:
        lengths = path_length(G, node)
        total = sum(lengths.values())
        if total > 0.0 and len(G) > 1:
            value = (len(lengths) - 1.0) / total
            if normalized:
                # Scale by the fraction of the graph reachable from node,
                # so disconnected graphs are handled per component.
                value *= (len(lengths) - 1.0) / (len(G) - 1)
            scores[node] = value
        else:
            scores[node] = 0.0

    if v is not None:
        return scores[v]
    return scores
python 2 | from nose.tools import * 3 | from nose import SkipTest 4 | import networkx 5 | 6 | class TestFlowClosenessCentrality(object): 7 | numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test 8 | @classmethod 9 | def setupClass(cls): 10 | global np 11 | try: 12 | import numpy as np 13 | import scipy 14 | except ImportError: 15 | raise SkipTest('NumPy not available.') 16 | 17 | 18 | def test_K4(self): 19 | """Closeness centrality: K4""" 20 | G=networkx.complete_graph(4) 21 | b=networkx.current_flow_closeness_centrality(G,normalized=True) 22 | b_answer={0: 2.0, 1: 2.0, 2: 2.0, 3: 2.0} 23 | for n in sorted(G): 24 | assert_almost_equal(b[n],b_answer[n]) 25 | 26 | 27 | def test_P4_normalized(self): 28 | """Closeness centrality: P4 normalized""" 29 | G=networkx.path_graph(4) 30 | b=networkx.current_flow_closeness_centrality(G,normalized=True) 31 | b_answer={0: 1./2, 1: 3./4, 2: 3./4, 3:1./2} 32 | for n in sorted(G): 33 | assert_almost_equal(b[n],b_answer[n]) 34 | 35 | 36 | def test_P4(self): 37 | """Closeness centrality: P4""" 38 | G=networkx.path_graph(4) 39 | b=networkx.current_flow_closeness_centrality(G,normalized=False) 40 | b_answer={0: 1.0/6, 1: 1.0/4, 2: 1.0/4, 3:1.0/6} 41 | for n in sorted(G): 42 | assert_almost_equal(b[n],b_answer[n]) 43 | 44 | def test_star(self): 45 | """Closeness centrality: star """ 46 | G=networkx.Graph() 47 | G.add_star(['a','b','c','d']) 48 | b=networkx.current_flow_closeness_centrality(G,normalized=True) 49 | b_answer={'a': 1.0, 'b': 0.6, 'c': 0.6, 'd':0.6} 50 | for n in sorted(G): 51 | assert_almost_equal(b[n],b_answer[n]) 52 | 53 | 54 | 55 | class TestWeightedFlowClosenessCentrality(object): 56 | pass 57 | -------------------------------------------------------------------------------- /networkx/algorithms/chordal/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.algorithms.chordal.chordal_alg import * 2 | 3 | 4 | 
class TestMCS:

    def setUp(self):
        """Build three fixtures: a connected chordal graph, a disconnected
        chordal graph, and a non-chordal graph."""
        # simple graph
        connected_chordal_G=nx.Graph()
        connected_chordal_G.add_edges_from([(1,2),(1,3),(2,3),(2,4),(3,4),
                                            (3,5),(3,6),(4,5),(4,6),(5,6)])
        self.connected_chordal_G=connected_chordal_G

        # Same edges plus a separate edge (7,8) and isolated node 9:
        # still chordal, but disconnected.
        chordal_G = nx.Graph()
        chordal_G.add_edges_from([(1,2),(1,3),(2,3),(2,4),(3,4),
                                  (3,5),(3,6),(4,5),(4,6),(5,6),(7,8)])
        chordal_G.add_node(9)
        self.chordal_G=chordal_G

        # Contains a chordless 4-cycle, so it is not chordal.
        non_chordal_G = nx.Graph()
        non_chordal_G.add_edges_from([(1,2),(1,3),(2,4),(2,5),(3,4),(3,5)])
        self.non_chordal_G = non_chordal_G

    def test_is_chordal(self):
        # Fixtures plus classic small graphs; cycles longer than 3 are
        # never chordal.
        assert_false(nx.is_chordal(self.non_chordal_G))
        assert_true(nx.is_chordal(self.chordal_G))
        assert_true(nx.is_chordal(self.connected_chordal_G))
        assert_true(nx.is_chordal(nx.complete_graph(3)))
        assert_true(nx.is_chordal(nx.cycle_graph(3)))
        assert_false(nx.is_chordal(nx.cycle_graph(5)))

    def test_induced_nodes(self):
        # A treewidth bound that is too small must raise
        # NetworkXTreewidthBoundExceeded; non-chordal input must raise
        # NetworkXError.
        G = nx.generators.classic.path_graph(10)
        I = nx.find_induced_nodes(G,1,9,2)
        assert_equal(I,set([1,2,3,4,5,6,7,8,9]))
        assert_raises(nx.NetworkXTreewidthBoundExceeded,
                      nx.find_induced_nodes,G,1,9,1)
        I = nx.find_induced_nodes(self.chordal_G,1,6)
        assert_equal(I,set([1,2,4,6]))
        assert_raises(nx.NetworkXError,
                      nx.find_induced_nodes,self.non_chordal_G,1,5)

    def test_chordal_find_cliques(self):
        # Maximal cliques of the disconnected chordal fixture, including
        # the isolated node and the separate edge.
        cliques = set([frozenset([9]),frozenset([7,8]),frozenset([1,2,3]),
                       frozenset([2,3,4]),frozenset([3,4,5,6])])
        assert_equal(nx.chordal_graph_cliques(self.chordal_G),cliques)

    def test_chordal_find_cliques_path(self):
        # Every edge of a path graph is a maximal clique (either order).
        G = nx.path_graph(10)
        cliqueset = nx.chordal_graph_cliques(G)
        for (u,v) in G.edges_iter():
            assert_true(frozenset([u,v]) in cliqueset
                        or frozenset([v,u]) in cliqueset)

    def test_chordal_find_cliquesCC(self):
        # Maximal cliques of the connected chordal fixture.
        cliques = set([frozenset([1,2,3]),frozenset([2,3,4]),
                       frozenset([3,4,5,6])])
        assert_equal(nx.chordal_graph_cliques(self.connected_chordal_G),cliques)
class TestAttractingComponents(object):
    def setUp(self):
        """Build three digraph fixtures with known attracting components."""
        # G1: every walk ends in one of the sinks 2, 9 or 10.
        self.G1 = nx.DiGraph()
        self.G1.add_edges_from([(5,11),(11,2),(11,9),(11,10),
                                (7,11),(7,8),(8,9),(3,8),(3,10)])
        # G2: the cycle {1,2} absorbs everything reachable from 0.
        self.G2 = nx.DiGraph()
        self.G2.add_edges_from([(0,1),(0,2),(1,1),(1,2),(2,1)])

        # G3: two absorbing cycles, {1,2} and {3,4}.
        self.G3 = nx.DiGraph()
        self.G3.add_edges_from([(0,1),(1,2),(2,1),(0,3),(3,4),(4,3)])

    def test_attracting_components(self):
        # G1's attracting components are its three singleton sinks.
        ac = nx.attracting_components(self.G1)
        assert_true([2] in ac)
        assert_true([9] in ac)
        assert_true([10] in ac)

        ac = nx.attracting_components(self.G2)
        ac = [tuple(sorted(x)) for x in ac]
        assert_true(ac == [(1,2)])

        ac = nx.attracting_components(self.G3)
        ac = [tuple(sorted(x)) for x in ac]
        assert_true((1,2) in ac)
        assert_true((3,4) in ac)
        assert_equal(len(ac), 2)

    def test_number_attacting_components(self):
        # NOTE(review): 'attacting' in the method name looks like a typo
        # for 'attracting'; kept as-is since it only affects test naming.
        assert_equal(len(nx.attracting_components(self.G1)), 3)
        assert_equal(len(nx.attracting_components(self.G2)), 1)
        assert_equal(len(nx.attracting_components(self.G3)), 2)

    def test_is_attracting_component(self):
        # A whole graph is an attracting component only if it equals its
        # own (single) attracting component, e.g. the {1,2} subgraph of G3.
        assert_false(nx.is_attracting_component(self.G1))
        assert_false(nx.is_attracting_component(self.G2))
        assert_false(nx.is_attracting_component(self.G3))
        g2 = self.G3.subgraph([1,2])
        assert_true(nx.is_attracting_component(g2))

    def test_attracting_component_subgraphs(self):
        subgraphs = nx.attracting_component_subgraphs(self.G1)
        for subgraph in subgraphs:
            assert_equal(len(subgraph), 1)

        self.G2.add_edge(1,2,eattr='red') # test attrs copied to subgraphs
        self.G2.node[2]['nattr']='blue'
        self.G2.graph['gattr']='green'
        subgraphs = nx.attracting_component_subgraphs(self.G2)
        assert_equal(len(subgraphs), 1)
        SG2=subgraphs[0]
        assert_true(1 in SG2)
        assert_true(2 in SG2)
        assert_equal(SG2[1][2]['eattr'],'red')
        assert_equal(SG2.node[2]['nattr'],'blue')
        assert_equal(SG2.graph['gattr'],'green')
        # Mutating the returned subgraph must not leak back into G2.
        SG2.add_edge(1,2,eattr='blue')
        assert_equal(SG2[1][2]['eattr'],'blue')
        assert_equal(self.G2[1][2]['eattr'],'red')
def gen_pyramid(N):
    """Return an N-level pyramid-shaped DiGraph draining into sink 't'.

    This graph admits a flow of value 1 for which every arc is at
    capacity (except the arcs incident to the sink which have
    infinite capacity).
    """
    G = nx.DiGraph()

    for i in range(N - 1):
        # Capacities alternate so that the unit flow saturates both
        # outgoing arcs of every node (i, j).
        cap = 1. / (i + 2)
        for j in range(i + 1):
            G.add_edge((i, j), (i + 1, j),
                       capacity = cap)
            cap = 1. / (i + 1) - cap
            G.add_edge((i, j), (i + 1, j + 1),
                       capacity = cap)
            cap = 1. / (i + 2) - cap

    # Bottom row connects to 't' with no capacity attribute,
    # i.e. unbounded capacity.
    for j in range(N):
        G.add_edge((N - 1, j), 't')

    return G


class TestMaxflowLargeGraph:
    def test_complete_graph(self):
        # Max flow between two nodes of K_N with uniform capacity 5
        # is 5*(N-1).
        N = 50
        G = nx.complete_graph(N)
        for (u, v) in G.edges():
            G[u][v]['capacity'] = 5
        assert_equal(nx.ford_fulkerson(G, 1, 2)[0], 5 * (N - 1))

    def test_pyramid(self):
        # The pyramid is built to admit exactly a unit max flow.
        N = 10
        # N = 100 # this gives a graph with 5051 nodes
        G = gen_pyramid(N)
        assert_almost_equal(nx.ford_fulkerson(G, (0, 0), 't')[0], 1.)
def flow_hierarchy(G, weight=None):
    """Returns the flow hierarchy of a directed network.

    Flow hierarchy is defined as the fraction of edges not participating
    in cycles in a directed graph [1]_.

    Parameters
    ----------
    G : DiGraph or MultiDiGraph
       A directed graph

    weight : key,optional (default=None)
       Attribute to use for edge weights. If None the weight defaults to 1.

    Returns
    -------
    h : float
       Flow hierarchy value

    Notes
    -----
    The algorithm described in [1]_ computes the flow hierarchy through
    exponentiation of the adjacency matrix.  This function implements an
    alternative approach that finds strongly connected components.
    An edge is in a cycle if and only if it is in a strongly connected
    component, which can be found in `O(m)` time using Tarjan's algorithm.

    References
    ----------
    .. [1] Luo, J.; Magee, C.L. (2011),
       Detecting evolving patterns of self-organizing networks by flow
       hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
       DOI: 10.1002/cplx.20368
       http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
    """
    if not G.is_directed():
        # FIX: message previously misspelled the function name as
        # 'flow_heirarchy'.
        raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
    # Edges lying inside a strongly connected component are exactly the
    # cyclic edges; subtract their weighted fraction from 1.
    scc = nx.strongly_connected_components(G)
    return 1.-sum(G.subgraph(c).size(weight) for c in scc)/float(G.size(weight))
def is_isolate(G,n):
    """Determine if node n is an isolate (degree zero).

    Parameters
    ----------
    G : graph
        A networkx graph
    n : node
        A node in G

    Returns
    -------
    isolate : bool
       True if n has no neighbors, False otherwise.

    Examples
    --------
    >>> G=nx.Graph()
    >>> G.add_edge(1,2)
    >>> G.add_node(3)
    >>> nx.is_isolate(G,2)
    False
    >>> nx.is_isolate(G,3)
    True
    """
    return G.degree(n)==0

def isolates(G):
    """Return list of isolates in the graph.

    Isolates are nodes with no neighbors (degree zero).

    Parameters
    ----------
    G : graph
        A networkx graph

    Returns
    -------
    isolates : list
       List of isolate nodes.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edge(1,2)
    >>> G.add_node(3)
    >>> nx.isolates(G)
    [3]

    To remove all isolates in the graph use
    >>> G.remove_nodes_from(nx.isolates(G))
    >>> G.nodes()
    [1, 2]

    For digraphs isolates have zero in-degree and zero out-degree
    >>> G = nx.DiGraph([(0,1),(1,2)])
    >>> G.add_node(3)
    >>> nx.isolates(G)
    [3]
    """
    # degree_iter yields (node, degree); degree is total (in+out) for
    # digraphs, so this covers both cases.
    return [n for (n,d) in G.degree_iter() if d==0]
class TestIsomorph:

    def setUp(self):
        """Create the four small fixture graphs used by every test."""
        edge_lists = (
            [[1, 2], [1, 3], [1, 5], [2, 3]],
            [[10, 20], [20, 30], [10, 30], [10, 50]],
            [[1, 2], [1, 3], [1, 5], [2, 5]],
            [[1, 2], [1, 3], [1, 5], [2, 4]],
        )
        built = []
        for edges in edge_lists:
            g = nx.Graph()
            g.add_edges_from(edges)
            built.append(g)
        self.G1, self.G2, self.G3, self.G4 = built

    def test_could_be_isomorphic(self):
        # Cheap invariant screen: G1/G2 and G1/G3 pass, G1/G4 fails.
        assert_true(iso.could_be_isomorphic(self.G1, self.G2))
        assert_true(iso.could_be_isomorphic(self.G1, self.G3))
        assert_false(iso.could_be_isomorphic(self.G1, self.G4))
        assert_true(iso.could_be_isomorphic(self.G3, self.G2))

    def test_fast_could_be_isomorphic(self):
        assert_true(iso.fast_could_be_isomorphic(self.G3, self.G2))

    def test_faster_could_be_isomorphic(self):
        assert_true(iso.faster_could_be_isomorphic(self.G3, self.G2))

    def test_is_isomorphic(self):
        assert_true(iso.is_isomorphic(self.G1, self.G2))
        assert_false(iso.is_isomorphic(self.G1, self.G4))
class TestHITS:

    def setUp(self):
        # Fixture digraph with precomputed hub/authority scores
        # (example from Langville & Meyer's eigenvector-methods survey,
        # cited at the top of this module).
        G=networkx.DiGraph()

        edges=[(1,3),(1,5),\
               (2,1),\
               (3,5),\
               (5,4),(5,3),\
               (6,5)]

        G.add_edges_from(edges,weight=1)
        self.G=G
        # Expected authority scores, zipped against graph iteration order.
        self.G.a=dict(zip(G,[0.000000, 0.000000, 0.366025,
                             0.133975, 0.500000, 0.000000]))
        # Expected hub scores, zipped against graph iteration order.
        self.G.h=dict(zip(G,[ 0.366025, 0.000000, 0.211325,
                              0.000000, 0.211325, 0.211325]))

    def test_hits(self):
        # Power-iteration implementation must match the reference scores.
        G=self.G
        h,a=networkx.hits(G,tol=1.e-08)
        for n in G:
            assert_almost_equal(h[n],G.h[n],places=4)
        for n in G:
            assert_almost_equal(a[n],G.a[n],places=4)

    def test_hits_nstart(self):
        # Smoke test only: verifies a custom starting vector is accepted.
        G = self.G
        nstart = dict([(i, 1./2) for i in G])
        h, a = networkx.hits(G, nstart = nstart)

    @attr('numpy')
    def test_hits_numpy(self):
        try:
            import numpy as np
        except ImportError:
            raise SkipTest('NumPy not available.')

        G=self.G
        h,a=networkx.hits_numpy(G)
        for n in G:
            assert_almost_equal(h[n],G.h[n],places=4)
        for n in G:
            assert_almost_equal(a[n],G.a[n],places=4)

    def test_hits_scipy(self):
        # NOTE(review): unlike test_hits_numpy this method carries no
        # @attr tag — confirm whether that is intentional.
        try:
            import scipy as sp
        except ImportError:
            raise SkipTest('SciPy not available.')

        G=self.G
        h,a=networkx.hits_scipy(G,tol=1.e-08)
        for n in G:
            assert_almost_equal(h[n],G.h[n],places=4)
        for n in G:
            assert_almost_equal(a[n],G.a[n],places=4)

    @attr('numpy')
    def test_empty(self):
        # All variants must handle the empty graph without raising.
        try:
            import numpy
        except ImportError:
            raise SkipTest('numpy not available.')
        G=networkx.Graph()
        assert_equal(networkx.hits(G),({},{}))
        assert_equal(networkx.hits_numpy(G),({},{}))
        assert_equal(networkx.hits_scipy(G),({},{}))
        assert_equal(networkx.authority_matrix(G).shape,(0,0))
        assert_equal(networkx.hub_matrix(G).shape,(0,0))
def maximal_independent_set(G, nodes=None):
    """Return a random maximal independent set guaranteed to contain
    a given set of nodes.

    An independent set is a set of nodes such that the subgraph
    of G induced by these nodes contains no edges. A maximal
    independent set is an independent set such that it is not possible
    to add a new node and still get an independent set.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or iterable
       Nodes that must be part of the independent set. This set of nodes
       must be independent.

    Returns
    -------
    indep_nodes : list
       List of nodes that are part of a maximal independent set.

    Raises
    ------
    NetworkXUnfeasible
       If the nodes in the provided list are not part of the graph or
       do not form an independent set, an exception is raised.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.maximal_independent_set(G) # doctest: +SKIP
    [4, 0, 2]
    >>> nx.maximal_independent_set(G, [1]) # doctest: +SKIP
    [1, 3]

    Notes
    ------
    This algorithm does not solve the maximum independent set problem.

    """
    if not nodes:
        # No seed supplied: start from one random node.
        # NOTE(review): an explicitly-passed *empty* container falls into
        # this branch too and gets a random seed — confirm that is intended.
        nodes = set([random.choice(G.nodes())])
    else:
        nodes = set(nodes)
    if not nodes.issubset(G):
        raise nx.NetworkXUnfeasible(
                "%s is not a subset of the nodes of G" % nodes)
    # Neighbors of the seed set; any overlap means the seed is not
    # independent.
    neighbors = set.union(*[set(G.neighbors(v)) for v in nodes])
    if set.intersection(neighbors, nodes):
        raise nx.NetworkXUnfeasible(
                "%s is not an independent set of G" % nodes)
    indep_nodes = list(nodes)
    # Candidates: nodes neither in the set nor adjacent to it.
    available_nodes = set(G.nodes()).difference(neighbors.union(nodes))
    while available_nodes:
        # Greedily add a random candidate, then discard it and its
        # neighbors from the candidate pool.
        node = random.choice(list(available_nodes))
        indep_nodes.append(node)
        available_nodes.difference_update(G.neighbors(node) + [node])
    return indep_nodes
def complement(G, name=None):
    """Return the graph complement of G.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    name : string
       Specify name for new graph

    Returns
    -------
    GC : A new graph.

    Notes
    ------
    Note that complement() does not create self-loops and also
    does not produce parallel edges for MultiGraphs.

    Graph, node, and edge data are not propagated to the new graph.
    """
    if name is None:
        name="complement(%s)"%(G.name)
    # Build an empty graph of the same concrete type as G.
    result = G.__class__()
    result.name = name
    result.add_nodes_from(G)
    # An edge (u, w) belongs to the complement when it is not a
    # self-loop and w is not adjacent to u in G.
    missing_edges = []
    for u, nbrs in G.adjacency_iter():
        for w in G:
            if w != u and w not in nbrs:
                missing_edges.append((u, w))
    result.add_edges_from(missing_edges)
    return result
class TestFloydNumpy(object):
    numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
    @classmethod
    def setupClass(cls):
        """Import numpy once for the class, or skip every test."""
        global numpy
        global assert_equal
        global assert_almost_equal
        try:
            import numpy
            from numpy.testing import assert_equal,assert_almost_equal
        except ImportError:
            raise SkipTest('NumPy not available.')

    def test_cycle_numpy(self):
        # Distances on an unweighted 7-cycle.
        dist = nx.floyd_warshall_numpy(nx.cycle_graph(7))
        assert_equal(dist[0,3],3)
        assert_equal(dist[0,4],3)

    def test_weighted_numpy(self):
        # Shortest 0->3 path avoids the heavy 1-2 edge.
        XG3=nx.Graph()
        XG3.add_weighted_edges_from([ [0,1,2],[1,2,12],[2,3,1],
                                      [3,4,5],[4,5,1],[5,0,10] ])
        dist = nx.floyd_warshall_numpy(XG3)
        assert_equal(dist[0,3],15)

    def test_weighted_numpy_2(self):
        # FIX: this method was also named test_weighted_numpy, silently
        # shadowing the XG3 test above so it never ran; renamed so both run.
        XG4=nx.Graph()
        XG4.add_weighted_edges_from([ [0,1,2],[1,2,2],[2,3,1],
                                      [3,4,1],[4,5,1],[5,6,1],
                                      [6,7,1],[7,0,1] ])
        dist = nx.floyd_warshall_numpy(XG4)
        assert_equal(dist[0,2],4)

    def test_weight_parameter_numpy(self):
        # Same cycle as above but weights stored under a custom key.
        XG4 = nx.Graph()
        XG4.add_edges_from([ (0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}),
                             (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}),
                             (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}),
                             (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1}) ])
        dist = nx.floyd_warshall_numpy(XG4, weight='heavy')
        assert_equal(dist[0, 2], 4)

    def test_directed_cycle_numpy(self):
        # Matrix form must agree with the predecessor/distance form.
        G = nx.DiGraph()
        G.add_cycle([0,1,2,3])
        pred,dist = nx.floyd_warshall_predecessor_and_distance(G)
        D = nx.utils.dict_to_numpy_array(dist)
        assert_equal(nx.floyd_warshall_numpy(G),D)
def s_metric(G, normalized=True):
    """Return the s-metric of graph.

    The s-metric is defined as the sum of the products deg(u)*deg(v)
    for every edge (u,v) in G. If norm is provided construct the
    s-max graph and compute its s_metric, and return the normalized
    s value

    Parameters
    ----------
    G    : graph
           The graph used to compute the s-metric.
    normalized : bool (optional)
           Normalize the value.

    Returns
    -------
    s : float
        The s-metric of the graph.

    References
    ----------
    .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
           Towards a Theory of Scale-Free Graphs:
           Definition, Properties, and Implications (Extended Version), 2005.
           http://arxiv.org/abs/cond-mat/0501169
    """
    if normalized:
        # NOTE(review): normalization is unimplemented, so the *default*
        # call s_metric(G) always raises — callers must pass
        # normalized=False. Confirm this default is intended.
        raise nx.NetworkXError("Normalization not implemented")
#        Gmax = li_smax_graph(list(G.degree().values()))
#        return s_metric(G,normalized=False)/s_metric(Gmax,normalized=False)
#    else:
    return float(sum([G.degree(u)*G.degree(v) for (u,v) in G.edges_iter()]))
50 | @raises(networkx.NetworkXError) 51 | def test_eccentricity_invalid(self): 52 | G=networkx.Graph([(1,2),(3,4)]) 53 | e = networkx.eccentricity(G,sp=1) 54 | 55 | 56 | -------------------------------------------------------------------------------- /networkx/algorithms/tests/test_distance_regular.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | class TestDistanceRegular: 6 | 7 | def test_is_distance_regular(self): 8 | assert_true(nx.is_distance_regular(nx.icosahedral_graph())) 9 | assert_true(nx.is_distance_regular(nx.petersen_graph())) 10 | assert_true(nx.is_distance_regular(nx.cubical_graph())) 11 | assert_true(nx.is_distance_regular(nx.complete_bipartite_graph(3,3))) 12 | assert_true(nx.is_distance_regular(nx.tetrahedral_graph())) 13 | assert_true(nx.is_distance_regular(nx.dodecahedral_graph())) 14 | assert_true(nx.is_distance_regular(nx.pappus_graph())) 15 | assert_true(nx.is_distance_regular(nx.heawood_graph())) 16 | assert_true(nx.is_distance_regular(nx.cycle_graph(3))) 17 | # no distance regular 18 | assert_false(nx.is_distance_regular(nx.path_graph(4))) 19 | 20 | def test_not_connected(self): 21 | G=nx.cycle_graph(4) 22 | G.add_cycle([5,6,7]) 23 | assert_false(nx.is_distance_regular(G)) 24 | 25 | 26 | def test_global_parameters(self): 27 | b,c=nx.intersection_array(nx.cycle_graph(5)) 28 | g=nx.global_parameters(b,c) 29 | assert_equal(list(g),[(0, 0, 2), (1, 0, 1), (1, 1, 0)]) 30 | b,c=nx.intersection_array(nx.cycle_graph(3)) 31 | g=nx.global_parameters(b,c) 32 | assert_equal(list(g),[(0, 0, 2), (1, 1, 0)]) 33 | 34 | 35 | def test_intersection_array(self): 36 | b,c=nx.intersection_array(nx.cycle_graph(5)) 37 | assert_equal(b,[2, 1]) 38 | assert_equal(c,[1, 1]) 39 | b,c=nx.intersection_array(nx.dodecahedral_graph()) 40 | assert_equal(b,[3, 2, 1, 1, 1]) 41 | assert_equal(c,[1, 1, 1, 2, 3]) 42 | 
b,c=nx.intersection_array(nx.icosahedral_graph()) 43 | assert_equal(b,[5, 2, 1]) 44 | assert_equal(c,[1, 2, 5]) 45 | -------------------------------------------------------------------------------- /networkx/algorithms/tests/test_graphical.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | def test_valid_degree_sequence1(): 6 | n = 100 7 | p = .3 8 | for i in range(10): 9 | G = nx.erdos_renyi_graph(n,p) 10 | deg = list(G.degree().values()) 11 | assert_true( nx.is_valid_degree_sequence(deg, method='eg') ) 12 | assert_true( nx.is_valid_degree_sequence(deg, method='hh') ) 13 | 14 | def test_valid_degree_sequence2(): 15 | n = 100 16 | for i in range(10): 17 | G = nx.barabasi_albert_graph(n,1) 18 | deg = list(G.degree().values()) 19 | assert_true( nx.is_valid_degree_sequence(deg, method='eg') ) 20 | assert_true( nx.is_valid_degree_sequence(deg, method='hh') ) 21 | 22 | @raises(nx.NetworkXException) 23 | def test_string_input(): 24 | a = nx.is_valid_degree_sequence([],'foo') 25 | 26 | def test_negative_input(): 27 | assert_false(nx.is_valid_degree_sequence([-1],'hh')) 28 | assert_false(nx.is_valid_degree_sequence([-1],'eg')) 29 | assert_false(nx.is_valid_degree_sequence([72.5],'eg')) 30 | 31 | 32 | def test_atlas(): 33 | for graph in nx.graph_atlas_g(): 34 | deg = list(graph.degree().values()) 35 | assert_true( nx.is_valid_degree_sequence(deg, method='eg') ) 36 | assert_true( nx.is_valid_degree_sequence(deg, method='hh') ) 37 | 38 | def test_small_graph_true(): 39 | z=[5,3,3,3,3,2,2,2,1,1,1] 40 | assert_true(nx.is_valid_degree_sequence(z, method='hh')) 41 | assert_true(nx.is_valid_degree_sequence(z, method='eg')) 42 | z=[10,3,3,3,3,2,2,2,2,2,2] 43 | assert_true(nx.is_valid_degree_sequence(z, method='hh')) 44 | assert_true(nx.is_valid_degree_sequence(z, method='eg')) 45 | z=[1, 1, 1, 1, 1, 2, 2, 2, 3, 4] 46 | assert_true(nx.is_valid_degree_sequence(z, 
method='hh')) 47 | assert_true(nx.is_valid_degree_sequence(z, method='eg')) 48 | 49 | 50 | 51 | def test_small_graph_false(): 52 | z=[1000,3,3,3,3,2,2,2,1,1,1] 53 | assert_false(nx.is_valid_degree_sequence(z, method='hh')) 54 | assert_false(nx.is_valid_degree_sequence(z, method='eg')) 55 | z=[6,5,4,4,2,1,1,1] 56 | assert_false(nx.is_valid_degree_sequence(z, method='hh')) 57 | assert_false(nx.is_valid_degree_sequence(z, method='eg')) 58 | z=[1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] 59 | assert_false(nx.is_valid_degree_sequence(z, method='hh')) 60 | assert_false(nx.is_valid_degree_sequence(z, method='eg')) 61 | 62 | 63 | 64 | def test_iterable(): 65 | G = nx.path_graph(4) 66 | seq = iter(G.degree().values()) 67 | assert_true(nx.is_valid_degree_sequence(seq, method='hh')) 68 | assert_true(nx.is_valid_degree_sequence(seq, method='eg')) 69 | -------------------------------------------------------------------------------- /networkx/algorithms/tests/test_hierarchy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | def test_hierarchy_exception(): 6 | G = nx.cycle_graph(5) 7 | assert_raises(nx.NetworkXError,nx.flow_hierarchy,G) 8 | 9 | def test_hierarchy_cycle(): 10 | G = nx.cycle_graph(5,create_using=nx.DiGraph()) 11 | assert_equal(nx.flow_hierarchy(G),0.0) 12 | 13 | def test_hierarchy_tree(): 14 | G = nx.full_rary_tree(2,16,create_using=nx.DiGraph()) 15 | assert_equal(nx.flow_hierarchy(G),1.0) 16 | 17 | def test_hierarchy_1(): 18 | G = nx.DiGraph() 19 | G.add_edges_from([(0,1),(1,2),(2,3),(3,1),(3,4),(0,4)]) 20 | assert_equal(nx.flow_hierarchy(G),0.5) 21 | 22 | def test_hierarchy_weight(): 23 | G = nx.DiGraph() 24 | G.add_edges_from([(0,1,{'weight':.3}), 25 | (1,2,{'weight':.1}), 26 | (2,3,{'weight':.1}), 27 | (3,1,{'weight':.1}), 28 | (3,4,{'weight':.3}), 29 | (0,4,{'weight':.3})]) 30 | assert_equal(nx.flow_hierarchy(G,weight='weight'),.75) 
-------------------------------------------------------------------------------- /networkx/algorithms/tests/test_richclub.py: -------------------------------------------------------------------------------- 1 | import networkx as nx 2 | from nose.tools import * 3 | 4 | 5 | def test_richclub(): 6 | G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)]) 7 | rc = nx.richclub.rich_club_coefficient(G,normalized=False) 8 | assert_equal(rc,{0: 12.0/30,1:8.0/12}) 9 | 10 | # test single value 11 | rc0 = nx.richclub.rich_club_coefficient(G,normalized=False)[0] 12 | assert_equal(rc0,12.0/30.0) 13 | 14 | def test_richclub_normalized(): 15 | G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)]) 16 | rcNorm = nx.richclub.rich_club_coefficient(G,Q=2) 17 | assert_equal(rcNorm,{0:1.0,1:1.0}) 18 | 19 | 20 | def test_richclub2(): 21 | T = nx.balanced_tree(2,10) 22 | rc = nx.richclub.rich_club_coefficient(T,normalized=False) 23 | assert_equal(rc,{0:4092/(2047*2046.0), 24 | 1:(2044.0/(1023*1022)), 25 | 2:(2040.0/(1022*1021))}) 26 | 27 | #def test_richclub2_normalized(): 28 | # T = nx.balanced_tree(2,10) 29 | # rcNorm = nx.richclub.rich_club_coefficient(T,Q=2) 30 | # assert_true(rcNorm[0] ==1.0 and rcNorm[1] < 0.9 and rcNorm[2] < 0.9) 31 | -------------------------------------------------------------------------------- /networkx/algorithms/tests/test_simple_paths.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | def test_all_simple_paths(): 6 | G = nx.path_graph(4) 7 | paths = nx.all_simple_paths(G,0,3) 8 | assert_equal(list(list(p) for p in paths),[[0,1,2,3]]) 9 | 10 | def test_all_simple_paths_cutoff(): 11 | G = nx.complete_graph(4) 12 | paths = nx.all_simple_paths(G,0,1,cutoff=1) 13 | assert_equal(list(list(p) for p in paths),[[0,1]]) 14 | paths = nx.all_simple_paths(G,0,1,cutoff=2) 15 | assert_equal(list(list(p) for p in paths),[[0,1],[0,2,1],[0,3,1]]) 16 | 17 | def 
def hamiltonian_path(G, source):
    """Yield all Hamiltonian paths in G that start at *source*.

    A simple path is Hamiltonian when it visits every node of G exactly
    once, i.e. its length equals the number of nodes.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    source : node
        The node every yielded path starts from.

    Yields
    ------
    list of nodes, one per Hamiltonian path found.

    Notes
    -----
    Bug fix: the original body began with ``source = next(G.nodes_iter())``,
    which overwrote the caller's *source* argument and silently searched
    from an arbitrary node instead.  The parameter is now honored.
    """
    # Candidate endpoints are the neighbors of source (excluding a
    # possible self-loop, which can never extend a simple path).
    neighbors = set(G[source]) - set([source])
    n = len(G)
    for target in neighbors:
        for path in nx.all_simple_paths(G, source, target):
            # A simple path covering all n nodes is Hamiltonian.
            if len(path) == n:
                yield path
-------------------------------------------------------------------------------- /networkx/algorithms/tests/test_smetric.py: -------------------------------------------------------------------------------- 1 | 2 | from nose.tools import assert_equal,raises 3 | 4 | import networkx as nx 5 | 6 | def test_smetric(): 7 | g = nx.Graph() 8 | g.add_edge(1,2) 9 | g.add_edge(2,3) 10 | g.add_edge(2,4) 11 | g.add_edge(1,4) 12 | sm = nx.s_metric(g,normalized=False) 13 | assert_equal(sm, 19.0) 14 | # smNorm = nx.s_metric(g,normalized=True) 15 | # assert_equal(smNorm, 0.95) 16 | 17 | @raises(nx.NetworkXError) 18 | def test_normalized(): 19 | sm = nx.s_metric(nx.Graph(),normalized=True) 20 | -------------------------------------------------------------------------------- /networkx/algorithms/tests/test_swap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | from networkx import * 4 | 5 | def test_double_edge_swap(): 6 | graph = barabasi_albert_graph(200,1) 7 | degrees = sorted(graph.degree().values()) 8 | G = double_edge_swap(graph, 40) 9 | assert_equal(degrees, sorted(graph.degree().values())) 10 | 11 | def test_connected_double_edge_swap(): 12 | graph = barabasi_albert_graph(200,1) 13 | degrees = sorted(graph.degree().values()) 14 | G = connected_double_edge_swap(graph, 40) 15 | assert_true(is_connected(graph)) 16 | assert_equal(degrees, sorted(graph.degree().values())) 17 | 18 | @raises(NetworkXError) 19 | def test_double_edge_swap_small(): 20 | G = nx.double_edge_swap(nx.path_graph(3)) 21 | 22 | @raises(NetworkXError) 23 | def test_double_edge_swap_tries(): 24 | G = nx.double_edge_swap(nx.path_graph(10),nswap=1,max_tries=0) 25 | 26 | @raises(NetworkXError) 27 | def test_connected_double_edge_swap_small(): 28 | G = nx.connected_double_edge_swap(nx.path_graph(3)) 29 | 30 | @raises(NetworkXError) 31 | def test_connected_double_edge_swap_not_connected(): 32 | G = nx.path_graph(3) 33 | 
G.add_path([10,11,12]) 34 | G = nx.connected_double_edge_swap(G) 35 | 36 | 37 | def test_degree_seq_c4(): 38 | G = cycle_graph(4) 39 | degrees = sorted(G.degree().values()) 40 | G = double_edge_swap(G,1,100) 41 | assert_equal(degrees, sorted(G.degree().values())) 42 | 43 | -------------------------------------------------------------------------------- /networkx/algorithms/tests/test_vitality.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | class TestVitality: 6 | 7 | def test_closeness_vitality_unweighted(self): 8 | G=nx.cycle_graph(3) 9 | v=nx.closeness_vitality(G) 10 | assert_equal(v,{0:4.0, 1:4.0, 2:4.0}) 11 | assert_equal(v[0],4.0) 12 | 13 | def test_closeness_vitality_weighted(self): 14 | G=nx.Graph() 15 | G.add_cycle([0,1,2],weight=2) 16 | v=nx.closeness_vitality(G,weight='weight') 17 | assert_equal(v,{0:8.0, 1:8.0, 2:8.0}) 18 | 19 | def test_closeness_vitality_unweighted_digraph(self): 20 | G=nx.DiGraph() 21 | G.add_cycle([0,1,2]) 22 | v=nx.closeness_vitality(G) 23 | assert_equal(v,{0:8.0, 1:8.0, 2:8.0}) 24 | 25 | def test_closeness_vitality_weighted_digraph(self): 26 | G=nx.DiGraph() 27 | G.add_cycle([0,1,2],weight=2) 28 | v=nx.closeness_vitality(G,weight='weight') 29 | assert_equal(v,{0:16.0, 1:16.0, 2:16.0}) 30 | 31 | def test_closeness_vitality_weighted_multidigraph(self): 32 | G=nx.MultiDiGraph() 33 | G.add_cycle([0,1,2],weight=2) 34 | v=nx.closeness_vitality(G,weight='weight') 35 | assert_equal(v,{0:16.0, 1:16.0, 2:16.0}) 36 | -------------------------------------------------------------------------------- /networkx/algorithms/traversal/__init__.py: -------------------------------------------------------------------------------- 1 | import networkx.algorithms.traversal.depth_first_search 2 | from networkx.algorithms.traversal.depth_first_search import * 3 | import networkx.algorithms.traversal.breadth_first_search 4 | from 
def bfs_edges(G, source):
    """Produce edges in a breadth-first-search starting at source.

    Parameters
    ----------
    G : graph
        Any mapping from a node to an iterable of its neighbors
        (a NetworkX graph or a plain dict of adjacency lists).
    source : node
        Node to start the search from.

    Yields
    ------
    (parent, child) tuples in BFS discovery order.
    """
    # Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    # by D. Eppstein, July 2004.
    #
    # Fix: the original dequeued with list.pop(0), which shifts every
    # remaining element and makes each dequeue O(n) (O(n^2) traversal
    # overall).  A moving read index keeps strict FIFO order with O(1)
    # amortized dequeues and needs no extra imports.
    visited = set([source])
    queue = [(source, iter(G[source]))]
    head = 0  # index of the current front of the queue
    while head < len(queue):
        parent, children = queue[head]
        try:
            child = next(children)
            if child not in visited:
                yield parent, child
                visited.add(child)
                queue.append((child, iter(G[child])))
        except StopIteration:
            # Current node's neighbors exhausted; advance the front.
            head += 1


def bfs_tree(G, source):
    """Return directed tree of breadth-first-search from source."""
    return nx.DiGraph(bfs_edges(G, source))


def bfs_predecessors(G, source):
    """Return dictionary of predecessors in breadth-first-search from source."""
    return dict((t, s) for s, t in bfs_edges(G, source))


def bfs_successors(G, source):
    """Return dictionary of successors in breadth-first-search from source."""
    d = defaultdict(list)
    for s, t in bfs_edges(G, source):
        d[s].append(t)
    return dict(d)
nose.tools import * 3 | import networkx as nx 4 | 5 | class TestBFS: 6 | 7 | def setUp(self): 8 | # simple graph 9 | G=nx.Graph() 10 | G.add_edges_from([(0,1),(1,2),(1,3),(2,4),(3,4)]) 11 | self.G=G 12 | 13 | def test_successor(self): 14 | assert_equal(nx.bfs_successors(self.G,source=0), 15 | {0: [1], 1: [2,3], 2:[4]}) 16 | 17 | def test_predecessor(self): 18 | assert_equal(nx.bfs_predecessors(self.G,source=0), 19 | {1: 0, 2: 1, 3: 1, 4: 2}) 20 | 21 | def test_bfs_tree(self): 22 | T=nx.bfs_tree(self.G,source=0) 23 | assert_equal(sorted(T.nodes()),sorted(self.G.nodes())) 24 | assert_equal(sorted(T.edges()),[(0, 1), (1, 2), (1, 3), (2, 4)]) 25 | 26 | def test_bfs_edges(self): 27 | edges=nx.bfs_edges(self.G,source=0) 28 | assert_equal(list(edges),[(0, 1), (1, 2), (1, 3), (2, 4)]) 29 | 30 | -------------------------------------------------------------------------------- /networkx/algorithms/traversal/tests/test_dfs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | class TestDFS: 6 | 7 | def setUp(self): 8 | # simple graph 9 | G=nx.Graph() 10 | G.add_edges_from([(0,1),(1,2),(1,3),(2,4),(3,4)]) 11 | self.G=G 12 | # simple graph, disconnected 13 | D=nx.Graph() 14 | D.add_edges_from([(0,1),(2,3)]) 15 | self.D=D 16 | 17 | 18 | def test_preorder_nodes(self): 19 | assert_equal(list(nx.dfs_preorder_nodes(self.G,source=0)), 20 | [0, 1, 2, 4, 3]) 21 | assert_equal(list(nx.dfs_preorder_nodes(self.D)),[0, 1, 2, 3]) 22 | 23 | def test_postorder_nodes(self): 24 | assert_equal(list(nx.dfs_postorder_nodes(self.G,source=0)), 25 | [3, 4, 2, 1, 0]) 26 | assert_equal(list(nx.dfs_postorder_nodes(self.D)),[1, 0, 3, 2]) 27 | 28 | def test_successor(self): 29 | assert_equal(nx.dfs_successors(self.G,source=0), 30 | {0: [1], 1: [2], 2: [4], 4: [3]}) 31 | assert_equal(nx.dfs_successors(self.D), {0: [1], 2: [3]}) 32 | 33 | def test_predecessor(self): 34 | 
assert_equal(nx.dfs_predecessors(self.G,source=0), 35 | {1: 0, 2: 1, 3: 4, 4: 2}) 36 | assert_equal(nx.dfs_predecessors(self.D), {1: 0, 3: 2}) 37 | 38 | def test_dfs_tree(self): 39 | T=nx.dfs_tree(self.G,source=0) 40 | assert_equal(sorted(T.nodes()),sorted(self.G.nodes())) 41 | assert_equal(sorted(T.edges()),[(0, 1), (1, 2), (2, 4), (4, 3)]) 42 | 43 | def test_dfs_edges(self): 44 | edges=nx.dfs_edges(self.G,source=0) 45 | assert_equal(list(edges),[(0, 1), (1, 2), (2, 4), (4, 3)]) 46 | edges=nx.dfs_edges(self.D) 47 | assert_equal(list(edges),[(0, 1), (2, 3)]) 48 | 49 | def test_dfs_labeled_edges(self): 50 | edges=list(nx.dfs_labeled_edges(self.G,source=0)) 51 | forward=[(u,v) for (u,v,d) in edges if d['dir']=='forward'] 52 | assert_equal(forward,[(0,0), (0, 1), (1, 2), (2, 4), (4, 3)]) 53 | 54 | def test_dfs_labeled_disconnected_edges(self): 55 | edges=list(nx.dfs_labeled_edges(self.D)) 56 | forward=[(u,v) for (u,v,d) in edges if d['dir']=='forward'] 57 | assert_equal(forward,[(0, 0), (0, 1), (2, 2), (2, 3)]) 58 | 59 | -------------------------------------------------------------------------------- /networkx/algorithms/vitality.py: -------------------------------------------------------------------------------- 1 | """ 2 | Vitality measures. 3 | """ 4 | # Copyright (C) 2012 by 5 | # Aric Hagberg 6 | # Dan Schult 7 | # Pieter Swart 8 | # All rights reserved. 9 | # BSD license. 
def weiner_index(G, weight=None):
    # Sum of shortest-path distances over all ordered node pairs
    # (the Wiener index; the "weiner" spelling is kept for backward
    # compatibility with existing callers).
    # With weight=None hop counts (BFS) are used; otherwise Dijkstra
    # lengths using the named edge attribute.
    weiner=0.0
    if weight is None:
        for n in G:
            path_length=nx.single_source_shortest_path_length(G,n)
            weiner+=sum(path_length.values())
    else:
        for n in G:
            path_length=nx.single_source_dijkstra_path_length(G,
                    n,weight=weight)
            weiner+=sum(path_length.values())
    return weiner


def closeness_vitality(G, weight=None):
    """Compute closeness vitality for nodes.

    Closeness vitality of a node is the change in the sum of distances
    between all node pairs when excluding that node.

    Parameters
    ----------
    G : graph

    weight : None or string (optional)
       The name of the edge attribute used as weight. If None the edge
       weights are ignored.

    Returns
    -------
    nodes : dictionary
       Dictionary with nodes as keys and closeness vitality as the value.

    Examples
    --------
    >>> G=nx.cycle_graph(3)
    >>> nx.closeness_vitality(G)
    {0: 4.0, 1: 4.0, 2: 4.0}

    See Also
    --------
    closeness_centrality()

    References
    ----------
    .. [1] Ulrik Brandes, Sec. 3.6.2 in
       Network Analysis: Methodological Foundations, Springer, 2005.
       http://books.google.com/books?id=TTNhSm7HYrIC
    """
    # NOTE(review): G is mutated in place (edges removed then re-added),
    # so this is not safe for concurrent use of G; if weiner_index
    # raises mid-loop, removed edges are not restored -- TODO confirm
    # acceptable for callers.
    multigraph = G.is_multigraph()
    # Baseline Wiener index of the intact graph.
    wig = weiner_index(G,weight)
    closeness_vitality = {}
    for n in G:
        # remove edges connected to node n and keep list of edges with data
        # could remove node n but it doesn't count anyway
        if multigraph:
            # keys=True so parallel edges can be re-added exactly.
            edges = G.edges(n,data=True,keys=True)
            if G.is_directed():
                edges += G.in_edges(n,data=True,keys=True)
        else:
            edges = G.edges(n,data=True)
            if G.is_directed():
                edges += G.in_edges(n,data=True)
        G.remove_edges_from(edges)
        # Vitality = baseline index minus index with n isolated.
        closeness_vitality[n] = wig - weiner_index(G,weight)
        # add edges and data back to graph
        G.add_edges_from(edges)
    return closeness_vitality
from networkx.drawing.layout import * 4 | from networkx.drawing.nx_pylab import * 5 | 6 | # graphviz interface 7 | # prefer pygraphviz/agraph (it's faster) 8 | from networkx.drawing.nx_agraph import * 9 | try: 10 | import pydot 11 | import networkx.drawing.nx_pydot 12 | from networkx.drawing.nx_pydot import * 13 | except ImportError: 14 | pass 15 | try: 16 | import pygraphviz 17 | from networkx.drawing.nx_agraph import * 18 | except ImportError: 19 | pass 20 | 21 | -------------------------------------------------------------------------------- /networkx/drawing/tests/test_agraph.py: -------------------------------------------------------------------------------- 1 | """Unit tests for PyGraphviz intefaace. 2 | """ 3 | import os 4 | import tempfile 5 | 6 | from nose import SkipTest 7 | from nose.tools import assert_true,assert_equal 8 | 9 | import networkx as nx 10 | 11 | class TestAGraph(object): 12 | @classmethod 13 | def setupClass(cls): 14 | global pygraphviz 15 | try: 16 | import pygraphviz 17 | except ImportError: 18 | raise SkipTest('PyGraphviz not available.') 19 | 20 | def build_graph(self, G): 21 | G.add_edge('A','B') 22 | G.add_edge('A','C') 23 | G.add_edge('A','C') 24 | G.add_edge('B','C') 25 | G.add_edge('A','D') 26 | G.add_node('E') 27 | return G 28 | 29 | def assert_equal(self, G1, G2): 30 | assert_true( sorted(G1.nodes())==sorted(G2.nodes()) ) 31 | assert_true( sorted(G1.edges())==sorted(G2.edges()) ) 32 | 33 | 34 | def agraph_checks(self, G): 35 | G = self.build_graph(G) 36 | A=nx.to_agraph(G) 37 | H=nx.from_agraph(A) 38 | self.assert_equal(G, H) 39 | 40 | fname=tempfile.mktemp() 41 | nx.drawing.nx_agraph.write_dot(H,fname) 42 | Hin=nx.drawing.nx_agraph.read_dot(fname) 43 | os.unlink(fname) 44 | self.assert_equal(H,Hin) 45 | 46 | 47 | (fd,fname)=tempfile.mkstemp() 48 | fh=open(fname,'w') 49 | nx.drawing.nx_agraph.write_dot(H,fh) 50 | fh.close() 51 | 52 | fh=open(fname,'r') 53 | Hin=nx.drawing.nx_agraph.read_dot(fh) 54 | fh.close() 55 | 
os.unlink(fname) 56 | self.assert_equal(H,Hin) 57 | 58 | def test_from_agraph_name(self): 59 | G=nx.Graph(name='test') 60 | A=nx.to_agraph(G) 61 | H=nx.from_agraph(A) 62 | assert_equal(G.name,'test') 63 | 64 | 65 | def testUndirected(self): 66 | self.agraph_checks(nx.Graph()) 67 | 68 | def testDirected(self): 69 | self.agraph_checks(nx.DiGraph()) 70 | 71 | def testMultiUndirected(self): 72 | self.agraph_checks(nx.MultiGraph()) 73 | 74 | def testMultiDirected(self): 75 | self.agraph_checks(nx.MultiDiGraph()) 76 | -------------------------------------------------------------------------------- /networkx/drawing/tests/test_layout.py: -------------------------------------------------------------------------------- 1 | """Unit tests for layout functions.""" 2 | import sys 3 | from nose import SkipTest 4 | from nose.tools import assert_equal 5 | import networkx as nx 6 | 7 | class TestLayout(object): 8 | numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test 9 | @classmethod 10 | def setupClass(cls): 11 | global numpy 12 | try: 13 | import numpy 14 | except ImportError: 15 | raise SkipTest('numpy not available.') 16 | 17 | 18 | def setUp(self): 19 | self.Gi=nx.grid_2d_graph(5,5) 20 | self.Gs=nx.Graph() 21 | self.Gs.add_path('abcdef') 22 | self.bigG=nx.grid_2d_graph(25,25) #bigger than 500 nodes for sparse 23 | 24 | def test_smoke_int(self): 25 | G=self.Gi 26 | vpos=nx.random_layout(G) 27 | vpos=nx.circular_layout(G) 28 | vpos=nx.spring_layout(G) 29 | vpos=nx.fruchterman_reingold_layout(G) 30 | vpos=nx.spectral_layout(G) 31 | vpos=nx.spectral_layout(self.bigG) 32 | vpos=nx.shell_layout(G) 33 | 34 | def test_smoke_string(self): 35 | G=self.Gs 36 | vpos=nx.random_layout(G) 37 | vpos=nx.circular_layout(G) 38 | vpos=nx.spring_layout(G) 39 | vpos=nx.fruchterman_reingold_layout(G) 40 | vpos=nx.spectral_layout(G) 41 | vpos=nx.shell_layout(G) 42 | 43 | 44 | def test_adjacency_interface_numpy(self): 45 | A=nx.to_numpy_matrix(self.Gs) 46 | 
pos=nx.drawing.layout._fruchterman_reingold(A) 47 | pos=nx.drawing.layout._fruchterman_reingold(A,dim=3) 48 | assert_equal(pos.shape,(6,3)) 49 | 50 | def test_adjacency_interface_scipy(self): 51 | try: 52 | import scipy 53 | except ImportError: 54 | raise SkipTest('scipy not available.') 55 | 56 | A=nx.to_scipy_sparse_matrix(self.Gs,dtype='f') 57 | pos=nx.drawing.layout._sparse_fruchterman_reingold(A) 58 | pos=nx.drawing.layout._sparse_spectral(A) 59 | 60 | pos=nx.drawing.layout._sparse_fruchterman_reingold(A,dim=3) 61 | assert_equal(pos.shape,(6,3)) 62 | -------------------------------------------------------------------------------- /networkx/drawing/tests/test_pydot.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for pydot drawing functions. 3 | """ 4 | 5 | import os 6 | import tempfile 7 | 8 | from nose import SkipTest 9 | from nose.tools import assert_true 10 | 11 | import networkx as nx 12 | 13 | class TestPydot(object): 14 | @classmethod 15 | def setupClass(cls): 16 | global pydot 17 | try: 18 | import pydot 19 | except ImportError: 20 | raise SkipTest('pydot not available.') 21 | 22 | def build_graph(self, G): 23 | G.add_edge('A','B') 24 | G.add_edge('A','C') 25 | G.add_edge('B','C') 26 | G.add_edge('A','D') 27 | G.add_node('E') 28 | return G, nx.to_pydot(G) 29 | 30 | def assert_equal(self, G1, G2): 31 | assert_true( sorted(G1.nodes())==sorted(G2.nodes()) ) 32 | assert_true( sorted(G1.edges())==sorted(G2.edges()) ) 33 | 34 | def pydot_checks(self, G): 35 | H, P = self.build_graph(G) 36 | G2 = H.__class__(nx.from_pydot(P)) 37 | self.assert_equal(H, G2) 38 | 39 | fname = tempfile.mktemp() 40 | assert_true( P.write_raw(fname) ) 41 | 42 | Pin = pydot.graph_from_dot_file(fname) 43 | 44 | n1 = sorted([p.get_name() for p in P.get_node_list()]) 45 | n2 = sorted([p.get_name() for p in Pin.get_node_list()]) 46 | assert_true( n1 == n2 ) 47 | 48 | e1=[(e.get_source(),e.get_destination()) for e in 
P.get_edge_list()] 49 | e2=[(e.get_source(),e.get_destination()) for e in Pin.get_edge_list()] 50 | assert_true( sorted(e1)==sorted(e2) ) 51 | 52 | Hin = nx.drawing.nx_pydot.read_dot(fname) 53 | Hin = H.__class__(Hin) 54 | self.assert_equal(H, Hin) 55 | # os.unlink(fname) 56 | 57 | 58 | def testUndirected(self): 59 | self.pydot_checks(nx.Graph()) 60 | 61 | def testDirected(self): 62 | self.pydot_checks(nx.DiGraph()) 63 | 64 | 65 | -------------------------------------------------------------------------------- /networkx/drawing/tests/test_pylab.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for matplotlib drawing functions. 3 | """ 4 | 5 | import os 6 | 7 | from nose import SkipTest 8 | 9 | import networkx as nx 10 | 11 | class TestPylab(object): 12 | @classmethod 13 | def setupClass(cls): 14 | global pylab 15 | try: 16 | import matplotlib as mpl 17 | mpl.use('PS',warn=False) 18 | import pylab 19 | except ImportError: 20 | raise SkipTest('matplotlib not available.') 21 | except RuntimeError: 22 | raise SkipTest('matplotlib not available.') 23 | 24 | def setUp(self): 25 | self.G=nx.barbell_graph(5,10) 26 | 27 | 28 | def test_draw(self): 29 | # hold(False) 30 | N=self.G 31 | nx.draw_spring(N) 32 | pylab.savefig("test.ps") 33 | nx.draw_random(N) 34 | pylab.savefig("test.ps") 35 | nx.draw_circular(N) 36 | pylab.savefig("test.ps") 37 | nx.draw_spectral(N) 38 | pylab.savefig("test.ps") 39 | nx.draw_spring(N.to_directed()) 40 | pylab.savefig("test.ps") 41 | os.unlink('test.ps') 42 | -------------------------------------------------------------------------------- /networkx/exception.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | ********** 4 | Exceptions 5 | ********** 6 | 7 | Base exceptions and errors for NetworkX. 
# Exception handling

# the root of all Exceptions
class NetworkXException(Exception):
    """Base class for exceptions in NetworkX."""

class NetworkXError(NetworkXException):
    """Exception for a serious error in NetworkX"""

class NetworkXPointlessConcept(NetworkXException):
    """Raised when an operation is attempted on the null graph.

    Harary, F. and Read, R. "Is the Null Graph a Pointless Concept?"
    In Graphs and Combinatorics Conference, George Washington University.
    New York: Springer-Verlag, 1973.
    """

class NetworkXAlgorithmError(NetworkXException):
    """Exception for unexpected termination of algorithms."""

class NetworkXUnfeasible(NetworkXAlgorithmError):
    """Exception raised by algorithms trying to solve a problem
    instance that has no feasible solution."""

class NetworkXNoPath(NetworkXUnfeasible):
    """Exception for algorithms that should return a path when running
    on graphs where such a path does not exist."""

class NetworkXUnbounded(NetworkXAlgorithmError):
    """Exception raised by algorithms trying to solve a maximization
    or a minimization problem instance that is unbounded."""

class NetworkXNotImplemented(NetworkXException):
    """Exception raised by algorithms not implemented for a type of graph."""
-------------------------------------------------------------------------------- /networkx/external/decorator/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Hack for including decorator-3.3.1 in NetworkX. 3 | """ 4 | 5 | import sys 6 | 7 | if sys.version >= '3': 8 | from ._decorator3 import * 9 | _decorator = _decorator3 10 | else: 11 | from ._decorator import * 12 | -------------------------------------------------------------------------------- /networkx/generators/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | A package for generating various graphs in networkx. 3 | 4 | """ 5 | from networkx.generators.atlas import * 6 | from networkx.generators.bipartite import * 7 | from networkx.generators.classic import * 8 | from networkx.generators.degree_seq import * 9 | from networkx.generators.directed import * 10 | from networkx.generators.ego import * 11 | from networkx.generators.geometric import * 12 | from networkx.generators.hybrid import * 13 | from networkx.generators.line import * 14 | from networkx.generators.random_graphs import * 15 | from networkx.generators.small import * 16 | from networkx.generators.stochastic import * 17 | from networkx.generators.social import * 18 | from networkx.generators.threshold import * 19 | from networkx.generators.intersection import * 20 | from networkx.generators.random_clustered import * 21 | 22 | -------------------------------------------------------------------------------- /networkx/generators/ego.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ego graph. 3 | """ 4 | # Copyright (C) 2010 by 5 | # Aric Hagberg 6 | # Dan Schult 7 | # Pieter Swart 8 | # All rights reserved. 9 | # BSD license. 
10 | __author__ = """\n""".join(['Drew Conway ', 11 | 'Aric Hagberg ']) 12 | __all__ = ['ego_graph'] 13 | 14 | import networkx as nx 15 | 16 | def ego_graph(G,n,radius=1,center=True,undirected=False,distance=None): 17 | """Returns induced subgraph of neighbors centered at node n within 18 | a given radius. 19 | 20 | Parameters 21 | ---------- 22 | G : graph 23 | A NetworkX Graph or DiGraph 24 | 25 | n : node 26 | A single node 27 | 28 | radius : number, optional 29 | Include all neighbors of distance<=radius from n. 30 | 31 | center : bool, optional 32 | If False, do not include center node in graph 33 | 34 | undirected : bool, optional 35 | If True use both in- and out-neighbors of directed graphs. 36 | 37 | distance : key, optional 38 | Use specified edge data key as distance. For example, setting 39 | distance='weight' will use the edge weight to measure the 40 | distance from the node n. 41 | 42 | Notes 43 | ----- 44 | For directed graphs D this produces the "out" neighborhood 45 | or successors. If you want the neighborhood of predecessors 46 | first reverse the graph with D.reverse(). If you want both 47 | directions use the keyword argument undirected=True. 48 | 49 | Node, edge, and graph attributes are copied to the returned subgraph. 
50 | """ 51 | if undirected: 52 | if distance is not None: 53 | sp,_=nx.single_source_dijkstra(G.to_undirected(), 54 | n,cutoff=radius, 55 | weight=distance) 56 | else: 57 | sp=nx.single_source_shortest_path_length(G.to_undirected(), 58 | n,cutoff=radius) 59 | else: 60 | if distance is not None: 61 | sp,_=nx.single_source_dijkstra(G, 62 | n,cutoff=radius, 63 | weight=distance) 64 | else: 65 | sp=nx.single_source_shortest_path_length(G,n,cutoff=radius) 66 | 67 | H=G.subgraph(sp).copy() 68 | if not center: 69 | H.remove_node(n) 70 | return H 71 | -------------------------------------------------------------------------------- /networkx/generators/line.py: -------------------------------------------------------------------------------- 1 | """ 2 | Line graphs. 3 | 4 | """ 5 | # Copyright (C) 2010 by 6 | # Aric Hagberg 7 | # Dan Schult 8 | # Pieter Swart 9 | # All rights reserved. 10 | # BSD license. 11 | __author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult(dschult@colgate.edu)""" 12 | 13 | __all__ = ['line_graph'] 14 | 15 | import networkx as nx 16 | 17 | def line_graph(G): 18 | """Return the line graph of the graph or digraph G. 19 | 20 | The line graph of a graph G has a node for each edge 21 | in G and an edge between those nodes if the two edges 22 | in G share a common node. 23 | 24 | For DiGraphs an edge an edge represents a directed path of length 2. 25 | 26 | The original node labels are kept as two-tuple node labels 27 | in the line graph. 28 | 29 | Parameters 30 | ---------- 31 | G : graph 32 | A NetworkX Graph or DiGraph 33 | 34 | Examples 35 | -------- 36 | >>> G=nx.star_graph(3) 37 | >>> L=nx.line_graph(G) 38 | >>> print(sorted(L.edges())) # makes a clique, K3 39 | [((0, 1), (0, 2)), ((0, 1), (0, 3)), ((0, 3), (0, 2))] 40 | 41 | Notes 42 | ----- 43 | Not implemented for MultiGraph or MultiDiGraph classes. 44 | 45 | Graph, node, and edge data are not propagated to the new graph. 
46 | 47 | """ 48 | if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: 49 | raise Exception("Line graph not implemented for Multi(Di)Graphs") 50 | L=G.__class__() 51 | if G.is_directed(): 52 | for u,nlist in G.adjacency_iter(): # same as successors for digraph 53 | # look for directed path of length two 54 | for n in nlist: 55 | nbrs=G[n] # successors 56 | for nbr in nbrs: 57 | if nbr!=u: 58 | L.add_edge((u,n),(n,nbr)) 59 | else: 60 | for u,nlist in G.adjacency_iter(): 61 | # label nodes as tuple of edge endpoints in original graph 62 | # "node tuple" must be in lexigraphical order 63 | nodes=[tuple(sorted(n)) for n in zip([u]*len(nlist),nlist)] 64 | # add clique of nodes to graph 65 | while nodes: 66 | u=nodes.pop() 67 | L.add_edges_from((u,v) for v in nodes) 68 | return L 69 | 70 | -------------------------------------------------------------------------------- /networkx/generators/stochastic.py: -------------------------------------------------------------------------------- 1 | """Stocastic graph.""" 2 | import networkx as nx 3 | # Copyright (C) 2010 by 4 | # Aric Hagberg 5 | # Dan Schult 6 | # Pieter Swart 7 | # All rights reserved. 8 | # BSD license. 9 | __author__ = "Aric Hagberg " 10 | __all__ = ['stochastic_graph'] 11 | 12 | def stochastic_graph(G, copy=True, weight='weight'): 13 | """Return a right-stochastic representation of G. 14 | 15 | A right-stochastic graph is a weighted graph in which all of 16 | the node (out) neighbors edge weights sum to 1. 17 | 18 | Parameters 19 | ----------- 20 | G : graph 21 | A NetworkX graph, must have valid edge weights 22 | 23 | copy : boolean, optional 24 | If True make a copy of the graph, otherwise modify original graph 25 | 26 | weight : key (optional) 27 | Edge data key used for weight. If None all weights are set to 1. 
28 | """ 29 | if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: 30 | raise Exception("stochastic_graph not implemented for multigraphs") 31 | 32 | if not G.is_directed(): 33 | raise Exception("stochastic_graph not defined for undirected graphs") 34 | 35 | if copy: 36 | W=nx.DiGraph(G) 37 | else: 38 | W=G # reference original graph, no copy 39 | 40 | degree=W.out_degree(weight=weight) 41 | for (u,v,d) in W.edges(data=True): 42 | d[weight]=d.get(weight,1.0)/degree[u] 43 | return W 44 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_atlas.py: -------------------------------------------------------------------------------- 1 | from nose.tools import * 2 | import networkx as nx 3 | 4 | 5 | class TestAtlas(object): 6 | def setUp(self): 7 | self.GAG=nx.graph_atlas_g() 8 | 9 | def test_sizes(self): 10 | G=self.GAG[0] 11 | assert_equal(G.number_of_nodes(),0) 12 | assert_equal(G.number_of_edges(),0) 13 | 14 | G=self.GAG[7] 15 | assert_equal(G.number_of_nodes(),3) 16 | assert_equal(G.number_of_edges(),3) 17 | 18 | def test_names(self): 19 | i=0 20 | for g in self.GAG: 21 | name=g.name 22 | assert_equal(int(name[1:]),i) 23 | i+=1 24 | 25 | def test_monotone_nodes(self): 26 | # check for monotone increasing number of nodes 27 | previous=self.GAG[0] 28 | for g in self.GAG: 29 | assert_false(len(g)-len(previous) > 1) 30 | previous=g.copy() 31 | 32 | def test_monotone_nodes(self): 33 | # check for monotone increasing number of edges 34 | # (for fixed number of nodes) 35 | previous=self.GAG[0] 36 | for g in self.GAG: 37 | if len(g)==len(previous): 38 | assert_false(g.size()-previous.size() > 1) 39 | previous=g.copy() 40 | 41 | def test_monotone_degree_sequence(self): 42 | # check for monotone increasing degree sequence 43 | # (for fixed number f nodes and edges) 44 | # note that 111223 < 112222 45 | previous=self.GAG[0] 46 | for g in self.GAG: 47 | if len(g)==0: 48 | continue 49 | if len(g)==len(previous) & 
g.size()==previous.size(): 50 | deg_seq=sorted(g.degree().values()) 51 | previous_deg_seq=sorted(previous.degree().values()) 52 | assert_true(previous_deg_seq < deg_seq) 53 | previous=g.copy() 54 | 55 | 56 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_directed.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Generators - Directed Graphs 4 | ---------------------------- 5 | """ 6 | 7 | from nose.tools import * 8 | from networkx import * 9 | from networkx.generators.directed import * 10 | 11 | class TestGeneratorsDirected(): 12 | def test_smoke_test_random_graphs(self): 13 | G=gn_graph(100) 14 | G=gnr_graph(100,0.5) 15 | G=gnc_graph(100) 16 | G=scale_free_graph(100) 17 | 18 | def test_create_using_keyword_arguments(self): 19 | assert_raises(networkx.exception.NetworkXError, 20 | gn_graph, 100, create_using=Graph()) 21 | assert_raises(networkx.exception.NetworkXError, 22 | gnr_graph, 100, 0.5, create_using=Graph()) 23 | assert_raises(networkx.exception.NetworkXError, 24 | gnc_graph, 100, create_using=Graph()) 25 | assert_raises(networkx.exception.NetworkXError, 26 | scale_free_graph, 100, create_using=Graph()) 27 | G=gn_graph(100,seed=1) 28 | MG=gn_graph(100,create_using=MultiDiGraph(),seed=1) 29 | assert_equal(G.edges(), MG.edges()) 30 | G=gnr_graph(100,0.5,seed=1) 31 | MG=gnr_graph(100,0.5,create_using=MultiDiGraph(),seed=1) 32 | assert_equal(G.edges(), MG.edges()) 33 | G=gnc_graph(100,seed=1) 34 | MG=gnc_graph(100,create_using=MultiDiGraph(),seed=1) 35 | assert_equal(G.edges(), MG.edges()) 36 | 37 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_ego.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | ego graph 4 | --------- 5 | """ 6 | 7 | from nose.tools import assert_true, assert_equal 8 | 
import networkx as nx 9 | 10 | class TestGeneratorEgo(): 11 | def test_ego(self): 12 | G=nx.star_graph(3) 13 | H=nx.ego_graph(G,0) 14 | assert_true(nx.is_isomorphic(G,H)) 15 | G.add_edge(1,11) 16 | G.add_edge(2,22) 17 | G.add_edge(3,33) 18 | H=nx.ego_graph(G,0) 19 | assert_true(nx.is_isomorphic(nx.star_graph(3),H)) 20 | G=nx.path_graph(3) 21 | H=nx.ego_graph(G,0) 22 | assert_equal(H.edges(), [(0, 1)]) 23 | H=nx.ego_graph(G,0,undirected=True) 24 | assert_equal(H.edges(), [(0, 1)]) 25 | H=nx.ego_graph(G,0,center=False) 26 | assert_equal(H.edges(), []) 27 | 28 | 29 | def test_ego_distance(self): 30 | G=nx.Graph() 31 | G.add_edge(0,1,weight=2,distance=1) 32 | G.add_edge(1,2,weight=2,distance=2) 33 | G.add_edge(2,3,weight=2,distance=1) 34 | assert_equal(sorted(nx.ego_graph(G,0,radius=3).nodes()),[0,1,2,3]) 35 | eg=nx.ego_graph(G,0,radius=3,distance='weight') 36 | assert_equal(sorted(eg.nodes()),[0,1]) 37 | eg=nx.ego_graph(G,0,radius=3,distance='weight',undirected=True) 38 | assert_equal(sorted(eg.nodes()),[0,1]) 39 | eg=nx.ego_graph(G,0,radius=3,distance='distance') 40 | assert_equal(sorted(eg.nodes()),[0,1,2]) 41 | 42 | 43 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_geometric.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | class TestGeneratorsGeometric(): 6 | def test_random_geometric_graph(self): 7 | G=nx.random_geometric_graph(50,0.25) 8 | assert_equal(len(G),50) 9 | 10 | def test_geographical_threshold_graph(self): 11 | G=nx.geographical_threshold_graph(50,100) 12 | assert_equal(len(G),50) 13 | 14 | def test_waxman_graph(self): 15 | G=nx.waxman_graph(50,0.5,0.1) 16 | assert_equal(len(G),50) 17 | G=nx.waxman_graph(50,0.5,0.1,L=1) 18 | assert_equal(len(G),50) 19 | 20 | def test_naviable_small_world(self): 21 | G = nx.navigable_small_world_graph(5,p=1,q=0) 22 | gg = 
nx.grid_2d_graph(5,5).to_directed() 23 | assert_true(nx.is_isomorphic(G,gg)) 24 | 25 | G = nx.navigable_small_world_graph(5,p=1,q=0,dim=3) 26 | gg = nx.grid_graph([5,5,5]).to_directed() 27 | assert_true(nx.is_isomorphic(G,gg)) 28 | 29 | G = nx.navigable_small_world_graph(5,p=1,q=0,dim=1) 30 | gg = nx.grid_graph([5]).to_directed() 31 | assert_true(nx.is_isomorphic(G,gg)) 32 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_hybrid.py: -------------------------------------------------------------------------------- 1 | from nose.tools import * 2 | import networkx as nx 3 | 4 | def test_2d_grid_graph(): 5 | # FC article claims 2d grid graph of size n is (3,3)-connected 6 | # and (5,9)-connected, but I don't think it is (5,9)-connected 7 | G=nx.grid_2d_graph(8,8,periodic=True) 8 | assert_true(nx.is_kl_connected(G,3,3)) 9 | assert_false(nx.is_kl_connected(G,5,9)) 10 | (H,graphOK)=nx.kl_connected_subgraph(G,5,9,same_as_graph=True) 11 | assert_false(graphOK) 12 | 13 | def test_small_graph(): 14 | G=nx.Graph() 15 | G.add_edge(1,2) 16 | G.add_edge(1,3) 17 | G.add_edge(2,3) 18 | assert_true(nx.is_kl_connected(G,2,2)) 19 | H=nx.kl_connected_subgraph(G,2,2) 20 | (H,graphOK)=nx.kl_connected_subgraph(G,2,2, 21 | low_memory=True, 22 | same_as_graph=True) 23 | assert_true(graphOK) 24 | 25 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_intersection.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx as nx 4 | 5 | class TestIntersectionGraph(): 6 | def test_random_intersection_graph(self): 7 | G=nx.uniform_random_intersection_graph(10,5,0.5) 8 | assert_equal(len(G),10) 9 | 10 | def test_k_random_intersection_graph(self): 11 | G=nx.k_random_intersection_graph(10,5,2) 12 | assert_equal(len(G),10) 13 | 14 | def 
test_general_random_intersection_graph(self): 15 | G=nx.general_random_intersection_graph(10,5,[0.1,0.2,0.2,0.1,0.1]) 16 | assert_equal(len(G),10) 17 | assert_raises(ValueError, nx.general_random_intersection_graph,10,5, 18 | [0.1,0.2,0.2,0.1]) 19 | 20 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_line.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """line graph 4 | ---------- 5 | """ 6 | 7 | import networkx as nx 8 | from nose.tools import * 9 | 10 | 11 | class TestGeneratorLine(): 12 | def test_line(self): 13 | G=nx.star_graph(5) 14 | L=nx.line_graph(G) 15 | assert_true(nx.is_isomorphic(L,nx.complete_graph(5))) 16 | G=nx.path_graph(5) 17 | L=nx.line_graph(G) 18 | assert_true(nx.is_isomorphic(L,nx.path_graph(4))) 19 | G=nx.cycle_graph(5) 20 | L=nx.line_graph(G) 21 | assert_true(nx.is_isomorphic(L,G)) 22 | G=nx.DiGraph() 23 | G.add_edges_from([(0,1),(0,2),(0,3)]) 24 | L=nx.line_graph(G) 25 | assert_equal(L.adj, {}) 26 | G=nx.DiGraph() 27 | G.add_edges_from([(0,1),(1,2),(2,3)]) 28 | L=nx.line_graph(G) 29 | assert_equal(sorted(L.edges()), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) 30 | 31 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_random_clustered.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from nose.tools import * 3 | import networkx 4 | 5 | class TestRandomClusteredGraph: 6 | 7 | def test_valid(self): 8 | node=[1,1,1,2,1,2,0,0] 9 | tri=[0,0,0,0,0,1,1,1] 10 | joint_degree_sequence=zip(node,tri) 11 | G = networkx.random_clustered_graph(joint_degree_sequence) 12 | assert_equal(G.number_of_nodes(),8) 13 | assert_equal(G.number_of_edges(),7) 14 | 15 | def test_valid2(self): 16 | G = networkx.random_clustered_graph(\ 17 | [(1,2),(2,1),(1,1),(1,1),(1,1),(2,0)]) 18 | 
assert_equal(G.number_of_nodes(),6) 19 | assert_equal(G.number_of_edges(),10) 20 | 21 | def test_invalid1(self): 22 | assert_raises((TypeError,networkx.NetworkXError), 23 | networkx.random_clustered_graph,[[1,1],[2,1],[0,1]]) 24 | 25 | def test_invalid2(self): 26 | assert_raises((TypeError,networkx.NetworkXError), 27 | networkx.random_clustered_graph,[[1,1],[1,2],[0,1]]) 28 | 29 | -------------------------------------------------------------------------------- /networkx/generators/tests/test_stochastic.py: -------------------------------------------------------------------------------- 1 | from nose.tools import assert_true, assert_equal,assert_raises 2 | import networkx as nx 3 | 4 | def test_stochastic(): 5 | G=nx.DiGraph() 6 | G.add_edge(0,1) 7 | G.add_edge(0,2) 8 | S=nx.stochastic_graph(G) 9 | assert_true(nx.is_isomorphic(G,S)) 10 | assert_equal(sorted(S.edges(data=True)), 11 | [(0, 1, {'weight': 0.5}), 12 | (0, 2, {'weight': 0.5})]) 13 | S=nx.stochastic_graph(G,copy=True) 14 | assert_equal(sorted(S.edges(data=True)), 15 | [(0, 1, {'weight': 0.5}), 16 | (0, 2, {'weight': 0.5})]) 17 | 18 | 19 | def test_stochastic_error(): 20 | G=nx.Graph() 21 | assert_raises(Exception,nx.stochastic_graph,G) 22 | G=nx.MultiGraph() 23 | assert_raises(Exception,nx.stochastic_graph,G) 24 | -------------------------------------------------------------------------------- /networkx/linalg/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.linalg.attrmatrix import * 2 | import networkx.linalg.attrmatrix 3 | from networkx.linalg.spectrum import * 4 | import networkx.linalg.spectrum 5 | from networkx.linalg.graphmatrix import * 6 | import networkx.linalg.graphmatrix 7 | from networkx.linalg.laplacianmatrix import * 8 | import networkx.linalg.laplacianmatrix 9 | 10 | -------------------------------------------------------------------------------- /networkx/linalg/spectrum.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | Eigenvalue spectrum of graphs. 3 | """ 4 | # Copyright (C) 2004-2011 by 5 | # Aric Hagberg 6 | # Dan Schult 7 | # Pieter Swart 8 | # All rights reserved. 9 | # BSD license. 10 | import networkx as nx 11 | __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 12 | 'Pieter Swart (swart@lanl.gov)', 13 | 'Dan Schult(dschult@colgate.edu)']) 14 | 15 | __all__ = ['laplacian_spectrum', 'adjacency_spectrum'] 16 | 17 | 18 | def laplacian_spectrum(G, weight='weight'): 19 | """Return eigenvalues of the Laplacian of G 20 | 21 | Parameters 22 | ---------- 23 | G : graph 24 | A NetworkX graph 25 | 26 | weight : string or None, optional (default='weight') 27 | The edge data key used to compute each value in the matrix. 28 | If None, then each edge has weight 1. 29 | 30 | Returns 31 | ------- 32 | evals : NumPy array 33 | Eigenvalues 34 | 35 | Notes 36 | ----- 37 | For MultiGraph/MultiDiGraph, the edges weights are summed. 38 | See to_numpy_matrix for other options. 39 | 40 | See Also 41 | -------- 42 | laplacian_matrix 43 | """ 44 | try: 45 | import numpy as np 46 | except ImportError: 47 | raise ImportError( 48 | "laplacian_spectrum() requires NumPy: http://scipy.org/ ") 49 | return np.linalg.eigvals(nx.laplacian_matrix(G,weight=weight)) 50 | 51 | def adjacency_spectrum(G, weight='weight'): 52 | """Return eigenvalues of the adjacency matrix of G. 53 | 54 | Parameters 55 | ---------- 56 | G : graph 57 | A NetworkX graph 58 | 59 | weight : string or None, optional (default='weight') 60 | The edge data key used to compute each value in the matrix. 61 | If None, then each edge has weight 1. 62 | 63 | Returns 64 | ------- 65 | evals : NumPy array 66 | Eigenvalues 67 | 68 | Notes 69 | ----- 70 | For MultiGraph/MultiDiGraph, the edges weights are summed. 71 | See to_numpy_matrix for other options. 
72 | 73 | See Also 74 | -------- 75 | adjacency_matrix 76 | """ 77 | try: 78 | import numpy as np 79 | except ImportError: 80 | raise ImportError( 81 | "adjacency_spectrum() requires NumPy: http://scipy.org/ ") 82 | return np.linalg.eigvals(nx.adjacency_matrix(G,weight=weight)) 83 | 84 | # fixture for nose tests 85 | def setup_module(module): 86 | from nose import SkipTest 87 | try: 88 | import numpy 89 | except: 90 | raise SkipTest("NumPy not available") 91 | -------------------------------------------------------------------------------- /networkx/linalg/tests/test_spectrum.py: -------------------------------------------------------------------------------- 1 | from nose import SkipTest 2 | 3 | import networkx as nx 4 | from networkx.generators.degree_seq import havel_hakimi_graph 5 | 6 | class TestSpectrum(object): 7 | numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test 8 | @classmethod 9 | def setupClass(cls): 10 | global numpy 11 | global assert_equal 12 | global assert_almost_equal 13 | try: 14 | import numpy 15 | from numpy.testing import assert_equal,assert_almost_equal 16 | except ImportError: 17 | raise SkipTest('NumPy not available.') 18 | 19 | def setUp(self): 20 | deg=[3,2,2,1,0] 21 | self.G=havel_hakimi_graph(deg) 22 | self.P=nx.path_graph(3) 23 | self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3}) 24 | for (u,v) in self.G.edges_iter() ) 25 | self.WG.add_node(4) 26 | 27 | def test_laplacian_spectrum(self): 28 | "Laplacian eigenvalues" 29 | evals=numpy.array([0, 0, 1, 3, 4]) 30 | e=sorted(nx.laplacian_spectrum(self.G)) 31 | assert_almost_equal(e,evals) 32 | e=sorted(nx.laplacian_spectrum(self.WG,weight=None)) 33 | assert_almost_equal(e,evals) 34 | e=sorted(nx.laplacian_spectrum(self.WG)) 35 | assert_almost_equal(e,0.5*evals) 36 | e=sorted(nx.laplacian_spectrum(self.WG,weight='other')) 37 | assert_almost_equal(e,0.3*evals) 38 | 39 | def test_adjacency_spectrum(self): 40 | "Adjacency eigenvalues" 41 | 
evals=numpy.array([-numpy.sqrt(2), 0, numpy.sqrt(2)]) 42 | e=sorted(nx.adjacency_spectrum(self.P)) 43 | assert_almost_equal(e,evals) 44 | 45 | -------------------------------------------------------------------------------- /networkx/readwrite/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | A package for reading and writing graphs in various formats. 3 | 4 | """ 5 | from networkx.readwrite.adjlist import * 6 | from networkx.readwrite.multiline_adjlist import * 7 | from networkx.readwrite.edgelist import * 8 | from networkx.readwrite.gpickle import * 9 | from networkx.readwrite.pajek import * 10 | from networkx.readwrite.leda import * 11 | from networkx.readwrite.sparsegraph6 import * 12 | from networkx.readwrite.nx_yaml import * 13 | from networkx.readwrite.gml import * 14 | from networkx.readwrite.graphml import * 15 | from networkx.readwrite.gexf import * 16 | from networkx.readwrite.nx_shp import * 17 | -------------------------------------------------------------------------------- /networkx/readwrite/json_graph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | ********* 3 | JSON data 4 | ********* 5 | Generate and parse JSON serializable data for NetworkX graphs. 6 | """ 7 | from networkx.readwrite.json_graph.node_link import * 8 | from networkx.readwrite.json_graph.adjacency import * 9 | from networkx.readwrite.json_graph.tree import * 10 | from networkx.readwrite.json_graph.serialize import * 11 | -------------------------------------------------------------------------------- /networkx/readwrite/json_graph/serialize.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2011 by 2 | # Aric Hagberg 3 | # Dan Schult 4 | # Pieter Swart 5 | # All rights reserved. 6 | # BSD license. 
7 | from functools import partial,update_wrapper 8 | import json 9 | from networkx.readwrite.json_graph import node_link_data,node_link_graph 10 | __author__ = """Aric Hagberg (hagberg@lanl.gov))""" 11 | __all__ = ['dumps','loads','dump','load'] 12 | 13 | class NXJSONEncoder(json.JSONEncoder): 14 | def default(self, o): 15 | return node_link_data(o) 16 | 17 | 18 | class NXJSONDecoder(json.JSONDecoder): 19 | def decode(self, s): 20 | d = json.loads(s) 21 | return node_link_graph(d) 22 | 23 | # modification of json functions to serialize networkx graphs 24 | dumps = partial(json.dumps, cls=NXJSONEncoder) 25 | update_wrapper(dumps,json.dumps) 26 | loads = partial(json.loads, cls=NXJSONDecoder) 27 | update_wrapper(loads,json.loads) 28 | dump = partial(json.dump, cls=NXJSONEncoder) 29 | update_wrapper(dump,json.dump) 30 | load = partial(json.load, cls=NXJSONDecoder) 31 | update_wrapper(load,json.load) 32 | -------------------------------------------------------------------------------- /networkx/readwrite/json_graph/tests/test_adjacency.py: -------------------------------------------------------------------------------- 1 | import json 2 | from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true 3 | import networkx as nx 4 | from networkx.readwrite.json_graph import * 5 | 6 | class TestAdjacency: 7 | 8 | def test_graph(self): 9 | G = nx.path_graph(4) 10 | H = adjacency_graph(adjacency_data(G)) 11 | nx.is_isomorphic(G,H) 12 | 13 | def test_graph_attributes(self): 14 | G = nx.path_graph(4) 15 | G.add_node(1,color='red') 16 | G.add_edge(1,2,width=7) 17 | G.graph['foo']='bar' 18 | G.graph[1]='one' 19 | 20 | H = adjacency_graph(adjacency_data(G)) 21 | assert_equal(H.graph['foo'],'bar') 22 | assert_equal(H.node[1]['color'],'red') 23 | assert_equal(H[1][2]['width'],7) 24 | 25 | d = json.dumps(adjacency_data(G)) 26 | H = adjacency_graph(json.loads(d)) 27 | assert_equal(H.graph['foo'],'bar') 28 | assert_equal(H.graph[1],'one') 29 | 
assert_equal(H.node[1]['color'],'red') 30 | assert_equal(H[1][2]['width'],7) 31 | 32 | def test_digraph(self): 33 | G = nx.DiGraph() 34 | H = adjacency_graph(adjacency_data(G)) 35 | assert_true(H.is_directed()) 36 | 37 | def test_multidigraph(self): 38 | G = nx.MultiDiGraph() 39 | H = adjacency_graph(adjacency_data(G)) 40 | assert_true(H.is_directed()) 41 | assert_true(H.is_multigraph()) 42 | -------------------------------------------------------------------------------- /networkx/readwrite/json_graph/tests/test_node_link.py: -------------------------------------------------------------------------------- 1 | import json 2 | from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true 3 | import networkx as nx 4 | from networkx.readwrite.json_graph import * 5 | 6 | class TestNodeLink: 7 | 8 | def test_graph(self): 9 | G = nx.path_graph(4) 10 | H = node_link_graph(node_link_data(G)) 11 | nx.is_isomorphic(G,H) 12 | 13 | def test_graph_attributes(self): 14 | G = nx.path_graph(4) 15 | G.add_node(1,color='red') 16 | G.add_edge(1,2,width=7) 17 | G.graph[1]='one' 18 | G.graph['foo']='bar' 19 | 20 | H = node_link_graph(node_link_data(G)) 21 | assert_equal(H.graph['foo'],'bar') 22 | assert_equal(H.node[1]['color'],'red') 23 | assert_equal(H[1][2]['width'],7) 24 | 25 | d=json.dumps(node_link_data(G)) 26 | H = node_link_graph(json.loads(d)) 27 | assert_equal(H.graph['foo'],'bar') 28 | assert_equal(H.graph[1],'one') 29 | assert_equal(H.node[1]['color'],'red') 30 | assert_equal(H[1][2]['width'],7) 31 | 32 | def test_digraph(self): 33 | G = nx.DiGraph() 34 | H = node_link_graph(node_link_data(G)) 35 | assert_true(H.is_directed()) 36 | 37 | def test_multidigraph(self): 38 | G = nx.MultiDiGraph() 39 | H = node_link_graph(node_link_data(G)) 40 | assert_true(H.is_directed()) 41 | assert_true(H.is_multigraph()) 42 | -------------------------------------------------------------------------------- /networkx/readwrite/json_graph/tests/test_serialize.py: 
-------------------------------------------------------------------------------- 1 | import json 2 | from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true 3 | import networkx as nx 4 | from networkx.readwrite.json_graph import * 5 | 6 | class TestAdjacency: 7 | 8 | def test_graph(self): 9 | G = nx.path_graph(4) 10 | H = loads(dumps(G)) 11 | nx.is_isomorphic(G,H) 12 | 13 | def test_graph_attributes(self): 14 | G = nx.path_graph(4) 15 | G.add_node(1,color='red') 16 | G.add_edge(1,2,width=7) 17 | G.graph['foo']='bar' 18 | G.graph[1]='one' 19 | 20 | H = loads(dumps(G)) 21 | assert_equal(H.graph['foo'],'bar') 22 | assert_equal(H.graph[1],'one') 23 | assert_equal(H.node[1]['color'],'red') 24 | assert_equal(H[1][2]['width'],7) 25 | 26 | try: 27 | from StringIO import StringIO 28 | except: 29 | from io import StringIO 30 | io = StringIO() 31 | dump(G,io) 32 | io.seek(0) 33 | H=load(io) 34 | assert_equal(H.graph['foo'],'bar') 35 | assert_equal(H.graph[1],'one') 36 | assert_equal(H.node[1]['color'],'red') 37 | assert_equal(H[1][2]['width'],7) 38 | 39 | 40 | def test_digraph(self): 41 | G = nx.DiGraph() 42 | H = loads(dumps(G)) 43 | assert_true(H.is_directed()) 44 | 45 | def test_multidigraph(self): 46 | G = nx.MultiDiGraph() 47 | H = loads(dumps(G)) 48 | assert_true(H.is_directed()) 49 | assert_true(H.is_multigraph()) 50 | -------------------------------------------------------------------------------- /networkx/readwrite/json_graph/tests/test_tree.py: -------------------------------------------------------------------------------- 1 | import json 2 | from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true 3 | import networkx as nx 4 | from networkx.readwrite.json_graph import * 5 | 6 | class TestTree: 7 | 8 | def test_graph(self): 9 | G=nx.DiGraph() 10 | G.add_nodes_from([1,2,3],color='red') 11 | G.add_edge(1,2,foo=7) 12 | G.add_edge(1,3,foo=10) 13 | G.add_edge(3,4,foo=10) 14 | H = tree_graph(tree_data(G,1)) 15 | 
class TestLEDA(object):
    """Tests for the LEDA graph-format parser (`parse_leda`) and file
    reader (`read_leda`)."""

    def test_parse_leda(self):
        """parse_leda accepts both a single string and an iterable of lines,
        and attaches the |{...}| payload of each edge as its 'label'."""
        data="""#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|"""
        G=nx.parse_leda(data)
        # Re-parse from a list of lines; both input forms must be accepted.
        G=nx.parse_leda(data.split('\n'))
        assert_equal(sorted(G.nodes()),
                     ['v1', 'v2', 'v3', 'v4', 'v5'])
        assert_equal([e for e in sorted(G.edges(data=True))],
                     [('v1', 'v2', {'label': '4'}),
                      ('v1', 'v3', {'label': '3'}),
                      ('v2', 'v3', {'label': '2'}),
                      ('v3', 'v4', {'label': '3'}),
                      ('v3', 'v5', {'label': '7'}),
                      ('v4', 'v5', {'label': '6'}),
                      ('v5', 'v1', {'label': 'foo'})])

    def test_read_LEDA(self):
        """read_leda on a temporary file agrees with parse_leda on the
        same data."""
        data="""#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|"""
        G=nx.parse_leda(data)
        (fd,fname)=tempfile.mkstemp()
        # Fix: write through a context manager so the handle is closed even
        # if the write raises; the old code opened the file manually, could
        # leak the handle on error, and bound the unused return value `b`.
        with open(fname,'w') as fh:
            fh.write(data)
        Gin=nx.read_leda(fname)
        assert_equal(sorted(G.nodes()),sorted(Gin.nodes()))
        assert_equal(sorted(G.edges()),sorted(Gin.edges()))
        # mkstemp leaves the descriptor and the file for the caller to
        # clean up.
        os.close(fd)
        os.unlink(fname)
edges = [(str(u),str(v)) for u,v in G.edges()] 35 | assert_edges_equal(G.edges(),[('a','c'),('a','b'),('c','a'),('c','c')]) 36 | 37 | def test_write_p2g(self): 38 | s=b"""foo 39 | 3 2 40 | 1 41 | 1 42 | 2 43 | 2 44 | 3 45 | 46 | """ 47 | fh=io.BytesIO() 48 | G=nx.DiGraph() 49 | G.name='foo' 50 | G.add_edges_from([(1,2),(2,3)]) 51 | write_p2g(G,fh) 52 | fh.seek(0) 53 | r=fh.read() 54 | assert_equal(r,s) 55 | 56 | def test_write_read_p2g(self): 57 | fh=io.BytesIO() 58 | G=nx.DiGraph() 59 | G.name='foo' 60 | G.add_edges_from([('a','b'),('b','c')]) 61 | write_p2g(G,fh) 62 | fh.seek(0) 63 | H=read_p2g(fh) 64 | assert_edges_equal(G.edges(),H.edges()) 65 | -------------------------------------------------------------------------------- /networkx/readwrite/tests/test_pajek.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Pajek tests 4 | """ 5 | 6 | from nose.tools import assert_equal 7 | from networkx import * 8 | import os,tempfile 9 | from io import open 10 | 11 | class TestPajek(object): 12 | def setUp(self): 13 | self.data="""*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180""" 14 | self.G=nx.MultiDiGraph() 15 | 
self.G.add_nodes_from(['A1', 'Bb', 'C', 'D2']) 16 | self.G.add_edges_from([('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'), 17 | ('Bb', 'A1'),('C', 'C'), ('C', 'D2'), 18 | ('D2', 'Bb')]) 19 | 20 | self.G.graph['name']='Tralala' 21 | (self.fd,self.fname)=tempfile.mkstemp() 22 | fh=open(self.fname,'wb') 23 | fh.write(self.data.encode('UTF-8')) 24 | fh.close() 25 | 26 | def tearDown(self): 27 | os.close(self.fd) 28 | os.unlink(self.fname) 29 | 30 | def test_parse_pajek_simple(self): 31 | # Example without node positions or shape 32 | data="""*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1""" 33 | G=parse_pajek(data) 34 | assert_equal(sorted(G.nodes()), ['1', '2']) 35 | assert_equal(sorted(G.edges()), [('1', '2'), ('1', '2')]) 36 | 37 | def test_parse_pajek(self): 38 | G=parse_pajek(self.data) 39 | assert_equal(sorted(G.nodes()), ['A1', 'Bb', 'C', 'D2']) 40 | assert_equal(sorted(G.edges()), 41 | [('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'), ('Bb', 'A1'), 42 | ('C', 'C'), ('C', 'D2'), ('D2', 'Bb')]) 43 | 44 | def test_read_pajek(self): 45 | G=parse_pajek(self.data) 46 | Gin=read_pajek(self.fname) 47 | assert_equal(sorted(G.nodes()), sorted(Gin.nodes())) 48 | assert_equal(sorted(G.edges()), sorted(Gin.edges())) 49 | assert_equal(self.G.graph,Gin.graph) 50 | for n in G.node: 51 | assert_equal(G.node[n],Gin.node[n]) 52 | -------------------------------------------------------------------------------- /networkx/readwrite/tests/test_yaml.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for yaml. 
3 | """ 4 | 5 | import os,tempfile 6 | from nose import SkipTest 7 | from nose.tools import assert_equal 8 | 9 | import networkx as nx 10 | 11 | class TestYaml(object): 12 | @classmethod 13 | def setupClass(cls): 14 | global yaml 15 | try: 16 | import yaml 17 | except ImportError: 18 | raise SkipTest('yaml not available.') 19 | 20 | def setUp(self): 21 | self.build_graphs() 22 | 23 | def build_graphs(self): 24 | self.G = nx.Graph(name="test") 25 | e = [('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] 26 | self.G.add_edges_from(e) 27 | self.G.add_node('g') 28 | 29 | self.DG = nx.DiGraph(self.G) 30 | 31 | self.MG = nx.MultiGraph() 32 | self.MG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)]) 33 | 34 | def assert_equal(self, G, data=False): 35 | (fd, fname) = tempfile.mkstemp() 36 | nx.write_yaml(G, fname) 37 | Gin = nx.read_yaml(fname); 38 | 39 | assert_equal(sorted(G.nodes()),sorted(Gin.nodes())) 40 | assert_equal(G.edges(data=data),Gin.edges(data=data)) 41 | 42 | os.close(fd) 43 | os.unlink(fname) 44 | 45 | def testUndirected(self): 46 | self.assert_equal(self.G, False) 47 | 48 | def testDirected(self): 49 | self.assert_equal(self.DG, False) 50 | 51 | def testMultiGraph(self): 52 | self.assert_equal(self.MG, True) 53 | 54 | -------------------------------------------------------------------------------- /networkx/testing/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.testing.utils import * 2 | -------------------------------------------------------------------------------- /networkx/testing/utils.py: -------------------------------------------------------------------------------- 1 | import operator 2 | from nose.tools import * 3 | __all__ = ['assert_nodes_equal', 'assert_edges_equal','assert_graphs_equal'] 4 | 5 | def assert_nodes_equal(nlist1, nlist2): 6 | # Assumes lists are either nodes, or (node,datadict) tuples, 7 | # and also that nodes are orderable/sortable. 
def assert_edges_equal(elist1, elist2):
    """Assert that two edge lists describe the same edges.

    Edges may be 2-tuples (u, v), 3-tuples (u, v, d) with a data dict, or
    4-tuples (u, v, k, d) with a key and a data dict.  Endpoint order
    within an edge is ignored (undirected semantics); nodes must be
    orderable/sortable.

    Raises AssertionError on mismatch (plain `assert` is used here, which
    raises the same exception type as nose's assert_equal).
    """
    # Bug fix: the sort key used to be sorted(x[0:1]) -- the first endpoint
    # only -- so two equal undirected edge lists whose edges store their
    # endpoints in opposite orders (e.g. [(1,4),(2,3)] vs [(3,2),(4,1)])
    # could sort into different positions and fail spuriously.  Sort by
    # both endpoints, order-insensitively.
    e1 = sorted(elist1, key=lambda x: sorted(x[0:2]))
    e2 = sorted(elist2, key=lambda x: sorted(x[0:2]))
    assert len(e1) == len(e2)
    if not e1:
        # Robustness: two empty edge lists are trivially equal; the old
        # code crashed on e1[0] here.
        return
    if len(e1[0]) == 2:
        for a, b in zip(e1, e2):
            assert set(a[0:2]) == set(b[0:2])
    elif len(e1[0]) == 3:
        for a, b in zip(e1, e2):
            assert set(a[0:2]) == set(b[0:2])
            assert a[2] == b[2]
    elif len(e1[0]) == 4:
        for a, b in zip(e1, e2):
            assert set(a[0:2]) == set(b[0:2])
            assert a[2] == b[2]
            assert a[3] == b[3]
def run(verbosity=1, doctest=False, numpy=True):
    """Run NetworkX tests.

    Parameters
    ----------
    verbosity : integer, optional
        Level of detail in test reports.  Higher numbers provide
        more detail.
    doctest : bool, optional
        True to run doctests in code modules.
    numpy : bool, optional
        True to test modules dependent on numpy.
    """
    try:
        import nose
    except ImportError:
        raise ImportError(
            "The nose package is needed to run the NetworkX tests.")

    sys.stderr.write("Running NetworkX tests:")

    # Tests are discovered relative to the installed package directory.
    nx_install_dir = path.join(path.dirname(__file__), path.pardir)

    # Refuse to run from the source checkout itself.
    if getcwd() == path.abspath(path.join(nx_install_dir, path.pardir)):
        raise RuntimeError("Can't run tests from source directory.\n"
                           "Run 'nosetests' from the command line.")

    # Assemble nose's argument vector; the leading blank entry stands in
    # for argv[0].
    argv = [' ', '--verbosity=%d' % verbosity, '-w', nx_install_dir, '-exe']
    if doctest:
        argv.extend(['--with-doctest', '--doctest-extension=txt'])
    if not numpy:
        argv.extend(['-A not numpy'])

    nose.run(argv=argv)
@raises(nx.NetworkXPointlessConcept) 15 | def test_raises_networkx_pointless_concept(): 16 | raise nx.NetworkXPointlessConcept 17 | 18 | @raises(nx.NetworkXAlgorithmError) 19 | def test_raises_networkx_algorithm_error(): 20 | raise nx.NetworkXAlgorithmError 21 | 22 | @raises(nx.NetworkXUnfeasible) 23 | def test_raises_networkx_unfeasible(): 24 | raise nx.NetworkXUnfeasible 25 | 26 | @raises(nx.NetworkXNoPath) 27 | def test_raises_networkx_no_path(): 28 | raise nx.NetworkXNoPath 29 | 30 | @raises(nx.NetworkXUnbounded) 31 | def test_raises_networkx_unbounded(): 32 | raise nx.NetworkXUnbounded 33 | 34 | -------------------------------------------------------------------------------- /networkx/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from networkx.utils.misc import * 2 | from networkx.utils.decorators import * 3 | from networkx.utils.random_sequence import * 4 | from networkx.utils.union_find import * 5 | from networkx.utils.rcm import * 6 | -------------------------------------------------------------------------------- /networkx/utils/tests/test_misc.py: -------------------------------------------------------------------------------- 1 | from nose.tools import * 2 | import networkx as nx 3 | from networkx.utils import * 4 | 5 | def test_is_string_like(): 6 | assert_true(is_string_like("aaaa")) 7 | assert_false(is_string_like(None)) 8 | assert_false(is_string_like(123)) 9 | 10 | def test_iterable(): 11 | assert_false(iterable(None)) 12 | assert_false(iterable(10)) 13 | assert_true(iterable([1,2,3])) 14 | assert_true(iterable((1,2,3))) 15 | assert_true(iterable({1:"A",2:"X"})) 16 | assert_true(iterable("ABC")) 17 | 18 | def test_graph_iterable(): 19 | K=nx.complete_graph(10) 20 | assert_true(iterable(K)) 21 | assert_true(iterable(K.nodes_iter())) 22 | assert_true(iterable(K.edges_iter())) 23 | 24 | def test_is_list_of_ints(): 25 | assert_true(is_list_of_ints([1,2,3,42])) 26 | 
def test_zipf_rv():
    """zipf_rv returns an int and rejects invalid parameters."""
    r = zipf_rv(2.3)
    # Bug fix: the old check was assert_true(type(r), int), which always
    # passes because assert_true's second argument is only the failure
    # message, not an expected value.  Actually assert the result type.
    assert_true(isinstance(r, int))
    # alpha must be > 1 and xmin must be >= 1.
    assert_raises(ValueError, zipf_rv, 0.5)
    assert_raises(ValueError, zipf_rv, 2, xmin=0)
class UnionFind:
    """Union-find data structure.

    Each UnionFind instance X maintains a family of disjoint sets of
    hashable objects, supporting the following two methods:

    - X[item] returns a name for the set containing the given item.
      Each set is named by an arbitrarily-chosen one of its members; as
      long as the set remains unchanged it will keep the same name.  If
      the item is not yet part of a set in X, a new singleton set is
      created for it.

    - X.union(item1, item2, ...) merges the sets containing each item
      into a single larger set.  If any item is not yet part of a set
      in X, it is added to X as one of the members of the merged set.

    Union-find data structure. Based on Josiah Carlson's code,
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912
    with significant additional changes by D. Eppstein.
    http://www.ics.uci.edu/~eppstein/PADS/UnionFind.py
    """

    def __init__(self):
        """Create a new empty union-find structure."""
        self.weights = {}   # root -> number of elements in its set
        self.parents = {}   # element -> parent (root points to itself)

    def __getitem__(self, object):
        """Find and return the name of the set containing the object."""

        # Previously unknown object: start it as its own singleton set.
        if object not in self.parents:
            self.parents[object] = object
            self.weights[object] = 1
            return object

        # Walk the parent chain up to the root.
        path = [object]
        root = self.parents[object]
        while root != path[-1]:
            path.append(root)
            root = self.parents[root]

        # Path compression: point every visited node directly at the root
        # so later lookups are near O(1).
        for ancestor in path:
            self.parents[ancestor] = root
        return root

    def __iter__(self):
        """Iterate through all items ever found or unioned by this structure."""
        return iter(self.parents)

    def union(self, *objects):
        """Find the sets containing the objects and merge them all.

        Calling with no arguments (or with objects already in one set)
        is a no-op.
        """
        # Deduplicate roots so repeated/already-merged arguments do no
        # redundant work; guard the empty call, which previously raised
        # ValueError from max() on an empty sequence.
        roots = {self[x] for x in objects}
        if not roots:
            return
        # Union by weight: merge smaller sets into the heaviest root.
        # Using key= avoids the old (weight, root) tuple comparison, which
        # required the roots themselves to be mutually comparable.
        heaviest = max(roots, key=lambda r: self.weights[r])
        for r in roots:
            if r != heaviest:
                self.weights[heaviest] += self.weights[r]
                self.parents[r] = heaviest
15 | dev = False 16 | 17 | # Format: (name, major, min, revision) 18 | version_info = ('networkx', 1, 7, '70eea5d9e665') 19 | 20 | # Format: a 'datetime.datetime' instance 21 | date_info = datetime.datetime(2012, 7, 4, 17, 52, 30, 850797) 22 | 23 | # Format: (vcs, vcs_tuple) 24 | vcs_info = ('mercurial', ('70eea5d9e665', 'tip')) 25 | 26 | -------------------------------------------------------------------------------- /nx_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import networkx as nx 4 | from collections import deque 5 | 6 | #q = deque() 7 | # 8 | #q.appendleft(1) 9 | #q.appendleft(2) 10 | #q.appendleft(3) 11 | # 12 | #print(q) 13 | # 14 | #print(q.pop()) 15 | 16 | #### 17 | 18 | #G = nx.balanced_tree(2, 3) 19 | # 20 | #print(G.nodes()) 21 | #print(G.edges()) 22 | # 23 | #print(set( G.neighbors(1) )) 24 | #nx.freeze(G) 25 | # 26 | #print(G[0]) 27 | # 28 | #G.node[1]['parent'] = 0 29 | # 30 | #print(G.nodes(data=True)) 31 | #print(G.node[1]) 32 | 33 | #### 34 | 35 | import matplotlib 36 | matplotlib.rcParams['backend'] = "Qt4Agg" 37 | import matplotlib.pyplot as plt 38 | 39 | g = nx.Graph() 40 | 41 | g.add_nodes_from(range(1,11)) 42 | 43 | g.add_edge(1,2, w = 3) 44 | 45 | G=nx.Graph() 46 | 47 | G.add_edge('a','b',weight=0.6) 48 | G.add_edge('a','c',weight=0.2) 49 | G.add_edge('c','d',weight=0.1) 50 | G.add_edge('c','e',weight=0.7) 51 | G.add_edge('c','f',weight=0.9) 52 | G.add_edge('a','d',weight=0.3) 53 | 54 | print(G.edges()) 55 | 56 | elarge=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] >0.5] 57 | esmall=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] <=0.5] 58 | 59 | pos=nx.spring_layout(G) # positions for all nodes 60 | 61 | # nodes 62 | nx.draw_networkx_nodes(G,pos,node_size=700) 63 | 64 | # edges 65 | nx.draw_networkx_edges(G,pos,edgelist=elarge, width=2) 66 | nx.draw_networkx_edges(G,pos,edgelist=esmall, 67 | width=3,alpha=0.5,edge_color='b',style='dashed') 68 | 
69 | # labels 70 | nx.draw_networkx_labels(G,pos,font_size=20,font_family='sans-serif') 71 | 72 | plt.draw() 73 | plt.show() 74 | 75 | ##################################### 76 | 77 | #G = nx.Graph() 78 | #edge_list = [ 79 | # (1, 4, {'w':6}), 80 | # (1, 2, {'w':3.1}), 81 | # (1, 5, {'w':9.1}), 82 | # 83 | # (2, 4, {'w':4.1}), 84 | # (2, 5, {'w':9.2}), 85 | # (2, 3, {'w':2.1}), 86 | # 87 | # (2, 6, {'w':9.3}), 88 | # (3, 4, {'w':2.2}), 89 | # (3, 6, {'w':8.1}), 90 | # (6, 5, {'w':8.2}), 91 | # 92 | # (3, 7, {'w':9.4}), 93 | # (4, 7, {'w':9.5}), 94 | # 95 | # (5, 0, {'w':18}), 96 | # (6, 0, {'w':10}), 97 | # 98 | # (6, 7, {'w':7}), 99 | # (6, 9, {'w':9.6}), 100 | # (7, 9, {'w':5}), 101 | # 102 | # (7, 8, {'w':4.2}), 103 | # (8, 9, {'w':1}), 104 | # (9, 0, {'w':3.2}), 105 | # (8, 0, {'w':4.3}), 106 | #] 107 | #G.add_edges_from(edge_list) 108 | # 109 | #T=nx.minimum_spanning_tree(G) 110 | #print(sorted(T.edges(data=True))) 111 | # 112 | #nx.draw(G) 113 | #plt.draw() 114 | #plt.show() 115 | -------------------------------------------------------------------------------- /pympler/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | DATA_PATH = '' 4 | 5 | # DATA_PATH will be initialized from distutils when installing. If Pympler is 6 | # installed via setuptools/easy_install, the data will be installed alongside 7 | # the source files instead. 8 | if not os.path.exists(DATA_PATH): 9 | DATA_PATH = os.path.realpath(os.path.join(__file__, '..', '..')) 10 | -------------------------------------------------------------------------------- /pympler/charts.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generate charts from gathered data. 3 | 4 | Requires **matplotlib**. 
5 | """ 6 | 7 | try: 8 | import matplotlib 9 | matplotlib.use('Agg') 10 | import matplotlib.pyplot as plt 11 | 12 | def tracker_timespace(filename, stats): 13 | """ 14 | Create a time-space chart from a ``Stats`` instance. 15 | """ 16 | classlist = list(stats.index.keys()) 17 | classlist.sort() 18 | 19 | for snapshot in stats.snapshots: 20 | stats.annotate_snapshot(snapshot) 21 | 22 | timestamps = [fp.timestamp for fp in stats.snapshots] 23 | offsets = [0] * len(stats.snapshots) 24 | poly_labels = [] 25 | polys = [] 26 | for clsname in classlist: 27 | pct = [fp.classes[clsname]['pct'] for fp in stats.snapshots] 28 | if max(pct) > 3.0: 29 | sizes = [fp.classes[clsname]['sum'] for fp in stats.snapshots] 30 | sizes = [float(x)/(1024*1024) for x in sizes] 31 | sizes = [offset+size for offset, size in zip(offsets, sizes)] 32 | poly = matplotlib.mlab.poly_between(timestamps, offsets, sizes) 33 | polys.append( (poly, {'label': clsname}) ) 34 | poly_labels.append(clsname) 35 | offsets = sizes 36 | 37 | fig = plt.figure(figsize=(10, 4)) 38 | axis = fig.add_subplot(111) 39 | 40 | axis.set_title("Snapshot Memory") 41 | axis.set_xlabel("Execution Time [s]") 42 | axis.set_ylabel("Virtual Memory [MiB]") 43 | 44 | totals = [x.asizeof_total for x in stats.snapshots] 45 | totals = [float(x)/(1024*1024) for x in totals] 46 | axis.plot(timestamps, totals, 'r--', label='Total') 47 | tracked = [x.tracked_total for x in stats.snapshots] 48 | tracked = [float(x)/(1024*1024) for x in tracked] 49 | axis.plot(timestamps, tracked, 'b--', label='Tracked total') 50 | 51 | for (args, kwds) in polys: 52 | axis.fill(*args, **kwds) 53 | axis.legend(loc=2) # TODO fill legend 54 | fig.savefig(filename) 55 | 56 | except ImportError: 57 | def tracker_timespace(*_args): 58 | pass 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /pympler/garbagegraph.py: -------------------------------------------------------------------------------- 1 | 2 | from 
    def __init__(self, reduce=False, collectable=True):
        """
        Initialize the GarbageGraph with the objects identified by the garbage
        collector. If `collectable` is true, every reference cycle is recorded.
        Otherwise only uncollectable objects are reported.
        """
        if collectable:
            # DEBUG_SAVEALL makes the collector append every object it finds
            # unreachable to gc.garbage instead of freeing it.
            gc.set_debug(gc.DEBUG_SAVEALL)
        else:
            # With debug flags cleared, gc.garbage receives only objects the
            # collector cannot free (uncollectable garbage).
            gc.set_debug(0)
        # Force a collection pass so gc.garbage is populated now.
        gc.collect()

        # Build the reference graph from whatever the pass accumulated.
        ReferenceGraph.__init__(self, gc.garbage, reduce)
def start_debug_garbage():
    """Prepare for analysis of *collectable* reference cycles.

    Flushes any garbage already pending, then switches automatic
    collection off so that cycles created afterwards linger for
    inspection instead of being reclaimed.
    """
    gc.collect()
    gc.disable()
Undesirable or 22 | unexpected runtime behavior like memory bloat and other "pymples" 23 | can easily be identified. 24 | 25 | Pympler integrates three previously separate projects into a single, 26 | comprehensive profiling tool. Asizeof provides basic size information 27 | for one or several Python objects, muppy is used for on-line 28 | monitoring of a Python application and the class tracker provides 29 | off-line analysis of the lifetime of selected Python objects. A 30 | web profiling frontend exposes process statistics, garbage 31 | visualisation and class tracker statistics. 32 | 33 | Pympler is written entirely in Python, with no dependencies to 34 | external libraries. It has been tested with Python 2.4, 2.5, 2.6, 2.7, 35 | 3.1, 3.2 on Linux, Windows and MacOS X. 36 | ''' 37 | --------------------------------------------------------------------------------