├── .gitignore
├── .project
├── .pydevproject
├── Breadth-First-Search
├── .gitignore
├── BFS.dis
├── BFS.dis.py
├── ReadMe.md
├── graph_gen.py
├── run.py
└── test_run_10_100_200.txt
├── ConcurrentMutex
├── ReadMe.md
├── auxiliary.py
├── bakery.py
├── fast.py
└── main.py
├── DistributedMutex
├── .gitignore
├── RAtoken.dis
├── ReadMe.md
├── SKtoken.dis
├── lamport.dis
├── main.py
└── mutex2n.dis
├── Maximal-Independent-Set
├── .gitignore
├── InputGraph.py
├── MIS-sequential.png
├── MIS.dis
├── MIS.dis.py
├── ReadMe.md
├── graph-1
├── graph-1.png
├── graph-2
├── graph-2a.png
├── graph-2b.png
├── graph-2c.png
├── graph-2d.png
└── run.py
├── Minimum-Spanning-Tree
├── .gitignore
├── 1000edge-100node-graph
├── 1000edge-100node-graph-output.txt
├── Kruskal.py
├── MST.dis
├── MST.dis.py
├── ReadMe.md
├── graph-1
├── graph-2
├── graph-3
├── graph-3-output.txt
├── graph_gen.py
├── img
│ ├── 1000edge-100node-graph.png
│ ├── MST-figure.png
│ ├── MST_algorithm.png
│ ├── graph-3-segment1sol.png
│ ├── graph-3-segment2sol.png
│ ├── graph-3-sketch.png
│ ├── graph-3-sol.png
│ ├── test_case_1.png
│ └── test_case_2.png
├── old
│ ├── .gitignore
│ ├── mst_attempt_1.dis
│ ├── mst_attempt_2.dis
│ └── sequential_messaging_test.dis
├── papers
│ ├── GHS_enhanced.pdf
│ └── GHS_original.pdf
├── run.py
└── tools.py
├── ReadMe.md
├── ShortestPath
├── .gitignore
├── InputGraph.py
├── ReadMe.md
├── ShortestPath.dis
├── ShortestPath.dis.py
├── graph-1
├── graph-2
└── run.py
├── distalgo
├── PKG-INFO
├── README
├── __init__.py
├── compiler
│ ├── __init__.py
│ ├── __main__.py
│ ├── await.py
│ ├── base.py
│ ├── codegen.py
│ ├── compiler.py
│ ├── consts.py
│ ├── dist.py
│ ├── distast.py
│ ├── event.py
│ ├── exceptions.py
│ ├── info.py
│ ├── label.py
│ ├── mesgcomp.py
│ └── send.py
└── runtime
│ ├── __init__.py
│ ├── __main__.py
│ ├── endpoint.py
│ ├── event.py
│ ├── proc.py
│ ├── sim.py
│ ├── tcp.py
│ ├── udp.py
│ └── util.py
├── draw.py
├── graph_gen.py
├── networkx
├── __init__.py
├── algorithms
│ ├── __init__.py
│ ├── approximation
│ │ ├── __init__.py
│ │ ├── clique.py
│ │ ├── dominating_set.py
│ │ ├── independent_set.py
│ │ ├── matching.py
│ │ ├── ramsey.py
│ │ ├── tests
│ │ │ ├── test_clique.py
│ │ │ ├── test_dominating_set.py
│ │ │ ├── test_independent_set.py
│ │ │ ├── test_matching.py
│ │ │ ├── test_ramsey.py
│ │ │ └── test_vertex_cover.py
│ │ └── vertex_cover.py
│ ├── assortativity
│ │ ├── __init__.py
│ │ ├── connectivity.py
│ │ ├── correlation.py
│ │ ├── mixing.py
│ │ ├── neighbor_degree.py
│ │ ├── pairs.py
│ │ └── tests
│ │ │ ├── base_test.py
│ │ │ ├── test_connectivity.py
│ │ │ ├── test_correlation.py
│ │ │ ├── test_mixing.py
│ │ │ ├── test_neighbor_degree.py
│ │ │ └── test_pairs.py
│ ├── bipartite
│ │ ├── __init__.py
│ │ ├── basic.py
│ │ ├── centrality.py
│ │ ├── cluster.py
│ │ ├── projection.py
│ │ ├── redundancy.py
│ │ ├── spectral.py
│ │ └── tests
│ │ │ ├── test_basic.py
│ │ │ ├── test_centrality.py
│ │ │ ├── test_cluster.py
│ │ │ ├── test_project.py
│ │ │ └── test_spectral_bipartivity.py
│ ├── block.py
│ ├── boundary.py
│ ├── centrality
│ │ ├── __init__.py
│ │ ├── betweenness.py
│ │ ├── betweenness_subset.py
│ │ ├── closeness.py
│ │ ├── communicability_alg.py
│ │ ├── current_flow_betweenness.py
│ │ ├── current_flow_betweenness_subset.py
│ │ ├── current_flow_closeness.py
│ │ ├── degree_alg.py
│ │ ├── eigenvector.py
│ │ ├── flow_matrix.py
│ │ ├── load.py
│ │ └── tests
│ │ │ ├── test_betweenness_centrality.py
│ │ │ ├── test_betweenness_centrality_subset.py
│ │ │ ├── test_closeness_centrality.py
│ │ │ ├── test_communicability.py
│ │ │ ├── test_current_flow_betweenness_centrality.py
│ │ │ ├── test_current_flow_betweenness_centrality_subset.py
│ │ │ ├── test_current_flow_closeness.py
│ │ │ ├── test_degree_centrality.py
│ │ │ ├── test_eigenvector_centrality.py
│ │ │ └── test_load_centrality.py
│ ├── chordal
│ │ ├── __init__.py
│ │ ├── chordal_alg.py
│ │ └── tests
│ │ │ └── test_chordal.py
│ ├── clique.py
│ ├── cluster.py
│ ├── community
│ │ ├── __init__.py
│ │ ├── kclique.py
│ │ └── tests
│ │ │ └── test_kclique.py
│ ├── components
│ │ ├── __init__.py
│ │ ├── attracting.py
│ │ ├── biconnected.py
│ │ ├── connected.py
│ │ ├── strongly_connected.py
│ │ ├── tests
│ │ │ ├── test_attracting.py
│ │ │ ├── test_biconnected.py
│ │ │ ├── test_connected.py
│ │ │ ├── test_strongly_connected.py
│ │ │ └── test_weakly_connected.py
│ │ └── weakly_connected.py
│ ├── core.py
│ ├── cycles.py
│ ├── dag.py
│ ├── distance_measures.py
│ ├── distance_regular.py
│ ├── euler.py
│ ├── flow
│ │ ├── __init__.py
│ │ ├── maxflow.py
│ │ ├── mincost.py
│ │ └── tests
│ │ │ ├── test_maxflow.py
│ │ │ ├── test_maxflow_large_graph.py
│ │ │ └── test_mincost.py
│ ├── graphical.py
│ ├── hierarchy.py
│ ├── isolate.py
│ ├── isomorphism
│ │ ├── __init__.py
│ │ ├── isomorph.py
│ │ ├── isomorphvf2.py
│ │ ├── matchhelpers.py
│ │ ├── tests
│ │ │ ├── iso_r01_s80.A99
│ │ │ ├── iso_r01_s80.B99
│ │ │ ├── si2_b06_m200.A99
│ │ │ ├── si2_b06_m200.B99
│ │ │ ├── test_isomorphism.py
│ │ │ ├── test_isomorphvf2.py
│ │ │ └── test_vf2userfunc.py
│ │ └── vf2userfunc.py
│ ├── link_analysis
│ │ ├── __init__.py
│ │ ├── hits_alg.py
│ │ ├── pagerank_alg.py
│ │ └── tests
│ │ │ ├── test_hits.py
│ │ │ └── test_pagerank.py
│ ├── matching.py
│ ├── mis.py
│ ├── mst.py
│ ├── operators
│ │ ├── __init__.py
│ │ ├── all.py
│ │ ├── binary.py
│ │ ├── product.py
│ │ ├── tests
│ │ │ ├── test_all.py
│ │ │ ├── test_binary.py
│ │ │ ├── test_product.py
│ │ │ └── test_unary.py
│ │ └── unary.py
│ ├── richclub.py
│ ├── shortest_paths
│ │ ├── __init__.py
│ │ ├── astar.py
│ │ ├── dense.py
│ │ ├── generic.py
│ │ ├── tests
│ │ │ ├── test_astar.py
│ │ │ ├── test_dense.py
│ │ │ ├── test_dense_numpy.py
│ │ │ ├── test_generic.py
│ │ │ ├── test_unweighted.py
│ │ │ └── test_weighted.py
│ │ ├── unweighted.py
│ │ └── weighted.py
│ ├── simple_paths.py
│ ├── smetric.py
│ ├── swap.py
│ ├── tests
│ │ ├── test_block.py
│ │ ├── test_boundary.py
│ │ ├── test_clique.py
│ │ ├── test_cluster.py
│ │ ├── test_core.py
│ │ ├── test_cycles.py
│ │ ├── test_dag.py
│ │ ├── test_distance_measures.py
│ │ ├── test_distance_regular.py
│ │ ├── test_euler.py
│ │ ├── test_graphical.py
│ │ ├── test_hierarchy.py
│ │ ├── test_matching.py
│ │ ├── test_mis.py
│ │ ├── test_mst.py
│ │ ├── test_richclub.py
│ │ ├── test_simple_paths.py
│ │ ├── test_smetric.py
│ │ ├── test_swap.py
│ │ └── test_vitality.py
│ ├── traversal
│ │ ├── __init__.py
│ │ ├── breadth_first_search.py
│ │ ├── depth_first_search.py
│ │ └── tests
│ │ │ ├── test_bfs.py
│ │ │ └── test_dfs.py
│ └── vitality.py
├── classes
│ ├── __init__.py
│ ├── digraph.py
│ ├── function.py
│ ├── graph.py
│ ├── multidigraph.py
│ ├── multigraph.py
│ └── tests
│ │ ├── historical_tests.py
│ │ ├── test_digraph.py
│ │ ├── test_digraph_historical.py
│ │ ├── test_function.py
│ │ ├── test_graph.py
│ │ ├── test_graph_historical.py
│ │ ├── test_multidigraph.py
│ │ └── test_multigraph.py
├── convert.py
├── drawing
│ ├── __init__.py
│ ├── layout.py
│ ├── nx_agraph.py
│ ├── nx_pydot.py
│ ├── nx_pylab.py
│ └── tests
│ │ ├── test_agraph.py
│ │ ├── test_layout.py
│ │ ├── test_pydot.py
│ │ └── test_pylab.py
├── exception.py
├── external
│ ├── __init__.py
│ └── decorator
│ │ ├── __init__.py
│ │ ├── _decorator.py
│ │ └── _decorator3.py
├── generators
│ ├── __init__.py
│ ├── atlas.py
│ ├── bipartite.py
│ ├── classic.py
│ ├── degree_seq.py
│ ├── directed.py
│ ├── ego.py
│ ├── geometric.py
│ ├── hybrid.py
│ ├── intersection.py
│ ├── line.py
│ ├── random_clustered.py
│ ├── random_graphs.py
│ ├── small.py
│ ├── social.py
│ ├── stochastic.py
│ ├── tests
│ │ ├── test_atlas.py
│ │ ├── test_bipartite.py
│ │ ├── test_classic.py
│ │ ├── test_degree_seq.py
│ │ ├── test_directed.py
│ │ ├── test_ego.py
│ │ ├── test_geometric.py
│ │ ├── test_hybrid.py
│ │ ├── test_intersection.py
│ │ ├── test_line.py
│ │ ├── test_random_clustered.py
│ │ ├── test_random_graphs.py
│ │ ├── test_small.py
│ │ ├── test_stochastic.py
│ │ └── test_threshold.py
│ └── threshold.py
├── linalg
│ ├── __init__.py
│ ├── attrmatrix.py
│ ├── graphmatrix.py
│ ├── laplacianmatrix.py
│ ├── spectrum.py
│ └── tests
│ │ ├── test_graphmatrix.py
│ │ ├── test_laplaican.py
│ │ └── test_spectrum.py
├── readwrite
│ ├── __init__.py
│ ├── adjlist.py
│ ├── edgelist.py
│ ├── gexf.py
│ ├── gml.py
│ ├── gpickle.py
│ ├── graphml.py
│ ├── json_graph
│ │ ├── __init__.py
│ │ ├── adjacency.py
│ │ ├── node_link.py
│ │ ├── serialize.py
│ │ ├── tests
│ │ │ ├── test_adjacency.py
│ │ │ ├── test_node_link.py
│ │ │ ├── test_serialize.py
│ │ │ └── test_tree.py
│ │ └── tree.py
│ ├── leda.py
│ ├── multiline_adjlist.py
│ ├── nx_shp.py
│ ├── nx_yaml.py
│ ├── p2g.py
│ ├── pajek.py
│ ├── sparsegraph6.py
│ └── tests
│ │ ├── test_adjlist.py
│ │ ├── test_edgelist.py
│ │ ├── test_gexf.py
│ │ ├── test_gml.py
│ │ ├── test_gpickle.py
│ │ ├── test_graphml.py
│ │ ├── test_leda.py
│ │ ├── test_p2g.py
│ │ ├── test_pajek.py
│ │ ├── test_shp.py
│ │ ├── test_sparsegraph6.py
│ │ └── test_yaml.py
├── relabel.py
├── release.py
├── testing
│ ├── __init__.py
│ ├── tests
│ │ └── test_utils.py
│ └── utils.py
├── tests
│ ├── __init__.py
│ ├── benchmark.py
│ ├── test.py
│ ├── test_convert.py
│ ├── test_convert_numpy.py
│ ├── test_convert_scipy.py
│ ├── test_exceptions.py
│ └── test_relabel.py
├── utils
│ ├── __init__.py
│ ├── decorators.py
│ ├── misc.py
│ ├── random_sequence.py
│ ├── rcm.py
│ ├── tests
│ │ ├── test_decorators.py
│ │ ├── test_misc.py
│ │ ├── test_random_sequence.py
│ │ └── test_rcm.py
│ └── union_find.py
└── version.py
├── nx_test.py
└── pympler
├── __init__.py
├── asizeof.py
├── charts.py
├── classtracker.py
├── classtracker_stats.py
├── garbagegraph.py
├── metadata.py
├── mprofile.py
├── muppy.py
├── process.py
├── refbrowser.py
├── refgraph.py
├── summary.py
├── tracker.py
└── web.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[co]
2 |
3 | # Packages
4 | *.egg
5 | *.egg-info
6 | dist
7 | build
8 | eggs
9 | parts
10 | bin
11 | var
12 | sdist
13 | develop-eggs
14 | .installed.cfg
15 |
16 | # Installer logs
17 | pip-log.txt
18 |
19 | # Unit test / coverage reports
20 | .coverage
21 | .tox
22 |
23 | #Translations
24 | *.mo
25 |
26 | #Mr Developer
27 | .mr.developer.cfg
28 |
29 | #Text editor backup files
30 | *~
31 |
32 | random_graph
33 |
34 | ignore
35 |
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | DistAlgo
4 |
5 |
6 |
7 |
8 |
9 | org.python.pydev.PyDevBuilder
10 |
11 |
12 |
13 |
14 |
15 | org.python.pydev.pythonNature
16 |
17 |
18 |
--------------------------------------------------------------------------------
/.pydevproject:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | /DistAlgo
7 |
8 | python 3.0
9 | Default
10 |
11 |
--------------------------------------------------------------------------------
/Breadth-First-Search/.gitignore:
--------------------------------------------------------------------------------
1 | # DistAlgo compiled .py files & runtime log files
2 | BFS.py
3 | BFS.log
4 | BFS_full.py
5 |
--------------------------------------------------------------------------------
/Breadth-First-Search/BFS.dis:
--------------------------------------------------------------------------------
1 | BFS.dis.py
--------------------------------------------------------------------------------
/Breadth-First-Search/graph_gen.py:
--------------------------------------------------------------------------------
1 | ../graph_gen.py
--------------------------------------------------------------------------------
/Breadth-First-Search/run.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3

import sys
sys.path.append("..") # if DistAlgo is not installed, use the one in parent directory

from distalgo.runtime import *

# Insert the DistAlgo source file as the first argument, keeping any
# user-supplied arguments (e.g. process count) after it, then hand
# control to the DistAlgo runtime.
sys.argv = [sys.argv[0], "BFS.dis"] + sys.argv[1:]

libmain()
11 |
--------------------------------------------------------------------------------
/ConcurrentMutex/ReadMe.md:
--------------------------------------------------------------------------------
1 | Concurrent Mutex algorithms
2 | ---------------------------
3 | This is an implementation of Lamport's fast mutual exclusion and bakery
4 | algorithms for atomizing access to crucial resources. The purpose of
5 | both algorithms is to ensure that a segment of code known as the
6 | **critical section** does _not_ get executed concurrently.
7 |
8 | The module `fast.py` implements Lamport's fast mutex algorithm and
9 | the module `bakery.py` implements Lamport's bakery algorithm.
10 |
11 | The module `auxiliary.py` defines three important functions: `random_distribution`, `await` and `default_task`.
12 |
13 | `random_distribution` is a function that takes a number of threads,
14 | and a total number of requests and returns a list L where L[i]
15 | represents a number of requests (randomly assigned) to thread i.
16 |
17 | `await(func)` takes a function as argument, busy waits until
18 | the return value of `func()` becomes True.
19 |
20 | `default_task()` defines the default task to be executed while
21 | inside the critical section. Currently it is a CPU hog that
22 | computes prime numbers up to an "nth" value specified inside the
23 | module. The prime number calculator was lifted from Stack Overflow.
24 |
25 | `main.py` starts up n threads and m requests per thread, n and m being
26 | passed as command line arguments. It runs both tests & terminates.
27 |
--------------------------------------------------------------------------------
/ConcurrentMutex/auxiliary.py:
--------------------------------------------------------------------------------
1 |
2 | import random
3 |
def random_distribution(total_requests, num_threads):
    """
    Randomly split total_requests requests across num_threads threads.

    Returns a list L of length num_threads where L[i] is the number of
    requests (randomly assigned) to thread i, with sum(L) == total_requests.

    Raises ValueError if requests remain but there are no threads to
    receive them (previously this case spun forever).
    """
    if total_requests > 0 and num_threads <= 0:
        raise ValueError("cannot distribute requests across zero threads")

    requests_per_thread = [0] * num_threads

    while total_requests > 0:
        for i in range(num_threads):
            # Stop as soon as everything has been handed out. Without this
            # check the inner loop kept assigning after the total reached
            # zero, so sum(L) could exceed total_requests.
            if total_requests == 0:
                break
            if random.choice( (True, False) ):
                requests_per_thread[i] += 1
                total_requests -= 1
    return requests_per_thread
20 |
def await(func):
    # Busy-wait (spin) until func() returns a truthy value.
    # NOTE(review): 'await' is a fully reserved keyword since Python 3.7,
    # so this definition is a SyntaxError on modern interpreters; it only
    # parses on the older Python 3 this project targets. Renaming (with an
    # alias for callers in bakery.py/fast.py) would be needed to modernize.
    while not func():
        pass
24 |
def default_task():
    """Default critical-section workload: a CPU hog computing primes below a fixed bound."""

    def primes(n):
        """Return a list of all primes < n (odds-only sieve of Eratosthenes)."""
        # Adapted from:
        # http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188
        flags = [True] * n
        for cand in range(3, int(n ** 0.5) + 1, 2):
            if not flags[cand]:
                continue
            # Knock out every odd multiple of cand starting at cand^2.
            span = int((n - cand * cand - 1) / (2 * cand) + 1)
            flags[cand * cand::2 * cand] = [False] * span
        return [2] + [v for v in range(3, n, 2) if flags[v]]

    # change this to a lower value to speed things up:
    primes(185 * 1000)
39 |
--------------------------------------------------------------------------------
/ConcurrentMutex/bakery.py:
--------------------------------------------------------------------------------
1 |
2 | import threading
3 | from auxiliary import *
4 |
class Bakery(threading.Thread):
    """ Lamport's bakery algorithm

    Each thread takes a "ticket" one larger than any outstanding ticket,
    then waits until no other thread holds a smaller (ticket, id) pair
    before entering the critical section.
    """

    # These are shared (static class) variables:

    threads = None       # all Bakery thread objects; filled in by setup()
    thread_count = 0     # number of threads; filled in by setup()
    req_count = 0        # replaced in setup() by a per-thread list of request counts

    go = False # used to delay run() until all threads have started.

    choosing = []        # choosing[i] == 1 while thread i is picking a ticket
    num = []             # num[i] is thread i's current ticket (0 = no request)

    # NOTE(review): x and y are never referenced in this class; they look
    # copied over from fast.py -- confirm and remove.
    x, y = 0, 0

    def __init__(self, i):
        # i is this thread's identifier, used to index choosing/num.
        super().__init__()
        self.i = i
        #print('Constructed Process', i, 'Thread object (%s).' % self.getName())

    def cs(self, task = default_task):
        # Run `task` inside the critical section, guarded by the bakery
        # entry/exit protocol.

        print('Process', self.i, 'requesting CS')

        # Doorway: announce that we are in the middle of choosing a ticket.
        Bakery.choosing[self.i] = 1

        # Take a ticket larger than every ticket currently visible.
        # NOTE(review): this read of max(num) is not atomic, so two threads
        # can draw equal tickets; the (num[j], j) >= (num[i], i) tie-break
        # below is what the classic algorithm relies on in that case.
        Bakery.num[self.i] = 1 + max(Bakery.num)

        Bakery.choosing[self.i] = 0

        # Wait, for each thread j, until j is done choosing and has no
        # earlier claim to the CS.
        # NOTE(review): thread ids are 0-based here (main.py constructs
        # Bakery(i) for i in range(n)), but this scan covers 1..thread_count
        # only -- index 0 is never examined, which looks like an off-by-one
        # against the intended 1-based numbering; confirm.
        for j in range(1, Bakery.thread_count + 1):
            # await choosing[j] == 0
            await(lambda: Bakery.choosing[j] == 0)

            # await num[j] == 0 or (num[j],j) >= (num[i],i)
            await( lambda: Bakery.num[j] == 0 or (Bakery.num[j], j) >= (Bakery.num[self.i], self.i) )

        print('Process', self.i, 'entering CS')

        task()

        print('Process', self.i, 'exiting CS')

        # Exit protocol: withdraw our ticket so waiting threads may proceed.
        Bakery.num[self.i] = 0

    def run(self):
        # wait until all threads have started:
        while not Bakery.go:
            pass

        # call cs() req_count times:
        # (req_count is the per-thread list built by setup())
        for _ in range(Bakery.req_count[self.i]):
            self.cs()
59 |
60 |
def setup(threads, req_count):
    """Register the thread pool and randomly apportion req_count requests over it."""
    n = len(threads)
    Bakery.threads = threads
    Bakery.thread_count = n
    Bakery.req_count = random_distribution(req_count, n)

    # Sized n + 1 to match the 1-based scan in cs().
    Bakery.choosing = [0] * (n + 1)
    Bakery.num = [0] * (n + 1)
68 |
def start():
    """Start every registered thread, then raise the go flag to release them together."""
    for worker in Bakery.threads:
        worker.start()
    Bakery.go = True
73 |
--------------------------------------------------------------------------------
/ConcurrentMutex/fast.py:
--------------------------------------------------------------------------------
1 |
2 | import threading
3 | from auxiliary import *
4 |
class Fast(threading.Thread):
    """ Lamport's fast mutual exclusion algorithm

    Contending threads write their id into the shared variables x and y;
    a thread that observes both of its writes undisturbed enters the
    critical section without further waiting (the "fast path").
    """

    # These are shared (static class) variables:

    threads = None       # all Fast thread objects; filled in by setup()
    thread_count = 0     # number of threads; filled in by setup()
    req_count = 0        # replaced in setup() by a per-thread list of request counts

    go = False # used to delay run() until all threads have started.

    choosing = []        # choosing[i] == 1 while thread i is contending
    # x and y are Lamport's shared claim variables; 0 means y is unclaimed.
    # NOTE(review): thread ids start at 0 (main.py creates Fast(i) for i in
    # range(n)), so id 0 is indistinguishable from the "unclaimed" sentinel
    # here -- the algorithm assumes ids start at 1; confirm.
    x, y = 0, 0

    def __init__(self, i):
        # i is this thread's identifier, used to index choosing and to
        # claim x/y.
        super().__init__()
        self.i = i
        #print('Constructed Process', i, 'Thread object (%s).' % self.getName())

    def cs(self, task = default_task):
        # Run `task` inside the critical section, guarded by the fast-mutex
        # entry/exit protocol.

        print('Process', self.i, 'requesting CS')

        def can_i_enter_cs():
            # One attempt at the entry protocol; returns True when this
            # thread may enter the CS, False when it must retry.
            Fast.choosing[self.i] = 1
            Fast.x = self.i

            if Fast.y != 0:
                # Another thread holds (or held) y: back off and wait for
                # it to be released before retrying.
                Fast.choosing[self.i] = 0

                # await y == 0:
                await(lambda: Fast.y == 0)

                return False

            Fast.y = self.i

            if Fast.x != self.i:
                # Our write to x was overwritten: fall back to the slow
                # path and wait out all contenders.
                Fast.choosing[self.i] = 0

                # for j:=1..thread_count+1: await b[j] == 0:
                [await(lambda: Fast.choosing[j] == 0) for j in range(1, Fast.thread_count + 1)]

                if Fast.y != self.i:
                    # Lost the race for y as well: wait for release, retry.
                    # await y == 0:
                    await(lambda: Fast.y == 0)
                    return False

            return True

        # Spin until an attempt at the entry protocol succeeds.
        while not can_i_enter_cs():
            pass

        print('Process', self.i, 'entering CS')

        task()

        print('Process', self.i, 'exiting CS')

        # Exit protocol: release y, then clear our contention flag.
        Fast.y = 0
        Fast.choosing[self.i] = 0

    def run(self):
        # wait until all threads have started:
        while not Fast.go:
            pass

        # call cs() req_count times:
        # (req_count is the per-thread list built by setup())
        for _ in range(Fast.req_count[self.i]):
            self.cs()
75 |
76 |
def setup(threads, req_count):
    """Register the thread pool and randomly apportion req_count requests over it."""
    n = len(threads)
    Fast.threads = threads
    Fast.thread_count = n
    Fast.req_count = random_distribution(req_count, n)

    # Sized n + 1 to match the 1-based scan in cs().
    Fast.choosing = [0] * (n + 1)
83 |
def start():
    """Start every registered thread, then raise the go flag to release them together."""
    for worker in Fast.threads:
        worker.start()
    Fast.go = True
88 |
--------------------------------------------------------------------------------
/ConcurrentMutex/main.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3

import sys, fast, bakery

if __name__ == "__main__":
    # Thread and request counts come from the command line, with defaults.
    if len(sys.argv) == 3:
        num_of_threads = int(sys.argv[1])
        num_of_reqs = int(sys.argv[2])
    else:
        num_of_threads, num_of_reqs = 3, 15  # defaults

    # Run each algorithm in turn: fast mutex first, then bakery.
    runs = (
        ("\nRunning Lamport's fast mutual exclusion algorithm", fast, fast.Fast),
        ("\n\nRunning Lamport's bakery algorithm", bakery, bakery.Bakery),
    )
    for banner, module, factory in runs:
        print(banner)
        threads = [factory(i) for i in range(num_of_threads)]
        module.setup(threads, num_of_reqs)
        module.start()

        # wait for all threads to die..
        for thread in threads:
            thread.join()

    print()
30 |
--------------------------------------------------------------------------------
/DistributedMutex/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | old
3 | lamport.py
4 | mutex2n.py
5 | RAtoken.py
6 | SKtoken.py
7 | test*
8 | *.log
9 |
--------------------------------------------------------------------------------
/DistributedMutex/ReadMe.md:
--------------------------------------------------------------------------------
1 | Distributed Mutex (DMX) algorithms
2 | ----------------------------------
3 | This is an implementation of two token-based DMX algorithms in DistAlgo: Ricart-Agrawala's token-based algorithm and Suzuki-Kasami's token-based algorithm.
4 |
5 | * `RAtoken.dis` contains Ricart-Agrawala's algorithm. For this algorithm, I followed the pseudocode which can be found in the top-level comment in `RAtoken.dis`.
6 |
7 | * `SKtoken.dis` contains Suzuki-Kasami's algorithm. For this algorithm, I followed [a description of the algorithm by Mikhail Nesterenko](http://vega.cs.kent.edu/~mikhail/classes/aos.f01/l17tokenDMX.pdf) of Kent State.
8 |
9 | Both DistAlgo programs accept a single integer command-line argument specifying the number of processes to start. The default value for this for both is `5`.
10 |
11 | You can ignore `lamport.dis`, `mutex2n.dis` and `main.dis`.
12 |
--------------------------------------------------------------------------------
/DistributedMutex/main.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3

'''
Development driver -- you can ignore this file.

To run the DistAlgo programs directly, use:

    python3 -m distalgo.runtime RAtoken.dis

    python3 -m distalgo.runtime SKtoken.dis

Disclaimer: for some strange reason this file doesn't work properly when run
from the command line. It worked fine under Eclipse during development; this
might have something to do with PYTHONPATH.
'''

import sys
from distalgo.runtime import *

RA, SK = 'RA', 'SK'

# Map each algorithm name to its DistAlgo source file.
programs = {RA: "RAtoken.dis", SK: "SKtoken.dis"}

# Suzuki-Kasami is the default; a command-line argument overrides it.
prog = sys.argv[1] if len(sys.argv) > 1 else SK

if prog in programs:
    sys.argv = [ sys.argv[0], programs[prog] ]
    libmain()
else:
    print("Command-line argument must be 'RA' or 'SK'. (Not %s)" % sys.argv[1])
    sys.exit(1)
37 |
--------------------------------------------------------------------------------
/Maximal-Independent-Set/.gitignore:
--------------------------------------------------------------------------------
1 | # DistAlgo compiled .py files & runtime log files
2 | MIS.py
3 | MIS.log
4 |
--------------------------------------------------------------------------------
/Maximal-Independent-Set/InputGraph.py:
--------------------------------------------------------------------------------
1 |
2 | import sys
3 | sys.path.append('..')
4 | import networkx as nx
5 |
def get_graph():
    "Process command-line arguments and build the graph."

    # Drop the script name so argparse sees only the real arguments.
    sys.argv = sys.argv[1:]

    def construct_graph(file):
        # Parse an edge-list file ('A B 2' per line) into a networkx Graph.
        # Lines that do not contain exactly three whitespace-separated
        # tokens (including blank lines) are silently skipped, matching the
        # original behavior.
        edge_list = []
        with open(file, 'r') as f:
            for line in f:
                parts = line.split()
                if len(parts) == 3:
                    n1, n2, w = parts
                    edge_list.append((n1, n2, {'weight': int(w)}))

        G = nx.Graph()
        G.add_edges_from(edge_list)
        return G

    import argparse
    # Fixed: the description previously said "(MST)" -- this module finds a
    # Maximal Independent Set (MIS), not a spanning tree.
    parser = argparse.ArgumentParser(description='Finds the vertices of the Maximal Independent Set (MIS) of a given graph.')
    parser.add_argument('graph', nargs='?', type=construct_graph, default='graph-1', help=
        'File listing the edges of a graph line-by-line in the following style: "A B 2", where "A" and "B" are node names and "2" is the weight of the edge connecting them.')

    # argparse applies `type` to the string default too, so the default
    # file 'graph-1' is also parsed by construct_graph.
    args = parser.parse_args()
    return args.graph
34 |
--------------------------------------------------------------------------------
/Maximal-Independent-Set/MIS-sequential.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/MIS-sequential.png
--------------------------------------------------------------------------------
/Maximal-Independent-Set/MIS.dis:
--------------------------------------------------------------------------------
1 | MIS.dis.py
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-1:
--------------------------------------------------------------------------------
1 |
2 | A F 2
3 | F G 7
4 | G H 15
5 | H J 13
6 | J I 9
7 | I C 18
8 | C B 17
9 | B A 3
10 |
11 | E F 1
12 | E G 6
13 | E H 5
14 | E I 10
15 | E D 11
16 |
17 | I H 12
18 | D I 4
19 | D C 8
20 | D B 16
21 |
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-1.png
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-2:
--------------------------------------------------------------------------------
1 |
2 | A F 2
3 | F G 7
4 | G H 15
5 | H J 13
6 | J I 9
7 | I C 18
8 | C B 17
9 | B A 3
10 |
11 | E F 1
12 | E G 6
13 | E H 5
14 | E I 10
15 | E D 11
16 |
17 | I H 12
18 | D I 4
19 | D C 8
20 | D B 16
21 |
22 | L M 20
23 | K L 21
24 | K M 22
25 | J K 23
26 |
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-2a.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2a.png
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-2b.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2b.png
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-2c.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2c.png
--------------------------------------------------------------------------------
/Maximal-Independent-Set/graph-2d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Maximal-Independent-Set/graph-2d.png
--------------------------------------------------------------------------------
/Maximal-Independent-Set/run.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3

import sys
sys.path.append("..") # if DistAlgo is not installed, use the one in parent directory

from distalgo.runtime import *

# Insert the DistAlgo source file as the first argument, keeping any
# user-supplied arguments (e.g. process count) after it, then hand
# control to the DistAlgo runtime.
sys.argv = [sys.argv[0], "MIS.dis"] + sys.argv[1:]

libmain()
11 |
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/.gitignore:
--------------------------------------------------------------------------------
1 | # DistAlgo compiled .py files & runtime log files
2 | MST.py
3 | MST.log
4 | sol
5 |
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/MST.dis:
--------------------------------------------------------------------------------
1 | MST.dis.py
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/graph-1:
--------------------------------------------------------------------------------
1 |
2 | A F 2
3 | F G 7
4 | G H 15
5 | H J 13
6 | J I 9
7 | I C 18
8 | C B 17
9 | B A 3
10 |
11 | E F 1
12 | E G 6
13 | E H 5
14 | E I 10
15 | E D 11
16 |
17 | I H 12
18 | D I 4
19 | D C 8
20 | D B 16
21 |
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/graph-2:
--------------------------------------------------------------------------------
1 |
2 | A F 2
3 | F G 7
4 | G H 15
5 | H J 13
6 | J I 9
7 | I C 18
8 | C B 17
9 | B A 3
10 |
11 | E F 1
12 | E G 6
13 | E H 5
14 | E I 10
15 | E D 11
16 |
17 | I H 12
18 | D I 4
19 | D C 8
20 | D B 16
21 |
22 | L M 20
23 | K L 21
24 | K M 22
25 | J K 23
26 |
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/graph-3:
--------------------------------------------------------------------------------
1 |
2 | A B 1
3 | B C 10
4 | C D 14
5 | D E 15
6 | E F 13
7 | F A 12
8 | E B 5
9 |
10 | A G 20
11 |
12 | G I 3
13 | I H 17
14 | H G 16
15 |
16 | I J 57
17 |
18 | J K 22
19 | K L 23
20 | L M 21
21 | M J 2
22 |
23 | M N 54
24 |
25 | N O 36
26 | O P 34
27 | P U 42
28 | U V 48
29 | V S 41
30 | S R 35
31 | R N 47
32 |
33 | S Q 49
34 | R Q 53
35 | N Q 33
36 | O Q 45
37 | P Q 43
38 |
39 | Q T 50
40 | P T 38
41 | U T 44
42 | V T 37
43 | S T 52
44 |
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/graph_gen.py:
--------------------------------------------------------------------------------
1 | ../graph_gen.py
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/1000edge-100node-graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/1000edge-100node-graph.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/MST-figure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/MST-figure.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/MST_algorithm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/MST_algorithm.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/graph-3-segment1sol.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-segment1sol.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/graph-3-segment2sol.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-segment2sol.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/graph-3-sketch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-sketch.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/graph-3-sol.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/graph-3-sol.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/test_case_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/test_case_1.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/img/test_case_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/img/test_case_2.png
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/old/.gitignore:
--------------------------------------------------------------------------------
1 | # DistAlgo compiled .py files & runtime log files
2 | test.py
3 | test.log
4 |
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/old/sequential_messaging_test.dis:
--------------------------------------------------------------------------------
"""
Test to see if DistAlgo sends messages sequentially regardless of message size (and not out of order)

This is a necessary precondition for the GHS MST algorithm.

Result: YES, it does!
"""

# Size helper: returns a list of length 101 - n, so the 100 test messages
# sent below carry strictly decreasing payload sizes (100 down to 1).
def l(n):
    n = 101 - n
    return [i for i in range(n)]

class Spark(DistProcess):
    # DistAlgo idiom: rebinding a setup() parameter to itself turns it
    # into a process member variable.
    def setup(ps):
        ps = ps

    def main():
        # Pick an arbitrary node; pop + update leaves `ps` unchanged.
        random_node = ps.pop()
        ps.update( {random_node} )

        # Fire 100 messages of varying size at the same node; the receiver
        # logs the payload lengths so ordering can be inspected.
        for i in range(1, 101):
            send( Msg( l(i) ), random_node )

class Node(DistProcess):
    def setup():
        pass

    # Message handler: log the payload size of every incoming Msg.
    def OnMsg(m):
        output(len(m))

    def main():
        # Block forever; this process only reacts to incoming messages.
        await(False)

def main():
    use_channel("tcp")

    # Setup the nodes
    # ===============
    node_ps = createprocs(Node, 10)

    for p in node_ps:
        setupprocs([p], [])

    # Set up the spark (the sender process)
    # =====================================
    spark = createprocs(Spark, set(['Spark']))
    spark_p = spark['Spark']
    setupprocs([spark_p], [node_ps])

    startprocs(node_ps)
    startprocs([spark_p])

    # Wait for all processes to die...
    # --------------------------------
    for p in node_ps:
        p.join()

    spark_p.join()
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/papers/GHS_enhanced.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/papers/GHS_enhanced.pdf
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/papers/GHS_original.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arjun-menon/Distributed-Graph-Algorithms/a3db23a8e2046ddb736ea7cacabe4a59f4e5080a/Minimum-Spanning-Tree/papers/GHS_original.pdf
--------------------------------------------------------------------------------
/Minimum-Spanning-Tree/run.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3

# Prefer an installed DistAlgo; fall back to the copy in the parent directory.
import sys
sys.path.append("..")

from distalgo.runtime import *

# Inject the DistAlgo source file as the first program argument, keeping
# any user-supplied options after it.
sys.argv.insert(1, "MST.dis")

libmain()
--------------------------------------------------------------------------------
/ShortestPath/.gitignore:
--------------------------------------------------------------------------------
1 | ShortestPath.py
2 | ShortestPath.log
3 |
--------------------------------------------------------------------------------
/ShortestPath/InputGraph.py:
--------------------------------------------------------------------------------
1 |
2 | import sys
3 | sys.path.append('..')
4 | import networkx as nx
5 |
def graph_source_target():
    """Process command-line arguments and build the graph.

    Returns a (graph, source, target) triple: the graph parsed from an
    edge-list file plus validated source/target node names. Exits with
    status 1 when the source or target is not a node of the graph.
    """

    # Drop the leading entry; the remaining argv entries are the real
    # options (the DistAlgo launcher prepends the .dis filename).
    sys.argv = sys.argv[1:]

    def construct_graph(file):
        "Parse an edge-list file (lines of the form 'A B 2') into a Graph."
        def edge(n1, n2, w):
            return (n1, n2, {'weight': w})

        edge_list = list()

        with open(file, 'r') as f:
            # Skip blank lines; keep only lines with exactly three fields.
            edge_list = list( edge(ed.split()[0], ed.split()[1], int(ed.split()[2]))
                              for ed in
                              (e.strip() for e in f.readlines() if e.strip() != "")
                              if len(ed.split()) == 3 )

        G = nx.Graph()
        G.add_edges_from(edge_list)
        return G

    import argparse
    parser = argparse.ArgumentParser(description='Finds the shortest path.')
    parser.add_argument('graph', nargs='?', type=construct_graph, default='graph-1', help=
        'File listing the edges of a graph line-by-line in the following style: "A B 2", where "A" and "B" are node names and "2" is the weight of the edge connecting them.')
    # With nargs=1 the parsed value is a one-element list, so the defaults
    # must be lists too. The old scalar defaults ('G', 'C') only worked by
    # accident: indexing a one-character string returns the same string.
    parser.add_argument('-s', '--source', nargs=1, type=str, default=['G'], help='The source node.')
    parser.add_argument('-t', '--target', nargs=1, type=str, default=['C'], help='The target node.')

    args = parser.parse_args()

    s = args.source[0]
    t = args.target[0]
    n = args.graph.nodes()
    g = args.graph

    if s not in n:
        print("%s not in %r" % (s, n))
        sys.exit(1)

    if t not in n:
        print("%s not in %r" % (t, n))
        sys.exit(1)

    return g, s, t
50 |
--------------------------------------------------------------------------------
/ShortestPath/ShortestPath.dis:
--------------------------------------------------------------------------------
1 | ShortestPath.dis.py
--------------------------------------------------------------------------------
/ShortestPath/ShortestPath.dis.py:
--------------------------------------------------------------------------------
'''
Straightforward distributed shortest path finder based on Dijkstra's sequential algorithm
'''

from InputGraph import graph_source_target

# G: the input graph; S / T: source and target node names.
G, S, T = graph_source_target()

# Sentinel distance meaning "no path known yet".
INFINITY = 999999999

class P(DistProcess):

    # DistAlgo idiom: rebinding a setup() parameter to itself turns it
    # into a process member variable.
    def setup(ps, edges):
        edges = edges          # neighbor process -> weight of connecting edge
        weight = INFINITY      # best known distance from the source
        path = ""              # node-name sequence realizing that distance

    # Handler for NewWeight messages from neighbors.
    def OnNewWeight(new_weight, new_path):
        newWeight(new_weight, new_path)

    # Relaxation step: adopt the new distance if it improves on the best
    # known one, then flood the improvement to all neighbors.
    # NOTE(review): there is no termination detection -- processes keep
    # relaxing as long as messages arrive and block forever in main().
    def newWeight(new_weight, new_path):
        if new_weight < weight:
            weight = new_weight
            path = new_path
            if str(self) == T:
                output("New shortest path of weight %i: %s"
                       % (weight, ' -> '.join(path)))
            propogate()

    # (sic: "propogate") Send the current best distance, extended by the
    # connecting edge's weight, to every neighbor.
    def propogate():
        for p, e_w in edges.items():
            send(NewWeight(weight + e_w, path+str(p)), p)

    def main():
        # Only the source process seeds the computation with distance 0.
        if str(self) == S:
            newWeight(0, str(self))
        await(False)

def main():
    use_channel("tcp")

    # One process per graph node, named after the node.
    procs_names = set(G.nodes())
    #procs_names.update({'0'})# control process

    global procs
    procs = createprocs(P, procs_names)

    # setup the processes
    ps = set(procs.values())

    for p in ps:
        # Map each neighboring process to the connecting edge's weight.
        p_edges = { procs[node] : data['weight']
                    for (node, data) in G[repr(p)].items() }
        setupprocs([p], [ps-{p}, p_edges])

    startprocs(ps)

    # Wait for every process (they never exit on their own; the run is
    # stopped externally).
    for p in (ps):
        p.join()
--------------------------------------------------------------------------------
/ShortestPath/graph-1:
--------------------------------------------------------------------------------
1 |
2 | A F 2
3 | F G 7
4 | G H 15
5 | H J 13
6 | J I 9
7 | I C 18
8 | C B 17
9 | B A 3
10 |
11 | E F 1
12 | E G 6
13 | E H 5
14 | E I 10
15 | E D 11
16 |
17 | I H 12
18 | D I 4
19 | D C 8
20 | D B 16
21 |
--------------------------------------------------------------------------------
/ShortestPath/graph-2:
--------------------------------------------------------------------------------
1 |
2 | A F 2
3 | F G 7
4 | G H 15
5 | H J 13
6 | J I 9
7 | I C 18
8 | C B 17
9 | B A 3
10 |
11 | E F 1
12 | E G 6
13 | E H 5
14 | E I 10
15 | E D 11
16 |
17 | I H 12
18 | D I 4
19 | D C 8
20 | D B 16
21 |
22 | L M 20
23 | K L 21
24 | K M 22
25 | J K 23
26 |
--------------------------------------------------------------------------------
/ShortestPath/run.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3

import sys

# Use the DistAlgo checkout in the parent directory when no installed
# copy is available.
sys.path.append("..")

from distalgo.runtime import *

# Make "ShortestPath.dis" the first argument seen by the runtime while
# preserving all user-supplied options.
sys.argv = sys.argv[:1] + ["ShortestPath.dis"] + sys.argv[1:]

libmain()
--------------------------------------------------------------------------------
/distalgo/PKG-INFO:
--------------------------------------------------------------------------------
1 | Metadata-Version: 1.0
2 | Name: DistAlgo
3 | Version: 0.2
4 | Summary: UNKNOWN
5 | Home-page: UNKNOWN
6 | Author: bolin
7 | Author-email: bolin@cs.stonybrook.edu
8 | License: UNKNOWN
9 | Description: UNKNOWN
10 | Platform: UNKNOWN
11 |
--------------------------------------------------------------------------------
/distalgo/__init__.py:
--------------------------------------------------------------------------------
1 | # main package
2 |
3 | from . import compiler, runtime
4 |
--------------------------------------------------------------------------------
/distalgo/compiler/__init__.py:
--------------------------------------------------------------------------------
1 | # Compiler package for Distalgo
2 |
3 | from .compiler import dist_compile, dist_compile_to_file, dist_compile_to_string
4 |
5 | __all__ = ["dist_compile", "dist_compile_to_file", "dist_compile_to_string"]
6 |
--------------------------------------------------------------------------------
/distalgo/compiler/__main__.py:
--------------------------------------------------------------------------------
1 | """Main entry point"""
2 |
import sys,os
import time
# When run as `python -m distalgo`, rewrite argv[0] to a friendlier name.
if sys.argv[0].endswith("__main__.py"):
    sys.argv[0] = "python -m distalgo"

# Runtime modules bundled into the generated output when -F/--full is given.
RUNTIMEPKG = "runtime"
RUNTIMEFILES = ["event.py", "endpoint.py", "udp.py", "tcp.py", "sim.py", "util.py"]
10 |
def parseArgs(argv):
    """Parse compiler command-line options.

    `argv` is a full argument vector (argv[0] is the program name, as
    passed by main()). Returns an (options, args) pair where `args` is
    the list of source files to compile.
    """
    import optparse
    p = optparse.OptionParser()

    p.add_option("-p", action="store_true", dest='printsource')
    p.add_option("-F", action="store_true", dest='genfull')
    p.add_option("--full", action="store_true", dest='genfull')
    p.add_option("-O", action="store_true", dest='optimize')
    p.add_option("-D", action="store", dest='rootdir')
    p.add_option("-o", action="store", dest="outfile")

    p.set_defaults(printsource=False,
                   genfull=False,
                   optimize=False,
                   outfile=None,
                   rootdir=os.getcwd())

    # Honor the argv parameter (the old code called parse_args() with no
    # arguments and silently re-read sys.argv). Skip argv[0], the program
    # name, to keep behavior identical for the existing caller.
    return p.parse_args(argv[1:])
30 |
31 |
def printUsage(name):
    """Print a short usage summary for the compiler to stderr.

    The old message was garbled: it omitted the source-file placeholder
    ("where is the file name of ...").
    """
    usage = """
Usage: %s [-p] [-o outfile] <source.dis>
    where <source.dis> is the file name of the distalgo source
"""
    sys.stderr.write(usage % name)
38 |
39 | from .codegen import to_source
40 | from .compiler import dist_compile
41 |
def main():
    # Compiler entry point: compile every .dis file named on the command
    # line, optionally bundling the runtime modules into the output (-F).
    opts, args = parseArgs(sys.argv)
    print("rootdir is %s" % opts.rootdir)

    start = time.time()
    runtime = []
    if opts.genfull:
        # Inline the runtime sources so the generated .py is self-contained.
        for f in RUNTIMEFILES:
            p = os.path.join(opts.rootdir, RUNTIMEPKG, f)
            if not os.path.isfile(p):
                sys.stderr.write("File %s not found. Please specify root directory using -D.\n"%p)
                sys.exit(1)
            else:
                pfd = open(p, "r")
                runtime.extend(pfd.readlines())
                pfd.close()
    # Appended after the generated source so the output runs standalone.
    postamble = ["\nif __name__ == \"__main__\":\n",
                 "    main()\n"]

    for f in args:
        infd = open(f, 'r')
        pytree = dist_compile(infd)
        infd.close()

        pysource = to_source(pytree)

        if opts.printsource:
            # -p: dump generated source to stdout instead of writing a file.
            sys.stdout.write(pysource)
        else:
            # Replace the ".dis" suffix with ".py".
            outfile = f[:-4] + ".py"
            outfd = open(outfile, 'w')
            if opts.genfull:
                outfd.writelines(runtime)
            outfd.write(pysource)
            if opts.genfull:
                outfd.writelines(postamble)
            outfd.close()
            sys.stderr.write("Written %s.\n"%outfile)

    elapsed = time.time() - start
    sys.stderr.write("\nTotal compilation time: %f second(s).\n" % elapsed)
    return 0
84 |
85 | if __name__ == '__main__':
86 | main()
87 |
--------------------------------------------------------------------------------
/distalgo/compiler/compiler.py:
--------------------------------------------------------------------------------
1 | from ast import *
2 | from .dist import DistalgoTransformer
3 | from .codegen import to_source
4 |
def dist_compile(fd):
    """Parse DistAlgo source from `fd` and return the transformed AST."""
    distree = parse(fd.read())
    pytree = DistalgoTransformer().visit(distree)

    return pytree

def dist_compile_to_string(fd):
    """Compile DistAlgo source from `fd`; return the generated Python source.

    Delegates to dist_compile() instead of duplicating the parse/transform
    sequence.
    """
    return to_source(dist_compile(fd))

def dist_compile_to_file(fd, outfd):
    """Compile DistAlgo source from `fd`, write the generated Python source
    to `outfd`, and return the transformed AST."""
    pytree = dist_compile(fd)
    outfd.write(to_source(pytree))

    return pytree
24 |
25 |
--------------------------------------------------------------------------------
/distalgo/compiler/consts.py:
--------------------------------------------------------------------------------
1 |
# Names of generated member variables / functions that the compiler
# injects into each DistProcess class (see info.ClassInfo.gen*Stmt).
SENT_PATTERN_VARNAME = "_sent_patterns"
EVENT_PATTERN_VARNAME = "_event_patterns"
LABEL_EVENTS_VARNAME = "_label_events"
EVENT_PROC_FUNNAME = "_process_event"

# Temporary names used when translating await statements.
TIMER_VARNAME = "__await_timer_"
TIMEOUT_VARNAME = "_timeout"
TIMELEFT_VARNAME = "_timeleft"
TEMP_VARNAME = "__temp_"

# Per-message metadata field names.
LOGICAL_TIMESTAMP_VARNAME = "_timestamp"
MSG_SRCNODE_VARNAME = "_source"

# User-visible built-in function names recognized by the compiler.
SENDMSG_FUNNAME = "send"
RECEIVED_FUNNAME = "received"
SENT_FUNNAME = "sent"

# Base-class name that marks a class as a distributed process.
DISTALGO_BASE_CLASSNAME = "DistProcess"
20 |
--------------------------------------------------------------------------------
/distalgo/compiler/distast.py:
--------------------------------------------------------------------------------
1 | import ast
2 |
3 |
class Label(ast.stmt):
    """Custom AST statement node representing a DistAlgo label.

    Subclasses ast.stmt explicitly: the module does `import ast`, so the
    original bare `stmt` base raised NameError at import time.
    """
    _fields = ['name', 'body']

    def __init__(self, name, body):
        # name: label identifier; body: statements under the label.
        self.name = name
        self.body = body
9 |
10 |
class Event(ast.stmt):
    """Custom AST statement node representing a DistAlgo event handler.

    Fixes two import-time errors in the original: the bare `stmt` base
    (module only does `import ast`) and the non-default parameter `body`
    following the default `at` (a SyntaxError). `body` now defaults to
    None while keeping the positional order (name, arg, at, body).
    """
    _fields = ['name', 'arg', 'at', 'body']

    def __init__(self, name, arg, at=None, body=None):
        self.name = name
        self.arg = arg
        self.at = at
        self.body = body
18 |
--------------------------------------------------------------------------------
/distalgo/compiler/exceptions.py:
--------------------------------------------------------------------------------
1 |
class InvalidLabelException(Exception):
    """Raised by the compiler for a malformed label construct."""

class InvalidEventException(Exception):
    """Raised by the compiler for a malformed event handler definition."""

class InvalidAwaitException(Exception):
    """Raised by the compiler for a malformed await statement."""

class InvalidReceivedException(Exception):
    """Raised by the compiler for a malformed received() query."""

class InvalidSentException(Exception):
    """Raised by the compiler for a malformed sent() query."""

class InvalidSendException(Exception):
    """Raised by the compiler for a malformed send() call."""
19 |
--------------------------------------------------------------------------------
/distalgo/compiler/info.py:
--------------------------------------------------------------------------------
1 | from .consts import *
2 | from ast import *
3 |
class ClassInfo:
    """Aggregated per-class information collected during compilation."""

    def __init__(self, name, isp = True):
        self.name = name                 # class name
        self.isp = isp                   # whether this is a process class
        self.membervars = set()          # member variable names
        self.memberfuncs = set()         # member function names
        self.labels = set()              # label names
        self.events = []                 # event patterns
        self.sent_patterns = []          # patterns appearing in sent() queries
        self.newstmts = []               # statements to add to __init__
        self.newdefs = []                # function defs to add to the class

        # Every class gets the generated event-processing hooks.
        self.memberfuncs.add(EVENT_PROC_FUNNAME)
        self.membervars.add(EVENT_PATTERN_VARNAME)

    def genSentPatternStmt(self):
        """Build the AST for: self._sent_patterns = [...]"""
        target = Attribute(Name("self", Load()), SENT_PATTERN_VARNAME, Store())
        value = List([pat.toNode() for pat in self.sent_patterns], Load())
        return Assign([target], value)

    def genEventPatternStmt(self):
        """Build the AST for: self._event_patterns = [...]"""
        target = Attribute(Name("self", Load()), EVENT_PATTERN_VARNAME, Store())
        value = List([evt.toNode() for evt in self.events], Load())
        return Assign([target], value)

    def genLabelEventsStmt(self):
        """Build the AST mapping every label name to the event-pattern list."""
        target = Attribute(Name("self", Load()), LABEL_EVENTS_VARNAME, Store())
        label_list = list(self.labels)
        keys = [Str(lbl) for lbl in label_list]
        values = [Attribute(Name("self", Load()), EVENT_PATTERN_VARNAME, Load())
                  for lbl in label_list]
        return Assign([target], Dict(keys, values))
40 |
--------------------------------------------------------------------------------
/distalgo/compiler/label.py:
--------------------------------------------------------------------------------
1 | import ast
2 | from ast import *
3 | from .exceptions import InvalidLabelException
4 |
5 | LABEL_FUNC = "_label_"
6 |
7 |
8 | # This class generates unique names for all labels, aggregates all the label
9 | # names, and at the same time transforms the labels into function calls
class LabelTransformer(NodeTransformer):
    """ Generate unique names for all labels in class scope. Flattens Label
    blocks and insert self._label_ function calls. Aggregates all label names
    into a set.
    """

    def __init__(self, info):
        # info: the ClassInfo being populated for the current class.
        self.info = info
        # If the host ast module defines a Label node type, labels arrive
        # as real Label nodes; otherwise they use the --name encoding.
        self.hasLabelAst = hasattr(ast, "Label")
        info.memberfuncs.add(LABEL_FUNC)

    def insert_labels(self, body):
        # Scan a statement list for the "--name" label encoding: an
        # expression statement applying two nested unary minus operators
        # to a bare name. Replace each such statement by a _label_ call;
        # all other statements are passed through unchanged.
        new_body = []
        for stmt in body:
            if isinstance(stmt, Expr):
                if (isinstance(stmt.value, UnaryOp) and
                    isinstance(stmt.value.op, USub) and
                    isinstance(stmt.value.operand, UnaryOp) and
                    isinstance(stmt.value.operand.op, USub) and
                    isinstance(stmt.value.operand.operand, Name)):

                    fullname = stmt.value.operand.operand.id
                    self.info.labels.add(fullname)
                    stmt = self.genLabelCall(stmt, fullname)
            new_body.append(stmt)
        return new_body

    def visit_Block(self, node):
        # Shared handler for every statement-list-bearing node: recurse,
        # then rewrite --name labels in body (and orelse, where present)
        # when no native Label AST node exists.
        new_node = self.generic_visit(node)
        if not self.hasLabelAst:
            new_node.body = self.insert_labels(new_node.body)
            if hasattr(new_node, "orelse"):
                new_node.orelse = self.insert_labels(new_node.orelse)
        return new_node

    visit_FunctionDef = visit_Block
    visit_For = visit_Block
    visit_If = visit_Block
    visit_While = visit_Block
    visit_With = visit_Block
    visit_TryExcept = visit_Block
    visit_TryFinally = visit_Block

    def visit_Label(self, node):
        # Native Label nodes: record the name, prepend the _label_ call,
        # and splice the label's body into the enclosing statement list
        # (returning a list from a visitor flattens it into the parent).
        fullname = node.name
        self.info.labels.add(fullname)

        new_node = self.generic_visit(node)
        labelcall = self.genLabelCall(node, fullname)
        new_node.body.insert(0, labelcall)
        return new_node.body

    def genLabelCall(self, node, fullname):
        # Build the call `_label_('name')` carrying the original location.
        # NOTE(review): this returns a bare Call expression, not an Expr
        # statement, yet callers insert it into statement lists -- confirm
        # the code generator tolerates expressions in statement position.
        return copy_location(Call(Name(LABEL_FUNC, Load()),
                                  [Str(fullname)], [], None, None), node)
67 |
--------------------------------------------------------------------------------
/distalgo/compiler/send.py:
--------------------------------------------------------------------------------
1 | from ast import *
2 | from .exceptions import InvalidSendException
3 | from .consts import SENDMSG_FUNNAME
4 |
class SendTransformer(NodeTransformer):
    """Translates 'send' arguments into Tuples.

    Rewrites ``send(Msg(a, b), dest)`` so that the first argument becomes
    the tuple ``('Msg', a, b)``.
    """

    def __init__(self, info):
        self.info = info

    def visit_Expr(self, node):
        call = node.value
        # Only rewrite expression statements of the form send(...).
        is_send = (isinstance(call, Call) and
                   isinstance(call.func, Name) and
                   call.func.id == SENDMSG_FUNNAME)
        if not is_send:
            return node

        # send() must take exactly (message, destination).
        if len(call.args) != 2:
            raise InvalidSendException()

        first = call.args[0]
        # Leave non-constructor message arguments untouched.
        if not isinstance(first, Call):
            return node

        # Msg(a, b)  ->  ('Msg', a, b)
        call.args[0] = Tuple([Str(first.func.id)] + first.args, Load())
        return node
28 |
--------------------------------------------------------------------------------
/distalgo/runtime/__init__.py:
--------------------------------------------------------------------------------
1 | # runtime package
2 |
3 | from .__main__ import libmain
4 |
5 | __all__ = ["libmain"]
6 |
--------------------------------------------------------------------------------
/distalgo/runtime/__main__.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from .util import entrypoint
4 |
def parseArgs(argv):
    """Parse runtime command-line options from `argv`; return the
    (options, args) pair produced by optparse."""
    import optparse
    parser = optparse.OptionParser()

    # Register all options table-driven instead of one call per flag.
    for flag, action, dest in (
            ("-s",                "store",      "perffile"),
            ("--dumpfile",        "store",      "dumpfile"),
            ("--nolog",           "store_true", "nolog"),
            ("--logfile",         "store",      "logfile"),
            ("--logdir",          "store",      "logdir"),
            ("--logconsolelevel", "store",      "logconsolelevel"),
            ("--logfilelevel",    "store",      "logfilelevel")):
        parser.add_option(flag, action=action, dest=dest)

    parser.set_defaults(perffile=None,
                        iterations="10",
                        dumpfile=None,
                        numprocs="1",
                        other=None,
                        nolog=False,
                        logfile=None,
                        logdir=None,
                        logconsolelevel="INFO",
                        logfilelevel="DEBUG")

    return parser.parse_args(argv)
29 |
30 |
def cut_cmdline():
    """Split sys.argv at the first DistAlgo source file (*.dis / *.run).

    Returns (runtime arguments up to and including the source file,
    program arguments from the source file onward); dies if no source
    file is present.
    """
    for idx, arg in enumerate(sys.argv):
        if arg.endswith((".dis", ".run")):
            return (sys.argv[1:idx + 1], sys.argv[idx:])
    die("No DistAlgo source file specified.")
36 |
def libmain():
    """
    Main program entry point. Parses command line options, sets up global
    variables, and calls the 'main' function of the DistAlgo program.
    """
    runtime_argv, program_argv = cut_cmdline()
    options, positional = parseArgs(runtime_argv)
    entrypoint(options, positional, program_argv)
47 |
def die(mesg=None):
    """Write `mesg` (if given) to stderr and exit with status 1."""
    if mesg is not None:  # identity test for None instead of `!= None`
        sys.stderr.write(mesg + "\n")
    sys.exit(1)
52 |
53 | if __name__ == '__main__':
54 | libmain()
55 |
--------------------------------------------------------------------------------
/distalgo/runtime/endpoint.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
class EndPoint:
    """Base class for communication endpoints.

    An EndPoint identifies a process by its (host, port) address and an
    optional human-readable name; concrete transports (e.g. UdpEndPoint)
    subclass it. Equality, ordering and hashing are by address only.
    """

    def __init__(self, name=None):
        self._name = name
        self._proc = None    # hosting Process object, set by the parent
        self._log = logging.getLogger("runtime.EndPoint")
        self._address = ('localhost', 0)

    def send(self, data, src, timestamp = 0):
        """Transport hook; overridden by concrete endpoints."""
        pass

    def recv(self, block, timeout = None):
        """Transport hook; overridden by concrete endpoints."""
        pass

    def setname(self, name):
        self._name = name

    def getlogname(self):
        """Name used for logging: the endpoint name, or host_port."""
        if self._name is None:
            return "%s_%s" % (self._address[0], str(self._address[1]))
        else:
            return self._name

    ###################################################
    # Make the EndPoint behave like a Process object:

    def is_alive(self):
        if self._proc is not None:
            return self._proc.is_alive()
        else:
            # Logger.warn() is a deprecated alias; use warning().
            self._log.warning("is_alive can only be called from parent process.")
            return self

    def join(self):
        if self._proc is not None:
            return self._proc.join()
        else:
            self._log.warning("join can only be called from parent process.")
            return self

    def terminate(self):
        if self._proc is not None:
            return self._proc.terminate()
        else:
            self._log.warning("terminate can only be called from parent process.")
            return self

    ###################################################

    def __getstate__(self):
        # Pickle only the picklable identity, not the process or logger.
        return ("EndPoint", self._address, self._name)

    def __setstate__(self, value):
        proto, self._address, self._name = value
        # Restore the attributes __getstate__ dropped; the original left
        # _proc unset, so is_alive/join/terminate raised AttributeError
        # on unpickled endpoints.
        self._proc = None
        self._log = logging.getLogger("runtime.EndPoint")

    def __str__(self):
        if self._name is None:
            return str(self._address)
        else:
            return self._name

    def __repr__(self):
        if self._name is None:
            return str(self._address[1])
        else:
            return self._name

    def __hash__(self):
        return hash(self._address)

    # Comparison operators consider only the address, never the name.
    def __eq__(self, obj):
        if not hasattr(obj, "_address"):
            return False
        return self._address == obj._address
    def __lt__(self, obj):
        return self._address < obj._address
    def __le__(self, obj):
        return self._address <= obj._address
    def __gt__(self, obj):
        return self._address > obj._address
    def __ge__(self, obj):
        return self._address >= obj._address
    def __ne__(self, obj):
        if not hasattr(obj, "_address"):
            return True
        return self._address != obj._address
89 |
--------------------------------------------------------------------------------
/distalgo/runtime/event.py:
--------------------------------------------------------------------------------
class EventPattern:
    """ Describes an event "pattern" that can be used to match against Event
    instances.
    """
    def __init__(self, etype, mtype, consts, var, handlers=None):
        self.etype = etype     # Event type
        self.mtype = mtype     # Message type
        self.consts = consts   # (index, value) constants in pattern
        self.var = var         # (index, name) variables in pattern
        # Fresh list per instance: the original `handlers=[]` default was
        # a single shared list mutated by every pattern created without
        # explicit handlers.
        self.handlers = [] if handlers is None else handlers

    def match(self, event):
        """Return True iff `event` has the same event and message type,
        every constant matches its data slot, and every variable index is
        within the data's bounds."""
        if (not ((self.etype == event.etype) and
                 (self.mtype == event.mtype))):
            return False

        for (index, value) in self.consts:
            if (index >= len(event.data) or
                event.data[index] != value):
                return False
        for (index, name) in self.var:
            if (index >= len(event.data)):
                return False

        return True
26 |
class Event:
    """ Describes a single event.

    Instances of Event are created by the backend thread and passed to the
    front end.
    """
    # Event types:
    receive = 0    # A message was received
    send = 1       # A message was sent
    user = 2       # User defined
    peerjoin = 3   # A new peer joined the network
    peerdown = 4   # Connection to a peer is lost

    def __init__(self, etype, source, timestamp, message):
        # The message's first element is its type tag; the whole message
        # (tag included) is retained as the event data.
        self.etype, self.source = etype, source
        self.timestamp = timestamp
        self.mtype, self.data = message[0], message
46 |
--------------------------------------------------------------------------------
/distalgo/runtime/udp.py:
--------------------------------------------------------------------------------
1 | import socket
2 | import pickle
3 | import random
4 | import logging
5 |
6 | if not __name__ == "__main__":
7 | from .event import *
8 | from .endpoint import EndPoint
9 |
10 | MIN_UDP_PORT = 10000
11 | MAX_UDP_PORT = 20000
12 | MAX_UDP_BUFSIZE = 200000
13 |
class UdpEndPoint(EndPoint):
    # Shared outgoing socket, lazily created on the first send().
    sender = None

    def __init__(self, name=None, host='localhost', port=None):
        super().__init__(name)

        self._log = logging.getLogger("runtime.UdpEndPoint")
        UdpEndPoint.sender = None

        self._conn = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        if port is None:
            # Keep trying random ports in [MIN_UDP_PORT, MAX_UDP_PORT]
            # until one binds successfully.
            while True:
                self._address = (host,
                                 random.randint(MIN_UDP_PORT, MAX_UDP_PORT))
                try:
                    self._conn.bind(self._address)
                    break
                except socket.error:
                    pass
        else:
            self._address = (host, port)
            self._conn.bind(self._address)

        self._log.debug("UdpEndPoint %s initialization complete",
                        str(self._address))


    def send(self, data, src, timestamp = 0):
        # Deliver to this endpoint's own address: send() is presumably
        # invoked on the *destination's* unpickled EndPoint handle (see
        # __getstate__/__setstate__), so self._address is the remote side.
        if UdpEndPoint.sender is None:
            UdpEndPoint.sender = socket.socket(socket.AF_INET,
                                               socket.SOCK_DGRAM)

        bytedata = pickle.dumps((src, timestamp, data))
        if len(bytedata) > MAX_UDP_BUFSIZE:
            # Oversized datagrams are dropped (and logged), not fragmented.
            self._log.warn("Data size exceeded maximum buffer size!" +
                           " Outgoing packet dropped.")
            self._log.debug("Dropped packet: %s", str((src, timestamp, data)))

        elif UdpEndPoint.sender.sendto(bytedata, self._address) != len(bytedata):
            raise socket.error()

    def recvmesgs(self):
        # Generator yielding (source, timestamp, data) triples until a
        # socket error ends the loop.
        flags = 0

        try:
            while True:
                bytedata = self._conn.recv(MAX_UDP_BUFSIZE, flags)
                src, tstamp, data = pickle.loads(bytedata)
                if not isinstance(src, UdpEndPoint):
                    raise TypeError()
                else:
                    yield (src, tstamp, data)
        except socket.error as e:
            self._log.debug("socket.error occured, terminating receive loop.")

    def __getstate__(self):
        # Pickle only the protocol tag, address and name; sockets and
        # loggers are not picklable.
        return ("UDP", self._address, self._name)

    def __setstate__(self, value):
        proto, self._address, self._name = value
        self._conn = None
        self._log = logging.getLogger("runtime.UdpEndPoint")
76 |
--------------------------------------------------------------------------------
/draw.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3
# Render the graph described by an edge-list file (argv[1]) with
# networkx + matplotlib.

import sys
file = sys.argv[1]   # path to the edge-list file

def edge(n1, n2, w):
    # Build an edge triple in networkx's (u, v, attr_dict) form.
    return (n1, n2, {'weight':w})

edge_list = list()

with open(file, 'r') as f:
    # Keep non-blank lines with exactly three fields: "A B weight".
    edge_list = list( edge(ed.split()[0], ed.split()[1], int(ed.split()[2]))
                      for ed in
                      (e.strip() for e in f.readlines() if e.strip() != "")
                      if len(ed.split()) == 3 )

sys.path.append('..')   # pick up the bundled networkx copy
import networkx as nx
G = nx.Graph()
G.add_edges_from(edge_list)

"Draw graph using_matplotlib"
import matplotlib
# Swap out the headless 'agg' backend so a window can actually open.
if matplotlib.rcParams['backend'] == 'agg':
    matplotlib.rcParams['backend'] = 'Qt4Agg'

import matplotlib.pyplot as plt
pos=nx.spring_layout(G, weight = None)
nx.draw_networkx_nodes(G,pos, node_size=330)
nx.draw_networkx_edges(G,pos, set(G.edges()), width=2)
nx.draw_networkx_labels(G,pos, font_size=12, font_family='sans-serif')

plt.draw()
plt.show()
35 |
--------------------------------------------------------------------------------
/networkx/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | NetworkX
3 | ========
4 |
5 | NetworkX (NX) is a Python package for the creation, manipulation, and
6 | study of the structure, dynamics, and functions of complex networks.
7 |
8 | https://networkx.lanl.gov/
9 |
10 | Using
11 | -----
12 |
13 | Just write in Python
14 |
15 | >>> import networkx as nx
16 | >>> G=nx.Graph()
17 | >>> G.add_edge(1,2)
18 | >>> G.add_node("spam")
19 | >>> print(G.nodes())
20 | [1, 2, 'spam']
21 | >>> print(G.edges())
22 | [(1, 2)]
23 | """
24 | # Copyright (C) 2004-2010 by
25 | # Aric Hagberg
26 | # Dan Schult
27 | # Pieter Swart
28 | # All rights reserved.
29 | # BSD license.
30 | #
31 | # Add platform dependent shared library path to sys.path
32 | #
33 |
34 | from __future__ import absolute_import
35 |
36 | import sys
37 | if sys.version_info[:2] < (2, 6):
38 | m = "Python version 2.6 or later is required for NetworkX (%d.%d detected)."
39 | raise ImportError(m % sys.version_info[:2])
40 | del sys
41 |
42 | # Release data
43 | from networkx import release
44 |
45 | __author__ = '%s <%s>\n%s <%s>\n%s <%s>' % \
46 | ( release.authors['Hagberg'] + release.authors['Schult'] + \
47 | release.authors['Swart'] )
48 | __license__ = release.license
49 |
50 | __date__ = release.date
51 | __version__ = release.version
52 |
53 | #These are import orderwise
54 | from networkx.exception import *
55 | import networkx.external
56 | import networkx.utils
57 | # these packages work with Python >= 2.6
58 |
59 | import networkx.classes
60 | from networkx.classes import *
61 |
62 |
63 | import networkx.convert
64 | from networkx.convert import *
65 |
66 | import networkx.relabel
67 | from networkx.relabel import *
68 |
69 | import networkx.generators
70 | from networkx.generators import *
71 |
72 | import networkx.readwrite
73 | from networkx.readwrite import *
74 |
75 | #Need to test with SciPy, when available
76 | import networkx.algorithms
77 | from networkx.algorithms import *
78 | import networkx.linalg
79 |
80 | from networkx.linalg import *
81 | from networkx.tests.test import run as test
82 |
83 | import networkx.drawing
84 | from networkx.drawing import *
85 |
86 |
--------------------------------------------------------------------------------
/networkx/algorithms/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.assortativity import *
2 | from networkx.algorithms.block import *
3 | from networkx.algorithms.boundary import *
4 | from networkx.algorithms.centrality import *
5 | from networkx.algorithms.cluster import *
6 | from networkx.algorithms.clique import *
7 | from networkx.algorithms.community import *
8 | from networkx.algorithms.components import *
9 | from networkx.algorithms.core import *
10 | from networkx.algorithms.cycles import *
11 | from networkx.algorithms.dag import *
12 | from networkx.algorithms.distance_measures import *
13 | from networkx.algorithms.flow import *
14 | from networkx.algorithms.hierarchy import *
15 | from networkx.algorithms.matching import *
16 | from networkx.algorithms.mis import *
17 | from networkx.algorithms.mst import *
18 | from networkx.algorithms.link_analysis import *
19 | from networkx.algorithms.operators import *
20 | from networkx.algorithms.shortest_paths import *
21 | from networkx.algorithms.smetric import *
22 | from networkx.algorithms.traversal import *
23 | from networkx.algorithms.isolate import *
24 | from networkx.algorithms.euler import *
25 | from networkx.algorithms.vitality import *
26 | from networkx.algorithms.chordal import *
27 | from networkx.algorithms.richclub import *
28 | from networkx.algorithms.distance_regular import *
29 | from networkx.algorithms.swap import *
30 | from networkx.algorithms.graphical import *
31 | from networkx.algorithms.simple_paths import *
32 |
33 | import networkx.algorithms.assortativity
34 | import networkx.algorithms.bipartite
35 | import networkx.algorithms.centrality
36 | import networkx.algorithms.cluster
37 | import networkx.algorithms.clique
38 | import networkx.algorithms.components
39 | import networkx.algorithms.flow
40 | import networkx.algorithms.isomorphism
41 | import networkx.algorithms.link_analysis
42 | import networkx.algorithms.shortest_paths
43 | import networkx.algorithms.traversal
44 | import networkx.algorithms.chordal
45 | import networkx.algorithms.operators
46 |
47 | from networkx.algorithms.bipartite import projected_graph,project,is_bipartite
48 | from networkx.algorithms.isomorphism import is_isomorphic,could_be_isomorphic,\
49 | fast_could_be_isomorphic,faster_could_be_isomorphic
50 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.approximation.clique import *
2 | from networkx.algorithms.approximation.dominating_set import *
3 | from networkx.algorithms.approximation.independent_set import *
4 | from networkx.algorithms.approximation.matching import *
5 | from networkx.algorithms.approximation.ramsey import *
6 | from networkx.algorithms.approximation.vertex_cover import *
7 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/independent_set.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Independent Set
4 |
5 | Independent set or stable set is a set of vertices in a graph, no two of
6 | which are adjacent. That is, it is a set I of vertices such that for every
7 | two vertices in I, there is no edge connecting the two. Equivalently, each
8 | edge in the graph has at most one endpoint in I. The size of an independent
9 | set is the number of vertices it contains.
10 |
11 | A maximum independent set is a largest independent set for a given graph G
12 | and its size is denoted α(G). The problem of finding such a set is called
13 | the maximum independent set problem and is an NP-hard optimization problem.
14 | As such, it is unlikely that there exists an efficient algorithm for finding
15 | a maximum independent set of a graph.
16 |
17 | http://en.wikipedia.org/wiki/Independent_set_(graph_theory)
18 |
19 | Independent set algorithm is based on the following paper:
20 |
21 | `O(|V|/(log|V|)^2)` apx of maximum clique/independent set.
22 |
23 | Boppana, R., & Halldórsson, M. M. (1992).
24 | Approximating maximum independent sets by excluding subgraphs.
25 | BIT Numerical Mathematics, 32(2), 180–196. Springer.
26 | doi:10.1007/BF01994876
27 |
28 | """
29 | # Copyright (C) 2011-2012 by
30 | # Nicholas Mancuso
31 | # All rights reserved.
32 | # BSD license.
33 | from networkx.algorithms.approximation import clique_removal
34 | __all__ = ["maximum_independent_set"]
35 | __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
36 |
37 |
def maximum_independent_set(graph):
    """Return an approximate maximum independent set.

    Parameters
    ----------
    graph : NetworkX graph
        Undirected graph

    Returns
    -------
    iset : Set
        The apx-maximum independent set

    Notes
    -----
    Finds the `O(|V|/(log|V|)^2)` apx of independent set in the worst case.

    References
    ----------
    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
       Approximating maximum independent sets by excluding subgraphs.
       BIT Numerical Mathematics, 32(2), 180–196. Springer.
    """
    # clique_removal returns (independent set, list of cliques); only the
    # independent set is of interest here.
    independent_nodes, _cliques = clique_removal(graph)
    return independent_nodes
64 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/matching.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | **************
4 | Graph Matching
5 | **************
6 |
7 | Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
8 | edges; that is, no two edges share a common vertex.
9 |
10 | http://en.wikipedia.org/wiki/Matching_(graph_theory)
11 | """
12 | # Copyright (C) 2011-2012 by
13 | # Nicholas Mancuso
14 | # All rights reserved.
15 | # BSD license.
16 | import networkx as nx
17 | __all__ = ["min_maximal_matching"]
18 | __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
19 |
def min_maximal_matching(graph):
    """Returns a set of edges such that no two edges share a common endpoint
    and every edge not in the set shares some common endpoint in the set.

    Parameters
    ----------
    graph : NetworkX graph
        Undirected graph

    Returns
    -------
    min_maximal_matching : set
        A maximal matching of the graph; its cardinality is at most
        2 * OPT in the worst case.

    References
    ----------
    .. [1] Vazirani, Vijay Approximation Algorithms (2001)
    """
    # Any maximal matching is a 2-approximation of the minimum one.
    matching = nx.maximal_matching(graph)
    return matching
41 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/ramsey.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Ramsey numbers.
4 | """
5 | # Copyright (C) 2011 by
6 | # Nicholas Mancuso
7 | # All rights reserved.
8 | # BSD license.
9 | import networkx as nx
10 | __all__ = ["ramsey_R2"]
11 | __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
12 |
def ramsey_R2(graph):
    r"""Approximately computes the Ramsey number `R(2;s,t)` for graph.

    Parameters
    ----------
    graph : NetworkX graph
        Undirected graph

    Returns
    -------
    max_pair : (set, set) tuple
        Maximum clique, Maximum independent set.
    """
    if not graph:
        return (set([]), set([]))

    # Pivot on an arbitrary node: its neighborhood can extend a clique,
    # its non-neighborhood can extend an independent set.
    node = next(graph.nodes_iter())
    nbrs = nx.all_neighbors(graph, node)
    nnbrs = nx.non_neighbors(graph, node)
    c_1, i_1 = ramsey_R2(graph.subgraph(nbrs))
    c_2, i_2 = ramsey_R2(graph.subgraph(nnbrs))

    c_1.add(node)
    i_2.add(node)
    # BUGFIX: compare candidates by size. ``max([c_1, c_2])`` used the
    # default set ordering, which is the subset partial order and gives
    # an arbitrary result for incomparable sets.
    return (max(c_1, c_2, key=len), max(i_1, i_2, key=len))
38 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/tests/test_clique.py:
--------------------------------------------------------------------------------
1 | from nose.tools import *
2 | import networkx as nx
3 | import networkx.algorithms.approximation as apxa
4 |
def test_clique_removal():
    # Complete graph: the i-set must be edge-free, cliques fully dense.
    graph = nx.complete_graph(10)
    iset, cliques = apxa.clique_removal(graph)
    eq_(nx.density(graph.subgraph(iset)), 0.0,
        "i-set not found by clique_removal!")
    for cq in cliques:
        eq_(nx.density(graph.subgraph(cq)), 1.0,
            "clique not found by clique_removal!")

    # Trivial graph: no edges at all.
    graph = nx.trivial_graph(nx.Graph())
    iset, cliques = apxa.clique_removal(graph)
    eq_(nx.density(graph.subgraph(iset)), 0.0, "i-set not found by ramsey!")
    # we should only have 1-cliques. Just singleton nodes.
    for cq in cliques:
        eq_(nx.density(graph.subgraph(cq)), 0.0,
            "clique not found by clique_removal!")

    # Barbell graph: two dense bells joined by a path.
    graph = nx.barbell_graph(10, 5, nx.Graph())
    iset, cliques = apxa.clique_removal(graph)
    eq_(nx.density(graph.subgraph(iset)), 0.0, "i-set not found by ramsey!")
    for cq in cliques:
        eq_(nx.density(graph.subgraph(cq)), 1.0,
            "clique not found by clique_removal!")
30 |
def test_max_clique_smoke():
    # smoke test: the empty graph has an empty max clique
    empty = nx.Graph()
    assert_equal(len(apxa.max_clique(empty)), 0)
35 |
def test_max_clique():
    # the max clique of a complete graph is the entire node set
    graph = nx.complete_graph(30)
    assert_equals(30, len(apxa.max_clique(graph)))
42 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/tests/test_dominating_set.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | import networkx as nx
4 | import networkx.algorithms.approximation as apxa
5 |
6 |
class TestMinWeightDominatingSet:

    def test_min_weighted_dominating_set(self):
        graph = nx.Graph()
        graph.add_edges_from([(1, 2), (1, 5), (2, 3), (2, 5),
                              (3, 4), (3, 6), (5, 6)])

        vertices = set([1, 2, 3, 4, 5, 6])
        # due to ties, this might be hard to test tight bounds
        dom_set = apxa.min_weighted_dominating_set(graph)
        for vertex in vertices - dom_set:
            neighbors = set(graph.neighbors(vertex))
            ok_(len(neighbors & dom_set) > 0, "Non dominating set found!")

    def _check_edge_domination(self, graph, dom_set):
        # Every edge outside the set must touch an endpoint of some edge
        # in the set (mirrors the original check, which inspects ``u``).
        for edge in graph.edges_iter():
            if edge in dom_set:
                continue
            u, v = edge
            found = any(u == de[0] or u == de[1] for de in dom_set)
            ok_(found, "Non adjacent edge found!")

    def test_min_edge_dominating_set(self):
        graph = nx.path_graph(5)
        self._check_edge_domination(graph,
                                    apxa.min_edge_dominating_set(graph))

        graph = nx.complete_graph(10)
        self._check_edge_domination(graph,
                                    apxa.min_edge_dominating_set(graph))
54 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/tests/test_independent_set.py:
--------------------------------------------------------------------------------
1 | from nose.tools import *
2 | import networkx as nx
3 | import networkx.algorithms.approximation as a
4 |
def test_independent_set():
    # smoke test: the empty graph yields an empty independent set
    empty = nx.Graph()
    iset = a.maximum_independent_set(empty)
    assert_equal(len(iset), 0)
9 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/tests/test_matching.py:
--------------------------------------------------------------------------------
1 | from nose.tools import *
2 | import networkx as nx
3 | import networkx.algorithms.approximation as a
4 |
def test_min_maximal_matching():
    # smoke test: no edges -> empty matching
    empty = nx.Graph()
    matching = a.min_maximal_matching(empty)
    assert_equal(len(matching), 0)
9 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/tests/test_ramsey.py:
--------------------------------------------------------------------------------
1 | from nose.tools import *
2 | import networkx as nx
3 | import networkx.algorithms.approximation as apxa
4 |
def test_ramsey():
    # Table of (graph, expected clique density); the i-set density is
    # always expected to be 0.
    cases = [
        # complete graph: the clique is the whole graph
        (nx.complete_graph(10), 1.0),
        # trivial graph has no cliques; should just find i-sets
        (nx.trivial_graph(nx.Graph()), 0.0),
        # barbell: dense bells give density-1 cliques
        (nx.barbell_graph(10, 5, nx.Graph()), 1.0),
    ]
    for graph, expected_cdens in cases:
        c, i = apxa.ramsey_R2(graph)
        cdens = nx.density(graph.subgraph(c))
        eq_(cdens, expected_cdens, "clique not found by ramsey!")
        idens = nx.density(graph.subgraph(i))
        eq_(idens, 0.0, "i-set not found by ramsey!")
28 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/tests/test_vertex_cover.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | import networkx as nx
4 | from networkx.algorithms import approximation as a
5 |
class TestMWVC:

    def test_min_vertex_cover(self):
        # create a simple star graph
        size = 50
        sg = nx.star_graph(size)
        cover = a.min_weighted_vertex_cover(sg)
        assert_equals(2, len(cover))
        for u, v in sg.edges_iter():
            ok_((u in cover or v in cover), "Node node covered!")

        # weighted case: heavy hub 0, cheap rim 1..4
        wg = nx.Graph()
        for node, w in [(0, 10), (1, 1), (2, 1), (3, 1), (4, 1)]:
            wg.add_node(node, weight=w)
        # spokes from the hub
        wg.add_edges_from([(0, n) for n in (1, 2, 3, 4)])
        # rim cycle
        wg.add_edges_from([(1, 2), (2, 3), (3, 4), (4, 1)])

        cover = a.min_weighted_vertex_cover(wg, weight="weight")
        csum = sum(wg.node[node]["weight"] for node in cover)
        assert_equals(4, csum)

        for u, v in wg.edges_iter():
            ok_((u in cover or v in cover), "Node node covered!")
40 |
--------------------------------------------------------------------------------
/networkx/algorithms/approximation/vertex_cover.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | ************
4 | Vertex Cover
5 | ************
6 |
7 | Given an undirected graph `G = (V, E)` and a function w assigning nonnegative
8 | weights to its vertices, find a minimum weight subset of V such that each edge
9 | in E is incident to at least one vertex in the subset.
10 |
11 | http://en.wikipedia.org/wiki/Vertex_cover
12 | """
13 | # Copyright (C) 2011-2012 by
14 | # Nicholas Mancuso
15 | # All rights reserved.
16 | # BSD license.
17 | from networkx.utils import *
18 | __all__ = ["min_weighted_vertex_cover"]
19 | __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
20 |
@not_implemented_for('directed')
def min_weighted_vertex_cover(graph, weight=None):
    """2-OPT Local Ratio for Minimum Weighted Vertex Cover

    Find an approximate minimum weighted vertex cover of a graph.

    Parameters
    ----------
    graph : NetworkX graph
        Undirected graph

    weight : None or string, optional (default = None)
        If None, every node has weight/cost 1. If a string, use this
        *node* attribute as the node weight; any node missing the
        attribute defaults to 1.  (The weights live on nodes, not edges
        -- the cost table below is built from ``graph.nodes``.)

    Returns
    -------
    min_weighted_cover : set
        Returns a set of vertices whose weight sum is no more than 2 * OPT.

    References
    ----------
    .. [1] Bar-Yehuda, R., & Even, S. (1985). A local-ratio theorem for
       approximating the weighted vertex cover problem.
       Annals of Discrete Mathematics, 25, 27–46
       http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf
    """
    # residual cost of adding each node to the cover
    weight_func = lambda nd: nd.get(weight, 1)
    cost = dict((n, weight_func(nd)) for n, nd in graph.nodes(data=True))

    # while there are edges uncovered, continue
    for u,v in graph.edges_iter():
        # select some uncovered edge
        # (if an endpoint is already covered its residual cost is 0, so
        # min_cost is 0 and this iteration is a harmless no-op)
        min_cost = min([cost[u], cost[v]])
        cost[u] -= min_cost
        cost[v] -= min_cost

    # nodes whose cost was fully consumed form the cover
    return set(u for u in cost if cost[u] == 0)
60 |
--------------------------------------------------------------------------------
/networkx/algorithms/assortativity/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.assortativity.connectivity import *
2 | from networkx.algorithms.assortativity.correlation import *
3 | from networkx.algorithms.assortativity.mixing import *
4 | from networkx.algorithms.assortativity.neighbor_degree import *
5 | from networkx.algorithms.assortativity.pairs import *
6 |
--------------------------------------------------------------------------------
/networkx/algorithms/assortativity/tests/base_test.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 |
class BaseTestAttributeMixing(object):

    def setUp(self):
        # All four fixture graphs share the same 'fish' node attributes;
        # they differ only in graph type and edge set.
        node_groups = [([0, 1], 'one'), ([2, 3], 'two'),
                       ([4], 'red'), ([5], 'blue')]
        edges = [(0, 1), (2, 3), (0, 4), (2, 5)]

        G = nx.Graph()
        for group, kind in node_groups:
            G.add_nodes_from(group, fish=kind)
        G.add_edges_from(edges)
        self.G = G

        D = nx.DiGraph()
        for group, kind in node_groups:
            D.add_nodes_from(group, fish=kind)
        D.add_edges_from(edges)
        self.D = D

        M = nx.MultiGraph()
        for group, kind in node_groups:
            M.add_nodes_from(group, fish=kind)
        # one parallel edge pair plus a single edge
        M.add_edges_from([(0, 1), (0, 1), (2, 3)])
        self.M = M

        # S contains only self-loops
        S = nx.Graph()
        for group, kind in node_groups:
            S.add_nodes_from(group, fish=kind)
        S.add_edge(0, 0)
        S.add_edge(2, 2)
        self.S = S
38 |
class BaseTestDegreeMixing(object):

    def setUp(self):
        # P4: undirected path; D: small digraph; M: multigraph with a
        # doubled edge; S: two isolated self-loops.
        self.P4 = nx.path_graph(4)
        D = nx.DiGraph()
        D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
        self.D = D
        M = nx.MultiGraph()
        M.add_path(list(range(4)))
        M.add_edge(0, 1)
        self.M = M
        S = nx.Graph()
        S.add_edges_from([(0, 0), (1, 1)])
        self.S = S
50 |
51 |
--------------------------------------------------------------------------------
/networkx/algorithms/assortativity/tests/test_neighbor_degree.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | import networkx as nx
4 |
class TestAverageNeighbor(object):

    def test_degree_p4(self):
        G = nx.path_graph(4)
        answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
        assert_equal(nx.average_neighbor_degree(G), answer)

        # directed copies agree with the undirected answer, with and
        # without explicit source/target selection
        assert_equal(nx.average_neighbor_degree(G.to_directed()), answer)
        assert_equal(nx.average_neighbor_degree(G.to_directed()), answer)
        assert_equal(nx.average_neighbor_degree(G.to_directed(),
                                                source='in', target='in'),
                     answer)

    def test_degree_p4_weighted(self):
        G = nx.path_graph(4)
        G[1][2]['weight'] = 4
        answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
        assert_equal(nx.average_neighbor_degree(G, weight='weight'), answer)

        assert_equal(nx.average_neighbor_degree(G.to_directed(),
                                                weight='weight'),
                     answer)

        D = G.to_directed()
        assert_equal(nx.average_neighbor_degree(D, weight='weight'), answer)
        assert_equal(nx.average_neighbor_degree(D, source='out', target='out',
                                                weight='weight'),
                     answer)

        D = G.to_directed()
        assert_equal(nx.average_neighbor_degree(D, source='in', target='in',
                                                weight='weight'),
                     answer)

    def test_degree_k4(self):
        G = nx.complete_graph(4)
        answer = {0: 3, 1: 3, 2: 3, 3: 3}
        assert_equal(nx.average_neighbor_degree(G), answer)

        assert_equal(nx.average_neighbor_degree(G.to_directed()), answer)
        assert_equal(nx.average_neighbor_degree(G.to_directed()), answer)
        assert_equal(nx.average_neighbor_degree(G.to_directed(),
                                                source='in', target='in'),
                     answer)

    def test_degree_k4_nodes(self):
        G = nx.complete_graph(4)
        # restricting the node list restricts the result dict
        expected = {1: 3.0, 2: 3.0}
        assert_equal(nx.average_neighbor_degree(G, nodes=[1, 2]), expected)

    def test_degree_barrat(self):
        # weighted (Barrat et al.) variant on a star with an attached
        # secondary star and one heavy edge
        G = nx.star_graph(5)
        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
        G[0][5]['weight'] = 5
        assert_equal(nx.average_neighbor_degree(G)[5], 1.8)
        assert_almost_equal(nx.average_neighbor_degree(G, weight='weight')[5],
                            3.222222, places=5)
81 |
82 |
83 |
--------------------------------------------------------------------------------
/networkx/algorithms/bipartite/redundancy.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 | """Node redundancy for bipartite graphs."""
3 | # Copyright (C) 2011 by
4 | # Jordi Torrents
5 | # Aric Hagberg
6 | # All rights reserved.
7 | # BSD license.
8 | from itertools import combinations
9 | import networkx as nx
10 |
11 | __author__ = """\n""".join(['Jordi Torrents ',
12 | 'Aric Hagberg (hagberg@lanl.gov)'])
13 | __all__ = ['node_redundancy']
14 |
def node_redundancy(G, nodes=None):
    r"""Compute the bipartite node redundancy coefficient.

    The redundancy coefficient of a node ``v`` is the fraction of pairs
    of neighbors of ``v`` that are both linked to some other node. In a
    one-mode projection these pairs would stay connected even if ``v``
    were removed.

    .. math::

        rc(v) = \frac{|\{\{u,w\} \subseteq N(v),
        \: \exists v' \neq v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}}

    where `N(v)` are the neighbors of `v` in `G`.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.cycle_graph(4)
    >>> rc = bipartite.node_redundancy(G)
    >>> rc[0]
    1.0

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if nodes is None:
        nodes = G
    rc = {}
    for v in nodes:
        neighbors = G[v]
        # count neighbor pairs that share at least one other common node
        overlap = 0.0
        for u, w in combinations(neighbors, 2):
            shared = set(G[u]) & set(G[w])
            shared.discard(v)
            if shared:
                overlap += 1
        if overlap > 0:
            # normalize by the number of neighbor pairs
            n = len(neighbors)
            rc[v] = overlap * (2.0 / (n * (n - 1)))
        else:
            rc[v] = 0.0
    return rc
84 |
85 |
--------------------------------------------------------------------------------
/networkx/algorithms/bipartite/spectral.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Spectral bipartivity measure.
4 | """
5 | import networkx as nx
6 | __author__ = """Aric Hagberg (hagberg@lanl.gov)"""
7 | # Copyright (C) 2011 by
8 | # Aric Hagberg
9 | # Dan Schult
10 | # Pieter Swart
11 | # All rights reserved.
12 | # BSD license.
13 | __all__ = ['spectral_bipartivity']
14 |
def spectral_bipartivity(G, nodes=None, weight='weight'):
    """Returns the spectral bipartivity.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or container optional(default is all nodes)
        Nodes to return value of spectral bipartivity contribution.

    weight : string or None optional (default = 'weight')
        Edge data key to use for edge weights. If None, weights set to 1.

    Returns
    -------
    sb : float or dict
        A single number if the keyword nodes is not specified, or
        a dictionary keyed by node with the spectral bipartivity
        contribution of that node as the value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> bipartite.spectral_bipartivity(G)
    1.0

    Notes
    -----
    This implementation uses Numpy (dense) matrices which are not efficient
    for storing large sparse graphs.

    See Also
    --------
    color

    References
    ----------
    .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
       bipartivity in complex networks", PhysRev E 72, 046105 (2005)
    """
    try:
        import scipy.linalg
    except ImportError:
        raise ImportError('spectral_bipartivity() requires SciPy: ',
                          'http://scipy.org/')
    nodelist = G.nodes()  # fixes the row/column ordering of the matrix
    A = nx.to_numpy_matrix(G, nodelist, weight=weight)
    # cosh(A) via the two matrix exponentials exp(A) and exp(-A)
    expA = scipy.linalg.expm(A)
    expmA = scipy.linalg.expm(-A)
    coshA = 0.5 * (expA + expmA)
    if nodes is None:
        # single measure for the whole graph
        return coshA.diagonal().sum() / expA.diagonal().sum()
    # per-node contributions on the matrix diagonal
    index = dict((n, i) for i, n in enumerate(nodelist))
    sb = {}
    for n in nodes:
        i = index[n]
        sb[n] = coshA[i, i] / expA[i, i]
    return sb
77 |
def setup_module(module):
    """Fixture for nose tests.

    Skips the whole module when NumPy or SciPy is unavailable.  The
    ``module`` argument is required by nose's fixture protocol and is
    unused here.
    """
    from nose import SkipTest
    # BUGFIX: catch only ImportError; the previous bare ``except:`` also
    # swallowed SystemExit/KeyboardInterrupt.
    try:
        import numpy
    except ImportError:
        raise SkipTest("NumPy not available")
    try:
        import scipy
    except ImportError:
        raise SkipTest("SciPy not available")
89 |
--------------------------------------------------------------------------------
/networkx/algorithms/bipartite/tests/test_cluster.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 | from nose.tools import *
3 | from networkx.algorithms.bipartite.cluster import cc_dot,cc_min,cc_max
4 | import networkx.algorithms.bipartite as bipartite
5 |
def test_pairwise_bipartite_cc_functions():
    # Test functions for different kinds of bipartite clustering
    # coefficients between pairs of nodes using 3 example graphs from
    # figure 5 p. 40, Latapy et al (2008)
    graphs = [
        nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6),
                  (1, 5), (1, 6), (1, 7)]),
        nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)]),
        nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6),
                  (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]),
    ]
    expected = {0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
                1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
                2: [2 / 8.0, 2 / 5.0, 2 / 5.0]}
    for i, G in enumerate(graphs):
        assert(bipartite.is_bipartite(G))
        nbrs0, nbrs1 = set(G[0]), set(G[1])
        assert(cc_dot(nbrs0, nbrs1) == expected[i][0])
        assert(cc_min(nbrs0, nbrs1) == expected[i][1])
        assert(cc_max(nbrs0, nbrs1) == expected[i][2])
19 |
def test_star_graph():
    G = nx.star_graph(3)
    # all modes give the same answer on a star
    answer = {0: 0, 1: 1, 2: 1, 3: 1}
    for mode in ('dot', 'min', 'max'):
        assert_equal(bipartite.clustering(G, mode=mode), answer)
27 |
@raises(nx.NetworkXError)
def test_not_bipartite():
    # clustering must reject graphs that are not bipartite
    G = nx.complete_graph(4)
    bipartite.clustering(G)
31 |
@raises(nx.NetworkXError)
def test_bad_mode():
    # unknown mode names are rejected
    G = nx.path_graph(4)
    bipartite.clustering(G, mode='foo')
35 |
def test_path_graph():
    G = nx.path_graph(4)
    halves = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
    assert_equal(bipartite.clustering(G, mode='dot'), halves)
    assert_equal(bipartite.clustering(G, mode='max'), halves)
    ones = {0: 1, 1: 1, 2: 1, 3: 1}
    assert_equal(bipartite.clustering(G, mode='min'), ones)
43 |
44 |
def test_average_path_graph():
    G = nx.path_graph(4)
    for mode, expected in (('dot', 0.5), ('max', 0.5), ('min', 1)):
        assert_equal(bipartite.average_clustering(G, mode=mode), expected)
50 |
51 |
52 |
53 |
54 |
--------------------------------------------------------------------------------
/networkx/algorithms/centrality/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.centrality.betweenness import *
2 | from networkx.algorithms.centrality.betweenness_subset import *
3 | from networkx.algorithms.centrality.closeness import *
4 | from networkx.algorithms.centrality.current_flow_closeness import *
5 | from networkx.algorithms.centrality.current_flow_betweenness import *
6 | from networkx.algorithms.centrality.current_flow_betweenness_subset import *
7 | from networkx.algorithms.centrality.degree_alg import *
8 | from networkx.algorithms.centrality.eigenvector import *
9 | from networkx.algorithms.centrality.load import *
10 | from networkx.algorithms.centrality.communicability_alg import *
11 | import networkx.algorithms.centrality.betweenness
12 | import networkx.algorithms.centrality.closeness
13 | import networkx.algorithms.centrality.current_flow_betweenness
14 | import networkx.algorithms.centrality.current_flow_closeness
15 | import networkx.algorithms.centrality.degree_alg
16 | import networkx.algorithms.centrality.eigenvector
17 | import networkx.algorithms.centrality.load
18 | import networkx.algorithms.centrality.communicability_alg
19 |
20 |
--------------------------------------------------------------------------------
/networkx/algorithms/centrality/closeness.py:
--------------------------------------------------------------------------------
1 | """
2 | Closeness centrality measures.
3 |
4 | """
5 | # Copyright (C) 2004-2010 by
6 | # Aric Hagberg
7 | # Dan Schult
8 | # Pieter Swart
9 | # All rights reserved.
10 | # BSD license.
11 | import functools
12 | import networkx as nx
13 | __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)',
14 | 'Pieter Swart (swart@lanl.gov)',
15 | 'Sasha Gutfraind (ag362@cornell.edu)'])
16 | __all__ = ['closeness_centrality']
17 |
18 |
def closeness_centrality(G, v=None, distance=None, normalized=True):
    """Compute closeness centrality for nodes.

    Closeness centrality at a node is 1/average distance to all other nodes.

    Parameters
    ----------
    G : graph
        A networkx graph
    v : node, optional
        Return only the value for node v
    distance : string key, optional (default=None)
        Use specified edge key as edge distance.
        If True, use 'weight' as the edge key.
    normalized : bool, optional
        If True (default) normalize by the graph size.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with closeness centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality

    Notes
    -----
    The closeness centrality is normalized to n-1 / size(G)-1 where n is
    the number of nodes in the connected part of the graph containing
    the node, so each connected part is handled separately.
    """
    # pick the shortest-path-length routine: Dijkstra when an edge
    # distance key was requested, plain BFS otherwise
    if distance is not None:
        if distance is True:
            distance = 'weight'
        path_length = functools.partial(
            nx.single_source_dijkstra_path_length, weight=distance)
    else:
        path_length = nx.single_source_shortest_path_length

    nodes = G.nodes() if v is None else [v]
    closeness = {}

    for node in nodes:
        sp = path_length(G, node)
        totsp = sum(sp.values())
        if totsp > 0.0 and len(G) > 1:
            cc = (len(sp) - 1.0) / totsp
            if normalized:
                # scale by (reachable-1)/(n-1) so disconnected graphs
                # are normalized per connected part
                cc *= (len(sp) - 1.0) / (len(G) - 1)
            closeness[node] = cc
        else:
            closeness[node] = 0.0

    if v is not None:
        return closeness[v]
    return closeness
82 |
83 |
--------------------------------------------------------------------------------
/networkx/algorithms/centrality/tests/test_current_flow_closeness.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | from nose import SkipTest
4 | import networkx
5 |
6 | class TestFlowClosenessCentrality(object):
7 | numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
8 | @classmethod
9 | def setupClass(cls):
10 | global np
11 | try:
12 | import numpy as np
13 | import scipy
14 | except ImportError:
15 | raise SkipTest('NumPy not available.')
16 |
17 |
18 | def test_K4(self):
19 | """Closeness centrality: K4"""
20 | G=networkx.complete_graph(4)
21 | b=networkx.current_flow_closeness_centrality(G,normalized=True)
22 | b_answer={0: 2.0, 1: 2.0, 2: 2.0, 3: 2.0}
23 | for n in sorted(G):
24 | assert_almost_equal(b[n],b_answer[n])
25 |
26 |
27 | def test_P4_normalized(self):
28 | """Closeness centrality: P4 normalized"""
29 | G=networkx.path_graph(4)
30 | b=networkx.current_flow_closeness_centrality(G,normalized=True)
31 | b_answer={0: 1./2, 1: 3./4, 2: 3./4, 3:1./2}
32 | for n in sorted(G):
33 | assert_almost_equal(b[n],b_answer[n])
34 |
35 |
36 | def test_P4(self):
37 | """Closeness centrality: P4"""
38 | G=networkx.path_graph(4)
39 | b=networkx.current_flow_closeness_centrality(G,normalized=False)
40 | b_answer={0: 1.0/6, 1: 1.0/4, 2: 1.0/4, 3:1.0/6}
41 | for n in sorted(G):
42 | assert_almost_equal(b[n],b_answer[n])
43 |
44 | def test_star(self):
45 | """Closeness centrality: star """
46 | G=networkx.Graph()
47 | G.add_star(['a','b','c','d'])
48 | b=networkx.current_flow_closeness_centrality(G,normalized=True)
49 | b_answer={'a': 1.0, 'b': 0.6, 'c': 0.6, 'd':0.6}
50 | for n in sorted(G):
51 | assert_almost_equal(b[n],b_answer[n])
52 |
53 |
54 |
class TestWeightedFlowClosenessCentrality(object):
    """Placeholder for weighted current-flow closeness tests (none yet)."""
57 |
--------------------------------------------------------------------------------
/networkx/algorithms/chordal/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.chordal.chordal_alg import *
2 |
3 |
4 |
--------------------------------------------------------------------------------
/networkx/algorithms/chordal/tests/test_chordal.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | import networkx as nx
4 |
class TestMCS:

    def setUp(self):
        # Chordal graph in a single connected component.
        connected = nx.Graph()
        connected.add_edges_from([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4),
                                  (3, 5), (3, 6), (4, 5), (4, 6), (5, 6)])
        self.connected_chordal_G = connected

        # Same graph plus a separate edge (7,8) and an isolated node 9.
        chordal = nx.Graph()
        chordal.add_edges_from([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4),
                                (3, 5), (3, 6), (4, 5), (4, 6), (5, 6),
                                (7, 8)])
        chordal.add_node(9)
        self.chordal_G = chordal

        # Graph containing a chordless 4-cycle: not chordal.
        non_chordal = nx.Graph()
        non_chordal.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5),
                                    (3, 4), (3, 5)])
        self.non_chordal_G = non_chordal

    def test_is_chordal(self):
        assert_false(nx.is_chordal(self.non_chordal_G))
        assert_true(nx.is_chordal(self.chordal_G))
        assert_true(nx.is_chordal(self.connected_chordal_G))
        assert_true(nx.is_chordal(nx.complete_graph(3)))
        assert_true(nx.is_chordal(nx.cycle_graph(3)))
        assert_false(nx.is_chordal(nx.cycle_graph(5)))

    def test_induced_nodes(self):
        path = nx.generators.classic.path_graph(10)
        induced = nx.find_induced_nodes(path, 1, 9, 2)
        assert_equal(induced, set([1, 2, 3, 4, 5, 6, 7, 8, 9]))
        # Treewidth bound of 1 is too tight for a path fill-in.
        assert_raises(nx.NetworkXTreewidthBoundExceeded,
                      nx.find_induced_nodes, path, 1, 9, 1)
        induced = nx.find_induced_nodes(self.chordal_G, 1, 6)
        assert_equal(induced, set([1, 2, 4, 6]))
        # Non-chordal input must be rejected.
        assert_raises(nx.NetworkXError,
                      nx.find_induced_nodes, self.non_chordal_G, 1, 5)

    def test_chordal_find_cliques(self):
        expected = set([frozenset([9]), frozenset([7, 8]),
                        frozenset([1, 2, 3]), frozenset([2, 3, 4]),
                        frozenset([3, 4, 5, 6])])
        assert_equal(nx.chordal_graph_cliques(self.chordal_G), expected)

    def test_chordal_find_cliques_path(self):
        path = nx.path_graph(10)
        cliqueset = nx.chordal_graph_cliques(path)
        # Each edge of a path is a maximal clique of size two.
        for (u, v) in path.edges_iter():
            assert_true(frozenset([u, v]) in cliqueset
                        or frozenset([v, u]) in cliqueset)

    def test_chordal_find_cliquesCC(self):
        expected = set([frozenset([1, 2, 3]), frozenset([2, 3, 4]),
                        frozenset([3, 4, 5, 6])])
        assert_equal(nx.chordal_graph_cliques(self.connected_chordal_G),
                     expected)
59 |
60 |
--------------------------------------------------------------------------------
/networkx/algorithms/community/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.community.kclique import *
2 |
--------------------------------------------------------------------------------
/networkx/algorithms/community/tests/test_kclique.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | import networkx as nx
4 | from itertools import combinations
5 | from networkx import k_clique_communities
6 |
def test_overlaping_K5():
    # Two K5s sharing nodes 2, 3 and 4.
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))
    G.add_edges_from(combinations(range(2, 7), 2))
    # With k=4 the shared K4s percolate both cliques into one community.
    communities = list(nx.k_clique_communities(G, 4))
    assert_equal(communities, [frozenset([0, 1, 2, 3, 4, 5, 6])])
    # With k=5 each K5 is its own community.
    communities = list(nx.k_clique_communities(G, 5))
    assert_equal(set(communities),
                 set([frozenset([0, 1, 2, 3, 4]),
                      frozenset([2, 3, 4, 5, 6])]))
15 |
def test_isolated_K5():
    # Two disjoint K5s percolate as two separate communities.
    G = nx.Graph()
    G.add_edges_from(combinations(range(0, 5), 2))
    G.add_edges_from(combinations(range(5, 10), 2))
    communities = list(nx.k_clique_communities(G, 5))
    assert_equal(set(communities),
                 set([frozenset([0, 1, 2, 3, 4]),
                      frozenset([5, 6, 7, 8, 9])]))
22 |
def test_zachary():
    z = nx.karate_club_graph()
    # Ground truth per k; clique percolation with k=2 is just
    # connected components.
    expected = {
        2: set([frozenset(z.nodes())]),
        3: set([frozenset([0, 1, 2, 3, 7, 8, 12, 13, 14,
                           15, 17, 18, 19, 20, 21, 22, 23,
                           26, 27, 28, 29, 30, 31, 32, 33]),
                frozenset([0, 4, 5, 6, 10, 16]),
                frozenset([24, 25, 31])]),
        4: set([frozenset([0, 1, 2, 3, 7, 13]),
                frozenset([8, 32, 30, 33]),
                frozenset([32, 33, 29, 23])]),
        5: set([frozenset([0, 1, 2, 3, 7, 13])]),
        6: set([]),
    }
    for k in sorted(expected):
        assert set(k_clique_communities(z, k)) == expected[k]
43 |
@raises(nx.NetworkXError)
def test_bad_k():
    # k below 2 must be rejected.
    list(k_clique_communities(nx.Graph(), 1))
47 |
--------------------------------------------------------------------------------
/networkx/algorithms/components/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.components.connected import *
2 | from networkx.algorithms.components.strongly_connected import *
3 | from networkx.algorithms.components.weakly_connected import *
4 | from networkx.algorithms.components.attracting import *
5 | from networkx.algorithms.components.biconnected import *
6 |
--------------------------------------------------------------------------------
/networkx/algorithms/components/tests/test_attracting.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from nose.tools import *
3 | import networkx as nx
4 |
5 |
class TestAttractingComponents(object):
    def setUp(self):
        # Digraph whose attracting components are the sinks 2, 9 and 10.
        self.G1 = nx.DiGraph()
        self.G1.add_edges_from([(5, 11), (11, 2), (11, 9), (11, 10),
                                (7, 11), (7, 8), (8, 9), (3, 8), (3, 10)])

        # Single attracting component {1, 2}.
        self.G2 = nx.DiGraph()
        self.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)])

        # Two attracting components, {1, 2} and {3, 4}.
        self.G3 = nx.DiGraph()
        self.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4),
                                (4, 3)])

    def test_attracting_components(self):
        components = nx.attracting_components(self.G1)
        assert_true([2] in components)
        assert_true([9] in components)
        assert_true([10] in components)

        components = [tuple(sorted(c))
                      for c in nx.attracting_components(self.G2)]
        assert_true(components == [(1, 2)])

        components = [tuple(sorted(c))
                      for c in nx.attracting_components(self.G3)]
        assert_true((1, 2) in components)
        assert_true((3, 4) in components)
        assert_equal(len(components), 2)

    def test_number_attacting_components(self):
        assert_equal(len(nx.attracting_components(self.G1)), 3)
        assert_equal(len(nx.attracting_components(self.G2)), 1)
        assert_equal(len(nx.attracting_components(self.G3)), 2)

    def test_is_attracting_component(self):
        assert_false(nx.is_attracting_component(self.G1))
        assert_false(nx.is_attracting_component(self.G2))
        assert_false(nx.is_attracting_component(self.G3))
        g2 = self.G3.subgraph([1, 2])
        assert_true(nx.is_attracting_component(g2))

    def test_attracting_component_subgraphs(self):
        for subgraph in nx.attracting_component_subgraphs(self.G1):
            assert_equal(len(subgraph), 1)

        # Graph/node/edge attributes must be copied into the subgraphs.
        self.G2.add_edge(1, 2, eattr='red')
        self.G2.node[2]['nattr'] = 'blue'
        self.G2.graph['gattr'] = 'green'
        subgraphs = nx.attracting_component_subgraphs(self.G2)
        assert_equal(len(subgraphs), 1)
        SG2 = subgraphs[0]
        assert_true(1 in SG2)
        assert_true(2 in SG2)
        assert_equal(SG2[1][2]['eattr'], 'red')
        assert_equal(SG2.node[2]['nattr'], 'blue')
        assert_equal(SG2.graph['gattr'], 'green')
        # Mutating the subgraph must not leak back into the original.
        SG2.add_edge(1, 2, eattr='blue')
        assert_equal(SG2[1][2]['eattr'], 'blue')
        assert_equal(self.G2[1][2]['eattr'], 'red')
64 |
65 |
--------------------------------------------------------------------------------
/networkx/algorithms/flow/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.flow.maxflow import *
2 | from networkx.algorithms.flow.mincost import *
3 |
4 |
--------------------------------------------------------------------------------
/networkx/algorithms/flow/tests/test_maxflow_large_graph.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Max flow algorithm test suite on large graphs.
3 |
4 | Run with nose: nosetests -v test_max_flow.py
5 | """
6 |
7 | __author__ = """Loïc Séguin-C. """
8 | # Copyright (C) 2010 Loïc Séguin-C.
9 | # All rights reserved.
10 | # BSD license.
11 |
12 |
13 | import networkx as nx
14 | from nose.tools import *
15 |
def gen_pyramid(N):
    """Build a pyramid digraph that admits a flow of value 1.

    That flow saturates every arc except the arcs incident to the
    sink 't', which are left without a capacity (i.e. infinite).
    """
    G = nx.DiGraph()

    for row in range(N - 1):
        cap = 1. / (row + 2)
        for col in range(row + 1):
            # Left child, then right child; the capacity updates must
            # stay interleaved with the edge additions in this order.
            G.add_edge((row, col), (row + 1, col), capacity=cap)
            cap = 1. / (row + 1) - cap
            G.add_edge((row, col), (row + 1, col + 1), capacity=cap)
            cap = 1. / (row + 2) - cap

    # Bottom row drains into the sink with unbounded capacity.
    for col in range(N):
        G.add_edge((N - 1, col), 't')

    return G
36 |
37 |
class TestMaxflowLargeGraph:
    def test_complete_graph(self):
        # K50 with uniform capacity 5: max flow between any pair
        # is 5 * (N - 1).
        N = 50
        G = nx.complete_graph(N)
        for (u, v) in G.edges():
            G[u][v]['capacity'] = 5
        flow_value = nx.ford_fulkerson(G, 1, 2)[0]
        assert_equal(flow_value, 5 * (N - 1))

    def test_pyramid(self):
        N = 10
        # N = 100 # this gives a graph with 5051 nodes
        G = gen_pyramid(N)
        assert_almost_equal(nx.ford_fulkerson(G, (0, 0), 't')[0], 1.)
51 |
52 |
--------------------------------------------------------------------------------
/networkx/algorithms/hierarchy.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Flow Hierarchy.
4 | """
5 | # Copyright (C) 2004-2011 by
6 | # Aric Hagberg
7 | # Dan Schult
8 | # Pieter Swart
9 | # All rights reserved.
10 | # BSD license.
11 | import networkx as nx
12 | __authors__ = "\n".join(['Ben Edwards (bedwards@cs.unm.edu)'])
13 | __all__ = ['flow_hierarchy']
14 |
def flow_hierarchy(G, weight=None):
    """Returns the flow hierarchy of a directed network.

    Flow hierarchy is defined as the fraction of edges not participating
    in cycles in a directed graph [1]_.

    Parameters
    ----------
    G : DiGraph or MultiDiGraph
       A directed graph

    weight : key,optional (default=None)
       Edge attribute to use as weight. If None the weight defaults to 1.

    Returns
    -------
    h : float
       Flow hierarchy value

    Raises
    ------
    NetworkXError
       If `G` is not directed.

    Notes
    -----
    The algorithm described in [1]_ computes the flow hierarchy through
    exponentiation of the adjacency matrix.  This function implements an
    alternative approach that finds strongly connected components.
    An edge is in a cycle if and only if it is in a strongly connected
    component, which can be found in `O(m)` time using Tarjan's algorithm.

    References
    ----------
    .. [1] Luo, J.; Magee, C.L. (2011),
       Detecting evolving patterns of self-organizing networks by flow
       hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
       DOI: 10.1002/cplx.20368
       http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
    """
    if not G.is_directed():
        # Fixed spelling of the function name in the error message.
        raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
    # An edge lies on a cycle iff both endpoints are in the same strongly
    # connected component; subtract the weighted fraction of such edges.
    scc = nx.strongly_connected_components(G)
    return 1. - sum(G.subgraph(c).size(weight) for c in scc) / float(G.size(weight))
54 |
--------------------------------------------------------------------------------
/networkx/algorithms/isolate.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | Functions for identifying isolate (degree zero) nodes.
4 | """
5 | # Copyright (C) 2004-2011 by
6 | # Aric Hagberg
7 | # Dan Schult
8 | # Pieter Swart
9 | # All rights reserved.
10 | # BSD license.
11 | import networkx as nx
12 | __author__ = """\n""".join(['Drew Conway ',
13 | 'Aric Hagberg '])
14 | __all__=['is_isolate','isolates']
15 |
def is_isolate(G,n):
    """Determine whether node n is an isolate (degree zero).

    Parameters
    ----------
    G : graph
        A networkx graph
    n : node
        A node in G

    Returns
    -------
    isolate : bool
       True if n has no neighbors, False otherwise.

    Examples
    --------
    >>> G=nx.Graph()
    >>> G.add_edge(1,2)
    >>> G.add_node(3)
    >>> nx.is_isolate(G,2)
    False
    >>> nx.is_isolate(G,3)
    True
    """
    # A node is an isolate exactly when its degree is zero.
    return not G.degree(n)
42 |
def isolates(G):
    """Return list of isolates in the graph.

    Isolates are nodes with no neighbors (degree zero).

    Parameters
    ----------
    G : graph
        A networkx graph

    Returns
    -------
    isolates : list
       List of isolate nodes.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edge(1,2)
    >>> G.add_node(3)
    >>> nx.isolates(G)
    [3]

    To remove all isolates in the graph use
    >>> G.remove_nodes_from(nx.isolates(G))
    >>> G.nodes()
    [1, 2]

    For digraphs isolates have zero in-degree and zero out-degree
    >>> G = nx.DiGraph([(0,1),(1,2)])
    >>> G.add_node(3)
    >>> nx.isolates(G)
    [3]
    """
    # Collect every node whose (total) degree is zero, preserving
    # the iteration order of G.degree_iter().
    isolated = []
    for node, degree in G.degree_iter():
        if degree == 0:
            isolated.append(node)
    return isolated
78 |
--------------------------------------------------------------------------------
/networkx/algorithms/isomorphism/__init__.py:
--------------------------------------------------------------------------------
1 | from networkx.algorithms.isomorphism.isomorph import *
2 | from networkx.algorithms.isomorphism.vf2userfunc import *
3 | from networkx.algorithms.isomorphism.matchhelpers import *
4 |
5 |
--------------------------------------------------------------------------------
/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99:
--------------------------------------------------------------------------------
1 | P % 6 : C G , - . 9 : G % 0 1 4 ! $ + 0 2 : B J
! * 5 6 7 <