├── .github
│   └── workflows
│       ├── deploy_docs.yml
│       ├── draft-pdf.yml
│       └── tests.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── README.md
├── doc
│   ├── Makefile
│   ├── _static
│   │   ├── .keep
│   │   └── socialnet.png
│   ├── conf.py
│   ├── index-example.png
│   ├── index.rst
│   ├── overview.rst
│   ├── reference.rst
│   ├── requirements.txt
│   ├── robots.txt
│   ├── tutorials.rst
│   └── tutorials
│       ├── cites-trades_commercial_live-meat_2010-2020.csv
│       ├── commercial_live-meat_2010.csv
│       ├── commercial_live-meat_2020.csv
│       ├── installing.rst
│       ├── isomorphisms.ipynb
│       ├── networktypes.ipynb
│       ├── nx.ipynb
│       ├── preprocessing.py
│       ├── realworld.ipynb
│       └── visualizing.ipynb
├── pymnet
│   ├── __init__.py
│   ├── benchmarks
│   │   └── bm1.py
│   ├── cc.py
│   ├── diagnostics.py
│   ├── graphlets
│   │   ├── __init__.py
│   │   ├── graphlet_measures.py
│   │   ├── graphlets.py
│   │   └── independent_equations.py
│   ├── isomorphisms
│   │   ├── __init__.py
│   │   ├── bliss_bind_backend.py
│   │   ├── isomcore.py
│   │   └── nxbackend.py
│   ├── models.py
│   ├── net.py
│   ├── netio.py
│   ├── nxwrap.py
│   ├── sampling
│   │   ├── __init__.py
│   │   ├── creators.py
│   │   ├── dumb.py
│   │   ├── esu.py
│   │   └── reqs.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── __main__.py
│   │   ├── cc_test.py
│   │   ├── diagnostics_test.py
│   │   ├── graphlets_test.py
│   │   ├── io_test.py
│   │   ├── isomorphisms_test.py
│   │   ├── models_test.py
│   │   ├── net_test.py
│   │   ├── nxwrap_test.py
│   │   ├── sampling_test.py
│   │   ├── transforms_test.py
│   │   ├── tutorial_test.py
│   │   └── visuals_test.py
│   ├── transforms.py
│   └── visuals
│       ├── __init__.py
│       ├── drawassigners.py
│       ├── drawbackends
│       │   ├── __init__.py
│       │   ├── mpl.py
│       │   ├── threejs.py
│       │   └── threejs_template.html
│       ├── drawcore.py
│       ├── drawnet.py
│       ├── layouts.py
│       └── webplots.py
├── pyproject.toml
└── socialnet.png
/.github/workflows/deploy_docs.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 | 
3 | name: Deploy docs
4 | 
5 | # Controls when the workflow will run
6 | on:
7 |   # push:
8 |   #   branches: [ "master" ]
9 |   #
10 |   workflow_dispatch:
11 | 
12 | jobs:
13 |   build:
14 |     runs-on: ubuntu-latest
15 | 
16 |     steps:
17 |       - uses: actions/checkout@v3
18 | 
19 |       - uses: actions/setup-python@v4
20 |         with:
21 |           python-version: '3.11'
22 | 
23 |       - name: Update pip
24 |         run: python -m pip install -U pip
25 | 
26 |       - name: Get docs requirements
27 |         run: python -m pip install -r doc/requirements.txt
28 | 
29 |       - name: Install the package
30 |         run: python -m pip install .
31 | 
32 |       - name: Build docs
33 |         run: sphinx-build doc/ doc/_build/html
34 | 
35 |       - uses: actions/upload-pages-artifact@v2
36 |         with:
37 |           path: doc/_build/html
38 | 
39 |   deploy:
40 |     runs-on: ubuntu-latest
41 |     needs: build
42 | 
43 |     permissions:
44 |       pages: write
45 |       id-token: write
46 | 
47 |     environment:
48 |       name: github-pages
49 |       url: ${{ steps.deployment.outputs.page_url }}
50 | 
51 |     steps:
52 |       - uses: actions/deploy-pages@v2
53 | 
--------------------------------------------------------------------------------
/.github/workflows/draft-pdf.yml:
--------------------------------------------------------------------------------
1 | on:
2 |   push:
3 |     branches:
4 |       - publication
5 | 
6 | jobs:
7 |   paper:
8 |     runs-on: ubuntu-latest
9 |     name: Paper Draft
10 |     steps:
11 |       - name: Checkout
12 |         uses: actions/checkout@v2
13 |       - name: Build draft PDF
14 |         uses: openjournals/openjournals-draft-action@master
15 |         with:
16 |           journal: joss
17 |           paper-path: paper.md
18 |       - name: Upload
19 |         uses: actions/upload-artifact@v1
20 |         with:
21 |           name: paper
22 |           path: paper.pdf
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Run tests
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 |   pull_request:
8 | 
9 | jobs:
10 |   build:
11 |     name: run tests on ${{ matrix.os }} + Python ${{ matrix.python }}
12 |     runs-on: ${{ matrix.os }}
13 | 
14 |     strategy:
15 |       fail-fast: false
16 |       matrix:
17 |         os: [ubuntu-latest, windows-latest, macos-latest]
18 |         python: ['3.8', '3.9', '3.10', '3.11', '3.12']
19 | 
20 |     env:
21 |       OS: ${{ matrix.os }}
22 |       PYTHON: ${{ matrix.python }}
23 | 
24 |     steps:
25 |       - uses: actions/checkout@v4
26 | 
27 |       - uses: actions/setup-python@v5
28 |         with:
29 |           python-version: ${{ matrix.python }}
30 | 
31 |       - name: System information
32 |         run: python -c "import sys, platform; print(sys.platform, platform.machine())"
33 | 
34 |       - name: Update pip
35 |         run: python -m pip install -U pip
36 | 
37 |       - name: Install the package
38 |         run: python -m pip install .[test]
39 | 
40 |       - name: Run tests
41 |         run: coverage run -m pymnet.tests
42 | 
43 |       - name: Generate coverage report
44 |         run: coverage xml -o coverage.xml
45 | 
46 |       - name: Upload coverage reports to Codecov
47 |         uses: codecov/codecov-action@v4.0.1
48 |         with:
49 |           token: ${{ secrets.CODECOV_TOKEN }}
50 |           env_vars: OS,PYTHON
51 |           slug: mnets/pymnet
52 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | __pycache__
3 | *~
4 | .DS_Store
5 | *.egg-info
6 | build/*
7 | dist/*
8 |
9 | doc/_build
10 | doc/autogen
11 |
12 | pymnet/tests/figs/
13 |
14 | wheelhouse/
15 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | http://www.mkivela.com/contact/.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Developing `pymnet`
2 |
3 | This package is a pure Python package: the main modules live in the
4 | `pymnet/` directory and the documentation in `doc/`. Most of the method
5 | documentation is, however, pulled directly from the docstrings in the code.
6 | The tests are located in `pymnet/tests/`.
7 |
8 | ## Getting started
9 |
10 | The best way to begin contributing is by creating a fork of this project on GitHub,
11 | making your modifications, and sending in a pull request.
12 | Refer to the [GitHub
13 | documentation][github-how-to] for the basic workflow.
14 |
15 | Currently, the code is hosted on GitHub at https://github.com/mnets/pymnet.
16 |
17 | Before committing a lot of time to big changes, it is a good idea to
18 | discuss them with the developers first by creating an issue.
19 |
20 | [github-how-to]: https://docs.github.com/en/get-started/exploring-projects-on-github/contributing-to-a-project
21 |
22 | ## Reporting bugs or suggesting improvements
23 | 
24 | Use [GitHub issues][github-issues] for reporting bugs or suggesting
25 | improvements. It would help everyone involved if you could include a minimal
26 | example to demonstrate the problem in isolation.
27 |
28 | [github-issues]: https://docs.github.com/en/issues/tracking-your-work-with-issues/creating-an-issue
29 |
30 | ## Installing from source
31 |
32 | With the repository cloned in the current directory, you can use `pip` to
33 | install from the current directory:
34 |
35 | ```console
36 | $ pip install --force-reinstall .
37 | ```
38 |
39 | The flag `--force-reinstall` ensures that a new install from the current
40 | directory will happen even if you already have `pymnet` installed from another
41 | source.
42 |
43 | ## Running tests
44 |
45 | With the repository cloned in the current directory, you can run the
46 | `pymnet.tests` module to run the tests:
47 |
48 | ```console
49 | $ python -m pymnet.tests
50 | ```
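
To run a single test module instead of the whole suite, you can also use the
standard library's `unittest` runner directly (a sketch, assuming the modules in
`pymnet/tests/` follow the usual `unittest` conventions):

```console
$ python -m unittest pymnet.tests.net_test
```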
51 |
52 | ## Writing tests
53 |
54 | Please ensure that your code is covered by unit tests.
55 | We use `codecov` to monitor coverage, and the `codecov/project` check integrated with the GitHub repository flags commits that decrease coverage.
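
As a rough sketch of what such a test can look like (the test class and method
names below are hypothetical; the network API follows the README example):

```python
import unittest

from pymnet import MultiplexNetwork


class EdgeRoundtripTest(unittest.TestCase):
    def test_edge_roundtrip(self):
        # Build a tiny multiplex network and check that a set edge can be read back.
        net = MultiplexNetwork(couplings="categorical")
        net["Alice", "Bob", "Friends"] = 1
        self.assertEqual(net["Alice", "Bob", "Friends"], 1)


if __name__ == "__main__":
    unittest.main()
```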
56 |
57 | ## Formatting code
58 |
59 | Please use `black` to format your code and `isort` to sort imports.
60 | Both can be installed via `pip install ".[dev]"`.
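
For example, one way to run both tools over the package from the repository root
(the exact invocation is a suggestion, not a project requirement):

```console
$ black pymnet/
$ isort pymnet/
```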
61 |
62 | ## Writing documentation
63 |
64 | Please follow the `numpydoc` docstring standard explained [here](https://numpydoc.readthedocs.io/en/latest/format.html).
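
As a minimal sketch of the expected docstring shape (the function and its
parameters below are purely hypothetical):

```python
def example_metric(net, normalized=True):
    """Compute a toy summary statistic of a network.

    Parameters
    ----------
    net : MultilayerNetwork
        The network to analyze.
    normalized : bool, optional
        Whether to normalize the result. Default is True.

    Returns
    -------
    float
        The computed statistic.
    """
```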
65 |
66 | ## Building documentation
67 |
68 | With the repository cloned in the current directory, you can build the
69 | documentation using Sphinx. First, make sure all requirements for the docs
70 | are installed:
71 |
72 | ```console
73 | $ python -m pip install -r doc/requirements.txt
74 | ```
75 |
76 | You can build the docs using this command:
77 |
78 | ```console
79 | $ sphinx-build doc/ doc/_build/html
80 | ```
81 |
82 | You can start a local HTTP server using Python and click on the printed URL to
83 | open the built documentation in your browser:
84 | ```console
85 | $ python -m http.server -d doc/_build/html/
86 | Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
87 | ```
88 |
89 | ## Release
90 |
91 | So far, we are not using GitHub releases. Releasing a new version is a three-step process: bumping the version number in `pyproject.toml`, building a wheel
92 | file, and uploading it to PyPI.
93 |
94 | You can build a new wheel using `pip`:
95 |
96 | ```console
97 | $ pip wheel . --wheel-dir wheelhouse
98 | ```
99 |
100 | This creates a wheel file for `pymnet` as well as all dependencies in the
101 | `wheelhouse/` directory. You can use [Twine][twine] to upload the pymnet wheel
102 | to the Python Package Index:
103 |
104 | ```console
105 | $ python -m twine upload wheelhouse/pymnet-$VERSION-py3-none-any.whl --verbose
106 | ```
107 |
108 | Remember to replace `$VERSION` with the version in the generated file!
109 |
110 | Uploading a new wheel under the `pymnet` project is only possible for the
111 | [package maintainers on PyPI][maintainers].
112 |
113 | [twine]: https://twine.readthedocs.io/en/latest/
114 | [maintainers]: https://pypi.org/project/pymnet/
115 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pymnet: A Python Library for Multilayer Networks
2 |
3 | [](https://codecov.io/gh/mnets/pymnet)
4 | [](https://doi.org/10.21105/joss.06930)
5 |
6 |
7 | `pymnet` is a Python package for creating, analyzing, and visualizing multilayer networks as formalized by [Kivelä et al. (2014)](https://doi.org/10.1093/comnet/cnu016).
8 | It is designed for network scientists, offering an easy-to-use yet flexible interface that features, inter alia, representations of a very general class of multilayer networks, structural metrics of multilayer networks, and random multilayer-network models.
9 |
10 | To learn more about the concepts and design principles underlying `pymnet`, check out [this overview](https://mnets.github.io/pymnet/overview.html).
11 |
12 | ## Features
13 |
14 | * Written in pure Python
15 | * Full support for general [multilayer networks](http://comnet.oxfordjournals.org/content/2/3/203)
16 | * Efficient handling of multiplex networks (with automatically generated lazy evaluation of coupling edges)
17 | * Extensive functionality: analysis, transformations, reading and writing networks, network models, etc.
18 | * Flexible multilayer-network visualization (using Matplotlib and D3)
19 | * Integration with [NetworkX](https://networkx.org/) for monoplex network analysis
20 |
21 | ## Working with pymnet
22 |
23 | ### Installation
24 | We recommend executing the following command in a virtual environment:
25 | ```console
26 | $ python -m pip install pymnet
27 | ```
28 |
29 | ### Usage
30 | To get started with `pymnet`, check out our [tutorials](https://mnets.github.io/pymnet/tutorials), and when in doubt, consult the [API reference](https://mnets.github.io/pymnet/reference.html) contained in our [documentation](https://mnets.github.io/pymnet/).
31 |
32 | As an introductory example, with the following code, we can create a small multiplex network capturing different types of social relations between individuals and visualize the result:
33 |
34 | ```python
35 | import pymnet
36 |
37 | net_social = pymnet.MultiplexNetwork(couplings="categorical", fullyInterconnected=False)
38 | net_social["Alice", "Bob", "Friends"] = 1
39 | net_social["Alice", "Carol", "Friends"] = 1
40 | net_social["Bob", "Carol", "Friends"] = 1
41 | net_social["Alice", "Bob", "Married"] = 1
42 |
43 | fig_social = pymnet.draw(net_social, layout="circular", layerPadding=0.2, defaultLayerLabelLoc=(0.9,0.9))
44 | ```
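
`pymnet.draw` returns a Matplotlib figure, so (assuming a standard Matplotlib
setup) the result can be saved like any other figure, for example:

```python
fig_social.savefig("socialnet.png", dpi=300, bbox_inches="tight")
```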
45 |
46 |
47 |
48 |
49 |
50 |
51 | ## Contributing
52 |
53 | We welcome contributions!
54 | Before you get started, please check out our [contribution guide](CONTRIBUTING.md).
55 |
56 | ## Asking Questions
57 |
58 | * For bugs, feature requests, etc., please use [GitHub issues][github-issues].
59 | * Otherwise, feel free to contact the main developer: [Mikko Kivelä](http://www.mkivela.com/)
60 |
61 | [github-issues]: https://github.com/mnets/pymnet/issues
62 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
23 |
24 | help:
25 | @echo "Please use \`make <target>' where <target> is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
36 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
38 | @echo " text to make text files"
39 | @echo " man to make manual pages"
40 | @echo " texinfo to make Texinfo files"
41 | @echo " info to make Texinfo files and run them through makeinfo"
42 | @echo " gettext to make PO message catalogs"
43 | @echo " changes to make an overview of all changed/added/deprecated items"
44 | @echo " xml to make Docutils-native XML files"
45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
46 | @echo " linkcheck to check all external links for integrity"
47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
48 |
49 | clean:
50 | rm -rf $(BUILDDIR)/*
51 |
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | dirhtml:
58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
59 | @echo
60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
61 |
62 | singlehtml:
63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
64 | @echo
65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
66 |
67 | pickle:
68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
69 | @echo
70 | @echo "Build finished; now you can process the pickle files."
71 |
72 | json:
73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
74 | @echo
75 | @echo "Build finished; now you can process the JSON files."
76 |
77 | htmlhelp:
78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
79 | @echo
80 | @echo "Build finished; now you can run HTML Help Workshop with the" \
81 | ".hhp project file in $(BUILDDIR)/htmlhelp."
82 |
83 | qthelp:
84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
85 | @echo
86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MultilayerNetworksLibrary.qhcp"
89 | @echo "To view the help file:"
90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MultilayerNetworksLibrary.qhc"
91 |
92 | devhelp:
93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
94 | @echo
95 | @echo "Build finished."
96 | @echo "To view the help file:"
97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/MultilayerNetworksLibrary"
98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MultilayerNetworksLibrary"
99 | @echo "# devhelp"
100 |
101 | epub:
102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
103 | @echo
104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
105 |
106 | latex:
107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
108 | @echo
109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
111 | "(use \`make latexpdf' here to do that automatically)."
112 |
113 | latexpdf:
114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
115 | @echo "Running LaTeX files through pdflatex..."
116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
118 |
119 | latexpdfja:
120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
121 | @echo "Running LaTeX files through platex and dvipdfmx..."
122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
124 |
125 | text:
126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
127 | @echo
128 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
129 |
130 | man:
131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
132 | @echo
133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
134 |
135 | texinfo:
136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
137 | @echo
138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
139 | @echo "Run \`make' in that directory to run these through makeinfo" \
140 | "(use \`make info' here to do that automatically)."
141 |
142 | info:
143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
144 | @echo "Running Texinfo files through makeinfo..."
145 | make -C $(BUILDDIR)/texinfo info
146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
147 |
148 | gettext:
149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
150 | @echo
151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
152 |
153 | changes:
154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
155 | @echo
156 | @echo "The overview file is in $(BUILDDIR)/changes."
157 |
158 | linkcheck:
159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
160 | @echo
161 | @echo "Link check complete; look for any errors in the above output " \
162 | "or in $(BUILDDIR)/linkcheck/output.txt."
163 |
164 | doctest:
165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
166 | @echo "Testing of doctests in the sources finished, look at the " \
167 | "results in $(BUILDDIR)/doctest/output.txt."
168 |
169 | xml:
170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
171 | @echo
172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
173 |
174 | pseudoxml:
175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
176 | @echo
177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
178 |
--------------------------------------------------------------------------------
/doc/_static/.keep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mnets/pymnet/0a54d5dc8b0d6281c940043a5dcaed1df59a23e2/doc/_static/.keep
--------------------------------------------------------------------------------
/doc/_static/socialnet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mnets/pymnet/0a54d5dc8b0d6281c940043a5dcaed1df59a23e2/doc/_static/socialnet.png
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Multilayer Networks Library documentation build configuration file, created
4 | # by sphinx-quickstart on Thu Oct 24 15:35:16 2013.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # import sys
16 | # import os
17 |
18 | # If extensions (or modules to document with autodoc) are in another directory,
19 | # add these directories to sys.path here. If the directory is relative to the
20 | # documentation root, use os.path.abspath to make it absolute, like shown here.
21 | # sys.path.insert(0, os.path.abspath('..'))
22 |
23 | # -- General configuration ------------------------------------------------
24 |
25 | # If your documentation needs a minimal Sphinx version, state it here.
26 | # needs_sphinx = '1.0'
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
30 | # ones.
31 | extensions = [
32 |     "sphinx.ext.autodoc",
33 |     "sphinx.ext.autosummary",
34 |     "sphinx_copybutton",
35 |     "numpydoc",
36 |     "myst_nb",
37 | ]
38 |
39 | nb_execution_mode = "off"
40 |
41 | # Add any paths that contain templates here, relative to this directory.
42 | templates_path = ["_templates"]
43 |
44 | # The suffix of source filenames.
45 | source_suffix = ".rst"
46 |
47 | # The encoding of source files.
48 | # source_encoding = 'utf-8-sig'
49 |
50 | # The master toctree document.
51 | master_doc = "index"
52 |
53 | # General information about the project.
54 | project = "pymnet: Multilayer Networks Library"
55 | copyright = "2013, Mikko Kivela"
56 |
57 | import toml
58 |
59 | project_data = toml.load("../pyproject.toml")
60 |
61 | # The version info for the project you're documenting, acts as replacement for
62 | # |version| and |release|, also used in various other places throughout the
63 | # built documents.
64 | #
65 | # The short X.Y version.
66 | version = project_data["project"]["version"]
67 | # The full version, including alpha/beta/rc tags.
68 | release = project_data["project"]["version"]
69 |
70 | # The language for content autogenerated by Sphinx. Refer to documentation
71 | # for a list of supported languages.
72 | # language = None
73 |
74 | # There are two options for replacing |today|: either, you set today to some
75 | # non-false value, then it is used:
76 | # today = ''
77 | # Else, today_fmt is used as the format for a strftime call.
78 | # today_fmt = '%B %d, %Y'
79 |
80 | # List of patterns, relative to source directory, that match files and
81 | # directories to ignore when looking for source files.
82 | exclude_patterns = ["_build"]
83 |
84 | # The reST default role (used for this markup: `text`) to use for all
85 | # documents.
86 | # default_role = None
87 |
88 | # If true, '()' will be appended to :func: etc. cross-reference text.
89 | # add_function_parentheses = True
90 |
91 | # If true, the current module name will be prepended to all description
92 | # unit titles (such as .. function::).
93 | # add_module_names = True
94 |
95 | # If true, sectionauthor and moduleauthor directives will be shown in the
96 | # output. They are ignored by default.
97 | # show_authors = False
98 |
99 | # The name of the Pygments (syntax highlighting) style to use.
100 | pygments_style = "sphinx"
101 | pygments_dark_style = "monokai"
102 |
103 |
104 | # A list of ignored prefixes for module index sorting.
105 | modindex_common_prefix = ["pymnet."]
106 |
107 | # If true, keep warnings as "system message" paragraphs in the built documents.
108 | # keep_warnings = False
109 |
110 |
111 | # -- Options for HTML output ----------------------------------------------
112 |
113 | # The theme to use for HTML and HTML Help pages. See the documentation for
114 | # a list of builtin themes.
115 | html_theme = "furo"
116 |
117 | # Theme options are theme-specific and customize the look and feel of a theme
118 | # further. For a list of options available for each theme, see the
119 | # documentation.
120 | html_theme_options = {
121 |     "source_repository": "https://github.com/mnets/pymnet",
122 |     "source_branch": "master",
123 |     "source_directory": "doc/",
124 | }
125 |
126 |
127 | # The name for this set of Sphinx documents. If None, it defaults to
128 | # " v documentation".
129 | html_title = "pymnet: Multilayer Networks Library"
130 |
131 | # A shorter title for the navigation bar. Default is the same as html_title.
132 | # html_short_title = None
133 |
134 | html_baseurl = "https://mnets.github.io/pymnet/"
135 |
136 | # The name of an image file (relative to this directory) to place at the top
137 | # of the sidebar.
138 | # html_logo = None
139 |
140 | # The name of an image file (within the static path) to use as favicon of the
141 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
142 | # pixels large.
143 | # html_favicon = None
144 |
145 | # Add any paths that contain custom static files (such as style sheets) here,
146 | # relative to this directory. They are copied after the builtin static files,
147 | # so a file named "default.css" will overwrite the builtin "default.css".
148 | html_static_path = ["_static"]
149 |
150 | # Add any extra paths that contain custom files (such as robots.txt or
151 | # .htaccess) here, relative to this directory. These files are copied
152 | # directly to the root of the documentation.
153 | html_extra_path = ["robots.txt"]
154 |
155 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
156 | # using the given strftime format.
157 | # html_last_updated_fmt = '%b %d, %Y'
158 |
159 | # If true, SmartyPants will be used to convert quotes and dashes to
160 | # typographically correct entities.
161 | # html_use_smartypants = True
162 |
163 | # Custom sidebar templates, maps document names to template names.
164 | # html_sidebars = {}
165 |
166 | # Additional templates that should be rendered to pages, maps page names to
167 | # template names.
168 | # html_additional_pages = {}
169 |
170 | # If false, no module index is generated.
171 | html_domain_indices = False
172 |
173 | # If false, no index is generated.
174 | html_use_index = False
175 |
176 | # If true, the index is split into individual pages for each letter.
177 | # html_split_index = False
178 |
179 | # If true, links to the reST sources are added to the pages.
180 | # html_show_sourcelink = True
181 |
182 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
183 | html_show_sphinx = False
184 |
185 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
186 | html_show_copyright = False
187 |
188 | # If true, an OpenSearch description file will be output, and all pages will
190 | # contain a <link> tag referring to it. The value of this option must be the
190 | # base URL from which the finished HTML is served.
191 | # html_use_opensearch = ''
192 |
193 | # This is the file name suffix for HTML files (e.g. ".xhtml").
194 | # html_file_suffix = None
195 |
196 | # Output file base name for HTML help builder.
197 | htmlhelp_basename = "MultilayerNetworksLibrarydoc"
198 |
199 |
200 | # -- Options for LaTeX output ---------------------------------------------
201 |
202 | latex_elements = {
203 |     # The paper size ('letterpaper' or 'a4paper').
204 |     # 'papersize': 'letterpaper',
205 |     # The font size ('10pt', '11pt' or '12pt').
206 |     # 'pointsize': '10pt',
207 |     # Additional stuff for the LaTeX preamble.
208 |     # 'preamble': '',
209 | }
210 |
211 | # Grouping the document tree into LaTeX files. List of tuples
212 | # (source start file, target name, title,
213 | # author, documentclass [howto, manual, or own class]).
214 | latex_documents = [
215 |     (
216 |         "index",
217 |         "pymnet.tex",
218 |         "Multilayer Networks Library Documentation",
219 |         "Mikko Kivela",
220 |         "manual",
221 |     ),
222 | ]
223 |
224 | # The name of an image file (relative to this directory) to place at the top of
225 | # the title page.
226 | # latex_logo = None
227 |
228 | # For "manual" documents, if this is true, then toplevel headings are parts,
229 | # not chapters.
230 | # latex_use_parts = False
231 |
232 | # If true, show page references after internal links.
233 | # latex_show_pagerefs = False
234 |
235 | # If true, show URL addresses after external links.
236 | # latex_show_urls = False
237 |
238 | # Documents to append as an appendix to all manuals.
239 | # latex_appendices = []
240 |
241 | # If false, no module index is generated.
242 | # latex_domain_indices = True
243 |
244 |
245 | # -- Options for manual page output ---------------------------------------
246 |
247 | # One entry per manual page. List of tuples
248 | # (source start file, name, description, authors, manual section).
249 | man_pages = [
250 |     (
251 |         "index",
252 |         "multilayernetworkslibrary",
253 |         "Multilayer Networks Library Documentation",
254 |         ["Mikko Kivela"],
255 |         1,
256 |     )
257 | ]
258 |
259 | # If true, show URL addresses after external links.
260 | # man_show_urls = False
261 |
262 |
263 | # -- Options for Texinfo output -------------------------------------------
264 |
265 | # Grouping the document tree into Texinfo files. List of tuples
266 | # (source start file, target name, title, author,
267 | # dir menu entry, description, category)
268 | texinfo_documents = [
269 |     (
270 |         "index",
271 |         "MultilayerNetworksLibrary",
272 |         "Multilayer Networks Library Documentation",
273 |         "Mikko Kivela",
274 |         "MultilayerNetworksLibrary",
275 |         "Pymnet is a free library for analysing multilayer networks",
276 |         "Miscellaneous",
277 |     ),
278 | ]
279 |
280 | # Documents to append as an appendix to all manuals.
281 | # texinfo_appendices = []
282 |
283 | # If false, no module index is generated.
284 | # texinfo_domain_indices = True
285 |
286 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
287 | # texinfo_show_urls = 'footnote'
288 |
289 | # If true, do not generate a @detailmenu in the "Top" node's menu.
290 | # texinfo_no_detailmenu = False
291 |
292 | autosummary_generate = True
293 |
294 | copybutton_exclude = ".linenos, .gp, .go"
295 |
--------------------------------------------------------------------------------
/doc/index-example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mnets/pymnet/0a54d5dc8b0d6281c940043a5dcaed1df59a23e2/doc/index-example.png
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | ``pymnet``: A Multilayer-Networks Library for Python
2 | ====================================================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 | :hidden:
7 |
8 | overview
9 | tutorials
10 | reference
11 |
12 | .. sidebar:: Multilayer networks
13 |
14 | .. figure:: index-example.png
15 | :height: 250 pt
16 | :align: left
17 |
18 | The library is based on the general definition of **multilayer networks** presented in a `review article <https://doi.org/10.1093/comnet/cnu016>`_. Multilayer networks can be used to represent various types of network generalizations found in the literature. For example, multiplex networks, temporal networks, networks of networks, and interdependent networks are all types of multilayer networks. The library also supports more general types of networks with multiple *aspects*, so that a network can, for example, have both a temporal and a multiplex aspect at the same time.
19 |
20 | The visualization on the left is produced with the library. See the visualization tutorial for instructions on how to visualize your own network data with the library!
21 |
22 |
23 | ``pymnet`` is a free library for analyzing multilayer networks.
24 | The easiest way to get ``pymnet`` is via pip:
25 |
26 | .. code-block:: bash
27 |
28 | $ pip install pymnet
29 |
30 |
31 | **Main features include:**
32 |
33 | * Pure Python implementation
34 |
35 | * Can handle general multilayer networks
36 |
37 | * Data structures for multilayer networks and multiplex networks
38 |
39 | * Scalable implementation for sparse networks: memory usage scales linearly with the number of edges and number of nodes
40 |
41 | * Rule-based generation and lazy evaluation of coupling edges
42 |
43 | * Various network analysis methods, transformations, reading and writing networks, network models, etc.
44 |
45 | * Visualization (using matplotlib or D3 as a backend)
46 |
47 | * Integration with NetworkX for monoplex network analysis
48 |
49 |
50 |
51 | **Documentation:**
52 |
53 |
54 | * :ref:`overview`: *Overview of the design of the library and benchmarks*
55 | * :ref:`reference`: *Reference for all functions and classes in the library*
56 | * :ref:`tutorials`: *Easy way of getting started with various topics*
57 |
58 |
59 |
--------------------------------------------------------------------------------
/doc/overview.rst:
--------------------------------------------------------------------------------
1 | .. _overview:
2 |
3 | Overview
4 | ========
5 |
6 | Multilayer networks as defined by `Kivelä et al. (2014) <https://doi.org/10.1093/comnet/cnu016>`_ generalize graphs to capture the rich network data often associated with complex systems, allowing us to study a broad range of phenomena with a common representation and the same multilayer tools and methods.
7 | Formally, a *multilayer network* :math:`M` is defined as a quadruple :math:`M = (V_M, E_M, V,` **L**:math:`)`, where the sequence **L** :math:`= (L_a)_{a=1}^d` defines sets :math:`L_a` of *elementary layers*, the set :math:`V` defines the *nodes* of the network, the *node-layers* are :math:`V_M ⊆ V × L_1 × ... × L_d`, and the *edges* :math:`E_M ⊆ V_M × V_M` are defined between node-layers.
8 | Put simply, a node-layer is an association of a node :math:`v ∈ V` with a layer :math:`∈ L_1 × ... × L_d` of dimensionality :math:`d`, nodes can exist on an arbitrary number of layers, and edges can connect node-layers within layers and across arbitrary pairs of layers, which can differ in an arbitrary number of dimensions. The dimensions :math:`1, 2, ..., d` are called the *aspects* of the network (e.g., a two-aspect transport network could have *time* as its first aspect and *transport mode* as its second aspect).
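
As a small illustration of how this maps onto code, the sketch below builds a one-aspect multilayer network; it assumes the edge-indexing convention :code:`net[u, v, l_u, l_v]`, which connects node-layer :math:`(u, l_u)` to node-layer :math:`(v, l_v)`:

.. code-block:: python

    from pymnet import MultilayerNetwork

    # One aspect, e.g., a "time" dimension with two elementary layers.
    net = MultilayerNetwork(aspects=1)
    net["Alice", "Bob", 2010, 2010] = 1    # intra-layer edge within layer 2010
    net["Alice", "Alice", 2010, 2020] = 1  # inter-layer edge across layers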
9 |
10 | The data types in :code:`pymnet` mirror the formalization stated above. In `Kivelä et al. (2014) <https://doi.org/10.1093/comnet/cnu016>`_, a multilayer network is defined as a general mathematical structure, and all the other types of networks are defined as special cases of that structure.
11 | Here, we take a similar approach and define a class :code:`MultilayerNetwork` such that it represents the mathematical definition of the multilayer network. All the other network classes then inherit the :code:`MultilayerNetwork` class. Currently, we have the :code:`MultiplexNetwork` class, which represents multiplex networks as defined in the article. In the article, there were several constraints defined for multiplex networks. Some of these constraints, such as "categorical" and "ordinal" couplings, are also implemented in this library. Instances of :code:`MultiplexNetwork` that are constrained in this way can be implemented efficiently, and the algorithms dealing with general multilayer networks can take advantage of the information that the network object is constrained.
12 |
13 | For example, the following code constructs and visualizes a small multiplex social network:
14 |
15 | .. code-block:: python
16 |
17 | from pymnet import *
18 |
19 | net_social = MultiplexNetwork(couplings="categorical", fullyInterconnected=False)
20 | net_social["Alice", "Bob", "Friends"] = 1
21 | net_social["Alice", "Carol", "Friends"] = 1
22 | net_social["Bob", "Carol", "Friends"] = 1
23 | net_social["Alice", "Bob", "Married"] = 1
24 |
25 | fig_social = draw(net_social, layout="circular", layerPadding=0.2, defaultLayerLabelLoc=(0.9,0.9))
26 |
27 | .. figure:: /_static/socialnet.png
28 | :width: 400
29 | :alt: An image of a small multiplex social network.
30 |
31 | Since the network is multiplex, :code:`pymnet` does not store the dotted (i.e., inter-layer) edges explicitly. Rather, they are generated on the fly when they are needed (e.g., to draw the network) according to the specified coupling rule (here: "categorical").
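
A quick way to see this in action (a sketch; the coupling-edge indexing :code:`net[u, u, layer1, layer2]` is assumed from the general multilayer edge syntax):

.. code-block:: python

    # The coupling edge between ("Alice", "Friends") and ("Alice", "Married")
    # is not stored anywhere; querying it generates it on the fly.
    weight = net_social["Alice", "Alice", "Friends", "Married"]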
32 |
33 |
34 | Computational efficiency
35 | ------------------------
36 |
37 | One often wants to study large-scale synthetic networks or big network datasets. In these situations, the most important consideration is how the memory and time requirements of the data structures and algorithms scale with the size of the network. This library is designed with these scaling requirements in mind. The average scaling in time and memory should typically be optimal in the number of nodes, :math:`n`, the number of layers, :math:`l`, and the number of edges, :math:`e`. We typically consider the number of aspects, :math:`d`, to be constant and not dependent on the size of the data. Note, however, that the current implementation is pure Python (a C++ implementation is in the planning phase), and thus the constant factor in the memory and time consumption is typically fairly large.
38 |
39 | The main data structure underlying the general multilayer-network representation is a global graph :math:`G_M` implemented as a dictionary of dictionaries. That is, the graph is a dictionary in which each node-layer, e.g., :math:`(u,\alpha,\beta)`, is a key, and its value is another dictionary describing that node-layer's neighbors. The inner dictionaries have the neighboring node-layers as keys and the weights of the connecting edges as values. This type of structure ensures that adding nodes, removing nodes, and querying for the existence or weight of an edge all take constant time on average, and that iterating over the neighbors of a node is linear in the number of neighbors. Further, the memory requirements scale as :math:`\mathcal{O}(n+l+e)` and are typically dominated by the number of edges in the network.
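
The following toy dictionary is purely illustrative (it is not :code:`pymnet`'s actual internal object), but it shows the idea for a two-aspect network:

.. code-block:: python

    # node-layer -> {neighboring node-layer: edge weight}
    G_M = {
        ("u", "alpha", "x"): {("v", "beta", "x"): 1.0},
        ("v", "beta", "x"): {("u", "alpha", "x"): 1.0},
    }
    # Average-constant-time edge existence/weight lookup:
    w = G_M[("u", "alpha", "x")].get(("v", "beta", "x"), 0)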
40 |
41 | Multiplex networks are a special case of multilayer networks, and we could easily employ the same data structure as for general multilayer networks. There are a few reasons why we do not do that here. First, in multiplex networks, we typically want to iterate over the intra-layer links of a node in a single layer, and this would require going through all the inter-layer edges, too, if the multilayer-network data structure were used. Second, in most cases, we do not want to explicitly store all inter-layer links. For example, when we have a multiplex network with categorical couplings that are all of equal weight, the number of inter-layer edges grows as :math:`\mathcal{O}(nl^2)`. In this library's multiplex-network data structure, we always store the intra-layer networks separately. We do not store the inter-layer edges explicitly but only generate them, according to the given rules, when they are needed. This ensures that we can always iterate over the intra-layer edges in time linear in the number of intra-layer edges, and that having the inter-layer edges requires only constant memory (i.e., the memory needed to store the rule that generates them).
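
In practice, the intra-layer networks of a :code:`MultiplexNetwork` are exposed through its :code:`A` attribute (the NetworkX tutorial uses the same pattern), so per-layer analysis never has to touch the generated coupling edges:

.. code-block:: python

    import pymnet
    from pymnet import nx

    net = pymnet.er(1000, 3 * [0.005])  # multiplex ER network with 3 layers
    # Analyze each intra-layer network separately, without generating coupling edges.
    components = {layer: nx.number_connected_components(intranet)
                  for layer, intranet in net.A.items()}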
42 |
43 | Examples
44 | ^^^^^^^^
45 |
46 | Next, we give a few examples of simple tasks and the time it takes to complete them on a normal desktop computer. This is only meant to give an idea of the practical efficiency of the library and the computation times of typical jobs. All runs use PyPy 2.1 on a desktop computer with an AMD Phenom II X3 processor and 2 GB of memory, running Linux.
47 |
48 | Multiplex ER network with a large number of layers
49 | """"""""""""""""""""""""""""""""""""""""""""""""
50 |
51 | First, we create an Erdos-Renyi multiplex network with a small number of nodes, a large number of layers, and categorical couplings. We choose to have :math:`n=10` nodes and :math:`b=10^5` layers with an edge probability of :math:`p=0.1`. This will result in a network with around :math:`4.5 \times 10^5` intra-layer edges and :math:`10 \binom{10^5}{2} \approx 5 \times 10^{10}` inter-layer edges. The command for creating this network is
52 |
53 | >>> import pymnet
54 | >>> net = pymnet.er(10, 10**5*[0.1])
55 |
56 | The command takes around 2.4 seconds to run (averaged over 100 runs) on the above-mentioned computer. Note that, internally, this command creates a sparse representation of the intra-layer networks (i.e., only edges that exist are created), and the inter-layer edges are not actually created explicitly. Creating a full adjacency tensor, or a supra-adjacency matrix, would require creating an object with :math:`10^{12}` elements, and even a sparse representation with all edges explicitly generated would have around :math:`5 \times 10^{10}` elements.
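
The edge counts quoted above follow directly from the chosen parameters; as a quick sanity check in plain Python (not part of :code:`pymnet`):

.. code-block:: python

    from math import comb

    n_nodes, n_layers, p = 10, 10**5, 0.1
    expected_intra = n_layers * p * comb(n_nodes, 2)  # ~4.5e5 intra-layer edges
    coupling_edges = n_nodes * comb(n_layers, 2)      # ~5e10 inter-layer edges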
57 |
58 | Multiplex ER network with a large number of nodes
59 | """""""""""""""""""""""""""""""""""""""""""""""
60 |
61 | Next, we create an ER network with :math:`n=10^5` nodes, :math:`b=10` layers, and an average degree of around one. The total number of intra-layer edges will be around :math:`5 \times 10^5`. This can be done with the following command:
62 |
63 | >>> net = pymnet.er(10**5,10*[10**-5])
64 |
65 | The total time to complete this task on the above-mentioned hardware is around 3.4 seconds.
66 |
67 |
--------------------------------------------------------------------------------
/doc/reference.rst:
--------------------------------------------------------------------------------
1 | .. _reference:
2 |
3 | Reference
4 | =========
5 | .. automodule:: pymnet
6 | .. autosummary::
7 | :toctree: autogen
8 |
9 | Data Structures
10 | ---------------
11 | .. automodule:: pymnet.net
12 | .. autosummary::
13 | :toctree: autogen
14 |
15 | .. automodule:: pymnet
16 | .. autosummary::
17 | :toctree: autogen
18 |
19 | MultilayerNetwork
20 | MultiplexNetwork
21 | net.MultilayerNode
22 |
23 | Network Models
24 | --------------
25 | .. automodule:: pymnet.models
26 | .. autosummary::
27 | :toctree: autogen
28 |
29 | .. automodule:: pymnet
30 | .. autosummary::
31 | :toctree: autogen
32 |
33 | er
34 | conf
35 | single_layer_er
36 | single_layer_conf
37 | er_partially_interconnected
38 | full
39 | full_multilayer
40 | er_multilayer
41 |
42 | Transforming Networks
43 | ---------------------
44 | .. automodule:: pymnet.transforms
45 | .. autosummary::
46 | :toctree: autogen
47 |
48 | .. automodule:: pymnet
49 | .. autosummary::
50 | :toctree: autogen
51 |
52 | aggregate
53 | subnet
54 | supra_adjacency_matrix
55 |
56 | Reading and Writing Networks
57 | ----------------------------
58 | .. automodule:: pymnet.netio
59 | .. autosummary::
60 | :toctree: autogen
61 |
62 | .. automodule:: pymnet
63 | .. autosummary::
64 | :toctree: autogen
65 |
66 | read_edge_file
67 | write_edge_file
68 | read_ucinet
69 | write_edge_files
70 | write_json
71 |
72 | Basic Network Diagnostics
73 | -------------------------
74 | .. automodule:: pymnet.diagnostics
75 | .. autosummary::
76 | :toctree: autogen
77 |
78 | .. automodule:: pymnet
79 | .. autosummary::
80 | :toctree: autogen
81 |
82 | degs
83 | density
84 | multiplex_degs
85 | multiplex_density
86 |
87 | Clustering Coefficients
88 | -----------------------
89 | .. automodule:: pymnet.cc
90 | .. autosummary::
91 | :toctree: autogen
92 |
93 | .. automodule:: pymnet
94 | .. autosummary::
95 | :toctree: autogen
96 |
97 | lcc
98 | cc_zhang
99 | gcc_zhang
100 | cc_onnela
101 | cc_barrat
102 | cc_barrett
103 | lcc_brodka
104 | cc_sequence
105 | lcc_aw
106 | avg_lcc_aw
107 | gcc_aw
108 | sncc_aw
109 | elementary_cycles
110 |
111 |
112 | Visualization
113 | -------------
114 | .. automodule:: pymnet.visuals
115 | .. autosummary::
116 | :toctree: autogen
117 |
118 | .. automodule:: pymnet
119 | .. autosummary::
120 | :toctree: autogen
121 |
122 | draw
123 | webplot
124 |
125 | Isomorphisms
126 | ------------
127 | .. automodule:: pymnet.isomorphisms
128 | .. autosummary::
129 | :toctree: autogen
130 |
131 | .. automodule:: pymnet
132 | .. autosummary::
133 | :toctree: autogen
134 |
135 | is_isomorphic
136 | get_complete_invariant
137 | get_automorphism_generators
138 | get_isomorphism
139 |
140 | Graphlets
141 | ------------
142 | .. automodule:: pymnet.graphlets
143 | .. autosummary::
144 | :toctree: autogen
145 |
146 | graphlets
147 | automorphism_orbits
148 | orbit_equations
149 | orbit_counts_all
150 | GCM
151 | GCD
152 |
153 | Sampling
154 | ------------
155 | .. automodule:: pymnet.sampling
156 | .. autosummary::
157 | :toctree: autogen
158 |
159 | .. automodule:: pymnet.sampling.esu
160 | .. autosummary::
161 | :toctree: autogen
162 |
163 | sample_multilayer_subgraphs_esu
164 |
165 | .. automodule:: pymnet.sampling.reqs
166 | .. autosummary::
167 | :toctree: autogen
168 |
169 | default_check_reqs
170 | relaxed_check_reqs
171 |
172 |
--------------------------------------------------------------------------------
/doc/requirements.txt:
--------------------------------------------------------------------------------
1 | numpydoc == 1.6.0
2 | furo
3 | sphinx-copybutton
4 | toml
5 | myst_nb
--------------------------------------------------------------------------------
/doc/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 |
--------------------------------------------------------------------------------
/doc/tutorials.rst:
--------------------------------------------------------------------------------
1 | .. _tutorials:
2 |
3 | Tutorials
4 | =========
5 |
6 | The following tutorials will help you get started with using the library.
7 |
8 | .. toctree::
9 | :maxdepth: 5
10 |
11 | tutorials/installing
12 | tutorials/networktypes
13 | tutorials/visualizing
14 | tutorials/isomorphisms
15 | tutorials/nx
16 | tutorials/realworld
17 |
18 |
--------------------------------------------------------------------------------
/doc/tutorials/installing.rst:
--------------------------------------------------------------------------------
1 | Downloading and installing
2 | ==========================
3 |
4 | The easiest way to install :code:`pymnet` is via pip:
5 |
6 | .. code-block:: bash
7 |
8 | $ pip install pymnet
9 |
10 | We recommend installing into a virtual environment (managed, e.g., with virtualenv, conda, mamba, or poetry).
11 | Afterwards, the installation should work out-of-the-box.
12 |
13 | Alternatively, you can download the latest source files directly from our `GitHub Repository `_ as a zip file,
14 | or you can use git to clone the repository, for example, via HTTPS:
15 |
16 | .. code-block:: bash
17 |
18 | $ git clone https://github.com/mnets/pymnet.git
19 |
20 | ...or via SSH using a password-protected SSH key:
21 |
22 | .. code-block:: bash
23 |
24 | $ git clone git@github.com:mnets/pymnet.git
25 |
--------------------------------------------------------------------------------
/doc/tutorials/nx.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "metadata": {},
5 | "cell_type": "markdown",
6 | "source": [
7 | "# Using NetworkX functions\n",
8 | "\n",
9 | "[NetworkX](https://networkx.github.io/) is an excellent tool for network analysis, and there is no need to reinvent the wheel when working on monoplex networks with Pymnet. If you have NetworkX installed, you can use its functions directly with the multilayer-network objects produced by Pymnet.\n",
10 | "\n",
11 | "Start by importing the library:"
12 | ],
13 | "id": "4770bc9305b5d5db"
14 | },
15 | {
16 | "metadata": {
17 | "ExecuteTime": {
18 | "end_time": "2024-07-14T21:14:37.081131Z",
19 | "start_time": "2024-07-14T21:14:35.177446Z"
20 | }
21 | },
22 | "cell_type": "code",
23 | "source": "from pymnet import nx",
24 | "id": "98e6640774c51fcb",
25 | "outputs": [],
26 | "execution_count": 1
27 | },
28 | {
29 | "metadata": {},
30 | "cell_type": "markdown",
31 | "source": "You can then run any NetworkX function from the pymnet.nx module. For example, you can produce the Karate Club network with the following command.",
32 | "id": "e7579bf203091df0"
33 | },
34 | {
35 | "metadata": {
36 | "ExecuteTime": {
37 | "end_time": "2024-07-14T21:14:37.094864Z",
38 | "start_time": "2024-07-14T21:14:37.083478Z"
39 | }
40 | },
41 | "cell_type": "code",
42 | "source": "net = nx.karate_club_graph()",
43 | "id": "9574c30c74877724",
44 | "outputs": [],
45 | "execution_count": 2
46 | },
47 | {
48 | "metadata": {},
49 | "cell_type": "markdown",
50 | "source": "This will produce a native Pymnet multilayer network object with 0 aspects (i.e., a monoplex network). To confirm this, try:",
51 | "id": "cfc9e752cf3f22b9"
52 | },
53 | {
54 | "metadata": {
55 | "ExecuteTime": {
56 | "end_time": "2024-07-14T21:16:00.602274Z",
57 | "start_time": "2024-07-14T21:16:00.597755Z"
58 | }
59 | },
60 | "cell_type": "code",
61 | "source": "type(net)",
62 | "id": "bede72d137b192e9",
63 | "outputs": [
64 | {
65 | "data": {
66 | "text/plain": [
67 | "pymnet.net.MultilayerNetwork"
68 | ]
69 | },
70 | "execution_count": 9,
71 | "metadata": {},
72 | "output_type": "execute_result"
73 | }
74 | ],
75 | "execution_count": 9
76 | },
77 | {
78 | "metadata": {
79 | "ExecuteTime": {
80 | "end_time": "2024-07-14T21:14:39.442028Z",
81 | "start_time": "2024-07-14T21:14:39.437629Z"
82 | }
83 | },
84 | "cell_type": "code",
85 | "source": "net.aspects",
86 | "id": "e3f69f087bbf9390",
87 | "outputs": [
88 | {
89 | "data": {
90 | "text/plain": [
91 | "0"
92 | ]
93 | },
94 | "execution_count": 4,
95 | "metadata": {},
96 | "output_type": "execute_result"
97 | }
98 | ],
99 | "execution_count": 4
100 | },
101 | {
102 | "metadata": {},
103 | "cell_type": "markdown",
104 | "source": "For the sake of reproducibility in the next example, let's explicitly seed the random number generator here:",
105 | "id": "9372b0e498a1eb5c"
106 | },
107 | {
108 | "metadata": {
109 | "ExecuteTime": {
110 | "end_time": "2024-07-14T21:14:41.836007Z",
111 | "start_time": "2024-07-14T21:14:41.832680Z"
112 | }
113 | },
114 | "cell_type": "code",
115 | "source": [
116 | "import random\n",
117 | "random.seed(42)"
118 | ],
119 | "id": "e5eaa4016156458c",
120 | "outputs": [],
121 | "execution_count": 5
122 | },
123 | {
124 | "metadata": {},
125 | "cell_type": "markdown",
126 |    "source": "You can also pass Pymnet objects as arguments to NetworkX functions in a similar way. This is handy, for example, when analyzing the monoplex structure of the intra-layer networks of a multiplex network. The following command produces a multiplex network with three Erdos-Renyi intra-layer networks using Pymnet and counts the connected components in each layer:",
127 | "id": "2af171cf20423043"
128 | },
129 | {
130 | "metadata": {
131 | "ExecuteTime": {
132 | "end_time": "2024-07-14T21:14:45.328401Z",
133 | "start_time": "2024-07-14T21:14:45.168653Z"
134 | }
135 | },
136 | "cell_type": "code",
137 | "source": [
138 | "import pymnet\n",
139 | "{name: nx.number_connected_components(layer) for name, layer in pymnet.er(1000, 3*[0.005]).A.items()}"
140 | ],
141 | "id": "8c6a4f1cad84ce31",
142 | "outputs": [
143 | {
144 | "data": {
145 | "text/plain": [
146 | "{0: 10, 1: 9, 2: 5}"
147 | ]
148 | },
149 | "execution_count": 6,
150 | "metadata": {},
151 | "output_type": "execute_result"
152 | }
153 | ],
154 | "execution_count": 6
155 | }
156 | ],
157 | "metadata": {
158 | "kernelspec": {
159 | "display_name": "Python 3",
160 | "language": "python",
161 | "name": "python3"
162 | },
163 | "language_info": {
164 | "codemirror_mode": {
165 | "name": "ipython",
166 | "version": 2
167 | },
168 | "file_extension": ".py",
169 | "mimetype": "text/x-python",
170 | "name": "python",
171 | "nbconvert_exporter": "python",
172 | "pygments_lexer": "ipython2",
173 | "version": "2.7.6"
174 | }
175 | },
176 | "nbformat": 4,
177 | "nbformat_minor": 5
178 | }
179 |
--------------------------------------------------------------------------------
/doc/tutorials/preprocessing.py:
--------------------------------------------------------------------------------
1 | """
2 | The original data was exported as a CSV on July 17, 2024, from https://trade.cites.org/, with the following filters:
3 | - Source: "W-wild"
4 | - Purposes: "T-commercial"
5 | - Trade terms: "live" and "meat"
6 | - Years: 2010 only (commercial_live-meat_2010.csv) resp. 2020 only (commercial_live-meat_2020.csv)
7 |
8 | Data citation:
9 | CITES Trade Database 2024. Compiled by UNEP-WCMC for the CITES Secretariat. Available
10 | at: trade.cites.org. Accessed July 17, 2024.
11 |
12 | This script uses polars for preprocessing (pip install polars), just for fun.
13 | """
14 | import polars as pl
15 |
16 | def preprocess_data():
17 |
18 | df_2010 = pl.read_csv("commercial_live-meat_2010.csv", infer_schema_length=10000)
19 | df_2020 = pl.read_csv("commercial_live-meat_2020.csv", infer_schema_length=10000)
20 | df = pl.concat([df_2010, df_2020])
21 |
22 | df_agg = df.with_columns(pl.max_horizontal(pl.col("Importer reported quantity"),pl.col("Exporter reported quantity")).alias("Max_reported_quantity"),
23 | pl.col("Unit").replace("","Number of specimens")
24 | ).filter(
25 |         (((pl.col("Term") == "live") & (pl.col("Unit").is_in(["Number of specimens"])))
26 |         | ((pl.col("Term") == "meat") & (pl.col("Unit").is_in(["kg"]))))
27 | & (pl.col("Source") == "W")
28 | ).group_by([pl.col("Year"), pl.col("Term"),pl.col("Importer"),pl.col("Exporter")]).agg(pl.col("Max_reported_quantity").sum(),
29 | pl.col("Unit").max()
30 | ).sort([pl.col("Year"), pl.col("Term"), pl.col("Importer"), pl.col("Exporter")])
31 |
32 |     df_agg.write_csv("cites-trades_commercial_live-meat_2010-2020.csv")
33 |
34 | if __name__ == "__main__":
35 | preprocess_data()
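36 | 
37 | 
38 | # Illustrative sketch (not used by the preprocessing above): one way to read the
39 | # aggregated CSV back into a pymnet multiplex network is to use one layer per
40 | # (Term, Year) pair and the summed quantities as edge weights. The function name
41 | # and the layer naming convention are just an example; the column names follow
42 | # the output of preprocess_data(), and the coupling policy is a modelling choice.
43 | def load_trade_multiplex(path="cites-trades_commercial_live-meat_2010-2020.csv"):
44 |     import csv
45 | 
46 |     import pymnet
47 | 
48 |     net = pymnet.MultiplexNetwork(couplings="categorical")
49 |     with open(path, newline="") as f:
50 |         for row in csv.DictReader(f):
51 |             layer = "{}-{}".format(row["Term"], row["Year"])  # e.g. "live-2010"
52 |             net.add_layer(layer)
53 |             # weighted intra-layer edge between exporter and importer
54 |             net[row["Exporter"], row["Importer"], layer] = float(
55 |                 row["Max_reported_quantity"]
56 |             )
57 |     return net
58 | 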
--------------------------------------------------------------------------------
/pymnet/__init__.py:
--------------------------------------------------------------------------------
1 | from . import graphlets, isomorphisms
2 | from .cc import (
3 | avg_lcc_aw,
4 | cc_barrat,
5 | cc_barrett,
6 | cc_onnela,
7 | cc_sequence,
8 | cc_zhang,
9 | elementary_cycles,
10 | gcc_aw,
11 | gcc_zhang,
12 | lcc,
13 | lcc_aw,
14 | lcc_brodka,
15 | sncc_aw,
16 | )
17 | from .diagnostics import degs, density, multiplex_degs, multiplex_density
18 | from .isomorphisms import (
19 | get_automorphism_generators,
20 | get_complete_invariant,
21 | get_isomorphism,
22 | is_isomorphic,
23 | )
24 | from .models import (
25 | conf,
26 | er,
27 | er_multilayer,
28 | er_partially_interconnected,
29 | full,
30 | full_multilayer,
31 | single_layer_conf,
32 | single_layer_er,
33 | )
34 | from .net import MultilayerNetwork, MultiplexNetwork
35 | from .netio import (
36 | read_edge_file,
37 | read_ucinet,
38 | write_edge_file,
39 | write_edge_files,
40 | write_json,
41 | )
42 | from .transforms import aggregate, subnet, supra_adjacency_matrix
43 | from .visuals import draw, webplot
44 |
45 | try:
46 | from . import nxwrap as nx
47 | except ImportError: # in case networkx is not installed
48 | pass
49 |
50 | from . import sampling
51 |
--------------------------------------------------------------------------------
/pymnet/benchmarks/bm1.py:
--------------------------------------------------------------------------------
1 | from timeit import Timer
2 |
3 | from pymnet import *
4 |
5 | ### ER with large number of layers
6 |
7 |
8 | def create_er():
9 | net = er(10, 10**5 * [0.1])
10 | return net
11 |
12 |
13 | timer = Timer(create_er)
14 | result = timer.timeit(number=100)
15 |
16 |
17 | ### ER with large number of nodes
18 |
19 |
20 | def create_er2():
21 | net = er(10**5, 10 * [10**-5])
22 | return net
23 |
24 |
25 | timer2 = Timer(create_er2)
26 | result2 = timer2.timeit(number=100)
27 |
28 |
29 | ### Aggregating ER
30 |
31 | net = er(10**5, 10 * [10**-5])
32 |
33 |
34 | def aggr():
35 | aggregate(net, 1)
36 |
37 |
38 | timer3 = Timer(aggr)
39 | result3 = timer3.timeit(number=100)
40 |
41 |
42 | print(result, result2, result3)
43 |
--------------------------------------------------------------------------------
/pymnet/diagnostics.py:
--------------------------------------------------------------------------------
1 | import heapq
2 | import itertools
3 |
4 | from .net import MultilayerNetwork, MultiplexNetwork
5 | from .transforms import aggregate, subnet, threshold
6 |
7 |
8 | def degs(net, degstype="distribution"):
9 | """Returns the degree distribution of a multilayer network.
10 |
11 |     If the network has one or more aspects, the degree distribution is returned for
12 | node-layer tuples.
13 |
14 | Parameters
15 | ----------
16 | net : MultilayerNetwork
17 | A multilayer network object.
18 |
19 | degstype : string
20 | If 'distribution', then degs dicts give the degree distributions. I.e.,
21 |        keys are degrees, and corresponding values are the number of nodes with the given degree.
22 |        If 'nodes', then degs dicts give node degrees. I.e., keys are node names and
23 | corresponding values are degrees of those nodes.
24 |
25 | """
26 | if net.aspects == 0:
27 | the_iterator = net
28 | else:
29 | the_iterator = net.iter_node_layers()
30 | degs = {}
31 | if degstype == "distribution":
32 | for node in the_iterator:
33 | d = net[node].deg()
34 | degs[d] = degs.get(d, 0) + 1
35 | elif degstype == "nodes":
36 | for node in the_iterator:
37 | degs[node] = net[node].deg()
38 | else:
39 | raise Exception("Invalid degstype parameter.")
40 | return degs
41 |
42 |
43 | def density(net):
44 | """Returns the density of the network.
45 |
46 | Density is defined as the number of edges in the network divided by the number
47 | of possible edges in a general multilayer network with the same set of nodes and
48 | layers.
49 | """
50 | if len(net) == 0:
51 | return 0
52 |
53 | if net.fullyInterconnected:
54 | nl = len(net.get_layers(0))
55 | for a in range(net.aspects):
56 | nl = nl * len(net.get_layers(a + 1))
57 | if net.directed:
58 | pedges = nl * (nl - 1)
59 | else:
60 | pedges = (nl * (nl - 1)) / 2
61 |
62 | return len(net.edges) / float(pedges)
63 |
64 |
65 | def multiplex_density(net):
66 | """Returns a dictionary of densities of each intra-layer network of a multiplex network."""
67 | assert isinstance(net, MultiplexNetwork)
68 | d = {}
69 | for layer in net.iter_layers():
70 | d[layer] = density(net.A[layer])
71 | return d
72 |
73 |
74 | def multiplex_degs(net, degstype="distribution"):
75 | """Returns a dictionary of degree distributions of each intra-layer network of a multiplex network.
76 |
77 | Parameters
78 | ----------
79 | net : MultiplexNetwork
80 | A multiplex network object.
81 |
82 | degstype : string
83 | If 'distribution', then degs dicts give the degree distributions. I.e.,
84 |        keys are degrees, and corresponding values are the number of nodes with the given degree.
85 |        If 'nodes', then degs dicts give node degrees. I.e., keys are node names and
86 | corresponding values are degrees of those nodes.
87 |
88 | """
89 | assert isinstance(net, MultiplexNetwork)
90 |
91 | d = {}
92 | for layer in net.iter_layers():
93 | d[layer] = degs(net.A[layer], degstype=degstype)
94 | return d
95 |
96 |
97 | def overlap_degs(net):
98 | """Returns a dictionary of overlap degree distributions of each layer combination
99 | of an unweighted multiplex network.
100 |
101 |     The returned dictionary contains every layer combination, including combinations
102 |     with only a single layer, and the value for each combination is another
103 |     dictionary giving the overlap degrees of nodes.
104 |
105 | The overlap degrees of nodes for a given layer combination give the number of links that
106 | are shared between exactly the layers in the combination. If the link is in an additional
107 | layer, or it is missing from one layer, then it is not included in the degree of the
108 | corresponding layer combination.
109 |
110 | Parameters
111 | ----------
112 | net : MultiplexNetwork
113 |         An unweighted multiplex network object (i.e., edge weights equal to 1).
114 | """
115 | ol_degs = {}
116 | nodes = net.slices[0]
117 | layers = net.slices[1]
118 |
119 | net0 = subnet(net, nodes, layers)
120 |
121 | for n_l in range(len(layers), 0, -1):
122 | for layer_comb in itertools.combinations(layers, n_l):
123 | sub_net = subnet(net0, nodes, layer_comb)
124 | agg_net = aggregate(sub_net, 1)
125 | thr_net = threshold(agg_net, n_l)
126 | ol_degs[layer_comb] = degs(thr_net, degstype="nodes")
127 |
128 | if n_l > 1:
129 | for e in thr_net.edges:
130 | for layer in layer_comb:
131 | net0[e[0], e[1], layer] = 0
132 |
133 | return ol_degs
134 |
135 |
136 | def dijkstra(net, sources):
137 | """Return the forest giving shortest paths from a set of source nodes.
138 |
139 | Parameters
140 | ----------
141 | net : MultilayerNetwork
142 | sources : iterable
143 | """
144 | forest = MultilayerNetwork(
145 | aspects=net.aspects, fullyInterconnected=False, directed=True, noEdge=-1
146 | )
147 | d = dict([(s, 0) for s in sources])
148 |
149 | queue = []
150 | for s in sources:
151 | heapq.heappush(queue, (0, s, s)) # distance, source, dest
152 |
153 | while len(queue) > 0:
154 | dist, source, dest = heapq.heappop(queue)
155 | if d[dest] >= dist: # could be ==
156 | assert d[dest] == dist, " ".join(map(str, [dist, source, dest, d[dest]]))
157 | forest[source][dest] = net[source][dest]
158 | for neigh in net[dest].iter_out():
159 | ndist = dist + net[dest][neigh]
160 | if neigh not in d or d[neigh] >= ndist:
161 | d[neigh] = ndist
162 | heapq.heappush(queue, (ndist, dest, neigh))
163 |
164 | return d, forest
165 |
166 |
167 | def dijkstra_mlayer_prune(net, sources, aaspects):
168 | nsources = []
169 | for s in sources:
170 | layers = []
171 | for a in range(net.aspects + 1):
172 | if a in aaspects:
173 | assert s[a] == None
174 | layers.append(list(net.slices[a]))
175 | else:
176 | layers.append([s[a]])
177 | for nl in itertools.product(*layers):
178 | if net[nl].deg() > 0:
179 | nsources.append(nl)
180 |
181 | d, forest = dijkstra(net, nsources)
182 |
183 | def select_aspects(nl, aaspects):
184 | nnl = []
185 | for a in range(len(nl)):
186 | if a not in aaspects:
187 | nnl.append(nl[a])
188 | return tuple(nnl)
189 |
190 | nd = {}
191 | # for nl,dist in d.iteritems():
192 | for nl in d:
193 | dist = d[nl]
194 | nnl = select_aspects(nl, aaspects)
195 | if nnl not in nd or nd[nnl] > dist:
196 | nd[nnl] = dist
197 |
198 | def build_path(otree, ntree, node):
199 | for neigh in otree[node].iter_in():
200 | ntree[neigh][node] = otree[neigh][node]
201 | if ntree[neigh].deg_in() == 0:
202 | build_path(otree, ntree, neigh)
203 |
204 | nforest = MultilayerNetwork(
205 | aspects=net.aspects, fullyInterconnected=False, directed=True, noEdge=-1
206 | )
207 |
208 | # for nl,dist in d.iteritems():
209 | for nl in d:
210 | dist = d[nl]
211 | nnl = select_aspects(nl, aaspects)
212 | if nd[nnl] == d[nl]:
213 | build_path(forest, nforest, nl)
214 |
215 | return nd, nforest
216 |
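217 | 
218 | if __name__ == "__main__":
219 |     # Minimal usage sketch (toy data, illustrative only): build a small
220 |     # two-layer multiplex network and print its degree and density diagnostics.
221 |     mnet = MultiplexNetwork(couplings="categorical")
222 |     mnet.add_layer("a")
223 |     mnet.add_layer("b")
224 |     mnet[1, 2, "a"] = 1
225 |     mnet[2, 3, "a"] = 1
226 |     mnet[1, 3, "b"] = 1
227 |     print(degs(mnet))  # degree distribution over node-layer tuples
228 |     print(multiplex_degs(mnet, degstype="nodes"))  # per-layer node degrees
229 |     print(multiplex_density(mnet))  # density of each intra-layer network
230 | 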
--------------------------------------------------------------------------------
/pymnet/graphlets/__init__.py:
--------------------------------------------------------------------------------
1 | """Package for multiplex graphlet analysis.
2 |
3 | If you use this package, please cite:
4 |
5 | Sallamari Sallmen, Tarmo Nurmi, and Mikko Kivelä. "Graphlets in multilayer networks." Journal of Complex Networks 10.2 (2022): cnac005. https://doi.org/10.1093/comnet/cnac005
6 | """
7 |
8 | from . import graphlet_measures
9 | from . import graphlets as graphlets_module
10 | from . import independent_equations as independent_equations_module
11 | from .graphlet_measures import (
12 | GCD,
13 | GCM,
14 | GCD_matrix,
15 | orbit_counts,
16 | orbit_counts_all,
17 | orbit_numbers,
18 | ordered_orbit_list,
19 | )
20 | from .graphlets import automorphism_orbits, graphlets, list_orbits, orbit_equations
21 | from .independent_equations import independent_equations, redundant_orbits
22 |
--------------------------------------------------------------------------------
/pymnet/graphlets/graphlet_measures.py:
--------------------------------------------------------------------------------
1 | """Module for graphlet data analysis
2 | """
3 |
4 | import math
5 | from collections import defaultdict as dd
6 |
7 | from scipy.stats import spearmanr
8 |
9 | import pymnet
10 |
11 |
12 | def orbit_counts_all(net, n, nets, invs, auts, orbit_list, allowed_aspects="all"):
13 | """
14 | Compute the orbit counts for all the nodes in net.
15 |
16 | Parameters
17 | ----------
18 | net : network
19 | n : int
20 | max number of nodes
21 | nets : dict (key: n_nodes, value: list of networks)
22 | Graphlets, as produced by graphlets
23 | invs : dict (key: complete invariant, value: tuple(n_nodes, net_index in nets))
24 | complete invariants of the graphlets, as produced by graphlet
25 | auts : dd (key: (n_nodes, net_index, node), value: node)
26 | automorphisms, as produced by automorphism_orbits
27 | orbit_list : list of orbits
28 | as returned by ordered_orbit_list
29 | allowed_aspects : list, string
30 | the aspects that can be permuted when computing isomorphisms
31 |
32 | Returns
33 | -------
34 | orbits : dd (key: (node, orbit), value: count)
35 | Orbit counts for all the nodes
36 |
37 | Notes
38 | -----
39 | Should be faster than orbit_counts if the counts are computed for all
40 | (/ most of) the nodes
41 | """
42 |
43 | nodes = net.slices[0]
44 | layers = net.slices[1]
45 |
46 | orbits = dd()
47 |
48 | for node in nodes:
49 | for orbit in orbit_list:
50 | orbits[node, orbit] = 0
51 |
52 | processed = set()
53 |
54 | for node0 in nodes:
55 | node_sets = set()
56 | set_p = set([frozenset([node0])])
57 | for _ in range(n - 1):
58 | set_c = set()
59 | for p in set_p:
60 | for node_p in p:
61 | for layer in layers:
62 | node_o = net.__getitem__((node_p, layer))
63 | for neighbor in node_o.iter_total():
64 | if not (neighbor[0] in p or neighbor[0] in processed):
65 | set_n = frozenset(p | set([neighbor[0]]))
66 | set_c.add(set_n)
67 |
68 | node_sets = node_sets.union(set_c)
69 | set_p = set_c.copy()
70 |
71 | processed.add(node0)
72 | for node_comb in node_sets:
73 | sub_net = pymnet.subnet(net, node_comb, layers)
74 | ci_sub = pymnet.get_complete_invariant(
75 | sub_net, allowed_aspects=allowed_aspects)
76 | if ci_sub not in invs:
77 | raise KeyError(
78 | "The network contains a graphlet not found in the "
79 | "pre-constructed complete invariant dictionary (invs). "
80 | "This can be caused by invs creation not being compatible "
81 | "with the attributes of the network. For example, the "
82 | "network might not be fully interconnected."
83 | )
84 | i = invs[ci_sub][0]
85 | j = invs[ci_sub][1]
86 | nw = nets[i][j]
87 | iso = pymnet.get_isomorphism(
88 | sub_net, nw, allowed_aspects=allowed_aspects)
89 | for node in node_comb:
90 | if node in iso[0]:
91 | orbits[node, (i, j, auts[i, j, iso[0][node]])] += 1
92 | else:
93 | orbits[node, (i, j, auts[i, j, node])] += 1
94 |
95 | for layer in layers:
96 | nls = list(net[node0, :, layer])
97 | for node1 in nls:
98 | net[node0, node1[0], layer] = 0 # remove edges
99 |
100 | return orbits
101 |
102 |
103 | def orbit_numbers(n, nets, auts):
104 | """
105 | Assign numbers to the orbits.
106 |
107 | Parameters
108 | ----------
109 | n : int
110 | Max number of nodes in the graphlets
111 | nets : dict (key: n_nodes, value: list of networks)
112 | Graphlets, as given by graphlets
113 | auts : dd (key: (n_nodes, net_index, node), value: node)
114 | Automorphism orbits, as given by automorphism_orbits
115 |
116 | Returns
117 | -------
118 | orbit_is : dict
119 | Orbit numbers, keys are orbits in form (n_nodes, net_index, node)
120 | """
121 |
122 | orbit_is = {}
123 | for k in range(2, n + 1):
124 | for j in range(len(nets[k])):
125 | net = nets[k][j]
126 | for node in net.slices[0]:
127 | aut = auts[(k, j, node)]
128 | if not (k, j, aut) in orbit_is:
129 | orbit_is[(k, j, aut)] = len(orbit_is)
130 |
131 | return orbit_is
132 |
133 |
134 | def ordered_orbit_list(orbit_is):
135 | """
136 | Return a list of orbits ordered based on the orbit numbers.
137 |
138 | Parameters
139 | ----------
140 | orbit_is : dict
141 | Orbit numbers, keys are orbits in form (n_nodes, net_index, node)
142 |
143 | Returns
144 | -------
145 | orbit_list : list of orbits
146 | """
147 |
148 | orbit_list = [None] * len(orbit_is)
149 | for orbit in orbit_is:
150 | i = orbit_is[orbit]
151 | orbit_list[i] = orbit
152 |
153 | return orbit_list
154 |
155 |
156 | def orbit_counts(
157 | n, node0, net, nets, orbits, auts, invs, orbit_list, allowed_aspects="all"
158 | ):
159 | """
160 | Compute the orbit counts for node0 in net.
161 |
162 | Parameters
163 | ----------
164 | node0 : node
165 | net : network
166 | nets : dict (key: n_nodes, value: list of networks)
167 | graphlets
168 | orbits : dd (key: (node, orbit), value: count)
169 | dictionary where the counts will be stored
170 | auts : dd (key: (n_nodes, net_index, node), value: node)
171 | automorphism orbits
172 | invs : dict (key: complete invariant, value: tuple(n_nodes, net_index in nets))
173 | complete invariants of the graphlets
174 | orbit_list : list of orbits
175 | allowed_aspects : list, string
176 |         the aspects that can be permuted when computing isomorphisms
177 | """
178 |
179 | for orbit in orbit_list:
180 | orbits[node0, orbit] = 0
181 |
182 | layers = net.slices[1]
183 | node_sets = touching_orbit_nodes(node0, net, n)
184 | for nodes_s in node_sets:
185 | sub_net = pymnet.subnet(net, nodes_s, layers)
186 | ci_sub = pymnet.get_complete_invariant(
187 | sub_net, allowed_aspects=allowed_aspects)
188 | i = invs[ci_sub][1]
189 | n_nodes = invs[ci_sub][0]
190 | nw = nets[n_nodes][i]
191 | iso = pymnet.get_isomorphism(sub_net, nw, allowed_aspects=allowed_aspects)
192 | if node0 in iso[0]:
193 | orbits[node0, (n_nodes, i, auts[n_nodes, i, iso[0][node0]])] += 1
194 | else:
195 | orbits[node0, (n_nodes, i, auts[n_nodes, i, node0])] += 1
196 |
197 |
198 | def touching_orbit_nodes(node0, net, max_size):
199 |
200 | layers = net.slices[1]
201 | set_p = set([frozenset([node0])])
202 | set_c = set()
203 | node_sets = set()
204 |
205 | for _ in range(max_size - 1):
206 | for p in set_p:
207 | for node in p:
208 | for layer in layers:
209 | node_o = net.__getitem__((node, layer))
210 | for neighbor in node_o.iter_total():
211 | if not neighbor[0] in p:
212 | set_n = frozenset(p | set([neighbor[0]]))
213 | set_c.add(set_n)
214 |
215 | node_sets = node_sets.union(set_c)
216 | set_p = set_c.copy()
217 |
218 | return node_sets
219 |
220 |
221 | def GCM(orbits):
222 | """
223 | Returns the graphlet correlation matrix
224 |
225 | Parameters
226 | ----------
227 | orbits : pandas dataframe
228 | Orbit counts for nodes in the network
229 | """
230 | # add dummy vector
231 | n_rows = orbits.shape[0]
232 | n_cols = orbits.shape[1]
233 | orbits.loc[n_rows] = [1] * n_cols
234 |
235 | corr, p = spearmanr(orbits, axis=0)
236 |
237 | return corr
238 |
239 |
240 | def GCD(gcm1, gcm2):
241 | """
242 | Graphlet correlation distance between two networks
243 |
244 | Parameters
245 | ----------
246 | gcm1, gcm2 : 2-d array
247 | Graphlet correlation matrices
248 |
249 | Returns
250 | -------
251 | gcd : float
252 | Graphlet correlation distance
253 | """
254 |
255 | assert gcm1.shape == gcm2.shape, "matrix dimensions do not match"
256 |
257 | gcd = 0
258 | for i in range(len(gcm1)):
259 | for j in range(i + 1, len(gcm1[i])):
260 | gcd += (gcm1[i][j] - gcm2[i][j]) ** 2
261 |
262 | gcd = math.sqrt(gcd)
263 |
264 | return gcd
265 |
266 |
267 | def GCD_matrix(gcms):
268 | """
269 | Produce a distance matrix of GCDs between networks
270 |
271 | Parameters
272 | ----------
273 | gcms : list of 2-d arrays
274 | Graphlet correlation matrices
275 |
276 | Returns
277 | -------
278 | gcds : list of lists
279 | Graphlet correlation distances
280 | """
281 |
282 | gcds = []
283 | for gcm1 in gcms:
284 | gcds_t = []
285 | for gcm2 in gcms:
286 | gcd = GCD(gcm1, gcm2)
287 | gcds_t.append(gcd)
288 |
289 | gcds.append(gcds_t)
290 |
291 | return gcds
292 |
--------------------------------------------------------------------------------
/pymnet/isomorphisms/__init__.py:
--------------------------------------------------------------------------------
1 | """Package for isomorphisms in multilayer networks.
2 |
3 | The package is based on reducing multilayer network isomorphism problems to
4 | graph isomorphism problems. The graph isomorphism problems can be solved using
5 | different backends. Currently the following backends are supported (the
6 | functions these backends can be used for are in parentheses):
7 |
8 | - NetworkX : "nx" (is_isomorphic, get_isomorphism)
9 | - bliss-bind : "bliss_bind" (is_isomorphic, get_isomorphism, get_automorphism_generators, get_complete_invariant)
10 | """
11 |
12 | auxbuilder_backends = {}
13 | comparison_backends = []
14 | complete_invariant_backends = []
15 | automorphism_group_generator_backends = []
16 | isomorphism_mapping_backends = []
17 |
18 | # lets try to import some backends
19 |
20 | try:
21 | from . import nxbackend
22 |
23 | auxbuilder_backends["nx"] = nxbackend.AuxiliaryGraphBuilderNX
24 | except ImportError:
25 | pass
26 |
27 | try:
28 | from . import bliss_bind_backend
29 |
30 | try:
31 | auxbuilder_backends["bliss_bind"] = (
32 | bliss_bind_backend.AuxiliaryGraphBuilderBlissBind
33 | )
34 | except AttributeError:
35 | pass
36 | except ImportError:
37 | pass
38 |
39 |
40 | # fill in the backends that are available to do various tasks
41 | # start from the most preferred backend, to the least preferred
42 | backend_order = ["bliss_bind", "nx"]
43 | for backend_name in backend_order:
44 | if backend_name not in auxbuilder_backends:
45 | continue
46 | auxbuilder = auxbuilder_backends[backend_name]
47 | if auxbuilder.has_comparison:
48 | comparison_backends.append(backend_name)
49 | if auxbuilder.has_complete_invariant:
50 | complete_invariant_backends.append(backend_name)
51 | if auxbuilder.has_automorphism_group_generators:
52 | automorphism_group_generator_backends.append(backend_name)
53 | if auxbuilder.has_isomorphism_mapping:
54 | isomorphism_mapping_backends.append(backend_name)
55 |
56 |
57 | def is_isomorphic(net1, net2, allowed_aspects="all", backend="auto"):
58 | """Checks if the two networks are isomorphic.
59 |
60 | Parameters
61 | ----------
62 | net1 : MultilayerNetwork
63 | The first multilayer network.
64 | net2 : MultilayerNetwork
65 | The second multilayer network.
66 | allowed_aspects : list of ints, string
67 | The aspects that can be permuted in this isomorphism type. Nodes are in
68 | aspect 0 by convention. Value "all" will allow all permutations, i.e.,
69 | it gives the (nonpartial) node-layer isomorphism.
70 | backend : string
71 | The program to be used for solving the graph isomorphism of the
72 | auxiliary graphs. Value "auto" will select the best available candidate.
73 | For a list of backends, see documentation of the package.
74 |
75 |
76 | Returns
77 | -------
78 | is_isomorphic : bool
79 |         True if net1 and net2 are isomorphic, False otherwise.
80 |
81 |
82 | References
83 | ----------
84 | "Isomorphisms in Multilayer Networks", M. Kivela & M. A. Porter,
85 | arXiv:1506.00508 [physics.soc-ph]
86 | """
87 | assert len(comparison_backends) > 0, "No backends for comparison were imported!"
88 | if backend == "auto":
89 | backend = comparison_backends[0]
90 | else:
91 | assert backend in comparison_backends, (
92 | "Backend " + str(backend) + " does not allow comparisons"
93 | )
94 |
95 | auxbuilder = auxbuilder_backends[backend]
96 | a1 = auxbuilder(net1, allowed_aspects)
97 | a2 = auxbuilder(net2, allowed_aspects)
98 | return a1.compare(a2)
99 |
100 |
101 | def get_complete_invariant(net, allowed_aspects="all", backend="auto"):
102 | """
103 | Returns a value that is a complete invariant under multilayer network
104 | isomorphism.
105 |
106 | Parameters
107 | ----------
108 | net : MultilayerNetwork
109 | The multilayer network.
110 | allowed_aspects : list of ints, string
111 | The aspects that can be permuted in this isomorphism type. Nodes are in
112 | aspect 0 by convention. Value "all" will allow all permutations, i.e.,
113 | it gives the (nonpartial) node-layer isomorphism.
114 | backend : string
115 | The program to be used for solving the graph isomorphism of the
116 | auxiliary graphs. Value "auto" will select the best available candidate.
117 | For a list of backends, see documentation of the package.
118 |
119 |
120 | Returns
121 | -------
122 | complete_invariant : object
123 | The returned object is a complete invariant under the specified
124 | multilayer network isomorphism. That is, any two objects returned by
125 | this function are the same exactly when the two networks are isomorphic.
126 | Note that the isomorphism types (allowed_aspects) need to match in order
127 | for the comparison to be valid. The actual object can depend on the
128 | backend that was used.
129 |
130 | References
131 | ----------
132 | "Isomorphisms in Multilayer Networks", M. Kivela & M. A. Porter,
133 | arXiv:1506.00508 [physics.soc-ph]
134 | """
135 |
136 | assert (
137 | len(complete_invariant_backends) > 0
138 | ), "No backends for complete invariants were imported!"
139 | if backend == "auto":
140 | backend = complete_invariant_backends[0]
141 | else:
142 | assert backend in complete_invariant_backends, (
143 | "Backend " + str(backend) + " cannot be used to produce complete invariants"
144 | )
145 |
146 | auxbuilder = auxbuilder_backends[backend]
147 | aux_graph = auxbuilder(net, allowed_aspects)
148 | return aux_graph.get_complete_invariant()
149 |
150 |
151 | def get_automorphism_generators(
152 | net, allowed_aspects="all", include_fixed=False, backend="auto"
153 | ):
154 | """
155 | Returns automorphism generators for the given network. The generators are
156 | permutations that can be used to construct the automorphism group of the
157 | network.
158 |
159 | Parameters
160 | ----------
161 | net : MultilayerNetwork
162 | The multilayer network.
163 | allowed_aspects : list of ints, string
164 | The aspects that can be permuted in this isomorphism type. Nodes are in
165 | aspect 0 by convention. Value "all" will allow all permutations, i.e.,
166 | it gives the (nonpartial) node-layer isomorphism.
167 | include_fixed : bool
168 | If True the elementary layer permutations include elements that remain
169 | unchanged.
170 | backend : string
171 | The program to be used for solving the graph isomorphism of the
172 | auxiliary graphs. Value "auto" will select the best available candidate.
173 | For a list of backends, see documentation of the package.
174 |
175 | Returns
176 | -------
177 | automorphism_generators : list of lists of dicts
178 | Each element in the list is a permutation for a multilayer network. A
179 | permutation of a multilayer network is a list of permutations, one for
180 | each aspect. Permutation for an aspect is a dictionary where each key is
181 | mapped to the value. If include_fixed is not set true, the dictionaries
182 | do not contain elementary layers that would be mapped to themselves.
183 |
184 | References
185 | ----------
186 | "Isomorphisms in Multilayer Networks", M. Kivela & M. A. Porter,
187 | arXiv:1506.00508 [physics.soc-ph]
188 | """
189 |
190 | assert (
191 | len(automorphism_group_generator_backends) > 0
192 | ), "No backends for automorphism generators were imported!"
193 | if backend == "auto":
194 | backend = automorphism_group_generator_backends[0]
195 | else:
196 | assert backend in automorphism_group_generator_backends, (
197 | "Backend "
198 | + str(backend)
199 | + " cannot be used to produce automorphism generators"
200 | )
201 |
202 | auxbuilder = auxbuilder_backends[backend]
203 | aux_graph = auxbuilder(net, allowed_aspects)
204 |
205 | return aux_graph.get_automorphism_generators(include_fixed=include_fixed)
206 |
207 |
208 | def get_isomorphism(
209 | net1, net2, allowed_aspects="all", include_fixed=False, backend="auto"
210 | ):
211 | """
212 | Returns an isomorphism between net1 and net2 if possible.
213 |
214 | Parameters
215 | ----------
216 | net1 : MultilayerNetwork
217 | The first multilayer network.
218 | net2 : MultilayerNetwork
219 | The second multilayer network.
220 | allowed_aspects : list of ints, string
221 | The aspects that can be permuted in this isomorphism type. Nodes are in
222 | aspect 0 by convention. Value "all" will allow all permutations, i.e.,
223 | it gives the (nonpartial) node-layer isomorphism.
224 | include_fixed : bool
225 | If True the elementary layer permutations include elements that remain
226 | unchanged.
227 | backend : string
228 | The program to be used for solving the graph isomorphism of the
229 | auxiliary graphs. Value "auto" will select the best available candidate.
230 | For a list of backends, see documentation of the package.
231 |
232 | Returns
233 | -------
234 |     isomorphism : list of dicts, None
235 | A permutation of the first multilayer network that gives the second
236 | network. A permutation of a multilayer network is a list of
237 | permutations, one for each aspect. Permutation for an aspect is a
238 | dictionary where each key is mapped to the value. If include_fixed is
239 | not set true, the dictionaries do not contain elementary layers that
240 | would be mapped to themselves. If the two networks are not isomorphic,
241 | None is returned instead.
242 |
243 | References
244 | ----------
245 | "Isomorphisms in Multilayer Networks", M. Kivela & M. A. Porter,
246 | arXiv:1506.00508 [physics.soc-ph]
247 | """
248 |
249 | assert (
250 | len(isomorphism_mapping_backends) > 0
251 | ), "No backends for isomorphism mapping were imported!"
252 | if backend == "auto":
253 | backend = isomorphism_mapping_backends[0]
254 | else:
255 | assert backend in isomorphism_mapping_backends, (
256 | "Backend "
257 | + str(backend)
258 | + " cannot be used to produce isomorphism mappings"
259 | )
260 |
261 | auxbuilder = auxbuilder_backends[backend]
262 | aux_graph1 = auxbuilder(net1, allowed_aspects)
263 | aux_graph2 = auxbuilder(net2, allowed_aspects)
264 |
265 | return aux_graph1.get_isomorphism(aux_graph2, include_fixed=include_fixed)
266 |
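267 | 
268 | if __name__ == "__main__":
269 |     # Minimal usage sketch (toy networks, illustrative only; requires at least
270 |     # one backend, e.g. networkx): two relabeled monoplex triangles are isomorphic.
271 |     from pymnet import MultilayerNetwork
272 | 
273 |     tri1 = MultilayerNetwork(aspects=0)
274 |     tri1[1, 2] = tri1[2, 3] = tri1[1, 3] = 1
275 |     tri2 = MultilayerNetwork(aspects=0)
276 |     tri2["a", "b"] = tri2["b", "c"] = tri2["a", "c"] = 1
277 |     print(is_isomorphic(tri1, tri2))  # expected: True
278 |     print(get_isomorphism(tri1, tri2))  # a permutation mapping tri1 onto tri2
279 | 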
--------------------------------------------------------------------------------
/pymnet/isomorphisms/bliss_bind_backend.py:
--------------------------------------------------------------------------------
1 | """Multilayer network isomorphism backend using Bliss through bliss_bind."""
2 |
3 | import bliss_bind
4 |
5 | from . import isomcore
6 |
7 |
8 | class AuxiliaryGraphBuilderBlissBind(isomcore.AuxiliaryGraphBuilder):
9 | has_comparison = True
10 | has_complete_invariant = True
11 | has_automorphism_group_generators = True
12 | has_isomorphism_mapping = True
13 |
14 | def build_init(self):
15 | self.bbgraph = bliss_bind.NamedGraph()
16 |
17 | def add_node(self, name, color):
18 | self.bbgraph.add_node(name, color)
19 |
20 | def add_link(self, node1, node2):
21 | self.bbgraph.add_link(node1, node2)
22 |
23 | def compare_structure(self, other):
24 | return self.bbgraph.get_isomorphism(other.bbgraph) is not None
25 |
26 | def complete_invariant_structure(self):
27 | return self.bbgraph.canonical_graph()
28 |
29 | def finalize(self):
30 | pass
31 |
32 | def _automorphism_generators(self):
33 | return self.bbgraph.find_automorphisms()
34 |
35 | def _isomorphism_mapping(self, other):
36 | return self.bbgraph.get_isomorphism(other.bbgraph)
37 |
--------------------------------------------------------------------------------
/pymnet/isomorphisms/isomcore.py:
--------------------------------------------------------------------------------
1 | class AuxiliaryGraphBuilder(object):
2 | """
3 | This is a generic class for building auxiliary graphs. Backends can
4 | inherit this class to create auxiliary graph builders.
5 | """
6 |
7 | # method can be used to compare networks
8 | has_comparison = False
9 |
10 | # method can be used to create complete invariants
11 | has_complete_invariant = False
12 |
13 | # method can be used to generate automorphism groups
14 | has_automorphism_group_generators = False
15 |
16 | # method can be used to generate an isomorphic mapping
17 | has_isomorphism_mapping = False
18 |
19 | def __init__(self, net, allowed_aspects="all", reduction_type="auto"):
20 | assert not net.directed, "Only undirected networks for now."
21 | self.net = net
22 |
23 | if allowed_aspects == "all":
24 | allowed_aspects = range(net.aspects + 1)
25 | self.asp = sorted(allowed_aspects)
26 | self.nasp = list(
27 | filter(lambda a: a not in allowed_aspects, range(net.aspects + 1))
28 | )
29 |
30 | self.nodemap = {}
31 | self.auxnodemap = {}
32 | self.colormap = {}
33 | self.auxcolormap = {}
34 |
35 | self.build_init()
36 |
37 | if reduction_type == "auto":
38 | # this is the only one implemented so far
39 | self._build_graph_general()
40 | elif reduction_type == "general":
41 | self._build_graph_general()
42 | else:
43 | raise Exception("Unknown reduction type: " + str(reduction_type))
44 |
45 | self.finalize()
46 |
47 | def _get_node_id(self, node):
48 | if node not in self.nodemap:
49 | assert len(self.auxnodemap) == 0
50 | self.nodemap[node] = len(self.nodemap)
51 | return self.nodemap[node]
52 |
53 | def _get_auxnode_id(self, auxnode):
54 | if auxnode not in self.auxnodemap:
55 | self.auxnodemap[auxnode] = len(self.nodemap) + len(self.auxnodemap)
56 | return self.auxnodemap[auxnode]
57 |
58 | def _slice_node_layer_allowed(self, nodelayer):
59 | s = []
60 | for i in self.asp:
61 | s.append(nodelayer[i])
62 | return tuple(s)
63 |
64 | def _slice_node_layer_not_allowed(self, nodelayer):
65 | s = []
66 | for i in self.nasp:
67 | s.append(nodelayer[i])
68 | return tuple(s)
69 |
70 | def _assert_full_order(self, seq):
71 | for i in range(len(seq) - 1):
72 | assert (
73 | seq[i] < seq[i + 1]
74 | ), "Cannot sort the node or elementary layer names!"
75 |
76 | def _build_graph_general(self):
77 | """
78 | This is a reduction that works for all multilayer networks.
79 | """
80 |
81 | # Find a canonical coloring scheme
82 | # Each node has a color that is determined by the non-mapped aspects
83 | nodecolors = set()
84 | for nl in self.net.iter_node_layers():
85 | nodecolors.add(self._slice_node_layer_not_allowed(nl))
86 | nodecolors_sorted = sorted(list(nodecolors))
87 | del nodecolors
88 | self._assert_full_order(nodecolors_sorted)
89 | self.colormap = dict(
90 | ((color, colorid) for colorid, color in enumerate(nodecolors_sorted))
91 | )
92 |
93 | # each aux node has a color that is determined by the aspect
94 | self.auxcolormap = dict(
95 | (
96 | (auxcolor, auxcolorid + len(self.colormap))
97 | for auxcolorid, auxcolor in enumerate(sorted(self.asp))
98 | )
99 | )
100 |
101 | # Add the underlying network
102 | # node-layers:
103 | for nl in self.net.iter_node_layers():
104 | nlid = self._get_node_id(nl)
105 | color = self._slice_node_layer_not_allowed(nl)
106 | colorid = self.colormap[color]
107 | self.add_node(nlid, colorid)
108 |
109 | # edges between node-layers:
110 | for nl1 in self.net.iter_node_layers():
111 | for nl2 in self.net[nl1]:
112 | nl1id = self._get_node_id(nl1)
113 | nl2id = self._get_node_id(nl2)
114 | self.add_link(nl1id, nl2id)
115 |
116 | # Add the auxiliary nodes and edges
117 | # add the aux nodes
118 | for a in self.asp:
119 | for elayer in self.net.slices[a]:
120 | auxid = self._get_auxnode_id((a, elayer))
121 | auxcolorid = self.auxcolormap[a]
122 | self.add_node(auxid, auxcolorid)
123 |
124 | # add the aux edges
125 | for nl in self.net.iter_node_layers():
126 | for a in self.asp:
127 | nlid = self._get_node_id(nl)
128 | auxid = self._get_auxnode_id((a, nl[a]))
129 | self.add_link(nlid, auxid)
130 |
131 | def compare_labels(self, other):
132 | # this should be true if comparable
133 | assert self.auxcolormap == other.auxcolormap
134 | return self.colormap == other.colormap
135 |
136 | def compare(self, other):
137 | # make sure that the two are comparable
138 | assert self.asp == other.asp and self.nasp == other.nasp, (
139 |             "Auxiliary graphs built for different isomorphisms, " "cannot compare."
140 | )
141 |
142 | return self.compare_labels(other) and self.compare_structure(other)
143 |
144 | def complete_invariant_labels(self):
145 |         # the colors for the nodes are determined in a way that
146 |         # gives them a canonical order.
147 | #
148 | # The self.colormap could be used directly for the invariant, but we
149 | # want to make sure that the invariant is valid even after it is
150 | # serialized. To this end, we will sort the dict entries
151 | return tuple(sorted(self.colormap.items()))
152 |
153 | def get_complete_invariant(self):
154 | return (self.complete_invariant_labels(), self.complete_invariant_structure())
155 |
156 | def get_automorphism_generators(self, include_fixed=False):
157 | generators = []
158 | invauxnodemap = dict(((self.auxnodemap[k], k) for k in self.auxnodemap))
159 | for permutation in self._automorphism_generators():
160 | mperms = []
161 | for a in range(self.net.aspects + 1):
162 | mperms.append({})
163 | for node in self.auxnodemap:
164 | nodeid = self.auxnodemap[node]
165 | aspect, elayer = node
166 | new_aspect, new_elayer = invauxnodemap[permutation[nodeid]]
167 | if elayer != new_elayer or include_fixed:
168 | mperms[aspect][elayer] = new_elayer
169 | assert aspect == new_aspect
170 |
171 | # add the aspects that are not permuted
172 | if include_fixed:
173 | for aspect in self.nasp:
174 | for elayer in self.net.slices[aspect]:
175 | mperms[aspect][elayer] = elayer
176 |
177 | generators.append(mperms)
178 | return generators
179 |
180 | def get_isomorphism(self, other, include_fixed=False):
181 | if self.compare(other):
182 | permutation = self._isomorphism_mapping(other)
183 |
184 | invauxnodemap = dict(((other.auxnodemap[k], k)
185 | for k in other.auxnodemap))
186 | mperms = []
187 | for a in range(self.net.aspects + 1):
188 | mperms.append({})
189 | for node in self.auxnodemap:
190 | nodeid = self.auxnodemap[node]
191 | aspect, elayer = node
192 | new_aspect, new_elayer = invauxnodemap[permutation[nodeid]]
193 | if elayer != new_elayer or include_fixed:
194 | mperms[aspect][elayer] = new_elayer
195 | assert aspect == new_aspect
196 |
197 | # add the aspects that are not permuted
198 | if include_fixed:
199 | for aspect in self.nasp:
200 | for elayer in self.net.slices[aspect]:
201 | mperms[aspect][elayer] = elayer
202 |
203 | return mperms
204 |
205 | else:
206 | return None
207 |
208 | # The following functions need to be overridden:
209 | def build_init(self):
210 | raise NotImplementedError()
211 |
212 | def finalize(self):
213 | raise NotImplementedError()
214 |
215 | def add_node(self, name, color):
216 | raise NotImplementedError()
217 |
218 | def add_link(self, node1, node2):
219 | raise NotImplementedError()
220 |
221 | ##
222 |
223 | # The following can be overridden if possible
224 | def compare_structure(self, other):
225 | raise NotImplementedError()
226 |
227 | def complete_invariant_structure(self):
228 | raise NotImplementedError()
229 |
230 | def _automorphism_generators(self):
231 | raise NotImplementedError()
232 |
233 | def _isomorphism_mapping(self, other):
234 | raise NotImplementedError()
235 |
236 | ##
237 |
--------------------------------------------------------------------------------
/pymnet/isomorphisms/nxbackend.py:
--------------------------------------------------------------------------------
1 | import networkx
2 | from networkx.algorithms import isomorphism as nxisomorphism
3 |
4 | from . import isomcore
5 |
6 |
7 | class AuxiliaryGraphBuilderNX(isomcore.AuxiliaryGraphBuilder):
8 | has_comparison = True
9 | has_isomorphism_mapping = True
10 |
11 | def build_init(self):
12 | self.nxgraph = networkx.Graph()
13 |
14 | def finalize(self):
15 | pass
16 |
17 | def add_node(self, name, color):
18 | self.nxgraph.add_node(name, color=color)
19 |
20 | def add_link(self, node1, node2):
21 | self.nxgraph.add_edge(node1, node2)
22 |
23 | def compare_structure(self, other):
24 | def matcher(n1, n2):
25 | return n1["color"] == n2["color"]
26 |
27 | return networkx.is_isomorphic(self.nxgraph, other.nxgraph, node_match=matcher)
28 |
29 | def _isomorphism_mapping(self, other):
30 | def matcher(n1, n2):
31 | return n1["color"] == n2["color"]
32 |
33 | m = nxisomorphism.GraphMatcher(self.nxgraph, other.nxgraph, node_match=matcher)
34 | # this needs to be run so that the mapping is created
35 | is_isomorphic = m.is_isomorphic()
36 | if is_isomorphic:
37 | return m.mapping
38 | else:
39 | return None
40 |
--------------------------------------------------------------------------------
/pymnet/nxwrap.py:
--------------------------------------------------------------------------------
1 | """Module that allows one to use NetworkX functions with pymnet network objects.
2 | """
3 |
4 | import collections
5 | from functools import wraps
6 |
7 | import networkx
8 |
9 | from pymnet.net import MultilayerNetwork
10 |
11 | # Pre 3.10
12 | try:
13 | from collections import MutableMapping
14 | except ImportError:
15 | pass
16 |
17 | # Post 3.10
18 | try:
19 | from collections.abc import MutableMapping
20 | except ImportError:
21 | pass
22 |
23 |
24 | # NetworkX supports tuples as node names, but pymnet doesn't (because in Python there is no way of distinguishing between net[1,2] and net[(1,2)]).
25 | # To make some of the NetworkX functions that use tuples as node names work, we define a new class "ntuple", a tuple subclass that is
26 | # used to store node names.
27 | class ntuple(tuple):
28 | pass
29 |
30 |
31 | class MonoplexGraphWrapper_singleedge(MutableMapping):
32 | def __init__(self, net, node1, node2):
33 | self.net = net
34 | self.node1 = node1
35 | self.node2 = node2
36 |
37 | def __getitem__(self, key):
38 | if key == "weight":
39 | return self.net[self.node1, self.node2]
40 | else:
41 | raise KeyError(key)
42 |
43 | def __iter__(self):
44 | yield "weight"
45 |
46 | def __len__(self):
47 | return 1
48 |
49 | def __setitem__(self, key, val):
50 | if key == "weight":
51 | self.net[self.node1, self.node2] = val
52 |
53 | def __delitem__(self, key):
54 | if key == "weight":
55 | self.net[self.node1, self.node2] = self.net.noEdge
56 |
57 | def copy(self):
58 | return dict(((k, self[k]) for k in self)) # dict(self.iteritems())
59 |
60 |
61 | class MonoplexGraphWrapper_adjlist(MutableMapping):
62 | def __init__(self, net, node):
63 | self.net = net
64 | self.node = node
65 |
66 | def __getitem__(self, key):
67 | if key.__class__ == tuple:
68 | key = ntuple(key)
69 | key in {} # this is to raise TypeError if key is unhashable
70 | if key in self.net[self.node]:
71 | return MonoplexGraphWrapper_singleedge(self.net, self.node, key)
72 | else:
73 | raise KeyError(key)
74 |
75 | def __iter__(self):
76 | for node in self.net[self.node]:
77 | yield node
78 |
79 | def __len__(self):
80 | return self.net[self.node].deg()
81 |
82 | def __setitem__(self, key, val):
83 | if key.__class__ == tuple:
84 | key = ntuple(key)
85 | if isinstance(val, dict) or isinstance(val, MonoplexGraphWrapper_singleedge):
86 | if len(val) > 0:
87 | # self.net[self.node,key]=list(val.itervalues())[0]
88 | self.net[self.node, key] = list((val[key] for key in val))[0]
89 | else:
90 | self.net[self.node, key] = 1
91 | else:
92 | self.net[self.node, key] = val
93 |
94 | def __delitem__(self, key):
95 | self.net[self.node, key] = self.net.noEdge
96 |
97 |
98 | class MonoplexGraphWrapper_adj(MutableMapping):
99 | def __init__(self, net):
100 | self.net = net
101 |
102 | def __getitem__(self, key):
103 | if key.__class__ == tuple:
104 | key = ntuple(key)
105 | key in {} # this is to raise TypeError if key is unhashable
106 | if key in self.net:
107 | return MonoplexGraphWrapper_adjlist(self.net, key)
108 | else:
109 | raise KeyError(key)
110 |
111 | def __iter__(self):
112 | for node in self.net:
113 | yield node
114 |
115 | def __len__(self):
116 | return len(self.net)
117 |
118 | def __setitem__(self, key, val):
119 | if key.__class__ == tuple:
120 | key = ntuple(key)
121 | if isinstance(val, dict):
122 | self.net.add_node(key)
123 | # for key2,val2 in val.iteritems():
124 | for key2 in val:
125 | val2 = val[key2]
126 | MonoplexGraphWrapper_adjlist(self.net, key)[key2] = val2
127 | else:
128 |             raise Exception("Can only set adjacencies to dicts.")
129 |
130 | def __delitem__(self, key):
131 | raise Exception("Cannot remove nodes.")
132 |
133 |
134 | class MonoplexGraphWrapper_node(MutableMapping):
135 | def __init__(self, net):
136 | self.net = net
137 |
138 | def __getitem__(self, key):
139 | if key.__class__ == tuple:
140 | key = ntuple(key)
141 | key in {} # this is to raise TypeError if key is unhashable
142 | if key in self.net:
143 | return {}
144 | else:
145 | raise KeyError(key)
146 |
147 | def __iter__(self):
148 | for node in self.net:
149 | yield node
150 |
151 | def __len__(self):
152 | return len(self.net)
153 |
154 | def __setitem__(self, key, val):
155 | pass
156 |
157 | def __delitem__(self, key):
158 | pass
159 |
160 |
161 | class MonoplexGraphNetworkxView(networkx.Graph):
162 | def __init__(self, net=None, data=None, **kwargs):
163 | super(MonoplexGraphNetworkxView, self).__init__(**kwargs)
164 |
165 | if net == None: # networkx is calling __class__()
166 | net = MultilayerNetwork(aspects=0)
167 |
168 | self.net = net
169 |
170 | # Networkx Graph class has changed since 2.0
171 | if int(networkx.__version__.split(".")[0]) >= 2:
172 | self._adj = MonoplexGraphWrapper_adj(net)
173 | self._node = MonoplexGraphWrapper_node(net)
174 | else:
175 | self.adj = MonoplexGraphWrapper_adj(net)
176 | self.edge = MonoplexGraphWrapper_adj(net)
177 | self.node = MonoplexGraphWrapper_node(net)
178 |
179 | if data is not None:
180 | networkx.convert.to_networkx_graph(data, create_using=self)
181 |
182 | def fresh_copy(self):
183 | fresh_net = MultilayerNetwork(aspects=0)
184 | return MonoplexGraphNetworkxView(fresh_net)
185 |
186 |
187 | class MonoplexGraphNetworkxNew(MonoplexGraphNetworkxView):
188 | def __init__(self, data=None, **kwargs):
189 | net = MultilayerNetwork(aspects=0) # new empty pymnet object
190 | super(MonoplexGraphNetworkxNew, self).__init__(net, data=data, **kwargs)
191 |
192 |
193 | def autowrap(net):
194 | assert net.aspects == 0, "Only monoplex networks."
195 | assert net.directed == False, "Only undirected networks."
196 | return MonoplexGraphNetworkxView(net)
197 |
198 |
199 | def networkxdecorator(f):
200 | @wraps(f)
201 | def newf(*args, **kwargs):
202 |         # First we wrap the pymnet objects given as parameters
203 | newargs = []
204 | for arg in args:
205 | if isinstance(arg, MultilayerNetwork):
206 | newargs.append(autowrap(arg))
207 | else:
208 | newargs.append(arg)
209 | args = tuple(newargs)
210 |
211 | # for key,val in kwargs.iteritems():
212 | for key in kwargs:
213 | val = kwargs[key]
214 | if isinstance(val, MultilayerNetwork):
215 | kwargs[key] = autowrap(val)
216 | if val.__class__ == tuple:
217 | kwargs[key] = ntuple(val)
218 |
219 | # Modify the NetworkX library such that new graphs are wrapped pymnet objects
220 | networkx_Graph_original = networkx.Graph
221 | networkx.Graph = MonoplexGraphNetworkxView
222 |
223 | # Run the actual function
224 | rval = f(*args, **kwargs)
225 |
226 | # Revert the modifications to NetworkX
227 | networkx.Graph = networkx_Graph_original
228 |
229 | # Unpack the pymnet objects from the results
230 | if isinstance(rval, MonoplexGraphNetworkxView):
231 | rval = rval.net
232 |
233 | return rval
234 |
235 | return newf
236 |
237 |
238 | # We need to modify the networkx module such that new graphs are wrapped pymnet objects
239 | # import imp
240 | # networkx_modified=imp.load_module('networkx_modified', *imp.find_module('networkx'))
241 | # import networkx as networkx_modified
242 | # networkx_modified.Graph=MonoplexGraphNetworkxNew
243 |
244 | for name, obj in networkx.__dict__.items():
245 | if hasattr(obj, "__call__"):
246 | exec(name + "=networkxdecorator(obj)")
247 |
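248 | 
249 | if __name__ == "__main__":
250 |     # Minimal usage sketch (illustrative only): the wrapped NetworkX callables
251 |     # defined by the loop above return native pymnet objects, e.g. the karate
52 |     # club graph used in the nx tutorial notebook.
253 |     demo_net = karate_club_graph()  # noqa: F821 -- name created by the loop above
254 |     print(type(demo_net), len(demo_net))
255 | 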
--------------------------------------------------------------------------------
/pymnet/sampling/__init__.py:
--------------------------------------------------------------------------------
1 | """Package for sampling multilayer subgraphs.
2 | """
3 |
4 | from . import creators, dumb, esu, reqs
5 |
--------------------------------------------------------------------------------
/pymnet/sampling/creators.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import random
4 |
5 | import pymnet
6 |
7 |
8 | def er_multilayer_partially_interconnected(nodes_by_layer, p, seed=None):
9 | """Create a one-aspect E-R multilayer network with given nodesets for each
10 | layer and edge probability p.
11 |
12 | Parameters
13 | ----------
14 | nodes_by_layer : sequence/iterator of sequences/iterators
15 | A sequence where each element is a sequence of nodes on a layer.
16 | p : float 0 <= p <= 1
17 | The probability that an edge exists between a node-layer pair.
18 | seed : int, str, bytes or bytearray
19 | Seed for network generation.
20 |
21 | Returns
22 | -------
23 | The generated network.
24 | """
25 | if seed == None:
26 | random.seed()
27 | else:
28 | random.seed(seed)
29 | network = pymnet.MultilayerNetwork(aspects=1, fullyInterconnected=False)
30 | for layer, nodelist in enumerate(nodes_by_layer):
31 | network.add_layer(layer)
32 | for node in nodelist:
33 | network.add_node(node=node, layer=layer)
34 | numberings = dict()
35 | for index, nodelayer in enumerate(network.iter_node_layers()):
36 | numberings[nodelayer] = index
37 | for nodelayer1 in numberings:
38 | for nodelayer2 in numberings:
39 | if numberings[nodelayer1] > numberings[nodelayer2] and random.random() < p:
40 | network[nodelayer1][nodelayer2] = 1
41 | return network
42 |
43 |
44 | def random_nodelists(poolsize, nodes_per_layer, layers, seed=None):
45 | """Draw a random sample of nodes without replacement for each layer
46 | from a pool of specified size.
47 |
48 | Parameters
49 | ----------
50 | poolsize : int
51 | Size of the pool to draw nodes from.
52 | nodes_per_layer : int
53 | How many nodes are on each layer.
54 | layers : int
55 | How many layers should nodes be drawn for.
56 | seed : int, str, bytes or bytearray
57 | Seed for random drawing.
58 |
59 | Yields
60 | ------
61 | A list of a sample of nodes_per_layer nodes without replacement, times layers.
62 | """
63 | if seed == None:
64 | random.seed()
65 | else:
66 | random.seed(seed)
67 | for _ in range(layers):
68 |         yield random.sample(range(poolsize), nodes_per_layer)
69 |
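70 | 
71 | if __name__ == "__main__":
72 |     # Minimal usage sketch (illustrative parameters): three layers with five
73 |     # nodes each, drawn from a pool of ten node labels, and edge probability 0.2.
74 |     nodelists = list(random_nodelists(10, 5, 3, seed=42))
75 |     net = er_multilayer_partially_interconnected(nodelists, 0.2, seed=42)
76 |     print(len(list(net.iter_node_layers())), "node-layers")
77 |     print(len(net.edges), "edges")
78 | 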
--------------------------------------------------------------------------------
/pymnet/sampling/dumb.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import itertools
4 |
5 | from .reqs import (
6 | default_calculate_required_lengths,
7 | default_check_reqs,
8 | relaxed_check_reqs,
9 | )
10 |
11 |
12 | def dumb_enumeration(
13 | network,
14 | results,
15 | sizes=None,
16 | intersections=None,
17 | nnodes=None,
18 | nlayers=None,
19 | intersection_type="strict",
20 | custom_check_function=None,
21 | ):
22 | """Enumerates all induced subgraphs of the form [nodelist][layerlist] by
23 | going through all possible [nodelist][layerlist] combinations and checking
24 | whether they fulfill the requirements. This is a naive algorithm and is not
25 | intended for use in large networks.
26 |
27 | Accepts the same parameters as sample_multilayer_subgraphs_esu, and has the same functionalities
28 | (except when using a custom_check_function, where induced subgraphs passed to
29 | the check function are different between this and sample_multilayer_subgraphs_esu, which needs to
30 | be handled by the user - see below).
31 |
32 | A difference between this and sample_multilayer_subgraphs_esu is that in this function, no
33 | guarantees other than nnodes and nlayers being correct are made about the
34 | induced subgraphs passed to the validity checking function (unlike in sample_multilayer_subgraphs_esu,
35 | where they are guaranteed to have at least some path in them and have no empty nodes or
36 | layers.) That is, the induced subgraphs are probably not connected, they might contain
37 | empty layers or nodes, etc. If you use a custom_check_function, take this into account.
38 | If using one of the built-in functionalities which use default_check_reqs or
39 | relaxed_check_reqs, this has been taken into account and you don't have to worry about it.
40 | """
41 | check_function = None
42 | assert (sizes != None and intersections != None) or (
43 | nnodes != None and nlayers != None
44 | ), "Please provide either sizes and intersections or nnodes and nlayers"
45 | if custom_check_function != None:
46 | assert (
47 | nnodes != None and nlayers != None
48 | ), "Please provide nnodes and nlayers when using a custom check function"
49 | req_nodelist_len = nnodes
50 | req_layerlist_len = nlayers
51 | check_function = custom_check_function
52 | if sizes != None and intersections != None and check_function == None:
53 | if isinstance(intersections, list):
54 | if None in intersections:
55 | assert (
56 | nnodes != None
57 | ), "Please provide nnodes if including Nones in intersections"
58 | req_nodelist_len = nnodes
59 | req_layerlist_len = len(sizes)
60 | else:
61 | if intersection_type == "strict":
62 | assert (
63 | nnodes == None and nlayers == None
64 | ), "You cannot provide both sizes and intersections and nnodes and nlayers, if intersections is a list"
65 | req_nodelist_len, req_layerlist_len = (
66 | default_calculate_required_lengths(sizes, intersections)
67 | )
68 | elif intersection_type == "less_or_equal":
69 | assert (
70 | nnodes != None and nlayers == None
71 | ), "please provide nnodes (and not nlayers) if using less_or_equal intersection type"
72 | req_nodelist_len = nnodes
73 | req_layerlist_len = len(sizes)
74 | check_function = lambda x, y, z: default_check_reqs(
75 | x,
76 | y,
77 | z,
78 | sizes,
79 | intersections,
80 | req_nodelist_len,
81 | req_layerlist_len,
82 | intersection_type,
83 | )
84 | elif isinstance(intersections, int):
85 | assert (
86 | intersections >= 0
87 | ), "Please provide nonnegative common intersection size"
88 | assert (
89 | nnodes != None and nlayers == None
90 | ), "When requiring only common intersection size, please provide nnodes (and not nlayers)"
91 | req_nodelist_len = nnodes
92 | req_layerlist_len = len(sizes)
93 | intersections_as_list = [None] * (2 ** len(sizes) - len(sizes) - 1)
94 | intersections_as_list[-1] = intersections
95 | check_function = lambda x, y, z: default_check_reqs(
96 | x,
97 | y,
98 | z,
99 | sizes,
100 | intersections_as_list,
101 | req_nodelist_len,
102 | req_layerlist_len,
103 | intersection_type,
104 | )
105 | if nnodes != None and nlayers != None and check_function == None:
106 | assert (
107 | sizes == None and intersections == None
108 |         ), "You cannot provide both sizes and intersections and nnodes and nlayers"
109 | req_nodelist_len = nnodes
110 | req_layerlist_len = nlayers
111 | assert isinstance(req_nodelist_len, int) and isinstance(
112 | req_layerlist_len, int
113 | ), "Non-integer nnodes or nlayers"
114 | assert (
115 | req_nodelist_len > 0 and req_layerlist_len > 0
116 | ), "Nonpositive nnodes or nlayers"
117 | check_function = relaxed_check_reqs
118 | assert (
119 | check_function != None
120 | ), "Please specify a valid combination of parameters to determine method of subgraph validity checking"
121 |
122 | for nodelist in list(
123 | itertools.combinations(list(network.iter_nodes()), req_nodelist_len)
124 | ):
125 | for layerlist in list(
126 | itertools.combinations(list(network.iter_layers()), req_layerlist_len)
127 | ):
128 | if check_function(network, nodelist, layerlist):
129 | if isinstance(results, list):
130 | results.append((list(nodelist), list(layerlist)))
131 | elif callable(results):
132 | results((list(nodelist), list(layerlist)))
133 | else:
134 | raise TypeError(
135 | "Please provide results container as list or callable"
136 | )
137 |
--------------------------------------------------------------------------------
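
A minimal usage sketch for dumb_enumeration above (an illustration, not part of the module; it assumes the module path pymnet.sampling.dumb, uses arbitrary node and layer names, and passes a plain list as the results container):

    from pymnet import net
    from pymnet.sampling.dumb import dumb_enumeration

    # Small two-layer multiplex network to enumerate over.
    m = net.MultiplexNetwork(couplings="categorical", fullyInterconnected=True)
    m[1, 2, "a", "a"] = 1
    m[2, 3, "b", "b"] = 1

    found = []
    dumb_enumeration(m, found, nnodes=2, nlayers=1)
    # found holds the (nodelist, layerlist) pairs that passed relaxed_check_reqs.
    print(found)
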
/pymnet/tests/__init__.py:
--------------------------------------------------------------------------------
1 | from .cc_test import test_cc
2 | from .diagnostics_test import test_diagnostics
3 | from .graphlets_test import test_graphlets
4 | from .io_test import test_io
5 | from .isomorphisms_test import test_isomorphisms
6 | from .models_test import test_models
7 | from .net_test import test_net
8 | from .sampling_test import test_sampling
9 | from .transforms_test import test_transforms
10 | from .visuals_test import test_visuals
11 |
12 | try:
13 | import networkx
14 |
15 | from .nxwrap_test import test_nxwrap
16 |
17 | nximported = True
18 | except ImportError:
19 | nximported = False
20 |
21 |
22 | def test_all():
23 | codes = []
24 | codes.append(test_net())
25 | codes.append(test_cc())
26 | codes.append(test_diagnostics())
27 | codes.append(test_io())
28 | codes.append(test_models())
29 | codes.append(test_transforms())
30 | codes.append(test_visuals())
31 | codes.append(test_isomorphisms())
32 | codes.append(test_sampling())
33 | codes.append(test_graphlets())
34 | if nximported:
35 | codes.append(test_nxwrap())
36 | return all(codes)
37 |
--------------------------------------------------------------------------------
/pymnet/tests/__main__.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from . import test_all
4 |
5 | sys.exit(not test_all())
6 |
--------------------------------------------------------------------------------
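
Because __main__.py above exits with a non-zero status whenever test_all() reports a failure, the whole suite can be run as a module; a usage note, assuming pymnet is importable in the current environment:

    python -m pymnet.tests
    echo $?   # 0 if every suite passed, 1 otherwise
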
/pymnet/tests/diagnostics_test.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import unittest
3 |
4 | from pymnet import diagnostics, models, net, nx
5 |
6 |
7 | class TestDiagnostics(unittest.TestCase):
8 | def setUp(self):
9 | pass
10 |
11 | def create_chain(self, net):
12 | net[1, 2] = 1
13 | net[2, 3] = 1
14 | net[3, 4] = 1
15 |
16 | def create_chain_mplex(self, net):
17 | net.add_layer(1)
18 | net.add_layer(2)
19 | net.add_layer(3)
20 | self.create_chain(net.A[1])
21 | self.create_chain(net.A[3])
22 |
23 | def test_monoplex_density_degs(self, net, dnet):
24 | self.create_chain(net)
25 | self.create_chain(dnet)
26 |
27 | self.assertEqual(diagnostics.density(net), 3 / float((4 * 3) / 2))
28 | # self.assertEqual(diagnostics.density(dnet),3/float(4*3))
29 |
30 | self.assertEqual(diagnostics.degs(net), {1: 2, 2: 2})
31 | self.assertEqual(
32 | diagnostics.degs(net, degstype="nodes"), {1: 1, 4: 1, 2: 2, 3: 2}
33 | )
34 | # self.assertEqual(diagnostics.degs(dnet),{1:2,2:2})
35 |
36 | def test_monoplex_density_degs_mnet(self):
37 | n = net.MultilayerNetwork(aspects=0, directed=False)
38 | dn = net.MultilayerNetwork(aspects=0, directed=True)
39 | self.test_monoplex_density_degs(n, dn)
40 |
41 | def test_multiplex_density_degs(self, net):
42 | self.create_chain_mplex(net)
43 |
44 | self.assertEqual(diagnostics.multiplex_density(net), {1: 0.5, 2: 0, 3: 0.5})
45 | if net.fullyInterconnected:
46 | self.assertEqual(
47 | diagnostics.multiplex_degs(net),
48 | {1: {1: 2, 2: 2}, 2: {0: 4}, 3: {1: 2, 2: 2}},
49 | )
50 | else:
51 | self.assertEqual(
52 | diagnostics.multiplex_degs(net),
53 | {1: {1: 2, 2: 2}, 2: {}, 3: {1: 2, 2: 2}},
54 | )
55 |
56 | def test_multiplex_density_degs_mnet(self):
57 | n = net.MultiplexNetwork(couplings="none", directed=False)
58 | self.test_multiplex_density_degs(n)
59 | n = net.MultiplexNetwork(couplings="categorical", directed=False)
60 | self.test_multiplex_density_degs(n)
61 | n = net.MultiplexNetwork(
62 | couplings="none", directed=False, fullyInterconnected=False
63 | )
64 | self.test_multiplex_density_degs(n)
65 |
66 | def test_multilayer_degs_multilayernet(self):
67 | n = net.MultilayerNetwork(aspects=1, fullyInterconnected=False)
68 | n[1, 2, 3, 4] = 1
69 | n[1, 2, 3, 3] = 1
70 | self.assertEqual(diagnostics.degs(n, degstype="distribution"), {1: 2, 2: 1})
71 | self.assertEqual(
72 | diagnostics.degs(n, degstype="nodes"), {(1, 3): 2, (2, 3): 1, (2, 4): 1}
73 | )
74 |
75 | def test_multilayer_degs_mplexnet(self):
76 | n = net.MultiplexNetwork(couplings="none", fullyInterconnected=True)
77 | self.create_chain_mplex(n)
78 | self.assertEqual(
79 | diagnostics.degs(n, degstype="nodes"),
80 | {
81 | (1, 1): 1,
82 | (2, 1): 2,
83 | (3, 1): 2,
84 | (4, 1): 1,
85 | (1, 3): 1,
86 | (2, 3): 2,
87 | (3, 3): 2,
88 | (4, 3): 1,
89 | (1, 2): 0,
90 | (2, 2): 0,
91 | (3, 2): 0,
92 | (4, 2): 0,
93 | },
94 | )
95 |
96 | n = net.MultiplexNetwork(couplings="none", fullyInterconnected=False)
97 | self.create_chain_mplex(n)
98 | self.assertEqual(
99 | diagnostics.degs(n, degstype="nodes"),
100 | {
101 | (1, 1): 1,
102 | (2, 1): 2,
103 | (3, 1): 2,
104 | (4, 1): 1,
105 | (1, 3): 1,
106 | (2, 3): 2,
107 | (3, 3): 2,
108 | (4, 3): 1,
109 | },
110 | )
111 |
112 | n = net.MultiplexNetwork(couplings="categorical", fullyInterconnected=True)
113 | self.create_chain_mplex(n)
114 | self.assertEqual(
115 | diagnostics.degs(n, degstype="nodes"),
116 | {
117 | (1, 1): 3,
118 | (2, 1): 4,
119 | (3, 1): 4,
120 | (4, 1): 3,
121 | (1, 3): 3,
122 | (2, 3): 4,
123 | (3, 3): 4,
124 | (4, 3): 3,
125 | (1, 2): 2,
126 | (2, 2): 2,
127 | (3, 2): 2,
128 | (4, 2): 2,
129 | },
130 | )
131 |
132 | n = net.MultiplexNetwork(couplings="categorical", fullyInterconnected=False)
133 | self.create_chain_mplex(n)
134 | self.assertEqual(
135 | diagnostics.degs(n, degstype="nodes"),
136 | {
137 | (1, 1): 2,
138 | (2, 1): 3,
139 | (3, 1): 3,
140 | (4, 1): 2,
141 | (1, 3): 2,
142 | (2, 3): 3,
143 | (3, 3): 3,
144 | (4, 3): 2,
145 | },
146 | )
147 |
148 | n = net.MultiplexNetwork(couplings="ordinal", fullyInterconnected=True)
149 | self.create_chain_mplex(n)
150 | self.assertEqual(
151 | diagnostics.degs(n, degstype="nodes"),
152 | {
153 | (1, 1): 2,
154 | (2, 1): 3,
155 | (3, 1): 3,
156 | (4, 1): 2,
157 | (1, 3): 2,
158 | (2, 3): 3,
159 | (3, 3): 3,
160 | (4, 3): 2,
161 | (1, 2): 2,
162 | (2, 2): 2,
163 | (3, 2): 2,
164 | (4, 2): 2,
165 | },
166 | )
167 |
168 | n = net.MultiplexNetwork(couplings="ordinal", fullyInterconnected=False)
169 | self.create_chain_mplex(n)
170 | self.assertEqual(
171 | diagnostics.degs(n, degstype="nodes"),
172 | {
173 | (1, 1): 1,
174 | (2, 1): 2,
175 | (3, 1): 2,
176 | (4, 1): 1,
177 | (1, 3): 1,
178 | (2, 3): 2,
179 | (3, 3): 2,
180 | (4, 3): 1,
181 | },
182 | )
183 |
184 | def test_dijkstra_monoplex(self):
185 | n = net.MultilayerNetwork(aspects=0)
186 | n[1, 2] = 1
187 | n[1, 3] = 1
188 | n[2, 3] = 1
189 | n[2, 4] = 1
190 | n[3, 4] = 1
191 | n[3, 5] = 1
192 |
193 | d, f = diagnostics.dijkstra(n, [1])
194 |
195 | ftrue = net.MultilayerNetwork(
196 | aspects=0, fullyInterconnected=False, directed=True, noEdge=-1
197 | )
198 | ftrue[1, 1] = 0
199 | ftrue[1, 2] = 1
200 | ftrue[1, 3] = 1
201 | ftrue[2, 4] = 1
202 | ftrue[3, 4] = 1
203 | ftrue[3, 5] = 1
204 | self.assertEqual(d, {1: 0, 2: 1, 3: 1, 4: 2, 5: 2})
205 | self.assertEqual(f, ftrue)
206 |
207 | d, f = diagnostics.dijkstra(n, [1, 2])
208 | ftrue = net.MultilayerNetwork(
209 | aspects=0, fullyInterconnected=False, directed=True, noEdge=-1
210 | )
211 | ftrue[1, 1] = 0
212 | ftrue[2, 2] = 0
213 | ftrue[1, 3] = 1
214 | ftrue[2, 3] = 1
215 | ftrue[2, 4] = 1
216 | ftrue[3, 5] = 1
217 | self.assertEqual(d, {1: 0, 2: 0, 3: 1, 4: 1, 5: 2})
218 | self.assertEqual(f, ftrue)
219 |
220 | def test_dijkstra_monoplex_compare(self):
221 | n = models.er(100, 0.1)
222 | d, f = diagnostics.dijkstra(n, [1])
223 |
224 | self.assertEqual(d, nx.shortest_path_length(n, 1))
225 |
226 | def test_dijkstra_multilayer_two_aspect(self):
227 | n = net.MultilayerNetwork(aspects=2, directed=True)
228 | n[1, "a", 1][2, "a", 2] = 1
229 | n[2, "a", 2][3, "a", 3] = 1
230 | n[3, "a", 3][4, "a", 4] = 1
231 | n[1, "b", 1.5][3, "b", 2.5] = 1
232 |
233 | n[1, "a", 1][1, "b", 1.25] = 0.25
234 | n[1, "b", 1.5][1, "a", 1.75] = 0.25
235 | n[3, "b", 2.5][3, "a", 2.75] = 0.25
236 |
237 | n[1, "a", 1.75][1, "a", 2] = 0.25
238 | n[1, "b", 1.25][1, "b", 1.5] = 0.25
239 | n[3, "a", 2.75][3, "a", 3] = 0.25
240 |
241 | d, f = diagnostics.dijkstra(n, [(1, "a", 1)])
242 |         # each reached node-layer should lie at distance (second-aspect coordinate - 1)
243 |         for nl in d:
244 |             dist = d[nl]
245 |             self.assertEqual(dist, nl[2] - 1)
246 |
247 | d, f = diagnostics.dijkstra_mlayer_prune(n, [(1, None, None)], aaspects=[1, 2])
248 | self.assertEqual(d[(1,)], 0)
249 | self.assertEqual(d[(2,)], 1)
250 | self.assertEqual(d[(3,)], 1)
251 | self.assertEqual(d[(4,)], 2.5)
252 | ftrue = net.MultilayerNetwork(
253 | aspects=2, fullyInterconnected=False, directed=True, noEdge=-1
254 | )
255 | ftrue[1, "a", 1][1, "a", 1] = 0
256 | ftrue[1, "b", 1.5][1, "b", 1.5] = 0
257 | ftrue[1, "b", 1.25][1, "b", 1.25] = 0
258 | ftrue[1, "a", 1.75][1, "a", 1.75]
259 | ftrue[1, "a", 2][1, "a", 2]
260 |
261 | # print d,list(f.edges)
262 |
263 |
264 | def test_diagnostics():
265 | suite = unittest.TestSuite()
266 | suite.addTest(TestDiagnostics("test_monoplex_density_degs_mnet"))
267 | suite.addTest(TestDiagnostics("test_multiplex_density_degs_mnet"))
268 | suite.addTest(TestDiagnostics("test_multilayer_degs_multilayernet"))
269 | suite.addTest(TestDiagnostics("test_multilayer_degs_mplexnet"))
270 | suite.addTest(TestDiagnostics("test_dijkstra_monoplex"))
271 | suite.addTest(TestDiagnostics("test_dijkstra_monoplex_compare"))
272 | suite.addTest(TestDiagnostics("test_dijkstra_multilayer_two_aspect"))
273 |
274 | return unittest.TextTestRunner().run(suite).wasSuccessful()
275 |
276 |
277 | if __name__ == "__main__":
278 | sys.exit(not test_diagnostics())
279 |
--------------------------------------------------------------------------------
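
A minimal sketch of the diagnostics functions exercised above, built on the same 4-node chain as create_chain; the values in the comments mirror the assertions in test_monoplex_density_degs:

    from pymnet import diagnostics, net

    n = net.MultilayerNetwork(aspects=0)
    n[1, 2] = 1
    n[2, 3] = 1
    n[3, 4] = 1

    print(diagnostics.density(n))                 # 3 edges / 6 possible pairs = 0.5
    print(diagnostics.degs(n))                    # degree distribution: {1: 2, 2: 2}
    print(diagnostics.degs(n, degstype="nodes"))  # per-node degrees: {1: 1, 2: 2, 3: 2, 4: 1}
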
/pymnet/tests/graphlets_test.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import unittest
3 |
4 | import pymnet.graphlets as graphlets
5 | from pymnet import isomorphisms, net
6 |
7 |
8 | class TestGraphlets(unittest.TestCase):
9 | def setUp(self):
10 | self.M_ref_1 = net.MultiplexNetwork(
11 | couplings="categorical", fullyInterconnected=True
12 | )
13 | self.M_ref_1[42, 99, "x", "x"] = 1
14 | self.M_ref_1.add_layer("y")
15 | self.M_ref_2 = net.MultiplexNetwork(
16 | couplings="categorical", fullyInterconnected=True
17 | )
18 | self.M_ref_2[42, 99, "x", "x"] = 1
19 | self.M_ref_2[42, 99, "y", "y"] = 1
20 |
21 | ### tests for graphlets file
22 |
23 | def test_graphlets(self):
24 | nets, invs = graphlets.graphlets(
25 | n=3,
26 | layers=["a", "b", "c"],
27 | n_l=2,
28 | couplings="categorical",
29 | allowed_aspects="all",
30 | )
31 | assert len(nets[2]) == 2
32 | assert len(nets[3]) == 10
33 | # check that the two 2-node-2-layer graphlets match the reference nets
34 | match_ref_1 = False
35 | match_ref_2 = False
36 | for net in nets[2]:
37 | if isomorphisms.is_isomorphic(net, self.M_ref_1, allowed_aspects="all"):
38 | match_ref_1 = True
39 | elif isomorphisms.is_isomorphic(net, self.M_ref_2, allowed_aspects="all"):
40 | match_ref_2 = True
41 | assert match_ref_1
42 | assert match_ref_2
43 |
44 | def test_automorphism_orbits(self):
45 | nets, invs = graphlets.graphlets(
46 | n=3,
47 | layers=["a", "b", "c"],
48 | n_l=2,
49 | couplings="categorical",
50 | allowed_aspects="all",
51 | )
52 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
53 | target_auts = {
54 | (3, 0, 2): 1,
55 | (3, 9, 0): 0,
56 | (3, 4, 0): 0,
57 | (3, 3, 1): 0,
58 | (3, 4, 1): 1,
59 | (3, 3, 0): 0,
60 | (3, 4, 2): 1,
61 | (3, 3, 2): 0,
62 | (3, 8, 0): 0,
63 | (3, 7, 1): 0,
64 | (3, 8, 1): 1,
65 | (3, 2, 2): 2,
66 | (3, 7, 0): 0,
67 | (2, 0, 1): 0,
68 | (3, 8, 2): 1,
69 | (3, 1, 2): 1,
70 | (3, 2, 0): 0,
71 | (3, 7, 2): 2,
72 | (3, 1, 1): 1,
73 | (3, 2, 1): 1,
74 | (3, 1, 0): 0,
75 | (3, 6, 2): 1,
76 | (3, 5, 2): 1,
77 | (3, 6, 0): 0,
78 | (3, 5, 1): 1,
79 | (3, 6, 1): 1,
80 | (2, 1, 0): 0,
81 | (3, 5, 0): 0,
82 | (2, 1, 1): 0,
83 | (2, 0, 0): 0,
84 | (3, 0, 0): 0,
85 | (3, 9, 2): 0,
86 | (3, 0, 1): 1,
87 | (3, 9, 1): 0,
88 | }
89 | assert auts == target_auts
90 |
91 | def test_list_orbits(self):
92 | nets, invs = graphlets.graphlets(
93 | n=3,
94 | layers=["a", "b", "c"],
95 | n_l=2,
96 | couplings="categorical",
97 | allowed_aspects="all",
98 | )
99 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
100 | orbit_lists = graphlets.list_orbits(auts)
101 | target_orbit_lists = {
102 | 2: [(2, 0, 0), (2, 1, 0)],
103 | 3: [
104 | (3, 0, 1),
105 | (3, 9, 0),
106 | (3, 4, 0),
107 | (3, 3, 0),
108 | (3, 4, 1),
109 | (3, 8, 0),
110 | (3, 7, 0),
111 | (3, 8, 1),
112 | (3, 2, 2),
113 | (3, 1, 1),
114 | (3, 2, 0),
115 | (3, 7, 2),
116 | (3, 2, 1),
117 | (3, 1, 0),
118 | (3, 6, 1),
119 | (3, 5, 1),
120 | (3, 6, 0),
121 | (3, 5, 0),
122 | (3, 0, 0),
123 | ],
124 | }
125 | assert set(orbit_lists[2]) == set(target_orbit_lists[2])
126 | assert set(orbit_lists[3]) == set(target_orbit_lists[3])
127 |
128 | def test_orbit_equations(self):
129 | nets, invs = graphlets.graphlets(
130 | n=3,
131 | layers=["a", "b", "c"],
132 | n_l=2,
133 | couplings="categorical",
134 | allowed_aspects="all",
135 | )
136 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
137 | orbit_eqs = graphlets.orbit_equations(
138 | n=3, nets=nets, auts=auts, invs=invs, allowed_aspects="all"
139 | )
140 | target_orbit_eqs = {
141 | (((2, 1, 0), 1), ((2, 0, 0), 1)): {
142 | (3, 2, 0): 1,
143 | (3, 5, 1): 1,
144 | (3, 6, 1): 1,
145 | (3, 7, 0): 1,
146 | },
147 | ((2, 0, 0), 2): {
148 | (3, 5, 0): 1,
149 | (3, 1, 0): 1,
150 | (3, 4, 0): 1,
151 | (3, 0, 0): 1,
152 | (3, 4, 1): 1,
153 | (3, 3, 0): 1,
154 | (3, 6, 0): 1,
155 | },
156 | ((2, 1, 0), 2): {(3, 8, 0): 1, (3, 7, 2): 1, (3, 9, 0): 1},
157 | }
158 | assert orbit_eqs == target_orbit_eqs
159 | # test with 4 nodes
160 | nets, invs = graphlets.graphlets(
161 | n=4,
162 | layers=["a", "b", "c"],
163 | n_l=2,
164 | couplings="categorical",
165 | allowed_aspects="all",
166 | )
167 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
168 | orbit_eqs = graphlets.orbit_equations(
169 | n=4, nets=nets, auts=auts, invs=invs, allowed_aspects="all"
170 | )
171 |
172 | ### tests for independent_equations file
173 |
174 | def test_independent_equations(self):
175 | inds, eqs = graphlets.independent_equations(
176 | n=3, n_l=2, layers=["a", "b", "c"], allowed_aspects="all"
177 | )
178 | target_inds = set(
179 | [((2, 0, 0), 2), (((2, 1, 0), 1), ((2, 0, 0), 1)), ((2, 1, 0), 2)]
180 | )
181 | target_eqs = {
182 | (((2, 1, 0), 1), ((2, 0, 0), 1)): {
183 | (3, 2, 0): 1,
184 | (3, 5, 1): 1,
185 | (3, 6, 1): 1,
186 | (3, 7, 0): 1,
187 | },
188 | ((2, 0, 0), 2): {
189 | (3, 5, 0): 1,
190 | (3, 1, 0): 1,
191 | (3, 4, 0): 1,
192 | (3, 0, 0): 1,
193 | (3, 4, 1): 1,
194 | (3, 3, 0): 1,
195 | (3, 6, 0): 1,
196 | },
197 | ((2, 1, 0), 2): {(3, 8, 0): 1, (3, 7, 2): 1, (3, 9, 0): 1},
198 | }
199 | assert inds == target_inds
200 | assert eqs == target_eqs
201 |
202 | def test_redundant_orbits(self):
203 | nets, invs = graphlets.graphlets(
204 | n=3,
205 | layers=["a", "b", "c"],
206 | n_l=2,
207 | couplings="categorical",
208 | allowed_aspects="all",
209 | )
210 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
211 | inds, eqs = graphlets.independent_equations(
212 | n=3, n_l=2, layers=["a", "b", "c"], allowed_aspects="all"
213 | )
214 | orbit_is = graphlets.orbit_numbers(n=3, nets=nets, auts=auts)
215 | orbit_list = graphlets.ordered_orbit_list(orbit_is)
216 | reds = graphlets.redundant_orbits(inds, eqs, orbit_is, orbit_list)
217 | target_reds = ["(3, 7, 0)", "(3, 9, 0)", "(3, 6, 0)"]
218 | assert set(reds) == set(target_reds)
219 |
220 | ### tests for graphlet_measures file
221 |
222 | def test_orbit_counts_all(self):
223 | M = net.MultiplexNetwork(couplings="categorical", fullyInterconnected=True)
224 | M[42, 99, "x", "x"] = 1
225 | M[42, 99, "z", "z"] = 1
226 | M[99, 101, "z", "z"] = 1
227 | M[42, 101, "z", "z"] = 1
228 | nets, invs = graphlets.graphlets(
229 | n=3,
230 | layers=["a", "b", "c"],
231 | n_l=2,
232 | couplings="categorical",
233 | allowed_aspects="all",
234 | )
235 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
236 | orbit_is = graphlets.orbit_numbers(n=3, nets=nets, auts=auts)
237 | orbit_list = graphlets.ordered_orbit_list(orbit_is)
238 | orbits = graphlets.orbit_counts_all(
239 | net=M,
240 | n=3,
241 | nets=nets,
242 | invs=invs,
243 | auts=auts,
244 | orbit_list=orbit_list,
245 | allowed_aspects="all",
246 | )
247 | target_orbits = {
248 | (101, (2, 1, 0)): 0,
249 | (42, (2, 1, 0)): 1,
250 | (42, (3, 8, 1)): 0,
251 | (42, (3, 2, 1)): 0,
252 | (101, (3, 2, 1)): 0,
253 | (42, (3, 3, 0)): 0,
254 | (99, (3, 1, 0)): 0,
255 | (101, (3, 3, 0)): 0,
256 | (42, (3, 7, 0)): 0,
257 | (42, (3, 4, 0)): 0,
258 | (42, (3, 2, 0)): 0,
259 | (99, (3, 4, 1)): 0,
260 | (99, (3, 1, 1)): 0,
261 | (99, (3, 9, 0)): 0,
262 | (99, (3, 8, 1)): 0,
263 | (42, (3, 1, 1)): 0,
264 | (99, (3, 2, 0)): 0,
265 | (99, (3, 8, 0)): 0,
266 | (101, (3, 5, 0)): 1,
267 | (101, (3, 2, 2)): 0,
268 | (99, (3, 0, 0)): 0,
269 | (42, (3, 0, 1)): 0,
270 | (42, (3, 2, 2)): 0,
271 | (42, (3, 4, 1)): 0,
272 | (99, (3, 6, 0)): 0,
273 | (101, (3, 4, 1)): 0,
274 | (99, (3, 2, 1)): 0,
275 | (101, (3, 6, 1)): 0,
276 | (42, (3, 9, 0)): 0,
277 | (42, (3, 6, 0)): 0,
278 | (101, (3, 9, 0)): 0,
279 | (99, (3, 0, 1)): 0,
280 | (99, (3, 7, 2)): 0,
281 | (99, (3, 5, 1)): 1,
282 | (101, (3, 8, 1)): 0,
283 | (101, (3, 8, 0)): 0,
284 | (101, (3, 4, 0)): 0,
285 | (101, (2, 0, 0)): 2,
286 | (101, (3, 2, 0)): 0,
287 | (99, (3, 5, 0)): 0,
288 | (99, (3, 7, 0)): 0,
289 | (42, (3, 6, 1)): 0,
290 | (101, (3, 6, 0)): 0,
291 | (101, (3, 1, 1)): 0,
292 | (99, (3, 3, 0)): 0,
293 | (42, (3, 1, 0)): 0,
294 | (99, (3, 6, 1)): 0,
295 | (42, (3, 8, 0)): 0,
296 | (42, (3, 5, 0)): 0,
297 | (99, (2, 1, 0)): 1,
298 | (101, (3, 1, 0)): 0,
299 | (101, (3, 0, 0)): 0,
300 | (42, (3, 0, 0)): 0,
301 | (101, (3, 0, 1)): 0,
302 | (99, (3, 2, 2)): 0,
303 | (101, (3, 5, 1)): 0,
304 | (99, (2, 0, 0)): 1,
305 | (42, (3, 5, 1)): 1,
306 | (42, (2, 0, 0)): 1,
307 | (101, (3, 7, 2)): 0,
308 | (42, (3, 7, 2)): 0,
309 | (99, (3, 4, 0)): 0,
310 | (101, (3, 7, 0)): 0,
311 | }
312 | assert orbits == target_orbits
313 |
314 | def test_orbit_counts(self):
315 | M = net.MultiplexNetwork(couplings="categorical", fullyInterconnected=True)
316 | M[42, 99, "x", "x"] = 1
317 | M[42, 99, "z", "z"] = 1
318 | M[99, 101, "z", "z"] = 1
319 | M[42, 101, "z", "z"] = 1
320 | nets, invs = graphlets.graphlets(
321 | n=3,
322 | layers=["a", "b", "c"],
323 | n_l=2,
324 | couplings="categorical",
325 | allowed_aspects="all",
326 | )
327 | auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
328 | orbit_is = graphlets.orbit_numbers(n=3, nets=nets, auts=auts)
329 | orbit_list = graphlets.ordered_orbit_list(orbit_is)
330 | orbits_n0 = dict()
331 | graphlets.orbit_counts(
332 | n=3,
333 | node0=42,
334 | net=M,
335 | nets=nets,
336 | orbits=orbits_n0,
337 | invs=invs,
338 | auts=auts,
339 | orbit_list=orbit_list,
340 | allowed_aspects="all",
341 | )
342 | target_orbits_n0 = {
343 | (42, (3, 1, 0)): 0,
344 | (42, (3, 1, 1)): 0,
345 | (42, (3, 8, 1)): 0,
346 | (42, (3, 8, 0)): 0,
347 | (42, (3, 2, 1)): 0,
348 | (42, (3, 7, 2)): 0,
349 | (42, (3, 7, 0)): 0,
350 | (42, (3, 0, 1)): 0,
351 | (42, (3, 0, 0)): 0,
352 | (42, (3, 5, 0)): 0,
353 | (42, (3, 4, 1)): 0,
354 | (42, (3, 4, 0)): 0,
355 | (42, (3, 3, 0)): 0,
356 | (42, (3, 5, 1)): 1,
357 | (42, (3, 9, 0)): 0,
358 | (42, (3, 2, 0)): 0,
359 | (42, (2, 0, 0)): 1,
360 | (42, (3, 2, 2)): 0,
361 | (42, (3, 6, 1)): 0,
362 | (42, (3, 6, 0)): 0,
363 | (42, (2, 1, 0)): 1,
364 | }
365 | assert orbits_n0 == target_orbits_n0
366 |
367 |
368 | def makesuite():
369 | suite = unittest.TestSuite()
370 | suite.addTest(TestGraphlets("test_graphlets"))
371 | suite.addTest(TestGraphlets("test_automorphism_orbits"))
372 | suite.addTest(TestGraphlets("test_list_orbits"))
373 | suite.addTest(TestGraphlets("test_orbit_equations"))
374 | suite.addTest(TestGraphlets("test_independent_equations"))
375 | suite.addTest(TestGraphlets("test_redundant_orbits"))
376 | suite.addTest(TestGraphlets("test_orbit_counts_all"))
377 | suite.addTest(TestGraphlets("test_orbit_counts"))
378 | return suite
379 |
380 |
381 | def test_graphlets(**kwargs):
382 | suite = makesuite(**kwargs)
383 | return unittest.TextTestRunner().run(suite).wasSuccessful()
384 |
385 |
386 | if __name__ == "__main__":
387 | sys.exit(not test_graphlets())
388 |
--------------------------------------------------------------------------------
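
A condensed sketch of the graphlet pipeline tested above; the counts in the comments come directly from the assertions in test_graphlets:

    import pymnet.graphlets as graphlets

    # Enumerate multiplex graphlets of up to 3 nodes on 2 layers drawn from {"a", "b", "c"}.
    nets, invs = graphlets.graphlets(
        n=3, layers=["a", "b", "c"], n_l=2, couplings="categorical", allowed_aspects="all"
    )
    print(len(nets[2]), len(nets[3]))  # 2 two-node and 10 three-node graphlets

    # Group the node positions of each graphlet into automorphism orbits and list them.
    auts = graphlets.automorphism_orbits(nets, allowed_aspects="all")
    orbit_lists = graphlets.list_orbits(auts)
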
/pymnet/tests/io_test.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import tempfile
4 | import unittest
5 | from operator import itemgetter
6 |
7 | from pymnet import net, netio
8 |
9 |
10 | class TestIO(unittest.TestCase):
11 |
12 | def setUp(self):
13 | pass
14 |
15 | def test_read_ucinet_flat_fullnet(self):
16 | netfile = """DL N = 5
17 | Data:
18 | 0 1 1 1 1
19 | 1 0 1 0 0
20 | 1 1 0 0 1
21 | 1 0 0 0 0
22 | 1 0 1 0 0"""
23 | net = netio.read_ucinet(netfile.split("\n"))
24 | self.assertEqual(net.aspects, 0)
25 | self.assertEqual(set(net), set([0, 1, 2, 3, 4]))
26 | self.assertEqual(set(net[0]), set([1, 2, 3, 4]))
27 | self.assertEqual(set(net[1]), set([0, 2]))
28 | self.assertEqual(set(net[2]), set([0, 1, 4]))
29 | self.assertEqual(set(net[3]), set([0]))
30 | self.assertEqual(set(net[4]), set([0, 2]))
31 |
32 | def test_labeled(netfile):
33 | net = netio.read_ucinet(netfile.split("\n"))
34 | self.assertEqual(net.aspects, 0)
35 | self.assertEqual(set(net), set(["barry", "david", "lin", "pat", "russ"]))
36 | self.assertEqual(set(net["barry"]), set(["david", "lin", "pat"]))
37 | self.assertEqual(set(net["david"]), set(["barry", "russ"]))
38 | self.assertEqual(set(net["lin"]), set(["barry", "pat"]))
39 | self.assertEqual(set(net["pat"]), set(["barry", "lin", "russ"]))
40 | self.assertEqual(set(net["russ"]), set(["david", "pat"]))
41 |
42 | netfile1 = """dl n=5
43 | format = fullmatrix
44 | labels:
45 | barry,david,lin,pat,russ
46 | data:
47 | 0 1 1 1 0
48 | 1 0 0 0 1
49 | 1 0 0 1 0
50 | 1 0 1 0 1
51 | 0 1 0 1 0"""
52 | test_labeled(netfile1)
53 |
54 | netfile2 = """dl n=5
55 | format = fullmatrix
56 | labels:
57 | barry,david
58 | lin,pat
59 | russ
60 | data:
61 | 0 1 1 1 0
62 | 1 0 0 0 1
63 | 1 0 0 1 0
64 | 1 0 1 0 1
65 | 0 1 0 1 0"""
66 | test_labeled(netfile2)
67 |
68 | netfile3 = """dl n=5
69 | format = fullmatrix
70 | labels embedded
71 | data:
72 | barry david lin pat russ
73 | Barry 0 1 1 1 0
74 | david 1 0 0 0 1
75 | Lin 1 0 0 1 0
76 | Pat 1 0 1 0 1
77 | Russ 0 1 0 1 0"""
78 | test_labeled(netfile3)
79 |
80 | def test_read_ucinet_mplex_fullnet(self):
81 | netfile = """DL N = 5 nm=2
82 | Data:
83 | 0 1 1 1 1
84 | 1 0 1 0 0
85 | 1 1 0 0 1
86 | 1 0 0 0 0
87 | 1 0 1 0 0
88 | 0 1 1 1 0
89 | 1 0 0 0 1
90 | 1 0 0 1 0
91 | 1 0 1 0 1
92 | 0 1 0 1 0"""
93 | net = netio.read_ucinet(netfile.split("\n"))
94 | self.assertEqual(net.aspects, 1)
95 | self.assertEqual(set(net), set([0, 1, 2, 3, 4]))
96 | self.assertEqual(set(net.A[0][0]), set([1, 2, 3, 4]))
97 | self.assertEqual(set(net.A[0][1]), set([0, 2]))
98 | self.assertEqual(set(net.A[0][2]), set([0, 1, 4]))
99 | self.assertEqual(set(net.A[0][3]), set([0]))
100 | self.assertEqual(set(net.A[0][4]), set([0, 2]))
101 | self.assertEqual(set(net.A[1][0]), set([1, 2, 3]))
102 | self.assertEqual(set(net.A[1][1]), set([0, 4]))
103 | self.assertEqual(set(net.A[1][2]), set([0, 3]))
104 | self.assertEqual(set(net.A[1][3]), set([0, 2, 4]))
105 | self.assertEqual(set(net.A[1][4]), set([1, 3]))
106 |
107 | def test_read_ucinet_mplex_nonglobalnodes(self):
108 | netfile = """DL N = 3 nm =2
109 | Data:
110 | 0 1 1
111 | 1 0 1
112 | 1 1 0
113 | 0 0 0
114 | 0 0 1
115 | 0 1 0"""
116 | net = netio.read_ucinet(netfile.split("\n"), fullyInterconnected=False)
117 | self.assertEqual(set(net[0, 0]), set([(1, 0), (2, 0)]))
118 | self.assertEqual(set(net[1, 0]), set([(0, 0), (2, 0), (1, 1)]))
119 | self.assertEqual(set(net[2, 0]), set([(0, 0), (1, 0), (2, 1)]))
120 | self.assertEqual(set(net[0, 1]), set([]))
121 | self.assertEqual(set(net[1, 1]), set([(2, 1), (1, 0)]))
122 | self.assertEqual(set(net[2, 1]), set([(1, 1), (2, 0)]))
123 |
124 | def test_pickle(self):
125 | import pickle
126 |
127 | n = net.MultilayerNetwork(aspects=1)
128 | n[1, 2, 3, 4] = 1
129 | self.assertEqual(pickle.loads(pickle.dumps(n)), n)
130 | n = net.MultilayerNetwork(aspects=1, directed=True)
131 | n[1, 2, 3, 4] = 1
132 | self.assertEqual(pickle.loads(pickle.dumps(n)), n)
133 |
134 | n = net.MultiplexNetwork(couplings=[("categorical", 1)])
135 | n[1, 2, 3, 3] = 1
136 | self.assertEqual(pickle.loads(pickle.dumps(n)), n)
137 | n = net.MultiplexNetwork(couplings=[("categorical", 1)], directed=True)
138 | n[1, 2, 3, 3] = 1
139 | self.assertEqual(pickle.loads(pickle.dumps(n)), n)
140 |
141 | def test_write_json(self):
142 | import json
143 |
144 | n = net.MultiplexNetwork(couplings=[("categorical", 1)])
145 | n[1, 2, 3, 3] = 1
146 | j = json.loads(netio.write_json(n))
147 | self.assertEqual({node["name"] for node in j["nodes"]}, {1, 2})
148 | self.assertEqual({layer["name"] for layer in j["layers"]}, {3})
149 | self.assertEqual(
150 | [
151 | ({link["source"], link["target"]}, link["value"], link["layer"])
152 | for link in j["links"]
153 | ],
154 | [({0, 1}, 1, 0)],
155 | )
156 |
157 | with tempfile.TemporaryDirectory() as tmp:
158 | with open(os.path.join(tmp, "fobject"), "w") as f:
159 | netio.write_json(n, outputfile=f)
160 | with open(os.path.join(tmp, "fobject")) as f:
161 | self.assertEqual(json.load(f), j)
162 |
163 | netio.write_json(n, outputfile=os.path.join(tmp, "fname"))
164 | with open(os.path.join(tmp, "fname")) as f:
165 | self.assertEqual(json.load(f), j)
166 |
167 | def test_write_edge_files(self):
168 | n = net.MultiplexNetwork(couplings=[("categorical", 1)])
169 | n[1, 2, 3, 3] = 1
170 | with tempfile.TemporaryDirectory() as tmp:
171 | name = os.path.join(tmp, "test")
172 | netio.write_edge_files(n, name, masterFile=True)
173 |
174 | with open(name + ".txt") as master:
175 | self.assertEqual(master.readlines(), ["test3.edg;3;\n"])
176 |
177 | with open(name + "3.edg") as layer:
178 | l1, l2, w = next(layer).split()
179 | self.assertEqual(({int(l1), int(l2)}, int(w)), ({1, 2}, 1))
180 |
181 | def test_read_edge_file(self):
182 | with tempfile.TemporaryDirectory() as tmp:
183 | name = os.path.join(tmp, "test")
184 | fn = f"{name}.txt"
185 | with open(fn, "w") as f:
186 | f.write("1 1 2 0.5\n1 2 2 0.75")
187 | net = netio.read_edge_file(fn, sep=" ")
188 | self.assertEqual(len(net.edges), 1)
189 | with open(fn, "w") as f:
190 | f.write("1\t1\t2\t0.5\n1\t2\t2\t0.75")
191 | net = netio.read_edge_file(fn, sep="\t")
192 | self.assertEqual(len(net.edges), 1)
193 |
194 | def test_write_edge_file(self):
195 | n = net.MultiplexNetwork(couplings=[("categorical", 1)])
196 | n[1, 2, 3, 3] = 1
197 | with tempfile.TemporaryDirectory() as tmp:
198 | name = os.path.join(tmp, "test.edgelist")
199 | netio.write_edge_file(n, name, sep="\t")
200 |
201 | with open(name) as edges:
202 | layer, l1, l2, w = next(edges).split("\t")
203 | self.assertEqual(({int(l1), int(l2)}, int(w)), ({1, 2}, 1))
204 | self.assertEqual(int(layer), 3)
205 | net2 = netio.read_edge_file(name, sep="\t")
206 | self.assertListEqual(list(net2.edges), list(n.edges))
207 |
208 |
209 | def test_io():
210 | suite = unittest.TestSuite()
211 | suite.addTest(TestIO("test_read_ucinet_flat_fullnet"))
212 | suite.addTest(TestIO("test_read_ucinet_mplex_fullnet"))
213 | suite.addTest(TestIO("test_read_ucinet_mplex_nonglobalnodes"))
214 | suite.addTest(TestIO("test_pickle"))
215 | suite.addTest(TestIO("test_write_json"))
216 | suite.addTest(TestIO("test_write_edge_files"))
217 | suite.addTest(TestIO("test_read_edge_file"))
218 | suite.addTest(TestIO("test_write_edge_file"))
219 |
220 | return unittest.TextTestRunner().run(suite).wasSuccessful()
221 |
222 |
223 | if __name__ == "__main__":
224 | sys.exit(not test_io())
225 |
--------------------------------------------------------------------------------
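
A short sketch of the I/O helpers tested above; "example.edgelist" is a hypothetical output path written to the current directory:

    from pymnet import net, netio

    # UCINET full-matrix input can be read from any iterable of lines.
    ucinet_lines = ["DL N = 3", "Data:", "0 1 1", "1 0 1", "1 1 0"]
    g = netio.read_ucinet(ucinet_lines)
    print(set(g))  # nodes {0, 1, 2}

    # Round-trip a multiplex network through a single tab-separated edge file.
    m = net.MultiplexNetwork(couplings=[("categorical", 1)])
    m[1, 2, 3, 3] = 1
    netio.write_edge_file(m, "example.edgelist", sep="\t")
    m2 = netio.read_edge_file("example.edgelist", sep="\t")
    print(list(m2.edges) == list(m.edges))  # True, as in test_write_edge_file
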
/pymnet/tests/models_test.py:
--------------------------------------------------------------------------------
1 | import math
2 | import random
3 | import sys
4 | import unittest
5 |
6 | from pymnet import diagnostics, models, net
7 |
8 |
9 | class TestModels(unittest.TestCase):
10 |
11 | def setUp(self):
12 | pass
13 |
14 | def test_monoplex_erdosrenyi(self):
15 | size = 10
16 | full = net.MultilayerNetwork(aspects=0)
17 | models.single_layer_er(
18 | full, range(10, 10 + size), p=None, edges=(size * (size - 1)) // 2
19 | )
20 | for i in full:
21 | for j in full:
22 | if i != j:
23 | self.assertEqual(full[i, j], 1)
24 | self.assertEqual(len(full.edges), int((size * (size - 1)) / 2))
25 |
26 | net2 = net.MultilayerNetwork(aspects=0)
27 | models.single_layer_er(net2, range(10), p=None, edges=30)
28 | self.assertEqual(len(net2.edges), 30)
29 |
30 | net3 = net.MultilayerNetwork(aspects=0)
31 | models.single_layer_er(net3, range(10), p=1.0, edges=None)
32 | self.assertEqual(len(net3.edges), 45)
33 |
34 | def test_multiplex_erdosrenyi(self):
35 | net = models.er(10, 0.5)
36 | net2 = models.er(10, [0.4, 0.6])
37 |
38 | # test that there are some links but not all
39 | self.assertTrue(1 < len(list(net.edges)) < 10 * 9 / 2.0)
40 | self.assertTrue(1 < len(list(net2.A[1].edges)) < 10 * 9 + 10)
41 |
42 | net3 = models.er(10, edges=[30, 45])
43 | self.assertEqual(len(net3.A[0].edges), 30)
44 | self.assertEqual(len(net3.A[1].edges), 45)
45 |
46 | net4 = models.er([range(10), range(5, 15)], edges=[30, 45])
47 | self.assertEqual(len(net4.A[0].edges), 30)
48 | self.assertEqual(len(net4.A[1].edges), 45)
49 | self.assertEqual(set(net4.A[0]), set(range(10)))
50 | self.assertEqual(set(net4.A[1]), set(range(5, 15)))
51 |
52 | net5 = models.er([range(10), range(5, 15)], edges=30)
53 | self.assertEqual(len(net5.A[0].edges), 30)
54 | self.assertEqual(len(net5.A[1].edges), 30)
55 | self.assertEqual(set(net5.A[0]), set(range(10)))
56 | self.assertEqual(set(net5.A[1]), set(range(5, 15)))
57 |
58 | def test_monoplex_configuration_model(self):
59 | net = models.conf({5: 1000}) # maxdeg << sqrt(number of nodes)
60 | self.assertEqual(diagnostics.degs(net), {5: 1000})
61 |
62 | net = models.conf({50: 100})
63 | self.assertEqual(diagnostics.degs(net), {50: 100})
64 |
65 | # zero degrees
66 | net = models.conf({50: 100, 0: 10})
67 | self.assertEqual(diagnostics.degs(net), {50: 100, 0: 10})
68 |
69 | net = models.conf(
70 | dict(map(lambda x: (x, int(math.sqrt(x) + 1)), range(101))),
71 | degstype="nodes",
72 | )
73 | for i in range(101):
74 | self.assertEqual(net[i].deg(), int(math.sqrt(i) + 1))
75 |
76 | # zero degrees
77 | net = models.conf(
78 | dict(map(lambda x: (x, int(math.sqrt(x))), range(99))), degstype="nodes"
79 | )
80 | for i in range(99):
81 | self.assertEqual(net[i].deg(), int(math.sqrt(i)))
82 |
83 | net = models.conf(net)
84 | for i in range(99):
85 | self.assertEqual(net[i].deg(), int(math.sqrt(i)))
86 |
87 | def test_multiplex_configuration_model(self):
88 | net = models.conf([{50: 100}, {50: 100}])
89 | self.assertEqual(diagnostics.multiplex_degs(net), {0: {50: 100}, 1: {50: 100}})
90 |
91 | net = models.conf({"l1": {50: 100}, "l2": {50: 100}})
92 | self.assertEqual(
93 | diagnostics.multiplex_degs(net), {"l1": {50: 100}, "l2": {50: 100}}
94 | )
95 |
96 | net = models.conf(net)
97 | self.assertEqual(
98 | diagnostics.multiplex_degs(net), {"l1": {50: 100}, "l2": {50: 100}}
99 | )
100 |
101 | degs = {
102 | "l1": dict(map(lambda x: (x, 2 * int(math.sqrt(x))), range(100))),
103 | "l2": dict(map(lambda x: (x, 2 * int(math.sqrt(x))), range(20, 120))),
104 | }
105 | net = models.conf(degs, degstype="nodes")
106 | self.assertEqual(diagnostics.multiplex_degs(net, degstype="nodes"), degs)
107 | self.assertEqual(set(net.A["l1"]), set(range(100)))
108 | self.assertEqual(set(net.A["l2"]), set(range(20, 120)))
109 |
110 | def test_full_multiplex_network(self):
111 | self.assertEqual(diagnostics.degs(models.full(nodes=10, layers=None)), {9: 10})
112 |
113 | self.assertEqual(
114 | diagnostics.degs(models.full(nodes=10, layers=["a", "b"])), {10: 20}
115 | )
116 | self.assertEqual(
117 | diagnostics.multiplex_degs(models.full(nodes=10, layers=["a", "b"])),
118 | {"a": {9: 10}, "b": {9: 10}},
119 | )
120 |
121 | self.assertEqual(diagnostics.degs(models.full(nodes=10, layers=2)), {10: 20})
122 | self.assertEqual(
123 | diagnostics.multiplex_degs(models.full(nodes=10, layers=2)),
124 | {0: {9: 10}, 1: {9: 10}},
125 | )
126 |
127 | def test_er_partially_interconnected(self):
128 | random.seed(42)
129 | nodes = [list(range(10)), list(range(0, 10, 2))]
130 | ps = [0.1, 0.1]
131 | model = models.er_partially_interconnected(
132 | nodes, ps, couplings=("categorical", 0.9)
133 | )
134 | self.assertListEqual(
135 | list(model.edges)[:2], [(0, 5, 0, 0, 1), (0, 0, 0, 1, 0.9)]
136 | )
137 |
138 | def test_conf_overlaps(self):
139 | ol_dict = {
140 | (0, 0): {0: 0, 1: 0, 2: 1, 3: 1},
141 | (0, 1): {0: 1, 1: 1, 2: 0},
142 | (1, 1): {0: 0, 1: 0, 4: 1, 5: 1},
143 | }
144 | model = models.conf_overlaps(ol_dict)
145 | self.assertListEqual(
146 | list(model.edges),
147 | [(0, 1, 0, 0, 1), (0, 1, 1, 1, 1), (2, 3, 0, 0, 1), (4, 5, 1, 1, 1)],
148 | )
149 | ol_dict = {
150 | (0, 0): {0: 1, 1: 0, 2: 1, 3: 2},
151 | (0, 1): {0: 1, 1: 1, 2: 0},
152 | (1, 1): {0: 0, 1: 1, 4: 2, 5: 1},
153 | }
154 | random.seed(1)
155 | model = models.conf_overlaps(ol_dict)
156 | self.assertSetEqual(
157 | set(model.edges),
158 | {
159 | (0, 1, 0, 0, 1),
160 | (0, 1, 1, 1, 1),
161 | (0, 3, 0, 0, 1),
162 | (1, 4, 1, 1, 1),
163 | (2, 3, 0, 0, 1),
164 | (4, 5, 1, 1, 1),
165 | },
166 | )
167 |
168 | def test_ba_total_degree(self):
169 | random.seed(42)
170 | model = models.ba_total_degree(100, [1, 2])
171 | self.assertEqual(len(list(model.edges)), 295)
172 | self.assertListEqual(
173 | list(model.edges)[10:12], [(0, 12, 1, 1, 1), (0, 22, 1, 1, 1)]
174 | )
175 |
176 | # TODO double-check model implementation
177 | def test_geo(self):
178 | random.seed(42)
179 | model = models.geo(200, [10, 10])
180 | self.assertEqual(len(list(model.edges)), 26)
181 | self.assertListEqual(
182 | list(model.edges)[:2], [(2, 90, 0, 0, 1), (2, 186, 0, 0, 1)]
183 | )
184 |
185 | def test_ws(self):
186 | random.seed(42)
187 | model = models.ws(10, [20, 20])
188 | self.assertEqual(len(list(model.edges)), 40)
189 | self.assertListEqual(
190 | list(model.edges)[10:12], [(1, 9, 0, 0, 1), (1, 4, 0, 0, 1)]
191 | )
192 |
193 | # TODO double-check model implementation
194 | def test_er_overlaps_match_aggregated(self):
195 | pass
196 |
197 |
198 | def test_models():
199 | suite = unittest.TestSuite()
200 | suite.addTest(TestModels("test_monoplex_erdosrenyi"))
201 | suite.addTest(TestModels("test_multiplex_erdosrenyi"))
202 | suite.addTest(TestModels("test_monoplex_configuration_model"))
203 | suite.addTest(TestModels("test_multiplex_configuration_model"))
204 | suite.addTest(TestModels("test_full_multiplex_network"))
205 | suite.addTest(TestModels("test_er_partially_interconnected"))
206 | suite.addTest(TestModels("test_conf_overlaps"))
207 | suite.addTest(TestModels("test_ba_total_degree"))
208 | suite.addTest(TestModels("test_geo"))
209 | suite.addTest(TestModels("test_ws"))
210 | suite.addTest(TestModels("test_er_overlaps_match_aggregated"))
211 |
212 | return unittest.TextTestRunner().run(suite).wasSuccessful()
213 |
214 |
215 | if __name__ == "__main__":
216 | sys.exit(not test_models())
217 |
--------------------------------------------------------------------------------
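
A brief sketch of the generator functions exercised above; the expected outputs in the comments mirror the assertions in the tests:

    from pymnet import diagnostics, models

    # Multiplex Erdos-Renyi network: 10 nodes, two layers with edge probabilities 0.4 and 0.6.
    m = models.er(10, [0.4, 0.6])

    # Multiplex configuration model from per-layer degree distributions.
    c = models.conf({"l1": {50: 100}, "l2": {50: 100}})
    print(diagnostics.multiplex_degs(c))  # {"l1": {50: 100}, "l2": {50: 100}}

    # Full multiplex network with 10 nodes on layers "a" and "b".
    f = models.full(nodes=10, layers=["a", "b"])
    print(diagnostics.degs(f))            # {10: 20}: every node-layer has degree 10
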
/pymnet/tests/nxwrap_test.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | import sys
3 | import unittest
4 |
5 | import networkx
6 |
7 | from pymnet import net, nxwrap
8 |
9 |
10 | class TestNxwrap(unittest.TestCase):
11 | def setUp(self):
12 | pass
13 |
14 | def test_monoplex_basics(self, nnx):
15 | self.assertEqual(nnx[1][2]["weight"], 1)
16 | self.assertEqual(nnx[2][3]["weight"], 3)
17 | self.assertEqual(nnx[4][5]["weight"], 4)
18 |
19 | self.assertEqual(nnx[2][1]["weight"], 1)
20 | self.assertEqual(nnx[3][2]["weight"], 3)
21 | self.assertEqual(nnx[5][4]["weight"], 4)
22 |
23 | self.assertEqual(
24 | set(map(frozenset, networkx.connected_components(nnx))),
25 | set([frozenset([1, 2, 3]), frozenset([4, 5])]),
26 | )
27 |
28 | def test_monoplex_basics_writing_pymnet(self):
29 | n = net.MultilayerNetwork(aspects=0)
30 | n[1, 2] = 1
31 | nnx = nxwrap.MonoplexGraphNetworkxView(n)
32 | n[2, 3] = 3
33 | n[4, 5] = 4
34 |
35 | self.test_monoplex_basics(nnx)
36 |
37 | def test_monoplex_basics_writing_nx(self):
38 | n = net.MultilayerNetwork(aspects=0)
39 | nnx = nxwrap.MonoplexGraphNetworkxView(n)
40 | nnx.add_node(1)
41 | nnx.add_nodes_from([2, 3, 4, 5])
42 | nnx.add_edge(1, 2)
43 | nnx.add_edge(2, 3, weight=3)
44 | nnx.add_edge(4, 5)
45 | nnx.add_edge(4, 5, weight=1)
46 | nnx.add_edge(4, 5, weight=4)
47 |
48 | self.test_monoplex_basics(nnx)
49 |
50 | def test_autowrapping(self):
51 | n = net.MultilayerNetwork(aspects=0)
52 | n[1, 2] = 1
53 | n[2, 3] = 1
54 | n[4, 5] = 1
55 |
56 | self.assertEqual(
57 | set(map(frozenset, nxwrap.connected_components(n))),
58 | set([frozenset([1, 2, 3]), frozenset([4, 5])]),
59 | )
60 |
61 | def test_mst(self):
62 | n = net.MultilayerNetwork(aspects=0)
63 | n[1, 2] = 1
64 | n[2, 3] = 1
65 | n[1, 3] = 10
66 |
67 | mst = nxwrap.minimum_spanning_tree(n)
68 |
69 | self.assertEqual(mst[1, 2], 1)
70 | self.assertEqual(mst[2, 3], 1)
71 | self.assertEqual(mst[1, 3], mst.noEdge)
72 |
73 | def test_monoplex_load_karate(self):
74 | knet = nxwrap.karate_club_graph()
75 | self.assertEqual(knet.__class__, net.MultilayerNetwork)
76 | self.assertEqual(set(range(34)), set(knet))
77 | self.assertEqual(len(knet.edges), 78)
78 | self.assertNotEqual(knet[0, 1], 0)
79 | # self.assertNotEqual(networkx.Graph,nxwrap.MonoplexGraphNetworkxNew)
80 |
81 | def test_monoplex_tuples(self):
82 | n = net.MultilayerNetwork(aspects=0)
83 | nnx = nxwrap.MonoplexGraphNetworkxView(n)
84 | nnx.add_node((1, "a"))
85 | nnx.add_nodes_from([(2, "a"), (3, "a"), (4, "a"), (5, "a")])
86 | nnx.add_edge((1, "a"), (2, "a"))
87 | nnx.add_edge((2, "a"), (3, "a"), weight=3)
88 | nnx.add_edge((4, "a"), (5, "a"))
89 | nnx.add_edge((4, "a"), (5, "a"), weight=1)
90 | nnx.add_edge((4, "a"), (5, "a"), weight=4)
91 |
92 | self.assertEqual(nnx[(1, "a")][(2, "a")]["weight"], 1)
93 | self.assertEqual(nnx[(2, "a")][(3, "a")]["weight"], 3)
94 | self.assertEqual(nnx[(4, "a")][(5, "a")]["weight"], 4)
95 |
96 | self.assertEqual(nnx[(2, "a")][(1, "a")]["weight"], 1)
97 | self.assertEqual(nnx[(3, "a")][(2, "a")]["weight"], 3)
98 | self.assertEqual(nnx[(5, "a")][(4, "a")]["weight"], 4)
99 |
100 | self.assertEqual(
101 | set(map(frozenset, networkx.connected_components(nnx))),
102 | set(
103 | [
104 | frozenset([(1, "a"), (2, "a"), (3, "a")]),
105 | frozenset([(4, "a"), (5, "a")]),
106 | ]
107 | ),
108 | )
109 |
110 | def test_grid_graph(self):
111 | gg = nxwrap.grid_graph([2, 3])
112 | if (
113 | int(networkx.__version__.split(".")[0]) >= 2
114 | ): # The grid is produced in reversed order in networkx 2.
115 | self.assertEqual(set(gg), set(itertools.product(range(3), range(2))))
116 | else:
117 | self.assertEqual(set(gg), set(itertools.product(range(2), range(3))))
118 |
119 |
120 | def test_nxwrap():
121 | suite = unittest.TestSuite()
122 | suite.addTest(TestNxwrap("test_monoplex_basics_writing_pymnet"))
123 | suite.addTest(TestNxwrap("test_monoplex_basics_writing_nx"))
124 | suite.addTest(TestNxwrap("test_monoplex_load_karate"))
125 | suite.addTest(TestNxwrap("test_monoplex_tuples"))
126 | suite.addTest(TestNxwrap("test_grid_graph"))
127 | suite.addTest(TestNxwrap("test_autowrapping"))
128 | suite.addTest(TestNxwrap("test_mst"))
129 |
130 | return unittest.TextTestRunner().run(suite).wasSuccessful()
131 |
132 |
133 | if __name__ == "__main__":
134 | sys.exit(not test_nxwrap())
135 |
--------------------------------------------------------------------------------
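
A small sketch of the networkx wrapper tested above, based on test_autowrapping and test_mst:

    from pymnet import net, nxwrap

    n = net.MultilayerNetwork(aspects=0)
    n[1, 2] = 1
    n[2, 3] = 1
    n[4, 5] = 1

    # networkx algorithms exposed through nxwrap accept monoplex pymnet networks directly.
    print([set(c) for c in nxwrap.connected_components(n)])  # [{1, 2, 3}, {4, 5}] (in some order)

    # Algorithms that return graphs hand back pymnet networks as well.
    mst = nxwrap.minimum_spanning_tree(n)
    print(mst[1, 2])  # 1
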
/pymnet/tests/tutorial_test.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import sys
3 | import unittest
4 |
5 | import nbformat
6 |
7 |
8 | class TestTutorials(unittest.TestCase):
9 | def setUp(self):
10 | self.networktypes = "../../doc/tutorials/networktypes"
11 |
12 | def test_networktypes(self):
13 | subprocess.call(
14 | f"jupyter nbconvert --to notebook --execute {self.networktypes}.ipynb",
15 | shell=True,
16 | )
17 | nb = nbformat.read(f"{self.networktypes}.nbconvert.ipynb", as_version=4)
18 | nb_original = nbformat.read(f"{self.networktypes}.ipynb", as_version=4)
19 | out = [c["outputs"] for c in nb.cells if c["cell_type"] == "code"]
20 | out_original = [
21 | c["outputs"] for c in nb_original.cells if c["cell_type"] == "code"
22 | ]
23 | self.assertListEqual(out, out_original)
24 |
25 | def tearDown(self):
26 | subprocess.call(
27 |             f"rm {self.networktypes}.nbconvert.ipynb", shell=True
28 | )
29 |
30 |
31 | def test_tutorials():
32 | suite = unittest.TestSuite()
33 |     suite.addTest(TestTutorials("test_networktypes"))
34 |     return unittest.TextTestRunner().run(suite).wasSuccessful()
35 | 
36 | if __name__ == "__main__":
37 | sys.exit(not test_tutorials())
38 |
--------------------------------------------------------------------------------
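
The tutorial check above shells out to nbconvert and then compares cell outputs; the same conversion can be reproduced by hand (a usage note, with the path relative to pymnet/tests as in setUp):

    jupyter nbconvert --to notebook --execute ../../doc/tutorials/networktypes.ipynb
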
/pymnet/tests/visuals_test.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import unittest
4 | from operator import itemgetter
5 |
6 | from pymnet import net, visuals
7 |
8 |
9 | class TestVisuals(unittest.TestCase):
10 | figdirname = "figs"
11 |
12 | def setUp(self):
13 | # create directory for figs if it doesn't exist
14 | self.figdirpath = os.path.join(
15 | os.path.dirname(os.path.realpath(__file__)), self.figdirname
16 | )
17 | if not os.path.exists(self.figdirpath):
18 | os.mkdir(self.figdirpath)
19 |
20 | n = net.MultiplexNetwork([("categorical", 1.0)])
21 |
22 | n[1, 2, 1] = 1
23 | n[1, 3, 1] = 1
24 | n[2, 3, 1] = 1
25 |
26 | n[1, 2, 2] = 1
27 | n[1, 3, 2] = 1
28 | n[1, 4, 2] = 1
29 | n[3, 4, 2] = 1
30 |
31 | n[1, 2, 3] = 1
32 | n[1, 3, 3] = 1
33 | n[1, 4, 3] = 1
34 | n[2, 4, 3] = 1
35 |
36 | self.mplex_simple = n
37 |
38 | n = net.MultiplexNetwork([("categorical", 1.0)], fullyInterconnected=False)
39 |
40 | n[1, 2, 1] = 1
41 | n[1, 3, 1] = 1
42 | n[2, 3, 1] = 1
43 |
44 | n[1, 2, 2] = 1
45 | n[1, 3, 2] = 1
46 | n[1, 4, 2] = 1
47 | n[3, 4, 2] = 1
48 |
49 | n[1, 2, 3] = 1
50 | n[1, 3, 3] = 1
51 | n[1, 4, 3] = 1
52 | n[2, 4, 3] = 1
53 |
54 | self.mplex_nonaligned_simple = n
55 |
56 | # The 2-aspect example network for the review article
57 | n = net.MultilayerNetwork(aspects=2, fullyInterconnected=False)
58 | n[1, 2, "A", "A", "X", "X"] = 1
59 | n[2, 3, "A", "A", "Y", "Y"] = 1
60 | n[1, 3, "B", "B", "X", "X"] = 1
61 | n[1, 4, "B", "B", "X", "X"] = 1
62 | n[3, 4, "B", "B", "X", "X"] = 1
63 | n[1, 1, "A", "B", "X", "X"] = 1
64 | n[1, 4, "A", "B", "X", "X"] = 1
65 | n[1, 1, "B", "B", "X", "Y"] = 1
66 | n[3, 3, "A", "A", "X", "Y"] = 1
67 | n[3, 4, "A", "B", "X", "Y"] = 1
68 | self.mlayer_example_2d = n
69 |
70 | n = net.MultilayerNetwork(aspects=1, fullyInterconnected=False)
71 | n[1, 2, "A", "A"] = 1
72 | n[2, 3, "A", "A"] = 1
73 | n[1, 3, "B", "B"] = 1
74 | n[1, 4, "B", "B"] = 1
75 | n[3, 4, "B", "B"] = 1
76 | n[1, 1, "A", "B"] = 1
77 | n[1, 4, "A", "B"] = 1
78 | n[3, 4, "A", "B"] = 1
79 | self.mlayer_example_1d = n
80 |
81 | # Non-aligned network for testing multilayer coordinates
82 | n = net.MultilayerNetwork(aspects=1, fullyInterconnected=False)
83 | n.add_node(0, "a")
84 | n.add_node(1, "b")
85 | n.add_node(2, "b")
86 | n.add_node(3, "b")
87 | n.add_node(4, "b")
88 | n.add_node(5, "b")
89 | n[1, 2, "b", "b"] = 1
90 | n[2, 3, "b", "b"] = 1
91 | n[3, 4, "b", "b"] = 1
92 | n[4, 1, "b", "b"] = 1
93 |
94 | n[0, 5, "a", "b"] = 1
95 | n[1, 5, "b", "b"] = 1
96 | n[2, 5, "b", "b"] = 1
97 | n[3, 5, "b", "b"] = 1
98 | n[4, 5, "b", "b"] = 1
99 | self.mlayer_nonaligned_aligntest = n
100 |
101 | # Second non-aligned network for testing multilayer coordinates
102 | n = net.MultilayerNetwork(aspects=1, fullyInterconnected=False)
103 | n.add_node(0, "a")
104 | n.add_node(1, "b")
105 | n.add_node(2, "b")
106 | n.add_node(3, "b")
107 | n.add_node(4, "b")
108 |
109 | n[1, 2, "b", "b"] = 1
110 | n[2, 3, "b", "b"] = 1
111 | n[3, 4, "b", "b"] = 1
112 | n[4, 1, "b", "b"] = 1
113 |
114 | n[0, 0, "a", "b"] = 1
115 | n[1, 0, "b", "b"] = 1
116 | n[2, 0, "b", "b"] = 1
117 | n[3, 0, "b", "b"] = 1
118 | n[4, 0, "b", "b"] = 1
119 | self.mlayer_nonaligned_aligntest2 = n
120 |
121 | n = net.MultilayerNetwork(aspects=0, fullyInterconnected=True)
122 | n[1, 2] = 1
123 | n[2, 3] = 1
124 | n[1, 3] = 1
125 | n[1, 4] = 2
126 | n[3, 4] = 2
127 | self.mlayer_example_monoplex = n
128 |
129 | def test_draw_mplex_simple_defaults(self):
130 | fig = visuals.draw(self.mplex_simple)
131 | fig.savefig(os.path.join(self.figdirpath, "mplex_simple_defaults.png"))
132 |
133 | def test_draw_mplex_nonaligned_simple_defaults(self):
134 | fig = visuals.draw(self.mplex_nonaligned_simple)
135 | fig.savefig(
136 | os.path.join(self.figdirpath, "mplex_nonaligned_simple_defaults.png")
137 | )
138 |
139 | def test_draw_mlayer_example_1d_defaults(self):
140 | fig = visuals.draw(self.mlayer_example_1d)
141 | fig.savefig(os.path.join(self.figdirpath, "mlayer_example_1d_defaults.png"))
142 |
143 | def test_draw_mplex_simple_layer_labels(self):
144 | fig = visuals.draw(
145 | self.mplex_simple,
146 | layerLabelColorDict={1: "blue", 2: "green"},
147 | layerLabelSizeRule={"rule": "name", "scaleby": 10},
148 | layerLabelAlphaDict={3: 0.5},
149 | layerLabelStyleDict={2: "italic"},
150 | )
151 | fig.savefig(os.path.join(self.figdirpath, "mlayer_example_1d_layer_labels.png"))
152 |
153 | def test_draw_mlayer_nonaligned_mlayer_coords(self):
154 | nc = visuals.layouts.get_fruchterman_reingold_multilayer_layout(
155 | self.mlayer_nonaligned_aligntest
156 | )
157 | fig = visuals.draw(self.mlayer_nonaligned_aligntest, nodeCoords=nc)
158 | fig.savefig(
159 | os.path.join(self.figdirpath, "mlayer_nonaligned_mlayer_coords.png")
160 | )
161 |
162 | nc2 = visuals.layouts.get_fruchterman_reingold_multilayer_layout(
163 | self.mlayer_nonaligned_aligntest2
164 | )
165 | fig2 = visuals.draw(self.mlayer_nonaligned_aligntest2, nodeCoords=nc2)
166 | fig2.savefig(
167 | os.path.join(self.figdirpath, "mlayer_nonaligned_mlayer_coords2.png")
168 | )
169 |
170 | nc3 = visuals.layouts.get_fruchterman_reingold_multilayer_layout(
171 | self.mlayer_nonaligned_aligntest2, alignedNodes=False
172 | )
173 | fig3 = visuals.draw(self.mlayer_nonaligned_aligntest2, nodelayerCoords=nc3)
174 | fig3.savefig(
175 | os.path.join(self.figdirpath, "mlayer_nonaligned_mlayer_coords3.png")
176 | )
177 |
178 | def test_multiaxis(self):
179 | from matplotlib import pyplot as plt
180 |
181 | fig = plt.figure()
182 | ax1 = fig.add_subplot(121, projection="3d")
183 | ax2 = fig.add_subplot(122, projection="3d")
184 |
185 | nc = visuals.layouts.get_fruchterman_reingold_multilayer_layout(
186 | self.mlayer_nonaligned_aligntest
187 | )
188 | visuals.draw(self.mlayer_nonaligned_aligntest, nodeCoords=nc, ax=ax1)
189 |
190 | nc2 = visuals.layouts.get_fruchterman_reingold_multilayer_layout(
191 | self.mlayer_nonaligned_aligntest2
192 | )
193 | visuals.draw(self.mlayer_nonaligned_aligntest2, nodeCoords=nc2, ax=ax2)
194 |
195 | fig.savefig(os.path.join(self.figdirpath, "multiaxis_mlayer.png"))
196 |
197 | def test_draw_assigners_advanced1(self):
198 | fig = visuals.draw(
199 | self.mplex_simple,
200 | edgeWidthRule={
201 | "rule": "edgeweight",
202 | "scaleby": "layer",
203 | 1: 1.0,
204 | 2: 0.5,
205 | 3: 2.0,
206 | "interlayer": 3,
207 | },
208 | nodeColorRule={
209 | "rule": "layer",
210 | "mapping": True,
211 | 1: "red",
212 | 2: "blue",
213 | 3: "green",
214 | },
215 | )
216 | fig.savefig(
217 | os.path.join(self.figdirpath, "mlayer_example_1d_assigners_advanced1.png")
218 | )
219 |
220 | def test_mplex_networkx_layouts(self):
221 | from pymnet import nx
222 |
223 | g = nx.karate_club_graph()
224 | mplex = net.MultiplexNetwork()
225 | mplex.add_layer("karate-1")
226 | mplex.add_layer("karate-2")
227 | mplex.A["karate-1"] = g
228 | mplex.A["karate-2"] = g
229 | fig = visuals.draw(mplex, layout="spring")
230 | fig.savefig(os.path.join(self.figdirpath, "mplex_networkx_spring.png"))
231 |
232 | def test_mplex_fr_layout(self):
233 | from pymnet import models
234 |
235 | mplex = models.er(10, 2 * [0.2])
236 | fig = visuals.draw(mplex, layout="fr")
237 | fig.savefig(os.path.join(self.figdirpath, "mplex_er100_fr.png"))
238 |
239 |
240 | def test_visuals():
241 | suite = unittest.TestSuite()
242 | suite.addTest(TestVisuals("test_draw_mplex_simple_defaults"))
243 | suite.addTest(TestVisuals("test_draw_mplex_nonaligned_simple_defaults"))
244 | suite.addTest(TestVisuals("test_draw_mlayer_example_1d_defaults"))
245 | suite.addTest(TestVisuals("test_draw_mplex_simple_layer_labels"))
246 | suite.addTest(TestVisuals("test_draw_mlayer_nonaligned_mlayer_coords"))
247 | suite.addTest(TestVisuals("test_draw_assigners_advanced1"))
248 | suite.addTest(TestVisuals("test_multiaxis"))
249 | suite.addTest(TestVisuals("test_mplex_networkx_layouts"))
250 | suite.addTest(TestVisuals("test_mplex_fr_layout"))
251 | return unittest.TextTestRunner().run(suite).wasSuccessful()
252 |
253 |
254 | if __name__ == "__main__":
255 | sys.exit(not test_visuals())
256 |
--------------------------------------------------------------------------------
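
A minimal sketch of the drawing entry point exercised above; "mplex_example.png" is a hypothetical output filename:

    from pymnet import net, visuals

    m = net.MultiplexNetwork([("categorical", 1.0)])
    m[1, 2, 1] = 1
    m[1, 3, 1] = 1
    m[2, 3, 2] = 1

    # "spring" delegates the per-layer layout to networkx; "fr" is the multilayer
    # Fruchterman-Reingold layout used in test_mplex_fr_layout.
    fig = visuals.draw(m, layout="spring")
    fig.savefig("mplex_example.png")
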
/pymnet/visuals/__init__.py:
--------------------------------------------------------------------------------
1 | """Package for visualizing multilayer networks.
2 | """
3 |
4 | from .drawcore import draw
5 | from .webplots import webplot
6 |
--------------------------------------------------------------------------------
/pymnet/visuals/drawassigners.py:
--------------------------------------------------------------------------------
1 | """Property assigners that give a friendly user interface for setting
2 | properties of various elements of the networks.
3 | """
4 |
5 | import math
6 |
7 | # Some assigner features use matplotlib. These features are only available if
8 | # matplotlib can be loaded
9 | try:
10 | import matplotlib
11 |
12 | matplotlib_loaded = True
13 | except ImportError:
14 | matplotlib_loaded = False
15 |
16 |
17 | class PropertyAssigner(object):
18 | rules = set(["order", "name", "f"])
19 |
20 | def __init__(self, propDict, propRule, defaultProp, net):
21 | self.propDict = propDict
22 | self.propRule = propRule
23 | self.defaultProp = defaultProp
24 | self.net = net
25 |
26 | def _get_from_property_dict(self, item):
27 | if item in self.propDict:
28 | return self.propDict[item]
29 | else:
30 | return None
31 |
32 | def __getitem__(self, item):
33 | pdictval = self._get_from_property_dict(item)
34 | if pdictval is not None:
35 | return pdictval
36 | elif len(self.propRule) > 0:
37 | assert (
38 | "rule" in self.propRule
39 | ), "The rule dictionary must contain 'rule' key"
40 | if self.propRule["rule"] in self.rules:
41 | return self.apply_modify_rules(
42 | self.get_by_rule(item, self.propRule["rule"]), item
43 | )
44 | else:
45 | raise Exception("Unknown rule: " + str(self.propRule["rule"]))
46 | else:
47 | return self.defaultProp
48 |
49 | def get_by_rule(self, item, rule):
50 | if rule == "order":
51 | assert "sequence" in self.propRule
52 | if hasattr(self, "i"):
53 | self.i += 1
54 | else:
55 | self.i = 0
56 | return self.propRule["sequence"][self.i % len(self.propRule["sequence"])]
57 | elif rule == "name":
58 | return item
59 |
60 | def apply_modify_rules(self, item, origitem):
61 | if "f" in self.propRule and self.propRule["rule"] != "f":
62 | item = self.propRule["f"](item)
63 | if "mapping" in self.propRule and self.propRule["mapping"]:
64 | item = self.propRule[item]
65 | if "scaleby" in self.propRule:
66 | if self.propRule["scaleby"] in self.rules:
67 | item = (
68 | item
69 | * self.propRule[
70 | self.get_by_rule(origitem, self.propRule["scaleby"])
71 | ]
72 | )
73 | else:
74 | item = item * self.propRule["scaleby"]
75 | if "colormap" in self.propRule:
76 | if matplotlib_loaded:
77 | item = matplotlib.cm.get_cmap(self.propRule["colormap"])(item)
78 | else:
79 | raise ImportError(
80 | "The colormap feature uses matplotlib, and matplotlib "
81 | "cannot be imported."
82 | )
83 | return item
84 |
85 |
86 | class LayerPropertyAssigner(PropertyAssigner):
87 | pass
88 |
89 |
90 | class LayerColorAssigner(LayerPropertyAssigner):
91 | pass
92 |
93 |
94 | class LayerAlphaAssigner(LayerPropertyAssigner):
95 | pass
96 |
97 |
98 | class LayerLabelAssigner(LayerPropertyAssigner):
99 | pass
100 |
101 |
102 | class LayerLabelLocAssigner(LayerPropertyAssigner):
103 | pass
104 |
105 |
106 | class LayerOrderAssigner(LayerPropertyAssigner):
107 | pass
108 |
109 |
110 | class LayerLabelSizeAssigner(LayerPropertyAssigner):
111 | pass
112 |
113 |
114 | class LayerLabelColorAssigner(LayerPropertyAssigner):
115 | pass
116 |
117 |
118 | class LayerLabelStyleAssigner(LayerPropertyAssigner):
119 | pass
120 |
121 |
122 | class LayerLabelAlphaAssigner(LayerPropertyAssigner):
123 | pass
124 |
125 |
126 | class NodePropertyAssigner(PropertyAssigner):
127 | rules = PropertyAssigner.rules.union(set(["degree", "layer"]))
128 |
129 | def get_by_rule(self, item, rule):
130 | if rule == "degree":
131 | return self.net[item].deg()
132 | elif rule == "layer":
133 | return item[1] # assuming a single aspect here
134 | return super(NodePropertyAssigner, self).get_by_rule(item, rule)
135 |
136 |
137 | class NodeLabelSizeAssigner(NodePropertyAssigner):
138 | pass
139 |
140 |
141 | class NodeLabelColorAssigner(NodePropertyAssigner):
142 | pass
143 |
144 |
145 | class NodeLabelStyleAssigner(NodePropertyAssigner):
146 | pass
147 |
148 |
149 | class NodeLabelAlphaAssigner(NodePropertyAssigner):
150 | pass
151 |
152 |
153 | class NodeLabelAssigner(NodePropertyAssigner):
154 | rules = NodePropertyAssigner.rules.union(set(["nodename"]))
155 |
156 | def get_by_rule(self, item, rule):
157 | if rule == "nodename":
158 | return item[0]
159 | return super(NodeLabelAssigner, self).get_by_rule(item, rule)
160 |
161 |
162 | class NodeColorAssigner(NodePropertyAssigner):
163 | rules = NodePropertyAssigner.rules
164 |
165 |
166 | class NodeSizeAssigner(NodePropertyAssigner):
167 | rules = NodePropertyAssigner.rules.union(set(["scaled"])) - set(["name"])
168 |
169 | def get_by_rule(self, item, rule):
170 | if rule == "scaled":
171 | coeff = (
172 | self.propRule["scalecoeff"] if "scalecoeff" in self.propRule else 1.0
173 | )
174 | n = len(self.net)
175 | return coeff / float(math.sqrt(n))
176 | return super(NodeSizeAssigner, self).get_by_rule(item, rule)
177 |
178 | def apply_modify_rules(self, item, origitem):
179 | if "propscale" in self.propRule:
180 | coeff = self.propRule["propscale"]
181 | n = len(self.net)
182 | item = item * coeff / float(math.sqrt(n))
183 | return super(NodeSizeAssigner, self).apply_modify_rules(item, origitem)
184 |
185 |
186 | # nodes todo: marker
187 |
188 |
189 | class EdgePropertyAssigner(PropertyAssigner):
190 | rules = PropertyAssigner.rules.union(
191 | set(["edgetype", "edgeweight", "sourcedestweight", "layer"])
192 | )
193 |
194 | def _get_from_property_dict(self, item):
195 | """Return the edge property from the property dict given by the user.
196 |
197 |         For directed networks this is the same as the parent class's method. For
198 |         undirected networks, both directions of an edge are accepted.
199 | """
200 | if self.net.directed:
201 | return super(EdgePropertyAssigner, self)._get_from_property_dict(item)
202 | else:
203 | if item in self.propDict:
204 | return self.propDict[item]
205 | else:
206 | try:
207 | item = (item[1], item[0])
208 | if item in self.propDict:
209 | return self.propDict[item]
210 | except Exception:
211 | return None
212 | return None
213 |
214 | def get_by_rule(self, item, rule):
215 | if rule == "edgetype":
216 | if "intra" in self.propRule and item[0][1] == item[1][1]:
217 | return self.propRule["intra"]
218 | elif "inter" in self.propRule and item[0][1] != item[1][1]:
219 | return self.propRule["inter"]
220 | elif rule == "edgeweight":
221 | return self.net[item[0]][item[1]]
222 | elif rule == "layer":
223 | if item[0][1] == item[1][1]:
224 | return item[0][1]
225 | else:
226 | return "interlayer"
227 | elif rule == "sourcedestweight":
228 | return item[0], item[1], self.net[item[0]][item[1]]
229 | return super(EdgePropertyAssigner, self).get_by_rule(item, rule)
230 |
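# Illustrative sketch (not part of the original file): with an "edgetype" rule such as
# {"rule": "edgetype", "intra": "gray", "inter": "red"}, an edge between node-layer
# tuples on the same layer gets the "intra" value and an edge crossing layers gets the
# "inter" value. The helper below is hypothetical and simply mirrors the branch above.
def example_edgetype(edge, rule):
    # edge is a pair of (node, layer) tuples, e.g. (("a", 1), ("b", 1))
    return rule["intra"] if edge[0][1] == edge[1][1] else rule["inter"]

rule = {"rule": "edgetype", "intra": "gray", "inter": "red"}
assert example_edgetype((("a", 1), ("b", 1)), rule) == "gray"   # intra-layer edge
assert example_edgetype((("a", 1), ("a", 2)), rule) == "red"    # inter-layer edge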
231 |
232 | class EdgeWidthAssigner(EdgePropertyAssigner):
233 | pass
234 |
235 |
236 | class EdgeColorAssigner(EdgePropertyAssigner):
237 | pass
238 |
239 |
240 | class EdgeStyleAssigner(EdgePropertyAssigner):
241 | pass
242 |
243 |
244 | class EdgeAlphaAssigner(EdgePropertyAssigner):
245 | pass
246 |
247 |
248 | class EdgeZAssigner(EdgePropertyAssigner):
249 | pass
250 |
--------------------------------------------------------------------------------
/pymnet/visuals/drawbackends/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mnets/pymnet/0a54d5dc8b0d6281c940043a5dcaed1df59a23e2/pymnet/visuals/drawbackends/__init__.py
--------------------------------------------------------------------------------
/pymnet/visuals/drawbackends/mpl.py:
--------------------------------------------------------------------------------
1 | """Matplotlib backend for the draw method.
2 | """
3 |
4 | # Checking if we can display graphics.
5 | import os
6 |
7 | import matplotlib
8 | import matplotlib.pyplot as plt
9 | from matplotlib.patches import Circle, PathPatch, Rectangle
10 | from mpl_toolkits.mplot3d import Axes3D, art3d
11 |
12 | from .. import drawnet
13 |
14 | # The previous backend-detection code caused problems on Windows. The fragment
15 | # below is kept for reference but disabled: it tried to select an interactive
16 | # backend where available and otherwise fell back to Agg, mimicking the previously intended behavior.
17 | # has_display = bool(os.environ.get("DISPLAY", ""))
18 | # if not has_display:
19 | # current_backend = matplotlib.get_backend()
20 | # interactive_backends = matplotlib.rcsetup.interactive_bk
21 | # if interactive_backends and current_backend not in interactive_backends:
22 | # for ib in interactive_backends:
23 | # try:
24 | # matplotlib.use(ib, force=True)
25 | # break
26 | # except (ImportError, RuntimeError):
27 | # matplotlib.use("Agg")
28 | # continue
29 | # else:
30 | # matplotlib.use("Agg")
31 |
32 |
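# Illustrative sketch (not part of the original file): a compact, callable version of the
# disabled fragment above. It assumes a POSIX-style DISPLAY environment variable and falls
# back to the non-interactive Agg backend when no display is available; the helper name is
# hypothetical.
def _use_agg_if_headless():
    if not os.environ.get("DISPLAY") and matplotlib.get_backend() in matplotlib.rcsetup.interactive_bk:
        matplotlib.use("Agg", force=True)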
33 | defaultLayerColors = ["red", "green", "blue"]
34 |
35 |
36 | def fix_attr(obj, attr, val):
37 |     obj.__setattr__(attr, val)  # Just in case there are side effects
38 | newclass = type(type(obj).__name__, (type(obj),), {})
39 | setattr(newclass, attr, property(lambda s: val, lambda s, x: None))
40 | obj.__class__ = newclass
41 |
42 |
43 | def fix_attr_range(obj, attr, ran):
44 | assert ran[0] <= obj.__getattribute__(attr) <= ran[1]
45 | obj.__setattr__("_" + attr, obj.__getattribute__(attr))
46 | oldclass = type(obj)
47 | newclass = type(oldclass.__name__, (oldclass,), {})
48 |
49 | def setter(s, val):
50 | if val < ran[0]:
51 | val = ran[0]
52 | elif val > ran[1]:
53 | val = ran[1]
54 | obj.__setattr__("_" + attr, val)
55 |
56 | def getter(s):
57 | return obj.__getattribute__("_" + attr)
58 |
59 | setattr(newclass, attr, property(getter, setter))
60 | obj.__class__ = newclass
61 |
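# Illustrative sketch (not part of the original file): fix_attr pins an attribute to a
# constant by swapping the object's class for a dynamically created subclass whose
# property ignores writes; fix_attr_range clamps writes to a range in the same way.
# The class below is hypothetical and only used for this demonstration.
class _Example:
    zorder = 1

_e = _Example()
fix_attr(_e, "zorder", 5)
_e.zorder = 99           # silently ignored by the injected property
assert _e.zorder == 5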
62 |
63 | class NetFigureMPL(drawnet.NetFigure):
64 | def draw(self, **kwargs):
65 | ax = kwargs["ax"] if "ax" in kwargs else None
66 |
67 | self.normalize_coords()
68 |
69 | if ax is None:
70 | self.fig = plt.figure(figsize=self.figsize)
71 | self.ax = self.fig.add_subplot(projection="3d")
72 | else:
73 | assert isinstance(ax, Axes3D), (
74 | "The axes need to have 3D projection. Use, for example, "
75 | "fig.add_subplot(111, projection='3d')"
76 | )
77 | self.ax = ax
78 | self.fig = self.ax.get_figure()
79 |
80 | self.draw_elements()
81 |
82 | self.ax.set_xlim3d(0, 1)
83 | self.ax.set_ylim3d(0, 1)
84 | self.ax.set_zlim3d(0, 2)
85 | self.ax.set_axis_off()
86 |
87 | fix_attr_range(self.ax, "elev", [0, 179])
88 |
89 | self.ax.azim = self.azim
90 | self.ax.elev = self.elev
91 | if self.camera_dist is not None:
92 | self.ax.dist = self.camera_dist
93 | if self.autoscale and len(self.layers) * self.layergap > 3:
94 | self.ax.autoscale_view()
95 | if self.show:
96 | plt.show()
97 |
98 | return self.fig
99 |
100 |
101 | class NodeMPL(drawnet.Node):
102 | def draw(self):
103 | self.circle = Circle((self.x, self.y), self.size / 2.0, color=self.color)
104 | self.net.ax.add_patch(self.circle)
105 | art3d.pathpatch_2d_to_3d(self.circle, z=self.layer.z, zdir="z")
106 | fix_attr(self.circle, "zorder", self.layer.z + self.net.eps)
107 |
108 | if self.label is not None:
109 | self.labelObject = self.net.ax.text(
110 | self.x + self.size / 2.0,
111 | self.y + self.size / 2.0,
112 | self.layer.z + self.net.eps,
113 | str(self.label),
114 | **self.labelArgs,
115 | )
116 | fix_attr(self.labelObject, "zorder", self.layer.z + 2 * self.net.eps)
117 |
118 |
119 | class LayerMPL(drawnet.Layer):
120 | def draw(self):
121 | assert self.z is not None
122 | if self.shape == "rectangle":
123 | self.layer = Rectangle((0, 0), 1, 1, alpha=self.alpha, color=self.color)
124 | if self.label is not None:
125 | self.labelObject = self.net.ax.text(
126 | self.labelloc[0],
127 | self.labelloc[1],
128 | self.z,
129 | str(self.label),
130 | **self.labelArgs,
131 | )
132 | elif self.shape == "circle":
133 | self.layer = Circle((0.5, 0.5), 0.5, alpha=self.alpha, color=self.color)
134 | if self.label is not None:
135 | self.labelObject = self.net.ax.text(
136 | self.labelloc[0],
137 | self.labelloc[1],
138 | self.z,
139 | str(self.label),
140 | **self.labelArgs,
141 | )
142 | self.net.ax.add_patch(self.layer)
143 | art3d.pathpatch_2d_to_3d(self.layer, z=self.z, zdir="z")
144 | fix_attr(self.layer, "zorder", self.z)
145 |
146 |
147 | class EdgeMPL(drawnet.Edge):
148 | def draw(self):
149 | self.lines = []
150 | # find layers this edge is crossing
151 | if abs(self.node1.layer.z - self.node2.layer.z) > self.net.layergap:
152 | n = (
153 | int(
154 | round(
155 | abs(self.node1.layer.z - self.node2.layer.z)
156 | / float(self.net.layergap)
157 | )
158 | )
159 | + 1
160 | )
161 | import numpy
162 |
163 | xs = numpy.linspace(self.node1.x, self.node2.x, n)
164 | ys = numpy.linspace(self.node1.y, self.node2.y, n)
165 | zs = numpy.linspace(self.node1.layer.z, self.node2.layer.z, n)
166 |             zorders = []
167 |             for i in range(len(zs) - 1):
168 |                 zorders.append((zs[i] + zs[i + 1]) / 2.0)
169 | else:
170 | xs = [self.node1.x, self.node2.x]
171 | ys = [self.node1.y, self.node2.y]
172 | zs = [self.node1.layer.z, self.node2.layer.z]
173 | for i in range(len(zs) - 1):
174 | z = (zs[i] + zs[i + 1]) / 2.0 + self.z * self.net.eps
175 | line = self.net.ax.plot(
176 | xs[i : i + 2],
177 | ys[i : i + 2],
178 | zs=zs[i : i + 2],
179 | linestyle=self.style,
180 | zdir="z",
181 | color=self.color,
182 | linewidth=self.width,
183 | alpha=self.alpha,
184 | )[0]
185 | fix_attr(line, "zorder", z)
186 | self.lines.append(line)
187 |
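# Illustrative sketch (not part of the original file): wiring the backend classes up by
# hand. Normally the higher-level draw function (defined elsewhere in pymnet.visuals)
# constructs these objects from a network; this shows the same structure directly:
# layers, then nodes, then edges, followed by a single draw() call. All names and
# coordinates below are arbitrary.
if __name__ == "__main__":
    fig = NetFigureMPL(show=False)
    layer1 = LayerMPL(fig, color="gray", label="layer 1")
    layer2 = LayerMPL(fig, color="lightblue", label="layer 2")
    a1 = NodeMPL(layer1, x=0.2, y=0.3, label="a")
    a2 = NodeMPL(layer2, x=0.7, y=0.6, label="a")
    EdgeMPL(a1, a2, color="black", style=":")
    mplfig = fig.draw()          # returns the matplotlib Figure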
--------------------------------------------------------------------------------
/pymnet/visuals/drawbackends/threejs.py:
--------------------------------------------------------------------------------
1 | """Backend for multilayer network draw method using three.js.
2 |
3 | This is still experimental and is missing many features.
4 | """
5 |
6 | import os
7 |
8 | from .. import drawbackends, drawnet
9 |
10 | TEMPLATE_FILE = os.path.join(
11 | os.path.dirname(drawbackends.__file__), "threejs_template.html"
12 | )
13 | SIZE = 100
14 |
15 |
16 | class NetFigureThreeJS(drawnet.NetFigure):
17 | def draw(self, **kwargs):
18 | self.normalize_coords()
19 |
20 |         with open(TEMPLATE_FILE, "r") as template_file:
21 |             self.template = template_file.read()
23 |
24 | self.node_snippets = []
25 | self.edge_snippets = []
26 | self.layer_snippets = []
27 |
28 | self.draw_elements()
29 |
30 | self.template = self.template.replace("@nodes", "".join(self.node_snippets))
31 | self.template = self.template.replace("@edges", "".join(self.edge_snippets))
32 | self.template = self.template.replace("@layers", "".join(self.layer_snippets))
33 |
34 | return self.template
35 |
36 |
37 | class NodeThreeJS(drawnet.Node):
38 | def draw(self):
39 | snippet = """
40 | var node= getNode(@x,@y,@z,@r);
41 | scene.add(node);
42 |
43 | """
44 | snippet = snippet.replace("@x", str(SIZE * self.x))
45 |         snippet = snippet.replace("@y", str(SIZE * self.y))  # same scaling as EdgeThreeJS so edges meet the nodes
46 |         snippet = snippet.replace("@z", str(SIZE * self.layer.z))
47 | snippet = snippet.replace("@r", str(0.1 * self.size / 2.0))
48 |
49 | self.net.node_snippets.append(snippet)
50 |
51 |
52 | class EdgeThreeJS(drawnet.Edge):
53 | def draw(self):
54 | snippet = """
55 | var link= getLink(@x1,@y1,@z1,@x2,@y2,@z2,@r);
56 | scene.add(link);
57 |
58 | """
59 | snippet = snippet.replace("@x1", str(SIZE * self.node1.x))
60 | snippet = snippet.replace("@y1", str(SIZE * self.node1.y))
61 | snippet = snippet.replace("@z1", str(SIZE * self.node1.layer.z))
62 |
63 | snippet = snippet.replace("@x2", str(SIZE * self.node2.x))
64 | snippet = snippet.replace("@y2", str(SIZE * self.node2.y))
65 | snippet = snippet.replace("@z2", str(SIZE * self.node2.layer.z))
66 |
67 | snippet = snippet.replace("@r", str(0.01))
68 |
69 | self.net.edge_snippets.append(snippet)
70 |
71 |
72 | class LayerThreeJS(drawnet.Layer):
73 | def draw(self):
74 | pass
75 |
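# Illustrative sketch (not part of the original file): the three.js backend renders the
# figure into an HTML string by filling the @nodes/@edges/@layers placeholders of the
# bundled template; writing it to a file is left to the caller. The file name
# "network.html" and all coordinates are hypothetical.
if __name__ == "__main__":
    fig = NetFigureThreeJS()
    layer = LayerThreeJS(fig)
    a = NodeThreeJS(layer, x=0.1, y=0.2)
    b = NodeThreeJS(layer, x=0.8, y=0.9)
    EdgeThreeJS(a, b)
    with open("network.html", "w") as f:
        f.write(fig.draw())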
--------------------------------------------------------------------------------
/pymnet/visuals/drawbackends/threejs_template.html:
--------------------------------------------------------------------------------
(Markup stripped in this dump: the file is an HTML page titled "Multilayer network visualization" whose body contains the @nodes, @edges, and @layers placeholders that NetFigureThreeJS.draw fills in.)
--------------------------------------------------------------------------------
/pymnet/visuals/drawnet.py:
--------------------------------------------------------------------------------
1 | """Classes that are used to represent multilayer networks for drawing, and that the
2 | drawing backends use to visualize them.
3 | """
4 |
5 |
6 | class NetFigure(object):
7 | def __init__(
8 | self,
9 | figsize=None,
10 | layergap=1,
11 | eps=0.001,
12 | padding=0.05,
13 | azim=-51,
14 | elev=22,
15 | show=False,
16 | camera_dist=None,
17 | autoscale=True,
18 | ):
19 | self.nodes = []
20 | self.layers = []
21 | self.edges = []
22 |
23 | self.padding = padding
24 | self.eps = eps
25 | self.layergap = layergap
26 | self.figsize = figsize
27 | self.azim = azim
28 | self.elev = elev
29 | self.show = show
30 | self.camera_dist = camera_dist
31 | self.autoscale = autoscale
32 |
33 | def normalize_coords(self):
34 | maxx, maxy, minx, miny = (
35 | float("-inf"),
36 | float("-inf"),
37 | float("inf"),
38 | float("inf"),
39 | )
40 | for node in self.nodes:
41 | if maxx < node.x + node.size / 2.0:
42 | maxx = node.x + node.size / 2.0
43 | if maxy < node.y + node.size / 2.0:
44 | maxy = node.y + node.size / 2.0
45 | if minx > node.x - node.size / 2.0:
46 | minx = node.x - node.size / 2.0
47 | if miny > node.y - node.size / 2.0:
48 | miny = node.y - node.size / 2.0
49 |
50 | def xtrans(x):
51 | return (x - minx + self.padding) / float(maxx - minx + 2 * self.padding)
52 |
53 | def ytrans(y):
54 | return (y - miny + self.padding) / float(maxy - miny + 2 * self.padding)
55 |
56 | for node in self.nodes:
57 | node.x = xtrans(node.x)
58 | node.y = ytrans(node.y)
59 |
60 | def draw_elements(self):
61 | for i, layer in enumerate(self.layers):
62 | layer.z = i * self.layergap
63 | if layer.alpha != 0:
64 | layer.draw()
65 |
66 | for node in self.nodes:
67 | if node.size > 0:
68 | node.draw()
69 |
70 | for edge in self.edges:
71 | edge.draw()
72 |
73 | def draw(self, **kwargs):
74 | # Override this method
75 | raise NotImplementedError()
76 |
77 | def register_layer(self, layer):
78 | self.layers.insert(0, layer) # First to top
79 |
80 | def register_node(self, node):
81 | self.nodes.append(node)
82 |
83 | def register_edge(self, edge):
84 | self.edges.append(edge)
85 |
86 |
87 | class Node(object):
88 | def __init__(self, layer, x, y, label=None, size=0.04, color="black", labelArgs={}):
89 | self.x, self.y, self.size, self.color, self.label = x, y, size, color, label
90 | self.layer = layer
91 | self.net = layer.net
92 | self.label = label
93 | self.labelArgs = labelArgs
94 |
95 | self.net.register_node(self)
96 |
97 | def draw(self):
98 | # Override this method
99 | raise NotImplementedError()
100 |
101 |
102 | class Layer(object):
103 | def __init__(
104 | self,
105 | net,
106 | color="gray",
107 | alpha=0.3,
108 | shape="rectangle",
109 | label=None,
110 | labelloc=(1, 1),
111 | labelArgs={},
112 | ):
113 | assert shape in ["rectangle", "circle"]
114 | self.shape = shape
115 | self.color = color
116 | self.alpha = alpha
117 | self.label = label
118 | self.labelloc = labelloc
119 | self.labelArgs = labelArgs
120 | self.z = None
121 | self.net = net
122 | self.net.register_layer(self)
123 |
124 | def draw(self):
125 | # Override this method
126 | raise NotImplementedError()
127 |
128 |
129 | class Edge(object):
130 | def __init__(
131 | self,
132 | node1,
133 | node2,
134 | color="gray",
135 | width=1.0,
136 | directed=False,
137 | style="-",
138 | z=0,
139 | alpha=1,
140 | ):
141 | self.node1 = node1
142 | self.node2 = node2
143 | self.net = node1.net
144 | assert 0 <= z <= 1
145 | self.z = z
146 | assert 0 <= alpha <= 1
147 | self.alpha = alpha
148 |
149 | self.net.register_edge(self)
150 |
151 | self.color, self.width, self.directed, self.style = (
152 | color,
153 | width,
154 | directed,
155 | style,
156 | )
157 |
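# Illustrative sketch (not part of the original file): a minimal custom backend. Each
# backend subclasses the base classes above and overrides draw(); NetFigure.draw_elements
# then invokes the overrides in layer -> node -> edge order, exactly as the matplotlib
# and three.js backends do. The class names below are hypothetical.
class NetFigureText(NetFigure):
    def draw(self, **kwargs):
        self.normalize_coords()
        self.draw_elements()

class LayerText(Layer):
    def draw(self):
        print("layer", self.label, "at z =", self.z)

class NodeText(Node):
    def draw(self):
        print("node", self.label, "at", (self.x, self.y, self.layer.z))

class EdgeText(Edge):
    def draw(self):
        print("edge", self.node1.label, "-", self.node2.label)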
--------------------------------------------------------------------------------
/pymnet/visuals/layouts.py:
--------------------------------------------------------------------------------
1 | """Module for creating network layouts.
2 | """
3 |
4 | import math
5 | import random
6 |
7 | import pymnet
8 |
9 |
10 | def get_layout(layout, net, alignedNodes=True, **kwargs):
11 |     """Calculate a layout for a network. For parameter values, see the documentation
12 |     of the draw function.
13 |
14 | Returns
15 | -------
16 | nodeCoords, nodelayerCoords : dict, dict
17 | Node coordinates and node-layer coordinates that are generated. These can be given to the
18 | draw function as parameters.
19 | """
20 | if alignedNodes == None:
21 | if isinstance(net, pymnet.net.MultiplexNetwork):
22 | alignedNodes = True
23 | elif isinstance(net, pymnet.net.MultilayerNetwork):
24 | alignedNodes = False
25 | else:
26 | raise ValueError(
27 |                 "The argument net must be a MultilayerNetwork or MultiplexNetwork."
28 | )
29 |
30 | ncoords, nlcoords = {}, {}
31 | if alignedNodes:
32 | if layout in ["circular", "shell", "spring", "spectral"]: # nx layout
33 | if hasattr(pymnet, "nx"):
34 | la = getattr(pymnet.nx, layout + "_layout")
35 | na = pymnet.transforms.aggregate(net, 1)
36 | ncoords = la(na, **kwargs)
37 | else:
38 | raise Exception(
39 | "Networkx needs to be installed to use layout: " + layout
40 | )
41 | elif layout == "random":
42 | for node in net:
43 | ncoords[node] = (random.random(), random.random())
44 | elif layout == "fr":
45 | ncoords = get_fruchterman_reingold_multilayer_layout(net)
46 | else:
47 | raise Exception("Invalid layout: " + layout)
48 | else:
49 | if layout == "random":
50 | for nl in net.iter_node_layers():
51 | nlcoords[nl] = (random.random(), random.random())
52 | elif layout == "fr":
53 | nlcoords = get_fruchterman_reingold_multilayer_layout(net)
54 | else:
55 | raise Exception("Invalid layout: " + layout)
56 | return ncoords, nlcoords
57 |
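# Illustrative sketch (not part of the original file): computing aligned node coordinates
# for a small two-layer multiplex network. The layer and node names are arbitrary.
if __name__ == "__main__":
    mplex = pymnet.MultiplexNetwork(couplings="categorical")
    mplex.add_layer("x")
    mplex.add_layer("y")
    mplex.A["x"]["a", "b"] = 1
    mplex.A["x"]["b", "c"] = 1
    mplex.A["y"]["a", "c"] = 1
    nodeCoords, nodelayerCoords = get_layout("fr", mplex, alignedNodes=True)
    # nodeCoords maps each node to an (x, y) pair inside the unit box;
    # nodelayerCoords is empty because the nodes are aligned across layers.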
58 |
59 | def normalize_coordinates(coords, boxSize, inplace=False):
60 | """Normalizes coordinates (linearly) such that coordinate min is zero and max is
61 | the one given by the boxSize.
62 |
63 | Parameters
64 | ----------
65 | coords : dict
66 | Dictionary of coordinates, where keys are nodes/node-layers and values are tuples
67 | in the format (x,y)
68 | boxSize : tuple of floats
69 | The size of the box where the coordinates are to be normalized
70 | """
71 | minx, miny, maxx, maxy = None, None, None, None
72 | # for node,(x,y) in coords.iteritems():
73 | for node in coords:
74 | x, y = coords[node]
75 | if minx == None or x < minx:
76 | minx = x
77 | if miny == None or y < miny:
78 | miny = y
79 | if maxx == None or x > maxx:
80 | maxx = x
81 | if maxy == None or y > maxy:
82 | maxy = y
83 |
84 | difx = maxx - minx
85 | dify = maxy - miny
86 | if inplace:
87 | newcoords = coords
88 | else:
89 | newcoords = {}
90 | # for node, (x, y) in coords.iteritems():
91 | for node in coords:
92 | x, y = coords[node]
93 |         newcoords[node] = (boxSize[0] * (x - minx) / difx, boxSize[1] * (y - miny) / dify)  # scale into the given boxSize
94 | return newcoords
95 |
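# Illustrative sketch (not part of the original file): a worked example of the linear
# rescaling above with a unit bounding box.
if __name__ == "__main__":
    example = {"a": (2.0, 10.0), "b": (4.0, 30.0)}
    print(normalize_coordinates(example, (1.0, 1.0)))
    # -> {'a': (0.0, 0.0), 'b': (1.0, 1.0)}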
96 |
97 | def get_fruchterman_reingold_multilayer_layout(
98 | net,
99 | nodeDist="auto",
100 | boxSize=1.0,
101 | alignedNodes=True,
102 | nodelayerCoords=None,
103 | nodeCoords=None,
104 | fixedNodes=None,
105 | fixedNodeLayers=None,
106 | iterations=100,
107 | ):
108 |     """A multilayer version of the Fruchterman-Reingold algorithm for network layouts.
109 |
110 |     This is a multilayer variation of the standard FR algorithm, where the layout is
111 |     produced by simulating springs between linked nodes and repulsive forces between all
112 |     nodes. The main difference to the normal version of the algorithm is that nodes
113 |     on different layers do not repel each other.
114 |
115 | Parameters
116 | ----------
117 | net : MultilayerNetwork
118 | The network for which the coordinates are calculated
119 | nodeDist : float, string
120 | The desired distance between pairs of nodes. If "auto", then inverse of the
121 | square root of the number of nodes is used.
122 | boxSize : float, tuple of floats
123 | The size of the bounding box for the coordinates. If float is given then a square
124 | box is used. Otherwise, provide a tuple with two floats.
125 | alignedNodes : bool
126 |         Determines if node-layer tuples with the same node should have the same
127 | coordinates.
128 | nodelayerCoords : dict, None
129 | Initial coordinates for node-layer tuples. If None, random coordinates are used.
130 | If alignedNodes is set to True these coordinates are not used.
131 | nodeCoords : dict, None
132 | Initial coordinates for nodes. If None, random coordinates are used. If a coordinate
133 | for node-layer tuple is defined then that is used instead of the node coordinate.
134 | fixedNodes : set, None
135 | The set of nodes that are not moved from the initial coordinates. If None, then
136 | all nodes are allowed to move. You can also use list or similar data structures, but
137 | set is recommended for speed when the number of elements is large.
138 | fixedNodeLayers : set, None
139 |         The set of node-layers that are not moved from the initial coordinates. If None, then
140 | all node-layers are allowed to move. You can also use list or similar data structures, but
141 | set is recommended for speed when the number of elements is large.
142 | iterations : int
143 | The number of times the nodes/node-layer tuples are moved.
144 | """
145 |
146 | # Parsing parameters and sanity check for them
147 | # net
148 | assert isinstance(net, pymnet.MultilayerNetwork), "Invalid network type"
149 |     assert net.aspects >= 1, "Monoplex networks are not supported"
150 |
151 | # If the network is fully interconnected, we just create network with one layer
152 | if net.fullyInterconnected:
153 | assert (
154 | nodelayerCoords == None
155 |         ), "For fully interconnected networks, give node coordinates instead of node-layer coordinates"
156 | magg = pymnet.MultiplexNetwork(fullyInterconnected=False)
157 | magg.add_layer("all")
158 | magg.A["all"] = pymnet.transforms.aggregate(net, 1)
159 | net = magg
160 |
161 | # nodeDist
162 | if nodeDist == "auto":
163 | nodeDist = 1.0 / math.sqrt(len(net.slices[0]))
164 | else:
165 | nodeDist = float(nodeDist)
166 | assert nodeDist > 0
167 |
168 | # boxSize
169 | if isinstance(boxSize, tuple) or isinstance(boxSize, list):
170 | assert len(boxSize) == 2
171 | else:
172 | boxSize = float(boxSize)
173 | boxSize = (boxSize, boxSize)
174 |
175 | # nodeCoords
176 | if nodeCoords == None:
177 | nodeCoords = {}
178 |
179 | # nodelayerCoords
180 | if nodelayerCoords == None:
181 | nodelayerCoords = {}
182 |
183 | if alignedNodes: # use node coordinates
184 | nc = {}
185 | for node in net:
186 | if node in nodeCoords:
187 | nc[node] = nodeCoords[node]
188 | else:
189 | nc[node] = (boxSize[0] * random.random(), boxSize[1] * random.random())
190 | else: # use node-layer tuple coordinates
191 | nlc = {}
192 | for nl in net.iter_node_layers():
193 | if nl in nodelayerCoords:
194 | nlc[nl] = nodelayerCoords[nl]
195 | elif nl[0] in nodeCoords:
196 | nlc[nl] = nodeCoords[nl[0]]
197 | else:
198 | nlc[nl] = (boxSize[0] * random.random(), boxSize[1] * random.random())
199 |
200 | if fixedNodes == None:
201 | fixedNodes = set()
202 | if fixedNodeLayers == None:
203 | fixedNodeLayers = set()
204 |
205 | # Parsing complete
206 |
207 | # Some internal parameters
208 | temperature = 0.1 * max(boxSize)
209 | delta_temperature = temperature / float(iterations)
210 | min_dist = 0.01
211 |
212 | for iteration in range(iterations):
213 | if alignedNodes: # we have coordinates for nodes
214 | delta_nc = dict(((k, (0.0, 0.0)) for k in nc))
215 | # Spring forces
216 | for edge in net.edges:
217 | node1 = edge[0]
218 | node2 = edge[1]
219 | if node1 != node2:
220 | diff = (nc[node1][0] - nc[node2][0], nc[node1][1] - nc[node2][1])
221 | dist = math.sqrt(diff[0] ** 2 + diff[1] ** 2)
222 | c = dist / float(nodeDist)
223 | delta_nc[node1] = (
224 | delta_nc[node1][0] - c * diff[0],
225 | delta_nc[node1][1] - c * diff[1],
226 | )
227 | delta_nc[node2] = (
228 | delta_nc[node2][0] + c * diff[0],
229 | delta_nc[node2][1] + c * diff[1],
230 | )
231 |
232 | # Repulsive forces
233 | for node1 in net:
234 | for node2 in net:
235 | if node1 != node2:
236 | layer_overlap = len(
237 | net._nodeToLayers[node1].intersection(
238 | net._nodeToLayers[node2]
239 | )
240 | )
241 | diff = (
242 | nc[node1][0] - nc[node2][0],
243 | nc[node1][1] - nc[node2][1],
244 | )
245 | dist = math.sqrt(diff[0] ** 2 + diff[1] ** 2)
246 | dist = max(dist, min_dist)
247 | c = layer_overlap * nodeDist**2 / float(dist**2)
248 | delta_nc[node1] = (
249 | delta_nc[node1][0] + c * diff[0],
250 | delta_nc[node1][1] + c * diff[1],
251 | )
252 | delta_nc[node2] = (
253 | delta_nc[node2][0] - c * diff[0],
254 | delta_nc[node2][1] - c * diff[1],
255 | )
256 |
257 |             # Apply the displacements and normalize the coordinates
258 | # for node,(x,y) in delta_nc.iteritems():
259 | for node in delta_nc:
260 | x, y = delta_nc[node]
261 | if node not in fixedNodes:
262 | delta_len = math.sqrt(x**2 + y**2)
263 | nc[node] = (
264 | nc[node][0] + temperature * delta_len * x,
265 | nc[node][1] + temperature * delta_len * y,
266 | )
267 | normalize_coordinates(nc, boxSize, inplace=True)
268 |
269 | else: # we have coordinates for node-layer tuples
270 |
271 |             # There is currently a lot of code duplication here compared to the
272 | # case where nodes are aligned. Some of this should be removed, and some
273 | # of it will probably disappear once the code is optimized a bit.
274 |
275 | delta_nlc = dict(((k, (0.0, 0.0)) for k in nlc))
276 | # Spring forces
277 | for edge in net.edges:
278 | nl1, nl2 = net._link_to_nodes(edge[:-1])
279 | diff = (nlc[nl1][0] - nlc[nl2][0], nlc[nl1][1] - nlc[nl2][1])
280 | dist = math.sqrt(diff[0] ** 2 + diff[1] ** 2)
281 | dist = max(dist, min_dist)
282 | c = dist / float(nodeDist)
283 | delta_nlc[nl1] = (
284 | delta_nlc[nl1][0] - c * diff[0],
285 | delta_nlc[nl1][1] - c * diff[1],
286 | )
287 | delta_nlc[nl2] = (
288 | delta_nlc[nl2][0] + c * diff[0],
289 | delta_nlc[nl2][1] + c * diff[1],
290 | )
291 |
292 | # Repulsive forces
293 | for nl1 in net.iter_node_layers():
294 | layer = nl1[1:][0] if net.aspects == 1 else nl1[1:]
295 | for node2 in net.iter_nodes(layer=layer):
296 | nl2 = (node2,) + nl1[1:]
297 | if nl1 != nl2:
298 | diff = (nlc[nl1][0] - nlc[nl2][0], nlc[nl1][1] - nlc[nl2][1])
299 | dist = math.sqrt(diff[0] ** 2 + diff[1] ** 2)
300 | dist = max(dist, min_dist)
301 | c = nodeDist**2 / float(dist**2)
302 | delta_nlc[nl1] = (
303 | delta_nlc[nl1][0] + c * diff[0],
304 | delta_nlc[nl1][1] + c * diff[1],
305 | )
306 | delta_nlc[nl2] = (
307 | delta_nlc[nl2][0] - c * diff[0],
308 | delta_nlc[nl2][1] - c * diff[1],
309 | )
310 |
311 |             # Apply the displacements and normalize the coordinates
312 | # for nl,(x,y) in delta_nlc.iteritems():
313 | for nl in delta_nlc:
314 | x, y = delta_nlc[nl]
315 | if nl not in fixedNodeLayers:
316 | delta_len = math.sqrt(x**2 + y**2)
317 | nlc[nl] = (
318 | nlc[nl][0] + temperature * delta_len * x,
319 | nlc[nl][1] + temperature * delta_len * y,
320 | )
321 | normalize_coordinates(nlc, boxSize, inplace=True)
322 |
323 | temperature -= delta_temperature
324 |
325 | if alignedNodes:
326 | return nc
327 | else:
328 | return nlc
329 |
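# Illustrative sketch (not part of the original file): calling the layout function directly
# and excluding one node from the force updates via fixedNodes, while giving it an initial
# position through nodeCoords. The network and its labels are arbitrary.
if __name__ == "__main__":
    mplex = pymnet.MultiplexNetwork(couplings="categorical")
    mplex.add_layer("x")
    mplex.add_layer("y")
    mplex.A["x"]["a", "b"] = 1
    mplex.A["y"]["b", "c"] = 1
    coords = get_fruchterman_reingold_multilayer_layout(
        mplex,
        nodeCoords={"a": (0.5, 0.5)},
        fixedNodes={"a"},
        iterations=50,
    )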
--------------------------------------------------------------------------------
/pymnet/visuals/webplots.py:
--------------------------------------------------------------------------------
1 | """Module for creating plots of multiplex networks for the web. This is completely separate functionality from the draw function.
2 | """
3 |
4 | import math
5 | import random
6 |
7 | import pymnet
8 | from pymnet.net import MultiplexNetwork
9 |
10 | from .. import netio
11 |
12 | webplot_template = """
(HTML/JavaScript template body stripped in this dump; it is a D3-based page containing the @netjson placeholder that webplot fills in below.)
95 | """
96 |
97 |
98 | def webplot(net, outputfile=None):
99 |     """Create a 3D visualization of a multiplex network for the web using D3.js.
100 |
101 | Creates a webpage that contains a visualization of the input multiplex
102 | network. The network must have only a single aspect.
103 |
104 | Parameters
105 | ----------
106 | net : MultiplexNetwork with aspects=1
107 | The input network.
108 | outputfile : None, string, or file object
109 |         Returns the output as a string if outputfile is None. If outputfile
110 |         is a string, it is used as a file name that is opened for
111 |         writing. Finally, if outputfile is a file object, the output is written to
112 |         that file.
113 |
114 | Returns
115 | -------
116 | String or None
117 | Returns the output as a string if outputfile is None.
118 | """
119 | assert isinstance(net, MultiplexNetwork)
120 | assert net.aspects == 1
121 |
122 | script = webplot_template
123 | netdatastr = netio.write_json(net)
124 |
125 | replace = {"@netjson": netdatastr}
126 | for key, val in replace.items():
127 | script = script.replace(key, val)
128 |
129 | if outputfile == None:
130 | return script
131 | else:
132 |         if isinstance(outputfile, str):
133 | outputfile = open(outputfile, "w")
134 |
135 |         outputfile.write("<html>")
136 |         outputfile.write(script)
137 |         outputfile.write("</html>")
138 | outputfile.close()
139 |
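# Illustrative sketch (not part of the original file): writing the D3 page for a small
# one-aspect multiplex network to disk. The file name "socialnet.html" and all node and
# layer names are hypothetical.
if __name__ == "__main__":
    mplex = MultiplexNetwork(couplings="categorical")
    mplex.add_layer("friends")
    mplex.add_layer("work")
    mplex.A["friends"]["alice", "bob"] = 1
    mplex.A["work"]["bob", "carol"] = 1
    webplot(mplex, outputfile="socialnet.html")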
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "pymnet"
3 | version = "1.0.0"
4 | description = "Multilayer network analysis library for Python"
5 | authors = [{name="Mikko Kivelä", email="mikko.kivela@iki.fi"}]
6 | requires-python = ">=3.8"
7 | license = {text = "GPLv3"}
8 | readme = "README.md"
9 | keywords = [
10 | "Complex Networks", "Networks", "network",
11 | "Graphs", "Graph Theory", "graph",
12 | "Multilayer Networks", "multilayer network"
13 | ]
14 |
15 | classifiers = [
16 | "Development Status :: 4 - Beta",
17 | "Intended Audience :: Education",
18 | "Intended Audience :: Science/Research",
19 |     "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
20 | "Operating System :: POSIX :: Linux",
21 | "Programming Language :: C++",
22 | "Programming Language :: Python :: 3",
23 | "Programming Language :: Python :: Implementation :: CPython",
24 | "Topic :: Scientific/Engineering",
25 | "Topic :: Scientific/Engineering :: Physics",
26 | "Topic :: Scientific/Engineering :: Mathematics",
27 | "Topic :: Scientific/Engineering :: Information Analysis"
28 | ]
29 |
30 | dependencies = [
31 | "matplotlib >= 3.7.0",
32 | "numpy >= 1.23.0",
33 | "scipy >= 1.10.0",
34 | "networkx >= 2.0, < 4.0",
35 | "bliss-bind >= 0.3.0 ; (sys_platform == 'darwin' or ( (platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine == 'AMD64' and sys_platform == 'win32') )) and python_version >= '3.8'"
36 | ]
37 |
38 | [project.urls]
39 | homepage = "https://mnets.github.io/pymnet/"
40 | documentation = "https://mnets.github.io/pymnet/"
41 | repository = "https://github.com/mnets/pymnet"
42 | bug-tracker = "https://github.com/mnets/pymnet/issues"
43 |
44 | [project.optional-dependencies]
45 | test = [
46 | "coverage >= 7.0"
47 | ]
48 | dev = [
49 | "black >= 24.4",
50 | "isort >= 5.13"
51 | ]
52 | tutorial = [
53 | "jupyterlab >= 4.2",
54 | "pandas"
55 | ]
56 |
57 | [build-system]
58 | requires = [
59 | "setuptools>=67",
60 | "wheel"
61 | ]
62 | build-backend = "setuptools.build_meta"
63 |
64 | [tool.setuptools.packages.find]
65 | exclude = ["wheelhouse"]
66 |
67 | [tool.black]
68 | line-length = 88
69 | target-version = ['py39']
70 |
71 | [tool.isort]
72 | profile = "black"
73 |
--------------------------------------------------------------------------------
/socialnet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mnets/pymnet/0a54d5dc8b0d6281c940043a5dcaed1df59a23e2/socialnet.png
--------------------------------------------------------------------------------