├── .circleci
│   └── config.yml
├── .github
│   └── workflows
│       └── wheels.yml
├── .gitignore
├── Dockerfile
├── LICENSE.txt
├── MANIFEST.in
├── README.md
├── build-docker.sh
├── coverage.rc
├── dev_notes.md
├── docs
│   ├── .gitignore
│   ├── Makefile
│   ├── conf.py
│   ├── guide
│   │   ├── .gitignore
│   │   ├── ascii_doc_ex.adoc
│   │   ├── distributing_agents.adoc
│   │   ├── images
│   │   │   ├── shared_grid_agents.png
│   │   │   ├── shared_grid_agents_800.png
│   │   │   └── shared_net_2.png
│   │   ├── motivation.adoc
│   │   ├── overview.adoc
│   │   ├── tutorial_network.adoc
│   │   ├── tutorial_random_walk.adoc
│   │   ├── tutorial_zombies.adoc
│   │   └── user_guide.adoc
│   ├── index.rst
│   ├── make.bat
│   ├── source
│   │   ├── modules.rst
│   │   ├── repast4py.context.rst
│   │   ├── repast4py.core.rst
│   │   ├── repast4py.geometry.rst
│   │   ├── repast4py.logging.rst
│   │   ├── repast4py.network.rst
│   │   ├── repast4py.parameters.rst
│   │   ├── repast4py.random.rst
│   │   ├── repast4py.rst
│   │   ├── repast4py.schedule.rst
│   │   ├── repast4py.space.rst
│   │   ├── repast4py.util.rst
│   │   └── repast4py.value_layer.rst
│   └── web
│       ├── build.sh
│       ├── examples_to_include
│       ├── landing.adoc
│       └── macos_mpi_install.adoc
├── envs
│   ├── bebop_config_repast4py.sh
│   └── bebop_env.sh
├── examples
│   ├── .gitignore
│   ├── diffusion
│   │   ├── diffusion.py
│   │   └── run_diffusion.py
│   ├── examples.adoc
│   ├── rndwalk
│   │   ├── random_walk.adoc
│   │   ├── random_walk.yaml
│   │   ├── random_walk_overview.adoc
│   │   └── rndwalk.py
│   ├── rumor
│   │   ├── network.txt
│   │   ├── rumor.py
│   │   ├── rumor_model.adoc
│   │   ├── rumor_model.yaml
│   │   └── rumor_overview.adoc
│   └── zombies
│       ├── zombie_model.yaml
│       ├── zombies.adoc
│       ├── zombies.py
│       └── zombies_overview.adoc
├── paper_data
│   ├── r4py_scaling_analysis.r
│   ├── runtimes_144p_3000000h_6000z.csv
│   ├── runtimes_288p_3000000h_6000z.csv
│   ├── runtimes_36p_3000000h_6000z.csv
│   └── runtimes_72p_3000000h_6000z.csv
├── pyproject.toml
├── requirements.txt
├── run-docker.sh
├── scripts
│   └── zombies.sbatch
├── setup.cfg
├── setup.py
├── src
│   └── repast4py
│       ├── SpatialTree.cpp
│       ├── SpatialTree.h
│       ├── __init__.py
│       ├── borders.h
│       ├── context.py
│       ├── core.h
│       ├── core.py
│       ├── coremodule.cpp
│       ├── coremodule.h
│       ├── cspace.h
│       ├── distributed_space.cpp
│       ├── distributed_space.h
│       ├── geometry.cpp
│       ├── geometry.h
│       ├── geometry.py
│       ├── grid.h
│       ├── logging.py
│       ├── network.py
│       ├── occupancy.h
│       ├── parameters.py
│       ├── random.py
│       ├── schedule.py
│       ├── space.cpp
│       ├── space.h
│       ├── space.py
│       ├── space_types.h
│       ├── spacemodule.cpp
│       ├── types.h
│       ├── util.py
│       └── value_layer.py
├── swift_proj
│   ├── data
│   │   ├── generate_upf.py
│   │   ├── upf_01.txt
│   │   ├── upf_strong_scaling.txt
│   │   ├── upf_weak_scaling_144.txt
│   │   ├── upf_weak_scaling_2304.txt
│   │   ├── upf_weak_scaling_36.txt
│   │   └── upf_weak_scaling_576.txt
│   ├── etc
│   │   └── emews_utils.sh
│   └── swift
│       ├── bebop_run_zombies.sh
│       └── swift_run_zombies.swift
├── test_data
│   ├── net_with_attribs.txt
│   ├── net_with_no_attribs.txt
│   ├── simple_net.txt
│   ├── test_file
│   ├── test_file.csv
│   └── test_params.yaml
├── test_models
│   ├── README.md
│   ├── move_test
│   │   ├── move.py
│   │   └── move.yaml
│   └── spatial_tree_test
│       ├── spatial_tree.yaml
│       └── spatial_tree_test.py
├── tests
│   ├── Makefile
│   ├── __init__.py
│   ├── ctopo_tests.py
│   ├── logging_tests.py
│   ├── shared_network_tests.py
│   ├── shared_obj_tests.py
│   ├── shared_vl_tests.py
│   ├── space_tests.cpp
│   ├── test.sh
│   ├── test_agents.py
│   ├── test_main.cpp
│   ├── test_schedule.py
│   ├── test_space.py
│   ├── test_value_layer.py
│   └── util_tests.py
└── tox.ini
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.1
2 | 
3 | # Define the jobs we want to run for this project
4 | executors:
5 |   build:
6 |     docker:
7 |       - image: dsheeler/repast4py
8 |         auth:
9 |           username: $DOCKERHUB_USER
10 |           password: $DOCKERHUB_PASSWORD
11 | jobs:
12 |   test_all:
13 |     executor: build
14 |     steps:
15 |       - checkout
16 |       - run: CC=mpicc CXX=mpicxx python setup.py build_ext --inplace
17 |       - run: PYTHONPATH="src" ./tests/test.sh
18 |       - run:
19 |           command: |
20 |             coverage combine
21 |             coverage report -m
22 |             bash <(curl -s https://codecov.io/bash)
23 | # Orchestrate our job run sequence
24 | workflows:
25 |   test:
26 |     jobs:
27 |       - test_all:
28 |           context:
29 |             - test-context
30 |
--------------------------------------------------------------------------------
/.github/workflows/wheels.yml:
--------------------------------------------------------------------------------
1 | name: ci-build
2 | 
3 | on:
4 |   # Schedule and workflow_dispatch (manual) only work on default branch
5 |   schedule:
6 |     - cron: "0 6 * * *"  # Fire at 06:00 AM every day
7 |   workflow_dispatch:
8 |   pull_request:
9 |   push:
10 |     branches:
11 |       - develop
12 |   release:
13 |     types:
14 |       - published
15 | 
16 | permissions:
17 |   contents: read
18 | 
19 | jobs:
20 |   build_wheels:
21 |     name: Build wheels on ${{ matrix.os }}
22 |     runs-on: ${{ matrix.os }}
23 |     strategy:
24 |       matrix:
25 |         os:
26 |           - windows-2019
27 |           - windows-2022
28 |           - macos-14
29 | 
30 |         mpi:
31 |           - msmpi
32 |           - mpich
33 | 
34 |         exclude:
35 |           - os: windows-2019
36 |             mpi: mpich
37 |           - os: windows-2022
38 |             mpi: mpich
39 |           - os: macos-14
40 |             mpi: msmpi
41 | 
42 |     steps:
43 |       - name: Checkout
44 |         uses: actions/checkout@v4
45 | 
46 |       - name: Setup MPI (${{ matrix.mpi }})
47 |         uses: mpi4py/setup-mpi@v1
48 |         with:
49 |           mpi: ${{ matrix.mpi }}
50 | 
51 |       - name: Setup Python
52 |         uses: actions/setup-python@v5
53 |         with:
54 |           python-version: '3.11'
55 |           cache: pip
56 | 
57 |       - name: Upgrade pip
58 |         run: python -m pip install -U pip
59 | 
60 |       - name: Install cibuildwheel
61 |         run: python -m pip install cibuildwheel==2.17.0
62 | 
63 |       - name: Build wheels
64 |         run: python -m cibuildwheel --output-dir wheelhouse
65 | 
66 |         env:
67 |           CIBW_ARCHS_WINDOWS: "AMD64 x86"
68 | 
69 |           CIBW_ARCHS_MACOS: "arm64"
70 |           CIBW_ENVIRONMENT_MACOS: "CC=mpicxx CXX=mpicxx"
71 | 
72 |           CIBW_BUILD: cp37-* cp38-* cp39-* cp310-* cp311-*
73 | 
74 |           # Disable building PyPy wheels on all platforms
75 |           CIBW_SKIP: pp*
76 | 
77 |       - uses: actions/upload-artifact@v4
78 |         with:
79 |           name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }}
80 |           path: ./wheelhouse/*.whl
81 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Emacs backups
2 | *~
3 |
4 | # Compiler outputs
5 | *.o
6 | *.x
7 | *.so
8 | *.pyc
9 | __pycache__
10 |
11 | *.egg-info
12 |
13 | scratch/
14 |
15 | # python build directory
16 | build
17 | # python distribution directory
18 | dist
19 |
20 | # SWIG-generated interfaces
21 | *_wrap.cxx
22 |
23 | # Tcl packages (generated by make-package.tcl)
24 | pkgIndex.tcl
25 |
26 | # STC outputs:
27 | *.tic
28 |
29 | # Autoconf stuff
30 | autom4te.cache
31 | autoscan.log
32 | config.cache
33 | config.h
34 | config.h.in
35 | config.log
36 | config.status
37 | config.status.out
38 | configure
39 |
40 | # Misc.
41 | .cache
42 | turbine-directory.txt
43 | .Rproj.user
44 |
45 | # BG/Q stuff
46 | *.cobaltlog
47 | *.error
48 | *.output
49 |
50 | # Bulk data
51 | *.csv
52 | *.png
53 | *.log
54 | *.tsv
55 | *.out
56 |
57 | !test_data/*.csv
58 |
59 | # IDE
60 | .vscode
61 |
62 | # tox
63 | .tox
64 |
65 | # Andrew
66 | *.DS_Store
67 | *._*
68 | templates/scripts/archive/
69 |
70 | # Turbine outputs
71 | **/turbine-output
72 | **/experiments
73 |
74 | old_repast_py_src/
75 | Debug/
76 | Release/
77 | notes_nc.txt
78 |
79 | # Include R4Py paper experiment data
80 | !paper_data/*
81 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8
2 |
3 | RUN apt-get update && \
4 |     apt-get install -y mpich \
5 |     && rm -rf /var/lib/apt/lists/*
6 |
7 | # Install the python requirements
8 | COPY ./requirements.txt ./requirements.txt
9 | RUN pip install -r ./requirements.txt
10 |
11 | # RUN apt-get update \
12 | # apt-get install -y git
13 |
14 | RUN mkdir -p /repos && \
15 |     cd /repos && \
16 |     git clone --depth 1 https://github.com/networkx/networkx-metis.git && \
17 |     cd /repos/networkx-metis && \
18 |     python setup.py install
19 |
20 | # Set the PYTHONPATH to include the /repast4py folder which contains the core folder
21 | ENV PYTHONPATH=/repast4py/src
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright © 2021, UChicago Argonne, LLC
2 | All Rights Reserved
3 | Software Name: repast4py
4 | By: Argonne National Laboratory
5 |
6 | OPEN SOURCE LICENSE
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | 1. Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | 2. Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation and/or
16 | other materials provided with the distribution.
17 |
18 | 3. Neither the name of the copyright holder nor the names of its contributors
19 | may be used to endorse or promote products derived from this software without
20 | specific prior written permission.
21 |
22 | ********************************************************************************
23 | DISCLAIMER
24 |
25 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
26 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
28 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
29 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
30 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
31 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
32 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
33 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
34 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35 | ********************************************************************************
36 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include ./src/repast4py/*.h
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Repast for Python (Repast4Py)
2 |
3 | [codecov coverage](https://codecov.io/gh/Repast/repast4py/branch/develop)
4 |
5 | ## Build Status
6 |
7 | 
8 | | Master | Develop |
9 | | ------ | ------- |
10 | | (build status badge) | (build status badge) |
11 | 
12 | 
13 | 
14 | 
15 | 
16 | 
17 |
18 | ## Repast4Py
19 |
20 | Repast for Python (Repast4Py) is the newest member of the [Repast Suite](https://repast.github.io) of
21 | free and open source agent-based modeling and simulation software.
22 | It builds on [Repast HPC](https://repast.github.io/repast_hpc.html), and provides the ability to build large, distributed agent-based models (ABMs) that span multiple processing cores.
23 | Distributed ABMs enable the development of complex systems models that capture the
24 | scale and relevant details of many problems of societal importance. Where Repast HPC is
25 | implemented in C++ and is more HPC expert focused, Repast4Py is a Python package and is
26 | designed to provide an easier on-ramp for researchers from diverse scientific communities to apply large-scale distributed ABM methods.
27 | Repast4Py is released under the BSD-3 open source license, and leverages [Numba](https://numba.pydata.org),
28 | [NumPy](https://numpy.org), and [PyTorch](https://pytorch.org) packages, and the Python C API
29 | to create a scalable modeling system that can exploit the largest HPC resources and emerging computing architectures. See our paper on Repast4Py for additional information about the design and implementation.
30 |
31 | Collier, N. T., Ozik, J., & Tatara, E. R. (2020). Experiences in Developing a Distributed Agent-based Modeling Toolkit with Python. 2020 IEEE/ACM 9th Workshop on Python for High-Performance and Scientific Computing (PyHPC), 1–12. https://doi.org/10.1109/PyHPC51966.2020.00006
32 |
33 | ### Requirements
34 |
35 | Repast4Py requires Python 3.8+
36 |
37 | Repast4Py can run on Linux, macOS and Windows provided there is a working MPI implementation
38 | installed and mpi4py is supported. Repast4Py is developed and tested on Linux. We recommend
39 | that Windows users use the Windows Subsystem for Linux (WSL). Installation instructions for
40 | WSL can be found [here](https://docs.microsoft.com/en-us/windows/wsl/install).
41 |
42 | Under Linux, MPI can be installed using your OS's package manager. For example,
43 | under Ubuntu 20.04 (and thus WSL), the mpich MPI implementation can be installed with:
44 |
45 | ```bash
46 | $ sudo apt install mpich
47 | ```
48 |
49 | Installation instructions for MPI on macOS can be found [here](https://repast.github.io/repast4py.site/macos_mpi_install.html).
50 |
51 | A typical campus cluster or HPC resource will have MPI and mpi4py installed.
52 | Check the resource's documentation on available software for more details.
53 |
54 | ### Installation
55 |
56 | Repast4Py can be downloaded and installed from PyPI using pip.
57 | Since Repast4Py includes native MPI C++ code that needs to be compiled,
58 | the C compiler `CC` environment variable must be set
59 | to the `mpicxx` (or `mpic++`) compiler wrapper provided by your MPI installation.
60 |
61 | ```
62 | env CC=mpicxx pip install repast4py
63 | ```
64 |
65 | __NOTE__: If you see an error message about a missing `python.h` header file when
66 | installing Repast4Py under Ubuntu (or other Linuxes), you will need to install
67 | a python dev package using your OS's package manager. For example, assuming
68 | Python 3.8, `sudo apt install python3.8-dev` will work for Ubuntu.
69 |
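
After installation, a quick import check can confirm that the package and its compiled
extension modules load. This is only an illustrative sketch (the module names below are
taken from this repository's `src/repast4py` directory; it is not an official verification step):

```python
# Minimal smoke test: import a few repast4py modules and report success.
from repast4py import core, schedule, space

print("repast4py imported successfully")
```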
70 | ### Documentation
71 |
72 | * [User's Guide](https://repast.github.io/repast4py.site/guide/user_guide.html)
73 | * [API Docs](https://repast.github.io/repast4py.site/apidoc/index.html)
74 | * [Example Models](https://repast.github.io/repast4py.site/examples/examples.html)
75 |
76 | ### Contact and Support
77 |
78 | * [GitHub Issues](https://github.com/Repast/repast4py/issues)
79 | * [GitHub Repository](https://github.com/Repast/repast4py)
80 |
81 | In addition to filing issues on GitHub, support is also available via
82 | [Stack Overflow](https://stackoverflow.com/questions/tagged/repast4py).
83 | Please use the `repast4py` tag to ensure that we are notified of your question.
84 | Software announcements will be made on the
85 | [repast-interest](http://lists.sourceforge.net/lists/listinfo/repast-interest) mailing list.
86 |
87 | Jonathan Ozik is the Repast project lead. Please contact him through
88 | the [Argonne Staff Directory](https://www.anl.gov/staff-directory) if you
89 | have project-related questions.
90 |
--------------------------------------------------------------------------------
/build-docker.sh:
--------------------------------------------------------------------------------
1 | docker build --rm -f Dockerfile -t repast4py:latest .
2 | docker tag repast4py:latest dsheeler/repast4py:latest
3 | docker push dsheeler/repast4py:latest
4 |
--------------------------------------------------------------------------------
/coverage.rc:
--------------------------------------------------------------------------------
1 | [coverage:run]
2 | parallel = true
3 | omit = *tests*
4 |
--------------------------------------------------------------------------------
/dev_notes.md:
--------------------------------------------------------------------------------
1 | # Development Notes #
2 |
3 | ### Compiling and Testing ###
4 | Compile with:
5 |
6 | `CC=mpicxx CXX=mpicxx python setup.py build_ext --inplace`
7 |
8 | or for debugging:
9 |
10 | `CC=mpicxx CXX=mpicxx CFLAGS="-O0 -g" CXXFLAGS="-O0 -g" python setup.py build_ext --inplace`
11 |
12 |
13 | There are four types of Python unit tests:
14 |
15 | 1. Ordinary single process tests. Run with:
16 |
17 | `python -m unittest discover tests`
18 |
19 | 2. Multiprocess (9 procs) mpi tests for 2D spaces. Run with:
20 |
21 | ```
22 | mpirun -n 9 python -m unittest tests.shared_obj_tests
23 | mpirun -n 9 python -m unittest tests.shared_vl_tests
24 | mpirun -n 9 python -m unittest tests.ctopo_tests
25 | ```
26 |
27 | 3. Multiprocess (18 procs) mpi tests for 3D spaces. Run with:
28 |
29 | ```
30 | mpirun -n 18 python -m unittest tests.shared_obj_tests.SharedGridTests.test_buffer_data_3d
31 | mpirun -n 18 python -m unittest tests.shared_obj_tests.SharedGridTests.test_buffer_data_3d_periodic
32 | ```
33 |
34 | 4. Multiprocess (4 procs) mpi tests for logging and network support. Run with:
35 |
36 | ```
37 | mpirun -n 4 python -m unittest tests.logging_tests
38 | mpirun -n 4 python -m unittest tests.shared_network_tests
39 | ```
40 |
41 |
42 | There are also some C++ unit tests. These can be compiled with the Makefile in the tests directory.
43 | Copy the Makefile to a `Release` or `Debug` directory at the top level, and edit it as necessary.
44 | The makefile target 'tests' will compile a `unit_tests` executable. Run the tests with:
45 |
46 | ```
47 | mpirun -n 9 ./unit_tests --gtest_filter=CartesianTopology.*
48 | ./unit_tests --gtest_filter=SpatialTreeTests.*
49 | ```
50 |
51 | ## Requirements
52 |
53 | * Python 3.8+
54 | * mpi4py
55 | * PyTorch
56 | * NumPy >= 1.18
57 | * nptyping (`pip install nptyping`)
58 | * numba
59 | * typing-extensions (if Python < 3.8)
60 | * pyyaml
61 |
62 | ## Linting ##
63 |
64 | flake8 - configuration, exclusions etc. are in setup.cfg
65 |
66 | ## Documentation Guidelines ##
67 |
68 | * Use Sphinx and readthedocs.io
69 |
70 | https://docs.python-guide.org/writing/documentation/#sphinx
71 |
72 | * Use restructured text:
73 |
74 | https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
75 |
76 | * Use google style doc strings:
77 |
78 | https://www.sphinx-doc.org/en/1.8/usage/extensions/example_google.html?highlight=docstring
79 |
80 | * Pep 484 type hints - when these are present then they do not need to be included in the doc string
81 |
82 | ## Generating API Docs ##
83 |
84 | If a new module is added, then from within the docs directory run `sphinx-apidoc -e -o source ../src/repast4py`
85 | to generate the rst for that module.
86 |
87 | And `make html` to create the html docs.
88 |
89 | `make clean` followed by `make html` will build from scratch.
90 |
91 |
92 | ## Generating ASCIIDoc Manual ##
93 |
94 | ### Prerequisites ###
95 |
96 | 1. Install asciidoctor.
97 | * With apt (Ubuntu): `sudo apt-get install -y asciidoctor`
98 | * Other OSes, see `https://docs.asciidoctor.org/asciidoctor/latest/install/`
99 | 2. Install pygments for code syntax highlighting.
100 | * Ubuntu: `gem install --user-install pygments.rb`
101 | * Other OSes, see `https://docs.asciidoctor.org/asciidoctor/latest/syntax-highlighting/pygments/`
102 |
103 | Currently (08/09/2021) the docs are generated as single html page. If we want multiple
104 | pages, see `https://github.com/owenh000/asciidoctor-multipage`
105 |
106 | ### Generating the Docs ###
107 |
108 | ```
109 | cd docs/guide
110 | asciidoctor user_guide.adoc
111 | ```
112 |
113 | This generates a user_guide.html that can be viewed in a browser.
114 |
115 | ### Creating a Distribution ###
116 |
117 | `CC=mpicxx CXX=mpicxx python -m build`
118 |
119 | creates a source tar.gz and a wheel in `dist/`
120 |
121 | https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project
122 | https://setuptools.readthedocs.io/en/latest/userguide/index.html
123 | https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects
124 |
125 | Note that a wheel (whl) built on an ordinary Linux system cannot be uploaded to PyPI. See the manylinux project:
126 | https://github.com/pypa/manylinux
127 |
128 | Testing the sdist (source dist) in a virtual env with tox:
129 |
130 | `CC=mpicxx CXX=mpicxx tox --list-dependencies`
131 |
132 | and
133 |
134 | `CC=mpicxx CXX=mpicxx tox -r --list-dependencies`
135 |
136 | if the virtual env needs to be recreated.
137 |
138 | If using conda for Python, switch to the appropriate
139 | environment, and then use tox's -e argument to select
140 | the py environment that matches the activated conda
141 | environment:
142 |
143 | ```
144 | CC=mpicxx CXX=mpicxx tox -e py37
145 | CC=mpicxx CXX=mpicxx tox -e py38
146 | ```
147 |
148 | ### Uploading to PyPI (or testpypi)
149 |
150 | This uses ~/.pypirc for API token authentication
151 |
152 | python3 -m twine upload --repository repast4py dist/*.tar.gz
153 |
154 | ### Testing from testPyPI
155 |
156 | `CC=mpicxx pip3 install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple repast4py`
157 |
158 | ## Multi Process Seg Fault Debugging
159 |
160 | See https://www.open-mpi.org/faq/?category=debugging#serial-debuggers for using gdb with mpi.
161 |
162 | General idea is to add this code
163 |
164 | ```
165 | {
166 |     volatile int i = 0;
167 |     char hostname[256];
168 |     gethostname(hostname, sizeof(hostname));
169 |     printf("PID %d on %s ready for attach\n", getpid(), hostname);
170 |     fflush(stdout);
171 |     while (0 == i)
172 |         sleep(5);
173 | }
174 | ```
175 |
176 | to the module code function call that's triggering the segfault, and follow the directions in the link above. Note that you might have to run gdb via sudo.
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | _build
2 | landing.html
3 | macos_mpi_install.html
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | sys.path.insert(0, os.path.abspath('../src'))
17 |
18 |
19 | # -- Project information -----------------------------------------------------
20 |
21 | project = 'repast4py'
22 | copyright = '2023, UChicago Argonne, LLC'
23 | author = 'Nick Collier, Jonathan Ozik, Eric Tatara, Sara Rimer'
24 |
25 |
26 | # -- General configuration ---------------------------------------------------
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
30 | # ones.
31 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon']
32 |
33 | # napoleon settings
34 | napoleon_google_docstring = True
35 | napoleon_numpy_docstring = True
36 | napoleon_include_private_with_doc = False
37 | napoleon_include_special_with_doc = False
38 | napoleon_use_admonition_for_examples = False
39 | napoleon_use_admonition_for_notes = False
40 | napoleon_use_admonition_for_references = False
41 | napoleon_use_ivar = False
42 | napoleon_use_param = True
43 | napoleon_use_rtype = True
44 |
45 | autodoc_inherit_docstrings = True
46 | autoclass_content = 'both'
47 | autodoc_typehints = 'description'
48 |
49 | # Add any paths that contain templates here, relative to this directory.
50 | templates_path = ['_templates']
51 |
52 | # List of patterns, relative to source directory, that match files and
53 | # directories to ignore when looking for source files.
54 | # This pattern also affects html_static_path and html_extra_path.
55 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
56 |
57 |
58 | # -- Options for HTML output -------------------------------------------------
59 |
60 | # The theme to use for HTML and HTML Help pages. See the documentation for
61 | # a list of builtin themes.
62 | #
63 | html_theme = 'nature'
64 | html_theme_options = {
65 |     "sidebarwidth": "23em"
66 | }
67 |
68 |
69 | # Add any paths that contain custom static files (such as style sheets) here,
70 | # relative to this directory. They are copied after the builtin static files,
71 | # so a file named "default.css" will overwrite the builtin "default.css".
72 | html_static_path = ['_static']
73 |
74 |
75 | def setup(app):
76 |     # repast4py.space imports various objects from the _space extension
77 |     # module. In order to document these as being in the space module
78 |     # we set space's __all__ here, so that when autodoc does 'from space import *'
79 |     # to determine what to pydoc, the objects from space are returned.
80 |     import repast4py.space
81 |     repast4py.space.__all__ = ['DiscretePoint', 'ContinuousPoint',
82 |                                'CartesianTopology',
83 |                                'BorderType', 'OccupancyType', 'SharedGrid', 'SharedCSpace',
84 |                                'Grid', 'ContinuousSpace']
85 |     import repast4py.core
86 |     repast4py.core.__all__ = ['Agent', 'GhostAgent', 'GhostedAgent', 'AgentManager',
87 |                               'SharedProjection', 'BoundedProjection']
88 | 
89 |     import repast4py.network
90 |     repast4py.network.__all__ = ['SharedNetwork', 'DirectedSharedNetwork', 'UndirectedSharedNetwork',
91 |                                  'read_network', 'write_network']
92 | 
93 |     # override meaningless autogenerated named tuple docs with good doc
94 |     from repast4py.geometry import BoundingBox
95 |     BoundingBox.__doc__ = "A BoundingBox defines an up to 3 dimensional space in terms of the minimum value and extent along each dimension."
96 |     BoundingBox.xmin.__doc__ = "int: the minimum x coordinate value."
97 |     BoundingBox.ymin.__doc__ = "int: the minimum y coordinate value."
98 |     BoundingBox.zmin.__doc__ = "int: the minimum z coordinate value."
99 |     BoundingBox.xextent.__doc__ = "int: the size of the x dimension"
100 |     BoundingBox.yextent.__doc__ = "int: the size of the y dimension"
101 |     BoundingBox.zextent.__doc__ = "int: the size of the z dimension"
102 |
--------------------------------------------------------------------------------
/docs/guide/.gitignore:
--------------------------------------------------------------------------------
1 | user_guide.html
2 | !images/*.png
3 |
--------------------------------------------------------------------------------
/docs/guide/ascii_doc_ex.adoc:
--------------------------------------------------------------------------------
1 | == ASCIIDoc Examples
2 |
3 |
4 | TIP: In many cases, the default user_path.xml file does not need to be changed at all.
5 |
6 | NOTE: The only unique entry in the default user_path.xml file is the model name attribute
7 | which is the same as the project name.
8 |
9 | [source,python,numbered]
10 | ----
11 | for i in range(pp_human_count): #<1>
12 |     h = Human(i, self.rank)
13 |     self.context.add(h)
14 |     x = random.default_rng.uniform(local_bounds.xmin, local_bounds.xmin +
15 |                                    local_bounds.xextent)
16 |     y = random.default_rng.uniform(local_bounds.ymin, local_bounds.ymin +
17 |                                    local_bounds.yextent)
18 |     self.move(h, x, y)
19 | ----
20 | <1> A For loop
--------------------------------------------------------------------------------
/docs/guide/distributing_agents.adoc:
--------------------------------------------------------------------------------
1 | == Cross-Process Code Requirements
2 | We've seen in the <<_distributed_simulation, Distributed Simulation>> section how ghost agents
3 | (non-local copies) are used
4 | to stitch a simulation together across processes and that when agents move out of their local
5 | grid or continuous space subsection they are moved to the process responsible for the destination
6 | subsection. While much of this is handled internally by Repast4Py, this section describes in more detail the
7 | code the user needs to provide in order for moving and copying to work correctly. We will use examples from the Zombies and Rumor demonstration models. See the {website}/examples/examples.html[Repast4Py Examples] page to download the source code for these models and for more information on getting started with the examples.
8 |
9 | === Agent ID
10 | For moving and copying agents across processes to work, each agent must have a unique id.
11 | This id has three components:
12 |
13 | . An integer that uniquely identifies the agent on the rank on which it was created
14 | . An integer that identifies its type
15 | . The integer rank on which the agent was created
16 |
17 | Combining the first component with the last allows us to uniquely identify an agent across the multi-process
18 | simulation while the second allows us to create agents of the appropriate type when they are copied
19 | between ranks.
20 |
21 | In order to ensure that all agents in Repast4Py have an agent id, all agents must inherit from the
22 | {website}/apidoc/source/repast4py.core.html#repast4py.core.Agent[`repast4py.core.Agent`] class which requires these components in its constructor. For example, in the
23 | Zombies demonstration model, the `Human` agents are subclasses of the `repast4py.core.Agent`.
24 |
25 | [source,python,numbered]
26 | ----
27 | class Human(repast4py.core.Agent): # <1>
28 |     """The Human Agent
29 | 
30 |     Args:
31 |         a_id: an integer that uniquely identifies this Human on its
32 |             starting rank
33 |         rank: the starting MPI rank of this Human.
34 |     """
35 | 
36 |     ID = 0
37 | 
38 |     def __init__(self, a_id: int, rank: int):
39 |         super().__init__(id=a_id, type=Human.ID, rank=rank) #<2>
40 | ----
41 | <1> Human inherits from `repast4py.core.Agent`
42 | <2> Calling the `repast4py.core.Agent` constructor with the agent id
43 | components.
44 |
45 | The components as well as the full unique id are accessible as
46 | attributes of the `repast4py.core.Agent` class.
47 |
48 | * id: the id component from the agent's unique id
49 | * type: the type component from the agent's unique id
50 | * rank: the rank component from the agent's unique id
51 | * uid: the unique id tuple (id, type, rank)
52 |
53 | [source,python,numbered]
54 | ----
55 | >>> h = Human(12, 3)
56 | >>> h.id
57 | 12
58 | >>> h.rank
59 | 3
60 | >>> h.type
61 | 0
62 | >>> h.uid
63 | (12, 0, 3)
64 | ----
65 |
66 | IMPORTANT: All agents must subclass `repast4py.core.Agent`. See the {website}/apidoc/source/repast4py.core.html#repast4py.core.Agent[API documenation] for `repast4py.core.Agent` for more details of the `Agent` class.
67 |
68 | === Saving and Restoring Agents
69 | Moving or copying an agent between processes consists of saving the agent state, moving / copying that state
70 | to another process, and then restoring the agent state as an agent on the destination process. For this to work, each
71 | agent is required to implement a `save` method that returns a tuple containing the full agent state. The first element of this
72 | full state tuple is the agent's unique id, itself a tuple (accessed via the `uid` attribute), and the second
73 | is the dynamic state of that agent. For example, in the Zombie
74 | demonstration model the state of each Human is represented by two variables:
75 |
76 | 1. infected: a boolean that indicates whether or not the Human is infected
77 | 2. infected_duration: an integer tracking how long the agent has been infected
78 |
79 | The `save` method creates a tuple consisting of these two variables and the unique id tuple.
80 |
81 | [source,python,numbered]
82 | ----
83 | def save(self) -> Tuple:
84 |     """Saves the state of this Human as a tuple.
85 | 
86 |     Used to move this Human from one MPI rank to another.
87 | 
88 |     Returns:
89 |         The saved state of this Human.
90 |     """
91 |     return (self.uid, self.infected, self.infected_duration)
92 | ----
93 |
94 | NOTE: The agent state in the tuple returned from `save` can also consist of other tuples, lists
95 | and so on, in addition to primitive values, as long as the unique id tuple is the first element.
96 |
97 | IMPORTANT: All agents must implement a `save` method.
98 |
99 | You must also provide a `restore` function that takes the tuple produced by the `save` method and
100 | returns an agent either created from or updated with that state. The function is used during synchronization
101 | to create the agents on the destination ranks. In the Zombies demonstration model, the `restore_agent`
102 | function, when given agent state, returns Human and Zombie agents. It uses a caching scheme
103 | to avoid re-instantiating agents that have previously been created on a rank, and updates the
104 | state of those previously created agents. This can be a useful performance improvement at the
105 | expense of using more memory.
106 |
107 | [source,python,numbered]
108 | ----
109 | agent_cache = {} #<1>
110 |
111 | def restore_agent(agent_data: Tuple): #<2>
112 |     """Creates an agent from the specified agent_data.
113 | 
114 |     This is used to re-create agents when they have moved from one MPI rank
115 |     to another. The tuple returned by the agent's save() method is moved
116 |     between ranks and create_agent is called for each tuple in order
117 |     to create the agent on that rank. Here we also use
118 |     a cache to store any agents already created on this rank,
119 |     and only update their state rather than recreating them from scratch.
120 | 
121 |     Args:
122 |         agent_data: the data from which to create the agent. This is the tuple
123 |             returned from the agent's save() method where the first
124 |             element is the agent id tuple, and any remaining
125 |             arguments encapsulate agent state.
126 |     """
127 |     uid = agent_data[0] #<3>
128 |     # in uid element 0 is id, 1 is type, 2 is rank
129 |     if uid[1] == Human.ID: #<4>
130 |         if uid in agent_cache: #<5>
131 |             h = agent_cache[uid]
132 |         else:
133 |             h = Human(uid[0], uid[2])
134 |             agent_cache[uid] = h
135 | 
136 |         # restore the agent state from the agent_data tuple
137 |         h.infected = agent_data[1] #<6>
138 |         h.infected_duration = agent_data[2]
139 |         return h
140 |     else: #<7>
141 |         # note that the zombie has no internal state
142 |         # so there's nothing to restore other than
143 |         # the Zombie itself
144 |         if uid in agent_cache:
145 |             return agent_cache[uid]
146 |         else:
147 |             z = Zombie(uid[0], uid[2])
148 |             agent_cache[uid] = z
149 |             return z
150 | ----
151 | <1> Cache for previously instantiated agents. Key is an agent's unique id (uid) tuple and value is the agent.
152 | <2> `agent_data` is a tuple of the format produced by the `save` method. For Humans this is (uid, infected,
153 | infected_duration). For Zombies, this is just the one-element tuple (uid,), as sketched below.
154 | <3> The first element of the `agent_data` tuple is the uid tuple. The uid tuple is (id, type, starting rank).
155 | <4> Checks if the agent is a Human or Zombie, using the type component of the uid.
156 | <5> Checks if the agent is already cached, if so then get it (line 23), otherwise create a new `Human` agent
157 | (line 25).
158 | <6> Updates the cached / created Human with the passed in agent state.
159 | <7> `agent_data` is for a Zombie so search cache and if necessary create a new one.
160 |
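
Although the Zombies model source is not reproduced in full here, the `save` method for an
agent with no dynamic state, such as the Zombie above, can simply return a one-element tuple
containing the unique id. A minimal sketch (the `Zombie.ID` value and the class body shown
here are illustrative assumptions, and the imports are the same as in the `Human` example):

[source,python,numbered]
----
class Zombie(repast4py.core.Agent):

    ID = 1  # assumed type id component, distinct from Human.ID

    def __init__(self, a_id: int, rank: int):
        super().__init__(id=a_id, type=Zombie.ID, rank=rank)

    def save(self) -> Tuple:
        """Saves the state of this Zombie as a tuple.

        A Zombie has no dynamic state, so only the unique id is saved.
        """
        return (self.uid,)
----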
161 | Lastly, in a distributed network, agents are not typically moved between processes
162 | but rather the ghost agents remain on a process once the network is created. Repast4Py tracks
163 | these ghost agents and does not recreate the agents at every synchronization step via a `restore`
164 | function; instead, a state update is sent to the appropriate ghost agents. In that case, an agent's `update`
165 | method is called to handle the state update. The Rumor demonstration model has an example of this.
166 |
167 | [source,python,numbered]
168 | ----
169 | class RumorAgent(core.Agent):
170 | 
171 |     ...
172 | 
173 |     def update(self, data: bool):    # <1>
174 |         """Updates the state of this agent when it is a ghost
175 |         agent on some rank other than its local one.
176 | 
177 |         Args:
178 |             data: the new agent state (received_rumor)
179 |         """
180 |         ...
181 |         self.received_rumor = data
182 | ----
183 | <1> Updates ghost agent state from saved agent state. Here the `data` argument
184 | is only the dynamic state element of the tuple returned from the agent's `save` method, namely,
185 | the `self.received_rumor` bool from `(self.uid, self.received_rumor)`.
186 |
187 | === Synchronization
188 | As mentioned in the <<_distributed_simulation, Distributed Simulation>> section, each process in a
189 | Repast4Py application runs in a separate memory space from all the other processes. Consequently,
190 | we need to synchronize the model state across processes by moving agents, filling
191 | projection buffers with ghosts, and updating ghosted states, as necessary. Synchronization
192 | is performed by calling the
193 | {website}/apidoc/source/repast4py.context.html#repast4py.context.SharedContext.synchronize[`SharedContext.synchronize`] method and passing it your restore function.
194 | The `synchronize` method will use the agent `save` method(s) and your restore function
195 | to synchronize the state of the simulation across its processes.
196 |
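For example, a minimal sketch of a per-tick model method that steps the local agents and then
synchronizes (the `Model` class, its `context` attribute, and the per-agent `step` method are
illustrative assumptions; `restore_agent` is the function shown earlier):

[source,python,numbered]
----
class Model:

    def step(self):
        # execute the behavior of each agent local to this rank
        for agent in self.context.agents():
            agent.step()
        # move / copy agents and ghosts across ranks, re-creating or
        # updating them via the restore_agent function
        self.context.synchronize(restore_agent)
----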
--------------------------------------------------------------------------------
/docs/guide/images/shared_grid_agents.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Repast/repast4py/bbf151726c4fa5f8cfaa10b78a614aefcbb1e82e/docs/guide/images/shared_grid_agents.png
--------------------------------------------------------------------------------
/docs/guide/images/shared_grid_agents_800.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Repast/repast4py/bbf151726c4fa5f8cfaa10b78a614aefcbb1e82e/docs/guide/images/shared_grid_agents_800.png
--------------------------------------------------------------------------------
/docs/guide/images/shared_net_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Repast/repast4py/bbf151726c4fa5f8cfaa10b78a614aefcbb1e82e/docs/guide/images/shared_net_2.png
--------------------------------------------------------------------------------
/docs/guide/motivation.adoc:
--------------------------------------------------------------------------------
1 | :fn-workflow-covid: footnote:[Ozik, J., Wozniak, J. M., Collier, N., Macal, C. M., & Binois, M. (2021). A population data-driven workflow for COVID-19 modeling and learning. The International Journal of High Performance Computing Applications, 35(5), 483–499. https://doi.org/10.1177/10943420211035164]
2 | :fn-extreme-scale: footnote:[Ozik, J., Collier, N. T., Wozniak, J. M., Macal, C. M., & An, G. (2018). Extreme-Scale Dynamic Exploration of a Distributed Agent-Based Model With the EMEWS Framework. IEEE Transactions on Computational Social Systems, 5(3), 884–895. https://doi.org/10.1109/TCSS.2018.2859189]
3 | :fn-r4py: footnote:[Collier, N. T., Ozik, J., & Tatara, E. R. (2020). Experiences in Developing a Distributed Agent-based Modeling Toolkit with Python. 2020 IEEE/ACM 9th Workshop on Python for High-Performance and Scientific Computing (PyHPC), 1–12. https://doi.org/10.1109/PyHPC51966.2020.00006]
4 |
5 | == Getting Started
6 | Repast for Python (Repast4Py) is the newest member of the https://repast.github.io[Repast Suite] of free and open source agent-based modeling and simulation software.
7 | It builds on https://repast.github.io/repast_hpc.html[Repast HPC], and provides the ability to build large, distributed agent-based models (ABMs) that span multiple processing cores.
8 | Distributed ABMs enable the development of complex systems models that capture the scale and relevant details of many problems of societal importance.{wj}{fn-workflow-covid}{wj}{fn-extreme-scale}
9 | Where Repast HPC is implemented in C++ and is more HPC expert focused, Repast4Py is a Python package and is designed to provide an easier on-ramp for researchers from diverse scientific communities to apply large-scale distributed ABM methods. Repast4Py is released under the BSD-3 open source license, and leverages https://numba.pydata.org[Numba], https://numpy.org[NumPy], and https://pytorch.org[PyTorch] packages, and the Python C API
10 | to create a scalable modeling system that can exploit the largest HPC resources and emerging computing architectures. See our paper on Repast4Py for additional information about the design and implementation.{wj}{fn-r4py}
11 |
12 | === Requirements
13 |
14 | Repast4Py can run on Linux, macOS and Windows provided there is a working MPI implementation
15 | installed and mpi4py is supported. Repast4Py is developed and tested on Linux. We recommend
16 | that Windows users use the Windows Subsystem for Linux (WSL). Installation instructions for
17 | WSL can be found https://docs.microsoft.com/en-us/windows/wsl/install[here].
18 |
19 | Under Linux, MPI can be installed using your OS's package manager. For example,
20 | under Ubuntu 20.04 (and thus WSL), the mpich MPI implementation can be installed with:
21 |
22 | [source,bash]
23 | ----
24 | $ sudo apt install mpich
25 | ----
26 |
27 | Installation instructions for MPI on macOS can be found {website}/macos_mpi_install.html[here].
28 |
29 | A typical campus cluster or HPC resource will have MPI and mpi4py installed.
30 | Check the resource's documentation on available software for more details.
31 |
32 | === Installation
33 |
34 | Repast4Py can be downloaded and installed from PyPI using pip.
35 | Since Repast4Py includes native MPI {cpp} code that needs to be compiled,
36 | the C compiler `CC` environment variable must be set
37 | to the `mpicxx` (or `mpic++`) compiler wrapper provided by your MPI installation.
38 |
39 | ----
40 | env CC=mpicxx pip install repast4py
41 | ----
42 |
43 | NOTE: If you see an error message about a missing `python.h` header file when
44 | installing Repast4Py under Ubuntu (or other Linuxes), you will need to install
45 | a python dev package using your OS's package manager. For example, assuming
46 | Python 3.8, `sudo apt install python3.8-dev` will work for Ubuntu.
47 |
48 | === Documentation
49 |
50 | * link:./user_guide.html[User's Guide] (This document)
51 | * {website}/apidoc/index.html[API Docs]
52 | * {website}/examples/examples.html[Example Models]
53 |
54 | === Contact and Support
55 |
56 | * https://github.com/Repast/repast4py/issues[GitHub Issues]
57 | * https://github.com/Repast/repast4py[GitHub Repository]
58 |
59 |
60 | In addition to filing issues on GitHub, support is also available via
61 | https://stackoverflow.com/questions/tagged/repast4py[Stack Overflow].
62 | Please use the `repast4py` tag to ensure that we are notified of your question.
63 | Software announcements will be made on the
64 | http://lists.sourceforge.net/lists/listinfo/repast-interest[repast-interest] mailing list.
65 |
66 | Jonathan Ozik is the Repast project lead. Please contact him through
67 | the https://www.anl.gov/staff-directory[Argonne Staff Directory] if you
68 | have project-related questions.
69 |
70 |
71 | == Why Repast4Py?
72 | Modern high-performance computing (HPC) capabilities have allowed for large-scale computational modeling and experimentation.
73 | HPC clusters and supercomputers -- such as those hosted by universities, national laboratories, and cloud computing providers -- can have thousands or more processor cores available, allowing for high concurrency.
74 | Even individual CPUs now typically contain multiple cores, which are capable of running concurrently.
75 | Distributed ABMs attempt to leverage this hardware by distributing an individual simulation over multiple processes running in parallel.
76 |
77 | However, in order to take advantage of these increasingly ubiquitous parallel computing resources, a computational model must first be refashioned to run on multiple processors.
78 | Adapting a computational model that was built for a single processor to run on multiple processors can be a nontrivial endeavor, both conceptually and practically.
79 | Repast4Py aims to ease the transition to distributed ABMs by hiding much of the complexity.
80 |
81 |
82 | === Distributed computing a natural fit for agent-based modeling
83 | A typical agent-based simulation consists of a population of agents, each of which performs some behavior at each timestep or at some other frequency.
84 | In practice, this is often implemented as a loop over the agent population in which each agent executes its behavior.
85 | The time it takes to complete the loop depends on the number of agents and the complexity of the behavior.
86 | By distributing the agent population across multiple processes running in parallel, each process executes its own loop over only a subset of the population, allowing for larger agent populations and more complex behavior.
87 |
88 | === Repast4Py and the broader Repast family
89 | While Repast4Py is meant to make the development of distributed ABMs easier, we encourage users new to the Repast Suite to look through the different versions of https://repast.github.io/docs.html[Repast] to determine which toolkit is most appropriate for their needs. Of note, we recommend that users new to agent-based modeling first check out https://repast.github.io/repast_simphony.html[Repast Simphony] to develop a better understanding of the concepts behind agent-based modeling and learn how to quickly build such models.
90 |
91 | The following sections will provide some conceptual background for a Repast-style simulation, describe how such a simulation is distributed across multiple processes with Repast4Py, and end with providing a few basic tutorials.
92 |
93 |
94 |
95 |
96 |
97 |
--------------------------------------------------------------------------------
/docs/guide/user_guide.adoc:
--------------------------------------------------------------------------------
1 | = Repast for Python (Repast4Py) User Guide
2 | Version 2.0 February 2023
3 | :toc2:
4 | :icons: font
5 | :numbered:
6 | :website: https://jozik.github.io/goes_bing
7 | :xrefstyle: full
8 | :imagesdir: images
9 | :source-highlighter: pygments
10 |
11 | // Sections need line break between includes
12 |
13 | include::motivation.adoc[]
14 |
15 | include::overview.adoc[]
16 |
17 | include::distributing_agents.adoc[]
18 |
19 | include::tutorial_random_walk.adoc[]
20 |
21 | include::tutorial_network.adoc[]
22 |
23 | include::tutorial_zombies.adoc[]
24 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. repast4py documentation master file, created by
2 |    sphinx-quickstart on Thu Apr 22 13:56:50 2021.
3 |    You can adapt this file completely to your liking, but it should at least
4 |    contain the root `toctree` directive.
5 |
6 | Repast4Py API Documentation
7 | =========================================
8 |
9 | .. toctree::
10 |    :maxdepth: 4
11 |    :caption: Contents:
12 | 
13 |    source/modules.rst
14 |
15 |
16 |
17 |
18 | Indices and tables
19 | ==================
20 |
21 | * :ref:`genindex`
22 | * :ref:`modindex`
23 | * :ref:`search`
24 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | 	set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | 	echo.
18 | 	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | 	echo.installed, then set the SPHINXBUILD environment variable to point
20 | 	echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | 	echo.may add the Sphinx directory to PATH.
22 | 	echo.
23 | 	echo.If you don't have Sphinx installed, grab it from
24 | 	echo.http://sphinx-doc.org/
25 | 	exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | repast4py
2 | =========
3 |
4 | .. toctree::
5 |    :maxdepth: 4
6 | 
7 |    repast4py
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.context.rst:
--------------------------------------------------------------------------------
1 | repast4py.context module
2 | ========================
3 |
4 | .. automodule:: repast4py.context
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.core.rst:
--------------------------------------------------------------------------------
1 | repast4py.core module
2 | =====================
3 |
4 | .. automodule:: repast4py.core
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.geometry.rst:
--------------------------------------------------------------------------------
1 | repast4py.geometry module
2 | =========================
3 |
4 | .. automodule:: repast4py.geometry
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.logging.rst:
--------------------------------------------------------------------------------
1 | repast4py.logging module
2 | ========================
3 |
4 | .. automodule:: repast4py.logging
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.network.rst:
--------------------------------------------------------------------------------
1 | repast4py.network module
2 | ========================
3 |
4 | .. automodule:: repast4py.network
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.parameters.rst:
--------------------------------------------------------------------------------
1 | repast4py.parameters module
2 | ===========================
3 |
4 | .. automodule:: repast4py.parameters
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.random.rst:
--------------------------------------------------------------------------------
1 | repast4py.random module
2 | =======================
3 |
4 | .. automodule:: repast4py.random
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.rst:
--------------------------------------------------------------------------------
1 | repast4py package
2 | =================
3 |
4 | Submodules
5 | ----------
6 |
7 | .. toctree::
8 |    :maxdepth: 4
9 | 
10 |    repast4py.context
11 |    repast4py.core
12 |    repast4py.geometry
13 |    repast4py.logging
14 |    repast4py.network
15 |    repast4py.parameters
16 |    repast4py.random
17 |    repast4py.schedule
18 |    repast4py.space
19 |    repast4py.util
20 |    repast4py.value_layer
21 |
22 | Module contents
23 | ---------------
24 |
25 | .. automodule:: repast4py
26 |    :members:
27 |    :undoc-members:
28 |    :show-inheritance:
29 |
--------------------------------------------------------------------------------
/docs/source/repast4py.schedule.rst:
--------------------------------------------------------------------------------
1 | repast4py.schedule module
2 | =========================
3 |
4 | .. automodule:: repast4py.schedule
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.space.rst:
--------------------------------------------------------------------------------
1 | repast4py.space module
2 | ======================
3 |
4 | .. automodule:: repast4py.space
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |    :inherited-members:
9 |
--------------------------------------------------------------------------------
/docs/source/repast4py.util.rst:
--------------------------------------------------------------------------------
1 | repast4py.util module
2 | =====================
3 |
4 | .. automodule:: repast4py.util
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/repast4py.value_layer.rst:
--------------------------------------------------------------------------------
1 | repast4py.value\_layer module
2 | =============================
3 |
4 | .. automodule:: repast4py.value_layer
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/web/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -eu
4 |
5 |
6 | REPO=$HOME/Documents/repos/repast4py.site
7 | THIS=$( cd $( dirname $0 ) ; /bin/pwd )
8 | ROOT=$( cd ../../ ; /bin/pwd )
9 |
10 | WEBSITE=https://repast.github.io/repast4py.site
11 |
12 | help() {
13 |     echo "$(basename "$0") [-l|u|a|e] - builds the repast4py documentation."
14 |     echo "If no option is specified, then all the documentation is built."
15 |     echo
16 |     echo "Options:"
17 |     echo " l build the landing page and macos mpi doc"
18 |     echo " u build the user guide"
19 |     echo " a build the API documentation"
20 |     echo " e build the examples"
21 | }
22 | 
23 | landing_page() {
24 |     # Update the landing page
25 |     echo "Building landing page and macos mpi doc"
26 |     asciidoctor -a website=$WEBSITE landing.adoc -o $REPO/index.html
27 |     asciidoctor -a website=$WEBSITE macos_mpi_install.adoc -o $REPO/macos_mpi_install.html
28 | 
29 | }
30 | 
31 | user_guide() {
32 |     echo "Building user guide"
33 |     mkdir -p $REPO/guide/images
34 |     asciidoctor -a website=$WEBSITE $THIS/../guide/user_guide.adoc -o $REPO/guide/user_guide.html
35 |     cp $THIS/../guide/images/* $REPO/guide/images
36 | }
37 | 
38 | api_docs() {
39 |     echo "Building API Docs"
40 |     mkdir -p $REPO/apidoc
41 |     cd $THIS/../..
42 |     CC=mpicxx CXX=mpicxx python setup.py build_ext --inplace
43 |     cd $THIS/..
44 |     make clean
45 |     make html
46 |     cp -r _build/html/* $REPO/apidoc/
47 | }
48 | 
49 | examples() {
50 |     echo "Building Examples"
51 |     mkdir -p $REPO/examples
52 |     cd $THIS/../../examples
53 |     asciidoctor -a website=$WEBSITE examples.adoc
54 |     cp examples.html $REPO/examples/examples.html
55 | 
56 |     adocs=("examples/rumor/rumor_model.adoc"
57 |         "examples/rndwalk/random_walk.adoc"
58 |         "examples/zombies/zombies.adoc"
59 |     )
60 |     for f in "${adocs[@]}"
61 |     do
62 |         path=$ROOT/$f
63 |         # pd=$( dirname $path)
64 |         # cd $pd
65 |         # echo $pd
66 |         asciidoctor -a website=$WEBSITE $path
67 |     done
68 | 
69 |     cd $ROOT
70 |     rm -f $REPO/examples/repast4py_example_models.zip
71 |     zip $REPO/examples/repast4py_example_models.zip -r examples -i@$THIS/examples_to_include
72 | }
73 | 
74 | # push() {
75 | #     echo "Pushing $REPO"
76 | #     # cd $REPO
77 | #     # git commit -a -m "Updated with latest"
78 | #     # git push origin main
79 | # }
80 | 
81 | while getopts "hluae" option; do
82 |     case $option in
83 |         h) # display Help
84 |             help
85 |             exit;;
86 |         l)
87 |             landing_page
88 |             ;;
89 |         u)
90 |             user_guide
91 |             ;;
92 |         a)
93 |             api_docs
94 |             ;;
95 |         e)
96 |             examples
97 |             ;;
98 |         *) # Invalid option
99 |             echo "Error: Invalid option"
100 |             help
101 |             exit;;
102 |     esac
103 | done
104 | 
105 | if [ $OPTIND -eq 1 ]; then
106 |     landing_page
107 |     user_guide
108 |     api_docs
109 |     examples
110 | fi
--------------------------------------------------------------------------------
/docs/web/examples_to_include:
--------------------------------------------------------------------------------
1 | examples/examples.html
2 | examples/rndwalk/random_walk.html
3 | examples/rndwalk/*.yaml
4 | examples/rndwalk/rndwalk.py
5 | examples/rumor/rumor_model.html
6 | examples/rumor/rumor_model.yaml
7 | examples/rumor/rumor.py
8 | examples/rumor/network.txt
9 | examples/zombies/zombies.py
10 | examples/zombies/zombies.html
11 | examples/zombies/zombie_model.yaml
12 |
--------------------------------------------------------------------------------
/docs/web/landing.adoc:
--------------------------------------------------------------------------------
1 | = image:Repast4PyLogo_avenir_light_300h.png[width=400]
2 | :icons: font
3 | :website: http://repast.github.io
4 | :xrefstyle: full
5 | :imagesdir: ./images
6 | :source-highlighter: pygments
7 |
8 | == Repast4Py
9 |
10 | Repast for Python (Repast4Py) is the newest member of the https://repast.github.io[Repast Suite] of free and open source agent-based modeling and simulation software.
11 | It builds on https://repast.github.io/repast_hpc.html[Repast HPC], and provides the ability to build large, distributed agent-based models (ABMs) that span multiple processing cores.
12 | Distributed ABMs enable the development of complex systems models that capture the scale and relevant details of many problems of societal importance. Where Repast HPC is implemented in C++ and is more HPC expert focused, Repast4Py is a Python package and is designed to provide an easier on-ramp for researchers from diverse scientific communities to apply large-scale distributed ABM methods.
13 | Repast4Py is released under the BSD-3 open source license, and leverages the https://numba.pydata.org[Numba], https://numpy.org[NumPy], and https://pytorch.org[PyTorch] packages, as well as the Python C API
14 | to create a scalable modeling system that can exploit the largest HPC resources and emerging computing architectures. See our papers on Repast4Py for additional information about the design and implementation.{wj}footnote:[Collier, N. T., Ozik, J., & Tatara, E. R. (2020). Experiences in Developing a Distributed Agent-based Modeling Toolkit with Python. 2020 IEEE/ACM 9th Workshop on Python for High-Performance and Scientific Computing (PyHPC), 1–12. https://doi.org/10.1109/PyHPC51966.2020.00006.]
15 | footnote:[Collier, N. & Ozik, J. (2023). Distributed Agent-Based Simulation with Repast4Py. In Proceedings of the Winter Simulation Conference (WSC '22). IEEE Press, 192–206. https://doi.org/10.1109/WSC57314.2022.10015389.]
16 |
17 | === Requirements
18 |
19 | Repast4Py can run on Linux, macOS and Windows provided there is a working MPI implementation
20 | installed and mpi4py is supported. Repast4Py is developed and tested on Linux. We recommend
21 | that Windows users use the Windows Subsystem for Linux (WSL). Installation instructions for
22 | WSL can be found https://docs.microsoft.com/en-us/windows/wsl/install[here].
23 |
24 | Under Linux, MPI can be installed using your OS's package manager. For example,
25 | under Ubuntu 20.04 (and thus WSL), the mpich MPI implementation can be installed with:
26 |
27 | [source,bash]
28 | ----
29 | $ sudo apt install mpich
30 | ----
31 |
32 | Installation instructions for MPI on macOS can be found link:./macos_mpi_install.html[here].
33 |
34 | A typical campus cluster or HPC resource will have MPI and mpi4py installed.
35 | Check the resource's documentation on available software for more details.
36 |
37 | === Installation
38 |
39 | Repast4Py can be downloaded and installed from PyPI using pip.
40 | Since Repast4Py includes native MPI {cpp} code that needs to be compiled,
41 | the `CC` environment variable must be set
42 | to the `mpicxx` (or `mpic++`) compiler wrapper provided by your MPI installation.
43 |
44 | ----
45 | env CC=mpicxx pip install repast4py
46 | ----
47 |
48 | NOTE: If you see an error message about a missing `Python.h` header file when
49 | installing Repast4Py under Ubuntu (or other Linux distributions), you will need to install
50 | a Python development package using your OS's package manager. For example, assuming
51 | Python 3.8, `sudo apt install python3.8-dev` will work for Ubuntu.
52 |
53 | === Documentation
54 |
55 | * link:./guide/user_guide.html[User's Guide]
56 | * link:./apidoc/index.html[API Docs]
57 | * link:./examples/examples.html[Example Models]
58 |
59 | === Contact and Support
60 |
61 | * https://github.com/Repast/repast4py/issues[GitHub Issues]
62 | * https://github.com/Repast/repast4py[GitHub Repository]
63 |
64 |
65 | In addition to filing issues on GitHub, support is also available via
66 | https://stackoverflow.com/questions/tagged/repast4py[Stack Overflow].
67 | Please use the `repast4py` tag to ensure that we are notified of your question.
68 | Software announcements will be made on the
69 | http://lists.sourceforge.net/lists/listinfo/repast-interest[repast-interest] mailing list.
70 |
71 | Jonathan Ozik is the Repast project lead. Please contact him through
72 | the https://www.anl.gov/staff-directory[Argonne Staff Directory] if you
73 | have project-related questions.
74 |
75 | === Funding
76 |
77 | This material is based upon work supported by the U.S. Department of Energy, Office of Science, under contract number DE-AC02-06CH11357. Repast4Py is being used in multiple projects, including the NIH funded projects R01AI136056, U2CDA050098, R01MD014703, R21MH128116, R01AI146917, R01AI158666 and is informed by their requirements and continuing development.
78 |
--------------------------------------------------------------------------------
/docs/web/macos_mpi_install.adoc:
--------------------------------------------------------------------------------
1 | :icons: font
2 | :website: http://repast.github.io
3 | :xrefstyle: full
4 | :imagesdir: ./images
5 | :source-highlighter: pygments
6 |
7 | == MPI on macOS
8 |
9 | Repast4Py requires that a working MPI implementation be
10 | installed. On macOS, the MPICH MPI implementation can be installed using the https://brew.sh[Homebrew] package manager, or compiled from source. In either case, first
11 | install the macOS Xcode command line tools, if necessary, by running the following in a terminal window.
12 |
13 | [source,zsh]
14 | ----
15 | xcode-select --install
16 | ----
17 |
18 | This will start a dialog for the tools installation.
19 |
20 | === Homebrew
21 |
22 | To install MPICH using the Homebrew package manager, first install
23 | Homebrew by going to the https://brew.sh[Homebrew] site and
24 | following the installation instructions. Once Homebrew is installed, install MPICH
25 | by running the following in a terminal window.
26 |
27 | [source,zsh]
28 | ----
29 | % /opt/homebrew/bin/brew install mpich
30 | ----
31 |
32 | Check that the install was successful by running one of the MPICH MPI compiler
33 | wrappers.
34 |
35 | [source,zsh]
36 | ----
37 | % /opt/homebrew/bin/mpicxx --version
38 | Apple clang version 14.0.0 (clang-1400.0.29.102)
39 | Target: arm64-apple-darwin21.6.0
40 | Thread model: posix
41 | InstalledDir: /Library/Developer/CommandLineTools/usr/bin
42 | ----
43 |
44 | NOTE: The version information might differ.
45 |
46 | === Compile MPICH from Source
47 |
48 | Once the command line tool installation is complete, download the mpich source from https://www.mpich.org/downloads[here]; any recent stable version will work. The source then needs to be unarchived, configured, and compiled in a terminal window.
49 |
50 | [source,zsh]
51 | ----
52 | # cd to wherever the mpich tar ball was downloaded (e.g., ~/Downloads)
53 | % cd ~/Downloads
54 | # Replace mpich-4.0.3.tar.gz with the downloaded version
55 | % tar xf mpich-4.0.3.tar.gz
56 | % cd mpich-4.0.3
57 | # Replace $HOME/sfw/mpich-4.0.3 with where you want to install mpich
58 | % ./configure --disable-fortran --prefix=$HOME/sfw/mpich-4.0.3
59 | # Compile and install the source
60 | % make install
61 | ----
62 |
63 | NOTE: The compilation will take a few minutes or more, depending on your
64 | machine.
65 |
66 | === Testing the MPI Installation
67 |
68 | Once MPICH is installed, we can test the MPI installation by installing the `mpi4py` Python package,
69 | which requires an MPI installation and is itself a Repast4Py requirement.
70 | As part of its installation, `mpi4py` needs to be able to find the MPI
71 | compilation wrappers installed as part of the MPI implementation.
72 | To determine if the wrappers can be found, run the following in a
73 | terminal window.
74 |
75 | [source,zsh]
76 | ----
77 | % mpicc --version
78 | ----
79 |
80 | If the wrappers are not found, you will see an error like `zsh: command not found: mpicc`, and you will need to add the wrapper location to the `PATH` environment variable. If you installed via Homebrew, this will be `/opt/homebrew/bin`. If you installed from source, this will be the install directory specified
81 | in the `prefix` argument plus `/bin` (e.g., `$HOME/sfw/mpich-4.0.3/bin`).
82 |
83 | [source,zsh]
84 | ----
85 | # Replace X with `/opt/homebrew` for Homebrew, or with wherever you
86 | # installed mpich from source
87 | % export PATH=X/bin:$PATH
88 | % mpicc --version
89 | Apple clang version 14.0.0 (clang-1400.0.29.102)
90 | Target: arm64-apple-darwin21.6.0
91 | Thread model: posix
92 | InstalledDir: /Library/Developer/CommandLineTools/usr/bin
93 | ----
94 |
95 | In the same terminal window, install and test `mpi4py` by importing it.
96 |
97 | [source,zsh]
98 | ----
99 | % pip3 install mpi4py
100 | Collecting mpi4py
101 | ...
102 | % python3
103 | Python 3.9.6 (default, Aug 5 2022, 15:21:02)
104 | [Clang 14.0.0 (clang-1400.0.29.102)] on darwin
105 | Type "help", "copyright", "credits" or "license" for more information.
106 | >>> from mpi4py import MPI
107 | ----
108 |
109 | Assuming the `mpi4py` install is successful, install Repast4Py and
110 | test it with an import.
111 |
112 | [source,zsh]
113 | ----
114 | % CC=mpicxx pip3 install repast4py
115 | Collecting repast4py
116 | ...
117 | % python3
118 | Python 3.9.6 (default, Aug 5 2022, 15:21:02)
119 | [Clang 14.0.0 (clang-1400.0.29.102)] on darwin
120 | Type "help", "copyright", "credits" or "license" for more information.
121 | >>> from repast4py import space
122 | ----
123 |
124 | NOTE: The MPI compilation wrappers also need to be on the `PATH` for
125 | the repast4py install.
--------------------------------------------------------------------------------
/envs/bebop_config_repast4py.sh:
--------------------------------------------------------------------------------
1 | export PYTHONPATH=${HOME}/jccm/repast4py/src
2 |
3 | module load gcc/7.1.0-4bgguyp
4 | module load mpich
5 | module load anaconda3/5.2.0
6 | module load jdk
7 | module unload intel-mkl/2018.1.163-4okndez
8 | . /lcrc/project/EMEWS/bebop/repos/spack/share/spack/setup-env.sh
9 | spack load /5x5gwmx
10 |
--------------------------------------------------------------------------------
/envs/bebop_env.sh:
--------------------------------------------------------------------------------
1 | module load gcc/8.2.0-xhxgy33
2 | module load mvapich2/2.3-bebop-a66r4jf
3 | module load anaconda3/5.2.0
4 |
--------------------------------------------------------------------------------
/examples/.gitignore:
--------------------------------------------------------------------------------
1 | output/
2 | *.html
--------------------------------------------------------------------------------
/examples/diffusion/run_diffusion.py:
--------------------------------------------------------------------------------
1 | import time, sys
2 |
3 | import torch
4 |
5 | from mpi4py import MPI
6 | from scipy import stats
7 | import numpy as np
8 |
9 | from repast4py import value_layer, space
10 | from diffusion import Diffuser
11 |
12 |
13 | def run(use_cuda):
14 | box = space.BoundingBox(0, 10016, 0, 10016, 0, 0)
15 | device = torch.device('cuda') if use_cuda else torch.device('cpu')
16 | vl = value_layer.ReadWriteValueLayer('vl', bounds=box, borders=space.BorderType.Sticky,
17 | buffer_size=2, comm=MPI.COMM_WORLD, init_value=0, device=device)
18 | diffuser = Diffuser(vl)
19 | vl.read_layer.impl.grid[2, 2] = 1
20 | #print(vl.read_layer.impl.grid)
21 | #print(vl.write_layer.impl.grid)
22 | for _ in range(50):
23 | vl.apply(diffuser)
24 | # vl.synchronize_buffer()
25 | vl.swap_layers()
26 |
27 | #print(vl.read_layer.impl.grid)
28 | #print(vl.write_layer.impl.grid)
29 |
30 | if __name__ == "__main__":
31 | for _ in range(21):
32 | start_time = time.time()
33 | run(sys.argv[1] == 'on')
34 | end_time = time.time()
35 | if MPI.COMM_WORLD.Get_rank() == 0:
36 | print(end_time - start_time)
37 |
--------------------------------------------------------------------------------
/examples/examples.adoc:
--------------------------------------------------------------------------------
1 | = Repast4Py Examples
2 | :toc2:
3 | :icons: font
4 | :website: https://jozik.github.io/goes_bing
5 | :xrefstyle: full
6 | :imagesdir: images
7 | :source-highlighter: pygments
8 |
9 | == Getting Started
10 |
11 | The example models can be downloaded from
12 | {website}/examples/repast4py_example_models.zip[here].
13 |
14 | Each
15 | example model resides in its own subdirectory, e.g., `examples/rumor` for the
16 | Rumor Network model. Each subdirectory contains the python source for the model,
17 | an html file with additional information about the model, and a yaml format
18 | file containing the input parameters for that model.
19 |
20 | To run the model,
21 |
22 | [source, bash]
23 | ----
24 | $ cd examples/<model directory>
25 | $ mpirun -n 4 python <model python file> <yaml input file>
26 | ----
27 |
28 | Replace `<model directory>`, `<model python file>`, and `<yaml input file>` with the relevant model directory,
29 | python file, and yaml input file. For example,
30 |
31 | [source, bash]
32 | ----
33 | $ cd examples/rndwalk
34 | $ mpirun -n 4 python rndwalk.py random_walk.yaml
35 | ----
36 |
37 | == Example Models
38 |
39 | // There are currently 3 demonstration models.
40 | === Random Walk
41 |
42 | *Location*: `examples/rndwalk` +
43 | *Key Concepts*: movement, grid API
44 |
45 | include::./rndwalk/random_walk_overview.adoc[]
46 |
47 | === Rumor Spreading
48 |
49 | *Location*: `examples/rumor` +
50 | *Key Concepts*: networks, network neighbors, network API
51 |
52 | include::./rumor/rumor_overview.adoc[]
53 |
54 | === Zombies
55 |
56 | *Location*: `examples/zombies` +
57 | *Key Concepts*: movement, neighborhood search, continuous space API, grid API
58 |
59 | include::./zombies/zombies_overview.adoc[]
60 |
--------------------------------------------------------------------------------
/examples/rndwalk/random_walk.adoc:
--------------------------------------------------------------------------------
1 | = Random Walk Example Model
2 | :icons: font
3 | :website: http://repast.github.io
4 | :xrefstyle: full
5 | :imagesdir: images
6 | :source-highlighter: pygments
7 |
8 | == Overview
9 |
10 | include::random_walk_overview.adoc[]
11 |
12 |
--------------------------------------------------------------------------------
/examples/rndwalk/random_walk.yaml:
--------------------------------------------------------------------------------
1 | random.seed: 42
2 | stop.at: 50
3 | walker.count: 1000
4 | world.width: 2000
5 | world.height: 2000
6 | meet_log_file: 'output/meet_log.csv'
7 | agent_log_file: 'output/agent_log.csv'
--------------------------------------------------------------------------------
/examples/rndwalk/random_walk_overview.adoc:
--------------------------------------------------------------------------------
1 | The Random Walk model is a simple model intended as an introduction to coding a Repast4Py simulation.
2 | The model consists of a number of agents moving at random around a two-dimensional grid and logging
3 | the aggregate and agent-level colocation counts. Each iteration of the model:
4 |
5 | 1. All the agents (_walkers_) choose a random direction and move one unit in that direction.
6 | 2. All the agents count the number of other agents they _meet_ at their current location by
7 | determining the number of colocated agents at their grid locations.
8 | 3. The sum, minimum, and maximum number of agents met are calculated across all process ranks, and these
9 | values are logged as the total, minimum, and maximum `meet` values.
10 |
11 | See {website}/guide/user_guide.html#_tutorial_1_a_simple_random_walk_model[Tutorial 1] in the Repast4Py
12 | User's Guide for a complete explanation of the Random Walk model.
13 |
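14 | For step 1, the random move is implemented by the `Walker.walk` method in `rndwalk.py`;
15 | a condensed sketch of the idea, using the same grid and random APIs, is:
16 |
17 | [source,python]
18 | ----
19 | import numpy as np
20 | from repast4py import random
21 | from repast4py.space import DiscretePoint as dpt
22 |
23 | OFFSETS = np.array([-1, 1])
24 |
25 | def walk(walker, grid):
26 |     # choose -1 or +1 independently for the x and y dimensions
27 |     xy_dirs = random.default_rng.choice(OFFSETS, size=2)
28 |     # grid.move applies the grid's border rules and returns the new location
29 |     walker.pt = grid.move(walker, dpt(walker.pt.x + xy_dirs[0], walker.pt.y + xy_dirs[1], 0))
30 | ----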
--------------------------------------------------------------------------------
/examples/rndwalk/rndwalk.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Tuple
2 | from mpi4py import MPI
3 | import numpy as np
4 | from dataclasses import dataclass
5 |
6 | from repast4py import core, random, space, schedule, logging, parameters
7 | from repast4py import context as ctx
8 | import repast4py
9 | from repast4py.space import DiscretePoint as dpt
10 |
11 |
12 | @dataclass
13 | class MeetLog:
14 | total_meets: int = 0
15 | min_meets: int = 0
16 | max_meets: int = 0
17 |
18 |
19 | class Walker(core.Agent):
20 |
21 | TYPE = 0
22 | OFFSETS = np.array([-1, 1])
23 |
24 | def __init__(self, local_id: int, rank: int, pt: dpt):
25 | super().__init__(id=local_id, type=Walker.TYPE, rank=rank)
26 | self.pt = pt
27 | self.meet_count = 0
28 |
29 | def save(self) -> Tuple:
30 | """Saves the state of this Walker as a Tuple.
31 |
32 | Returns:
33 | The saved state of this Walker.
34 | """
35 | return (self.uid, self.meet_count, self.pt.coordinates)
36 |
37 | def walk(self, grid):
38 | # choose two elements from the OFFSET array
39 | # to select the direction to walk in the
40 | # x and y dimensions
41 | xy_dirs = random.default_rng.choice(Walker.OFFSETS, size=2)
42 | self.pt = grid.move(self, dpt(self.pt.x + xy_dirs[0], self.pt.y + xy_dirs[1], 0))
43 |
44 | def count_colocations(self, grid, meet_log: MeetLog):
45 | # subtract self
46 | num_here = grid.get_num_agents(self.pt) - 1
47 | meet_log.total_meets += num_here
48 | if num_here < meet_log.min_meets:
49 | meet_log.min_meets = num_here
50 | if num_here > meet_log.max_meets:
51 | meet_log.max_meets = num_here
52 | self.meet_count += num_here
53 |
54 |
55 | walker_cache = {}
56 |
57 |
58 | def restore_walker(walker_data: Tuple):
59 | """
60 | Args:
61 | walker_data: tuple containing the data returned by Walker.save.
62 | """
63 | # uid is a 3 element tuple: 0 is id, 1 is type, 2 is rank
64 | uid = walker_data[0]
65 | pt_array = walker_data[2]
66 | pt = dpt(pt_array[0], pt_array[1], 0)
67 |
68 | if uid in walker_cache:
69 | walker = walker_cache[uid]
70 | else:
71 | walker = Walker(uid[0], uid[2], pt)
72 | walker_cache[uid] = walker
73 |
74 | walker.meet_count = walker_data[1]
75 | walker.pt = pt
76 | return walker
77 |
78 |
79 | class Model:
80 | """
81 | The Model class encapsulates the simulation, and is
82 | responsible for initialization (scheduling events, creating agents,
83 | and the grid the agents inhabit), and the overall iterating
84 | behavior of the model.
85 |
86 | Args:
87 | comm: the mpi communicator over which the model is distributed.
88 | params: the simulation input parameters
89 | """
90 |
91 | def __init__(self, comm: MPI.Intracomm, params: Dict):
92 | # create the schedule
93 | self.runner = schedule.init_schedule_runner(comm)
94 | self.runner.schedule_repeating_event(1, 1, self.step)
95 | self.runner.schedule_repeating_event(1.1, 10, self.log_agents)
96 | self.runner.schedule_stop(params['stop.at'])
97 | self.runner.schedule_end_event(self.at_end)
98 |
99 | # create the context to hold the agents and manage cross process
100 | # synchronization
101 | self.context = ctx.SharedContext(comm)
102 |
103 | # create a bounding box equal to the size of the entire global world grid
104 | box = space.BoundingBox(0, params['world.width'], 0, params['world.height'], 0, 0)
105 | # create a SharedGrid of 'box' size with sticky borders that allows multiple agents
106 | # in each grid location.
107 | self.grid = space.SharedGrid(name='grid', bounds=box, borders=space.BorderType.Sticky,
108 | occupancy=space.OccupancyType.Multiple, buffer_size=2, comm=comm)
109 | self.context.add_projection(self.grid)
110 |
111 | rank = comm.Get_rank()
112 | rng = repast4py.random.default_rng
113 | for i in range(params['walker.count']):
114 | # get a random x,y location in the grid
115 | pt = self.grid.get_random_local_pt(rng)
116 | # create and add the walker to the context
117 | walker = Walker(i, rank, pt)
118 | self.context.add(walker)
119 | self.grid.move(walker, pt)
120 |
121 | # initialize the logging
122 | self.agent_logger = logging.TabularLogger(comm, params['agent_log_file'], ['tick', 'agent_id', 'agent_uid_rank', 'meet_count'])
123 |
124 | self.meet_log = MeetLog()
125 | loggers = logging.create_loggers(self.meet_log, op=MPI.SUM, names={'total_meets': 'total'}, rank=rank)
126 | loggers += logging.create_loggers(self.meet_log, op=MPI.MIN, names={'min_meets': 'min'}, rank=rank)
127 | loggers += logging.create_loggers(self.meet_log, op=MPI.MAX, names={'max_meets': 'max'}, rank=rank)
128 | self.data_set = logging.ReducingDataSet(loggers, comm, params['meet_log_file'])
129 |
130 | # count the initial colocations at time 0 and log
131 | for walker in self.context.agents():
132 | walker.count_colocations(self.grid, self.meet_log)
133 | self.data_set.log(0)
134 | self.meet_log.max_meets = self.meet_log.min_meets = self.meet_log.total_meets = 0
135 | self.log_agents()
136 |
137 | def step(self):
138 | for walker in self.context.agents():
139 | walker.walk(self.grid)
140 |
141 | self.context.synchronize(restore_walker)
142 |
143 | for walker in self.context.agents():
144 | walker.count_colocations(self.grid, self.meet_log)
145 |
146 | tick = self.runner.schedule.tick
147 | self.data_set.log(tick)
148 | # clear the meet log counts for the next tick
149 | self.meet_log.max_meets = self.meet_log.min_meets = self.meet_log.total_meets = 0
150 |
151 | def log_agents(self):
152 | tick = self.runner.schedule.tick
153 | for walker in self.context.agents():
154 | self.agent_logger.log_row(tick, walker.id, walker.uid_rank, walker.meet_count)
155 |
156 | self.agent_logger.write()
157 |
158 | def at_end(self):
159 | self.data_set.close()
160 | self.agent_logger.close()
161 |
162 | def start(self):
163 | self.runner.execute()
164 |
165 |
166 | def run(params: Dict):
167 | model = Model(MPI.COMM_WORLD, params)
168 | model.start()
169 |
170 |
171 | if __name__ == "__main__":
172 | parser = parameters.create_args_parser()
173 | args = parser.parse_args()
174 | params = parameters.init_params(args.parameters_file, args.parameters)
175 | run(params)
176 |
--------------------------------------------------------------------------------
/examples/rumor/rumor.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 | from typing import Dict
3 | from mpi4py import MPI
4 | import numpy as np
5 | from dataclasses import dataclass
6 |
7 | from repast4py.network import write_network, read_network
8 | from repast4py import context as ctx
9 | from repast4py import core, random, schedule, logging, parameters
10 |
11 |
12 | def generate_network_file(fname: str, n_ranks: int, n_agents: int):
13 | """Generates a network file using repast4py.network.write_network.
14 |
15 | Args:
16 | fname: the name of the file to write to
17 | n_ranks: the number of process ranks to distribute the file over
18 |         n_agents: the number of agents (nodes) in the network
19 | """
20 | g = nx.connected_watts_strogatz_graph(n_agents, 2, 0.25)
21 | try:
22 | import nxmetis
23 | write_network(g, 'rumor_network', fname, n_ranks, partition_method='metis')
24 | except ImportError:
25 | write_network(g, 'rumor_network', fname, n_ranks)
26 |
27 |
28 | model = None
29 |
30 |
31 | class RumorAgent(core.Agent):
32 |
33 | def __init__(self, nid: int, agent_type: int, rank: int, received_rumor=False):
34 | super().__init__(nid, agent_type, rank)
35 | self.received_rumor = received_rumor
36 |
37 | def save(self):
38 | """Saves the state of this agent as tuple.
39 |
40 | A non-ghost agent will save its state using this
41 | method, and any ghost agents of this agent will
42 | be updated with that data (self.received_rumor).
43 |
44 | Returns:
45 | The agent's state
46 | """
47 | return (self.uid, self.received_rumor)
48 |
49 | def update(self, data: bool):
50 | """Updates the state of this agent when it is a ghost
51 | agent on some rank other than its local one.
52 |
53 | Args:
54 | data: the new agent state (received_rumor)
55 | """
56 | if not self.received_rumor and data:
57 | # only update if the received rumor state
58 | # has changed from false to true
59 | model.rumor_spreaders.append(self)
60 | self.received_rumor = data
61 |
62 |
63 | def create_rumor_agent(nid, agent_type, rank, **kwargs):
64 | return RumorAgent(nid, agent_type, rank)
65 |
66 |
67 | def restore_agent(agent_data):
68 | uid = agent_data[0]
69 | return RumorAgent(uid[0], uid[1], uid[2], agent_data[1])
70 |
71 |
72 | @dataclass
73 | class RumorCounts:
74 | total_rumor_spreaders: int
75 | new_rumor_spreaders: int
76 |
77 |
78 | class Model:
79 |
80 | def __init__(self, comm, params):
81 | self.runner = schedule.init_schedule_runner(comm)
82 | self.runner.schedule_repeating_event(1, 1, self.step)
83 | self.runner.schedule_stop(params['stop.at'])
84 | self.runner.schedule_end_event(self.at_end)
85 |
86 | fpath = params['network_file']
87 | self.context = ctx.SharedContext(comm)
88 | read_network(fpath, self.context, create_rumor_agent, restore_agent)
89 | self.net = self.context.get_projection('rumor_network')
90 |
91 | self.rumor_spreaders = []
92 | self.rank = comm.Get_rank()
93 | self._seed_rumor(params['initial_rumor_count'], comm)
94 |
95 | rumored_count = len(self.rumor_spreaders)
96 | self.counts = RumorCounts(rumored_count, rumored_count)
97 | loggers = logging.create_loggers(self.counts, op=MPI.SUM, rank=self.rank)
98 | self.data_set = logging.ReducingDataSet(loggers, comm, params['counts_file'])
99 | self.data_set.log(0)
100 |
101 | self.rumor_prob = params['rumor_probability']
102 |
103 | def _seed_rumor(self, init_rumor_count: int, comm):
104 | world_size = comm.Get_size()
105 | # np array of world size, the value of i'th element of the array
106 | # is the number of rumors to seed on rank i.
107 | rumor_counts = np.zeros(world_size, np.int32)
108 | if (self.rank == 0):
109 | for _ in range(init_rumor_count):
110 | idx = random.default_rng.integers(0, high=world_size)
111 | rumor_counts[idx] += 1
112 |
113 | rumor_count = np.empty(1, dtype=np.int32)
114 | comm.Scatter(rumor_counts, rumor_count, root=0)
115 |
116 | for agent in self.context.agents(count=rumor_count[0], shuffle=True):
117 | agent.received_rumor = True
118 | self.rumor_spreaders.append(agent)
119 |
120 | def at_end(self):
121 | self.data_set.close()
122 |
123 | def step(self):
124 | new_rumor_spreaders = []
125 | rng = random.default_rng
126 | for agent in self.rumor_spreaders:
127 | for ngh in self.net.graph.neighbors(agent):
128 | # only update agents local to this rank
129 | if not ngh.received_rumor and ngh.local_rank == self.rank and rng.uniform() <= self.rumor_prob:
130 | ngh.received_rumor = True
131 | new_rumor_spreaders.append(ngh)
132 |
133 | self.rumor_spreaders += new_rumor_spreaders
134 | self.counts.new_rumor_spreaders = len(new_rumor_spreaders)
135 | self.counts.total_rumor_spreaders += self.counts.new_rumor_spreaders
136 | self.data_set.log(self.runner.schedule.tick)
137 |
138 | self.context.synchronize(restore_agent)
139 |
140 | def start(self):
141 | self.runner.execute()
142 |
143 |
144 | def run(params: Dict):
145 | global model
146 | model = Model(MPI.COMM_WORLD, params)
147 | model.start()
148 |
149 |
150 | if __name__ == "__main__":
151 | parser = parameters.create_args_parser()
152 | args = parser.parse_args()
153 | params = parameters.init_params(args.parameters_file, args.parameters)
154 | run(params)
155 |
--------------------------------------------------------------------------------
/examples/rumor/rumor_model.adoc:
--------------------------------------------------------------------------------
1 | = Rumor Spreading Example Model
2 | :icons: font
3 | :website: http://repast.github.io
4 | :xrefstyle: full
5 | :imagesdir: images
6 | :source-highlighter: pygments
7 |
8 | == Overview
9 |
10 | include::rumor_overview.adoc[]
11 |
--------------------------------------------------------------------------------
/examples/rumor/rumor_model.yaml:
--------------------------------------------------------------------------------
1 | network_file: network.txt
2 | initial_rumor_count: 5
3 | stop.at: 100
4 | rumor_probability: 0.1
5 | counts_file: output/rumor_counts.csv
--------------------------------------------------------------------------------
/examples/rumor/rumor_overview.adoc:
--------------------------------------------------------------------------------
1 | The Rumor model is a simple network model that illustrates repast4py's network
2 | agent-based model features. The simulation models the spread of a rumor through a networked population.
3 | During initialization some number of agents (network nodes) are marked as rumor spreaders. Then at each iteration of the model:
4 |
5 | 1. A random draw is made to determine whether each network neighbor of a rumor-spreading node receives the rumor.
6 | This draw is performed once for each neighbor.
7 | 2. After all of the neighbors that can receive the rumor have been processed,
8 | the collection of rumor spreaders is updated to include those nodes that received the rumor.
9 | 3. The total number of rumor spreaders and the number of new rumor spreaders are logged.
10 |
11 | See {website}/guide/user_guide.html#_tutorial_2_the_rumor_network_model[Tutorial 2] in the Repast4Py User's Guide for a complete explanation of the Rumor model.
12 |
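13 | The per-neighbor draw in step 1 is implemented in the `Model.step` method of `rumor.py`;
14 | a condensed sketch of that loop, using the same network and random APIs, is:
15 |
16 | [source,python]
17 | ----
18 | from repast4py import random
19 |
20 | def spread_rumor(rumor_spreaders, net, rank, rumor_prob):
21 |     new_spreaders = []
22 |     for agent in rumor_spreaders:
23 |         for ngh in net.graph.neighbors(agent):
24 |             # one draw per neighbor; only local neighbors that have not yet
25 |             # received the rumor can become new spreaders
26 |             if not ngh.received_rumor and ngh.local_rank == rank and random.default_rng.uniform() <= rumor_prob:
27 |                 ngh.received_rumor = True
28 |                 new_spreaders.append(ngh)
29 |     return new_spreaders
30 | ----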
--------------------------------------------------------------------------------
/examples/zombies/zombie_model.yaml:
--------------------------------------------------------------------------------
1 | random.seed: 42
2 | stop.at: 50.0
3 | human.count: 8000
4 | zombie.count: 400
5 | world.width: 200
6 | world.height: 200
7 | run.number: 1
8 | counts_file: './output/agent_counts.csv'
9 |
--------------------------------------------------------------------------------
/examples/zombies/zombies.adoc:
--------------------------------------------------------------------------------
1 | = Zombies Example Model
2 | :icons: font
3 | :website: http://repast.github.io
4 | :xrefstyle: full
5 | :imagesdir: images
6 | :source-highlighter: pygments
7 |
8 | == Overview
9 |
10 | include::zombies_overview.adoc[]
11 |
--------------------------------------------------------------------------------
/examples/zombies/zombies_overview.adoc:
--------------------------------------------------------------------------------
1 |
2 | The Zombies model is a predator-prey type model that illustrates the use of a
3 | continuous space for movement and a discrete grid for neighborhood searches.
4 | In the Zombies model, human agents are pursued by zombie agents, and once caught become
5 | zombies themselves. Each timestep, the following occurs:
6 |
7 | . All the Zombies:
8 | .. Query their immediate neighborhood to determine the adjacent grid location with
9 | the most Humans
10 | .. Move towards that location, if any Humans are found
11 | .. Infect the Humans at that location, if any are found
12 | . All the Humans:
13 | .. Become a Zombie if they have been infected for more than 9 timesteps; otherwise
14 | .. Query their immediate neighborhood to determine the adjacent grid location with
15 | the fewest Zombies
16 | .. Move to that location at twice the speed of a Zombie.
17 |
18 |
19 | See {website}/guide/user_guide.html#_tutorial_3_the_zombies_model[Tutorial 3] in the Repast4Py User's Guide
20 | for a complete explanation of the Zombies model.
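21 |
22 | As a rough, illustrative sketch only (not the full `zombies.py` implementation), the Zombie
23 | step above can be written with the grid API used in the other examples; it assumes a `Human`
24 | agent class with an `infected` attribute and the `SharedGrid.get_agents` accessor, and it
25 | ignores border handling for brevity.
26 |
27 | [source,python]
28 | ----
29 | from repast4py.space import DiscretePoint as dpt
30 |
31 | def zombie_step(zombie, grid):
32 |     # find the adjacent grid location with the most Humans
33 |     best_pt, best_count = zombie.pt, 0
34 |     for dx in (-1, 0, 1):
35 |         for dy in (-1, 0, 1):
36 |             pt = dpt(zombie.pt.x + dx, zombie.pt.y + dy, 0)
37 |             count = sum(1 for a in grid.get_agents(pt) if isinstance(a, Human))
38 |             if count > best_count:
39 |                 best_pt, best_count = pt, count
40 |     if best_count > 0:
41 |         # move towards that location and infect the Humans there
42 |         zombie.pt = grid.move(zombie, best_pt)
43 |         for agent in grid.get_agents(zombie.pt):
44 |             if isinstance(agent, Human):
45 |                 agent.infected = True
46 | ----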
--------------------------------------------------------------------------------
/paper_data/r4py_scaling_analysis.r:
--------------------------------------------------------------------------------
1 | #
2 | # Analysis of Repast4Py performance testing.
3 | #
4 | #
5 | #
6 |
7 | library(data.table)
8 | library(ggplot2)
9 |
10 | # Std Err
11 | std <- function(x) sd(x)/sqrt(length(x))
12 |
13 | # 95% CI
14 | z <- 1.960
15 |
16 | dt <- NULL
17 | table <- NULL
18 |
19 | # Load all of the stats files that exist in an experiments dir
20 | files <- list.files (path=".", recursive=FALSE, pattern = "*.csv")
21 |
22 | tableList <- list()
23 | for (f in files){
24 | table <- fread(f)
25 |
26 |
27 | tableList[[f]] <- table
28 | }
29 |
30 | dt <- rbindlist(tableList) # Stack the list of tables into a single DT
31 | tableList <- NULL # clear mem
32 |
33 | setnames(dt, c("proc", "humans", "zombies", "foo", "seed", "run_time"))
34 |
35 | # Mean, SD, STD across replicates
36 | stats_summary <- dt[, list(run_time=mean(run_time), run_time.sd=sd(run_time), run_time.std=std(run_time)),
37 | by=list(proc)]
38 |
39 |
40 | base_run_time <- stats_summary[proc == 36]$run_time
41 |
42 | stats_summary[, speedup := base_run_time / run_time]
43 |
44 | # The colorblind palette with grey:
45 | cbPalette <- c("#999999", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
46 |
47 | p <- ggplot(stats_summary) + geom_line(aes(x=proc, y=run_time), size=1, alpha=0.5) +
48 | geom_point(aes(x=proc, y=run_time), size=2, alpha=0.5) +
49 |
50 | # geom_ribbon(aes(x=proc, ymin=run_time-z*run_time.std, ymax=run_time+z*run_time.std),alpha=0.3,colour=NA) +
51 |
52 | # geom_ribbon(aes(x=proc, ymin=run_time-run_time.sd, ymax=run_time+run_time.sd),alpha=0.3) +
53 |
54 | geom_errorbar(aes(x=proc, ymin=run_time-3*run_time.sd, ymax=run_time+3*run_time.sd),alpha=0.95,width=0.025) +
55 |
56 | scale_x_log10(limits = c(10,1000)) +
57 | scale_y_log10(limits = c(10,1000)) +
58 |
59 | # scale_x_continuous(limits = c(1,12), breaks=seq(1,12,1)) +
60 | # scale_colour_manual(values=cbPalette) +
61 | # scale_fill_manual(values=cbPalette) +
62 | labs(y="Run Time (s)", x="Procs", title="Zombies Model Runtime Scaling") +
63 |
64 | theme_bw() +
65 | theme(axis.text=element_text(size=14),axis.title=element_text(size=14),legend.text=element_text(size=14))
66 |
67 | show(p)
68 | ggsave(p, filename="zombies_scaling.png", width=10, height=8)
69 |
70 | q <- ggplot(stats_summary) +
71 | # geom_line(aes(x=proc, y=speedup), size=1, alpha=0.5) +
72 | geom_point(aes(x=proc, y=speedup), size=4, alpha=0.5) +
73 |
74 | geom_smooth(aes(x=proc, y=speedup), method='lm', formula= y ~ x, se = FALSE) +
75 |
76 | # scale_x_continuous(limits = c(1,12), breaks=seq(1,12,1)) +
77 | # scale_colour_manual(values=cbPalette) +
78 | # scale_fill_manual(values=cbPalette) +
79 | labs(y="Performance Speedup", x="Procs", title="Zombies Model Runtime Scaling") +
80 |
81 | theme_bw() +
82 | theme(axis.text=element_text(size=14),axis.title=element_text(size=14),legend.text=element_text(size=14))
83 |
84 | show(q)
85 | ggsave(q, filename="zombies_speedup.png", width=10, height=8)
86 |
--------------------------------------------------------------------------------
/paper_data/runtimes_144p_3000000h_6000z.csv:
--------------------------------------------------------------------------------
1 | 144,3000000,6000,1,1,201.3651475906372
2 | 144,3000000,6000,1,10,200.4400932788849
3 | 144,3000000,6000,1,11,207.78173184394836
4 | 144,3000000,6000,1,12,209.8201105594635
5 | 144,3000000,6000,1,13,203.7249150276184
6 | 144,3000000,6000,1,14,198.5499439239502
7 | 144,3000000,6000,1,15,191.25305676460266
8 | 144,3000000,6000,1,16,205.4966585636139
9 | 144,3000000,6000,1,17,194.88233971595764
10 | 144,3000000,6000,1,18,202.63729000091553
11 | 144,3000000,6000,1,19,202.98454093933105
12 | 144,3000000,6000,1,2,202.25914335250854
13 | 144,3000000,6000,1,20,203.5464916229248
14 | 144,3000000,6000,1,21,204.0576686859131
15 | 144,3000000,6000,1,22,201.49845576286316
16 | 144,3000000,6000,1,23,196.92813754081726
17 | 144,3000000,6000,1,24,194.22307896614075
18 | 144,3000000,6000,1,25,200.03780794143677
19 | 144,3000000,6000,1,26,201.9069483280182
20 | 144,3000000,6000,1,27,197.42192125320435
21 | 144,3000000,6000,1,28,203.62466073036194
22 | 144,3000000,6000,1,29,198.8855504989624
23 | 144,3000000,6000,1,3,204.3554892539978
24 | 144,3000000,6000,1,30,197.80997467041016
25 | 144,3000000,6000,1,4,205.95152306556702
26 | 144,3000000,6000,1,5,201.4520812034607
27 | 144,3000000,6000,1,6,208.05080318450928
28 | 144,3000000,6000,1,7,206.0670404434204
29 | 144,3000000,6000,1,8,206.37861275672913
30 | 144,3000000,6000,1,9,197.8364236354828
31 |
--------------------------------------------------------------------------------
/paper_data/runtimes_288p_3000000h_6000z.csv:
--------------------------------------------------------------------------------
1 | 288,3000000,6000,1,1,109.25364828109741
2 | 288,3000000,6000,1,10,112.16346216201782
3 | 288,3000000,6000,1,11,115.81855773925781
4 | 288,3000000,6000,1,12,112.79959011077881
5 | 288,3000000,6000,1,13,110.48133730888367
6 | 288,3000000,6000,1,14,113.74924564361572
7 | 288,3000000,6000,1,15,113.77850675582886
8 | 288,3000000,6000,1,16,108.02294254302979
9 | 288,3000000,6000,1,17,115.04216241836548
10 | 288,3000000,6000,1,18,117.06686449050903
11 | 288,3000000,6000,1,19,108.69264602661133
12 | 288,3000000,6000,1,2,113.50354337692261
13 | 288,3000000,6000,1,20,112.94871187210083
14 | 288,3000000,6000,1,21,110.3776159286499
15 | 288,3000000,6000,1,22,111.7979588508606
16 | 288,3000000,6000,1,23,123.64087128639221
17 | 288,3000000,6000,1,24,113.89836525917053
18 | 288,3000000,6000,1,25,110.9121322631836
19 | 288,3000000,6000,1,26,111.0184223651886
20 | 288,3000000,6000,1,27,112.57390570640564
21 | 288,3000000,6000,1,28,115.40941524505615
22 | 288,3000000,6000,1,29,108.83489322662354
23 | 288,3000000,6000,1,3,114.41526556015015
24 | 288,3000000,6000,1,30,109.76644659042358
25 | 288,3000000,6000,1,4,111.72889304161072
26 | 288,3000000,6000,1,5,111.95450973510742
27 | 288,3000000,6000,1,6,116.020024061203
28 | 288,3000000,6000,1,7,110.84672951698303
29 | 288,3000000,6000,1,8,110.52016711235046
30 | 288,3000000,6000,1,9,122.30765771865845
31 |
--------------------------------------------------------------------------------
/paper_data/runtimes_36p_3000000h_6000z.csv:
--------------------------------------------------------------------------------
1 | 36,3000000,6000,1,1,838.952709197998
2 | 36,3000000,6000,1,10,837.0415596961975
3 | 36,3000000,6000,1,11,837.2720367908478
4 | 36,3000000,6000,1,12,785.8163404464722
5 | 36,3000000,6000,1,13,808.2062766551971
6 | 36,3000000,6000,1,14,792.7329409122467
7 | 36,3000000,6000,1,15,822.3482551574707
8 | 36,3000000,6000,1,16,834.0571267604828
9 | 36,3000000,6000,1,17,849.2301068305969
10 | 36,3000000,6000,1,18,818.2671964168549
11 | 36,3000000,6000,1,19,837.8155252933502
12 | 36,3000000,6000,1,2,850.3186967372894
13 | 36,3000000,6000,1,20,823.860666513443
14 | 36,3000000,6000,1,21,825.0455129146576
15 | 36,3000000,6000,1,22,813.2468674182892
16 | 36,3000000,6000,1,23,803.8715825080872
17 | 36,3000000,6000,1,24,824.3802959918976
18 | 36,3000000,6000,1,25,858.3840138912201
19 | 36,3000000,6000,1,26,823.4037871360779
20 | 36,3000000,6000,1,27,843.3331615924835
21 | 36,3000000,6000,1,28,855.4899160861969
22 | 36,3000000,6000,1,29,812.8028392791748
23 | 36,3000000,6000,1,3,852.4780988693237
24 | 36,3000000,6000,1,30,831.7287409305573
25 | 36,3000000,6000,1,4,817.5290117263794
26 | 36,3000000,6000,1,5,826.6736435890198
27 | 36,3000000,6000,1,6,826.8520984649658
28 | 36,3000000,6000,1,7,819.597425699234
29 | 36,3000000,6000,1,8,830.3230123519897
30 | 36,3000000,6000,1,9,807.6373875141144
31 |
--------------------------------------------------------------------------------
/paper_data/runtimes_72p_3000000h_6000z.csv:
--------------------------------------------------------------------------------
1 | 72,3000000,6000,1,1,405.8189334869385
2 | 72,3000000,6000,1,10,415.6922564506531
3 | 72,3000000,6000,1,11,423.92218804359436
4 | 72,3000000,6000,1,12,404.57358598709106
5 | 72,3000000,6000,1,13,380.57711935043335
6 | 72,3000000,6000,1,14,417.1324670314789
7 | 72,3000000,6000,1,15,416.07784962654114
8 | 72,3000000,6000,1,16,379.71739292144775
9 | 72,3000000,6000,1,17,412.1756896972656
10 | 72,3000000,6000,1,18,404.3615794181824
11 | 72,3000000,6000,1,19,397.11854243278503
12 | 72,3000000,6000,1,2,416.83604192733765
13 | 72,3000000,6000,1,20,412.3781566619873
14 | 72,3000000,6000,1,21,409.05925130844116
15 | 72,3000000,6000,1,22,413.0708291530609
16 | 72,3000000,6000,1,23,396.8383803367615
17 | 72,3000000,6000,1,24,398.7081809043884
18 | 72,3000000,6000,1,25,387.2033486366272
19 | 72,3000000,6000,1,26,407.43036103248596
20 | 72,3000000,6000,1,27,402.169118642807
21 | 72,3000000,6000,1,28,415.66336488723755
22 | 72,3000000,6000,1,29,410.660790681839
23 | 72,3000000,6000,1,3,402.47610211372375
24 | 72,3000000,6000,1,30,409.312570810318
25 | 72,3000000,6000,1,4,401.7755832672119
26 | 72,3000000,6000,1,5,399.2609326839447
27 | 72,3000000,6000,1,6,416.49187207221985
28 | 72,3000000,6000,1,7,407.6782109737396
29 | 72,3000000,6000,1,8,410.35676193237305
30 | 72,3000000,6000,1,9,404.94030380249023
31 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel",
5 | "numpy",
6 | "mpi4py"
7 | ]
8 | build-backend = "setuptools.build_meta"
9 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Requirements for repast4py
2 | numpy
3 | numba
4 | mpi4py
5 | coverage
6 | torch
7 | networkx
8 | pyyaml
9 | Cython
10 |
--------------------------------------------------------------------------------
/run-docker.sh:
--------------------------------------------------------------------------------
1 | docker run --rm -it --name repast4py repast4py:latest
--------------------------------------------------------------------------------
/scripts/zombies.sbatch:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #SBATCH --time=01:00:00
4 | #SBATCH --nodes=10
5 | #SBATCH --ntasks-per-node=36
6 | #SBATCH --mem=128G
7 | #SBATCH --partition=dis
8 | #SBATCH --account=CONDO
9 | #SBATCH --output=/home/%u/sbatch_out/zombies.job.%j
10 |
11 | module load gcc/8.2.0-xhxgy33
12 | module load mvapich2/2.3-bebop-a66r4jf
13 | module load anaconda3/5.2.0
14 |
15 | export PYTHONPATH=/lcrc/project/EMEWS/bebop/repos/repast4py/src
16 | cd /lcrc/project/EMEWS/bebop/repos/repast4py
17 |
18 | srun -n 360 python ./src/zombies/zombies.py ./src/zombies/zombie_model.props "{\"stop.at\" : 100, \"human.count\" : 3000000, \"zombie.count\" : 6000, \"world.width\" : 1008, \"world.height\" : 1008}"
19 |
20 | # MPICH
21 | # module load gcc/8.2.0-g7hppkz
22 | # module load mpich/3.3-verbs-ipsk4eg
23 | # module load anaconda3/5.2.0
24 |
25 | # export PYTHONPATH=/lcrc/project/EMEWS/bebop/repos/repast4py/src
26 | # cd /lcrc/project/EMEWS/bebop/repos/repast4py
27 |
28 | # mpiexec -n 360 python ./src/zombies/zombies.py ./src/zombies/zombie_model.props "{\"stop.at\" : 100, \"human.count\" : 3000000, \"zombie.count\" : 6000, \"world.width\" : 1008, \"world.height\" : 1008}"
29 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E501,W503
3 |
4 | [metadata]
5 | name = repast4py
6 | version = attr: repast4py.__version__
7 | license = BSD 3-Clause License
8 | classifiers =
9 | Development Status :: 4 - Beta
10 | Intended Audience :: Science/Research
11 | License :: OSI Approved :: BSD License
12 | Operating System :: POSIX :: Linux
13 | Programming Language :: Python :: 3
14 | Programming Language :: Python :: 3.8
15 | Programming Language :: Python :: 3.9
16 | Programming Language :: Python :: 3.10
17 | Programming Language :: Python :: 3.11
18 | Programming Language :: Python :: 3.12
19 | Topic :: Scientific/Engineering
20 | Topic :: Software Development :: Libraries :: Python Modules
21 | Topic :: System :: Distributed Computing
22 | long_description = file: README.md
23 | long_description_content_type = text/markdown
24 | author = Nick Collier
25 | author_email = ncollier@anl.gov
26 | maintainer = Nick Collier
27 | maintainer_email = ncollier@anl.gov
28 | url = https://repast.github.io/repast4py.site/index.html
29 | project_urls =
30 |     Git Repository = https://github.com/Repast/repast4py
31 | Issue Tracker = https://github.com/Repast/repast4py/issues
32 | Documentation = https://repast.github.io/repast4py.site/guide/user_guide.html
33 |
34 | [options]
35 | package_dir=
36 | =src
37 | packages=find:
38 |
39 | install_requires =
40 | numpy
41 | numba
42 | mpi4py
43 | torch
44 | networkx >=2.6.2
45 | pyyaml
46 | Cython
47 | typing_extensions;python_version < "3.8.0"
48 |
49 | python_requires = >= 3.8
50 |
51 | [options.packages.find]
52 | where=src
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, Extension
2 | import numpy as np
3 | import mpi4py
4 | import shutil
5 | import os
6 | import sys
7 | import shlex
8 |
9 | import platform
10 |
11 | IS_WINDOWS = platform.system() == "Windows"
12 | IS_DARWIN = platform.system() == "Darwin"
13 | IS_LINUX = platform.system() == "Linux"
14 |
15 |
16 | def is_64bit() -> bool:
17 | """Returns True if the python interpreter is 64-bit, independent of the OS arch."""
18 | return sys.maxsize > 2**32
19 |
20 |
21 | def run_command(exe, args):
22 | cmd = shutil.which(exe)
23 | if not cmd:
24 | return []
25 | if not isinstance(args, str):
26 | args = " ".join(args)
27 | try:
28 | with os.popen(cmd + " " + args) as f:
29 | return shlex.split(f.read())
30 | except Exception:
31 | return []
32 |
33 |
34 | def get_linker_args():
35 | linker_args = []
36 |
37 | # NOTE Windows setuptools apparently does not use the CC env variable.
38 | if IS_WINDOWS:
39 | pass
40 | else:
41 | compiler = os.getenv("CC")
42 | if compiler is None:
43 |             sys.exit(
44 |                 'Error: MPI compiler is not specified. Please specify the MPI compiler using the "CC" environment variable'
45 |             )
46 | args = run_command(compiler, "-show")
47 | for arg in args:
48 | if arg.startswith("-l") or arg.startswith("-L"):
49 | linker_args.append(arg)
50 | # linker_args_str = ' '.join(linker_args)
51 | # return linker_args_str
52 | return linker_args
53 |
54 |
55 | def get_compiler_args():
56 | if IS_WINDOWS:
57 | compile_args = ["/std:c++latest"]
58 | else:
59 | compile_args = ["-std=c++11"]
60 |
61 | return compile_args
62 |
63 |
64 | def get_extra_includes():
65 | if IS_WINDOWS:
66 | return [os.environ["MSMPI_INC"]]
67 | else:
68 | return []
69 |
70 |
71 | def get_lib_dirs():
72 | if IS_WINDOWS:
73 | if is_64bit():
74 | return [os.environ["MSMPI_LIB64"]]
75 | else:
76 | return [os.environ["MSMPI_LIB32"]]
77 | else:
78 | return []
79 |
80 |
81 | def get_libs():
82 | if IS_WINDOWS:
83 | return ["msmpi"]
84 | else:
85 | return []
86 |
87 |
88 | core_module = Extension(
89 | "repast4py._core",
90 | sources=["src/repast4py/coremodule.cpp"],
91 | language="c++",
92 | extra_compile_args=get_compiler_args(),
93 | depends=["core.h"],
94 | extra_link_args=get_linker_args(),
95 | include_dirs=[np.get_include(), mpi4py.get_include()],
96 | )
97 | space_module = Extension(
98 | "repast4py._space",
99 | sources=[
100 | "src/repast4py/spacemodule.cpp",
101 | "src/repast4py/geometry.cpp",
102 | "src/repast4py/space.cpp",
103 | "src/repast4py/distributed_space.cpp",
104 | "src/repast4py/SpatialTree.cpp",
105 | ],
106 | language="c++",
107 | extra_compile_args=get_compiler_args(),
108 | depends=[
109 | "space.h",
110 | "grid.h",
111 | "cspace.h",
112 | "space_types.h",
113 | "geometry.h",
114 | "distributed_space.h",
115 | "borders.h",
116 | ],
117 | extra_link_args=get_linker_args(),
118 | include_dirs=[np.get_include(), mpi4py.get_include(), *get_extra_includes()],
119 | library_dirs=get_lib_dirs(),
120 | libraries=get_libs(),
121 | )
122 |
123 | setup(description="repast4py package", ext_modules=[core_module, space_module])
124 |
--------------------------------------------------------------------------------
/src/repast4py/SpatialTree.cpp:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #include "SpatialTree.h"
8 |
9 | namespace repast4py {
10 |
11 | NodePoint::NodePoint(double x, double y, double z) : x_{x}, y_{y}, z_{z} {}
12 |
13 | // ignore the z coordinate
14 | Box2D::Box2D(NodePoint& min, double max_x, double max_y, double max_z) : min_{min}, max_{max_x, max_y, 0}, x_extent{max_.x_ - min.x_},
15 | y_extent{max_.y_ - min.y_}, z_extent{0} {}
16 |
17 | bool Box2D::contains(R4Py_ContinuousPoint* pt) {
18 | double* data = (double*)PyArray_DATA(pt->coords);
19 | // std::cout << min_.x_ << ", " << max_.x_ << ", " << min_.y_ << ", " << max_.y_ << std::endl;
20 | return data[0] >= min_.x_ && data[0] <= max_.x_
21 | && data[1] >= min_.y_ && data[1] <= max_.y_;
22 | }
23 |
24 | bool Box2D::intersects(const BoundingBox& bbox) {
25 | if (min_.x_ > bbox.xmax_ || bbox.xmin_ > max_.x_) return false;
26 | if (min_.y_ > bbox.ymax_ || bbox.ymin_ > max_.y_) return false;
27 | return true;
28 | }
29 |
30 | Box3D::Box3D(NodePoint& min, double max_x, double max_y, double max_z) : min_{min}, max_{max_x, max_y, max_z}, x_extent{max_x - min.x_},
31 | y_extent{max_y - min.y_}, z_extent{max_z - min.z_} {}
32 |
33 | bool Box3D::contains(R4Py_ContinuousPoint* pt) {
34 | double* data = (double*)PyArray_DATA(pt->coords);
35 | return data[0] >= min_.x_ && data[0] <= max_.x_
36 | && data[1] >= min_.y_ && data[1] <= max_.y_
37 | && data[2] >= min_.z_ && data[2] <= max_.z_;
38 | }
39 |
40 | bool Box3D::intersects(const BoundingBox& bbox) {
41 | if (min_.x_ > bbox.xmax_ || bbox.xmin_ > max_.x_) return false;
42 | if (min_.y_ > bbox.ymax_ || bbox.ymin_ > max_.y_) return false;
43 | if (min_.z_ > bbox.zmax_ || bbox.zmin_ > max_.z_) return false;
44 |
45 | return true;
46 | }
47 |
48 |
49 | }
--------------------------------------------------------------------------------
/src/repast4py/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021, UChicago Argonne, LLC
2 | # All Rights Reserved
3 | # Software Name: repast4py
4 | # By: Argonne National Laboratory
5 | # License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | __version__ = '1.1.6'
8 |
--------------------------------------------------------------------------------
/src/repast4py/borders.h:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #ifndef SRC_BORDERS_H
8 | #define SRC_BORDERS_H
9 |
10 | #include "types.h"
11 | #include "geometry.h"
12 |
13 | namespace repast4py {
14 |
15 |
16 | template<typename PointType>
17 | class StickyBorders {
18 |
19 | private:
20 | BoundingBox bounds_;
21 |
22 | public:
23 |     using coord_type = typename TypeSelector<PointType>::type;
24 | StickyBorders(const BoundingBox& bounds);
25 |
26 | ~StickyBorders() {}
27 |
28 |     void transform(const PointType* pt, Point<coord_type>& transformed_pt);
29 | void transform(const PointType* pt, PointType* transformed_pt);
30 | };
31 |
32 | template<typename PointType>
33 | StickyBorders<PointType>::StickyBorders(const BoundingBox& bounds) :
34 | bounds_{bounds}
35 | {}
36 |
37 | template<typename T>
38 | struct Offset {
39 |
40 | };
41 |
42 | template<>
43 | struct Offset<long_t> {
44 | constexpr static long_t value = 1;
45 | };
46 |
47 | template<>
48 | struct Offset<double> {
49 | constexpr static double value = 0.00000001;
50 | };
51 |
52 | template<typename PointType>
53 | void StickyBorders<PointType>::transform(const PointType* pt, PointType* transformed_pt) {
54 | coord_type* data = (coord_type *)PyArray_DATA(pt->coords);
55 | coord_type* t_data = (coord_type *)PyArray_DATA(transformed_pt->coords);
56 |
57 |
58 | // coord_type v = (bounds_.xmax_ - 1) < data[0] ? (bounds_.xmax_ - 1) : data[0];
59 | // transformed_pt.x = bounds_.xmin_ > v ? bounds_.xmin_ : v;
60 |
61 | // v = (bounds_.ymax_ - 1) < data[1] ? (bounds_.ymax_ - 1) : data[1];
62 | // transformed_pt.y = bounds_.ymin_ > v ? bounds_.ymin_ : v;
63 |
64 | // v = (bounds_.zmax_ - 1) < data[2] ? (bounds_.zmax_ - 1) : data[2];
65 | // transformed_pt.z = bounds_.zmin_ > v ? bounds_.zmin_ : v;
66 |
67 |     t_data[0] = std::max((coord_type)bounds_.xmin_, std::min((coord_type)bounds_.xmax_ - Offset<coord_type>::value, data[0]));
68 |     t_data[1] = std::max((coord_type)bounds_.ymin_, std::min((coord_type)bounds_.ymax_ - Offset<coord_type>::value, data[1]));
69 |     t_data[2] = std::max((coord_type)bounds_.zmin_, std::min((coord_type)bounds_.zmax_ - Offset<coord_type>::value, data[2]));
70 | }
71 |
72 | template<typename PointType>
73 | void StickyBorders<PointType>::transform(const PointType* pt, Point<coord_type>& transformed_pt) {
74 | coord_type* data = (coord_type*) PyArray_DATA(pt->coords);
75 |
76 | // coord_type v = (bounds_.xmax_ - 1) < data[0] ? (bounds_.xmax_ - 1) : data[0];
77 | // transformed_pt.x = bounds_.xmin_ > v ? bounds_.xmin_ : v;
78 |
79 | // v = (bounds_.ymax_ - 1) < data[1] ? (bounds_.ymax_ - 1) : data[1];
80 | // transformed_pt.y = bounds_.ymin_ > v ? bounds_.ymin_ : v;
81 |
82 | // v = (bounds_.zmax_ - 1) < data[2] ? (bounds_.zmax_ - 1) : data[2];
83 | // transformed_pt.z = bounds_.zmin_ > v ? bounds_.zmin_ : v;
84 |
85 |     transformed_pt.x = std::max((coord_type)bounds_.xmin_, std::min((coord_type)bounds_.xmax_ - Offset<coord_type>::value, data[0]));
86 |     transformed_pt.y = std::max((coord_type)bounds_.ymin_, std::min((coord_type)bounds_.ymax_ - Offset<coord_type>::value, data[1]));
87 |     transformed_pt.z = std::max((coord_type)bounds_.zmin_, std::min((coord_type)bounds_.zmax_ - Offset<coord_type>::value, data[2]));
88 | }
89 |
90 | using GridStickyBorders = StickyBorders<R4Py_DiscretePoint>;
91 | using CSStickyBorders = StickyBorders<R4Py_ContinuousPoint>;
92 |
93 | template<typename PointType>
94 | void transformX(const PointType* pt, Point<typename TypeSelector<PointType>::type>& transformed_pt, const BoundingBox& bounds) {
95 |     using coord_type = typename TypeSelector<PointType>::type;
96 | coord_type* data = (coord_type*) PyArray_DATA(pt->coords);
97 |
98 | coord_type nc = fmod((data[0] - bounds.xmin_), bounds.x_extent_);
99 | transformed_pt.x = nc < 0 ? bounds.xmax_ + nc : bounds.xmin_ + nc;
100 | }
101 |
102 | template<typename PointType>
103 | void transformXY(const PointType* pt, Point<typename TypeSelector<PointType>::type>& transformed_pt, const BoundingBox& bounds) {
104 |     using coord_type = typename TypeSelector<PointType>::type;
105 | coord_type* data = (coord_type*) PyArray_DATA(pt->coords);
106 |
107 | coord_type nc = fmod(data[0] - bounds.xmin_, bounds.x_extent_);
108 | transformed_pt.x = nc < 0 ? bounds.xmax_ + nc : bounds.xmin_ + nc;
109 |
110 | nc = fmod(data[1] - bounds.ymin_, bounds.y_extent_);
111 | transformed_pt.y = nc < 0 ? bounds.ymax_ + nc : bounds.ymin_ + nc;
112 | }
113 |
114 | template<typename PointType>
115 | void transformXYZ(const PointType* pt, Point<typename TypeSelector<PointType>::type>& transformed_pt, const BoundingBox& bounds) {
116 |     using coord_type = typename TypeSelector<PointType>::type;
117 | coord_type* data = (coord_type*) PyArray_DATA(pt->coords);
118 |
119 | coord_type nc = fmod((data[0] - bounds.xmin_), bounds.x_extent_);
120 | transformed_pt.x = nc < 0 ? bounds.xmax_ + nc : bounds.xmin_ + nc;
121 |
122 | nc = fmod(data[1] - bounds.ymin_, bounds.y_extent_);
123 | transformed_pt.y = nc < 0 ? bounds.ymax_ + nc : bounds.ymin_ + nc;
124 |
125 | nc = fmod(data[2] - bounds.zmin_, bounds.z_extent_);
126 | transformed_pt.z = nc < 0 ? bounds.zmax_ + nc : bounds.zmin_ + nc;
127 | }
128 |
129 | template<typename PointType>
130 | class PeriodicBorders {
131 |
132 | private:
133 |     using trans_func = void(*)(const PointType*, Point<typename TypeSelector<PointType>::type>&, const BoundingBox&);
134 |
135 | BoundingBox bounds_;
136 | trans_func transform_;
137 |     Point<typename TypeSelector<PointType>::type> temp_pt;
138 |
139 | public:
140 | using coord_type = typename TypeSelector<PointType>::type;
141 | PeriodicBorders(const BoundingBox& bounds);
142 |
143 | ~PeriodicBorders() {}
144 |
145 | void transform(const PointType* pt, Point<PointType>& transformed_pt);
146 | void transform(const PointType* pt, PointType* transformed_pt);
147 | };
148 |
149 | template<typename PointType>
150 | PeriodicBorders<PointType>::PeriodicBorders(const BoundingBox& bounds) :
151 | bounds_{bounds}, temp_pt{0, 0, 0}
152 | {
153 | if (bounds_.x_extent_ > 0 && bounds_.y_extent_ > 0 && bounds_.z_extent_ > 0) {
154 | transform_ = &transformXYZ;
155 | } else if (bounds_.x_extent_ > 0 && bounds_.y_extent_ > 0) {
156 | transform_ = &transformXY;
157 | } else {
158 | transform_ = &transformX;
159 | }
160 | }
161 |
162 | template<typename PointType>
163 | void PeriodicBorders<PointType>::transform(const PointType* pt, Point<PointType>& transformed_pt) {
164 | transform_(pt, transformed_pt, bounds_);
165 | }
166 |
167 | template<typename PointType>
168 | void PeriodicBorders<PointType>::transform(const PointType* pt, PointType* transformed_pt) {
169 | transform_(pt, temp_pt, bounds_);
170 | coord_type* data = (coord_type *)PyArray_DATA(transformed_pt->coords);
171 | data[0] = temp_pt.x;
172 | data[1] = temp_pt.y;
173 | data[2] = temp_pt.z;
174 | }
175 |
176 | using GridPeriodicBorders = PeriodicBorders<R4Py_DiscretePoint>;
177 | using CSPeriodicBorders = PeriodicBorders<R4Py_ContinuousPoint>;
178 |
179 | struct R4Py_GridStickyBorders
180 | {
181 | PyObject_HEAD
182 | GridStickyBorders* borders;
183 |
184 | };
185 |
186 | struct R4Py_GridPeriodicBorders
187 | {
188 | PyObject_HEAD
189 | GridPeriodicBorders* borders;
190 |
191 | };
192 |
193 | }
194 | #endif
--------------------------------------------------------------------------------
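Note: the sticky and periodic transforms above reduce to simple clamp and wrap arithmetic. The following standalone sketch is not part of repast4py (clamp_sticky and wrap_periodic are hypothetical names, and the CPython/NumPy point types are omitted); it only mirrors the min/max clamping and fmod wrapping on plain doubles.

    // Illustrative sketch only -- not part of repast4py.
    #include <algorithm>
    #include <cmath>
    #include <cstdio>

    // Sticky borders clamp a coordinate into [min, max - offset].
    double clamp_sticky(double v, double vmin, double vmax, double offset) {
        return std::max(vmin, std::min(vmax - offset, v));
    }

    // Periodic borders wrap a coordinate into [min, max) with fmod, shifting
    // negative remainders back into range, as transformX does above.
    double wrap_periodic(double v, double vmin, double extent) {
        double nc = std::fmod(v - vmin, extent);
        return nc < 0 ? (vmin + extent) + nc : vmin + nc;
    }

    int main() {
        // For a 0..10 extent: 12.5 sticks to the edge, or wraps around to 2.5.
        std::printf("sticky:   %.1f\n", clamp_sticky(12.5, 0.0, 10.0, 0.0));  // 10.0
        std::printf("periodic: %.1f\n", wrap_periodic(12.5, 0.0, 10.0));      // 2.5
        return 0;
    }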
/src/repast4py/core.h:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #ifndef R4PY_SRC_CORE_H
8 | #define R4PY_SRC_CORE_H
9 |
10 | #define PY_SSIZE_T_CLEAN
11 | #include <Python.h>
12 |
13 | #include
14 | #include
15 |
16 | #include "types.h"
17 |
18 | namespace repast4py {
19 |
20 |
21 | struct R4Py_AgentID {
22 | long_t id;
23 | int type;
24 | unsigned int rank;
25 | PyObject* as_tuple;
26 | };
27 |
28 | struct agent_id_comp {
29 | bool operator()(const R4Py_AgentID* a1, const R4Py_AgentID* a2) const {
30 | if (a1->id != a2->id) return a1->id < a2->id;
31 | if (a1->type != a2->type) return a1->type < a2->type;
32 | return a1->rank < a2->rank;
33 | }
34 | };
35 |
36 | struct R4Py_Agent {
37 | PyObject_HEAD
38 | R4Py_AgentID* aid;
39 | unsigned int local_rank;
40 | };
41 |
42 | class AgentIter {
43 |
44 | public:
45 | AgentIter() {}
46 | virtual ~AgentIter() {}
47 | virtual R4Py_Agent* next() = 0;
48 | virtual bool hasNext() = 0;
49 | virtual void reset() = 0;
50 | };
51 |
52 | template<typename T>
53 | AgentIter* create_iter(T* iterable);
54 |
55 | struct R4Py_AgentIter {
56 | PyObject_HEAD
57 | AgentIter* iter;
58 | };
59 |
60 | template<typename IterableT>
61 | class TAgentIter : public AgentIter {
62 |
63 | private:
64 | std::shared_ptr<IterableT> iterable_;
65 | typename IterableT::iterator iter_;
66 |
67 | public:
68 | TAgentIter(std::shared_ptr<IterableT>);
69 | virtual ~TAgentIter() {}
70 |
71 | R4Py_Agent* next() override;
72 | bool hasNext() override;
73 | void reset() override;
74 | };
75 |
76 | template<typename IterableT>
77 | TAgentIter<IterableT>::TAgentIter(std::shared_ptr<IterableT> iterable) : AgentIter(),
78 | iterable_{iterable}, iter_(iterable_->begin()) {}
79 |
80 | template<typename IterableT>
81 | bool TAgentIter<IterableT>::hasNext() {
82 | return iter_ != iterable_->end();
83 | }
84 |
85 | template<typename IterableT>
86 | void TAgentIter<IterableT>::reset() {
87 | iter_ = iterable_->begin();
88 | }
89 |
90 | template<typename IterableT>
91 | R4Py_Agent* TAgentIter<IterableT>::next() {
92 | R4Py_Agent* agent = *iter_;
93 | ++iter_;
94 | return agent;
95 | }
96 |
97 | class PyObjectIter {
98 |
99 | protected:
100 | bool incr;
101 |
102 | public:
103 | PyObjectIter() : incr{false} {}
104 | virtual ~PyObjectIter() {}
105 | virtual PyObject* next() = 0;
106 | virtual bool hasNext() = 0;
107 | virtual void reset() = 0;
108 | };
109 |
110 |
111 | struct R4Py_PyObjectIter {
112 | PyObject_HEAD
113 | PyObjectIter* iter;
114 | };
115 |
116 | template<typename MapT>
117 | class ValueIter : public PyObjectIter {
118 |
119 | private:
120 | std::shared_ptr<MapT> iterable_;
121 | typename MapT::const_iterator iter_;
122 |
123 | public:
124 | ValueIter(std::shared_ptr<MapT>);
125 | virtual ~ValueIter() {}
126 |
127 | PyObject* next() override;
128 | bool hasNext() override;
129 | void reset() override;
130 | };
131 |
132 | template<typename MapT>
133 | ValueIter<MapT>::ValueIter(std::shared_ptr<MapT> iterable) : PyObjectIter(),
134 | iterable_{iterable}, iter_(iterable_->begin()) {}
135 |
136 | template<typename MapT>
137 | bool ValueIter<MapT>::hasNext() {
138 | return iter_ != iterable_->end();
139 | }
140 |
141 | template<typename MapT>
142 | void ValueIter<MapT>::reset() {
143 | iter_ = iterable_->begin();
144 | }
145 |
146 | template<typename MapT>
147 | PyObject* ValueIter<MapT>::next() {
148 | PyObject* obj = iter_->second;
149 | ++iter_;
150 | // incref is in module function
151 | return obj;
152 | }
153 |
154 | template<typename SequenceT, typename UnpackT>
155 | class SequenceIter : public PyObjectIter {
156 |
157 | private:
158 | std::shared_ptr<SequenceT> iterable_;
159 | typename SequenceT::iterator iter_;
160 | UnpackT unpack;
161 |
162 | public:
163 | SequenceIter(std::shared_ptr<SequenceT>);
164 | virtual ~SequenceIter() {}
165 |
166 | PyObject* next() override;
167 | bool hasNext() override;
168 | void reset() override;
169 | };
170 |
171 | template<typename SequenceT, typename UnpackT>
172 | SequenceIter<SequenceT, UnpackT>::SequenceIter(std::shared_ptr<SequenceT> iterable) : PyObjectIter(),
173 | iterable_{iterable}, iter_{iterable_->begin()}, unpack{} {}
174 |
175 | template<typename SequenceT, typename UnpackT>
176 | bool SequenceIter<SequenceT, UnpackT>::hasNext() {
177 | return iter_ != iterable_->end();
178 | }
179 |
180 | template<typename SequenceT, typename UnpackT>
181 | void SequenceIter<SequenceT, UnpackT>::reset() {
182 | iter_ = iterable_->begin();
183 | }
184 |
185 | template<typename SequenceT, typename UnpackT>
186 | PyObject* SequenceIter<SequenceT, UnpackT>::next() {
187 | PyObject* obj = unpack(*iter_);
188 | ++iter_;
189 | // incref is in module function
190 | return obj;
191 | }
192 |
193 |
194 |
195 | }
196 |
197 | #endif
--------------------------------------------------------------------------------
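Note: TAgentIter, ValueIter, and SequenceIter above all follow the same hasNext()/next()/reset() protocol over a shared_ptr-owned container, which the module code then wraps in R4Py_AgentIter or R4Py_PyObjectIter. A minimal sketch of that adapter pattern, assuming a plain std::vector<int> instead of an agent container so it builds without the CPython headers (SimpleIter is a hypothetical name, not a repast4py type):

    // Illustrative sketch only -- not part of repast4py.
    #include <cstdio>
    #include <memory>
    #include <vector>

    template<typename IterableT>
    class SimpleIter {
        std::shared_ptr<IterableT> iterable_;   // shared ownership keeps the container alive
        typename IterableT::iterator iter_;

    public:
        explicit SimpleIter(std::shared_ptr<IterableT> iterable)
            : iterable_{iterable}, iter_(iterable_->begin()) {}

        bool hasNext() { return iter_ != iterable_->end(); }
        int next() { return *iter_++; }
        void reset() { iter_ = iterable_->begin(); }
    };

    int main() {
        auto values = std::make_shared<std::vector<int>>(std::vector<int>{1, 2, 3});
        SimpleIter<std::vector<int>> it(values);
        while (it.hasNext()) std::printf("%d\n", it.next());
        it.reset();                              // rewind, as AgentIter/PyObjectIter allow
        return 0;
    }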
/src/repast4py/coremodule.h:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #ifndef R4PY_COREMODULE_H
8 | #define R4PY_COREMODULE_H
9 |
10 | #ifdef __cplusplus
11 | extern "C" {
12 | #endif
13 |
14 | #define R4PyCore_API_pointers 3
15 |
16 | #ifdef R4PY_CORE_MODULE
17 |
18 | #else
19 |
20 | static void** R4PyCore_API;
21 |
22 | #define R4Py_AgentType (*(PyTypeObject *)R4PyCore_API[0])
23 | #define R4Py_AgentIterType (*(PyTypeObject *)R4PyCore_API[1])
24 | #define R4Py_PyObjectIterType (*(PyTypeObject *)R4PyCore_API[2])
25 |
26 | static int import_core(void) {
27 | R4PyCore_API = (void **)PyCapsule_Import("repast4py._core._C_API", 0);
28 | return (R4PyCore_API != NULL) ? 0 : -1;
29 | }
30 |
31 | #endif
32 |
33 |
34 | #ifdef __cplusplus
35 | }
36 | #endif
37 |
38 | #endif
--------------------------------------------------------------------------------
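Note: import_core() above follows the standard CPython capsule pattern: a consuming extension module imports the pointer table exported by repast4py._core and then resolves R4Py_AgentType and the other macros through it. A hedged sketch of how a consumer's module-init function might call it (the module name "example" is hypothetical):

    // Illustrative sketch only -- not part of repast4py.
    #define PY_SSIZE_T_CLEAN
    #include <Python.h>
    #include "coremodule.h"   // provides import_core() and the R4Py_*Type macros

    static struct PyModuleDef examplemodule = {
        PyModuleDef_HEAD_INIT,
        "example",    // module name (hypothetical)
        NULL,         // docstring
        -1,           // per-interpreter state size
        NULL          // no module-level methods
    };

    PyMODINIT_FUNC PyInit_example(void) {
        PyObject* m = PyModule_Create(&examplemodule);
        if (m == NULL) return NULL;
        // Pull in repast4py._core's C API; after this, R4Py_AgentType etc.
        // resolve through the imported capsule pointer table.
        if (import_core() < 0) {
            Py_DECREF(m);
            return NULL;
        }
        return m;
    }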
/src/repast4py/cspace.h:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #ifndef R4PY_SRC_CSPACE_H
8 | #define R4PY_SRC_CSPACE_H
9 |
10 | #define PY_SSIZE_T_CLEAN
11 | #include <Python.h>
12 |
13 | #include "space.h"
14 | #include "SpatialTree.h"
15 |
16 | namespace repast4py {
17 |
18 | template
19 | class BaseCSpace : public BaseSpace {
20 |
21 | using BaseSpace::agent_map;
22 | using BaseSpace::location_map;
23 | using BaseSpace::borders;
24 | using BaseSpace::accessor;
25 | using BaseSpace::wpt;
26 | using BaseSpace::name_;
27 |
28 | private:
29 | std::unique_ptr spatial_tree;
30 |
31 |
32 | public:
33 | using PointType = R4Py_ContinuousPoint;
34 | BaseCSpace(const std::string& name, const BoundingBox& bounds, int tree_threshold);
35 | ~BaseCSpace();
36 |
37 | void getAgentsWithin(const BoundingBox& bounds, std::shared_ptr>& agents) override;
38 | virtual bool remove(R4Py_Agent* agent) override;
39 | virtual bool remove(R4Py_AgentID* aid) override;
40 | R4Py_ContinuousPoint* move(R4Py_Agent* agent, R4Py_ContinuousPoint* to) override;
41 | };
42 |
43 | template
44 | BaseCSpace::BaseCSpace(const std::string& name, const BoundingBox& bounds, int tree_threshold) :
45 | BaseSpace(name, bounds), spatial_tree{} {
46 |
47 | //using Tree2DType = CPSpatialTreeImpl>;
48 | // using Tree3DType = CPSpatialTreeImpl>;
49 |
50 | if (bounds.num_dims == 1) {
51 | // TODO
52 | } else if (bounds.num_dims == 2) {
53 | spatial_tree = std::unique_ptr(new CPSpatialTreeImpl>(tree_threshold, bounds, &location_map));
55 | } else if (bounds.num_dims == 3) {
56 | spatial_tree = std::unique_ptr(new CPSpatialTreeImpl>(tree_threshold, bounds, &location_map));
58 |
59 | }
60 | }
61 |
62 | template
63 | BaseCSpace::~BaseCSpace() {}
64 |
65 | template
66 | void BaseCSpace::getAgentsWithin(const BoundingBox& bounds,
67 | std::shared_ptr>& agents)
68 | {
69 | spatial_tree->getObjectsWithin(bounds, agents);
70 | }
71 |
72 | template
73 | bool BaseCSpace::remove(R4Py_Agent* agent) {
74 | return remove(agent->aid);
75 | }
76 |
77 | template
78 | bool BaseCSpace::remove(R4Py_AgentID* aid) {
79 | auto iter = agent_map.find(aid);
80 | if (iter != agent_map.end() && iter->second->pt) {
81 | spatial_tree->removeItem(iter->second);
82 | }
83 | return BaseSpace::remove(aid);
84 | }
85 |
86 | template
87 | R4Py_ContinuousPoint* BaseCSpace::move(R4Py_Agent* agent, R4Py_ContinuousPoint* pt) {
88 | // If this gets changed such that the argument pt is not a temp input arg then
89 | // we need to make sure that any move calls reflect that.
90 | auto iter = agent_map.find(agent->aid);
91 | if (iter != agent_map.end()) {
92 | borders.transform(pt, wpt);
93 | if (!point_equals(iter->second->pt, wpt)) {
94 | if (accessor.put(agent, location_map, wpt)) {
95 | if (iter->second->pt) {
96 | spatial_tree->removeItem(iter->second);
97 | // if successful put, and agent is already located
98 | // so need to remove
99 | Point<R4Py_ContinuousPoint> ppt;
100 | extract_coords(iter->second->pt, ppt);
101 | accessor.remove(agent, location_map, ppt);
102 | update_point(iter->second->pt, wpt);
103 | spatial_tree->addItem(iter->second);
104 | } else {
105 | iter->second->pt = create_point(Py_TYPE(pt), wpt);
106 | spatial_tree->addItem(iter->second);
107 | }
108 | } else {
109 | return nullptr;
110 | }
111 | }
112 | return iter->second->pt;
113 |
114 | } else {
115 | R4Py_AgentID* id = agent->aid;
116 | throw std::invalid_argument("Error moving agent (" + std::to_string(id->id) + "," +
117 | std::to_string(id->type) + "): agent is not in " + name_);
118 | }
119 | }
120 |
121 | class ICSpace {
122 |
123 | public:
124 | virtual ~ICSpace() = 0;
125 |
126 | virtual bool add(R4Py_Agent* agent) = 0;
127 | virtual bool remove(R4Py_Agent* agent) = 0;
128 | virtual bool remove(R4Py_AgentID* aid) = 0;
129 | virtual R4Py_Agent* getAgentAt(R4Py_ContinuousPoint* pt) = 0;
130 | virtual AgentListPtr getAgentsAt(R4Py_ContinuousPoint* pt) = 0;
131 | virtual R4Py_ContinuousPoint* getLocation(R4Py_Agent* agent) = 0;
132 | virtual R4Py_ContinuousPoint* move(R4Py_Agent* agent, R4Py_ContinuousPoint* to) = 0;
133 | virtual void getAgentsWithin(const BoundingBox& box, std::shared_ptr<std::vector<R4Py_Agent*>>& agents) = 0;
134 | virtual const std::string name() const = 0;
135 | virtual bool contains(R4Py_Agent* agent) const = 0;
136 | };
137 |
138 | inline ICSpace::~ICSpace() {}
139 |
140 | template<typename DelegateType>
141 | class CSpace : public ICSpace {
142 |
143 | private:
144 | std::unique_ptr<DelegateType> delegate;
145 |
146 | public:
147 | CSpace(const std::string& name, const BoundingBox& bounds, int tree_threshold);
148 | virtual ~CSpace() {}
149 | bool add(R4Py_Agent* agent) override;
150 | bool remove(R4Py_Agent* agent) override;
151 | bool remove(R4Py_AgentID* aid) override;
152 | R4Py_Agent* getAgentAt(R4Py_ContinuousPoint* pt) override;
153 | AgentListPtr getAgentsAt(R4Py_ContinuousPoint* pt) override;
154 | R4Py_ContinuousPoint* getLocation(R4Py_Agent* agent) override;
155 | R4Py_ContinuousPoint* move(R4Py_Agent* agent, R4Py_ContinuousPoint* to) override;
156 | void getAgentsWithin(const BoundingBox& box, std::shared_ptr<std::vector<R4Py_Agent*>>& agents) override;
157 | const std::string name() const override;
158 | bool contains(R4Py_Agent* agent) const override;
159 | };
160 |
161 | template<typename DelegateType>
162 | CSpace<DelegateType>::CSpace(const std::string& name, const BoundingBox& bounds, int tree_threshold) :
163 | delegate{std::unique_ptr<DelegateType>(new DelegateType(name, bounds, tree_threshold))} {}
164 |
165 | template<typename DelegateType>
166 | bool CSpace<DelegateType>::add(R4Py_Agent* agent) {
167 | return delegate->add(agent);
168 | }
169 |
170 | template<typename DelegateType>
171 | bool CSpace<DelegateType>::remove(R4Py_Agent* agent) {
172 | return delegate->remove(agent);
173 | }
174 |
175 | template<typename DelegateType>
176 | bool CSpace<DelegateType>::remove(R4Py_AgentID* aid) {
177 | return delegate->remove(aid);
178 | }
179 |
180 | template<typename DelegateType>
181 | R4Py_Agent* CSpace<DelegateType>::getAgentAt(R4Py_ContinuousPoint* pt) {
182 | return delegate->getAgentAt(pt);
183 | }
184 |
185 | template<typename DelegateType>
186 | AgentListPtr CSpace<DelegateType>::getAgentsAt(R4Py_ContinuousPoint* pt) {
187 | return delegate->getAgentsAt(pt);
188 | }
189 |
190 | template<typename DelegateType>
191 | R4Py_ContinuousPoint* CSpace<DelegateType>::getLocation(R4Py_Agent* agent) {
192 | return delegate->getLocation(agent);
193 | }
194 |
195 | template<typename DelegateType>
196 | R4Py_ContinuousPoint* CSpace<DelegateType>::move(R4Py_Agent* agent, R4Py_ContinuousPoint* to) {
197 | return delegate->move(agent, to);
198 | }
199 |
200 | template<typename DelegateType>
201 | void CSpace<DelegateType>::getAgentsWithin(const BoundingBox& box, std::shared_ptr<std::vector<R4Py_Agent*>>& agents) {
202 | delegate->getAgentsWithin(box, agents);
203 | }
204 |
205 | template<typename DelegateType>
206 | const std::string CSpace<DelegateType>::name() const {
207 | return delegate->name();
208 | }
209 |
210 | template<typename DelegateType>
211 | bool CSpace<DelegateType>::contains(R4Py_Agent* agent) const {
212 | return delegate->contains(agent);
213 | }
214 |
215 | // aliases for CSpace with multi occupancy and sticky borders
216 | using ContinuousMOType = MultiOccupancyAccessor, R4Py_ContinuousPoint>;
217 | using ContinuousSOType = SingleOccupancyAccessor, R4Py_ContinuousPoint>;
218 | using MOSCSpace = BaseCSpace;
219 | using MOPCSpace = BaseCSpace;
220 | using SOSCSpace = BaseCSpace;
221 | using SOPCSpace = BaseCSpace;
222 |
223 |
224 | template<>
225 | struct is_periodic<MOSCSpace> {
226 | static constexpr bool value {false};
227 | };
228 |
229 | template<>
230 | struct is_periodic<MOPCSpace> {
231 | static constexpr bool value {true};
232 | };
233 |
234 | template<>
235 | struct is_periodic<SOSCSpace> {
236 | static constexpr bool value {false};
237 | };
238 |
239 | template<>
240 | struct is_periodic<SOPCSpace> {
241 | static constexpr bool value {true};
242 | };
243 |
244 | struct R4Py_CSpace {
245 | PyObject_HEAD
246 | ICSpace* space;
247 | };
248 |
249 |
250 | }
251 |
252 | #endif
--------------------------------------------------------------------------------
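Note: the is_periodic specializations above attach a compile-time flag to each space alias. A minimal sketch of how such a trait is typically consumed (C++17 if constexpr; StickySpace, PeriodicSpace, and describe() are stand-ins, not repast4py types):

    // Illustrative sketch only -- not part of repast4py.
    #include <cstdio>

    template<typename T>
    struct is_periodic;                       // primary template left undefined

    struct StickySpace {};
    struct PeriodicSpace {};

    template<> struct is_periodic<StickySpace>   { static constexpr bool value {false}; };
    template<> struct is_periodic<PeriodicSpace> { static constexpr bool value {true};  };

    template<typename SpaceT>
    void describe() {
        if constexpr (is_periodic<SpaceT>::value) {
            std::printf("periodic: coordinates wrap\n");
        } else {
            std::printf("sticky: coordinates clamp\n");
        }
    }

    int main() {
        describe<StickySpace>();
        describe<PeriodicSpace>();
        return 0;
    }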
/src/repast4py/geometry.cpp:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #include "types.h"
8 | #include "geometry.h"
9 |
10 | namespace repast4py {
11 |
12 | R4Py_DiscretePoint* create_point(PyTypeObject* pt_type, const Point<R4Py_DiscretePoint>& wpt) {
13 | R4Py_DiscretePoint* pt = (R4Py_DiscretePoint*)pt_type->tp_new(pt_type, NULL, NULL);
14 | update_point(pt, wpt);
15 | return pt;
16 | }
17 |
18 | bool point_equals(R4Py_DiscretePoint* pt, const Point<R4Py_DiscretePoint>& coords) {
19 | if (pt) {
20 | long_t* data = (long_t*)PyArray_DATA(pt->coords);
21 | //printf("%lu,%lu,%lu -- %lu,%lu,%lu\n", data[0], data[1], data[2],
22 | // coords.x, coords.y, coords.z);
23 | return data[0] == coords.x && data[1] == coords.y && data[2] == coords.z;
24 | }
25 | return false;
26 | }
27 |
28 | void extract_coords(R4Py_DiscretePoint* pt, Point<R4Py_DiscretePoint>& coords) {
29 | long_t* data = (long_t*)PyArray_DATA(pt->coords);
30 | coords.x = data[0];
31 | coords.y = data[1];
32 | coords.z = data[2];
33 | }
34 |
35 | void update_point(R4Py_DiscretePoint* pt, const Point<R4Py_DiscretePoint>& coords) {
36 | long_t* data = (long_t*)PyArray_DATA(pt->coords);
37 | data[0] = coords.x;
38 | data[1] = coords.y;
39 | data[2] = coords.z;
40 |
41 | //printf("Updated Point: %lu,%lu,%lu\n", data[0], data[1], data[2]);
42 | }
43 |
44 | R4Py_ContinuousPoint* create_point(PyTypeObject* pt_type, const Point<R4Py_ContinuousPoint>& wpt) {
45 | R4Py_ContinuousPoint* pt = (R4Py_ContinuousPoint*)pt_type->tp_new(pt_type, NULL, NULL);
46 | update_point(pt, wpt);
47 | return pt;
48 | }
49 |
50 | bool point_equals(R4Py_ContinuousPoint* pt, const Point<R4Py_ContinuousPoint>& coords) {
51 | if (pt) {
52 | double* data = (double*)PyArray_DATA(pt->coords);
53 | //printf("%lu,%lu,%lu -- %lu,%lu,%lu\n", data[0], data[1], data[2],
54 | // coords.x, coords.y, coords.z);
55 | return data[0] == coords.x && data[1] == coords.y && data[2] == coords.z;
56 | }
57 | return false;
58 |
59 | }
60 | // sets coords.xyz from pt.xyz
61 | void extract_coords(R4Py_ContinuousPoint* pt, Point<R4Py_ContinuousPoint>& coords) {
62 | double* data = (double*)PyArray_DATA(pt->coords);
63 | coords.x = data[0];
64 | coords.y = data[1];
65 | coords.z = data[2];
66 | }
67 |
68 | // sets pt.xyz from coords.xyz
69 | void update_point(R4Py_ContinuousPoint* pt, const Point<R4Py_ContinuousPoint>& coords) {
70 | double* data = (double*)PyArray_DATA(pt->coords);
71 | data[0] = coords.x;
72 | data[1] = coords.y;
73 | data[2] = coords.z;
74 | }
75 |
76 | std::ostream& operator<<(std::ostream& os, const BoundingBox& box) {
77 | os << "BoundingBox(" << box.xmin_ << ", " << box.x_extent_ << ", "
78 | << box.ymin_ << ", " << box.y_extent_ << ", "
79 | << box.zmin_ << ", " << box.z_extent_ << ")";
80 | return os;
81 | }
82 |
83 | BoundingBox::BoundingBox(coord_type xmin, coord_type x_extent, coord_type ymin, coord_type y_extent,
84 | coord_type zmin, coord_type z_extent) : xmin_{xmin}, xmax_{xmin + x_extent}, ymin_{ymin},
85 | ymax_{ymin + y_extent}, zmin_{zmin}, zmax_{zmin + z_extent}, x_extent_{x_extent}, y_extent_{y_extent},
86 | z_extent_{z_extent}, num_dims{1} {
87 |
88 | if (y_extent_ > 0) num_dims = 2;
89 | if (z_extent_ > 0) num_dims = 3;
90 |
91 | }
92 |
93 | BoundingBox::BoundingBox(const BoundingBox& other) : xmin_{other.xmin_},
94 | xmax_{other.xmin_ + other.x_extent_}, ymin_{other.ymin_}, ymax_{other.ymin_ + other.y_extent_},
95 | zmin_{other.zmin_}, zmax_{other.zmin_ + other.z_extent_}, x_extent_{other.x_extent_},
96 | y_extent_{other.y_extent_}, z_extent_{other.z_extent_}, num_dims(1)
97 | {
98 | if (y_extent_ > 0) num_dims = 2;
99 | if (z_extent_ > 0) num_dims = 3;
100 |
101 | }
102 |
103 |
104 | void BoundingBox::reset(coord_type xmin, coord_type x_extent, coord_type ymin, coord_type y_extent,
105 | coord_type zmin, coord_type z_extent)
106 | {
107 | xmin_ = xmin;
108 | x_extent_ = x_extent;
109 | xmax_ = xmin + x_extent;
110 |
111 | ymin_ = ymin;
112 | y_extent_ = y_extent;
113 | ymax_ = ymin + y_extent;
114 |
115 | zmin_ = zmin;
116 | z_extent_ = z_extent;
117 | zmax_ = zmin + z_extent;
118 |
119 | num_dims = 1;
120 | if (y_extent_ > 0) num_dims = 2;
121 | if (z_extent_ > 0) num_dims = 3;
122 | }
123 |
124 |
125 | bool BoundingBox::contains(const R4Py_DiscretePoint* pt) const {
126 | coord_type* data = (coord_type*)PyArray_DATA(pt->coords);
127 |
128 | bool y_contains = true;
129 | bool z_contains = true;
130 | bool x_contains = data[0] >= xmin_ && data[0] < xmax_;
131 |
132 | if (num_dims == 2) {
133 | y_contains = data[1] >= ymin_ && data[1] < ymax_;
134 | } else if (num_dims == 3) {
135 | y_contains = data[1] >= ymin_ && data[1] < ymax_;
136 | z_contains = data[2] >= zmin_ && data[2] < zmax_;
137 | }
138 |
139 | return x_contains && y_contains && z_contains;
140 | }
141 |
142 | bool BoundingBox::contains(const Point<R4Py_DiscretePoint>& pt) const {
143 | bool y_contains = true;
144 | bool z_contains = true;
145 | bool x_contains = pt.x >= xmin_ && pt.x < xmax_;
146 |
147 | if (num_dims == 2) {
148 | y_contains = pt.y >= ymin_ && pt.y < ymax_;
149 | } else if (num_dims == 3) {
150 | y_contains = pt.y >= ymin_ && pt.y < ymax_;
151 | z_contains = pt.z >= zmin_ && pt.z < zmax_;
152 | }
153 |
154 | return x_contains && y_contains && z_contains;
155 | }
156 |
157 | bool BoundingBox::contains(const R4Py_ContinuousPoint* pt) const {
158 | using pt_type = typename TypeSelector<R4Py_ContinuousPoint>::type;
159 | pt_type* data = (pt_type*)PyArray_DATA(pt->coords);
160 |
161 | bool y_contains = true;
162 | bool z_contains = true;
163 | bool x_contains = data[0] >= xmin_ && data[0] < xmax_;
164 |
165 | if (num_dims == 2) {
166 | y_contains = data[1] >= ymin_ && data[1] < ymax_;
167 | } else if (num_dims == 3) {
168 | y_contains = data[1] >= ymin_ && data[1] < ymax_;
169 | z_contains = data[2] >= zmin_ && data[2] < zmax_;
170 | }
171 |
172 | return x_contains && y_contains && z_contains;
173 |
174 | }
175 |
176 | bool BoundingBox::contains(const Point<R4Py_ContinuousPoint>& pt) const {
177 | bool y_contains = true;
178 | bool z_contains = true;
179 | bool x_contains = pt.x >= xmin_ && pt.x < xmax_;
180 |
181 | if (num_dims == 2) {
182 | y_contains = pt.y >= ymin_ && pt.y < ymax_;
183 | } else if (num_dims == 3) {
184 | y_contains = pt.y >= ymin_ && pt.y < ymax_;
185 | z_contains = pt.z >= zmin_ && pt.z < zmax_;
186 | }
187 |
188 | return x_contains && y_contains && z_contains;
189 | }
190 |
191 |
192 |
193 |
194 | }
195 |
--------------------------------------------------------------------------------
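Note: BoundingBox::contains above treats each dimension as a half-open [min, min + extent) interval, with num_dims derived from which extents are nonzero. A small sketch of the same containment test on a plain 2D struct (Box2D is a stand-in, not a repast4py type):

    // Illustrative sketch only -- not part of repast4py.
    #include <cstdio>

    struct Box2D {
        double xmin, xmax, ymin, ymax;
        // Half-open test: the min edge is inside, the max edge is not.
        bool contains(double x, double y) const {
            return x >= xmin && x < xmax && y >= ymin && y < ymax;
        }
    };

    int main() {
        // BoundingBox(xmin=0, x_extent=10, ymin=0, y_extent=10) -> [0,10) x [0,10)
        Box2D box{0.0, 10.0, 0.0, 10.0};
        std::printf("%d\n", box.contains(9.999, 5.0));  // 1: inside
        std::printf("%d\n", box.contains(10.0, 5.0));   // 0: xmax itself is excluded
        return 0;
    }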
/src/repast4py/geometry.h:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #ifndef R4PY_SRC_GEOMETRY_H
8 | #define R4PY_SRC_GEOMETRY_H
9 |
10 | #define PY_SSIZE_T_CLEAN
11 | #include <Python.h>
12 |
13 | #include
14 | #include
15 | #include
16 | #include
17 |
18 |
19 | #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
20 | // See https://docs.scipy.org/doc/numpy/reference/c-api.array.html#importing-the-api
21 | #define NO_IMPORT_ARRAY_API
22 | //#define PY_ARRAY_UNIQUE_SYMBOL REPAST4PY_ARRAY_API
23 | #include "numpy/arrayobject.h"
24 |
25 | #include "types.h"
26 |
27 | namespace repast4py {
28 |
29 | struct R4Py_DiscretePoint {
30 | PyObject_HEAD
31 | // array of longs
32 | PyArrayObject* coords;
33 | };
34 |
35 | struct R4Py_ContinuousPoint {
36 | PyObject_HEAD
37 | // array of doubles
38 | PyArrayObject* coords;
39 | };
40 |
41 |
42 | template<typename PointType>
43 | struct TypeSelector {
44 | //using type = double;
45 | };
46 |
47 | template<>
48 | struct TypeSelector<R4Py_DiscretePoint> {
49 | using type = long_t;
50 | };
51 |
52 | template<>
53 | struct TypeSelector<R4Py_ContinuousPoint> {
54 | using type = double;
55 | };
56 |
57 | template<typename PointType>
58 | struct Point {
59 | using coord_type = typename TypeSelector<PointType>::type;
60 | coord_type x, y, z;
61 | };
62 |
63 | R4Py_DiscretePoint* create_point(PyTypeObject* pt_type, const Point<R4Py_DiscretePoint>& wpt);
64 | bool point_equals(R4Py_DiscretePoint* pt, const Point<R4Py_DiscretePoint>& coords);
65 | // sets coords.xyz from pt.xyz
66 | void extract_coords(R4Py_DiscretePoint* pt, Point<R4Py_DiscretePoint>& coords);
67 | // sets pt.xyz from coords.xyz
68 | void update_point(R4Py_DiscretePoint* pt, const Point<R4Py_DiscretePoint>& coords);
69 |
70 | R4Py_ContinuousPoint* create_point(PyTypeObject* pt_type, const Point<R4Py_ContinuousPoint>& wpt);
71 | bool point_equals(R4Py_ContinuousPoint* pt, const Point<R4Py_ContinuousPoint>& coords);
72 | // sets coords.xyz from pt.xyz
73 | void extract_coords(R4Py_ContinuousPoint* pt, Point<R4Py_ContinuousPoint>& coords);
74 | // sets pt.xyz from coords.xyz
75 | void update_point(R4Py_ContinuousPoint* pt, const Point<R4Py_ContinuousPoint>& coords);
76 |
77 |
78 | template<typename PointType>
79 | struct PointComp {
80 | bool operator()(const Point<PointType>& p1, const Point<PointType>& p2) {
81 | if (p1.x != p2.x) return p1.x < p2.x;
82 | if (p1.y != p2.y) return p1.y < p2.y;
83 | return p1.z < p2.z;
84 | }
85 |
86 | bool operator()(const Point<PointType>& p1, const Point<PointType>& p2) const {
87 | if (p1.x != p2.x) return p1.x < p2.x;
88 | if (p1.y != p2.y) return p1.y < p2.y;
89 | return p1.z < p2.z;
90 | }
91 | };
92 |
93 |
94 | template<typename PointType>
95 | struct PtrPointComp {
96 | using coord_type = typename TypeSelector<PointType>::type;
97 |
98 | bool operator()(const PointType* p1, const PointType* p2) {
99 | coord_type* p1_data = (coord_type*)PyArray_DATA(p1->coords);
100 | coord_type* p2_data = (coord_type*)PyArray_DATA(p2->coords);
101 | if (p1_data[0] != p2_data[0]) return p1_data[0] < p2_data[0];
102 | if (p1_data[1] != p2_data[1]) return p1_data[1] < p2_data[1];
103 | return p1_data[2] < p2_data[2];
104 | }
105 |
106 | bool operator()(const PointType* p1, const PointType* p2) const {
107 | coord_type* p1_data = (coord_type*)PyArray_DATA(p1->coords);
108 | coord_type* p2_data = (coord_type*)PyArray_DATA(p2->coords);
109 | if (p1_data[0] != p2_data[0]) return p1_data[0] < p2_data[0];
110 | if (p1_data[1] != p2_data[1]) return p1_data[1] < p2_data[1];
111 | return p1_data[2] < p2_data[2];
112 | }
113 | };
114 |
115 | struct BoundingBox {
116 | using coord_type = typename TypeSelector<R4Py_DiscretePoint>::type;
117 | coord_type xmin_, xmax_;
118 | coord_type ymin_, ymax_;
119 | coord_type zmin_, zmax_;
120 | coord_type x_extent_, y_extent_, z_extent_;
121 | unsigned int num_dims;
122 |
123 | BoundingBox(coord_type xmin, coord_type x_extent, coord_type ymin, coord_type y_extent,
124 | coord_type zmin = 0, coord_type z_extent = 0);
125 | BoundingBox(const BoundingBox& other);
126 |
127 | ~BoundingBox() {}
128 |
129 |
130 | void reset(coord_type xmin, coord_type x_extent, coord_type ymin, coord_type y_extent,
131 | coord_type zmin = 0, coord_type z_extent = 0);
132 | bool contains(const R4Py_DiscretePoint* pt) const;
133 | bool contains(const Point<R4Py_DiscretePoint>& pt) const;
134 | bool contains(const R4Py_ContinuousPoint* pt) const;
135 | bool contains(const Point<R4Py_ContinuousPoint>& pt) const;
136 | };
137 |
138 |
139 | std::ostream& operator<<(std::ostream& os, const BoundingBox& box);
140 |
141 | }
142 |
143 | #endif
--------------------------------------------------------------------------------
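Note: PointComp and PtrPointComp above impose a lexicographic (x, then y, then z) ordering so points can key ordered containers such as a location map. A standalone sketch of that ordering with a plain point struct (Pt and PtComp are stand-ins, not repast4py types):

    // Illustrative sketch only -- not part of repast4py.
    #include <cstdio>
    #include <map>

    struct Pt { long x, y, z; };

    struct PtComp {
        // Lexicographic: compare x first, then y, then z.
        bool operator()(const Pt& a, const Pt& b) const {
            if (a.x != b.x) return a.x < b.x;
            if (a.y != b.y) return a.y < b.y;
            return a.z < b.z;
        }
    };

    int main() {
        // Location-keyed map, analogous to a location map keyed by point.
        std::map<Pt, int, PtComp> counts;
        counts[{1, 2, 0}] += 1;
        counts[{1, 1, 0}] += 1;
        counts[{1, 2, 0}] += 1;   // same key as the first insert
        for (const auto& kv : counts)
            std::printf("(%ld,%ld,%ld) -> %d\n", kv.first.x, kv.first.y, kv.first.z, kv.second);
        return 0;
    }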
/src/repast4py/grid.h:
--------------------------------------------------------------------------------
1 | // Copyright 2021, UChicago Argonne, LLC
2 | // All Rights Reserved
3 | // Software Name: repast4py
4 | // By: Argonne National Laboratory
5 | // License: BSD-3 - https://github.com/Repast/repast4py/blob/master/LICENSE.txt
6 |
7 | #ifndef R4PY_SRC_GRID_H
8 | #define R4PY_SRC_GRID_H
9 |
10 | #include
11 | #include
12 |
13 | #define PY_SSIZE_T_CLEAN
14 | #include <Python.h>
15 |
16 | #include "space.h"
17 |
18 | namespace repast4py {
19 |
20 | template
21 | class BaseGrid : public BaseSpace {
22 |
23 | using BaseSpace::agent_map;
24 | using BaseSpace::location_map;
25 | using BaseSpace::borders;
26 | using BaseSpace::accessor;
27 | using BaseSpace::wpt;
28 | using BaseSpace::name_;
29 |
30 | public:
31 | using PointType = R4Py_DiscretePoint;
32 | BaseGrid(const std::string& name, const BoundingBox& bounds);
33 | ~BaseGrid();
34 |
35 | void getAgentsWithin(const BoundingBox& bounds, std::shared_ptr>& agents) override;
36 | R4Py_DiscretePoint* move(R4Py_Agent* agent, R4Py_DiscretePoint* to) override;
37 | };
38 |
39 | template
40 | BaseGrid::BaseGrid(const std::string& name, const BoundingBox& bounds) :
41 | BaseSpace(name, bounds) {}
42 |
43 | template
44 | BaseGrid::~BaseGrid() {}
45 |
46 | template
47 | void BaseGrid::getAgentsWithin(const BoundingBox& bounds, std::shared_ptr>& agents) {
48 |
49 | }
50 |
51 | template
52 | R4Py_DiscretePoint* BaseGrid::move(R4Py_Agent* agent, R4Py_DiscretePoint* pt) {
53 | // If this gets changed such that the argument pt is not a temp input arg then
54 | // we need to make sure that any move calls reflect that.
55 | auto iter = agent_map.find(agent->aid);
56 | if (iter != agent_map.end()) {
57 | borders.transform(pt, wpt);
58 | if (!point_equals(iter->second->pt, wpt)) {
59 | if (accessor.put(agent, location_map, wpt)) {
60 | if (iter->second->pt) {
61 | // if successful put, and agent is already located
62 | // so need to remove
63 | Point<R4Py_DiscretePoint> ppt;
64 | extract_coords(iter->second->pt, ppt);
65 | accessor.remove(agent, location_map, ppt);
66 | update_point(iter->second->pt, wpt);
67 | } else {
68 | iter->second->pt = create_point(Py_TYPE(pt), wpt);
69 | }
70 | } else {
71 | return nullptr;
72 | }
73 | }
74 | return iter->second->pt;
75 |
76 | } else {
77 | R4Py_AgentID* id = agent->aid;
78 | throw std::invalid_argument("Error moving agent (" + std::to_string(id->id) + "," +
79 | std::to_string(id->type) + "): agent is not in " + name_);
80 | }
81 | }
82 |
83 | class IGrid {
84 |
85 | public:
86 | virtual ~IGrid() = 0;
87 |
88 | virtual bool add(R4Py_Agent* agent) = 0;
89 | virtual bool remove(R4Py_Agent* agent) = 0;
90 | virtual bool remove(R4Py_AgentID* aid) = 0;
91 | virtual R4Py_Agent* getAgentAt(R4Py_DiscretePoint* pt) = 0;
92 | virtual AgentListPtr getAgentsAt(R4Py_DiscretePoint* pt) = 0;
93 | virtual R4Py_DiscretePoint* getLocation(R4Py_Agent* agent) = 0;
94 | virtual R4Py_DiscretePoint* move(R4Py_Agent* agent, R4Py_DiscretePoint* to) = 0;
95 | virtual const std::string name() const = 0;
96 | virtual bool contains(R4Py_Agent* agent) const = 0;
97 | };
98 |
99 | inline IGrid::~IGrid() {}
100 |
101 | template<typename DelegateType>
102 | class Grid : public IGrid {
103 |
104 | private:
105 | std::unique_ptr<DelegateType> delegate;
106 |
107 | public:
108 | Grid(const std::string& name, const BoundingBox& bounds);
109 | virtual ~Grid() {}
110 | bool add(R4Py_Agent* agent) override;
111 | bool remove(R4Py_Agent* agent) override;
112 | bool remove(R4Py_AgentID* aid) override;
113 | R4Py_Agent* getAgentAt(R4Py_DiscretePoint* pt) override;
114 | AgentListPtr getAgentsAt(R4Py_DiscretePoint* pt) override;
115 | R4Py_DiscretePoint* getLocation(R4Py_Agent* agent) override;
116 | R4Py_DiscretePoint* move(R4Py_Agent* agent, R4Py_DiscretePoint* to) override;
117 | const std::string name() const override;
118 | bool contains(R4Py_Agent* agent) const override;
119 | };
120 |
121 | template<typename DelegateType>
122 | Grid<DelegateType>::Grid(const std::string& name, const BoundingBox& bounds) :
123 | delegate{std::unique_ptr<DelegateType>(new DelegateType(name, bounds))} {}
124 |
125 | template<typename DelegateType>
126 | bool Grid<DelegateType>::add(R4Py_Agent* agent) {
127 | return delegate->add(agent);
128 | }
129 |
130 | template<typename DelegateType>
131 | bool Grid<DelegateType>::remove(R4Py_Agent* agent) {
132 | return delegate->remove(agent);
133 | }
134 |
135 | template<typename DelegateType>
136 | bool Grid<DelegateType>::remove(R4Py_AgentID* aid) {
137 | return delegate->remove(aid);
138 | }
139 |
140 | template<typename DelegateType>
141 | R4Py_Agent* Grid<DelegateType>::getAgentAt(R4Py_DiscretePoint* pt) {
142 | return delegate->getAgentAt(pt);
143 | }
144 |
145 | template<typename DelegateType>
146 | AgentListPtr Grid<DelegateType>::getAgentsAt(R4Py_DiscretePoint* pt) {
147 | return delegate->getAgentsAt(pt);
148 | }
149 |
150 | template<typename DelegateType>
151 | R4Py_DiscretePoint* Grid<DelegateType>::getLocation(R4Py_Agent* agent) {
152 | return delegate->getLocation(agent);
153 | }
154 |
155 | template<typename DelegateType>
156 | R4Py_DiscretePoint* Grid<DelegateType>::move(R4Py_Agent* agent, R4Py_DiscretePoint* to) {
157 | return delegate->move(agent, to);
158 | }
159 |
160 | template<typename DelegateType>
161 | const std::string Grid<DelegateType>::name() const {
162 | return delegate->name();
163 | }
164 |
165 | template<typename DelegateType>
166 | bool Grid<DelegateType>::contains(R4Py_Agent* agent) const {
167 | return delegate->contains(agent);
168 | }
169 |
170 |
171 | // typedefs for Discrete Grid with multi occupancy and sticky borders
172 | using DiscreteMOType = MultiOccupancyAccessor, R4Py_DiscretePoint>;
173 | using DiscreteSOType = SingleOccupancyAccessor, R4Py_DiscretePoint>;
174 | using MOSGrid = BaseGrid;
175 | using MOPGrid = BaseGrid;
176 | using SOSGrid = BaseGrid