├── desmod
├── py.typed
├── util.py
├── timescale.py
├── __init__.py
├── probe.py
├── dot.py
├── queue.py
├── progress.py
├── component.py
├── simulation.py
└── tracer.py
├── .gitattributes
├── docs
├── examples
│ ├── grocery
│ │ ├── workspace
│ │ │ ├── sim.log
│ │ │ ├── conn.dot
│ │ │ ├── hier.dot
│ │ │ ├── all.dot
│ │ │ └── result.json
│ │ └── grocery.rst
│ ├── index.rst
│ ├── gas_station
│ │ ├── workspace
│ │ │ ├── conn.dot
│ │ │ ├── hier.dot
│ │ │ ├── all.dot
│ │ │ ├── results.yaml
│ │ │ └── sim.log
│ │ ├── gas_station.rst
│ │ └── gas_station.py
│ └── code
│ │ └── sim.dot
├── api
│ ├── desmod.rst
│ ├── desmod.dot.rst
│ ├── desmod.pool.rst
│ ├── desmod.queue.rst
│ ├── index.rst
│ ├── desmod.config.rst
│ ├── desmod.simulation.rst
│ └── desmod.component.rst
├── index.rst
├── history.rst
├── conf.py
├── Makefile
└── make.bat
├── .coveragerc
├── setup.py
├── .gitignore
├── tests
├── conftest.py
├── test_util.py
├── test_timescale.py
├── test_dot.py
├── test_probe.py
├── test_queue.py
├── test_config.py
├── test_tracer.py
└── test_pool.py
├── tox.ini
├── requirements.txt
├── pyproject.toml
├── Makefile
├── LICENSE.txt
├── README.rst
├── setup.cfg
├── .github
└── workflows
│ └── ci.yml
├── CODE_OF_CONDUCT.md
└── CHANGELOG.rst
/desmod/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto
2 |
--------------------------------------------------------------------------------
/docs/examples/grocery/workspace/sim.log:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | include = desmod/*,tests/*
4 |
--------------------------------------------------------------------------------
/docs/api/desmod.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | ``desmod``
3 | ==========
4 |
5 | .. automodule:: desmod
6 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | from setuptools import setup
3 |
4 | setup(use_scm_version=True)
5 |
--------------------------------------------------------------------------------
/docs/api/desmod.dot.rst:
--------------------------------------------------------------------------------
1 | ==============
2 | ``desmod.dot``
3 | ==============
4 |
5 | .. automodule:: desmod.dot
6 |
7 | .. autofunction:: component_to_dot
8 |
--------------------------------------------------------------------------------
/docs/examples/index.rst:
--------------------------------------------------------------------------------
1 | ========
2 | Examples
3 | ========
4 |
5 | .. toctree::
6 | :maxdepth: 1
7 |
8 | gas_station/gas_station
9 | grocery/grocery
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | __pycache__
3 | .mypy_cache
4 | *.egg-info/
5 | .eggs/
6 | build/
7 | dist/
8 | .cache/
9 | .tox/
10 | .coverage
11 | htmlcov/
12 | docs/_build/
13 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
import pytest
import simpy


@pytest.fixture
def env():
    """Provide a fresh :class:`simpy.Environment` to tests requesting `env`."""
    environment = simpy.Environment()
    return environment
--------------------------------------------------------------------------------
/docs/api/desmod.pool.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | ``desmod.pool``
3 | ===============
4 |
5 | .. automodule:: desmod.pool
6 |
7 | .. autoclass:: Pool
8 | :members:
9 |
10 | .. autoclass:: PriorityPool
11 | :inherited-members:
12 | :members:
13 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py36,py37,py38,pypy3,flake8
3 | skip_missing_interpreters = True
4 |
5 | [testenv]
6 | deps = -rrequirements.txt
7 | commands = pytest
8 |
9 | [testenv:flake8]
10 | skip_install = True
11 | deps =
12 | flake8
13 | commands =
14 | flake8
15 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | black >= 20.8b1; implementation_name == "cpython"
2 | colorama
3 | flake8
4 | isort
5 | mypy >= 0.770; implementation_name == "cpython"
6 | progressbar2
7 | pytest
8 | pytest-cov
9 | setuptools_scm
10 | Sphinx
11 | sphinx-rtd-theme
12 | tox
13 | types-PyYAML
14 |
--------------------------------------------------------------------------------
/docs/api/desmod.queue.rst:
--------------------------------------------------------------------------------
1 | ================
2 | ``desmod.queue``
3 | ================
4 |
5 | .. automodule:: desmod.queue
6 |
7 | .. autoclass:: Queue
8 | :members:
9 |
10 | .. autoclass:: PriorityQueue
11 | :inherited-members:
12 | :members:
13 |
14 | .. autoclass:: PriorityItem
15 | :members: priority, item
16 |
--------------------------------------------------------------------------------
/docs/api/index.rst:
--------------------------------------------------------------------------------
1 | =============
2 | API Reference
3 | =============
4 |
5 | This API reference details desmod's various modules, classes and
6 | functions.
7 |
8 | .. toctree::
9 | :maxdepth: 1
10 |
11 | desmod
12 | desmod.config
13 | desmod.component
14 | desmod.dot
15 | desmod.pool
16 | desmod.queue
17 | desmod.simulation
18 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to desmod
2 | =================
3 |
4 | Contents:
5 |
6 | .. toctree::
7 | :maxdepth: 1
8 |
9 | api/index
10 | examples/index
11 | history
12 |
13 | .. include:: ../README.rst
14 |
15 |
16 | Indices and tables
17 | ==================
18 |
19 | * :ref:`genindex`
20 | * :ref:`modindex`
21 | * :ref:`search`
22 |
23 |
--------------------------------------------------------------------------------
/docs/examples/grocery/grocery.rst:
--------------------------------------------------------------------------------
1 | =============
2 | Grocery Store
3 | =============
4 |
5 | This example aims to demonstrate a wide breadth of desmod features.
6 |
7 | .. graphviz:: workspace/hier.dot
8 |
9 | .. literalinclude:: grocery.py
10 |
11 | Running the simulation with the default configuration produces the
12 | following ``result.json``:
13 |
14 | .. literalinclude:: workspace/result.json
15 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.setuptools_scm]
6 |
7 | [tool.black]
8 | line-length = 88
9 | target-version = ["py36"]
10 | skip-string-normalization = true
11 | exclude = '''
12 | (
13 | ^\/\..*$
14 | | \.egg-info
15 | | _build
16 | | build
17 | | dist
18 | | htmlcov
19 | )
20 | '''
21 |
--------------------------------------------------------------------------------
/docs/examples/gas_station/workspace/conn.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | "Top" [shape=box,style="rounded,filled",fillcolor="/blues5/1",label=<Top
>];
3 | "station0" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<station0..station2
>];
4 | "tankerco" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<tankerco
>];
5 | "tankerco.truck0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<truck0..truck1
>];
6 |
7 | "station0" -> "tankerco" [];
8 | }
--------------------------------------------------------------------------------
/docs/api/desmod.config.rst:
--------------------------------------------------------------------------------
1 | =================
2 | ``desmod.config``
3 | =================
4 |
5 | .. automodule:: desmod.config
6 |
7 | .. autoclass:: ConfigError
8 | :members:
9 |
10 | .. autoclass:: NamedConfig
11 | :inherited-members:
12 | :members:
13 |
14 | .. autoclass:: NamedManager
15 | :members:
16 |
17 | .. autofunction:: apply_user_overrides
18 |
19 | .. autofunction:: parse_user_factors
20 |
21 | .. autofunction:: parse_user_factor
22 |
23 | .. autofunction:: factorial_config
24 |
25 | .. autofunction:: fuzzy_match
26 |
27 | .. autofunction:: fuzzy_lookup
28 |
--------------------------------------------------------------------------------
/tests/test_util.py:
--------------------------------------------------------------------------------
import pytest

from desmod.util import partial_format

#: (expected result, input format string, replacement kwargs) triples.
PARTIAL_FORMAT_CASES = [
    ('abc', 'abc', {}),
    ('aBc', 'a{b}c', {'b': 'B'}),
    ('a{b!r}c', 'a{b!r}c', {}),
    ("a'B'c", 'a{b!r}c', {'b': 'B'}),
    ('a {:.2f} c', 'a {:.{digits}f} c', {'digits': 2}),
    ('A{b}C', '{a}{b}{c}', {'a': 'A', 'c': 'C'}),
]


@pytest.mark.parametrize('expected, format_str, kwargs', PARTIAL_FORMAT_CASES)
def test_partial_format(expected, format_str, kwargs):
    """Known named fields are substituted; unknown fields survive intact."""
    assert partial_format(format_str, **kwargs) == expected
--------------------------------------------------------------------------------
/docs/examples/code/sim.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | subgraph "cluster_Top" {
3 | label=<Top>
4 | style="filled"
5 | fillcolor="/blues5/1"
6 | "station0" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<station0..station2
>];
7 | subgraph "cluster_tankerco" {
8 | label=<tankerco>
9 | style="filled"
10 | fillcolor="/blues5/2"
11 | "tankerco.truck0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<truck0..truck1
>];
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/docs/examples/gas_station/workspace/hier.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | subgraph "cluster_Top" {
3 | label=<Top>
4 | style="filled"
5 | fillcolor="/blues5/1"
6 | "station0" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<station0..station2
>];
7 | subgraph "cluster_tankerco" {
8 | label=<tankerco>
9 | style="filled"
10 | fillcolor="/blues5/2"
11 | "tankerco.truck0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<truck0..truck1
>];
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | History
2 | =======
3 |
4 | Desmod development began in early 2016 as an internal project at SanDisk
5 | Corporation (now Western Digital) out of a desire to improve the pace of
6 | model development for architectural exploration and performance
7 | estimation of solid state storage systems. Using Python and SimPy for
8 | rapid model iteration proved to be a great improvement over SystemC
9 | based strategies.
10 |
11 | However, although SimPy is a solid foundation for discrete event
12 | simulation, building a complete model demanded solutions for other
13 | problems such as configuration, command line interface, model
14 | organization, monitoring, logging, capturing results, and more. Desmod
15 | was written to fill those gaps.
16 |
17 | Desmod was released as Free Software under the terms of the MIT License
18 | in July, 2016.
19 |
20 | .. include:: ../CHANGELOG.rst
21 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | PYTHON ?= python
2 |
3 | .PHONY: lint
4 | lint: lint-black lint-isort lint-flake8 lint-mypy
5 |
6 | .PHONY: lint-black
7 | lint-black:
8 | black --check --quiet --diff .
9 |
10 | .PHONY: lint-isort
11 | lint-isort:
12 | isort --check-only --quiet --diff --recursive .
13 |
14 | .PHONY: lint-flake8
15 | lint-flake8:
16 | flake8 .
17 |
18 | .PHONY: lint-mypy
19 | lint-mypy:
20 | mypy --pretty
21 |
22 | .PHONY: format
23 | format: format-black format-isort
24 |
25 | .PHONY: format-black
26 | format-black:
27 | black .
28 |
29 | .PHONY: format-isort
30 | format-isort:
31 | isort --recursive .
32 |
33 | .PHONY: test
34 | test:
35 | pytest
36 |
37 | .PHONY: coverage
38 | coverage:
39 | pytest --cov
40 |
41 | .PHONY: docs
42 | docs:
43 | $(MAKE) -C docs html
44 |
45 | .PHONY: build
46 | build:
47 | $(PYTHON) setup.py build
48 |
49 | .PHONY: dist
50 | dist:
51 | $(PYTHON) setup.py sdist bdist_wheel
52 |
--------------------------------------------------------------------------------
/docs/examples/grocery/workspace/conn.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | "Top" [shape=box,style="rounded,filled",fillcolor="/blues5/1",label=<Top
>];
3 | "customers" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<customers
>];
4 | "grocery" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<grocery
>];
5 | "grocery.bagger0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<bagger0
>];
6 | "grocery.lane0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<lane0..lane1
>];
7 | "grocery.lane0.cashier" [shape=box,style="rounded,filled",fillcolor="/blues5/4",label=<cashier
>];
8 |
9 | "customers" -> "grocery" [];
10 | "grocery.bagger0" -> "grocery.lane0" [];
11 | "grocery.lane0.cashier" -> "grocery.lane0" [];
12 | }
--------------------------------------------------------------------------------
/docs/examples/grocery/workspace/hier.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | subgraph "cluster_Top" {
3 | label=<Top>
4 | style="filled"
5 | fillcolor="/blues5/1"
6 | "customers" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<customers
>];
7 | subgraph "cluster_grocery" {
8 | label=<grocery>
9 | style="filled"
10 | fillcolor="/blues5/2"
11 | "grocery.bagger0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<bagger0
>];
12 | subgraph "cluster_grocery.lane0" {
13 | label=<lane0..lane1>
14 | style="filled"
15 | fillcolor="/blues5/3"
16 | "grocery.lane0.cashier" [shape=box,style="rounded,filled",fillcolor="/blues5/4",label=<cashier
>];
17 | }
18 | }
19 | }
20 | }
--------------------------------------------------------------------------------
/docs/api/desmod.simulation.rst:
--------------------------------------------------------------------------------
1 | =====================
2 | ``desmod.simulation``
3 | =====================
4 |
5 | .. automodule:: desmod.simulation
6 |
7 | .. autoclass:: desmod.simulation.SimEnvironment
8 |
9 | .. autoinstanceattribute:: config
10 | :annotation:
11 | .. autoinstanceattribute:: rand
12 | :annotation:
13 | .. autoinstanceattribute:: timescale
14 | :annotation:
15 | .. autoinstanceattribute:: duration
16 | :annotation:
17 | .. autoinstanceattribute:: tracemgr
18 | :annotation:
19 | .. autoattribute:: now
20 | .. automethod:: time(t=None, unit='s')
21 | .. autoattribute:: active_process
22 | .. automethod:: process(generator)
23 | .. automethod:: timeout(delay, value)
24 | .. automethod:: event()
25 | .. automethod:: all_of(events)
26 | .. automethod:: any_of(events)
27 | .. automethod:: schedule
28 | .. automethod:: peek
29 | .. automethod:: step
30 |
31 | .. autofunction:: desmod.simulation.simulate
32 |
33 | .. autofunction:: desmod.simulation.simulate_factors
34 |
--------------------------------------------------------------------------------
/docs/examples/gas_station/gas_station.rst:
--------------------------------------------------------------------------------
1 | ===========
2 | Gas Station
3 | ===========
4 |
 5 | This example expands upon `SimPy's Gas Station Refueling example
 6 | <https://simpy.readthedocs.io/en/latest/examples/gas_station_refuel.html>`_,
7 | demonstrating various desmod features.
8 |
9 | .. note::
10 |
11 | Desmod's goal is to support large-scale modeling. Thus this example
12 | is somewhat larger-scale than the SimPy model it expands upon.
13 |
14 | .. literalinclude:: gas_station.py
15 |
16 | The model hierarchy is captured during elaboration as a `DOT
17 | <https://graphviz.org/doc/info/lang.html>`_ graph. See the
18 | :mod:`desmod.dot` documentation for more detail on DOT output.
19 |
20 | .. graphviz:: workspace/hier.dot
21 |
22 | The simulation log, ``sim.log``, shows what happened during the
23 | simulation:
24 |
25 | .. literalinclude:: workspace/sim.log
26 |
27 | This example does not make heavy use of desmod's result-gathering
28 | capability, but we can nonetheless see the minimal ``results.yaml`` file
29 | generated from the simulation:
30 |
31 | .. literalinclude:: workspace/results.yaml
32 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Western Digital Corporation
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/examples/gas_station/workspace/all.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | subgraph "cluster_Top" {
3 | label=<Top>
4 | style="filled"
5 | fillcolor="/blues5/1"
6 | "Top" [shape=box,style="dotted,filled",fillcolor="/blues5/1",label=<Top
>];
7 | "station0" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<
8 | station0..station2
9 |
10 | _monitor_reservoir
11 | _traffic_generator
>];
12 | subgraph "cluster_tankerco" {
13 | label=<tankerco>
14 | style="filled"
15 | fillcolor="/blues5/2"
16 | "tankerco" [shape=box,style="dotted,filled",fillcolor="/blues5/2",label=<tankerco
>];
17 | "tankerco.truck0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<
18 | truck0..truck1
19 |
20 | _dispatch_loop
>];
21 | }
22 | }
23 |
24 | "station0" -> "tankerco" [];
25 | }
--------------------------------------------------------------------------------
/docs/api/desmod.component.rst:
--------------------------------------------------------------------------------
1 | ====================
2 | ``desmod.component``
3 | ====================
4 |
5 | .. automodule:: desmod.component
6 |
7 | .. autoclass:: desmod.component.Component
8 |
9 | .. autoinstanceattribute:: env
10 | :annotation:
11 | .. autoinstanceattribute:: name
12 | :annotation:
13 | .. autoinstanceattribute:: index
14 | :annotation:
15 | .. autoinstanceattribute:: scope
16 | :annotation:
17 | .. autoinstanceattribute:: children
18 | :annotation:
19 | .. autoinstanceattribute:: error(*values)
20 | :annotation:
21 | .. autoinstanceattribute:: warn(*values)
22 | :annotation:
23 | .. autoinstanceattribute:: info(*values)
24 | :annotation:
25 | .. autoinstanceattribute:: debug(*values)
26 | :annotation:
27 | .. automethod:: add_process
28 | .. automethod:: add_processes
29 | .. automethod:: add_connections
30 | .. automethod:: connect
31 | .. automethod:: connect_children
32 | .. automethod:: pre_init
33 | .. automethod:: elaborate
34 | .. automethod:: elab_hook
35 | .. automethod:: post_simulate
36 | .. automethod:: post_sim_hook
37 | .. automethod:: get_result
38 | .. automethod:: get_result_hook
39 |
--------------------------------------------------------------------------------
/docs/examples/gas_station/workspace/results.yaml:
--------------------------------------------------------------------------------
1 | config:
2 | car.capacity: 50
3 | car.level: [5, 25]
4 | gas_station.arrival_interval: 60
5 | gas_station.capacity: 200
6 | gas_station.count: 3
7 | gas_station.pump_rate: 2
8 | gas_station.pumps: 2
9 | meta.sim.workspace: workspace
10 | sim.config.file: null
11 | sim.db.enable: false
12 | sim.db.persist: true
13 | sim.dot.all.file: all.dot
14 | sim.dot.colorscheme: blues5
15 | sim.dot.conn.file: conn.dot
16 | sim.dot.enable: true
17 | sim.dot.hier.file: hier.dot
18 | sim.duration: 500 s
19 | sim.log.buffering: -1
20 | sim.log.enable: true
21 | sim.log.exclude_pat: []
22 | sim.log.file: sim.log
23 | sim.log.format: '{level:7} {ts:.3f} {ts_unit}: {scope:<16}:'
24 | sim.log.include_pat: [.*]
25 | sim.log.level: INFO
26 | sim.log.persist: true
27 | sim.progress.enable: false
28 | sim.progress.max_width: null
29 | sim.progress.update_period: 1 s
30 | sim.result.file: results.yaml
31 | sim.seed: 42
32 | sim.timescale: s
33 | sim.vcd.enable: false
34 | sim.vcd.persist: true
35 | sim.workspace: workspace
36 | sim.workspace.overwrite: false
37 | tanker.capacity: 200
38 | tanker.count: 2
39 | tanker.pump_rate: 10
40 | tanker.travel_time: 100
41 | sim.exception: null
42 | sim.now: 500.0
43 | sim.runtime: 0.039155590000000004
44 | sim.time: 500
45 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | from setuptools_scm import get_version
5 |
6 | # flake8: noqa
7 |
8 | extensions = [
9 | 'sphinx.ext.autodoc',
10 | 'sphinx.ext.doctest',
11 | 'sphinx.ext.graphviz',
12 | 'sphinx.ext.intersphinx',
13 | 'sphinx.ext.todo',
14 | 'sphinx.ext.coverage',
15 | 'sphinx.ext.viewcode',
16 | ]
17 |
18 | autodoc_member_order = 'bysource'
19 | templates_path = ['_templates']
20 | source_suffix = '.rst'
21 | master_doc = 'index'
22 | project = 'desmod'
23 | copyright = '2021, Western Digital Corporation'
24 | author = 'Pete Grayson'
25 | version = get_version(root='..', relative_to=__file__)
26 | language = None
27 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
28 | pygments_style = 'sphinx'
29 | todo_include_todos = True
30 |
31 | html_theme = 'sphinx_rtd_theme'
32 | html_static_path = ['_static']
33 | htmlhelp_basename = 'desmoddoc'
34 |
35 | latex_elements = {}
36 | latex_documents = [
37 | (master_doc, 'desmod.tex', 'desmod Documentation', 'Pete Grayson', 'manual'),
38 | ]
39 | man_pages = [(master_doc, 'desmod', 'desmod Documentation', [author], 1)]
40 | texinfo_documents = [
41 | (
42 | master_doc,
43 | 'desmod',
44 | 'desmod Documentation',
45 | author,
46 | 'desmod',
47 | 'Discrete Event Simulation Modeling using SimPy',
48 | 'Miscellaneous',
49 | ),
50 | ]
51 |
52 | intersphinx_mapping = {
53 | 'python': ('https://docs.python.org/3/', None),
54 | 'simpy': ('https://simpy.readthedocs.io/en/latest/', None),
55 | }
56 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | desmod
2 | ======
3 |
4 | The desmod package provides a pythonic environment for composing
5 | Discrete Event Simulation MODels. The excellent `SimPy`__ package
6 | provides the discrete event simulation kernel. Desmod provides
7 | additional capabilities useful for composing, monitoring, configuring,
8 | and simulating arbitrarily complex models.
9 |
10 | __ https://simpy.readthedocs.io/en/latest/
11 |
12 | .. image:: https://readthedocs.org/projects/desmod/badge/?version=latest
13 | :target: https://desmod.readthedocs.io/en/latest/
14 | :alt: Documentation Status
15 |
16 | .. image:: https://github.com/westerndigitalcorporation/desmod/workflows/CI/badge.svg
17 | :target: https://github.com/westerndigitalcorporation/desmod/actions?query=workflow%3ACI
18 | :alt: Build Status
19 |
20 | .. image:: https://coveralls.io/repos/github/westerndigitalcorporation/desmod/badge.svg?branch=master
21 | :target: https://coveralls.io/github/westerndigitalcorporation/desmod?branch=master
22 | :alt: Code Coverage
23 |
24 |
25 | Installation
26 | ------------
27 |
28 | Desmod is available on PyPI and can be installed with `pip`::
29 |
30 | pip install desmod
31 |
32 |
33 | Resources
34 | ---------
35 |
36 | * `Documentation on ReadTheDocs <https://desmod.readthedocs.io/>`_
37 | * `Questions and development discussion Google Group
38 |   <https://groups.google.com/forum/#!forum/desmod>`_
39 | * `Source code, issue tracker, and CI on GitHub
40 |   <https://github.com/westerndigitalcorporation/desmod>`_
41 | * `Package on PyPI <https://pypi.org/project/desmod/>`_
42 |
--------------------------------------------------------------------------------
/desmod/util.py:
--------------------------------------------------------------------------------
import string

_formatter = string.Formatter()


def partial_format(format_string: str, **kwargs: object) -> str:
    """Apply replacements to a format string, leaving unknown fields intact.

    Partial formatting allows a format string to be progressively formatted
    in several passes: each call substitutes the named fields for which a
    value was supplied and re-escapes the remaining fields, so the returned
    string is itself still a valid format string. This may help amortize the
    expense of formatting or let different entities (with access to
    different information) cooperatively format a string.

    Only named replacement fields are supported; positional replacement
    fields are not supported and pass through unreplaced.

    :param str format_string: Format string to partially apply replacements.
    :param kwargs: Replacements for named fields.
    :returns: Partially formatted format string.

    """
    pieces = []
    for literal, field, spec, conversion in _formatter.parse(format_string):
        if literal:
            pieces.append(literal)
        if field is None:
            continue
        # Rebuild the replacement field's inner text, recursively resolving
        # any nested fields inside the format spec (e.g. '{:.{digits}f}').
        inner = field
        if conversion:
            inner += '!' + conversion
        if spec:
            inner += ':' + spec
        inner = partial_format(inner, **kwargs)
        is_known = field and not field.isdigit() and field in kwargs
        if is_known:
            # Single braces: substituted by the final .format() call below.
            pieces.append('{' + inner + '}')
        else:
            # Double braces: escaped so the field survives for a later pass.
            pieces.append('{{' + inner + '}}')
    return ''.join(pieces).format(**kwargs)
--------------------------------------------------------------------------------
/docs/examples/grocery/workspace/all.dot:
--------------------------------------------------------------------------------
1 | strict digraph M {
2 | subgraph "cluster_Top" {
3 | label=<Top>
4 | style="filled"
5 | fillcolor="/blues5/1"
6 | "Top" [shape=box,style="dotted,filled",fillcolor="/blues5/1",label=<Top
>];
7 | "customers" [shape=box,style="rounded,filled",fillcolor="/blues5/2",label=<
8 | customers
9 |
10 | generate_customers
>];
11 | subgraph "cluster_grocery" {
12 | label=<grocery>
13 | style="filled"
14 | fillcolor="/blues5/2"
15 | "grocery" [shape=box,style="dotted,filled",fillcolor="/blues5/2",label=<grocery
>];
16 | "grocery.bagger0" [shape=box,style="rounded,filled",fillcolor="/blues5/3",label=<
17 | bagger0
18 |
19 | policy_float_aggressive
>];
20 | subgraph "cluster_grocery.lane0" {
21 | label=<lane0..lane1>
22 | style="filled"
23 | fillcolor="/blues5/3"
24 | "grocery.lane0" [shape=box,style="dotted,filled",fillcolor="/blues5/3",label=<lane0..lane1
>];
25 | "grocery.lane0.cashier" [shape=box,style="rounded,filled",fillcolor="/blues5/4",label=<
26 | cashier
27 |
28 | checkout
>];
29 | }
30 | }
31 | }
32 |
33 | "customers" -> "grocery" [];
34 | "grocery.bagger0" -> "grocery.lane0" [];
35 | "grocery.lane0.cashier" -> "grocery.lane0" [];
36 | }
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = desmod
3 | author = Peter Grayson
4 | author_email = pete@jpgrayson.net
5 | description = Discrete Event Simulation Modeling using SimPy
6 | long_description = file: README.rst, LICENSE.txt, CHANGELOG.rst
7 | url = https://desmod.readthedocs.io/
8 | download_url = https://github.com/westerndigitalcorporation/desmod
9 | license = MIT
10 | license_file = LICENSE.txt
11 | classifiers =
12 | Development Status :: 4 - Beta
13 | Environment :: Console
14 | Intended Audience :: Developers
15 | Intended Audience :: Education
16 | Intended Audience :: Science/Research
17 | License :: OSI Approved :: MIT License
18 | Natural Language :: English
19 | Operating System :: OS Independent
20 | Programming Language :: Python
21 | Programming Language :: Python :: 3
22 | Programming Language :: Python :: 3.6
23 | Programming Language :: Python :: 3.7
24 | Programming Language :: Python :: 3.8
25 | Programming Language :: Python :: 3.9
26 | Programming Language :: Python :: Implementation :: CPython
27 | Programming Language :: Python :: Implementation :: PyPy
28 | Topic :: Scientific/Engineering
29 |
30 | [options]
31 | include_package_data = True
32 | zip_safe = False
33 | setup_requires =
34 | setuptools_scm
35 | install_requires =
36 | simpy>=4
37 | pyvcd
38 | PyYAML
39 | python_requires = >= 3.6
40 | packages = desmod
41 |
42 | [options.package_data]
43 | desmod = py.typed
44 |
45 | [bdist_wheel]
46 | universal = 1
47 |
48 | [tool:pytest]
49 |
50 | [flake8]
51 | max-line-length = 88
52 | ignore = D, E203, E501, W503
53 | exclude =
54 | build/
55 | dist/
56 | pip-wheel-metadata/
57 | docs/_build/
58 | .*/
59 | __pycache__
60 | .mypy_cache
61 |
62 | [isort]
63 | force_grid_wrap = 0
64 | from_first = True
65 | include_trailing_comma = True
66 | line_length = 88
67 | multi_line_output = 3
68 | use_parentheses = True
69 | skip_glob = .*
70 | known_third_party =
71 | colorama
72 | progressbar
73 | simpy
74 | vcd
75 | yaml
76 |
77 | [mypy]
78 | files = desmod
79 | python_version = 3.6
80 | show_error_codes = True
81 |
82 | [mypy-colorama]
83 | ignore_missing_imports = True
84 |
85 | [mypy-progressbar]
86 | ignore_missing_imports = True
87 |
88 | [mypy-vcd.*]
89 | ignore_missing_imports = True
90 |
--------------------------------------------------------------------------------
/docs/examples/grocery/workspace/result.json:
--------------------------------------------------------------------------------
1 | {
2 | "checkout_time_avg": 354.3239645800326,
3 | "checkout_time_max": 895.4542368592038,
4 | "checkout_time_min": 90.43561995182836,
5 | "config": {
6 | "bagger.bag_time": 1.5,
7 | "bagger.policy": "float-aggressive",
8 | "cashier.bag_time": 2.0,
9 | "cashier.scan_time": 2.0,
10 | "checkout.bag_area_capacity": 15,
11 | "checkout.feed_capacity": 20,
12 | "customer.arrival_interval": 60,
13 | "customer.num_items.mu": 50,
14 | "customer.num_items.sigma": 10,
15 | "customer.time_per_item": 30.0,
16 | "grocery.num_baggers": 1,
17 | "grocery.num_lanes": 2,
18 | "meta.sim.workspace": "workspace",
19 | "sim.config.file": null,
20 | "sim.db.enable": true,
21 | "sim.db.exclude_pat": [],
22 | "sim.db.file": "sim.sqlite",
23 | "sim.db.include_pat": [
24 | ".*"
25 | ],
26 | "sim.db.persist": false,
27 | "sim.db.trace_table": "trace",
28 | "sim.dot.all.file": "all.dot",
29 | "sim.dot.colorscheme": "blues5",
30 | "sim.dot.conn.file": "conn.dot",
31 | "sim.dot.enable": true,
32 | "sim.dot.hier.file": "hier.dot",
33 | "sim.duration": "7200 s",
34 | "sim.gtkw.file": "sim.gtkw",
35 | "sim.gtkw.live": false,
36 | "sim.log.buffering": -1,
37 | "sim.log.enable": true,
38 | "sim.log.exclude_pat": [],
39 | "sim.log.file": "sim.log",
40 | "sim.log.format": "{level:7} {ts:.3f} {ts_unit}: {scope}:",
41 | "sim.log.include_pat": [
42 | ".*"
43 | ],
44 | "sim.log.level": "INFO",
45 | "sim.log.persist": true,
46 | "sim.progress.enable": false,
47 | "sim.progress.max_width": null,
48 | "sim.progress.update_period": "1 s",
49 | "sim.result.file": "result.json",
50 | "sim.seed": 1234,
51 | "sim.timescale": "s",
52 | "sim.vcd.check_values": true,
53 | "sim.vcd.dump_file": "sim.vcd",
54 | "sim.vcd.enable": true,
55 | "sim.vcd.exclude_pat": [],
56 | "sim.vcd.include_pat": [
57 | ".*"
58 | ],
59 | "sim.vcd.persist": false,
60 | "sim.vcd.start_time": "",
61 | "sim.vcd.stop_time": "",
62 | "sim.workspace": "workspace",
63 | "sim.workspace.overwrite": false
64 | },
65 | "customers_per_hour": 43.5,
66 | "customers_total": 87,
67 | "sim.exception": null,
68 | "sim.now": 7200.0,
69 | "sim.runtime": 0.503598068957217,
70 | "sim.time": 7200
71 | }
--------------------------------------------------------------------------------
/desmod/timescale.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, Tuple, Union
2 | import re
3 |
4 | _unit_map = {'s': 1e0, 'ms': 1e3, 'us': 1e6, 'ns': 1e9, 'ps': 1e12, 'fs': 1e15}
5 |
6 | _num_re = r'[-+]? (?: \d*\.\d+ | \d+\.?\d* ) (?: [eE] [-+]? \d+)?'
7 |
8 | _timescale_re = re.compile(rf'(?P{_num_re})?\s?(?P [fpnum]? s)?', re.VERBOSE)
9 |
10 | TimeValue = Tuple[Union[int, float], str]
11 |
12 |
def parse_time(time_str: str, default_unit: Optional[str] = None) -> TimeValue:
    """Parse a string containing a time magnitude and optional unit.

    :param str time_str: Time string to parse.
    :param str default_unit:
        Default time unit to apply if unit is not present in `time_str`. The
        default unit is only applied if `time_str` does not specify a unit.
    :returns:
        `(magnitude, unit)` tuple where magnitude is numeric (int or float) and
        the unit string is one of "s", "ms", "us", "ns", "ps", or "fs".
    :raises ValueError:
        If the string cannot be parsed or is missing a unit specifier and no
        `default_unit` is specified.

    """
    match = _timescale_re.match(time_str)
    if not time_str or not match:
        raise ValueError(f'Invalid timescale string "{time_str}"')

    num_str = match.group('num')
    if num_str is None:
        # A bare unit ("ms") implies a magnitude of one.
        magnitude: Union[int, float] = 1
    else:
        # Prefer int so whole numbers keep their integral type.
        try:
            magnitude = int(num_str)
        except ValueError:
            magnitude = float(num_str)

    unit = match.group('unit') or default_unit
    if not unit:
        raise ValueError('No unit specified')
    return magnitude, unit
49 |
50 |
def scale_time(from_time: TimeValue, to_time: TimeValue) -> Union[int, float]:
    """Scale time values.

    :param tuple from_time: `(magnitude, unit)` tuple to be scaled.
    :param tuple to_time: `(magnitude, unit)` tuple to scale to.
    :returns: Numeric scale factor relating `from_time` to `to_time`.

    """
    from_mag, from_unit = from_time
    to_mag, to_unit = to_time

    # Same operation order as a direct expansion, so float results are
    # bit-identical: ((to_scale / from_scale) * from_mag) / to_mag.
    scaled = _unit_map[to_unit] / _unit_map[from_unit] * from_mag / to_mag

    # Collapse whole-number results to int.
    return int(scaled) if scaled % 1.0 == 0.0 else scaled
70 |
--------------------------------------------------------------------------------
/tests/test_timescale.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from desmod.timescale import parse_time, scale_time
4 |
5 |
@pytest.mark.parametrize(
    'test_input, expected',
    [
        # Integer magnitudes, signed and unsigned, with/without a space.
        ('12 s', (12, 's')),
        ('12s', (12, 's')),
        ('+12s', (12, 's')),
        ('-12s', (-12, 's')),
        # Float magnitudes, including trailing-dot and exponent forms.
        ('12.0 s', (12.0, 's')),
        ('12. s', (12.0, 's')),
        ('+12.0 s', (12.0, 's')),
        ('-12.0 s', (-12.0, 's')),
        ('12.000 s', (12.0, 's')),
        ('1.2e1 s', (12.0, 's')),
        ('1.2e+1 s', (12.0, 's')),
        ('1.2e-1 s', (0.12, 's')),
        ('-1.2e-1 s', (-0.12, 's')),
        ('12.s', (12.0, 's')),
        ('12.0s', (12.0, 's')),
        ('12.000s', (12.0, 's')),
        ('1.2e1s', (12.0, 's')),
        ('.12e+2s', (12.0, 's')),
        ('.12s', (0.12, 's')),
        # All supported SI units.
        ('12 fs', (12, 'fs')),
        ('12 ps', (12, 'ps')),
        ('12 ns', (12, 'ns')),
        ('12 us', (12, 'us')),
        ('12 ms', (12, 'ms')),
        ('12.0ms', (12.0, 'ms')),
        # A bare unit implies a magnitude of 1.
        ('s', (1, 's')),
        ('fs', (1, 'fs')),
    ],
)
def test_parse_time(test_input, expected):
    """parse_time() yields the expected (magnitude, unit) pair.

    The isinstance check verifies the type contract: integral magnitudes
    stay int, fractional ones come back as float.
    """
    m, u = parse_time(test_input)
    assert (m, u) == expected
    assert isinstance(m, type(expected[0]))
42 |
43 |
@pytest.mark.parametrize(
    'test_input',
    [
        '',  # empty string rejected outright
        '123 s',  # NOTE(review): as shown this would parse; the original
        # entry presumably contains extra whitespace — confirm
        '123',  # magnitude without a unit and without default_unit
        '123.0',
        '123 S',  # units are case-sensitive
        '123 Ms',  # unsupported SI prefix
        '123e1.3 s',  # malformed exponent
        '+-123 s',  # conflicting signs
        '123 ks',  # unsupported SI prefix
        '. s',  # a lone dot is not a number
        '1-.1 s',
        '1e1.2 s',
    ],
)
def test_parse_time_except(test_input):
    """Malformed inputs raise ValueError from the parser itself."""
    with pytest.raises(ValueError) as exc_info:
        parse_time(test_input)
    # The message must come from parse_time's own validation, not from a
    # leaked float() conversion error.
    assert 'float' not in str(exc_info.value)
65 |
66 |
def test_parse_time_default():
    """A magnitude-only string falls back to the supplied default unit."""
    magnitude, unit = parse_time('123', default_unit='ms')
    assert magnitude == 123
    assert unit == 'ms'
69 |
70 |
@pytest.mark.parametrize(
    'input_t, input_tscale, expected',
    [
        ((1, 'us'), (1, 'us'), 1),
        ((1, 'us'), (10, 'us'), 0.1),
        ((1000, 'us'), (1, 'ms'), 1),
        ((1, 'us'), (100, 'ms'), 1e-5),
        ((50, 'ms'), (1, 'ns'), 50000000),
        ((5.2, 'ms'), (1, 'us'), 5200),
    ],
)
def test_scale_time(input_t, input_tscale, expected):
    """scale_time() relates two (magnitude, unit) pairs.

    The isinstance check enforces the type contract: whole-number results
    are returned as int, fractional results as float.
    """
    scaled = scale_time(input_t, input_tscale)
    assert expected == scaled
    assert isinstance(scaled, type(expected))
86 |
--------------------------------------------------------------------------------
/tests/test_dot.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 |
5 | from desmod.component import Component
6 | from desmod.dot import component_to_dot, generate_dot
7 | from desmod.simulation import SimEnvironment
8 |
9 | pytestmark = pytest.mark.usefixtures('cleandir')
10 |
11 |
@pytest.fixture
def cleandir(tmpdir):
    """Run the test from inside a fresh temporary working directory.

    The original cwd is restored in a ``finally`` block so it is repaired
    even if the fixture generator is closed early or teardown is
    interrupted (the original restore could be skipped in that case).
    """
    origin = os.getcwd()
    tmpdir.chdir()
    try:
        yield None
    finally:
        os.chdir(origin)
18 |
19 |
@pytest.fixture
def top():
    """Build and elaborate the example component tree."""
    root = Top(parent=None, env=SimEnvironment(config={}))
    root.elaborate()
    return root
25 |
26 |
class Top(Component):
    """Root component: one A child plus five B children wired to it."""

    base_name = ''

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.a = A(self)
        self.bs = [B(self, index=n) for n in range(5)]

    def connect_children(self):
        # Give every B child access to the shared 'a' component.
        for child in self.bs:
            self.connect(child, 'a')
38 |
39 |
class A(Component):
    # Passive component; serves as the connection target named 'a'.
    base_name = 'a'
42 |
43 |
class B(Component):
    # Leaf component: declares a connection to 'a' and one process.
    base_name = 'b'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_connections('a')
        self.add_process(self.my_proc)

    def my_proc(self):
        # Minimal process body; excluded from coverage since these tests
        # never advance the simulation.
        yield self.env.timeout(1)  # pragma: no coverage
54 |
55 |
def test_hierarchy_only(top):
    """Hierarchy-only output still names every component instance."""
    rendered = component_to_dot(top, show_connections=False, show_processes=False)
    for expected in ('"a"', '"b0"'):
        assert expected in rendered
60 |
61 |
def test_connections_only(top):
    """Connection-only output contains the b0 -> a edge."""
    rendered = component_to_dot(top, show_hierarchy=False, show_processes=False)
    assert '"b0" -> "a"' in rendered
65 |
66 |
def test_processes_only(top):
    """Process-only output names the component processes."""
    rendered = component_to_dot(top, show_hierarchy=False, show_connections=False)
    assert 'my_proc' in rendered
70 |
71 |
def test_all(top):
    """Full output combines hierarchy, connections, and processes."""
    rendered = component_to_dot(top, colorscheme='blues9')
    for expected in ('my_proc', '"a"', '"b0"', '"b0" -> "a"'):
        assert expected in rendered
78 |
79 |
@pytest.mark.parametrize(
    'key',
    [
        'sim.dot.enable',
        'sim.dot.colorscheme',
        'sim.dot.all.file',
        'sim.dot.hier.file',
        'sim.dot.conn.file',
    ],
)
def test_generate_dot(top, key):
    """generate_dot() installs its config defaults without writing files.

    The config starts empty, so generation is presumably disabled by
    default — the assertions below require that none of the configured
    output files were actually created.
    """
    assert key not in top.env.config
    generate_dot(top)
    # generate_dot() must have populated a default for this key.
    assert key in top.env.config
    files = os.listdir(os.curdir)
    for key in top.env.config:
        if key.startswith('sim.dot.') and key.endswith('.file'):
            assert top.env.config[key] not in files
98 |
99 |
@pytest.mark.parametrize(
    'key', ['sim.dot.all.file', 'sim.dot.hier.file', 'sim.dot.conn.file']
)
def test_generate_dot_file_enables(top, key):
    """With dot output enabled, only .dot files appear in the cwd.

    One output filename is blanked per parametrization; whatever files
    remain must all carry the .dot extension.
    """
    top.env.config['sim.dot.enable'] = True
    top.env.config[key] = ''
    generate_dot(top)
    assert all(name.endswith('.dot') for name in os.listdir(os.curdir))
108 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | tags: [ '*' ]
7 | pull_request:
8 | branches: [ master ]
9 |
10 | jobs:
11 | build:
12 | runs-on: ubuntu-latest
13 |
14 | steps:
15 | - uses: actions/checkout@v2
16 | - run: git fetch --prune --unshallow origin +refs/tags/*:refs/tags/*
17 | - name: Set up Python
18 | uses: actions/setup-python@v2
19 | with:
20 | python-version: '3.9'
21 |
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install -U pip
25 | python -m pip install setuptools_scm wheel
26 |
27 | - name: Build
28 | run: |
29 | python setup.py sdist bdist_wheel
30 |
31 | - name: Upload dist
32 | uses: actions/upload-artifact@v2
33 | with:
34 | name: desmod-dist
35 | path: "dist/*"
36 |
37 | test:
38 | runs-on: ubuntu-latest
39 | strategy:
40 | matrix:
41 | python-version: [3.6, 3.7, 3.8, 3.9, pypy3]
42 |
43 | steps:
44 | - uses: actions/checkout@v2
45 | - name: Set up Python ${{ matrix.python-version }}
46 | uses: actions/setup-python@v1
47 | with:
48 | python-version: ${{ matrix.python-version }}
49 |
50 | - name: Install dependencies
51 | run: |
52 | python -m pip install -U pip
53 | python -m pip install -e . -r requirements.txt
54 |
55 | - name: Test
56 | run: |
57 | timeout 120s make test
58 |
59 | lint:
60 | runs-on: ubuntu-latest
61 | steps:
62 | - uses: actions/checkout@v2
63 | - name: Set up Python
64 | uses: actions/setup-python@v2
65 | with:
66 | python-version: '3.9'
67 | - name: Install dependencies
68 | run: |
69 | python -m pip install -U pip
70 | python -m pip install -e . -r requirements.txt
71 | - name: Lint
72 | run: |
73 | make lint
74 |
75 | coverage:
76 | runs-on: ubuntu-latest
77 | steps:
78 | - uses: actions/checkout@v2
79 | - name: Set up Python
80 | uses: actions/setup-python@v2
81 | with:
82 | python-version: '3.9'
83 | - name: Install dependencies
84 | run: |
85 | python -m pip install -U pip
86 | python -m pip install -e . -r requirements.txt coveralls
87 | - name: Coverage
88 | env:
89 | COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
90 | run: |
91 | make coverage && coveralls
92 |
93 | publish:
94 | name: Publish to PyPI
95 | needs: build
96 | runs-on: ubuntu-latest
97 |
98 | steps:
99 | - name: Download dist build
100 | uses: actions/download-artifact@v1
101 | with:
102 | name: desmod-dist
103 | - name: Print dists
104 | run: |
105 | ls desmod-dist/
106 | - name: Publish dists
107 | if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
108 | uses: pypa/gh-action-pypi-publish@master
109 | with:
110 | user: __token__
111 | password: ${{ secrets.pypi_password }}
112 | packages_dir: desmod-dist
113 |
--------------------------------------------------------------------------------
/desmod/__init__.py:
--------------------------------------------------------------------------------
1 | """Full-featured, high-level modeling using `SimPy`__.
2 |
3 | __ https://simpy.readthedocs.io/en/latest/contents.html
4 |
5 | The `desmod` package provides a variety of tools for composing, configuring,
6 | running, monitoring, and analyzing discrete event simulation (DES) models. It
7 | builds on top of the :mod:`simpy` simulation kernel, providing features useful
8 | for building large-scale models which are out-of-scope for :mod:`simpy` itself.
9 |
10 | An understanding of SimPy is required to use desmod effectively.
11 |
12 | Components
13 | ==========
14 |
15 | The primary building-block for `desmod` models is the
16 | :class:`~desmod.component.Component` class. Components provide a means for
17 | partitioning the system to be modeled into manageable pieces. Components can
18 | play a structural role by parenting other components; or play a behavioral role
19 | by having processes and connections to other components; or sometimes play both
20 | roles at once.
21 |
22 | The :func:`desmod.dot.component_to_dot()` function may be used to create a
23 | `DOT`__ language representation of the component hierarchy and/or the component
24 | connection graph. The resulting DOT representation may be rendered to a variety
25 | of graphical formats using `GraphViz`__ tools.
26 |
27 | __ http://graphviz.org/content/dot-language
28 | __ http://graphviz.org/
29 |
30 | Configuration
31 | =============
32 |
It is common for models to have configurable parameters. Desmod provides an
34 | opinionated mechanism for simulation configuration. A single, comprehensive
35 | configuration dictionary captures all configuration for the simulation. The
configuration dictionary is propagated to all Components via the
37 | :class:`~desmod.simulation.SimEnvironment`.
38 |
39 | The various components (or component hierarchies) may maintain separate
40 | configuration namespaces within the configuration dictionary by use of keys
41 | conforming to the dot-separated naming convention. For example,
42 | "mymodel.compA.cfgitem".
43 |
44 | The :mod:`desmod.config` module provides various functionality useful for
45 | managing configuration dictionaries.
46 |
47 | Simulation
48 | ==========
49 |
50 | Desmod takes care of the details of running simulations to allow focus on the
51 | act of modeling.
52 |
53 | Running a simulation is accomplished with either
54 | :func:`~desmod.simulation.simulate()` or
:func:`~desmod.simulation.simulate_factors()`, depending on whether running a
56 | single simulation or a multi-factor set of simulations. In either case, the key
57 | ingredients are the configuration dict and the model's top-level
58 | :class:`~desmod.component.Component`. The :func:`~desmod.simulation.simulate()`
59 | function takes responsibility for taking the simulation through its various
60 | phases:
61 |
62 | - *Initialization*: where the components' `__init__()` methods are called.
63 | - *Elaboration*: where inter-component connections are made and components'
64 | processes are started.
65 | - *Simulation*: where discrete event simulation occurs.
66 | - *Post-simulation*: where simulation results are gathered.
67 |
68 | # TODO: simulation results
69 |
70 | Monitoring
71 | ==========
72 |
73 | # TODO: tracers, probes, logging, etc.
74 |
75 | """
76 |
77 | __all__ = ()
78 |
--------------------------------------------------------------------------------
/tests/test_probe.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import simpy
3 |
4 | from desmod.pool import Pool, PriorityPool
5 | from desmod.probe import attach
6 | from desmod.queue import Queue
7 |
8 |
@pytest.fixture
def env():
    """Provide a fresh simpy.Environment for each test."""
    return simpy.Environment()
12 |
13 |
def test_attach_bad_type(env):
    """attach() rejects targets it has no probe strategy for."""
    traced = []
    with pytest.raises(TypeError):
        attach('scope', 'a string', [traced.append])
18 |
19 |
def test_attach_method():
    """Probing a bound method records each call's return value."""
    observed = []

    class Counter:
        def __init__(self):
            self.x = 0

        def doit(self):
            self.x += 1
            return self.x

    counter = Counter()
    attach('scope', counter.doit, [observed.append])
    for _ in range(3):
        counter.doit()
    assert observed == [1, 2, 3]
37 |
38 |
def test_attach_container(env):
    """Container probe reports the level after each put/get."""
    levels = []
    container = simpy.Container(env)
    attach('scope', container, [levels.append])

    def exercise():
        yield container.put(2)
        yield container.get(1)

    env.process(exercise())
    env.run()
    assert levels == [2, 1]
51 |
52 |
def test_attach_store(env):
    """Store probe reports the item count after each put/get."""
    counts = []
    store = simpy.Store(env)
    attach('scope', store, [counts.append])

    def exercise():
        yield store.put('item0')
        yield store.put('item1')
        yield store.put('item2')
        item = yield store.get()
        assert item == 'item0'

    env.process(exercise())
    env.run()
    # Three puts grow the store to 3; the get drops it back to 2.
    assert counts == [1, 2, 3, 2]
68 |
69 |
def test_attach_resource_users(env):
    """Default resource probe traces len(resource.users)."""
    values = []
    resource = simpy.Resource(env, capacity=3)
    attach('scope', resource, [values.append])

    def proc():
        # Nested requests: the asserted sequence [1, 2, 1, 2, 1, 0] is only
        # reachable when the second and third requests are made while the
        # first is still held (the flat indentation in the dumped source
        # appears to be an extraction artifact).
        with resource.request() as req:
            yield req
            with resource.request() as req:
                yield req
            with resource.request() as req:
                yield req

    env.process(proc())
    env.run()
    assert values == [1, 2, 1, 2, 1, 0]
86 |
87 |
def test_attach_resource_queue(env):
    """With trace_queue=True the probe traces len(resource.queue)."""
    values = []
    resource = simpy.Resource(env)
    attach('scope', resource, [values.append], trace_queue=True)

    def proc(t):
        with resource.request() as req:
            yield req
            yield env.timeout(t)

    env.process(proc(1))
    env.process(proc(2))
    env.process(proc(3))
    env.run()
    # Queue depth as the three contenders pile up and then drain.
    # NOTE(review): the exact sequence reflects simpy's request/release
    # event ordering — confirm against simpy.Resource semantics.
    assert values == [0, 1, 2, 1, 0]
103 |
104 |
def test_attach_queue_size(env):
    """Default Queue probe traces queue.size."""
    sizes = []
    queue = Queue(env)
    attach('scope', queue, [sizes.append])

    def exercise():
        yield queue.put('item0')
        yield queue.put('item1')
        yield queue.put('item2')
        item = yield queue.get()
        assert item == 'item0'

    env.process(exercise())
    env.run()
    assert sizes == [1, 2, 3, 2]
120 |
121 |
def test_attach_queue_remaining(env):
    """With trace_remaining=True the probe traces queue.remaining."""
    remaining = []
    queue = Queue(env, capacity=10)

    attach('scope', queue, [remaining.append], trace_remaining=True)

    def exercise():
        yield queue.put('item0')
        yield queue.put('item1')
        yield queue.put('item2')
        item = yield queue.get()
        assert item == 'item0'

    env.process(exercise())
    env.run()
    # Capacity 10 minus sizes 1, 2, 3, 2 after each operation.
    assert remaining == [9, 8, 7, 8]
138 |
139 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_attach_pool_level(env, PoolClass):
    """Default pool probe traces pool.level for both pool flavors."""
    levels = []
    pool = PoolClass(env)
    attach('scope', pool, [levels.append])

    def exercise():
        yield pool.put(1)
        yield pool.put(1)
        yield pool.put(1)
        amount = yield pool.get(1)
        assert amount == 1

    env.process(exercise())
    env.run()
    assert levels == [1, 2, 3, 2]
156 |
157 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_attach_pool_remaining(env, PoolClass):
    """With trace_remaining=True the probe traces pool.remaining."""
    remaining = []
    pool = PoolClass(env, capacity=10)

    attach('scope', pool, [remaining.append], trace_remaining=True)

    def exercise():
        yield pool.put(1)
        yield pool.put(1)
        yield pool.put(1)
        amount = yield pool.get(3)
        assert amount == 3

    env.process(exercise())
    env.run()
    # Capacity 10 minus levels 1, 2, 3; the get(3) empties the pool.
    assert remaining == [9, 8, 7, 10]
175 |
--------------------------------------------------------------------------------
/desmod/probe.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 | from types import MethodType
3 | from typing import Any, Callable, Iterable, Union
4 |
5 | import simpy
6 |
7 | from desmod.pool import Pool
8 | from desmod.queue import ItemType, Queue
9 |
# A probe callback receives each newly-observed value.
ProbeCallback = Callable[[Any], None]
ProbeCallbacks = Iterable[ProbeCallback]
# Every object type that attach() knows how to instrument.
ProbeTarget = Union[
    Pool, Queue[ItemType], simpy.Resource, simpy.Store, simpy.Container, MethodType
]
15 |
16 |
def attach(
    scope: str, target: ProbeTarget, callbacks: ProbeCallbacks, **hints: Any
) -> None:
    """Instrument *target* so each state change is reported to *callbacks*.

    The isinstance chain dispatches to a type-specific probe; *hints*
    select an alternate trace where one exists (``trace_queue`` for
    resources, ``trace_remaining`` for queues and pools).

    :raises TypeError: if *target* is not a supported probe target.
    """
    if isinstance(target, MethodType):
        _attach_method(target, callbacks)
    elif isinstance(target, simpy.Container):
        _attach_container_level(target, callbacks)
    elif isinstance(target, simpy.Store):
        _attach_store_items(target, callbacks)
    elif isinstance(target, simpy.Resource):
        if hints.get('trace_queue'):
            probe = _attach_resource_queue
        else:
            probe = _attach_resource_users
        probe(target, callbacks)
    elif isinstance(target, Queue):
        probe = (
            _attach_queue_remaining
            if hints.get('trace_remaining', False)
            else _attach_queue_size
        )
        probe(target, callbacks)
    elif isinstance(target, Pool):
        probe = (
            _attach_pool_remaining
            if hints.get('trace_remaining', False)
            else _attach_pool_level
        )
        probe(target, callbacks)
    else:
        raise TypeError(f'Cannot probe {scope} of type {type(target)}')
43 |
44 |
def _attach_method(method: MethodType, callbacks: ProbeCallbacks) -> None:
    """Replace *method* on its instance with a wrapper reporting returns."""

    @wraps(method)
    def wrapper(*args, **kwargs):
        result = method(*args, **kwargs)
        for callback in callbacks:
            callback(result)
        return result

    # Shadow the bound method via an instance attribute of the same name.
    setattr(method.__self__, method.__func__.__name__, wrapper)
57 |
58 |
def _attach_container_level(
    container: simpy.Container, callbacks: ProbeCallbacks
) -> None:
    """Report container._level to callbacks whenever a put/get changes it."""

    def instrument(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            before = container._level
            result = func(*args, **kwargs)
            after = container._level
            # Only report actual level changes, not no-op triggers.
            if after != before:
                for callback in callbacks:
                    callback(after)
            return result

        return wrapper

    container._do_get = instrument(container._do_get)  # type: ignore
    container._do_put = instrument(container._do_put)  # type: ignore
77 |
78 |
def _attach_store_items(store: simpy.Store, callbacks: ProbeCallbacks) -> None:
    """Report len(store.items) to callbacks whenever a put/get changes it."""

    def instrument(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            before = len(store.items)
            result = func(*args, **kwargs)
            after = len(store.items)
            # Only report actual item-count changes, not no-op triggers.
            if after != before:
                for callback in callbacks:
                    callback(after)
            return result

        return wrapper

    store._do_get = instrument(store._do_get)  # type: ignore
    store._do_put = instrument(store._do_put)  # type: ignore
95 |
96 |
def _attach_resource_users(resource: simpy.Resource, callbacks: ProbeCallbacks) -> None:
    # Probe the number of current users: wraps the resource's internal
    # _do_get/_do_put so callbacks see len(resource.users) when it changes.
    def make_wrapper(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            old_users = len(resource.users)
            ret = func(*args, **kwargs)
            new_users = len(resource.users)
            # Only report actual changes, not no-op triggers.
            if new_users != old_users:
                for callback in callbacks:
                    callback(new_users)
            return ret

        return wrapper

    resource._do_get = make_wrapper(resource._do_get)  # type: ignore
    resource._do_put = make_wrapper(resource._do_put)  # type: ignore
113 |
114 |
def _attach_resource_queue(resource: simpy.Resource, callbacks: ProbeCallbacks) -> None:
    # Probe the wait-queue depth: callbacks receive len(resource.queue)
    # whenever it changes.
    def make_wrapper(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            old_queue = len(resource.queue)
            ret = func(*args, **kwargs)
            new_queue = len(resource.queue)
            if new_queue != old_queue:
                for callback in callbacks:
                    callback(new_queue)
            return ret

        return wrapper

    # NOTE(review): unlike the other probes this wraps request() and
    # _trigger_put() rather than _do_get/_do_put — presumably the two
    # points where the wait queue grows and drains; confirm against
    # simpy.Resource internals.
    resource.request = make_wrapper(resource.request)  # type: ignore
    resource._trigger_put = make_wrapper(resource._trigger_put)  # type: ignore
131 |
132 |
def _attach_queue_size(queue: Queue[ItemType], callbacks: ProbeCallbacks) -> None:
    """Invoke callbacks with queue.size after every put and get."""

    def report() -> None:
        for callback in callbacks:
            callback(queue.size)

    queue._put_hook = report
    queue._get_hook = report
139 |
140 |
def _attach_queue_remaining(queue: Queue[ItemType], callbacks: ProbeCallbacks) -> None:
    """Invoke callbacks with queue.remaining after every put and get."""

    def report() -> None:
        for callback in callbacks:
            callback(queue.remaining)

    queue._put_hook = report
    queue._get_hook = report
147 |
148 |
def _attach_pool_level(pool: Pool, callbacks: ProbeCallbacks) -> None:
    """Invoke callbacks with pool.level after every put and get."""

    def report() -> None:
        for callback in callbacks:
            callback(pool.level)

    pool._put_hook = report
    pool._get_hook = report
155 |
156 |
def _attach_pool_remaining(pool: Pool, callbacks: ProbeCallbacks) -> None:
    """Invoke callbacks with pool.remaining after every put and get."""

    def report() -> None:
        for callback in callbacks:
            callback(pool.remaining)

    pool._put_hook = report
    pool._get_hook = report
163 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 |
2 | # Contributor Covenant Code of Conduct
3 |
4 | ## Our Pledge
5 |
6 | We as members, contributors, and leaders pledge to make participation in our
7 | community a harassment-free experience for everyone, regardless of age, body
8 | size, visible or invisible disability, ethnicity, sex characteristics, gender
9 | identity and expression, level of experience, education, socio-economic status,
10 | nationality, personal appearance, race, caste, color, religion, or sexual identity
11 | and orientation.
12 |
13 | We pledge to act and interact in ways that contribute to an open, welcoming,
14 | diverse, inclusive, and healthy community.
15 |
16 | ## Our Standards
17 |
18 | Examples of behavior that contributes to a positive environment for our
19 | community include:
20 |
21 | * Demonstrating empathy and kindness toward other people
22 | * Being respectful of differing opinions, viewpoints, and experiences
23 | * Giving and gracefully accepting constructive feedback
24 | * Accepting responsibility and apologizing to those affected by our mistakes,
25 | and learning from the experience
26 | * Focusing on what is best not just for us as individuals, but for the
27 | overall community
28 |
29 | Examples of unacceptable behavior include:
30 |
31 | * The use of sexualized language or imagery, and sexual attention or
32 | advances of any kind
33 | * Trolling, insulting or derogatory comments, and personal or political attacks
34 | * Public or private harassment
35 | * Publishing others' private information, such as a physical or email
36 | address, without their explicit permission
37 | * Other conduct which could reasonably be considered inappropriate in a
38 | professional setting
39 |
40 | ## Enforcement Responsibilities
41 |
42 | Community leaders are responsible for clarifying and enforcing our standards of
43 | acceptable behavior and will take appropriate and fair corrective action in
44 | response to any behavior that they deem inappropriate, threatening, offensive,
45 | or harmful.
46 |
47 | Community leaders have the right and responsibility to remove, edit, or reject
48 | comments, commits, code, wiki edits, issues, and other contributions that are
49 | not aligned to this Code of Conduct, and will communicate reasons for moderation
50 | decisions when appropriate.
51 |
52 | ## Scope
53 |
54 | This Code of Conduct applies within all community spaces, and also applies when
55 | an individual is officially representing the community in public spaces.
56 | Examples of representing our community include using an official e-mail address,
57 | posting via an official social media account, or acting as an appointed
58 | representative at an online or offline event.
59 |
60 | ## Enforcement
61 |
62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
63 | reported to the community leaders responsible for enforcement at
64 | [Open Source Inquiries][contact].
65 | All complaints will be reviewed and investigated promptly and fairly.
66 |
67 | All community leaders are obligated to respect the privacy and security of the
68 | reporter of any incident.
69 |
70 | ## Enforcement Guidelines
71 |
72 | Community leaders will follow these Community Impact Guidelines in determining
73 | the consequences for any action they deem in violation of this Code of Conduct:
74 |
75 | ### 1. Correction
76 |
77 | **Community Impact**: Use of inappropriate language or other behavior deemed
78 | unprofessional or unwelcome in the community.
79 |
80 | **Consequence**: A private, written warning from community leaders, providing
81 | clarity around the nature of the violation and an explanation of why the
82 | behavior was inappropriate. A public apology may be requested.
83 |
84 | ### 2. Warning
85 |
86 | **Community Impact**: A violation through a single incident or series
87 | of actions.
88 |
89 | **Consequence**: A warning with consequences for continued behavior. No
90 | interaction with the people involved, including unsolicited interaction with
91 | those enforcing the Code of Conduct, for a specified period of time. This
92 | includes avoiding interactions in community spaces as well as external channels
93 | like social media. Violating these terms may lead to a temporary or
94 | permanent ban.
95 |
96 | ### 3. Temporary Ban
97 |
98 | **Community Impact**: A serious violation of community standards, including
99 | sustained inappropriate behavior.
100 |
101 | **Consequence**: A temporary ban from any sort of interaction or public
102 | communication with the community for a specified period of time. No public or
103 | private interaction with the people involved, including unsolicited interaction
104 | with those enforcing the Code of Conduct, is allowed during this period.
105 | Violating these terms may lead to a permanent ban.
106 |
107 | ### 4. Permanent Ban
108 |
109 | **Community Impact**: Demonstrating a pattern of violation of community
110 | standards, including sustained inappropriate behavior, harassment of an
111 | individual, or aggression toward or disparagement of classes of individuals.
112 |
113 | **Consequence**: A permanent ban from any sort of public interaction within
114 | the community.
115 |
116 | ## Attribution
117 |
118 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
119 | version 2.0, available at
120 | [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
121 |
122 | Community Impact Guidelines were inspired by
123 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC].
124 |
125 | For answers to common questions about this code of conduct, see the FAQ at
126 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available
127 | at [https://www.contributor-covenant.org/translations][translations].
128 |
129 | [homepage]: https://www.contributor-covenant.org
130 | [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
131 | [Mozilla CoC]: https://github.com/mozilla/diversity
132 | [FAQ]: https://www.contributor-covenant.org/faq
133 | [translations]: https://www.contributor-covenant.org/translations
134 | [contact]: https://www.westerndigital.com/contact/contact-open-source
135 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | desmod-0.6.1 (2020-04-16)
5 | -------------------------
6 | * [FIX] Pool when_not_full and when_not_empty broken epsilon
7 | * [FIX] Typing for SimEnvironment.time()
8 | * [FIX] Typing for __exit__() methods
9 |
10 | desmod-0.6.0 (2020-04-07)
11 | -------------------------
12 | * [BREAK] Drop support for Python < 3.6
13 | * [NEW] Inline type annotations
14 | * [FIX] Use yaml.safe_load() in tests
15 |
16 | desmod-0.5.6 (2019-02-12)
17 | -------------------------
18 | * [NEW] PriorityPool for prioritized get/put requests
19 | * [NEW] Queue.when_at_most() and when_at_least() events (#18)
20 | * [NEW] Pool.when_at_most() and when_at_least() events (#18)
21 | * [CHANGE] Remove Queue.when_new() event
22 | * [CHANGE] Gas station example uses Pool/Pool.when_at_most() (#18)
23 | * [FIX] Add API docs for desmod.pool
24 |
25 | desmod-0.5.5 (2018-12-19)
26 | -------------------------
27 | * [NEW] Add Queue.when_not_full() and Pool.when_not_full()
28 | * [NEW] Context manager protocol for Queue and Pool
29 | * [CHANGE] Pool checks validity of get/put amounts
30 | * [CHANGE] Pool getters/putters are not strictly FIFO
31 | * [CHANGE] __repr__() for Queue and Pool
32 | * [FIX] Pool no longer allows capacity to be exceeded
33 | * [FIX] Pool and Queue trigger all getters and putters
34 | * [FIX] Pool and Queue trigger from callbacks
35 | * [FIX] Repair deprecated import from collections
36 | * [FIX] Various Pool docstrings
37 | * [FIX] Complete unit test coverage for Queue and Pool
38 |
39 | desmod-0.5.4 (2018-08-20)
40 | -------------------------
41 | * [NEW] Add desmod.pool.Pool for modeling pool of resources
42 |
43 | desmod-0.5.3 (2018-05-25)
44 | -------------------------
45 | * [FIX] Repair silent truncation of config override
46 | * [CHANGE] Update dev requirements
47 | * [CHANGE] Do not use bare except
48 | * [CHANGE] Modernize travis-ci config
49 |
50 | desmod-0.5.2 (2017-09-08)
51 | -------------------------
52 | * [FIX] Join worker processes in simulate_many()
53 | * [FIX] Ensure PriorityQueue's items are heapified
54 |
55 | desmod-0.5.1 (2017-04-27)
56 | -------------------------
57 | * [NEW] Add config_filter param to simulate_factors() (#14)
58 | * [FIX] Use pyenv for travis builds
59 |
60 | desmod-0.5.0 (2017-04-27)
61 | -------------------------
62 | * [NEW] Add desmod.dot.generate_dot()
63 | * [NEW] Add "persist" option for tracers
64 | * [NEW] Add SQLiteTracer
65 | * [NEW] Add grocery store example
66 | * [NEW] Support probing a Resource's queue
67 | * [FIX] Stable sort order in DOT generation
68 | * [CHANGE] Rearrange doc index page
69 | * [CHANGE] Change examples hierarchy
70 | * [CHANGE] Add DOT to Gas Station example
71 | * [CHANGE] Tests and cleanup for desmod.probe
72 |
73 | desmod-0.4.0 (2017-03-20)
74 | -------------------------
75 | * [CHANGE] meta.sim.index and meta.sim.special
76 | * [CHANGE] Add meta.sim.workspace
77 | * [FIX] Check simulate_many() jobs
78 | * [CHANGE] Add named configuration categories and doc strings
79 |
80 | desmod-0.3.3 (2017-02-28)
81 | -------------------------
82 | * [CHANGE] Make NamedManager.name() deps argument optional
83 | * [FIX] Add test for desmod.config.parse_user_factors()
84 | * [FIX] More testing for tracer.py
85 |
86 | desmod-0.3.2 (2017-02-24)
87 | -------------------------
88 | * [FIX] Documentation repairs for desmod.config
89 | * [FIX] Add tests for sim.config.file
90 | * [FIX] Annotate no coverage line in test_dot.py
91 | * [NEW] Add desmod.config.apply_user_config()
92 | * [NEW] Support dumping JSON or Python config and result
93 |
94 | desmod-0.3.1 (2017-02-10)
95 | -------------------------
96 | * [NEW] Add sim.vcd.start_time and sim.vcd.stop_time
97 | * [NEW] Add unit tests for desmod.tracer
98 | * [NEW] Dump configuration to file in workspace
99 | * [NEW] Add unit tests for desmod.dot
100 | * [FIX] Use component scope instead of id() for DOT nodes
101 | * [NEW] Colored component hierarchy in DOT
102 | * [FIX] Repair typo in fuzzy_match() exception
103 |
104 | desmod-0.3.0 (2017-01-23)
105 | -------------------------
106 | * [CHANGE] Overhaul progress display
107 | * [NEW] Flexible control of simulation stop criteria
108 | * [FIX] Support progress notification on spawned processes
109 | * [FIX] Remove dead path in test_simulation.py
110 | * [FIX] Various doc repairs to SimEnvironment
111 | * [CHANGE] Add t parameter to SimEnvironment.time()
112 | * [CHANGE] Parse unit in SimEnvironment.time()
113 | * [NEW] Add desmod.config.fuzzy_match()
114 | * [REMOVE] Remove desmod.config.short_special()
115 | * [NEW] Add coveralls to travis test suite
116 | * [NEW] Add flush() to tracing subsystem
117 | * [CHANGE] Do not use tox with travis
118 | * [NEW] Add Python 3.6 support in travis
119 | * [FIX] Repair gas_station.py for Python 2
120 |
121 | desmod-0.2.0 (2016-10-25)
122 | -------------------------
123 | * [CHANGE] simulate_factors() now has factors parameter
124 | * [NEW] simulate() can suppress exceptions
125 | * [FIX] simulate_factors() respects sim.workspace.overwrite
126 | * [CHANGE] Update config with missing defaults at runtime
127 |
128 | desmod-0.1.6 (2016-10-25)
129 | -------------------------
130 | * [NEW] Add env.time() and 'sim.now' result
131 | * [FIX] Enter workspace directory before instantiating env
132 | * [CHANGE] Use yaml.safe_dump()
133 | * [FIX] Add dist to .gitignore
134 | * [FIX] Squash warning in setup.cfg
135 |
136 | desmod-0.1.5 (2016-10-17)
137 | -------------------------
138 | * [NEW] Add Queue.size and Queue.remaining properties (#9)
139 | * [NEW] Trace Queue's remaining capacity (#10)
140 | * [NEW] Add Queue.when_new() event (#11)
141 |
142 | desmod-0.1.4 (2016-09-21)
143 | -------------------------
144 | * [NEW] Add desmod.simulation.simulate_many()
145 | * [FIX] Repair various docstring typos
146 | * [FIX] Disable progress bar for simulate_factors() on Windows
147 | * [NEW] Add CHANGELOG.txt to long description in setup.py
148 |
149 | desmod-0.1.3 (2016-07-28)
150 | -------------------------
151 | * [NEW] Cancelable Queue events
152 | * [CHANGE] Connection errors now raise ConnectError
153 | * [FIX] Update pytest-flake8 and flake8 dependencies (yet again)
154 |
155 | desmod-0.1.2 (2016-07-26)
156 | -------------------------
157 | * [NEW] Add "sim.log.buffering" configuration
158 | * [FIX] Repair unit tests (pytest-flake8 dependency)
159 | * [NEW] New optional `Queue.name` attribute
160 | * [FIX] Use `repr()` for exception string in result dict
161 |
162 | desmod-0.1.1 (2016-07-14)
163 | -------------------------
164 | * [FIX] Using 'True' and 'False' in expressions from the command line
165 | * [CHANGE] Improve simulation workspace handling (sim.workspace.overwrite)
166 | * [CHANGE] Make some 'sim.xxx' configuration keys optional
167 | * [NEW] Gas Station example in docs
168 | * [NEW] Add this CHANGELOG.rst and History page in docs
169 |
170 | desmod-0.1.0 (2016-07-06)
171 | -------------------------
172 | * Initial public release
173 |
--------------------------------------------------------------------------------
/docs/examples/gas_station/workspace/sim.log:
--------------------------------------------------------------------------------
1 | INFO 0.000 s: tankerco.truck0 : going for refill
2 | INFO 0.000 s: tankerco.truck1 : going for refill
3 | INFO 1.520 s: station1 : car0 awaiting pump
4 | INFO 1.520 s: station1 : car0 at pump
5 | INFO 15.520 s: station1 : car0 pumped 28L in 14s
6 | INFO 19.297 s: station2 : car0 awaiting pump
7 | INFO 19.297 s: station2 : car0 at pump
8 | INFO 24.755 s: station2 : car1 awaiting pump
9 | INFO 24.755 s: station2 : car1 at pump
10 | INFO 25.259 s: tankerco.truck0 : refilling
11 | INFO 26.693 s: station2 : car2 awaiting pump
12 | INFO 35.297 s: station2 : car0 pumped 32L in 16s
13 | INFO 35.297 s: station2 : car2 at pump
14 | INFO 41.496 s: station2 : car3 awaiting pump
15 | INFO 45.259 s: tankerco.truck0 : refilled 200L in 20s
16 | INFO 46.255 s: station2 : car1 pumped 43L in 22s
17 | INFO 46.255 s: station2 : car3 at pump
18 | INFO 49.797 s: station2 : car2 pumped 29L in 14s
19 | INFO 60.255 s: station2 : car3 pumped 28L in 14s
20 | INFO 61.204 s: station0 : car0 awaiting pump
21 | INFO 61.204 s: station0 : car0 at pump
22 | INFO 69.270 s: station1 : car1 awaiting pump
23 | INFO 69.270 s: station1 : car1 at pump
24 | INFO 73.704 s: station0 : car0 pumped 25L in 13s
25 | INFO 74.505 s: station0 : car1 awaiting pump
26 | INFO 74.505 s: station0 : car1 at pump
27 | INFO 85.270 s: station1 : car1 pumped 32L in 16s
28 | INFO 88.005 s: station0 : car1 pumped 27L in 14s
29 | INFO 89.447 s: station0 : car2 awaiting pump
30 | INFO 89.447 s: station0 : car2 at pump
31 | INFO 96.777 s: station2 : car4 awaiting pump
32 | INFO 96.777 s: station2 : car4 at pump
33 | INFO 109.006 s: station0 : car3 awaiting pump
34 | INFO 109.006 s: station0 : car3 at pump
35 | INFO 111.947 s: station0 : car2 pumped 45L in 22s
36 | INFO 116.777 s: station2 : car4 pumped 40L in 20s
37 | INFO 126.506 s: station0 : car3 pumped 35L in 18s
38 | INFO 133.359 s: tankerco.truck1 : refilling
39 | INFO 141.774 s: station1 : car2 awaiting pump
40 | INFO 141.774 s: station1 : car2 at pump
41 | INFO 153.359 s: tankerco.truck1 : refilled 200L in 20s
42 | INFO 161.274 s: station1 : car2 pumped 39L in 20s
43 | INFO 161.307 s: station1 : car3 awaiting pump
44 | INFO 161.307 s: station1 : car3 at pump
45 | INFO 178.807 s: station1 : car3 pumped 35L in 18s
46 | INFO 180.874 s: station0 : car4 awaiting pump
47 | INFO 180.874 s: station0 : car4 at pump
48 | INFO 182.106 s: station2 : car5 awaiting pump
49 | INFO 182.106 s: station2 : car5 at pump
50 | INFO 185.606 s: tankerco : dispatching truck0 to station2
51 | INFO 185.606 s: tankerco.truck0 : traveling to station2
52 | INFO 187.343 s: station0 : car5 awaiting pump
53 | INFO 187.343 s: station0 : car5 at pump
54 | INFO 188.209 s: station2 : car6 awaiting pump
55 | INFO 188.209 s: station2 : car6 at pump
56 | INFO 195.843 s: tankerco : dispatching truck1 to station0
57 | INFO 195.843 s: tankerco.truck1 : traveling to station0
58 | INFO 197.374 s: station0 : car4 pumped 33L in 16s
59 | INFO 202.843 s: station0 : car5 pumped 31L in 16s
60 | INFO 204.051 s: tankerco.truck1 : arrived at station0
61 | INFO 223.651 s: tankerco.truck1 : done pumping
62 | INFO 234.311 s: station2 : car7 awaiting pump
63 | INFO 255.130 s: station2 : car8 awaiting pump
64 | INFO 278.171 s: tankerco.truck0 : arrived at station2
65 | INFO 285.271 s: station2 : car5 pumped 34L in 103s
66 | INFO 285.271 s: station2 : car7 at pump
67 | INFO 286.087 s: station0 : car6 awaiting pump
68 | INFO 286.087 s: station0 : car6 at pump
69 | INFO 290.871 s: station2 : car6 pumped 33L in 103s
70 | INFO 290.871 s: station2 : car8 at pump
71 | INFO 298.171 s: tankerco.truck0 : done pumping
72 | INFO 298.171 s: tankerco.truck0 : going for refill
73 | INFO 302.271 s: station2 : car7 pumped 34L in 17s
74 | INFO 307.587 s: station0 : car6 pumped 43L in 22s
75 | INFO 312.871 s: station2 : car8 pumped 44L in 22s
76 | INFO 314.565 s: station2 : car9 awaiting pump
77 | INFO 314.565 s: station2 : car9 at pump
78 | INFO 336.065 s: station2 : car9 pumped 43L in 22s
79 | INFO 337.760 s: station0 : car7 awaiting pump
80 | INFO 337.760 s: station0 : car7 at pump
81 | INFO 350.399 s: station1 : car4 awaiting pump
82 | INFO 350.399 s: station1 : car4 at pump
83 | INFO 358.760 s: station0 : car7 pumped 42L in 21s
84 | INFO 365.899 s: station1 : car4 pumped 31L in 16s
85 | INFO 379.093 s: station1 : car5 awaiting pump
86 | INFO 379.093 s: station1 : car5 at pump
87 | INFO 386.093 s: tankerco : dispatching truck0 to station1
88 | INFO 396.093 s: station1 : car5 pumped 34L in 17s
89 | INFO 403.551 s: station2 : car10 awaiting pump
90 | INFO 403.551 s: station2 : car10 at pump
91 | INFO 406.424 s: tankerco.truck0 : refilling
92 | INFO 413.051 s: tankerco : dispatching truck1 to station2
93 | INFO 413.051 s: tankerco.truck1 : traveling to station2
94 | INFO 414.202 s: station2 : car11 awaiting pump
95 | INFO 414.202 s: station2 : car11 at pump
96 | INFO 426.424 s: tankerco.truck0 : refilled 200L in 20s
97 | INFO 426.424 s: tankerco.truck0 : traveling to station1
98 | INFO 432.837 s: station2 : car12 awaiting pump
99 | INFO 433.832 s: tankerco.truck0 : arrived at station1
100 | INFO 436.561 s: tankerco.truck1 : arrived at station2
101 | INFO 436.961 s: tankerco.truck1 : done pumping
102 | INFO 436.961 s: tankerco.truck1 : going for refill
103 | INFO 436.961 s: tankerco : dispatching truck0 to station2
104 | INFO 439.677 s: station1 : car6 awaiting pump
105 | INFO 439.677 s: station1 : car6 at pump
106 | INFO 453.753 s: station0 : car8 awaiting pump
107 | INFO 453.753 s: station0 : car8 at pump
108 | INFO 453.832 s: tankerco.truck0 : done pumping
109 | INFO 453.832 s: tankerco.truck0 : going for refill
110 | INFO 455.177 s: station1 : car6 pumped 31L in 16s
111 | INFO 456.524 s: station1 : car7 awaiting pump
112 | INFO 456.524 s: station1 : car7 at pump
113 | INFO 466.253 s: station0 : car8 pumped 25L in 12s
114 | INFO 471.402 s: station1 : car8 awaiting pump
115 | INFO 471.402 s: station1 : car8 at pump
116 | INFO 474.024 s: station1 : car7 pumped 35L in 18s
117 | INFO 482.382 s: station0 : car9 awaiting pump
118 | INFO 482.382 s: station0 : car9 at pump
119 | INFO 493.305 s: station2 : car13 awaiting pump
120 | INFO 493.402 s: station1 : car8 pumped 44L in 22s
121 | INFO 497.990 s: station0 : car10 awaiting pump
122 | INFO 497.990 s: station0 : car10 at pump
123 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Makefile for Sphinx documentation
# (generated by sphinx-quickstart; each target runs sphinx-build with one builder)

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo " html to make standalone HTML files"
	@echo " dirhtml to make HTML files named index.html in directories"
	@echo " singlehtml to make a single large HTML file"
	@echo " pickle to make pickle files"
	@echo " json to make JSON files"
	@echo " htmlhelp to make HTML files and a HTML help project"
	@echo " qthelp to make HTML files and a qthelp project"
	@echo " applehelp to make an Apple Help Book"
	@echo " devhelp to make HTML files and a Devhelp project"
	@echo " epub to make an epub"
	@echo " epub3 to make an epub3"
	@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo " latexpdf to make LaTeX files and run them through pdflatex"
	@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo " text to make text files"
	@echo " man to make manual pages"
	@echo " texinfo to make Texinfo files"
	@echo " info to make Texinfo files and run them through makeinfo"
	@echo " gettext to make PO message catalogs"
	@echo " changes to make an overview of all changed/added/deprecated items"
	@echo " xml to make Docutils-native XML files"
	@echo " pseudoxml to make pseudoxml-XML files for display purposes"
	@echo " linkcheck to check all external links for integrity"
	@echo " doctest to run all doctests embedded in the documentation (if enabled)"
	@echo " coverage to run coverage check of the documentation (if enabled)"
	@echo " dummy to check syntax errors of document sources"

.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/*

.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/desmod.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/desmod.qhc"

.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	"~/Library/Documentation/Help or install it in your application" \
	"bundle."

.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/desmod"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/desmod"
	@echo "# devhelp"

.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."

.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	"(use \`make latexpdf' here to do that automatically)."

.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	"(use \`make info' here to do that automatically)."

.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	"or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	"results in $(BUILDDIR)/doctest/output.txt."

.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	"results in $(BUILDDIR)/coverage/python.txt."

.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."
--------------------------------------------------------------------------------
/tests/test_queue.py:
--------------------------------------------------------------------------------
1 | from pytest import raises
2 |
3 | from desmod.queue import PriorityItem, PriorityQueue, Queue
4 |
5 |
def test_mq(env):
    """Messages traverse a bounded queue in FIFO order between processes."""
    queue = Queue(env, capacity=2)

    def put_later(msg, delay):
        yield env.timeout(delay)
        yield queue.put(msg)

    def get_later(expected, delay):
        yield env.timeout(delay)
        received = yield queue.get()
        assert received == expected

    env.process(put_later('1st', 0))
    env.process(put_later('2nd', 1))
    env.process(get_later('1st', 0))
    env.process(get_later('2nd', 1))
    env.run()
23 |
24 |
def test_queue_peek(env):
    """peek() raises IndexError when empty and returns the head item otherwise."""
    empty_queue = Queue(env)
    assert empty_queue.is_empty
    with raises(IndexError):
        empty_queue.peek()

    seeded_queue = Queue(env, items=[9, 8, 7])
    assert not seeded_queue.is_empty
    assert seeded_queue.peek() == 9
34 |
35 |
def test_queue_overflow(env):
    """A hard-capped queue raises OverflowError on a put beyond capacity."""
    queue = Queue(env, capacity=2, hard_cap=True)

    def filler(env, queue):
        # Two puts fill the queue to capacity without error.
        for _ in range(2):
            yield queue.put(1)
            yield env.timeout(1)
        # The third put exceeds the hard cap.
        with raises(OverflowError):
            yield queue.put(1)

    env.process(filler(env, queue))
    env.run()
48 |
49 |
def test_mq_when_full(env):
    """when_full() and when_any() fire as producers fill a bounded queue."""
    queue = Queue(env, capacity=2)
    observed = []

    def producer(env):
        yield env.timeout(1)
        for item in range(5):
            yield queue.put(item)
            yield env.timeout(1)

    def consumer(env):
        yield env.timeout(5)
        for expected in range(3):
            item = yield queue.get()
            assert item == expected

    def full_waiter(env):
        yield queue.when_full()
        observed.append('full')

    def any_waiter(env):
        yield queue.when_any()
        # The first item is put at t=1, so when_any() triggers then.
        assert env.now == 1
        observed.append('any')

    for process in (producer, consumer, full_waiter, any_waiter, any_waiter):
        env.process(process(env))
    env.run()

    # Producer put 5, consumer took 3: the queue ends non-empty and full.
    assert queue.items
    assert queue.is_full
    assert 'full' in observed
    assert observed.count('any') == 2
85 |
86 |
def test_priority_mq(env):
    """Items leave a PriorityQueue in ascending priority order."""
    queue = PriorityQueue(env)

    def producer(env):
        # Insert in descending priority; retrieval must still be ascending.
        for priority in range(4, -1, -1):
            payload = {priority}  # unhashable payload exercises PriorityItem
            yield queue.put(PriorityItem(priority, payload))
            yield env.timeout(1)

    def consumer(env):
        yield env.timeout(5)
        for expected in range(5):
            item = yield queue.get()
            assert item.item == {expected}
            yield env.timeout(1)

    env.process(producer(env))
    env.process(consumer(env))
    env.run()
106 |
107 |
def test_queue_repr(env):
    """The repr string reports the queue's name, current size, and capacity."""
    named_queue = Queue(env, name='hi', items=[3, 2, 1])
    assert str(named_queue) == "Queue(name='hi' size=3 capacity=inf)"

    anonymous_queue = PriorityQueue(env, capacity=3)
    assert str(anonymous_queue) == 'PriorityQueue(name=None size=0 capacity=3)'
114 |
115 |
def test_when_not_full(env):
    """when_not_full() triggers as soon as a get() frees capacity."""
    queue = Queue(env, capacity=2, items=[0, 1])

    def consumer(env):
        for expected in range(2):
            yield env.timeout(3)
            item = yield queue.get()
            assert item == expected

    def not_full_waiter(env):
        # The queue starts full; the first get() at t=3 makes room.
        yield queue.when_not_full()
        assert env.now == 3
        # The queue still has room, so a second wait triggers immediately.
        yield queue.when_not_full()
        assert env.now == 3

    env.process(consumer(env))
    env.process(not_full_waiter(env))
    env.run()
134 |
135 |
def test_when_empty(env):
    """Exercise when_empty(): triggers only when the queue becomes empty,
    and the context-manager protocol cancels a pending event on exit."""

    def proc(env, queue):
        # The queue starts empty, so when_empty() triggers immediately.
        yield queue.when_empty()
        assert env.now == 0

        yield queue.put('a')
        yield queue.put('b')

        # Draining to one remaining item must not trigger the event, and
        # exiting the `with` block cancels it so it never fires later.
        with queue.when_empty() as when_empty_ev:
            assert not when_empty_ev.triggered
            yield env.timeout(1)
            item = yield queue.get()
            assert item == 'a'
            assert not when_empty_ev.triggered

        # Getting the last item empties the queue and triggers the event
        # while still inside the `with` block.
        with queue.when_empty() as when_empty_ev:
            assert not when_empty_ev.triggered
            yield env.timeout(1)
            with queue.get() as get_ev:
                item = yield get_ev
                assert item == 'b'
            assert when_empty_ev.triggered
            yield when_empty_ev

    env.process(proc(env, Queue(env)))
    env.run()
162 |
163 |
def test_when_at_most(env):
    """when_at_most(n) triggers once the queue size is <= n.

    Events are registered out of numeric order to verify that triggering
    depends on queue size, not on registration order.
    """

    def proc(env, queue):
        for item in 'abc':
            with queue.put(item) as put_ev:
                yield put_ev

        at_most = {}
        at_most[0] = queue.when_at_most(0)
        at_most[3] = queue.when_at_most(3)
        at_most[1] = queue.when_at_most(1)
        at_most[2] = queue.when_at_most(2)
        # Size is 3, so only the n=3 event is satisfied immediately.
        assert not at_most[0].triggered
        assert not at_most[1].triggered
        assert not at_most[2].triggered
        assert at_most[3].triggered

        # Each get() shrinks the queue by one, satisfying the next threshold.
        item = yield queue.get()
        assert item == 'a'
        assert not at_most[0].triggered
        assert not at_most[1].triggered
        assert at_most[2].triggered

        item = yield queue.get()
        assert item == 'b'
        assert not at_most[0].triggered
        assert at_most[1].triggered

        item = yield queue.get()
        assert item == 'c'
        assert at_most[0].triggered

    env.process(proc(env, Queue(env)))
    env.run()
197 |
198 |
def test_when_at_least(env):
    """when_at_least(n) triggers once the queue size reaches >= n.

    Events are registered out of numeric order to verify that triggering
    depends on queue size, not on registration order.
    """

    def proc(env, queue):
        at_least = {}
        at_least[3] = queue.when_at_least(3)
        at_least[0] = queue.when_at_least(0)
        at_least[2] = queue.when_at_least(2)
        at_least[1] = queue.when_at_least(1)
        # The queue is empty, so only the n=0 event is satisfied immediately.
        assert at_least[0].triggered
        assert not at_least[1].triggered
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield queue.put('a')
        assert at_least[1].triggered
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        # Dropping back below a threshold must not trigger pending events.
        yield queue.get()
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield queue.put('b')
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield queue.put('c')
        assert at_least[2].triggered
        assert not at_least[3].triggered

        yield queue.put('d')
        assert at_least[3].triggered

    env.process(proc(env, Queue(env)))
    env.run()
233 |
234 |
def test_queue_cancel(env):
    """Cancelled queue events must never trigger, even after their condition
    is later satisfied by other producers and consumers.

    Fix: the final assertion block repeated ``assert not put_ev.triggered``
    twice and never re-checked the cancelled ``full_ev``; the duplicate is
    replaced with the evidently intended ``full_ev`` assertion.
    """
    queue = Queue(env, capacity=2)

    def producer(env):
        for i in range(5):
            yield env.timeout(5)
            yield queue.put(i)

    def consumer(env):
        for i in range(3):
            yield env.timeout(10)
            msg = yield queue.get()
            assert msg == i

    def canceller(env):
        any_ev = queue.when_any()
        get_ev = queue.get()
        full_ev = queue.when_full()

        yield env.timeout(1)

        # Nothing has been produced yet; cancel all still-pending events.
        assert not get_ev.triggered
        assert not any_ev.triggered
        assert not full_ev.triggered
        get_ev.cancel()
        any_ev.cancel()
        full_ev.cancel()

        assert not queue.is_full
        with queue.when_full() as when_full:
            yield when_full

        # Exiting the `with` block cancels the blocked put.
        with queue.put(1) as put_ev:
            not_full_ev = queue.when_not_full()

            yield env.timeout(1)

            assert not put_ev.triggered
            assert not not_full_ev.triggered
            not_full_ev.cancel()

        yield env.timeout(100)

        # The queue has since filled and drained repeatedly, but none of the
        # cancelled events may have triggered.
        assert not get_ev.triggered
        assert not any_ev.triggered
        assert not full_ev.triggered
        assert not put_ev.triggered
        assert not not_full_ev.triggered

    env.process(producer(env))
    env.process(consumer(env))
    env.process(canceller(env))
    env.run()
288 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

REM Command file for Sphinx documentation

REM Fall back to the sphinx-build on PATH unless the caller overrides it.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
REM Build option defaults; PAPER selects the LaTeX paper size.
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

REM No target given: show usage.
if "%1" == "" goto help
17 |
REM Usage banner; the ^<target^> placeholders were missing from the text.
if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo. html to make standalone HTML files
	echo. dirhtml to make HTML files named index.html in directories
	echo. singlehtml to make a single large HTML file
	echo. pickle to make pickle files
	echo. json to make JSON files
	echo. htmlhelp to make HTML files and a HTML help project
	echo. qthelp to make HTML files and a qthelp project
	echo. devhelp to make HTML files and a Devhelp project
	echo. epub to make an epub
	echo. epub3 to make an epub3
	echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo. text to make text files
	echo. man to make manual pages
	echo. texinfo to make Texinfo files
	echo. gettext to make PO message catalogs
	echo. changes to make an overview over all changed/added/deprecated items
	echo. xml to make Docutils-native XML files
	echo. pseudoxml to make pseudoxml-XML files for display purposes
	echo. linkcheck to check all external links for integrity
	echo. doctest to run all doctests embedded in the documentation if enabled
	echo. coverage to run coverage check of the documentation if enabled
	echo. dummy to check syntax errors of document sources
	goto end
)
45 |
REM Remove everything under the build directory.
if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)


REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok

:sphinx_python

set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

:sphinx_ok


REM One section per builder: run sphinx-build, then report the output dir.
if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)
125 |
if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\desmod.qhcp
	echo.To view the help file:
	REM qcollectiongenerator produces a .qhc collection file; the old Sphinx
	REM template printed ".ghc" here, which does not exist.
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\desmod.qhc
	goto end
)
137 |
138 | if "%1" == "devhelp" (
139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
140 | if errorlevel 1 exit /b 1
141 | echo.
142 | echo.Build finished.
143 | goto end
144 | )
145 |
146 | if "%1" == "epub" (
147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
148 | if errorlevel 1 exit /b 1
149 | echo.
150 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
151 | goto end
152 | )
153 |
154 | if "%1" == "epub3" (
155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
156 | if errorlevel 1 exit /b 1
157 | echo.
158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
159 | goto end
160 | )
161 |
162 | if "%1" == "latex" (
163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
164 | if errorlevel 1 exit /b 1
165 | echo.
166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdf" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "latexpdfja" (
181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
182 | cd %BUILDDIR%/latex
183 | make all-pdf-ja
184 | cd %~dp0
185 | echo.
186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
187 | goto end
188 | )
189 |
190 | if "%1" == "text" (
191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
192 | if errorlevel 1 exit /b 1
193 | echo.
194 | echo.Build finished. The text files are in %BUILDDIR%/text.
195 | goto end
196 | )
197 |
198 | if "%1" == "man" (
199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
200 | if errorlevel 1 exit /b 1
201 | echo.
202 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
203 | goto end
204 | )
205 |
206 | if "%1" == "texinfo" (
207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
208 | if errorlevel 1 exit /b 1
209 | echo.
210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
211 | goto end
212 | )
213 |
214 | if "%1" == "gettext" (
215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
216 | if errorlevel 1 exit /b 1
217 | echo.
218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
219 | goto end
220 | )
221 |
222 | if "%1" == "changes" (
223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
224 | if errorlevel 1 exit /b 1
225 | echo.
226 | echo.The overview file is in %BUILDDIR%/changes.
227 | goto end
228 | )
229 |
230 | if "%1" == "linkcheck" (
231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
232 | if errorlevel 1 exit /b 1
233 | echo.
234 | echo.Link check complete; look for any errors in the above output ^
235 | or in %BUILDDIR%/linkcheck/output.txt.
236 | goto end
237 | )
238 |
239 | if "%1" == "doctest" (
240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
241 | if errorlevel 1 exit /b 1
242 | echo.
243 | echo.Testing of doctests in the sources finished, look at the ^
244 | results in %BUILDDIR%/doctest/output.txt.
245 | goto end
246 | )
247 |
248 | if "%1" == "coverage" (
249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
250 | if errorlevel 1 exit /b 1
251 | echo.
252 | echo.Testing of coverage in the sources finished, look at the ^
253 | results in %BUILDDIR%/coverage/python.txt.
254 | goto end
255 | )
256 |
257 | if "%1" == "xml" (
258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
259 | if errorlevel 1 exit /b 1
260 | echo.
261 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
262 | goto end
263 | )
264 |
265 | if "%1" == "pseudoxml" (
266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
267 | if errorlevel 1 exit /b 1
268 | echo.
269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
270 | goto end
271 | )
272 |
273 | if "%1" == "dummy" (
274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
275 | if errorlevel 1 exit /b 1
276 | echo.
277 | echo.Build finished. Dummy builder generates no files.
278 | goto end
279 | )
280 |
281 | :end
282 |
--------------------------------------------------------------------------------
/tests/test_config.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import pytest
4 |
5 | from desmod.config import (
6 | ConfigError,
7 | NamedManager,
8 | _safe_eval,
9 | apply_user_config,
10 | apply_user_overrides,
11 | factorial_config,
12 | fuzzy_lookup,
13 | parse_user_factor,
14 | parse_user_factors,
15 | )
16 |
17 |
@pytest.fixture
def config():
    """Baseline configuration dict covering every supported value type:
    int, float, bool, str, list, and dict."""
    return {
        'foo.bar.baz': 17,
        'foo.bar.biz': 1.23,
        'abc.def.baz': False,
        'a.b.c': 'something',
        'd.e.f': [3, 2, 1],
        'g.h.i': {'a': 1, 'b': 2},
    }
28 |
29 |
@pytest.fixture
def named_mgr():
    """Fresh NamedManager instance for each test."""
    return NamedManager()
33 |
34 |
def test_named_reuse(named_mgr):
    """Re-registering an existing name or resolving an unknown one fails."""
    named_mgr.name('xxx', [], {'x': 0})
    with pytest.raises(ConfigError):
        # 'xxx' is already registered.
        named_mgr.name('xxx', [], {'y': 1})
    with pytest.raises(ConfigError):
        # 'yyy' was never registered.
        named_mgr.resolve('yyy')
41 |
42 |
def test_named_resolve(named_mgr):
    """Resolution merges config dicts across the full dependency chain."""
    named_mgr.name('www', config={'w': 0})
    named_mgr.name('xxx', [], {'x': 1}, category='thing', doc='documentation')
    named_mgr.name('yyy', ['xxx', 'www'], {'y': 2})
    named_mgr.name('zzz', depend=['yyy'], config={'z': 3})
    named_mgr.name('qqq', ['zzz'])
    assert named_mgr.resolve('qqq') == {'w': 0, 'x': 1, 'y': 2, 'z': 3}

    registered = {named_config.name for named_config in named_mgr}
    assert registered == {'www', 'xxx', 'yyy', 'zzz', 'qqq'}
    for named_config in named_mgr:
        if named_config.name != 'xxx':
            # Only 'xxx' was registered with a category and doc string.
            assert not named_config.category
            assert not named_config.doc
        else:
            assert named_config.category == 'thing'
            assert named_config.doc == 'documentation'
57 |
58 |
@pytest.mark.parametrize(
    'fuzzy_key, expected',
    [
        ('foo', ConfigError),
        ('b.foo', ('a.b.foo', 1)),
        ('d.foo', ('c.d.foo', 3)),
        ('bar', ('a.b.bar', 2)),
        ('o', ('e.f.o', 4)),
        ('.o', ('e.f.o', 4)),
        ('x.y.z', ('x.y.z', 5)),
        ('y.z', ConfigError),
    ],
)
def test_fuzzy_lookup(fuzzy_key, expected):
    """Fuzzy keys resolve to a unique key/value pair, or raise ConfigError."""
    table = {
        'a.b.foo': 1,
        'a.b.bar': 2,
        'c.d.foo': 3,
        'e.f.o': 4,
        'x.y.z': 5,
        'w.x.y.z': 6,
    }
    expecting_error = isinstance(expected, type) and issubclass(expected, Exception)
    if expecting_error:
        with pytest.raises(expected):
            fuzzy_lookup(table, fuzzy_key)
    else:
        assert fuzzy_lookup(table, fuzzy_key) == expected
86 |
87 |
def test_user_override(config):
    """Override expressions are evaluated and coerced to the existing types."""
    overrides = [
        ('biz', '12'),
        ('e.f', 'range(4)'),
        ('g.h.i', 'zip("abc", range(3))'),
    ]
    apply_user_overrides(config, overrides)
    assert config['foo.bar.biz'] == 12.0
    assert config['d.e.f'] == [0, 1, 2, 3]
    assert config['g.h.i'] == {'a': 0, 'b': 1, 'c': 2}
95 |
96 |
def test_user_override_type_mismatch(config):
    """An expression that cannot produce the key's list type is rejected.

    NOTE(review): 'd.e.f' holds a list; the os.* call presumably fails in the
    restricted evaluator and/or cannot coerce to list — either way ConfigError.
    """
    with pytest.raises(ConfigError):
        apply_user_overrides(config, [('d.e.f', 'os.system("clear")')])
100 |
101 |
def test_user_override_invalid_value(config):
    """Overriding via bare suffix 'baz' fails.

    NOTE(review): 'baz' matches both 'foo.bar.baz' and 'abc.def.baz', so the
    fuzzy lookup is presumably rejected as ambiguous — confirm against
    fuzzy_lookup's behavior.
    """
    with pytest.raises(ConfigError):
        apply_user_overrides(config, [('baz', '1')])
105 |
106 |
def test_user_override_invalid_key(config):
    """Overriding a key absent from the config raises ConfigError."""
    with pytest.raises(ConfigError):
        apply_user_overrides(config, [('not.a.key', '1')])
110 |
111 |
def test_user_override_int(config):
    """A string override is coerced to the key's existing int type."""
    apply_user_overrides(config, [('bar.baz', '18')])
    assert config['foo.bar.baz'] == 18
115 |
116 |
def test_user_override_int_invalid(config):
    """A string that cannot coerce to the key's int type raises ConfigError."""
    with pytest.raises(ConfigError):
        apply_user_overrides(config, [('bar.baz', 'eighteen')])
120 |
121 |
def test_user_override_bool(config):
    """String overrides coerce to bool for a bool-typed key."""
    for user_value, coerced in [('1', True), ('True', True), ('False', False)]:
        apply_user_overrides(config, [('def.baz', user_value)])
        assert config['abc.def.baz'] is coerced
129 |
130 |
def test_user_override_str(config):
    """Arbitrary text is accepted verbatim for a str-valued key."""
    apply_user_overrides(config, [('a.b.c', 'just a string')])
    assert config['a.b.c'] == 'just a string'
134 |
135 |
def test_user_override_str_int(config):
    """Numeric text stays a string when the key's existing type is str."""
    apply_user_overrides(config, [('a.b.c', '123')])
    assert config['a.b.c'] == '123'
139 |
140 |
def test_user_config(config):
    """apply_user_config overwrites existing keys with user-supplied values."""
    apply_user_config(config, {'foo.bar.baz': 99, 'g.h.i': {'c': 1, 'd': 2}})
    assert config['foo.bar.baz'] == 99
    assert config['g.h.i'] == {'c': 1, 'd': 2}
149 |
150 |
def test_user_config_bad_key(config):
    """apply_user_config rejects keys not present in the config."""
    user_config = {'a.bad.key': 1}
    with pytest.raises(ConfigError):
        apply_user_config(config, user_config)
155 |
156 |
def test_user_config_bad_value(config):
    """apply_user_config rejects values that do not match the key's type."""
    user_config = {'foo.bar.baz': 'not an int'}
    with pytest.raises(ConfigError):
        apply_user_config(config, user_config)
161 |
162 |
@pytest.mark.skipif(
    hasattr(sys, 'pypy_version_info'), reason="PyPy's eval() mishandles locals dict"
)
def test_safe_eval_str_builtin_alias():
    """A builtin name becomes its literal string when str coercion is asked."""
    assert _safe_eval('oct', str) == 'oct'
    # Without coercion, the builtin itself is returned.
    assert _safe_eval('oct') is oct
    with pytest.raises(ConfigError):
        # With empty eval locals, 'oct' cannot resolve to anything.
        _safe_eval('oct', eval_locals={})
    assert _safe_eval('oct', str, {}) == 'oct'
172 |
173 |
def test_safe_eval_dict():
    """dict is not a supported coercion target for _safe_eval."""
    with pytest.raises(ConfigError):
        _safe_eval('oct', coerce_type=dict)
177 |
178 |
@pytest.mark.parametrize(
    'user_keys, user_exprs, expected',
    [
        ('foo', '1,2,3', (['a.b.foo'], [[1], [2], [3]])),
        ('bar', '1.2, 3, 4.5', (['a.b.bar'], [[1.2], [3.0], [4.5]])),
        ('b.baz', '"abc"', (['a.b.baz'], [['a'], ['b'], ['c']])),
        ('b.baz', '"abc","def"', (['a.b.baz'], [['abc'], ['def']])),
        ('d.baz', '1, "y", 0', (['c.d.baz'], [[True], [True], [False]])),
        ('foo,bar', '(1,1),(2,2)', (['a.b.foo', 'a.b.bar'], [[1, 1.0], [2, 2.0]])),
    ],
)
def test_parse_user_factor(user_keys, user_exprs, expected):
    """Factors expand fuzzy keys and coerce values to the config's types.

    Checks both the parsed structure and the exact type of every coerced
    value (e.g. int vs. float, since 3 == 3.0 would pass the equality check).
    """
    config = {
        'a.b.foo': 1,
        'a.b.bar': 2.0,
        'a.b.baz': 'three',
        'c.d.baz': True,
    }

    factor = parse_user_factor(config, user_keys, user_exprs)
    assert expected == factor
    # Zip down to the individual values.  The original zipped only the outer
    # lists, pairing list objects with list objects, which made the
    # isinstance check vacuously true.
    for values, expected_values in zip(factor[1], expected[1]):
        for value, expected_value in zip(values, expected_values):
            assert isinstance(value, type(expected_value))
204 |
205 |
def test_parse_user_factors():
    """parse_user_factors parses each (keys, exprs) pair into one factor.

    The module-level ``config`` fixture is not requested: the original
    requested it and then immediately shadowed it with a local dict.
    """
    config = {
        'a.b.foo': 1,
        'a.b.bar': 2.0,
        'a.b.baz': 'three',
        'c.d.baz': True,
    }

    user_factors = [['foo', '1,2,3'], ['bar', '2.0, 4.0']]

    factors = parse_user_factors(config, user_factors)

    assert factors[0] == (['a.b.foo'], [[1], [2], [3]])
    assert factors[1] == (['a.b.bar'], [[2.0], [4.0]])
220 |
221 |
@pytest.mark.parametrize(
    'user_keys, user_exprs, err_str',
    [
        ('baz', 'True, False', 'ambiguous'),
        ('foo', '"one", "two"', 'coerce'),
        ('foo', '1', 'sequence'),
    ],
)
def test_parse_user_factor_invalid(user_keys, user_exprs, err_str):
    """Invalid user factors raise ConfigError with a descriptive message."""
    config = {
        'a.b.foo': 1,
        'a.b.bar': 2.0,
        'a.b.baz': 'three',
        'c.d.baz': True,
    }
    with pytest.raises(ConfigError) as exc_info:
        parse_user_factor(config, user_keys, user_exprs)
    # Assert against the exception itself, not the ExceptionInfo wrapper
    # (str(exc_info) is not guaranteed to be the message); the debugging
    # print() from the original is dropped.
    assert err_str in str(exc_info.value)
241 |
242 |
def test_factorial_config():
    """factorial_config yields the cartesian product of all factor values."""
    factors = [
        (['k0', 'k1'], [[0, 1], [2, 3]]),
        (['k2'], [[4], [5], [6]]),
    ]

    # The first factor varies slowest, the last fastest.
    expected = [
        {'k0': k0, 'k1': k1, 'k2': k2}
        for k0, k1 in [(0, 1), (2, 3)]
        for k2 in (4, 5, 6)
    ]

    assert list(factorial_config({}, factors)) == expected
259 |
260 |
def test_factorial_config_special():
    """The special key records the (key, value) pairs of each combination."""
    factors = [
        (['k0', 'k1'], [[0, 1], [2, 3]]),
        (['k2'], [[4], [5], [6]]),
    ]

    combos = [
        {'k0': k0, 'k1': k1, 'k2': k2}
        for k0, k1 in [(0, 1), (2, 3)]
        for k2 in (4, 5, 6)
    ]
    # Each expected dict additionally carries its own items under 'special'
    # (sorted(...) yields k0, k1, k2 order, matching the factor keys).
    expected = [dict(combo, special=sorted(combo.items())) for combo in combos]

    fc = factorial_config({}, factors, 'special')
    assert list(fc) == expected
278 |
--------------------------------------------------------------------------------
/tests/test_tracer.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 |
4 | import pytest
5 | import simpy
6 |
7 | from desmod.component import Component
8 | from desmod.pool import Pool
9 | from desmod.queue import Queue
10 | from desmod.simulation import simulate
11 |
12 | pytestmark = pytest.mark.usefixtures('cleandir')
13 |
14 |
@pytest.fixture
def cleandir(tmpdir):
    """Run the test from a fresh temporary directory, restoring cwd after."""
    original_cwd = os.getcwd()
    tmpdir.chdir()
    yield
    os.chdir(original_cwd)
21 |
22 |
@pytest.fixture
def config():
    """Baseline simulation config with all tracing backends disabled.

    Individual tests flip the sim.log/sim.vcd/sim.db enables as needed.
    """
    return {
        'sim.db.enable': False,
        'sim.db.file': 'sim.sqlite',
        'sim.duration': '10 us',
        'sim.log.enable': False,
        'sim.log.file': 'sim.log',
        'sim.log.level': 'INFO',
        'sim.result.file': 'result.yaml',
        'sim.seed': 1234,
        'sim.timescale': '1 us',
        'sim.vcd.dump_file': 'sim.vcd',
        'sim.vcd.enable': False,
        'sim.vcd.gtkw_file': 'sim.gtkw',
        'sim.vcd.start_time': '',
        'sim.vcd.stop_time': '',
        'sim.workspace': 'workspace',
        'test.raise': False,
    }
43 |
44 |
class TopTest(Component):
    """Top-level component exercising every probe and trace target type."""

    base_name = 'top'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.container = simpy.Container(self.env)
        self.resource = simpy.Resource(self.env)
        self.queue = Queue(self.env)
        self.pool = Pool(self.env)
        self.a = CompA(self)
        self.b = CompB(self)

        # Build probe hints from whichever tracing backends are enabled.
        hints = {}
        if self.env.config['sim.log.enable']:
            hints['log'] = {'level': 'INFO'}
        if self.env.config['sim.vcd.enable']:
            hints['vcd'] = {}
        if self.env.config['sim.db.enable']:
            hints['db'] = {}
        for probe_name in ('container', 'resource', 'queue', 'pool'):
            self.auto_probe(probe_name, **hints)

        self.trace_some = self.get_trace_function(
            'something', vcd={'var_type': 'real'}, log={'level': 'INFO'}
        )
        self.trace_other = self.get_trace_function(
            'otherthing',
            vcd={'var_type': 'integer', 'init': ('z', 'z'), 'size': (8, 8)},
        )
        self.add_process(self.loop)

    def connect_children(self):
        # Both children share the parent's container.
        self.connect(self.a, 'container')
        self.connect(self.b, 'container')

    def loop(self):
        """Periodically grab the resource and emit trace events."""
        while True:
            yield self.env.timeout(5)
            with self.resource.request() as request:
                yield request
                self.trace_some(17.0)
                self.trace_other(42, 17)
                if self.env.config.get('test.raise'):
                    raise Exception('oops')
90 |
91 |
class CompA(Component):
    """Child component that repeatedly drains the shared container."""

    base_name = 'a'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_process(self.loop)
        # 'container' is supplied by the parent via connect().
        self.add_connections('container')

    def loop(self):
        while True:
            yield self.container.get(3)
104 |
105 |
class CompB(CompA):
    """Child component that feeds the shared container one unit per tick."""

    base_name = 'b'

    def loop(self):
        while True:
            yield self.container.put(1)
            yield self.env.timeout(1)
114 |
115 |
def test_defaults(config):
    """With all backends disabled, only the result file is produced."""
    simulate(config, TopTest)
    workspace = config['sim.workspace']
    assert os.path.isdir(workspace)
    assert os.path.exists(os.path.join(workspace, config['sim.result.file']))
    disabled_outputs = (
        'sim.log.file',
        'sim.vcd.dump_file',
        'sim.vcd.gtkw_file',
        'sim.db.file',
    )
    for key in disabled_outputs:
        assert not os.path.exists(os.path.join(workspace, config[key]))
128 |
129 |
def test_exception(config):
    """A simulation-time exception propagates and is logged at ERROR level."""
    config['sim.log.enable'] = True
    config['test.raise'] = True
    with pytest.raises(Exception):
        simulate(config, TopTest)
    log_path = os.path.join(config['sim.workspace'], config['sim.log.file'])
    assert os.path.exists(log_path)
    with open(log_path) as f:
        log = f.read()
    assert 'ERROR' in log
140 |
141 |
def test_log(config):
    """Logging enabled: the log file exists and ends with the final event."""
    config['sim.log.enable'] = True
    simulate(config, TopTest)
    log_path = os.path.join(config['sim.workspace'], config['sim.log.file'])
    assert os.path.exists(log_path)
    # Close the file deterministically; the original left the handle from
    # open(...).readlines() dangling for the GC to collect.
    with open(log_path) as log_file:
        last_line = log_file.readlines()[-1]
    assert last_line == 'INFO 9.000 us: top.container: 1\n'
149 |
150 |
def test_log_stderr(config, capsys):
    """With no log file configured, log output goes to stderr."""
    config['sim.log.enable'] = True
    config['sim.log.file'] = ''
    simulate(config, TopTest)
    out, err = capsys.readouterr()
    assert out == ''
    assert err.endswith('INFO 9.000 us: top.container: 1\n')
158 |
159 |
def test_log_persist(config):
    """sim.log.persist=False removes the log file after simulation."""
    config['sim.log.enable'] = True
    config['sim.log.persist'] = False
    simulate(config, TopTest)
    log_path = os.path.join(config['sim.workspace'], config['sim.log.file'])
    assert not os.path.exists(log_path)

    # Also exercise the non-persistent path with stderr logging (no file).
    config['sim.log.file'] = ''
    simulate(config, TopTest)
169 |
170 |
def test_vcd(config):
    """VCD enabled: dump file exists and contains every timestep marker."""
    config['sim.vcd.enable'] = True
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    assert os.path.exists(dump_path)
    with open(dump_path) as dump:
        vcd_str = dump.read()
    assert all(f'#{t}\n' in vcd_str for t in range(1, 11))
180 |
181 |
def test_vcd_start(config):
    """A start time defers dumping; dumpon appears and later times dump."""
    config['sim.vcd.enable'] = True
    config['sim.vcd.start_time'] = '5 us'
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    with open(dump_path) as dump:
        vcd_str = dump.read()
    assert 'dumpon' in vcd_str
    assert '#6' in vcd_str
191 |
192 |
def test_vcd_stop(config):
    """A stop time truncates dumping; dumpoff appears, later times do not."""
    config['sim.vcd.enable'] = True
    config['sim.vcd.stop_time'] = '5 us'
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    with open(dump_path) as dump:
        vcd_str = dump.read()
    assert 'dumpoff' in vcd_str
    assert '#6' not in vcd_str
202 |
203 |
def test_vcd_start_then_stop(config):
    """start < stop: dumping occurs only within the [start, stop] window."""
    config['sim.vcd.enable'] = True
    config['sim.vcd.start_time'] = '4 us'
    config['sim.vcd.stop_time'] = '6 us'
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    with open(dump_path) as dump:
        vcd_str = dump.read()
    assert 'dumpon' in vcd_str
    assert 'dumpoff' in vcd_str
    assert '#1\n' not in vcd_str
    assert '#5' in vcd_str
    assert '#9' not in vcd_str
217 |
218 |
def test_vcd_stop_then_start(config):
    """stop < start: dumping pauses between stop and start, then resumes."""
    config['sim.vcd.enable'] = True
    config['sim.vcd.start_time'] = '6 us'
    config['sim.vcd.stop_time'] = '4 us'
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    with open(dump_path) as dump:
        vcd_str = dump.read()
    assert 'dumpon' in vcd_str
    assert 'dumpoff' in vcd_str
    assert '#1\n' in vcd_str
    assert '#5' not in vcd_str
    assert '#9' in vcd_str
232 |
233 |
def test_vcd_timescale(config):
    """sim.vcd.timescale overrides the timescale written to the VCD header."""
    config['sim.vcd.enable'] = True
    config['sim.vcd.timescale'] = '10 s'
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    with open(dump_path) as dump:
        vcd_str = dump.read()
    assert '$timescale 10 s' in vcd_str
242 |
243 |
def test_vcd_persist(config):
    """sim.vcd.persist=False removes the dump file after simulation."""
    config['sim.vcd.enable'] = True
    config['sim.vcd.persist'] = False
    simulate(config, TopTest)
    dump_path = os.path.join(config['sim.workspace'], config['sim.vcd.dump_file'])
    assert not os.path.exists(dump_path)
250 |
251 |
def test_db(config):
    """DB enabled: the sqlite file exists and holds all 15 trace rows."""
    config['sim.db.enable'] = True
    simulate(config, TopTest)
    db_path = os.path.join(config['sim.workspace'], config['sim.db.file'])
    assert os.path.exists(db_path)
    db = sqlite3.connect(db_path)
    try:
        assert db.execute('SELECT COUNT() FROM trace').fetchone()[0] == 15
    finally:
        # Close the connection explicitly instead of leaking it to the GC.
        db.close()
259 |
260 |
def test_db_persist(config):
    """sim.db.persist=False removes the database file after simulation."""
    config['sim.db.enable'] = True
    config['sim.db.persist'] = False
    simulate(config, TopTest)
    db_path = os.path.join(config['sim.workspace'], config['sim.db.file'])
    assert not os.path.exists(db_path)
267 |
268 |
def test_db_include_pat(config):
    """Include patterns restrict which probes are written to the database."""
    config['sim.db.enable'] = True
    config['sim.db.include_pat'] = [r'top\.resource']
    simulate(config, TopTest)
    db_path = os.path.join(config['sim.workspace'], config['sim.db.file'])
    assert os.path.exists(db_path)
    db = sqlite3.connect(db_path)
    try:
        # Only the top.resource trace entries should remain.
        assert db.execute('SELECT COUNT() FROM trace').fetchone()[0] == 2
    finally:
        # Close the connection explicitly instead of leaking it to the GC.
        db.close()
277 |
278 |
def test_db_in_memory(config):
    """A ':memory:' database never touches the filesystem."""
    config['sim.db.enable'] = True
    config['sim.db.file'] = ':memory:'
    simulate(config, TopTest)
    db_path = os.path.join(config['sim.workspace'], config['sim.db.file'])
    assert not os.path.exists(db_path)
285 |
--------------------------------------------------------------------------------
/docs/examples/gas_station/gas_station.py:
--------------------------------------------------------------------------------
1 | """Model refueling at several gas stations.
2 |
3 | Each gas station has several fuel pumps and a single, shared reservoir. Each
4 | arriving car pumps gas from the reservoir via a fuel pump.
5 |
6 | As the gas station's reservoir empties, a request is made to a tanker truck
7 | company to send a truck to refill the reservoir. The tanker company maintains a
8 | fleet of tanker trucks.
9 |
10 | This example demonstrates core desmod concepts including:
11 | - Modeling using Component subclasses
12 | - The "batteries-included" simulation environment
13 | - Centralized configuration
14 | - Logging
15 |
16 | """
17 | from itertools import count, cycle
18 |
19 | from simpy import Resource
20 |
21 | from desmod.component import Component
22 | from desmod.dot import generate_dot
23 | from desmod.pool import Pool
24 | from desmod.queue import Queue
25 | from desmod.simulation import simulate
26 |
27 |
class Top(Component):
    """Every model has a single top-level Component.

    For this gas station model, the top level components are gas stations and a
    tanker truck company.

    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # The simulation configuration is available everywhere via the
        # simulation environment.
        num_gas_stations = self.env.config.get('gas_station.count', 1)

        # Instantiate GasStation components. An index is passed so that each
        # child gas station gets a unique name.
        self.gas_stations = [GasStation(self, index=i) for i in range(num_gas_stations)]

        # There is just one tanker company.
        self.tanker_company = TankerCompany(self)

    def connect_children(self):
        # This function is called during the elaboration phase, i.e. after all
        # of the components have been instantiated, but before the simulation
        # phase.
        for gas_station in self.gas_stations:
            # Each GasStation instance gets a reference to (is connected to)
            # the tanker_company instance. This demonstrates the most
            # abbreviated way to call connect().
            self.connect(gas_station, 'tanker_company')

    def elab_hook(self):
        # Runs at the end of elaboration: emits GraphViz .dot files describing
        # the component hierarchy and connections (see desmod.dot).
        generate_dot(self)
62 |
63 |
class TankerCompany(Component):
    """The tanker company owns and dispatches its fleet of tanker trucks."""

    # This base_name is used to build names and scopes of component instances.
    base_name = 'tankerco'

    def __init__(self, *args, **kwargs):
        # Many Component subclasses can simply forward *args and **kwargs to
        # the superclass initializer; although Component subclasses may also
        # have custom positional and keyword arguments.
        super().__init__(*args, **kwargs)
        num_tankers = self.env.config.get('tanker.count', 1)

        # Instantiate the fleet of tanker trucks.
        trucks = [TankerTruck(self, index=i) for i in range(num_tankers)]

        # Trucks are dispatched in a simple round-robin fashion.
        self.trucks_round_robin = cycle(trucks)

    def request_truck(self, gas_station, done_event):
        """Called by gas stations to request a truck to refill its reservoir.

        Returns an event that the gas station must yield for.

        """
        truck = next(self.trucks_round_robin)

        # Each component has debug(), info(), warn(), and error() log methods.
        # Log lines are automatically annotated with the simulation time and
        # the scope of the component doing the logging.
        self.info(f'dispatching {truck.name} to {gas_station.name}')
        return truck.dispatch(gas_station, done_event)
96 |
97 |
class TankerTruck(Component):
    """Tanker trucks carry fuel to gas stations.

    Each tanker truck has a queue of gas stations it must visit. When the
    truck's tank becomes empty, it must go refill itself.

    """

    base_name = 'truck'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.pump_rate = self.env.config.get('tanker.pump_rate', 10)
        self.avg_travel = self.env.config.get('tanker.travel_time', 600)
        tank_capacity = self.env.config.get('tanker.capacity', 200)
        self.tank = Pool(self.env, tank_capacity)

        # This auto_probe() call uses the self.tank Pool get/put hooks so that
        # whenever its level changes, the new level is noted in the log.
        self.auto_probe('tank', log={})

        # The parent TankerCompany enqueues instructions to this queue.
        self._instructions = Queue(self.env)

        # Declare a persistent process to be started at simulation-time.
        self.add_process(self._dispatch_loop)

    def dispatch(self, gas_station, done_event):
        """Append dispatch instructions to the truck's queue."""
        return self._instructions.put((gas_station, done_event))

    def _dispatch_loop(self):
        """This is the tanker truck's main behavior. Travel, pump, refill..."""
        while True:
            if not self.tank.level:
                # Empty tank: travel to the depot and refill completely
                # before waiting for the next dispatch instruction.
                self.info('going for refill')

                # Desmod simulation environments come equipped with a
                # random.Random() instance seeded based on the 'sim.seed'
                # configuration key.
                travel_time = self.env.rand.expovariate(1 / self.avg_travel)
                yield self.env.timeout(travel_time)

                self.info('refilling')
                pump_time = self.tank.capacity / self.pump_rate
                yield self.env.timeout(pump_time)

                yield self.tank.put(self.tank.capacity)
                self.info(f'refilled {self.tank.capacity}L in {pump_time:.0f}s')

            gas_station, done_event = yield self._instructions.get()
            self.info(f'traveling to {gas_station.name}')
            travel_time = self.env.rand.expovariate(1 / self.avg_travel)
            yield self.env.timeout(travel_time)
            self.info(f'arrived at {gas_station.name}')
            # Pump one liter at a time until the truck runs dry or the
            # station's reservoir is full.
            while self.tank.level and (
                gas_station.reservoir.level < gas_station.reservoir.capacity
            ):
                yield self.env.timeout(1 / self.pump_rate)
                yield gas_station.reservoir.put(1)
                yield self.tank.get(1)
            self.info('done pumping')
            done_event.succeed()
161 |
162 |
class GasStation(Component):
    """A gas station has a fuel reservoir shared among several fuel pumps.

    The gas station has a traffic generator process that causes cars to arrive
    to fill up their tanks.

    As the cars fill up, the reservoir's level goes down. When the level goes
    below a critical threshold, the gas station makes a request to the tanker
    company for a tanker truck to refill the reservoir.

    """

    base_name = 'station'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        config = self.env.config
        self.add_connections('tanker_company')
        self.arrival_interval = config.get('gas_station.arrival_interval', 60)

        station_capacity = config.get('gas_station.capacity', 200)
        # The reservoir starts full.
        self.reservoir = Pool(
            self.env, capacity=station_capacity, init=station_capacity
        )
        self.auto_probe('reservoir', log={})

        threshold_pct = config.get('gas_station.threshold_pct', 10)
        self.reservoir_low_water = threshold_pct * station_capacity / 100

        self.pump_rate = config.get('gas_station.pump_rate', 2)
        num_pumps = config.get('gas_station.pumps', 2)
        self.fuel_pumps = Resource(self.env, capacity=num_pumps)
        self.auto_probe('fuel_pumps', log={})

        self.car_capacity = config.get('car.capacity', 50)
        self.car_level_range = config.get('car.level', [5, 25])

        # A gas station has two persistent processes. One to monitor the
        # reservoir level and one that models the arrival of cars at the
        # station. Desmod starts these processes before simulation phase.
        self.add_processes(self._monitor_reservoir, self._traffic_generator)

    @property
    def reservoir_pct(self):
        return self.reservoir.level / self.reservoir.capacity * 100

    def _monitor_reservoir(self):
        """Periodically monitor reservoir level.

        A request is made to the tanker company when the reservoir falls
        below a critical threshold.

        """
        while True:
            yield self.reservoir.when_at_most(self.reservoir_low_water)
            done_event = self.env.event()
            yield self.tanker_company.request_truck(self, done_event)
            # Wait for the dispatched truck to finish refilling before
            # monitoring for the next low-water event.
            yield done_event

    def _traffic_generator(self):
        """Model the sporadic arrival of cars to the gas station."""
        for i in count():
            interval = self.env.rand.expovariate(1 / self.arrival_interval)
            yield self.env.timeout(interval)
            # Each car runs as its own short-lived process.
            self.env.process(self._car(i))

    def _car(self, i):
        """Model a car transacting fuel."""
        with self.fuel_pumps.request() as pump_req:
            self.info(f'car{i} awaiting pump')
            yield pump_req
            self.info(f'car{i} at pump')
            car_level = self.env.rand.randint(*self.car_level_range)
            amount = self.car_capacity - car_level
            t0 = self.env.now
            # Pump one liter at a time so the reservoir drains gradually.
            for _ in range(amount):
                yield self.reservoir.get(1)
                yield self.env.timeout(1 / self.pump_rate)
            pump_time = self.env.now - t0
            self.info(f'car{i} pumped {amount}L in {pump_time:.0f}s')
243 |
244 |
245 | # Desmod uses a plain dictionary to represent the simulation configuration.
246 | # The various 'sim.xxx' keys are reserved for desmod while the remainder are
247 | # application-specific.
config = {
    # Car tank parameters.
    'car.capacity': 50,
    'car.level': [5, 25],
    # Gas station sizing and service rates.
    'gas_station.capacity': 200,
    'gas_station.count': 3,
    'gas_station.pump_rate': 2,
    'gas_station.pumps': 2,
    'gas_station.arrival_interval': 60,
    # Reserved 'sim.*' keys consumed by desmod itself: DOT graph output,
    # duration, logging, results file, RNG seed, timescale, and workspace.
    'sim.dot.enable': True,
    'sim.dot.colorscheme': 'blues5',
    'sim.duration': '500 s',
    'sim.log.enable': True,
    'sim.log.file': 'sim.log',
    'sim.log.format': '{level:7} {ts:.3f} {ts_unit}: {scope:<16}:',
    'sim.log.level': 'INFO',
    'sim.result.file': 'results.yaml',
    'sim.seed': 42,
    'sim.timescale': 's',
    'sim.workspace': 'workspace',
    # Tanker company fleet parameters.
    'tanker.capacity': 200,
    'tanker.count': 2,
    'tanker.pump_rate': 10,
    'tanker.travel_time': 100,
}
272 |
if __name__ == '__main__':
    # Desmod takes responsibility for instantiating and elaborating the model,
    # thus we only need to pass the configuration dict and the top-level
    # Component class (Top) to simulate().
    # The 'sim.*' keys in `config` above control duration, seeding, logging,
    # DOT output, and the workspace directory.
    simulate(config, Top)
278 |
--------------------------------------------------------------------------------
/tests/test_pool.py:
--------------------------------------------------------------------------------
1 | from pytest import raises
2 | import pytest
3 |
4 | from desmod.pool import Pool, PriorityPool
5 |
6 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool(env, PoolClass):
    """Interleaved puts and gets resolve with the expected amounts."""
    pool = PoolClass(env, capacity=2)

    def put_proc(amount, delay):
        yield env.timeout(delay)
        yield pool.put(amount)

    def get_proc(amount, delay):
        yield env.timeout(delay)
        received = yield pool.get(amount)
        assert received == amount

    # Registration order matters: processes scheduled for the same time
    # run in the order they were started.
    env.process(put_proc(1, 0))
    env.process(put_proc(2, 1))
    env.process(get_proc(1, 0))
    env.process(get_proc(2, 1))
    env.process(get_proc(2, 2))
    env.process(put_proc(1, 2))
    env.process(put_proc(1, 3))
    env.run()
28 |
29 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool2(env, PoolClass):
    """Exercise level/threshold events through a scripted put/get sequence."""
    pool = PoolClass(env, capacity=2)

    def proc(env, pool):
        assert pool.is_empty
        assert env.now == 0

        yield env.timeout(1)

        when_full = pool.when_full()
        assert not when_full.triggered

        when_any = pool.when_any()
        assert not when_any.triggered

        # The pool is empty, so "not full" is satisfied immediately.
        with pool.when_not_full() as when_not_full:
            yield when_not_full
            assert when_not_full.triggered

        # Amounts larger than capacity are rejected outright.
        with raises(ValueError):
            pool.put(pool.capacity + 1)

        with raises(ValueError):
            pool.get(pool.capacity + 1)

        get_two = pool.get(2)
        assert not get_two.triggered

        put_one = pool.put(1)
        assert put_one.triggered

        # Threshold events fire via event callbacks, which have not run
        # yet even though the put itself triggered synchronously.
        assert not when_any.triggered
        assert not get_two.triggered
        assert not when_full.triggered
        assert pool.level == 1

        yield put_one
        assert when_any.triggered

        yield env.timeout(1)

        with pool.when_full() as when_full2:
            assert not when_full2.triggered

        put_one = pool.put(1)
        assert put_one.triggered
        assert not when_full.triggered

        yield put_one

        # The second put filled the pool, which satisfied the pending
        # get(2) and drained it back to empty.
        assert when_full.triggered
        assert get_two.triggered
        assert pool.level == 0

        yield pool.put(2)

        when_not_full = pool.when_not_full()
        assert not when_not_full.triggered

        with pool.when_any() as when_any2:
            yield when_any2
            assert when_any2.triggered

        yield pool.get(1)

        assert when_not_full.triggered

    env.process(proc(env, pool))
    env.run()
100 |
101 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_float(env, PoolClass):
    """Pools support non-integer amounts and epsilon-based thresholds."""
    pool = PoolClass(env, capacity=3.0)

    def proc(env, pool):
        assert pool.is_empty

        when_full = pool.when_full()
        assert not when_full.triggered
        when_any = pool.when_any()
        assert not when_any.triggered

        get_half = pool.get(0.5)
        assert not get_half.triggered
        put_three = pool.put(3)
        assert put_three.triggered
        yield put_three
        assert pool.level == 2.5
        assert get_half.triggered

        # A float-capacity pool requires an explicit epsilon for the
        # "not full" comparison.
        with raises(AssertionError):
            when_not_full = pool.when_not_full()
        when_not_full = pool.when_not_full(epsilon=0.01)
        assert when_not_full.triggered

        put_half = pool.put(0.5)
        assert put_half.triggered
        yield put_half

        # The pool is exactly full again, so "not full" no longer holds.
        when_not_full = pool.when_not_full(epsilon=0.01)
        assert not when_not_full.triggered

    env.process(proc(env, pool))
    env.run()
136 |
137 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_overflow(env, PoolClass):
    """Putting past capacity raises OverflowError when hard_cap is set."""
    pool = PoolClass(env, capacity=5, hard_cap=True)

    def fill_past_capacity(env):
        yield env.timeout(1)
        yield pool.put(1)
        yield pool.put(3)
        assert pool.remaining == 1
        # Only one unit of space remains; a two-unit put must overflow.
        with raises(OverflowError):
            yield pool.put(2)

    env.process(fill_past_capacity(env))
    env.run()
152 |
153 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_put_zero(env, PoolClass):
    """A zero-amount put is rejected with ValueError."""
    pool = PoolClass(env, capacity=5, hard_cap=True)

    def zero_put(env):
        with raises(ValueError):
            yield pool.put(0)

    env.process(zero_put(env))
    env.run()
164 |
165 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_get_zero(env, PoolClass):
    """A zero-amount get is rejected with ValueError."""
    pool = PoolClass(env, capacity=5, hard_cap=True)

    def zero_get(env):
        with raises(ValueError):
            yield pool.get(0)

    env.process(zero_get(env))
    env.run()
176 |
177 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_get_too_many(env, PoolClass):
    """A get() larger than capacity raises ValueError, even mid-stream."""
    def feed(env, pool):
        yield pool.put(1)
        yield env.timeout(1)
        yield pool.put(1)

    def drain(env, pool):
        # A normal get works first; the oversized one must fail.
        amount = yield pool.get(1)
        assert amount == 1
        with raises(ValueError):
            yield pool.get(pool.capacity + 1)

    pool = PoolClass(env, capacity=6, name='foo')
    env.process(feed(env, pool))
    env.process(drain(env, pool))
    env.run()
195 |
196 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_put_too_many(env, PoolClass):
    """A put() larger than capacity raises ValueError."""
    pool = PoolClass(env, capacity=6)

    def oversized_put(env):
        with raises(ValueError):
            yield pool.put(pool.capacity + 1)

    env.process(oversized_put(env))
    env.run()
207 |
208 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_cancel(env, PoolClass):
    """Cancelled pool events must never trigger, even once satisfiable."""
    def proc(env, pool):
        get_ev = pool.get(2)
        full_ev = pool.when_full()
        any_ev = pool.when_any()
        empty_ev = pool.when_empty()

        assert not any_ev.triggered
        assert empty_ev.triggered

        yield env.timeout(1)

        any_ev.cancel()

        # Using the event as a context manager auto-cancels it on exit.
        with pool.put(1) as put_ev:
            yield put_ev

        # any_ev was cancelled before the put, so it stays untriggered.
        assert not get_ev.triggered
        assert not any_ev.triggered

        with pool.when_empty() as empty_ev:
            assert not empty_ev.triggered

        get_ev.cancel()
        full_ev.cancel()

        yield pool.put(1)

        # The pool is now full, but the cancelled waiters stay untriggered.
        assert not get_ev.triggered
        assert pool.is_full
        assert not full_ev.triggered

        put_ev = pool.put(1)
        assert not put_ev.triggered

        yield env.timeout(1)
        put_ev.cancel()

        # Freeing one unit must not revive the cancelled put.
        with pool.get(1) as get_ev2:
            yield get_ev2
            assert not put_ev.triggered

    env.process(proc(env, PoolClass(env, capacity=2)))
    env.run()
254 |
255 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_when_at_most(env, PoolClass):
    """when_at_most() events fire as the level drains to each threshold."""
    def proc(env, pool):
        yield pool.put(3)
        # Register thresholds out of order to exercise the internal heap.
        at_most = {}
        for amount in (0, 3, 1, 2):
            at_most[amount] = pool.when_at_most(amount)
        assert not at_most[0].triggered
        assert not at_most[1].triggered
        assert not at_most[2].triggered
        assert at_most[3].triggered

        yield pool.get(1)
        assert pool.level == 2
        assert not at_most[0].triggered
        assert not at_most[1].triggered
        assert at_most[2].triggered

        yield pool.get(1)
        assert pool.level == 1
        assert not at_most[0].triggered
        assert at_most[1].triggered

        yield pool.get(1)
        assert pool.level == 0
        assert at_most[0].triggered

    env.process(proc(env, PoolClass(env)))
    env.run()
287 |
288 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_when_at_least(env, PoolClass):
    """when_at_least() events fire as the level rises past each threshold."""
    def proc(env, pool):
        # Register thresholds out of order to exercise the internal heap.
        at_least = {}
        for amount in (3, 0, 2, 1):
            at_least[amount] = pool.when_at_least(amount)
        assert at_least[0].triggered
        assert not at_least[1].triggered
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield pool.put(1)
        assert at_least[1].triggered
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield pool.get(1)
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield pool.put(1)
        assert not at_least[2].triggered
        assert not at_least[3].triggered

        yield pool.put(1)
        assert at_least[2].triggered
        assert not at_least[3].triggered

        yield pool.put(1)
        assert at_least[3].triggered

    env.process(proc(env, PoolClass(env)))
    env.run()
324 |
325 |
@pytest.mark.parametrize('PoolClass', [Pool, PriorityPool])
def test_pool_check_str(env, PoolClass):
    """str() of a pool shows its class, name, level, and capacity."""
    pool = PoolClass(env, name='bar', capacity=5)
    expected = f"{PoolClass.__name__}(name='bar' level=0 capacity=5)"
    assert str(pool) == expected
330 |
331 |
def test_priority_pool_gets(env):
    """Blocked gets are serviced in priority order (lower value first)."""
    pool = PriorityPool(env)

    def producer(env, pool):
        # Produce one unit per simulated second.
        for _ in range(10):
            yield env.timeout(1)
            yield pool.put(1)

    def consumer(get_event):
        yield get_event

    # The small priority-1 gets are requested before the large priority-0
    # gets, but priority outranks request order.
    get1_p1_a = env.process(consumer(pool.get(1, priority=1)))
    get1_p1_b = env.process(consumer(pool.get(1, priority=1)))
    get5_p0 = env.process(consumer(pool.get(5, priority=0)))
    get4_p0 = env.process(consumer(pool.get(4, priority=0)))

    # The 5-unit priority-0 get completes first, at t=5.
    env.process(producer(env, pool))

    env.run(until=5.1)
    assert get5_p0.triggered
    assert not get4_p0.triggered
    assert not get1_p1_a.triggered
    assert not get1_p1_b.triggered

    env.run(until=9.1)
    assert get4_p0.triggered
    assert not get1_p1_a.triggered
    assert not get1_p1_b.triggered

    env.run(until=10.1)
    assert get1_p1_a.triggered
    assert not get1_p1_b.triggered
365 |
def test_priority_pool_puts(env):
    """Blocked puts are serviced in priority order, not FIFO."""
    def proc(env, pool):
        # Issue puts out of priority order; the pool starts full, so all
        # of them block.
        put_ev = {}
        for priority in (2, 0, 1):
            put_ev[priority] = pool.put(1, priority=priority)
        assert not put_ev[0].triggered
        assert not put_ev[1].triggered
        assert not put_ev[2].triggered

        yield pool.get(1)
        assert put_ev[0].triggered
        assert not put_ev[1].triggered
        assert not put_ev[2].triggered

        yield pool.get(1)
        assert put_ev[1].triggered
        assert not put_ev[2].triggered

        yield pool.get(1)
        assert put_ev[2].triggered

    env.process(proc(env, PriorityPool(env, capacity=2, init=2)))
    env.run()
390 |
--------------------------------------------------------------------------------
/desmod/dot.py:
--------------------------------------------------------------------------------
1 | """Generate graphical representation of component hierarchy.
2 |
3 | Component hierarchy, connections, and processes can be represented graphically
4 | using the `Graphviz`_ `DOT language`_.
5 |
6 | The :func:`component_to_dot()` function produces a DOT language string that can
7 | be rendered into a variety of formats using Graphviz tools. Because the
8 | component hierarchy, connections, and processes are determined dynamically,
9 | :func:`component_to_dot()` must be called with an instantiated component. A
10 | good way to integrate this capabililty into a model is to call
11 | :func:`component_to_dot()` from a component's
12 | :meth:`desmod.component.Component.elab_hook()` method.
13 |
14 | The ``dot`` program from `Graphviz`_ may be used to render the generated DOT
15 | language description of the component hierarchy::
16 |
17 | dot -Tpng -o foo.png foo.dot
18 |
19 | For large component hierarchies, the ``osage`` program (also part of Graphviz)
20 | can produce a more compact layout::
21 |
22 | osage -Tpng -o foo.png foo.dot
23 |
24 | .. _Graphviz: http://graphviz.org/
25 | .. _DOT language: http://graphviz.org/content/dot-language
26 |
27 | """
28 | from itertools import cycle, groupby
29 | from typing import Dict, Iterator, List, Optional, Sequence
30 |
31 | from desmod.component import Component
32 | from desmod.config import ConfigDict
33 |
# Colors handed out round-robin to labeled connection edges so that
# neighboring edges remain visually distinguishable.
_color_cycle = cycle(
    [
        'dodgerblue4',
        'darkgreen',
        'darkorchid',
        'darkslategray',
        'deeppink4',
        'goldenrod4',
        'firebrick4',
    ]
)
45 |
46 |
def generate_dot(top: Component, config: Optional[ConfigDict] = None) -> None:
    """Generate dot files based on 'sim.dot' configuration.

    No files are generated unless ``sim.dot.enable`` is ``True``; the
    remaining ``sim.dot`` configuration items have no effect without it.

    ``sim.dot.colorscheme`` selects the colorscheme used in the generated
    DOT files (see :func:`component_to_dot` for details).

    ``sim.dot.all.file``, ``sim.dot.hier.file``, and ``sim.dot.conn.file``
    name the generated DOT files; setting one to the empty string disables
    that particular file.

    The nominal way to use this function is to call it from the top
    component's :meth:`Component.elab_hook()`. E.g.::

        def elab_hook(self):
            ...
            generate_dot(self)
            ...

    """
    if config is None:
        config = top.env.config

    # Resolve (and install defaults for) all 'sim.dot' keys up front, even
    # when generation is disabled, so the configuration is fully populated.
    enable: bool = config.setdefault('sim.dot.enable', False)
    colorscheme: str = config.setdefault('sim.dot.colorscheme', '')
    all_filename: str = config.setdefault('sim.dot.all.file', 'all.dot')
    hier_filename: str = config.setdefault('sim.dot.hier.file', 'hier.dot')
    conn_filename: str = config.setdefault('sim.dot.conn.file', 'conn.dot')

    if not enable:
        return

    # (filename, show_hierarchy, show_connections, show_processes)
    outputs = [
        (all_filename, True, True, True),
        (hier_filename, True, False, False),
        (conn_filename, False, True, False),
    ]
    for filename, hierarchy, connections, processes in outputs:
        if not filename:
            continue  # an empty name disables this output
        with open(filename, 'w') as dot_file:
            dot_file.write(
                component_to_dot(
                    top,
                    show_hierarchy=hierarchy,
                    show_connections=connections,
                    show_processes=processes,
                    colorscheme=colorscheme,
                )
            )
117 |
118 |
def component_to_dot(
    top: Component,
    show_hierarchy: bool = True,
    show_connections: bool = True,
    show_processes: bool = True,
    colorscheme: str = '',
) -> str:
    """Produce a dot stream from a component hierarchy.

    The DOT language representation of the component instance hierarchy can
    show the component hierarchy, the inter-component connections, components'
    processes, or any combination thereof.

    .. Note::
        The `top` component hierarchy must be initialized and all connections
        must be made in order for `component_to_dot()` to inspect these graphs.
        The :meth:`desmod.component.Component.elab_hook()` method is a good
        place to call `component_to_dot()` since the model is fully elaborated
        at that point and simulation has not yet started.

    :param Component top: Top-level component (instance).
    :param bool show_hierarchy:
        Should the component hierarchy be shown in the graph.
    :param bool show_connections:
        Should the inter-component connections be shown in the graph.
    :param bool show_processes:
        Should each component's processes be shown in the graph.
    :param str colorscheme:
        One of the `Brewer color schemes`_ supported by graphviz, e.g. "blues8"
        or "set27". Each level of the component hierarchy will use a different
        color from the color scheme. N.B. Brewer color schemes have between 3
        and 12 colors; one should be chosen that has at least as many colors as
        the depth of the component hierarchy.
    :returns str:
        DOT language representation of the component/connection graph(s).

    .. _Brewer color schemes: http://graphviz.org/content/color-names#brewer

    """
    indent = '    '
    lines = ['strict digraph M {']
    # Node and cluster declarations for the component tree.
    lines.extend(
        indent + line
        for line in _comp_hierarchy(
            [top], show_hierarchy, show_connections, show_processes, colorscheme
        )
    )
    if show_connections:
        lines.append('')  # blank separator between node and edge sections
        lines.extend(indent + line for line in _comp_connections(top))
    lines.append('}')
    return '\n'.join(lines)
171 |
172 |
def _comp_hierarchy(
    component_group: Sequence[Component],
    show_hierarchy: bool,
    show_connections: bool,
    show_processes: bool,
    colorscheme: str,
    _level: int = 1,
) -> List[str]:
    """Recursively render DOT lines for a group of same-type sibling components.

    Only the first member of `component_group` is descended into; a
    "first..last" range label stands in for the whole group. `_level`
    tracks recursion depth and selects the fill color from `colorscheme`.
    """
    component = component_group[0]
    if len(component_group) == 1:
        label_name = _comp_name(component)
    else:
        label_name = (
            f'{_comp_name(component_group[0])}..{_comp_name(component_group[-1])}'
        )

    # Parents rendered as clusters get dotted borders; leaves (or any node
    # when the hierarchy is hidden) get rounded borders.
    if component._children and show_hierarchy:
        border_style = 'dotted'
    else:
        border_style = 'rounded'
    if colorscheme:
        style = f'style="{border_style},filled",fillcolor="/{colorscheme}/{_level}"'
    else:
        style = 'style=' + border_style

    # Node statement with an HTML-like label; closed with '>];' below.
    node_lines = [f'"{_comp_scope(component)}" [shape=box,{style},label=<']

    label_lines = _comp_label(component, label_name, show_processes)
    if len(label_lines) == 1:
        node_lines[-1] += label_lines[0]
    else:
        node_lines.extend('    ' + line for line in label_lines)
    node_lines[-1] += '>];'

    if not component._children:
        return node_lines
    else:
        if show_hierarchy:
            indent = '    '
            lines = [
                f'subgraph "{_cluster_id(component)}" {{',
                indent + f'{indent}label=<{_cluster_label(component_group)}>',
            ]
            if colorscheme:
                lines.extend(
                    [
                        indent + 'style="filled"',
                        indent + f'fillcolor="/{colorscheme}/{_level}"',
                    ]
                )
        else:
            indent = ''
            lines = []
        if show_connections:
            # The parent is drawn as a node only when connection edges may
            # need to attach to it.
            lines.extend(indent + line for line in node_lines)
        for child_group in _child_type_groups(component):
            lines.extend(
                indent + line
                for line in _comp_hierarchy(
                    child_group,
                    show_hierarchy,
                    show_connections,
                    show_processes,
                    colorscheme,
                    _level + 1,
                )
            )
        if show_hierarchy:
            lines.append('}')
        return lines
243 |
244 |
def _comp_connections(component: Component) -> List[str]:
    """Emit DOT edge statements for a component's connections, then recurse.

    Only the first child of each same-type group is visited, mirroring the
    grouping used when rendering the hierarchy.
    """
    lines = []
    for conn, src, src_conn, conn_obj in component._connections:
        attrs = {}
        if isinstance(conn_obj, Component):
            # The connected object is itself a component: point the edge
            # directly at it.
            src = conn_obj
        elif (
            isinstance(conn_obj, list)
            and conn_obj
            and isinstance(conn_obj[0], Component)
        ):
            # A list of components: represent the whole list by its first
            # element.
            src = conn_obj[0]
        else:
            # Opaque connection object: label the edge with the connection
            # name and give it the next color from the cycle.
            attrs['label'] = f'"{conn}"'
            attrs['color'] = attrs['fontcolor'] = next(_color_cycle)

        lines.append(
            '"{dst_id}" -> "{src_id}" [{attrs}];'.format(
                dst_id=_comp_scope(component),
                src_id=_comp_scope(src),
                attrs=_join_attrs(attrs),
            )
        )

    for child_group in _child_type_groups(component):
        lines.extend(_comp_connections(child_group[0]))

    return lines
273 |
274 |
def _child_type_groups(component: Component) -> Iterator[List[Component]]:
    """Yield the component's children as lists grouped by concrete type.

    Children are sorted by display name first so that groupby() sees each
    run of same-named, same-type siblings contiguously.
    """
    ordered = sorted(component._children, key=_comp_name)
    for _key, members in groupby(ordered, key=lambda child: str(type(child))):
        yield list(members)
279 |
280 |
281 | def _comp_name(component: Component) -> str:
282 | return component.name if component.name else type(component).__name__
283 |
284 |
285 | def _comp_scope(component: Component) -> str:
286 | return component.scope if component.scope else type(component).__name__
287 |
288 |
def _cluster_id(component: Component) -> str:
    """DOT subgraph identifier for a component's cluster.

    Graphviz treats subgraph names beginning with 'cluster' specially,
    drawing a bounding box around their contents.
    """
    return f'cluster_{_comp_scope(component)}'
291 |
292 |
293 | def _cluster_label(component_group: Sequence[Component]) -> str:
294 | if len(component_group) == 1:
295 | return f'{_comp_name(component_group[0])}'
296 | else:
297 | return f'{component_group[0].name}..{component_group[-1].name}'
298 |
299 |
300 | def _comp_label(
301 | component: Component, label_name: str, show_processes: bool
302 | ) -> List[str]:
303 | label_lines = [f'{label_name}
']
304 | if show_processes and component._processes:
305 | label_lines.append('
')
306 | proc_funcs = set()
307 | for proc_func, _, _ in component._processes:
308 | if proc_func not in proc_funcs:
309 | proc_funcs.add(proc_func)
310 | label_lines.append(f'{proc_func.__name__}
')
311 | return label_lines
312 |
313 |
314 | def _join_attrs(attrs: Dict[str, str]) -> str:
315 | return ','.join(f'{k}={v}' for k, v in sorted(attrs.items()))
316 |
--------------------------------------------------------------------------------
/desmod/queue.py:
--------------------------------------------------------------------------------
1 | """Queue classes useful for modeling.
2 |
3 | A queue may be used for inter-process message passing, resource pools,
4 | event sequences, and many other modeling applications. The :class:`~Queue`
5 | class implements a simulation-aware, general-purpose queue useful for these
6 | modeling applications.
7 |
8 | The :class:`~PriorityQueue` class is an alternative to :class:`~Queue` that
9 | dequeues items in priority-order instead of :class:`Queue`'s FIFO discipline.
10 |
11 | """
12 | from heapq import heapify, heappop, heappush
13 | from types import TracebackType
14 | from typing import (
15 | TYPE_CHECKING,
16 | Any,
17 | Callable,
18 | Generic,
19 | Iterable,
20 | List,
21 | NamedTuple,
22 | Optional,
23 | Type,
24 | TypeVar,
25 | Union,
26 | )
27 |
28 | from simpy.core import BoundClass, Environment
29 | from simpy.events import Event
30 |
31 | EventCallback = Callable[[Event], None]
32 |
33 |
class QueuePutEvent(Event):
    """Event that succeeds once its item has been enqueued.

    Instantiating the event registers it with the queue and immediately
    attempts to service pending puts, so it may trigger synchronously.
    The event is also a context manager that cancels itself on exit.
    """

    callbacks: List[EventCallback]

    def __init__(self, queue: 'Queue[ItemType]', item: Any) -> None:
        super().__init__(queue.env)
        self.queue = queue
        self.item = item
        queue._put_waiters.append(self)
        # When this put succeeds: first wake at-least threshold waiters,
        # then any gets blocked waiting for items (order matters).
        self.callbacks.extend([queue._trigger_when_at_least, queue._trigger_get])
        queue._trigger_put()

    def __enter__(self) -> 'QueuePutEvent':
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        # Cancel unconditionally; cancel() is a no-op once triggered.
        self.cancel()
        return None

    def cancel(self) -> None:
        """Withdraw this put if it has not yet been triggered."""
        if not self.triggered:
            self.queue._put_waiters.remove(self)
            self.callbacks = None  # type: ignore[assignment] # noqa: F821
61 |
62 |
class QueueGetEvent(Event):
    """Event that succeeds with the dequeued item once one is available.

    Instantiating the event registers it with the queue and immediately
    attempts to satisfy it, so it may trigger synchronously. The event is
    also a context manager that cancels itself on exit.
    """

    callbacks: List[EventCallback]

    def __init__(self, queue: 'Queue[ItemType]') -> None:
        super().__init__(queue.env)
        self.queue = queue
        queue._get_waiters.append(self)
        # When this get succeeds: first wake at-most threshold waiters,
        # then any puts blocked on a full queue (order matters).
        self.callbacks.extend([queue._trigger_when_at_most, queue._trigger_put])
        queue._trigger_get()

    def __enter__(self) -> 'QueueGetEvent':
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        # Cancel unconditionally; cancel() is a no-op once triggered.
        self.cancel()
        return None

    def cancel(self) -> None:
        """Withdraw this get if it has not yet been triggered."""
        if not self.triggered:
            self.queue._get_waiters.remove(self)
            self.callbacks = None  # type: ignore[assignment] # noqa: F821
89 |
90 |
class QueueWhenAtMostEvent(Event):
    """Event that succeeds when the queue holds at most `num_items` items.

    Waiters are kept in a heap whose root is the most easily satisfied
    (i.e. largest) threshold.
    """

    def __init__(self, queue: 'Queue[ItemType]', num_items: Union[int, float]) -> None:
        super().__init__(queue.env)
        self.queue = queue
        self.num_items = num_items
        heappush(queue._at_most_waiters, self)
        queue._trigger_when_at_most()  # may trigger immediately

    def __lt__(self, other: 'QueueWhenAtMostEvent') -> bool:
        # Intentionally inverted: a LARGER at-most threshold is easier to
        # satisfy, so it must sort first in the waiter heap.
        return self.num_items > other.num_items

    def __enter__(self) -> 'QueueWhenAtMostEvent':
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        # Cancel unconditionally; cancel() is a no-op once triggered.
        self.cancel()
        return None

    def cancel(self) -> None:
        """Withdraw this waiter, restoring the heap invariant."""
        if not self.triggered:
            self.queue._at_most_waiters.remove(self)
            heapify(self.queue._at_most_waiters)
            self.callbacks = None  # type: ignore[assignment] # noqa: F821
119 |
120 |
class QueueWhenAtLeastEvent(Event):
    """Event that succeeds when the queue holds at least `num_items` items.

    Waiters are kept in a heap whose root is the most easily satisfied
    (i.e. smallest) threshold.
    """

    def __init__(self, queue: 'Queue[ItemType]', num_items: Union[int, float]) -> None:
        super().__init__(queue.env)
        self.queue = queue
        self.num_items = num_items
        heappush(queue._at_least_waiters, self)
        queue._trigger_when_at_least()  # may trigger immediately

    def __lt__(self, other: 'QueueWhenAtLeastEvent') -> bool:
        # A SMALLER at-least threshold is easier to satisfy, so it sorts
        # first in the waiter heap.
        return self.num_items < other.num_items

    def __enter__(self) -> 'QueueWhenAtLeastEvent':
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        # Cancel unconditionally; cancel() is a no-op once triggered.
        self.cancel()
        return None

    def cancel(self) -> None:
        """Withdraw this waiter, restoring the heap invariant."""
        if not self.triggered:
            self.queue._at_least_waiters.remove(self)
            heapify(self.queue._at_least_waiters)
            self.callbacks = None  # type: ignore[assignment] # noqa: F821
149 |
150 |
class QueueWhenAnyEvent(QueueWhenAtLeastEvent):
    """Event triggered when the queue holds one or more items."""

    def __init__(self, queue: 'Queue[ItemType]') -> None:
        # "Any" is simply an at-least-one threshold.
        super().__init__(queue, num_items=1)
154 |
155 |
class QueueWhenFullEvent(QueueWhenAtLeastEvent):
    """Event triggered when the queue reaches its capacity."""

    def __init__(self, queue: 'Queue[ItemType]') -> None:
        # "Full" means at least `capacity` items are enqueued.
        super().__init__(queue, num_items=queue.capacity)
159 |
160 |
class QueueWhenNotFullEvent(QueueWhenAtMostEvent):
    """Event triggered when the queue has room for at least one more item."""

    def __init__(self, queue: 'Queue[ItemType]') -> None:
        # "Not full" means at most `capacity - 1` items are enqueued.
        super().__init__(queue, num_items=queue.capacity - 1)
164 |
165 |
class QueueWhenEmptyEvent(QueueWhenAtMostEvent):
    """Event triggered when the queue holds no items."""

    def __init__(self, queue: 'Queue[ItemType]') -> None:
        # "Empty" means at most zero items are enqueued.
        super().__init__(queue, num_items=0)
169 |
170 |
171 | ItemType = TypeVar('ItemType')
172 |
173 |
174 | class Queue(Generic[ItemType]):
175 | """Simulation queue of arbitrary items.
176 |
177 | `Queue` is similar to :class:`simpy.Store`. It provides a simulation-aware
178 | first-in first-out (FIFO) queue useful for passing messages between
179 | simulation processes or managing a pool of objects needed by multiple
180 | processes.
181 |
182 | Items are enqueued and dequeued using :meth:`put()` and :meth:`get()`.
183 |
184 | :param env: Simulation environment.
185 | :param capacity: Capacity of the queue; infinite by default.
186 | :param hard_cap:
187 | If specified, the queue overflows when the `capacity` is reached.
188 | :param items: Optional sequence of items to pre-populate the queue.
189 | :param name: Optional name to associate with the queue.
190 |
191 | """
192 |
    def __init__(
        self,
        env: Environment,
        capacity: Union[int, float] = float('inf'),
        hard_cap: bool = False,
        items: Iterable[ItemType] = (),
        name: Optional[str] = None,
    ) -> None:
        self.env = env
        #: Capacity of the queue (maximum number of items).
        self.capacity = capacity
        self._hard_cap = hard_cap
        self.items: List[ItemType] = list(items)
        self.name = name
        # Pending put/get events, serviced FIFO as space/items allow.
        self._put_waiters: List[QueuePutEvent] = []
        self._get_waiters: List[QueueGetEvent] = []
        # Threshold waiters, kept as heaps keyed on each event's num_items.
        self._at_most_waiters: List[QueueWhenAtMostEvent] = []
        self._at_least_waiters: List[QueueWhenAtLeastEvent] = []
        # Optional callables invoked after each successful put/get.
        self._put_hook: Optional[Callable[[], Any]] = None
        self._get_hook: Optional[Callable[[], Any]] = None
        # Bind the event-class descriptors (put/get/when_*) to this instance.
        BoundClass.bind_early(self)
214 |
    @property
    def size(self) -> int:
        """Number of items currently in the queue."""
        return len(self.items)
219 |
220 | @property
221 | def remaining(self) -> Union[int, float]:
222 | """Remaining queue capacity."""
223 | return self.capacity - len(self.items)
224 |
225 | @property
226 | def is_empty(self) -> bool:
227 | """Indicates whether the queue is empty."""
228 | return not self.items
229 |
230 | @property
231 | def is_full(self) -> bool:
232 | """Indicates whether the queue is full."""
233 | return len(self.items) >= self.capacity
234 |
    def peek(self) -> ItemType:
        """Peek at the next item in the queue.

        Raises :exc:`IndexError` if the queue is empty.
        """
        return self.items[0]
238 |
    # Static type checkers see fully-typed stub signatures; at runtime the
    # same names are BoundClass descriptors that construct the matching
    # event classes with this queue bound as the first argument.
    if TYPE_CHECKING:

        def put(self, item: ItemType) -> QueuePutEvent:
            """Enqueue an item on the queue."""
            ...

        def get(self) -> QueueGetEvent:
            """Dequeue an item from the queue."""
            ...

        def when_at_least(self, num_items: int) -> QueueWhenAtLeastEvent:
            """Return an event triggered when the queue has at least n items."""
            ...

        def when_at_most(self, num_items: int) -> QueueWhenAtMostEvent:
            """Return an event triggered when the queue has at most n items."""
            ...

        def when_any(self) -> QueueWhenAnyEvent:
            """Return an event triggered when the queue is non-empty."""
            ...

        def when_full(self) -> QueueWhenFullEvent:
            """Return an event triggered when the queue becomes full."""
            ...

        def when_not_full(self) -> QueueWhenNotFullEvent:
            """Return an event triggered when the queue becomes not full."""
            ...

        def when_empty(self) -> QueueWhenEmptyEvent:
            """Return an event triggered when the queue becomes empty."""
            ...

    else:
        put = BoundClass(QueuePutEvent)
        get = BoundClass(QueueGetEvent)
        when_at_least = BoundClass(QueueWhenAtLeastEvent)
        when_at_most = BoundClass(QueueWhenAtMostEvent)
        when_any = BoundClass(QueueWhenAnyEvent)
        when_full = BoundClass(QueueWhenFullEvent)
        when_not_full = BoundClass(QueueWhenNotFullEvent)
        when_empty = BoundClass(QueueWhenEmptyEvent)
282 |
    def _enqueue_item(self, item: ItemType) -> None:
        # FIFO insertion at the tail; subclasses (e.g. PriorityQueue)
        # override this to impose a different ordering.
        self.items.append(item)
285 |
    def _dequeue_item(self) -> ItemType:
        # Pop from the head of the list. NOTE(review): list.pop(0) is O(n);
        # `items` stays a plain list (not a deque) so PriorityQueue can
        # heapify/heappush the very same attribute.
        return self.items.pop(0)
288 |
289 | def _trigger_put(self, _: Optional[Event] = None) -> None:
290 | while self._put_waiters:
291 | if len(self.items) < self.capacity:
292 | put_ev = self._put_waiters.pop(0)
293 | self._enqueue_item(put_ev.item)
294 | put_ev.succeed()
295 | if self._put_hook:
296 | self._put_hook()
297 | elif self._hard_cap:
298 | raise OverflowError()
299 | else:
300 | break
301 |
302 | def _trigger_get(self, _: Optional[Event] = None) -> None:
303 | while self._get_waiters and self.items:
304 | get_ev = self._get_waiters.pop(0)
305 | item = self._dequeue_item()
306 | get_ev.succeed(item)
307 | if self._get_hook:
308 | self._get_hook()
309 |
310 | def _trigger_when_at_least(self, _: Optional[Event] = None) -> None:
311 | while (
312 | self._at_least_waiters and self.size >= self._at_least_waiters[0].num_items
313 | ):
314 | when_at_least_ev = heappop(self._at_least_waiters)
315 | when_at_least_ev.succeed()
316 |
317 | def _trigger_when_at_most(self, _: Optional[Event] = None) -> None:
318 | while self._at_most_waiters and self.size <= self._at_most_waiters[0].num_items:
319 | at_most_ev = heappop(self._at_most_waiters)
320 | at_most_ev.succeed()
321 |
322 | def __repr__(self) -> str:
323 | return (
324 | f'{self.__class__.__name__}('
325 | f'name={self.name!r} size={self.size} capacity={self.capacity})'
326 | )
327 |
328 |
class PriorityItem(NamedTuple):
    """Wrap items with explicit priority for use with :class:`~PriorityQueue`.

    :param priority:
        Orderable priority value. Smaller values are dequeued first.
    :param item:
        Arbitrary item. Only the `priority` determines dequeue order, so the
        `item` itself does not have to be orderable.

    """

    priority: Any
    item: Any

    def __lt__(  # type: ignore[override] # noqa: F821
        self, other: 'PriorityItem'
    ) -> bool:
        # Compare on priority only; the default NamedTuple ordering would
        # also compare `item`, which is not required to be orderable.
        return self.priority < other.priority
347 |
348 |
class PriorityQueue(Queue[ItemType]):
    """Queue variant whose items are dequeued in ascending priority order.

    Items stored in a `PriorityQueue` must be orderable (i.e. implement
    :meth:`~object.__lt__`). Unorderable items can be wrapped in a
    :class:`~PriorityItem` to supply an explicit priority.

    The item that compares least is always the next one dequeued.

    """

    def __init__(
        self,
        env: Environment,
        capacity: Union[int, float] = float('inf'),
        hard_cap: bool = False,
        items: Iterable[ItemType] = (),
        name: Optional[str] = None,
    ) -> None:
        super().__init__(env, capacity, hard_cap, items, name)
        # Any initial items must be re-ordered into a valid heap.
        heapify(self.items)

    def _enqueue_item(self, item: ItemType) -> None:
        # Heap insertion keeps the least item at the root.
        heappush(self.items, item)

    def _dequeue_item(self) -> ItemType:
        # Pop the least (highest-priority) item.
        return heappop(self.items)
376 |
--------------------------------------------------------------------------------
/desmod/progress.py:
--------------------------------------------------------------------------------
1 | from contextlib import contextmanager
2 | from datetime import datetime, timedelta
3 | from queue import Queue
4 | from typing import IO, TYPE_CHECKING, Dict, Generator, List, Optional, Set, Tuple, Union
5 | import sys
6 | import timeit
7 |
8 | import simpy
9 |
10 | from desmod.config import ConfigDict
11 | from desmod.timescale import TimeValue, parse_time, scale_time
12 |
13 | try:
14 | import progressbar
15 | except ImportError:
16 | progressbar = None
17 | try:
18 | import colorama
19 | except ImportError:
20 | colorama = None
21 |
22 | if TYPE_CHECKING:
23 | from desmod.simulation import SimEnvironment
24 |
25 | ProgressTuple = Tuple[
26 | Optional[int], # simulation index
27 | Union[int, float], # now
28 | Optional[Union[int, float]], # t_stop
29 | TimeValue, # timescale
30 | ]
31 |
32 |
@contextmanager
def standalone_progress_manager(env: 'SimEnvironment') -> Generator[None, None, None]:
    """Context manager displaying progress for a standalone simulation.

    Progress display is controlled by the 'sim.progress.enable' config key.
    When enabled, a progress bar is used if stderr is a TTY and the optional
    `progressbar` package is importable; otherwise a plain textual progress
    line is periodically written to stderr.

    :param env: The simulation environment whose progress is displayed.

    """
    enabled: bool = env.config.setdefault('sim.progress.enable', False)
    # Default to 0 ("no width limit"). Calling setdefault() without a default
    # would store None in the config and violate the declared int type.
    max_width: int = env.config.setdefault('sim.progress.max_width', 0)
    period_s = _get_interval_period_s(env.config)

    if enabled:
        if sys.stderr.isatty() and progressbar:
            pbar = _get_standalone_pbar(env, max_width, sys.stderr)
            env.process(_standalone_pbar_process(env, pbar, period_s))
            try:
                yield None
            finally:
                pbar.finish()
        else:
            env.process(_standalone_display_process(env, period_s, sys.stderr))
            try:
                yield None
            finally:
                # Final progress line (100%), terminated with a newline.
                _print_progress(
                    env.sim_index,
                    env.now,
                    env.now,
                    env.timescale,
                    end='\n',
                    fd=sys.stderr,
                )
    else:
        yield None
62 |
63 |
def _get_interval_period_s(config: ConfigDict) -> Union[int, float]:
    """Return the configured progress update period, scaled to seconds."""
    period = config.setdefault('sim.progress.update_period', '1 s')
    return scale_time(parse_time(period), (1, 's'))
67 |
68 |
def _standalone_display_process(
    env: 'SimEnvironment', period_s: Union[int, float], fd: IO
) -> Generator[simpy.Timeout, None, None]:
    """Simulation process that periodically prints a progress line to `fd`."""
    sim_interval = 1.0
    line_end = '\n' if not fd.isatty() else '\r'
    while True:
        sim_index, now, t_stop, timescale = env.get_progress()
        _print_progress(sim_index, now, t_stop, timescale, end=line_end, fd=fd)
        wall_t0 = timeit.default_timer()
        yield env.timeout(sim_interval)
        wall_t1 = timeit.default_timer()
        # Rescale the simulation-time interval so updates occur roughly
        # every period_s of wall-clock time.
        sim_interval *= period_s / (wall_t1 - wall_t0)
81 |
82 |
83 | def _print_progress(
84 | sim_index: Optional[int],
85 | now: Union[int, float],
86 | t_stop: Optional[Union[int, float]],
87 | timescale: TimeValue,
88 | end: str,
89 | fd: IO,
90 | ) -> None:
91 | parts = []
92 | if sim_index:
93 | parts.append(f'Sim {sim_index}')
94 | magnitude, units = timescale
95 | if magnitude == 1:
96 | parts.append(f'{now:6.0f} {units}')
97 | else:
98 | parts.append(f'{magnitude}x{now:6.0f} {units}')
99 | if t_stop:
100 | parts.append(f'({100 * now / t_stop:.0f}%)')
101 | else:
102 | parts.append('(N/A%)')
103 | print(*parts, end=end, file=fd)
104 | fd.flush()
105 |
106 |
def _get_standalone_pbar(
    env: 'SimEnvironment', max_width: int, fd: IO
) -> progressbar.ProgressBar:
    """Build a progress bar for a standalone simulation (stop time unknown)."""
    bar = progressbar.ProgressBar(
        fd=fd,
        min_value=0,
        max_value=progressbar.UnknownLength,
        widgets=_get_progressbar_widgets(
            env.sim_index, env.timescale, know_stop_time=False
        ),
    )
    # Optionally clamp the bar to a maximum terminal width.
    if max_width and bar.term_width > max_width:
        bar.term_width = max_width
    return bar
123 |
124 |
def _standalone_pbar_process(
    env: 'SimEnvironment', pbar: progressbar.ProgressBar, period_s: Union[int, float]
) -> Generator[simpy.Timeout, None, None]:
    """Simulation process that keeps a standalone progress bar up to date."""
    sim_interval = 1.0
    while True:
        sim_index, now, t_stop, timescale = env.get_progress()
        # Once the stop time becomes known, switch to definite-length widgets.
        if t_stop and pbar.max_value != t_stop:
            pbar.max_value = t_stop
            pbar.widgets = _get_progressbar_widgets(
                sim_index, timescale, know_stop_time=True
            )
        pbar.update(now)
        wall_t0 = timeit.default_timer()
        yield env.timeout(sim_interval)
        wall_t1 = timeit.default_timer()
        # Rescale so bar updates happen about once per period_s of wall time.
        sim_interval *= period_s / (wall_t1 - wall_t0)
141 |
142 |
def _get_progressbar_widgets(
    sim_index: Optional[int], timescale: TimeValue, know_stop_time: bool
) -> List[progressbar.widgets.WidgetBase]:
    """Assemble the widget list for a single simulation's progress bar."""
    magnitude, units = timescale
    if magnitude == 1:
        time_label = f'%(value)6.0f {units}|'
    else:
        time_label = f'{magnitude}x%(value)6.0f {units}|'

    widgets: List[progressbar.widgets.WidgetBase] = []
    if sim_index is not None:
        widgets.append(f'Sim {sim_index:3}|')
    widgets.append(progressbar.FormatLabel(time_label))
    widgets.append(progressbar.Percentage())
    # Show a bouncing bar until the stop time is known.
    widgets.append(progressbar.Bar() if know_stop_time else progressbar.BouncingBar())
    widgets.append(progressbar.ETA())
    return widgets
168 |
169 |
def get_multi_progress_manager(progress_queue: Optional['Queue[ProgressTuple]']):
    """Return a context-manager factory that reports progress to a queue.

    When `progress_queue` is None, the returned context manager is a no-op.
    """

    @contextmanager
    def progress_producer(env):
        if not progress_queue:
            yield None
            return
        period_s = _get_interval_period_s(env.config)
        env.process(_progress_enqueue_process(env, period_s, progress_queue))
        try:
            yield None
        finally:
            # Final tuple with now == t_stop marks this simulation complete.
            progress_queue.put((env.sim_index, env.now, env.now, env.timescale))

    return progress_producer
184 |
185 |
def _progress_enqueue_process(
    env: 'SimEnvironment',
    period_s: Union[int, float],
    progress_queue: 'Queue[ProgressTuple]',
) -> Generator[simpy.Timeout, None, None]:
    """Simulation process feeding progress tuples into `progress_queue`."""
    sim_interval = 1.0
    while True:
        progress_queue.put(env.get_progress())
        wall_t0 = timeit.default_timer()
        yield env.timeout(sim_interval)
        wall_t1 = timeit.default_timer()
        # Rescale so enqueues happen about once per period_s of wall time.
        sim_interval *= period_s / (wall_t1 - wall_t0)
198 |
199 |
def consume_multi_progress(
    progress_queue: 'Queue[ProgressTuple]',
    num_workers: int,
    num_simulations: int,
    max_width: int,
) -> None:
    """Consume worker progress tuples and render progress on stderr.

    The richest display the environment supports is chosen: per-worker
    progress bars (TTY + progressbar + colorama), a single overall bar
    (TTY + progressbar), or plain text otherwise.
    """
    fd = sys.stderr
    try:
        if fd.isatty() and progressbar and colorama:
            _consume_multi_display_multi_pbar(
                progress_queue, num_workers, num_simulations, max_width, fd
            )
        elif fd.isatty() and progressbar:
            _consume_multi_display_single_pbar(
                progress_queue, num_workers, num_simulations, max_width, fd
            )
        else:
            _consume_multi_display_simple(
                progress_queue, num_workers, num_simulations, max_width, fd
            )
    except KeyboardInterrupt:
        pass
227 |
228 |
def _consume_multi_display_simple(
    progress_queue: 'Queue[ProgressTuple]',
    num_workers: int,
    num_simulations: int,
    max_width: int,
    fd: IO,
) -> None:
    """Render plain-text progress for a batch of simulations."""
    t_begin = datetime.now()
    isatty = fd.isatty()
    line_end = '\r' if isatty else '\n'
    try:
        completed: Set[Optional[int]] = set()
        _print_simple(len(completed), num_simulations, timedelta(), line_end, fd)
        last_print = t_begin
        while len(completed) < num_simulations:
            progress: ProgressTuple = progress_queue.get()  # type: ignore
            sim_index, now, t_stop, timescale = progress
            stamp = datetime.now()
            finished = now == t_stop
            if finished:
                completed.add(sim_index)
            # Always report a completion; otherwise throttle interactive
            # updates to at most one per second.
            if finished or (isatty and (stamp - last_print).total_seconds() >= 1):
                _print_simple(
                    len(completed), num_simulations, stamp - t_begin, line_end, fd
                )
                last_print = stamp
    finally:
        if isatty:
            print(file=fd)
259 |
260 |
261 | def _print_simple(
262 | num_completed: int, num_simulations: int, td: timedelta, end: str, fd: IO
263 | ) -> None:
264 | if fd.closed:
265 | return
266 | print(
267 | timedelta(td.days, td.seconds),
268 | num_completed,
269 | 'of',
270 | num_simulations,
271 | 'simulations',
272 | f'({num_completed / num_simulations:.0%})',
273 | end=end,
274 | file=fd,
275 | )
276 | fd.flush()
277 |
278 |
def _consume_multi_display_single_pbar(
    progress_queue: 'Queue[ProgressTuple]',
    num_workers: int,
    num_simulations: int,
    max_width: int,
    fd: IO,
):
    """Render one overall progress bar counting completed simulations."""
    overall_pbar = _get_overall_pbar(num_simulations, max_width, fd=fd)
    try:
        done: Set[Optional[int]] = set()
        while len(done) < num_simulations:
            sim_index, now, t_stop, timescale = progress_queue.get()
            # A tuple with now == t_stop marks that simulation as finished.
            if now == t_stop:
                done.add(sim_index)
            overall_pbar.update(len(done))
    finally:
        overall_pbar.finish()
297 |
298 |
def _consume_multi_display_multi_pbar(
    progress_queue: 'Queue[ProgressTuple]',
    num_workers: int,
    num_simulations: int,
    max_width: int,
    fd: IO,
) -> None:
    """Render one progress bar per in-flight simulation plus an overall bar.

    Per-simulation bars are redrawn in place by moving the cursor up with
    ANSI escape sequences; the overall bar is printed in bold beneath them.
    """
    # In order to display multiple progress bars, we need to manipulate the
    # terminal/console to move up lines. Colorama is used to wrap stderr such
    # that ANSI escape sequences are mapped to equivalent win32 API calls.
    fd = colorama.AnsiToWin32(fd).stream

    def ansi_up(n):
        # ESC[nA moves the cursor up n lines.
        return b'\x1b[{}A'.decode('latin1').format(n)

    ansi_bold = b'\x1b[1m'.decode('latin1')
    ansi_norm = b'\x1b[0m'.decode('latin1')

    overall_pbar = _get_overall_pbar(num_simulations, max_width, fd)

    try:
        # Maps sim_index -> its progress bar; a finished bar is replaced
        # with None until its entry is evicted.
        worker_progress: Dict[Optional[int], progressbar.ProgressBar] = {}
        completed: Set[Optional[int]] = set()
        while len(completed) < num_simulations:
            progress: ProgressTuple = progress_queue.get()  # type: ignore
            sim_index, now, t_stop, timescale = progress

            # A tuple with now == t_stop marks that simulation as finished.
            if now == t_stop:
                completed.add(sim_index)

            # Move the cursor back up to the first per-simulation bar row.
            if worker_progress:
                print(ansi_up(len(worker_progress)), end='', file=fd)

            if sim_index in worker_progress:
                for pindex, pbar in worker_progress.items():
                    if sim_index == pindex and pbar:
                        if now == t_stop:
                            pbar.finish()
                            # Keep the entry (as None) so row accounting
                            # stays consistent until it is evicted below.
                            worker_progress[sim_index] = None
                        else:
                            # Switch to definite-length widgets once the
                            # stop time becomes known.
                            if t_stop and pbar.max_value != t_stop:
                                pbar.max_value = t_stop
                                pbar.widgets = _get_progressbar_widgets(
                                    sim_index, timescale, know_stop_time=True
                                )
                            pbar.update(now)
                        print(file=fd)
                    else:
                        # Leave other bars' rows untouched; just advance the
                        # cursor one line.
                        print(file=fd)
            else:
                # New sim_index: evict one finished (None) entry, if any,
                # before allocating a bar for the new simulation.
                for pindex, pbar in worker_progress.items():
                    if pbar is None:
                        worker_progress.pop(pindex)
                        break
                print('\n' * len(worker_progress), file=fd)
                pbar = progressbar.ProgressBar(
                    fd=fd,
                    term_width=overall_pbar.term_width,
                    min_value=0,
                    max_value=(progressbar.UnknownLength if t_stop is None else t_stop),
                    widgets=_get_progressbar_widgets(
                        sim_index, timescale, know_stop_time=t_stop is not None
                    ),
                )
                worker_progress[sim_index] = pbar

            # The overall bar is rendered in bold on the bottom row.
            print(ansi_bold, end='', file=fd)
            overall_pbar.update(len(completed))
            print(ansi_norm, end='', file=fd)
    finally:
        print(ansi_bold, end='', file=fd)
        overall_pbar.finish()
        print(ansi_norm, end='', file=fd)
372 |
373 |
def _get_overall_pbar(
    num_simulations: int, max_width: int, fd: IO
) -> progressbar.ProgressBar:
    """Build the overall progress bar spanning all simulations."""
    widgets = [
        progressbar.FormatLabel('%(value)s of %(max_value)s '),
        'simulations (',
        progressbar.Percentage(),
        ') ',
        progressbar.Bar(),
        progressbar.ETA(),
    ]
    bar = progressbar.ProgressBar(
        fd=fd, min_value=0, max_value=num_simulations, widgets=widgets
    )
    # Optionally clamp the bar to a maximum terminal width.
    if max_width and bar.term_width > max_width:
        bar.term_width = max_width
    return bar
395 |
--------------------------------------------------------------------------------
/desmod/component.py:
--------------------------------------------------------------------------------
1 | """Component is the building block for desmod models.
2 |
3 | Hierarchy
4 | ---------
5 |
A desmod model consists of a directed acyclic graph (DAG) of
7 | :class:`Component` subclasses. Each Component is composed of zero or more child
8 | Components. A single top-level Component class is passed to the
9 | :func:`~desmod.simulation.simulate()` function to initiate simulation.
10 |
11 | The :class:`Component` hierarchy does not define the behavior of a model, but
12 | instead exists as a tool to build large models out of composable and
13 | encapsulated pieces.
14 |
15 | Connections
16 | -----------
17 |
18 | Components connect to other components via connection objects. Each component
is responsible for declaring the names of external connections as well as making
20 | connections for its child components. The final network of inter-component
21 | connections is neither directed (a connection object may enable two-way
22 | communication), acyclic (groups of components may form cyclical connections),
23 | nor constrained to match the component hierarchy.
24 |
25 | Ultimately, a connection between two components means that each component
26 | instance has a [pythonic] reference to the connection object.
27 |
In the spirit of Python, the types of connection objects are flexible and dynamic.
29 | A connection object may be of any type--it is up to the connected components to
30 | cooperatively decide how to use the connection object for communication. That
31 | said, some object types are more useful than others for connections. Some
32 | useful connection object types include:
33 |
34 | * :class:`desmod.queue.Queue`
35 | * :class:`simpy.resources.resource.Resource`
36 |
37 | Processes
38 | ---------
39 |
40 | A component may have zero or more simulation processes
41 | (:class:`simpy.events.Process`). It is these processes that give a model its
42 | simulation-time behavior. The process methods declared by components are
43 | started at simulation time. These "standing" processes may dynamically launch
additional processes using `self.env.process()`.
45 |
46 | Use Cases
47 | ---------
48 |
Given the flexibility of components to have zero or more children, zero or more
50 | processes, and zero or more connections, it can be helpful to give names to
51 | the various roles components may play in a model.
52 |
53 | * Structural Component -- a component with child components, but no processes
54 | * Behavioral Component -- a component with processes, but no child components
55 | * Hybrid Component -- a component with child components and processes
* State Component -- a component with neither children nor processes
57 |
58 | It is typical for the top-level component in a model to be purely structural,
59 | while behavioral components are leaves in the model DAG.
60 |
A component with neither children nor processes may still be useful. Such a
62 | component could, for example, be used as a connection object.
63 |
64 | """
65 |
66 | from typing import Any, Callable, Dict, Generator, List, Optional, Set, Tuple
67 |
68 | import simpy
69 |
70 | from .simulation import ResultDict, SimEnvironment
71 |
72 | ProcessGenerator = Callable[..., Generator[simpy.Event, Any, None]]
73 |
74 |
class ConnectError(Exception):
    """Raised when inter-component connections cannot be resolved."""

    pass
77 |
78 |
class Component:
    """Building block for composing models.

    This class is meant to be subclassed. Component subclasses must declare
    their children, connections, and processes.

    :param Component parent: Parent component or None for top-level Component.
    :param SimEnvironment env: SimPy simulation environment.
    :param str name: Optional name of Component instance.
    :param int index:
        Optional index of Component. This is used when multiple sibling
        components of the same type are instantiated as an array/list.

    """

    #: Short/friendly name used in the scope (class attribute).
    base_name: str = ''

    def __init__(
        self,
        parent: Optional['Component'],
        env: Optional[SimEnvironment] = None,
        name: Optional[str] = None,
        index: Optional[int] = None,
    ) -> None:
        #: The simulation environment; a :class:`SimEnvironment` instance.
        self.env: SimEnvironment
        if env is not None:
            self.env = env
        elif parent is not None:
            self.env = parent.env
        else:
            raise AssertionError('either parent or env must be non-None')

        #: The component name (str).
        self.name = (self.base_name if name is None else name) + (
            '' if index is None else str(index)
        )

        #: Index of Component instance within group of sibling instances.
        #: Will be None for un-grouped Components.
        self.index = index

        #: String indicating the full scope of Component instance in the
        #: Component DAG.
        self.scope: str
        if parent is None or not parent.scope:
            self.scope = self.name
        else:
            self.scope = f'{parent.scope}.{self.name}'

        if parent:
            parent._children.append(self)

        self._children: List['Component'] = []
        self._processes: List[
            Tuple[ProcessGenerator, Tuple[Any, ...], Dict[str, Any]]
        ] = []
        self._connections: List[Any] = []
        self._not_connected: Set[str] = set()

        #: Log an error message.
        self.error: Callable[..., None] = self.env.tracemgr.get_trace_function(
            self.scope, log={'level': 'ERROR'}
        )
        #: Log a warning message.
        self.warn: Callable[..., None] = self.env.tracemgr.get_trace_function(
            self.scope, log={'level': 'WARNING'}
        )
        #: Log an informative message.
        self.info: Callable[..., None] = self.env.tracemgr.get_trace_function(
            self.scope, log={'level': 'INFO'}
        )
        #: Log a debug message.
        self.debug: Callable[..., None] = self.env.tracemgr.get_trace_function(
            self.scope, log={'level': 'DEBUG'}
        )

    def add_process(self, g: ProcessGenerator, *args: Any, **kwargs: Any) -> None:
        """Add a process method to be run at simulation-time.

        Subclasses should call this in `__init__()` to declare the process
        methods to be started at simulation-time.

        :param g:
            Process generator function; typically a bound method of the
            Component subclass.
        :param args: arguments to pass to `g`.
        :param kwargs: keyword arguments to pass to `g`.

        """
        self._processes.append((g, args, kwargs))

    def add_processes(self, *generators: ProcessGenerator) -> None:
        """Declare multiple processes at once.

        This is a convenience wrapper for :meth:`add_process()` that may be
        used to quickly declare a list of process methods that do not require
        any arguments.

        :param generators: argument-less process generator functions (methods).

        """
        for g in generators:
            self.add_process(g)

    def add_connections(self, *connection_names: str) -> None:
        """Declare names of externally-provided connection objects.

        The named connections must be connected (assigned) by an ancestor at
        elaboration time.

        """
        self._not_connected.update(connection_names)

    def connect(
        self,
        dst: 'Component',
        dst_connection: Any,
        src: Optional['Component'] = None,
        src_connection: Optional[Any] = None,
        conn_obj: Optional[Any] = None,
    ) -> None:
        """Assign connection object from source to destination component.

        At elaboration-time, Components must call `connect()` to make the
        connections declared by descendant (child, grandchild, etc.)
        components.

        .. Note::

            :meth:`connect()` is nominally called from
            :meth:`connect_children()`.

        :param Component dst:
            Destination component being assigned the connection object.
        :param str dst_connection:
            Destination's name for the connection object.
        :param Component src:
            Source component providing the connection object. If omitted, the
            source component is assumed to be `self`.
        :param str src_connection:
            Source's name for the connection object. If omitted,
            `dst_connection` is used.
        :param conn_obj:
            The connection object to be assigned to the destination component.
            This parameter may typically be omitted in which case the
            connection object is resolved using `src` and `src_connection`.
        :raises ConnectError:
            If the connection object cannot be resolved from `src`, or if
            `dst` did not declare `dst_connection`.

        """
        if src is None:
            src = self
        if src_connection is None:
            src_connection = dst_connection
        if conn_obj is None:
            if hasattr(src, src_connection):
                conn_obj = getattr(src, src_connection)
            else:
                # Note the trailing space: adjacent f-strings are
                # concatenated, so omitting it would run words together.
                raise ConnectError(
                    f'src "{src.scope}" (class {type(src).__name__}) does not have attr '
                    f'"{src_connection}"'
                )
        if dst_connection in dst._not_connected:
            setattr(dst, dst_connection, conn_obj)
            dst._not_connected.remove(dst_connection)
            dst._connections.append((dst_connection, src, src_connection, conn_obj))
        else:
            raise ConnectError(
                f'dst "{dst.scope}" (class {type(dst).__name__}) does not declare '
                f'connection "{dst_connection}"'
            )

    def connect_children(self) -> None:
        """Make connections for descendant components.

        This method must be overridden in Component subclasses that need to
        make any connections on behalf of its descendant components.
        Connections are made using :meth:`connect()`.

        """
        if any(child._not_connected for child in self._children):
            raise ConnectError(
                '{0} has unconnected children; implement '
                '{0}.connect_children()'.format(type(self).__name__)
            )

    def auto_probe(self, name: str, target: Any = None, **hints: Any) -> None:
        if target is None:
            target = getattr(self, name)
        target_scope = '.'.join([self.scope, name])
        self.env.tracemgr.auto_probe(target_scope, target, **hints)

    def get_trace_function(self, name: str, **hints: Any) -> Callable[..., None]:
        target_scope = '.'.join([self.scope, name])
        return self.env.tracemgr.get_trace_function(target_scope, **hints)

    @classmethod
    def pre_init(cls, env: SimEnvironment) -> None:
        """Override-able class method called prior to model initialization.

        Component subclasses may override this classmethod to gain access
        to the simulation environment (`env`) prior to :meth:`__init__()` being
        called.

        """
        pass

    def elaborate(self) -> None:
        """Recursively elaborate the model.

        The elaboration phase prepares the model for simulation. Descendant
        connections are made and components' processes are started at
        elaboration-time.

        :raises ConnectError: If any descendant has unconnected connections.

        """
        self.connect_children()
        for child in self._children:
            if child._not_connected:
                raise ConnectError(
                    f'{child.scope}.{child._not_connected.pop()} not connected'
                )
            child.elaborate()
        for proc, args, kwargs in self._processes:
            self.env.process(proc(*args, **kwargs))
        self.elab_hook()

    def elab_hook(self) -> None:
        """Hook called after elaboration and before simulation phase.

        Component subclasses may override :meth:`elab_hook()` to inject
        behavior after elaboration, but prior to simulation.

        """
        pass

    def post_simulate(self) -> None:
        """Recursively run post-simulation hooks."""
        for child in self._children:
            child.post_simulate()
        self.post_sim_hook()

    def post_sim_hook(self) -> None:
        """Hook called after simulation completes.

        Component subclasses may override `post_sim_hook()` to inject behavior
        after the simulation completes successfully. Note that
        `post_sim_hook()` will not be called if the simulation terminates with
        an unhandled exception.

        """
        pass

    def get_result(self, result: ResultDict) -> None:
        """Recursively compose simulation result dict.

        Upon successful completion of the simulation phase, each component in
        the model has the opportunity to add-to or modify the `result` dict via
        its :meth:`get_result_hook` method.

        The fully composed `result` dict is returned by :func:`simulate`.

        :param dict result: Result dictionary to be modified.

        """
        for child in self._children:
            child.get_result(result)
        self.get_result_hook(result)

    def get_result_hook(self, result: ResultDict) -> None:
        """Hook called after result is composed by descendant components."""
        pass
349 |
--------------------------------------------------------------------------------
/desmod/simulation.py:
--------------------------------------------------------------------------------
1 | """Simulation model with batteries included."""
2 |
3 | from contextlib import closing
4 | from multiprocessing import Process, Queue, cpu_count
5 | from pprint import pprint
6 | from threading import Thread
7 | from types import TracebackType
8 | from typing import (
9 | TYPE_CHECKING,
10 | Any,
11 | Callable,
12 | Dict,
13 | List,
14 | Optional,
15 | Sequence,
16 | Type,
17 | Union,
18 | )
19 | import json
20 | import os
21 | import random
22 | import shutil
23 | import timeit
24 |
25 | import simpy
26 | import yaml
27 |
28 | from desmod.config import ConfigDict, ConfigFactor, factorial_config
29 | from desmod.progress import (
30 | ProgressTuple,
31 | consume_multi_progress,
32 | get_multi_progress_manager,
33 | standalone_progress_manager,
34 | )
35 | from desmod.timescale import parse_time, scale_time
36 | from desmod.tracer import TraceManager
37 |
38 | if TYPE_CHECKING:
39 | from desmod.component import Component # noqa: F401
40 |
41 | ResultDict = Dict[str, Any]
42 |
43 |
class SimEnvironment(simpy.Environment):
    """Simulation Environment.

    The :class:`SimEnvironment` class is a :class:`simpy.Environment` subclass
    that adds some useful features:

    - Access to the configuration dictionary (`config`).
    - Access to a seeded pseudo-random number generator (`rand`).
    - Access to the simulation timescale (`timescale`).
    - Access to the simulation duration (`duration`).

    Some models may need to share additional state with all its
    :class:`desmod.component.Component` instances. SimEnvironment may be
    subclassed to add additional members to achieve this sharing.

    :param dict config: A fully-initialized configuration dictionary.

    """

    def __init__(self, config: ConfigDict) -> None:
        super().__init__()
        #: The configuration dictionary.
        self.config = config

        #: The pseudo-random number generator; an instance of
        #: :class:`random.Random`.
        self.rand = random.Random()
        seed = config.setdefault('sim.seed', None)
        # version=1 selects the legacy seeding algorithm so that a given
        # str/bytes seed reproduces the same sequence across Python versions
        # (see random.Random.seed).
        self.rand.seed(seed, version=1)

        timescale_str = self.config.setdefault('sim.timescale', '1 s')

        #: Simulation timescale ``(magnitude, units)`` tuple. The current
        #: simulation time is ``now * timescale``.
        self.timescale = parse_time(timescale_str)

        duration = config.setdefault('sim.duration', '0 s')

        #: The intended simulation duration, in units of :attr:`timescale`.
        self.duration = scale_time(parse_time(duration), self.timescale)

        #: The simulation runs "until" this event. By default, this is the
        #: configured "sim.duration", but may be overridden by subclasses.
        self.until = self.duration

        #: From 'meta.sim.index', the simulation's index when running multiple
        #: related simulations or `None` for a standalone simulation.
        self.sim_index: Optional[int] = config.get('meta.sim.index')

        #: :class:`TraceManager` instance.
        self.tracemgr = TraceManager(self)

    def time(self, t: Optional[float] = None, unit: str = 's') -> Union[int, float]:
        """The current simulation time scaled to specified unit.

        :param float t: Time in simulation units. Default is :attr:`now`.
        :param str unit: Unit of time to scale to. Default is 's' (seconds).
        :returns: Simulation time scaled to `unit`.

        """
        target_scale = parse_time(unit)
        ts_mag, ts_unit = self.timescale
        sim_time = ((self.now if t is None else t) * ts_mag, ts_unit)
        return scale_time(sim_time, target_scale)

    def get_progress(self) -> ProgressTuple:
        """Return a ``(sim_index, now, t_stop, timescale)`` progress tuple."""
        # When stopping via a SimStopEvent, the stop time is only known once
        # the event has been scheduled (it may still be None).
        if isinstance(self.until, SimStopEvent):
            t_stop = self.until.t_stop
        else:
            t_stop = self.until
        return self.sim_index, self.now, t_stop, self.timescale
115 |
116 |
class SimStopEvent(simpy.Event):
    """Event appropriate for stopping the simulation.

    An instance of this event may be used to override `SimEnvironment.until` to
    dynamically choose when to stop the simulation. The simulation may be
    stopped by calling :meth:`schedule()`. The optional `delay` parameter may
    be used to schedule the simulation to stop at an offset from the current
    simulation time.

    """

    def __init__(self, env: SimEnvironment) -> None:
        super().__init__(env)
        #: Simulation time at which the stop was scheduled; remains `None`
        #: until :meth:`schedule()` is called.
        self.t_stop: Optional[Union[int, float]] = None

    def schedule(self, delay: Union[int, float] = 0) -> None:
        """Schedule this event to stop the simulation `delay` units from now.

        :param delay: Non-negative offset from the current simulation time.
            Default is 0 (stop as soon as possible).

        """
        assert not self.triggered
        assert delay >= 0
        # Succeed the event "by hand" (equivalent to simpy.Event.succeed())
        # so it can be scheduled with URGENT priority, ahead of other events
        # occurring at the same simulation time.
        self._ok = True
        self._value = None
        self.env.schedule(self, simpy.events.URGENT, delay)
        self.t_stop = self.env.now + delay
139 |
140 |
141 | class _Workspace:
142 | """Context manager for workspace directory management."""
143 |
144 | def __init__(self, config: ConfigDict) -> None:
145 | self.workspace: str = config.setdefault(
146 | 'meta.sim.workspace', config.setdefault('sim.workspace', os.curdir)
147 | )
148 | self.overwrite: bool = config.setdefault('sim.workspace.overwrite', False)
149 | self.prev_dir: str = os.getcwd()
150 |
151 | def __enter__(self) -> '_Workspace':
152 | if os.path.relpath(self.workspace) != os.curdir:
153 | workspace_exists = os.path.isdir(self.workspace)
154 | if self.overwrite and workspace_exists:
155 | shutil.rmtree(self.workspace)
156 | if self.overwrite or not workspace_exists:
157 | os.makedirs(self.workspace)
158 | os.chdir(self.workspace)
159 | return self
160 |
161 | def __exit__(
162 | self,
163 | exc_type: Optional[Type[BaseException]],
164 | exc_value: Optional[BaseException],
165 | traceback: Optional[TracebackType],
166 | ) -> Optional[bool]:
167 | os.chdir(self.prev_dir)
168 | return None
169 |
170 |
def simulate(
    config: ConfigDict,
    top_type: Type['Component'],
    env_type: Type[SimEnvironment] = SimEnvironment,
    reraise: bool = True,
    progress_manager=standalone_progress_manager,
) -> ResultDict:
    """Initialize, elaborate, and run a simulation.

    All exceptions are caught by `simulate()` so they can be logged and
    captured in the result file. By default, any unhandled exception caught by
    `simulate()` will be re-raised. Setting `reraise` to False prevents
    exceptions from propagating to the caller. Instead, the returned result
    dict will indicate if an exception occurred via the 'sim.exception' item.

    :param dict config: Configuration dictionary for the simulation.
    :param top_type: The model's top-level Component subclass.
    :param env_type: :class:`SimEnvironment` subclass.
    :param bool reraise: Should unhandled exceptions propagate to the caller.
    :param progress_manager:
        Context manager factory used to report simulation progress; defaults
        to the standalone progress manager.
    :returns:
        Dictionary containing the model-specific results of the simulation.
    """
    t0 = timeit.default_timer()
    result: ResultDict = {}
    # These keys default to None; when set, the config and result dicts are
    # dumped to the named files (inside the workspace) after the simulation.
    result_file = config.setdefault('sim.result.file')
    config_file = config.setdefault('sim.config.file')
    try:
        # All filesystem output happens relative to the workspace directory.
        with _Workspace(config):
            env = env_type(config)
            with closing(env.tracemgr):
                try:
                    top_type.pre_init(env)
                    env.tracemgr.flush()
                    with progress_manager(env):
                        top = top_type(parent=None, env=env)
                        top.elaborate()
                        env.tracemgr.flush()
                        env.run(until=env.until)
                        env.tracemgr.flush()
                        top.post_simulate()
                        env.tracemgr.flush()
                        top.get_result(result)
                except BaseException as e:
                    # Record the failure in the trace and the result dict,
                    # then re-raise so the outer handler decides propagation.
                    env.tracemgr.trace_exception()
                    result['sim.exception'] = repr(e)
                    raise
                else:
                    result['sim.exception'] = None
                finally:
                    # Always capture timing/state and dump the config/result
                    # files, even when the simulation failed.
                    env.tracemgr.flush()
                    result['config'] = config
                    result['sim.now'] = env.now
                    result['sim.time'] = env.time()
                    result['sim.runtime'] = timeit.default_timer() - t0
                    _dump_dict(config_file, config)
                    _dump_dict(result_file, result)
    except BaseException as e:
        if reraise:
            raise
        # Swallowing the exception: populate the result best-effort, since
        # `env` may not exist if setup failed early.
        result.setdefault('config', config)
        result.setdefault('sim.runtime', timeit.default_timer() - t0)
        if result.get('sim.exception') is None:
            result['sim.exception'] = repr(e)
    return result
235 |
236 |
def simulate_factors(
    base_config: ConfigDict,
    factors: List[ConfigFactor],
    top_type: Type['Component'],
    env_type: Type[SimEnvironment] = SimEnvironment,
    jobs: Optional[int] = None,
    config_filter: Optional[Callable[[ConfigDict], bool]] = None,
) -> List[ResultDict]:
    """Run multi-factor simulations in separate processes.

    The `factors` are used to compose specialized config dictionaries for the
    simulations.

    The :mod:`python:multiprocessing` module is used run each simulation with a
    separate Python process. This allows multi-factor simulations to run in
    parallel on all available CPU cores.

    :param dict base_config: Base configuration dictionary to be specialized.
    :param list factors: List of factors.
    :param top_type: The model's top-level Component subclass.
    :param env_type: :class:`SimEnvironment` subclass.
    :param int jobs: User specified number of concurrent processes.
    :param function config_filter:
        A function which will be passed a config and returns a bool to filter.
    :returns: Sequence of result dictionaries for each simulation.

    """
    configs = list(factorial_config(base_config, factors, 'meta.sim.special'))
    ws = base_config.setdefault('sim.workspace', os.curdir)
    overwrite = base_config.setdefault('sim.workspace.overwrite', False)

    # Assign every specialized config its index and per-index workspace
    # subdirectory *before* filtering so indices and workspace names are
    # stable regardless of which configs the filter drops.
    for index, config in enumerate(configs):
        config['meta.sim.index'] = index
        config['meta.sim.workspace'] = os.path.join(ws, str(index))
    # BUG FIX: the filter was previously applied inside the loop above, which
    # mutated `configs` while enumerate() was iterating it and re-filtered on
    # every iteration, so some configs could be dropped before receiving
    # 'meta.sim.index'. Apply the filter exactly once, after initialization.
    if config_filter is not None:
        configs[:] = filter(config_filter, configs)
    if overwrite and os.path.relpath(ws) != os.curdir and os.path.isdir(ws):
        shutil.rmtree(ws)
    return simulate_many(configs, top_type, env_type, jobs)
276 |
277 |
def simulate_many(
    configs: Sequence[ConfigDict],
    top_type: Type['Component'],
    env_type: Type[SimEnvironment] = SimEnvironment,
    jobs: Optional[int] = None,
) -> List[ResultDict]:
    """Run multiple experiments in separate processes.

    The :mod:`python:multiprocessing` module is used run each simulation with a
    separate Python process. This allows multi-factor simulations to run in
    parallel on all available CPU cores.

    :param dict configs: list of configuration dictionary for the simulation.
    :param top_type: The model's top-level Component subclass.
    :param env_type: :class:`SimEnvironment` subclass.
    :param int jobs: User specified number of concurrent processes.
    :returns: Sequence of result dictionaries for each simulation.
    :raises ValueError: if `jobs` is less than one or two configs share the
        same workspace directory.

    """
    if jobs is not None and jobs < 1:
        raise ValueError(f'Invalid number of jobs: {jobs}')

    # Progress is enabled for every simulation if any config requests it, so
    # aggregate progress can be reported across the whole batch.
    progress_enable = any(
        config.setdefault('sim.progress.enable', False) for config in configs
    )

    progress_queue: Optional[Queue[ProgressTuple]] = (
        Queue() if progress_enable else None
    )
    result_queue: Queue[ResultDict] = Queue()
    config_queue: Queue[Optional[ConfigDict]] = Queue()

    workspaces = set()
    max_width = 0
    for index, config in enumerate(configs):
        max_width = max(config.setdefault('sim.progress.max_width', 0), max_width)

        # Each simulation needs a unique workspace directory; otherwise
        # concurrent simulations would clobber each other's output files.
        workspace = os.path.normpath(
            config.setdefault(
                'meta.sim.workspace', config.setdefault('sim.workspace', os.curdir)
            )
        )
        if workspace in workspaces:
            raise ValueError(f'Duplicate workspace: {workspace}')
        workspaces.add(workspace)

        config.setdefault('meta.sim.index', index)
        config['sim.progress.enable'] = progress_enable
        config_queue.put(config)

    # Never spawn more workers than there are simulations to run.
    num_workers = min(len(configs), cpu_count())
    if jobs is not None:
        num_workers = min(num_workers, jobs)

    workers = []
    for i in range(num_workers):
        worker = Process(
            name=f'sim-worker-{i}',
            target=_simulate_worker,
            args=(
                top_type,
                env_type,
                False,
                progress_queue,
                config_queue,
                result_queue,
            ),
        )
        worker.daemon = True  # Workers die if main process dies.
        worker.start()
        workers.append(worker)
        config_queue.put(None)  # A stop sentinel for each worker.

    if progress_enable:
        progress_thread = Thread(
            target=consume_multi_progress,
            args=(progress_queue, num_workers, len(configs), max_width),
        )
        progress_thread.daemon = True
        progress_thread.start()

    # Gather one result per config; this blocks until all simulations finish.
    results = [result_queue.get() for _ in configs]

    if progress_enable:
        # Although this is a daemon thread, we still make a token attempt to
        # join with it. This avoids a race with certain testing frameworks
        # (ahem, py.test) that may monkey-patch and close stderr while
        # progress_thread is still using it.
        progress_thread.join(1)

    for worker in workers:
        worker.join(5)

    # Results arrive in completion order; restore submission order.
    return sorted(results, key=lambda r: r['config']['meta.sim.index'])
372 |
373 |
def _simulate_worker(
    top_type: Type['Component'],
    env_type: Type[SimEnvironment],
    reraise: bool,
    progress_queue: Optional['Queue[ProgressTuple]'],
    config_queue: 'Queue[Optional[ConfigDict]]',
    result_queue: 'Queue[ResultDict]',
):
    """Worker-process loop: run one simulation per queued config.

    Consumes configs from `config_queue` until the `None` stop sentinel is
    received, pushing each simulation's result dict onto `result_queue`.
    """
    progress_manager = get_multi_progress_manager(progress_queue)
    # iter() with a sentinel stops cleanly when the None sentinel is pulled.
    for config in iter(config_queue.get, None):
        result = simulate(config, top_type, env_type, reraise, progress_manager)
        result_queue.put(result)
389 |
390 |
391 | def _dump_dict(filename: str, dump_dict: Dict[str, Any]):
392 | if filename is not None:
393 | _, ext = os.path.splitext(filename)
394 | if ext not in ['.yaml', '.yml', '.json', '.py']:
395 | raise ValueError(f'Invalid extension: {ext}')
396 | with open(filename, 'w') as dump_file:
397 | if ext in ['.yaml', '.yml']:
398 | yaml.safe_dump(dump_dict, stream=dump_file)
399 | elif ext == '.json':
400 | json.dump(dump_dict, dump_file, sort_keys=True, indent=2)
401 | else:
402 | assert ext == '.py'
403 | pprint(dump_dict, stream=dump_file)
404 |
--------------------------------------------------------------------------------
/desmod/tracer.py:
--------------------------------------------------------------------------------
1 | from typing import TYPE_CHECKING, Any, Callable, Generator, List, Optional, Union
2 | import os
3 | import re
4 | import sqlite3
5 | import sys
6 | import traceback
7 |
8 | from vcd import VCDWriter
9 | import simpy
10 |
11 | from .pool import Pool
12 | from .probe import ProbeCallback, ProbeTarget
13 | from .probe import attach as probe_attach
14 | from .queue import Queue
15 | from .timescale import parse_time, scale_time
16 | from .util import partial_format
17 |
18 | if TYPE_CHECKING:
19 | from .simulation import SimEnvironment
20 |
21 | TraceCallback = Callable[..., None]
22 |
23 |
class Tracer:
    """Base class for simulation tracers.

    Each tracer owns the ``sim.<name>`` configuration scope. A tracer opens
    itself during construction only when ``sim.<name>.enable`` is true, at
    which point its include/exclude scope patterns are compiled.
    """

    # Short name identifying the tracer's config scope, e.g. 'log' or 'vcd'.
    name: str = ''

    def __init__(self, env: 'SimEnvironment'):
        self.env = env
        prefix = f'sim.{self.name}'
        config = env.config
        self.enabled: bool = config.setdefault(f'{prefix}.enable', False)
        self.persist: bool = config.setdefault(f'{prefix}.persist', True)
        if not self.enabled:
            return
        self.open()
        include_pat: List[str] = config.setdefault(f'{prefix}.include_pat', ['.*'])
        exclude_pat: List[str] = config.setdefault(f'{prefix}.exclude_pat', [])
        self._include_re = [re.compile(pat) for pat in include_pat]
        self._exclude_re = [re.compile(pat) for pat in exclude_pat]

    def is_scope_enabled(self, scope: str) -> bool:
        """Return True when `scope` passes the include/exclude filters."""
        if not self.enabled:
            return False
        if not any(r.match(scope) for r in self._include_re):
            return False
        return not any(r.match(scope) for r in self._exclude_re)

    def open(self) -> None:
        raise NotImplementedError()  # pragma: no cover

    def close(self) -> None:
        """Close the tracer, but only if it was enabled (and thus opened)."""
        if self.enabled:
            self._close()

    def _close(self) -> None:
        raise NotImplementedError()  # pragma: no cover

    def remove_files(self) -> None:
        raise NotImplementedError()

    def flush(self) -> None:
        """Flush buffered trace output; a no-op by default."""
        pass

    def activate_probe(
        self, scope: str, target: ProbeTarget, **hints: Any
    ) -> Optional[ProbeCallback]:
        raise NotImplementedError()  # pragma: no cover

    def activate_trace(self, scope: str, **hints) -> Optional[TraceCallback]:
        raise NotImplementedError()  # pragma: no cover

    def trace_exception(self) -> None:
        """Record the in-flight exception; a no-op by default."""
        pass
77 |
78 |
class LogTracer(Tracer):
    """Tracer that writes human-readable log lines to a file or stderr."""

    name = 'log'
    default_format = '{level:7} {ts:.3f} {ts_unit}: {scope}:'

    # Level names mapped to numeric severity; lower numbers are more severe.
    levels = {
        'ERROR': 1,
        'WARNING': 2,
        'INFO': 3,
        'PROBE': 4,
        'DEBUG': 5,
    }

    def open(self) -> None:
        config = self.env.config
        self.filename: str = config.setdefault('sim.log.file', 'sim.log')
        buffering: int = config.setdefault('sim.log.buffering', -1)
        self.max_level = self.levels[config.setdefault('sim.log.level', 'INFO')]
        self.format_str: str = config.setdefault(
            'sim.log.format', self.default_format
        )
        ts_mag, ts_unit = self.env.timescale
        # A non-unit magnitude is rendered as e.g. '(10us)' instead of 'us'.
        self.ts_unit = ts_unit if ts_mag == 1 else f'({ts_mag}{ts_unit})'

        # An empty/None filename redirects logging to stderr, which must not
        # be closed on teardown.
        self.should_close = bool(self.filename)
        self.file = (
            open(self.filename, 'w', buffering) if self.filename else sys.stderr
        )

    def flush(self) -> None:
        self.file.flush()

    def _close(self) -> None:
        if self.should_close:
            self.file.close()

    def remove_files(self) -> None:
        if os.path.isfile(self.filename):
            os.remove(self.filename)

    def is_scope_enabled(self, scope: str, level: Optional[str] = None) -> bool:
        """Like the base filter, with an optional log-level threshold."""
        if level is not None and self.levels[level] > self.max_level:
            return False
        return super().is_scope_enabled(scope)

    def _scoped_format(self, level: str, scope: str) -> str:
        # Pre-bind the static fields, leaving only {ts} for each record.
        return partial_format(
            self.format_str, level=level, ts_unit=self.ts_unit, scope=scope
        )

    def activate_probe(
        self, scope: str, target: ProbeTarget, **hints: Any
    ) -> Optional[ProbeCallback]:
        level: str = hints.get('level', 'PROBE')
        if not self.is_scope_enabled(scope, level):
            return None
        format_str = self._scoped_format(level, scope)

        def probe_callback(value: object) -> None:
            print(format_str.format(ts=self.env.now), value, file=self.file)

        return probe_callback

    def activate_trace(self, scope: str, **hints) -> Optional[TraceCallback]:
        level: str = hints.get('level', 'DEBUG')
        if not self.is_scope_enabled(scope, level):
            return None
        format_str = self._scoped_format(level, scope)

        def trace_callback(*value) -> None:
            print(format_str.format(ts=self.env.now), *value, file=self.file)

        return trace_callback

    def trace_exception(self) -> None:
        """Write the current exception's traceback to the log."""
        tb_lines = traceback.format_exception(*sys.exc_info())
        header = self.format_str.format(
            level='ERROR', ts=self.env.now, ts_unit=self.ts_unit, scope='Exception'
        )
        print(header, tb_lines[-1], '\n', *tb_lines, file=self.file)
168 |
169 |
class VCDTracer(Tracer):
    """Tracer that dumps probed values to a VCD (Value Change Dump) file."""

    name = 'vcd'

    def open(self) -> None:
        dump_filename: str = self.env.config.setdefault('sim.vcd.dump_file', 'sim.vcd')
        # The VCD file may use a different timescale than the simulation.
        if 'sim.vcd.timescale' in self.env.config:
            vcd_ts_str: str = self.env.config.setdefault(
                'sim.vcd.timescale', self.env.config['sim.timescale']
            )
            mag, unit = parse_time(vcd_ts_str)
        else:
            mag, unit = self.env.timescale
        mag_int = int(mag)
        if mag_int != mag:
            # NOTE(review): this message names 'sim.timescale', but the
            # magnitude may have come from 'sim.vcd.timescale' above.
            raise ValueError(f'sim.timescale magnitude must be an integer, got {mag}')
        vcd_timescale = mag_int, unit
        # Factor converting simulation time (env.now) into VCD timestamps.
        self.scale_factor = scale_time(self.env.timescale, vcd_timescale)
        check_values: bool = self.env.config.setdefault('sim.vcd.check_values', True)
        self.dump_file = open(dump_filename, 'w')
        self.vcd = VCDWriter(
            self.dump_file, timescale=vcd_timescale, check_values=check_values
        )
        self.save_filename: str = self.env.config.setdefault(
            'sim.gtkw.file', 'sim.gtkw'
        )
        # 'sim.gtkw.live' defaults to None (falsy) when unset.
        if self.env.config.setdefault('sim.gtkw.live'):
            from vcd.gtkw import spawn_gtkwave_interactive

            quiet: bool = self.env.config.setdefault('sim.gtkw.quiet', True)
            spawn_gtkwave_interactive(dump_filename, self.save_filename, quiet=quiet)

        # Optional window limiting when value changes are written to the dump.
        start_time: str = self.env.config.setdefault('sim.vcd.start_time', '')
        stop_time: str = self.env.config.setdefault('sim.vcd.stop_time', '')
        t_start = (
            scale_time(parse_time(start_time), self.env.timescale)
            if start_time
            else None
        )
        t_stop = (
            scale_time(parse_time(stop_time), self.env.timescale) if stop_time else None
        )
        self.env.process(self._start_stop(t_start, t_stop))

    def vcd_now(self) -> float:
        """Current simulation time expressed in VCD timescale units."""
        return self.env.now * self.scale_factor

    def flush(self) -> None:
        self.dump_file.flush()

    def _close(self) -> None:
        self.vcd.close(self.vcd_now())
        self.dump_file.close()

    def remove_files(self) -> None:
        if os.path.isfile(self.dump_file.name):
            os.remove(self.dump_file.name)
        if os.path.isfile(self.save_filename):
            os.remove(self.save_filename)

    def activate_probe(
        self, scope: str, target: ProbeTarget, **hints: Any
    ) -> Optional[ProbeCallback]:
        """Register a VCD variable for `scope` and return its callback.

        The VCD var_type and initial value are taken from `hints` when
        given, otherwise inferred from the probed target's type.
        """
        assert self.enabled
        var_type: Optional[str] = hints.get('var_type')
        if var_type is None:
            if isinstance(target, (simpy.Container, Pool)):
                # Container/Pool levels may be float or int valued.
                if isinstance(target.level, float):
                    var_type = 'real'
                else:
                    var_type = 'integer'
            elif isinstance(target, (simpy.Resource, simpy.Store, Queue)):
                var_type = 'integer'
            else:
                raise ValueError(f'Could not infer VCD var_type for {scope}')

        kwargs = {k: hints[k] for k in ['size', 'init', 'ident'] if k in hints}

        # Infer an initial value from the target's current state when the
        # hints do not provide one.
        if 'init' not in kwargs:
            if isinstance(target, (simpy.Container, Pool)):
                kwargs['init'] = target.level
            elif isinstance(target, simpy.Resource):
                # 'z' (high-impedance) marks a resource with no users yet.
                kwargs['init'] = len(target.users) if target.users else 'z'
            elif isinstance(target, (simpy.Store, Queue)):
                kwargs['init'] = len(target.items)

        parent_scope, name = scope.rsplit('.', 1)
        var = self.vcd.register_var(parent_scope, name, var_type, **kwargs)

        def probe_callback(value: Any) -> None:
            self.vcd.change(var, self.vcd_now(), value)

        return probe_callback

    def activate_trace(self, scope: str, **hints) -> Optional[TraceCallback]:
        """Register a VCD variable for `scope` using explicit hints."""
        assert self.enabled
        var_type = hints['var_type']
        kwargs = {k: hints[k] for k in ['size', 'init', 'ident'] if k in hints}

        parent_scope, name = scope.rsplit('.', 1)
        var = self.vcd.register_var(parent_scope, name, var_type, **kwargs)

        # Vector (tuple-sized) variables consume all positional values;
        # scalar variables use only the first.
        if isinstance(var.size, tuple):

            def trace_callback(*value) -> None:
                self.vcd.change(var, self.vcd_now(), value)

        else:

            def trace_callback(*value) -> None:
                self.vcd.change(var, self.vcd_now(), value[0])

        return trace_callback

    def _start_stop(
        self, t_start: Optional[Union[int, float]], t_stop: Optional[Union[int, float]]
    ) -> Generator[simpy.Timeout, None, None]:
        """Process that toggles VCD dumping per the configured time window."""
        # Wait for simulation to start to ensure all variable registration is
        # complete before doing any dump_on()/dump_off() calls.
        yield self.env.timeout(0)

        if t_start is None and t_stop is None:
            # |vvvvvvvvvvvvvv|
            pass
        elif t_start is None:
            assert t_stop is not None
            # |vvvvvv--------|
            yield self.env.timeout(t_stop)
            self.vcd.dump_off(self.vcd_now())
        elif t_stop is None:
            # |--------vvvvvv|
            self.vcd.dump_off(self.vcd_now())
            yield self.env.timeout(t_start)
            self.vcd.dump_on(self.vcd_now())
        elif t_start <= t_stop:
            # |---vvvvvv-----|
            self.vcd.dump_off(self.vcd_now())
            yield self.env.timeout(t_start)
            self.vcd.dump_on(self.vcd_now())
            yield self.env.timeout(t_stop - t_start)
            self.vcd.dump_off(self.vcd_now())
        else:
            # |vvv-------vvvv|
            yield self.env.timeout(t_stop)
            self.vcd.dump_off(self.vcd_now())
            yield self.env.timeout(t_start - t_stop)
            self.vcd.dump_on(self.vcd_now())
317 |
318 |
class SQLiteTracer(Tracer):
    """Tracer that records (timestamp, scope, value) rows in a SQLite DB."""

    name = 'db'

    def open(self) -> None:
        config = self.env.config
        self.filename: str = config.setdefault('sim.db.file', 'sim.sqlite')
        self.trace_table: str = config.setdefault('sim.db.trace_table', 'trace')
        # Start from a clean slate: drop any database left by a prior run.
        self.remove_files()
        self.db = sqlite3.connect(self.filename)
        self._is_trace_table_created = False

    def _create_trace_table(self) -> None:
        """Lazily create the trace table the first time it is needed."""
        if self._is_trace_table_created:
            return
        # NOTE: trace_table comes from the (trusted) simulation config and is
        # interpolated into SQL; it must be a valid SQL identifier.
        self.db.execute(
            f'CREATE TABLE {self.trace_table} ('
            f'timestamp FLOAT, '
            f'scope TEXT, '
            f'value)'
        )
        self._is_trace_table_created = True

    def flush(self) -> None:
        self.db.commit()

    def _close(self) -> None:
        self.db.commit()
        self.db.close()

    def remove_files(self) -> None:
        # An in-memory database has no files to remove.
        if self.filename == ':memory:':
            return
        for filename in (self.filename, f'{self.filename}-journal'):
            if os.path.exists(filename):
                os.remove(filename)

    def activate_probe(
        self, scope: str, target: ProbeTarget, **hints: Any
    ) -> Optional[ProbeCallback]:
        # Probes and traces are recorded identically in the database.
        return self.activate_trace(scope, **hints)

    def activate_trace(self, scope: str, **hints) -> Optional[TraceCallback]:
        assert self.enabled
        self._create_trace_table()
        insert_sql = (
            f'INSERT INTO {self.trace_table} (timestamp, scope, value) VALUES (?, ?, ?)'
        )

        def trace_callback(value) -> None:
            self.db.execute(insert_sql, (self.env.now, scope, value))

        return trace_callback
371 |
372 |
class TraceManager:
    """Owns the set of tracers and fans tracing operations out to them."""

    def __init__(self, env: 'SimEnvironment') -> None:
        self.tracers: List[Tracer] = []
        try:
            # Construct tracers one at a time, registering each as it is
            # created so close() can tear down a partially-built set if a
            # later constructor raises.
            for attr_name, tracer_type in [
                ('log_tracer', LogTracer),
                ('vcd_tracer', VCDTracer),
                ('sqlite_tracer', SQLiteTracer),
            ]:
                tracer = tracer_type(env)
                setattr(self, attr_name, tracer)
                self.tracers.append(tracer)
        except BaseException:
            self.close()
            raise

    def flush(self) -> None:
        """Flush all managed tracer instances.

        The effect of flushing is tracer-dependent.

        """
        for tracer in self.tracers:
            if not tracer.enabled:
                continue
            tracer.flush()

    def close(self) -> None:
        """Close every tracer, removing files of non-persistent ones."""
        for tracer in self.tracers:
            tracer.close()
            if tracer.enabled and not tracer.persist:
                tracer.remove_files()

    def auto_probe(self, scope: str, target: ProbeTarget, **hints: Any) -> None:
        """Attach probe callbacks for `scope` from every hinted tracer."""
        callbacks: List[ProbeCallback] = []
        for tracer in self.tracers:
            if tracer.name not in hints or not tracer.is_scope_enabled(scope):
                continue
            callback = tracer.activate_probe(scope, target, **hints[tracer.name])
            if callback:
                callbacks.append(callback)
        if callbacks:
            probe_attach(scope, target, callbacks, **hints)

    def get_trace_function(self, scope: str, **hints) -> Callable[..., None]:
        """Return a function that forwards trace values to hinted tracers."""
        callbacks = []
        for tracer in self.tracers:
            if tracer.name not in hints or not tracer.is_scope_enabled(scope):
                continue
            callback = tracer.activate_trace(scope, **hints[tracer.name])
            if callback:
                callbacks.append(callback)

        def trace_function(*value) -> None:
            for callback in callbacks:
                callback(*value)

        return trace_function

    def trace_exception(self) -> None:
        """Forward the in-flight exception to every enabled tracer."""
        for tracer in self.tracers:
            if not tracer.enabled:
                continue
            tracer.trace_exception()
431 |
--------------------------------------------------------------------------------