4 | Redirecting to master branch
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/.github/workflows/code.yml:
--------------------------------------------------------------------------------
1 | name: Code CI
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | jobs:
8 | build:
9 | strategy:
10 | fail-fast: false
11 | matrix:
12 | include:
13 | # DLS RHEL7 legacy
14 | - os: ubuntu-latest
15 | python: "2.7"
16 | publish: true
17 | epics: "R3.14.12.7"
18 | # sphinx deps don't work on 2.7, so install manually
19 | pipenv: "--skip-lock && pipenv run pip install pytest-cov pytest-flake8"
20 |
21 | # DLS RHEL7 python3 current
22 | - os: ubuntu-latest
23 | python: "3.7"
24 | epics: "R3.14.12.7"
25 | pipenv: "--dev --deploy"
26 |
27 | # DLS RHEL7 python3 as used by pythonIoc
28 | - os: ubuntu-latest
29 | python: "3.7"
30 | publish: true
31 | pipenv: "--dev --deploy"
32 |
33 | # Other random possibilities
34 | - os: ubuntu-latest
35 | python: "3.8"
36 | epics: "R3.15.8"
37 | pipenv: "--dev --deploy"
38 |
39 | - os: ubuntu-latest
40 | python: "3.9"
41 | pipenv: "--dev --deploy"
42 |
43 | - os: windows-latest
44 | python: "3.7"
45 | pipenv: "--dev --deploy"
46 |
47 | name: ${{ matrix.os }}/${{ matrix.python }}/${{ matrix.epics }}
48 | runs-on: ${{ matrix.os }}
49 |
50 | steps:
51 | - name: Checkout Source
52 | uses: actions/checkout@v2
53 |
54 | - name: Set up Python ${{ matrix.python }}
55 | uses: actions/setup-python@v2
56 | with:
57 | python-version: ${{ matrix.python }}
58 |
59 | - name: Install EPICS Base
60 | if: matrix.epics
61 | run: |
62 | wget -nv https://github.com/epics-base/epics-base/archive/${{ matrix.epics }}.tar.gz
63 | tar -zxf ${{ matrix.epics }}.tar.gz
64 | make -sj -C epics-base-${{ matrix.epics }}
65 | echo "EPICS_BASE=`pwd`/epics-base-${{ matrix.epics }}" >> $GITHUB_ENV
66 |
67 | - name: Install Python Dependencies
68 | # Pin pipenv so it works on python2.7
69 | run: |
70 | pip install pipenv==2020.6.2 twine build
71 | pipenv install --python $(python -c 'import sys; print(sys.executable)') ${{ matrix.pipenv }} && pipenv graph
72 |
73 | - name: Install epicscorelibs
74 | if: ${{ ! matrix.epics }}
75 | run: pipenv run pip install epicscorelibs
76 |
77 | - name: Create Sdist and Wheel
78 | if: matrix.publish
79 | # Set SOURCE_DATE_EPOCH from git commit for reproducible build
80 | # https://reproducible-builds.org/
81 | # Set group writable and umask to do the same to match inside DLS
82 | run: |
83 | chmod -R g+w .
84 | umask 0002
85 | SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) python -m build --sdist --wheel
86 |
87 | - name: Run Tests
88 | run: pipenv run tests
89 |
90 | - name: Upload Wheel and Sdist
91 | if: matrix.publish
92 | uses: actions/upload-artifact@v2
93 | with:
94 | name: dist
95 | path: dist/*
96 |
97 | - name: Publish Sdist and Wheel to PyPI
98 | # Only once when on a tag
99 | if: ${{ matrix.publish && startsWith(github.ref, 'refs/tags') }}
100 | env:
101 | TWINE_USERNAME: __token__
102 | TWINE_PASSWORD: ${{ secrets.pypi_token }}
103 | run: twine upload dist/*
104 |
105 | - name: Upload coverage to Codecov
106 | uses: codecov/codecov-action@v1
107 | with:
108 | name: ${{ matrix.os }}/${{ matrix.python }}/${{ matrix.epics }}
109 | files: cov.xml
110 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Docs CI
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | jobs:
8 | build:
9 | strategy:
10 |       matrix:
11 |         # Whether we are building all versions of the docs (publish) or just checking they build
12 |         publish: ["${{ (github.repository_owner == 'DiamondLightSource' || github.repository_owner == 'dls-controls') && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) }}"]
13 |
14 | name: "Docs CI (publish=${{ matrix.publish }})"
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - name: Checkout Source
19 | uses: actions/checkout@v2
20 | with:
21 | # require all of history to see all tagged versions' docs
22 | fetch-depth: 0
23 |
24 | - name: Set up Python
25 | uses: actions/setup-python@v2
26 | with:
27 | python-version: "3.7"
28 |
29 | - name: Install Python Dependencies
30 | run: |
31 | pip install pipenv
32 | pipenv install --dev --deploy --python $(which python) && pipenv graph
33 |
34 | - name: Deploy index
35 | if: matrix.publish
36 | # We pin to the SHA, not the tag, for security reasons.
37 | # https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
38 | uses: peaceiris/actions-gh-pages@bbdfb200618d235585ad98e965f4aafc39b4c501 # v3.7.3
39 | with:
40 | github_token: ${{ secrets.GITHUB_TOKEN }}
41 | publish_dir: .github/pages
42 | keep_files: true
43 |
44 | - name: Checkout gh-pages
45 | if: matrix.publish
46 | # As we already did a deploy of gh-pages above, it is guaranteed to be there
47 | # so check it out so we can selectively build docs below
48 | uses: actions/checkout@v2
49 | with:
50 | ref: gh-pages
51 | path: build/html
52 |
53 | - name: Maybe use sphinx-multiversion
54 | # If we are building master or a tag we will publish
55 | if: matrix.publish
56 | # So use the args we normally pass to sphinx-build, but run sphinx-multiversion
57 | run: mv $(pipenv --venv)/bin/sphinx-multiversion $(pipenv --venv)/bin/sphinx-build
58 |
59 | - name: Build Docs
60 | run: pipenv run docs
61 |
62 | - name: Publish Docs to gh-pages
63 | # Only master and tags are published
64 | if: matrix.publish
65 | # We pin to the SHA, not the tag, for security reasons.
66 | # https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
67 | uses: peaceiris/actions-gh-pages@bbdfb200618d235585ad98e965f4aafc39b4c501 # v3.7.3
68 | with:
69 | github_token: ${{ secrets.GITHUB_TOKEN }}
70 | publish_dir: build/html
71 | keep_files: true
72 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 |
26 | # PyInstaller
27 | # Usually these files are written by a python script from a template
28 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
29 | *.manifest
30 | *.spec
31 |
32 | # Installer logs
33 | pip-log.txt
34 | pip-delete-this-directory.txt
35 |
36 | # Unit test / coverage reports
37 | htmlcov/
38 | .tox/
39 | .coverage
40 | .coverage.*
41 | .cache
42 | nosetests.xml
43 | coverage.xml
44 | *,cover
45 | *.mypy_cache
46 | *.pytest_cache
47 | cov.xml
48 |
49 | # DLS build dir and virtual environment
50 | /prefix/
51 | /venv/
52 | /lightweight-venv/
53 | /installed.files
54 |
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | include:
2 | - project: 'controls/reports/ci_templates'
3 | ref: master
4 | file: 'python3/dls_py3_template.yml'
5 |
--------------------------------------------------------------------------------
/.gitremotes:
--------------------------------------------------------------------------------
1 | github git@github.com:DiamondLightSource/epicsdbbuilder.git
2 | gitlab-py2 ssh://git@gitlab.diamond.ac.uk/controls/python/epicsdbbuilder.git
3 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Contributions and issues are most welcome! All issues and pull requests are
5 | handled through GitHub on the `DiamondLightSource repository`_. Also, please
6 | check for any existing issues before filing a new one. If you have a great idea
7 | but it involves big changes, please file a ticket before making a pull request!
8 | We want to make sure you don't spend your time coding something that might not
9 | fit the scope of the project.
10 |
11 | .. _DiamondLightSource repository: https://github.com/DiamondLightSource/epicsdbbuilder/issues
12 |
13 | Running the tests
14 | -----------------
15 |
16 | To get the source code and run the unit tests, run::
17 |
18 | $ git clone git://github.com/DiamondLightSource/epicsdbbuilder.git
19 | $ cd epicsdbbuilder
20 | $ pipenv install --dev
21 | $ pipenv run tests
22 |
23 | While 100% code coverage does not make a library bug-free, it significantly
24 | reduces the number of easily caught bugs! Please make sure coverage remains the
25 | same or is improved by a pull request!
26 |
27 | Code Styling
28 | ------------
29 |
30 | The code in this repository conforms to standards set by the following tools:
31 |
32 | - flake8_ for style checks
33 |
34 | .. _flake8: http://flake8.pycqa.org/en/latest/
35 |
36 | These tests will be run on code when running ``pipenv run tests`` and also
37 | automatically at check in. Please read the tool documentation for details
38 | on how to fix the errors it reports.
39 |
40 | Documentation
41 | -------------
42 |
43 | Documentation is contained in the ``docs`` directory and extracted from
44 | docstrings of the API.
45 |
46 | Docs follow the underlining convention::
47 |
48 | Heading 1 (page title)
49 | =======================
50 |
51 | Heading 2
52 | ---------
53 |
54 | Heading 3
55 | ~~~~~~~~~
56 |
57 |
58 | You can build the docs from the project directory by running::
59 |
60 | $ pipenv run docs
61 | $ firefox build/html/index.html
62 |
63 |
64 | Release Checklist
65 | -----------------
66 |
67 | Before a new release, please go through the following checklist:
68 |
69 | - Choose a new PEP440 compliant release number
70 | - Git tag the version with a message summarizing the changes
71 | - Push to github and the actions will make a release on pypi
72 | - Push to internal gitlab and do a dls-release.py of the tag
73 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Specify defaults for testing
2 | PREFIX := $(shell pwd)/prefix
3 | PYTHON = dls-python
4 | INSTALL_DIR = $(PREFIX)/lib/python2.7/site-packages
5 | SCRIPT_DIR = $(PREFIX)/bin
6 | MODULEVER=0.0
7 |
8 | # Override with any release info
9 | -include Makefile.private
10 |
11 | # clean, install and docs don't create files named after themselves; without
12 | # .PHONY the checked-in docs/ source directory makes "make docs" appear
13 | # permanently up to date, so its recipe never runs.
14 | .PHONY: clean install docs
15 |
16 | # This is run when we type make
17 | dist: setup.py $(wildcard epicsdbbuilder/*) docs
18 | 	MODULEVER=$(MODULEVER) $(PYTHON) setup.py bdist_egg
19 | 	touch dist
20 |
21 | # Clean the module
22 | clean:
23 | 	$(PYTHON) setup.py clean
24 | 	-rm -rf build dist *egg-info installed.files
25 | 	-find -name '*.pyc' -exec rm {} \;
26 |
27 | # Install the built egg
28 | install: dist
29 | 	$(PYTHON) setup.py easy_install -m \
30 | 		--record=installed.files \
31 | 		--install-dir=$(INSTALL_DIR) \
32 | 		--script-dir=$(SCRIPT_DIR) dist/*.egg
33 |
34 | # Build the docs with sphinx
35 | docs:
36 | 	sphinx-build -EWT --keep-going docs build/html
32 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | name = "pypi"
3 | url = "https://pypi.org/simple"
4 | verify_ssl = true
5 |
6 | [dev-packages]
7 | # Switch to main repo after PR https://github.com/Holzhaus/sphinx-multiversion/pull/64 is merged
8 | sphinx-multiversion = {editable = true,git = "https://github.com/DiamondLightSource/sphinx-multiversion.git",ref = "only-arg"}
9 |
10 | # Make sure lockfile is usable on other platforms
11 | atomicwrites = "*"
12 |
13 | # Test and docs deps
14 | pytest-cov = "*"
15 | pytest-flake8 = "*"
16 | sphinx-rtd-theme = "*"
17 |
18 | [packages]
19 | # All other package requirements from setup.cfg
20 | epicsdbbuilder = {editable = true,path = "."}
21 |
22 | [scripts]
23 | tests = "python -m pytest"
24 | docs = "sphinx-build -EWT --keep-going docs build/html"
25 | # Delete any files that git ignore hides from us
26 | gitclean = "git clean -fdX"
27 |
--------------------------------------------------------------------------------
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "440ec49dc40f84e7b23248902e0cb3d7759fb34675a28f38f395fcdd789e3fdf"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {},
8 | "sources": [
9 | {
10 | "name": "pypi",
11 | "url": "https://pypi.org/simple",
12 | "verify_ssl": true
13 | }
14 | ]
15 | },
16 | "default": {
17 | "epicsdbbuilder": {
18 | "editable": true,
19 | "path": "."
20 | }
21 | },
22 | "develop": {
23 | "alabaster": {
24 | "hashes": [
25 | "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
26 | "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
27 | ],
28 | "version": "==0.7.12"
29 | },
30 | "atomicwrites": {
31 | "hashes": [
32 | "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197",
33 | "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"
34 | ],
35 | "index": "pypi",
36 | "version": "==1.4.0"
37 | },
38 | "attrs": {
39 | "hashes": [
40 | "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
41 | "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
42 | ],
43 | "version": "==20.3.0"
44 | },
45 | "babel": {
46 | "hashes": [
47 | "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9",
48 | "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"
49 | ],
50 | "version": "==2.9.1"
51 | },
52 | "certifi": {
53 | "hashes": [
54 | "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
55 | "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"
56 | ],
57 | "version": "==2020.12.5"
58 | },
59 | "chardet": {
60 | "hashes": [
61 | "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
62 | "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
63 | ],
64 | "version": "==4.0.0"
65 | },
66 | "coverage": {
67 | "hashes": [
68 | "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c",
69 | "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6",
70 | "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45",
71 | "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a",
72 | "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03",
73 | "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529",
74 | "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a",
75 | "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a",
76 | "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2",
77 | "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6",
78 | "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759",
79 | "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53",
80 | "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a",
81 | "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4",
82 | "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff",
83 | "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502",
84 | "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793",
85 | "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb",
86 | "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905",
87 | "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821",
88 | "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b",
89 | "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81",
90 | "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0",
91 | "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b",
92 | "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3",
93 | "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184",
94 | "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701",
95 | "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a",
96 | "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82",
97 | "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638",
98 | "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5",
99 | "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083",
100 | "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6",
101 | "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90",
102 | "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465",
103 | "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a",
104 | "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3",
105 | "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e",
106 | "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066",
107 | "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf",
108 | "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b",
109 | "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae",
110 | "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669",
111 | "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873",
112 | "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b",
113 | "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6",
114 | "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb",
115 | "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160",
116 | "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c",
117 | "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079",
118 | "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d",
119 | "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"
120 | ],
121 | "version": "==5.5"
122 | },
123 | "docutils": {
124 | "hashes": [
125 | "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
126 | "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
127 | ],
128 | "version": "==0.16"
129 | },
130 | "flake8": {
131 | "hashes": [
132 | "sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378",
133 | "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a"
134 | ],
135 | "version": "==3.9.1"
136 | },
137 | "idna": {
138 | "hashes": [
139 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
140 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
141 | ],
142 | "version": "==2.10"
143 | },
144 | "imagesize": {
145 | "hashes": [
146 | "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
147 | "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
148 | ],
149 | "version": "==1.2.0"
150 | },
151 | "importlib-metadata": {
152 | "hashes": [
153 | "sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581",
154 | "sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d"
155 | ],
156 | "markers": "python_version < '3.8' and python_version < '3.8'",
157 | "version": "==4.0.1"
158 | },
159 | "iniconfig": {
160 | "hashes": [
161 | "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
162 | "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
163 | ],
164 | "version": "==1.1.1"
165 | },
166 | "jinja2": {
167 | "hashes": [
168 | "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419",
169 | "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"
170 | ],
171 | "version": "==2.11.3"
172 | },
173 | "markupsafe": {
174 | "hashes": [
175 | "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
176 | "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
177 | "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
178 | "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
179 | "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
180 | "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
181 | "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
182 | "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
183 | "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
184 | "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
185 | "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
186 | "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
187 | "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
188 | "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
189 | "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
190 | "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
191 | "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
192 | "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
193 | "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
194 | "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
195 | "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
196 | "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
197 | "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
198 | "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
199 | "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
200 | "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
201 | "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
202 | "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
203 | "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
204 | "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
205 | "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
206 | "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
207 | "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
208 | ],
209 | "version": "==1.1.1"
210 | },
211 | "mccabe": {
212 | "hashes": [
213 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
214 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
215 | ],
216 | "version": "==0.6.1"
217 | },
218 | "packaging": {
219 | "hashes": [
220 | "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
221 | "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
222 | ],
223 | "version": "==20.9"
224 | },
225 | "pluggy": {
226 | "hashes": [
227 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
228 | "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
229 | ],
230 | "version": "==0.13.1"
231 | },
232 | "py": {
233 | "hashes": [
234 | "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3",
235 | "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"
236 | ],
237 | "version": "==1.10.0"
238 | },
239 | "pycodestyle": {
240 | "hashes": [
241 | "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068",
242 | "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"
243 | ],
244 | "version": "==2.7.0"
245 | },
246 | "pyflakes": {
247 | "hashes": [
248 | "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3",
249 | "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"
250 | ],
251 | "version": "==2.3.1"
252 | },
253 | "pygments": {
254 | "hashes": [
255 | "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f",
256 | "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"
257 | ],
258 | "version": "==2.9.0"
259 | },
260 | "pyparsing": {
261 | "hashes": [
262 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
263 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
264 | ],
265 | "version": "==2.4.7"
266 | },
267 | "pytest": {
268 | "hashes": [
269 | "sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634",
270 | "sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc"
271 | ],
272 | "version": "==6.2.3"
273 | },
274 | "pytest-cov": {
275 | "hashes": [
276 | "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7",
277 | "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"
278 | ],
279 | "index": "pypi",
280 | "version": "==2.11.1"
281 | },
282 | "pytest-flake8": {
283 | "hashes": [
284 | "sha256:c28cf23e7d359753c896745fd4ba859495d02e16c84bac36caa8b1eec58f5bc1",
285 | "sha256:f0259761a903563f33d6f099914afef339c085085e643bee8343eb323b32dd6b"
286 | ],
287 | "index": "pypi",
288 | "version": "==1.0.7"
289 | },
290 | "pytz": {
291 | "hashes": [
292 | "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da",
293 | "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"
294 | ],
295 | "version": "==2021.1"
296 | },
297 | "requests": {
298 | "hashes": [
299 | "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
300 | "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
301 | ],
302 | "version": "==2.25.1"
303 | },
304 | "snowballstemmer": {
305 | "hashes": [
306 | "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2",
307 | "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"
308 | ],
309 | "version": "==2.1.0"
310 | },
311 | "sphinx": {
312 | "hashes": [
313 | "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1",
314 | "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"
315 | ],
316 | "version": "==3.5.4"
317 | },
318 | "sphinx-multiversion": {
319 | "editable": true,
320 | "git": "https://github.com/DiamondLightSource/sphinx-multiversion.git",
321 | "ref": "3e72beb7f8f0a76b53722fb2eb3daeed3d2a3d31"
322 | },
323 | "sphinx-rtd-theme": {
324 | "hashes": [
325 | "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a",
326 | "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"
327 | ],
328 | "index": "pypi",
329 | "version": "==0.5.2"
330 | },
331 | "sphinxcontrib-applehelp": {
332 | "hashes": [
333 | "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
334 | "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
335 | ],
336 | "version": "==1.0.2"
337 | },
338 | "sphinxcontrib-devhelp": {
339 | "hashes": [
340 | "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
341 | "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
342 | ],
343 | "version": "==1.0.2"
344 | },
345 | "sphinxcontrib-htmlhelp": {
346 | "hashes": [
347 | "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
348 | "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
349 | ],
350 | "version": "==1.0.3"
351 | },
352 | "sphinxcontrib-jsmath": {
353 | "hashes": [
354 | "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
355 | "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
356 | ],
357 | "version": "==1.0.1"
358 | },
359 | "sphinxcontrib-qthelp": {
360 | "hashes": [
361 | "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
362 | "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
363 | ],
364 | "version": "==1.0.3"
365 | },
366 | "sphinxcontrib-serializinghtml": {
367 | "hashes": [
368 | "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
369 | "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
370 | ],
371 | "version": "==1.1.4"
372 | },
373 | "toml": {
374 | "hashes": [
375 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
376 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
377 | ],
378 | "version": "==0.10.2"
379 | },
380 | "typing-extensions": {
381 | "hashes": [
382 | "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497",
383 | "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342",
384 | "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"
385 | ],
386 | "markers": "python_version < '3.8'",
387 | "version": "==3.10.0.0"
388 | },
389 | "urllib3": {
390 | "hashes": [
391 | "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c",
392 | "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"
393 | ],
394 | "version": "==1.26.5"
395 | },
396 | "zipp": {
397 | "hashes": [
398 | "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76",
399 | "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"
400 | ],
401 | "version": "==3.4.1"
402 | }
403 | }
404 | }
405 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | EPICS Database Builder
2 | ======================
3 |
4 | |code_ci| |docs_ci| |coverage| |pypi_version| |license|
5 |
6 | This Python module is designed to make it easy to build complex EPICS databases
7 | by writing a Python script. While writing record templates the corresponding
8 | dbd entries are checked to reduce the number of errors in the database.
9 |
10 | ============== ==============================================================
11 | PyPI ``pip install epicsdbbuilder``
12 | Source code https://github.com/DiamondLightSource/epicsdbbuilder
13 | Documentation https://DiamondLightSource.github.io/epicsdbbuilder
14 | ============== ==============================================================
15 |
16 | A simple example of the use of this library is the following:
17 |
18 | .. code:: python
19 |
20 | from epicsdbbuilder import *
21 | InitialiseDbd('/dls_sw/epics/R3.14.12.3/base/')
22 | SetTemplateRecordNames()
23 |
24 | a = records.ao('TEST')
25 | c = records.calc('CALC', CALC = 'A+B', SCAN = '1 second', INPA = a.VAL)
26 | c.INPB = c
27 |
28 | WriteRecords('output.db')
29 |
30 | .. |code_ci| image:: https://github.com/DiamondLightSource/epicsdbbuilder/workflows/Code%20CI/badge.svg?branch=master
31 | :target: https://github.com/DiamondLightSource/epicsdbbuilder/actions?query=workflow%3A%22Code+CI%22
32 | :alt: Code CI
33 |
34 | .. |docs_ci| image:: https://github.com/DiamondLightSource/epicsdbbuilder/workflows/Docs%20CI/badge.svg?branch=master
35 | :target: https://github.com/DiamondLightSource/epicsdbbuilder/actions?query=workflow%3A%22Docs+CI%22
36 | :alt: Docs CI
37 |
38 | .. |coverage| image:: https://codecov.io/gh/DiamondLightSource/epicsdbbuilder/branch/master/graph/badge.svg
39 | :target: https://codecov.io/gh/DiamondLightSource/epicsdbbuilder
40 | :alt: Test Coverage
41 |
42 | .. |pypi_version| image:: https://img.shields.io/pypi/v/epicsdbbuilder.svg
43 | :target: https://pypi.org/project/epicsdbbuilder
44 | :alt: Latest PyPI version
45 |
46 | .. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg
47 | :target: https://opensource.org/licenses/Apache-2.0
48 | :alt: Apache License
49 |
50 | ..
51 | Anything below this line is used when viewing README.rst and will be replaced
52 | when included in index.rst
53 |
54 | See https://DiamondLightSource.github.io/epicsdbbuilder for more detailed documentation.
55 |
56 |
57 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | /_build
2 |
--------------------------------------------------------------------------------
/docs/_static/theme_overrides.css:
--------------------------------------------------------------------------------
1 | /* allow us to stop margin after list so we can add to it */
2 | .no-margin-after-ul ul {
3 | margin-bottom: 0 !important;
4 | }
5 |
--------------------------------------------------------------------------------
/docs/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {% extends "!layout.html" %}
2 | {% block menu %}
3 |
9 | {{ super() }}
10 |
11 |
12 | Index
13 |
14 |
15 | {% if versions %}
16 |
22 | {% endif %}
23 | {% endblock %}
24 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | .. py:module:: epicsdbbuilder
2 |
3 | Database Builder API
4 | ====================
5 |
6 | Initialisation
7 | --------------
8 |
9 | .. function:: InitialiseDbd(epics_base=None, host_arch=None)
10 |
11 | This must be called once before calling any other functions. There are two
12 | possible mechanisms for locating EPICS base libraries and dbds.
13 |
14 | 1. Using the epicscorelibs_ library. If ``epics_base`` is ``None`` then the
15 | ``epicscorelibs`` package will be imported and used as the source of
16 | libraries and dbds.
17 |
18 | 2. Locally install EPICS. If ``epics_base`` is provided, then it must be an
19 | absolute path to the directory containing EPICS base, in particular this
20 | directory must contain both ``dbd/base.dbd`` and ``lib/``. This function
21 | will load the base EPICS dbd file. The host architecture can normally be
22 | computed automatically, but if this computation fails this can be specified
23 | by setting ``host_arch``.
24 |
25 | .. function:: LoadDbdFile(dbdfile, on_use=None)
26 |
27 | This can be called before creating records to load extra databases. If
28 | `on_use` is not ``None`` it must be a callable taking one argument, and it
29 | will be called for each record that is created using records defined by this
30 | dbd.
31 |
32 |
33 | Record Output
34 | -------------
35 |
36 | .. function:: WriteRecords(filename, header=None, alphabetical=True)
37 |
38 | This should be called after creating all records. The generated records
39 | will be written out to the file `filename`. If `header` is left unspecified
40 |    then a standard disclaimer header will be generated::
41 |
42 | # This file was automatically generated on Fri 27 Feb 2015 15:31:14 GMT.
43 | #
44 | # *** Please do not edit this file: edit the source file instead. ***
45 |
46 |    Note that the leading ``#`` comments are added to any header that is passed.
47 |
48 | If `alphabetical` then records, their fields and aliases will be sorted
49 | alphabetically, otherwise the records and aliases will be in insertion
50 | order and fields in DBD order.
51 |
52 | .. function:: Disclaimer(source=None, normalise_path=True)
53 |
54 | This function generates the disclaimer above. If a source file name is
55 | passed then it is included in the disclaimer. Unless `normalise_path` is
56 |    set to ``False`` the `source` argument will be normalised by calling
57 | ``os.path.abspath(source)``; this allows the caller to simply pass
58 | ``__file__`` as the source argument with the desired result.
59 |
60 | .. function:: CountRecords()
61 |
62 | Returns the number of records that have currently been created.
63 |
64 | .. function:: ResetRecords()
65 |
66 | Resets the list of records to be written. This can be used to write
67 | multiple databases.
68 |
69 |
70 | Building Databases
71 | ------------------
72 |
73 | .. data:: records
74 |
75 | This instance has a method for each record type, of the form:
76 |
77 | .. method:: records.type(name, **kargs)
78 |
79 | .. x** (vim fix)
80 |
81 | Here `name` will be used to construct the record name according to the
82 | record naming rules currently in force and any field can be given a
83 | value by assigning it in `kargs`.
84 |
85 | See :class:`epicsdbbuilder.recordbase.Record` for more details of these
86 | methods.
87 |
88 | Note that fields can be assigned either in the constructor or subsequently,
89 | and fields can be used as links::
90 |
91 | r = records.ai('NAME', INP = '@input')
92 | r.DESC = 'This is an ai record'
93 | r.FLNK = records.calc('NP1', CALC = 'A+1', INP = r.VAL)
94 |
95 | .. class:: Parameter(name, description='', default=None)
96 |
97 | When using :func:`TemplateRecordNames` this can be used to create template
98 | parameters with the given `name`. If `description` is given then this will
99 | be printed in the header. If a `default` string is given it will be used as
100 | the parameter default value, otherwise the parameter will be created with no
101 | default value.
102 |
103 | .. function:: ImportRecord(name)
104 |
105 | This generates a record reference without adding an entry into the generated
106 | database. Use this when linking to records outside of the database.
107 |
108 | .. function:: LookupRecord(full_name)
109 |
110 | Returns a reference to a record which has already been created.
111 |
112 |
113 | Record Naming
114 | -------------
115 |
116 | Record naming works as follows.  Every time a record is created with a call to the
117 | appropriate method of :data:`records` the name argument passed to that method is
118 | passed through the currently configured :func:`RecordName` method.
119 |
120 | If none of the functions named here are called then the default naming
121 | convention is applied: in this case record names are used unmodified.
122 |
123 | There is a simple "high level" API layered over a slightly more general
124 | interface.
125 |
126 | High Level API
127 | ~~~~~~~~~~~~~~
128 |
129 | Use one of the following functions for normal configuration:
130 |
131 | .. function:: SetSimpleRecordNames(prefix='', separator=':')
132 |
133 | In this case the given `prefix` and `separator` are added in front of any
134 | record name. If no arguments are given then the effect is the same as the
135 | default naming convention which is to use names unchanged.
136 |
137 | .. function:: SetTemplateRecordNames(prefix=None, separator=':')
138 |
139 | This is useful for generating template databases. If `prefix` is not
140 | specified then a :class:`Parameter` instance with name ``DEVICE`` is created
141 | and prefixed together with the `separator` to each record name.
142 |
143 | .. function:: RecordName(name)
144 |
145 | Applies the current record name conversion to compute a full record name.
146 |
147 | .. function:: SetPrefix(prefix)
148 |
149 | The currently configured prefix can be changed. This function will only
150 | work if a :class:`SimpleRecordNames` or similar naming mechanism is
151 | installed.
152 |
153 | .. function::
154 | PushPrefix(prefix)
155 | PopPrefix()
156 |
157 | These two functions manage a stack of record name prefixes, which will be
158 | separated by `separator` before being appended to the record name.
159 |
160 | .. function:: SetSeparator(separator)
161 |
162 | This function can be used to change the prefix separator.
163 |
164 |
165 | General Interface
166 | ~~~~~~~~~~~~~~~~~
167 |
168 | More generally any callable object can be used for record name generation.
169 |
170 | .. function:: SetRecordNames(names)
171 |
172 | This sets up a record naming convention. The argument passed will be called
173 | each time a new record is created. This function should take a name as
174 | argument and return the full name to be written to the generated database.
175 |
176 | The default naming mechanism uses the record name unmodified.
177 |
178 | When this method is called the previously establishing record naming
179 | convention is returned.
180 |
181 | .. class:: SimpleRecordNames(prefix='', separator=':', check=True)
182 |
183 | This implements a minimal naming convention. If no `prefix` is specified
184 | record names are generated unchanged, otherwise the given `prefix` and
185 |    `separator` are concatenated to the front of the passed argument.  If
186 |    `check` is set then the resulting name is checked for length.  Supports the
187 | following methods.
188 |
189 | .. method:: __call__(name)
190 |
191 | Returns `prefix` + `separator` + `name`. If `prefix` is currently
192 | ``None`` then an error will be generated.
193 |
194 | .. method:: SetPrefix(prefix)
195 |
196 | Allows the prefix to be modified. This can be called via the global
197 | :func:`SetPrefix` method.
198 |
199 | .. method::
200 | PushPrefix(prefix)
201 | PopPrefix()
202 |
203 | These two functions manage a stack of record name prefixes, which will
204 | be separated by `separator` before being appended to the record name.
205 | Can be called via the corresponding global functions.
206 |
207 |
208 | .. class:: TemplateRecordNames(prefix=None, separator=':')
209 |
210 | Subclasses :class:`SimpleRecordNames` to automatically add a ``$(DEVICE)``
211 | template to the prefix stack.
212 |
213 | .. function:: GetRecordNames()
214 |
215 | Returns the current record naming convention.
216 |
217 |
218 | Helper Functions and Classes
219 | ----------------------------
220 |
221 | .. function::
222 | CA(record)
223 | CP(record)
224 | CPP(record)
225 | NP(record)
226 | PP(record)
227 | MS(record)
228 | MSS(record)
229 | MSI(record)
230 | NMS(record)
231 |
232 | Used for record links to add the appropriate processing annotation to the
233 | link.
234 |
235 | Example (Python source)::
236 |
237 | other_record = records.ai('other')
238 | my_record.INP = PP(MS(other_record))
239 |
240 | Example (Generated DB)::
241 |
242 | field(INP, "other PP MS")
243 |
244 |
245 | JSON links
246 | ~~~~~~~~~~
247 |
248 | EPICS base 3.16.1 (and 7.0.1) has the ability to add JSON links:
249 | https://epics.anl.gov/base/R7-0/6-docs/links.html
250 |
251 | You can use these by passing a dictionary structure to one of the database
252 | fields or to :meth:`~recordbase.Record.add_info()`. Note that no validation is
253 | done on the structure of these as ``dbVerify`` does not validate them::
254 |
255 | records.ai('other', VAL={"const": 3.14159265358979})
256 |
257 |
258 | .. class::
259 | ConstArray(iterator)
260 |
261 | Used for **Constant Link Values** available since EPICS 3.16.1.
262 | Constant Link Values is an EPICS feature which allows passing
263 |    a list of strings or a list of numbers as a constant into
264 | a field which contains a DB link (e.g. `INP`):
265 | https://epics.anl.gov/base/R7-0/6-docs/RELEASE_NOTES.html#constant-link-values
266 |
267 | ConstArray will accept any iterable (e.g. a list) which can generate
268 | a non-empty list of values of the same type. Allowed types are:
269 |
270 | * strings and parameters (i.e. :py:class:`epicsdbbuilder.Parameter`)
271 |
272 | * numbers (integers, floating-point, :py:class:`decimal.Decimal`,
273 |      and booleans).
274 | Booleans will convert to `0` (`False`) or `1` (`True`) automatically.
275 |
276 |
277 | Known limitations:
278 |
279 | * No field type or record type check.
280 |      ConstArray can be assigned to any field even if the field
281 | or the record type does not support Constant Link Values.
282 | Use it with link fields (e.g. `INP`) of record types `stringin`,
283 | `stringout`, `lso`, `lsi`, `printf`, `waveform`, `subArray`, and `aai`.
284 | Any other use is undefined and a warning may or may not appear
285 | while loading the DB
286 | (e.g. assigning `["1.23"]` to INP of the record type `ai` will print
287 | a warning while assigning `[1.23]` to INP of the record type `ai` will treat
288 | it as a CA link without any warning on EPICS 7.0.3.1).
289 | Always refer to EPICS Release Notes (section "Constant Link Values").
290 |
291 |
292 | Example (Python source)::
293 |
294 | r = records.lsi('r', INP=ConstArray(['Plain String not DBLINK']))
295 |
296 | Example (generated DB)::
297 |
298 | field(INP, ["Plain String not DBLINK"])
299 |
300 | .. function:: create_fanout(name, *records, **args)
301 |
302 | Creates one or more fanout records (as necessary) to fan processing out to
303 | all records in `records`. The first fanout record is named `name`, for
304 | others a sequence number is appended to `name`.
305 |
306 | .. function:: create_dfanout(name, *records, **args)
307 |
308 |    Creates one or more dfanout records as necessary to fan a data output to
309 |    the list of records in `records`.
310 |
311 |
312 |
313 | Record Class
314 | ------------
315 |
316 | .. py:currentmodule:: epicsdbbuilder.recordbase
317 | .. class:: Record
318 |
319 | A subclass of this class is created for each record type and used to
320 | populate :data:`epicsdbbuilder.records`: for example, ``records.ai`` is a
321 | subclass of :class:`Record` used to generate ``ai`` records.
322 |
323 | .. method:: __init__(name, **kargs)
324 |
325 | .. x** (vim fix)
326 |
327 | The argument `name` is used to construct the record name. Any field
328 | appropriate for this record type can be named in `kargs`, for example::
329 |
330 | records.ai('NAME', VAL = 42, PINI = 'YES')
331 |
332 | .. method:: add_alias(alias)
333 |
334 | This method causes an EPICS ``alias`` statement to be added to the
335 | database giving `alias` as an alternative name for this record. The
336 | `alias` argument is used unchanged.
337 |
338 | .. method:: add_comment(comment)
339 |
340 | This adds a comment line above the created record. ``'# '`` is prepended
341 | to the comment string. Comments are emitted in the order that they are
342 | added.
343 |
344 | .. method:: add_metadata(metadata)
345 |
346 | This adds a metadata entry to the created record. Metadata entries are
347 | like comments, and will be emitted in the same order, but have ``'#% '``
348 | prepended to them. They can be used by processing tools.
349 |
350 | .. method:: add_info(name, value)
351 |
352 | This method causes an EPICS ``info`` statement to be added to the
353 |       database.  Its value can be either a dictionary structure which will be
354 | converted to JSON (e.g. for ``info(Q:group, {...})``) or something else
355 | which will be double quoted (e.g. for ``info(autosaveFields, "VAL")``).
356 |
357 |
358 | Using other dbCore functions
359 | ----------------------------
360 |
361 | Advanced usage may require using other functions from the dbCore library to
362 | get extra introspection information about records.
363 |
364 | .. py:currentmodule:: epicsdbbuilder.mydbstatic
365 | .. function:: GetDbFunction(name, restype=None, argtypes=None, errcheck=None)
366 |
367 | This can be used to get a ``dbCore`` function by ``name``. The other
368 |    arguments are used to describe the function in the standard ctypes way.
369 |
370 | The other introspection functions will need a pointer to the current record and
371 | field within the currently loaded DBD. This can be done with the `DBEntry`
372 | utility class
373 |
374 | .. py:currentmodule:: epicsdbbuilder.dbd
375 | .. autoclass:: DBEntry
376 | :members:
377 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # EPICS Database Builder documentation build configuration file, created by
4 | # sphinx-quickstart on Fri May 14 13:06:33 2010.
5 | #
6 | # This file is execfile()d with the current directory set to its containing dir.
7 | #
8 | # Note that not all possible configuration values are present in this
9 | # autogenerated file.
10 | #
11 | # All configuration values have a default; values that are commented out
12 | # serve to show the default.
13 |
14 | import os
15 | import sys
16 |
17 | # If extensions (or modules to document with autodoc) are in another directory,
18 | # add these directories to sys.path here. If the directory is relative to the
19 | # documentation root, use os.path.abspath to make it absolute, like shown here.
20 | repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
21 | sys.path.insert(0, repo_root)
22 |
23 | import epicsdbbuilder # noqa
24 |
25 | # General information about the project.
26 | project = u'EPICS Database Builder'
27 | copyright = u'2015, Michael Abbott'
28 |
29 | # The full version, including alpha/beta/rc tags.
30 | release = epicsdbbuilder.__version__
31 |
32 | # The short X.Y version.
33 | if '+' in release:
34 | # Not on a tag
35 | version = 'master'
36 | else:
37 | version = release
38 |
39 | # -- General configuration -----------------------------------------------------
40 |
41 | extensions = [
42 | # For linking to external sphinx documentation
43 | 'sphinx.ext.intersphinx',
44 | # Add links to source code in API docs
45 | 'sphinx.ext.viewcode',
46 | # Add multiple versions of documentation on CI
47 | 'sphinx_multiversion',
48 | # Add autodoc directives
49 | 'sphinx.ext.autodoc',
50 | ]
51 |
52 | viewcode_import = True
53 |
54 | # If true, Sphinx will warn about all references where the target cannot
55 | # be found.
56 | nitpicky = True
57 |
58 | # A list of (type, target) tuples (by default empty) that should be ignored when
59 | # generating warnings in "nitpicky mode". Note that type should include the
60 | # domain name if present. Example entries would be ('py:func', 'int') or
61 | # ('envvar', 'LD_LIBRARY_PATH').
62 | nitpick_ignore = [('py:func', 'int')]
63 |
64 | # Add any paths that contain templates here, relative to this directory.
65 | templates_path = ['_templates']
66 |
67 | # The suffix of source filenames.
68 | source_suffix = '.rst'
69 |
70 | # The master toctree document.
71 | master_doc = 'index'
72 |
# Use the Sphinx Python domain for code documentation.
primary_domain = 'py'

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
# NOTE(review): exclude_trees is deprecated in modern Sphinx in favour of
# exclude_patterns — confirm against the Sphinx version pinned by this project.
exclude_trees = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# This means you can link things like `str` and `asyncio` to the relevant
# docs in the python documentation.
intersphinx_mapping = dict(python=('https://docs.python.org/3/', None))

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  Prefer the Read the Docs
# theme when it is installed, otherwise fall back to the builtin default.
try:
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
except ImportError:
    html_theme = 'default'

# Options for the sphinx rtd theme, use DLS blue for the navigation header
html_theme_options = dict(style_nav_header_background='rgb(7, 43, 93)')

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True

# Add some CSS classes for columns and other tweaks in a custom css file
html_css_files = ['theme_overrides.css']

# Logo shown in the sidebar and browser tab
html_logo = 'images/dls-logo.svg'
html_favicon = 'images/dls-favicon.ico'

# sphinx-multiversion config: build docs for master and each version-like tag
smv_rebuild_tags = False
smv_tag_whitelist = r'^\d+\.\d+.*$'  # only document tags with form 0.9*
smv_branch_whitelist = r'^master$'   # only branch to document is master
smv_outputdir_format = '{ref.name}'
smv_prefer_remote_refs = False
smv_remote_whitelist = 'origin|github'

# Common links that should be available on every page
rst_epilog = """
.. _epicscorelibs: https://github.com/mdavidsaver/epicscorelibs
"""
130 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/images/dls-favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DiamondLightSource/epicsdbbuilder/88ac2802a17b47553bc6591ec56d8b1197645b1f/docs/images/dls-favicon.ico
--------------------------------------------------------------------------------
/docs/images/dls-logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
12 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 | :end-before: when included in index.rst
3 |
4 | .. rst-class:: no-margin-after-ul
5 |
6 | .. toctree::
7 | :numbered:
8 | :maxdepth: 2
9 |
10 | overview
11 | api
12 | contributing
13 |
14 | * :ref:`genindex`
15 |
16 |
--------------------------------------------------------------------------------
/docs/overview.rst:
--------------------------------------------------------------------------------
1 | .. currentmodule:: epicsdbbuilder
2 |
3 | EPICS Database Builder
4 | ======================
5 |
6 | The EPICS database builder is a support library designed to make it easy to
7 | create EPICS databases from a Python script. To create a script the following
8 | steps should be followed:
9 |
10 | 1. After importing :mod:`epicsdbbuilder` it must be initialised by calling
11 | :func:`InitialiseDbd`. This function can either be called without any
12 | arguments if the epicscorelibs_ module is installed, or passed an absolute
13 | path to an EPICS base directory where the ``dbd`` directory containing
14 | ``base.dbd`` can be found.
15 |
16 | 2. Next any other dbd files needed can be loaded by calls to
17 | :func:`LoadDbdFile`.
18 |
19 | 3. Optionally a naming convention can be installed by calling
20 | :func:`SetRecordNames`, or template naming can be set up by calling
21 | :func:`SetTemplateRecordNames`.
22 |
23 | 4. Records can be freely generated by calls to record generation methods of
24 | :data:`records`. The loaded dbd files will be used to ensure that all
25 | values written to fields are valid.
26 |
27 | 5. Finally the generated database should be written out by calling
28 | :func:`WriteRecords`.
29 |
30 |
31 | A simple example follows::
32 |
33 | from epicsdbbuilder import *
34 | InitialiseDbd('/dls_sw/epics/R3.14.12.3/base/')
35 | SetTemplateRecordNames()
36 |
37 | a = records.ao('TEST')
38 | c = records.calc('CALC', CALC = 'A+B', SCAN = '1 second', INPA = a.VAL)
39 | c.INPB = c
40 |
41 | WriteRecords('output.db')
42 |
43 | Running this Python script will generate the following database file::
44 |
45 | # This file was automatically generated on Mon 02 Mar 2015 12:57:13 GMT.
46 | #
47 | # *** Please do not edit this file: edit the source file instead. ***
48 | #
49 | #% macro, DEVICE, Device name
50 |
51 | record(calc, "$(DEVICE):CALC")
52 | {
53 | field(CALC, "A+B")
54 | field(INPA, "$(DEVICE):TEST.VAL")
55 | field(INPB, "$(DEVICE):CALC")
56 | field(SCAN, "1 second")
57 | }
58 |
59 | record(ao, "$(DEVICE):TEST")
60 | {
61 | }
62 |
63 | Note that record fields can be given values when the record is constructed, or
64 | can be assigned a new value at any time.
65 |
--------------------------------------------------------------------------------
/epicsdbbuilder/__init__.py:
--------------------------------------------------------------------------------
1 | # EPICS Database Building Framework
2 |
3 | # All these have an __all__ so rely on that
4 | from epicsdbbuilder.dbd import *
5 | from epicsdbbuilder.recordbase import *
6 | from epicsdbbuilder.fanout import *
7 | from epicsdbbuilder.recordset import *
8 | from epicsdbbuilder.recordnames import *
9 | from epicsdbbuilder.parameter import *
10 | from epicsdbbuilder.const_array import *
11 | from epicsdbbuilder._version_git import __version__
12 |
--------------------------------------------------------------------------------
/epicsdbbuilder/_version_git.py:
--------------------------------------------------------------------------------
1 | # Compute a version number from a git repo or archive
2 |
3 | # This file is released into the public domain. Generated by:
4 | # versiongit-1.0 (https://github.com/DiamondLightSource/versiongit)
5 | import os
6 | import re
7 | import sys
8 | from subprocess import STDOUT, CalledProcessError, check_output
9 |
10 | # These will be filled in if git archive is run or by setup.py cmdclasses
11 | GIT_REFS = "HEAD -> master"
12 | GIT_SHA1 = "88ac280"
13 |
14 | # Git describe gives us sha1, last version-like tag, and commits since then
15 | CMD = "git describe --tags --dirty --always --long --match=[0-9]*[-.][0-9]*"
16 |
17 |
def get_version_from_git(path=None):
    """Compute a PEP 440-ish version string for this checkout.

    Returns a tuple (version, sha1, error):
    - version: tag, or "tag+N.gSHA1[.dirty]" when not exactly on a tag,
      or "0.0+unknown" when git describe fails;
    - sha1: abbreviated commit hash (None on failure);
    - error: the exception from git describe, or None on success.

    If GIT_SHA1 has been substituted (by git archive or the cmdclasses
    below) the static strings are used; otherwise ``git describe`` is run
    in ``path`` (defaulting to this file's directory).
    """
    tag, plus, suffix = "0.0", "untagged", ""
    if not GIT_SHA1.startswith("$"):
        # git archive or the cmdclasses below have filled in these strings
        sha1 = GIT_SHA1
        for ref_name in GIT_REFS.split(", "):
            if ref_name.startswith("tag: "):
                # git from 1.8.3 onwards labels archive tags "tag: TAGNAME"
                tag, plus = ref_name[5:], "0"
    else:
        if path is None:
            # If no path to git repo, choose the directory this file is in
            path = os.path.dirname(os.path.abspath(__file__))
        # output is TAG-NUM-gHEX[-dirty] or HEX[-dirty]
        try:
            cmd_out = check_output(CMD.split(), stderr=STDOUT, cwd=path)
        except Exception as e:
            # Report the failure on stderr but return a usable fallback
            sys.stderr.write("%s: %s\n" % (type(e).__name__, str(e)))
            if isinstance(e, CalledProcessError):
                sys.stderr.write("-> %s" % e.output.decode())
            return "0.0+unknown", None, e
        else:
            out = cmd_out.decode().strip()
            if out.endswith("-dirty"):
                out = out[:-6]
                suffix = ".dirty"
            if "-" in out:
                # There is a tag, extract it and the other pieces
                # NOTE(review): match would be None if describe output were
                # unexpected, raising AttributeError below — confirm this is
                # acceptable for this tool.
                match = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", out)
                tag, plus, sha1 = match.groups()
            else:
                # No tag, just sha1
                sha1 = out
    # Replace dashes in tag for dots
    tag = tag.replace("-", ".")
    if plus != "0" or suffix:
        # Not on a tag, add additional info
        tag = "%(tag)s+%(plus)s.g%(sha1)s%(suffix)s" % locals()
    return tag, sha1, None
58 |
59 |
60 | __version__, git_sha1, git_error = get_version_from_git()
61 |
62 |
def get_cmdclass(build_py=None, sdist=None):
    """Create a cmdclass dict to pass to setuptools.setup.

    The returned build_py and sdist commands rewrite the copy of
    _version_git.py placed in the sdist/wheel/egg so that GIT_SHA1 and
    GIT_REFS hold static values, making the built artifact's version
    independent of git.  Custom base command classes may be supplied via
    the build_py/sdist arguments; by default the setuptools ones are used.
    """
    if build_py is None:
        from setuptools.command.build_py import build_py
    if sdist is None:
        from setuptools.command.sdist import sdist

    def make_version_static(base_dir, pkg):
        # Rewrite <base_dir>/<top package>/_version_git.py in place,
        # substituting the GIT_* lines with the current static values.
        vg = os.path.join(base_dir, pkg.split(".")[0], "_version_git.py")
        if os.path.isfile(vg):
            lines = open(vg).readlines()
            with open(vg, "w") as f:
                for line in lines:
                    # Replace GIT_* with static versions
                    if line.startswith("GIT_SHA1 = "):
                        f.write("GIT_SHA1 = '%s'\n" % git_sha1)
                    elif line.startswith("GIT_REFS = "):
                        f.write("GIT_REFS = 'tag: %s'\n" % __version__)
                    else:
                        f.write(line)

    class BuildPy(build_py):
        # After the normal build, freeze the version in the build tree.
        def run(self):
            build_py.run(self)
            for pkg in self.packages:
                make_version_static(self.build_lib, pkg)

    class Sdist(sdist):
        # After the release tree is laid out, freeze the version in it.
        def make_release_tree(self, base_dir, files):
            sdist.make_release_tree(self, base_dir, files)
            for pkg in self.distribution.packages:
                make_version_static(base_dir, pkg)

    return dict(build_py=BuildPy, sdist=Sdist)
98 |
--------------------------------------------------------------------------------
/epicsdbbuilder/bits.py:
--------------------------------------------------------------------------------
1 | from .dbd import records
2 |
3 |
4 | # Extend the automatically generated mbbiDirect and mbboDirect classes
5 | # with bit and register methods so they behave a little like hardware.
6 |
def Extend_mbbiDirect(mbbiDirect):
    """Return a subclass of mbbiDirect with bit() and register() methods
    giving deferred _Bits views (for bi records) over fields B0..BF."""
    class mbbiDirect(mbbiDirect):
        def register(self, offset, length):
            # Deferred view over a contiguous run of bits of this record.
            return _Bits(self, BIT_INPUT, records.bi, offset, length)

        def bit(self, offset):
            # A single bit is just a register of length one.
            return self.register(offset, 1)

    return mbbiDirect
16 |
17 |
def Extend_mbboDirect(mbboDirect):
    """Return a subclass of mbboDirect with bit() and register() methods
    giving deferred _Bits views (for bo records) over fields B0..BF."""
    class mbboDirect(mbboDirect):
        def register(self, offset, length):
            # Deferred view over a contiguous run of bits of this record.
            return _Bits(self, BIT_OUTPUT, records.bo, offset, length)

        def bit(self, offset):
            # A single bit is just a register of length one.
            return self.register(offset, 1)

    return mbboDirect
27 |
28 |
29 | # List of classes which we can extend.
30 | ClassExtensions = dict(
31 | mbbiDirect = Extend_mbbiDirect,
32 | mbboDirect = Extend_mbboDirect)
33 |
34 |
def ExtendClass(recordClass):
    """Return recordClass, wrapped by its registered extension (if any
    exists in ClassExtensions for its record type name)."""
    extension = ClassExtensions.get(recordClass.__name__)
    if extension is None:
        return recordClass
    return extension(recordClass)
42 |
43 |
44 |
45 |
46 |
47 |
48 | # ---------------------------------------------------------------------------
49 | #
50 | # Record support
51 |
52 |
# Direction selectors used by _Bits.
BIT_INPUT = 0
BIT_OUTPUT = 1

# Factory for building bits from an existing record.  The parent record
# (generally assumed to be an mbbi record) should present the bits to be
# read or written in fields B0 to BF.
#
# Depending on direction, bi or bo can be invoked to create records linking
# to the parent record.  Also bit and register can be used to postpone
# the creation of real records!
class _Bits:
    # parent:    record whose B0..BF fields carry the bits
    # direction: BIT_INPUT or BIT_OUTPUT
    # factory:   record constructor used by _makeBit (records.bi/records.bo
    #            as wired up in Extend_mbbiDirect/Extend_mbboDirect)
    # offset:    index of the first bit within the parent's 16 bit fields
    # length:    number of bits covered by this view
    def __init__(self, parent, direction, factory, offset, length):
        assert 0 <= offset and offset + length <= 16, \
            'Bit field out of range'
        self.parent = parent
        self.direction = direction
        self.offset = offset
        self.length = length
        self._record = factory
        # Publish the direction-appropriate construction method (bi or bo)
        # and remember the link field name and link modifier it should use.
        if direction == BIT_INPUT:
            self.bi = self._makeBit
            self._field = 'INP'
            self._linkage = 'CP'
        if direction == BIT_OUTPUT:
            self.bo = self._makeBit
            self._field = 'OUT'
            self._linkage = 'PP'

    # This function implements either bi or bo depending on direction: it
    # builds a record via the stored factory and points its INP/OUT field at
    # the selected B<n> field of the parent record.
    def _makeBit(self, record, bit=0, **fields):
        assert 0 <= bit and bit < self.length, 'Bit out of range'
        r = self._record(record, **fields)
        setattr(
            r, self._field,
            getattr(self.parent, 'B%X' % (self.offset + bit))(self._linkage))
        return r

    # A single bit is a register of length one.
    def bit(self, bit):
        return self.register(bit, 1)

    # Returns a narrowed _Bits view onto a sub-range of this bit field.
    def register(self, offset, length):
        assert 0 <= offset and 0 < length and offset + length <= self.length, \
            'Bit field out of range'
        return _Bits(
            self.parent, self.direction, self._record,
            self.offset + offset, length)
99 |
--------------------------------------------------------------------------------
/epicsdbbuilder/const_array.py:
--------------------------------------------------------------------------------
1 | from decimal import Decimal
2 | from .recordbase import quote_string
3 | from .parameter import Parameter
4 |
5 | __all__ = ['ConstArray']
6 |
7 |
class ConstArray:
    """Constant Link Values. EPICS Base 3.16.1 and above.

    Wraps a homogeneous, non-empty list of values so the field is written
    to the generated database as a constant array rather than a DB link.

    Example: PY Source
    ------------------
    `r = records.lsi('r', INP=ConstArray(['Plain String not DBLINK']))`

    Example: Generated DB
    ---------------------
    `field(INP, ["Plain String not DBLINK"])`
    """

    def __init__(self, value):
        """Constructor.

        Parameters
        ----------
        value : iterable
            Iterable which can provide a homogeneous non-empty list of values.
        """
        self.__value = self._sanitize(value)

    def _sanitize(self, raw_value):
        # ConstArray allows iterable only.
        value_list = list(raw_value)

        # EPICS 7.0.3.1 does not consider "[]" as constant.
        assert len(value_list) > 0, \
            'ConstArray: Empty iterable is not allowed.'

        # EPICS does not allow mixing of strings and numbers.
        numbers = False
        strings = False
        valid_types = (Parameter, str, int, float, bool, Decimal)
        for index, value in enumerate(value_list):
            assert isinstance(value, valid_types), \
                'ConstArray: expects a string or parameter as element' \
                ' but an element at the index %s is %s.' % (index, type(value))

            if isinstance(value, (Parameter, str)):
                assert not numbers, \
                    'ConstArray: cannot mix strings with an' \
                    ' element at index %s which is %s.' % (index, type(value))
                strings = True
            else:
                assert not strings, \
                    'ConstArray: cannot mix numbers with an' \
                    ' element at index %s which is %s.' % (index, type(value))
                numbers = True

        return value_list

    def _format_constant(self, value):
        # bool is tested before the generic numeric fall-through because
        # bool is a subclass of int and must render as 0/1.
        if isinstance(value, Parameter):
            return '"%s"' % value
        elif isinstance(value, str):
            return quote_string(value)
        elif isinstance(value, bool):
            return '1' if value else '0'
        else:
            return str(value)

    def Validate(self, record, fieldname):
        """epicsdbbuilder callback
        """
        # Validation has been done inside the constructor already.
        # ConstArray is meant to be used with fields
        # which can contain a DB link (e.g. INLINK).
        # Unfortunately, dbVerify() does not verify
        # format of DB links. Therefore, it is not used here.
        pass

    def FormatDb(self, record, fieldname):
        """epicsdbbuilder callback: render the values as [v1,v2,...]
        """
        formatted = [self._format_constant(v) for v in self.__value]
        return '[{}]'.format(','.join(formatted))

    def __repr__(self):
        # BUG FIX: this previously read "return '' % self.__value", which
        # raised TypeError ("not all arguments converted") on every repr().
        return 'ConstArray(%r)' % (self.__value,)
88 |
--------------------------------------------------------------------------------
/epicsdbbuilder/dbd.py:
--------------------------------------------------------------------------------
1 | '''Implements the set of records provided by a dbd'''
2 |
3 | import os
4 | import os.path
5 | import ctypes
6 | import platform
7 |
8 | from . import mydbstatic # Pick up interface to EPICS dbd files
9 |
10 | from .recordbase import Record
11 |
12 |
13 | __all__ = ['InitialiseDbd', 'LoadDbdFile', 'records']
14 |
15 |
# This class contains all the record types currently supported by the loaded
17 | # dbd, and is published to the world as epics.records. As records are added
18 | # (in response to calls to LoadDbdFile) they are automatically available to
19 | # all targets.
class RecordTypes(object):
    """Namespace holding one record-generator class per known record type.

    This is published to the world as ``records``; record types are added
    as attributes in response to LoadDbdFile calls and are then available
    to all targets.
    """
    def __init__(self):
        # Names of every record type published so far.
        self.__names = set()

    def GetRecords(self):
        """Return the known record type names, sorted."""
        return sorted(self.__names)

    def _PublishRecordType(self, on_use, recordType, validate):
        """Build a Record subclass for recordType and publish it here."""
        self.__names.add(recordType)
        record_class = Record.CreateSubclass(on_use, recordType, validate)
        setattr(self, recordType, record_class)

    def __contains__(self, recordType):
        """True iff recordType names a known valid record type."""
        return recordType in self.__names
37 |
38 |
39 | # Every record type loaded from a DBD is present as an attribute of this
40 | # class with the name of the record type.
41 | #
42 | # For example, to create an ai record, simply write
43 | #
44 | # records.ai('NAME', DESC = 'A test ai record', EGU = 'V')
45 | #
46 | records = RecordTypes()
47 |
48 |
# This class uses the static database to validate whether the associated
# record type allows a given value to be written to a given field.
class ValidateDbField:
    def __init__(self, dbEntry):
        # Copy the existing entry so it stays on the right record
        self.dbEntry = DBEntry(dbEntry)
        # Lazily computed set of valid field names (see __ProcessDbd).
        self.__FieldInfo = None

    # Computes list of valid names and creates associated arginfo
    # definitions.  This is postponed quite late to try and ensure the menus
    # are fully populated, in other words we don't want to fire this until
    # all the dbd files have been loaded.
    def __ProcessDbd(self):
        # set of field names; NAME is excluded as it is not a settable field
        self.__FieldInfo = set()
        for field_name in self.dbEntry.iterate_fields():
            if field_name != 'NAME':
                self.__FieldInfo.add(field_name)


    # This method raises an attribute error if the given field name is
    # invalid.
    def ValidFieldName(self, name):
        if self.__FieldInfo is None:
            self.__ProcessDbd()
        if name not in self.__FieldInfo:
            raise AttributeError('Invalid field name %s' % name)

    # This method raises an exception if the given field name does not exist
    # or if the value cannot be validly written.
    def ValidFieldValue(self, name, value):
        # First check the field name is valid
        self.ValidFieldName(name)
        value = str(value)

        # Set the database cursor to the field
        for field_name in self.dbEntry.iterate_fields():
            if field_name == name:
                break

        # Now see if we can write the value to it
        message = mydbstatic.dbVerify(self.dbEntry, value)
        assert message is None, \
            'Can\'t write "%s" to field %s: %s' % (value, name, message)
93 |
94 |
95 |
96 | # The same database pointer is used for all DBD files: this means that all
97 | # the DBD entries are accumulated into a single large database.
98 | _db = ctypes.c_void_p()
99 |
100 |
class DBEntry(object):
    """Create a dbEntry instance within the current DBD.

    This is a stateful pointer that can be moved to different
    record types and fields within them with the iterate methods.

    If entry is specified on init and is a DBEntry instance, it
    will be copied so that its position is maintained.
    """
    def __init__(self, entry=None):
        # BUG FIX: message previously read "LoadDdbFile" (typo).
        assert _db, "LoadDbdFile not called yet"
        if entry is None:
            # No entry, so alloc a new one
            self._as_parameter_ = mydbstatic.dbAllocEntry(_db)
        else:
            # Existing entry, copy it so it stays on the same record
            self._as_parameter_ = mydbstatic.dbCopyEntry(entry)

    def iterate_records(self):
        """Iterate through the record types, yielding their names"""
        status = mydbstatic.dbFirstRecordType(self)
        while status == 0:
            yield mydbstatic.dbGetRecordTypeName(self)
            status = mydbstatic.dbNextRecordType(self)

    def iterate_fields(self, dct_only=0):
        """Iterate through a record's fields, yielding their names"""
        status = mydbstatic.dbFirstField(self, dct_only)
        while status == 0:
            yield mydbstatic.dbGetFieldName(self)
            status = mydbstatic.dbNextField(self, dct_only)

    def __del__(self):
        # Release the underlying dbEntry allocation.
        mydbstatic.dbFreeEntry(self._as_parameter_)
135 |
136 |
def LoadDbdFile(dbdfile, on_use = None):
    """Read dbdfile into the shared database and publish its record types.

    Any record type in the file not already present on ``records`` gains a
    generator class there; on_use is bound to each newly published class.
    """
    dirname, filename = os.path.split(dbdfile)

    # Read the specified dbd file into the current database.  This allows
    # us to see any new definitions.  We chdir into the file's directory so
    # relative includes resolve; the original directory is restored even on
    # failure (BUG FIX: previously not restored if dbReadDatabase raised).
    curdir = os.getcwd()
    if dirname:
        os.chdir(dirname)
    try:
        # We add /dbd to the path so that dbd includes can be resolved.
        separator = ';' if platform.system() == 'Windows' else ':'
        status = mydbstatic.dbReadDatabase(
            ctypes.byref(_db), filename,
            separator.join(['.', os.path.join(_epics_base, 'dbd')]), None)
    finally:
        os.chdir(curdir)
    assert status == 0, 'Error reading database %s (status %d)' % \
        (dbdfile, status)

    # Enumerate all the record types and build a record generator class
    # for each one that we've not seen before.
    entry = DBEntry()
    for record_type in entry.iterate_records():
        if not hasattr(records, record_type):
            validate = ValidateDbField(entry)
            records._PublishRecordType(on_use, record_type, validate)
165 |
166 |
def InitialiseDbd(epics_base = None, host_arch = None):
    """Load the EPICS static database library and base.dbd.

    If epics_base is given it must be the path to an EPICS base
    installation (host_arch optionally naming the EPICS host architecture);
    otherwise the libraries shipped with the epicscorelibs package are used.
    """
    global _epics_base
    if epics_base:
        # Import from given location
        mydbstatic.ImportFunctions(epics_base, host_arch)
        _epics_base = epics_base
    else:
        # Import from epicscorelibs installed libs
        from epicscorelibs import path
        mydbstatic.ImportFunctionsFrom(path.get_lib('dbCore'))
        _epics_base = path.base_path
    LoadDbdFile('base.dbd')
179 |
--------------------------------------------------------------------------------
/epicsdbbuilder/fanout.py:
--------------------------------------------------------------------------------
1 | '''Support for creating fanout records.'''
2 |
3 | from .dbd import records
4 | from .recordbase import PP
5 |
6 |
7 | __all__ = ['create_fanout', 'create_dfanout']
8 |
9 |
10 |
11 | # ----------------------------------------------------------------------------
12 | # Fanout record generation
13 |
14 |
15 | # This support routine chops the given list into segments no longer than size.
16 | def choplist(list, size):
17 | return [list[i:i+size] for i in range(0, len(list), size)]
18 |
19 |
20 | # Support routine to do the work of fanout generation common to fanout and
21 | # dfanout.
22 | def _fanout_helper(
23 | fanout_name, link_list, fanout_size, record_factory,
24 | field_name, fixup_link, firstargs, nextargs):
25 |
26 | # First break the list of links into chunks small enough for each fanout
27 | # record. First chop it into segments small enough to fit into each
28 | # fanout record, leaving room for an extra link. The last record can take
29 | # an extra entry so then fix up the chopped list.
30 | chopped = choplist(link_list, fanout_size - 1)
31 | if len(chopped) > 1 and len(chopped[-1]) == 1:
32 | chopped[-2:] = [chopped[-2] + chopped[-1]]
33 |
34 | # Convert the chopped list into a list of fanout records.
35 | recordList = []
36 | args = firstargs
37 | for i, links in enumerate(chopped):
38 | # The first record gets the standard name and a different set of
39 | # record arguments.
40 | name = fanout_name
41 | if i > 0:
42 | name += str(i)
43 | # Build a fanout record with the computed name and arguments.
44 | record = record_factory(name, **args)
45 | args = nextargs # Subsequent records get the other arguments
46 | # Link the new fanout record to the given list of links
47 | for i, link in enumerate(links):
48 | setattr(record, field_name(i), link)
49 | recordList.append(record)
50 |
51 | # Chain the fanout records together using the last field in each record:
52 | # we've taken care to reserve this field when we split the link list!
53 | next_name = field_name(fanout_size - 1)
54 | for prev, next in zip(recordList[:-1], recordList[1:]):
55 | setattr(prev, next_name, fixup_link(next))
56 |
57 | return recordList
58 |
59 |
60 |
def create_fanout(name, *record_list, **args):
    """Create a chain of 'fanout' records processing record_list in order;
    returns the head record of the chain."""
    # We can only support fanout to "All" style fanout records: to generate
    # masked or selected fanouts we'd need a cluster of supporting calc
    # records and a rather different structure.
    args['SELM'] = 'All'

    # All records after the first must be passive, driven only by the chain.
    follow_args = dict(args)
    follow_args['SCAN'] = 'Passive'
    follow_args.pop('PINI', None)

    chain = _fanout_helper(
        name, record_list, 6, records.fanout,
        lambda i: 'LNK%d' % (i + 1),    # link fields are LNK1..LNK6
        lambda record: record,          # plain chaining links, no decoration
        args, follow_args)
    return chain[0]
84 |
85 |
def create_dfanout(name, *record_list, **args):
    """Create a chain of 'dfanout' records mirroring the head record's value
    to record_list; returns the head record of the chain."""
    # All records after the first simply mirror the first record, so they
    # must operate passively and in supervisory mode, and take no data or
    # processing triggers of their own.
    follow_args = dict(args, SCAN = 'Passive', OMSL = 'supervisory')
    follow_args.pop('DOL', None)
    follow_args.pop('PINI', None)

    chain = _fanout_helper(
        name, record_list, 8, records.dfanout,
        lambda i: 'OUT%c' % (ord('A') + i),     # output fields OUTA..OUTH
        PP, args, follow_args)
    return chain[0]
104 |
--------------------------------------------------------------------------------
/epicsdbbuilder/mydbstatic.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from ctypes import *
4 |
5 | import platform
6 |
# Python 2/3 compatibility: ctypes argument/result converters.  On Python 3,
# str arguments are encoded to bytes on the way in and char* results decoded
# back to str on the way out; on Python 2 both are pass-throughs.
if sys.version_info < (3,):
    auto_encode = c_char_p
    def auto_decode(result, func, args):
        return result
else:
    class auto_encode(c_char_p):
        # ctypes calls from_param to convert each argument; None passes
        # through as a NULL pointer.
        @classmethod
        def from_param(cls, value):
            if value is None:
                return value
            else:
                return value.encode()
    def auto_decode(result, func, args):
        # ctypes errcheck protocol: (result, func, args) -> converted result.
        if result is None:
            return result
        else:
            return result.decode()
24 |
25 |
# Table of dbStaticLib functions to import, one entry per function:
#   (name, restype, errcheck, argtypes)
# unpacked in this order by ImportFunctionsFrom below.
_FunctionList = (
    ('dbReadDatabase', c_int, None,
        (c_void_p, auto_encode, auto_encode, auto_encode)),
    ('dbAllocEntry', c_void_p, None, (c_void_p,)),
    ('dbFirstRecordType', c_int, None, (c_void_p,)),
    ('dbGetRecordTypeName', c_char_p, auto_decode, (c_void_p,)),
    ('dbNextRecordType', c_int, None, (c_void_p,)),
    ('dbFreeEntry', None, None, (c_void_p,)),
    ('dbCopyEntry', c_void_p, None, (c_void_p,)),
    ('dbFirstField', c_int, None, (c_void_p,)),
    ('dbGetFieldName', c_char_p, auto_decode, (c_void_p,)),
    ('dbNextField', c_int, None, (c_void_p,)),
    ('dbVerify', c_char_p, auto_decode, (c_void_p, auto_encode)),
)
40 |
41 |
# Fallback implementation for dbVerify.  Turns out not to be present in EPICS
# 3.16, which is rather annoying.  In this case we just allow all writes to
# succeed.
def dbVerify(entry, value):
    """Fallback field validation: accept every value (None means success)."""
    return None
47 |
48 |
49 | # This function is called late to complete the process of importing all the
50 | # exports from this module. This is done late so that paths.EPICS_BASE can be
51 | # configured late.
def ImportFunctions(epics_base, host_arch):
    """Load the db static library from an EPICS base installation.

    host_arch may be None, in which case the EPICS host architecture name
    is guessed from the running platform.
    """
    if host_arch is None:
        # Mapping from host architecture to EPICS host architecture name can be
        # done with a little careful guesswork.  As EPICS architecture names are
        # a little arbitrary this isn't guaranteed to work.
        # NOTE(review): platforms missing from this map raise KeyError here.
        system_map = {
            ('Linux', '32bit'): 'linux-x86',
            ('Linux', '64bit'): 'linux-x86_64',
            ('Darwin', '32bit'): 'darwin-x86',
            ('Darwin', '64bit'): 'darwin-x86',
            ('Windows', '32bit'): 'win32-x86',
            ('Windows', '64bit'): 'windows-x64',
        }
        bits = platform.architecture()[0]
        host_arch = system_map[(platform.system(), bits)]

    # So we can work with both EPICS 3.14 and 3.15, look for libdbCore.so first
    # before falling back to the older libdbStaticHost.so
    # On Windows, use dbCore.dll or dbStaticHost.dll instead
    library_name_format = 'lib{}.so'
    library_location = 'lib'
    if platform.system() == 'Windows':
        library_name_format = '{}.dll'
        library_location = 'bin'

    try:
        ImportFunctionsFrom(os.path.join(
            epics_base, library_location, host_arch,
            library_name_format.format('dbCore')))
    except OSError:
        ImportFunctionsFrom(os.path.join(
            epics_base, library_location, host_arch,
            library_name_format.format('dbStaticHost')))
85 |
# Handle to the loaded db shared library (set by ImportFunctionsFrom).
_libdb = None


def GetDbFunction(name, restype=None, argtypes=None, errcheck=None):
    """Look up name in the loaded db library and configure its ctypes
    argument types, return type and error handler where supplied."""
    assert _libdb, "ImportFunctionsFrom(path) not called yet"
    function = getattr(_libdb, name)
    # Apply whichever ctypes attributes were actually supplied.
    for attribute, value in (
            ('argtypes', argtypes),
            ('restype', restype),
            ('errcheck', errcheck)):
        if value:
            setattr(function, attribute, value)
    return function
100 |
def ImportFunctionsFrom(path):
    """Load the shared library at path and publish its db functions as
    globals of this module.

    Each function named in _FunctionList is installed into globals(); when
    a function is missing from the library, an existing module-level
    fallback of the same name (e.g. dbVerify above) is left in place.
    """
    # Load the dbd static library using ctypes PyDLL convention instead of CDLL
    #
    # The difference is that this way we hold onto the Python GIL. Mostly this
    # makes no difference, as these are very quick function calls, but it turns
    # out that if there is another Python thread running then contention for the
    # GIL can wreck performance here.
    #
    # The ctypes documentation is not particularly helpful, saying in particular
    # "this is only useful to call Python C api functions directly", which
    # doesn't seem to be correct.
    global _libdb
    _libdb = PyDLL(path)
    # Actually populate the functions in globals, split from ImportFunctions to
    # support legacy API
    for name, restype, errcheck, argtypes in _FunctionList:
        try:
            function = GetDbFunction(name, restype, argtypes, errcheck)
        except AttributeError:
            # Check for global fallback function
            if name not in globals():
                raise
        else:
            globals()[name] = function
125 |
--------------------------------------------------------------------------------
/epicsdbbuilder/parameter.py:
--------------------------------------------------------------------------------
1 | from . import recordset
2 |
3 | __all__ = ['Parameter']
4 |
5 |
6 | # A Parameter is used to wrap a template parameter before being assigned to a
7 | # record field.
8 | class Parameter:
9 | __ParameterNames = set()
10 |
11 | def __init__(self, name, description = '', default = None):
12 | # Ensure names aren't accidentially overwritten
13 | assert name not in self.__ParameterNames, \
14 | 'Parameter name "%s" already defined' % name
15 | self.__ParameterNames.add(name)
16 |
17 | self.__name = name
18 | self.__default = default
19 |
20 | # Add the description as metadata to the current record set
21 | lines = description.split('\n')
22 | recordset.recordset.AddHeaderLine(
23 | '#%% macro, %s, %s' % (name, lines[0]))
24 | for line in lines[1:]:
25 | recordset.AddHeaderLine('# %s' % line)
26 |
27 | def __str__(self):
28 | if self.__default is None:
29 | return '$(%s)' % self.__name
30 | else:
31 | return '$(%s=%s)' % (self.__name, self.__default)
32 |
33 | def __repr__(self):
34 | return "Parameter" + str(self)[1:]
35 |
36 | def Validate(self, record, field):
37 | pass
38 |
--------------------------------------------------------------------------------
/epicsdbbuilder/recordbase.py:
--------------------------------------------------------------------------------
1 | '''Support for generating epics records.'''
2 |
3 | from __future__ import print_function
4 |
5 | import string
6 | import json
7 | from collections import OrderedDict
8 |
9 | from . import recordnames
10 | from .recordset import recordset
11 |
12 |
13 | __all__ = [
14 | 'PP', 'CA', 'CP', 'CPP', 'NP',
15 | 'MS', 'MSS', 'MSI', 'NMS',
16 | 'ImportRecord']
17 |
18 |
19 |
# Quote a single character for inclusion in a db file string.
def quote_char(ch):
    if ch in '"\\':
        # Quotation mark and backslash are escaped with a backslash.
        return '\\' + ch
    if ord(ch) < ord(' '):
        # Control characters are written as hex escapes.
        return '\\x%02x' % ord(ch)
    return ch

# Convert a string into a safely quoted string wrapped in quotation marks.
def quote_string(value):
    return '"' + ''.join(quote_char(ch) for ch in value) + '"'
32 |
33 |
34 | # ---------------------------------------------------------------------------
35 | #
36 | # Record class
37 |
38 | # Base class for all record types.
39 | #
40 | # All record types known to the IOC builder (loaded from DBD files in EPICS
41 | # support modules) are subclasses of this class.
42 | class Record(object):
43 |
    # Creates a subclass of the record with the given record type and
    # validator bound to the subclass.  The device used to load the record is
    # remembered so that it can subsequently be instantiated if necessary.
    @classmethod
    def CreateSubclass(cls, on_use, recordType, validate):
        """Return a new Record subclass named recordType, with on_use and
        validate bound as the class attributes _on_use and _validate.

        The subclass is passed through bits.ExtendClass so record types
        with registered extensions gain their extra methods.
        """
        # Each record we publish is a class so that individual record
        # classes can be subclassed when convenient.
        class BuildRecord(Record):
            _validate = validate
            _type = recordType
            _on_use = on_use
        # Give the generated class the record type's own name.
        BuildRecord.__name__ = recordType

        # Perform any class extension required for this particular record type.
        from . import bits
        return bits.ExtendClass(BuildRecord)
60 |
61 |
62 | def __setattr(self, name, value):
63 | # Because we have hooked into __setattr__, we need to dance a little
64 | # to write names into our dictionary.
65 | if name[:2] == '__':
66 | self.__dict__['_Record' + name] = value
67 | else:
68 | self.__dict__[name] = value
69 |
70 |
71 | # Record constructor. Needs to be told the type of record that this will
72 | # be, a field validation object (which will be used to check field names
73 | # and field value assignments), the name of the record being created, and
74 | # initialisations for any other fields. Builds standard record name using
75 | # the currently configured RecordName hook.
76 |
77 | # Record constructor.
78 | #
79 | # This is used to construct a record of a particular record type. The
80 | # record is added to database of the generated IOC, or can simply be
81 | # written out to a separate .db file, depending on the chosen IOC writer.
82 | #
83 | # record
84 | # The name of the record being generated. The detailed name of the
85 | # record is determined by the configured record name convention, and
86 | # normally the device part of the record name is not specified here.
87 | # **fields
88 | # All of the fields supported by the record type appear as attributes
89 | # of the class. Values can be specified in the constructor, or can be
90 | # assigned subsequently to the generated instance.
91 | #
92 | # For example, the following code generates a record which counts how
93 | # many times it has been processed:
94 | #
95 | # cntr = records.calc('CNTR', CALC = 'A+1', VAL = 0)
96 | # cntr.A = cntr
97 | #
98 | # This will generate a database somewhat like this:
99 | #
100 | # record(calc, "$(DEVICE):CNTR")
101 | # {
102 | # field(A, "$(DEVICE):CNTR")
103 | # field(CALC, "A+1")
104 | # field(VAL, "0")
105 | # }
106 | #
107 | # Record links can be wrapped with PP(), CP(), MS() and NP() calls.
108 | def __init__(self, record, **fields):
109 |
110 | # Make sure the Device class providing this record is instantiated
111 | if self._on_use:
112 | self._on_use(self)
113 |
114 | # These assignment have to be directly into the dictionary to
115 | # bypass the tricksy use of __setattr__.
116 | self.__setattr('__fields', OrderedDict())
117 | self.__setattr('__aliases', OrderedDict())
118 | self.__setattr('__comments', [])
119 | self.__setattr('__infos', [])
120 | self.__setattr('name', recordnames.RecordName(record))
121 |
122 | # Support the special 'address' field as an alias for either INP or
123 | # OUT, depending on which of those exists. We only set up this field
124 | # if exactly one of INP or OUT is present as a valid field.
125 | address = [
126 | field for field in ['INP', 'OUT'] if self.ValidFieldName(field)]
127 | if len(address) == 1:
128 | self.__setattr('__address', address[0])
129 |
130 | # Make sure all the fields are properly processed and validated.
131 | for name, value in fields.items():
132 | setattr(self, name, value)
133 |
134 | recordset.PublishRecord(self.name, self)
135 |
136 | def add_alias(self, alias):
137 | self.__aliases[alias] = self
138 |
139 | def add_comment(self, comment):
140 | self.__comments.append('# ' + comment)
141 |
142 | def add_metadata(self, metadata):
143 | self.__comments.append('#% ' + metadata)
144 |
145 | def add_info(self, name, info):
146 | self.__infos.append((name, info))
147 |
148 | def __dbd_order(self, fields):
149 | field_set = set(fields)
150 | for field_name in self._validate.dbEntry.iterate_fields():
151 | if field_name in field_set:
152 | yield field_name
153 | field_set.remove(field_name)
154 | assert not field_set, "DBD for %s doesn't contain %s" % (
155 | self._type, sorted(field_set))
156 |
157 | # Call to generate database description of this record. Outputs record
158 | # definition in .db file format. Hooks for meta-data can go here.
159 | def Print(self, output, alphabetical=True):
160 | print(file = output)
161 | for comment in self.__comments:
162 | print(comment, file=output)
163 | print('record(%s, "%s")' % (self._type, self.name), file = output)
164 | print('{', file = output)
165 | # Print the fields in alphabetical order. This is more convenient
166 | # to the eye and has the useful side effect of bypassing a bug
167 | # where DTYPE needs to be specified before INP or OUT fields.
168 | sort = sorted if alphabetical else self.__dbd_order
169 | for k in sort(self.__fields.keys()):
170 | value = self.__fields[k]
171 | if getattr(value, 'ValidateLater', False):
172 | self.__ValidateField(k, value)
173 | value = self.__FormatFieldForDb(k, value)
174 | padding = ''.ljust(4-len(k)) # To align field values
175 | print(' field(%s, %s%s)' % (k, padding, value), file = output)
176 | sort = sorted if alphabetical else list
177 | for alias in sort(self.__aliases.keys()):
178 | print(' alias("%s")' % alias, file = output)
179 | for name, info in self.__infos:
180 | value = self.__FormatFieldForDb(name, info)
181 | print(' info(%s, %s)' % (name, value), file = output)
182 | print('}', file = output)
183 |
184 |
185 | # The string for a record is just its name.
186 | def __str__(self):
187 | return self.name
188 |
189 | # The representation string for a record identifies its type and name,
190 | # but we can't do much more.
191 | def __repr__(self):
192 | return '' % (self._type, self.name)
193 |
194 | # Calling the record generates a self link with a list of specifiers.
195 | def __call__(self, *specifiers):
196 | return _Link(self, None, *specifiers)
197 |
198 |
199 | # Assigning to a record attribute updates a field.
200 | def __setattr__(self, fieldname, value):
201 | if fieldname == 'address':
202 | fieldname = self.__address
203 | if value is None:
204 | # Treat assigning None to a field the same as deleting that field.
205 | # This is convenient for default arguments.
206 | if fieldname in self.__fields:
207 | del self.__fields[fieldname]
208 | else:
209 | # If the field is callable we call it first: this is used to
210 | # ensure we convert record pointers into links. It's unlikely
211 | # that this will have unfortunate side effects elsewhere, but it's
212 | # always possible...
213 | if callable(value):
214 | value = value()
215 | if not getattr(value, 'ValidateLater', False):
216 | self.__ValidateField(fieldname, value)
217 | self.__fields[fieldname] = value
218 |
219 | # Field validation
220 | def __ValidateField(self, fieldname, value):
221 | # If the field can validate itself then ask it to, otherwise use our
222 | # own validation routine. This is really just a hook for parameters
223 | # so that they can do their own validation.
224 | if hasattr(value, 'Validate'):
225 | value.Validate(self, fieldname)
226 | else:
227 | self._validate.ValidFieldValue(fieldname, str(value))
228 |
229 | # Field formatting
230 | def __FormatFieldForDb(self, fieldname, value):
231 | if hasattr(value, 'FormatDb'):
232 | return value.FormatDb(self, fieldname)
233 | elif isinstance(value, dict):
234 | # JSON values in EPICS database as per
235 | # https://epics.anl.gov/base/R7-0/6-docs/links.html
236 | return '\n '.join(json.dumps(value, indent=4).splitlines())
237 | else:
238 | return quote_string(str(value))
239 |
240 |
241 | # Allow individual fields to be deleted from the record.
242 | def __delattr__(self, fieldname):
243 | if fieldname == 'address':
244 | fieldname = self.__address
245 | del self.__fields[fieldname]
246 |
247 |
248 | # Reading a record attribute returns a link to the field.
249 | def __getattr__(self, fieldname):
250 | if fieldname == 'address':
251 | fieldname = self.__address
252 | self._validate.ValidFieldName(fieldname)
253 | return _Link(self, fieldname)
254 |
255 | def _FieldValue(self, fieldname):
256 | return self.__fields[fieldname]
257 |
258 | # Can be called to validate the given field name, returns True iff this
259 | # record type supports the given field name.
260 | @classmethod
261 | def ValidFieldName(cls, fieldname):
262 | try:
263 | # The validator is specified to raise an AttributeError exception
264 | # if the field name cannot be validated. We translate this into
265 | # a boolean here.
266 | cls._validate.ValidFieldName(fieldname)
267 | except AttributeError:
268 | return False
269 | else:
270 | return True
271 |
272 | # When a record is pickled for export it will reappear as an ImportRecord
273 | # instance. This makes more sense (as the record has been fully generated
274 | # already), and avoids a lot of trouble.
275 | def __reduce__(self):
276 | return (ImportRecord, (self.name, self._type))
277 |
278 |
279 |
# Records can be imported by name.  An imported record has no specification
# of its type, and so no validation can be done: all that can be done to an
# imported record is to link to it.
class ImportRecord:
    def __init__(self, name):
        self.name = name

    def __str__(self):
        return self.name

    def __repr__(self):
        # Fix: the format string must consume the name argument; an empty
        # string here made '%' raise TypeError at runtime.
        return '<external record "%s">' % self.name

    def __call__(self, *specifiers):
        return _Link(self, None, *specifiers)

    def __getattr__(self, fieldname):
        # Brain-dead minimal validation: just check for all uppercase!
        ValidChars = set(string.ascii_uppercase + string.digits)
        if not set(fieldname) <= ValidChars:
            raise AttributeError('Invalid field name %s' % fieldname)
        return _Link(self, fieldname)

    def add_alias(self, name):
        '''Emits a database alias statement mapping name to this record.'''
        recordset.AddBodyLine('alias("%s", "%s")' % (self.name, name))
305 |
306 |
307 | # A link is a class to encapsulate a process variable link. It remembers
308 | # the record, the linked field, and a list of specifiers (such as PP, CP,
309 | # etcetera).
310 | class _Link:
311 | def __init__(self, record, field, *specifiers):
312 | self.record = record
313 | self.field = field
314 | self.specifiers = specifiers
315 |
316 | def __str__(self):
317 | result = self.record.name
318 | if self.field:
319 | result = '%s.%s' % (result, self.field)
320 | for specifier in self.specifiers:
321 | result = '%s %s' % (result, specifier)
322 | return result
323 |
324 | def __call__(self, *specifiers):
325 | return _Link(self.record, self.field, *self.specifiers + specifiers)
326 |
327 | # Returns the value currently assigned to this field.
328 | def Value(self):
329 | return self.record._FieldValue(self.field)
330 |
331 |
332 | # Some helper routines for building links
333 |
def PP(record):
    """ "Process Passive": any record update through a PP output link will be
    processed if its scan is Passive.

    Example (Python source)
    -----------------------
    `my_record.INP = PP(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other PP")`
    """
    # record is callable (a Record or _Link); calling it appends the
    # PP link specifier.
    return record('PP')
347 |
def CA(record):
    """ "Channel Access": a CA (input or output) link will be treated as
    a channel access link regardless whether it is a DB link or not.

    Example (Python source)
    -----------------------
    `my_record.INP = CA(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other CA")`
    """
    # Appends the CA link specifier to the given record or link.
    return record('CA')
361 |
362 |
def CP(record):
    """ "Channel Process": a CP input link will cause the linking record
    to process any time the linked record is updated.

    Example (Python source)
    -----------------------
    `my_record.INP = CP(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other CP")`
    """
    # Appends the CP link specifier to the given record or link.
    return record('CP')
376 |
def CPP(record):
    """ "Channel Process if Passive": a CPP input link will be treated as
    a channel access link and if the linking record is passive,
    the linking passive record will be processed any time the linked record
    is updated.

    Example (Python source)
    -----------------------
    `my_record.INP = CPP(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other CPP")`
    """
    # Appends the CPP link specifier to the given record or link.
    return record('CPP')
392 |
393 |
def MS(record):
    """ "Maximise Severity": any alarm state on the linked record is propagated
    to the linking record.  When propagated, the alarm status will become
    `LINK_ALARM`.

    Example (Python source)
    -----------------------
    `my_record.INP = MS(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other MS")`
    """
    # Appends the MS link specifier to the given record or link.
    return record('MS')
408 |
409 |
def MSS(record):
    """ "Maximise Status and Severity": both alarm status and alarm severity
    on the linked record are propagated to the linking record.

    Example (Python source)
    -----------------------
    `my_record.INP = MSS(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other MSS")`
    """
    # Appends the MSS link specifier to the given record or link.
    return record('MSS')
423 |
424 |
def MSI(record):
    """ "Maximise Severity if Invalid": propagate an alarm state on the linked
    record only if the alarm severity is `INVALID_ALARM`.
    When propagated, the alarm status will become `LINK_ALARM`.

    Example (Python source)
    -----------------------
    `my_record.INP = MSI(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other MSI")`
    """
    # Appends the MSI link specifier to the given record or link.
    return record('MSI')
439 |
440 |
def NMS(record):
    """ "Non-Maximise Severity": no alarm is propagated.
    This is the default behavior of EPICS links.

    Example (Python source)
    -----------------------
    `my_record.INP = NMS(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other NMS")`
    """
    # Appends the NMS link specifier to the given record or link.
    return record('NMS')
454 |
455 |
def NP(record):
    """ "No Process": the linked record is not processed.
    This is the default behavior of EPICS links.

    Example (Python source)
    -----------------------
    `my_record.INP = NP(other_record)`

    Example (Generated DB)
    ----------------------
    `field(INP, "other NPP")`
    """
    # Note: NP is spelled 'NPP' in the generated database.
    return record('NPP')
469 |
--------------------------------------------------------------------------------
/epicsdbbuilder/recordnames.py:
--------------------------------------------------------------------------------
1 | '''Support for default record name configurations.'''
2 |
3 | from . import parameter
4 |
5 |
6 | __all__ = [
7 | 'SimpleRecordNames', 'TemplateRecordNames',
8 | 'SetSimpleRecordNames', 'SetTemplateRecordNames',
9 | 'RecordName', 'SetRecordNames', 'GetRecordNames',
10 | 'PushPrefix', 'PopPrefix', 'SetPrefix', 'SetSeparator']
11 |
12 |
# Default record name support: each record is created with precisely the name
# it is given.
class SimpleRecordNames(object):
    '''Naming convention which joins a stack of prefixes to the record name
    with a configurable separator.'''

    # Maximum record name length for EPICS 3.14
    maxLength = 61

    def __init__(self, prefix = '', separator = ':', check = True):
        # An empty prefix means the name is emitted unadorned.
        self.prefix = [prefix] if prefix else []
        self.separator = separator
        self.check = check

    def __call__(self, name):
        # Join every pushed prefix and the record name together.
        parts = self.prefix + [name]
        full_name = self.separator.join(str(part) for part in parts)
        if self.check:
            assert 0 < len(full_name) <= self.maxLength, \
                'Record name "%s" too long' % full_name
        return full_name

    def PushPrefix(self, prefix):
        '''Adds prefix to the end of the prefix stack.'''
        self.prefix.append(prefix)

    def PopPrefix(self):
        '''Removes and returns the most recently pushed prefix.'''
        return self.prefix.pop()

    def SetPrefix(self, prefix):
        '''Replaces the innermost prefix, or removes it if prefix is empty.'''
        if not prefix:
            self.PopPrefix()
        else:
            if self.prefix:
                self.PopPrefix()
            self.PushPrefix(prefix)

    def SetSeparator(self, separator):
        '''Changes the separator used when joining prefixes and name.'''
        self.separator = separator
46 |
47 |
class TemplateRecordNames(SimpleRecordNames):
    '''Naming convention whose default prefix is a $(DEVICE) template macro
    parameter; length checking is disabled since macros expand later.'''

    def __init__(self, prefix = None, separator = ':'):
        if prefix is None:
            # Default to the standard $(DEVICE) macro parameter.
            prefix = parameter.Parameter('DEVICE', 'Device name')
        super(TemplateRecordNames, self).__init__(prefix, separator, False)
53 |
54 |
def SetSimpleRecordNames(prefix = '', separator = ''):
    # Installs a SimpleRecordNames convention as the global naming hook.
    # NOTE(review): default separator here is '' while SimpleRecordNames
    # itself defaults to ':' -- confirm this asymmetry is intentional.
    SetRecordNames(SimpleRecordNames(prefix, separator))
57 |
def SetTemplateRecordNames(prefix = None, separator = ':'):
    # Installs a TemplateRecordNames convention as the global naming hook.
    SetRecordNames(TemplateRecordNames(prefix, separator))
60 |
61 |
# By default record names are unmodified.
def _RecordNames(name):
    # Identity hook in effect until SetRecordNames installs a convention.
    return name
65 |
def SetRecordNames(names):
    # Installs names as the global naming hook and returns the previously
    # installed hook so the caller can restore it later.
    global _RecordNames
    current = _RecordNames
    _RecordNames = names
    return current
71 |
def GetRecordNames():
    # Returns the currently installed naming hook.
    return _RecordNames
74 |
def RecordName(name):
    # Expands name through the currently installed naming convention.
    return _RecordNames(name)
77 |
def PushPrefix(prefix):
    # Delegates to the current naming convention's prefix stack.
    _RecordNames.PushPrefix(prefix)
80 |
def PopPrefix():
    # Removes and returns the most recently pushed prefix.
    return _RecordNames.PopPrefix()
83 |
def SetPrefix(prefix):
    # Replaces the innermost prefix (or removes it when prefix is empty).
    _RecordNames.SetPrefix(prefix)
86 |
def SetSeparator(separator):
    # Changes the separator used by the current naming convention.
    _RecordNames.SetSeparator(separator)
89 |
--------------------------------------------------------------------------------
/epicsdbbuilder/recordset.py:
--------------------------------------------------------------------------------
1 | '''Collections of records.'''
2 |
3 | from __future__ import print_function
4 |
5 | from collections import OrderedDict
6 | import os
7 | import time
8 |
9 | __all__ = [
10 | 'WriteRecords', 'Disclaimer',
11 | 'LookupRecord', 'CountRecords', 'ResetRecords']
12 |
13 |
class RecordSet(object):
    '''Accumulates generated records together with free-form header and
    body lines, and knows how to print the whole set as a .db file.'''

    def __init__(self):
        self.ResetRecords()

    def ResetRecords(self):
        '''Discards every accumulated record, header line and body line.'''
        self.__records = OrderedDict()
        self.__header_lines = []
        self.__body_lines = []

    def PublishRecord(self, name, record):
        '''Adds record under name; each name may be published only once.'''
        assert name not in self.__records, 'Record %s already defined' % name
        self.__records[name] = record

    def LookupRecord(self, full_name):
        '''Returns the previously published record with the given name.'''
        return self.__records[full_name]

    def Print(self, output, alphabetical):
        '''Writes header lines, body lines and every record to output.'''
        for header in self.__header_lines:
            print(header, file = output)
        if self.__body_lines:
            print(file = output)
            for body in self.__body_lines:
                print(body, file = output)
        # Printing the records in alphabetical order gives the reader a
        # fighting chance to find their way around the generated database!
        if alphabetical:
            names = sorted(self.__records)
        else:
            names = list(self.__records)
        for name in names:
            self.__records[name].Print(output, alphabetical)

    def CountRecords(self):
        '''Returns the number of records published so far.'''
        return len(self.__records)

    def AddHeaderLine(self, line):
        '''Appends a line to be emitted before everything else.'''
        self.__header_lines.append(line)

    def AddBodyLine(self, line):
        '''Appends a line emitted between the header and the records.'''
        self.__body_lines.append(line)
55 |
56 |
# The global record set shared by the module level convenience functions.
recordset = RecordSet()

# Module level aliases bound to the global record set's methods.
LookupRecord = recordset.LookupRecord
CountRecords = recordset.CountRecords
ResetRecords = recordset.ResetRecords
62 |
63 |
64 |
def Disclaimer(source = None, normalise_source = True):
    '''Returns a do-not-edit banner with a timestamp, suitable for use as a
    file header.  If source is given it is named in the banner; when
    normalise_source is set the source path is made absolute first.'''
    if source is None:
        from_source = '.'
    elif normalise_source:
        from_source = ' from\nsource: %s' % os.path.abspath(source)
    else:
        from_source = ' from\nsource: %s' % source

    timestamp = time.strftime('%a %d %b %Y %H:%M:%S %Z')
    return (
        'This file was automatically generated on %s%s\n'
        '\n'
        '*** Please do not edit this file: edit the source file instead. ***\n'
        '\n' % (timestamp, from_source))
81 |
82 |
def WriteRecords(filename, header=None, alphabetical=True):
    '''Writes every published record to filename.  Each line of header
    (default: Disclaimer()) is emitted first as a '#' comment; the header
    string must end with an empty line.'''
    if header is None:
        header = Disclaimer()
    header_lines = header.split('\n')
    assert header_lines[-1] == '', 'Terminate header with empty line'
    with open(filename, 'w') as output:
        # Drop the trailing empty line; comment out the rest.
        for header_line in header_lines[:-1]:
            print('#', header_line, file = output)
        recordset.Print(output, alphabetical)
92 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | # Versions compatible with those installed in dls-python3
3 | requires = ["setuptools<45", "wheel==0.33.1"]
4 | build-backend = "setuptools.build_meta:__legacy__"
5 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = epicsdbbuilder
3 | description = EPICS Database Builder
4 | url = https://github.com/DiamondLightSource/epicsdbbuilder
5 | author = Michael Abbott
6 | author_email = Michael.Abbott@diamond.ac.uk
7 | license = Apache License 2.0
8 | long_description = file: README.rst
9 | long_description_content_type = text/x-rst
10 | classifiers =
11 | Development Status :: 5 - Production/Stable
12 | License :: OSI Approved :: Apache Software License
13 | Programming Language :: Python :: 2.7
14 | Programming Language :: Python :: 3.6
15 | Programming Language :: Python :: 3.7
16 | Programming Language :: Python :: 3.8
17 |
18 | [options]
19 | packages = find:
20 |
21 | # If you want to include data files in packages,
22 | # either define [options.package_data] or
23 | # set this to True and include a MANIFEST.in file.
24 | include_package_data = False
25 |
26 | [options.packages.find]
27 | # Don't include our test directory in the distribution
28 | exclude = test
29 |
30 | [flake8]
31 | max-line-length = 80
32 | extend-ignore =
33 | F401 F403 F405 # Allow from module import *
34 | E251 # Allow call(param = value)
35 | E301 E302 E303 E305 # Allow any number of blank lines
36 |
37 | [tool:pytest]
38 | # Run pytest with all our checkers, and don't spam us with massive tracebacks on error
39 | addopts =
40 | --tb=native -vv --flake8 --doctest-modules --doctest-glob="*.rst"
41 | --cov=epicsdbbuilder --cov-report term --cov-report xml:cov.xml
42 |
43 | [coverage:run]
44 | # This is covered in the versiongit test suite so exclude it here
45 | omit = */_version_git.py
46 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
import os
import sys
from setuptools import setup

# Locate the generated _version_git.py anywhere under the source tree and
# make its directory importable, so the version can be read below.
for path, _, filenames in os.walk(os.path.dirname(os.path.abspath(__file__))):
    if "_version_git.py" in filenames:
        sys.path.append(path)
        break

# Imported from the path appended above; metadata lives in setup.cfg.
from _version_git import __version__, get_cmdclass # noqa

setup(cmdclass=get_cmdclass(), version=__version__)
13 |
--------------------------------------------------------------------------------
/test/expected_output.db:
--------------------------------------------------------------------------------
1 | # This file was automatically generated on Thu 25 Nov 2021 10:35:33 GMT.
2 | #
3 | # *** Please do not edit this file: edit the source file instead. ***
4 | #
5 | #% macro, DEVICE, Device name
6 | #% macro, P, A parameter
7 | #% macro, Q, A number
8 |
9 | record(longin, "XX-YY-ZZ-01:A")
10 | {
11 | field(DESC, "blah")
12 | }
13 |
14 | record(ai, "XX-YY-ZZ-01:B")
15 | {
16 | field(INP, "SR-DI-DCCT-01:SIGNAL")
17 | }
18 |
19 | record(fanout, "XX-YY-ZZ-01:FAN")
20 | {
21 | field(SELM, "All")
22 | field(LNK1, "XX-YY-ZZ-01:A")
23 | field(LNK2, "XX-YY-ZZ-01:B")
24 | }
25 |
26 | record(bi, "XX-YY-ZZ-01:TRIG")
27 | {
28 | field(SCAN, "1 second")
29 | field(FLNK, "XX-YY-ZZ-01:FAN")
30 | }
31 |
32 | # comment 1
33 | #% metadata
34 | # comment 2
35 | record(ai, "XX-YY-ZZ-01:ABC:TEST")
36 | {
37 | }
38 |
39 | record(ai, "$(DEVICE):TEST")
40 | {
41 | field(SCAN, "1 second")
42 | field(VAL, "$(Q)")
43 | field(INP, "@$(P)")
44 | }
45 |
46 | record(bi, "$(DEVICE):BOO")
47 | {
48 | field(INP, "XX-YY-ZZ-01:TRIG")
49 | }
50 |
51 | record(ai, "$(DEVICE):OPTIONS:CA")
52 | {
53 | field(INP, "$(DEVICE):TEST CA")
54 | }
55 |
56 | record(ai, "$(DEVICE):OPTIONS:CP")
57 | {
58 | field(INP, "$(DEVICE):TEST CP")
59 | }
60 |
61 | record(ai, "$(DEVICE):OPTIONS:CPP")
62 | {
63 | field(INP, "$(DEVICE):TEST CPP")
64 | }
65 |
66 | record(ai, "$(DEVICE):OPTIONS:NP")
67 | {
68 | field(INP, "$(DEVICE):TEST NPP")
69 | }
70 |
71 | record(ai, "$(DEVICE):OPTIONS:MSS")
72 | {
73 | field(INP, "$(DEVICE):TEST MSS")
74 | }
75 |
76 | record(ai, "$(DEVICE):OPTIONS:MSI")
77 | {
78 | field(INP, "$(DEVICE):TEST MSI")
79 | }
80 |
81 | record(ai, "$(DEVICE):OPTIONS:NMS")
82 | {
83 | field(INP, "$(DEVICE):TEST NMS")
84 | }
85 |
86 | record(ai, "$(DEVICE):OPTIONS:PP:MS")
87 | {
88 | field(INP, "$(DEVICE):TEST MS PP")
89 | }
90 |
91 | record(waveform, "$(DEVICE):FIELD:WITH_CONST_ARRAY")
92 | {
93 | field(INP, ["A","B","C"])
94 | info(Q:group, {
95 | "MYTABLE": {
96 | "+id": "epics:nt/NTTable:1.0",
97 | "labels": {
98 | "+type": "plain",
99 | "+channel": "VAL"
100 | }
101 | }
102 | })
103 | }
104 |
105 | record(ai, "$(DEVICE):FIELD:WITH_JSON_LINK")
106 | {
107 | field(INP, {
108 | "const": 3.14159265358979
109 | })
110 | info(asyn:READBACK, "1")
111 | info(autosaveFields, "PREC EGU DESC")
112 | }
113 |
114 | record(stringin, "$(DEVICE):STRING")
115 | {
116 | field(VAL, "\"\x0a\\\x01€")
117 | }
118 |
--------------------------------------------------------------------------------
/test/expected_output_alphabetical.db:
--------------------------------------------------------------------------------
1 | # This file was automatically generated on Mon 08 Nov 2021 09:23:43 GMT.
2 | #
3 | # *** Please do not edit this file: edit the source file instead. ***
4 | #
5 | #% macro, DEVICE, Device name
6 | #% macro, P, A parameter
7 | #% macro, Q, A number
8 |
9 | record(bi, "$(DEVICE):BOO")
10 | {
11 | field(INP, "XX-YY-ZZ-01:TRIG")
12 | }
13 |
14 | record(waveform, "$(DEVICE):FIELD:WITH_CONST_ARRAY")
15 | {
16 | field(INP, ["A","B","C"])
17 | info(Q:group, {
18 | "MYTABLE": {
19 | "+id": "epics:nt/NTTable:1.0",
20 | "labels": {
21 | "+type": "plain",
22 | "+channel": "VAL"
23 | }
24 | }
25 | })
26 | }
27 |
28 | record(ai, "$(DEVICE):FIELD:WITH_JSON_LINK")
29 | {
30 | field(INP, {
31 | "const": 3.14159265358979
32 | })
33 | info(asyn:READBACK, "1")
34 | info(autosaveFields, "PREC EGU DESC")
35 | }
36 |
37 | record(ai, "$(DEVICE):OPTIONS:CA")
38 | {
39 | field(INP, "$(DEVICE):TEST CA")
40 | }
41 |
42 | record(ai, "$(DEVICE):OPTIONS:CP")
43 | {
44 | field(INP, "$(DEVICE):TEST CP")
45 | }
46 |
47 | record(ai, "$(DEVICE):OPTIONS:CPP")
48 | {
49 | field(INP, "$(DEVICE):TEST CPP")
50 | }
51 |
52 | record(ai, "$(DEVICE):OPTIONS:MSI")
53 | {
54 | field(INP, "$(DEVICE):TEST MSI")
55 | }
56 |
57 | record(ai, "$(DEVICE):OPTIONS:MSS")
58 | {
59 | field(INP, "$(DEVICE):TEST MSS")
60 | }
61 |
62 | record(ai, "$(DEVICE):OPTIONS:NMS")
63 | {
64 | field(INP, "$(DEVICE):TEST NMS")
65 | }
66 |
67 | record(ai, "$(DEVICE):OPTIONS:NP")
68 | {
69 | field(INP, "$(DEVICE):TEST NPP")
70 | }
71 |
72 | record(ai, "$(DEVICE):OPTIONS:PP:MS")
73 | {
74 | field(INP, "$(DEVICE):TEST MS PP")
75 | }
76 |
77 | record(stringin, "$(DEVICE):STRING")
78 | {
79 | field(VAL, "\"\x0a\\\x01€")
80 | }
81 |
82 | record(ai, "$(DEVICE):TEST")
83 | {
84 | field(INP, "@$(P)")
85 | field(SCAN, "1 second")
86 | field(VAL, "$(Q)")
87 | }
88 |
89 | record(longin, "XX-YY-ZZ-01:A")
90 | {
91 | field(DESC, "blah")
92 | }
93 |
94 | # comment 1
95 | #% metadata
96 | # comment 2
97 | record(ai, "XX-YY-ZZ-01:ABC:TEST")
98 | {
99 | }
100 |
101 | record(ai, "XX-YY-ZZ-01:B")
102 | {
103 | field(INP, "SR-DI-DCCT-01:SIGNAL")
104 | }
105 |
106 | record(fanout, "XX-YY-ZZ-01:FAN")
107 | {
108 | field(LNK1, "XX-YY-ZZ-01:A")
109 | field(LNK2, "XX-YY-ZZ-01:B")
110 | field(SELM, "All")
111 | }
112 |
113 | record(bi, "XX-YY-ZZ-01:TRIG")
114 | {
115 | field(FLNK, "XX-YY-ZZ-01:FAN")
116 | field(SCAN, "1 second")
117 | }
118 |
--------------------------------------------------------------------------------
/test/test_a_file.py:
--------------------------------------------------------------------------------
1 | # vim: set fileencoding=UTF-8:
2 |
3 | import os
4 | import sys
5 | from collections import OrderedDict
6 | from epicsdbbuilder import *
7 |
def test_output(tmp_path):
    '''End-to-end test: builds a representative database and compares the
    generated .db file, line by line (skipping the timestamp line), against
    the checked-in expected outputs in both DBD and alphabetical order.'''
    ResetRecords()

    InitialiseDbd(
        os.environ.get('EPICS_BASE', None),
        os.environ.get('EPICS_HOST_ARCH', None))

    tmpl_names = TemplateRecordNames()
    dls_names = SimpleRecordNames('XX-YY-ZZ-01', ':')

    SetRecordNames(dls_names)

    P = Parameter('P', 'A parameter')
    assert repr(P) == "Parameter(P)"
    Q = Parameter('Q', 'A number')

    r = ImportRecord('SR-DI-DCCT-01:SIGNAL')

    records.bi(
        'TRIG',
        FLNK = create_fanout(
            'FAN',
            records.longin('A', DESC = 'blah'),
            records.ai('B', INP = r)),
        SCAN = '1 second')

    s = ImportRecord(RecordName('TRIG'))

    PushPrefix('ABC')

    # Comments and metadata should appear above the record definition.
    r = records.ai('TEST')
    r.add_comment("comment 1")
    r.add_metadata("metadata")
    r.add_comment("comment 2")

    assert PopPrefix() == 'ABC'

    SetRecordNames(tmpl_names)

    t = records.ai(
        'TEST',
        INP = '@%s' % P, VAL = Q, SCAN = '1 second')
    records.bi('BOO', INP = s)

    # Test link options
    records.ai('OPTIONS:CA', INP = CA(t))
    records.ai('OPTIONS:CP', INP = CP(t))
    records.ai('OPTIONS:CPP', INP = CPP(t))
    records.ai('OPTIONS:NP', INP = NP(t))
    records.ai('OPTIONS:MSS', INP = MSS(t))
    records.ai('OPTIONS:MSI', INP = MSI(t))
    records.ai('OPTIONS:NMS', INP = NMS(t))

    # Test multiple link options
    records.ai('OPTIONS:PP:MS', INP = PP(MS(t)))

    # Test const array with QSRV infos
    w = records.waveform(
        'FIELD:WITH_CONST_ARRAY',
        INP = ConstArray(["A", "B", "C"])
    )
    # Ordereddict for python2.7 compat
    td = OrderedDict([
        ("+id", "epics:nt/NTTable:1.0"),
        ("labels", OrderedDict([
            ("+type", "plain"),
            ("+channel", "VAL")
        ]))])
    w.add_info("Q:group", {"MYTABLE": td})
    # And json links with readbacks
    a = records.ai(
        'FIELD:WITH_JSON_LINK',
        INP = {"const": 3.14159265358979}
    )
    a.add_info("asyn:READBACK", 1)
    a.add_info("autosaveFields", "PREC EGU DESC")

    # A string constant with some evil character values
    records.stringin('STRING', VAL = '"\n\\\x01€')

    fname = str(tmp_path / 'test_output.db')
    expected_open_args = {}
    if sys.version_info > (3, ):
        # Specify encoding so it works on windows
        expected_open_args['encoding'] = 'utf8'

    # Returns all lines except the first (the timestamped disclaimer),
    # stripped of trailing whitespace for stable comparison.
    def lines(fname, **open_args):
        return [x.rstrip() for x in open(fname, **open_args).readlines()[1:]]

    WriteRecords(fname, alphabetical=False)
    expected = os.path.join(os.path.dirname(__file__), 'expected_output.db')
    assert lines(fname) == lines(expected, **expected_open_args)

    WriteRecords(fname)
    expected = os.path.join(
        os.path.dirname(__file__), 'expected_output_alphabetical.db')
    assert lines(fname) == lines(expected, **expected_open_args)
105 |
--------------------------------------------------------------------------------
/test/test_const_array.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from collections import OrderedDict
3 | from decimal import Decimal
4 | from epicsdbbuilder import ConstArray, Parameter
5 |
6 |
class TestConstArray(unittest.TestCase):
    """Validation and db-formatting tests for ConstArray values."""

    # Shared macro parameter used by the parameter-element tests.
    par = Parameter('PAR', 'Parameter')

    def assertValidFormatDb(self, expected, value):
        # Helper: the value must survive Validate() and then render
        # exactly as `expected` via FormatDb().
        const_array = ConstArray(value)
        const_array.Validate(None, None)
        self.assertEqual(expected, const_array.FormatDb(None, None))

    def assertInvalid(self, value, expected_exception=AssertionError):
        # Helper: Validate() must reject the value with the given exception.
        with self.assertRaises(expected_exception):
            ConstArray(value).Validate(None, None)

    def test_allow_iterators(self):
        # A hand-rolled iterable (neither list nor tuple) must be accepted.
        class Iterable:
            def __iter__(self):
                class Iterator:
                    def __init__(self, items):
                        self.items = iter(items)

                    def __next__(self):
                        return next(self.items)
                    next = __next__  # Python2
                return Iterator([1, 2, 3])

        self.assertValidFormatDb('[1,2,3]', [1, 2, 3])
        self.assertValidFormatDb('[1,2,3]', (1, 2, 3))
        ordered = OrderedDict([('1', 'A'), ('2', 'B'), ('3', 'C')])
        self.assertValidFormatDb('["1","2","3"]', ordered)
        self.assertValidFormatDb('["1","2","3"]', '123')
        self.assertValidFormatDb('[1,2,3]', Iterable())

    def test_block_empty_iterators(self):
        self.assertInvalid([])
        self.assertInvalid(())
        self.assertInvalid('')

        # An iterable that yields nothing at all must also be rejected.
        class Iterable:
            def __iter__(self):
                class Iterator:
                    def __next__(self):
                        raise StopIteration
                    next = __next__  # Python2
                return Iterator()

        self.assertInvalid(Iterable())

    def test_block_instances_which_are_not_iterators(self):
        self.assertInvalid(None, TypeError)

        # Scalars are not acceptable array sources.
        self.assertInvalid(True, TypeError)
        self.assertInvalid(1, TypeError)
        self.assertInvalid(2.5, TypeError)
        self.assertInvalid(Decimal('3'), TypeError)

        # Nor is an arbitrary non-iterable object.
        class MyClass:
            pass
        self.assertInvalid(MyClass(), TypeError)

    def test_allow_boolean_as_elements(self):
        # Booleans render as the integers 1 and 0.
        self.assertValidFormatDb('[1]', [True])
        self.assertValidFormatDb('[0]', [False])

    def test_allow_numbers_as_elements(self):
        self.assertValidFormatDb('[1]', [1])
        self.assertValidFormatDb('[2.5]', [2.5])
        self.assertValidFormatDb('[3.5]', [Decimal('3.5')])

    def test_allow_strings_as_elements(self):
        self.assertValidFormatDb('["str"]', ['str'])
        self.assertValidFormatDb('["s1","s2"]', ['s1', 's2'])
        # Embedded double quotes must be backslash-escaped in the output.
        self.assertValidFormatDb(
            '["escaped\\"quotes"]', ['escaped"quotes'])
        self.assertValidFormatDb('[""]', [''])

    def test_allow_parameters_as_elements(self):
        self.assertValidFormatDb('["$(PAR)"]', [self.par])
        self.assertValidFormatDb('["$(PAR)","$(PAR)"]', [self.par, self.par])

    def test_block_none_as_an_element(self):
        # None is rejected wherever it appears in the sequence.
        self.assertInvalid([None])
        self.assertInvalid(['A', None])
        self.assertInvalid([None, 'A'])
        self.assertInvalid(['A', None, 'A'])

    def test_allow_mixing_numbers(self):
        self.assertValidFormatDb('[1,1.5,2]', [1, 1.5, Decimal('2')])

    def test_allow_mixing_numbers_and_booleans(self):
        self.assertValidFormatDb('[1,2]', [True, 2])

    def test_allow_mixing_strings_and_parameters(self):
        self.assertValidFormatDb('["str","$(PAR)"]', ['str', self.par])

    def test_block_mixing_numbers_and_strings_or_parameters(self):
        # Numeric and string-like elements may not be combined.
        self.assertInvalid([1, 'A'])
        self.assertInvalid(['A', 1])
        self.assertInvalid([1, self.par])

    def test_repr(self):
        self.assertEqual('', repr(ConstArray(['ABC'])))
107 |
--------------------------------------------------------------------------------
/test/test_quote_string.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from epicsdbbuilder.recordbase import quote_string
3 |
4 |
class TestQuoteString(unittest.TestCase):
    """Tests for quote_string's double-quoted rendering of values."""

    def test_empty(self):
        # An empty value renders as a bare pair of double quotes.
        quoted = quote_string('')
        self.assertEqual('""', quoted)

    def test_string_without_escaping(self):
        # Ordinary record-name characters pass through unchanged.
        quoted = quote_string('ABC:123.FIELD')
        self.assertEqual('"ABC:123.FIELD"', quoted)

    def test_string_with_escaping(self):
        # An embedded double quote is backslash-escaped.
        quoted = quote_string('A"C')
        self.assertEqual('"A\\"C"', quoted)
14 |
--------------------------------------------------------------------------------