├── .build
└── install_pyenv.sh
├── .gitignore
├── .travis.yml
├── LICENSE
├── MANIFEST.in
├── NOTICE
├── README.rst
├── appveyor.yml
├── build.xml
├── ci_requirements.txt
├── examples.ipynb
├── logging.properties
├── open_fortran_parser
├── .gitignore
├── __init__.py
├── __main__.py
├── _version.py
├── config.py
├── dependencies.py
├── main.py
├── ofc_wrapper.py
├── parser_wrapper.py
└── py.typed
├── requirements.txt
├── setup.py
├── setup_boilerplate.py
├── src
└── fortran
│ └── ofp
│ ├── XMLPrinter.java
│ ├── XMLPrinterBase.java
│ ├── XmlPrinterArgsParser.java
│ └── parser
│ └── java
│ ├── CodeBounds.java
│ ├── CodeLocation.java
│ └── TokensList.java
├── test
├── .gitignore
├── __init__.py
├── examples
│ ├── arithmetic_kind.f90
│ ├── comments.f
│ ├── comments.f90
│ ├── deepvar.f90
│ ├── empty.f
│ ├── problematic_write.f90
│ ├── simple_if.f90
│ └── strings.f90
├── examples_large
│ ├── ORCHIDEE_grassland_grazing.f90
│ ├── ORCHIDEE_grid.f90
│ ├── ORCHIDEE_hydrolc.f90
│ └── ORCHIDEE_stomate.f90
├── fortran
│ └── ofp
│ │ ├── XMLPrinterBaseTests.java
│ │ └── parser
│ │ └── java
│ │ └── CodeBoundsTests.java
├── test_apps.py
├── test_compatibility.py
├── test_dependencies.py
├── test_ofc.py
├── test_parser_wrapper.py
├── test_script.py
└── test_setup.py
└── test_requirements.txt
/.build/install_pyenv.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -Eeuxo pipefail
3 |
4 | # pyenv installer (for macOS)
5 | # updated: 2019-05-13
6 |
7 | # use the following to enable diagnostics
8 | # export DIAGNOSE_PYENV=1
9 |
10 | if [[ "$(uname)" == "Darwin" ]]; then
11 | if [ -n "${DIAGNOSE_PYENV-}" ] ; then
12 | pyenv install --list
13 | fi
14 | if ! [[ ${TRAVIS_PYTHON_VERSION} =~ .*-dev$ ]] ; then  # resolve bare "3.x" to the latest matching "3.x.y" known to pyenv
15 | TRAVIS_PYTHON_VERSION="$(pyenv install --list | grep -E " ${TRAVIS_PYTHON_VERSION}(\.[0-9brc]+)+" | tail -n 1 | sed -e 's/^[[:space:]]*//')"
16 | fi
17 | pyenv install "${TRAVIS_PYTHON_VERSION}"
18 | # export PATH="${HOME}/.pyenv/versions/${TRAVIS_PYTHON_VERSION}/bin:${PATH}"
19 | mkdir -p "${HOME}/.local/bin"
20 | ln -sf "${HOME}/.pyenv/versions/${TRAVIS_PYTHON_VERSION}/bin/python" "${HOME}/.local/bin/python"
21 | ln -sf "${HOME}/.pyenv/versions/${TRAVIS_PYTHON_VERSION}/bin/pip" "${HOME}/.local/bin/pip"
22 | ln -sf "${HOME}/.pyenv/versions/${TRAVIS_PYTHON_VERSION}/bin/coverage" "${HOME}/.local/bin/coverage"
23 | ln -sf "${HOME}/.pyenv/versions/${TRAVIS_PYTHON_VERSION}/bin/codecov" "${HOME}/.local/bin/codecov"
24 | fi
25 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Eclipse
2 | /.settings
3 | /.*project
4 |
5 | # Java
6 | /.classpath
7 | /.externalToolBuilders
8 | *.class
9 |
10 | # Java: coverage
11 | /jacoco
12 | /jacoco.exec
13 | /jacoco.xml
14 |
15 | # Jupyter Notebook
16 | .ipynb_checkpoints
17 | *-checkpoint.ipynb
18 |
19 | # OS: macOS
20 | .DS_Store
21 |
22 | # Python
23 | /build
24 | /dist
25 | /.cache
26 | __pycache__
27 | *.egg
28 | *.egg-info
29 | *.pyc
30 |
31 | # Python: coverage
32 | /htmlcov
33 | /.coverage
34 |
35 | # other
36 | /bin
37 | /lib
38 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 | language: generic
3 | addons:
4 | apt:
5 | packages:
6 | - ant
7 | homebrew:
8 | packages:
9 | - ant
10 | - pyenv
11 | matrix:
12 | include:
13 | - os: linux
14 | dist: trusty
15 | language: python
16 | python: "3.5"
17 | env: TRAVIS_JDK_VERSION=openjdk7
18 | - os: linux
19 | language: python
20 | python: "3.6"
21 | env: TRAVIS_JDK_VERSION=openjdk11
22 | - os: linux
23 | language: python
24 | python: "3.7"
25 | env: TRAVIS_JDK_VERSION=openjdk11
26 | - os: osx
27 | osx_image: xcode11
28 | language: generic
29 | env:
30 | - TRAVIS_PYTHON_VERSION="3.5"
31 | - TRAVIS_JDK_VERSION=oraclejdk12
32 | - os: osx
33 | osx_image: xcode11
34 | language: generic
35 | env:
36 | - TRAVIS_PYTHON_VERSION="3.6"
37 | - TRAVIS_JDK_VERSION=oraclejdk12
38 | - os: osx
39 | osx_image: xcode11
40 | language: generic
41 | env:
42 | - TRAVIS_PYTHON_VERSION="3.7"
43 | - TRAVIS_JDK_VERSION=oraclejdk12
44 |
45 | before_install:
46 | - .build/install_pyenv.sh
47 | - |
48 | set -e
49 | if [[ "${TRAVIS_OS_NAME}" == "linux" && "$(lsb_release -cs)" == "trusty" ]]; then
50 | jdk_switcher use $TRAVIS_JDK_VERSION
51 | fi
52 | set +e
53 | - java -version
54 | - python --version
55 | # Open Fortran Parser
56 | - git clone "https://github.com/OpenFortranProject/open-fortran-parser" "../open-fortran-parser"
57 | # Open Fortran Compiler
58 | - git clone "https://github.com/codethinklabs/ofc" "../open-fortran-compiler"
59 | - cd "../open-fortran-compiler" && make && cd -
60 | - export PATH="${PATH}:$(pwd)/../open-fortran-compiler"
61 | # FFB-MINI app
62 | - git clone "https://github.com/mbdevpl/ffb-mini" "../ffb-mini"
63 | # FLASH 5 app
64 | - git clone "https://github.com/mbdevpl/FLASH5.git" "../flash5"
65 | # miranda_io app
66 | - git clone "https://github.com/mbdevpl/miranda_io" "../miranda_io"
67 |
68 | install:
69 | # dependencies
70 | - pip install -U pip
71 | - pip install -U -r ci_requirements.txt
72 | # Java
73 | - python -m open_fortran_parser --deps
74 | - export CLASSPATH="${CLASSPATH}:$(pwd)/lib/*"
75 | - ant -Dpython=python
76 | - export CLASSPATH="${CLASSPATH}:$(pwd)/dist/*"
77 |
78 | script:
79 | - |
80 | if [[ "${TRAVIS_JDK_VERSION}" == "openjdk7" ]]; then
81 | export TEST_DEPENDENCIES=1
82 | export TEST_LONG=1
83 | fi
84 | - TEST_PACKAGING=1 TEST_COVERAGE=1 python -m coverage run --branch --source . -m unittest -v
85 |
86 | after_success:
87 | - python -m coverage report --show-missing
88 | - java -jar "lib/org.jacoco.cli-0.8.3-nodeps.jar" report "jacoco.exec" --classfiles "bin/" --sourcefiles "src/" --xml jacoco.xml
89 | - codecov
90 |
91 | before_deploy:
92 | - pip3 install -U --user version_query || pip3 install -U version_query
93 | - wget https://gist.githubusercontent.com/mbdevpl/46d458350f0c9cc7d793b67573e01f7b/raw/prepare_bintray_deployment.py
94 | - python3 prepare_bintray_deployment.py "$TRAVIS_OS_NAME-$TRAVIS_JDK_VERSION-python$TRAVIS_PYTHON_VERSION" "dist/*.jar" "dist/*.tar.gz" "dist/*.whl" "dist/*.zip"
95 |
96 | deploy:
97 | - provider: bintray
98 | file: ".bintray.json"
99 | user: "mbdevpl"
100 | key:
101 | secure: "QXtjizZcEByE66zwhrhGHG+6Y9YZKXnPQxYdQYH0CFQ3zc6gakRnzsMGAAo0APhtn+EZAZjzjBXcO/WllhhPlfIInad24Q7N17MzIIoUO9exjcmahZAygXgazLaN4+hpntv/2AuQd7ZAfNXpmMtKfRSGyzKqWMwwVTvEerKigbSFZmEjyBsX4jzUxD7Mpza8XULwGf70xpfTg2T5Ovy4iwDzIhFwruWS7RjwORFWd4gyqLJrfN/LuAEmwt0BYu0qyfx7onqHUbUYWYj0gsnkHMZBftf/mJXTYmdZIBkiu/oeaYdc4DN3hEnKdJ77AMt6ny8FgP/vs4gacRX7NLBcbQlkpEueQQzAGK/E8AL2M0Os/UX7DH9tuDjON2DwE2BGt87DVxc+AxwCIpntxTfuWaJuenylVhDNJ1UsQHvODPALyVjT5CT05sVG83pLTCO9w85Hl4D2engxPkb+ksGsa51vEH9Ohm70sO+XyawQqWtoOJ8UTqKvKvyP8yDsUv+GXhlqmy8/H9CxO14fmz5/kpd5pWC4ZGJZnIEjsIosl/WeFRQJ5fOmYen48A06+cWSAmEbfe/1w8c0Y2Z+wGtbTjUM4z988TYlKJK2pYxe+xoQH+615HLETPR65Ngv6FqRM7wrUbKPvznc875kaAugsnbjBgMUZwX/gMxAkxLp0T0="
102 | on:
103 | all_branches: true
104 | skip_cleanup: true
105 |
106 | notifications:
107 | slack:
108 | secure: "OiXc/9CvAVd1l3xWTP554GzPe7ba6cBUHzLLHi4siEW0TDCDeJAzRrFbA/Ei2OTBqVX5p6b4uqeuPCc+IIYhcNDZa4tvEAAyfMIttKEEiEE7qMESxaqXkYQo0IIDu2FVjNjvqZlOfzc9kjFL2J2W3uCd/yivd/WOIoIijkU1HiFfN7wk6e+1ATUIyPXrNJNDYpE2/gKJhMVNHexjEQkeNgGN4eoAU/2cTdwI23SkHCsPgxbgCSMCUn0N/ZsTEgu/zzVFUBlqzzDZKR+fHmrCv7pLHSkIe2WROlbAFyg0AzIEJwhTLbZLOC9Xh1FLhigYvNDaZsOFUnlSEwmSjegvtnXLQkJ2vtSS7xf2UaLdc4w+kgv6If7369IICHgi59X97C8dT9W+YeZoidkINiMuijmCkTKqKF+DhpOx6x2bt6Pk91RscIjwqamb1a0SvMhgqtfpYQz/UkyDadC3g2BsHdg0l+K6ZmEWtfJUMAVctsr8dvwg2KlwrfX5V4+ecwzluz5aL7jYBzXRlgO83ulvM+xaS/9+CGMn08w6ipCjK2xrD4ZKMnoHBr2s+ijceoYINg9nDefNknV9C2Ja/pjgMCtWKZNlX/PkhbOq0oLBBsoUM3Mp1JgP86I88H6TSCRFk8ASV/ghZeaAr8k7AzZi2TAzGFh90KGrlw6aNzvqVak="
109 | email: false
110 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include setup_boilerplate.py
2 | include *requirements.txt
3 | include LICENSE
4 | include NOTICE
5 | include ./open_fortran_parser/*.jar
6 | recursive-include ./test/examples *.*
7 | include ./*/py.typed
8 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Copyright 2017-2019 Mateusz Bysiek https://mbdevpl.github.io/
2 | Copyright 2018 Michael Lange
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | .. role:: bash(code)
2 | :language: bash
3 |
4 | .. role:: fortran(code)
5 | :language: fortran
6 |
7 | .. role:: java(code)
8 | :language: java
9 |
10 | .. role:: python(code)
11 | :language: python
12 |
13 | .. role:: xml(code)
14 | :language: xml
15 |
16 |
17 | ============================================
18 | XML output generator for Open Fortran Parser
19 | ============================================
20 |
21 | Extension of Java-based Open Fortran Parser and a Python wrapper enabling Fortran parsing from Python.
22 |
23 | .. image:: https://travis-ci.com/mbdevpl/open-fortran-parser-xml.svg?branch=master
24 | :target: https://travis-ci.com/mbdevpl/open-fortran-parser-xml
25 | :alt: build status from Travis CI
26 |
27 | .. image:: https://ci.appveyor.com/api/projects/status/github/mbdevpl/open-fortran-parser-xml?branch=master&svg=true
28 | :target: https://ci.appveyor.com/project/mbdevpl/open-fortran-parser-xml
29 | :alt: build status from AppVeyor
30 |
31 | .. image:: https://api.codacy.com/project/badge/Grade/1e5602a9efed41998eca0437d84cc1db
32 | :target: https://www.codacy.com/app/mbdevpl/open-fortran-parser-xml
33 | :alt: grade from Codacy
34 |
35 | .. image:: https://codecov.io/gh/mbdevpl/open-fortran-parser-xml/branch/master/graph/badge.svg
36 | :target: https://codecov.io/gh/mbdevpl/open-fortran-parser-xml
37 | :alt: test coverage from Codecov
38 |
39 | .. image:: https://img.shields.io/github/license/mbdevpl/open-fortran-parser.svg
40 | :target: https://github.com/mbdevpl/open-fortran-parser-xml/blob/master/NOTICE
41 | :alt: license
42 |
43 | Implementation has 2 parts: the XML generator written in Java, and Python wrapper for the generator.
44 |
45 | The implementation is tested on Linux, OS X and Windows.
46 |
47 | In this file, first the AST specification is described, then the Java implementation,
48 | and then the Python wrapper.
49 |
50 | .. contents::
51 | :backlinks: none
52 |
53 |
54 | AST specification
55 | =================
56 |
57 | For any Fortran file, the resulting XML file has the following structure:
58 |
59 | .. code:: xml
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 | Root node is :xml:``, it has one subnode :xml:``.
69 |
70 | Inside the :xml:``, there might be one or many of the following nodes:
71 |
72 | * :xml:``
73 | * :xml:``
74 | * :xml:``
75 | * :xml:``
76 | * :xml:``
77 | * ...
78 |
79 | Additionally, every XML node that was built using tokens from the source code
80 | (which means almost any XML node) has its source code location described in the following way:
81 |
82 | .. code:: xml
83 |
84 |
85 |
86 | For simplicity, the above XML file boilerplate as well as locations are stripped
87 | from the examples that follow.
88 |
89 | For each presented construct, Fortran code snippet and corresponding XML AST is given.
90 |
91 |
92 | Comments and directives
93 | -----------------------
94 |
95 | Comment:
96 |
97 | .. code:: fortran
98 |
99 | ! my comment
100 | !$omp parallel do
101 |
102 | .. code:: xml
103 |
104 |
105 |
106 |
107 | Directive:
108 |
109 | .. code:: fortran
110 |
111 | #define NDIMS 3
112 |
113 | .. code:: xml
114 |
115 |
116 |
117 |
118 | Nodes :xml:`` and :xml:``
119 | exist to carry comments and preprocessor directives, respectively.
120 | These nodes might be in principle inserted before, after or within any of other nodes,
121 | however, in practice they are either surrounding the top-level nodes (e.g. program or subroutine)
122 | or are placed in-between non-compound declarations and/or statements within them.
123 |
124 | Note: compiler directives are comments in Fortran.
125 |
126 |
127 | Program
128 | -------
129 |
130 | .. code:: fortran
131 |
132 | program empty
133 | ...
134 | end program empty
135 |
136 | .. code:: xml
137 |
138 |
139 |
140 | ...
141 |
142 |
143 |
144 | In the body, `Declarations`_ followed by any number of statements can be found.
145 |
146 | And each of the statements listed after the declarations,
147 | can be one of `Simple statements`_ or `Compound statements`_.
148 |
149 |
150 | Declarations
151 | ------------
152 |
153 | A special node :xml:`` wraps all declarations:
154 |
155 | .. code:: xml
156 |
157 |
158 | ...
159 |
160 |
161 | It provides counts for each of the declaration type and contains a collection of declarations,
162 | which can any of the following:
163 |
164 | * :xml:``
165 | * :xml:``
166 | * ...
167 |
168 | The :xml:`` node is special in a sense that it has type attribute that specifies
169 | what kind of declaration it is.
170 |
171 |
172 | Implicit declaration
173 | ~~~~~~~~~~~~~~~~~~~~
174 |
175 | .. code:: fortran
176 |
177 | implicit none
178 | implicit real (A-H,O-Z)
179 |
180 | .. code:: xml
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 | Variable declaration
193 | ~~~~~~~~~~~~~~~~~~~~
194 |
195 | .. code:: fortran
196 |
197 | integer i, j
198 |
199 | .. code:: xml
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 | External declaration
211 | ~~~~~~~~~~~~~~~~~~~~
212 |
213 | .. code:: fortran
214 |
215 | external omp_get_num_procs
216 |
217 |
218 | Save declaration
219 | ~~~~~~~~~~~~~~~~
220 |
221 | .. code:: fortran
222 |
223 | save n
224 |
225 |
226 | Use
227 | ~~~
228 |
229 | .. code:: fortran
230 |
231 | use mpi
232 | use my_interface, only: subroutine1, subroutine2
233 | use, non_intrinsic :: my_module
234 | use, intrinsic :: iso_c_binding, only: c_int, c_float
235 |
236 | .. code:: xml
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 | Compound statements
258 | -------------------
259 |
260 | Compound statements, e.g.:
261 |
262 | * :xml:``
263 | * :xml:``
264 | * :xml:``
265 | * ...
266 |
267 | each have :xml:`` and :xml:``.
268 |
269 |
270 | If
271 | ~~~
272 |
273 | In the header of :xml:``, an expression is present.
274 |
275 | See `Expressions`_ for a definition.
276 |
277 |
278 | Loop
279 | ~~~~
280 |
281 | In the header of the :xml:``, at least one :xml:`` is present.
282 | It has :xml:``, :xml:`` and :xml:``.
283 |
284 |
285 | Select
286 | ~~~~~~
287 |
288 | In the body of :xml:`` there are multiple :xml:`` nodes.
289 | These are also compound (i.e. each of them has :xml:`` and :xml:``),
290 | however they exist only within the body of select statement.
291 |
292 |
293 | Simple statements
294 | -----------------
295 |
296 | .. code:: xml
297 |
298 |
299 | ...
300 |
301 |
302 | All simple statements are using :xml:`` node, which wraps around nodes like:
303 |
304 | * :xml:``
305 | * :xml:``
306 | * :xml:``
307 | * :xml:``
308 | * :xml:``
309 | * :xml:``
310 | * :xml:``
311 | * :xml:``
312 | * :xml:``
313 | * :xml:``
314 | * :xml:``
315 | * :xml:``
316 | * :xml:``
317 | * :xml:``
318 | * :xml:``
319 | * ...
320 |
321 |
322 | Assignment
323 | ~~~~~~~~~~
324 |
325 | .. code:: fortran
326 |
327 | x = 1
328 |
329 | .. code:: xml
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 |
338 |
339 |
340 |
341 | Call
342 | ~~~~
343 |
344 | .. code:: fortran
345 |
346 | call configure
347 | call initialize()
348 | call calculate(1, 2)
349 | call something(thing=my_value)
350 |
351 | .. code:: xml
352 |
353 |
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 |
369 |
370 |
371 |
372 |
373 |
374 |
375 |
376 |
377 |
378 |
379 |
380 |
381 |
382 |
383 |
384 | Expressions
385 | -----------
386 |
387 | Expression might be a single node like:
388 |
389 | * :xml:``
390 | * :xml:``
391 | * ...
392 |
393 | More complex expressions are built from the :xml:`` nodes, each of which contains
394 | a collection of :xml:`` and :xml:`` nodes. Each operand contains an expression.
395 |
396 |
397 | Unary operation
398 | ~~~~~~~~~~~~~~~
399 |
400 | .. code:: fortran
401 |
402 | .not. flag
403 |
404 | .. code:: xml
405 |
406 |
407 |
408 |
409 |
410 |
411 |
412 |
413 |
414 | Multiary operation
415 | ~~~~~~~~~~~~~~~~~~
416 |
417 | .. code:: fortran
418 |
419 | 'Hello' // ' world'
420 | 5 + x
421 |
422 | .. code:: xml
423 |
424 |
425 |
426 |
427 |
428 |
429 |
430 |
431 |
432 |
433 |
434 |
435 |
436 |
437 |
438 |
439 |
440 |
441 |
442 |
443 |
444 | Subroutine
445 | ----------
446 |
447 | Many complex nodes contain :xml:`` and :xml:``.
448 |
449 | The contents of the header depend on the type of the node. For example, in case of subroutines,
450 | it contains list of parameters.
451 |
452 |
453 | Function
454 | --------
455 |
456 | .. code:: fortran
457 |
458 | function foo
459 | ...
460 | end function foo
461 |
462 | .. code:: xml
463 |
464 |
465 |
468 |
469 | ...
470 |
471 |
472 |
473 |
474 | Module
475 | ------
476 |
477 | .. code:: fortran
478 |
479 | module abc
480 | integer i
481 | ...
482 | contains
483 | subroutine sub()
484 | ...
485 | end subroutine sub
486 | ...
487 | end module abc
488 |
489 | .. code:: xml
490 |
491 |
492 |
493 |
494 |
495 |
496 |
497 |
498 |
499 |
500 |
501 | ...
502 |
503 |
504 |
505 |
506 |
507 | ...
508 |
509 |
510 | ...
511 |
512 |
513 |
514 |
515 | Work in progress
516 | ----------------
517 |
518 | Remaining details of AST are not decided yet. For the time being, to see implementation details,
519 | please take a look into ``_.
520 |
521 |
522 | Unhandled corner cases
523 | ----------------------
524 |
525 | In certain corner cases, the parse tree might deviate from the above description.
526 |
527 | This might be due to two main reasons:
528 |
529 | 1) Some feature is not yet implemented in this XML output generator
530 | 2) The events provided by OFP are not sufficient to generate a correct tree.
531 |
532 | In case 1, all contributions to this project are very welcome. The implementation of any one
533 | of the missing features might not be very troublesome. The main reason why many of those features
534 | are not implemented yet is because the Fortran codes the current contributors work with
535 | do not use them.
536 |
537 | In case 2, there is a need to dynamically reorder/modify/delete nodes, or otherwise manipulate
538 | existing parse tree while adding new nodes. Contributions are also very welcome,
539 | but implementation might be much more challenging in this case.
540 |
541 |
542 | Java XML generator for OFP
543 | ==========================
544 |
545 | .. image:: https://img.shields.io/github/release/mbdevpl/open-fortran-parser-xml.svg
546 | :target: https://github.com/mbdevpl/open-fortran-parser-xml/releases
547 | :alt: latest GitHub release
548 |
549 | This is an extension of Open Fortran Parser (OFP), which outputs abstract syntax tree (AST)
550 | of parsed Fortran file in XML format - to a file or to :java:`System.out`.
551 |
552 |
553 | dependencies
554 | ------------
555 |
556 | * Java 1.7 or later
557 |
558 | * Open Fortran Parser 0.8.5-1
559 |
560 | https://github.com/mbdevpl/open-fortran-parser/releases
561 |
562 | This is a patched version of OFP. The list of changes is available at the above link.
563 |
564 | * ANTLR 3.5.2 (dependency of Open Fortran Parser)
565 |
566 | http://www.antlr3.org/download/
567 |
568 | * Apache Commons CLI 1.4 or later
569 |
570 | https://commons.apache.org/proper/commons-cli/download_cli.cgi
571 |
572 |
573 | how to build
574 | ------------
575 |
576 | Get dependencies, either manually, or using the provided script:
577 |
578 | .. code:: bash
579 |
580 | pip3 install -U -r requirements.txt
581 | python3 -m open_fortran_parser --deps
582 | export CLASSPATH="${CLASSPATH}:$(pwd)/lib/*"
583 |
584 | Build:
585 |
586 | .. code:: bash
587 |
588 | ant
589 | export CLASSPATH="${CLASSPATH}:$(pwd)/dist/*"
590 |
591 | This will create a `.jar` file in `dist` directory, and add it to the Java classpath.
592 |
593 | If you use a different python executable to install requirements, please provide it to ant too:
594 |
595 | .. code:: bash
596 |
597 | ant -Dpython=/custom/python
598 |
599 | Because the build script by default relies on "python3" executable.
600 |
601 |
602 | how to run
603 | ----------
604 |
605 | .. code:: bash
606 |
607 | java fortran.ofp.FrontEnd --class fortran.ofp.XMLPrinter \
608 | --output output.xml --verbosity 0~100 input.f
609 |
610 | where:
611 |
612 | * The :bash:`--verbosity` flag controls verbosity of the parse tree. Defaults to :bash:`100`
613 | when omitted.
614 |
615 | * Maximum, :bash:`100`, means that all details picked up by Open Fortran Parser
616 | will be preserved.
617 |
618 | * Minimum, :bash:`0`, means that tree will contain only what is needed to reconstruct
619 | the program without changing its meaning.
620 |
621 | * The :bash:`--output` flag controls where the XML should be written. Defaults to standard output
622 | when omitted.
623 |
624 | and remaining command-line options are exactly as defined in OFP 0.8.5.
625 |
626 | To parse :bash:`some_fortran_file.f` and save XML output in :bash:`tree.xml` with minimum verbosity:
627 |
628 | .. code:: bash
629 |
630 | java fortran.ofp.FrontEnd --class fortran.ofp.XMLPrinter \
631 | --output tree.xml --verbosity 0 some_fortran_file.f
632 |
633 | And to dump XML with maximum verbosity to console:
634 |
635 | .. code:: bash
636 |
637 | java fortran.ofp.FrontEnd --class fortran.ofp.XMLPrinter \
638 | --verbosity 100 some_fortran_file.f
639 |
640 |
641 | Python wrapper for the generator
642 | ================================
643 |
644 | .. image:: https://img.shields.io/pypi/v/open-fortran-parser.svg
645 | :target: https://pypi.python.org/pypi/open-fortran-parser
646 | :alt: package version from PyPI
647 |
648 | Using the wrapper should not require any special knowledge about the generator itself, other than
649 | knowing the abstract syntax tree (AST) specification.
650 |
651 |
652 | dependencies
653 | ------------
654 |
655 | Java XML generator for OFP and all of its dependencies.
656 |
657 | Python version 3.5 or later.
658 |
659 | Python libraries as specified in ``_.
660 |
661 | Building and running tests additionally requires packages listed in ``_.
662 |
663 |
664 | how to build
665 | ------------
666 |
667 | .. code:: bash
668 |
669 | pip3 install -U -r test_requirements.txt
670 | python3 setup.py sdist --formats=gztar,zip
671 | python3 setup.py bdist_wheel
672 |
673 | how to install
674 | --------------
675 |
676 | You can simply install from PyPI:
677 |
678 | .. code:: bash
679 |
680 | pip3 install open-fortran-parser
681 |
682 | Or using any of below commands, when installing from source:
683 |
684 | .. code:: bash
685 |
686 | pip3 install .
687 | pip3 install dist/.whl
688 | pip3 install dist/.tar.gz
689 | pip3 install dist/.zip
690 |
691 |
692 | how to run
693 | ----------
694 |
695 | The wrapper can be used as a script, or as a library.
696 |
697 | When running any installed version, even if installed from source, dependencies are automatically
698 | installed together with the wrapper.
699 |
700 | Before running from source (without installation), however, please follow "how to build" section
701 | for Java implementation above.
702 | You can make sure that dependencies are configured correctly by running:
703 |
704 | .. code:: bash
705 |
706 | python3 -m open_fortran_parser --check-deps
707 |
708 | If the dependencies changed since you first ran the wrapper from the source tree, you can clean up
709 | outdated dependencies by executing:
710 |
711 | .. code:: bash
712 |
713 | python3 -m open_fortran_parser --cleanup-deps
714 |
715 |
716 | as script
717 | ~~~~~~~~~
718 |
719 | .. code::
720 |
721 | $ python3 -m open_fortran_parser -h
722 | usage: open_fortran_parser [-h] [--version] [-v VERBOSITY]
723 | [--check-dependencies]
724 | [input] [output]
725 |
726 | Python wrapper around XML generator for Open Fortran Parser
727 |
728 | positional arguments:
729 | input path to Fortran source code file (default: None)
730 | output writable path for where to store resulting XML,
731 | defaults to stdout if no path provided (default: None)
732 |
733 | optional arguments:
734 | -h, --help show this help message and exit
735 | --version show program's version number and exit
736 | -v VERBOSITY, --verbosity VERBOSITY
737 | level of verbosity, from 0 to 100 (default: 100)
738 | --check-dependencies, --check-deps
739 | check if all required dependencies are present and
740 | exit (default: False)
741 |
742 | Copyright 2017-2019 by the contributors, Apache License 2.0,
743 | https://github.com/mbdevpl/open-fortran-parser-xml
744 |
745 |
746 | as library
747 | ~~~~~~~~~~
748 |
749 | .. code:: python
750 |
751 | from open_fortran_parser import parse
752 |
753 | xml = parse('my_legacy_code.f', verbosity=0)
754 |
755 | More examples available in ``_.
756 |
757 |
758 | testing
759 | -------
760 |
761 | Run basic tests:
762 |
763 | .. code:: bash
764 |
765 | python3 -m unittest -v
766 | TEST_LONG=1 python3 -m unittest -v # this might take a long time...
767 |
768 |
769 | code coverage
770 | ~~~~~~~~~~~~~
771 |
772 | Getting code coverage results for Java requires JaCoCo agent, and JaCoCo CLI,
773 | and both are downloaded automatically along with other development dependencies.
774 |
775 | Currently, test setup relies on JaCoCo 0.8.3:
776 |
777 | * JaCoCo agent 0.8.3 (runtime)
778 | * JaCoCo CLI 0.8.3 (nodeps)
779 |
780 | Run all tests and gather code coverage:
781 |
782 | .. code:: bash
783 |
784 | TEST_LONG=1 TEST_COVERAGE=1 python3 -m coverage run --branch --source . -m unittest -v
785 |
786 | This will take a long while.
787 |
788 | Then, generate results for Python code:
789 |
790 | .. code:: bash
791 |
792 | python3 -m coverage report --show-missing
793 | python3 -m coverage html
794 |
795 | Finally, generate results for Java code:
796 |
797 | .. code:: bash
798 |
799 | java -jar "lib/org.jacoco.cli-0.8.3-nodeps.jar" report "jacoco.exec" --classfiles "bin/" --sourcefiles "src/" --html jacoco
800 |
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | version: "{build}"
2 |
3 | environment:
4 | matrix:
5 | - ARCHITECTURE: "x86"
6 | PYTHON_VERSION: "3.5"
7 | PYTHON: "C:\\Python35"
8 | JAVA_VERSION: "jdk7"
9 | JAVA: "C:\\Program Files (x86)\\Java\\jdk1.7.0"
10 | ANT: "1.9.14"
11 | - ARCHITECTURE: "x64"
12 | PYTHON_VERSION: "3.5"
13 | PYTHON: "C:\\Python35-x64"
14 | JAVA_VERSION: "jdk7"
15 | JAVA: "C:\\Program Files\\Java\\jdk1.7.0"
16 | ANT: "1.9.14"
17 | - ARCHITECTURE: "x86"
18 | PYTHON_VERSION: "3.6"
19 | PYTHON: "C:\\Python36"
20 | JAVA_VERSION: "jdk8"
21 | JAVA: "C:\\Program Files (x86)\\Java\\jdk1.8.0"
22 | ANT: "1.10.7"
23 | - ARCHITECTURE: "x64"
24 | PYTHON_VERSION: "3.6"
25 | PYTHON: "C:\\Python36-x64"
26 | JAVA_VERSION: "jdk8"
27 | JAVA: "C:\\Program Files\\Java\\jdk1.8.0"
28 | ANT: "1.10.7"
29 | - ARCHITECTURE: "x86"
30 | PYTHON_VERSION: "3.6"
31 | PYTHON: "C:\\Python36"
32 | JAVA_VERSION: "jdk9"
33 | JAVA: "C:\\Program Files (x86)\\Java\\jdk9"
34 | ANT: "1.10.7"
35 | - ARCHITECTURE: "x64"
36 | PYTHON_VERSION: "3.6"
37 | PYTHON: "C:\\Python36-x64"
38 | JAVA_VERSION: "jdk9"
39 | JAVA: "C:\\Program Files\\Java\\jdk9"
40 | ANT: "1.10.7"
41 | - ARCHITECTURE: "x86"
42 | PYTHON_VERSION: "3.6"
43 | PYTHON: "C:\\Python36"
44 | JAVA_VERSION: "jdk10"
45 | JAVA: "C:\\Program Files (x86)\\Java\\jdk10"
46 | ANT: "1.10.7"
47 | - ARCHITECTURE: "x64"
48 | PYTHON_VERSION: "3.6"
49 | PYTHON: "C:\\Python36-x64"
50 | JAVA_VERSION: "jdk10"
51 | JAVA: "C:\\Program Files\\Java\\jdk10"
52 | ANT: "1.10.7"
53 | - ARCHITECTURE: "x86"
54 | PYTHON_VERSION: "3.7"
55 | PYTHON: "C:\\Python37"
56 | JAVA_VERSION: "jdk11"
57 | JAVA: "C:\\Program Files (x86)\\Java\\jdk11"
58 | ANT: "1.10.7"
59 | - ARCHITECTURE: "x64"
60 | PYTHON_VERSION: "3.7"
61 | PYTHON: "C:\\Python37-x64"
62 | JAVA_VERSION: "jdk11"
63 | JAVA: "C:\\Program Files\\Java\\jdk11"
64 | ANT: "1.10.7"
65 |
66 | init:
67 | - set PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%
68 | - set JAVA_HOME=%JAVA%
69 | - set PATH=%JAVA%\\bin;%PATH%
70 | # Open Fortran Parser
71 | - git clone "https://github.com/OpenFortranProject/open-fortran-parser" "..\\open-fortran-parser"
72 | # Open Fortran Compiler
73 | #- git clone "https://github.com/codethinklabs/ofc" "../open-fortran-compiler"
74 | #- cd "..\\open-fortran-compiler" && make && cd -
75 | #- set PATH=%PATH%;%cd%\\..\\open-fortran-compiler
76 | # FFB-MINI app
77 | - git clone "https://github.com/mbdevpl/ffb-mini" "..\\ffb-mini" --branch ofp_tests
78 | # FLASH 5 app
79 | - git clone "https://github.com/mbdevpl/FLASH5.git" "..\\flash5"
80 | # miranda_io app
81 | - git clone "https://github.com/mbdevpl/miranda_io" "..\\miranda_io"
82 | # ant
83 | - ps: Invoke-WebRequest "https://archive.apache.org/dist/ant/binaries/apache-ant-$Env:ANT-bin.zip" -OutFile "..\\apache-ant-$Env:ANT-bin.zip"
84 | - ps: Expand-Archive -Path "..\\apache-ant-$Env:ANT-bin.zip" -DestinationPath "C:\\"
85 | - set PATH=C:\\apache-ant-%ANT%\\bin;%PATH%
86 |
87 | install:
88 | - python -m pip install -U pip
89 | - python -m pip install -U -r ci_requirements.txt
90 | # Java
91 | - python -m open_fortran_parser --deps
92 | - set CLASSPATH=%cd%\\lib\\*;%CLASSPATH%
93 | - ant -Dpython=python
94 | - set CLASSPATH=%cd%\\dist\\*;%CLASSPATH%
95 |
96 | build: off
97 |
98 | test_script:
99 | - set TEST_PACKAGING=1
100 | - if [%JAVA_VERSION%]==[jdk7] set TEST_DEPENDENCIES=1
101 | #- if [%JAVA_VERSION%]==[jdk7] set TEST_LONG=1
102 | - set TEST_COVERAGE=1
103 | - python -m coverage run --branch --source . -m unittest -v
104 |
105 | after_test:
106 | - python -m coverage report --show-missing
107 | - java -jar "lib\\org.jacoco.cli-0.8.3-nodeps.jar" report "jacoco.exec" --classfiles "bin\\" --sourcefiles "src\\" --xml jacoco.xml
108 | - codecov
109 | # Bintray archive preparation
110 | - python -m pip install version_query
111 | - ps: Invoke-WebRequest "https://gist.githubusercontent.com/mbdevpl/46d458350f0c9cc7d793b67573e01f7b/raw/prepare_bintray_deployment.py" -OutFile "prepare_bintray_deployment.py"
112 | - python prepare_bintray_deployment.py "windows%ARCHITECTURE%-%JAVA_VERSION%-python%PYTHON_VERSION%" "dist\*.jar" "dist\*.tar.gz" "dist\*.whl" "dist\*.zip"
113 | - set /p BINTRAY_VERSION=<.bintray_version.txt
114 |
115 | artifacts:
116 | - path: dist\*.jar
117 | - path: dist\*.tar.gz
118 | - path: dist\*.whl
119 | - path: dist\*.zip
120 | - path: '*-bintray.zip'
121 |
122 | deploy:
123 | - provider: BinTray
124 | username: $(APPVEYOR_ACCOUNT_NAME)
125 | api_key:
126 | secure: cMLbWadS24XyCD5RU3XM+2GrgqtTfoBgKwkQXyDyVa/3QOF1rXheHki+BRXP5tLo
127 | subject: $(APPVEYOR_ACCOUNT_NAME)
128 | repo: pkgs
129 | package: $(APPVEYOR_PROJECT_NAME)
130 | version: $(BINTRAY_VERSION)
131 | publish: true
132 | override: true
133 | explode: true
134 | artifact: /.*-bintray\.zip/
135 |
--------------------------------------------------------------------------------
/build.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/ci_requirements.txt:
--------------------------------------------------------------------------------
1 | codecov
2 | coverage
3 | -rtest_requirements.txt
4 |
--------------------------------------------------------------------------------
/examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Open Fortran Parser XML wrapper examples"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "These examples are for Python wrapper to the OFP XML. Run `ant` to build the OFP XML itself before executing them. "
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 1,
20 | "metadata": {},
21 | "outputs": [],
22 | "source": [
23 | "import pathlib\n",
24 | "import tempfile\n",
25 | "import xml.etree.ElementTree as ET\n",
26 | "\n",
27 | "import open_fortran_parser"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": 2,
33 | "metadata": {},
34 | "outputs": [
35 | {
36 | "name": "stdout",
37 | "output_type": "stream",
38 | "text": [
39 | "\n",
40 | " \n",
41 | " \n",
42 | " \n",
43 | " \n",
44 | " \n",
45 | " \n",
46 | " \n",
47 | " \n",
48 | " \n",
49 | " \n",
50 | " \n",
51 | " \n",
52 | " \n",
53 | " \n",
54 | " \n",
55 | " \n",
56 | " \n",
57 | " \n",
58 | " \n",
59 | " \n",
60 | " \n",
61 | " \n",
62 | " \n"
63 | ]
64 | }
65 | ],
66 | "source": [
67 | "xml_tree = open_fortran_parser.parse(pathlib.Path('test', 'examples', 'empty.f'))\n",
68 | "ET.dump(xml_tree)"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": 3,
74 | "metadata": {
75 | "scrolled": true
76 | },
77 | "outputs": [
78 | {
79 | "name": "stdout",
80 | "output_type": "stream",
81 | "text": [
82 | "HowBadCanItBe\n"
83 | ]
84 | }
85 | ],
86 | "source": [
87 | "code = '''\n",
88 | "program HowBadCanItBe\n",
89 | "\n",
90 | " goto main_sub3\n",
91 | "\n",
92 | "end program\n",
93 | "'''\n",
94 | "\n",
95 | "with tempfile.NamedTemporaryFile('w+') as tmp:\n",
96 | " print(code, file=tmp, flush=True)\n",
97 | " xml_tree = open_fortran_parser.parse(pathlib.Path(tmp.name), raise_on_error=True)\n",
98 | "\n",
99 | "for prog in xml_tree.findall('.//program'):\n",
100 | " print(prog.attrib['name'])"
101 | ]
102 | }
103 | ],
104 | "metadata": {
105 | "kernelspec": {
106 | "display_name": "Python 3",
107 | "language": "python",
108 | "name": "python3"
109 | },
110 | "language_info": {
111 | "codemirror_mode": {
112 | "name": "ipython",
113 | "version": 3
114 | },
115 | "file_extension": ".py",
116 | "mimetype": "text/x-python",
117 | "name": "python",
118 | "nbconvert_exporter": "python",
119 | "pygments_lexer": "ipython3",
120 | "version": "3.8.0"
121 | }
122 | },
123 | "nbformat": 4,
124 | "nbformat_minor": 2
125 | }
126 |
--------------------------------------------------------------------------------
/logging.properties:
--------------------------------------------------------------------------------
1 | handlers = java.util.logging.ConsoleHandler, java.util.logging.FileHandler
2 | .level = FINEST
3 |
4 | java.util.logging.SimpleFormatter.format=[%1$tF %1$tT] [%4$-7s] %5$s %n
5 |
6 | java.util.logging.ConsoleHandler.level = FINE
7 | java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
8 |
9 | java.util.logging.FileHandler.level = FINER
10 | java.util.logging.FileHandler.limit = 524288
11 | java.util.logging.FileHandler.count = 100
12 | java.util.logging.FileHandler.pattern = test/results/logs/parser_%g.log
13 | java.util.logging.FileHandler.append = true
14 |
--------------------------------------------------------------------------------
/open_fortran_parser/.gitignore:
--------------------------------------------------------------------------------
1 | /*.jar
2 |
--------------------------------------------------------------------------------
/open_fortran_parser/__init__.py:
--------------------------------------------------------------------------------
1 | """Package marker file for open_fortran_parser package."""
2 |
3 | from .config import JAVA as java_config
4 | from .parser_wrapper import execute_parser, parse
5 |
--------------------------------------------------------------------------------
/open_fortran_parser/__main__.py:
--------------------------------------------------------------------------------
"""Entry point of open_fortran_parser package."""

from .main import main

# Allows running the package directly: "python -m open_fortran_parser".
if __name__ == '__main__':
    main()
7 |
--------------------------------------------------------------------------------
/open_fortran_parser/_version.py:
--------------------------------------------------------------------------------
"""Version of open_fortran_parser package."""

from version_query import predict_version_str

# Version is computed at import time by version_query rather than hard-coded
# (presumably derived from repository/package metadata -- see version_query docs).
VERSION = predict_version_str()
6 |
--------------------------------------------------------------------------------
/open_fortran_parser/config.py:
--------------------------------------------------------------------------------
"""Default configuration for open_fortran_parser package."""

import os
import pathlib
# Import the submodule explicitly: "import urllib" alone does not guarantee
# that urllib.parse is loaded (importing a package does not import submodules).
import urllib.parse

from ._version import VERSION

# Where the .jar dependencies are expected at runtime: inside the installed package.
DEPENDENCIES_PATH = pathlib.Path(__file__).resolve().parent

# Where the .jar dependencies are kept during development: the repository's lib/ folder.
DEV_DEPENDENCIES_PATH = DEPENDENCIES_PATH.parent.joinpath('lib')

# Each entry maps a human-readable dependency name to a pair:
# (parsed base URL of the download location, jar file name).
COMMON_DEPENDENCIES = {
    'ANTLR 3.5.2': (
        urllib.parse.urlparse(
            'https://github.com/mbdevpl/open-fortran-parser/releases/download/v0.8.5-1/'),
        pathlib.Path('antlr-3.5.2-complete.jar')),
    'Open Fortran Parser 0.8.5-1': (
        urllib.parse.urlparse(
            'https://github.com/mbdevpl/open-fortran-parser/releases/download/v0.8.5-1/'),
        pathlib.Path('OpenFortranParser-0.8.5-1.jar')),
    'Apache Commons CLI 1.4': (
        urllib.parse.urlparse(
            'https://github.com/mbdevpl/open-fortran-parser-xml/releases/download/v0.1.0/'),
        pathlib.Path('commons-cli-1.4.jar'))}

# Development dependencies: the common ones plus JaCoCo for Java code coverage.
DEV_DEPENDENCIES = COMMON_DEPENDENCIES.copy()

DEV_DEPENDENCIES.update({
    'JaCoCo agent 0.8.3': (
        urllib.parse.urlparse(
            'https://search.maven.org/remotecontent?filepath=org/jacoco/org.jacoco.agent/0.8.3/'),
        pathlib.Path('org.jacoco.agent-0.8.3-runtime.jar')),
    'JaCoCo CLI 0.8.3': (
        urllib.parse.urlparse(
            'https://search.maven.org/remotecontent?filepath=org/jacoco/org.jacoco.cli/0.8.3/'),
        pathlib.Path('org.jacoco.cli-0.8.3-nodeps.jar')),
    })

# Runtime dependencies: the common ones plus the XML generator jar itself,
# in the version matching this package.
DEPENDENCIES = COMMON_DEPENDENCIES.copy()

DEPENDENCIES.update({
    'Open Fortran Parser XML {}'.format(VERSION): (
        urllib.parse.urlparse(
            'https://github.com/mbdevpl/open-fortran-parser-xml/releases/download/v{}/'
            .format(VERSION)),
        pathlib.Path('OpenFortranParserXML-{}.jar'.format(VERSION)))})

# Superseded jar versions, removed by the --cleanup-deps command-line option.
OUTDATED_DEPENDENCIES = {
    'Open Fortran Parser 0.8.4-1': pathlib.Path('OpenFortranParser-0.8.4-1.jar'),
    'Open Fortran Parser 0.8.4-2': pathlib.Path('OpenFortranParser-0.8.4-2.jar'),
    'Open Fortran Parser 0.8.4-3': pathlib.Path('OpenFortranParser-0.8.4-3.jar'),
    'ANTLR 3.3': pathlib.Path('antlr-3.3-complete.jar'),
    'Open Fortran Parser 0.8.4-4': pathlib.Path('OpenFortranParser-0.8.4-4.jar')}

# Java invocation settings used by parser_wrapper.execute_parser.
JAVA = {
    'executable': pathlib.Path('java'),
    'classpath': pathlib.Path(DEPENDENCIES_PATH, '*'),
    'options': None,
    'ofp_class': 'fortran.ofp.FrontEnd',
    'ofp_xml_class': 'fortran.ofp.XMLPrinter'}

# Open Fortran Compiler invocation settings used by ofc_wrapper.
OFC = {
    'executable': pathlib.Path('ofc'),
    'path': None}
66 |
--------------------------------------------------------------------------------
/open_fortran_parser/dependencies.py:
--------------------------------------------------------------------------------
1 | """Dependency downloader for open_fortran_parser."""
2 |
3 | import logging
4 | import os
5 | import pathlib
6 | import platform
7 | import typing as t
8 | import urllib
9 |
10 | import wget
11 |
12 | _LOG = logging.getLogger(__name__)
13 |
14 |
def ensure_dependencies(
        dependencies: t.Mapping[str, t.Tuple[urllib.parse.ParseResult, pathlib.Path]],
        target_dir: pathlib.Path, download: bool = True, silent: bool = False) -> None:
    """Download missing dependencies into the target directory.

    :param dependencies: mapping of human-readable dependency name
        to a (parsed base URL, jar file name) pair.
    :param target_dir: directory to store the files in; created if absent.
    :param download: if False, only report which dependencies are missing.
    :param silent: if True, suppress log messages and the download progress bar.
    """
    if not target_dir.exists():
        _LOG.warning('Creating directory "%s"...', target_dir)
        target_dir.mkdir(parents=True, exist_ok=True)
    for dependency, (url_root, filename) in dependencies.items():
        path = target_dir.joinpath(filename)
        if path.is_file():
            # Bug fix: "continue" used to be guarded by "and not silent" together
            # with the log message, so with silent=True an already-present
            # dependency was needlessly re-downloaded.
            if not silent:
                _LOG.warning('%s is present already.', dependency)
            continue
        if not download:
            _LOG.warning('%s is not present!', dependency)
            continue
        url = urllib.parse.urlunparse(url_root) + str(filename)
        _LOG.warning('Downloading %s from URL "%s" to path "%s"...', dependency, url, path)
        wget.download(url, str(path), bar=None if silent else wget.bar_adaptive)
        if not silent:
            print()
35 |
def cleanup_old_dependencies(
        outdated_dependencies, current_dir: pathlib.Path,
        backup_dir: t.Optional[pathlib.Path] = None):
    """Delete (or move away) outdated dependency files.

    :param outdated_dependencies: mapping of human-readable dependency name
        to its jar file name.
    :param current_dir: directory in which the outdated files are looked up.
    :param backup_dir: if given, outdated files are moved there (directory is
        created when necessary) instead of being deleted.
    """
    if backup_dir is not None and not backup_dir.exists():
        _LOG.warning('Creating directory "%s"...', backup_dir)
        backup_dir.mkdir(parents=True, exist_ok=True)
    for dependency, filename in outdated_dependencies.items():
        path = current_dir.joinpath(filename)
        if not path.is_file():
            _LOG.debug('%s already does not exist.', dependency)
            continue
        if backup_dir is None:
            _LOG.warning('Deleting %s in path "%s"...', dependency, current_dir)
            path.unlink()
        else:
            _LOG.warning('Moving %s from path "%s" to path "%s"...',
                         dependency, current_dir, backup_dir)
            path.rename(backup_dir.joinpath(filename))
54 |
--------------------------------------------------------------------------------
/open_fortran_parser/main.py:
--------------------------------------------------------------------------------
1 | """Main function for open_fortran_parser package."""
2 |
3 | import argparse
4 | import logging
5 | import pathlib
6 | import sys
7 |
8 | from ._version import VERSION
9 | from .config import \
10 | DEV_DEPENDENCIES_PATH, DEV_DEPENDENCIES, DEPENDENCIES_PATH, DEPENDENCIES, OUTDATED_DEPENDENCIES
11 | from .parser_wrapper import execute_parser
12 | from .dependencies import ensure_dependencies, cleanup_old_dependencies
13 |
14 | logging.basicConfig()
15 |
16 | _LOG = logging.getLogger(__name__)
17 |
18 |
def main(args=None, namespace=None):
    """Launch Open Fortran Parser.

    Depending on the parsed flags, either perform a dependency-management
    action (download, check, or clean up) and return, or run the parser
    on the given input file and emit the resulting XML.

    :param args: command-line arguments; argparse defaults to sys.argv[1:]
    :param namespace: optional namespace object for argparse to populate
    """
    parser = argparse.ArgumentParser(
        prog='open_fortran_parser',
        description='''Python wrapper around XML generator for Open Fortran Parser''',
        epilog='''Copyright 2017-2019 by the contributors, Apache License 2.0,
https://github.com/mbdevpl/open-fortran-parser-xml''',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.version = VERSION
    parser.add_argument('--version', action='version')

    parser.add_argument(
        'input', nargs='?', type=pathlib.Path, help='''path to Fortran source code file''')
    parser.add_argument(
        'output', nargs='?', type=pathlib.Path, default=None,
        help='''writable path for where to store resulting XML, defaults to stdout
        if no path provided''')
    parser.add_argument(
        '-v', '--verbosity', type=int, default=100, help='''level of verbosity, from 0 to 100''')
    parser.add_argument(
        '--get-dependencies', '--deps', action='store_true',
        help=argparse.SUPPRESS)  # hidden: download dependencies for development and exit
    parser.add_argument(
        '--check-dependencies', '--check-deps', action='store_true',
        help='''check if all required dependencies are present and exit''')
    parser.add_argument(
        '--cleanup-dependencies', '--cleanup-deps', action='store_true',
        help=argparse.SUPPRESS)  # hidden: delete outdated development dependencies and exit

    args = parser.parse_args(args, namespace)

    # Dependency-management flags are checked in order; the first one set
    # performs its action and returns without parsing anything.
    if args.get_dependencies:
        ensure_dependencies(DEV_DEPENDENCIES, DEV_DEPENDENCIES_PATH)
        return

    if args.check_dependencies:
        ensure_dependencies(DEPENDENCIES, DEPENDENCIES_PATH, download=False)
        return

    if args.cleanup_dependencies:
        cleanup_old_dependencies(OUTDATED_DEPENDENCIES, DEV_DEPENDENCIES_PATH)
        return

    # An input file is mandatory when no dependency-management flag was given.
    if not args.input:
        parser.print_help(sys.stderr)
        parser.exit(2)

    process = execute_parser(args.input, args.output, args.verbosity)
    if process.stderr:
        # Relay anything the parser wrote to its stderr as a warning.
        _LOG.warning(process.stderr.decode().rstrip())
    process.check_returncode()
    if args.output is None:
        # No output path given: the XML arrived on the subprocess's stdout.
        print(process.stdout.decode().rstrip())
73 |
--------------------------------------------------------------------------------
/open_fortran_parser/ofc_wrapper.py:
--------------------------------------------------------------------------------
1 | """Implementation of Python wrapper for Open Fortran Compiler."""
2 |
3 | import enum
4 | import logging
5 | import pathlib
6 | import subprocess
7 | import typing as t
8 |
9 | from .config import OFC as ofc_config
10 |
11 | _LOG = logging.getLogger(__name__)
12 |
13 |
@enum.unique
class CodeForm(enum.IntEnum):
    """Layout of Fortran source, passed to OFC as --fixed-form / --free-form."""
    Fixed = 1
    Free = 2


def execute_compiler(
        input_path: pathlib.Path, output_path: t.Optional[pathlib.Path],
        indent: int = 4, form: t.Optional[CodeForm] = None) -> subprocess.CompletedProcess:
    """Invoke Open Fortran Compiler on a file and capture its output.

    The compiler is always run with --sema-tree; when output_path is given,
    the captured standard output is additionally written to that file.
    """
    assert isinstance(input_path, pathlib.Path), type(input_path)
    assert output_path is None or isinstance(output_path, pathlib.Path), type(output_path)
    assert indent is None or isinstance(indent, int), type(indent)
    assert indent is None or indent > 0, indent
    assert form is None or isinstance(form, CodeForm), type(form)

    executable = ofc_config['executable']
    if ofc_config['path'] is not None:
        executable = ofc_config['path'].joinpath(executable)
    command = [str(executable), '--sema-tree']
    if indent is not None:
        command.extend(['--indent-width', str(indent)])
    if form is not None:
        command.append('--{}-form'.format(form.name.lower()))
    command.append(str(input_path))

    _LOG.debug('Executing %s...', command)
    completed = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    if output_path is not None:
        with open(str(output_path), 'wb') as output_file:
            output_file.write(completed.stdout)

    return completed
49 |
50 |
def transpile(
        input_path: pathlib.Path, indent: int = 4, form: t.Optional[CodeForm] = None,
        raise_on_error: bool = False) -> str:
    """Transpile possibly non-standard Fortran code into standard-compliant source.

    Runs Open Fortran Compiler on the file and returns its standard output
    as text; compiler failures are logged, and raised if raise_on_error is set.
    """
    assert isinstance(input_path, pathlib.Path), type(input_path)
    assert indent is None or isinstance(indent, int), type(indent)
    assert indent is None or indent > 0, indent
    assert form is None or isinstance(form, CodeForm), type(form)

    completed = execute_compiler(input_path, None, indent, form)
    transpiled_code = completed.stdout.decode()
    if completed.returncode != 0:
        _LOG.warning('%s', transpiled_code)
        _LOG.error('Open Fortran Compiler returned %i', completed.returncode)
        if completed.stderr:
            _LOG.warning(completed.stderr.decode())
        if raise_on_error:
            completed.check_returncode()

    return transpiled_code
71 |
--------------------------------------------------------------------------------
/open_fortran_parser/parser_wrapper.py:
--------------------------------------------------------------------------------
1 | """Implementation of Python wrapper for OpenFortranParserXML."""
2 |
3 | import logging
4 | import pathlib
5 | import subprocess
6 | import typing as t
7 | import xml.etree.ElementTree as ET
8 |
9 | from .config import JAVA as java_config
10 |
11 | _LOG = logging.getLogger(__name__)
12 |
13 |
def execute_parser(
        input_path: pathlib.Path, output_path: t.Optional[pathlib.Path],
        verbosity: int = 100, tokenize_instead: bool = False, *args) -> subprocess.CompletedProcess:
    """Run the XML-generating Open Fortran Parser as a subprocess.

    The java command line is assembled from the current configuration and
    the given parameters. If tokenize_instead is True, given file will not
    be parsed, but just tokenized instead.
    """
    command = [str(java_config['executable'])]
    classpath = java_config['classpath']
    if classpath is not None:
        command.extend(['-cp', str(classpath)])
    jvm_options = java_config['options']
    if jvm_options is not None:
        command.extend(jvm_options)
    command.append(java_config['ofp_class'])
    if tokenize_instead:
        command.append('--tokens')
    command.extend(args)
    command.extend(['--class', java_config['ofp_xml_class'], '--verbosity', str(verbosity)])
    if output_path is not None:
        command.extend(['--output', str(output_path)])
    command.append(str(input_path))

    _LOG.debug('Executing %s...', command)
    return subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
38 |
39 |
def parse(
        input_path: pathlib.Path, verbosity: int = 100, raise_on_error: bool = False) -> ET.Element:
    """Run the parser on the given Fortran file and return the XML parse tree root."""
    completed = execute_parser(input_path, None, verbosity)
    if completed.returncode != 0:
        _LOG.warning('%s', completed.stdout.decode())
        _LOG.error('Open Fortran Parser returned %i', completed.returncode)
        if completed.stderr:
            _LOG.warning(completed.stderr.decode())
        if raise_on_error:
            completed.check_returncode()

    return ET.fromstring(completed.stdout)
54 |
--------------------------------------------------------------------------------
/open_fortran_parser/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mbdevpl/open-fortran-parser-xml/127f4ec2ba7cd06eb010794560eb8e4b9494fdfe/open_fortran_parser/py.typed
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | version-query
2 | wget
3 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """Setup script for open_fortran_parser package."""
2 |
3 | import setup_boilerplate
4 |
5 |
class Package(setup_boilerplate.Package):

    """Package metadata."""

    # Distribution name on PyPI (underscored variant is the importable package).
    name = 'open-fortran-parser'
    description = 'Python wrapper for XML output generator for Open Fortran Parser'
    url = 'https://github.com/mbdevpl/open-fortran-parser-xml'
    # Trove classifiers, see https://pypi.org/classifiers/
    classifiers = [
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3 :: Only',
        'Topic :: Education',
        'Topic :: Scientific/Engineering',
        'Topic :: Utilities']
    keywords = ['abstract syntax tree', 'ast', 'parser', 'xml']


if __name__ == '__main__':
    Package.setup()
35 |
--------------------------------------------------------------------------------
/setup_boilerplate.py:
--------------------------------------------------------------------------------
1 | """Below code is generic boilerplate and normally should not be changed.
2 |
3 | To avoid setup script boilerplate, create "setup.py" file with the minimal contents as given
4 | in SETUP_TEMPLATE below, and modify it according to the specifics of your package.
5 |
6 | See the implementation of setup_boilerplate.Package for default metadata values and available
7 | options.
8 | """
9 |
10 | import pathlib
11 | import runpy
12 | import sys
13 | import typing as t
14 |
15 | import setuptools
16 |
# date of the most recent update of this boilerplate
__updated__ = '2019-06-04'

# minimal contents of a "setup.py" that uses this boilerplate (see module docstring)
SETUP_TEMPLATE = '''"""Setup script."""

import setup_boilerplate


class Package(setup_boilerplate.Package):

    """Package metadata."""

    name = ''
    description = ''
    url = 'https://github.com/mbdevpl/...'
    classifiers = [
        'Development Status :: 1 - Planning',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3 :: Only']
    keywords = []


if __name__ == '__main__':
    Package.setup()
'''

# directory containing this file; relative paths are resolved against it
HERE = pathlib.Path(__file__).resolve().parent
46 |
47 |
def find_version(
        package_name: str, version_module_name: str = '_version',
        version_variable_name: str = 'VERSION') -> str:
    """Simulate "from package_name._version import VERSION" and return the VERSION value.

    Only the module containing the version is executed, not the whole package,
    to avoid importing the package just to read its version. Consequently,
    relative imports in that module would break the setup.
    """
    module_dir = package_name.replace('-', '_')
    module_path = '{}/{}.py'.format(module_dir, version_module_name)
    module_globals = runpy.run_path(module_path)
    return module_globals[version_variable_name]
59 |
60 |
def find_packages(root_directory: str = '.') -> t.List[str]:
    """Find packages to pack."""
    # test packages are excluded only from distributions, not from plain builds
    building_distribution = 'bdist_wheel' in sys.argv or 'bdist' in sys.argv
    excluded = ['test', 'test.*'] if building_distribution else []
    return setuptools.find_packages(root_directory, exclude=excluded)
66 |
67 |
def parse_requirements(
        requirements_path: str = 'requirements.txt') -> t.List[str]:
    """Read contents of requirements.txt file and return data from its relevant lines.

    Only non-empty and non-comment lines are relevant.
    """
    relevant = []
    with HERE.joinpath(requirements_path).open() as reqs_file:
        for line in reqs_file.read().splitlines():
            stripped = line.strip()
            if stripped and not stripped.startswith('#'):
                relevant.append(stripped)
    return relevant
81 |
82 |
def partition_version_classifiers(
        classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ',
        only_suffix: str = ' :: Only') -> t.Tuple[t.List[str], t.List[str]]:
    """Find version number classifiers in given list and partition them into 2 groups.

    Returns a pair (versions_min, versions_only): versions declared as minimum
    supported versions (e.g. "3.5") and versions declared as the only supported
    ones (e.g. "3 :: Only"), each converted to a tuple of ints.
    """
    versions_min, versions_only = [], []
    for classifier in classifiers:
        version = classifier.replace(version_prefix, '')
        versions = versions_min
        if version.endswith(only_suffix):
            version = version.replace(only_suffix, '')
            versions = versions_only
        try:
            versions.append(tuple([int(_) for _ in version.split('.')]))
        except ValueError:
            # not a version classifier (e.g. license or topic) - ignore it
            pass
    return versions_min, versions_only


def find_required_python_version(
        classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ',
        only_suffix: str = ' :: Only') -> t.Optional[str]:
    """Determine the minimum required Python version.

    Returns a version specifier string (e.g. '>=3.5', or an exact version like '3'
    when only a ":: Only" classifier is present), or None when the classifiers
    carry no version information.

    Raises ValueError when the version classifiers contradict each other.
    """
    versions_min, versions_only = partition_version_classifiers(
        classifiers, version_prefix, only_suffix)
    if len(versions_only) > 1:
        raise ValueError(
            'more than one "{}" version encountered in {}'.format(only_suffix, versions_only))
    only_version = None
    if len(versions_only) == 1:
        only_version = versions_only[0]
        # the consistency check is meaningful only when an "only" version exists;
        # previously this loop ran unconditionally and crashed with
        # "len(None)" whenever minimum versions were given without a ":: Only" classifier
        for version in versions_min:
            if version[:len(only_version)] != only_version:
                raise ValueError(
                    'the "{}" version {} is inconsistent with version {}'
                    .format(only_suffix, only_version, version))
    min_supported_version = None
    for version in versions_min:
        # prefer the lowest version among the most specific ones
        if min_supported_version is None or \
                (len(version) >= len(min_supported_version) and version < min_supported_version):
            min_supported_version = version
    if min_supported_version is None:
        if only_version is not None:
            return '.'.join([str(_) for _ in only_version])
    else:
        return '>=' + '.'.join([str(_) for _ in min_supported_version])
    return None
129 |
130 |
def resolve_relative_rst_links(text: str, base_link: str):
    """Resolve all relative links in a given RST document.

    Each simple reference to an existing local file, written as
    `name <relative/path>`_ or `<relative/path>`_, is rewritten into
    `name <{base_link}relative/path>`_ so the link also works outside the repository
    (e.g. on PyPI).
    """
    import docutils.nodes
    import docutils.parsers.rst
    import docutils.utils

    def parse_rst(text: str) -> docutils.nodes.document:
        """Parse text assuming it's an RST markup."""
        parser = docutils.parsers.rst.Parser()
        components = (docutils.parsers.rst.Parser,)
        # NOTE(review): docutils.frontend is not imported explicitly - presumably it is
        # pulled in by one of the imports above; confirm this holds across docutils versions
        settings = docutils.frontend.OptionParser(components=components).get_default_values()
        document = docutils.utils.new_document('', settings=settings)
        parser.parse(text, document)
        return document

    class SimpleRefCounter(docutils.nodes.NodeVisitor):
        """Collect all simple references to local project files in a docutils document."""

        def __init__(self, *args, **kwargs):
            """Initialize the SimpleRefCounter object."""
            super().__init__(*args, **kwargs)
            # references found so far, in document order
            self.references = []

        def visit_reference(self, node: docutils.nodes.reference) -> None:
            """Call for "reference" nodes."""
            # only simple references (single text child, with both name and target) qualify
            if len(node.children) != 1 or not isinstance(node.children[0], docutils.nodes.Text) \
                    or not all(_ in node.attributes for _ in ('name', 'refuri')):
                return
            path = pathlib.Path(node.attributes['refuri'])
            try:
                if path.is_absolute():
                    return
                resolved_path = path.resolve()
            except FileNotFoundError:  # in resolve(), prior to Python 3.6
                return
            except OSError:  # in is_absolute() and resolve(), on URLs in Windows
                return
            try:
                resolved_path.relative_to(HERE)
            except ValueError:
                # the target lies outside of this project's directory - leave the link alone
                return
            # NOTE(review): is_file() is checked on the relative path, i.e. against the
            # current working directory rather than HERE - confirm this is intended
            if not path.is_file():
                return
            assert node.attributes['name'] == node.children[0].astext()
            self.references.append(node)

        def unknown_visit(self, node: docutils.nodes.Node) -> None:
            """Call for unknown node types."""
            return

    document = parse_rst(text)
    visitor = SimpleRefCounter(document)
    document.walk(visitor)
    for target in visitor.references:
        name = target.attributes['name']
        uri = target.attributes['refuri']
        new_link = '`{} <{}{}>`_'.format(name, base_link, uri)
        if name == uri:
            # anonymous form `<uri>`_ renders its URI as the name
            text = text.replace('`<{}>`_'.format(uri), new_link)
        else:
            text = text.replace('`{} <{}>`_'.format(name, uri), new_link)
    return text
196 |
197 |
class Package:
    """Default metadata and behaviour for a Python package setup script."""

    root_directory = '.'  # type: str
    """Root directory of the source code of the package, relative to the setup.py file location."""

    # name of the package as published on PyPI; must be set by the subclass
    name = None  # type: str

    version = None  # type: str
    """If None, it will be obtained from "package_name._version.VERSION" variable."""

    # short one-line description of the package; must be set by the subclass
    description = None  # type: str

    long_description = None  # type: str
    """If None, it will be generated from readme."""

    long_description_content_type = None  # type: str
    """If None, it will be set according to readme file extension.

    For this field to be automatically set, also long_description field has to be None.
    """

    url = 'https://github.com/mbdevpl'  # type: str
    download_url = None  # type: str
    author = 'Mateusz Bysiek'  # type: str
    author_email = 'mateusz.bysiek@gmail.com'  # type: str
    # maintainer = None  # type: str
    # maintainer_email = None  # type: str
    license_str = 'Apache License 2.0'  # type: str

    classifiers = []  # type: t.List[str]
    """List of valid project classifiers: https://pypi.org/pypi?:action=list_classifiers"""

    keywords = []  # type: t.List[str]

    packages = None  # type: t.List[str]
    """If None, determined with help of setuptools."""

    # passed through to setuptools.setup() unchanged
    package_data = {}
    exclude_package_data = {}

    install_requires = None  # type: t.List[str]
    """If None, determined using requirements.txt."""

    extras_require = {}  # type: t.Mapping[str, t.List[str]]
    """A dictionary containing entries of type 'some_feature': ['requirement1', 'requirement2']."""

    python_requires = None  # type: str
    """If None, determined from provided classifiers."""

    entry_points = {}  # type: t.Mapping[str, t.List[str]]
    """A dictionary used to enable automatic creation of console scripts, gui scripts and plugins.

    Example entry:
    'console_scripts': ['script_name = package.subpackage:function']
    """

    test_suite = 'test'  # type: str

    @classmethod
    def try_fields(cls, *names) -> t.Optional[t.Any]:
        """Return first existing of given class field names.

        Raises AttributeError when none of the given fields exists.
        """
        for name in names:
            if hasattr(cls, name):
                return getattr(cls, name)
        raise AttributeError((cls, names))

    @classmethod
    def parse_readme(cls, readme_path: str = 'README.rst',
                     encoding: str = 'utf-8') -> t.Tuple[str, str]:
        """Parse readme and resolve relative links in it if it is feasible.

        Links are resolved if readme is in rst format and the package is hosted on GitHub.

        Return a pair: the (possibly link-resolved) readme text and its content type.
        """
        readme_path = pathlib.Path(readme_path)
        with HERE.joinpath(readme_path).open(encoding=encoding) as readme_file:
            long_description = readme_file.read()  # type: str

        if readme_path.suffix.lower() == '.rst' and cls.url.startswith('https://github.com/'):
            # on GitHub, relative links can be resolved against the version release tag
            base_url = '{}/blob/v{}/'.format(cls.url, cls.version)
            long_description = resolve_relative_rst_links(long_description, base_url)

        long_description_content_type = {'.rst': 'text/x-rst', '.md': 'text/markdown'}.get(
            readme_path.suffix.lower(), 'text/plain')
        long_description_content_type += '; charset=UTF-8'

        return long_description, long_description_content_type

    @classmethod
    def prepare(cls) -> None:
        """Fill in possibly missing package metadata."""
        if cls.version is None:
            cls.version = find_version(cls.name)
        if cls.long_description is None:
            cls.long_description, cls.long_description_content_type = cls.parse_readme()
        if cls.packages is None:
            cls.packages = find_packages(cls.root_directory)
        if cls.install_requires is None:
            cls.install_requires = parse_requirements()
        if cls.python_requires is None:
            cls.python_requires = find_required_python_version(cls.classifiers)

    @classmethod
    def setup(cls) -> None:
        """Call setuptools.setup with correct arguments."""
        cls.prepare()
        setuptools.setup(
            name=cls.name, version=cls.version, description=cls.description,
            long_description=cls.long_description,
            long_description_content_type=cls.long_description_content_type,
            url=cls.url, download_url=cls.download_url,
            author=cls.author, author_email=cls.author_email,
            maintainer=cls.try_fields('maintainer', 'author'),
            maintainer_email=cls.try_fields('maintainer_email', 'author_email'),
            license=cls.license_str, classifiers=cls.classifiers, keywords=cls.keywords,
            packages=cls.packages, package_dir={'': cls.root_directory},
            include_package_data=True,
            package_data=cls.package_data, exclude_package_data=cls.exclude_package_data,
            install_requires=cls.install_requires, extras_require=cls.extras_require,
            python_requires=cls.python_requires,
            entry_points=cls.entry_points, test_suite=cls.test_suite)
319 |
--------------------------------------------------------------------------------
/src/fortran/ofp/XMLPrinterBase.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp;
2 |
3 | import java.io.File;
4 | import java.io.IOException;
5 | import java.util.ArrayList;
6 | import java.util.Arrays;
7 | import java.util.HashMap;
8 | import java.util.List;
9 | import java.util.Map;
10 | import java.util.logging.Level;
11 | import java.util.logging.Logger;
12 |
13 | import javax.xml.parsers.DocumentBuilder;
14 | import javax.xml.parsers.DocumentBuilderFactory;
15 | import javax.xml.transform.OutputKeys;
16 | import javax.xml.transform.Transformer;
17 | import javax.xml.transform.TransformerException;
18 | import javax.xml.transform.TransformerFactory;
19 | import javax.xml.transform.dom.DOMSource;
20 | import javax.xml.transform.stream.StreamResult;
21 |
22 | import org.antlr.runtime.Token;
23 | import org.apache.commons.cli.CommandLine;
24 | import org.w3c.dom.Attr;
25 | import org.w3c.dom.Document;
26 | import org.w3c.dom.Element;
27 | import org.w3c.dom.NamedNodeMap;
28 | import org.w3c.dom.NodeList;
29 |
30 | import fortran.ofp.parser.java.TokensList;
31 | import fortran.ofp.parser.java.CodeBounds;
32 | import fortran.ofp.parser.java.FortranLexer;
33 | import fortran.ofp.parser.java.FortranParserActionPrint;
34 | import fortran.ofp.parser.java.IFortranParser;
35 |
36 | /**
37 | * Base class for XML output generator for Open Fortran Parser.
38 | *
39 | * @author Mateusz Bysiek https://mbdevpl.github.io/
40 | */
41 | public class XMLPrinterBase extends FortranParserActionPrint {
42 |
	private static final Logger LOG = Logger.getLogger(XMLPrinterBase.class.getName());

	/**
	 * Parsed command-line arguments.
	 */
	private CommandLine cmd;

	/**
	 * Verbosity level from 0 to 100.
	 */
	protected int verbosity;

	/**
	 * XML document.
	 */
	protected Document doc;

	/**
	 * XML root node, the outermost open XML context.
	 */
	protected Element root;

	/**
	 * Current open XML context.
	 */
	protected Element context = null;

	/**
	 * Tag names of contexts for which token locations are tracked.
	 */
	static public ArrayList tokenLocationsWhitelist = new ArrayList(
			Arrays.asList(new String[] { "file", "members", "body", "specification" }));

	/**
	 * Mapping from list-related parser event names to XML tag names; populated via addListContext().
	 */
	static public Map listContexts = new HashMap();
74 |
75 | static public void addListContext(String eventNamePrefix, String listName, String elementName) {
76 | listContexts.put(eventNamePrefix + "-list__begin", listName);
77 | listContexts.put(eventNamePrefix + "-list-part", elementName);
78 | listContexts.put(eventNamePrefix + "-list", listName);
79 | }
80 |
	static {
		// Register tag names for all handled list-type grammar constructs.
		// Commented-out entries are constructs that currently get no dedicated tags.
		addListContext("generic-name", "names", "name");
		addListContext("label", "labels", "label");
		/*
		addListContext("type-attr-spec", "", "");
		addListContext("type-param-decl", "", "");
		addListContext("component-attr-spec", "", "");
		addListContext("component-decl", "", "");
		addListContext("deferred-shape-spec", "", "");
		addListContext("proc-component-attr-spec", "", "");
		addListContext("binding-attr", "", "");
		addListContext("type-param-spec", "", "");
		addListContext("component-spec", "", "");
		addListContext("enumerator", "", "");
		*/
		// addListContext("ac-value", /*"array-constructor-values"*/ null, /*"value"*/ null); // not straightforward
		addListContext("entity-decl", "variables", "variable");
		/*
		addListContext("explicit-shape-spec", "", "");
		*/
		addListContext("access-id", "names", null);
		// addListContext("allocatable-decl", null, null); // currently not necessary
		/*
		addListContext("bind-entity", "", "");
		*/
		// addListContext("codimension-decl", null, null); // currently not necessary
		addListContext("data-stmt-object", "variables", null);
		/*
		addListContext("data-i-do-object", "", "");
		*/
		addListContext("data-stmt-value", "values", null);
		addListContext("named-constant-def", "constants", null);
		addListContext("pointer-decl", "names", null);
		/*
		addListContext("cray-pointer-assoc", "", "");
		*/
		addListContext("saved-entity", "names", "name");
		// addListContext("target-decl", null, null); // currently not necessary
		/*
		addListContext("implicit-spec", "", "");
		*/
		addListContext("letter-spec", "letter-ranges", null);
		/*
		addListContext("namelist-group-object", "", "");
		*/
		addListContext("equivalence-set", "equivalents", "equivalent");
		// addListContext("equivalence-object", null, null); // currently not necessary
		addListContext("common-block-object", "objects", null);
		addListContext("section-subscript", "subscripts", /*"subscript"*/ null);
		addListContext("alloc-opt", "keyword-arguments", null);
		/*
		addListContext("cosubscript", "", "");
		*/
		addListContext("allocation", "expressions", null);
		addListContext("allocate-object", "expressions", null);
		/*
		addListContext("allocate-shape-spec", "", "");
		*/
		addListContext("pointer-object", "pointers", "pointer");
		addListContext("dealloc-opt", "keyword-arguments", null);
		/*
		addListContext("allocate-coshape-spec", "", "");
		addListContext("bounds-spec", "", "");
		addListContext("bounds-remapping", "", "");
		*/
		addListContext("forall-triplet-spec", "index-variables", null);
		addListContext("case-value-range", "value-ranges", null);
		addListContext("association", "keyword-arguments", null);
		/*
		addListContext("sync-stat", "", "");
		addListContext("lock-stat", "", "");
		*/
		addListContext("connect-spec", "keyword-arguments", "keyword-argument");
		addListContext("close-spec", "keyword-arguments", "keyword-argument");
		addListContext("io-control-spec", "io-controls", null);
		addListContext("input-item", "inputs", null);
		addListContext("output-item", "outputs", null);
		/*
		addListContext("wait-spec", "", "");
		addListContext("position-spec", "", "");
		addListContext("flush-spec", "", "");
		addListContext("inquire-spec", "", "");
		*/
		addListContext("format-item", "format-items", null);
		/*
		addListContext("v", "", "");
		*/
		addListContext("rename", "rename", null);
		addListContext("only", "only", null);
		addListContext("proc-decl", "procedures", null);
		addListContext("actual-arg-spec", "arguments", "argument");
		addListContext("dummy-arg", "arguments", null);
	}
174 |
	/**
	 * Create the printer: parse printer-specific command-line arguments and initialize
	 * the XML document with the root "ofp" context.
	 *
	 * @param args command-line arguments
	 * @param parser the Fortran parser in use
	 * @param filename name of the parsed file
	 */
	public XMLPrinterBase(String[] args, IFortranParser parser, String filename) {
		super(args, parser, filename);

		// System.err.println(Arrays.toString(args));
		cmd = new XmlPrinterArgsParser().parse(args);
		verbosity = Integer.parseInt(cmd.getOptionValue("verbosity", "100"));
		if (verbosity >= 100) {
			setVerbose(true);
			setPrintKeywords(true);
		}

		try {
			DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
			DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
			doc = docBuilder.newDocument();
			root = contextOpen("ofp");
			setAttribute("version", "0.8.5-1");
			doc.appendChild(root);
		} catch (Exception error) {
			// failure to set up the XML document is fatal
			error.printStackTrace();
			System.exit(1);
		}
	}
198 |
199 | /**
200 | * Open a new XML context.
201 | *
202 | * @param name
203 | */
204 | protected Element contextOpen(String name) {
205 | Element newContext = doc.createElement(name);
206 | if (context != null)
207 | context.appendChild(newContext);
208 | context = newContext;
209 | return context;
210 | }
211 |
212 | protected ArrayList contextHierarchy(Element context) {
213 | ArrayList hierarchy = new ArrayList();
214 | hierarchy.add(context);
215 | Element found = context;
216 | while (found != root && found.getParentNode() != null) {
217 | found = (Element) found.getParentNode();
218 | hierarchy.add(found);
219 | }
220 | return hierarchy;
221 | }
222 |
223 | protected ArrayList contextHierarchy() {
224 | return contextHierarchy(context);
225 | }
226 |
227 | protected ArrayList contextNameHierarchy(Element context) {
228 | ArrayList names = new ArrayList();
229 | for (Element found : contextHierarchy(context))
230 | names.add(found.getTagName());
231 | return names;
232 | }
233 |
234 | protected ArrayList contextNameHierarchy() {
235 | return contextNameHierarchy(context);
236 | }
237 |
238 | /**
239 | * Try to find innermost open XML context with name equal to any of given names.
240 | *
241 | * @param names
242 | * @return found context of null
243 | */
244 | protected Element contextTryFind(String... names) {
245 | if (context == null)
246 | return null;
247 | Element found = context;
248 | List names_list = Arrays.asList(names);
249 | while (!names_list.contains(found.getTagName())) {
250 | if (found == root)
251 | return null;
252 | found = (Element) found.getParentNode();
253 | }
254 | return found;
255 | }
256 |
257 | protected Element contextFind(String... names) {
258 | if (context == null)
259 | throw new NullPointerException("No open contexts, so " + Arrays.toString(names) + " cannot be found.");
260 | Element found = contextTryFind(names);
261 | if (found != null)
262 | return found;
263 | System.err.println("Cannot find any context of " + Arrays.toString(names) + " among open contexts.");
264 | System.err.println("Current context hierarchy (innermost first) is:");
265 | for (String name : contextNameHierarchy())
266 | System.err.println(" " + name);
267 | cleanUpAfterError();
268 | return null;
269 | }
270 |
	/**
	 * Rename given context to given name.
	 *
	 * @param context the context to rename
	 * @param toName new tag name
	 */
	protected void contextRename(Element context, String toName) {
		doc.renameNode(context, null, toName);
	}

	/** Rename the current context to the given name. */
	protected void contextRename(String toName) {
		contextRename(context, context.getTagName(), toName);
	}
284 |
285 | /**
286 | * Rename given context to given name after making sure about its current name.
287 | *
288 | * @param context
289 | * @param fromName
290 | * @param toName
291 | */
292 | protected void contextRename(Element context, String fromName, String toName) {
293 | if (context.getTagName() != fromName)
294 | cleanUpAfterError("Cannot rename current context from '" + fromName + "' to '" + toName
295 | + "' because its name is '" + context.getTagName() + "'.");
296 | contextRename(context, toName);
297 | }
298 |
299 | protected void contextRename(String fromName, String toName) {
300 | contextRename(context, fromName, toName);
301 | }
302 |
303 | /**
304 | * Close the given XML context.
305 | *
306 | * @param name
307 | */
308 | protected void contextClose(Element context) {
309 | if (context == root) {
310 | // if (context == null)
311 | // System.err.println("Cannot close given context because 'null' was given.");
312 | // else
313 | cleanUpAfterError("Cannot close given context because it is root node of the document.");
314 | }
315 | this.context = (Element) context.getParentNode();
316 | }
317 |
318 | protected void contextClose() {
319 | contextClose(context);
320 | }
321 |
	/**
	 * Close an innermost open XML context with name equal to any of given names.
	 *
	 * @param names tag names to search for
	 */
	protected void contextClose(String... names) {
		contextClose(contextFind(names));
	}

	/**
	 * Close all inner open XML contexts (if any) that are in first innermost context with name equal to any of given
	 * names.
	 *
	 * @param names tag names to search for
	 */
	protected void contextCloseAllInner(String... names) {
		context = contextFind(names);
	}
340 |
341 | /**
342 | * Collection of attributes of given XML context.
343 | *
344 | * @param context the XML context to be queried
345 | * @return list of attributes
346 | */
347 | protected ArrayList contextAttributes(Element context) {
348 | NamedNodeMap attributesMap = context.getAttributes();
349 | ArrayList attributes = new ArrayList();
350 | for (int i = 0; i < attributesMap.getLength(); i++)
351 | attributes.add((Attr) attributesMap.item(i));
352 | return attributes;
353 | }
354 |
355 | protected ArrayList contextAttributes() {
356 | return contextAttributes(context);
357 | }
358 |
	/**
	 * Collection of children nodes of given XML context.
	 *
	 * @param context the XML context to be queried
	 * @param beginIndex index of the first returned node; counted from the end if a negative number is given
	 * @param count number of results to return; all remaining results are returned if zero is given
	 * @return list of nodes
	 */
	protected ArrayList contextNodes(Element context, int beginIndex, int count) {
		NodeList nodeList = context.getChildNodes();
		int nodeListLength = nodeList.getLength();
		ArrayList nodes = new ArrayList();
		// System.err.println("contextNodes of " + context + " " + beginIndex + " " + count);
		if (count == 0 && nodeListLength == 0)
			return nodes;
		// translate a negative index into an index counted from the end
		if (beginIndex < 0)
			beginIndex = nodeListLength + beginIndex;
		if (beginIndex < 0 || beginIndex >= nodeListLength)
			// throw new IndexOutOfBoundsException(
			cleanUpAfterError("starting index " + beginIndex + " out of bounds [" + 0 + ", " + nodeListLength + ")");
		// count == 0 means "all nodes from beginIndex to the end"
		if (count == 0)
			count = nodeListLength - beginIndex;
		if (count < 0)
			// NOTE(review): "attemted" typo below is in a runtime message, left unchanged here
			// throw new IndexOutOfBoundsException(
			cleanUpAfterError("attemted to return " + count + " number of nodes");
		int endIndex = beginIndex + count;
		/*
		System.err.println("returning " + count + " subnodes of " + context + " (from index " + beginIndex + " to "
				+ endIndex + ")");
		*/
		for (int i = beginIndex; i < endIndex; i++)
			nodes.add((Element) nodeList.item(i));
		return nodes;
	}
393 |
	/** All children nodes of the given context. */
	protected ArrayList contextNodes(Element context) {
		return contextNodes(context, 0, 0);
	}

	/** Selected children nodes of the current context. */
	protected ArrayList contextNodes(int beginIndex, int count) {
		return contextNodes(context, beginIndex, count);
	}

	/** All children nodes of the current context. */
	protected ArrayList contextNodes() {
		return contextNodes(context, 0, 0);
	}

	/** Number of children nodes of the given context. */
	protected int contextNodesCount(Element context) {
		return context.getChildNodes().getLength();
	}

	/** Number of children nodes of the current context. */
	protected int contextNodesCount() {
		return contextNodesCount(context);
	}

	/** Single child node of the given context at the given index (the index may be negative). */
	protected Element contextNode(Element context, int index) {
		return contextNodes(context, index, 1).get(0);
	}

	/** Single child node of the current context at the given index (the index may be negative). */
	protected Element contextNode(int index) {
		return contextNode(context, index);
	}
421 |
	/**
	 * One-string summary of the given context: its tag name, attributes and children tag names.
	 *
	 * @param context the context to describe; may be null
	 * @return description string
	 */
	protected String contextString(Element context) {
		if (context == null)
			return "context is null";
		ArrayList names = new ArrayList();
		for (Element node : contextNodes(context))
			names.add(node.getTagName());
		return "context: " + context.getTagName() + "\n" + "  attributes: " + contextAttributes(context) + "\n"
				+ "  sub-contexts: " + names;
	}
431 |
	/**
	 * Print a summary of the given context (tag name, attributes, children tag names) to stderr.
	 *
	 * @param context the context to print; may be null
	 */
	protected void contextPrint(Element context) {
		// System.err.println(contextString(context));
		if (context == null) {
			System.err.println("context is null");
			return;
		}
		System.err.println("context: " + context.getTagName());
		System.err.println("  attributes: " + contextAttributes(context));
		ArrayList names = new ArrayList();
		for (Element node : contextNodes(context))
			names.add(node.getTagName());
		System.err.println("  sub-contexts: " + names);
	}

	/**
	 * Get attribute of the given context by name.
	 *
	 * @param name attribute name
	 * @param context the context to query
	 * @return the attribute node, or null when absent
	 */
	protected Attr getAttribute(String name, Element context) {
		return (Attr) context.getAttributes().getNamedItem(name);
	}

	/** Get attribute of the current context by name. */
	protected Attr getAttribute(String name) {
		return getAttribute(name, context);
	}
453 |
	/**
	 * Set attribute for a given context.
	 *
	 * @param name attribute name
	 * @param value attribute value; null becomes an empty string, a Token contributes its text,
	 *            any other object is converted with toString()
	 * @param context the context to set the attribute on
	 */
	protected void setAttribute(String name, Object value, Element context) {
		String valueString = null;
		if (value == null)
			valueString = "";
		else if (value instanceof Token) {
			Token token = (Token) value;
			valueString = token.getText();
			if (verbosity >= 100) {
				// at full verbosity, the token's location extends the context's code bounds
				CodeBounds bounds = new CodeBounds(context);
				bounds.extend(token);
				bounds.persist(context);
			}
		} else
			valueString = value.toString();
		context.setAttribute(name, valueString);
	}

	/** Set attribute on the innermost open context matching any of the given names. */
	protected void setAttribute(String name, Object value, String... names) {
		setAttribute(name, value, contextFind(names));
	}

	/** Set attribute on the current context. */
	protected void setAttribute(String name, Object value) {
		setAttribute(name, value, context);
	}
485 |
	/**
	 * Return null if (line, col) not in this context, and when it cannot be determined if it is in it or not.
	 *
	 * Otherwise, return an innermost context which contains a given location.
	 */
	public Element findContext(Element context, int line, int col) {
		// depth-first: prefer an innermost containing sub-context over this one
		for (Element node : contextNodes(context)) {
			Element containingNode = findContext(node, line, col);
			if (containingNode == null)
				continue;
			return containingNode;
		}
		CodeBounds bounds = new CodeBounds(context);
		// without both bounds, containment cannot be determined
		if (bounds.begin == null || bounds.end == null)
			return null;
		if (line < bounds.begin.line || line > bounds.end.line)
			return null;
		if (line > bounds.begin.line && line < bounds.end.line)
			return context;
		// on a boundary line, the column decides
		if (line == bounds.begin.line)
			return col >= bounds.begin.col ? context : null;
		if (line == bounds.end.line)
			return col <= bounds.end.col ? context : null;
		throw new RuntimeException();
	}
511 |
	/**
	 * Find the child index within the given context at which a node at (line, col) should be placed.
	 *
	 * Returns the index of the first child that begins after the given location, or the child
	 * count when the location lies after all children.
	 */
	public int findPosition(Element context, int line, int col) {
		int index = -1;
		for (Element node : contextNodes(context)) {
			CodeBounds bounds = new CodeBounds(node);
			++index;
			// children without known bounds cannot anchor a position
			if (bounds.begin == null || bounds.end == null)
				continue;
			if (line < bounds.begin.line)
				return index;
			if (line > bounds.end.line)
				continue;
			// NOTE(review): past this point line is within [begin.line, end.line]; the column
			// comparisons below also run when line is strictly between the boundary lines -
			// confirm that falling through to the exception is intended in that case
			if (line == bounds.begin.line)
				if (col < bounds.begin.col)
					return index;
			if (col > bounds.end.col)
				continue;
			throw new RuntimeException("looking for (" + line + "," + col + ")" + " within bounds " + bounds + "\n"
					+ "of " + contextString(node) + "\n" + "subnode of " + contextString(context));
		}
		return contextNodesCount(context);
	}
533 |
	/**
	 * Propagate code bounds within a given context.
	 *
	 * Propagating code bounds means that code bounds of each node within given context are extended using bounds of all
	 * its subnodes. This is done recursively, depth-first.
	 *
	 * An exception to this rule are "file" nodes, the bounds of which are not propagated outside of them.
	 */
	protected void propagateBounds(Element context) {
		ArrayList nodes = contextNodes(context);
		for (Element node : nodes) {
			// first make the child's own bounds complete
			propagateBounds(node);
			// the root node carries no bounds
			if (context == root)
				continue;
			if (node.getNodeName().equals("file"))
				continue; // propagating bounds beyond node makes them inconsistent
			CodeBounds bounds = new CodeBounds(node);
			if (bounds.begin == null)
				continue;
			// extend this context's bounds by the child's bounds
			CodeBounds rootBounds = new CodeBounds(context);
			rootBounds.extend(bounds.begin);
			rootBounds.extend(bounds.end);
			rootBounds.persist(context);
		}
	}
559 |
	/**
	 * Move given element from its current context to a given target context.
	 *
	 * The element is inserted at target index - so that later contextNode(targetContext, targetIndex) will return the
	 * element.
	 *
	 * The index, as in contextNode() method, can be negative.
	 *
	 * Unlike contextNode() method the index can be also null, which appends the element at the end of target context.
	 *
	 * @param targetContext the context to move the element into
	 * @param targetIndex child index the element should end up at, or null to append
	 * @param element the element to move
	 */
	protected void moveTo(Element targetContext, Integer targetIndex, Element element) {
		if (targetContext == element)
			cleanUpAfterError("Cannot move " + element + " to itself.");
		try {
			element.getParentNode().removeChild(element);
			boolean insert = false;
			if (targetIndex != null)
				if (targetIndex < 0) {
					// the list grows by one on insertion, so shift the negative index;
					// -1 thus means "append at the end" (insert stays false)
					targetIndex += 1;
					if (targetIndex < 0)
						insert = true;
				} else
					insert = true;
			if (insert)
				targetContext.insertBefore(element, contextNode(targetContext, targetIndex));
			else
				targetContext.appendChild(element);
		} catch (org.w3c.dom.DOMException error) {
			System.err.println("Cannot move " + element + " to " + targetContext + ".");
			contextPrint(element);
			System.err.println(contextNameHierarchy(element));
			contextPrint(targetContext);
			System.err.println(contextNameHierarchy(targetContext));
			cleanUpAfterError(error);
		}
	}

	/** Append the given element at the end of the target context. */
	protected void moveTo(Element targetContext, Element element) {
		moveTo(targetContext, null, element);
	}

	/** Move the given element into the current context at the given index. */
	protected void moveHere(Integer targetIndex, Element element) {
		moveTo(context, targetIndex, element);
	}

	/** Append the given element at the end of the current context. */
	protected void moveHere(Element element) {
		moveTo(context, null, element);
	}
612 |
613 | protected void moveTo(Element targetContext, Integer targetIndex, ArrayList elements) {
614 | for (Element element : elements)
615 | moveTo(targetContext, targetIndex, element);
616 | }
617 |
618 | protected void moveTo(Element targetContext, ArrayList elements) {
619 | moveTo(targetContext, null, elements);
620 | }
621 |
622 | protected void moveTo(Integer targetIndex, ArrayList elements) {
623 | moveTo(context, targetIndex, elements);
624 | }
625 |
626 | protected void moveHere(ArrayList elements) {
627 | moveTo(context, null, elements);
628 | }
629 |
630 | protected void printRuleHeader(int rule, String name, String addendum) {
631 | if (addendum == "list-begin") {
632 | if (!listContexts.containsKey(name))
633 | LOG.info("list context not recognized: " + name);
634 | else
635 | contextOpen(listContexts.get(name));
636 | }
637 | contextOpen(name);
638 | setAttribute("rule", rule);
639 | if (addendum.length() > 0)
640 | setAttribute("addendum", addendum);
641 | }
642 |
643 | protected void printRuleTrailer() {
644 | Element innerContext = context;
645 | Attr addendum = getAttribute("addendum");
646 | contextClose();
647 | if (addendum != null) {
648 | if (addendum.getValue() == "list") {
649 | String name = innerContext.getTagName();
650 | if (listContexts.containsKey(name)) {
651 | contextCloseAllInner(listContexts.get(name));
652 | setAttribute("count", getAttribute("count", innerContext).getValue());
653 | moveHere(innerContext);
654 | contextClose();
655 | }
656 | }
657 | if (addendum.getValue() == "list-begin" || addendum.getValue() == "list") {
658 | // LOG.log(Level.FINE, "removing {0} from {1}", new Object[]{innerContext, context});
659 | if (verbosity < 100)
660 | innerContext.getParentNode().removeChild(innerContext);
661 | }
662 | }
663 | }
664 |
665 | protected void printParameter(Object param, String name) {
666 | setAttribute(name, param);
667 | }
668 |
669 | protected void printParameter(Token param, String name) {
670 | setAttribute(name, param);
671 | }
672 |
673 | protected void printTokens(Token... tokens) {
674 | for (Token token : tokens) {
675 | if (token == null) {
676 | System.err.println("token is null");
677 | continue;
678 | }
679 | int line = token.getLine();
680 | int colBegin = token.getCharPositionInLine();
681 | String text = token.getText();
682 | int colEnd = colBegin + text.length();
683 | System.err.println(filename + "@" + line + ":" + colBegin + "~" + colEnd + ": \"" + text + "\"");
684 | }
685 | /*
686 | try {
687 | TokensList tokens = new TokensList(new File(filename), false);
688 | System.err.println("found tokens: " + tokens);
689 | } catch (IOException e) {
690 | }
691 | */
692 | }
693 |
694 | /**
695 | * Insert raw tokens from current file into given context.
696 | */
697 | protected void insertTokens(Element context, int tokenType, String tokenContextName, String tokenTextAttributeName)
698 | throws IOException {
699 | // LOG.log(Level.INFO, "all tokens: {0}", new TokensList(new File(filename)));
700 | TokensList tokens = new TokensList(new File(filename), tokenType);
701 | // LOG.log(Level.INFO, "found tokens of type={0}: {1}", new Object[]{tokenType, tokens});
702 | insertTokens(context, tokens, tokenContextName, tokenTextAttributeName);
703 | }
704 |
705 | protected void insertTokens(Element context, ArrayList tokens, String tokenContextName,
706 | String tokenTextAttributeName) {
707 | for (Token token : tokens)
708 | insertToken(context, token, tokenContextName, tokenTextAttributeName);
709 | }
710 |
/**
 * Insert a single token into the XML tree at the position matching its source location.
 *
 * A new element named {@code tokenContextName} is created with the token text stored in
 * attribute {@code tokenTextAttributeName}, then re-attached at the element/index computed
 * by findTarget(), and ancestor bounds are refreshed.
 */
protected void insertToken(Element context, Token token, String tokenContextName, String tokenTextAttributeName) {
	// locate the element and child index corresponding to the token's code location
	TokenTarget target = findTarget(context, token);

	// create the token node (temporarily attached to the current context)
	Element tokenNode = contextOpen(tokenContextName);
	setAttribute(tokenTextAttributeName, token.getText());
	CodeBounds bounds = new CodeBounds(token);
	bounds.persist(tokenNode); // updateBounds(token);
	contextClose();

	// re-attach the node at the computed target location
	tokenNode.getParentNode().removeChild(tokenNode);
	if (target.index < contextNodesCount(target.element))
		target.element.insertBefore(tokenNode, contextNode(target.element, target.index));
	else if (target.index == contextNodesCount(target.element))
		target.element.appendChild(tokenNode);
	else
		throw new IllegalArgumentException("location within target is invalid");

	// the inserted node may extend its ancestors' bounds
	propagateBounds(target.element);
}
730 |
/**
 * Value holder describing where a token should be inserted: an element and a child index
 * within it.
 */
private class TokenTarget {

	public Element element;
	public int index;

	public TokenTarget(Element target, int targetIndex) {
		element = target;
		index = targetIndex;
	}

}
742 |
/**
 * Find the element and child index at which given token should be inserted, based on the
 * token's line and starting column.
 *
 * Falls back to the first child of the document root when no containing context exists
 * (i.e. the token lies before the first or after the last construct), then delegates final
 * adjustment to refineTarget().
 */
private TokenTarget findTarget(Element context, Token token) {
	int line = token.getLine();
	int col_begin = token.getCharPositionInLine();
	Element target = findContext(context, line, col_begin);
	/* debug-only
	int col_end = col_begin + comment.getText().length();
	Element targetAlt = findContext(context, line, col_end);
	*/
	if (target == null /*&& targetAlt == null*/) {
		target = contextNode(root, 0);
		// System.err.println("either in the beginning or at the end...");
		/* debug-only
		} else if (target != targetAlt) {
		contextPrint(target);
		contextPrint(targetAlt);
		throw new IllegalArgumentException();
		*/
	}
	int targetIndex = findPosition(target, line, col_begin);
	/* debug-only
	int targetIndexAlt = findPosition(target, line, col_end);
	if (targetIndex != targetIndexAlt) {
		System.err.println("should be at index " + targetIndex + " or " + targetIndexAlt);
		throw new IllegalArgumentException("two possible targets");
	}
	System.err.println("adjusting " + target.getNodeName() + "@" + targetIndex + "/" + contextNodesCount(target));
	*/
	return refineTarget(token, target, targetIndex);
}
772 |
/**
 * Adjust a preliminary token target.
 *
 * If the target element has no children, climb up the hierarchy until an element whose name
 * is in the token-locations whitelist is found (throwing when none is). Then prefer placing
 * the token inside an adjacent "body" element: at its end when the body precedes the target
 * index, at its beginning when it follows.
 */
private TokenTarget refineTarget(Token token, Element target, int targetIndex) {
	if (contextNodesCount(target) == 0) {
		/*
		System.err.println("target is empty");
		*/
		// climb up until a whitelisted element is found; the index becomes the position
		// just after the previous target within its parent
		ArrayList hierarchy = contextHierarchy(target);
		hierarchy.remove(0);
		for (Element parent : hierarchy) {
			ArrayList parentNodes = contextNodes(parent);
			int indexInParent = parentNodes.indexOf(target);
			target = parent;
			targetIndex = indexInParent + 1;
			if (XMLPrinterBase.tokenLocationsWhitelist.contains(target.getNodeName()))
				break;
		}
		if (!XMLPrinterBase.tokenLocationsWhitelist.contains(target.getNodeName()))
			throw new IllegalArgumentException(
					"didn't find good candidate to adjust token " + token + " location in hierarchy " + hierarchy);
	}
	boolean updated = false;
	// prefer the end of a "body" element directly before the target position
	if (targetIndex > 0) {
		Element beforeTarget = contextNode(target, targetIndex - 1);
		if (beforeTarget.getNodeName().equals("body")) {
			target = beforeTarget;
			targetIndex = contextNodesCount(beforeTarget);
			updated = true;
			/*
			System.err.println("beforeTarget: " + target.getNodeName() + "@" + targetIndex + "/" + contextNodesCount(target));
			*/
		}
		/*
		else
			System.err.println("before is " + beforeTarget.getNodeName());
		*/
	}
	// otherwise prefer the beginning of a "body" element directly at the target position
	if (!updated && targetIndex < contextNodesCount(target) - 1) {
		Element afterTarget = contextNode(target, targetIndex);
		if (afterTarget.getNodeName().equals("body")) {
			target = afterTarget;
			targetIndex = 0;
			updated = true;
			/*
			System.err.println("afterTarget: " + target.getNodeName() + "@" + targetIndex + "/" + contextNodesCount(target));
			*/
		}
		/*
		else
			System.err.println("after is " + afterTarget.getNodeName());
		*/
	}
	return new TokenTarget(target, targetIndex);
}
825 |
826 | public void persist() throws TransformerException {
827 | TransformerFactory transformerFactory = TransformerFactory.newInstance();
828 | Transformer transformer = transformerFactory.newTransformer();
829 | transformer.setOutputProperty(OutputKeys.INDENT, "yes");
830 | transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
831 | DOMSource source = new DOMSource(doc);
832 | StreamResult result;
833 | if (cmd.hasOption("output"))
834 | result = new StreamResult(new File(cmd.getOptionValue("output")));
835 | else
836 | result = new StreamResult(System.out);
837 | transformer.transform(source, result);
838 | }
839 |
840 | public void cleanUpAfterError(String comment, Exception error) {
841 | if (comment != null)
842 | System.err.println(comment);
843 | new RuntimeException("Aborting construction of the AST.", error).printStackTrace();
844 | cleanUp();
845 | System.exit(1);
846 | }
847 |
848 | public void cleanUpAfterError(String comment) {
849 | cleanUpAfterError(comment, null);
850 | }
851 |
852 | public void cleanUpAfterError(Exception error) {
853 | cleanUpAfterError(null, error);
854 | }
855 |
856 | public void cleanUpAfterError() {
857 | cleanUpAfterError(null, null);
858 | }
859 |
/**
 * Finalize the XML tree and write it out.
 *
 * Closes all still-open contexts down to the root; at maximum verbosity also re-inserts
 * comments and preprocessor lines (which are not emitted as parser rules) at their proper
 * source locations. Exits the JVM when token insertion or serialization fails.
 */
public void cleanUp() {
	while (context != root)
		contextClose(context);
	if (verbosity >= 100) {
		// bounds must be up to date so token locations can be matched against nodes
		propagateBounds(context);
		try {
			insertTokens(context, FortranLexer.LINE_COMMENT, "comment", "text");
			insertTokens(context, FortranLexer.PREPROCESS_LINE, "directive", "text");
		} catch (IOException error) {
			error.printStackTrace();
			System.exit(1);
		}
		// inserted tokens may have extended bounds of their parents
		propagateBounds(context);
	}
	try {
		persist();
	} catch (Exception error) {
		error.printStackTrace();
		System.exit(1);
	}
}
881 |
882 | }
883 |
--------------------------------------------------------------------------------
/src/fortran/ofp/XmlPrinterArgsParser.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp;
2 |
3 | import org.apache.commons.cli.CommandLine;
4 | import org.apache.commons.cli.DefaultParser;
5 | import org.apache.commons.cli.HelpFormatter;
6 | import org.apache.commons.cli.Option;
7 | import org.apache.commons.cli.Options;
8 | import org.apache.commons.cli.ParseException;
9 |
10 | public class XmlPrinterArgsParser {
11 |
12 | private Options options;
13 |
14 | public XmlPrinterArgsParser() {
15 | options = new Options();
16 |
17 | Option output = new Option(null, "output", true, "output file path, print to System.out if not provided");
18 | output.setRequired(false);
19 | options.addOption(output);
20 |
21 | Option verbosity = new Option(null, "verbosity", true, "verbosity level, assume max if not provided");
22 | options.addOption(verbosity);
23 | }
24 |
25 | public CommandLine parse(String... args) {
26 | DefaultParser parser = new DefaultParser();
27 | CommandLine cmd = null;
28 | try {
29 | cmd = parser.parse(options, args);
30 | } catch (ParseException e) {
31 | System.err.println(e.getMessage());
32 | HelpFormatter formatter = new HelpFormatter();
33 | formatter.printHelp("fortran.ofp.FrontEnd --class fortran.ofp.XMLPrinter",
34 | "XML output generator for Open Fortran Parser", options,
35 | "Copyright 2017 Apache License 2.0 Mateusz Bysiek https://mbdevpl.github.io/", true);
36 | }
37 |
38 | return cmd;
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/src/fortran/ofp/parser/java/CodeBounds.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp.parser.java;
2 |
3 | import org.antlr.runtime.Token;
4 | import org.w3c.dom.Element;
5 |
6 | public class CodeBounds {
7 |
8 | protected static String Y_MIN = "line_begin";
9 | protected static String X_MIN = "col_begin";
10 | protected static String Y_MAX = "line_end";
11 | protected static String X_MAX = "col_end";
12 |
13 | public CodeLocation begin;
14 | public CodeLocation end;
15 |
16 | public CodeBounds() {
17 | begin = null;
18 | end = null;
19 | }
20 |
21 | public CodeBounds(int beginLine, int beginCol, int endLine, int endCol) {
22 | begin = new CodeLocation(beginLine, beginCol);
23 | end = new CodeLocation(endLine, endCol);
24 | }
25 |
26 | public CodeBounds(Element context) {
27 | Integer lineBegin = context.hasAttribute(Y_MIN) ? Integer.valueOf(context.getAttribute(Y_MIN)) : null;
28 | Integer colBegin = context.hasAttribute(X_MIN) ? Integer.valueOf(context.getAttribute(X_MIN)) : null;
29 | if (lineBegin == null && colBegin == null)
30 | begin = null;
31 | else if (lineBegin == null || colBegin == null)
32 | throw new IllegalArgumentException("the implementation of this method is all-or-nothing");
33 | else
34 | begin = new CodeLocation(lineBegin, colBegin);
35 | Integer lineEnd = context.hasAttribute(Y_MAX) ? Integer.valueOf(context.getAttribute(Y_MAX)) : null;
36 | Integer colEnd = context.hasAttribute(X_MAX) ? Integer.valueOf(context.getAttribute(X_MAX)) : null;
37 | if (lineEnd == null && colEnd == null)
38 | begin = null;
39 | else if (lineEnd == null || colEnd == null)
40 | throw new IllegalArgumentException("the implementation of this method is all-or-nothing");
41 | else
42 | end = new CodeLocation(lineEnd, colEnd);
43 | }
44 |
45 | public CodeBounds(Token token) {
46 | Integer line = token.getLine();
47 | Integer colBegin = token.getCharPositionInLine();
48 | Integer colEnd = colBegin + token.getText().length();
49 | begin = new CodeLocation(line, colBegin);
50 | end = new CodeLocation(line, colEnd);
51 | }
52 |
53 | public void extend(CodeLocation loc) {
54 | if (loc == null)
55 | throw new IllegalArgumentException("the implementation of this method is all-or-nothing");
56 |
57 | if (begin == null && end == null) {
58 | begin = new CodeLocation(loc.line, loc.col);
59 | end = new CodeLocation(loc.line, loc.col);
60 | return;
61 | } else if (begin == null || end == null)
62 | throw new IllegalArgumentException("the implementation of this method is all-or-nothing");
63 |
64 | boolean updateLineBegin = loc.line < begin.line;
65 | boolean updateColBegin = updateLineBegin || loc.line == begin.line && loc.col < begin.col;
66 | boolean updateLineEnd = loc.line > end.line;
67 | boolean updateColEnd = updateLineEnd || loc.line == end.line && loc.col > end.col;
68 |
69 | if (updateLineBegin)
70 | begin.line = loc.line;
71 | if (updateColBegin)
72 | begin.col = loc.col;
73 | if (updateLineEnd)
74 | end.line = loc.line;
75 | if (updateColEnd)
76 | end.col = loc.col;
77 | }
78 |
79 | public void extend(Token token) {
80 | CodeBounds bounds = new CodeBounds(token);
81 | extend(bounds.begin);
82 | extend(bounds.end);
83 | }
84 |
85 | public void persist(Element context) {
86 | context.setAttribute(Y_MIN, Integer.toString(begin.line));
87 | context.setAttribute(X_MIN, Integer.toString(begin.col));
88 | context.setAttribute(Y_MAX, Integer.toString(end.line));
89 | context.setAttribute(X_MAX, Integer.toString(end.col));
90 | }
91 |
92 | public String toString() {
93 | return begin + "~" + end;
94 | }
95 |
96 | }
97 |
--------------------------------------------------------------------------------
/src/fortran/ofp/parser/java/CodeLocation.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp.parser.java;
2 |
/**
 * A single point in source code, identified by a line number and a column number.
 */
public class CodeLocation {

	public int line;
	public int col;

	public CodeLocation(int line, int col) {
		this.line = line;
		this.col = col;
	}

	/** Format as {@code "@line:col"}. */
	public String toString() {
		return "@" + this.line + ":" + this.col;
	}

}
18 |
--------------------------------------------------------------------------------
/src/fortran/ofp/parser/java/TokensList.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp.parser.java;
2 |
3 | import java.io.File;
4 | import java.io.IOException;
5 | import java.util.ArrayList;
6 | import java.util.logging.Level;
7 | import java.util.logging.Logger;
8 |
9 | import org.antlr.runtime.Token;
10 |
11 | public class TokensList extends ArrayList {
12 |
13 | private static final Logger LOG = Logger.getLogger(TokensList.class.getName());
14 |
15 | private static final long serialVersionUID = -8037754729217056476L;
16 |
17 | public TokensList(File file) throws IOException {
18 | addAll(file, null);
19 | }
20 |
21 | public TokensList(File file, int onlyOfType) throws IOException {
22 | addAll(file, onlyOfType);
23 | }
24 |
25 | public void addAll(File file) throws IOException {
26 | addAll(file, null);
27 | }
28 |
29 | public void addAll(File file, Integer onlyOfType) throws IOException {
30 | FortranStream stream = new FortranStream(file.getName(), file.getAbsolutePath(), null);
31 | FortranLexer lexer = new FortranLexer(stream);
32 | lexer.setIncludeDirs(new ArrayList());
33 |
34 | Token token = lexer.nextToken();
35 | while (token.getType() != FortranLexer.EOF) {
36 | if (onlyOfType == null || token.getType() == ((int) onlyOfType))
37 | add(token);
38 | // else LOG.log(Level.INFO, "skipping token of type {0}", token.getType());
39 | token = lexer.nextToken();
40 | }
41 | }
42 |
43 | }
44 |
--------------------------------------------------------------------------------
/test/.gitignore:
--------------------------------------------------------------------------------
1 | /results
2 |
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
"""Initialization of tests of open_fortran_parser package."""

import os
import pathlib

from open_fortran_parser.config import JAVA as java_config


# Point the test JVM at the repository's logging configuration file.
if java_config['options'] is None:
    java_config['options'] = []
java_config['options'].append('-Djava.util.logging.config.file=logging.properties')

# Create the directory for test result logs up front.
os.makedirs(str(pathlib.Path('test', 'results', 'logs')), exist_ok=True)

# When TEST_COVERAGE is set, attach the JaCoCo coverage agent to the JVM,
# but only if the agent jar is actually present.
if os.environ.get('TEST_COVERAGE'):
    JACOCO_PATH = pathlib.Path('lib', 'org.jacoco.agent-0.8.3-runtime.jar').resolve()
    # NOTE(review): presumably excluded because this class is generated -- confirm
    JACOCO_EXCLUDES = ('fortran.ofp.parser.java.FortranParser2008_FortranParserBase',)
    if JACOCO_PATH.is_file():
        java_config['options'].append(
            '-javaagent:{}=excludes={}'.format(str(JACOCO_PATH), ':'.join(JACOCO_EXCLUDES)))
21 |
--------------------------------------------------------------------------------
/test/examples/arithmetic_kind.f90:
--------------------------------------------------------------------------------
1 | subroutine test_arithmetic(var1, var2, var3)
2 |
3 | use mydatatypes, only: float64
4 |
5 | implicit none
6 |
7 | var1 = 100._float64 * var2
8 |
9 | if (var2 >= 0.5_float64 + var3) then
10 | var2 = var3 * (1._float64 - var2)
11 | end if
12 |
13 | end subroutine test_arithmetic
14 |
--------------------------------------------------------------------------------
/test/examples/comments.f:
--------------------------------------------------------------------------------
1 |
2 | C comment 1
3 | C comment 2
4 | program comments
5 | C comment 3
6 | C comment 4
7 | if (.true.) then
8 | C comment 5
9 | C comment 6
10 | continue
11 | C comment 7
12 | C comment 8
13 | end if
14 | C comment 9
15 | C comment 10
16 | end
17 | C comment 11
18 | C comment 12
19 |
--------------------------------------------------------------------------------
/test/examples/comments.f90:
--------------------------------------------------------------------------------
1 |
2 | ! comment 1
3 | ! comment 2
4 | program comments
5 | ! comment 3
6 | ! comment 4
7 | if (.true.) then
8 | ! comment 5
9 | ! comment 6
10 | continue
11 | ! comment 7
12 | ! comment 8
13 | end if ! comment 9
14 | ! comment 10
15 | end
16 | ! comment 11
17 | ! comment 12
18 |
--------------------------------------------------------------------------------
/test/examples/deepvar.f90:
--------------------------------------------------------------------------------
1 | SUBROUTINE TEST_ASSOC(VAR1, VAR2)
2 |
3 | IMPLICIT NONE
4 |
5 | TYPE(SOME_TYPE) , INTENT (INOUT) :: VAR1
6 | TYPE(SOME_OTHER_TYPE), INTENT (INOUT) :: VAR2
7 |
8 | ASSOCIATE(V1=>VAR1%FIELD1, V2=>VAR2%FIELD2(42)%FIELD3)
9 |
10 | IF (VAR2%FIELD4%BOOL1 == 1) THEN
11 | V1(:) = V2(:)
12 | END IF
13 |
14 | END ASSOCIATE
15 |
16 | END SUBROUTINE TEST_ASSOC
17 |
--------------------------------------------------------------------------------
/test/examples/empty.f:
--------------------------------------------------------------------------------
1 |
2 | ! minimalistic Fortran program
3 |
4 | program empty
5 |
6 | implicit none
7 |
8 | end program empty
9 |
--------------------------------------------------------------------------------
/test/examples/problematic_write.f90:
--------------------------------------------------------------------------------
1 |
2 | PROGRAM problematic_if
3 |
4 | ! IF (printlev_loc >=2) WRITE(numout,*),'tau_outflow, coeff_rel = ', tau_outflow, coeff_rel
5 | IF (printlev_loc >=2) WRITE(numout,*) 'tau_outflow, coeff_rel = ', tau_outflow, coeff_rel
6 |
7 | END
8 |
--------------------------------------------------------------------------------
/test/examples/simple_if.f90:
--------------------------------------------------------------------------------
1 |
2 | PROGRAM simple_if
3 |
4 | IF (map_pft_format .AND. .NOT. impveg) THEN
5 | CONTINUE
6 | ENDIF
7 |
8 | END
9 |
--------------------------------------------------------------------------------
/test/examples/strings.f90:
--------------------------------------------------------------------------------
1 | program strings
2 |
3 | implicit none
4 |
5 | character(*) :: mystring01a
6 | character(16) :: mystring01b
7 | character(len=*) :: mystring02a
8 | character(len=16) :: mystring02b
9 | character(len=*, kind=c_char) :: mystring03a
10 | character(len=16, kind=c_char) :: mystring03b
11 | character :: mystring04a(*)
12 | character :: mystring04b(16)
13 | character(kind=c_char) :: mystring05a(*)
14 | character(kind=c_char) :: mystring05b(16)
15 | character(kind=c_char, len=*) :: mystring06a
16 | character(kind=c_char, len=16) :: mystring06b
17 |
18 | end program strings
19 |
--------------------------------------------------------------------------------
/test/examples_large/ORCHIDEE_grid.f90:
--------------------------------------------------------------------------------
1 |
2 | !! This module define variables for the grid to gathered points.
3 | !!
4 | !! @call sechiba_main
5 | !! @Version : $Revision: 4357 $, $Date: 2017-05-19 10:02:22 +0200 (ven. 19 mai 2017) $
6 | !!
7 | !< $HeadURL: svn://forge.ipsl.jussieu.fr/orchidee/branches/ORCHIDEE-MICT/ORCHIDEE/src_global/grid.f90 $
8 | !< $Date: 2017-05-19 10:02:22 +0200 (ven. 19 mai 2017) $
9 | !< $Revision: 4357 $
10 | !!
11 | !! @author Marie-Alice Foujols, Jan Polcher and Martial Mancip
12 | !!
13 | !! This module archives and makes available for all ORCHIDEE routine the information on the grid
14 | !! being used. 3 types of grids are foreseen :
15 | !! - Regular longitude latitude grid : This is the default and mostly used for global applications.
16 | !! - Regular X/Y grid : this is a typical grid for regional models and requires a projection method
17 | !! to go from X/y to lon/lat.
18 | !! - unstructures grid : This is a general grid where each cell is a polygone. It prepares ORCHIDEE
19 | !! for DYNAMICO.
20 | !!
21 | !! The subroutines have the following role :
22 | !! grid_init : this routine will provide the dimensions needed to allocate the memory and the
23 | !! characteristics of the grid.
24 | !!
25 | !! grid_stuff : This subroutine provides the grid details for all land points. Obviously depending
26 | !! on the grid type different level of information need to be provided.
27 | !!
28 | !f90doc MODULEgrid
29 | MODULE grid
30 |
31 | USE grid_var
32 | USE defprec
33 | USE constantes
34 | USE mod_orchidee_para
35 |
36 | USE haversine
37 | USE module_llxy
38 |
39 | USE ioipsl
40 | USE netcdf
41 |
42 | IMPLICIT NONE
43 | !
44 | !=================================================================================
45 | !
46 | ! Horizontal grid information
47 | !
48 | !=================================================================================
49 |
50 | ! Global map or not.
51 | ! There is little chance that if iim <=2 and jjm <= 2 that we have global grid.
52 | ! Furthermore using the second line allows to avoid pole problems for global grids
53 | LOGICAL, SAVE :: global = .TRUE.
54 | !$OMP THREADPRIVATE(global)
55 |
56 | ! PARAMETERS
57 | ! default resolution (m)
58 | REAL(r_std), PARAMETER :: default_resolution = 250000.
59 | !
60 | ! VARIABLES
61 | !
62 | !-
63 | !- Variable to help describe the grid
64 | !- once the points are gathered.
65 | !-
66 | !! Limits of the domain
67 | REAL(r_std), SAVE :: limit_west, limit_east, &
68 | & limit_north, limit_south
69 | !$OMP THREADPRIVATE(limit_west, limit_east, limit_north, limit_south)
70 | !-
71 | !! Geographical coordinates
72 | REAL(r_std), ALLOCATABLE, DIMENSION (:,:), SAVE :: lalo
73 | !$OMP THREADPRIVATE(lalo)
74 | !! index of land points
75 | INTEGER, ALLOCATABLE, DIMENSION (:), SAVE :: ilandindex,jlandindex
76 | !$OMP THREADPRIVATE(ilandindex, jlandindex)
77 | !-
78 | !! Fraction of continents.
79 | REAL(r_std), ALLOCATABLE, DIMENSION (:), SAVE :: contfrac
80 | !$OMP THREADPRIVATE(contfrac)
81 | !
82 | ! indices of the NbNeighb neighbours of each grid point
83 | ! (1=Northern most vertex and then in clockwise order)
84 | ! Zero or negative index means that this neighbour is not a land point
85 | INTEGER(i_std), ALLOCATABLE, DIMENSION (:,:), SAVE :: neighbours
86 | !$OMP THREADPRIVATE(neighbours)
87 | !
88 | ! Heading of the direction out of the grid box either through the vertex
89 | ! of the mid-segment of the polygon.
90 | !
91 | REAL(r_std), ALLOCATABLE, DIMENSION(:,:), SAVE :: headings
92 | !$OMP THREADPRIVATE(headings)
93 | !
94 | ! Length of segments of the polygon.
95 | !
96 | REAL(r_std), ALLOCATABLE, DIMENSION(:,:), SAVE :: seglength
97 | !$OMP THREADPRIVATE(seglength)
98 | !
99 | ! Area of the grid box
100 | !
101 | REAL(r_std), ALLOCATABLE, DIMENSION(:), SAVE :: area
102 | !$OMP THREADPRIVATE(area)
103 | !
104 | ! Coordinats of the vertices
105 | !
106 | REAL(r_std), ALLOCATABLE, DIMENSION(:,:,:), SAVE :: corners
107 | !$OMP THREADPRIVATE(corners)
108 | !
109 | ! Resolution remains a temporary variable until the merge of the
110 | ! re-interfacing of the interpolation by Lluis. One this is done
111 | ! Resolution will be replaced in the model either by area or seglength.
112 | !
113 | REAL(r_std), ALLOCATABLE, DIMENSION (:,:), SAVE :: resolution
114 | !$OMP THREADPRIVATE(resolution)
115 | !
116 | !
117 | !
118 | ! Get the direction of the grid
119 | !
120 | CHARACTER(LEN=2), DIMENSION(2), SAVE, PRIVATE :: grid_dir
121 | !$OMP THREADPRIVATE(grid_dir)
122 | !
123 | INTEGER(i_std), PARAMETER :: MAX_DOMAINS=1
124 | !
125 | type (proj_info), SAVE, dimension(1:MAX_DOMAINS) :: proj_stack
126 | !
127 | real(r_std), SAVE, ALLOCATABLE, DIMENSION(:,:) :: dxwrf, dywrf
128 | !
129 | !
130 | !=================================================================================
131 | !
132 | ! Calendar information
133 | !
134 | !=================================================================================
135 | !
136 | ! The calendar
137 | CHARACTER(LEN=20), SAVE :: calendar_str
138 | !$OMP THREADPRIVATE(calendar_str)
139 | !
140 | ! The date
141 | REAL(r_std), SAVE :: in_julian
142 | !$OMP THREADPRIVATE(in_julian)
143 | ! Diff with day 0
144 | REAL(r_std), SAVE :: julian_diff
145 | !$OMP THREADPRIVATE(julian_diff)
146 | !
147 | INTEGER(i_std), SAVE :: year, month, day
148 | !$OMP THREADPRIVATE(year, month, day)
149 | REAL(r_std), SAVE :: sec
150 | !$OMP THREADPRIVATE(sec)
151 | !
152 | ! month_len (d)
153 | INTEGER(i_std), SAVE :: month_len
154 | !$OMP THREADPRIVATE(month_len)
155 | !
156 | ! year length (d)
157 | INTEGER(i_std), SAVE :: year_length=0
158 | !$OMP THREADPRIVATE(year_length)
159 | !
160 | ! Ration between calendar year in days (ie 360d or 365d ...) to gregorian year length
161 | REAL(r_std), SAVE :: year_spread
162 | !$OMP THREADPRIVATE(year_spread)
163 | !
164 | !
165 | INTERFACE grid_tolola
166 | MODULE PROCEDURE grid_tolola_scal, grid_tolola_1d, grid_tolola_2d
167 | END INTERFACE grid_tolola
168 |
169 | INTERFACE grid_toij
170 | MODULE PROCEDURE grid_toij_scal, grid_toij_1d, grid_toij_2d
171 | END INTERFACE grid_toij
172 | !
173 | CONTAINS
174 | !
175 | !f90doc CONTAINS
176 | !
177 | !
178 | !! =============================================================================================================================
179 | !! SUBROUTINE: grid_init
180 | !!
181 | !>\BRIEF Initialization of grid description distributed by this module to the rest of the model.
182 | !!
183 | !! DESCRIPTION: Routine which provides the dimension of the grid (number of land points) as well as the
184 | !! grid characteristics (type and name) so that the memory can be allocated.
185 | !!
186 | !! This subroutine is called by intersurf_main_2d or any driver of the model.
187 | !!
188 | !! \n
189 | !_ ==============================================================================================================================
190 | !!
191 | SUBROUTINE grid_init ( npts, nbseg, gtype, gname, isglobal )
192 | !
193 | ! 0 interface
194 | !
195 | IMPLICIT NONE
196 | !
197 | ! 0.1 input !
198 | !
199 | ! Domain size
200 | INTEGER(i_std), INTENT(in) :: npts !! Number of local continental points
201 | INTEGER(i_std), INTENT(in) :: nbseg !! number of segments of the polygone of the mesh
202 | CHARACTER(LEN=*), INTENT(in) :: gtype !! Type of grid
203 | CHARACTER(LEN=*), INTENT(in) :: gname !! Name of the grid
204 | LOGICAL, OPTIONAL :: isglobal !! .TRUE. if the grid covers the whole globe (default depends on grid type)
205 | !
206 | ! 0.2 Local variables
207 | !
208 | CHARACTER(LEN=20) :: gtype_lower
209 | !
210 | ! Verify the information passed and save it in the global variables of the model.
211 | !
212 | gtype_lower = gtype
213 | CALL strlowercase(gtype_lower)
214 |
215 | IF ( INDEX(gtype_lower, "reglonlat") > 0) THEN
216 | IF ( nbseg /= 4 ) THEN
217 | CALL ipslerr(3, "grid_init", "This regular Lon/lat grid should have 4 segments", &
218 | & "per horizontal grid box","")
219 | ELSE
220 | NbSegments=4
221 | ENDIF
222 | GridType="RegLonLat"
223 | GridName=gname ! Bug fix: was "GridName=gridname", a case-insensitive self-assignment which ignored the gname argument
224 | IF ( PRESENT(isglobal) ) THEN
225 | global = isglobal
226 | ELSE
227 | global = .TRUE.
228 | ENDIF
229 | ELSE IF ( INDEX(gtype_lower, "regxy") > 0) THEN
230 | IF ( nbseg /= 4 ) THEN
231 | CALL ipslerr(3, "grid_init", "This regular X/Y grid should have 4 segments", &
232 | & "per horizontal grid box","")
233 | ELSE
234 | NbSegments=4
235 | ENDIF
236 | GridType="RegXY"
237 | GridName=gname ! Bug fix: was "GridName=gridname" (self-assignment), see above
238 | IF ( PRESENT(isglobal) ) THEN
239 | global = isglobal
240 | ELSE
241 | global = .FALSE.
242 | ENDIF
243 | ELSE IF ( INDEX(gtype_lower, "unstruct") > 0) THEN
244 | NbSegments=nbseg
245 | GridType="UnStruct"
246 | GridName=gname ! Bug fix: was "GridName=gridname" (self-assignment), see above
247 | IF ( PRESENT(isglobal) ) THEN
248 | global = isglobal
249 | ELSE
250 | global = .TRUE.
251 | ENDIF
252 | ELSE
253 | CALL ipslerr(3, "grid_init", "unrecognized grid type.",&
254 | & "It has to be either reglatlon, regxy or unstruct","")
255 | ENDIF
256 | !
257 | ! Create the internal coordinate table
258 | !
259 | IF ( (.NOT.ALLOCATED(lalo))) THEN
260 | ALLOCATE(lalo(npts,2))
261 | lalo(:,:) = val_exp
262 | ENDIF
263 | !-
264 | !- Store variable to help describe the grid
265 | !- once the points are gathered.
266 | !-
267 | NbNeighb=2*NbSegments
268 | IF ( (.NOT.ALLOCATED(neighbours))) THEN
269 | ALLOCATE(neighbours(npts,NbNeighb))
270 | neighbours(:,:) = -999999
271 | ENDIF
272 | IF ( (.NOT.ALLOCATED(headings))) THEN
273 | ALLOCATE(headings(npts,NbNeighb))
274 | headings(:,:) = val_exp
275 | ENDIF
276 | IF ( (.NOT.ALLOCATED(seglength))) THEN
277 | ALLOCATE(seglength(npts,NbSegments))
278 | seglength(:,:) = val_exp
279 | ENDIF
280 | IF ( (.NOT.ALLOCATED(corners))) THEN
281 | ALLOCATE(corners(npts,NbSegments,2))
282 | corners(:,:,:) = val_exp
283 | ENDIF
284 | IF ( (.NOT.ALLOCATED(area))) THEN
285 | ALLOCATE(area(npts))
286 | area(:) = val_exp
287 | ENDIF
288 | !
289 | ! TEMPORARY
290 | !
291 | IF ( (.NOT.ALLOCATED(resolution))) THEN
292 | ALLOCATE(resolution(npts,2))
293 | resolution(:,:) = val_exp
294 | ENDIF
295 | !
296 | !- Store the fraction of the continents only once so that the user
297 | !- does not change them afterwards.
298 | !
299 | IF ( (.NOT.ALLOCATED(contfrac))) THEN
300 | ALLOCATE(contfrac(npts))
301 | contfrac(:) = val_exp
302 | ENDIF
303 | !
304 | ! Allocation of index coordinates ...
305 | ! JP : these are global fields and should perhaps be allocated somewhere else.
306 | IF (.NOT. ALLOCATED(ilandindex)) THEN
307 | ALLOCATE(ilandindex(nbp_glo),jlandindex(nbp_glo))
308 | ilandindex(:) = -10000000
309 | jlandindex(:) = -10000000
310 | ENDIF
311 | !
312 | END SUBROUTINE grid_init
313 | !!
314 | !!
315 | !! =============================================================================================================================
316 | !! FUNCTION grid_set
317 | !!
318 | !>\BRIEF subroutine to set global grid parameters present on all procs
319 | !!
320 | !! DESCRIPTION:
321 | !!
322 | !!
323 | !!
324 | !!
325 | !! \n
326 | !_ ==============================================================================================================================
327 | !!
328 | SUBROUTINE grid_set_glo(arg_nbp_lon,arg_nbp_lat,arg_nbp_glo) ! Store the global grid dimensions in module variables (called on all procs)
329 | IMPLICIT NONE
330 |
331 | INTEGER(i_std), INTENT(IN) :: arg_nbp_lon !! Global grid size in longitude
332 | INTEGER(i_std), INTENT(IN) :: arg_nbp_lat !! Global grid size in latitude
333 | INTEGER(i_std), INTENT(IN),OPTIONAL :: arg_nbp_glo !! Global number of land points (left unchanged when absent)
334 | iim_g=arg_nbp_lon
335 | jjm_g=arg_nbp_lat
336 | IF (PRESENT(arg_nbp_glo)) nbp_glo=arg_nbp_glo
337 | END SUBROUTINE grid_set_glo
338 | !! =============================================================================================================================
339 | !! FUNCTION grid_set/allocate_glo
340 | !!
341 | !>\BRIEF subroutines to allocate variables present on all procs
342 | !!
343 | !! DESCRIPTION:
344 | !!
345 | !!
346 | !!
347 | !!
348 | !! \n
349 | !_ ==============================================================================================================================
350 | !!
351 | SUBROUTINE grid_allocate_glo(nbseg)
352 | ! Allocate the module-level arrays describing the global grid (root-process view).
353 | IMPLICIT NONE
354 | ! 0.1 input !
355 | !
356 | ! Domain size
357 | INTEGER(i_std), INTENT(in) :: nbseg !! number of segments of the polygone of the mesh
358 | !
359 | ! In case the allocation of the grid is called before the initialisation,
360 | ! we already set the number of segments.
361 | ! This will be done properly in grid_init.
362 | !
363 | IF ( NbSegments < 3 ) THEN
364 | NbSegments = nbseg
365 | NbNeighb=2*NbSegments
366 | ENDIF
367 | !
368 | ! NOTE(review): no ALLOCATED() guards here (unlike grid_init); calling this twice would fail — confirm callers allocate only once.
369 | ALLOCATE(neighbours_g(nbp_glo,NbNeighb))
370 | ALLOCATE(headings_g(nbp_glo,NbNeighb))
371 | ALLOCATE(seglength_g(nbp_glo,NbSegments))
372 | ALLOCATE(corners_g(nbp_glo,NbSegments,2))
373 | ALLOCATE(area_g(nbp_glo))
374 | !
375 | ! TEMPORARY
376 | !
377 | ALLOCATE(resolution_g(nbp_glo,2))
378 | !
379 | ! Allocate other variables
380 | !
381 | ALLOCATE(lalo_g(nbp_glo,2), contfrac_g(nbp_glo),index_g(nbp_glo))
382 | ALLOCATE(lon_g(iim_g, jjm_g), lat_g(iim_g, jjm_g), zlev_g(iim_g, jjm_g))
383 | !
384 | END SUBROUTINE grid_allocate_glo
385 | !!
386 | !! =============================================================================================================================
387 | !! SUBROUTINE: grid_stuff
388 | !!
389 | !>\BRIEF transfers the global horizontal grid information to ORCHIDEE in the case of grid regular in Longitude
390 | !! and Latitude.
391 | !!
392 | !! DESCRIPTION:
393 | !!
394 | !!
395 | !! This subroutine is called by intersurf_main_2d or any driver of the model.
396 | !!
397 | !! \n
398 | !_ ==============================================================================================================================
399 | !!
400 | SUBROUTINE grid_stuff (npts_glo, iim, jjm, grid_lon, grid_lat, kindex, contfrac_tmp)
401 | !
402 | ! 0 interface
403 | !
404 | IMPLICIT NONE
405 | !
406 | ! 0.1 input !
407 |
408 | ! Domain size
409 | INTEGER(i_std), INTENT(in) :: npts_glo
410 | ! Size of cartesian grid
411 | INTEGER(i_std), INTENT(in) :: iim, jjm
412 | ! Longitudes on cartesian grid
413 | REAL(r_std), DIMENSION(iim,jjm), INTENT(in) :: grid_lon
414 | ! Latitudes on cartesian grid
415 | REAL(r_std), DIMENSION(iim,jjm), INTENT(in) :: grid_lat
416 | ! Index of land point on 2D map (in local position)
417 | INTEGER(i_std), DIMENSION(npts_glo), INTENT(in) :: kindex
418 | ! The fraction of continent in the grid box [0-1]
419 | REAL(r_std), DIMENSION(npts_glo), OPTIONAL, INTENT(in) :: contfrac_tmp
420 | !
421 | !
422 | ! =========================================================================
423 |
424 | IF ( printlev >= 4 ) WRITE(numout,*) 'Entering grid_stuff'
425 |
426 | ! default resolution
427 | IF ( printlev >=2 ) WRITE(numout,*) 'grid stuff: default resolution (m): ',default_resolution
428 | !
429 | !-
430 | IF (is_root_prc) THEN ! the polygon description of the grid is built once, on the root process only
431 | !
432 | CALL grid_topolylist(GridType, NbSegments, npts_glo, iim, jjm, grid_lon, grid_lat, kindex, &
433 | & global, corners_g, neighbours_g, headings_g, seglength_g, area_g, ilandindex, jlandindex)
434 | !
435 | IF (PRESENT(contfrac_tmp)) THEN
436 | !
437 | ! Transfer the contfrac into the array managed in this module.
438 | !
439 | contfrac_g(:) = contfrac_tmp(:)
440 | ENDIF
441 | !
442 | ENDIF
443 | !
444 | ! With this the description of the grid is complete and the information
445 | ! can be scattered to all processors.
446 | !
447 | CALL grid_scatter()
448 | !
449 | CALL bcast(neighbours_g)
450 | CALL bcast(resolution_g)
451 | !
452 | IF ( printlev >= 3 ) WRITE(numout,*) 'Leaving grid_stuff'
453 |
454 | END SUBROUTINE grid_stuff
455 | !!
456 | !! =============================================================================================================================
457 | !! SUBROUTINE: grid_topolylist
458 | !!
459 | !>\BRIEF This routine transforms a regular grid into a list of polygons which are defined by the following
460 | !! quantities :
461 | !!
462 | !! corners : the n vertices of the polugon in longitude and latitude
463 | !! neighbours : the neighbouring land grid box for each of the vertices and segments
464 | !! headings : the direction in which the neighbour is
465 | !! seglength : the lenght of each segment
466 | !! area : the area of the polygon
467 | !! ilindex, jlindex : provides the i,j coordinates of the mesh in the global grid.
468 | !!
469 | !! DESCRIPTION:
470 | !!
471 | !! \n
472 | !_ ==============================================================================================================================
473 | !!
474 | SUBROUTINE grid_topolylist(gtype, nbseg, nland, iim, jjm, grid_lon, grid_lat, kindex, &
475 | & globalg, corners_loc, neighbours_loc, headings_loc, seglength_loc, &
476 | & area_loc, ilindex_loc, jlindex_loc)
477 | !
478 | ! 0 interface
479 | !
480 | IMPLICIT NONE
481 | !
482 | ! 0.1 input !
483 | ! Grid type
484 | CHARACTER(LEN=20), INTENT(in) :: gtype
485 | ! Number of segments for each polygon
486 | INTEGER(i_std), INTENT(in) :: nbseg
487 | ! Number of land points on the grid
488 | INTEGER(i_std), INTENT(in) :: nland
489 | ! Size of cartesian grid
490 | INTEGER(i_std), INTENT(in) :: iim, jjm
491 | ! Longitudes on cartesian grid
492 | REAL(r_std), DIMENSION(iim,jjm), INTENT(in) :: grid_lon
493 | ! Latitudes on cartesian grid
494 | REAL(r_std), DIMENSION(iim,jjm), INTENT(in) :: grid_lat
495 | ! Index of land point on 2D map (in local position)
496 | INTEGER(i_std), DIMENSION(nland), INTENT(in) :: kindex
497 | !
498 | ! 0.2 Output
499 | !
500 | LOGICAL, INTENT(out) :: globalg
501 | !
502 | REAL(r_std), DIMENSION(nland,nbseg,2), INTENT(out) :: corners_loc
503 | INTEGER(i_std), DIMENSION(nland,nbseg*2), INTENT(out) :: neighbours_loc
504 | REAL(r_std), DIMENSION(nland,nbseg*2), INTENT(out) :: headings_loc
505 | REAL(r_std), DIMENSION(nland,nbseg), INTENT(out) :: seglength_loc
506 | REAL(r_std), DIMENSION(nland), INTENT(out) :: area_loc
507 | INTEGER(i_std), DIMENSION(nland), INTENT(out) :: ilindex_loc, jlindex_loc
508 | !
509 | ! 0.3 Local variables
510 | !
511 | INTEGER(i_std) :: i, is, iss
512 | REAL(r_std), DIMENSION(nland,2) :: center
513 | REAL(r_std) :: maxdellon, mindellon, maxlon, minlon
514 | REAL(r_std), DIMENSION(nland,nbseg*2) :: lonpoly, latpoly
515 | !
516 | IF ( INDEX(gtype,"RegLonLat") > 0 ) THEN
517 | !
518 | ! If we are in regular Lon Lat, then we test just the longitude and see if we span 0-360deg.
519 | !
520 | maxdellon=MAXVAL(ABS(grid_lon(1:iim-1,1)-grid_lon(2:iim,1)))
521 | mindellon=MINVAL(ABS(grid_lon(1:iim-1,1)-grid_lon(2:iim,1)))
522 | maxlon=MAXVAL(grid_lon(1:iim,1))
523 | minlon=MINVAL(grid_lon(1:iim,1))
524 | !
525 | ! test if it could be a global grid on 0 -> 360
526 | !
527 | IF ( minlon > 0 .AND. maxlon > 180 ) THEN
528 | IF ( (minlon - maxdellon/2.0 ) <= 0 .AND. (maxlon + maxdellon/2.0) >= 360) THEN
529 | globalg = .TRUE.
530 | ELSE
531 | globalg = .FALSE.
532 | ENDIF
533 | !
534 | ! Test if it could be a -180 to 180 grid
535 | !
536 | ELSE IF ( minlon < 0 .AND. maxlon > 0 ) THEN
537 | IF ( (minlon - maxdellon/2.0 ) <= -180 .AND. (maxlon + maxdellon/2.0) >= 180) THEN
538 | globalg = .TRUE.
539 | ELSE
540 | globalg = .FALSE.
541 | ENDIF
542 | !
543 | ! If neither condition is met then it cannot be global.
544 | !
545 | ELSE
546 | globalg = .FALSE.
547 | ENDIF
548 | ELSE IF ( gtype == "RegXY" ) THEN
549 | !
550 | ! The hypothesis is that if we are in RegXY then we are not global
551 | !
552 | globalg = .FALSE.
553 | ELSE
554 | STOP "Unknown grid" ! NOTE(review): hard STOP, inconsistent with the ipslerr(3,...) error handling used elsewhere in this module
555 | ENDIF
556 | !
557 | ! 2.0 Transform the grid into a list of polygones while keeping the neighbour relations
558 | ! between these polygones.
559 | !
560 | ! Each polygone starts with a vertex and alternates vertices and mid-points of segments.
561 | !
562 | IF (nland == 1) THEN
563 | CALL haversine_singlepointploy(iim, jjm, grid_lon, grid_lat, nland, kindex, global, & ! NOTE(review): passes module variable "global", not the dummy "globalg" computed above — verify; if the caller passed "global" as the actual argument for globalg this also aliases an INTENT(out) dummy
564 | & nbseg, lonpoly, latpoly, center, &
565 | & neighbours_loc, ilindex_loc, jlindex_loc)
566 | ELSE IF ( INDEX(gtype, "RegLonLat") > 0 ) THEN
567 | CALL haversine_reglatlontoploy(iim, jjm, grid_lon, grid_lat, nland, kindex, global, & ! NOTE(review): same "global" vs "globalg" question as above
568 | & nbseg, lonpoly, latpoly, center, &
569 | & neighbours_loc, ilindex_loc, jlindex_loc)
570 | ELSE IF ( INDEX(gtype, "RegXY") > 0 ) THEN
571 | CALL haversine_regxytoploy(iim, jjm, grid_lon, grid_lat, nland, kindex, proj_stack, &
572 | & nbseg, lonpoly, latpoly, center, &
573 | & neighbours_loc, ilindex_loc, jlindex_loc)
574 | ELSE
575 | STOP "Unknown grid" ! NOTE(review): hard STOP, see comment on the earlier STOP
576 | ENDIF
577 | !
578 | ! Save the longitude and latitudes nbseg corners (=vertices) of the polygones
579 | !
580 | DO i=1,nland
581 | DO is=1,nbseg
582 | iss=(is-1)*2+1 ! vertices occupy the odd positions of lonpoly/latpoly (even ones are segment mid-points)
583 | corners_loc(i,is,1) = lonpoly(i,iss)
584 | corners_loc(i,is,2) = latpoly(i,iss)
585 | ENDDO
586 | ENDDO
587 | !
588 | ! Get the heading normal to the 4 segments and through the 4 corners.
589 | !
590 | CALL haversine_polyheadings(nland, nbseg, lonpoly, latpoly, center, headings_loc)
591 | !
592 | ! Order the points of the polygone in clockwise order Starting with the northern most
593 | !
594 | CALL haversine_polysort(nland, nbseg, lonpoly, latpoly, headings_loc, neighbours_loc)
595 | !
596 | ! Compute the segment length and area.
597 | ! For the RegLonLat we have specific calculations for seglength and area.
598 | ! For projected regular grids we use the great cicle assumption for the segments
599 | ! but the projected area.
600 | ! For unstructured grid we use the most general routines.
601 | !
602 | IF ( INDEX(gtype, "RegLonLat") > 0 ) THEN
603 | CALL haversine_laloseglen(nland, nbseg, lonpoly, latpoly, seglength_loc)
604 | CALL haversine_laloarea(nland, nbseg, seglength_loc, area_loc)
605 | ELSE IF ( INDEX(gtype, "RegXY") > 0 ) THEN
606 | CALL haversine_polyseglen(nland, nbseg, lonpoly, latpoly, seglength_loc)
607 | CALL haversine_xyarea(nland, nbseg, ilindex_loc, jlindex_loc, dxwrf, dywrf, area_loc)
608 | ELSE
609 | CALL haversine_polyseglen(nland, nbseg, lonpoly, latpoly, seglength_loc)
610 | CALL haversine_polyarea(nland, nbseg, lonpoly, latpoly, area_loc)
611 | ENDIF
612 | ! (area is computed in the block above; nothing further to do)
613 |
614 | !
615 | END SUBROUTINE grid_topolylist
616 | !!
617 | !!
618 | !!
619 | !! =============================================================================================================================
620 | !! SUBROUTINE: grid_scatter
621 | !!
622 | !>\BRIEF Scatter the grid information so that each processor knows the characteristics of the grid it works on.
623 | !!
624 | !! DESCRIPTION:
625 | !!
626 | !!
627 | !! The grid information has been computed for the entire grid on the root processor. Now we give each processor
628 | !! the information of the piece of the grid it works on. This concerns the following variables describing the grid :
629 | !! - area
630 | !! - resolution
631 | !! - neighbours
632 | !! - contfrac : fraction of continent
633 | !!
634 | !! Should ilandindex and jlandindex not b initialized, we catch-up here. This field is the same on all processors.
635 | !!
636 | !! TODO :
637 | !! This code should get the grid describing fields as arguments and then writem into the *_g variables on
638 | !! root_prc before scattering. This would allow to compute the grid characteristics in any subroutine
639 | !! fore calling grid_scatter.
640 | !!
641 | !!
642 | !!
643 | !! \n
644 | !_ ==============================================================================================================================
645 | !!
646 | !!
647 | SUBROUTINE grid_scatter()
648 | !
649 | ! Distribute the grid description computed on the root process to all processors.
650 | INTEGER(i_std) :: i, ip, jp
651 | !
652 | IF ( MAXVAL(ilandindex) < 0 .AND. MAXVAL(jlandindex) < 0 ) THEN ! still at the -10000000 fill values from grid_init, i.e. never computed
653 | DO i = 1, nbp_glo
654 | !
655 | ! 1 find numbers of the latitude and longitude of each point
656 | !
657 |
658 | ! index of latitude
659 | jp = INT( (index_g(i)-1) /iim_g ) + 1
660 |
661 | ! index of longitude
662 | ip = index_g(i) - ( jp-1 ) * iim_g
663 | !
664 | ! Save this information for usage in other modules.
665 | !
666 | ilandindex(i)=ip
667 | jlandindex(i)=jp
668 | !
669 | ENDDO
670 | ENDIF
671 | !
672 | CALL scatter(neighbours_g, neighbours)
673 | CALL scatter(contfrac_g, contfrac)
674 | CALL scatter(headings_g, headings)
675 | CALL scatter(seglength_g, seglength)
676 | CALL scatter(corners_g, corners)
677 | CALL scatter(area_g, area)
678 | !
679 | ! TEMPORARY section for resolution
680 | !
681 | IF ( is_root_prc) THEN
682 | resolution_g(:,1) = (seglength_g(:,1)+seglength_g(:,3))/2.0 ! mean of opposite segment pairs; assumes 4 segments — TODO confirm for non-quadrilateral meshes
683 | resolution_g(:,2) = (seglength_g(:,2)+seglength_g(:,4))/2.0
684 | ENDIF
685 | CALL scatter(resolution_g, resolution)
686 |
687 | !
688 | !
689 | IF ( printlev >=4 ) THEN
690 | WRITE(numout,*) 'grid_scatter > seglength = ', seglength(1,:)
691 | WRITE(numout,*) 'grid_scatter > neighbours = ', neighbours(1,:)
692 | WRITE(numout,*) 'grid_scatter > contfrac = ', contfrac(1)
693 | WRITE(numout,*) 'grid_scatter > area = ', area(1)
694 | ENDIF
695 | !
696 | END SUBROUTINE grid_scatter
703 | !!
704 | !!
705 | !! =============================================================================================================================
706 | !! SUBROUTINE: grid_initproj
707 | !!
708 | !>\BRIEF Routine to initialise the projection
709 | !!
710 | !! DESCRIPTION:
711 | !!
712 | !!
713 | !! This subroutine is called by the routine whichs ets-up th grid on which ORCHIDEE is to run.
714 | !! The aim is to set-upu the projection so that all the grid variables needed by ORCHIDEE can
715 | !! be computed in grid_stuff_regxy
716 | !!
717 | !! \n
718 | !_ ==============================================================================================================================
719 | !!
720 | !!
721 | SUBROUTINE grid_initproj (fid, iim, jjm)
722 | !
723 | ! Read the map-projection parameters from a WRF geogrid file (fid) and
724 | ! initialise proj_stack(1) as well as the dxwrf/dywrf cell-size arrays.
725 | !
726 | IMPLICIT NONE
727 | !
728 | ! 0.1 input !
729 | !
730 | ! Domain size
731 | INTEGER(i_std), INTENT(in) :: fid
732 | INTEGER(i_std), INTENT(in) :: iim, jjm
733 | !
734 | ! 0.2 Local variables
735 | !
736 | INTEGER(i_std) :: current_proj, idom, iret, lonid, latid, numLons, numLats
737 | INTEGER, DIMENSION(nf90_max_var_dims) :: dimIDs
738 | REAL(r_std) :: user_stand_lon, user_truelat1, user_truelat2, user_dxkm, user_dykm
739 | REAL(r_std) :: user_dlat, user_dlon, user_known_x, user_known_y, user_known_lat, user_known_lon
740 | REAL(r_std), DIMENSION(16) :: corner_lons, corner_lats
741 | !
742 | INTEGER(i_std) :: iv, i, j
743 | CHARACTER(LEN=20) :: varname
744 | REAL(r_std) :: dx, dy, dtx, dty, coslat
745 | REAL(r_std), ALLOCATABLE, DIMENSION (:) :: LON, LAT
746 | REAL(r_std), ALLOCATABLE, DIMENSION (:,:) :: mapfac_x, mapfac_y
747 | !
748 | !
749 | ! Only one domain is possible for the moment
750 | !
751 | idom=1
752 | CALL map_init(proj_stack(idom))
753 | !
754 | ! Does ORCHIDEE have the same Earth Radius as the map projection ?
755 | !
756 | IF ( ABS(R_Earth-EARTH_RADIUS_M) > 0.1 ) THEN
757 | WRITE(*,*) "Earth Radius in WRF : ", EARTH_RADIUS_M
758 | WRITE(*,*) "Earth Radius in ORCHIDEE : ", R_Earth
759 | CALL ipslerr (3,'grid_initproj','The Earth radius is not the same in the projection module and ORCHIDEE',&
760 | & " ", " ")
761 | ENDIF
762 | !
763 | ! Get parameters of the projection from the netCDF file
764 | !
765 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "MAP_PROJ", current_proj)
766 | !
767 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "STAND_LON", user_stand_lon)
768 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "TRUELAT1", user_truelat1)
769 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "TRUELAT2", user_truelat2)
770 | !
771 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "DX", user_dxkm)
772 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "DY", user_dykm)
773 | user_dlat = undef
774 | user_dlon = undef
775 | !
776 | IF ( current_proj == PROJ_LATLON ) THEN
777 | !
778 | iret = NF90_inq_VARID(fid, "XLONG_M",lonid)
779 | iret = NF90_INQUIRE_VARIABLE(fid, lonid, dimids = dimIDs)
780 | iret = NF90_INQUIRE_DIMENSION(fid, dimIDs(1), len = numLons)
781 | iret = NF90_INQUIRE_DIMENSION(fid, dimIDs(2), len = numLats)
782 | ALLOCATE(LON(numLons))
783 | iret = NF90_GET_VAR(fid, lonid, LON(:), start = (/ 1, 1, 1 /), count = (/ numLons, 1, 1 /))
784 |
785 | iret = NF90_inq_VARID(fid, "XLAT_M",latid)
786 | ALLOCATE(LAT(numLats))
787 | iret = NF90_GET_VAR(fid, latid, LAT(:), start = (/ 1, 1, 1 /), count = (/ 1, numLats, 1 /))
788 |
789 | user_dlon = (LON(numLons) - LON(1)) / (numLons - 1)
790 | user_dlat = (LAT(numLats) - LAT(1)) / (numLats - 1)
791 |
792 | ! Bug fix: LON/LAT must NOT be deallocated here; LAT is still read (lat(j)) in the dx/dy loop below. They are deallocated at the end of the subroutine.
793 |
794 | ENDIF
795 | ! Unable to know from where to get the information
796 | user_known_x = 1
797 | user_known_y = 1
798 | !
799 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "corner_lats", corner_lats)
800 | iret = NF90_GET_ATT(fid, NF90_GLOBAL, "corner_lons", corner_lons)
801 | user_known_lat = corner_lats(1)
802 | user_known_lon = corner_lons(1)
803 | !
804 | ! Read mapfactor, land mask and orography
805 | !
806 | !
807 | ! Allocation
808 | !
809 | ALLOCATE(mapfac_x(iim,jjm))
810 | ALLOCATE(mapfac_y(iim,jjm))
811 | ALLOCATE(dxwrf(iim,jjm))
812 | ALLOCATE(dywrf(iim,jjm))
813 | !
814 | varname = "MAPFAC_MX"
815 | iret = NF90_INQ_VARID (fid, varname, iv)
816 | IF (iret /= NF90_NOERR) THEN
817 | CALL ipslerr (3,'WRFdomain_Read',"Could not find variable ", varname," ")
818 | ELSE
819 | iret = NF90_GET_VAR (fid,iv,mapfac_x)
820 | ENDIF
821 | varname = "MAPFAC_MY"
822 | iret = NF90_INQ_VARID (fid, varname, iv)
823 | IF (iret /= NF90_NOERR) THEN
824 | CALL ipslerr (3,'WRFdomain_Read',"Could not find variable ", varname," ")
825 | ELSE
826 | iret = NF90_GET_VAR (fid,iv,mapfac_y)
827 | ENDIF
828 | !
829 | ! Initilize the projection
830 | !
831 | if (current_proj == PROJ_LATLON) then
832 | call map_set(current_proj, proj_stack(idom), &
833 | lat1=user_known_lat, &
834 | lon1=user_known_lon, &
835 | knowni=user_known_x, &
836 | knownj=user_known_y, &
837 | latinc=user_dlat, &
838 | loninc=user_dlon, &
839 | r_earth=R_Earth)
840 |
841 | else if (current_proj == PROJ_MERC) then
842 | call map_set(current_proj, proj_stack(idom), &
843 | truelat1=user_truelat1, &
844 | lat1=user_known_lat, &
845 | lon1=user_known_lon, &
846 | knowni=user_known_x, &
847 | knownj=user_known_y, &
848 | dx=user_dxkm, &
849 | r_earth=R_Earth)
850 |
851 | else if (current_proj == PROJ_CYL) then
852 | call ipslerr(3,"grid_initproj",'Should not have PROJ_CYL as projection for',&
853 | 'source data in push_source_projection()', " ")
854 |
855 | else if (current_proj == PROJ_CASSINI) then
856 | call ipslerr(3,"grid_initproj",'Should not have PROJ_CASSINI as projection for', &
857 | 'source data in push_source_projection()', " ")
858 |
859 | else if (current_proj == PROJ_LC) then
860 | call map_set(current_proj, proj_stack(idom), &
861 | truelat1=user_truelat1, &
862 | truelat2=user_truelat2, &
863 | stdlon=user_stand_lon, &
864 | lat1=user_known_lat, &
865 | lon1=user_known_lon, &
866 | knowni=user_known_x, &
867 | knownj=user_known_y, &
868 | dx=user_dxkm, &
869 | r_earth=R_Earth)
870 |
871 | else if (current_proj == PROJ_ALBERS_NAD83) then
872 | call map_set(current_proj, proj_stack(idom), &
873 | truelat1=user_truelat1, &
874 | truelat2=user_truelat2, &
875 | stdlon=user_stand_lon, &
876 | lat1=user_known_lat, &
877 | lon1=user_known_lon, &
878 | knowni=user_known_x, &
879 | knownj=user_known_y, &
880 | dx=user_dxkm, &
881 | r_earth=R_Earth)
882 |
883 | else if (current_proj == PROJ_PS) then
884 | call map_set(current_proj, proj_stack(idom), &
885 | truelat1=user_truelat1, &
886 | stdlon=user_stand_lon, &
887 | lat1=user_known_lat, &
888 | lon1=user_known_lon, &
889 | knowni=user_known_x, &
890 | knownj=user_known_y, &
891 | dx=user_dxkm, &
892 | r_earth=R_Earth)
893 |
894 | else if (current_proj == PROJ_PS_WGS84) then
895 | call map_set(current_proj, proj_stack(idom), &
896 | truelat1=user_truelat1, &
897 | stdlon=user_stand_lon, &
898 | lat1=user_known_lat, &
899 | lon1=user_known_lon, &
900 | knowni=user_known_x, &
901 | knownj=user_known_y, &
902 | dx=user_dxkm, &
903 | r_earth=R_Earth)
904 |
905 | else if (current_proj == PROJ_GAUSS) then
906 | call map_set(current_proj, proj_stack(idom), &
907 | lat1=user_known_lat, &
908 | lon1=user_known_lon, &
909 | nlat=nint(user_dlat), &
910 | loninc=user_dlon, &
911 | r_earth=R_Earth)
912 |
913 | else if (current_proj == PROJ_ROTLL) then
914 | call ipslerr(3 ,"grid_initproj",'Should not have PROJ_ROTLL as projection for', &
915 | 'source data in push_source_projection() as not yet implemented', '')
916 | end if
917 | !
918 | ! Transform the mapfactors into dx and dy to be used for the description of the polygons and
919 | ! interpolations.
920 | !
921 | DO i=1,iim
922 | DO j=1,jjm
923 | !
924 | IF (proj_stack(idom)%code /= PROJ_LATLON ) THEN
925 | dx = proj_stack(idom)%dx
926 | ! Some projections in WRF do not store dy, in that case dy=dx.
927 | IF ( proj_stack(idom)%dy > 0 ) THEN
928 | dy = proj_stack(idom)%dy
929 | ELSE
930 | dy = proj_stack(idom)%dx
931 | ENDIF
932 | dxwrf(i,j) = dx/mapfac_x(i,j)
933 | dywrf(i,j) = dy/mapfac_y(i,j)
934 | ELSE
935 | !
936 | ! The LatLon projection is also a special case as here it is not the dx and dy
937 | ! which are stored in the projection file but the increments in Lon and Lat.
938 | !
939 | dtx = proj_stack(idom)%loninc
940 | dty = proj_stack(idom)%latinc
941 | coslat = COS(lat(j) * pi/180. ) ! reads the local LAT array; only reached when current_proj == PROJ_LATLON, so LAT is allocated (assumes numLats >= jjm — TODO confirm)
942 | dxwrf(i,j) = dtx * pi/180. * R_Earth * coslat
943 | dywrf(i,j) = dty * pi/180. * R_Earth
944 | !
945 | ENDIF
946 | !
947 | ENDDO
948 | ENDDO
949 | IF ( ALLOCATED(LON) ) DEALLOCATE(LON, LAT) ! deferred from the PROJ_LATLON read block above (bug fix)
950 | END SUBROUTINE grid_initproj
951 | !
952 | !
953 | !
954 | !=========================================================================================
955 | !
956 | SUBROUTINE grid_tolola_scal (ri, rj, lon, lat)
957 | !
958 | ! Convert one pair of grid coordinates (ri,rj) into geographical (lon,lat) using projection 1.
959 | ! Argument
960 | REAL(r_std), INTENT(in) :: ri, rj
961 | REAL(r_std), INTENT(out) :: lon, lat
962 | !
963 | !
964 | IF ( proj_stack(1)%code < undef_int ) THEN
965 | !
966 | CALL ij_to_latlon(proj_stack(1), ri, rj, lat, lon)
967 | !
968 | ELSE
969 | CALL ipslerr(3, "grid_tolola_scal", "Projection not initialized"," "," ") ! fixed typo "initilized"
970 | ENDIF
971 | !
972 | END SUBROUTINE grid_tolola_scal
973 | !
974 | !=========================================================================================
975 | !
976 | SUBROUTINE grid_tolola_1d (ri, rj, lon, lat)
977 | !
978 | ! Convert 1D arrays of grid coordinates (ri,rj) into geographical (lon,lat) using projection 1.
979 | ! Argument
980 | REAL(r_std), INTENT(in), DIMENSION(:) :: ri, rj
981 | REAL(r_std), INTENT(out), DIMENSION(:) :: lon, lat
982 | !
983 | ! Local
984 | INTEGER :: i, imax
985 | !
986 | imax=SIZE(lon)
987 | !
988 | IF ( proj_stack(1)%code < undef_int ) THEN
989 | DO i=1,imax
990 | !
991 | CALL ij_to_latlon(proj_stack(1), ri(i), rj(i), lat(i), lon(i))
992 | !
993 | ENDDO
994 | ELSE
995 | CALL ipslerr(3, "grid_tolola_1d", "Projection not initialized"," "," ") ! fixed typo "initilized"
996 | ENDIF
997 | !
998 | END SUBROUTINE grid_tolola_1d
999 | !
1000 | !=========================================================================================
1001 | !
1002 | SUBROUTINE grid_tolola_2d (ri, rj, lon, lat)
1003 | !
1004 | ! Convert 2D arrays of grid coordinates (ri,rj) into geographical (lon,lat) using projection 1.
1005 | ! Argument
1006 | REAL(r_std), INTENT(in), DIMENSION(:,:) :: ri, rj
1007 | REAL(r_std), INTENT(out), DIMENSION(:,:) :: lon, lat
1008 | !
1009 | ! Local
1010 | INTEGER :: i, imax, j, jmax
1011 | !
1012 | imax=SIZE(lon,DIM=1)
1013 | jmax=SIZE(lon,DIM=2)
1014 | !
1015 | IF ( proj_stack(1)%code < undef_int ) THEN
1016 | DO i=1,imax
1017 | DO j=1,jmax
1018 | !
1019 | CALL ij_to_latlon(proj_stack(1), ri(i,j), rj(i,j), lat(i,j), lon(i,j))
1020 | !
1021 | ENDDO
1022 | ENDDO
1023 | ELSE
1024 | CALL ipslerr(3, "grid_tolola_2d", "Projection not initialized"," "," ") ! fixed typo "initilized"
1025 | ENDIF
1026 | !
1027 | END SUBROUTINE grid_tolola_2d
1028 | !
1029 | !=========================================================================================
1030 | !
1031 | SUBROUTINE grid_toij_scal (lon, lat, ri, rj)
1032 | !
1033 | ! Convert one geographical point (lon,lat) into grid coordinates (ri,rj) using projection 1.
1034 | ! Argument
1035 | REAL(r_std), INTENT(in) :: lon, lat
1036 | REAL(r_std), INTENT(out) :: ri, rj
1037 | !
1038 | !
1039 | IF ( proj_stack(1)%code < undef_int ) THEN
1040 | !
1041 | CALL latlon_to_ij(proj_stack(1), lat, lon, ri, rj)
1042 | !
1043 | ELSE
1044 | CALL ipslerr(3, "grid_toij_scal", "Projection not initialized"," "," ") ! fixed typo "initilized"
1045 | ENDIF
1046 | !
1047 | END SUBROUTINE grid_toij_scal
1048 | !
1049 | !=========================================================================================
1050 | !
1051 | SUBROUTINE grid_toij_1d (lon, lat, ri, rj)
1052 | !
1053 | ! Convert 1D arrays of geographical points (lon,lat) into grid coordinates (ri,rj) using projection 1.
1054 | ! Argument
1055 | REAL(r_std), INTENT(in), DIMENSION(:) :: lon, lat
1056 | REAL(r_std), INTENT(out), DIMENSION(:) :: ri, rj
1057 | !
1058 | ! Local
1059 | INTEGER :: i, imax
1060 | !
1061 | imax=SIZE(lon)
1062 | !
1063 | IF ( proj_stack(1)%code < undef_int ) THEN
1064 | DO i=1,imax
1065 | !
1066 | CALL latlon_to_ij(proj_stack(1), lat(i), lon(i), ri(i), rj(i))
1067 | !
1068 | ENDDO
1069 | ELSE
1070 | CALL ipslerr(3, "grid_toij_1d", "Projection not initialized"," "," ") ! fixed typo "initilized"
1071 | ENDIF
1072 | !
1073 | END SUBROUTINE grid_toij_1d
1074 | !
1075 | !=========================================================================================
1076 | !
1077 | SUBROUTINE grid_toij_2d (lon, lat, ri, rj)
1078 | !
1079 | ! Convert 2D arrays of geographical points (lon,lat) into grid coordinates (ri,rj) using projection 1.
1080 | ! Argument
1081 | REAL(r_std), INTENT(in), DIMENSION(:,:) :: lon, lat
1082 | REAL(r_std), INTENT(out), DIMENSION(:,:) :: ri, rj
1083 | !
1084 | ! Local
1085 | INTEGER :: i, imax, j, jmax
1086 | !
1087 | imax=SIZE(lon,DIM=1)
1088 | jmax=SIZE(lon,DIM=2)
1089 | !
1090 | IF ( proj_stack(1)%code < undef_int ) THEN
1091 | DO i=1,imax
1092 | DO j=1,jmax
1093 | !
1094 | CALL latlon_to_ij(proj_stack(1), lat(i,j), lon(i,j), ri(i,j), rj(i,j))
1095 | !
1096 | ENDDO
1097 | ENDDO
1098 | ELSE
1099 | CALL ipslerr(3, "grid_toij_2d", "Projection not initialized"," "," ") ! fixed typo "initilized"
1100 | ENDIF
1101 | !
1102 | END SUBROUTINE grid_toij_2d
1103 | !
1104 | !
1105 | !=========================================================================================
1106 | !
1107 | !
1108 | END MODULE grid
--------------------------------------------------------------------------------
/test/fortran/ofp/XMLPrinterBaseTests.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp;
2 |
3 | import static org.junit.jupiter.api.Assertions.*;
4 |
5 | import org.junit.jupiter.api.BeforeEach;
6 | import org.junit.jupiter.api.Test;
7 |
8 | class XMLPrinterBaseTests { // Placeholder JUnit 5 suite for fortran.ofp.XMLPrinterBase; every test is a fail() stub awaiting implementation.
9 |
10 | @BeforeEach
11 | void setUp() throws Exception { // intentionally empty: no shared fixture yet
12 | }
13 |
14 | @Test
15 | void testContextFind() { // TODO: implement; stub fails so it is not silently counted as passing
16 | fail("Not yet implemented");
17 | }
18 |
19 | @Test
20 | void testContextString() { // TODO: implement
21 | fail("Not yet implemented");
22 | }
23 |
24 | @Test
25 | void testContextPrint() { // TODO: implement
26 | fail("Not yet implemented");
27 | }
28 |
29 | @Test
30 | void testUpdateBoundsFromCodeBounds() { // TODO: implement
31 | fail("Not yet implemented");
32 | }
33 |
34 | @Test
35 | void testUpdateBoundsFromToken() { // TODO: implement
36 | fail("Not yet implemented");
37 | }
38 |
39 | @Test
40 | void testMoveTo() { // TODO: implement
41 | fail("Not yet implemented");
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/test/fortran/ofp/parser/java/CodeBoundsTests.java:
--------------------------------------------------------------------------------
1 | package fortran.ofp.parser.java;
2 |
3 | import static org.junit.jupiter.api.Assertions.*;
4 |
5 | import javax.xml.parsers.DocumentBuilder;
6 | import javax.xml.parsers.DocumentBuilderFactory;
7 |
8 | import org.antlr.runtime.Token;
9 | import org.junit.jupiter.api.BeforeEach;
10 | import org.junit.jupiter.api.Test;
11 | import org.w3c.dom.Document;
12 | import org.w3c.dom.Element;
13 |
14 | class CodeBoundsTests { // JUnit 5 tests for CodeBounds round-tripping (persist to / restore from a DOM element) and extension.
15 |
16 | Document doc; // fresh DOM document used to create carrier elements for persist()/restore
17 |
18 | @BeforeEach
19 | void setUp() throws Exception { // new empty document per test so persisted attributes cannot leak between tests
20 | DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
21 | DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
22 | doc = docBuilder.newDocument();
23 | }
24 |
25 | @Test
26 | void testExtendPersist2() { // persist bounds on an element, re-read them, extend, persist again — end column must grow, begin must be stable
27 | Element context = doc.createElement("test-node"); // contextOpen("test-node");
28 | System.err.println("testing end subroutine tokens");
29 | // printTokens(label, keyword1, keyword2, name, eos);
30 | CodeBounds bounds;
31 | bounds = new CodeBounds(598, 0, 598, 3); // bounds = new CodeBounds(keyword1);
32 | bounds.persist(context);
33 | bounds = new CodeBounds(context); // restore from the element's attributes
34 |
35 | assertEquals((int)bounds.begin.line, 598);
36 | assertEquals((int)bounds.begin.col, 0);
37 | assertEquals((int)bounds.end.line, 598);
38 | assertEquals((int)bounds.end.col, 3);
39 |
40 | bounds.extend(new CodeLocation(598, 4)); //
41 | assertEquals((int)bounds.end.col, 4); //
42 | bounds.extend(new CodeLocation(598, 14)); // bounds.extend(name);
43 | bounds.persist(context);
44 | System.err.println(new CodeBounds(context));
45 | // bounds.extend(eos);
46 | bounds.persist(context); // persisting twice must be idempotent
47 | System.err.println(new CodeBounds(context));
48 |
49 | bounds = new CodeBounds(context);
50 | assertEquals((int)bounds.begin.line, 598);
51 | assertEquals((int)bounds.begin.col, 0);
52 | assertEquals((int)bounds.end.line, 598);
53 | assertEquals((int)bounds.end.col, 14);
54 | }
55 |
56 | @Test
57 | void testExtendPersist() { // extend-persist-restore cycles on an element with no initial bounds
58 | CodeBounds bounds;
59 | Element e = doc.createElement("blah");
60 | bounds = new CodeBounds(e); // element carries no bounds yet
61 | bounds.extend(new CodeLocation(10, 0));
62 | bounds.persist(e);
63 | bounds = new CodeBounds(e);
64 | bounds.extend(new CodeLocation(10, 20));
65 | bounds.persist(e);
66 |
67 | bounds = new CodeBounds(e);
68 | assertEquals((int)bounds.begin.line, 10);
69 | assertEquals((int)bounds.end.col, 20);
70 | }
71 |
72 | @Test
73 | void testExtend() { // repeated extension on one line: begin stays at the first location, end tracks the furthest column
74 | CodeBounds bounds = new CodeBounds();
75 | bounds.extend(new CodeLocation(10, 0));
76 | bounds.extend(new CodeLocation(10, 3));
77 | bounds.extend(new CodeLocation(10, 10));
78 | bounds.extend(new CodeLocation(10, 15));
79 | bounds.extend(new CodeLocation(10, 20));
80 |
81 | assertEquals((int)bounds.begin.line, 10);
82 | assertEquals((int)bounds.begin.col, 0);
83 | assertEquals((int)bounds.end.line, 10);
84 | assertEquals((int)bounds.end.col, 20);
85 | }
86 |
87 | }
88 |
--------------------------------------------------------------------------------
/test/test_apps.py:
--------------------------------------------------------------------------------
1 | """Testing ast_transformer module on FFB-MINI application."""
2 |
3 | import logging
4 | import os
5 | import pathlib
6 | import platform
7 | import unittest
8 |
9 | from .test_compatibility import all_fortran_paths, TestsBase
10 |
_LOG = logging.getLogger(__name__)

# Directory containing this test module.
_HERE = pathlib.Path(__file__).resolve().parent

# Repository root.
_ROOT = _HERE.parent

# Root under which tested applications are looked up; overridable via TEST_APPS_ROOT.
_APPS_ROOT = pathlib.Path(os.environ.get('TEST_APPS_ROOT', _ROOT.parent)).resolve()

# Application name -> location of its source tree (relative entries are siblings of this repo).
_APPS_ROOT_PATHS = {
    'miranda_io': pathlib.Path('..', 'miranda_io'),
    'FLASH-4.5': pathlib.Path('..', 'flash-4.5'),
    'FLASH-SUBSET': pathlib.Path('..', 'flash-subset', 'FLASH4.4'),
    'FFB-MINI': pathlib.Path('..', 'ffb-mini'),
    'flash5': _APPS_ROOT.joinpath('flash5')}

# Applications whose absence is tolerated (tests skip instead of failing).
_APPS_OPTIONAL = {'FLASH-4.5', 'FLASH-SUBSET'}

# Re-bind with absolute paths, dropping optional apps that are not checked out.
_APPS_ROOT_PATHS = {
    app: _HERE.parent.joinpath(path).resolve() for app, path in _APPS_ROOT_PATHS.items()
    if app not in _APPS_OPTIONAL or _HERE.parent.joinpath(path).is_dir()}

# Source files tested in both FLASH-4.5 and FLASH-SUBSET.
_FLASH_COMMON_PATHS = [
    'physics/Hydro/HydroMain/split/MHD_8Wave/hy_8wv_interpolate.F90',
    'physics/Hydro/HydroMain/split/MHD_8Wave/hy_8wv_fluxes.F90',
    'physics/Eos/EosMain/Gamma/eos_idealGamma.F90',
    'physics/Hydro/HydroMain/split/MHD_8Wave/hy_8wv_sweep.F90']

# Application name -> list of its source files that the tests attempt to parse.
_APPS_CODE_FILEPATHS = {
    'miranda_io': all_fortran_paths(_APPS_ROOT_PATHS['miranda_io']),
    'FLASH-4.5': [
        pathlib.Path(_APPS_ROOT_PATHS['FLASH-4.5'], 'source', pathlib.Path(input_path))
        for input_path in [
            'physics/Hydro/HydroMain/unsplit/hy_uhd_getFaceFlux.F90',
            'physics/Hydro/HydroMain/unsplit/hy_uhd_DataReconstructNormalDir_MH.F90',
            'physics/Hydro/HydroMain/unsplit/hy_uhd_upwindTransverseFlux.F90',
            'physics/Hydro/HydroMain/unsplit/hy_uhd_TVDslope.F90',
            'physics/Hydro/HydroMain/unsplit/hy_uhd_Roe.F90'
        ] + _FLASH_COMMON_PATHS] if 'FLASH-4.5' in _APPS_ROOT_PATHS else [],
    'FLASH-SUBSET': [
        pathlib.Path(_APPS_ROOT_PATHS['FLASH-SUBSET'], 'source', pathlib.Path(input_path))
        for input_path in [
            'physics/Hydro/HydroMain/simpleUnsplit/HLL/hy_hllUnsplit.F90',
            'physics/Hydro/HydroMain/unsplit/hy_getFaceFlux.F90',
            'physics/Hydro/HydroMain/unsplit/hy_DataReconstructNormalDir_MH.F90',
            'physics/Hydro/HydroMain/unsplit/hy_upwindTransverseFlux.F90',
            'physics/Hydro/HydroMain/unsplit/hy_TVDslope.F90',
            'physics/Hydro/HydroMain/unsplit/hy_Roe.F90'
        ] + _FLASH_COMMON_PATHS] if 'FLASH-SUBSET' in _APPS_ROOT_PATHS else [],
    # FFB-MINI: everything under src/ except C-oriented headers that are not Fortran.
    'FFB-MINI': [path for path in all_fortran_paths(_APPS_ROOT_PATHS['FFB-MINI'].joinpath('src'))
                 if path.name not in ('gfc.h', 'gfrd_c.h', 'gfutil_c.h', 'gfutil_f.h', 'gfwrt_c.h',
                                      'maprof.h', 'maprof_proc.h', 'maprof_yaml.h')],
    'flash5': [_APPS_ROOT_PATHS['flash5'].joinpath('source', _) for _ in {
        pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit', 'hy_getFaceFlux.F90'),
        pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit', 'hy_getRiemannState.F90'),
        pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit', 'hy_TVDslope.F90'),
        pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit', 'hy_upwindTransverseFlux.F90'),
        pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit', 'MHD', 'hy_eigenVector.F90'),
        pathlib.Path('physics', 'Eos', 'EosMain', 'Helmholtz_starkiller', 'SpeciesBased',
                     'actual_eos.F90'),
        pathlib.Path('physics', 'sourceTerms', 'Burn', 'BurnMain', 'nuclearBurn', 'Aprox13',
                     'bn_mapNetworkToSpecies.F90'),
        pathlib.Path('physics', 'sourceTerms', 'Burn', 'BurnMain', 'nuclearBurn', 'bn_burner.F90'),
        pathlib.Path('physics', 'sourceTerms', 'Burn', 'BurnMain', 'nuclearBurn', 'Burn.F90'),
        pathlib.Path('Simulation', 'Simulation_init.F90')}]}
75 |
76 |
class Tests(TestsBase):
    """Parse the sources of several Fortran applications and check pass counts."""

    maxDiff = None

    def _run_app_test(
            self, app_name: str, app_dirname: str = None, minimum_passed_cases: int = None,
            fall_back_to_ofc: bool = False):
        """Run the batch parse-and-report scenario for a single application."""
        if app_name not in _APPS_ROOT_PATHS and app_name in _APPS_OPTIONAL:
            self.skipTest('{} directory not found'.format(app_name))
        if app_dirname is None:
            app_dirname = app_name.lower()

        # Reports produced via the OFC fallback are kept separate from plain runs.
        suffix = '_ofc' if fall_back_to_ofc else ''
        reports_root = _HERE.joinpath('results', 'apps', app_dirname)
        failure_reports_path = reports_root.joinpath('failure' + suffix)
        success_reports_path = reports_root.joinpath('success' + suffix)

        self.check_cases_and_report(
            app_name, failure_reports_path, success_reports_path,
            _APPS_ROOT_PATHS[app_name], _APPS_CODE_FILEPATHS[app_name],
            minimum_passed_cases, fall_back_to_ofc)

    def test_miranda_io(self):
        self._run_app_test('miranda_io')

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    def test_flash_45(self):
        self._run_app_test('FLASH-4.5')

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    def test_flash_subset(self):
        self._run_app_test('FLASH-SUBSET')

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    def test_ffb_mini(self):
        self._run_app_test('FFB-MINI', None, 24)

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    @unittest.skipIf(platform.system() == 'Windows', 'OFC not available on Windows')
    def test_ffb_mini_with_ofc(self):
        self._run_app_test('FFB-MINI', None, 35, True)

    def test_flash5(self):
        self._run_app_test('flash5', None, 9)
121 |
--------------------------------------------------------------------------------
/test/test_compatibility.py:
--------------------------------------------------------------------------------
1 | """Tests using the test cases from Open Fortran Parser."""
2 |
3 | import itertools
4 | import logging
5 | import os
6 | import pathlib
7 | import subprocess
8 | import typing as t
9 | import unittest
10 | import xml.etree.ElementTree as ET
11 |
12 | from open_fortran_parser.parser_wrapper import parse
13 | from open_fortran_parser.ofc_wrapper import transpile
14 |
15 | _LOG = logging.getLogger(__name__)
16 |
17 | _HERE = pathlib.Path(__file__).resolve().parent
18 |
19 |
def all_fortran_paths(root_path: pathlib.Path):
    """Recursively collect all Fortran (and header) file paths under *root_path*.

    Returns an empty list when the directory does not exist. Both lower- and
    upper-case variants of each extension are matched.
    """
    if not root_path.exists():
        return []
    extensions = [variant for base in ('.f', '.f90', '.f03', '.f08', '.h')
                  for variant in (base, base.upper())]
    found = []
    for extension in extensions:
        for path in root_path.glob('**/*{}'.format(extension)):
            found.append(path.resolve())
    return found
32 |
33 |
# Open Fortran Parser repository, expected to be checked out next to this one.
_OFP_RELATIVE_REPO_PATH = pathlib.Path('..', 'open-fortran-parser')
_OFP_TESTS_DIR = _HERE.parent.joinpath(_OFP_RELATIVE_REPO_PATH, 'tests')

# All Fortran files of the OFP test suite (empty when the repo is absent).
ALL_OFP_TEST_PATHS = all_fortran_paths(_OFP_TESTS_DIR)
38 |
39 |
class TestsBase(unittest.TestCase):
    """Shared helpers for parsing batches of Fortran files and reporting results."""

    maxDiff = None

    def check_cases(self, input_paths, relative=True):
        """Try to parse all given files, fail on first failure."""
        for input_path in input_paths:
            if relative:
                # Paths are given relative to the OFP test-suite directory.
                input_path = _OFP_TESTS_DIR.joinpath(input_path).resolve()
            try:
                root_node = parse(input_path, verbosity=100, raise_on_error=True)
                self.assertIsNotNone(root_node)
            except subprocess.CalledProcessError as err:
                _LOG.exception(err.stdout.decode().rstrip())
                self.fail('failed to parse "{}"'.format(input_path))

    def _check_cases(self, cases, relative=True):
        """Run the given cases through check_cases_and_report with default report paths."""
        if relative:
            cases = [_OFP_TESTS_DIR.joinpath(input_path).resolve() for input_path in cases]
        tests_absolute_path = _OFP_TESTS_DIR.resolve()
        failure_reports_path = _HERE.joinpath('results', 'compatibility', 'failure')
        success_reports_path = _HERE.joinpath('results', 'compatibility', 'success')
        self.check_cases_and_report(
            'OFP', failure_reports_path, success_reports_path, tests_absolute_path, cases)

    def _check_case(self, input_path: pathlib.Path, fall_back_to_ofc: bool = False,
                    ofc_target_path: pathlib.Path = None):
        """Parse one file; return the XML root on success, the CalledProcessError on failure.

        When fall_back_to_ofc is set, a failed parse is retried on source transpiled
        by OFC and written to ofc_target_path.
        """
        result = None
        try:
            try:
                result = parse(input_path, verbosity=100, raise_on_error=True)
                self.assertIsNotNone(result)
            except subprocess.CalledProcessError as parser_err:
                if not fall_back_to_ofc:
                    raise parser_err
                assert isinstance(ofc_target_path, pathlib.Path), ofc_target_path
                code = None
                try:
                    code = transpile(input_path, raise_on_error=True)
                    self.assertIsInstance(code, str)
                    with ofc_target_path.open('w') as transpiled_file:
                        transpiled_file.write(code)
                    result = parse(ofc_target_path, verbosity=100, raise_on_error=True)
                    self.assertIsNotNone(result)
                    _LOG.info('OFC definitely fixed something, see %s', ofc_target_path)
                except subprocess.CalledProcessError as err3:
                    if code is not None:
                        _LOG.warning('OFC succeeded but parser failed %s', ofc_target_path)
                    # Report the original parser failure, chained to the retry failure.
                    raise parser_err from err3
        except subprocess.CalledProcessError as err:
            result = err
        return result

    def check_cases_and_report(
            self, scenario_name: str, failure_reports_path: pathlib.Path,
            success_reports_path: pathlib.Path, input_paths_root: pathlib.Path,
            input_paths: t.Sequence[pathlib.Path], minimum_passed_cases: int = None,
            fall_back_to_ofc: bool = False):
        """Try to parse all given files, fail if there are not enough successes."""
        all_count = len(input_paths)
        if minimum_passed_cases is None:
            # By default every single case must pass.
            minimum_passed_cases = all_count
        else:
            self.assertGreaterEqual(all_count, minimum_passed_cases, 'not enough cases to pass')

        failure_reports_path.mkdir(parents=True, exist_ok=True)
        # "filtered" holds failures whose stderr does not implicate the XML printer.
        failure_reports_path.joinpath('filtered').mkdir(parents=True, exist_ok=True)
        success_reports_path.mkdir(parents=True, exist_ok=True)
        passed_test_cases = []
        new_passed_cases = []
        failed_test_cases = []
        new_failed_cases = []

        for i, input_path in enumerate(input_paths):
            if i % 50 == 49:
                # Periodic progress report for long runs.
                _LOG.warning('%s: testing case %i of %i', scenario_name, i + 1, len(input_paths))
            # with self.subTest(input_path=input_path):

            relative_input_path = input_path.relative_to(input_paths_root)
            flat_relative_input_path = str(relative_input_path).replace(os.sep, '_')
            # NOTE(review): hard-coded /tmp is not portable (e.g. Windows) — verify intent.
            ofc_target_path = pathlib.Path('/tmp', flat_relative_input_path)

            # Silence the wrapper loggers for the duration of the single-case check.
            logger_level = logging.getLogger('open_fortran_parser.parser_wrapper').level
            logging.getLogger('open_fortran_parser.parser_wrapper').setLevel(logging.CRITICAL)
            ofc_logger_level = logging.getLogger('open_fortran_parser.ofc_wrapper').level
            logging.getLogger('open_fortran_parser.ofc_wrapper').setLevel(logging.CRITICAL)
            result = self._check_case(input_path, fall_back_to_ofc, ofc_target_path)
            logging.getLogger('open_fortran_parser.parser_wrapper').setLevel(logger_level)
            logging.getLogger('open_fortran_parser.ofc_wrapper').setLevel(ofc_logger_level)

            report_filename = flat_relative_input_path + '.xml'
            failure_report_path = failure_reports_path.joinpath(report_filename)
            filtered_report_path = failure_reports_path.joinpath('filtered', report_filename)
            success_report_path = success_reports_path.joinpath(report_filename)
            old_success_report_path = success_reports_path.joinpath('old_' + report_filename)

            if isinstance(result, ET.Element):
                # Case passed: drop any stale failure reports for it.
                passed_test_cases.append(input_path)
                if not success_report_path.exists():
                    new_passed_cases.append(input_path)
                if old_success_report_path.exists():
                    old_success_report_path.unlink()
                if failure_report_path.exists():
                    failure_report_path.unlink()
                if filtered_report_path.exists():
                    filtered_report_path.unlink()
                report_path = success_report_path
            elif isinstance(result, subprocess.CalledProcessError):
                # Case failed: keep the old success report under an "old_" prefix.
                failed_test_cases.append(input_path)
                if not failure_report_path.exists() and not filtered_report_path.exists():
                    new_failed_cases.append(input_path)
                if success_report_path.exists():
                    success_report_path.rename(old_success_report_path)
                if b'XMLPrinter' in result.stderr:
                    # Failure originates in the XML printer itself.
                    if filtered_report_path.exists():
                        filtered_report_path.unlink()
                    report_path = failure_report_path
                else:
                    if failure_report_path.exists():
                        failure_report_path.unlink()
                    report_path = filtered_report_path
            else:
                self.fail('{} {}'.format(type(result), result))

            # NOTE(review): several string literals below look empty/truncated, and the
            # multi-line literal further down is a syntax error as written — markup may
            # have been stripped from this file; verify against upstream before editing.
            with open(str(report_path), 'w') as report_file:
                print('{} '.format(input_path), file=report_file)
                if hasattr(result, 'stderr') and result.stderr:
                    print('', file=report_file)
                    print(result.stderr.decode().rstrip(), file=report_file)
                    print(' ', file=report_file)
                print('', file=report_file)
                with open(str(input_path)) as fortran_file:
                    print(fortran_file.read(), file=report_file)
                print('
', file=report_file)
                if isinstance(result, ET.Element):
                    print(ET.tostring(result).decode(), file=report_file)
                if hasattr(result, 'stdout') and result.stdout:
                    print(result.stdout.decode().rstrip(), file=report_file)

        failed_count = len(failed_test_cases)
        passed_count = len(passed_test_cases)
        self.assertEqual(passed_count + failed_count, all_count)
        _LOG.warning(
            '%s test case pass rate is %i of %i = %f', scenario_name, passed_count,
            all_count, passed_count / (passed_count + failed_count))
        _LOG.info('failed %s test cases (%i): %s', scenario_name, failed_count, failed_test_cases)
        _LOG.debug('passed %s test cases (%i): %s', scenario_name, passed_count, passed_test_cases)
        if new_failed_cases:
            _LOG.warning(
                'new failed %s test cases (%i): %s', scenario_name, len(new_failed_cases),
                new_failed_cases)
        if new_passed_cases:
            _LOG.warning(
                'new passed %s test cases (%i): %s', scenario_name, len(new_passed_cases),
                new_passed_cases)
        self.assertLessEqual(failed_count, all_count - minimum_passed_cases, msg=failed_test_cases)
        self.assertGreaterEqual(passed_count, minimum_passed_cases, msg=passed_test_cases)

        return passed_test_cases, new_passed_cases, failed_test_cases, new_failed_cases
199 |
200 |
class Tests(TestsBase):
    """Compatibility tests driven by cases from the Open Fortran Parser test suite."""

    maxDiff = None

    def test_comments(self):
        """Comments in fixed- and free-form sources end up attached to the right nodes."""
        for suffix in ('.f', '.f90'):
            input_path = pathlib.Path(_HERE, 'examples', 'comments{}'.format(suffix))
            with self.subTest(input_path=input_path):
                result = parse(input_path, raise_on_error=True)
                all_comments = result.findall('.//comment')
                self.assertEqual(len(all_comments), 12, msg='found {} comments: {} in:\n{}'.format(
                    len(all_comments), [cmnt.attrib['text'] for cmnt in all_comments],
                    ET.tostring(result).decode()))
                _LOG.debug('%s', ET.tostring(result).decode())
                # Each comment's text ends with its expected sequence number.
                for xpath, numbers in [
                        ('./file/comment', (1, 2, 11, 12)),
                        ('./file/program/body/comment', (3, 4, 9, 10)),
                        ('./file/program/body/if/body/comment', (5, 6, 7, 8))]:
                    comments = result.findall(xpath)
                    self.assertEqual(len(comments), len(numbers), msg=(
                        xpath, numbers, [cmnt.attrib['text'] for cmnt in comments]))
                    for comment, number in zip(comments, numbers):
                        self.assertTrue(comment.attrib['text'].endswith(str(number)), msg=(
                            xpath, number, comment.attrib['text']))

    def test_ofp_simple_expressions(self):
        """Cases exercising simple expressions."""
        input_paths = [pathlib.Path(_) for _ in [
            'annex_c/c_5_3_2.f03',
            'annex_c/c_10_2_3.f03',
            'bug-reports/bug-182228.f90',
            'bug-reports/bug-194452.f',
            'bug-reports/bug-1867239.f',
            'bug-reports/bug-3056328.f90',
            'bug-reports/bug-3076096.f90',
            'f08-tests/R810-F08.f08',
            'rule-f08-tests/R711.f90',
            'rule-tests/R706.f03',
            'rule-tests/R802.f03',
            'rule-tests/R826.f03']]
        self.check_cases(input_paths)

    def test_ofp_unary_expressions(self):
        """Cases exercising unary expressions."""
        input_paths = [pathlib.Path(_) for _ in [
            'annex_c/c_3_2_0.f03',
            'annex_c/c_9_6_2.f03',
            'bug-reports/bug-3040730.f90',
            'bug-reports/bug-3056309.f90',
            'rule-tests/R1220.f90',
            'rule-tests/R1222.f90',
            'rule-tests/R1223.f90']]
        self.check_cases(input_paths)

    def test_ofp_expressions(self):
        """Cases exercising general expressions."""
        input_paths = [pathlib.Path(_) for _ in [
            'bug-reports/bug-196993.f90',
            'bug-reports/bug-3040730.f90',
            'bug-reports/bug-3313167.f90',
            'LOPe/multigrid.f90',
            'rule-tests/R714.f03',
            'rule-tests/R716.f03']]
        self.check_cases(input_paths)

    def test_ofp_if(self):
        """Cases exercising if constructs."""
        input_paths = [pathlib.Path(_) for _ in [
            'rule-tests/R802.f03']]
        self.check_cases(input_paths)

    def test_ofp_declaration_attributes(self):
        """Cases exercising declaration attributes."""
        input_paths = [pathlib.Path(_) for _ in [
            'rule-tests/R503.f03']]
        self.check_cases(input_paths)

    def test_ofp_dimensions(self):
        """Cases exercising array dimension declarations."""
        input_paths = [pathlib.Path(_) for _ in [
            'rule-tests/R510.f03']]
        self.check_cases(input_paths)

    def test_ofp_allocate_deallocate(self):
        """Cases exercising allocate/deallocate statements."""
        input_paths = [pathlib.Path(_) for _ in [
            'rule-tests/R635.f03',
            'rule-tests/R636.f03']]
        self.check_cases(input_paths)

    def test_ofp_do(self):
        """Cases exercising do loops."""
        input_paths = [pathlib.Path(_) for _ in [
            'annex_c/c_5_3_7.f03',
            'annex_c/c_8_3_7.f03',
            'bug-reports/bug-1874171.f',
            'rule-tests/R826.f03',
            'rule-tests/R835.f03',
            'rule-tests/R843.f03']]
        self.check_cases(input_paths)

    def test_ofp_do_concurr_and_forall(self):
        """Cases exercising do concurrent and forall constructs."""
        input_paths = [pathlib.Path(_) for _ in [
            'annex_c/c_4_5.f03',
            'annex_c/c_4_6.f03',
            'bug-reports/bug-3076097.f90',
            'f08-tests/R818-F08.f08',
            'LOPe/multigrid.f90',
            'rule-tests/R755.f03',
            'rule-tests/R917.f03']]
        self.check_cases(input_paths)

    def test_ofp_implied_do(self):
        """Cases exercising implied-do constructs."""
        input_paths = [pathlib.Path(_) for _ in [
            'bug-reports/bug-1759956.f90']]
        self.check_cases(input_paths)

    def test_ofp_stop(self):
        """Cases exercising stop statements (run through the reporting variant)."""
        input_paths = [pathlib.Path(_) for _ in [
            'bug-reports/rose/bug-3385969.f90',
            'f08-tests/R856-F08.f08',
            'rule-tests/R849.f03']]
        self._check_cases(input_paths)

    def test_ofp_associate(self):
        """Cases exercising associate constructs."""
        input_paths = [pathlib.Path(_) for _ in [
            'rule-tests/R817.f03']]
        self.check_cases(input_paths)

    def test_ofp_module_contains(self):
        """Cases exercising modules with contains sections."""
        input_paths = [pathlib.Path(_) for _ in [
            'bug-reports/bug-3053141.f90',
            'rule-f08-tests/R1104.f90',
            'f08-tests/R1237-F08.f08',
            'rule-f08-tests/R1101.f90',
            'rule-tests/R455.f90',
            'rule-tests/R1104.f90']]
        self.check_cases(input_paths)

    def test_ofp_select_case(self):
        """Cases exercising select case constructs."""
        input_paths = [pathlib.Path(_) for _ in [
            'rule-tests/R808.f03',
            'rule-tests/R814.f03']]
        self.check_cases(input_paths)

    def test_ofp_interface(self):
        """Cases exercising interface blocks."""
        input_paths = [pathlib.Path(_) for _ in [
            'annex_c/c_8_3_7.f03',
            'rule-f08-tests/R1207.f90',
            'rule-tests/R310.f03',
            'rule-tests/R311.f03',
            'rule-tests/R1207.f90']]
        self.check_cases(input_paths)

    def test_module_nature(self):
        """Cases exercising use statements with module nature."""
        input_paths = [pathlib.Path(_) for _ in [
            'annex_c/c_10_2_1.f03',
            'annex_c/c_10_2_2.f03',
            'annex_c/c_10_2_3.f03',
            'annex_c/c_10_2_4.f03',
            'rule-f08-tests/R1109.f90',
            'rule-f08-tests/R1228.f90']]
        self.check_cases(input_paths)

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    def test_ofp_all_cases(self):
        """Run the entire OFP test suite, requiring at least 398 passing cases."""
        tests_absolute_path = _OFP_TESTS_DIR.resolve()
        failure_reports_path = _HERE.joinpath('results', 'compatibility', 'failure')
        success_reports_path = _HERE.joinpath('results', 'compatibility', 'success')

        self.check_cases_and_report(
            'OFP', failure_reports_path, success_reports_path, tests_absolute_path,
            ALL_OFP_TEST_PATHS, 398)
366 |
--------------------------------------------------------------------------------
/test/test_dependencies.py:
--------------------------------------------------------------------------------
1 | """Tests for dependencies downloader scripts."""
2 |
3 | import os
4 | import pathlib
5 | import tempfile
6 | import unittest
7 |
8 | from open_fortran_parser.config import DEV_DEPENDENCIES
9 | from open_fortran_parser.dependencies import ensure_dependencies, cleanup_old_dependencies
10 |
# Single small dependency used as a representative download target.
EXAMPLE_DEPENDENCY = 'Apache Commons CLI 1.4'

# Restrict the tests to just that one entry of the development dependencies.
TESTED_DEPENDENCIES = {EXAMPLE_DEPENDENCY: DEV_DEPENDENCIES[EXAMPLE_DEPENDENCY]}
14 |
15 |
class Tests(unittest.TestCase):
    """Checks for downloading and cleaning up the project's external dependencies."""

    @unittest.skipUnless(os.environ.get('TEST_DEPENDENCIES'), 'skipping dependency test')
    def test_deps(self):
        """Dependencies appear only when downloading is enabled; reruns are harmless."""
        with tempfile.TemporaryDirectory() as temp_dir:
            target = pathlib.Path(temp_dir)
            self.assertEqual(len(os.listdir(temp_dir)), 0)
            ensure_dependencies(TESTED_DEPENDENCIES, target, download=False)
            # Download disabled, so the directory must remain empty.
            self.assertEqual(len(os.listdir(temp_dir)), 0)
            ensure_dependencies(TESTED_DEPENDENCIES, target, silent=False)
            self.assertGreater(len(os.listdir(temp_dir)), 0)
            # A second run over already-present files must not fail.
            ensure_dependencies(TESTED_DEPENDENCIES, target, silent=False)
        with tempfile.TemporaryDirectory() as temp_dir:
            # The target directory gets recreated when it does not exist.
            os.rmdir(temp_dir)
            ensure_dependencies(TESTED_DEPENDENCIES, pathlib.Path(temp_dir), silent=True)
            self.assertGreater(len(os.listdir(temp_dir)), 0)

    @unittest.skipUnless(os.environ.get('TEST_DEPENDENCIES'), 'skipping dependency test')
    def test_cleanup_deps(self):
        """Old dependency files are removed, optionally moved into a backup directory."""
        outdated = {name: data[1] for name, data in TESTED_DEPENDENCIES.items()}
        with tempfile.TemporaryDirectory() as temp_dir:
            deps_dir = pathlib.Path(temp_dir)
            ensure_dependencies(TESTED_DEPENDENCIES, deps_dir, silent=True)
            self.assertGreater(len(os.listdir(temp_dir)), 0)
            cleanup_old_dependencies(outdated, deps_dir)
            self.assertEqual(len(os.listdir(temp_dir)), 0)
            # Cleaning an already-clean directory must not fail either.
            cleanup_old_dependencies(outdated, deps_dir)
        with tempfile.TemporaryDirectory() as temp_dir:
            ensure_dependencies(TESTED_DEPENDENCIES, pathlib.Path(temp_dir), silent=True)
            count = len(os.listdir(temp_dir))
            self.assertGreater(count, 0)
            with tempfile.TemporaryDirectory() as temp_backup_dir:
                # The backup directory is created by the cleanup call itself.
                os.rmdir(temp_backup_dir)
                cleanup_old_dependencies(outdated, pathlib.Path(temp_dir),
                                         pathlib.Path(temp_backup_dir))
                self.assertEqual(len(os.listdir(temp_backup_dir)), count)
                self.assertEqual(len(os.listdir(temp_dir)), 0)
51 |
--------------------------------------------------------------------------------
/test/test_ofc.py:
--------------------------------------------------------------------------------
1 | """Tests for ofc_wrapper module."""
2 |
3 | import logging
4 | import pathlib
5 | import platform
6 | import tempfile
7 | import unittest
8 |
9 | from open_fortran_parser.ofc_wrapper import CodeForm, execute_compiler, transpile
10 |
_LOG = logging.getLogger(__name__)

# Directory containing this test module.
_HERE = pathlib.Path(__file__).resolve().parent

# Inputs and the indent/form option combinations exercised by every test.
INPUT_PATHS = [_HERE.joinpath('examples', _) for _ in ['empty.f']]
INDENTS = (None, 2, 4, 8)
FORMS = (None, CodeForm.Fixed, CodeForm.Free)
18 |
19 |
class Tests(unittest.TestCase):
    """Tests of the OFC (Open Fortran Compiler) wrapper functions."""

    maxDiff = None

    @unittest.skipIf(platform.system() == 'Windows', 'OFC not available on Windows')
    def test_execute_compiler(self):
        """Compiler exits with code 0 for every input/indent/form/output combination."""
        for input_path in INPUT_PATHS:
            for indent in INDENTS:
                for form in FORMS:
                    scratch = tempfile.NamedTemporaryFile(delete=False)
                    scratch_path = pathlib.Path(scratch.name)
                    # Exercise both stdout output and output to a file.
                    for output_path in (None, scratch_path):
                        with self.subTest(input_path=input_path, output_path=output_path,
                                          indent=indent, form=form):
                            result = execute_compiler(input_path, output_path, indent, form)
                            self.assertEqual(result.returncode, 0, msg=result)

    @unittest.skipIf(platform.system() == 'Windows', 'OFC not available on Windows')
    def test_transpile(self):
        """Transpilation yields a non-empty string for every option combination."""
        for input_path in INPUT_PATHS:
            for indent in INDENTS:
                for form in FORMS:
                    with self.subTest(input_path=input_path, indent=indent, form=form):
                        transpiled = transpile(input_path, indent, form, raise_on_error=True)
                        self.assertIsNotNone(transpiled)
                        self.assertIsInstance(transpiled, str)
                        self.assertGreater(len(transpiled), 0)
47 |
--------------------------------------------------------------------------------
/test/test_parser_wrapper.py:
--------------------------------------------------------------------------------
1 | """Tests for parser_wrapper module."""
2 |
3 | import itertools
4 | import logging
5 | import os
6 | import pathlib
7 | import tempfile
8 | import unittest
9 | import xml.etree.ElementTree as ET
10 |
11 | from open_fortran_parser.config import JAVA as java_config
12 | from open_fortran_parser.parser_wrapper import execute_parser, parse
13 | from .test_setup import run_program
14 |
_LOG = logging.getLogger(__name__)

# Directory containing this test module.
_HERE = pathlib.Path(__file__).resolve().parent

# Minimal valid input used where a single file suffices.
INPUT_PATH = _HERE.joinpath('examples', 'empty.f')

# Every example file, plus the verbosity levels to exercise.
INPUT_PATHS = list(pathlib.Path(_HERE, 'examples').glob('**/*.*'))
INPUT_PATHS_LARGE = list(pathlib.Path(_HERE, 'examples_large').glob('**/*.*'))
VERBOSITIES = (0, 20, 80, 100)

# Reduced selections for quicker tests.
SOME_INPUT_PATHS = [_HERE.joinpath('examples', _) for _ in ('comments.f', 'strings.f90')]
SOME_INPUT_PATHS_LARGE = [_HERE.joinpath('examples_large', 'ORCHIDEE_grid.f90')]
SOME_VERBOSITIES = (0, 100)
28 |
29 |
class Tests(unittest.TestCase):
    """Tests of the Java parser wrapper: CLI invocation, XML generation and parsing.

    Fix: test_unset_config previously restored the shared java_config only on
    the success path; if execute_parser raised, the classpath stayed unset for
    every subsequently-run test. Restoration now happens in a finally block.
    """

    maxDiff = None

    def test_config_variants(self):
        """Exercise the parser command line with valid and deliberately invalid options."""
        executable = java_config['executable']
        options = java_config['options']
        ofp_class = java_config['ofp_class']
        ofp_xml_class = java_config['ofp_xml_class']
        for classpath, verbosity, invalid_options in itertools.product(
                (None, java_config['classpath']), (0, 100, 'invalid'),
                ([], ['--invalid', 'option'], ['--help', 'true'])):
            output_file = tempfile.NamedTemporaryFile(delete=False)
            output_file_name = pathlib.Path(output_file.name)
            output_file.close()
            for output_path in (None, output_file_name):
                with self.subTest(classpath=classpath, verbosity=verbosity,
                                  invalid_options=invalid_options, output_path=output_path):
                    # Assemble the java command line piece by piece.
                    command = [str(executable)]
                    if classpath is not None:
                        command += ['-cp', str(classpath)]
                    if options is not None:
                        command += options
                    command.append(ofp_class)
                    command += ['--class', ofp_xml_class, '--verbosity', str(verbosity)]
                    if output_path is not None:
                        command += ['--output', str(output_path)]
                    command += invalid_options
                    command.append(str(INPUT_PATH))

                    # Invalid inputs must make run_program raise; valid ones must succeed.
                    if invalid_options or verbosity == 'invalid' \
                            or classpath is None and 'CLASSPATH' not in os.environ:
                        with self.assertRaises(AssertionError):
                            run_program(*command)
                        continue
                    run_program(*command)
            output_file_name.unlink()

    def test_unset_config(self):
        """With classpath unset, the wrapper must still work via the CLASSPATH env var."""
        classpath = java_config['classpath']
        options = java_config['options']
        try:
            java_config['classpath'] = None
            with tempfile.NamedTemporaryFile() as output_file:
                execute_parser(INPUT_PATH, pathlib.Path(output_file.name))
            self.assertIsNone(java_config['classpath'])
        finally:
            # Restore the shared configuration even if the parser call fails,
            # so later tests see an unmodified config.
            java_config['classpath'] = classpath
            java_config['options'] = options

    def test_execute_parser_stdout(self):
        """Parser output written to stdout is well-formed XML with the expected root."""
        for input_path in SOME_INPUT_PATHS:
            for verbosity in SOME_VERBOSITIES:
                with self.subTest(input_path=input_path, verbosity=verbosity):
                    process = execute_parser(input_path, None, verbosity)
                    self.assertEqual(process.returncode, 0)
                    fortran_ast = ET.fromstring(process.stdout)
                    self._validate_tree(fortran_ast)

    def test_generate_xml(self):
        """XML output files are produced for every example at every verbosity."""
        results_path = pathlib.Path(_HERE, 'results', 'examples')
        results_path.mkdir(exist_ok=True)
        for input_path in INPUT_PATHS:
            for verbosity in VERBOSITIES:
                with self.subTest(input_path=input_path, verbosity=verbosity):
                    output_path = results_path.joinpath(
                        '{}.{}.xml'.format(input_path.name, verbosity))
                    process = execute_parser(input_path, output_path, verbosity)
                    self.assertEqual(process.returncode, 0, process)
                    self.assertTrue(output_path.exists())

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    def test_generate_xml_large(self):
        """As test_generate_xml, but for the large example files."""
        results_path = pathlib.Path(_HERE, 'results', 'examples')
        results_path.mkdir(exist_ok=True)
        for input_path in INPUT_PATHS_LARGE:
            for verbosity in VERBOSITIES:
                with self.subTest(input_path=input_path, verbosity=verbosity):
                    output_path = results_path.joinpath(
                        '{}.{}.xml'.format(input_path.name, verbosity))
                    process = execute_parser(input_path, output_path, verbosity)
                    self.assertEqual(process.returncode, 0)
                    self.assertTrue(output_path.exists())

    def _validate_tree(self, tree):
        """Assert that the AST has the expected <ofp><file>...</file></ofp> shape."""
        self.assertIsNotNone(tree)
        self.assertIsInstance(tree, ET.Element)
        self.assertEqual(tree.tag, 'ofp')
        self.assertEqual(len(tree), 1)
        file_node = tree[0]
        self.assertEqual(file_node.tag, 'file')
        self.assertGreater(len(file_node), 0)

    def test_parse(self):
        """parse() returns a valid tree for a small selection of examples."""
        for input_path in SOME_INPUT_PATHS:
            for verbosity in SOME_VERBOSITIES:
                with self.subTest(input_path=input_path, verbosity=verbosity):
                    root_node = parse(input_path, verbosity)
                    self._validate_tree(root_node)

    @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test')
    def test_parse_large(self):
        """As test_parse, but for the large example files."""
        for input_path in SOME_INPUT_PATHS_LARGE:
            for verbosity in SOME_VERBOSITIES:
                with self.subTest(input_path=input_path, verbosity=verbosity):
                    root_node = parse(input_path, verbosity)
                    self._validate_tree(root_node)
137 |
--------------------------------------------------------------------------------
/test/test_script.py:
--------------------------------------------------------------------------------
1 | """Tests for main script."""
2 |
3 | import contextlib
4 | import io
5 | import os
6 | import pathlib
7 | import tempfile
8 | import unittest
9 |
10 | from open_fortran_parser.config import DEV_DEPENDENCIES_PATH, DEPENDENCIES_PATH
11 | from .test_setup import run_module
12 |
13 | _HERE = pathlib.Path(__file__).resolve().parent
14 |
15 | INPUT_PATH = _HERE.joinpath('examples', 'empty.f')
16 |
17 |
def normalize_newlines(text: str) -> str:
    """Return *text* with CRLF and lone CR line endings converted to LF."""
    for ending in ('\r\n', '\r'):
        text = text.replace(ending, '\n')
    return text
20 |
21 |
class Tests(unittest.TestCase):
    """Exercise the package's command-line entry point end to end."""

    maxDiff = None

    def test_run_not_main(self):
        """Running under a non-main run name must produce no stderr output."""
        captured = io.StringIO()
        with contextlib.redirect_stderr(captured):
            run_module('open_fortran_parser', 'some', 'bad', 'args', run_name='not_main')
        self.assertEqual(len(captured.getvalue()), 0)

    def test_help(self):
        """With no arguments the script exits and prints usage information."""
        captured = io.StringIO()
        with contextlib.redirect_stderr(captured):
            with self.assertRaises(SystemExit):
                run_module('open_fortran_parser')
        stderr_text = captured.getvalue()
        self.assertIn('usage', stderr_text)
        self.assertIn('open_fortran_parser', stderr_text)

    def test_check_deps_flag(self):
        """--check-deps writes a report to stderr; dependencies dir is non-empty."""
        captured = io.StringIO()
        with contextlib.redirect_stderr(captured):
            run_module('open_fortran_parser', '--check-deps')
        self.assertGreater(len(captured.getvalue()), 0)
        self.assertGreater(len(os.listdir(str(DEPENDENCIES_PATH))), 0)

    @unittest.skipUnless(os.environ.get('TEST_DEPENDENCIES'), 'skipping dependency test')
    def test_development_flags(self):
        """--deps followed by --cleanup-deps leaves the dev dependencies dir populated."""
        run_module('open_fortran_parser', '--deps')
        run_module('open_fortran_parser', '--cleanup-deps')
        self.assertGreater(len(os.listdir(str(DEV_DEPENDENCIES_PATH))), 0)

    def test_verbosity_flag(self):
        """Every accepted -v level still produces output on stdout."""
        for verbosity in (0, 20, 40, 60, 80, 100):
            captured = io.StringIO()
            with contextlib.redirect_stdout(captured):
                run_module('open_fortran_parser', '-v', str(verbosity), str(INPUT_PATH))
            self.assertGreater(len(captured.getvalue()), 0)

    def test_output_flag(self):
        """Output written to a file matches output written to stdout."""
        target = tempfile.NamedTemporaryFile(delete=False)
        target.close()
        captured = io.StringIO()
        with contextlib.redirect_stdout(captured):
            run_module('open_fortran_parser', str(INPUT_PATH))
        run_module('open_fortran_parser', str(INPUT_PATH), target.name)
        with open(target.name) as written:
            self.assertEqual(normalize_newlines(captured.getvalue()),
                             normalize_newlines(written.read()))
        os.remove(target.name)
73 |
--------------------------------------------------------------------------------
/test/test_setup.py:
--------------------------------------------------------------------------------
1 | """Tests for setup scripts."""
2 |
3 | import importlib
4 | import itertools
5 | import os
6 | import pathlib
7 | import runpy
8 | import subprocess
9 | import sys
10 | import tempfile
11 | import types
12 | import typing as t
13 | import unittest
14 |
15 | __updated__ = '2019-06-04'
16 |
17 |
def run_program(*args, glob: bool = False):
    """Run subprocess with given args. Use path globbing for each arg that contains an asterisk.

    Globbed arguments are expanded into paths relative to the current working
    directory. Raises AssertionError on a nonzero exit status; otherwise
    returns the finished process object.
    """
    if glob:
        cwd = pathlib.Path.cwd()
        expanded = []
        for arg in args:
            if '*' in arg:
                expanded.extend(str(match.relative_to(cwd)) for match in cwd.glob(arg))
            else:
                expanded.append(arg)
        args = tuple(expanded)
    process = subprocess.Popen(args)
    process.wait()
    if process.returncode != 0:
        raise AssertionError('execution of {} returned {}'.format(args, process.returncode))
    return process
30 |
31 |
def run_pip(*args, **kwargs):
    """Run the pip executable that matches the current interpreter.

    Derives the pip name from the interpreter executable's name, so e.g.
    running under ``python3`` invokes ``pip3``.
    """
    interpreter_name = pathlib.Path(sys.executable).name
    run_program(interpreter_name.replace('python', 'pip'), *args, **kwargs)
36 |
37 |
def run_module(name: str, *args, run_name: str = '__main__') -> None:
    """Execute a module as a script with the given command-line arguments.

    Temporarily replaces sys.argv so the module sees *args* as its own
    command line, then runs it via runpy under *run_name*.

    Propagates whatever the executed module raises (e.g. SystemExit).
    """
    backup_sys_argv = sys.argv
    sys.argv = [name + '.py'] + list(args)
    try:
        runpy.run_module(name, run_name=run_name)
    finally:
        # restore sys.argv even when the module raises (e.g. SystemExit from
        # argparse) -- otherwise later callers would see a stale argv
        sys.argv = backup_sys_argv
43 |
44 |
def import_module(name: str = 'setup') -> types.ModuleType:
    """Import and return the module with the given name."""
    return importlib.import_module(name)


def import_module_member(module_name: str, member_name: str) -> t.Any:
    """Import a module and return a single attribute from it."""
    return getattr(import_module(module_name), member_name)
53 |
54 |
# Sample license classifiers used to build test variants.
CLASSIFIERS_LICENSES = (
    'License :: OSI Approved :: Python License (CNRI Python License)',
    'License :: OSI Approved :: Python Software Foundation License',
    'License :: Other/Proprietary License',
    'License :: Public Domain')

# Sample Python version classifiers, including the "X :: Only" variants.
CLASSIFIERS_PYTHON_VERSIONS = tuple("""Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 2 :: Only
Programming Language :: Python :: 3
Programming Language :: Python :: 3.0
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3 :: Only""".splitlines())

# Sample Python implementation classifiers.
CLASSIFIERS_PYTHON_IMPLEMENTATIONS = tuple("""Programming Language :: Python :: Implementation
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: Jython
Programming Language :: Python :: Implementation :: PyPy
Programming Language :: Python :: Implementation :: Stackless""".splitlines())

# Miscellaneous framework/topic classifiers.
CLASSIFIERS_VARIOUS = (
    'Framework :: IPython',
    'Topic :: Scientific/Engineering',
    'Topic :: Sociology',
    'Topic :: Security :: Cryptography',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'Topic :: Software Development :: Version Control :: Git',
    'Topic :: System',
    'Topic :: Utilities')

# Each license as a 1-tuple, plus one empty tuple for the "no license" case.
CLASSIFIERS_LICENSES_TUPLES = tuple((_,) for _ in CLASSIFIERS_LICENSES) + ((),)

# Each version classifier wrapped as a 1-tuple.
CLASSIFIERS_PYTHON_VERSIONS_COMBINATIONS = tuple((_,) for _ in CLASSIFIERS_PYTHON_VERSIONS)

# Each implementation classifier wrapped as a 1-tuple.
CLASSIFIERS_PYTHON_IMPLEMENTATIONS_TUPLES = tuple((_,) for _ in CLASSIFIERS_PYTHON_IMPLEMENTATIONS)

# All leave-one-out subsets of CLASSIFIERS_VARIOUS, plus the full tuple.
CLASSIFIERS_VARIOUS_COMBINATIONS = tuple(itertools.combinations(
    CLASSIFIERS_VARIOUS, len(CLASSIFIERS_VARIOUS) - 1)) + (CLASSIFIERS_VARIOUS,)

# Cartesian product of the four groups above: every classifier variant
# exercised by the version-requirement tests.
ALL_CLASSIFIERS_VARIANTS = [
    licenses + versions + implementations + various
    for licenses in CLASSIFIERS_LICENSES_TUPLES
    for versions in CLASSIFIERS_PYTHON_VERSIONS_COMBINATIONS
    for implementations in CLASSIFIERS_PYTHON_IMPLEMENTATIONS_TUPLES
    for various in CLASSIFIERS_VARIOUS_COMBINATIONS]

# (link name, link target, expected-to-be-rewritten) triples for the readme
# link tests: per test_parse_readme, only repo-relative file links (True)
# get absolutized, while URLs and out-of-repo paths (False) are left alone.
LINK_EXAMPLES = [
    (None, 'setup.py', True), ('this file', 'setup.py', True), (None, 'test/test_setup.py', True),
    (None, 'http://site.com', False), (None, '../something/else', False), (None, 'no.thing', False),
    (None, '/my/abs/path', False)]
107 |
108 |
def get_package_folder_name():
    """Attempt to guess the built package name.

    Looks for the single directory under the current working directory that
    contains an ``__init__.py`` and is not the test folder.
    """
    cwd = pathlib.Path.cwd()
    candidates = []
    for path in cwd.iterdir():
        if not pathlib.Path(cwd, path).is_dir():
            continue
        if not pathlib.Path(cwd, path, '__init__.py').is_file():
            continue
        if path.name == 'test':
            continue
        candidates.append(path)
    assert len(candidates) == 1, candidates
    return candidates[0].name
117 |
118 |
class UnitTests(unittest.TestCase):
    """Test basic functionalities of the setup boilerplate."""

    def test_find_version(self):
        """find_version returns a version string for the package folder."""
        find_version = import_module_member('setup_boilerplate', 'find_version')
        version = find_version(get_package_folder_name())
        self.assertIsInstance(version, str)

    def test_find_packages(self):
        """find_packages returns a list of package name strings."""
        find_packages = import_module_member('setup_boilerplate', 'find_packages')
        packages = find_packages()
        self.assertIsInstance(packages, list)
        for package in packages:
            self.assertIsInstance(package, str)

    def test_requirements(self):
        """parse_requirements returns a list of requirement strings."""
        parse_requirements = import_module_member('setup_boilerplate', 'parse_requirements')
        requirements = parse_requirements()
        self.assertIsInstance(requirements, list)
        self.assertTrue(
            all(isinstance(requirement, str) for requirement in requirements),
            msg=requirements)

    def test_requirements_empty(self):
        """An empty requirements file yields an empty list."""
        parse_requirements = import_module_member('setup_boilerplate', 'parse_requirements')
        empty_file = tempfile.NamedTemporaryFile('w', delete=False)
        empty_file.close()
        requirements = parse_requirements(empty_file.name)
        self.assertIsInstance(requirements, list)
        self.assertEqual(len(requirements), 0)
        os.remove(empty_file.name)

    def test_requirements_comments(self):
        """Comment and blank lines are dropped, real requirements kept."""
        parse_requirements = import_module_member('setup_boilerplate', 'parse_requirements')
        lines = ['# comment', 'numpy', '', '# another comment', 'scipy', '', '# one more comment']
        reqs_file = tempfile.NamedTemporaryFile('w', delete=False)
        for line in lines:
            print(line, file=reqs_file)
        reqs_file.close()
        requirements = parse_requirements(reqs_file.name)
        self.assertIsInstance(requirements, list)
        self.assertGreater(len(requirements), 0)
        self.assertLess(len(requirements), len(lines))
        os.remove(reqs_file.name)

    def test_python_versions(self):
        """Every generated classifier variant produces a str or None."""
        find_py_req = import_module_member(
            'setup_boilerplate', 'find_required_python_version')
        for variant in ALL_CLASSIFIERS_VARIANTS:
            with self.subTest(variant=variant):
                requirement = find_py_req(variant)
                if requirement is not None:
                    self.assertIsInstance(requirement, str)

    def test_python_versions_combined(self):
        """A major-only classifier combined with a minor one keeps the minor bound."""
        find_py_req = import_module_member(
            'setup_boilerplate', 'find_required_python_version')
        classifiers = [
            'Programming Language :: Python :: 3 :: Only',
            'Programming Language :: Python :: 3.5']
        self.assertEqual(find_py_req(classifiers), '>=3.5')

    def test_python_versions_reversed(self):
        """The lowest listed version wins regardless of classifier order."""
        find_py_req = import_module_member(
            'setup_boilerplate', 'find_required_python_version')
        classifiers = [
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.6']
        self.assertEqual(find_py_req(classifiers), '>=3.4')
        self.assertEqual(find_py_req(reversed(classifiers)), '>=3.4')

    def test_python_versions_none(self):
        """No version classifiers means no version requirement."""
        find_py_req = import_module_member(
            'setup_boilerplate', 'find_required_python_version')
        self.assertIsNone(find_py_req([]))

    def test_python_versions_many_only(self):
        """Multiple 'X :: Only' classifiers are contradictory."""
        find_py_req = import_module_member(
            'setup_boilerplate', 'find_required_python_version')
        only_classifiers = [
            'Programming Language :: Python :: 2 :: Only',
            'Programming Language :: Python :: 3 :: Only']
        with self.assertRaises(ValueError):
            find_py_req(only_classifiers)

    def test_python_versions_conflict(self):
        """A version classifier conflicting with an 'Only' classifier raises."""
        find_py_req = import_module_member(
            'setup_boilerplate', 'find_required_python_version')
        conflicting_variants = [
            ['Programming Language :: Python :: 2.7',
             'Programming Language :: Python :: 3 :: Only'],
            ['Programming Language :: Python :: 2 :: Only',
             'Programming Language :: Python :: 3.0']]
        for classifiers in conflicting_variants:
            with self.assertRaises(ValueError):
                find_py_req(classifiers)
218 |
219 |
class PackageTests(unittest.TestCase):

    """Test methods of Package class."""

    def test_try_fields(self):
        base = import_module_member('setup_boilerplate', 'Package')

        class Package(base):  # pylint: disable=too-few-public-methods
            name = 'package name'
            description = 'package description'
        # the first existing field wins
        self.assertEqual(Package.try_fields('name', 'description'), 'package name')
        self.assertEqual(Package.try_fields('bad_field', 'description'), 'package description')
        # no candidates at all, or only missing ones, is an error
        with self.assertRaises(AttributeError):
            self.assertIsNone(Package.try_fields())
        with self.assertRaises(AttributeError):
            Package.try_fields('bad_field', 'another_bad_field')

    def test_parse_readme(self):
        base = import_module_member('setup_boilerplate', 'Package')

        class Package(base):  # pylint: disable=too-few-public-methods
            name = 'package name'
            description = 'package description'
            version = '1.2.3.4'
            url = 'https://github.com/example'

        # a markdown readme parses to (text, content type) strings
        with tempfile.NamedTemporaryFile('w', suffix='.md', delete=False) as temp_file:
            temp_file.write('test test test')
        result, content_type = Package.parse_readme(temp_file.name)
        os.remove(temp_file.name)
        self.assertIsInstance(result, str)
        self.assertIsInstance(content_type, str)

        # reST readmes: repo-relative links get absolutized against url/version
        prefix = 'https://github.com/example/blob/v1.2.3.4/'
        for name, link, done in LINK_EXAMPLES:
            name = '' if name is None else name + ' '
            text = 'Please see `{}<{}>`_ for details.'.format(name, link)
            with tempfile.NamedTemporaryFile('w', suffix='.rst', delete=False) as temp_file:
                temp_file.write(text)
            result, content_type = Package.parse_readme(temp_file.name)
            os.remove(temp_file.name)
            self.assertIsInstance(result, str)
            self.assertIsInstance(content_type, str)
            if not done:
                # link was not rewritable -- text must be unchanged
                self.assertEqual(result, text)
                continue
            if name == '':
                name = link + ' '
            self.assertIn('`{}<{}{}>`_'.format(name, prefix, link), result)

    def test_prepare(self):
        base = import_module_member('setup_boilerplate', 'Package')

        version_ = '1.2.3.4.5.6.7'
        long_description_ = 'long package description'

        class Package(base):  # pylint: disable=too-few-public-methods, missing-docstring
            name = 'package name'
            version = version_
            description = 'package description'
            long_description = long_description_
            packages = []
            install_requires = []
            python_requires = ''

        # prepare() must not clobber fields that are already set
        self.assertEqual(Package.version, version_)
        self.assertEqual(Package.long_description, long_description_)
        Package.prepare()
        self.assertEqual(Package.version, version_)
        self.assertEqual(Package.long_description, long_description_)

        # unset fields are filled in automatically
        Package.long_description = None
        Package.packages = None
        Package.install_requires = None
        Package.python_requires = None
        Package.prepare()

        # ... but a missing version requires metadata that is absent here
        Package.version = None
        with self.assertRaises(FileNotFoundError):
            Package.prepare()
300 |
301 |
@unittest.skipUnless(os.environ.get('TEST_PACKAGING') or os.environ.get('CI'),
                     'skipping packaging tests for actual package')
class IntergrationTests(unittest.TestCase):

    """Test if the boilerplate can actually create a valid package."""

    # guessed from the folder layout of the current working directory
    pkg_name = get_package_folder_name()

    def test_build_binary(self):
        """'setup.py bdist' produces a dist directory."""
        run_module('setup', 'bdist')
        self.assertTrue(os.path.isdir('dist'))

    def test_build_wheel(self):
        """'setup.py bdist_wheel' produces a dist directory."""
        run_module('setup', 'bdist_wheel')
        self.assertTrue(os.path.isdir('dist'))

    def test_build_source(self):
        """'setup.py sdist' builds both gztar and zip archives."""
        run_module('setup', 'sdist', '--formats=gztar,zip')
        self.assertTrue(os.path.isdir('dist'))

    def test_install_code(self):
        """The raw source tree installs and uninstalls via pip."""
        run_pip('install', '.')
        run_pip('uninstall', '-y', self.pkg_name)

    def test_install_source_tar(self):
        """The built .tar.gz archive installs and uninstalls via pip."""
        find_version = import_module_member('setup_boilerplate', 'find_version')
        pkg_version = find_version(self.pkg_name)
        run_pip('install', 'dist/*-{}.tar.gz'.format(pkg_version), glob=True)
        run_pip('uninstall', '-y', self.pkg_name)

    def test_install_source_zip(self):
        """The built .zip archive installs and uninstalls via pip."""
        find_version = import_module_member('setup_boilerplate', 'find_version')
        pkg_version = find_version(self.pkg_name)
        run_pip('install', 'dist/*-{}.zip'.format(pkg_version), glob=True)
        run_pip('uninstall', '-y', self.pkg_name)

    def test_install_wheel(self):
        """The built wheel installs and uninstalls via pip."""
        find_version = import_module_member('setup_boilerplate', 'find_version')
        pkg_version = find_version(self.pkg_name)
        run_pip('install', 'dist/*-{}-*.whl'.format(pkg_version), glob=True)
        run_pip('uninstall', '-y', self.pkg_name)

    def test_pip_error(self):
        """A bad pip invocation surfaces as AssertionError from run_program."""
        with self.assertRaises(AssertionError):
            run_pip('wrong_pip_command')

    def test_setup_do_nothing_or_error(self):
        """setup.py under a non-main run name is inert; under main it exits."""
        run_module('setup', 'wrong_setup_command', run_name='__not_main__')
        with self.assertRaises(SystemExit):
            run_module('setup', 'wrong_setup_command')
352 |
--------------------------------------------------------------------------------
/test_requirements.txt:
--------------------------------------------------------------------------------
1 | docutils
2 | pip >= 9.0
3 | pygments
4 | setuptools >= 20.5
5 | wheel
6 | -rrequirements.txt
7 |
--------------------------------------------------------------------------------