├── .flake8 ├── .github └── workflows │ └── pythonapp.yml ├── .gitignore ├── .readthedocs.yaml ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── Requirements.txt ├── docs ├── .gitignore ├── Makefile ├── conf.py ├── index.rst ├── make.bat ├── modules.rst ├── pylasu.StrumentaLanguageSupport.rst ├── pylasu.emf.rst ├── pylasu.mapping.rst ├── pylasu.model.rst ├── pylasu.parsing.rst ├── pylasu.playground.rst ├── pylasu.reflection.rst ├── pylasu.rst ├── pylasu.testing.rst ├── pylasu.transformation.rst ├── pylasu.validation.rst └── requirements.txt ├── pylasu ├── StrumentaLanguageSupport │ ├── StrumentaLanguageSupport.py │ └── __init__.py ├── __init__.py ├── astruntime.py ├── emf │ ├── __init__.py │ ├── metamodel_builder.py │ └── model.py ├── mapping │ ├── __init__.py │ └── parse_tree_to_ast_transformer.py ├── model │ ├── __init__.py │ ├── errors.py │ ├── model.py │ ├── naming.py │ ├── position.py │ ├── processing.py │ ├── reflection.py │ └── traversing.py ├── parsing │ ├── __init__.py │ ├── parse_tree.py │ └── results.py ├── playground │ ├── __init__.py │ ├── transpilation_trace.py │ └── transpilation_trace_ecore.py ├── reflection │ ├── __init__.py │ └── reflection.py ├── support.py ├── testing │ ├── __init__.py │ └── testing.py ├── transformation │ ├── __init__.py │ ├── generic_nodes.py │ └── transformation.py └── validation │ ├── __init__.py │ └── validation.py ├── pyproject.toml ├── setup.cfg └── tests ├── .flake8 ├── .gitignore ├── AntlrEntityLexer.g4 ├── AntlrEntityParser.g4 ├── AntlrScriptLexer.g4 ├── AntlrScriptParser.g4 ├── SimpleLangLexer.g4 ├── SimpleLangParser.g4 ├── __init__.py ├── fixtures.py ├── generate-test-parsers.sh ├── mapping ├── __init__.py └── test_parse_tree_to_ast_transformers.py ├── model ├── __init__.py ├── test_model.py └── test_position.py ├── test_metamodel_builder.py ├── test_parse_tree.py ├── test_processing.py ├── test_transpilation_trace.py ├── test_traversing.py └── transformation ├── __init__.py └── test_ast_transformers.py 
/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = .git,__pycache__,bin,lib,pylasu/StrumentaLanguageSupport/*,tests 3 | max-complexity = 10 4 | max-line-length = 120 5 | per-file-ignores = __init__.py:F401 6 | ignore = F811, W503 7 | # F811 = Redefinition of unused function. It doesn't recognize functions that are extension methods with the same name 8 | # in different classes. 9 | # W503 = Line break before binary operator. We have to choose between W503 and W504 as they're incompatible. W504 is 10 | # compliant with the more recent PEP8. 11 | -------------------------------------------------------------------------------- /.github/workflows/pythonapp.yml: -------------------------------------------------------------------------------- 1 | name: Build, lint and test 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | strategy: 8 | matrix: 9 | python: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ] 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Set up Python ${{ matrix.python }} 14 | uses: actions/setup-python@v5 15 | with: 16 | python-version: ${{ matrix.python }} 17 | - name: Setup Java 11 18 | uses: actions/setup-java@v4 19 | with: 20 | distribution: 'adopt' 21 | java-version: 11 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install -r Requirements.txt 26 | curl -O https://www.antlr.org/download/antlr-4.11.1-complete.jar 27 | - name: Lint with flake8 28 | run: | 29 | pip install flake8 30 | # stop the build if there are Python style violations 31 | flake8 . 
--count --show-source --statistics 32 | flake8 tests --config tests/.flake8 --count --show-source --statistics 33 | - name: Generate test parsers 34 | run: ./generate-test-parsers.sh 35 | working-directory: tests 36 | - name: Test with pytest 37 | run: | 38 | pip install pytest pytest-cov pyecore==0.12.2 39 | pytest --cov=pylasu --cov-fail-under=60 tests 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | bin/ 3 | gen/ 4 | lib/ 5 | venv 6 | build 7 | dist 8 | .venv 9 | pyvenv.cfg 10 | *.egg-info 11 | .eggs/ 12 | __pycache__ 13 | /.token 14 | /antlr-*.jar 15 | .DS_Store 16 | .coverage 17 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.11" 12 | 13 | # Build documentation in the docs/ directory with Sphinx 14 | sphinx: 15 | configuration: docs/conf.py 16 | 17 | # Optional but recommended, declare the Python requirements required 18 | # to build your documentation 19 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 20 | python: 21 | install: 22 | - requirements: docs/requirements.txt 23 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project from version 0.4.0 upwards are documented in this file. 3 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
4 | 5 | ## [0.8.1] – 2025-02-21 6 | 7 | ### Added 8 | - More reflection to support PEP-0563 in a Cython environment 9 | 10 | ## [0.8.0] – 2025-02-20 11 | 12 | ### Added 13 | - Support for string-encoded types (PEP-0563) 14 | 15 | ## [0.7.3] – 2025-01-13 16 | 17 | ### Changed 18 | - More type-safe signature for `find_ancestor_of_type` 19 | 20 | ### Fixed 21 | - `provides_nodes` for optional and union types 22 | 23 | ## [0.7.2] – 2024-11-07 24 | 25 | ### Added 26 | - Case-insensitive symbol lookup 27 | 28 | ### Changed 29 | - Improved performance of `Concept.is_node_property` 30 | 31 | ### Fixed 32 | - inheritance of internal properties 33 | 34 | ## [0.7.1] – 2024-05-16 35 | 36 | ### Fixed 37 | - `ParserRuleContext.to_position` extension method when the input stream is empty 38 | 39 | ## [0.7.0] – 2023-11-21 40 | 41 | ### Added 42 | - `Point.isBefore` method as in Kolasu 43 | 44 | ### Fixed 45 | - Bug in the deserialization of Result 46 | 47 | ## [0.6.0] – 2023-10-10 48 | 49 | ### Added 50 | - Support for Python 3.11 and 3.12 51 | - Classes to track issues (ported from Kolasu) 52 | 53 | ### Changed 54 | - Updated ANTLR runtime to 4.11.1 55 | 56 | ### Fixed 57 | - `internal_field` on Python 3.10+ 58 | 59 | ## [0.5.0] – 2023-09-06 60 | 61 | ### Added 62 | - AST transformers, aligned with the latest Kolasu 1.5.x version 63 | - `assert_asts_are_equal` function to support writing assertions in a test suite 64 | - documentation generation (published on https://pylasu.readthedocs.io) 65 | - export more symbols 66 | 67 | ### Changed 68 | - Alignment with Kolasu: 69 | - `PropertyDescriptor` renamed to `PropertyDescription` 70 | - `Node.properties` generates `PropertyDescription` instances rather than tuples 71 | 72 | ### Fixed 73 | - `PossiblyNamed` implementation 74 | - `Concept.node_properties` 75 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 
Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | prune tests 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pylasu – Python Language Support # 2 | 3 | [![Build Status](https://github.com/Strumenta/pylasu/actions/workflows/pythonapp.yml/badge.svg)](https://github.com/Strumenta/pylasu/actions/workflows/pythonapp.yml) 4 | [![PyPI](https://img.shields.io/pypi/v/pylasu.svg)](https://pypi.org/project/pylasu) 5 | [![Documentation](https://readthedocs.org/projects/pylasu/badge/?version=latest&style=flat)](https://pylasu.readthedocs.io) 6 | 7 | Pylasu is an AST Library in the [StarLasu](https://github.com/Strumenta/StarLasu) family, targeting the Python language. [Documentation](https://pylasu.readthedocs.io) is on Read the Docs. 8 | 9 | ## Linting 10 | 11 | ```shell 12 | flake8 . 
&& flake8 tests 13 | ``` 14 | 15 | ## Testing 16 | 17 | ```shell 18 | pytest tests 19 | ``` 20 | 21 | ## Packaging and Distribution 22 | 23 | Update version in `pylasu/__init__.py`, commit, push and check that CI completes normally. 24 | 25 | Let's ensure that we have build and twine installed: 26 | 27 | ```shell 28 | pip install build twine 29 | ``` 30 | 31 | Then, check the project can be released by linting and running the test suite: 32 | 33 | ```shell 34 | flake8 . && flake8 tests --config tests/.flake8 35 | pytest tests 36 | ``` 37 | 38 | Finally, we can run: 39 | 40 | ```shell 41 | rm -f dist/* 42 | python -m build 43 | python -m twine upload dist/* 44 | ``` 45 | 46 | **Note:** if we have [two-factor authentication (2FA)](https://pypi.org/help/#twofa) enabled on PyPI, 47 | we have to [use an API token](https://pypi.org/help/#apitoken). 48 | 49 | If all goes well, tag the release: 50 | 51 | ```shell 52 | git tag -a v0.7.3 -m "Version 0.7.3" 53 | git push origin v0.7.3 54 | ``` 55 | 56 | ### Extracting Documentation 57 | 58 | Here's how to extract the documentation into HTML using Sphinx, the most popular documentation generator for Python. 59 | 60 | First, ensure you have Sphinx and the chosen theme installed: 61 | ```shell 62 | pip install sphinx sphinx_rtd_theme 63 | ``` 64 | 65 | Then, extract the documentation from the source code: 66 | ```shell 67 | sphinx-apidoc -o docs pylasu 68 | ``` 69 | 70 | Finally, change into the docs directory and launch the build process: 71 | ```shell 72 | cd docs 73 | make html 74 | ``` 75 | 76 | If everything goes as it should, in `docs/_build/html` you'll find the generated documentation. 77 | 78 | We also host the documentation on ReadTheDocs. The project is [pylasu](https://readthedocs.org/projects/pylasu/). 79 | Documentation needs to be [built manually](https://readthedocs.org/projects/pylasu/) for each release for it to appear 80 | online on https://pylasu.readthedocs.io. 
81 | -------------------------------------------------------------------------------- /Requirements.txt: -------------------------------------------------------------------------------- 1 | antlr4-python3-runtime==4.11.1 2 | pyecore==0.12.2; extra == 'ecore' 3 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | _build/ 2 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | import os 6 | import sys 7 | sys.path.insert(0, os.path.abspath('..')) 8 | 9 | # -- Project information ----------------------------------------------------- 10 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 11 | 12 | project = 'Pylasu' 13 | copyright = '2023, Strumenta srl' 14 | author = 'Lorenzo Addazi, Alessio Stalla, Federico Tomassetti' 15 | 16 | # -- General configuration --------------------------------------------------- 17 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 18 | 19 | extensions = [ 20 | "sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx.ext.napoleon" 21 | ] 22 | 23 | templates_path = ['_templates'] 24 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 25 | 26 | 27 | # -- Options for HTML output ------------------------------------------------- 28 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 29 | 30 | html_theme = 'sphinx_rtd_theme' 31 | html_static_path = ['_static'] 32 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Pylasu documentation master file, created by 2 | sphinx-quickstart on Tue Jan 10 09:33:01 2023. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Pylasu's documentation! 7 | ================================== 8 | 9 | .. 
toctree:: 10 | :maxdepth: 6 11 | :caption: Contents: 12 | 13 | modules 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | pylasu 2 | ====== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 6 6 | 7 | pylasu 8 | -------------------------------------------------------------------------------- /docs/pylasu.StrumentaLanguageSupport.rst: -------------------------------------------------------------------------------- 1 | pylasu.StrumentaLanguageSupport package 2 | ======================================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.StrumentaLanguageSupport.StrumentaLanguageSupport module 8 | --------------------------------------------------------------- 9 | 10 | .. automodule:: pylasu.StrumentaLanguageSupport.StrumentaLanguageSupport 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: pylasu.StrumentaLanguageSupport 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/pylasu.emf.rst: -------------------------------------------------------------------------------- 1 | pylasu.emf package 2 | ================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.emf.metamodel\_builder module 8 | ------------------------------------ 9 | 10 | .. automodule:: pylasu.emf.metamodel_builder 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pylasu.emf.model module 16 | ----------------------- 17 | 18 | .. automodule:: pylasu.emf.model 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. 
automodule:: pylasu.emf 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/pylasu.mapping.rst: -------------------------------------------------------------------------------- 1 | pylasu.mapping package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.mapping.parse\_tree\_to\_ast\_transformer module 8 | ------------------------------------------------------- 9 | 10 | .. automodule:: pylasu.mapping.parse_tree_to_ast_transformer 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: pylasu.mapping 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/pylasu.model.rst: -------------------------------------------------------------------------------- 1 | pylasu.model package 2 | ==================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.model.errors module 8 | -------------------------- 9 | 10 | .. automodule:: pylasu.model.errors 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pylasu.model.model module 16 | ------------------------- 17 | 18 | .. automodule:: pylasu.model.model 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | pylasu.model.naming module 24 | -------------------------- 25 | 26 | .. automodule:: pylasu.model.naming 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | pylasu.model.position module 32 | ---------------------------- 33 | 34 | .. automodule:: pylasu.model.position 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | pylasu.model.processing module 40 | ------------------------------ 41 | 42 | .. 
automodule:: pylasu.model.processing 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | pylasu.model.traversing module 48 | ------------------------------ 49 | 50 | .. automodule:: pylasu.model.traversing 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | Module contents 56 | --------------- 57 | 58 | .. automodule:: pylasu.model 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/pylasu.parsing.rst: -------------------------------------------------------------------------------- 1 | pylasu.parsing package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.parsing.parse\_tree module 8 | --------------------------------- 9 | 10 | .. automodule:: pylasu.parsing.parse_tree 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: pylasu.parsing 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/pylasu.playground.rst: -------------------------------------------------------------------------------- 1 | pylasu.playground package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.playground.transpilation\_trace module 8 | --------------------------------------------- 9 | 10 | .. automodule:: pylasu.playground.transpilation_trace 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pylasu.playground.transpilation\_trace\_ecore module 16 | ---------------------------------------------------- 17 | 18 | .. automodule:: pylasu.playground.transpilation_trace_ecore 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. 
automodule:: pylasu.playground 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/pylasu.reflection.rst: -------------------------------------------------------------------------------- 1 | pylasu.reflection package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.reflection.reflection module 8 | ----------------------------------- 9 | 10 | .. automodule:: pylasu.reflection.reflection 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: pylasu.reflection 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/pylasu.rst: -------------------------------------------------------------------------------- 1 | pylasu package 2 | ============== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 6 9 | 10 | pylasu.StrumentaLanguageSupport 11 | pylasu.emf 12 | pylasu.mapping 13 | pylasu.model 14 | pylasu.parsing 15 | pylasu.playground 16 | pylasu.reflection 17 | pylasu.testing 18 | pylasu.transformation 19 | pylasu.validation 20 | 21 | Submodules 22 | ---------- 23 | 24 | pylasu.astruntime module 25 | ------------------------ 26 | 27 | .. automodule:: pylasu.astruntime 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | 32 | pylasu.support module 33 | --------------------- 34 | 35 | .. automodule:: pylasu.support 36 | :members: 37 | :undoc-members: 38 | :show-inheritance: 39 | 40 | Module contents 41 | --------------- 42 | 43 | .. 
automodule:: pylasu 44 | :members: 45 | :undoc-members: 46 | :show-inheritance: 47 | -------------------------------------------------------------------------------- /docs/pylasu.testing.rst: -------------------------------------------------------------------------------- 1 | pylasu.testing package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.testing.testing module 8 | ----------------------------- 9 | 10 | .. automodule:: pylasu.testing.testing 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: pylasu.testing 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/pylasu.transformation.rst: -------------------------------------------------------------------------------- 1 | pylasu.transformation package 2 | ============================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.transformation.generic\_nodes module 8 | ------------------------------------------- 9 | 10 | .. automodule:: pylasu.transformation.generic_nodes 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pylasu.transformation.transformation module 16 | ------------------------------------------- 17 | 18 | .. automodule:: pylasu.transformation.transformation 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: pylasu.transformation 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/pylasu.validation.rst: -------------------------------------------------------------------------------- 1 | pylasu.validation package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pylasu.validation.validation module 8 | ----------------------------------- 9 | 10 | .. 
automodule:: pylasu.validation.validation 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: pylasu.validation 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx_rtd_theme 2 | -------------------------------------------------------------------------------- /pylasu/StrumentaLanguageSupport/StrumentaLanguageSupport.py: -------------------------------------------------------------------------------- 1 | """Definition of meta model 'StrumentaLanguageSupport'.""" 2 | from functools import partial 3 | import pyecore.ecore as Ecore 4 | from pyecore.ecore import * 5 | 6 | 7 | name = 'StrumentaLanguageSupport' 8 | nsURI = 'https://strumenta.com/kolasu/v2' 9 | nsPrefix = '' 10 | 11 | eClass = EPackage(name=name, nsURI=nsURI, nsPrefix=nsPrefix) 12 | 13 | eClassifiers = {} 14 | getEClassifier = partial(Ecore.getEClassifier, searchspace=eClassifiers) 15 | IssueType = EEnum('IssueType', literals=['LEXICAL', 'SYNTACTIC', 'SEMANTIC']) 16 | 17 | IssueSeverity = EEnum('IssueSeverity', literals=['ERROR', 'WARNING', 'INFO']) 18 | 19 | 20 | BigDecimal = EDataType('BigDecimal', instanceClassName='java.math.BigDecimal') 21 | 22 | BigInteger = EDataType('BigInteger', instanceClassName='java.math.BigInteger') 23 | 24 | 25 | class LocalDate(EObject, metaclass=MetaEClass): 26 | 27 | year = EAttribute(eType=EInt, unique=True, derived=False, changeable=True) 28 | month = EAttribute(eType=EInt, unique=True, derived=False, changeable=True) 29 | dayOfMonth = EAttribute(eType=EInt, unique=True, derived=False, changeable=True) 30 | 31 | def __init__(self, *, year=None, month=None, dayOfMonth=None): 32 | # if kwargs: 33 | # raise AttributeError('unexpected arguments: {}'.format(kwargs)) 34 | 35 | 
class LocalTime(EObject, metaclass=MetaEClass):
    """EMF object mirroring java.time.LocalTime: hour/minute/second/nanosecond."""

    hour = EAttribute(eType=EInt, unique=True, derived=False, changeable=True)
    minute = EAttribute(eType=EInt, unique=True, derived=False, changeable=True)
    second = EAttribute(eType=EInt, unique=True, derived=False, changeable=True)
    nanosecond = EAttribute(eType=EInt, unique=True, derived=False, changeable=True)

    def __init__(self, *, hour=None, minute=None, second=None, nanosecond=None):
        super().__init__()
        # Only assign features that were explicitly provided, so EMF defaults
        # apply to everything else.
        for feature, value in (("hour", hour), ("minute", minute),
                               ("second", second), ("nanosecond", nanosecond)):
            if value is not None:
                setattr(self, feature, value)


class LocalDateTime(EObject, metaclass=MetaEClass):
    """EMF object pairing a LocalDate and a LocalTime as containment references."""

    date = EReference(ordered=True, unique=True, containment=True, derived=False)
    time = EReference(ordered=True, unique=True, containment=True, derived=False)

    def __init__(self, *, date=None, time=None):
        super().__init__()
        for feature, value in (("date", date), ("time", time)):
            if value is not None:
                setattr(self, feature, value)
def _init_features(obj, **features):
    """Assign only the explicitly-provided (non-None) features on a fresh EObject."""
    for feature_name, value in features.items():
        if value is not None:
            setattr(obj, feature_name, value)


class Position(EObject, metaclass=MetaEClass):
    """A source-text range delimited by two contained Points."""

    start = EReference(ordered=True, unique=True, containment=True, derived=False)
    end = EReference(ordered=True, unique=True, containment=True, derived=False)

    def __init__(self, *, start=None, end=None):
        super().__init__()
        _init_features(self, start=start, end=end)


@abstract
class Origin(EObject, metaclass=MetaEClass):
    """Abstract marker for anything an AST node may originate from."""

    def __init__(self):
        super().__init__()


@abstract
class Destination(EObject, metaclass=MetaEClass):
    """Abstract marker for anything an AST node may be translated into."""

    def __init__(self):
        super().__init__()


class Statement(EObject, metaclass=MetaEClass):
    """Marker concept for statement nodes."""

    def __init__(self):
        super().__init__()


class Expression(EObject, metaclass=MetaEClass):
    """Marker concept for expression nodes."""

    def __init__(self):
        super().__init__()


class EntityDeclaration(EObject, metaclass=MetaEClass):
    """Marker concept for entity-declaration nodes."""

    def __init__(self):
        super().__init__()


class Issue(EObject, metaclass=MetaEClass):
    """A reported problem (type, message, severity) at an optional position."""

    type = EAttribute(eType=IssueType, unique=True, derived=False, changeable=True)
    message = EAttribute(eType=EString, unique=True, derived=False, changeable=True)
    severity = EAttribute(eType=IssueSeverity, unique=True, derived=False, changeable=True)
    position = EReference(ordered=True, unique=True, containment=True, derived=False)

    def __init__(self, *, type=None, message=None, severity=None, position=None):
        super().__init__()
        _init_features(self, type=type, message=message,
                       severity=severity, position=position)


class PossiblyNamed(EObject, metaclass=MetaEClass):
    """An element that may carry a name."""

    name = EAttribute(eType=EString, unique=True, derived=False, changeable=True)

    def __init__(self, *, name=None):
        super().__init__()
        _init_features(self, name=name)


class ReferenceByName(EObject, metaclass=MetaEClass):
    """A by-name reference, possibly resolved to a referenced node."""

    name = EAttribute(eType=EString, unique=True, derived=False, changeable=True)
    referenced = EReference(ordered=True, unique=True, containment=False, derived=False)

    def __init__(self, *, name=None, referenced=None):
        super().__init__()
        _init_features(self, name=name, referenced=referenced)


class Result(EObject, metaclass=MetaEClass):
    """The outcome of a processing step: a root node plus a list of issues."""

    root = EReference(ordered=True, unique=True, containment=True, derived=False)
    issues = EReference(ordered=True, unique=True, containment=True, derived=False, upper=-1)

    def __init__(self, *, root=None, issues=None):
        super().__init__()
        if root is not None:
            self.root = root
        # issues is a multi-valued feature: extend the existing ECollection
        # rather than replacing it.
        if issues:
            self.issues.extend(issues)


class NodeDestination(Destination):
    """Destination pointing at another node (non-containment reference)."""

    node = EReference(ordered=True, unique=True, containment=False, derived=False)

    def __init__(self, *, node=None, **kwargs):
        super().__init__(**kwargs)
        _init_features(self, node=node)


class TextFileDestination(Destination):
    """Destination pointing at a position inside a generated text file."""

    position = EReference(ordered=True, unique=True, containment=True, derived=False)

    def __init__(self, *, position=None, **kwargs):
        super().__init__(**kwargs)
        _init_features(self, position=position)


@abstract
class ASTNode(Origin):
    """Base class of all AST nodes in the StarLasu metamodel."""

    position = EReference(ordered=True, unique=True, containment=True, derived=False)
    origin = EReference(ordered=True, unique=True, containment=False, derived=False)
    destination = EReference(ordered=True, unique=True, containment=True, derived=False)

    def __init__(self, *, position=None, origin=None, destination=None, **kwargs):
        super().__init__(**kwargs)
        _init_features(self, position=position, origin=origin, destination=destination)


class Named(PossiblyNamed):
    """An element that definitely carries a name."""

    name = EAttribute(eType=EString, unique=True, derived=False, changeable=True)

    def __init__(self, *, name=None, **kwargs):
        super().__init__(**kwargs)
        _init_features(self, name=name)
from .StrumentaLanguageSupport import name, nsURI, nsPrefix, eClass 4 | from .StrumentaLanguageSupport import BigDecimal, BigInteger, LocalDate, LocalTime, LocalDateTime, Point, Position, Origin, Destination, NodeDestination, TextFileDestination, ASTNode, Statement, Expression, EntityDeclaration, IssueType, IssueSeverity, Issue, PossiblyNamed, Named, ReferenceByName, Result 5 | 6 | 7 | from . import StrumentaLanguageSupport 8 | 9 | __all__ = ['BigDecimal', 'BigInteger', 'LocalDate', 'LocalTime', 'LocalDateTime', 'Point', 'Position', 'Origin', 'Destination', 'NodeDestination', 'TextFileDestination', 10 | 'ASTNode', 'Statement', 'Expression', 'EntityDeclaration', 'IssueType', 'IssueSeverity', 'Issue', 'PossiblyNamed', 'Named', 'ReferenceByName', 'Result'] 11 | 12 | eSubpackages = [] 13 | eSuperPackage = None 14 | StrumentaLanguageSupport.eSubpackages = eSubpackages 15 | StrumentaLanguageSupport.eSuperPackage = eSuperPackage 16 | 17 | LocalDateTime.date.eType = LocalDate 18 | LocalDateTime.time.eType = LocalTime 19 | Position.start.eType = Point 20 | Position.end.eType = Point 21 | NodeDestination.node.eType = ASTNode 22 | TextFileDestination.position.eType = Position 23 | ASTNode.position.eType = Position 24 | ASTNode.origin.eType = Origin 25 | ASTNode.destination.eType = Destination 26 | Issue.position.eType = Position 27 | # TODO eGenericType not supported ReferenceByName.referenced.eType = 28 | ReferenceByName.referenced.eType = ASTNode 29 | # TODO eGenericType not supported 30 | Result.root.eType = ASTNode 31 | Result.issues.eType = Issue 32 | 33 | otherClassifiers = [BigDecimal, BigInteger, IssueType, IssueSeverity] 34 | 35 | for classif in otherClassifiers: 36 | eClassifiers[classif.name] = classif 37 | classif.ePackage = eClass 38 | 39 | for subpack in eSubpackages: 40 | eClass.eSubpackages.append(subpack.eClass) 41 | -------------------------------------------------------------------------------- /pylasu/__init__.py: 
import contextlib

from .model.naming import ReferenceByName
from .model.position import Point, Position
from .validation.validation import Result, Issue, IssueType, IssueSeverity


def unserialize_result(json_result, root_unserializer) -> Result:
    """Deserialize a JSON payload into a Result.

    The root node, if present, is delegated to root_unserializer; issues
    default to an empty list when absent.
    """
    result = Result(root=root_unserializer(json_result['root']) if 'root' in json_result else None)
    result.issues = [unserialize_issue(issue) for issue in json_result.get('issues', [])]
    return result


def unserialize_issue(json_issue) -> Issue:
    """Deserialize a single issue.

    Uses dict.get() so that absent keys map to None instead of raising
    KeyError (the previous json_issue['type'] pattern crashed on partial
    payloads). Falsy values (None, "") also map to None, as before.
    """
    return Issue(
        type=unserialize_issue_type(json_issue['type']) if json_issue.get('type') else None,
        message=json_issue['message'] if json_issue.get('message') else None,
        severity=unserialize_issue_severity(json_issue['severity']) if json_issue.get('severity') else None,
        position=unserialize_position(json_issue['position']) if json_issue.get('position') else None
    )


def unserialize_issue_type(json_issue_type) -> IssueType or None:
    """Map a name to an IssueType member; unknown names yield None."""
    with contextlib.suppress(Exception):
        return IssueType[json_issue_type]


def unserialize_issue_severity(json_issue_severity) -> IssueSeverity:
    """Map a name to an IssueSeverity member; unknown names yield None."""
    with contextlib.suppress(Exception):
        return IssueSeverity[json_issue_severity]


def check_type(json, expected_type):
    """Raise if the JSON object's '#type' discriminator is missing or wrong."""
    if "#type" not in json:
        raise Exception("type not specified, expected %s" % expected_type)
    if json["#type"] != expected_type:
        raise Exception("unexpected type, expected %s but found %s" % (expected_type, json["#type"]))


def unserialize_point(json_point):
    """Deserialize a Point; missing coordinates become None."""
    return Point(
        line=json_point.get('line'),
        column=json_point.get('column'),
    )


def unserialize_position(json_position):
    """Deserialize a Position from its start/end points.

    A key that is present but null is treated like an absent key; previously
    {'start': None} would crash inside unserialize_point.
    """
    return Position(
        start=unserialize_point(json_position['start']) if json_position.get('start') else None,
        end=unserialize_point(json_position['end']) if json_position.get('end') else None
    )


def unserialize_reference_by_name(json_reference_by_name):
    """Deserialize a ReferenceByName; the referenced node is not resolved here."""
    return ReferenceByName(
        name=json_reference_by_name.get('name')
    )


def unserialize_long(json):
    """JSON numbers already map to Python ints; identity."""
    return json
list(bases) 22 | updated = False 23 | shift = 0 24 | for i, base in enumerate(bases): 25 | if isinstance(base, type): 26 | continue 27 | if not hasattr(base, "__mro_entries__"): 28 | continue 29 | new_base = base.__mro_entries__(bases) 30 | updated = True 31 | if not isinstance(new_base, tuple): 32 | raise TypeError("__mro_entries__ must return a tuple") 33 | else: 34 | new_bases[i + shift:i + shift + 1] = new_base 35 | shift += len(new_base) - 1 36 | if not updated: 37 | return bases 38 | return tuple(new_bases) 39 | 40 | 41 | class MetamodelBuilder: 42 | def __init__(self, package_name: str, ns_uri: str, ns_prefix: str = None, resource: Resource = None, 43 | base_node_class: type = Node): 44 | self.package = EPackage(package_name, ns_uri, ns_prefix) 45 | if resource: 46 | resource.append(self.package) 47 | self.data_types = { 48 | bool: EBoolean, 49 | int: EInt, 50 | str: EString, 51 | } 52 | self.base_node_class = base_node_class 53 | self.forward_references = [] 54 | 55 | def can_provide_class(self, cls: type): 56 | return cls.__module__ == self.package.name 57 | 58 | def provide_class(self, cls: type): 59 | if cls == self.base_node_class: 60 | return ASTNode 61 | if not self.can_provide_class(cls): 62 | if self.package.eResource: 63 | eclass = find_eclassifier(self.package.eResource, cls) 64 | if eclass: 65 | return eclass 66 | raise Exception(self.package.name + " cannot provide class " + str(cls)) 67 | eclass = self.package.getEClassifier(cls.__name__) 68 | if not eclass: 69 | nmspc = self.setup_attributes(cls) 70 | bases = self.setup_base_classes(cls) 71 | eclass = MetaEClass(cls.__name__, resolve_bases(tuple(bases)), nmspc) 72 | eclass.eClass.ePackage = self.package 73 | for (type_name, ref) in self.forward_references: 74 | if type_name == cls.__name__: 75 | ref.eType = eclass 76 | self.forward_references = [(t, r) for t, r in self.forward_references if not r.eType] 77 | return eclass 78 | 79 | def setup_base_classes(self, cls): 80 | bases = [] 81 | for c 
in cls.__mro__[1:]: 82 | if c == self.base_node_class: 83 | bases.append(ASTNode) 84 | elif self.can_provide_class(c): 85 | bases.append(self.provide_class(c)) 86 | elif self.package.eResource: 87 | esuperclass = find_eclassifier(self.package.eResource, c) 88 | if esuperclass: 89 | bases.append(esuperclass) 90 | bases.append(EObject) 91 | return bases 92 | 93 | def setup_attributes(self, cls): 94 | anns = get_type_annotations(cls) 95 | nmspc = { 96 | "position": EReference("position", starlasu.Position, containment=True) 97 | } 98 | for attr in anns if anns else []: 99 | if attr.startswith('_'): 100 | continue 101 | elif is_dataclass(cls): 102 | field = next((f for f in fields(cls) if f.name == attr), None) 103 | if isinstance(field, InternalField): 104 | continue 105 | attr_type = anns[attr] 106 | nmspc[attr] = self.to_structural_feature(attr, attr_type) 107 | return nmspc 108 | 109 | def to_structural_feature(self, attr, attr_type, unsupported_type_handler=None): # noqa: C901 110 | def raise_on_unsupported_type(attr_type, attr): 111 | raise Exception("Unsupported type " + str(attr_type) + " for attribute " + attr) 112 | 113 | def default_unsupported_type(_, __): 114 | return EObject 115 | 116 | if not unsupported_type_handler: 117 | unsupported_type_handler = raise_on_unsupported_type 118 | if isinstance(attr_type, str): 119 | return self.to_reference(attr, attr_type) 120 | elif attr_type in self.data_types: 121 | return EAttribute(attr, self.data_types[attr_type]) 122 | elif attr_type == object: 123 | return EAttribute(attr) 124 | elif self.is_node_type(attr_type): 125 | return EReference(attr, self.provide_class(attr_type), containment=True) 126 | elif is_sequence_type(attr_type): 127 | return self.to_list_reference(attr, attr_type, default_unsupported_type) 128 | elif get_type_origin(attr_type) == typing.Union: 129 | return EReference(attr, EObject, containment=True) # TODO here we could refine the type better 130 | elif is_enum_type(attr_type): 131 | return 
self.to_enum_attribute(attr, attr_type) 132 | else: 133 | return unsupported_type_handler(attr_type, attr) 134 | 135 | def is_node_type(self, attr_type): 136 | return isinstance(attr_type, type) and issubclass(attr_type, self.base_node_class) 137 | 138 | def to_enum_attribute(self, attr, attr_type): 139 | tp = EEnum(name=attr_type.__name__, literals=attr_type.__members__) 140 | tp.ePackage = self.package 141 | self.data_types[attr_type] = tp 142 | return EAttribute(attr, tp) 143 | 144 | def to_list_reference(self, attr, attr_type, default_unsupported_type): 145 | type_args = get_type_arguments(attr_type) 146 | if type_args and len(type_args) == 1: 147 | ft = self.to_structural_feature(attr, type_args[0], default_unsupported_type) 148 | ft.upperBound = -1 149 | return ft 150 | else: 151 | raise "Unsupported list type: " + str(attr_type) 152 | 153 | def to_reference(self, attr, attr_type): 154 | resolved = self.package.getEClassifier(attr_type) 155 | if resolved: 156 | return EReference(attr, resolved, containment=True) 157 | else: 158 | forward_reference = EReference(attr, containment=True) 159 | self.forward_references.append((attr_type, forward_reference)) 160 | return forward_reference 161 | 162 | def generate(self): 163 | if self.forward_references: 164 | raise Exception("The following classes are missing from " + self.package.name + ": " 165 | + ", ".join(n for n, _ in self.forward_references)) 166 | return self.package 167 | 168 | 169 | # Monkey patch until fix 170 | update_opposite = ECollection._update_opposite 171 | 172 | 173 | def update_opposite_if_not_none(self, owner, new_value, remove=False): 174 | if owner: 175 | update_opposite(self, owner, new_value, remove) 176 | 177 | 178 | ECollection._update_opposite = update_opposite_if_not_none 179 | -------------------------------------------------------------------------------- /pylasu/emf/model.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 
from pyecore.ecore import EPackage
from pyecore.resources import Resource

from pylasu.model import Node
from pylasu.support import extension_method


def find_eclassifier_in_resource(cls: type, resource: Resource):
    """Look up the EClassifier for cls in the EPackage named after cls's module."""
    package_name = cls.__module__
    for content in resource.contents:
        if isinstance(content, EPackage) and content.name == package_name:
            return content.getEClassifier(cls.__name__)


@extension_method(Resource)
def find_eclassifier(self: Resource, cls: type):
    """Search this resource first, then every other resource in the resource set."""
    eclass = find_eclassifier_in_resource(cls, self)
    if eclass:
        return eclass
    resources = self.resource_set.resources if self.resource_set else {}
    for uri in resources:
        candidate = resources[uri]
        if candidate != self:
            found = find_eclassifier_in_resource(cls, candidate)
            if found:
                return found
    # Not found anywhere: propagate the (None) result of the first lookup.
    return eclass


@extension_method(Node)
def to_eobject(self: Node, resource: Resource, mappings=None):
    """Translate a pylasu Node tree into the corresponding EObject tree.

    mappings caches already-translated nodes by id() so shared nodes are
    translated once.
    """
    if self is None:
        return None
    if mappings is None:
        mappings = {}
    elif id(self) in mappings:
        return mappings[id(self)]
    eclass = resource.find_eclassifier(type(self))
    if not eclass:
        raise Exception("Unknown classifier for " + str(type(self)))
    eobject = eclass()
    # Register before translating children so cycles terminate.
    mappings[id(self)] = eobject
    for prop in self.properties:
        translated = translate_value(prop.value, resource, mappings)
        if isinstance(prop.value, list):
            eobject.eGet(prop.name).extend(translated)
        else:
            eobject.eSet(prop.name, translated)
    return eobject


def translate_value(v, resource, mappings):
    """Translate a single property value: enums to EEnum literals, nodes to
    EObjects, lists element-wise; everything else passes through unchanged."""
    if isinstance(v, Enum):
        enum_type = resource.find_eclassifier(type(v))
        if not enum_type:
            raise Exception("Unknown enum " + str(type(v)))
        return enum_type.getEEnumLiteral(v.name)
    if isinstance(v, list):
        return [translate_value(item, resource, mappings) for item in v]
    if isinstance(v, Node):
        return to_eobject(v, resource, mappings)
    return v
-------------------------------------------------------------------------------- /pylasu/mapping/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strumenta/pylasu/133dadc7b9b011d3ccf3ae1e08eaec124e6ae957/pylasu/mapping/__init__.py -------------------------------------------------------------------------------- /pylasu/mapping/parse_tree_to_ast_transformer.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Any 2 | 3 | from antlr4 import ParserRuleContext 4 | from antlr4.tree.Tree import ParseTree 5 | 6 | from pylasu.model import Node, Origin 7 | from pylasu.parsing.parse_tree import ParseTreeOrigin, with_parse_tree 8 | from pylasu.transformation.transformation import ASTTransformer 9 | 10 | 11 | class ParseTreeToASTTransformer(ASTTransformer): 12 | """Implements a transformation from an ANTLR parse tree (the output of the parser) to an AST 13 | (a higher-level representation of the source code).""" 14 | 15 | def transform(self, source: Optional[Any], parent: Optional[Node] = None) -> Optional[Node]: 16 | """Performs the transformation of a node and, recursively, its descendants. In addition to the overridden 17 | method, it also assigns the parseTreeNode to the AST node so that it can keep track of its position. 
18 | However, a node factory can override the parseTreeNode of the nodes it creates (but not the parent).""" 19 | node = super().transform(source, parent) 20 | if node and node.origin and isinstance(source, ParserRuleContext): 21 | with_parse_tree(node, source) 22 | return node 23 | 24 | def get_source(self, node: Node, source: Any) -> Any: 25 | origin = node.origin 26 | if isinstance(origin, ParseTreeOrigin): 27 | return origin.parse_tree 28 | else: 29 | return source 30 | 31 | def as_origin(self, source: Any) -> Optional[Origin]: 32 | if isinstance(source, ParseTree): 33 | return ParseTreeOrigin(source) 34 | else: 35 | return None 36 | -------------------------------------------------------------------------------- /pylasu/model/__init__.py: -------------------------------------------------------------------------------- 1 | from .model import Destination, Node, Origin, internal_field, internal_properties 2 | from .naming import Named, PossiblyNamed, ReferenceByName 3 | from .position import Point, Position, pos, Source 4 | from .traversing import walk, walk_ancestors, walk_descendants, walk_leaves_first 5 | from .processing import children, search_by_type 6 | -------------------------------------------------------------------------------- /pylasu/model/errors.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Optional 3 | 4 | from pylasu.model import Position, Node 5 | 6 | 7 | @dataclass 8 | class ErrorNode: 9 | """An AST node that marks the presence of an error, 10 | for example a syntactic or semantic error in the original tree.""" 11 | message: str = None 12 | position: Optional[Position] = None 13 | 14 | 15 | @dataclass 16 | class GenericErrorNode(Node, ErrorNode): 17 | error: Optional[Exception] = None 18 | 19 | def __post_init__(self): 20 | if not self.message: 21 | if self.error: 22 | self.message = f"Exception {self.error}" 23 | else: 24 | self.message = 
"Unspecified error node" 25 | -------------------------------------------------------------------------------- /pylasu/model/model.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import inspect 3 | import sys 4 | import typing 5 | from abc import ABC, abstractmethod, ABCMeta 6 | from dataclasses import Field, MISSING, dataclass, field 7 | from typing import Optional, Callable, List, Union 8 | 9 | from .naming import ReferenceByName 10 | from .position import Position, Source 11 | from .reflection import Multiplicity, PropertyDescription 12 | from ..reflection import get_type_annotations, get_type_arguments, is_sequence_type 13 | from ..reflection.reflection import get_type_origin 14 | 15 | PYLASU_FEATURE = "pylasu_feature" 16 | 17 | 18 | class internal_property(property): 19 | pass 20 | 21 | 22 | def internal_properties(*props: str): 23 | def decorate(cls: type): 24 | cls.__internal_properties__ = ( 25 | getattr(cls, "__internal_properties__", []) + [*Node.__internal_properties__, *props]) 26 | return cls 27 | return decorate 28 | 29 | 30 | class InternalField(Field): 31 | pass 32 | 33 | 34 | def internal_field( 35 | *, default=MISSING, default_factory=MISSING, init=True, repr=True, hash=None, compare=True, metadata=None, 36 | kw_only=False): 37 | """Return an object to identify internal dataclass fields. 
def node_property(default=MISSING):
    """Declare an AST attribute (non-containment feature) as a dataclass field.

    A None default makes the feature OPTIONAL, otherwise SINGULAR.
    """
    description = PropertyDescription(
        "", None,
        multiplicity=Multiplicity.OPTIONAL if default is None else Multiplicity.SINGULAR)
    return field(default=default, metadata={PYLASU_FEATURE: description})


def node_containment(multiplicity: Multiplicity = Multiplicity.SINGULAR):
    """Declare a containment feature as a dataclass field.

    The dataclass default follows the multiplicity: SINGULAR is required,
    OPTIONAL defaults to None, MANY defaults to a fresh list.
    Raises ValueError for unrecognized multiplicity values.
    """
    description = PropertyDescription("", None, is_containment=True, multiplicity=multiplicity)

    if multiplicity == Multiplicity.SINGULAR:
        return field(metadata={PYLASU_FEATURE: description})
    elif multiplicity == Multiplicity.OPTIONAL:
        return field(default=None, metadata={PYLASU_FEATURE: description})
    elif multiplicity == Multiplicity.MANY:
        return field(default_factory=list, metadata={PYLASU_FEATURE: description})
    else:
        # Previously this silently returned None on an unknown multiplicity,
        # producing a confusing failure later; fail fast instead.
        raise ValueError(f"Unsupported multiplicity: {multiplicity}")


class Origin(ABC):
    """Something a node can originate from: exposes position, source text and source."""

    @internal_property
    @abstractmethod
    def position(self) -> Optional[Position]:
        pass

    @internal_property
    def source_text(self) -> Optional[str]:
        return None

    @internal_property
    def source(self) -> Optional[Source]:
        # Derived from position; None when the origin has no position.
        return self.position.source if self.position is not None else None


@dataclass
class CompositeOrigin(Origin):
    """An origin aggregating several other origins."""
    elements: List[Origin] = field(default_factory=list)
    position: Optional[Position] = None
    source_text: Optional[str] = None


class Destination(ABC):
    """Marker base class for transformation destinations."""
    pass
field(default_factory=list) 95 | 96 | 97 | @dataclass 98 | class TextFileDestination(Destination): 99 | position: Optional[Position] = None 100 | 101 | 102 | def is_internal_property_or_method(value): 103 | return isinstance(value, internal_property) or isinstance(value, InternalField) or isinstance(value, Callable) 104 | 105 | 106 | def provides_nodes(decl_type): 107 | if get_type_origin(decl_type) is Union: 108 | provides = None 109 | for tp in get_type_arguments(decl_type): 110 | if tp is type(None): 111 | continue 112 | arg_provides = provides_nodes(tp) 113 | if provides is None: 114 | provides = arg_provides 115 | elif provides != arg_provides: 116 | raise Exception(f"Type {decl_type} mixes nodes and non-nodes") 117 | return provides 118 | else: 119 | return isinstance(decl_type, type) and issubclass(decl_type, Node) 120 | 121 | 122 | def get_only_type_arg(decl_type): 123 | """If decl_type has a single type argument, return it, otherwise return None""" 124 | type_args = get_type_arguments(decl_type) 125 | if len(type_args) == 1: 126 | return type_args[0] 127 | else: 128 | return None 129 | 130 | 131 | def process_annotated_property(cl: type, name: str, decl_type): 132 | try: 133 | fields = dataclasses.fields(cl) 134 | except TypeError: 135 | fields = tuple() 136 | for field in fields: 137 | if field.name == name and PYLASU_FEATURE in field.metadata: 138 | feature = field.metadata[PYLASU_FEATURE] 139 | feature.name = name 140 | if isinstance(decl_type, type): 141 | feature.type = decl_type 142 | elif type(field.type) is str: 143 | feature.type = try_to_resolve_string_type(field.type, name, cl) 144 | return feature 145 | return compute_feature_from_annotation(cl, name, decl_type) 146 | 147 | 148 | def compute_feature_from_annotation(cl, name, decl_type): 149 | feature = PropertyDescription(name, None, False, False, Multiplicity.SINGULAR) 150 | decl_type = try_to_resolve_type(decl_type, feature) 151 | if not isinstance(decl_type, type): 152 | fwref = None 153 | 
if hasattr(typing, "ForwardRef"): 154 | fwref = typing.ForwardRef 155 | if fwref and isinstance(decl_type, fwref): 156 | raise Exception(f"Feature {name}'s type is unresolved forward reference {decl_type}, " 157 | f"please use node_containment or node_property") 158 | elif type(decl_type) is str: 159 | decl_type = try_to_resolve_string_type(decl_type, name, cl) 160 | if not isinstance(decl_type, type): 161 | raise Exception(f"Unsupported feature {name} of type {decl_type}") 162 | feature.type = decl_type 163 | feature.is_containment = provides_nodes(decl_type) and not feature.is_reference 164 | return feature 165 | 166 | 167 | def try_to_resolve_string_type(decl_type, name, cl): 168 | try: 169 | ns = getattr(sys.modules.get(cl.__module__, None), '__dict__', globals()) 170 | decl_type = ns[decl_type] 171 | except KeyError: 172 | raise Exception(f"Unsupported feature {name} of unknown type {decl_type}") 173 | return decl_type 174 | 175 | 176 | def try_to_resolve_type(decl_type, feature): 177 | if get_type_origin(decl_type) is ReferenceByName: 178 | decl_type = get_only_type_arg(decl_type) or decl_type 179 | feature.is_reference = True 180 | if is_sequence_type(decl_type): 181 | decl_type = get_only_type_arg(decl_type) or decl_type 182 | feature.multiplicity = Multiplicity.MANY 183 | if get_type_origin(decl_type) is Union: 184 | type_args = get_type_arguments(decl_type) 185 | if len(type_args) == 1: 186 | decl_type = type_args[0] 187 | elif len(type_args) == 2: 188 | if type_args[0] is type(None): 189 | decl_type = type_args[1] 190 | elif type_args[1] is type(None): 191 | decl_type = type_args[0] 192 | else: 193 | raise Exception(f"Unsupported feature {feature.name} of union type {decl_type}") 194 | if feature.multiplicity == Multiplicity.SINGULAR: 195 | feature.multiplicity = Multiplicity.OPTIONAL 196 | else: 197 | raise Exception(f"Unsupported feature {feature.name} of union type {decl_type}") 198 | return decl_type 199 | 200 | 201 | class Concept(ABCMeta): 202 | 
203 | def __init__(cls, what, bases=None, dict=None): 204 | super().__init__(what, bases, dict) 205 | cls.__internal_properties__ = [] 206 | for base in bases: 207 | if hasattr(base, "__internal_properties__"): 208 | cls.__internal_properties__.extend(base.__internal_properties__) 209 | if not cls.__internal_properties__: 210 | cls.__internal_properties__ = ["origin", "destination", "parent", "position", "position_override"] 211 | cls.__internal_properties__.extend([n for n, v in inspect.getmembers(cls, is_internal_property_or_method)]) 212 | 213 | @property 214 | def node_properties(cls): 215 | names = set() 216 | for cl in cls.__mro__: 217 | yield from cls._direct_node_properties(cl, names) 218 | 219 | def _direct_node_properties(cls, cl, known_property_names): 220 | if not isinstance(cls, Concept): 221 | return 222 | anns = get_type_annotations(cl) 223 | if not anns: 224 | return 225 | for name in anns: 226 | if name not in known_property_names and cls.is_node_property(name): 227 | feature = process_annotated_property(cl, name, anns[name]) 228 | known_property_names.add(name) 229 | yield feature 230 | for name in dir(cl): 231 | if name not in known_property_names and cls.is_node_property(name): 232 | feature = PropertyDescription(name, None, False, False) 233 | known_property_names.add(name) 234 | yield feature 235 | 236 | def is_node_property(cls, name): 237 | return not name.startswith('_') and name not in cls.__internal_properties__ 238 | 239 | 240 | class Node(Origin, Destination, metaclass=Concept): 241 | origin: Optional[Origin] = None 242 | destination: Optional[Destination] = None 243 | parent: Optional["Node"] = None 244 | position_override: Optional[Position] = None 245 | 246 | def __init__(self, origin: Optional[Origin] = None, parent: Optional["Node"] = None, 247 | position_override: Optional[Position] = None): 248 | self.origin = origin 249 | self.parent = parent 250 | self.position_override = position_override 251 | 252 | def with_origin(self, 
origin: Optional[Origin]): 253 | self.origin = origin 254 | return self 255 | 256 | def with_parent(self, parent: Optional["Node"]): 257 | self.parent = parent 258 | return self 259 | 260 | def with_position(self, position: Optional[Position]): 261 | self.position = position 262 | return self 263 | 264 | @internal_property 265 | def position(self) -> Optional[Position]: 266 | return self.position_override if self.position_override is not None\ 267 | else self.origin.position if self.origin is not None else None 268 | 269 | @position.setter 270 | def position(self, position: Optional[Position]): 271 | self.position_override = position 272 | 273 | @internal_property 274 | def source_text(self) -> Optional[str]: 275 | return self.origin.source_text if self.origin is not None else None 276 | 277 | @internal_property 278 | def source(self) -> Optional[Source]: 279 | return self.origin.source if self.origin is not None else None 280 | 281 | @internal_property 282 | def properties(self): 283 | return (PropertyDescription(p.name, p.type, 284 | is_containment=p.is_containment, is_reference=p.is_reference, 285 | multiplicity=p.multiplicity, value=getattr(self, p.name)) 286 | for p in self.__class__.node_properties) 287 | 288 | @internal_property 289 | def _fields(self): 290 | yield from (name for name, _ in self.properties) 291 | 292 | @internal_property 293 | def node_type(self): 294 | return type(self) 295 | 296 | 297 | def concept_of(node): 298 | properties = dir(node) 299 | if "__concept__" in properties: 300 | node_type = node.__concept__ 301 | elif "node_type" in properties: 302 | node_type = node.node_type 303 | else: 304 | node_type = type(node) 305 | if isinstance(node_type, Concept): 306 | return node_type 307 | else: 308 | raise Exception(f"Not a concept: {node_type} of {node}") 309 | -------------------------------------------------------------------------------- /pylasu/model/naming.py: 
from dataclasses import dataclass, field
from itertools import chain
from typing import TypeVar, Generic, Optional, List, Dict


@dataclass
class PossiblyNamed:
    """An entity that can optionally have a name."""
    name: str

    def __init__(self, name: str = None):
        self.name = name


@dataclass
class Named(PossiblyNamed):
    """An entity that is required to have a name."""
    name: str


T = TypeVar("T", bound=PossiblyNamed)


@dataclass
class ReferenceByName(Generic[T]):
    """A reference to an entity of type T, to be resolved by name."""
    name: str
    referred: Optional[T] = field(default=None)

    def __str__(self):
        status = 'Solved' if self.resolved() else 'Unsolved'
        return f"Ref({self.name})[{status}]"

    def __hash__(self):
        # NOTE(review): due to operator precedence this multiplies by 9 when
        # resolved and by 1 otherwise, not by 7 + (2 or 1); kept as-is to
        # preserve hash compatibility -- confirm the intended formula.
        return self.name.__hash__() * (7 + 2 if self.resolved() else 1)

    def resolved(self):
        """Return True if the reference has been resolved."""
        return self.referred is not None

    def resolve(self, scope: 'Scope', case_insensitive: bool = False) -> bool:
        """Resolve the reference against a scope; return whether it succeeded."""
        self.referred = scope.lookup(symbol_name=self.name, case_insensitive=case_insensitive)
        return self.resolved()

    def try_to_resolve(self, candidates: List[T], case_insensitive: bool = False) -> bool:
        """
        Try to resolve the reference by finding a named element with a matching name.
        The name match is performed in a case sensitive or insensitive way depending on the value of case_insensitive.
        """

        def check_name(candidate: T) -> bool:
            return candidate.name is not None and (
                candidate.name == self.name if not case_insensitive else candidate.name.lower() == self.name.lower())

        self.referred = next((candidate for candidate in candidates if check_name(candidate)), None)
        return self.resolved()


@dataclass
class Symbol(PossiblyNamed):
    """A named entity that can be registered in a Scope."""
    pass


@dataclass
class Scope:
    """A symbol table mapping names to symbols, with an optional parent scope."""
    # Fix: the default factory must build a dict, not a list -- the methods
    # below rely on dict operations (keys, get, item assignment).
    symbols: Dict[str, List[Symbol]] = field(default_factory=dict)
    parent: Optional['Scope'] = field(default=None)
    # Lazily-built index from lowercased names to the original-case names.
    insensitive_map: Optional[Dict[str, List[str]]] = field(default=None)

    def lookup(self, symbol_name: str, symbol_type: type = Symbol, case_insensitive: bool = False) -> Optional[Symbol]:
        """Find the first symbol of the given type with the given name.

        Falls back to the parent scope when not found here; returns None otherwise.
        """
        if case_insensitive:
            if self.insensitive_map is None:
                self.insensitive_map = {}
                for key in self.symbols.keys():
                    key_lower: str = key.lower()
                    self.insensitive_map[key_lower] = self.insensitive_map.get(key_lower, []) + [key]

            symbol_name_lower: str = symbol_name.lower()

            return next(
                (
                    symbol
                    for symbol in chain.from_iterable(
                        self.symbols.get(orig_symbol_name, [])
                        # Fix: use get() with a default -- an unknown name used to
                        # raise KeyError instead of falling through to the parent.
                        for orig_symbol_name in self.insensitive_map.get(symbol_name_lower, [])
                    )
                    if isinstance(symbol, symbol_type)
                ),
                # Fix: propagate case_insensitive to the parent lookup.
                self.parent.lookup(symbol_name, symbol_type, case_insensitive) if self.parent is not None else None
            )

        return next((symbol for symbol in self.symbols.get(symbol_name, []) if isinstance(symbol, symbol_type)),
                    self.parent.lookup(symbol_name, symbol_type) if self.parent is not None else None)

    def add(self, symbol: Symbol):
        """Register a symbol, keeping the case-insensitive index (if built) in sync."""
        self.symbols[symbol.name] = self.symbols.get(symbol.name, []) + [symbol]

        if self.insensitive_map is not None:
            symbol_name_lower: str = symbol.name.lower()
            self.insensitive_map[symbol_name_lower] = self.insensitive_map.get(symbol_name_lower, []) + [symbol.name]
# ===== pylasu/model/position.py =====
from dataclasses import dataclass, field
from pathlib import Path


@dataclass(order=True)
class Point:
    """A point in a text file: 1-based line, 0-based column."""
    line: int
    column: int

    def __post_init__(self):
        if self.line < 1:
            raise Exception(f"Line {self.line} cannot be less than 1")
        if self.column < 0:
            raise Exception(f"Column {self.column} cannot be less than 0")

    def is_before(self, other: "Point"):
        return self < other

    def __add__(self, other):
        """Advance the point by the given text, tracking \\n, \\r and \\r\\n line breaks."""
        if isinstance(other, str):
            if len(other) == 0:
                return self
            line = self.line
            column = self.column
            i = 0
            while i < len(other):
                if other[i] == '\n' or other[i] == '\r':
                    line += 1
                    column = 0
                    if other[i] == '\r' and i < len(other) - 1 and other[i + 1] == '\n':
                        i += 1  # Count the \r\n sequence as 1 line
                else:
                    column += 1
                i += 1
            return Point(line, column)
        else:
            raise NotImplementedError()

    def __repr__(self):
        return f"Line {self.line}, Column {self.column}"

    def __str__(self):
        return f"{self.line}:{self.column}"


class Source:
    """Abstract description of where a piece of code comes from."""
    pass


@dataclass
class SourceSet:
    """A named set of sources rooted at a directory."""
    name: str
    root: Path


@dataclass
class SourceSetElement(Source):
    """A source identified by its relative path inside a SourceSet."""
    sourceSet: SourceSet
    relativePath: Path


@dataclass
class FileSource(Source):
    """A source backed by a file on disk."""
    file: Path

    def __str__(self):
        return str(self.file)


@dataclass
class StringSource(Source):
    """A source backed by an in-memory string."""
    code: str = None


@dataclass
class URLSource(Source):
    """A source identified by a URL."""
    url: str = None


@dataclass(order=True)
class Position:
    """An area in a source file, from start to end.
    The start point is the point right before the starting character.
    The end point is the point right after the last character.
    An empty position will have coinciding points.

    Consider a file with one line, containing the text "HELLO".
    The Position of such text will be Position(Point(1, 0), Point(1, 5))."""
    start: Point
    end: Point
    source: Source = field(compare=False, default=None)

    def __post_init__(self):
        if self.end < self.start:
            raise Exception(f"End point can't be before starting point: {self.start} – {self.end}")

    def __contains__(self, pos):
        return isinstance(pos, Position) and self.start <= pos.start and self.end >= pos.end

    def __repr__(self):
        # Fix: the closing parenthesis was missing from the representation.
        return f"Position(start={self.start}, end={self.end}"\
            + (f", source={self.source}" if self.source is not None else "") + ")"

    def __str__(self):
        str_rep = (f"{self.source}:" if self.source is not None else "") + str(self.start)
        if self.start != self.end:
            str_rep += f"-{self.end}"
        return str_rep


def pos(start_line: int, start_col: int, end_line: int, end_col: int, source: Source = None):
    """Utility function to create a Position"""
    return Position(Point(start_line, start_col), Point(end_line, end_col), source)


# ===== pylasu/model/processing.py =====
from collections.abc import Iterable
from typing import Callable, List, Set

from . import walk
from .model import Node, internal_property
from ..support import extension_method


@extension_method(Node)
def assign_parents(self: Node):
    """Sets or corrects the parent of all AST nodes.

    Pylasu does not see set/add/delete operations on the AST nodes, so this function should be called manually after
    modifying the AST, unless you've taken care of assigning the parents yourself.

    :param self: the root of the AST subtree to start from.
    """
    for node in children(self):
        node.parent = self
        assign_parents(node)


def children(self: Node):
    """Yield the direct child nodes of this node."""
    yield from nodes_in(p.value for p in self.properties)


Node.children = internal_property(children)


def nodes_in(iterable):
    """Recursively yield the Node instances found in an iterable (strings excluded)."""
    for value in iterable:
        if isinstance(value, Node):
            yield value
        elif isinstance(value, Iterable) and not isinstance(value, str):
            yield from nodes_in(value)


@extension_method(Node)
def search_by_type(self: Node, target_type, walker=walk):
    """Yield all nodes of the given type, traversing with the given walker."""
    for node in walker(self):
        if isinstance(node, target_type):
            yield node


@extension_method(Node)
def transform_children(self: Node, operation: Callable[[Node], Node]):
    """Replace each child node with the result of applying operation to it."""
    for prop in self.properties:
        name = prop.name
        value = prop.value
        if isinstance(value, Node):
            new_value = operation(value)
            if new_value != value:
                setattr(self, name, new_value)
        elif isinstance(value, list):  # idiom: check against the builtin, not typing.List
            setattr(self, name, [operation(item) if isinstance(item, Node) else item for item in value])
        elif isinstance(value, set):
            raise Exception("Sets are not supported currently")


@extension_method(Node)
def replace_with(self: Node, other: Node):
    """Replace this node with another (by modifying the children of the parent node).
    For this to work, this node must have a parent assigned.

    :param self: the node to replace.
    :param other: the replacement node."""
    if not self.parent:
        raise Exception("Parent not set, cannot replace node")
    transform_children(self.parent, lambda x: other if x == self else x)


# ===== pylasu/model/reflection.py =====
import enum
from dataclasses import dataclass
from typing import Optional


class Multiplicity(enum.Enum):
    """How many values a feature can hold."""
    OPTIONAL = 0
    SINGULAR = 1
    MANY = 2


@dataclass
class PropertyDescription:
    """Describes one feature (property) of a node concept."""
    name: str
    type: Optional[type]
    is_containment: bool = False
    is_reference: bool = False
    multiplicity: Multiplicity = Multiplicity.SINGULAR
    value: object = None

    @property
    def multiple(self):
        return self.multiplicity == Multiplicity.MANY


# ===== pylasu/model/traversing.py =====
from typing import TypeVar, Type

from . import Position
from .model import Node
from ..support import extension_method


@extension_method(Node)
def walk(self: Node):
    """Walks the whole AST starting from this node, depth-first."""
    yield self
    for child in self.children:
        yield from walk(child)


@extension_method(Node)
def walk_within(self: Node, position: Position):
    """Walks the AST within the given [position] starting from this node, depth-first.
    :param self: the node from which to start the walk.
    :param position: the position within which the walk should remain."""
    if self.position in position:
        yield self
        for child in self.children:
            yield from walk_within(child, position)
    # Fix: guard against nodes without a position -- `position in None` raised TypeError.
    elif self.position is not None and position in self.position:
        for child in self.children:
            yield from walk_within(child, position)


@extension_method(Node)
def walk_leaves_first(self: Node):
    """Performs a post-order (or leaves-first) node traversal starting with a given node."""
    for child in self.children:
        yield from walk_leaves_first(child)
    yield self


@extension_method(Node)
def walk_ancestors(self: Node):
    """Iterator over the sequence of nodes from this node's parent all the way up to the root node."""
    if self.parent is not None:
        yield self.parent
        yield from walk_ancestors(self.parent)


@extension_method(Node)
def walk_descendants(self: Node, walker=walk, restrict_to=Node):
    """Walks the whole AST starting from the child nodes of this node.

    :param self: the node from which to start the walk, which is NOT included in the walk.
    :param walker: a function that generates a sequence of nodes. By default this is the depth-first "walk" method.
    For post-order traversal, use "walk_leaves_first".
    :param restrict_to: optional type filter. By default, all nodes (i.e., subclasses of Node) are included, but you can
    limit the walk to only a subtype of Node.
    """
    for node in walker(self):
        if node != self and isinstance(node, restrict_to):
            yield node


T = TypeVar("T")


@extension_method(Node)
def find_ancestor_of_type(self: Node, target: Type[T]) -> T:
    """Returns the nearest ancestor of this node that is an instance of the target type.

    Note that type is not strictly forced to be a subtype of Node. This is intended to support trait types like
    `Statement` or `Expression`. However, the returned value is guaranteed to be a Node, as only Node instances can be
    part of the hierarchy.

    :param self: the node from which to start the search.
    :param target: the target type.
    """
    for node in walk_ancestors(self):
        if isinstance(node, target):
            return node


# ===== pylasu/parsing/__init__.py =====
# NOTE: this chunk's dump recorded only a URL for this file; its content is not
# visible here and is therefore not reproduced.


# ===== pylasu/parsing/parse_tree.py =====
from dataclasses import dataclass, field
from typing import Optional, List, Sequence

from antlr4 import ParserRuleContext, TerminalNode, Token
from antlr4.tree.Tree import ParseTree

from pylasu.model import Origin, Position, Point
from pylasu.model.model import internal_property, Node
from pylasu.model.position import Source
from pylasu.support import extension_method

import inspect


@dataclass
class ParseTreeOrigin(Origin):
    """Origin tracking the ANTLR parse tree an AST node was derived from."""
    parse_tree: ParseTree
    source: Source = None

    @internal_property
    def position(self) -> Optional[Position]:
        return self.parse_tree.to_position(self.source)

    @internal_property
    def source_text(self) -> Optional[str]:
        return self.parse_tree.get_original_text()


def token_start_point(token: Token):
    """The point right before the token's first character."""
    return Point(token.line, token.column)


def token_end_point(token: Token):
    """The point right after the token's last character (EOF is zero-width)."""
    if token.type == Token.EOF:
        return token.start_point
    else:
        return token.start_point + token.text


Token.start_point = property(token_start_point)
Token.end_point = property(token_end_point)


@extension_method(ParserRuleContext)
def to_position(self: ParserRuleContext, source: Source = None):
    """Compute the Position covered by this parser rule."""
    # In case of an empty input, the start token will be EOF and the end token will be None
    if self.stop and self.start.start_point <= self.stop.end_point:
        return Position(self.start.start_point, self.stop.end_point, source)
    else:
        # In case of parse errors, sometimes ANTLR inserts nodes that end before they start
        return Position(self.start.start_point, self.start.end_point, source)


@extension_method(TerminalNode)
def to_position(self: TerminalNode, source: Source = None):
    """Compute the Position covered by this terminal node's token."""
    return self.symbol.to_position(source)


@extension_method(Token)
def to_position(self: Token, source: Source = None):
    """Compute the Position covered by this token."""
    return Position(self.start_point, self.end_point, source)


@extension_method(ParseTree)
def get_original_text(self: ParseTree) -> str:
    """Return the text of this parse tree node."""
    return self.getText()


@extension_method(ParserRuleContext)
def get_original_text(self: ParserRuleContext) -> str:
    """Return the original input text covered by this rule, including whitespace.

    NOTE(review): assumes self.stop is not None; for empty input ANTLR may leave
    stop unset (see to_position) -- confirm callers only use this on valid rules.
    """
    a = self.start.start
    b = self.stop.stop
    return self.start.getInputStream().getText(a, b)


@extension_method(Node)
def with_parse_tree(self: Node, parse_tree: Optional[ParseTree], source: Source = None):
    """Set the origin of the AST node as a ParseTreeOrigin, providing the parse_tree is not None.
    If the parse_tree is None, no operation is performed."""
    if parse_tree:
        self.origin = ParseTreeOrigin(parse_tree=parse_tree, source=source)
    return self


def generate_nodes_classes_for_parser(parser_class: type, ns: dict):
    """Generate a dataclass-based AST node class for each context class of the
    given ANTLR parser, registering the classes in the namespace ns."""
    for name, definition in parse_tree_node_definitions(parser_class):
        fields = {"__annotations__": {}}
        property_names = []
        aliases = {}
        for child_name, value in parse_tree_node_children(definition):
            default_value = None
            field_type = None
            if inspect.isfunction(value):
                n_args = len(inspect.signature(value).parameters)
                if n_args == 2:
                    # Indexed rule accessor: the context holds a list of children
                    suffix = "_list"
                    child_name += suffix
                    field_type = List
                    default_value = field(default_factory=list)
                    aliases[child_name] = child_name[:-len(suffix)]
            fields[child_name] = default_value
            fields["__annotations__"][child_name] = field_type
            property_names.append(child_name)
        name = ast_node_name(name)
        class_def = type(name, (Node,), fields)
        class_def.properties = properties_method(property_names)
        class_def.from_parse_tree = from_parse_tree_function(class_def, property_names, aliases, ns)
        ns[name] = dataclass(class_def)


def ast_node_name(parse_tree_node_name):
    """Derive the AST class name by stripping ANTLR's "Context" suffix."""
    if parse_tree_node_name.endswith("Context"):
        parse_tree_node_name = parse_tree_node_name[:-len("Context")]
    return parse_tree_node_name


def properties_method(property_names):
    """Build the `properties` internal property, yielding (name, value) pairs."""
    return internal_property(lambda node: ((p, getattr(node, p)) for p in property_names))


def stop_token(node):
    """The last token covered by a parse tree node, or None."""
    if isinstance(node, TerminalNode):
        return node.symbol
    elif isinstance(node, ParserRuleContext):
        return node.stop


def start_token(node):
    """The first token covered by a parse tree node, or None."""
    if isinstance(node, TerminalNode):
        return node.symbol
    elif isinstance(node, ParserRuleContext):
        return node.start


def make_ast_node_or_value(parse_tree_node, prev_node, ns, parent: Node, source: Source):
    """Build an AST node for parse_tree_node if a generated class exists in ns,
    otherwise return the node's text. (prev_node is currently unused; kept for
    interface stability.)"""
    ast_node_type_name = ast_node_name(type(parse_tree_node).__name__)
    if ast_node_type_name in ns:
        ast_node = ns[ast_node_type_name].from_parse_tree(parse_tree_node, parent, source)
        return ast_node
    else:
        return parse_tree_node.getText()


def from_parse_tree_function(node_class, property_names, aliases, ns: dict):
    """Build the from_parse_tree factory for a generated AST node class."""
    def from_parse_tree(parse_tree: ParseTree, parent: Node = None, source: Source = None):
        node = node_class().with_parent(parent).with_parse_tree(parse_tree, source)
        last_pt_node = parse_tree
        for prop in property_names:
            if prop in aliases:
                pt_prop = aliases[prop]
            else:
                pt_prop = prop
            prop_val = getattr(parse_tree, pt_prop)
            if inspect.ismethod(prop_val):
                prop_val = prop_val()
            if isinstance(prop_val, Sequence) and not isinstance(prop_val, str):
                children = []
                for child in prop_val:
                    children.append(make_ast_node_or_value(child, last_pt_node, ns, node, source))
                    last_pt_node = child
                setattr(node, prop, children)
            elif prop_val is not None:
                setattr(node, prop, make_ast_node_or_value(prop_val, last_pt_node, ns, node, source))
                last_pt_node = prop_val
        return node
    return from_parse_tree


def parse_tree_node_definitions(parser_class: type):
    """Yield the context classes nested in the given ANTLR parser class."""
    for name in dir(parser_class):
        definition = getattr(parser_class, name)
        if isinstance(definition, type) and name != "__class__":
            yield name, definition


def parse_tree_node_children(parse_tree_node_class: type):
    """Yield the members a context class adds over the base ParserRuleContext."""
    for x in dir(parse_tree_node_class):
        if x not in dir(ParserRuleContext) and x != "parser":
            yield x, getattr(parse_tree_node_class, x)
# ===== pylasu/parsing/results.py =====
from dataclasses import dataclass
from typing import List

from antlr4 import ParserRuleContext, Token

from pylasu.model import Source
from pylasu.validation.validation import WithIssues, IssueType, Issue


@dataclass
class FirstStageResult(WithIssues):
    """Outcome of the parsing stage: the parse tree plus any collected issues."""
    parse_tree: ParserRuleContext


@dataclass
class LexingResult(WithIssues):
    """Outcome of the lexing stage: the token stream plus any collected issues."""
    tokens: List[Token]


@dataclass
class IssuesErrorListener:
    """This Error Listener should be used with ANTLR lexers and parsers to capture issues"""
    type: IssueType
    source: Source
    issues: WithIssues

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        # Record every syntax error as an Issue of the configured type.
        self.issues.append(Issue(type=self.type, message=msg))

    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
        return  # ambiguity reports are deliberately ignored

    def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs):
        return  # full-context attempts are deliberately ignored

    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
        return  # context-sensitivity reports are deliberately ignored


# ===== pylasu/playground/__init__.py =====
from .transpilation_trace import TranspilationTrace
from .transpilation_trace_ecore import JsonResource, TranspilationTrace as ETranspilationTrace


# ===== pylasu/playground/transpilation_trace.py =====
from dataclasses import dataclass, field
from io import BytesIO
from typing import List

from pyecore.resources import Resource, ResourceSet, URI

from pylasu import StrumentaLanguageSupport as starlasu
from pylasu.playground.transpilation_trace_ecore import TranspilationTrace as ETranspilationTrace, JsonResource
from pylasu.validation.validation import Result, Issue


@dataclass
class TranspilationTrace:
    """A record of a whole transpilation: source code, both ASTs, generated code, issues."""
    original_code: str
    source_result: Result
    target_result: Result
    generated_code: str
    issues: List[Issue] = field(default_factory=list)

    def to_eobject(self, resource: Resource):
        """Convert this trace to its Ecore counterpart.

        NOTE(review): self.issues is not copied into the Ecore object even though
        it supports an `issues` reference -- confirm whether that is intended.
        """
        node_mappings = {}
        source_root = self.source_result.root.to_eobject(resource, node_mappings)
        target_root = self.target_result.root.to_eobject(resource, node_mappings)
        return ETranspilationTrace(
            original_code=self.original_code,
            source_result=starlasu.Result(root=source_root),
            target_result=starlasu.Result(root=target_root),
            generated_code=self.generated_code
        )

    def save_as_json(self, name, *packages):
        """Serialize this trace (and the given packages) as JSON, returned as a string."""
        resource_set = ResourceSet()
        resource_set.resource_factory['json'] = JsonResource
        resource = resource_set.create_resource(URI(name))
        for package in packages:
            package_resource = resource_set.create_resource(URI(package.nsURI))
            package_resource.contents.append(package)
        resource.contents.append(self.to_eobject(resource))
        with BytesIO() as buffer:
            resource.save(buffer)
            return buffer.getvalue().decode('utf-8')


# ===== pylasu/playground/transpilation_trace_ecore.py =====
from io import IOBase, BytesIO

from pyecore.ecore import EObject, MetaEClass, EAttribute, EString, EReference
from pyecore.resources import ResourceSet, URI
from pyecore.resources.json import JsonResource as BaseJsonResource

from pylasu import StrumentaLanguageSupport as starlasu

nsURI = "https://strumenta.com/kolasu/transpilation/v1"
name = "StrumentaLanguageSupportTranspilation"


class JsonResource(BaseJsonResource):
    """JSON resource that can also serialize straight into an already-open stream."""

    def open_out_stream(self, other=None):
        # An existing open stream is used as-is; anything else is delegated.
        if isinstance(other, IOBase):
            return other
        return super().open_out_stream(other)


class TranspilationTrace(EObject, metaclass=MetaEClass):
    """Ecore counterpart of pylasu.playground.TranspilationTrace."""
    # camelCase is deliberate: Pyecore's JSON serialization cannot map Python
    # attribute names onto differently-named Ecore structural features.
    originalCode = EAttribute(eType=EString)
    sourceResult = EReference(containment=True, eType=starlasu.Result)
    targetResult = EReference(containment=True, eType=starlasu.Result)
    generatedCode = EAttribute(eType=EString)
    issues = EReference(containment=True, eType=starlasu.Issue, upper=-1)

    def __init__(self, *, original_code=None, source_result=None, target_result=None, generated_code=None, issues=None):
        super().__init__()
        initial_values = (
            ("originalCode", original_code),
            ("sourceResult", source_result),
            ("targetResult", target_result),
            ("generatedCode", generated_code),
        )
        for attribute, value in initial_values:
            if value is not None:
                setattr(self, attribute, value)
        if issues:
            self.issues.extend(issues)

    def save_as_json(self, name, *packages):
        """Serialize this trace (and the given packages) as JSON, returned as a string."""
        resource_set = ResourceSet()
        resource_set.resource_factory['json'] = JsonResource
        resource = resource_set.create_resource(URI(name))
        for package in packages:
            package_resource = resource_set.create_resource(URI(package.nsURI))
            package_resource.contents.append(package)
        resource.contents.append(self)
        with BytesIO() as buffer:
            resource.save(buffer)
            return buffer.getvalue().decode('utf-8')


# ===== pylasu/reflection/__init__.py =====
from .reflection import get_type_annotations, get_type_arguments, is_sequence_type
def get_type_annotations(cls: type):
    """Return the type annotations declared on *cls* (resolved when possible)."""
    if hasattr(typing, "get_type_hints"):
        # Resolves string annotations per PEP 563.
        # https://peps.python.org/pep-0563/
        return typing.get_type_hints(cls)
    try:
        import inspect
        # On Python 3.10+ inspect offers a dedicated accessor.
        for accessor in ("get_annotations", "getannotations"):
            if hasattr(inspect, accessor):
                return getattr(inspect, accessor)(cls)
    except ModuleNotFoundError:
        pass
    # Last resort: read the raw __annotations__ mapping.
    if isinstance(cls, type):
        return cls.__dict__.get('__annotations__', {})
    return getattr(cls, '__annotations__', {})


def get_type_origin(tp):
    """Return the unsubscripted origin of a (possibly generic) type.

    Falls back to *tp* itself when it is a plain class, or None otherwise.
    """
    if hasattr(typing, "get_origin"):
        origin = typing.get_origin(tp)
    elif hasattr(tp, "__origin__"):
        origin = tp.__origin__
    elif tp is typing.Generic:
        origin = typing.Generic
    else:
        origin = None
    if origin is not None:
        return origin
    return tp if isinstance(tp, type) else None


def is_enum_type(attr_type):
    """True when *attr_type* is an Enum subclass."""
    if not isinstance(attr_type, EnumMeta):
        return False
    return issubclass(attr_type, Enum)


def is_sequence_type(attr_type):
    """True when *attr_type*'s origin is a concrete Sequence type (e.g. List[X])."""
    origin = get_type_origin(attr_type)
    return isinstance(origin, type) and issubclass(origin, typing.Sequence)


def get_type_arguments(tp):
    """Return the type arguments of *tp*, e.g. (int,) for List[int]; () if none.

    For Callable, arguments are normalized to ([params...], return_type) as
    typing.get_args does.
    """
    if hasattr(typing, "get_args"):
        return typing.get_args(tp)
    if hasattr(tp, "__args__"):
        args = tp.__args__
        if get_type_origin(tp) is Callable and args[0] is not Ellipsis:
            args = (list(args[:-1]), args[-1])
        return args
    return ()
def extension_method(cls):
    """Installs the decorated function as an extension method on cls.
    See https://mail.python.org/pipermail/python-dev/2008-January/076194.html"""
    def decorator(func):
        name = func.__name__
        if name not in vars(cls):
            setattr(cls, name, func)
            if isinstance(cls, Concept):
                # Keep reflection metadata consistent: the new member must not
                # be picked up as a node property.
                register_internal_property(cls, name)
            return func
        raise Exception(f"{cls} already has a member called {name}")
    return decorator
def assert_single_properties_are_equal(case, expected_property, expected_prop_value, actual_prop_value, context,
                                       consider_position):
    """Compares a single-valued containment property of two nodes.

    When both values are present, recurses into assert_asts_are_equal;
    otherwise both must be None.
    """
    if expected_prop_value is None or actual_prop_value is None:
        # assertEqual passes when both sides are None and fails with a
        # labelled message when only one side is missing.
        case.assertEqual(expected_prop_value, actual_prop_value,
                         f"{context}.{expected_property.name}")
        return
    case.assertIsInstance(actual_prop_value, Node)
    assert_asts_are_equal(
        case, expected_prop_value, actual_prop_value,
        context=f"{context}.{expected_property.name}",
        consider_position=consider_position)
@dataclass
class GenericNode(Node):
    """A generic AST node. We use it to represent parts of a source tree that we don't know how to translate yet."""
    # The parent is accepted at construction time so transformers can attach
    # the placeholder directly into the tree being built.
    parent: Node = None
@dataclass
class PropertyRef:
    """Names a property of a node and provides getter/setter accessors for it."""
    name: str

    def get(self, node: Node):
        """Read this property from *node*."""
        return getattr(node, self.name)

    def set(self, node: Node, value):
        """Write *value* into this property of *node*."""
        return setattr(node, self.name, value)


@dataclass
class NodeFactory(Generic[Source, Output]):
    """Maps a source-tree node type to target nodes, with per-child sub-factories."""
    constructor: node_factory_constructor_type
    children: Dict[str, "ChildNodeFactory[Source, Any, Any]"] = field(default_factory=dict)
    finalizer: Callable[[Source], None] = field(default=lambda _: None)

    def with_child(
        self,
        setter: Union[Callable[[Target, Optional[Child]], None], PropertyRef],
        getter: Union[Callable[[Source], Optional[Any]], PropertyRef],
        name: Optional[str] = None,
        target_type: Optional[type] = None
    ) -> "NodeFactory[Source, Output]":
        """Registers a child mapping and returns self for chaining.

        When *name* is omitted it is taken from the setter (which must then be
        a PropertyRef). With *target_type*, the mapping is keyed per-type as
        "Type#name" so subclasses can override a parent's child mapping.
        """
        key = name or setter.name
        prefix = f"{target_type.__qualname__}#" if target_type else ""
        get_fn = getter.get if isinstance(getter, PropertyRef) else getter
        set_fn = setter.set if isinstance(setter, PropertyRef) else setter
        self.children[prefix + key] = ChildNodeFactory(prefix + key, get_fn, set_fn)
        return self


@dataclass
class ChildNodeFactory(Generic[Source, Target, Child]):
    """Getter/setter pair used to populate a single child of a target node."""
    name: str
    get: Callable[[Source], Optional[Any]]
    setter: Callable[[Target, Optional[Child]], None]

    def set(self, node: Target, child: Optional[Child]):
        """Assigns *child* on *node*, wrapping any failure with context."""
        try:
            self.setter(node, child)
        except Exception as e:
            raise Exception(f"{self.name} could not set child {child} of {node} using {self.setter}") from e


# Sentinel value used to represent the information that a given property is not a child node.
NO_CHILD_NODE = ChildNodeFactory("", lambda x: x, lambda _, __: None)
List[Issue] = [] 78 | "Additional issues found during the transformation process." 79 | allow_generic_node: bool = True 80 | factories: Dict[type, NodeFactory] 81 | "Factories that map from source tree node to target tree node." 82 | known_classes: Dict[str, Set[type]] 83 | 84 | def __init__(self, issues: List[Issue] = None, allow_generic_node: bool = True): 85 | self.issues = issues or [] 86 | self.allow_generic_node = allow_generic_node 87 | self.factories = dict() 88 | self.known_classes = dict() 89 | 90 | def transform(self, source: Optional[Any], parent: Optional[Node] = None) -> Optional[Node]: 91 | result = self.transform_into_nodes(source, parent) 92 | if len(result) == 0: 93 | return None 94 | elif len(result) == 1: 95 | return result[0] 96 | else: 97 | raise Exception(f"Cannot transform {source} into a single Node as multiple nodes where produced") 98 | 99 | def transform_into_nodes(self, source: Optional[Any], parent: Optional[Node] = None) -> List[Node]: 100 | if source is None: 101 | return [] 102 | elif isinstance(source, Iterable): 103 | raise Exception(f"Mapping error: received collection when value was expected: {source}") 104 | factory = self.get_node_factory(type(source)) 105 | if factory: 106 | nodes = self.make_nodes(factory, source) 107 | for node in nodes: 108 | for pd in concept_of(node).node_properties: 109 | self.process_child(source, node, pd, factory) 110 | factory.finalizer(node) 111 | node.parent = parent 112 | 113 | else: 114 | if self.allow_generic_node: 115 | origin = self.as_origin(source) 116 | nodes = [GenericNode(parent).with_origin(origin)] 117 | self.issues.append( 118 | Issue.semantic( 119 | f"Source node not mapped: {type(source).__qualname__}", 120 | IssueSeverity.INFO, 121 | origin.position if origin else None)) 122 | else: 123 | raise Exception(f"Unable to transform node {source} (${type(source)})") 124 | return nodes 125 | 126 | def process_child(self, source, node, pd, factory): 127 | child_key = type(node).__qualname__ 
    def as_origin(self, source: Any) -> Optional[Origin]:
        """Returns *source* itself when it can serve as an Origin, else None."""
        return source if isinstance(source, Origin) else None

    def set_child(self, child_node_factory: ChildNodeFactory, source: Any, node: Node, pd: PropertyDescription):
        """Transforms and assigns one child of *node* described by *pd*.

        Multi-valued properties collect the transformation of each source
        element; single-valued ones transform the extracted value directly.
        """
        src = child_node_factory.get(self.get_source(node, source))
        if pd.multiple:
            child = []
            for child_src in src:
                child.extend(self.transform_into_nodes(child_src, node))
        else:
            child = self.transform(src, node)
        try:
            child_node_factory.set(node, child)
        except Exception as e:
            raise Exception(f"Could not set child {child_node_factory}") from e

    def get_source(self, node: Node, source: Any) -> Any:
        """Hook for subclasses to adapt the source before child extraction
        (e.g. unwrapping parse-tree contexts). Default: identity."""
        return source

    def make_nodes(self, factory: NodeFactory[Source, Target], source: Source) -> List[Node]:
        """Instantiates target nodes via *factory*, attaching *source* as the
        origin of any node whose constructor did not set one.

        If the constructor raises and generic nodes are allowed, a single
        GenericErrorNode carrying the exception is returned instead.
        """
        try:
            nodes = factory.constructor(source, self, factory)
            for node in nodes:
                if node.origin is None:
                    node.with_origin(self.as_origin(source))
            return nodes
        except Exception as e:
            if self.allow_generic_node:
                return [GenericErrorNode(error=e).with_origin(self.as_origin(source))]
            else:
                raise e
    def register_node_factory(
        self, source: Type[Source],
        factory: Union[node_factory_constructor_type, node_factory_single_constructor_type, Type[Target]]
    ) -> NodeFactory[Source, Target]:
        """Registers *factory* as the mapping for source nodes of type *source*.

        *factory* may be a target Node type (instantiated with no arguments),
        or a callable taking (source[, transformer[, node_factory]]) and
        returning a node or a list of nodes.
        """
        if isinstance(factory, type):
            # A bare type: construct it with no arguments, ignoring the inputs.
            node_factory = NodeFactory(lambda _, __, ___: [factory()])
        else:
            node_factory = NodeFactory(get_node_constructor_wrapper(factory))
        self.factories[source] = node_factory
        return node_factory

    def register_identity_transformation(self, node_class: Type[Target]):
        """Registers a factory that returns the source node itself, unchanged."""
        self.register_node_factory(node_class, lambda node: node)


def ensure_list(obj):
    """Wraps *obj* in a list: None -> [], a list -> itself, anything else -> [obj]."""
    if isinstance(obj, list):
        return obj
    elif obj is not None:
        return [obj]
    else:
        return []


def get_node_constructor_wrapper(decorated_function):  # noqa C901
    """Adapts a constructor callable of 1, 2 or 3 parameters to the uniform
    (node, transformer, factory) -> List[Node] signature.

    The arity is probed with Signature.bind, widest first; when the signature
    cannot be inspected at all (ValueError), the 3-argument form is assumed.
    """
    try:
        sig = signature(decorated_function)
        try:
            sig.bind(1, 2, 3)

            def wrapper(node: Node, transformer: ASTTransformer, factory):
                return ensure_list(decorated_function(node, transformer, factory))
        except TypeError:
            try:
                sig.bind(1, 2)

                def wrapper(node: Node, transformer: ASTTransformer, _):
                    return ensure_list(decorated_function(node, transformer))
            except TypeError:
                sig.bind(1)

                def wrapper(node: Node, _, __):
                    return ensure_list(decorated_function(node))
    except ValueError:
        def wrapper(node: Node, transformer: ASTTransformer, factory):
            return ensure_list(decorated_function(node, transformer, factory))

    functools.update_wrapper(wrapper, decorated_function)
    return wrapper
class IssueType(enum.Enum):
    """Category of an issue: the processing stage where it was detected."""
    LEXICAL = 0
    SYNTACTIC = 1
    SEMANTIC = 2


class IssueSeverity(enum.Enum):
    """Severity levels; the numeric values are ordered (higher = more severe)."""
    ERROR = 30
    WARNING = 20
    INFO = 10


@dataclass
class Issue:
    """A problem found while processing some code, optionally with a position."""
    type: IssueType
    message: str
    severity: IssueSeverity = IssueSeverity.ERROR
    position: Position = None

    def __str__(self):
        """Human-readable form, e.g. 'Error (semantic): message @ position'."""
        msg = f"{self.severity.name.capitalize()} ({self.type.name.lower()}): {self.message}"
        if self.position:
            msg += f" @ {self.position}"
        return msg

    @staticmethod
    def semantic(message: str, severity: IssueSeverity = IssueSeverity.ERROR, position: Position = None):
        """Convenience constructor for IssueType.SEMANTIC issues."""
        return Issue(IssueType.SEMANTIC, message, severity, position)


@dataclass
class WithIssues:
    """Many classes have the necessity of tracking issues"""
    # init=False: the list is always created fresh, never passed in.
    issues: List[Issue] = field(default_factory=list, init=False)
Result(WithIssues): 46 | root: Node 47 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "pylasu" 7 | dynamic = ["version", "optional-dependencies"] 8 | authors = [ 9 | { name="Lorenzo Addazi", email="lorenzo.addazi@strumenta.com" }, 10 | { name="Alessio Stalla", email="alessio.stalla@strumenta.com" }, 11 | { name="Federico Tomassetti", email="federico@strumenta.com" }, 12 | ] 13 | description = "Pylasu is an AST Library in the StarLasu family, targeting the Python language." 14 | readme = "README.md" 15 | requires-python = ">=3.7" 16 | classifiers = [ 17 | "Programming Language :: Python :: 3", 18 | "License :: OSI Approved :: Apache Software License", 19 | "Operating System :: OS Independent", 20 | ] 21 | license = { file="LICENSE" } 22 | 23 | [project.urls] 24 | "Homepage" = "https://github.com/strumenta/pylasu" 25 | "Bug Tracker" = "https://github.com/strumenta/pylasu/issues" 26 | 27 | [tool.setuptools.dynamic] 28 | version = {attr = "pylasu.VERSION"} 29 | 30 | [tool.setuptools.packages.find] 31 | include = ["pylasu*"] 32 | 33 | [tool.coverage.run] 34 | branch = true 35 | omit = ["docs/*", "tests/*"] 36 | 37 | [tool.coverage.report] 38 | show_missing = true 39 | skip_covered = true 40 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [options.extras_require] 2 | ecore = 3 | pyecore 4 | -------------------------------------------------------------------------------- /tests/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-complexity = 10 3 | max-line-length = 120 4 | ignore = F841, W503 5 | # F841 = Variable assigned but 
lexer grammar AntlrEntityLexer;

options {
    caseInsensitive=true; // Recently-added option: ignores letter case
}

// Tokens for the types
INTEGER: 'integer'; // Token names start with an uppercase letter
BOOLEAN: 'boolean'; // By convention they are written in ALL_CAPS
STRING: 'string';   // Each token has its own pattern
// These tokens match a fixed (case-insensitive) string

// Same for the keywords
ENTITY: 'entity';
MODULE: 'module';

// Punctuation
COLON: ':';
SEMI: ';';
LSQRD: '[';
RSQRD: ']';
LCRLY: '{';
RCRLY: '}';

// Names (identifiers)
ID: [A-Z]+; // Note the regular-expression-like pattern

// Whitespace characters are of no interest here,
// so we hide them from the parser
WS: [ \r\n\t]+ -> channel(HIDDEN);
8 | // Le regole iniziano con la lettera minuscola. 9 | module: 10 | MODULE name=ID LCRLY // Match di 3 token: MODULE, ID e LCRLY 11 | entities+=entity* // Match di 0 o più sottoregole entity 12 | RCRLY // Match di 1 token RCRLY 13 | EOF 14 | ; 15 | 16 | entity: 17 | ENTITY name=ID LCRLY 18 | features+=feature* 19 | RCRLY 20 | ; 21 | 22 | feature: 23 | name=ID COLON type=type_spec SEMI 24 | ; 25 | 26 | type_spec // Regola definita per casi 27 | : INTEGER #integer_type 28 | | BOOLEAN #boolean_type 29 | | STRING #string_type 30 | | target=ID #entity_type 31 | ; -------------------------------------------------------------------------------- /tests/AntlrScriptLexer.g4: -------------------------------------------------------------------------------- 1 | lexer grammar AntlrScriptLexer; 2 | 3 | options { 4 | caseInsensitive=true; //Opzione aggiunta recentemente, ignora il case 5 | } 6 | 7 | // Token per i tipi 8 | INTEGER: 'integer'; // I nomi di token iniziano con la maiuscola 9 | BOOLEAN: 'boolean'; // Per convenzione si scrivono in ALL_CAPS 10 | STRING: 'string'; // Ogni token ha il suo pattern 11 | // Questi token corrispondono ad una stringa fissa (case insensitive) 12 | 13 | // Idem per le keyword (parole chiave) 14 | ENTITY: 'entity'; 15 | MODULE: 'module'; 16 | 17 | // Segni di punteggiatura 18 | COLON: ':'; 19 | SEMI: ';'; 20 | LSQRD: '['; 21 | RSQRD: ']'; 22 | LCRLY: '{'; 23 | RCRLY: '}'; 24 | LPAREN: '('; 25 | RPAREN: ')'; 26 | 27 | DIV: '/'; 28 | MULT: '*'; 29 | PLUS: '+'; 30 | MINUS: '-'; 31 | HASH: '#'; 32 | 33 | CREATE: 'create'; 34 | AS: 'as'; 35 | SET: 'set'; 36 | OF: 'of'; 37 | TO: 'to'; 38 | PRINT: 'print'; 39 | CONCAT: 'concat'; 40 | AND: 'and'; 41 | 42 | // Nomi (o identifier) 43 | ID: [a-zA-Z][a-zA-Z0-9_]*; // Notare il pattern tipo espressione regolare 44 | INT_VALUE: '0'|[1-9][0-9]*; 45 | STR_VALUE: '\'' ~['\r\n]* '\''; 46 | 47 | // I caratteri di spaziatura non ci interessano, 48 | // dunque li nascondiamo al parser 49 | WS: [ \r\n\t]+ -> 
parser grammar AntlrScriptParser;

options {
    tokenVocab=AntlrScriptLexer; // Reference to the lexer
}

// A script is a possibly empty sequence of statements.
script:
    (statements+=statement)*
    EOF
    ;

statement:
    CREATE entity=ID (AS var_name=ID)? #create_statement
    | SET feature=ID OF instance=expression TO value=expression #set_statement
    | PRINT message=expression #print_statement
    ;

// Alternative order defines precedence for the left-recursive rule:
// DIV/MULT binds tighter than PLUS/MINUS.
expression:
    name=ID #reference_expression
    | entity=ID HASH id=expression #entity_by_id_expression
    | MINUS expression #minus_expression
    | INT_VALUE #int_literal_expression
    | STR_VALUE #string_literal_expression
    | left=expression op=(DIV|MULT) right=expression #div_mult_expression
    | left=expression op=(PLUS|MINUS) right=expression #sum_sub_expression
    | LPAREN expression RPAREN #parens_expression
    | feature=ID OF instance=expression #feature_access_expression
    | CONCAT left=expression AND right=expression #concat_expression
    ;


type_spec // Rule defined by cases
    : INTEGER #integer_type
    | BOOLEAN #boolean_type
    | STRING #string_type
    | target=ID #entity_type
    ;
// Literal tokens.
INT_LIT : [0-9]+ ;
DEC_LIT : [0-9]+ '.' [0-9]+ ;
// Fixed: the original rule ('"' '"') matched ONLY the empty string "".
// Accept any characters except quotes and newlines between the delimiters;
// "" still matches, so existing inputs keep working.
STRING_LIT : '"' ~["\r\n]* '"' ;
BOOLEAN_LIT : 'false' | 'true' ;

// Identifiers (declared after BOOLEAN_LIT so 'true'/'false' keep their token type).
ID : [a-zA-Z][a-zA-Z_0-9]* ;
# Generates the Python3 ANTLR lexers/parsers used by the tests
# (output dirs are git-ignored; see tests/.gitignore).
# Requires antlr-4.11.1-complete.jar in the parent directory.
java -cp ../antlr-4.11.1-complete.jar org.antlr.v4.Tool -Dlanguage=Python3 -visitor -o simple_lang SimpleLangLexer.g4 SimpleLangParser.g4
java -cp ../antlr-4.11.1-complete.jar org.antlr.v4.Tool -Dlanguage=Python3 -visitor -o antlr_entity AntlrEntityLexer.g4 AntlrEntityParser.g4
java -cp ../antlr-4.11.1-complete.jar org.antlr.v4.Tool -Dlanguage=Python3 -visitor -o antlr_script AntlrScriptLexer.g4 AntlrScriptParser.g4
@dataclass
class EModule(Node, Named):
    """Test AST root: a named module containing entity definitions."""
    entities: List["EEntity"] = field(default_factory=list)


@dataclass
class EEntity(Node, Named):
    """A named entity with a list of features."""
    features: List["EFeature"] = field(default_factory=list)


@dataclass
class EFeature(Node, Named):
    """A named, typed feature of an entity."""
    type: "EType" = None


class EType(Node):
    """Abstract base for feature types."""
    pass


@dataclass
class EBooleanType(EType):
    pass


@dataclass
class EStringType(EType):
    pass


@dataclass
class EEntityRefType(EType):
    """A type referring to another entity by name (resolved separately)."""
    entity: ReferenceByName[EEntity]
    def parse_entities(self, code: str) -> AntlrEntityParser.ModuleContext:
        """Parses *code* with the generated ANTLR entity parser and returns
        the root `module` parse-tree context."""
        lexer = AntlrEntityLexer(InputStream(code))
        parser = AntlrEntityParser(CommonTokenStream(lexer))
        return parser.module()
import List, Optional, Union 4 | 5 | from pylasu.model import Node, Position, Point, internal_field 6 | from pylasu.model.model import node_property, node_containment 7 | from pylasu.model.reflection import Multiplicity, PropertyDescription 8 | from pylasu.model.naming import ReferenceByName, Named, Scope, Symbol 9 | from pylasu.support import extension_method 10 | 11 | 12 | @dataclasses.dataclass 13 | class SomeNode(Node, Named): 14 | foo = 3 15 | bar: int = dataclasses.field(init=False) 16 | __private__ = 4 17 | containment: Node = None 18 | reference: ReferenceByName[Node] = None 19 | multiple: List[Node] = dataclasses.field(default_factory=list) 20 | optional: Optional[Node] = None 21 | multiple_opt: List[Optional[Node]] = dataclasses.field(default_factory=list) 22 | internal: Node = internal_field(default=None) 23 | 24 | def __post_init__(self): 25 | self.bar = 5 26 | 27 | 28 | @dataclasses.dataclass 29 | class ExtendedNode(SomeNode): 30 | prop = 2 31 | cont_fwd: "ForwardReferencedNode" = None 32 | cont_ref: ReferenceByName["ForwardReferencedNode"] = None 33 | multiple2: List[SomeNode] = dataclasses.field(default_factory=list) 34 | multiple_fwd: List["ForwardReferencedNode"] = dataclasses.field(default_factory=list) 35 | internal2: Node = internal_field(default=None) 36 | explicit_property: str = node_property("42") 37 | explicit_containment: "ExtendedNode" = node_containment(Multiplicity.MANY) 38 | 39 | 40 | @dataclasses.dataclass 41 | class ForwardReferencedNode(Node): 42 | pass 43 | 44 | 45 | @dataclasses.dataclass 46 | class SomeSymbol(Symbol): 47 | index: int = dataclasses.field(default=None) 48 | 49 | 50 | @dataclasses.dataclass 51 | class AnotherSymbol(Symbol): 52 | index: int = dataclasses.field(default=None) 53 | 54 | 55 | @dataclasses.dataclass 56 | class InvalidNode(Node): 57 | attr: int 58 | child: SomeNode 59 | invalid_prop: Union[Node, str] = None 60 | another_child: Node = None 61 | 62 | 63 | def require_feature(node, name) -> 
PropertyDescription: 64 | return next(n for n in node.properties if n.name == name) 65 | 66 | 67 | def find_feature(node, name) -> Optional[PropertyDescription]: 68 | return next((n for n in node.properties if n.name == name), None) 69 | 70 | 71 | class ModelTest(unittest.TestCase): 72 | 73 | def test_reference_by_name_unsolved_str(self): 74 | ref_unsolved = ReferenceByName[SomeNode]("foo") 75 | self.assertEqual("Ref(foo)[Unsolved]", str(ref_unsolved)) 76 | 77 | def test_reference_by_name_solved_str(self): 78 | ref_solved = ReferenceByName[SomeNode]("foo", SomeNode(name="foo")) 79 | self.assertEqual("Ref(foo)[Solved]", str(ref_solved)) 80 | 81 | def test_try_to_resolve_positive_case_same_case(self): 82 | ref = ReferenceByName[SomeNode]("foo") 83 | self.assertTrue(ref.try_to_resolve(candidates=[SomeNode(name="foo")])) 84 | 85 | def test_try_to_resolve_negative_case_same_case(self): 86 | ref = ReferenceByName[SomeNode]("foo") 87 | self.assertFalse(ref.try_to_resolve(candidates=[SomeNode(name="not_foo")])) 88 | 89 | def test_try_to_resolve_positive_case_different_case(self): 90 | ref = ReferenceByName[SomeNode]("foo") 91 | self.assertTrue(ref.try_to_resolve(candidates=[SomeNode(name="Foo")], case_insensitive=True)) 92 | 93 | def test_try_to_resolve_negative_case_different_case(self): 94 | ref = ReferenceByName[SomeNode]("foo") 95 | self.assertFalse(ref.try_to_resolve(candidates=[SomeNode(name="Foo")])) 96 | 97 | def test_empty_node(self): 98 | node = Node() 99 | self.assertIsNone(node.origin) 100 | 101 | def test_node_with_position(self): 102 | node = Node().with_position(Position(Point(1, 0), Point(2, 1))) 103 | self.assertEqual(Position(Point(1, 0), Point(2, 1)), node.position) 104 | node = SomeNode("").with_position(Position(Point(1, 0), Point(2, 1))) 105 | self.assertEqual(Position(Point(1, 0), Point(2, 1)), node.position) 106 | 107 | def test_node_properties(self): 108 | node = SomeNode("n").with_position(Position(Point(1, 0), Point(2, 1))) 109 | 
self.assertIsNotNone(find_feature(node, 'foo')) 110 | self.assertFalse(find_feature(node, 'foo').is_containment) 111 | self.assertIsNotNone(find_feature(node, 'bar')) 112 | self.assertFalse(find_feature(node, 'bar').is_containment) 113 | self.assertIsNotNone(find_feature(node, 'name')) 114 | self.assertTrue(find_feature(node, 'containment').is_containment) 115 | self.assertFalse(find_feature(node, 'containment').is_reference) 116 | self.assertFalse(find_feature(node, 'reference').is_containment) 117 | self.assertTrue(find_feature(node, 'reference').is_reference) 118 | with self.assertRaises(StopIteration): 119 | next(n for n in node.properties if n.name == '__private__') 120 | with self.assertRaises(StopIteration): 121 | next(n for n in node.properties if n.name == 'non_existent') 122 | with self.assertRaises(StopIteration): 123 | next(n for n in node.properties if n.name == 'properties') 124 | with self.assertRaises(StopIteration): 125 | next(n for n in node.properties if n.name == "origin") 126 | 127 | def test_node_properties_inheritance(self): 128 | node = ExtendedNode("n").with_position(Position(Point(1, 0), Point(2, 1))) 129 | self.assertIsNotNone(find_feature(node, 'foo')) 130 | self.assertIsNotNone(find_feature(node, 'bar')) 131 | self.assertIsNotNone(find_feature(node, 'name')) 132 | with self.assertRaises(StopIteration): 133 | next(n for n in node.properties if n.name == '__private__') 134 | with self.assertRaises(StopIteration): 135 | next(n for n in node.properties if n.name == 'non_existent') 136 | with self.assertRaises(StopIteration): 137 | next(n for n in node.properties if n.name == 'properties') 138 | with self.assertRaises(StopIteration): 139 | next(n for n in node.properties if n.name == "origin") 140 | 141 | def test_scope_lookup_0(self): 142 | """Symbol found in local scope with name and default type""" 143 | local_symbol = SomeSymbol(name='a', index=0) 144 | scope = Scope(symbols={'a': [local_symbol]}, parent=Scope(symbols={'a': 
[SomeSymbol(name='a', index=1)]})) 145 | result = scope.lookup(symbol_name='a') 146 | self.assertEqual(result, local_symbol) 147 | self.assertIsInstance(result, Symbol) 148 | 149 | def test_scope_lookup_1(self): 150 | """Symbol found in upper scope with name and default type""" 151 | upper_symbol = SomeSymbol(name='a', index=0) 152 | scope = Scope(symbols={'b': [SomeSymbol(name='b', index=0)]}, parent=Scope(symbols={'a': [upper_symbol]})) 153 | result = scope.lookup(symbol_name='a') 154 | self.assertEqual(result, upper_symbol) 155 | self.assertIsInstance(result, Symbol) 156 | 157 | def test_scope_lookup_2(self): 158 | """Symbol not found with name and default type""" 159 | scope = Scope(symbols={'b': [SomeSymbol(name='b', index=0)]}, 160 | parent=Scope(symbols={'b': [SomeSymbol(name='b', index=1)]})) 161 | result = scope.lookup(symbol_name='a') 162 | self.assertIsNone(result) 163 | 164 | def test_scope_lookup_3(self): 165 | """Symbol found in local scope with name and type""" 166 | pass 167 | 168 | def test_scope_lookup_4(self): 169 | """Symbol found in upper scope with name and type""" 170 | upper_symbol = SomeSymbol(name='a', index=0) 171 | scope = Scope(symbols={'b': [SomeSymbol(name='b', index=0)]}, parent=Scope(symbols={'a': [upper_symbol]})) 172 | result = scope.lookup(symbol_name='a', symbol_type=SomeSymbol) 173 | self.assertEqual(result, upper_symbol) 174 | self.assertIsInstance(result, SomeSymbol) 175 | 176 | def test_scope_lookup_5(self): 177 | """Symbol found in upper scope with name and type (local with different type)""" 178 | upper_symbol = SomeSymbol(name='a', index=0) 179 | scope = Scope(symbols={'a': [AnotherSymbol(name='a', index=0)]}, parent=Scope(symbols={'a': [upper_symbol]})) 180 | result = scope.lookup(symbol_name='a', symbol_type=SomeSymbol) 181 | self.assertEqual(result, upper_symbol) 182 | self.assertIsInstance(result, SomeSymbol) 183 | 184 | def test_scope_lookup_6(self): 185 | """Symbol not found with name and type (different name)""" 
186 | scope = Scope(symbols={'b': [SomeSymbol(name='b', index=0)]}, 187 | parent=Scope(symbols={'b': [SomeSymbol(name='b', index=1)]})) 188 | result = scope.lookup(symbol_name='a', symbol_type=SomeSymbol) 189 | self.assertIsNone(result) 190 | 191 | def test_scope_lookup_7(self): 192 | """Symbol not found with name and type (different type)""" 193 | scope = Scope(symbols={'a': [SomeSymbol(name='a', index=0)]}, 194 | parent=Scope(symbols={'a': [SomeSymbol(name='a', index=1)]})) 195 | result = scope.lookup(symbol_name='a', symbol_type=AnotherSymbol) 196 | self.assertIsNone(result) 197 | 198 | def test_scope_case_insensitive_lookup(self): 199 | local_symbol = SomeSymbol(name='a', index=0) 200 | scope = Scope(symbols={'a': [local_symbol]}, parent=Scope(symbols={'a': [SomeSymbol(name='a', index=1)]})) 201 | result = scope.lookup(symbol_name='A', case_insensitive=True) 202 | self.assertEqual(result, local_symbol) 203 | self.assertIsInstance(result, Symbol) 204 | 205 | def test_node_properties_meta(self): 206 | @extension_method(Node) 207 | def frob_node(_: Node): 208 | pass 209 | 210 | pds = [pd for pd in sorted(SomeNode.node_properties, key=lambda x: x.name)] 211 | self.assertEqual(8, len(pds), f"{pds} should be 7") 212 | self.assertEqual("bar", pds[0].name) 213 | self.assertFalse(pds[0].is_containment) 214 | self.assertEqual("containment", pds[1].name) 215 | self.assertTrue(pds[1].is_containment) 216 | self.assertEqual("foo", pds[2].name) 217 | self.assertFalse(pds[2].is_containment) 218 | self.assertEqual("multiple", pds[3].name) 219 | self.assertTrue(pds[3].is_containment) 220 | self.assertEqual(Multiplicity.MANY, pds[3].multiplicity) 221 | self.assertEqual("multiple_opt", pds[4].name) 222 | self.assertTrue(pds[4].is_containment) 223 | self.assertEqual(Multiplicity.MANY, pds[4].multiplicity) 224 | self.assertEqual("name", pds[5].name) 225 | self.assertFalse(pds[5].is_containment) 226 | self.assertEqual("optional", pds[6].name) 227 | 
self.assertTrue(pds[6].is_containment) 228 | self.assertEqual(Multiplicity.OPTIONAL, pds[6].multiplicity) 229 | self.assertEqual("reference", pds[7].name) 230 | self.assertTrue(pds[7].is_reference) 231 | 232 | self.assertRaises(Exception, lambda: [x for x in InvalidNode.node_properties]) 233 | 234 | def test_node_properties_meta_inheritance(self): 235 | @extension_method(Node) 236 | def frob_node_2(_: Node): 237 | pass 238 | 239 | pds = [pd for pd in sorted(ExtendedNode.node_properties, key=lambda x: x.name)] 240 | self.assertEqual(15, len(pds), f"{pds}") 241 | self.assertEqual("bar", pds[0].name) 242 | self.assertFalse(pds[0].is_containment) 243 | self.assertEqual("cont_fwd", pds[1].name) 244 | self.assertTrue(pds[1].is_containment) 245 | self.assertEqual(ForwardReferencedNode, pds[1].type) 246 | self.assertEqual("cont_ref", pds[2].name) 247 | self.assertTrue(pds[2].is_reference) 248 | self.assertEqual(ForwardReferencedNode, pds[2].type) 249 | self.assertEqual("containment", pds[3].name) 250 | self.assertTrue(pds[3].is_containment) 251 | self.assertEqual("explicit_containment", pds[4].name) 252 | self.assertTrue(pds[4].is_containment) 253 | self.assertEqual(ExtendedNode, pds[4].type) 254 | self.assertEqual(Multiplicity.MANY, pds[4].multiplicity) 255 | self.assertEqual("explicit_property", pds[5].name) 256 | self.assertEqual(str, pds[5].type) 257 | self.assertEqual(Multiplicity.SINGULAR, pds[5].multiplicity) 258 | self.assertEqual("foo", pds[6].name) 259 | self.assertEqual("multiple", pds[7].name) 260 | self.assertTrue(pds[7].is_containment) 261 | self.assertEqual(Multiplicity.MANY, pds[7].multiplicity) 262 | self.assertEqual("multiple2", pds[8].name) 263 | self.assertTrue(pds[8].is_containment) 264 | self.assertEqual(Multiplicity.MANY, pds[8].multiplicity) 265 | self.assertEqual("multiple_fwd", pds[9].name) 266 | self.assertTrue(pds[9].is_containment) 267 | self.assertEqual(ForwardReferencedNode, pds[9].type) 268 | self.assertEqual(Multiplicity.MANY, 
pds[9].multiplicity) 269 | 270 | self.assertRaises(Exception, lambda: [x for x in InvalidNode.node_properties]) 271 | -------------------------------------------------------------------------------- /tests/model/test_position.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from pylasu.model import Point 4 | 5 | START_LINE = 1 6 | START_COLUMN = 0 7 | START_POINT = Point(START_LINE, START_COLUMN) 8 | 9 | 10 | class PositionTest(unittest.TestCase): 11 | def test_point_compare(self): 12 | p0 = START_POINT 13 | p1 = Point(1, 1) 14 | p2 = Point(1, 100) 15 | p3 = Point(2, 90) 16 | 17 | self.assertFalse(p0 < p0) 18 | self.assertTrue(p0 <= p0) 19 | self.assertTrue(p0 >= p0) 20 | self.assertFalse(p0 > p0) 21 | 22 | self.assertTrue(p0 < p1) 23 | self.assertTrue(p0 <= p1) 24 | self.assertFalse(p0 >= p1) 25 | self.assertFalse(p0 > p1) 26 | 27 | self.assertTrue(p0 < p2) 28 | self.assertTrue(p0 <= p2) 29 | self.assertFalse(p0 >= p2) 30 | self.assertFalse(p0 > p2) 31 | 32 | self.assertTrue(p0 < p3) 33 | self.assertTrue(p0 <= p3) 34 | self.assertFalse(p0 >= p3) 35 | self.assertFalse(p0 > p3) 36 | 37 | self.assertTrue(p1 < p2) 38 | self.assertTrue(p1 <= p2) 39 | self.assertFalse(p1 >= p2) 40 | self.assertFalse(p1 > p2) 41 | 42 | self.assertTrue(p1 < p3) 43 | self.assertTrue(p1 <= p3) 44 | self.assertFalse(p1 >= p3) 45 | self.assertFalse(p1 > p3) 46 | 47 | def test_is_before(self): 48 | p0 = START_POINT 49 | p1 = Point(1, 1) 50 | p2 = Point(1, 100) 51 | p3 = Point(2, 90) 52 | 53 | self.assertFalse(p0.is_before(p0)) 54 | self.assertTrue(p0.is_before(p1)) 55 | self.assertTrue(p0.is_before(p2)) 56 | self.assertTrue(p0.is_before(p3)) 57 | 58 | self.assertFalse(p1.is_before(p0)) 59 | self.assertFalse(p1.is_before(p1)) 60 | self.assertTrue(p1.is_before(p2)) 61 | self.assertTrue(p1.is_before(p3)) 62 | 63 | self.assertFalse(p2.is_before(p0)) 64 | self.assertFalse(p2.is_before(p1)) 65 | self.assertFalse(p2.is_before(p2)) 66 | 
self.assertTrue(p2.is_before(p3)) 67 | 68 | self.assertFalse(p3.is_before(p0)) 69 | self.assertFalse(p3.is_before(p1)) 70 | self.assertFalse(p3.is_before(p2)) 71 | self.assertFalse(p3.is_before(p3)) 72 | -------------------------------------------------------------------------------- /tests/test_metamodel_builder.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | from io import BytesIO, IOBase 4 | 5 | from pyecore.ecore import EObject, EPackage, EEnum, EMetaclass, EAttribute, EString 6 | from pyecore.resources import URI 7 | 8 | from pylasu.StrumentaLanguageSupport import ASTNode 9 | from pylasu.emf import MetamodelBuilder 10 | from pylasu.playground import JsonResource 11 | from tests.fixtures import Box, ReinforcedBox 12 | 13 | 14 | eClass = EPackage('test', nsURI='http://test/1.0', nsPrefix='test') 15 | nsURI = 'http://test/1.0' 16 | nsPrefix = 'test' 17 | 18 | BookCategory = EEnum('BookCategory', literals=['ScienceFiction', 'Biographie', 'Mistery']) 19 | eClass.eClassifiers.append(BookCategory) 20 | 21 | 22 | @EMetaclass 23 | class A(object): 24 | names = EAttribute(eType=EString, upper=-1) 25 | bcat = EAttribute(eType=BookCategory) 26 | 27 | 28 | class MetamodelBuilderTest(unittest.TestCase): 29 | def test_pyecore_enum(self): 30 | from pyecore.resources import ResourceSet 31 | from pyecore.resources.json import JsonResource as BaseJsonResource 32 | 33 | class TestJsonResource(BaseJsonResource): 34 | def open_out_stream(self, other=None): 35 | if isinstance(other, IOBase): 36 | return other 37 | else: 38 | return super().open_out_stream(other) 39 | 40 | rset = ResourceSet() 41 | rset.resource_factory['json'] = lambda uri: TestJsonResource(uri=uri, indent=2) 42 | resource = rset.create_resource('ZMM.json') 43 | resource.append(eClass) 44 | with BytesIO() as out: 45 | resource.save(out) 46 | self.assertEqual( 47 | json.loads('''{ 48 | "eClass": 
"http://www.eclipse.org/emf/2002/Ecore#//EPackage", 49 | "nsPrefix": "test", 50 | "nsURI": "http://test/1.0", 51 | "name": "test", 52 | "eClassifiers": [ 53 | { 54 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EEnum", 55 | "name": "BookCategory", 56 | "eLiterals": [ 57 | "ScienceFiction", 58 | "Biographie", 59 | "Mistery" 60 | ] 61 | }, 62 | { 63 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 64 | "eStructuralFeatures": [ 65 | { 66 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 67 | "upperBound": -1, 68 | "name": "names", 69 | "eType": { 70 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 71 | "$ref": "#//EString" 72 | } 73 | }, 74 | { 75 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 76 | "name": "bcat", 77 | "eType": { 78 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EEnum", 79 | "$ref": "#//BookCategory" 80 | } 81 | } 82 | ], 83 | "name": "A" 84 | } 85 | ] 86 | }'''), 87 | json.loads(out.getvalue().decode("utf-8"))) 88 | 89 | def test_can_serialize_starlasu_model(self): 90 | starlasu_package = ASTNode.eClass.ePackage 91 | resource = JsonResource(URI(starlasu_package.nsURI)) 92 | resource.contents.append(starlasu_package) 93 | with BytesIO() as out: 94 | resource.save(out) 95 | starlasu_model = json.loads(STARLASU_MODEL_JSON) 96 | serialized_model = json.loads(out.getvalue().decode('utf-8')) 97 | self.maxDiff = None 98 | self.assertDictEqual(serialized_model, starlasu_model) 99 | 100 | def test_build_metamodel_single_package(self): 101 | mb = MetamodelBuilder("tests.fixtures", "https://strumenta.com/pylasu/test/fixtures") 102 | box = mb.provide_class(Box) 103 | self.assertIsInstance(box(), EObject) 104 | self.assertEqual(box.eClass.ePackage, mb.package) 105 | self.assertTrue(box.eClass in mb.package.eContents) 106 | self.assertIsNotNone( 107 | next((a for a in box.eClass.eAllAttributes() if a.name == "name"), None)) 108 | self.assertEqual(1, len(box.eClass.eAllAttributes())) 109 | 
110 | def test_build_metamodel_single_package_inheritance(self): 111 | mb = MetamodelBuilder("tests.fixtures", "https://strumenta.com/pylasu/test/fixtures") 112 | box = mb.provide_class(ReinforcedBox) 113 | self.assertIsInstance(box(), EObject) 114 | self.assertEqual(box.eClass.ePackage, mb.package) 115 | self.assertEqual(2, len(mb.package.eContents)) 116 | self.assertTrue(box.eClass in mb.package.eContents) 117 | self.assertIsNotNone( 118 | next((a for a in box.eClass.eAllAttributes() if a.name == "name"), None)) 119 | self.assertIsNotNone( 120 | next((a for a in box.eClass.eAllAttributes() if a.name == "strength"), None)) 121 | self.assertEqual(3, len(box.eClass.eAllAttributes())) 122 | 123 | 124 | STARLASU_MODEL_JSON = '''{ 125 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EPackage", 126 | "eClassifiers": [ 127 | { 128 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 129 | "eStructuralFeatures": [ 130 | { 131 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 132 | "name": "year", 133 | "eType": { 134 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 135 | "$ref": "#//EInt" 136 | } 137 | }, 138 | { 139 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 140 | "name": "month", 141 | "eType": { 142 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 143 | "$ref": "#//EInt" 144 | } 145 | }, 146 | { 147 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 148 | "name": "dayOfMonth", 149 | "eType": { 150 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 151 | "$ref": "#//EInt" 152 | } 153 | } 154 | ], 155 | "name": "LocalDate" 156 | }, 157 | { 158 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 159 | "eStructuralFeatures": [ 160 | { 161 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 162 | "name": "hour", 163 | "eType": { 164 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 165 | "$ref": "#//EInt" 166 | } 167 | }, 
168 | { 169 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 170 | "name": "minute", 171 | "eType": { 172 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 173 | "$ref": "#//EInt" 174 | } 175 | }, 176 | { 177 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 178 | "name": "second", 179 | "eType": { 180 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 181 | "$ref": "#//EInt" 182 | } 183 | }, 184 | { 185 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 186 | "name": "nanosecond", 187 | "eType": { 188 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 189 | "$ref": "#//EInt" 190 | } 191 | } 192 | ], 193 | "name": "LocalTime" 194 | }, 195 | { 196 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 197 | "eStructuralFeatures": [ 198 | { 199 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 200 | "containment": true, 201 | "name": "date", 202 | "eType": { 203 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 204 | "$ref": "#//LocalDate" 205 | } 206 | }, 207 | { 208 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 209 | "containment": true, 210 | "name": "time", 211 | "eType": { 212 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 213 | "$ref": "#//LocalTime" 214 | } 215 | } 216 | ], 217 | "name": "LocalDateTime" 218 | }, 219 | { 220 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 221 | "eStructuralFeatures": [ 222 | { 223 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 224 | "name": "line", 225 | "eType": { 226 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 227 | "$ref": "#//EInt" 228 | } 229 | }, 230 | { 231 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 232 | "name": "column", 233 | "eType": { 234 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 235 | "$ref": "#//EInt" 236 | } 237 | } 238 | ], 239 | "name": "Point" 240 | }, 
241 | { 242 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 243 | "eStructuralFeatures": [ 244 | { 245 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 246 | "containment": true, 247 | "name": "start", 248 | "eType": { 249 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 250 | "$ref": "#//Point" 251 | } 252 | }, 253 | { 254 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 255 | "containment": true, 256 | "name": "end", 257 | "eType": { 258 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 259 | "$ref": "#//Point" 260 | } 261 | } 262 | ], 263 | "name": "Position" 264 | }, 265 | { 266 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 267 | "name": "Origin", 268 | "abstract": true 269 | }, 270 | { 271 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 272 | "name": "Destination", 273 | "abstract": true 274 | }, 275 | { 276 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 277 | "name": "Statement" 278 | }, 279 | { 280 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 281 | "name": "Expression" 282 | }, 283 | { 284 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 285 | "name": "EntityDeclaration" 286 | }, 287 | { 288 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 289 | "eStructuralFeatures": [ 290 | { 291 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 292 | "name": "type", 293 | "eType": { 294 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EEnum", 295 | "$ref": "#//IssueType" 296 | } 297 | }, 298 | { 299 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 300 | "name": "message", 301 | "eType": { 302 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 303 | "$ref": "#//EString" 304 | } 305 | }, 306 | { 307 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 308 | "name": "severity", 309 | "eType": { 310 | "eClass": 
"http://www.eclipse.org/emf/2002/Ecore#//EEnum", 311 | "$ref": "#//IssueSeverity" 312 | } 313 | }, 314 | { 315 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 316 | "containment": true, 317 | "name": "position", 318 | "eType": { 319 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 320 | "$ref": "#//Position" 321 | } 322 | } 323 | ], 324 | "name": "Issue" 325 | }, 326 | { 327 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 328 | "eStructuralFeatures": [ 329 | { 330 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 331 | "name": "name", 332 | "eType": { 333 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 334 | "$ref": "#//EString" 335 | } 336 | } 337 | ], 338 | "name": "PossiblyNamed" 339 | }, 340 | { 341 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 342 | "eStructuralFeatures": [ 343 | { 344 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 345 | "name": "name", 346 | "eType": { 347 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 348 | "$ref": "#//EString" 349 | } 350 | }, 351 | { 352 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 353 | "name": "referenced", 354 | "eType": { 355 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 356 | "$ref": "#//ASTNode" 357 | } 358 | } 359 | ], 360 | "name": "ReferenceByName" 361 | }, 362 | { 363 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 364 | "eStructuralFeatures": [ 365 | { 366 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 367 | "containment": true, 368 | "name": "root", 369 | "eType": { 370 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 371 | "$ref": "#//ASTNode" 372 | } 373 | }, 374 | { 375 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 376 | "upperBound": -1, 377 | "containment": true, 378 | "name": "issues", 379 | "eType": { 380 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 381 | 
"$ref": "#//Issue" 382 | } 383 | } 384 | ], 385 | "name": "Result" 386 | }, 387 | { 388 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 389 | "eStructuralFeatures": [ 390 | { 391 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 392 | "name": "node", 393 | "eType": { 394 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 395 | "$ref": "#//ASTNode" 396 | } 397 | } 398 | ], 399 | "eSuperTypes": [ 400 | { 401 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 402 | "$ref": "#//Destination" 403 | } 404 | ], 405 | "name": "NodeDestination" 406 | }, 407 | { 408 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 409 | "eStructuralFeatures": [ 410 | { 411 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 412 | "containment": true, 413 | "name": "position", 414 | "eType": { 415 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 416 | "$ref": "#//Position" 417 | } 418 | } 419 | ], 420 | "eSuperTypes": [ 421 | { 422 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 423 | "$ref": "#//Destination" 424 | } 425 | ], 426 | "name": "TextFileDestination" 427 | }, 428 | { 429 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 430 | "eStructuralFeatures": [ 431 | { 432 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 433 | "containment": true, 434 | "name": "position", 435 | "eType": { 436 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 437 | "$ref": "#//Position" 438 | } 439 | }, 440 | { 441 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 442 | "name": "origin", 443 | "eType": { 444 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 445 | "$ref": "#//Origin" 446 | } 447 | }, 448 | { 449 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EReference", 450 | "containment": true, 451 | "name": "destination", 452 | "eType": { 453 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 454 | "$ref": 
"#//Destination" 455 | } 456 | } 457 | ], 458 | "eSuperTypes": [ 459 | { 460 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 461 | "$ref": "#//Origin" 462 | } 463 | ], 464 | "name": "ASTNode", 465 | "abstract": true 466 | }, 467 | { 468 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 469 | "eStructuralFeatures": [ 470 | { 471 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EAttribute", 472 | "name": "name", 473 | "eType": { 474 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 475 | "$ref": "#//EString" 476 | } 477 | } 478 | ], 479 | "eSuperTypes": [ 480 | { 481 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EClass", 482 | "$ref": "#//PossiblyNamed" 483 | } 484 | ], 485 | "name": "Named" 486 | }, 487 | { 488 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 489 | "instanceClassName": "java.math.BigDecimal", 490 | "name": "BigDecimal" 491 | }, 492 | { 493 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EDataType", 494 | "instanceClassName": "java.math.BigInteger", 495 | "name": "BigInteger" 496 | }, 497 | { 498 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EEnum", 499 | "eLiterals": [ 500 | "LEXICAL", 501 | "SYNTACTIC", 502 | "SEMANTIC" 503 | ], 504 | "name": "IssueType" 505 | }, 506 | { 507 | "eClass": "http://www.eclipse.org/emf/2002/Ecore#//EEnum", 508 | "eLiterals": [ 509 | "ERROR", 510 | "WARNING", 511 | "INFO" 512 | ], 513 | "name": "IssueSeverity" 514 | } 515 | ], 516 | "name": "StrumentaLanguageSupport", 517 | "nsURI": "https://strumenta.com/kolasu/v2", 518 | "nsPrefix": "" 519 | }''' 520 | -------------------------------------------------------------------------------- /tests/test_parse_tree.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from antlr4 import CommonTokenStream, InputStream 4 | 5 | from pylasu.model import Point 6 | from pylasu.parsing.parse_tree import ParseTreeOrigin, 
# =====================================================================
# NOTE(review): this chunk is a multi-file repository dump that had been
# collapsed onto a handful of physical lines.  It is re-expanded here
# into conventionally formatted Python; the dump's file boundaries are
# preserved as section comments.
# =====================================================================

# ---------------------------------------------------------------------
# tests/test_parse_tree.py  (tail of file)
#
# NOTE(review): the import block above this chunk is truncated in the
# dump (it ends mid-line at "generate_nodes_classes_for_parser").  The
# imports below are reconstructed from usage in the tests — TODO confirm
# against the original file.
# ---------------------------------------------------------------------
import unittest

from antlr4 import CommonTokenStream, InputStream

from pylasu.model import Point
from pylasu.parsing.parse_tree import ParseTreeOrigin, generate_nodes_classes_for_parser
from tests.antlr_script.AntlrScriptLexer import AntlrScriptLexer
from tests.antlr_script.AntlrScriptParser import AntlrScriptParser
from tests.simple_lang.SimpleLangLexer import SimpleLangLexer
from tests.simple_lang.SimpleLangParser import SimpleLangParser


class ParseTreeTest(unittest.TestCase):
    """Tests for wrapping ANTLR parse trees as pylasu origins."""

    def test_parse_tree_origin(self):
        # A two-line input should yield a position spanning both lines.
        lexer = SimpleLangLexer(InputStream("display\n42"))
        parser = SimpleLangParser(CommonTokenStream(lexer))
        parse_tree = parser.compilationUnit()
        origin = ParseTreeOrigin(parse_tree)
        position = origin.position
        self.assertIsNotNone(position)
        self.assertEqual(position.start, Point(1, 0))
        self.assertEqual(position.end, Point(2, 2))

    def test_empty_parse_tree_position(self):
        # An empty script has no stop token; the position must still be
        # defined, collapsing to a zero-width span at the input start.
        lexer = AntlrScriptLexer(InputStream(""))
        parser = AntlrScriptParser(CommonTokenStream(lexer))
        parse_tree = parser.script()
        self.assertIsNone(parse_tree.stop)
        origin = ParseTreeOrigin(parse_tree)
        position = origin.position
        self.assertIsNotNone(position)
        self.assertEqual(position.start, Point(1, 0))
        self.assertEqual(position.end, Point(1, 0))

    def test_ast_gen(self):
        # Node classes are generated from the parser's rules and injected
        # into this module's globals().
        generate_nodes_classes_for_parser(SimpleLangParser, globals())
        self.assertTrue("CompilationUnit" in globals())
        CompilationUnit = globals()["CompilationUnit"]
        cu = CompilationUnit()
        self.assertIsNotNone(cu)
        self.assertTrue(("statement_list", []) in cu.properties)


# ---------------------------------------------------------------------
# tests/test_processing.py
# ---------------------------------------------------------------------
import unittest
from dataclasses import dataclass
from typing import List

from pylasu.model import Node
from tests.fixtures import box, Item


@dataclass(unsafe_hash=True)
class AW(Node):
    """Minimal leaf node carrying a single string."""
    s: str


@dataclass
class BW(Node):
    """Node with one single-valued child and one list-valued child."""
    a: AW
    many_as: List[AW]


class ProcessingTest(unittest.TestCase):
    """Tests for search and in-place replacement over node trees."""

    def test_search_by_type(self):
        self.assertEqual(["1", "2", "3", "4", "5", "6"], [i.name for i in box.search_by_type(Item)])
        self.assertEqual(
            ["root", "first", "1", "2", "big", "small", "3", "4", "5", "6"],
            [n.name for n in box.search_by_type(Node)]
        )

    def test_replace_in_list(self):
        # replace_with must work when the node sits inside a list child.
        a1 = AW("1")
        a2 = AW("2")
        a3 = AW("3")
        a4 = AW("4")
        b = BW(a1, [a2, a3])
        b.assign_parents()
        a2.replace_with(a4)
        self.assertEqual("4", b.many_as[0].s)
        self.assertEqual(BW(a1, [a4, a3]), b)

    def test_replace_single(self):
        # replace_with must work for a single-valued child attribute.
        a1 = AW("1")
        a2 = AW("2")
        b = BW(a1, [])
        b.assign_parents()
        a1.replace_with(a2)
        self.assertEqual("2", b.a.s)


# ---------------------------------------------------------------------
# tests/test_transpilation_trace.py
# ---------------------------------------------------------------------
import json
import unittest

from pyecore.ecore import EString, EAttribute, EInt

import pylasu.StrumentaLanguageSupport as starlasu
from pylasu.StrumentaLanguageSupport import ASTNode
from pylasu.emf import MetamodelBuilder
from pylasu.playground import TranspilationTrace, ETranspilationTrace
from pylasu.validation.validation import Result
from tests.fixtures import Box, Item

nsURI = "http://mypackage.com"
name = "StrumentaLanguageSupportTranspilationTest"


class ANode(ASTNode):
    """Ecore-backed test node with a string name and an int value."""
    name = EAttribute(eType=EString)
    value = EAttribute(eType=EInt)

    def __init__(self, *, name=None, value=None, **kwargs):
        super().__init__(**kwargs)
        # Only assign provided attributes so Ecore defaults are preserved.
        if name is not None:
            self.name = name
        if value is not None:
            self.value = value


class ModelTest(unittest.TestCase):
    """Serialization round-trip tests for transpilation traces."""

    def test_serialize_transpilation_issue(self):
        tt = ETranspilationTrace(
            original_code="a:1", generated_code="b:2",
            source_result=starlasu.Result(root=ANode(name="a", value=1)),
            target_result=starlasu.Result(root=ANode(name="b", value=2)),
            issues=[starlasu.Issue(
                type=starlasu.IssueType.getEEnumLiteral("TRANSLATION"),
                message="some issue",
                severity=starlasu.IssueSeverity.getEEnumLiteral("WARNING"))]
        )
        self.assertEqual("a:1", tt.originalCode)
        self.assertEqual("b:2", tt.generatedCode)
        self.assertEqual("some issue", tt.issues[0].message)
        self.assertEqual("a", tt.sourceResult.root.name)
        self.assertEqual(1, tt.sourceResult.root.value)
        self.assertEqual("b", tt.targetResult.root.name)
        self.assertEqual(2, tt.targetResult.root.value)

        expected = """{
  "eClass" : "https://strumenta.com/kolasu/transpilation/v1#//TranspilationTrace",
  "originalCode" : "a:1",
  "sourceResult" : {
    "root" : {
      "eClass" : "http://mypackage.com#//ANode",
      "name" : "a",
      "value" : 1
    }
  },
  "targetResult" : {
    "root" : {
      "eClass" : "http://mypackage.com#//ANode",
      "name" : "b",
      "value" : 2
    }
  },
  "generatedCode" : "b:2",
  "issues" : [ {
    "message" : "some issue",
    "severity" : "WARNING"
  } ]
}"""
        # Compare parsed JSON, not raw strings, so key order and
        # whitespace differences don't matter.
        self.assertEqual(json.loads(expected), json.loads(tt.save_as_json("foo.json")))

    def test_serialize_transpilation_from_nodes(self):
        mmb = MetamodelBuilder("tests.fixtures", "https://strumenta.com/pylasu/test/fixtures")
        mmb.provide_class(Box)
        mmb.provide_class(Item)

        tt = TranspilationTrace(
            original_code="box(a)[i1, bar]", generated_code='',
            source_result=Result(Box("a", [Item("i1"), Box("b", [Item("i2"), Item("i3")])])),
            target_result=Result(Box("A")))

        expected = """{
  "eClass": "https://strumenta.com/kolasu/transpilation/v1#//TranspilationTrace",
  "generatedCode": "",
  "originalCode": "box(a)[i1, bar]",
  "sourceResult": { "root": {
      "eClass": "https://strumenta.com/pylasu/test/fixtures#//Box",
      "name": "a",
      "contents": [{
          "eClass": "https://strumenta.com/pylasu/test/fixtures#//Item",
          "name": "i1"
        }, {
          "eClass": "https://strumenta.com/pylasu/test/fixtures#//Box",
          "name": "b",
          "contents": [{
              "eClass": "https://strumenta.com/pylasu/test/fixtures#//Item",
              "name": "i2"
            }, {
              "eClass": "https://strumenta.com/pylasu/test/fixtures#//Item",
              "name": "i3"
            }]
        }]
    }
  },
  "targetResult": { "root": {
      "eClass": "https://strumenta.com/pylasu/test/fixtures#//Box",
      "name": "A",
      "contents": []
    }
  }
}"""
        as_json = tt.save_as_json("foo.json", mmb.generate())
        self.assertEqual(json.loads(expected), json.loads(as_json))


# ---------------------------------------------------------------------
# tests/test_traversing.py
# ---------------------------------------------------------------------
import unittest

from pylasu.model import pos
from pylasu.model.traversing import find_ancestor_of_type
from tests.fixtures import box, Box


class TraversingTest(unittest.TestCase):
    """Tests for the various tree-walking strategies on the box fixture."""

    def test_walk_within_with_outside_position(self):
        # A position outside the tree yields nothing.
        self.assertEqual([], [n.name for n in box.walk_within(pos(15, 1, 15, 1))])

    def test_walk_within_with_root_position(self):
        # The root's own position covers the entire tree.
        self.assertEqual(
            ["root", "first", "1", "2", "big", "small", "3", "4", "5", "6"],
            [n.name for n in box.walk_within(box.position)]
        )

    def test_walk_within_with_leaf_position(self):
        self.assertEqual(["6"], [n.name for n in box.walk_within(pos(13, 3, 13, 9))])

    def test_walk_within_with_subtree_position(self):
        self.assertEqual(["small", "3", "4", "5"], [n.name for n in box.walk_within(pos(7, 5, 11, 5))])

    def test_walk_depth_first(self):
        self.assertEqual(
            ["root", "first", "1", "2", "big", "small", "3", "4", "5", "6"],
            [n.name for n in box.walk()]
        )

    def test_walk_leaves_first(self):
        self.assertEqual(
            ["1", "first", "2", "3", "4", "5", "small", "big", "6", "root"],
            [n.name for n in box.walk_leaves_first()]
        )

    def test_walk_descendants(self):
        # Same as depth-first, minus the root itself.
        self.assertEqual(
            ["first", "1", "2", "big", "small", "3", "4", "5", "6"],
            [n.name for n in box.walk_descendants()]
        )

    def test_walk_ancestors(self):
        box.assign_parents()
        item4 = box.contents[2].contents[0].contents[1]
        self.assertEqual(["small", "big", "root"], [n.name for n in item4.walk_ancestors()])

    def test_find_ancestor_of_type(self):
        box.assign_parents()
        item = box.contents[1]
        self.assertEqual(box, find_ancestor_of_type(item, Box))


# ---------------------------------------------------------------------
# tests/transformation/__init__.py  (empty file in the dump)
# ---------------------------------------------------------------------


# ---------------------------------------------------------------------
# tests/transformation/test_ast_transformers.py
# ---------------------------------------------------------------------
import enum
import unittest
from dataclasses import dataclass, field
from typing import List

from pylasu.model import Node
from pylasu.testing.testing import assert_asts_are_equal
from pylasu.transformation.generic_nodes import GenericNode
from pylasu.transformation.transformation import ASTTransformer, PropertyRef, ast_transformer


@dataclass
class CU(Node):
    """Compilation unit holding a list of statements."""
    statements: List[Node] = field(default_factory=list)


@dataclass
class DisplayIntStatement(Node):
    value: int = 0


@dataclass
class SetStatement(Node):
    variable: str = ""
    value: int = 0


class Operator(enum.Enum):
    PLUS = '+'
    MULT = '*'


class Expression(Node):
    pass


@dataclass
class IntLiteral(Expression):
    value: int


@dataclass
class GenericBinaryExpression(Node):
    operator: Operator
    left: Expression
    right: Expression


@dataclass
class Mult(Node):
    left: Expression
    right: Expression


@dataclass
class Sum(Node):
    left: Expression
    right: Expression


class ASTTransformerTest(unittest.TestCase):
    """Tests for ASTTransformer factories and transformations."""

    # NOTE(review): renamed from the original "test_identitiy_transformer"
    # (typo); unittest discovers tests by the "test_" prefix, so this is
    # safe for all callers.
    def test_identity_transformer(self):
        prop = PropertyRef("statements")
        transformer = ASTTransformer()
        transformer.register_node_factory(CU, CU).with_child(prop, prop)
        transformer.register_identity_transformation(DisplayIntStatement)
        transformer.register_identity_transformation(SetStatement)
        cu = CU(statements=[
            SetStatement(variable="foo", value=123),
            DisplayIntStatement(value=456)])
        transformed_cu = transformer.transform(cu)
        self.assertEqual(cu, transformed_cu)
        self.assertEqual(transformed_cu.origin, cu)

    def test_translate_binary_expression(self):
        """Example of transformation to perform a refactoring within the same language."""
        my_transformer = ASTTransformer(allow_generic_node=False)

        @ast_transformer(GenericBinaryExpression, my_transformer, "to_ast")
        def generic_binary_expression_to_ast(source: GenericBinaryExpression):
            if source.operator == Operator.MULT:
                return Mult(my_transformer.transform(source.left), my_transformer.transform(source.right))
            elif source.operator == Operator.PLUS:
                return Sum(my_transformer.transform(source.left), my_transformer.transform(source.right))

        my_transformer.register_identity_transformation(IntLiteral)
        source = GenericBinaryExpression(Operator.MULT, IntLiteral(7), IntLiteral(8))
        t1 = my_transformer.transform(source)
        # The decorator also installs a "to_ast" method on the source type.
        self.assertEqual(t1, source.to_ast())

    def test_translate_across_languages(self):
        transformer = ASTTransformer(allow_generic_node=False)
        transformer.register_node_factory(ALangIntLiteral, lambda source: BLangIntLiteral(source.value))
        transformer.register_node_factory(
            ALangSum,
            lambda source: BLangSum(transformer.transform(source.left), transformer.transform(source.right)))
        transformer.register_node_factory(
            ALangMult,
            lambda source: BLangMult(transformer.transform(source.left), transformer.transform(source.right)))
        self.assertEqual(
            BLangMult(
                BLangSum(
                    BLangIntLiteral(1),
                    BLangMult(BLangIntLiteral(2), BLangIntLiteral(3))
                ),
                BLangIntLiteral(4)
            ),
            transformer.transform(
                ALangMult(
                    ALangSum(
                        ALangIntLiteral(1),
                        ALangMult(ALangIntLiteral(2), ALangIntLiteral(3))
                    ),
                    ALangIntLiteral(4))))

    def test_dropping_nodes(self):
        # A factory returning None drops the node from the result tree.
        prop = PropertyRef("statements")
        transformer = ASTTransformer()
        transformer.register_node_factory(CU, CU).with_child(prop, prop)
        transformer.register_node_factory(DisplayIntStatement, lambda _: None)
        transformer.register_identity_transformation(SetStatement)
        cu = CU(statements=[DisplayIntStatement(value=456), SetStatement(variable="foo", value=123)])
        transformed_cu = transformer.transform(cu)
        # TODO not yet supported self.assertTrue(transformed_cu.hasValidParents())
        self.assertEqual(transformed_cu.origin, cu)
        self.assertEqual(1, len(transformed_cu.statements))
        assert_asts_are_equal(self, cu.statements[1], transformed_cu.statements[0])

    def test_nested_origin(self):
        prop = PropertyRef("statements")
        transformer = ASTTransformer()
        transformer.register_node_factory(CU, CU).with_child(prop, prop)
        transformer.register_node_factory(DisplayIntStatement, lambda s: s.with_origin(GenericNode()))
        cu = CU(statements=[DisplayIntStatement(value=456)])
        transformed_cu = transformer.transform(cu)
        # TODO not yet supported self.assertTrue(transformed_cu.hasValidParents())
        self.assertEqual(transformed_cu.origin, cu)
        self.assertIsInstance(transformed_cu.statements[0].origin, GenericNode)

    def test_transforming_one_node_to_many(self):
        # A factory may return a list to expand one node into several.
        prop = PropertyRef("stmts")
        transformer = ASTTransformer(allow_generic_node=False)
        transformer.register_node_factory(BarRoot, BazRoot).with_child(prop, prop)
        transformer.register_node_factory(BarStmt, lambda s: [BazStmt(f"{s.desc}-1"), BazStmt(f"{s.desc}-2")])
        original = BarRoot([BarStmt("a"), BarStmt("b")])
        transformed = transformer.transform(original)
        # TODO not yet supported assertTrue { transformed.hasValidParents() }
        self.assertEqual(transformed.origin, original)
        assert_asts_are_equal(
            self,
            BazRoot([BazStmt("a-1"), BazStmt("a-2"), BazStmt("b-1"), BazStmt("b-2")]),
            transformed
        )


@dataclass
class ALangExpression(Node):
    pass


@dataclass
class ALangIntLiteral(ALangExpression):
    value: int


@dataclass
class ALangSum(ALangExpression):
    left: ALangExpression
    right: ALangExpression


@dataclass
class ALangMult(ALangExpression):
    left: ALangExpression
    right: ALangExpression


@dataclass
class BLangExpression(Node):
    pass


@dataclass
class BLangIntLiteral(BLangExpression):
    value: int


@dataclass
class BLangSum(BLangExpression):
    left: BLangExpression
    right: BLangExpression


@dataclass
class BLangMult(BLangExpression):
    left: BLangExpression
    right: BLangExpression


@dataclass
class BarStmt(Node):
    desc: str


@dataclass
class BarRoot(Node):
    stmts: List[BarStmt] = field(default_factory=list)


@dataclass
class BazStmt(Node):
    desc: str


@dataclass
class BazRoot(Node):
    stmts: List[BazStmt] = field(default_factory=list)


if __name__ == '__main__':
    unittest.main()