├── .ci.yml ├── .github └── workflows │ └── workflow.yml ├── .gitignore ├── LICENSE ├── README.md ├── demo ├── blinky-temperature │ ├── CMakeLists.txt │ ├── prj.conf │ └── src │ │ └── main.c ├── shell │ ├── CMakeLists.txt │ ├── prj.conf │ └── src │ │ └── main.c ├── stm32-led-thermometer.json └── templates │ └── blinky-temperature │ ├── CMakeLists.txt │ ├── prj.conf │ └── src │ ├── main.c │ ├── utils.c │ └── utils.h ├── pyproject.toml ├── tools ├── modify_specification.py ├── requirements.txt └── validate.py └── vsd ├── __init__.py ├── __main__.py ├── backend.py ├── build.py ├── env.py ├── generate.py ├── graph.py ├── init.py ├── scripts ├── get_zephyr_sdk.sh └── init_zephyr.sh ├── simulate.py ├── spec_mods └── interactive.json ├── specification.py ├── templates └── run.resc └── utils.py /.ci.yml: -------------------------------------------------------------------------------- 1 | image: debian:bookworm 2 | 3 | stages: 4 | - build 5 | - deploy 6 | - prepare-workspace 7 | - validate 8 | - aggregate-results 9 | - show-changes 10 | 11 | .common_only: &common_only 12 | only: 13 | - main 14 | - dev 15 | - merge_requests 16 | 17 | build: 18 | <<: *common_only 19 | when: manual 20 | stage: build 21 | tags: ['ace-x86_64'] 22 | before_script: 23 | - apt-get -y update 24 | - apt-get -y install npm libarchive-tools git python3-venv python3-dev python3-pip 25 | script: 26 | - python3 -m venv .venv && source .venv/bin/activate && python3 --version 27 | - git clone https://github.com/antmicro/visual-system-designer-resources 28 | 29 | - pip install git+https://github.com/antmicro/kenning-pipeline-manager.git 30 | - pip install git+https://github.com/antmicro/kenning-pipeline-manager-backend-communication.git 31 | 32 | - pipeline_manager build server-app 33 | --assets-directory visual-system-designer-resources/assets/ 34 | --communication-server-host localhost 35 | --communication-server-port 9000 36 | --output-directory website 37 | --workspace-directory pm-workspace 38 | 39 | - tar cf $CI_DOCS_ARCHIVE -C website . 40 | artifacts: 41 | paths: 42 | - $CI_DOCS_ARCHIVE 43 | 44 | 45 | deploy: 46 | <<: *common_only 47 | when: manual 48 | image: $CI_DOCS_DOCKER_IMAGE 49 | variables: 50 | GIT_STRATEGY: none 51 | dependencies: 52 | - build 53 | stage: deploy 54 | tags: ['docs'] 55 | script: echo 'Deploying docs' 56 | artifacts: 57 | paths: 58 | - $CI_DOCS_ARCHIVE 59 | 60 | .install-requirements: &install-requirements |- 61 | apt-get -y update &> /dev/null 62 | apt-get -y install npm libarchive-tools curl git cmake ninja-build gperf ccache dfu-util device-tree-compiler wget python3-venv python3-dev python3-pip python3-setuptools python3-tk python3-wheel xz-utils file make gcc gcc-multilib g++-multilib libsdl2-dev 63 | 64 | # Initialize python venv and install VSD app 65 | .init-venv: &init-venv |- 66 | python3 -m venv .venv 67 | source .venv/bin/activate 68 | pip install -e . 
69 | 70 | .init-workspace: &init-workspace |- 71 | export VSD_WORKSPACE=$(pwd)/workspace 72 | export ZEPHYR_SDK_INSTALL_DIR=$(vsd info | grep ZEPHYR_SDK | sed 's/ZEPHYR_SDK_INSTALL_DIR: //') 73 | 74 | # Move SDK to proper directory 75 | mv zephyr-sdk ${ZEPHYR_SDK_INSTALL_DIR} 76 | 77 | # Ensure that Zephyr SDK is setup correctly 78 | pushd ${ZEPHYR_SDK_INSTALL_DIR} 79 | ./setup.sh -t all -h -c 80 | popd 81 | 82 | # This call to vsd init is needed to install Zephyr requirements 83 | vsd init workspace 84 | vsd info 85 | 86 | 87 | 88 | setup-workspace: 89 | <<: *common_only 90 | stage: prepare-workspace 91 | tags: ['ace-x86_64'] 92 | before_script: 93 | - *install-requirements 94 | script: 95 | - *init-venv 96 | - echo "progress=dot:giga" > ${HOME}/.wgetrc 97 | # Setup workspace 98 | - vsd init workspace 99 | # Prepare archive that will be saved to artifacts 100 | - export VSD_WORKSPACE=workspace 101 | - 'export ZEPHYR_SDK_INSTALL_DIR=$(vsd info | grep ZEPHYR_SDK | sed "s/ZEPHYR_SDK_INSTALL_DIR: //")' 102 | # Copy Zephyr SDK to include it in artifacts 103 | - mv ${ZEPHYR_SDK_INSTALL_DIR} zephyr-sdk 104 | artifacts: 105 | paths: 106 | - workspace 107 | - zephyr-sdk 108 | when: always 109 | 110 | 111 | validate-setup: 112 | <<: *common_only 113 | stage: validate 114 | tags: ['ace-x86_64'] 115 | dependencies: [setup-workspace] 116 | before_script: 117 | - *install-requirements 118 | script: 119 | - *init-venv 120 | - *init-workspace 121 | # Build app on demo graph 122 | - vsd prepare-zephyr-app demo/stm32-led-thermometer.json demo/blinky-temperature 123 | # We expect that the simulation will succeed and run forever. In that case, timeout 124 | # command would kill the simulation after 20 seconds and return 124 indicating timeout. 125 | - timeout --signal=SIGINT 20 vsd simulate demo_blinky_temp || test $? -eq 124 126 | 127 | # Check the app generated from template in the same way 128 | - vsd prepare-zephyr-app demo/stm32-led-thermometer.json blinky-from-template --from-template demo/templates/blinky-temperature --force 129 | - timeout --signal=SIGINT 20 vsd simulate demo_blinky_temp || test $? 
-eq 124 130 | artifacts: 131 | paths: 132 | - workspace/builds/demo_blinky_temp 133 | when: always 134 | 135 | 136 | validate-targets: 137 | <<: *common_only 138 | stage: validate 139 | tags: ['ace-x86_64'] 140 | dependencies: [setup-workspace] 141 | parallel: 6 142 | before_script: 143 | - *install-requirements 144 | script: 145 | - *init-venv 146 | - *init-workspace 147 | - pip install -r tools/requirements.txt 148 | - ./tools/validate.py all-socs --chunk-total ${CI_NODE_TOTAL} --chunk-id $((${CI_NODE_INDEX} - 1)) --seed ${CI_COMMIT_SHA} --output validate-${CI_NODE_INDEX}.json 149 | artifacts: 150 | paths: 151 | - validate.log 152 | - validate-*.json 153 | - workspace/builds 154 | when: always 155 | 156 | 157 | aggregate-results: 158 | <<: *common_only 159 | stage: aggregate-results 160 | tags: ['ace-x86_64'] 161 | dependencies: [validate-targets] 162 | before_script: 163 | - apt-get -y update &> /dev/null 164 | - apt-get -y install git python3 python3-venv python3-pip &> /dev/null 165 | script: 166 | - *init-venv 167 | - pip install -r tools/requirements.txt 168 | - ./tools/validate.py print-results validate-*.json --output results.json 169 | artifacts: 170 | paths: 171 | - results.json 172 | - workspace/builds 173 | 174 | 175 | show-changes: 176 | <<: *common_only 177 | stage: show-changes 178 | tags: ['ace-x86_64'] 179 | dependencies: [aggregate-results] 180 | before_script: 181 | - apt-get -y update &> /dev/null 182 | - apt-get -y install git curl jq python3 python3-venv python3-pip &> /dev/null 183 | script: 184 | - *init-venv 185 | - pip install -r tools/requirements.txt 186 | - git clone https://github.com/antmicro/visual-system-designer-resources vsd-resources 187 | - ./tools/validate.py show-changes --fail-on-regression 188 | vsd-resources/visual-system-designer-app/boards-support.json results.json 189 | artifacts: 190 | paths: 191 | - results.json 192 | when: always 193 | allow_failure: true 194 | -------------------------------------------------------------------------------- /.github/workflows/workflow.yml: -------------------------------------------------------------------------------- 1 | name: Validate setup 2 | on: push 3 | 4 | jobs: 5 | validate-setup: 6 | name: Validate setup 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | 11 | - uses: actions/setup-python@v5 12 | with: 13 | python-version: '3.10' 14 | 15 | - name: Install dependencies 16 | run: | 17 | sudo apt-get -qqy update 18 | sudo apt-get -qqy autoremove 19 | sudo apt-get -qqy install npm libarchive-tools curl git cmake ninja-build gperf ccache dfu-util device-tree-compiler wget xz-utils file make gcc gcc-multilib g++-multilib libsdl2-dev 20 | 21 | - name: Prepare workspace 22 | run: | 23 | echo "progress=dot:giga" > ${HOME}/.wgetrc 24 | pip install -e . 25 | vsd init workspace 26 | echo "VSD_WORKSPACE=$(pwd)/workspace" >> $GITHUB_ENV 27 | 28 | - name: Build and run demo 29 | run: | 30 | vsd prepare-zephyr-app demo/stm32-led-thermometer.json demo/blinky-temperature 31 | timeout --signal=SIGINT 20 vsd simulate demo_blinky_temp || test $? -eq 124 32 | 33 | - name: Build and run demo from template 34 | run: | 35 | vsd prepare-zephyr-app demo/stm32-led-thermometer.json blinky-from-template --from-template demo/templates/blinky-temperature --force 36 | timeout --signal=SIGINT 20 vsd simulate demo_blinky_temp || test $? 
-eq 124 37 | 38 | - uses: actions/upload-artifact@v4 39 | with: 40 | path: workspace/builds/demo_blinky_temp 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | workspace 2 | build 3 | *.egg-info 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Visual System Designer 2 | 3 | Copyright (c) 2023 [Antmicro](https://www.antmicro.com) 4 | 5 | The Visual System Designer app is a local multi-tool that incorporates the diagramming capabilities of the online [Visual System Designer](https://designer.antmicro.com/) and can be used to build block designs of embedded systems in a diagramming workflow. 
6 | For more background on Visual System Designer and its goals, please read the [introductory note on Antmicro's blog](https://antmicro.com/blog/2023/09/build-embedded-systems-with-vsd/). 7 | 8 | The tool can also be used to generate and build [Zephyr RTOS](https://zephyrproject.org/)-based firmware and simulate it using [Renode](https://www.renode.io), Antmicro's open source simulation framework, while visualizing the state of the simulation. 9 | 10 | ## Demo 11 | 12 | https://github.com/antmicro/visual-system-designer-app/assets/114056459/9262c9db-82ad-4abf-ac39-a331427065c2 13 | 14 | ## Prerequisites 15 | 16 | The VSD application currently depends on several other projects (kenning-pipeline-manager, Zephyr and Renode), so their dependencies must be installed first. 17 | Make sure you have installed all the programs mentioned below. 18 | Any other dependencies (e.g. Python requirements or the Zephyr workspace) will be downloaded later by the setup script. 19 | 20 | (The following package names are for Debian-based systems.) 21 | 22 | * [Pipeline Manager dependencies](https://github.com/antmicro/kenning-pipeline-manager#prerequisites) 23 | 24 | ``` 25 | npm python3 python3-pip 26 | ``` 27 | * [Zephyr dependencies](https://docs.zephyrproject.org/latest/develop/getting_started/index.html#install-dependencies) 28 | 29 | ``` 30 | git cmake ninja-build gperf ccache dfu-util device-tree-compiler wget python3-dev python3-pip python3-setuptools \ 31 | python3-tk python3-wheel xz-utils file make gcc gcc-multilib g++-multilib libsdl2-dev libmagic1 32 | ``` 33 | * [Renode dependencies](https://github.com/renode/renode#installing-dependencies) 34 | 35 | ``` 36 | mono-complete 37 | ``` 38 | 39 | NOTE: On Arch-based systems, the `gtk-sharp` package must additionally be installed to run Renode successfully. 40 | 41 | ## Setup 42 | 43 | To prepare the project's environment and download the necessary files, run: 44 | 45 | ``` 46 | python3 -m venv .venv 47 | source .venv/bin/activate 48 | pip install -e . 49 | vsd init workspace 50 | export VSD_WORKSPACE=$(pwd)/workspace 51 | ``` 52 | 53 | ## Starting the VSD application 54 | 55 | The most convenient way to run VSD is to use it interactively: 56 | 57 | ``` 58 | vsd run 59 | ``` 60 | 61 | After running this command, the VSD server starts and the application can be used in a web browser (by default at http://localhost:9000). 62 | It can be used to design a graph of the platform, build an example Zephyr application for the designed platform and run it in simulation using Renode. 63 | 64 | To adjust how the VSD application starts, use the following options (they can also be listed with `--help`): 65 | 66 | ``` 67 | Usage: vsd run [OPTIONS] 68 | 69 | Options: 70 | --application PATH [default: demo/blinky-temperature] 71 | --website-host TEXT [default: 127.0.0.1] 72 | --website-port INTEGER [default: 9000] 73 | --vsd-backend-host TEXT [default: 127.0.0.1] 74 | --vsd-backend-port INTEGER [default: 5000] 75 | --remote / --no-remote [default: no-remote] 76 | --spec-mod PATH 77 | --verbosity TEXT [default: INFO] 78 | ```
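For example, to serve the frontend on all interfaces and a different port, combine the flags listed above (the values here are only illustrative):

```
vsd run --website-host 0.0.0.0 --website-port 8080 --verbosity DEBUG
```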
79 | 80 | ## Using the VSD application 81 | 82 | Once the VSD application is launched, it can be used to design graphs. 83 | Graphs can be designed either from scratch or based on another graph imported using the "Load graph file" option. 84 | 85 | Visual System Designer is also capable of running a Zephyr demo application based on the created graphs. 86 | 87 | To build Zephyr for the current graph, use the "Build" button. 88 | After the build has succeeded, the simulation can be started using the "Run simulation" button. 89 | The build logs and Zephyr console output are available in dedicated terminals at the bottom of the screen. 90 | 91 | ### Modifying specification 92 | 93 | To add new nodes to the specification or modify existing ones, you have to define a JSON file with the desired modifications. 94 | An example file (with modifications needed when running VSD interactively) is present in [vsd/spec_mods/interactive.json](./vsd/spec_mods/interactive.json). 95 | After defining such files, specify their paths when starting the VSD app (they will be applied in the order they were specified): 96 | 97 | ``` 98 | vsd run --spec-mod mod1.json --spec-mod mod2.json 99 | # or 100 | vsd prepare-zephyr-board board-graph.json --spec-mod mod1.json --spec-mod mod2.json 101 | ``` 102 | 103 | To modify the specification without running the VSD app, you can use the [tools/modify_specification.py](./tools/modify_specification.py) script. 104 | 105 | #### Specification modifications file format 106 | 107 | At a high level, the JSON file containing the description of the modifications has three keys: 108 | 109 | * `"metadata"` -- each field specified here will replace the corresponding one in the metadata of the original specification 110 | * `"add_nodes"` -- new nodes which will be directly added to the original specification 111 | * `"mods"` -- each entry in this section describes how to modify a group of nodes specified in the `"names"` list: 112 | - `"add_properties"` -- properties specified here will be added to all specified nodes 113 | - `"add_interfaces"` -- interfaces specified here will be added to all specified nodes 114 | 115 | Example file: 116 | ```JSON 117 | { 118 | "metadata": { 119 | "notifyWhenChanged": true 120 | }, 121 | "add_nodes": [ 122 | { 123 | "abstract": false, 124 | "category": "Category/SomeNode", 125 | "name": "SomeNode", 126 | "properties": [ 127 | { 128 | "default": "", 129 | "name": "property1", 130 | "type": "text" 131 | } 132 | ], 133 | "interfaces": [ 134 | { 135 | "direction": "inout", 136 | "maxConnectionsCount": -1, 137 | "name": "interface1", 138 | "side": "left", 139 | "type": "interface1" 140 | } 141 | ] 142 | } 143 | ], 144 | "mods": [ 145 | { 146 | "names": ["LED"], 147 | "add_properties": [ 148 | { 149 | "default": false, 150 | "name": "active", 151 | "type": "bool" 152 | } 153 | ] 154 | }, 155 | { 156 | "names": [ 157 | "bme280", 158 | "sht4xd", 159 | "tmp108", 160 | "si7210" 161 | ], 162 | "add_properties": [ 163 | { 164 | "default": 20.0, 165 | "name": "temperature", 166 | "type": "number" 167 | } 168 | ] 169 | } 170 | ] 171 | } 172 | ```
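For reference, such modifications can also be applied programmatically. The sketch below mirrors the flow of [tools/modify_specification.py](./tools/modify_specification.py) (included later in this repository); the specification path assumes the default workspace layout, and `mod1.json`/`mod2.json` are hypothetical mod files:

```python
import json
from pathlib import Path

from vsd.specification import Specification

spec_path = Path("workspace/visual-system-designer-resources/components-specification.json")
specification = Specification(spec_path)

# Mods are applied in the order they are given.
for mod_path in (Path("mod1.json"), Path("mod2.json")):
    with open(mod_path) as f:
        specification.modify(json.load(f))

with open(spec_path, "w") as f:
    json.dump(specification.spec_json, f, sort_keys=True, indent=4, ensure_ascii=False)
```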
173 | 174 | ## Using VSD from command line 175 | 176 | VSD can also be used as a command line utility to prepare and simulate a demo on a graph created with VSD. 177 | There are two available commands: `prepare-zephyr-app` and `simulate`. 178 | These commands process a graph obtained earlier (e.g. from [designer.antmicro.com](https://designer.antmicro.com) or using the `vsd run` command). 179 | 180 | ### `prepare-zephyr-app` command 181 | 182 | This command is used to prepare and build a Zephyr application for a given board graph. 183 | 184 | ``` 185 | usage: vsd prepare-zephyr-app graph_file source_dir [--from-template template_dir] [--force] 186 | ``` 187 | 188 | It requires providing: 189 | 190 | * `graph_file` - the file defining the VSD graph representing the design 191 | * `source_dir` - the directory where the Zephyr project is placed (or where the project generated from a template should be placed) 192 | 193 | Two options control how the application sources are provided: 194 | 195 | - `--from-template` - specify the application template (by name or directory) which will be used to generate the application sources. Currently there is only one template available by name: `blinky-temperature`. 196 | - `--force` - overwrite the contents of `source_dir` if it already exists. 197 | 198 | #### Example 199 | 200 | A few basic usage examples: 201 | 202 | - Building the demo from sources: 203 | ``` 204 | vsd prepare-zephyr-app demo/stm32-led-thermometer.json demo/blinky-temperature 205 | ``` 206 | - Building the demo from a template: 207 | ``` 208 | vsd prepare-zephyr-app demo/stm32-led-thermometer.json ./my-project --from-template demo/templates/blinky-temperature/ 209 | ``` 210 | 211 | ### `simulate` command 212 | 213 | This command is used to start the Renode simulation of the demo built in the previous step. 214 | The `board_name`, which has to be specified as an argument, is obtained from the graph name by substituting all whitespace and special characters with underscores (see the sketch below). 215 | The board name is also printed in the previous step. 216 | 217 | ``` 218 | usage: vsd simulate board_name 219 | ``` 220 | 221 | #### Example 222 | 223 | First, build the demo, e.g. from a template as demonstrated for `prepare-zephyr-app`: 224 | 225 | ``` 226 | vsd prepare-zephyr-app demo/stm32-led-thermometer.json ./my-blinky --from-template demo/templates/blinky-temperature/ 227 | ``` 228 | 229 | Then, run `vsd simulate` with the board name, here: 230 | 231 | ``` 232 | vsd simulate demo_blinky_temp 233 | ```
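The exact name-mangling helper lives in the VSD sources and is not part of this excerpt; as a rough sketch, [tools/validate.py](./tools/validate.py) (shown later in this repository) derives board names with a substitution along these lines:

```python
import re

def graph_name_to_board_name(graph_name: str) -> str:
    # Collapse runs of whitespace, '+' and '-' into single underscores.
    return re.sub(r"[\s+-]+", "_", graph_name)

assert graph_name_to_board_name("demo blinky+temp") == "demo_blinky_temp"
```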
234 | 235 | ## Example application 236 | 237 | The VSD app comes with its own Zephyr demo ([demo/blinky-temperature](./demo/blinky-temperature/)) which can be used on a predefined graph ([stm32-led-thermometer.json](./demo/stm32-led-thermometer.json)). 238 | To run that demo interactively, you can start the VSD app, import the graph and run the application using the "Run" button. 239 | 240 | To prepare and run the demo in the shell, execute the following commands: 241 | 242 | ``` 243 | vsd prepare-zephyr-app demo/stm32-led-thermometer.json demo/blinky-temperature 244 | vsd simulate demo_blinky_temp 245 | ``` 246 | 247 | ## Demo with frontend hosted on a remote server 248 | 249 | ### Building and hosting Pipeline Manager 250 | 251 | To build the Pipeline Manager frontend, create a `venv` environment and install [Pipeline Manager](https://github.com/antmicro/kenning-pipeline-manager): 252 | 253 | ```sh 254 | python3 -m venv .venv 255 | source .venv/bin/activate 256 | pip install git+https://github.com/antmicro/kenning-pipeline-manager.git 257 | ``` 258 | 259 | The frontend requires additional assets (icons, graphics, ...) to render properly - they can be obtained from the [VSD resources repository](https://github.com/antmicro/visual-system-designer-resources): 260 | 261 | ```sh 262 | git clone https://github.com/antmicro/visual-system-designer-resources.git 263 | ``` 264 | 265 | After obtaining all requirements, the frontend can be built with: 266 | 267 | ```sh 268 | pipeline_manager build server-app --communication-server-host localhost --communication-server-port 9000 --output-directory website --workspace-directory pm-workspace --assets-directory visual-system-designer-resources/assets/ 269 | ``` 270 | 271 | The `--communication-server-host` and `--communication-server-port` options specify the address at which the frontend will reach the server started by `vsd run` (from the user's desktop perspective, hence `localhost` is sufficient). 272 | 273 | The `website` directory can now be served using any HTTP server (e.g. the one included in the Python3 distribution): 274 | 275 | ```sh 276 | python3 -m http.server -d ./website 277 | ``` 278 | 279 | ### Running the demo 280 | 281 | Assuming the commands are executed in the root directory of this project: 282 | 283 | 1. Prepare the workspace as described in [Setup](#setup). 284 | 2. Start the VSD app (`--application` is the path to the sources of the Zephyr application): 285 | ```sh 286 | vsd run --application demo/blinky-temperature 287 | ``` 288 | 3. Go to the address hosting Pipeline Manager (when using the Python server above, go to http://localhost:8000). 289 | 4. Use VSD as usual (e.g. load [`visual-system-designer-app/demo/stm32-led-thermometer.json`](demo/stm32-led-thermometer.json) and click "Run"). 290 | 291 | ## License 292 | 293 | This project is published under the [Apache-2.0](LICENSE) license. 294 | -------------------------------------------------------------------------------- /demo/blinky-temperature/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | 3 | cmake_minimum_required(VERSION 3.20.0) 4 | find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE}) 5 | project(blinky-temperature) 6 | 7 | FILE(GLOB app_sources src/*.c) 8 | target_sources(app PRIVATE ${app_sources}) 9 | -------------------------------------------------------------------------------- /demo/blinky-temperature/prj.conf: -------------------------------------------------------------------------------- 1 | CONFIG_SENSOR=y 2 | CONFIG_STDOUT_CONSOLE=y 3 | CONFIG_CBPRINTF_FP_SUPPORT=y 4 | CONFIG_GPIO=y 5 | -------------------------------------------------------------------------------- /demo/blinky-temperature/src/main.c: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 ARM Ltd. 
3 | * Copyright (c) 2023 FTP Technologies 4 | * Copyright (c) 2023 Daniel DeGrasse 5 | * Copyright (c) 2023 Antmicro 6 | * 7 | * SPDX-License-Identifier: Apache-2.0 8 | */ 9 | 10 | #include 11 | #include 12 | #include 13 | 14 | static double high_temp; 15 | static double low_temp; 16 | 17 | #define GET_GPIO_SPEC(n) GPIO_DT_SPEC_GET(n, gpios), 18 | #define GET_NAME(n) DT_NODE_FULL_NAME(n), 19 | 20 | #define SENSOR_DEVICE_ELEM(n) DEVICE_DT_GET(n), 21 | #define SENSOR_NAME_ELEM(n) DT_PROP(n, friendly_name), 22 | 23 | #define IS_SENSOR(n) DT_NODE_HAS_PROP(n, friendly_name) 24 | 25 | #define GET_SENSOR_DEVICE(n) \ 26 | COND_CODE_1(DT_NODE_HAS_PROP(n, friendly_name), (SENSOR_DEVICE_ELEM(n)), ()) 27 | 28 | #define GET_SENSOR_NAME(n) \ 29 | COND_CODE_1(DT_NODE_HAS_PROP(n, friendly_name), (SENSOR_NAME_ELEM(n)), ()) 30 | 31 | 32 | static const struct gpio_dt_spec leds[] = { 33 | DT_FOREACH_CHILD(DT_PATH(leds), GET_GPIO_SPEC) 34 | }; 35 | 36 | static const char *led_names[] = { 37 | DT_FOREACH_CHILD(DT_PATH(leds), GET_NAME) 38 | }; 39 | 40 | static const struct device *const all_sensor_devices[] = { 41 | DT_FOREACH_NODE(GET_SENSOR_DEVICE) 42 | }; 43 | 44 | static const char *all_sensor_names[] = { 45 | DT_FOREACH_NODE(GET_SENSOR_NAME) 46 | }; 47 | 48 | static bool is_thermometer[ARRAY_SIZE(all_sensor_devices)]; 49 | static bool led_state[ARRAY_SIZE(leds)]; 50 | 51 | int read_temperature(const struct device *dev, struct sensor_value *val) 52 | { 53 | int ret; 54 | 55 | ret = sensor_sample_fetch_chan(dev, SENSOR_CHAN_AMBIENT_TEMP); 56 | if (ret < 0) { 57 | printf("Could not fetch temperature: %d\n", ret); 58 | return ret; 59 | } 60 | 61 | ret = sensor_channel_get(dev, SENSOR_CHAN_AMBIENT_TEMP, val); 62 | if (ret < 0) { 63 | printf("Could not get temperature: %d\n", ret); 64 | } 65 | return ret; 66 | } 67 | 68 | void temp_alert_handler(const struct device *dev, const struct sensor_trigger *trig) 69 | { 70 | int ret; 71 | struct sensor_value value; 72 | double temp; 73 | 74 | /* Read sensor value */ 75 | ret = read_temperature(dev, &value); 76 | if (ret < 0) { 77 | printf("Reading temperature failed: %d\n", ret); 78 | return; 79 | } 80 | temp = sensor_value_to_double(&value); 81 | if (temp <= low_temp) { 82 | printf("Temperature below threshold: %0.1f°C\n", temp); 83 | } else if (temp >= high_temp) { 84 | printf("Temperature above threshold: %0.1f°C\n", temp); 85 | } else { 86 | printf("Error: temperature alert triggered without valid condition\n"); 87 | } 88 | } 89 | 90 | int main(void) 91 | { 92 | struct sensor_value value; 93 | double temp; 94 | int ret; 95 | const struct sensor_trigger trig = { 96 | .chan = SENSOR_CHAN_AMBIENT_TEMP, 97 | .type = SENSOR_TRIG_THRESHOLD, 98 | }; 99 | 100 | printf("Blinky and temperature example (%s)\n", CONFIG_ARCH); 101 | printf("LEDs registered: %d\n", ARRAY_SIZE(leds)); 102 | printf("Sensors registered: %d\n", ARRAY_SIZE(all_sensor_devices)); 103 | 104 | for (int i = 0; i < ARRAY_SIZE(leds); i++) { 105 | const struct gpio_dt_spec *led = &leds[i]; 106 | if (!gpio_is_ready_dt(led)) { 107 | printf("LED %s is not ready\n", led_names[i]); 108 | return 0; 109 | } 110 | 111 | ret = gpio_pin_configure_dt(led, GPIO_OUTPUT_ACTIVE); 112 | if (ret < 0) { 113 | printf("Failed to configure LED %s\n", led_names[i]); 114 | return 0; 115 | } 116 | led_state[i] = true; 117 | } 118 | 119 | for (int i = 0; i < ARRAY_SIZE(all_sensor_devices); i++) { 120 | const struct device *const dev = all_sensor_devices[i]; 121 | if (strcmp(all_sensor_names[i], "thermometer") == 0) { 122 | 
printf("Found thermometer: %s (dev address: %p)\n", dev->name, dev); 123 | is_thermometer[i] = true; 124 | } 125 | if (!device_is_ready(dev)) { 126 | printf("Device %s is not ready\n", dev->name); 127 | return 0; 128 | } 129 | 130 | /* First, fetch a sensor sample to use for sensor thresholds */ 131 | ret = read_temperature(dev, &value); 132 | if (ret != 0) { 133 | printf("Failed to read temperature: %d\n", ret); 134 | return ret; 135 | } 136 | temp = sensor_value_to_double(&value); 137 | 138 | /* Set thresholds to +0.5 and +1.5 °C from ambient */ 139 | low_temp = temp + 0.5; 140 | ret = sensor_value_from_double(&value, low_temp); 141 | if (ret != 0) { 142 | printf("Failed to convert low threshold to sensor value: %d\n", ret); 143 | return ret; 144 | } 145 | ret = sensor_attr_set(dev, SENSOR_CHAN_AMBIENT_TEMP, 146 | SENSOR_ATTR_LOWER_THRESH, &value); 147 | if (ret == 0) { 148 | /* This sensor supports threshold triggers */ 149 | printf("Set temperature lower limit to %0.1f°C\n", low_temp); 150 | } 151 | 152 | high_temp = temp + 1.5; 153 | ret = sensor_value_from_double(&value, high_temp); 154 | if (ret != 0) { 155 | printf("Failed to convert low threshold to sensor value: %d\n", ret); 156 | return ret; 157 | } 158 | ret = sensor_attr_set(dev, SENSOR_CHAN_AMBIENT_TEMP, 159 | SENSOR_ATTR_UPPER_THRESH, &value); 160 | if (ret == 0) { 161 | /* This sensor supports threshold triggers */ 162 | printf("Set temperature upper limit to %0.1f°C\n", high_temp); 163 | } 164 | 165 | ret = sensor_trigger_set(dev, &trig, temp_alert_handler); 166 | if (ret == 0) { 167 | printf("Enabled sensor threshold triggers\n"); 168 | } 169 | } 170 | 171 | while (1) { 172 | for (int i = 0; i < ARRAY_SIZE(all_sensor_devices); i++) { 173 | const struct device *const dev = all_sensor_devices[i]; 174 | if (!is_thermometer[i]) { 175 | continue; 176 | } 177 | ret = read_temperature(dev, &value); 178 | if (ret != 0) { 179 | printf("Failed to read temperature: %d\n", ret); 180 | break; 181 | } 182 | printf("%s: %0.1lf°C\n", dev->name, sensor_value_to_double(&value)); 183 | } 184 | 185 | for (int i = 0; i < ARRAY_SIZE(leds); i++) { 186 | const struct gpio_dt_spec *led = &leds[i]; 187 | ret = gpio_pin_toggle_dt(led); 188 | if (ret < 0) { 189 | printf("Failed to toggle LED %s state\n", led_names[i]); 190 | } 191 | 192 | /* Update led state */ 193 | led_state[i] = !led_state[i]; 194 | 195 | printk("LED %s state: %d\n", led_names[i], led_state[i]); 196 | } 197 | 198 | k_sleep(K_MSEC(1000)); 199 | } 200 | return 0; 201 | } 202 | -------------------------------------------------------------------------------- /demo/shell/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | 3 | cmake_minimum_required(VERSION 3.20.0) 4 | find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE}) 5 | project(shell) 6 | 7 | FILE(GLOB app_sources src/*.c) 8 | target_sources(app PRIVATE ${app_sources}) 9 | -------------------------------------------------------------------------------- /demo/shell/prj.conf: -------------------------------------------------------------------------------- 1 | CONFIG_PRINTK=y 2 | CONFIG_SHELL=y 3 | CONFIG_INIT_STACKS=y 4 | CONFIG_THREAD_STACK_INFO=y 5 | CONFIG_KERNEL_SHELL=y 6 | CONFIG_LED=y 7 | CONFIG_LED_SHELL=y 8 | CONFIG_THREAD_MONITOR=y 9 | CONFIG_BOOT_BANNER=y 10 | CONFIG_THREAD_NAME=y 11 | CONFIG_DEVICE_SHELL=y 12 | CONFIG_POSIX_CLOCK=y 13 | CONFIG_DATE_SHELL=y 14 | CONFIG_THREAD_RUNTIME_STATS=y 15 | 
CONFIG_THREAD_RUNTIME_STATS_USE_TIMING_FUNCTIONS=y 16 | CONFIG_STATS=y 17 | CONFIG_STATS_SHELL=y 18 | CONFIG_SHELL_BACKEND_SERIAL_API_POLLING=y 19 | -------------------------------------------------------------------------------- /demo/shell/src/main.c: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2023 Antmicro 3 | * SPDX-License-Identifier: Apache-2.0 4 | */ 5 | #include 6 | 7 | int main(void) 8 | { 9 | printf("Hello on %s", CONFIG_BOARD); 10 | return 0; 11 | } 12 | -------------------------------------------------------------------------------- /demo/stm32-led-thermometer.json: -------------------------------------------------------------------------------- 1 | { 2 | "graph": { 3 | "id": "5f7206e9-758b-4d5f-9df3-854364634101", 4 | "name": "demo_blinky_temp", 5 | "nodes": [ 6 | { 7 | "id": "930e3742-5a9f-4568-9472-efa29752f92d", 8 | "position": { 9 | "x": 800, 10 | "y": 700 11 | }, 12 | "width": 200, 13 | "twoColumn": true, 14 | "interfaces": [ 15 | { 16 | "name": "adc1", 17 | "id": "82daf895-4ada-444d-b178-288633defbbf", 18 | "direction": "inout", 19 | "side": "left", 20 | "sidePosition": 0 21 | }, 22 | { 23 | "name": "flash", 24 | "id": "c62b0d2b-7814-4a2e-b629-60bb030d9967", 25 | "direction": "inout", 26 | "side": "left", 27 | "sidePosition": 1 28 | }, 29 | { 30 | "name": "gpiob", 31 | "id": "3f0d7b99-752b-4c91-9aed-241d6c3982e1", 32 | "direction": "inout", 33 | "side": "left", 34 | "sidePosition": 2 35 | }, 36 | { 37 | "name": "gpiod", 38 | "id": "4fb02ee7-7dcc-41cc-9c5a-fee4b024b0b4", 39 | "direction": "inout", 40 | "side": "left", 41 | "sidePosition": 3 42 | }, 43 | { 44 | "name": "i2c1", 45 | "id": "16deefb9-37d3-4a3d-9c99-f00d0b9e9f2c", 46 | "direction": "inout", 47 | "side": "left", 48 | "sidePosition": 4 49 | }, 50 | { 51 | "name": "lpuart1", 52 | "id": "b2928848-3081-4be2-ab53-838be70a324f", 53 | "direction": "inout", 54 | "side": "left", 55 | "sidePosition": 5 56 | }, 57 | { 58 | "name": "pwm", 59 | "id": "83a51f16-8dcb-4450-963e-6a21ca8533ac", 60 | "direction": "inout", 61 | "side": "left", 62 | "sidePosition": 6 63 | }, 64 | { 65 | "name": "pwm-2", 66 | "id": "db33ab7b-5161-4e01-83c7-0e95c42e3fa0", 67 | "direction": "inout", 68 | "side": "left", 69 | "sidePosition": 7 70 | }, 71 | { 72 | "name": "spi2", 73 | "id": "0c8b2131-ce63-4feb-8009-c7586335bb36", 74 | "direction": "inout", 75 | "side": "left", 76 | "sidePosition": 8 77 | }, 78 | { 79 | "name": "usart2", 80 | "id": "b2fe6528-89eb-49ce-92d2-85aacf008988", 81 | "direction": "inout", 82 | "side": "left", 83 | "sidePosition": 9 84 | }, 85 | { 86 | "name": "dac1", 87 | "id": "0e593338-e598-4725-9251-413452931e95", 88 | "direction": "inout", 89 | "side": "right", 90 | "sidePosition": 0 91 | }, 92 | { 93 | "name": "gpioa", 94 | "id": "e789000f-45b2-4bad-b98c-411d83f7be6f", 95 | "direction": "inout", 96 | "side": "right", 97 | "sidePosition": 1 98 | }, 99 | { 100 | "name": "gpioc", 101 | "id": "eb53ad25-95cd-48ab-9320-c6e0fdc322db", 102 | "direction": "inout", 103 | "side": "right", 104 | "sidePosition": 2 105 | }, 106 | { 107 | "name": "gpioh", 108 | "id": "d8272f68-5e6e-4bc3-aa5f-264b5cf559e1", 109 | "direction": "inout", 110 | "side": "right", 111 | "sidePosition": 3 112 | }, 113 | { 114 | "name": "i2c2", 115 | "id": "dbb8e593-c7db-4b8e-837a-66103ff6b139", 116 | "direction": "inout", 117 | "side": "right", 118 | "sidePosition": 4 119 | }, 120 | { 121 | "name": "pinctrl", 122 | "id": "8f904417-7dfe-4a5c-b2d9-23637f2efc5f", 123 | "direction": "inout", 124 | 
"side": "right", 125 | "sidePosition": 5 126 | }, 127 | { 128 | "name": "pwm-1", 129 | "id": "59227c61-1d8d-4ad5-a67f-35f2fdaab0bf", 130 | "direction": "inout", 131 | "side": "right", 132 | "sidePosition": 6 133 | }, 134 | { 135 | "name": "spi1", 136 | "id": "a0555b7c-1217-4aca-a5fd-eb7ec30706a4", 137 | "direction": "inout", 138 | "side": "right", 139 | "sidePosition": 7 140 | }, 141 | { 142 | "name": "usart1", 143 | "id": "878b9abd-ccb1-46a3-ac22-9886ca958f03", 144 | "direction": "inout", 145 | "side": "right", 146 | "sidePosition": 8 147 | }, 148 | { 149 | "name": "usb", 150 | "id": "b83e9523-f640-47a6-9b56-88388ef63754", 151 | "direction": "inout", 152 | "side": "right", 153 | "sidePosition": 9 154 | } 155 | ], 156 | "properties": [], 157 | "enabledInterfaceGroups": [], 158 | "name": "stm32l053xx", 159 | "instanceName": "stm32l053xx" 160 | }, 161 | { 162 | "id": "50393e0a-bfd9-46c3-bd54-c32ce0288a86", 163 | "position": { 164 | "x": 200, 165 | "y": 600 166 | }, 167 | "width": 200, 168 | "twoColumn": true, 169 | "interfaces": [ 170 | { 171 | "name": "alert", 172 | "id": "dc6190eb-e197-497a-ab46-cf73cbad35aa", 173 | "direction": "inout", 174 | "side": "right", 175 | "sidePosition": 1 176 | }, 177 | { 178 | "name": "i2c", 179 | "id": "dfce1a40-f369-42c1-8d58-b01ba0a52b7d", 180 | "direction": "inout", 181 | "side": "right", 182 | "sidePosition": 0 183 | } 184 | ], 185 | "properties": [ 186 | { 187 | "name": "address (i2c)", 188 | "id": "67a05782-dc73-4d55-b798-822342c3eb20", 189 | "value": "0x18" 190 | } 191 | ], 192 | "enabledInterfaceGroups": [], 193 | "name": "tmp108", 194 | "instanceName": "tmp108" 195 | }, 196 | { 197 | "id": "d2871700-5270-4ef8-9b88-a689df14bba2", 198 | "position": { 199 | "x": 200, 200 | "y": 900 201 | }, 202 | "width": 200, 203 | "twoColumn": true, 204 | "interfaces": [ 205 | { 206 | "name": "i2c", 207 | "id": "4a9e1654-1691-4464-8c52-5692292f1320", 208 | "direction": "inout", 209 | "side": "right", 210 | "sidePosition": 0 211 | } 212 | ], 213 | "properties": [ 214 | { 215 | "name": "address (i2c)", 216 | "id": "efc349db-3f2d-45c1-ae02-6cbfe3a48bef", 217 | "value": "0x30" 218 | } 219 | ], 220 | "enabledInterfaceGroups": [], 221 | "name": "Silicon Labs SI7210-B-00-IVR", 222 | "instanceName": "Silicon Labs SI7210-B-00-IVR" 223 | }, 224 | { 225 | "id": "3d3fb00d-0a64-477f-abf9-3cb017b93ab0", 226 | "position": { 227 | "x": 1400, 228 | "y": 430 229 | }, 230 | "width": 200, 231 | "twoColumn": true, 232 | "interfaces": [ 233 | { 234 | "name": "gpio", 235 | "id": "d33c1500-f3c9-45a1-a30e-f063223540be", 236 | "direction": "inout", 237 | "side": "left", 238 | "sidePosition": 0 239 | } 240 | ], 241 | "properties": [ 242 | { 243 | "name": "address (gpio)", 244 | "id": "09ebbc50-59a9-4800-9181-26cebaf9d980", 245 | "value": "0x0" 246 | } 247 | ], 248 | "enabledInterfaceGroups": [], 249 | "name": "Lite-On LTST-C190KGKT", 250 | "instanceName": "Lite-On LTST-C190KGKT" 251 | }, 252 | { 253 | "id": "5dfc9630-37f4-4c98-99e8-c1f0b2395fc2", 254 | "position": { 255 | "x": 1400, 256 | "y": 670 257 | }, 258 | "width": 200, 259 | "twoColumn": true, 260 | "interfaces": [ 261 | { 262 | "name": "gpio", 263 | "id": "a7839926-7428-4d2e-9017-31be11f1b8ab", 264 | "direction": "inout", 265 | "side": "left", 266 | "sidePosition": 0 267 | } 268 | ], 269 | "properties": [ 270 | { 271 | "name": "address (gpio)", 272 | "id": "331a09b7-777a-4d68-8492-fc3400fe4f92", 273 | "value": "0x1" 274 | } 275 | ], 276 | "enabledInterfaceGroups": [], 277 | "name": "Lite-On LTST-C190KRKT", 278 | "instanceName": "Lite-On 
LTST-C190KRKT" 279 | }, 280 | { 281 | "id": "c0cf7a03-dd38-475d-9b06-d0112cf5c7a2", 282 | "position": { 283 | "x": 1400, 284 | "y": 1000 285 | }, 286 | "width": 200, 287 | "twoColumn": true, 288 | "interfaces": [ 289 | { 290 | "name": "gpio", 291 | "id": "4c827fdb-280c-4743-99b3-3e47dbd260d4", 292 | "direction": "inout", 293 | "side": "left", 294 | "sidePosition": 0 295 | } 296 | ], 297 | "properties": [ 298 | { 299 | "name": "address (gpio)", 300 | "id": "d53e9875-a773-41af-bad9-8964a5ca00d5", 301 | "value": "0x0" 302 | } 303 | ], 304 | "enabledInterfaceGroups": [], 305 | "name": "Vishay VLMY1500-GS08", 306 | "instanceName": "Vishay VLMY1500-GS08" 307 | } 308 | ], 309 | "connections": [ 310 | { 311 | "id": "c96c0fe1-1088-42bf-847b-68637b5c558b", 312 | "from": "dfce1a40-f369-42c1-8d58-b01ba0a52b7d", 313 | "to": "16deefb9-37d3-4a3d-9c99-f00d0b9e9f2c" 314 | }, 315 | { 316 | "id": "b0722736-50af-436e-a342-af542fd7d619", 317 | "from": "4a9e1654-1691-4464-8c52-5692292f1320", 318 | "to": "16deefb9-37d3-4a3d-9c99-f00d0b9e9f2c" 319 | }, 320 | { 321 | "id": "f5022bbf-6e3a-42da-8b98-30c3b2089f46", 322 | "from": "d33c1500-f3c9-45a1-a30e-f063223540be", 323 | "to": "e789000f-45b2-4bad-b98c-411d83f7be6f" 324 | }, 325 | { 326 | "id": "37d9383e-fa0e-46df-807d-689380fe00e5", 327 | "from": "a7839926-7428-4d2e-9017-31be11f1b8ab", 328 | "to": "e789000f-45b2-4bad-b98c-411d83f7be6f" 329 | }, 330 | { 331 | "id": "6a80c2b1-6486-498a-881e-301534992516", 332 | "from": "4c827fdb-280c-4743-99b3-3e47dbd260d4", 333 | "to": "d8272f68-5e6e-4bc3-aa5f-264b5cf559e1" 334 | } 335 | ] 336 | }, 337 | "version": "20230830.11", 338 | "metadata": { 339 | "movementStep": 10, 340 | "randomizedOffset": false 341 | } 342 | } 343 | -------------------------------------------------------------------------------- /demo/templates/blinky-temperature/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.20.0) 2 | find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE}) 3 | project(blinky-temperature) 4 | 5 | FILE(GLOB app_sources src/*.c) 6 | target_sources(app PRIVATE ${app_sources}) 7 | -------------------------------------------------------------------------------- /demo/templates/blinky-temperature/prj.conf: -------------------------------------------------------------------------------- 1 | CONFIG_SENSOR=y 2 | CONFIG_STDOUT_CONSOLE=y 3 | CONFIG_CBPRINTF_FP_SUPPORT=y 4 | -------------------------------------------------------------------------------- /demo/templates/blinky-temperature/src/main.c: -------------------------------------------------------------------------------- 1 | /* 2 | * Generated file. 3 | */ 4 | 5 | #include 6 | 7 | #include 8 | #include 9 | 10 | #include 11 | 12 | #include "utils.h" 13 | 14 | /* Dts labels of used nodes */ 15 | 16 | //! for label in all_labels 17 | #define __{{ label.upper() }}_NODE DT_NODELABEL({{ label }}) 18 | //! endfor 19 | 20 | /* Initialize structures for discovered nodes */ 21 | 22 | //! for led_name in leds 23 | struct led {{ led_name }} = { 24 | .gpio = GPIO_DT_SPEC_GET(__{{ led_name.upper() }}_NODE, gpios), 25 | .name = DT_NODE_FULL_NAME(__{{ led_name.upper() }}_NODE), 26 | }; 27 | //! endfor 28 | 29 | //! for thermometer_name in thermometers 30 | struct thermometer {{ thermometer_name }} = { 31 | .dev = DEVICE_DT_GET(__{{ thermometer_name.upper() }}_NODE), 32 | .name = DT_NODE_FULL_NAME(__{{ thermometer_name.upper() }}_NODE), 33 | }; 34 | //! 
endfor 35 | 36 | int main(void) 37 | { 38 | int ret; 39 | double temp; 40 | 41 | /* Structures for nodes used in the demo */ 42 | 43 | //! for led_name in leds 44 | ret = init_led(&{{ led_name }}); 45 | if (ret < 0) { 46 | return ret; 47 | } 48 | //! endfor 49 | 50 | //! for thermometer_name in thermometers 51 | ret = init_thermometer(&{{ thermometer_name }}); 52 | if (ret < 0) { 53 | return ret; 54 | } 55 | //! endfor 56 | 57 | while (1) { 58 | /* Actions for each node */ 59 | 60 | //! for led_name in leds 61 | ret = toggle_led_state(&{{ led_name }}); 62 | if (ret < 0) { 63 | return ret; 64 | } 65 | //! endfor 66 | 67 | //! for thermometer_name in thermometers 68 | temp = get_temperature(&{{ thermometer_name }}); 69 | if (isnan(temp)) { 70 | return -1; 71 | } 72 | printf("%s: %0.1lf°C\n", {{ thermometer_name }}.name, temp); 73 | //! endfor 74 | 75 | k_sleep(K_MSEC(1000)); 76 | } 77 | return 0; 78 | } 79 | -------------------------------------------------------------------------------- /demo/templates/blinky-temperature/src/utils.c: -------------------------------------------------------------------------------- 1 | #include <math.h> /* NAN */ 2 | 3 | #include <stdio.h> 4 | #include <zephyr/drivers/gpio.h> 5 | #include <zephyr/drivers/sensor.h> 6 | 7 | #include "utils.h" 8 | 9 | static int read_temperature(const struct device *dev, struct sensor_value *val) 10 | { 11 | int ret; 12 | 13 | ret = sensor_sample_fetch_chan(dev, SENSOR_CHAN_AMBIENT_TEMP); 14 | if (ret < 0) { 15 | printf("Could not fetch temperature: %d\n", ret); 16 | return ret; 17 | } 18 | 19 | ret = sensor_channel_get(dev, SENSOR_CHAN_AMBIENT_TEMP, val); 20 | if (ret < 0) { 21 | printf("Could not get temperature: %d\n", ret); 22 | } 23 | return ret; 24 | } 25 | 26 | int print_temperature(const struct device *thermometer) 27 | { 28 | int ret; 29 | struct sensor_value value; 30 | 31 | ret = read_temperature(thermometer, &value); 32 | if (ret != 0) { 33 | printf("Failed to read temperature: %d\n", ret); 34 | return -1; 35 | } 36 | printf("%s: %0.1lf°C\n", thermometer->name, sensor_value_to_double(&value)); 37 | return 0; 38 | } 39 | 40 | int init_led(struct led *led) 41 | { 42 | int ret; 43 | 44 | printf("Discovered LED: %s\n", led->name); 45 | 46 | if (!gpio_is_ready_dt(&led->gpio)) { 47 | printf("LED %s is not ready\n", led->name); 48 | return -1; 49 | } 50 | 51 | ret = gpio_pin_configure_dt(&led->gpio, GPIO_OUTPUT_ACTIVE); 52 | if (ret < 0) { 53 | printf("Failed to configure LED %s\n", led->name); 54 | return ret; 55 | } 56 | 57 | led->state = false; 58 | return 0; 59 | } 60 | 61 | int init_thermometer(struct thermometer *thermometer) 62 | { 63 | printf("Discovered thermometer: %s\n", thermometer->name); 64 | 65 | if (!device_is_ready(thermometer->dev)) { 66 | printf("Device %s is not ready\n", thermometer->name); 67 | return -1; 68 | } 69 | return 0; 70 | } 71 | 72 | inline double get_temperature(struct thermometer *thermometer) 73 | { 74 | int ret; 75 | struct sensor_value value; 76 | 77 | ret = read_temperature(thermometer->dev, &value); 78 | if (ret != 0) { 79 | printf("Failed to read temperature: %d\n", ret); 80 | return NAN; 81 | } 82 | 83 | return sensor_value_to_double(&value); 84 | } 85 | 86 | inline int toggle_led_state(struct led *led) 87 | { 88 | int ret; 89 | 90 | ret = gpio_pin_toggle_dt(&led->gpio); 91 | if (ret < 0) { 92 | printf("Failed to toggle LED %s state\n", led->name); 93 | return ret; 94 | } 95 | 96 | led->state = !led->state; 97 | 98 | printk("LED %s state: %d\n", led->name, led->state); 99 | return 0; 100 | }
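Note that the `//!` lines in the template `main.c` above are expansion directives processed while generating the project (the app depends on Jinja2, see `pyproject.toml` below). As a rough illustration, for a hypothetical graph containing one LED with the devicetree label `led_0` and one thermometer labeled `tmp108`, the top of the generated `main.c` would expand to something like:

```c
/* Hypothetical expansion of the template loops above;
 * the labels led_0 and tmp108 are illustrative only. */
#define __LED_0_NODE DT_NODELABEL(led_0)
#define __TMP108_NODE DT_NODELABEL(tmp108)

struct led led_0 = {
	.gpio = GPIO_DT_SPEC_GET(__LED_0_NODE, gpios),
	.name = DT_NODE_FULL_NAME(__LED_0_NODE),
};

struct thermometer tmp108 = {
	.dev = DEVICE_DT_GET(__TMP108_NODE),
	.name = DT_NODE_FULL_NAME(__TMP108_NODE),
};
```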
-------------------------------------------------------------------------------- /demo/templates/blinky-temperature/src/utils.h: -------------------------------------------------------------------------------- 1 | #ifndef APP_UTILS_H 2 | #define APP_UTILS_H 3 | 4 | #include <zephyr/drivers/gpio.h> /* struct gpio_dt_spec */ 5 | 6 | struct led { 7 | struct gpio_dt_spec gpio; 8 | const char *name; 9 | bool state; 10 | }; 11 | 12 | struct thermometer { 13 | const struct device *dev; 14 | const char *name; 15 | }; 16 | 17 | int init_led(struct led *led); 18 | int init_thermometer(struct thermometer *thermometer); 19 | 20 | int toggle_led_state(struct led *led); 21 | double get_temperature(struct thermometer *thermometer); 22 | int print_temperature(const struct device *thermometer); 23 | 24 | #endif /* APP_UTILS_H */ 25 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2022-2024 Antmicro 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | [build-system] 6 | requires = ["setuptools", "setuptools-scm"] 7 | build-backend = "setuptools.build_meta" 8 | 9 | [project] 10 | name = "visual-system-designer-app" 11 | version = "0.0.1" 12 | authors = [{name = "Antmicro Ltd.", email = "contact@antmicro.com"}] 13 | description = "Visual System Designer application" 14 | dependencies = [ 15 | "pyrenode3[all] @ git+https://github.com/antmicro/pyrenode3@e74fed065cf3a49beeaa94fd5bf2d72b8b7e1988", 16 | "pipeline-manager @ git+https://github.com/antmicro/kenning-pipeline-manager@0b8fffc24c7721d09da728c6c9bcd37a45092d26", 17 | "pipeline-manager-backend-communication @ git+https://github.com/antmicro/kenning-pipeline-manager-backend-communication@eb690cfb7766bfbd85a4eff2a1e809573b8b72d0", 18 | 19 | # XXX: dts2repl version should be synced with the Renode version (which is 20 | # specified in the vsd/init.py file). Eventually, it will be specified by 21 | # Renode, but currently we have to do it manually. When updating 22 | # the version here, please check if Renode shouldn't be updated as well. 
23 | "dts2repl @ git+https://github.com/antmicro/dts2repl@83e2d743202684eb1f89a1b93e859c69cd736455", 24 | 25 | "pyyaml", 26 | "typer", 27 | "west", 28 | "Jinja2", 29 | ] 30 | 31 | requires-python = ">=3.9" 32 | 33 | classifiers = [ 34 | "Programming Language :: Python :: 3", 35 | "License :: OSI Approved :: Apache Software License", 36 | ] 37 | 38 | [project.scripts] 39 | vsd = "vsd.__main__:main" 40 | 41 | [tool.setuptools] 42 | include-package-data = true 43 | 44 | [tool.setuptools.packages.find] 45 | namespaces = true 46 | include = ["vsd"] 47 | -------------------------------------------------------------------------------- /tools/modify_specification.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import argparse 4 | import json 5 | import shutil 6 | import sys 7 | 8 | from pathlib import Path 9 | from vsd.specification import Specification 10 | 11 | def main(): 12 | parser = argparse.ArgumentParser("Modify components specification") 13 | parser.add_argument( 14 | "--spec-mod", type=Path, required=True, action="append", 15 | help="File with specification modifications", 16 | ) 17 | parser.add_argument( 18 | "-s", "--spec", type=Path, 19 | default=Path("workspace/visual-system-designer-resources/components-specification.json"), 20 | help="Specification file", 21 | ) 22 | args = parser.parse_args() 23 | 24 | if not args.spec.exists(): 25 | print(f"error: {args.spec} doesn't exist.") 26 | sys.exit(1) 27 | 28 | if not all(p.exists() for p in args.spec_mod): 29 | print(f"error: Some of files: {', '.join(map(str, args.spec_mod))} don't exist.") 30 | sys.exit(1) 31 | 32 | old_spec_path = args.spec.with_suffix(".orig") 33 | shutil.copy(args.spec, old_spec_path) 34 | print(f"Saved original specification in {old_spec_path}") 35 | 36 | specification = Specification(args.spec) 37 | 38 | spec_mods = map(lambda p: json.load(open(p)), args.spec_mod) 39 | for mod in spec_mods: 40 | specification.modify(mod) 41 | 42 | with open(args.spec, "w") as f: 43 | json.dump(specification.spec_json, f, sort_keys=True, indent=4, ensure_ascii=False) 44 | 45 | 46 | if __name__ == "__main__": 47 | main() 48 | -------------------------------------------------------------------------------- /tools/requirements.txt: -------------------------------------------------------------------------------- 1 | texttable 2 | typer==0.9.0 3 | -------------------------------------------------------------------------------- /tools/validate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2023-2024 Antmicro 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | import json 7 | import logging 8 | import random 9 | import re 10 | import sys 11 | import typer 12 | 13 | from colorama import Fore, Style 14 | from enum import IntEnum 15 | from pathlib import Path 16 | from texttable import Texttable 17 | from threading import Event 18 | from typing import List 19 | 20 | from vsd import build 21 | from vsd import env 22 | from vsd import simulate 23 | from vsd.graph import Node 24 | from vsd.specification import Specification 25 | from vsd.utils import find_chosen 26 | 27 | 28 | thermometers = ["si7210", "tmp108"] 29 | 30 | configurations = [ 31 | "soc", 32 | "soc+led", 33 | "soc+si7210", 34 | "soc+tmp108", 35 | ] 36 | 37 | test_str = { 38 | "soc": "Blinky and temperature example", 39 | "soc+led": "LED led_0 state: 1", 40 | "soc+si7210": "silabs_si7210@18:", 41 | "soc+tmp108": 
"ti_tmp108@30:", 42 | } 43 | 44 | i2c_addresses = { 45 | "si7210": 0x18, 46 | "tmp108": 0x30, 47 | } 48 | 49 | 50 | class Status(IntEnum): 51 | NONE = 0 52 | BAD_SPEC = 1 53 | BAD_NAME = 2 54 | BAD_INTERFACE = 3 55 | CONFIG = 4 56 | GENERATED = 5 57 | BUILT = 6 58 | SIMERROR = 7 59 | TIMEOUT = 8 60 | OK = 9 61 | 62 | @classmethod 63 | def print_legend(cls): 64 | descr = { 65 | cls.OK: "Simulation succeeded.", 66 | cls.TIMEOUT: "Simulation timeout.", 67 | cls.SIMERROR: "Simulation failed.", 68 | cls.BUILT: "Zephyr built successfuly.", 69 | cls.GENERATED: "Zephyr board generated.", 70 | cls.CONFIG: "Configuration created", 71 | cls.BAD_INTERFACE: "Error when looking for specified interface in node", 72 | cls.BAD_NAME: "Error when looking for Renodepedia name in node", 73 | cls.BAD_SPEC: "Error when looking for node in components specification", 74 | cls.NONE: "There is no information about given configuration", 75 | } 76 | print("Status string legend:") 77 | for stat in cls: 78 | print(f" - {stat.name} -- {descr[stat]}") 79 | 80 | 81 | def red(text): 82 | return Fore.RED + (text or '') + Style.RESET_ALL 83 | 84 | 85 | def green(text): 86 | return Fore.GREEN + (text or '') + Style.RESET_ALL 87 | 88 | 89 | class NodeSpecNotFound(Exception): 90 | pass 91 | 92 | 93 | class InterfaceNotFound(Exception): 94 | pass 95 | 96 | 97 | class RenodepediaNameNotFound(Exception): 98 | pass 99 | 100 | 101 | def find_soc_interface(soc, if_type, spec): 102 | spec_node = spec.get_node_spec(soc) 103 | if not spec_node: 104 | raise NodeSpecNotFound(f"Node '{soc}' not found in specification") 105 | 106 | for interface in spec_node["interfaces"]: 107 | if if_type == interface.get("type"): 108 | return interface["name"] 109 | raise InterfaceNotFound(f"Interface of type '{if_type}' not found in '{soc}'") 110 | 111 | 112 | def get_soc_rdp_name(soc, spec): 113 | spec_node = spec.get_node_spec(soc) 114 | if not spec_node: 115 | raise NodeSpecNotFound(f"Node '{soc}' not found in specification") 116 | 117 | if "urls" in spec_node and "rdp" in spec_node["urls"]: 118 | rdp_link = spec_node["urls"]["rdp"] 119 | return rdp_link.split("/")[-1] 120 | raise RenodepediaNameNotFound(f"Can't create rdp name for node '{soc}'") 121 | 122 | 123 | node_id = 0 124 | 125 | 126 | def create_fake_node_connection(name, soc_iface, node_iface, spec, address): 127 | global node_id 128 | node = { 129 | "name": name, 130 | "instanceName": f"{name}_{node_id}", 131 | "id": str(node_id), 132 | "properties": [], 133 | "interfaces": [], 134 | } 135 | node_id += 1 136 | node["properties"].append({ 137 | "name": f"address ({node_iface})", 138 | "value": hex(address), 139 | }) 140 | return (soc_iface, node_iface, Node(node, spec)) 141 | 142 | 143 | def prepare_single_config(soc, config, spec): 144 | connections = [] 145 | board_name = re.sub(r"[\s+-]+", "_", soc + config[3:]) 146 | soc_name = get_soc_rdp_name(soc, spec) 147 | 148 | for tmp in thermometers: 149 | if_name = find_soc_interface(soc, "i2c", spec) 150 | if tmp in config: 151 | connections.append(create_fake_node_connection(tmp, if_name, "i2c", spec, i2c_addresses[tmp])) 152 | 153 | if "led" in config: 154 | if_name = find_soc_interface(soc, "gpio", spec) 155 | connections.append(create_fake_node_connection("LED", if_name, "gpio", spec, 0x0)) 156 | 157 | return (soc_name, board_name, connections) 158 | 159 | 160 | class LineTester(): 161 | def __init__(self, line_test_cb): 162 | self.acc_event = Event() 163 | self.line_test_cb = line_test_cb 164 | self.line = [] 165 | 166 | def 
get_callback(self): 167 | def callback(char): 168 | self.line.append(char) 169 | if char == 10: # Line Feed 170 | line = bytearray(self.line).decode() 171 | if self.line_test_cb(line): 172 | self.acc_event.set() 173 | return 174 | self.line = [] 175 | return callback 176 | 177 | def wait(self, timeout): 178 | acc = self.acc_event.wait(timeout=timeout) 179 | if acc: 180 | return Status.OK 181 | else: 182 | return Status.TIMEOUT 183 | 184 | 185 | def run_test(board_name, build_dir, config): 186 | repl_path = build_dir / f"{board_name}.repl" 187 | elf_path = build_dir / "zephyr/zephyr.elf" 188 | dts_path = build_dir / "zephyr/zephyr.dts" 189 | 190 | try: 191 | emu, machine = simulate.prepare_simulation(board_name, elf_path, repl_path) 192 | except Exception as e: 193 | print(f"Simulation can't be prepared using {repl_path} and {elf_path}!", file=sys.stderr) 194 | print(f"\n{e}", file=sys.stderr) 195 | return Status.SIMERROR 196 | 197 | def test_line(line): 198 | if re.search(test_str[config], line): 199 | return True 200 | return False 201 | 202 | tester = LineTester(test_line) 203 | 204 | zephyr_console = find_chosen("zephyr,console", dts_path) 205 | 206 | for uart, uart_name in simulate.get_all_uarts(machine): 207 | if uart_name == zephyr_console: 208 | simulate.register_uart_callback( 209 | uart, 210 | tester.get_callback() 211 | ) 212 | try: 213 | emu.StartAll() 214 | status = tester.wait(3) 215 | except Exception as e: 216 | logging.error(f"{board_name}: {e}") 217 | status = Status.SIMERROR 218 | finally: 219 | emu.clear() 220 | 221 | return status 222 | 223 | 224 | def validate(soc, config, spec, app, workspace, zephyr_base): 225 | try: 226 | soc_name, board_name, connections = prepare_single_config(soc, config, spec) 227 | except NodeSpecNotFound: 228 | return Status.BAD_SPEC 229 | except RenodepediaNameNotFound: 230 | return Status.BAD_NAME 231 | except InterfaceNotFound: 232 | return Status.BAD_INTERFACE 233 | 234 | status = Status.CONFIG 235 | 236 | try: 237 | board_dir = build.prepare_zephyr_board_dir(board_name, soc_name, connections, workspace) 238 | if board_dir: 239 | status = Status.GENERATED 240 | else: 241 | return status 242 | except Exception: 243 | print(f"Board {board_name} failed unrecoverably") 244 | return Status.BAD_NAME 245 | 246 | build_ret, build_dir = build.build_zephyr(board_name, app, quiet=True) 247 | if build_ret == 0: 248 | status = Status.BUILT 249 | else: 250 | return status 251 | 252 | prep_ret = simulate.prepare_renode_files(board_name, workspace) 253 | if prep_ret != 0: 254 | return status 255 | 256 | return run_test(board_name, build_dir, config) 257 | 258 | 259 | app = typer.Typer() 260 | 261 | @app.command() 262 | @env.setup_env 263 | def single_soc(soc_name: str, application: Path = Path("demo/blinky-temperature")): 264 | zephyr_base = Path(env.get_var("ZEPHYR_BASE")) 265 | workspace = Path(env.get_workspace()) 266 | 267 | specification = Specification(workspace / "visual-system-designer-resources/components-specification.json") 268 | 269 | for config in configurations: 270 | status = validate(soc_name, config, specification, application, workspace, zephyr_base) 271 | print(f"{config}: {status.name}") 272 | 273 | 274 | @env.setup_env 275 | def validate_socs_list(socs, output_f, application, specification=None): 276 | zephyr_base = Path(env.get_var("ZEPHYR_BASE")) 277 | workspace = Path(env.get_workspace()) 278 | 279 | if not specification: 280 | specification = Specification(workspace / "visual-system-designer-resources/components-specification.json") 281 | 
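    # The results file written below maps each SoC to a per-configuration
    # Status name, e.g. (illustrative shape only, not real results):
    #   {"some_soc": {"soc": "OK", "soc+led": "TIMEOUT", "soc+si7210": "BUILT", "soc+tmp108": "OK"}}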
282 | results = {} 283 | for soc_name in socs: 284 | print(f"{soc_name}") 285 | results[soc_name] = {} 286 | for config in configurations: 287 | status = validate(soc_name, config, specification, application, workspace, zephyr_base) 288 | results[soc_name][config] = status.name 289 | print(f" {config}: {status.name}") 290 | 291 | with open(output_f, "w") as f: 292 | json.dump(results, f, indent=4, sort_keys=True) 293 | print(f"Results saved to {output_f}") 294 | 295 | 296 | @app.command() 297 | def soc_list(socs_list: Path, 298 | output: Path = Path("results.json"), 299 | application: Path = Path("demo/blinky-temperature")): 300 | 301 | with open(socs_list) as f: 302 | socs = f.read().strip().splitlines() 303 | validate_socs_list(socs, output, application) 304 | 305 | 306 | @app.command() 307 | @env.setup_env 308 | def all_socs(output: Path = Path("results.json"), 309 | application: Path = Path("demo/blinky-temperature"), 310 | chunk_total: int = 1, 311 | chunk_id: int = 0, 312 | seed: str = None): 313 | 314 | workspace = Path(env.get_workspace()) 315 | specification = Specification(workspace / "visual-system-designer-resources/components-specification.json") 316 | 317 | all_socs = specification.get_socs() 318 | 319 | if seed: 320 | print(f"Rand seed: {seed}") 321 | random.seed(seed) 322 | random.shuffle(all_socs) 323 | 324 | chunk_len = -(-len(all_socs) // chunk_total) # Rounded up chunk size 325 | chunk_start = chunk_id * chunk_len 326 | chunk_end = chunk_start + chunk_len 327 | 328 | print(f"Chunk size: {chunk_len}") 329 | print(f"Chunk boundaries: {chunk_start}-{chunk_end}") 330 | 331 | socs = all_socs[chunk_start:chunk_end] 332 | validate_socs_list(socs, output, application, specification) 333 | 334 | 335 | @app.command() 336 | def print_results(results: List[Path], output: Path = None): 337 | all_results = {} 338 | for res in results: 339 | with open(res) as f: 340 | all_results.update(json.load(f)) 341 | 342 | if output: 343 | with open(output, "w") as f: 344 | json.dump(all_results, f, indent=4, sort_keys=True) 345 | 346 | configs = sorted(list(all_results.values())[0].keys()) 347 | 348 | table = Texttable(max_width=160) 349 | table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES) 350 | table.header(["id", "soc name"] + configs) 351 | table.set_cols_align(["c", "l"] + ["c"] * len(configs)) 352 | 353 | totals = dict.fromkeys(configs, 0) 354 | for i, (soc, res) in enumerate(sorted(all_results.items(), key=lambda t: t[0].lower())): 355 | 356 | for config in configs: 357 | if res[config] == "OK": 358 | totals[config] += 1 359 | table.add_row([i, soc] + [res[c] for c in configs]) 360 | 361 | print(table.draw()) 362 | 363 | summary_table = Texttable() 364 | summary_table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES) 365 | summary_table.header([""] + configs) 366 | summary_table.set_cols_align(["r"] + ["c"] * len(configs)) 367 | 368 | 369 | def get_percent(count, total): 370 | return f"{count / total * 100:.0f}%" 371 | 372 | 373 | 374 | print("\nSummary of successful targets") 375 | summary_table.add_row(["total"] + [totals[c] for c in configs]) 376 | summary_table.add_row(["percent"] + [get_percent(totals[c], len(all_results)) for c in configs]) 377 | 378 | print(summary_table.draw()) 379 | 380 | print("") 381 | Status.print_legend() 382 | 383 | 384 | @app.command() 385 | def show_changes(prev_results: Path, new_results: Path, fail_on_regression: bool = False): 386 | with open(prev_results) as 
f: 387 | prev_results = json.load(f) 388 | with open(new_results) as f: 389 | new_results = json.load(f) 390 | 391 | new = new_results.keys() - prev_results.keys() 392 | print(f"--- New SoCs ({len(new)}) ---") 393 | for soc in new: 394 | print(f" {soc}:") 395 | for conf, res in new_results[soc].items(): 396 | print(green(f" {conf:>11}: NONE -> {res}")) 397 | print("") 398 | 399 | missing = prev_results.keys() - new_results.keys() 400 | print(f"--- Missing SoCs ({len(missing)}) ---") 401 | for soc in missing: 402 | print(f" {soc}:") 403 | for conf, res in prev_results[soc].items(): 404 | print(red(f" {conf:>11}: {res} -> NONE")) 405 | print("") 406 | 407 | regressions = 0 408 | changes = [] 409 | 410 | for k in prev_results.keys() & new_results.keys(): 411 | res1 = prev_results[k] 412 | res2 = new_results[k] 413 | stats = [] 414 | for c in res1.keys() | res2.keys(): 415 | res1_outcome = Status[res1.get(c, "NONE")] 416 | res2_outcome = Status[res2.get(c, "NONE")] 417 | 418 | if res1_outcome > res2_outcome: 419 | regressions += 1 420 | color = red 421 | elif res1_outcome < res2_outcome: 422 | color = green 423 | else: 424 | continue 425 | stats.append(color(f"{c:>11}: {res1_outcome.name} ({res1_outcome}) -> {res2_outcome.name} ({res2_outcome})")) 426 | 427 | if len(stats): 428 | changes.append((k, stats)) 429 | 430 | print(f"--- Changes in individual SoCs ({len(changes)}) ---") 431 | for soc, stats in changes: 432 | print(f" {soc}:") 433 | for stat in stats: 434 | print(" ", stat) 435 | 436 | print("") 437 | Status.print_legend() 438 | 439 | 440 | if regressions > 0 and fail_on_regression: 441 | exit(1) 442 | 443 | 444 | if __name__ == "__main__": 445 | app() 446 | -------------------------------------------------------------------------------- /vsd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/antmicro/visual-system-designer-app/fd9fdca3ade50725a3ce11e32a5e6aac1de927c8/vsd/__init__.py -------------------------------------------------------------------------------- /vsd/__main__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2023-2024 Antmicro 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | import logging 7 | import os 8 | import typer 9 | 10 | from typing import Optional 11 | 12 | from vsd.build import prepare_zephyr_app 13 | from vsd.backend import start_vsd_app 14 | from vsd.simulate import simulate 15 | from vsd.init import init, vsd_update_workspace, vsd_workspace_info 16 | 17 | app = typer.Typer(no_args_is_help=True, add_completion=False) 18 | 19 | app.command()(init) 20 | 21 | app.command()(prepare_zephyr_app) 22 | 23 | app.command()(simulate) 24 | 25 | app.command("run")(start_vsd_app) 26 | 27 | app.command("info")(vsd_workspace_info) 28 | 29 | app.command("update")(vsd_update_workspace) 30 | 31 | 32 | @app.callback() 33 | def set_logging(log_level: Optional[str] = None): 34 | if not log_level: 35 | log_level = os.environ.get('LOGLEVEL', 'INFO').upper() 36 | logging.basicConfig(level=log_level, format="%(levelname)s:VSD: %(message)s") 37 | 38 | 39 | def main(): 40 | logging.addLevelName(logging.INFO, "\033[1;34m%s\033[1;0m" % logging.getLevelName(logging.INFO)) 41 | logging.addLevelName(logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)) 42 | logging.addLevelName(logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)) 43 | 44 | app() 45 | 46 | 47 | if __name__ == 
"__main__": 48 | main() 49 | -------------------------------------------------------------------------------- /vsd/backend.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Antmicro 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | import asyncio 6 | import atexit 7 | import functools 8 | import json 9 | import logging 10 | import os 11 | import re 12 | import signal 13 | import sys 14 | 15 | from datetime import datetime 16 | from importlib.resources import files 17 | from itertools import chain 18 | from multiprocessing import Process 19 | from pathlib import Path 20 | from time import sleep 21 | from typing import Dict, List, Optional 22 | 23 | from pipeline_manager_backend_communication.communication_backend import CommunicationBackend 24 | from pipeline_manager_backend_communication.misc_structures import MessageType 25 | from pipeline_manager_backend_communication.utils import ( 26 | convert_message_to_string, 27 | ) 28 | from pipeline_manager.scripts.run import script_run as pm_main 29 | 30 | from vsd import build 31 | from vsd import env 32 | from vsd import simulate 33 | from vsd.specification import Specification 34 | from vsd.graph import Graph 35 | from vsd.generate import generate_app 36 | 37 | 38 | class RPCMethods: 39 | def __init__(self, vsd_client: VSDClient): 40 | self.vsd_client = vsd_client 41 | 42 | def specification_get(self) -> Dict: 43 | return self.vsd_client.handle_specification_get() 44 | 45 | def app_capabilities_get(self) -> Dict: 46 | return { 47 | "stoppable_methods": ["dataflow_run", "custom_build"], 48 | } 49 | 50 | async def custom_build(self, dataflow: Dict) -> Dict: 51 | await self.vsd_client.send_progress("custom_build", -1) 52 | return await self.vsd_client.handle_build(dataflow) 53 | 54 | def dataflow_import( 55 | self, 56 | external_application_dataflow: str, 57 | mime: str, 58 | base64: bool, 59 | ) -> Dict: 60 | # XXX: Just copy the imported dataflow, because it uses the same format 61 | # as expected by the frontend. 
62 | dataflow = convert_message_to_string( 63 | message=external_application_dataflow, 64 | mime=mime, 65 | base64=base64, 66 | ) 67 | dataflow = json.loads(dataflow) 68 | self.vsd_client.clean_dataflow_data() 69 | return self.vsd_client._ok(dataflow) 70 | 71 | def dataflow_export(self, dataflow: Dict) -> Dict: 72 | return self.vsd_client.save_graph(dataflow) 73 | 74 | async def dataflow_run(self, dataflow: Dict) -> Dict: 75 | await self.vsd_client.send_progress("dataflow_run", -1) 76 | return await self.vsd_client.handle_run(dataflow) 77 | 78 | def dataflow_stop(self, method: str) -> Dict: 79 | match method: 80 | case "dataflow_run": 81 | self.vsd_client.stop_simulation_event.set() 82 | case "custom_build": 83 | self.vsd_client.stop_build_event.set() 84 | case _: 85 | logging.warning(f"Unrecognized method: {method}") 86 | return self.vsd_client._ok("Stopped.") 87 | 88 | def nodes_on_change(self, graph_id, nodes): 89 | self.vsd_client.last_graph_change = datetime.now() 90 | logging.debug(f"Last change: {self.vsd_client.last_graph_change}") 91 | 92 | def properties_on_change(self, graph_id, node_id, properties): 93 | def is_ignored(prop_id): 94 | return prop_id in self.vsd_client.ignored_property_changes 95 | 96 | all_ignored = True 97 | for prop in properties: 98 | id = (graph_id, node_id, prop['id']) 99 | if not is_ignored(id): 100 | all_ignored = False 101 | 102 | if cb := self.vsd_client.prop_change_callback.get(id): 103 | cb(prop["new_value"]) 104 | 105 | if all_ignored: 106 | logging.debug(f"Changes of {node_id} properties ignored") 107 | return 108 | 109 | self.vsd_client.last_graph_change = datetime.now() 110 | logging.debug(f"Last change: {self.vsd_client.last_graph_change}") 111 | 112 | def connections_on_change(self, graph_id, connections): 113 | self.vsd_client.last_graph_change = datetime.now() 114 | logging.debug(f"Last change: {self.vsd_client.last_graph_change}") 115 | 116 | def graph_on_change(self, dataflow): 117 | self.vsd_client.last_graph_change = datetime.now() 118 | logging.debug(f"Last change: {self.vsd_client.last_graph_change}") 119 | 120 | # XXX: The metadata_on_change and position_on_change events don't have to 121 | # be recorded. They aren't triggered by actions that modify the graph. 
122 | 123 | def metadata_on_change(self, metadata): 124 | pass 125 | 126 | def position_on_change(self, graph_id, node_id, position): 127 | pass 128 | 129 | async def terminal_read(self, name, message): 130 | await self.vsd_client.uart_write(name, message) 131 | 132 | 133 | class VSDLogHandler(logging.Handler): 134 | def __init__(self, vsd_client: VSDClient): 135 | super().__init__() 136 | self.formatter = logging.Formatter(fmt='%(levelname)s: %(message)s\n') 137 | self.vsd_client = vsd_client 138 | 139 | def filter(self, record): 140 | return record.module != 'dts2repl' 141 | 142 | def emit(self, record): 143 | if self.vsd_client._client.connected: 144 | msg = self.formatter.format(record) 145 | self.vsd_client.terminal_write_sync("backend-logs", msg) 146 | 147 | 148 | class VSDClient: 149 | def __init__(self, host, port, workspace, app_path, app_type, spec_mods): 150 | self.specification = Specification(workspace / "visual-system-designer-resources/components-specification.json") 151 | self.workspace = workspace 152 | self.stop_simulation_event = asyncio.Event() 153 | self.stop_build_event = asyncio.Event() 154 | self._client = CommunicationBackend(host, port) 155 | self.terminal_uarts = {} 156 | 157 | self.app_path = app_path 158 | self.app_generate = app_type == "template" 159 | 160 | self.ignored_property_changes = [] 161 | self.prop_change_callback = {} 162 | self.last_graph_change = datetime.now() 163 | 164 | for mod in spec_mods: 165 | self.specification.modify(mod) 166 | 167 | def clean_dataflow_data(self): 168 | self.ignored_property_changes = [] 169 | self.prop_change_callback = {} 170 | 171 | async def start_listening(self): 172 | await self._client.initialize_client(RPCMethods(self)) 173 | logging.info("Start listening for messages from pipeline manager") 174 | logging.getLogger().addHandler(VSDLogHandler(self)) 175 | await self._client.start_json_rpc_client() 176 | 177 | async def uart_write(self, term_name, chars): 178 | uart = self.terminal_uarts.get(term_name) 179 | if not uart: 180 | logging.warning(f"Uart not found for terminal {term_name}") 181 | return 182 | 183 | for b in bytes(chars, "utf-8"): 184 | uart.WriteChar(b) 185 | 186 | def _error(self, msg): 187 | return { 188 | 'type': MessageType.ERROR.value, 189 | 'content': msg 190 | } 191 | 192 | async def _notify(self, typ, title, details=""): 193 | await self._client.request( 194 | 'notification_send', 195 | { 196 | "type": typ, 197 | "title": title, 198 | "details": details, 199 | }, 200 | ) 201 | 202 | def terminal_write_sync(self, term_name, msg): 203 | asyncio.run_coroutine_threadsafe( 204 | self.terminal_write(term_name, msg), 205 | self._client.loop 206 | ) 207 | 208 | def terminal_add_sync(self, term_name, readonly): 209 | asyncio.run_coroutine_threadsafe( 210 | self.terminal_add(term_name, readonly), 211 | self._client.loop 212 | ) 213 | 214 | async def terminal_write(self, term_name, msg): 215 | request = { 216 | "name": term_name, 217 | "message": msg.replace("\n", "\r\n"), 218 | } 219 | await self._client.request("terminal_write", request), 220 | 221 | async def terminal_add(self, term_name, readonly): 222 | request = { 223 | "name": term_name, 224 | "readonly": readonly, 225 | } 226 | await self._client.request("terminal_add", request), 227 | 228 | async def send_progress(self, method: str, progress: int): 229 | await self._client.request( 230 | "progress_change", 231 | { 232 | "method": method, 233 | "progress": progress, 234 | }, 235 | ) 236 | 237 | def _ok(self, msg): 238 | return { 239 | 'type': 
MessageType.OK.value, 240 | 'content': msg 241 | } 242 | 243 | def handle_specification_get(self): 244 | return self._ok(self.specification.spec_json) 245 | 246 | def create_led_callback(self, graph_id, led): 247 | def led_callback(_, state): 248 | logging.debug(f"LED {led.label} state changed to {str(state)}") 249 | 250 | node_id = led.id 251 | if not (graph_id and node_id): 252 | return 253 | request = { 254 | 'graph_id': graph_id, 255 | 'node_id': node_id, 256 | 'properties': [{ 257 | 'name': 'active', 258 | 'new_value': state, 259 | }], 260 | } 261 | logging.debug(f'Request {request}') 262 | response = asyncio.run_coroutine_threadsafe( 263 | self._client.request('properties_change', request), 264 | self._client.loop 265 | ) 266 | logging.debug(f'Response {response}') 267 | 268 | return led_callback 269 | 270 | def create_terminal_callback(self, term_name): 271 | decoder = simulate.UTF8Decoder() 272 | 273 | self.terminal_add_sync(term_name, False) 274 | 275 | @decoder.wrap_callback 276 | def terminal_callback(char): 277 | self.terminal_write_sync(term_name, char) 278 | 279 | return terminal_callback 280 | 281 | async def _prepare_binaries(self, graph): 282 | """ 283 | Check if the application binaries are ready. Build the application if 284 | binaries are outdated or not found. 285 | 286 | Returns tuple of following values or None if failed: 287 | board_name: str 288 | binaries: Dict 289 | """ 290 | board_name = re.sub(r'[\s\-+]', '_', graph.name) 291 | build_dir = self.workspace / 'builds' / board_name 292 | 293 | def up_to_date(path): 294 | if not path.exists(): 295 | return False 296 | ts = os.path.getmtime(str(path.absolute())) 297 | 298 | logging.debug(f"Last graph change: {self.last_graph_change}") 299 | logging.debug(f"File mtime: {datetime.fromtimestamp(ts)}") 300 | return datetime.fromtimestamp(ts) > self.last_graph_change 301 | 302 | expect_binaries = { 303 | "repl": build_dir / f"{board_name}.repl", 304 | "elf": build_dir / "zephyr/zephyr.elf", 305 | "dts": build_dir / "zephyr/zephyr.dts", 306 | } 307 | 308 | # If these files are up to date we can use them. 309 | if all(map(up_to_date, expect_binaries.values())): 310 | return board_name, expect_binaries 311 | 312 | # If they are outdated, the application must be rebuilt. 
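        # (The rebuild below regenerates the board dir and reruns west through
        # build.build_zephyr_async(); see _build() for the exact steps.)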
313 | ret = await self._build(graph, self.stop_build_event) 314 | if not ret: 315 | return None 316 | 317 | def must_exist(path): 318 | if path.exists(): 319 | return True 320 | else: 321 | logging.error(f"The {path.name} hasn't been built.") 322 | return False 323 | 324 | if all(map(must_exist, expect_binaries.values())): 325 | return board_name, expect_binaries 326 | 327 | return None 328 | 329 | async def handle_run(self, graph_json): 330 | graph = Graph(graph_json, self.specification) 331 | 332 | prepare_ret = await self._prepare_binaries(graph) 333 | if not prepare_ret: 334 | return self._error("Simulation failed") 335 | 336 | # Unpack the values returned from _prepare_binaries 337 | board_name, binaries = prepare_ret 338 | 339 | try: 340 | emu, machine = simulate.prepare_simulation(board_name, binaries['elf'], binaries['repl']) 341 | except Exception as e: 342 | logging.error(f"Simulation can't be prepared using {binaries['repl']} and {binaries['elf']}:\n\t{e}") 343 | return self._error("Simulation failed.") 344 | 345 | for uart, uart_name in simulate.get_all_uarts(machine): 346 | simulate.register_uart_callback( 347 | uart, 348 | self.create_terminal_callback(uart_name) 349 | ) 350 | self.terminal_uarts[uart_name] = uart 351 | 352 | # Register leds callbacks 353 | try: 354 | _, connections = graph.get_soc_with_connections() 355 | except KeyError as e: 356 | logging.error(str(e)) 357 | return self._error("Simulation failed.") 358 | 359 | try: 360 | for source, connection, dest in connections: 361 | repl_label = re.sub("_", "", dest.label) 362 | 363 | if connection == 'gpio' and dest.label.startswith("led"): 364 | logging.info(f"Connecting state observer to {dest.label} ({repl_label})") 365 | simulate.register_led_callback( 366 | machine, source, repl_label, 367 | self.create_led_callback(graph.id, dest) 368 | ) 369 | await self._ignore_property(graph.id, dest.id, "active") 370 | 371 | if connection == 'i2c' and "temperature" in dest.properties: 372 | logging.info(f"Creating set temperature callback to {dest.label}") 373 | await self._add_property_callback( 374 | graph.id, dest.id, "temperature", 375 | simulate.create_temperature_callback(machine, source, repl_label), 376 | ) 377 | # Ignore this property, because it shouldn't trigger Zephyr building on change. 
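                    # (Otherwise the property edit would bump last_graph_change
                    # and _prepare_binaries would consider the built binaries
                    # stale, forcing a rebuild on the next run.)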
378 | await self._ignore_property(graph.id, dest.id, "temperature") 379 | 380 | except Exception as e: 381 | logging.error(str(e)) 382 | emu.clear() 383 | return self._error("Simulation failed.") 384 | 385 | logging.info(f"Starting simulation on {board_name}.") 386 | emu.StartAll() 387 | 388 | await self.stop_simulation_event.wait() 389 | emu.clear() 390 | 391 | self.terminal_uarts = {} 392 | self.stop_simulation_event.clear() 393 | 394 | logging.info(f"Simulation on {board_name} ended.") 395 | return self._ok("Simulation finished.") 396 | 397 | def handle_stop(self): 398 | self.stop_simulation_event.set() 399 | return self._ok("Stopping simulation") 400 | 401 | async def handle_build(self, graph_json): 402 | graph = Graph(graph_json, self.specification) 403 | build_ret = await self._build(graph, self.stop_build_event) 404 | if build_ret: 405 | return self._ok("Build succeeded.") 406 | else: 407 | return self._error("Build failed.") 408 | 409 | async def _build(self, graph, stop_event): 410 | prepare_ret = self._prepare_build(graph) 411 | if not prepare_ret: 412 | return False 413 | 414 | # Unpack the values returned from _prepare_build 415 | board_dir, board_name, app_src, command = prepare_ret 416 | 417 | logging.info(f"Zephyr board configuration prepared in: {board_dir}") 418 | logging.info(f"To build this demo manually use the following command:\n\t{command}") 419 | 420 | async def print_fun(msg): 421 | await self.terminal_write('backend-logs', msg) 422 | 423 | build_ret, build_dir = await build.build_zephyr_async( 424 | board_name, 425 | print_fun, 426 | stop_event, 427 | app_src 428 | ) 429 | stop_event.clear() 430 | 431 | if build_ret != 0: 432 | logging.error("Failed to build Zephyr.") 433 | return False 434 | 435 | logging.info(f"Application build files available in {build_dir}") 436 | 437 | ret = simulate.prepare_renode_files(board_name, self.workspace) 438 | if ret != 0: 439 | logging.error("Failed to create files needed by Renode.") 440 | return False 441 | 442 | return True 443 | 444 | def _prepare_build(self, graph): 445 | soc, connections = graph.get_soc_with_connections() 446 | 447 | soc_name = soc.rdp_name 448 | board_name = re.sub(r'[\s\-+]', '_', graph.name) 449 | 450 | board_dir = build.prepare_zephyr_board_dir(board_name, soc_name, connections, self.workspace) 451 | if not board_dir: 452 | return None 453 | 454 | if self.app_generate: 455 | app_src = generate_app(self.app_path, board_name, connections, self.workspace) 456 | else: 457 | app_src = self.app_path 458 | 459 | command = build.compose_west_command(board_name, app_src, "", self.workspace) 460 | return board_dir, board_name, app_src, command 461 | 462 | def save_graph(self, graph_json): 463 | graph = Graph(graph_json, self.specification) 464 | 465 | dest_file = self.workspace / 'save' / f"{graph.name}.json" 466 | os.makedirs(dest_file.parent, exist_ok=True) 467 | with open(dest_file, 'w') as f: 468 | json.dump(graph_json, f) 469 | 470 | return self._ok(f"Graph saved in {dest_file}") 471 | 472 | async def _ignore_property(self, graph_id, node_id, prop_name): 473 | """ 474 | Save the information needed to recognize ignored properties. 475 | The property is uniquely identified using graph id, node id and 476 | property id. 
477 | """ 478 | resp = await self._client.request( 479 | 'properties_get', 480 | { "graph_id": graph_id, "node_id": node_id } 481 | ) 482 | for prop in resp['result']: 483 | if prop["name"] == prop_name: 484 | self.ignored_property_changes.append( 485 | (graph_id, node_id, prop["id"]) 486 | ) 487 | logging.debug("Ignoring: {} {}".format(node_id, prop["id"])) 488 | break 489 | 490 | async def _add_property_callback(self, graph_id, node_id, prop_name, callback): 491 | resp = await self._client.request( 492 | 'properties_get', 493 | { "graph_id": graph_id, "node_id": node_id } 494 | ) 495 | 496 | for prop in resp['result']: 497 | if prop["name"] == prop_name: 498 | # Set the callback for found property 499 | self.prop_change_callback[(graph_id, node_id, prop["id"])] = callback 500 | # Set the initial value read from the graph 501 | callback(prop["value"]) 502 | logging.debug(f"Set callback for change in: {node_id} {prop['id']}") 503 | break 504 | 505 | 506 | async def shutdown(loop): 507 | tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()] 508 | [task.cancel() for task in tasks] 509 | 510 | print(f"Cancelling {len(tasks)} VSDClient tasks") 511 | await asyncio.gather(*tasks) 512 | loop.stop() 513 | 514 | 515 | def start_vsd_backend(host, port, workspace, app_path, app_type, spec_mods): 516 | """ 517 | Initializes the client and runs its asyncio event loop until it is interrupted. 518 | Doesn't return, if signal is caught whole process exits. 519 | """ 520 | spec_mods = (json.load(open(p)) for p in chain([files('vsd.spec_mods').joinpath('interactive.json')], spec_mods)) 521 | client = VSDClient(host, port, workspace, app_path, app_type, spec_mods) 522 | 523 | loop = asyncio.get_event_loop() 524 | 525 | loop.add_signal_handler( 526 | signal.SIGINT, 527 | functools.partial(asyncio.create_task, shutdown(loop)) 528 | ) 529 | loop.run_until_complete(client.start_listening()) 530 | 531 | # After loop has ended, exit because there is no work to do. 532 | sys.exit(0) 533 | 534 | 535 | @env.setup_env 536 | def start_vsd_app(app: Path = None, 537 | app_template: str = None, 538 | website_host: str = "127.0.0.1", 539 | website_port: int = 9000, 540 | vsd_backend_host: str = "127.0.0.1", 541 | vsd_backend_port: int = 5000, 542 | spec_mod: Optional[List[Path]] = None, 543 | verbosity: str = "INFO"): 544 | """ 545 | Start VSD application. 546 | 547 | The website with gui for VSD app will be hosted on port specified with 548 | --website-port. 549 | 550 | The app may also be used as a backedn for the VSD editor hosted remotely. 551 | It should connect automatically with the default settings specified with 552 | --vsd-backend-host and --vsd-backend-port. 553 | """ 554 | 555 | logging.basicConfig(level=verbosity, format="%(levelname)s:VSD backend:\t%(message)s") 556 | 557 | try: 558 | app_path, app_type = build.determine_app_type(app, app_template) 559 | except build.InitError as e: 560 | logging.error(e) 561 | sys.exit(1) 562 | 563 | workspace = Path(env.get_workspace()) 564 | frontend_dir = workspace / ".pipeline_manager/frontend" 565 | app_workspace = workspace / ".pipeline_manager/workspace" 566 | pm_args = ( 567 | "pipeline_manager", # The first argument must be a program name. 
568 | "--frontend-directory", str(frontend_dir), 569 | '--workspace-directory', str(app_workspace), 570 | "--backend-host", website_host, 571 | "--backend-port", str(website_port), 572 | "--tcp-server-host", vsd_backend_host, 573 | "--tcp-server-port", str(vsd_backend_port), 574 | "--verbosity", "INFO", 575 | ) 576 | pm_proc = Process(target=pm_main, args=[pm_args]) 577 | pm_proc.start() 578 | 579 | def wait_for_pm(): 580 | pm_proc.join() 581 | logging.info("Pipeline manager server closed. Exiting...") 582 | 583 | atexit.register(wait_for_pm) 584 | sleep(0.5) 585 | 586 | # NOTE: This function won't return. 587 | start_vsd_backend(vsd_backend_host, vsd_backend_port, workspace, app_path, app_type, spec_mod) 588 | -------------------------------------------------------------------------------- /vsd/build.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Antmicro 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import asyncio 5 | import contextlib 6 | import json 7 | import logging 8 | import os 9 | import re 10 | import shutil 11 | import sys 12 | import yaml 13 | 14 | from importlib.resources import files 15 | from pathlib import Path 16 | from typing import List, Optional, Tuple 17 | 18 | from vsd import env 19 | from vsd.generate import generate_app 20 | from vsd.graph import Graph 21 | from vsd.specification import Specification 22 | from vsd.utils import filter_nodes, find_chosen 23 | 24 | 25 | supported_sensors = { 26 | 'bosch_bme280': 'thermometer', 27 | 'sensirion_sht4x': 'thermometer', 28 | 'silabs_si7210': 'thermometer', 29 | 'ti_tmp108': 'thermometer', 30 | } 31 | 32 | 33 | class InitError(Exception): 34 | pass 35 | 36 | 37 | def _prep_kconfig_board(board_name, configs): 38 | content = "" 39 | content += f"config BOARD_{board_name.upper()}\n" 40 | content += f"\tbool \"{board_name}\"\n" 41 | if "select" in configs: 42 | for flag in configs["select"]: 43 | content += f"\tselect {flag}\n" 44 | soc_name = configs["board_socs"][0]["name"] 45 | content += f"\tselect SOC_{soc_name.upper()}" 46 | return content 47 | 48 | 49 | def _prep_defconfig(configs, zephyr_dir): 50 | with open(zephyr_dir / configs["defconfig_file"]) as f: 51 | defconfig = f.read() 52 | 53 | if "remove_defconfig_flags" in configs: 54 | for flag in configs["remove_defconfig_flags"]: 55 | defconfig = re.sub(f'{flag}=y\n', '', defconfig) 56 | 57 | if "add_defconfig_flags" in configs: 58 | defconfig += "\n".join(configs["add_defconfig_flags"]) + "\n" 59 | 60 | return defconfig 61 | 62 | 63 | def _enable_iface(interface): 64 | snippet = f"&{interface} " + "{\n" 65 | snippet += '\tstatus = "okay";\n' 66 | snippet += "};\n" 67 | return snippet 68 | 69 | 70 | def _prep_leds(leds): 71 | snippet = "#include \n" 72 | snippet += "/ {\n" 73 | snippet += "\tleds {\n" 74 | snippet += '\t\tcompatible = "gpio-leds";\n' 75 | 76 | used_interfaces = set() 77 | 78 | for i, conn in enumerate(leds): 79 | soc_if, _, node = conn 80 | name = node.name if node.name else "LED" 81 | addr = node.get_node_interface_address('gpio') 82 | if addr == None: 83 | logging.warning(f"Can't find address for node {node.name}. 
Skipping node.") 84 | continue 85 | label = node.label 86 | snippet += f"\t\t{label}: led_{i} {{\n" 87 | snippet += f"\t\t\tgpios = <&{soc_if} {addr} GPIO_ACTIVE_HIGH>;\n" 88 | snippet += f'\t\t\tlabel = "{name}";\n' 89 | snippet += "\t\t};\n" 90 | used_interfaces.add(soc_if) 91 | 92 | snippet += "\t};\n" 93 | snippet += "};\n" 94 | 95 | for interface in used_interfaces: 96 | snippet += _enable_iface(interface) 97 | 98 | return snippet 99 | 100 | 101 | def _create_connection_snippet(name, label, addr, compats, soc_if, sensor_type): 102 | address = f"@{addr:x}" if addr else "" 103 | 104 | snippet = f"&{soc_if} " + "{\n" 105 | snippet += '\tstatus = "okay";\n' 106 | snippet += f"\t{label}: {name}" + address + " {\n" 107 | 108 | if compats: 109 | snippet += f'\t\tcompatible = {compats};\n' 110 | 111 | if addr: 112 | snippet += f"\t\treg = <{addr:#x}>;\n" 113 | 114 | if sensor_type: 115 | snippet += f'\t\tfriendly-name = "{sensor_type}";\n' 116 | 117 | # HACK: this should be done somewhere else, but now we don't have a place 118 | # here to make such tweaks. This property is required because it's 119 | # specified in Zephyr dts bindings. 120 | if compats == '"sensirion,sht4x"': 121 | snippet += "repeatability = <2>;" 122 | 123 | snippet += '\t\tstatus = "okay";\n' 124 | snippet += "\t};\n" 125 | snippet += "};\n" 126 | return snippet 127 | 128 | 129 | def _prep_sensors(sensors): 130 | snippet = "" 131 | for (i, (soc_if, node_if, sensor)) in enumerate(sensors): 132 | name = sensor.rdp_name if sensor.rdp_name else sensor.name 133 | compats = sensor.get_compats() 134 | addr = sensor.get_node_interface_address(node_if) 135 | label = sensor.label 136 | 137 | if not addr: 138 | logging.warning(f"Can't find address for node {sensor.name}. Inserting without address.") 139 | 140 | snippet += _create_connection_snippet(name, label, addr, compats, soc_if, supported_sensors[sensor.rdp_name]) 141 | 142 | return snippet 143 | 144 | 145 | def _prep_board_yaml(board_name, configs): 146 | return { 147 | "board": { 148 | "name": board_name, 149 | "vendor": configs["vendor"], 150 | "socs": configs["board_socs"] 151 | } 152 | } 153 | 154 | def _adjust_chosen(board_dts, soc_dts): 155 | default_shell_uart = ( 156 | find_chosen("zephyr,shell-uart", board_dts) or 157 | find_chosen("zephyr,shell-uart", soc_dts) 158 | ) 159 | 160 | default_console = ( 161 | find_chosen("zephyr,console", board_dts) or 162 | find_chosen("zephyr,console", soc_dts) 163 | ) 164 | 165 | if not default_shell_uart: 166 | if default_console: 167 | default_shell_uart = default_console 168 | 169 | snippet = '/ {\n' 170 | snippet += '\tchosen {\n' 171 | 172 | if default_shell_uart: 173 | snippet += f'\t\tzephyr,shell-uart = &{default_shell_uart};\n' 174 | 175 | if default_console: 176 | snippet += f'\t\tzephyr,shell-uart = &{default_console};\n' 177 | 178 | snippet += '\t};\n' 179 | snippet += '};\n' 180 | 181 | return snippet 182 | 183 | def determine_app_type(app: Path, app_template: str) -> Tuple[Path, str]: 184 | """ 185 | Determine if we are going to build the app from sources, or generate it's sources first. 186 | Returns path to the directory with app sources, and string which determines if it is a 187 | template application or full application sources. 
188 | """ 189 | if not any([app, app_template]) or all([app, app_template]): 190 | raise InitError("Exactly one of --app or --app-template must be specified") 191 | 192 | if app_template: 193 | templates_dir = files('vsd.templates').joinpath("") 194 | default_path = templates_dir / "apps" / app_template 195 | if default_path.exists(): 196 | return default_path, "template" 197 | 198 | app_template = Path(app_template) 199 | if app_template.exists(): 200 | return app_template, "template" 201 | 202 | raise InitError(f"Can't find app template {app_template}") 203 | 204 | if not app.exists(): 205 | raise InitError(f"Can't find app sources {app}") 206 | 207 | return app, "sources" 208 | 209 | 210 | def prepare_zephyr_board_dir(board_name, soc_name, connections, workspace): 211 | zephyr_base = Path(env.get_var('ZEPHYR_BASE')) 212 | socs_dir = workspace / "visual-system-designer-resources/zephyr-data/socs" 213 | 214 | soc_dir = socs_dir / soc_name 215 | with open(soc_dir / "configs.yaml") as f: 216 | configs = yaml.safe_load(f) 217 | 218 | board_dir = workspace / "boards" / board_name 219 | 220 | # Remove old directory for board of the same name 221 | if board_dir.exists(): 222 | shutil.rmtree(board_dir) 223 | 224 | os.makedirs(board_dir) 225 | 226 | # XXX: This is the place to implement adding things to devicetree and configs 227 | # after reading configuration from the graph. Although, the application 228 | # specific configuration should not be added here but to the app config 229 | # and overlay. 230 | 231 | with open(board_dir / f"Kconfig.{board_name}", "w") as f: 232 | f.write(_prep_kconfig_board(board_name, configs)) 233 | 234 | with open(board_dir / "board.yml", "w") as f: 235 | yaml.dump(_prep_board_yaml(board_name, configs), f, indent=2) 236 | 237 | with open(board_dir / f"{board_name}_defconfig", "w") as f: 238 | f.write(_prep_defconfig(configs, zephyr_base)) 239 | 240 | shutil.copyfile(soc_dir / f"{soc_name}.dts", board_dir / f"{board_name}.dts") 241 | if (soc_dir / 'overlay.dts').exists(): 242 | with open(board_dir / f"{board_name}.dts", "a") as output: 243 | output.write("\n\n// overlay\n\n") 244 | with open(soc_dir / 'overlay.dts') as input: 245 | shutil.copyfileobj(input, output) 246 | 247 | leds, connections = filter_nodes( 248 | connections, 249 | lambda if_name, if_type, node: node.category.startswith("IO/LED") 250 | ) 251 | 252 | sensors, connections = filter_nodes( 253 | connections, 254 | lambda if_name, if_type, node: node.rdp_name in supported_sensors 255 | ) 256 | 257 | if len(connections) > 0: 258 | logging.warning(f"There are {len(connections)} connections that are currently not supported!") 259 | for soc_if, node_if, component in connections: 260 | logging.warning(f" - {component.name} ({component.rdp_name}): {node_if} -> {soc_if}") 261 | 262 | output.write("\n\n// nodes from graph\n\n") 263 | 264 | output.write(_prep_leds(leds)) 265 | output.write(_prep_sensors(sensors)) 266 | 267 | output.write(_adjust_chosen(board_dir / f"{board_name}.dts", soc_dir / "overlay.dts")) 268 | 269 | if "additional_files" in configs: 270 | for file in configs["additional_files"]: 271 | if (zephyr_base / file).exists(): 272 | shutil.copy2(zephyr_base / file, board_dir) 273 | 274 | return board_dir 275 | 276 | 277 | def _copy_build_images(board_name, build_dir, dst_dir): 278 | # Remove builds directory to discard old build artifacts 279 | if dst_dir.exists(): 280 | shutil.rmtree(dst_dir) 281 | 282 | os.makedirs(dst_dir) 283 | copy_files = [ 284 | (build_dir / "zephyr/zephyr.dts", dst_dir / 
"zephyr/zephyr.dts"), 285 | (build_dir / "zephyr/zephyr.elf", dst_dir / "zephyr/zephyr.elf"), 286 | (build_dir / "zephyr/.config", dst_dir / "zephyr/.config"), 287 | (build_dir / "build.log", dst_dir / "build.log"), 288 | ] 289 | for src, dest in copy_files: 290 | if src.exists(): 291 | os.makedirs(dest.parent, exist_ok=True) 292 | shutil.copy(src, dest) 293 | 294 | 295 | def compose_west_command(board_name, app_path, build_dir, boards_dir): 296 | cmd = "west build -p" 297 | cmd += f" -b {board_name}" 298 | cmd += f" --build-dir {build_dir}" 299 | cmd += f" {app_path}" 300 | cmd += " --" 301 | cmd += f" -DBOARD_ROOT={boards_dir.absolute()}" 302 | return cmd 303 | 304 | 305 | def ask_when_app_exists(app): 306 | if app.exists(): 307 | print( 308 | f"The {app} directory already exists and VSD will override it's contents. " 309 | "If you want to override the existing directory specify --force argument next time." 310 | ) 311 | 312 | choice = input("Do you want to override the contents of it now? (Y/n) ") 313 | if choice.lower() not in ["y", "yes"]: 314 | return False 315 | 316 | return True 317 | 318 | 319 | @env.setup_env 320 | def prepare_zephyr_app(graph_file: Path, 321 | app: Path, 322 | from_template: Path = None, 323 | spec_mod: Optional[List[Path]] = None, 324 | force: bool = False): 325 | """ 326 | Creates Zephyr application ready to be simulated: 327 | 1. Prepares board dir for generated board 328 | 2. Generate application directory in path specified with app argument 329 | (when --from-template argument is specified) 330 | 3. Build Zephyr application 331 | """ 332 | workspace = Path(env.get_workspace()) 333 | with open(graph_file) as f: 334 | graph_json = json.load(f) 335 | 336 | specification = Specification(workspace / "visual-system-designer-resources/components-specification.json") 337 | 338 | if spec_mod: 339 | for mod in spec_mod: 340 | specification.modify(json.load(open(mod))) 341 | 342 | graph = Graph(graph_json, specification) 343 | 344 | try: 345 | soc, soc_connections = graph.get_soc_with_connections() 346 | except KeyError as e: 347 | logging.error(str(e)) 348 | sys.exit(1) 349 | 350 | soc_name = soc.rdp_name 351 | board_name = re.sub(r'[\s\-+]', '_', graph.name) 352 | 353 | board_dir = prepare_zephyr_board_dir(board_name, soc_name, soc_connections, workspace) 354 | if not board_dir: 355 | sys.exit(1) 356 | 357 | logging.info(f"Prepared board configuration in {board_dir.relative_to(Path.cwd())}") 358 | 359 | # Generate the app when: 360 | # - the template is specified 361 | # - we got --force argument or we asked user if existing directory can be modified 362 | if from_template and (force or ask_when_app_exists(app)): 363 | generate_app(from_template, board_name, soc_connections, workspace, output_dir=app) 364 | 365 | ret, build_dir = build_zephyr(board_name, app) 366 | if ret != 0: 367 | logging.error("Zephyr build failed") 368 | sys.exit(1) 369 | 370 | print(f"Successfully build the {app} application on `{board_name}`.") 371 | 372 | 373 | def build_zephyr(board_name: str, 374 | app_path: Path = Path("demo/blinky-temperature"), 375 | quiet: bool = False): 376 | async def aprint(msg): 377 | print(msg, end='') 378 | 379 | return asyncio.run( 380 | build_zephyr_async( 381 | board_name, 382 | aprint if not quiet else None, 383 | None, 384 | app_path 385 | ) 386 | ) 387 | 388 | 389 | @env.setup_env 390 | async def build_zephyr_async(board_name: str, 391 | print_callback, 392 | kill_event, 393 | app_path: Path = Path("demo/blinky-temperature")): 394 | workspace = 
Path(env.get_workspace()) 395 | build_dir = workspace / 'build' 396 | 397 | # Remove build directory to discard old build files 398 | if build_dir.exists(): 399 | shutil.rmtree(build_dir) 400 | 401 | os.makedirs(build_dir) 402 | command = compose_west_command(board_name, app_path, build_dir, workspace) 403 | 404 | proc = await asyncio.create_subprocess_shell( 405 | command, 406 | stdout=asyncio.subprocess.PIPE, 407 | stderr=asyncio.subprocess.STDOUT 408 | ) 409 | 410 | out = bytearray() 411 | 412 | # XXX: There is no .poll() method in asyncio.subprocess so we have to do it manually. 413 | async def is_running(p): 414 | with contextlib.suppress(asyncio.TimeoutError): 415 | await asyncio.wait_for(p.wait(), 1e-6) 416 | return p.returncode is None 417 | 418 | while await is_running(proc): 419 | if kill_event and kill_event.is_set(): 420 | proc.terminate() 421 | logging.warning("Aborting Zephyr build") 422 | break 423 | 424 | line = await proc.stdout.readline() 425 | out.extend(line) 426 | if print_callback: 427 | await print_callback(line.decode()) 428 | 429 | await proc.wait() 430 | 431 | output_dir = workspace / 'builds' / board_name 432 | _copy_build_images(board_name, build_dir, output_dir) 433 | 434 | with open(output_dir / "build.log", "wb") as f: 435 | f.write(out) 436 | 437 | logging.info(f"Build files saved in {output_dir}") 438 | return proc.returncode, output_dir 439 | -------------------------------------------------------------------------------- /vsd/env.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Antmicro 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import os 6 | import sys 7 | import yaml 8 | 9 | from functools import wraps 10 | from pathlib import Path 11 | 12 | 13 | # Global VSD environment 14 | __vsd_workspace = None 15 | __vsd_env = None 16 | 17 | 18 | def setup_vsd_env(): 19 | """ 20 | Setup environment for the VSD app from an initialized VSD workspace. 21 | 22 | Ensures that we know where the VSD_WORKSPACE (default location is cwd) is 23 | and sets up all env variables needed by commands (with values read from <workspace>/vsd-env.yml). 24 | """ 25 | global __vsd_workspace 26 | global __vsd_env 27 | 28 | if __vsd_workspace: 29 | return 30 | 31 | if "VSD_WORKSPACE" in os.environ: 32 | workspace = Path(os.environ.get("VSD_WORKSPACE")) 33 | else: 34 | workspace = Path(".") 35 | 36 | if not (workspace / "vsd-env.yml").exists(): 37 | logging.error( 38 | f"Can't find {workspace / 'vsd-env.yml'}. Have you initialized the VSD workspace?\n" 39 | "Run `vsd init [workspace dir]` or export VSD_WORKSPACE pointing at an initialized workspace." 40 | ) 41 | sys.exit(1) 42 | 43 | # Set environ variables defined in vsd-env.yml 44 | with open(workspace / "vsd-env.yml") as f: 45 | vars = yaml.safe_load(f) 46 | 47 | os.environ.update(vars) 48 | 49 | __vsd_workspace = workspace 50 | __vsd_env = vars 51 | 52 | 53 | def setup_env(func): 54 | """ 55 | Decorator used to set up the VSD environment before executing a command. 
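    Typical usage (illustrative sketch):

        @setup_env
        def my_command():
            workspace = get_workspace()
            zephyr_base = get_var("ZEPHYR_BASE")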
56 | """ 57 | @wraps(func) 58 | def inner(*args, **kwargs): 59 | if not __vsd_env: 60 | setup_vsd_env() 61 | return func(*args, **kwargs) 62 | return inner 63 | 64 | 65 | def _vsd_env_not_found_err(): 66 | logging.error( 67 | "VSD environment not found.\n" 68 | "Consider calling vsd.env.setup_vsd_env() or decorate your current function with vsd.env.setup_env" 69 | ) 70 | sys.exit(1) 71 | 72 | 73 | def get_workspace(): 74 | if not __vsd_workspace: 75 | _vsd_env_not_found_err() 76 | return __vsd_workspace 77 | 78 | 79 | def get_var(var_name): 80 | if not __vsd_workspace: 81 | _vsd_env_not_found_err() 82 | return __vsd_env.get(var_name) 83 | 84 | 85 | def get_env(): 86 | if not __vsd_workspace: 87 | _vsd_env_not_found_err() 88 | return __vsd_env.copy() 89 | -------------------------------------------------------------------------------- /vsd/generate.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import shutil 4 | 5 | from vsd.utils import filter_nodes 6 | from jinja2 import Environment, FileSystemLoader, select_autoescape 7 | 8 | 9 | supported_sensors = { 10 | 'bosch_bme280': 'thermometer', 11 | 'sensirion_sht4x': 'thermometer', 12 | 'silabs_si7210': 'thermometer', 13 | 'ti_tmp108': 'thermometer', 14 | } 15 | 16 | 17 | def generate_app(app_template_path, board_name, connections, workspace, output_dir=None): 18 | template_env = Environment( 19 | autoescape=select_autoescape(), 20 | line_statement_prefix="//!", 21 | line_comment_prefix="///", 22 | loader=FileSystemLoader(app_template_path), 23 | ) 24 | 25 | # Parse graph and get nodes that will be generated 26 | leds, connections = filter_nodes( 27 | connections, 28 | lambda if_name, if_type, node: node.category.startswith("IO/LED") 29 | ) 30 | thermometers, connections = filter_nodes( 31 | connections, 32 | lambda if_name, if_type, node: node.rdp_name in supported_sensors and supported_sensors[node.rdp_name] == "thermometer" 33 | ) 34 | 35 | app_name = app_template_path.name 36 | generated_dir = output_dir or (workspace / "generated" / f"{board_name}_{app_name}") 37 | logging.info(f"Generating app sources in {generated_dir}") 38 | 39 | if generated_dir.exists(): 40 | logging.info(f"The {generated_dir} directory will be cleaned before generating the application code in it.") 41 | shutil.rmtree(generated_dir) 42 | 43 | os.makedirs(generated_dir) 44 | 45 | context = { 46 | "all_labels": list(map(lambda x: x[2].label, leds)) + list(map(lambda x: x[2].label, thermometers)), 47 | "leds": list(map(lambda x: x[2].label, leds)), 48 | "thermometers": list(map(lambda x: x[2].label, thermometers)), 49 | } 50 | 51 | for file in app_template_path.glob("**/*"): 52 | 53 | rel_path = file.relative_to(app_template_path) 54 | 55 | if file.is_file(): 56 | template = template_env.get_template(str(rel_path)) 57 | with open(generated_dir / rel_path, "w+") as f: 58 | f.write(template.render(context)) 59 | elif file.is_dir(): 60 | os.makedirs(generated_dir / rel_path) 61 | 62 | # Return generated_dir because it is also created when output_dir argument isn't specified. 
63 | return generated_dir 64 | -------------------------------------------------------------------------------- /vsd/graph.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Antmicro 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import re 6 | 7 | 8 | class Node(): 9 | def __init__(self, node, specification): 10 | self._node = node 11 | self._spec = specification.get_node_spec(node['name']) 12 | self._label = None 13 | 14 | raw_props = self._node.get('properties', []) 15 | props = {prop["name"]: prop for prop in raw_props} 16 | self.properties = props 17 | 18 | self.id = node['id'] 19 | self.connections = {} 20 | self.interfaces = {} 21 | 22 | if 'interfaces' not in node: 23 | return 24 | 25 | for interface in node['interfaces']: 26 | self.interfaces[interface['id']] = interface['name'] 27 | self.connections[interface['id']] = [] 28 | 29 | def __repr__(self): 30 | if 'urls' in self._spec and 'rdp' in self._spec['urls']: 31 | rdp = self._spec['urls']['rdp'] 32 | else: 33 | rdp = '' 34 | return f"<Node {self.name} {rdp}>" 35 | 36 | def is_soc(self): 37 | return 'SoC' in self._spec['category'] 38 | 39 | @property 40 | def rdp_name(self): 41 | if 'urls' in self._spec and 'rdp' in self._spec['urls']: 42 | rdp_link = self._spec['urls']['rdp'] 43 | else: 44 | return None 45 | return rdp_link.split("/")[-1] 46 | 47 | @property 48 | def name(self): 49 | if 'name' not in self._node: 50 | return None 51 | return self._node['name'] 52 | 53 | @property 54 | def label(self): 55 | if self._label: 56 | return self._label 57 | 58 | id = self.id.split('-')[-1].lower() 59 | cat = self._spec['category'].split('/')[-1].lower() 60 | 61 | self._label = f"{cat}_{id}" 62 | return self._label 63 | 64 | @property 65 | def category(self): 66 | if 'category' in self._spec: 67 | return self._spec['category'] 68 | return 'Other' 69 | 70 | def get_compats(self): 71 | if 'additionalData' in self._spec and 'compats' in self._spec['additionalData']: 72 | compats = self._spec['additionalData']['compats'] 73 | return ', '.join(f'"{c}"' for c in compats) 74 | return None 75 | 76 | def get_node_interface_address(self, interface): 77 | for prop, data in self.properties.items(): 78 | if f'address ({interface})' in prop: 79 | logging.debug(f"Found address property {prop} in {self.name}: {data['value']}") 80 | try: 81 | value = int(data['value'], base=16) 82 | except ValueError: 83 | logging.error(f"Missing or invalid value for {prop}: '{data['value']}'") 84 | return None 85 | return value 86 | 87 | return None 88 | 89 | 90 | class Graph(): 91 | def __init__(self, graph_json, specification): 92 | self.nodes = {} 93 | self.socs = [] 94 | self.interface_to_node = {} 95 | 96 | for graph_node in graph_json['graph']['nodes']: 97 | node_id = graph_node['id'] 98 | node = Node(graph_node, specification) 99 | self.nodes[node_id] = node 100 | 101 | for id in node.interfaces: 102 | self.interface_to_node[id] = node_id 103 | 104 | if node.is_soc(): 105 | self.socs.append(node_id) 106 | 107 | # Get name from graph or from SoC name 108 | name = graph_json['graph'].get('name') 109 | if not name: 110 | if len(self.socs) > 0: 111 | name = self.nodes[self.socs[0]].name 112 | if not name: 113 | name = "Untitled_graph" 114 | 115 | self.name = re.sub(r"[\s\-+]", "_", name) 116 | self.id = graph_json['graph']['id'] 117 | 118 | for edge in graph_json['graph']['connections']: 119 | id_from = edge['from'] 120 | id_to = edge['to'] 121 | 122 | node_from = 
self.interface_to_node[id_from] 123 | node_to = self.interface_to_node[id_to] 124 | 125 | self.nodes[node_from].connections[id_from].append(id_to) 126 | self.nodes[node_to].connections[id_to].append(id_from) 127 | 128 | def get_soc_with_connections(self): 129 | if len(self.socs) == 0: 130 | raise KeyError("Haven't found any SoC nodes in the graph") 131 | 132 | soc_id = self.socs[0] 133 | soc_node = self.nodes[soc_id] 134 | 135 | if len(self.socs) > 1: 136 | logging.warning(f"Found more than one SoC in the graph. Using {soc_node.name}.") 137 | 138 | connections = [] 139 | for id, neighbors in soc_node.connections.items(): 140 | soc_interface_name = soc_node.interfaces[id] 141 | for n in neighbors: 142 | neighbor_node = self.nodes[self.interface_to_node[n]] 143 | node_interface_name = neighbor_node.interfaces[n] 144 | connections.append((soc_interface_name, node_interface_name, neighbor_node)) 145 | return soc_node, connections 146 | -------------------------------------------------------------------------------- /vsd/init.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2022-2024 Antmicro 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import os 6 | import subprocess 7 | import sys 8 | import typer 9 | import yaml 10 | 11 | from enum import StrEnum 12 | from importlib.resources import files 13 | from pathlib import Path 14 | from typing import Optional, Annotated 15 | from subprocess import CalledProcessError 16 | 17 | from vsd import env 18 | from vsd.utils import git_command, git_commit_sha 19 | 20 | 21 | def search_for_zephyr_base(workspace): 22 | # When zephyr is initialized in workspace, use it. 23 | workspace_zephyr = workspace / "zephyr" 24 | if workspace_zephyr.exists(): 25 | return workspace_zephyr.resolve() 26 | 27 | if "ZEPHYR_BASE" in os.environ: 28 | logging.warning( 29 | f"Detected existing Zephyr workspace because ZEPHYR_BASE is set to {os.environ['ZEPHYR_BASE']}.\n" 30 | "If you don't want to use it unset ZEPHYR_BASE variable." 31 | ) 32 | return Path(os.environ["ZEPHYR_BASE"]) 33 | 34 | # Search for '.west' directory in all directories above. 35 | d = workspace 36 | while d != d.parent: 37 | if d.exists() and ".west" in os.listdir(d): 38 | logging.warning( 39 | f"Detected existing Zephyr workspace because {d}/.west directory exists.\n" 40 | ) 41 | return d / "zephyr" 42 | d = d.parent 43 | return None 44 | 45 | 46 | def get_vsd_resources(workspace): 47 | url = "https://github.com/antmicro/visual-system-designer-resources.git" 48 | dest = workspace / "visual-system-designer-resources" 49 | 50 | if dest.exists(): 51 | logging.info("visual-system-designer-resources repo exists") 52 | return 53 | 54 | logging.info(f"Cloning {url}") 55 | git_command(["clone", "-q", url, dest], error_msg="Cloning VSD resources failed") 56 | 57 | 58 | def check_for_modified_files(repo): 59 | git_command(["diff-files", "--quiet"], repo=repo, error_msg=f"Some files in {repo} are modified. Please stash the changes before updating the repo.") 60 | 61 | def update_vsd_resources(workspace): 62 | resources_repo = workspace / "visual-system-designer-resources" 63 | if not resources_repo.exists(): 64 | get_vsd_resources(workspace) 65 | return 66 | 67 | logging.info("Trying to update visual-system-designer-resources repo. 
") 68 | check_for_modified_files(resources_repo) 69 | git_command(["checkout", "main"], repo=resources_repo, error_msg="Failed to update visual-system-designer-resources repository") 70 | git_command(["pull", "origin", "main"], repo=resources_repo, error_msg="Failed to update visual-system-designer-resources repository") 71 | 72 | 73 | def install_zephyr_requirements(zephyr_base): 74 | zephyr_requirements = str(zephyr_base / "scripts/requirements.txt") 75 | logging.info(f"Installing Zephyr requirements from: {zephyr_requirements}") 76 | try: 77 | subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "-r", zephyr_requirements]) 78 | except CalledProcessError as e: 79 | logging.error(f"Installing Zephyr Python requirements failed. (exitcode: {e.returncode})") 80 | sys.exit(e.returncode) 81 | 82 | 83 | def init_zephyr(workspace): 84 | with open(workspace / "visual-system-designer-resources/zephyr-data/zephyr.version") as f: 85 | zephyr_version = f.read().strip() 86 | 87 | logging.info(f"Initializing Zephyr workspace in {workspace}") 88 | 89 | init_zephyr_sh = files("vsd.scripts") / "init_zephyr.sh" 90 | try: 91 | subprocess.run(["bash", "--", str(init_zephyr_sh), str(workspace), zephyr_version]) 92 | except CalledProcessError as e: 93 | logging.error(f"Zephyr initialization failed. (exitcode: {e.returncode})") 94 | sys.exit(e.returncode) 95 | 96 | return workspace / "zephyr" 97 | 98 | 99 | def update_zephyr(zephyr_dir, zephyr_version): 100 | current_zephyr_version = git_command(["rev-parse", "HEAD"], repo=zephyr_dir, output=True) 101 | 102 | if current_zephyr_version == zephyr_version: 103 | return 104 | 105 | logging.info(f"Updating Zephyr to {zephyr_version}") 106 | check_for_modified_files(zephyr_dir) 107 | git_command(["fetch", "--depth=1", "origin", zephyr_version], repo=zephyr_dir, error_msg="Failed to update Zephyr repository") 108 | git_command(["checkout", "FETCH_HEAD"], repo=zephyr_dir, error_msg="Failed to update Zephyr repository") 109 | 110 | logging.info("Updating west workspace") 111 | try: 112 | subprocess.check_call(["west", "update"], cwd=zephyr_dir) 113 | except CalledProcessError as e: 114 | logging.error(f"Failed to update west workspace. (exitcode: {e.returncode})") 115 | sys.exit(e.returncode) 116 | 117 | 118 | def get_zephyr_sdk(sdk_version): 119 | home = Path(os.environ["HOME"]) 120 | sdk_install_dir = Path(os.environ.get("ZEPHYR_SDK_INSTALL_DIR", home / f"zephyr-sdk-{sdk_version}")) 121 | 122 | def read_sdk_version(dir): 123 | return (dir / "sdk_version").read_text().strip() 124 | 125 | # If we have correct SDK version we don't need to install it again 126 | if sdk_install_dir.exists() and sdk_version == read_sdk_version(sdk_install_dir): 127 | logging.info(f"Found Zephyr SDK v{sdk_version} in {sdk_install_dir}") 128 | return sdk_install_dir 129 | 130 | # Change install directory to install expected SDK version 131 | if sdk_install_dir.exists(): 132 | sdk_install_dir = sdk_install_dir.parent / f"zephyr-sdk-{sdk_version}" 133 | 134 | logging.info(f"Installing Zephyr SDK v{sdk_version} in {sdk_install_dir}") 135 | os.makedirs(sdk_install_dir, exist_ok=True) 136 | 137 | get_zephyr_sdk_sh = files("vsd.scripts") / "get_zephyr_sdk.sh" 138 | try: 139 | subprocess.check_call(["bash", "--", str(get_zephyr_sdk_sh), str(sdk_version), str(sdk_install_dir)]) 140 | except CalledProcessError as e: 141 | logging.error(f"Installing Zephyr SDK failed. 
(exitcode: {e.returncode})") 142 | sys.exit(e.returncode) 143 | return sdk_install_dir 144 | 145 | 146 | def build_pipeline_manager(workspace): 147 | pipeline_manager_build_cmd = ( 148 | "pipeline_manager", "build", "server-app", 149 | "--editor-title", "Visual System Designer", 150 | "--workspace-directory", workspace / ".pipeline_manager/workspace", 151 | "--output-directory", workspace / ".pipeline_manager/frontend", 152 | "--assets-directory", workspace / "visual-system-designer-resources/assets", 153 | "--favicon-path", workspace / "visual-system-designer-resources/assets/visual-system-designer.svg", 154 | ) 155 | try: 156 | subprocess.check_call(pipeline_manager_build_cmd) 157 | except CalledProcessError as e: 158 | logging.error(f"Pipeline manager frontend build failed. (exitcode: {e.returncode})") 159 | sys.exit(e.returncode) 160 | 161 | 162 | def get_renode_portable(workspace): 163 | # NOTE: When updating the Renode version here, check if dts2repl shouldn't be updated as well. 164 | # dts2repl version is recorded in pyproject.toml. 165 | renode_version = "1.15.0+20240414gitf47548cef" 166 | 167 | portable_dir = workspace / "renode-portable" 168 | renode_portable = portable_dir / "renode" 169 | 170 | if renode_portable.exists(): 171 | return renode_portable 172 | 173 | url = f"https://builds.renode.io/renode-{renode_version}.linux-portable-dotnet.tar.gz" 174 | 175 | logging.info(f"Downloading {url} and extractingn into {portable_dir}") 176 | 177 | # XXX: We prefer to do most of initialization in Python, but this operation is simpler when 178 | # it's done in shell. `tar` command is way easier to use than `tarfile` module in Python. 179 | os.makedirs(portable_dir, exist_ok=True) 180 | subprocess.check_output(f"curl -sL {url} | tar xz --strip=1 -C {portable_dir}", shell=True) 181 | 182 | if not renode_portable.exists(): 183 | logging.error("Renode portable wasn't downloaded.") 184 | sys.exit(1) 185 | 186 | return renode_portable 187 | 188 | 189 | def init(dir: Annotated[Path, typer.Argument()] = ".", 190 | zephyr_base: Optional[Path] = None, 191 | zephyr_sdk: str = "0.16.3"): 192 | """ 193 | Initialize VSD workspace. 194 | """ 195 | if "VSD_WORKSPACE" in os.environ: 196 | env_ws = Path(os.environ.get("VSD_WORKSPACE")).resolve() 197 | if env_ws != dir.resolve(): 198 | logging.error( 199 | f"The VSD workspace is already initialized in {env_ws}.\n" 200 | "If you want to initialize new workspace please unset VSD_WORKSPACE variable." 201 | ) 202 | sys.exit(1) 203 | 204 | workspace = dir.resolve() 205 | zephyr_base = zephyr_base or search_for_zephyr_base(workspace) 206 | 207 | print(f"Init VSD workspace in {workspace}") 208 | os.makedirs(workspace, exist_ok=True) 209 | 210 | get_vsd_resources(workspace) 211 | 212 | # Initialize Zephyr if it wasn't detected 213 | if not zephyr_base: 214 | zephyr_base = init_zephyr(workspace) 215 | else: 216 | logging.warning( 217 | f"Detected Zephyr workspace in {zephyr_base.parent}.\n" 218 | "If you want to specify different location please provide path to initialized Zephyr " 219 | "workspace using `--zephyr-base` option." 
220 |         )
221 | 
222 |     install_zephyr_requirements(zephyr_base)
223 |     build_pipeline_manager(workspace)
224 | 
225 |     zephyr_sdk_install_dir = get_zephyr_sdk(zephyr_sdk)
226 |     renode_portable_path = get_renode_portable(workspace)
227 | 
228 |     # Save paths that will be used later by the vsd app
229 |     vars = {}
230 |     vars["PYRENODE_BIN"] = str(renode_portable_path.resolve())
231 |     vars["PYRENODE_RUNTIME"] = "coreclr"
232 |     vars["ZEPHYR_SDK_INSTALL_DIR"] = str(zephyr_sdk_install_dir.resolve())
233 |     vars["ZEPHYR_BASE"] = str(zephyr_base.resolve())
234 |     with open(workspace / "vsd-env.yml", "w") as f:
235 |         yaml.dump(vars, f)
236 | 
237 |     os.environ["VSD_WORKSPACE"] = str(workspace)
238 |     vsd_workspace_info()
239 | 
240 |     if workspace != Path.cwd():
241 |         logging.warning(
242 |             "VSD workspace initialized in a directory which is not the cwd.\n"
243 |             "To make sure that the proper directory is used as the VSD workspace, please export the following variable:\n"
244 |             f"\texport VSD_WORKSPACE={workspace}"
245 |         )
246 | 
247 | 
248 | @env.setup_env
249 | def vsd_workspace_info():
250 |     """
251 |     Display info about the initialized components of the VSD workspace.
252 |     """
253 |     workspace = Path(env.get_workspace())
254 |     print(f"Workspace: {workspace}")
255 |     print("-----------------------")
256 | 
257 |     max_len = max(len(x) for x in env.get_env().keys())
258 |     for k, v in env.get_env().items():
259 |         print(f"{k:<{max_len}}: {v}")
260 | 
261 |     print("-----------------------")
262 |     print(f"       Zephyr commit: {git_commit_sha(env.get_var('ZEPHYR_BASE'))}")
263 |     print(f"VSD resources commit: {git_commit_sha(workspace / 'visual-system-designer-resources')}")
264 | 
265 |     try:
266 |         renode_version = subprocess.check_output([env.get_var("PYRENODE_BIN"), "--version"])
267 |     except CalledProcessError as e:
268 |         logging.error(f"Failed to get Renode version (exitcode: {e.returncode})")
269 |         sys.exit(e.returncode)
270 |     except Exception as e:
271 |         logging.error(f"Failed to run `renode --version` command: {e}")
272 |         sys.exit(1)
273 | 
274 |     print("-----------------------")
275 |     print(renode_version.decode().strip())
276 | 
277 | 
278 | class UpdateChoices(StrEnum):
279 |     ALL = "all"
280 |     ZEPHYR = "zephyr"
281 |     RESOURCES = "resources"
282 | 
283 | 
284 | @env.setup_env
285 | def vsd_update_workspace(component: Annotated[UpdateChoices, typer.Argument()] = UpdateChoices.ALL,
286 |                          zephyr_base: Optional[Path] = None,
287 |                          zephyr_sdk: str = "0.16.3"):
288 |     workspace = Path(env.get_workspace())
289 | 
290 |     if component in [UpdateChoices.ALL, UpdateChoices.RESOURCES]:
291 |         update_vsd_resources(workspace)
292 | 
293 |     if component in [UpdateChoices.ALL, UpdateChoices.ZEPHYR]:
294 |         zephyr_version = (workspace / "visual-system-designer-resources/zephyr-data/zephyr.version").read_text().strip()
295 |         zephyr_dir = Path(env.get_var("ZEPHYR_BASE"))
296 |         update_zephyr(zephyr_dir, zephyr_version)
297 | 
--------------------------------------------------------------------------------
/vsd/scripts/get_zephyr_sdk.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Copyright (c) 2022-2024 Antmicro
4 | # SPDX-License-Identifier: Apache-2.0
5 | 
6 | set -e
7 | 
8 | ZEPHYR_SDK_VERSION=$1
9 | ZEPHYR_SDK_INSTALL_DIR=$2
10 | 
11 | if [ "$1" = "" ] || [ "$2" = "" ]; then
12 |     echo "expected 2 arguments" >&2
13 |     echo "usage: ./get_zephyr_sdk.sh <sdk-version> <install-dir>" >&2
14 |     exit 1
15 | fi
16 | 
17 | curl -kLs https://github.com/zephyrproject-rtos/sdk-ng/releases/download/v${ZEPHYR_SDK_VERSION}/zephyr-sdk-${ZEPHYR_SDK_VERSION}_linux-x86_64_minimal.tar.xz | tar xJ --strip 1 -C ${ZEPHYR_SDK_INSTALL_DIR}
18 | cd ${ZEPHYR_SDK_INSTALL_DIR}
19 | ./setup.sh -t all -h -c
20 | 
--------------------------------------------------------------------------------
/vsd/scripts/init_zephyr.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Copyright (c) 2022-2024 Antmicro
4 | # SPDX-License-Identifier: Apache-2.0
5 | 
6 | 
7 | WORKSPACE=$1
8 | EXPECTED_ZEPHYR_VERSION=$2
9 | 
10 | if [ "$1" = "" ] || [ "$2" = "" ]; then
11 |     echo "expected 2 arguments" >&2
12 |     echo "usage: ./init_zephyr.sh <workspace> <zephyr-version>" >&2
13 |     exit 1
14 | fi
15 | 
16 | ZEPHYR_DIR=$WORKSPACE/zephyr
17 | 
18 | if [ ! -d $ZEPHYR_DIR ] ; then
19 |     mkdir -p $ZEPHYR_DIR
20 |     git -C $ZEPHYR_DIR init -q
21 |     git -C $ZEPHYR_DIR remote add origin https://github.com/zephyrproject-rtos/zephyr
22 | fi
23 | 
24 | CURRENT_ZEPHYR_VERSION=$(git -C $ZEPHYR_DIR rev-parse HEAD 2> /dev/null || echo "none")
25 | 
26 | if [ "$CURRENT_ZEPHYR_VERSION" != "$EXPECTED_ZEPHYR_VERSION" ] ; then
27 |     git -C $ZEPHYR_DIR fetch -q --depth 1 origin "$EXPECTED_ZEPHYR_VERSION"
28 |     git -C $ZEPHYR_DIR checkout -q FETCH_HEAD
29 | fi
30 | 
31 | cd $WORKSPACE
32 | if [ ! -d .west ] ; then
33 |     west init -l zephyr
34 | fi
35 | 
36 | # Always update west, because the Zephyr version might have changed.
37 | west update
38 | west zephyr-export
39 | cd -
40 | 
--------------------------------------------------------------------------------
/vsd/simulate.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2023-2024 Antmicro
2 | # SPDX-License-Identifier: Apache-2.0
3 | 
4 | import logging
5 | import sys
6 | 
7 | from dts2repl import dts2repl
8 | from importlib.resources import files
9 | from pathlib import Path
10 | 
11 | from vsd import env
12 | from vsd.utils import find_chosen
13 | 
14 | 
15 | def _prepare_from_template(format, template, dest):
16 |     with open(template) as f:
17 |         template = f.read()
18 |     with open(dest, "w") as f:
19 |         f.write(template.format(**format))
20 | 
21 | 
22 | def _prepare_repl(dts_path, repl_path):
23 |     repl = dts2repl.generate(dts_path)
24 |     if repl == '':
25 |         return False
26 |     with open(repl_path, 'w') as f:
27 |         f.write(repl)
28 |     return True
29 | 
30 | 
31 | def prepare_renode_files(board_name: str, workspace: Path):
32 |     builds_dir = workspace / 'builds' / board_name
33 |     dts_path = builds_dir / "zephyr/zephyr.dts"
34 |     elf_path = builds_dir / "zephyr/zephyr.elf"
35 |     resc_path = builds_dir / f"{board_name}.resc"
36 |     repl_path = builds_dir / f"{board_name}.repl"
37 | 
38 |     format = {
39 |         'board_name': board_name,
40 |         'resc_path': resc_path.absolute(),
41 |         'repl_path': repl_path.absolute(),
42 |         'elf_path': elf_path.absolute(),
43 |     }
44 | 
45 |     zephyr_console = find_chosen("zephyr,console", dts_path)
46 |     if zephyr_console:
47 |         format['console'] = zephyr_console
48 | 
49 |     templates_dir = files("vsd.templates").joinpath("")
50 | 
51 |     try:
52 |         _prepare_from_template(format, templates_dir / "run.resc", resc_path)
53 |     except KeyError as e:
54 |         logging.error(f"Missing value needed to create the Renode files: {e}")
55 |         return 1
56 | 
57 |     ret = _prepare_repl(dts_path, repl_path)
58 |     if not ret:
59 |         logging.error("Failed to create the REPL file")
60 |         return 1
61 | 
62 |     logging.info(f"Renode files for board {board_name} are ready in {builds_dir}")
63 |     return 0
64 | 
65 | 
66 | def prepare_simulation(board_name, elf_path, repl_path):
67 |     from pyrenode3.wrappers import Emulation
68 | 
69 |     emu = Emulation()
70 |     machine = emu.add_mach('machine0')
71 | 
72 |     try:
73 |         machine.load_repl(str(repl_path.absolute()))
74 |         machine.load_elf(str(elf_path.absolute()))
75 |     except Exception as e:
76 |         # Save the error now, because it is impossible to read after the emulation is cleared.
77 |         error = str(e)
78 |         emu.clear()
79 |         raise Exception(error)
80 | 
81 |     return emu, machine
82 | 
83 | 
84 | def register_led_callback(machine, source, repl_label, callback):
85 |     from Antmicro.Renode.Peripherals.Miscellaneous import ILed
86 |     led = ILed(machine.internal[f"sysbus.{source}.{repl_label}"])
87 |     led.StateChanged += callback
88 | 
89 | 
90 | def create_temperature_callback(machine, source, repl_label):
91 |     from Antmicro.Renode.Peripherals.Sensor import ITemperatureSensor
92 |     from System import Decimal
93 | 
94 |     thermometer = ITemperatureSensor(machine.internal[f"sysbus.{source}.{repl_label}"])
95 | 
96 |     def set_temp(new_temp):
97 |         logging.debug(f"Setting temperature to {new_temp}")
98 |         thermometer.Temperature = Decimal(new_temp)
99 |     return set_temp
100 | 
101 | 
102 | class UTF8Decoder:
103 |     def __init__(self):
104 |         self._utf8_chars_left = 0
105 |         self._utf8_buffer = bytearray()
106 | 
107 |     def wrap_callback(self, inner_cb):
108 |         def callback(char):
109 |             UTF8_2_MASK = 0b11100000  # mask selecting the 2-byte leading prefix
110 |             UTF8_3_MASK = 0b11110000  # mask selecting the 3-byte leading prefix
111 |             UTF8_1 = 0b10000000  # continuation byte marker
112 |             UTF8_2 = 0b11000000  # leading byte of a 2-byte sequence
113 |             UTF8_3 = 0b11100000  # leading byte of a 3-byte sequence
114 | 
115 |             if char & UTF8_3_MASK == UTF8_3:
116 |                 self._utf8_chars_left = 2
117 |                 self._utf8_buffer.append(char)
118 |                 return
119 | 
120 |             if char & UTF8_2_MASK == UTF8_2:
121 |                 self._utf8_chars_left = 1
122 |                 self._utf8_buffer.append(char)
123 |                 return
124 | 
125 |             if char & UTF8_1:
126 |                 self._utf8_chars_left -= 1
127 |                 assert self._utf8_chars_left >= 0
128 | 
129 |                 self._utf8_buffer.append(char)
130 | 
131 |                 if self._utf8_chars_left == 0:
132 |                     inner_cb(self._utf8_buffer.decode())
133 |                     self._utf8_buffer = bytearray()
134 |             else:
135 |                 # The char isn't part of a multi-byte sequence, so it can be passed through as-is.
136 |                 inner_cb(chr(char))
137 |         return callback
138 | 
139 | 
140 | def register_uart_callback(uart, callback):
141 |     uart.CharReceived += callback
142 | 
143 | 
144 | def get_all_uarts(machine):
145 |     from Antmicro.Renode.Peripherals.UART import IUART
146 |     from pyrenode3 import wrappers
147 |     uarts = list(machine.GetPeripheralsOfType[IUART]())
148 |     return [(u, wrappers.Peripheral(u).name) for u in uarts]
149 | 
150 | 
151 | class ConsoleCallbackPool():
152 |     def __init__(self):
153 |         self.active_uart = None
154 | 
155 |     def create_callback(self, uart, active=False):
156 |         decoder = UTF8Decoder()
157 |         if active:
158 |             if self.active_uart is not None:
159 |                 raise Exception("Can't set more than one active console!")
160 | 
161 |             self.active_uart = uart
162 |             return decoder.wrap_callback(lambda c: print(c, end=''))
163 | 
164 |         # If the active console is already set, then just ignore all characters.
165 |         if self.active_uart is not None:
166 |             return (lambda c: None)
167 | 
168 |         @decoder.wrap_callback
169 |         def console_callback(char):
170 |             if self.active_uart is None:
171 |                 self.active_uart = uart
172 |             # Print only when the active uart matches the current one
173 |             if self.active_uart is uart:
174 |                 print(char, end='')
175 | 
176 |         return console_callback
177 | 
178 | 
179 | @env.setup_env
180 | def simulate(board_name: str):
181 |     """
182 |     Start the simulation on the board prepared by the previous steps.
183 | """ 184 | workspace = Path(env.get_workspace()) 185 | builds_dir = workspace / 'builds' / board_name 186 | repl_path = builds_dir / f"{board_name}.repl" 187 | elf_path = builds_dir / "zephyr/zephyr.elf" 188 | dts_path = builds_dir / "zephyr/zephyr.dts" 189 | 190 | ret = prepare_renode_files(board_name, workspace) 191 | if ret != 0: 192 | logging.error("Files needed for the simulation can't be prepared") 193 | sys.exit(1) 194 | 195 | try: 196 | emu, machine = prepare_simulation(board_name, elf_path, repl_path) 197 | except Exception as e: 198 | print(f"Simulation can't be prepared using {repl_path} and {elf_path}!") 199 | print(f"\n{e}") 200 | sys.exit(1) 201 | 202 | callback_pool = ConsoleCallbackPool() 203 | 204 | all_uarts = get_all_uarts(machine) 205 | if len(all_uarts) > 0: 206 | zephyr_console = find_chosen('zephyr,console', dts_path) 207 | for uart, name in get_all_uarts(machine): 208 | register_uart_callback(uart, callback_pool.create_callback(uart, active=(name == zephyr_console))) 209 | else: 210 | print("Runing without console output") 211 | 212 | print(f"Starting simulation on {board_name}. Press Ctrl+C to quit.") 213 | print("-----------------------------------") 214 | emu.StartAll() 215 | 216 | try: 217 | # Just wait for signal 218 | while True: 219 | pass 220 | finally: 221 | emu.clear() 222 | print("Exiting...") 223 | -------------------------------------------------------------------------------- /vsd/spec_mods/interactive.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "notifyWhenChanged": true, 4 | "navbarItems": [ 5 | { 6 | "name": "Build", 7 | "iconName": "build.svg", 8 | "procedureName": "custom_build" 9 | }, 10 | { 11 | "name": "Run simulation", 12 | "iconName": "Run", 13 | "procedureName": "dataflow_run" 14 | } 15 | ] 16 | }, 17 | "mods": [ 18 | { 19 | "names": ["LED"], 20 | "add_properties": [ 21 | { 22 | "default": false, 23 | "name": "active", 24 | "type": "bool" 25 | } 26 | ] 27 | }, 28 | { 29 | "names": [ 30 | "bme280", 31 | "sht4xd", 32 | "tmp108", 33 | "si7210" 34 | ], 35 | "add_properties": [ 36 | { 37 | "default": 20.0, 38 | "name": "temperature", 39 | "type": "number" 40 | } 41 | ] 42 | } 43 | ] 44 | } 45 | -------------------------------------------------------------------------------- /vsd/specification.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Antmicro 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | import logging 6 | 7 | 8 | class Specification(): 9 | def __init__(self, spec_path): 10 | self._parse_specification(spec_path) 11 | 12 | def _parse_specification(self, spec_path): 13 | """Return specification in format that is easier to operate on.""" 14 | with open(spec_path) as f: 15 | self.spec_json = json.load(f) 16 | 17 | metadata = self.spec_json['metadata'] 18 | 19 | nodes = {} 20 | categories = {} 21 | abstract = {} 22 | 23 | for node in self.spec_json['nodes']: 24 | if 'isCategory' in node and node['isCategory']: 25 | categories[node['category'].split("/")[-1]] = node 26 | continue 27 | if 'abstract' in node and node['abstract']: 28 | abstract[node['name']] = node 29 | continue 30 | nodes[node['name']] = node 31 | 32 | self.metadata = metadata 33 | self.nodes = nodes 34 | self.categories = categories 35 | self.abstract = abstract 36 | 37 | def get_node_spec(self, node_name, resolve=True): 38 | if node_name in self.nodes: 39 | logging.debug(f"{node_name} is a node.") 40 | node = 
41 |         elif node_name in self.categories:
42 |             logging.debug(f"{node_name} is a category.")
43 |             node = self.categories[node_name]
44 |         elif node_name in self.abstract:
45 |             logging.debug(f"{node_name} is an abstract node.")
46 |             return self.abstract[node_name]
47 |         else:
48 |             logging.warning(f"Node {node_name} not found.")
49 |             return None
50 | 
51 |         # XXX: maybe resolve on more levels?
52 |         if resolve and 'extends' in node:
53 |             for ext_name in node['extends']:
54 |                 if ext_name in self.abstract:
55 |                     node = {**node, **self.abstract[ext_name]}
56 |                 elif ext_name in self.categories:
57 |                     node = {**node, **self.categories[ext_name]}
58 |                 else:
59 |                     logging.warning(f"Extended node not found: {ext_name}")
60 |         return node
61 | 
62 |     def _add_node(self, node):
63 |         if node.get('isCategory', False):
64 |             self.categories[node['category'].split("/")[-1]] = node
65 |             return
66 | 
67 |         if node.get('abstract', False):
68 |             self.abstract[node['name']] = node
69 |             return
70 | 
71 |         self.nodes[node['name']] = node
72 |         self.spec_json["nodes"].append(node)
73 | 
74 | 
75 |     def _modify_node(self, node, add_interfaces, add_properties):
76 |         if add_interfaces:
77 |             if "interfaces" not in node:
78 |                 node["interfaces"] = add_interfaces
79 |             else:
80 |                 node["interfaces"].extend(add_interfaces)
81 | 
82 |         if add_properties:
83 |             if "properties" not in node:
84 |                 node["properties"] = add_properties
85 |             else:
86 |                 node["properties"].extend(add_properties)
87 | 
88 |     def modify(self, modifications):
89 |         for key, value in modifications.get("metadata", {}).items():
90 |             self.spec_json["metadata"][key] = value
91 | 
92 |         for node in modifications.get("add_nodes", []):
93 |             self._add_node(node)
94 | 
95 |         for mod in modifications.get("mods", []):
96 |             add_interfaces = mod.get("add_interfaces")
97 |             add_properties = mod.get("add_properties")
98 |             for name in mod["names"]:
99 |                 if node := self.get_node_spec(name, resolve=False):
100 |                     self._modify_node(node, add_interfaces, add_properties)
101 |                 else:
102 |                     logging.warning(f"node {{name:{name}}} doesn't exist")
103 | 
104 |     def get_socs(self):
105 |         soc_names = []
106 |         for name, node in self.categories.items():
107 |             if node['category'].startswith("SoC"):
108 |                 soc_names.append(name)
109 |         return soc_names
110 | 
--------------------------------------------------------------------------------
/vsd/templates/run.resc:
--------------------------------------------------------------------------------
1 | $name?="{board_name}"
2 | using sysbus
3 | mach create $name
4 | 
5 | machine LoadPlatformDescription @{repl_path}
6 | $bin = @{elf_path}
7 | showAnalyzer {console}
8 | 
9 | macro reset
10 | """
11 | sysbus LoadELF $bin
12 | """
13 | runMacro $reset
14 | 
--------------------------------------------------------------------------------
/vsd/utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | import subprocess
4 | import sys
5 | 
6 | from pathlib import Path
7 | from subprocess import CalledProcessError
8 | 
9 | 
10 | def find_chosen(name, dts_path):
11 |     with open(dts_path, 'r') as f:
12 |         dts = f.read()
13 | 
14 |     console_m = re.search(f'{name} = &(.+);', dts)
15 |     if not console_m:
16 |         return None
17 | 
18 |     return console_m.group(1)
19 | 
20 | 
21 | def filter_nodes(connections, filter_fn):
22 |     """
23 |     Use filter_fn to filter nodes on the connections list.
24 | 
25 |     The filter function has the signature:
26 |         def filter_fn(if_name, if_type, node) -> bool
27 | 
28 |     Returns a tuple:
29 |     - connections accepted by the filter function
30 |     - connections that are not accepted by the filter function
31 |     """
32 |     filtered, other = [], []
33 |     for conn in connections:
34 |         if filter_fn(*conn):
35 |             filtered.append(conn)
36 |         else:
37 |             other.append(conn)
38 |     return filtered, other
39 | 
40 | 
41 | def git_command(command, repo=Path("."), output=False, error_msg=None):
42 |     try:
43 |         if output:
44 |             return subprocess.check_output(["git", *command], cwd=repo, text=True).strip()
45 |         else:
46 |             subprocess.check_call(["git", *command], cwd=repo)
47 |     except CalledProcessError as e:
48 |         if not error_msg:
49 |             raise
50 |         logging.error(f"{error_msg} (exitcode: {e.returncode})")
51 |         sys.exit(e.returncode)
52 | 
53 | 
54 | def git_commit_sha(repo):
55 |     output = git_command(
56 |         command=["rev-parse", "HEAD"],
57 |         repo=repo,
58 |         output=True,
59 |         error_msg=f"Failed to read {repo} commit sha",
60 |     )
61 |     return output.strip()
62 | 
--------------------------------------------------------------------------------
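
A minimal usage sketch of how Specification, Graph, and filter_nodes fit together. The two paths below are placeholders (the specification JSON ships with visual-system-designer-resources, but its exact location is not shown above), the dataflow JSON is assumed to contain the top-level "graph" object that Graph.__init__ reads, and the "LED" category check is only illustrative:

# Hedged usage sketch -- not a repository file; paths and the category
# predicate below are assumptions, not confirmed by the sources above.
import json

from vsd.graph import Graph
from vsd.specification import Specification
from vsd.utils import filter_nodes

# Placeholder paths: adjust to your workspace layout.
SPEC_PATH = "workspace/visual-system-designer-resources/specification.json"
GRAPH_PATH = "my-design.json"

spec = Specification(SPEC_PATH)
with open(GRAPH_PATH) as f:
    graph_json = json.load(f)

graph = Graph(graph_json, spec)

# Pick the first SoC in the graph and list everything wired to it.
soc, connections = graph.get_soc_with_connections()
print(f"SoC: {soc.name} (graph: {graph.name})")

# Each connection is (soc_interface_name, node_interface_name, neighbor_node);
# the "LED" category check is illustrative, not a guaranteed category name.
leds, other = filter_nodes(connections, lambda soc_if, node_if, node: "LED" in node.category)

for soc_if, node_if, node in leds:
    print(f"  LED {node.label}: {soc_if} -> {node_if}")
for soc_if, node_if, node in other:
    print(f"  {node.label}: {soc_if} -> {node_if}")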