├── .clang-format ├── .eslintrc.json ├── .github └── workflows │ └── deploy-docker-images.yml ├── .gitignore ├── .prospector.yaml ├── LICENSE ├── README.md ├── THIRD_PARTY.md ├── dependency_manager ├── .gitignore ├── LICENSE ├── README.md ├── generate_bash_completion.sh ├── pyproject.toml ├── requirements.txt ├── setup.cfg ├── setup.py └── src │ └── edm_tool │ ├── __init__.py │ ├── bazel.py │ ├── edm-completion.bash │ ├── edm.py │ └── templates │ └── cpm.jinja ├── devcontainer ├── setup-devcontainer.sh └── template │ └── .devcontainer │ ├── docker-compose.yml │ └── general-devcontainer │ ├── Dockerfile │ ├── devcontainer.json │ └── docker-compose.devcontainer.yml ├── docker ├── docker-compose.yml └── images │ ├── mosquitto │ ├── Dockerfile │ └── mosquitto.conf │ ├── mqtt-explorer │ ├── Dockerfile │ └── settings.json │ ├── nodered │ └── Dockerfile │ └── steve │ ├── Dockerfile │ ├── init.sh │ ├── keystore.jks │ └── main.properties ├── everest-complete-readonly.yaml ├── everest-complete.yaml ├── everest-metadata.yaml └── everest_dev_tool ├── .gitignore ├── LICENSE ├── pyproject.toml └── src └── everest_dev_tool ├── __init__.py ├── git_handlers.py ├── parser.py └── services.py /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | Language: Cpp 3 | # BasedOnStyle: LLVM 4 | AccessModifierOffset: -4 5 | AlignAfterOpenBracket: Align 6 | AlignConsecutiveMacros: true 7 | AlignConsecutiveAssignments: false 8 | AlignConsecutiveDeclarations: false 9 | AlignEscapedNewlines: Right 10 | AlignOperands: true 11 | AlignTrailingComments: true 12 | AllowAllArgumentsOnNextLine: true 13 | AllowAllConstructorInitializersOnNextLine: true 14 | AllowAllParametersOfDeclarationOnNextLine: true 15 | AllowShortBlocksOnASingleLine: Never 16 | AllowShortCaseLabelsOnASingleLine: false 17 | AllowShortEnumsOnASingleLine: false 18 | AllowShortFunctionsOnASingleLine: None 19 | AllowShortLambdasOnASingleLine: All 20 | 
AllowShortIfStatementsOnASingleLine: Never 21 | AllowShortLoopsOnASingleLine: false 22 | AlwaysBreakAfterDefinitionReturnType: None 23 | AlwaysBreakAfterReturnType: None 24 | AlwaysBreakBeforeMultilineStrings: false 25 | AlwaysBreakTemplateDeclarations: MultiLine 26 | BinPackArguments: true 27 | BinPackParameters: true 28 | BraceWrapping: 29 | AfterCaseLabel: false 30 | AfterClass: false 31 | AfterControlStatement: false 32 | AfterEnum: false 33 | AfterFunction: false 34 | AfterNamespace: false 35 | AfterObjCDeclaration: false 36 | AfterStruct: false 37 | AfterUnion: false 38 | AfterExternBlock: false 39 | BeforeCatch: false 40 | BeforeElse: false 41 | IndentBraces: false 42 | SplitEmptyFunction: true 43 | SplitEmptyRecord: true 44 | SplitEmptyNamespace: true 45 | BreakBeforeBinaryOperators: None 46 | BreakBeforeBraces: Attach 47 | BreakBeforeInheritanceComma: false 48 | BreakInheritanceList: BeforeColon 49 | BreakBeforeTernaryOperators: true 50 | BreakConstructorInitializersBeforeComma: false 51 | BreakConstructorInitializers: AfterColon 52 | BreakAfterJavaFieldAnnotations: false 53 | BreakStringLiterals: true 54 | ColumnLimit: 120 55 | CommentPragmas: '^ IWYU pragma:' 56 | CompactNamespaces: false 57 | ConstructorInitializerAllOnOneLineOrOnePerLine: true 58 | ConstructorInitializerIndentWidth: 4 59 | ContinuationIndentWidth: 4 60 | Cpp11BracedListStyle: true 61 | DeriveLineEnding: true 62 | DerivePointerAlignment: false 63 | DisableFormat: false 64 | ExperimentalAutoDetectBinPacking: false 65 | FixNamespaceComments: true 66 | ForEachMacros: 67 | - foreach 68 | - Q_FOREACH 69 | - BOOST_FOREACH 70 | IncludeBlocks: Preserve 71 | IncludeCategories: 72 | - Regex: '^"(llvm|llvm-c|clang|clang-c)/' 73 | Priority: 2 74 | SortPriority: 0 75 | - Regex: '^(<|"(gtest|gmock|isl|json)/)' 76 | Priority: 3 77 | SortPriority: 0 78 | - Regex: '.*' 79 | Priority: 1 80 | SortPriority: 0 81 | IncludeIsMainRegex: '(Test)?$' 82 | IncludeIsMainSourceRegex: '' 83 | IndentCaseLabels: false 
84 | IndentGotoLabels: true 85 | IndentPPDirectives: None 86 | IndentWidth: 4 87 | IndentWrappedFunctionNames: false 88 | JavaScriptQuotes: Leave 89 | JavaScriptWrapImports: true 90 | KeepEmptyLinesAtTheStartOfBlocks: true 91 | MacroBlockBegin: '' 92 | MacroBlockEnd: '' 93 | MaxEmptyLinesToKeep: 1 94 | NamespaceIndentation: None 95 | ObjCBinPackProtocolList: Auto 96 | ObjCBlockIndentWidth: 2 97 | ObjCSpaceAfterProperty: false 98 | ObjCSpaceBeforeProtocolList: true 99 | PenaltyBreakAssignment: 2 100 | PenaltyBreakBeforeFirstCallParameter: 19 101 | PenaltyBreakComment: 300 102 | PenaltyBreakFirstLessLess: 120 103 | PenaltyBreakString: 1000 104 | PenaltyBreakTemplateDeclaration: 10 105 | PenaltyExcessCharacter: 1000000 106 | PenaltyReturnTypeOnItsOwnLine: 60 107 | PointerAlignment: Left 108 | ReflowComments: true 109 | SortIncludes: true 110 | SortUsingDeclarations: true 111 | SpaceAfterCStyleCast: false 112 | SpaceAfterLogicalNot: false 113 | SpaceAfterTemplateKeyword: true 114 | SpaceBeforeAssignmentOperators: true 115 | SpaceBeforeCpp11BracedList: false 116 | SpaceBeforeCtorInitializerColon: true 117 | SpaceBeforeInheritanceColon: true 118 | SpaceBeforeParens: ControlStatements 119 | SpaceBeforeRangeBasedForLoopColon: true 120 | SpaceInEmptyBlock: false 121 | SpaceInEmptyParentheses: false 122 | SpacesBeforeTrailingComments: 1 123 | SpacesInAngles: false 124 | SpacesInConditionalStatement: false 125 | SpacesInContainerLiterals: true 126 | SpacesInCStyleCastParentheses: false 127 | SpacesInParentheses: false 128 | SpacesInSquareBrackets: false 129 | SpaceBeforeSquareBrackets: false 130 | Standard: Latest 131 | StatementMacros: 132 | - Q_UNUSED 133 | - QT_REQUIRE_VERSION 134 | TabWidth: 8 135 | UseCRLF: false 136 | UseTab: Never 137 | ... 
138 | 139 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "browser": true, 4 | "commonjs": true, 5 | "es2021": true 6 | }, 7 | "extends": [ 8 | "airbnb-base" 9 | ], 10 | "parserOptions": { 11 | "ecmaVersion": 12 12 | }, 13 | "rules": { 14 | "camelcase": "off", 15 | "eqeqeq": [ 16 | "error", 17 | "smart" 18 | ], 19 | "comma-dangle": [ 20 | "warn", 21 | { 22 | "objects": "always-multiline", 23 | "arrays": "always-multiline", 24 | "functions": "never" 25 | } 26 | ], 27 | "import/no-unresolved": [ 28 | 2, 29 | { 30 | "ignore": [ 31 | "everestjs" 32 | ] 33 | } 34 | ], 35 | "max-len": [ 36 | "warn", 37 | { 38 | "code": 120, 39 | "tabWidth": 2 40 | } 41 | ] 42 | } 43 | } -------------------------------------------------------------------------------- /.github/workflows/deploy-docker-images.yml: -------------------------------------------------------------------------------- 1 | name: Build and push docker images 2 | 3 | on: 4 | pull_request: {} 5 | push: 6 | branches: 7 | - '**' 8 | tags: 9 | - 'docker-images-v*' 10 | 11 | env: 12 | REGISTRY: ghcr.io 13 | DOCKER_DIRECTORY: docker/images 14 | PLATFORMS: | 15 | linux/amd64 16 | PATH_TO_DEPLOY_DOCKER_IMAGES_WORKFLOW: .github/workflows/deploy-docker-images.yml 17 | 18 | jobs: 19 | env-setup: 20 | # Since env variables can't be passed to reusable workflows, we need to pass them as outputs 21 | name: Setup environment 22 | runs-on: ubuntu-22.04 23 | outputs: 24 | docker_registry: ${{ env.REGISTRY }} 25 | docker_directory: ${{ env.DOCKER_DIRECTORY }} 26 | platforms: ${{ env.PLATFORMS }} 27 | path_to_deploy_docker_images_workflow: ${{ env.PATH_TO_DEPLOY_DOCKER_IMAGES_WORKFLOW }} 28 | steps: 29 | - id: check 30 | run: | 31 | echo "Setting up environment" 32 | mosquitto: 33 | needs: 34 | - env-setup 35 | name: Build and push mosquitto docker image 36 | uses: 
everest/everest-ci/.github/workflows/deploy-single-docker-image.yml@v1.3.2 37 | secrets: 38 | SA_GITHUB_PAT: ${{ secrets.SA_GITHUB_PAT }} 39 | SA_GITHUB_USERNAME: ${{ secrets.SA_GITHUB_USERNAME }} 40 | with: 41 | image_name: ${{ github.event.repository.name }}/mosquitto 42 | directory: ${{ needs.env-setup.outputs.docker_directory }}/mosquitto 43 | docker_registry: ${{ needs.env-setup.outputs.docker_registry }} 44 | github_ref_before: ${{ github.event.before }} 45 | github_ref_after: ${{ github.event.after }} 46 | platforms: ${{ needs.env-setup.outputs.platforms }} 47 | depends_on_paths: | 48 | ${{ needs.env-setup.outputs.path_to_deploy_docker_images_workflow }} 49 | steve: 50 | needs: 51 | - env-setup 52 | name: Build and push steve docker image 53 | uses: everest/everest-ci/.github/workflows/deploy-single-docker-image.yml@v1.3.2 54 | secrets: 55 | SA_GITHUB_PAT: ${{ secrets.SA_GITHUB_PAT }} 56 | SA_GITHUB_USERNAME: ${{ secrets.SA_GITHUB_USERNAME }} 57 | with: 58 | image_name: ${{ github.event.repository.name }}/steve 59 | directory: ${{ needs.env-setup.outputs.docker_directory }}/steve 60 | docker_registry: ${{ needs.env-setup.outputs.docker_registry }} 61 | github_ref_before: ${{ github.event.before }} 62 | github_ref_after: ${{ github.event.after }} 63 | platforms: ${{ needs.env-setup.outputs.platforms }} 64 | depends_on_paths: | 65 | ${{ needs.env-setup.outputs.path_to_deploy_docker_images_workflow }} 66 | mqtt-explorer: 67 | needs: 68 | - env-setup 69 | name: Build and push mqtt-explorer docker image 70 | uses: everest/everest-ci/.github/workflows/deploy-single-docker-image.yml@v1.3.2 71 | secrets: 72 | SA_GITHUB_PAT: ${{ secrets.SA_GITHUB_PAT }} 73 | SA_GITHUB_USERNAME: ${{ secrets.SA_GITHUB_USERNAME }} 74 | with: 75 | image_name: ${{ github.event.repository.name }}/mqtt-explorer 76 | directory: ${{ needs.env-setup.outputs.docker_directory }}/mqtt-explorer 77 | docker_registry: ${{ needs.env-setup.outputs.docker_registry }} 78 | github_ref_before: ${{ 
github.event.before }} 79 | github_ref_after: ${{ github.event.after }} 80 | platforms: ${{ needs.env-setup.outputs.platforms }} 81 | depends_on_paths: | 82 | ${{ needs.env-setup.outputs.path_to_deploy_docker_images_workflow }} 83 | nodered: 84 | needs: 85 | - env-setup 86 | name: Build and push nodered docker image 87 | uses: everest/everest-ci/.github/workflows/deploy-single-docker-image.yml@v1.3.2 88 | secrets: 89 | SA_GITHUB_PAT: ${{ secrets.SA_GITHUB_PAT }} 90 | SA_GITHUB_USERNAME: ${{ secrets.SA_GITHUB_USERNAME }} 91 | with: 92 | image_name: ${{ github.event.repository.name }}/nodered 93 | directory: ${{ needs.env-setup.outputs.docker_directory }}/nodered 94 | docker_registry: ${{ needs.env-setup.outputs.docker_registry }} 95 | github_ref_before: ${{ github.event.before }} 96 | github_ref_after: ${{ github.event.after }} 97 | platforms: ${{ needs.env-setup.outputs.platforms }} 98 | depends_on_paths: | 99 | ${{ needs.env-setup.outputs.path_to_deploy_docker_images_workflow }} 100 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | workspace.yaml 2 | -------------------------------------------------------------------------------- /.prospector.yaml: -------------------------------------------------------------------------------- 1 | pep257: 2 | disable: 3 | - D203 4 | - D212 5 | - D213 6 | - D214 7 | - D215 8 | - D404 9 | - D405 10 | - D406 11 | - D407 12 | - D408 13 | - D409 14 | - D410 15 | - D411 16 | - D413 17 | - D415 18 | - D416 19 | - D417 20 | pylint: 21 | disable: 22 | - logging-fstring-interpolation 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 
| 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # EVerest dev environment 2 | 3 | This subproject contains all utility files for setting up your development environment. So far this is the [edm - the Everest Dependency Manager](dependency_manager/README.md) which helps you orchestrate the dependencies between the different everest repositories. 4 | 5 | You can install [edm](dependency_manager/README.md) very easily using pip. 6 | 7 | All documentation and the issue tracking can be found in our main repository here: https://github.com/EVerest/everest 8 | 9 | ## Easy Dev Environment Setup 10 | 11 | To set up a devcontainer in your workspace you can use the following command to run the `setup-devcontainer.sh` script locally. 12 | 13 | ### 1. Prerequisites 14 | 15 | Create a new directory and navigate into it. This directory will be your new workspace; alternatively, use an existing one. 16 | 17 | ```bash 18 | mkdir my-workspace 19 | cd my-workspace 20 | ``` 21 | 22 | ### 2. 
Run the setup script 23 | 24 | Run the following command to set up the devcontainer. 25 | 26 | ```bash 27 | export BRANCH="main" && bash -c "$(curl -s --variable %BRANCH=main --expand-url https://raw.githubusercontent.com/EVerest/everest-dev-environment/{{BRANCH}}/devcontainer/setup-devcontainer.sh)" 28 | ``` 29 | 30 | The script will ask you for the following information: 31 | 1. Workspace directory: Default is the current directory. You can keep the default by pressing enter. 32 | 2. everest-dev-environment version: Default is 'main'. You can keep the default by pressing enter. 33 | 34 | ### 3. Open in VS Code 35 | 36 | After the script has finished, open the workspace in Visual Studio Code. 37 | 38 | ```bash 39 | code . 40 | ``` 41 | 42 | VS Code will ask you to reopen the workspace in a container. Click on the button `Reopen in Container`. 43 | 44 | ### 4. Getting started 45 | 46 | As your newly set up dev environment suggests when you open a terminal, you can set up your EVerest workspace by running the following command: 47 | 48 | ```bash 49 | everest clone everest-core 50 | ``` 51 | -------------------------------------------------------------------------------- /THIRD_PARTY.md: -------------------------------------------------------------------------------- 1 | _Use this file to list out any third-party dependencies used by this project. 
You may choose to point to a Gemfile or other language specific packaging file for details._ 2 | -------------------------------------------------------------------------------- /dependency_manager/.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | __pycache__ 3 | *.egg-info -------------------------------------------------------------------------------- /dependency_manager/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /dependency_manager/README.md: -------------------------------------------------------------------------------- 1 | # Dependency Manager for EVerest 2 | 3 | - [Dependency Manager for EVerest](#dependency-manager-for-everest) 4 | - [Install and Quick Start](#install-and-quick-start) 5 | - [Installing edm](#installing-edm) 6 | - [Enabling CPM_SOURCE_CACHE](#enabling-cpm_source_cache) 7 | - [Python packages needed to run edm](#python-packages-needed-to-run-edm) 8 | - [Setting up CMake integration](#setting-up-cmake-integration) 9 | - [Setting up a workspace](#setting-up-a-workspace) 10 | - [Updating a workspace](#updating-a-workspace) 11 | - [Using the EDM CMake module and dependencies.yaml](#using-the-edm-cmake-module-and-dependenciesyaml) 12 | - [Create a workspace config from an existing directory tree](#create-a-workspace-config-from-an-existing-directory-tree) 13 | - [Git information at a glance](#git-information-at-a-glance) 14 | 15 | ## Install and Quick Start 16 | To install the **edm** dependency manager for EVerest you have to perform 
the following steps. 17 | 18 | Please make sure you are running a sufficiently recent version of **Python3 (>=3.6)** and that you are able to install Python packages from source. Usually you just have to ensure that you have **pip**, **setuptools** and **wheel** available. Refer to [the Python *Installing Packages* documentation](https://packaging.python.org/tutorials/installing-packages/#requirements-for-installing-packages) for indepth guidance if any problems arise. 19 | 20 | ```bash 21 | python3 -m pip install --upgrade pip setuptools wheel 22 | ``` 23 | 24 | ### Installing edm 25 | Now you can clone *this repository* and install **edm**: 26 | 27 | (make sure you have set up your [ssh key](https://www.atlassian.com/git/tutorials/git-ssh) in github first!) 28 | 29 | ```bash 30 | git clone git@github.com:EVerest/everest-dev-environment.git 31 | cd everest-dev-environment/dependency_manager 32 | python3 -m pip install . 33 | edm --config ../everest-complete.yaml --workspace ~/checkout/everest-workspace 34 | ``` 35 | 36 | The last command registers the [**EDM** CMake module](#setting-up-cmake-integration) and creates a workspace in the *~/checkout/everest-workspace* directory from [a config that is shipped with this repository](../everest-complete.yaml). 37 | The workspace will have the following structure containing all current dependencies for EVerest: 38 | ```bash 39 | everest-workspace/ 40 | ├── everest-core 41 | ├── everest-deploy-devkit 42 | ├── everest-dev-environment 43 | ├── everest-framework 44 | ├── everest-utils 45 | ├── liblog 46 | ├── libmodbus 47 | ├── libocpp 48 | ├── libsunspec 49 | ├── libtimer 50 | ├── open-plc-utils 51 | ├── RISE-V2G 52 | └── workspace-config.yaml 53 | ``` 54 | The *workspace-config.yaml* contains a copy of the config that was used to create this workspace. 55 | 56 | ### Enabling CPM_SOURCE_CACHE 57 | The **edm** dependency manager uses [CPM](https://github.com/cpm-cmake/CPM.cmake) for its CMake integration. 
58 | This means you *can* and **should** set the *CPM_SOURCE_CACHE* environment variable. This makes sure that dependencies that you do not manage in the workspace are not re-downloaded multiple times. For detailed information and other useful environment variables please refer to the [CPM Documentation](https://github.com/cpm-cmake/CPM.cmake/blob/master/README.md#CPM_SOURCE_CACHE). 59 | ```bash 60 | export CPM_SOURCE_CACHE=$HOME/.cache/CPM 61 | ``` 62 | 63 | ### Python packages needed to run edm 64 | The following Python3 packages are needed to run the **edm** dependency manager. 65 | If you installed **edm** using the guide above they were already installed automatically. 66 | 67 | - Python >= 3.6 68 | - Jinja2 >= 3.0 69 | - PyYAML >= 5.4 70 | 71 | ## Setting up and updating a workspace 72 | For letting **edm** do the work of setting up an initial EVerest workspace, 73 | do this: 74 | 75 | ```bash 76 | edm init --workspace ~/checkout/everest-workspace 77 | ``` 78 | If you are currently in the *everest-workspace* directory the following command has the same effect: 79 | 80 | ```bash 81 | edm init 82 | ``` 83 | 84 | For using a dedicated release version, you can do this: 85 | 86 | ```bash 87 | edm init 2023.7.0 88 | ``` 89 | 90 | In this example, version 2023.7.0 is pulled from the server. This will only work if 91 | you previous code is not in a "dirty" state. 92 | 93 | ## Using the EDM CMake module and dependencies.yaml 94 | To use **edm** from CMake you have to add the following line to the top-level *CMakeLists.txt* file in the respective source repository: 95 | ```cmake 96 | find_package(EDM REQUIRED) 97 | ``` 98 | The **EDM** CMake module will be discovered automatically if you [registered the CMake module in the way it described in the *Setting up CMake integration* section of this readme](#setting-up-cmake-integration). 99 | 100 | To define dependencies you can now add a **dependencies.yaml** file to your source repository. 
It should look like this: 101 | ```yaml 102 | --- 103 | liblog: 104 | git: git@github.com:EVerest/liblog.git 105 | git_tag: main 106 | options: ["BUILD_EXAMPLES OFF"] 107 | libtimer: 108 | git: git@github.com:EVerest/libtimer.git 109 | git_tag: main 110 | options: ["BUILD_EXAMPLES OFF"] 111 | 112 | ``` 113 | 114 | If you want to conditionally include some dependencies, eg. for testing, you can do this in the following way: 115 | ```yaml 116 | catch2: 117 | git: https://github.com/catchorg/Catch2.git 118 | git_tag: v3.4.0 119 | cmake_condition: "BUILD_TESTING" 120 | 121 | ``` 122 | Here *cmake_condition* can be any string that CMake can use in an if() block. Please be aware that any variables you use here must be defined before a call to *evc_setup_edm()* is made in your CMakeLists.txt 123 | 124 | Additionally you can set the *EVEREST_MODIFY_DEPENDENCIES* environment variable to a file containing modifications to the projects dependencies.yaml files when running cmake: 125 | 126 | ```bash 127 | EVEREST_MODIFY_DEPENDENCIES=../dependencies_modified.yaml cmake -S . -B build 128 | ``` 129 | 130 | The *dependencies_modified.yaml* file can contain something along these lines: 131 | 132 | ```yaml 133 | nlohmann_json: 134 | git: null # this makes edm look for nlohmann_json via find_package 135 | libfmt: 136 | rename: fmt # if find_package needs a different dependency name you can rename it 137 | git: null 138 | catch2: 139 | git_tag: v1.2.3 # if you want to select a different git tag for a build this is also possible 140 | ``` 141 | 142 | ## Create a workspace config from an existing directory tree 143 | Suppose you already have a directory tree that you want to save into a config file. 
144 | You can do this with the following command: 145 | ```bash 146 | edm --create-config custom-config.yaml 147 | ``` 148 | 149 | This is a short form of 150 | ```bash 151 | edm --create-config custom-config.yaml --include-remotes git@github.com:EVerest/* 152 | ``` 153 | and only includes repositories from the *EVerest* namespace. You can add as many remotes to this list as you want. 154 | 155 | For example if you only want to include certain repositories you can use the following command. 156 | ```bash 157 | edm --create-config custom-config.yaml --include-remotes git@github.com:EVerest/everest* git@github.com:EVerest/liblog.git 158 | ``` 159 | 160 | If you want to include all repositories, including external dependencies, in the config you can use the following command. 161 | ```bash 162 | edm --create-config custom-config.yaml --external-in-config 163 | ``` 164 | 165 | ## Git information at a glance 166 | You can get a list of all git repositories in the current directory and their state using the following command. 167 | ```bash 168 | edm --git-info --git-fetch 169 | ``` 170 | If you want to know the state of all repositories in a workspace you can use the following command. 171 | ```bash 172 | edm --workspace ~/checkout/everest-workspace --git-info --git-fetch 173 | ``` 174 | 175 | This creates output that is similar to the following example. 176 | ```bash 177 | [edm]: Git info for "~/checkout/everest-workspace": 178 | [edm]: Using git-fetch to update remote information. This might take a few seconds. 179 | [edm]: "everest-dev-environment" @ branch: main [remote: origin/main] [behind 6] [clean] 180 | [edm]: "everest-framework" @ branch: main [remote: origin/main] [dirty] 181 | [edm]: "everest-deploy-devkit" @ branch: main [remote: origin/main] [clean] 182 | [edm]: "libtimer" @ branch: main [remote: origin/main] [dirty] 183 | [edm]: 2/4 repositories are dirty. 
184 | ``` 185 | -------------------------------------------------------------------------------- /dependency_manager/generate_bash_completion.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ## 3 | ## SPDX-License-Identifier: Apache-2.0 4 | ## Copyright 2020 - 2021 Pionix GmbH and Contributors to EVerest 5 | ## 6 | echo "generating bash-completion file" 7 | SRC_DIR="$(dirname "${BASH_SOURCE[0]}")/src" 8 | echo "Using module found in ${SRC_DIR}" 9 | cd "${SRC_DIR}" 10 | BASH_COMPLETION_FILE_DIR="$(pwd)" 11 | BASH_COMPLETION_FILE="${BASH_COMPLETION_FILE_DIR}/edm_tool/edm-completion.bash" 12 | shtab --shell=bash -u edm_tool.get_parser --prog edm > "${BASH_COMPLETION_FILE}" -------------------------------------------------------------------------------- /dependency_manager/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel"] 3 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /dependency_manager/requirements.txt: -------------------------------------------------------------------------------- 1 | Jinja2>=2.11 2 | PyYAML>=5.3 3 | requests>=2 4 | -------------------------------------------------------------------------------- /dependency_manager/setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = edm_tool 3 | version = attr: edm_tool.__version__ 4 | description= A simple dependency manager 5 | long_description = file: README.md 6 | long_description_content_type= text/markdown 7 | url= https://github.com/EVerest/everest-dev-environment 8 | author = Kai-Uwe Hermann 9 | author_email = kai-uwe.hermann@pionix.de 10 | classifiers = 11 | Development Status :: 3 - Alpha 12 | Intended Audience :: Developers 13 | Topic :: Software Development :: Build Tools 14 | License :: OSI Approved :: 
Apache Software License 15 | 16 | [options] 17 | packages = edm_tool 18 | package_dir = 19 | = src 20 | python_requires = >=3.6 21 | install_requires = 22 | Jinja2>=2.11 23 | PyYAML>=5.3 24 | requests>=2 25 | 26 | [options.entry_points] 27 | console_scripts = 28 | edm = edm_tool:main 29 | 30 | [options.package_data] 31 | edm_tool = 32 | templates/cpm.jinja 33 | cmake/CPM.cmake 34 | cmake/EDMConfig.cmake 35 | edm-completion.bash 36 | 37 | [pycodestyle] 38 | max-line-length = 120 39 | -------------------------------------------------------------------------------- /dependency_manager/setup.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # Copyright 2020 - 2022 Pionix GmbH and Contributors to EVerest 3 | """Everest Dependency Manager.""" 4 | 5 | from setuptools import setup, find_packages 6 | 7 | setup( 8 | # see setup.cfg 9 | ) 10 | -------------------------------------------------------------------------------- /dependency_manager/src/edm_tool/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-License-Identifier: Apache-2.0 3 | # Copyright Pionix GmbH and Contributors to EVerest 4 | # 5 | """Everest Dependency Manager.""" 6 | from edm_tool import edm 7 | 8 | __version__ = "0.7.1" 9 | 10 | 11 | def get_parser(): 12 | """Return the command line parser.""" 13 | return edm.get_parser(__version__) 14 | 15 | 16 | def main(): 17 | """Main entrypoint of edm.""" 18 | edm.main(get_parser()) 19 | -------------------------------------------------------------------------------- /dependency_manager/src/edm_tool/bazel.py: -------------------------------------------------------------------------------- 1 | "Bazel related functions for edm_tool." 
2 | import yaml 3 | from typing import List, Optional, Dict 4 | 5 | 6 | def _format_optional_string(value: Optional[str]): 7 | """Formats a string value as a string literal (with quotes) or `None` if the value is None.""" 8 | if value is None: 9 | return "None" 10 | return f'"{value}"' 11 | 12 | 13 | def _is_commit(revision: str): 14 | # Revision is a commit if it is a hexadecimal 40-character string 15 | return len(revision) == 40 and all(c in "0123456789abcdef" for c in revision.lower()) 16 | 17 | def _get_depname_for_label(label: str) -> str: 18 | build, depname, bazel = label.split(":")[1].split(".") 19 | if build != "BUILD" or bazel != "bazel": 20 | raise ValueError(f"Invalid build file name: {label}") 21 | return depname 22 | 23 | def _parse_build_file_labels(labels: Optional[List[str]]) -> Dict[str, str]: 24 | # For easier matching of build files with dependencies 25 | # we convert the list of build files: 26 | # ``` 27 | # [ 28 | # "@workspace//path/to/build:BUILD..bazel", 29 | # ... 30 | # ] 31 | # ``` 32 | # into a dictionary: 33 | # ``` 34 | # { 35 | # "": "@workspace//path/to/build:BUILD..bazel", 36 | # ... 37 | # } 38 | # ``` 39 | # and check that all build files have proper names. 40 | if labels is None: 41 | return {} 42 | 43 | return dict((_get_depname_for_label(label), label) for label in labels) 44 | 45 | 46 | def generate_deps(args): 47 | "Parse the dependencies.yaml and print content of *.bzl file to stdout." 
48 | with open(args.dependencies_yaml, 'r', encoding='utf-8') as f: 49 | deps = yaml.safe_load(f) 50 | 51 | build_files = _parse_build_file_labels(args.build_file) 52 | 53 | for name in build_files: 54 | if name not in deps: 55 | raise ValueError(f"Build file {name} does not have a corresponding dependency in {args.dependencies_yaml}") 56 | 57 | print(""" 58 | load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") 59 | load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") 60 | 61 | def edm_deps():""") 62 | 63 | for name, desc in deps.items(): 64 | repo = desc["git"] 65 | # The parameter is called `git_tag` but it can be a tag or a commit 66 | revision = desc["git_tag"] 67 | tag = None 68 | commit = None 69 | 70 | if _is_commit(revision): 71 | commit = revision 72 | else: 73 | tag = revision 74 | 75 | build_file = build_files.get(name) 76 | 77 | print( 78 | f""" 79 | maybe( 80 | git_repository, 81 | name = "{name}", 82 | remote = "{repo}", 83 | tag = {_format_optional_string(tag)}, 84 | commit = {_format_optional_string(commit)}, 85 | build_file = {_format_optional_string(build_file)}, 86 | ) 87 | """ 88 | ) 89 | -------------------------------------------------------------------------------- /dependency_manager/src/edm_tool/edm-completion.bash: -------------------------------------------------------------------------------- 1 | # AUTOMATCALLY GENERATED by `shtab` 2 | 3 | 4 | 5 | _shtab_edm_tool_option_strings=('-h' '--help' '--version' '--workspace' '--working_dir' '--out' '--include_deps' '--config' '--create-vscode-workspace' '--update' '--cmake' '--verbose' '--nocolor' '--install-bash-completion' '--create-config' '--external-in-config' '--include-remotes' '--create-snapshot' '--git-info' '--git-fetch' '--git-pull') 6 | 7 | 8 | 9 | 10 | 11 | _shtab_edm_tool_pos_0_nargs=A... 
12 | _shtab_edm_tool__h_nargs=0 13 | _shtab_edm_tool___help_nargs=0 14 | _shtab_edm_tool___version_nargs=0 15 | _shtab_edm_tool___include_deps_nargs=0 16 | _shtab_edm_tool___create_vscode_workspace_nargs=0 17 | _shtab_edm_tool___update_nargs=0 18 | _shtab_edm_tool___cmake_nargs=0 19 | _shtab_edm_tool___verbose_nargs=0 20 | _shtab_edm_tool___nocolor_nargs=0 21 | _shtab_edm_tool___install_bash_completion_nargs=0 22 | _shtab_edm_tool___external_in_config_nargs=0 23 | _shtab_edm_tool___include_remotes_nargs=* 24 | _shtab_edm_tool___create_snapshot_nargs=? 25 | _shtab_edm_tool___git_info_nargs=0 26 | _shtab_edm_tool___git_fetch_nargs=0 27 | _shtab_edm_tool___git_pull_nargs=* 28 | 29 | 30 | # $1=COMP_WORDS[1] 31 | _shtab_compgen_files() { 32 | compgen -f -- $1 # files 33 | } 34 | 35 | # $1=COMP_WORDS[1] 36 | _shtab_compgen_dirs() { 37 | compgen -d -- $1 # recurse into subdirs 38 | } 39 | 40 | # $1=COMP_WORDS[1] 41 | _shtab_replace_nonword() { 42 | echo "${1//[^[:word:]]/_}" 43 | } 44 | 45 | # set default values (called for the initial parser & any subparsers) 46 | _set_parser_defaults() { 47 | local subparsers_var="${prefix}_subparsers[@]" 48 | sub_parsers=${!subparsers_var} 49 | 50 | local current_option_strings_var="${prefix}_option_strings[@]" 51 | current_option_strings=${!current_option_strings_var} 52 | 53 | completed_positional_actions=0 54 | 55 | _set_new_action "pos_${completed_positional_actions}" true 56 | } 57 | 58 | # $1=action identifier 59 | # $2=positional action (bool) 60 | # set all identifiers for an action's parameters 61 | _set_new_action() { 62 | current_action="${prefix}_$(_shtab_replace_nonword $1)" 63 | 64 | local current_action_compgen_var=${current_action}_COMPGEN 65 | current_action_compgen="${!current_action_compgen_var}" 66 | 67 | local current_action_choices_var="${current_action}_choices" 68 | current_action_choices="${!current_action_choices_var}" 69 | 70 | local current_action_nargs_var="${current_action}_nargs" 71 | if [ -n 
"${!current_action_nargs_var}" ]; then 72 | current_action_nargs="${!current_action_nargs_var}" 73 | else 74 | current_action_nargs=1 75 | fi 76 | 77 | current_action_args_start_index=$(( $word_index + 1 )) 78 | 79 | current_action_is_positional=$2 80 | } 81 | 82 | # Notes: 83 | # `COMPREPLY`: what will be rendered after completion is triggered 84 | # `completing_word`: currently typed word to generate completions for 85 | # `${!var}`: evaluates the content of `var` and expand its content as a variable 86 | # hello="world" 87 | # x="hello" 88 | # ${!x} -> ${hello} -> "world" 89 | _shtab_edm_tool() { 90 | local completing_word="${COMP_WORDS[COMP_CWORD]}" 91 | COMPREPLY=() 92 | 93 | prefix=_shtab_edm_tool 94 | word_index=0 95 | _set_parser_defaults 96 | word_index=1 97 | 98 | # determine what arguments are appropriate for the current state 99 | # of the arg parser 100 | while [ $word_index -ne $COMP_CWORD ]; do 101 | local this_word="${COMP_WORDS[$word_index]}" 102 | 103 | if [[ -n $sub_parsers && " ${sub_parsers[@]} " =~ " ${this_word} " ]]; then 104 | # valid subcommand: add it to the prefix & reset the current action 105 | prefix="${prefix}_$(_shtab_replace_nonword $this_word)" 106 | _set_parser_defaults 107 | fi 108 | 109 | if [[ " ${current_option_strings[@]} " =~ " ${this_word} " ]]; then 110 | # a new action should be acquired (due to recognised option string or 111 | # no more input expected from current action); 112 | # the next positional action can fill in here 113 | _set_new_action $this_word false 114 | fi 115 | 116 | if [[ "$current_action_nargs" != "*" ]] && \ 117 | [[ "$current_action_nargs" != "+" ]] && \ 118 | [[ "$current_action_nargs" != *"..." 
]] && \ 119 | (( $word_index + 1 - $current_action_args_start_index >= \ 120 | $current_action_nargs )); then 121 | $current_action_is_positional && let "completed_positional_actions += 1" 122 | _set_new_action "pos_${completed_positional_actions}" true 123 | fi 124 | 125 | let "word_index+=1" 126 | done 127 | 128 | # Generate the completions 129 | 130 | if [[ "${completing_word}" == -* ]]; then 131 | # optional argument started: use option strings 132 | COMPREPLY=( $(compgen -W "${current_option_strings[*]}" -- "${completing_word}") ) 133 | else 134 | # use choices & compgen 135 | local IFS=$'\n' 136 | COMPREPLY=( $(compgen -W "${current_action_choices}" -- "${completing_word}") \ 137 | $([ -n "${current_action_compgen}" ] \ 138 | && "${current_action_compgen}" "${completing_word}") ) 139 | fi 140 | 141 | return 0 142 | } 143 | 144 | complete -o filenames -F _shtab_edm_tool edm 145 | -------------------------------------------------------------------------------- /dependency_manager/src/edm_tool/edm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | # Copyright Pionix GmbH and Contributors to EVerest 5 | # 6 | """Everest Dependency Manager.""" 7 | import argparse 8 | import logging 9 | import json 10 | from typing import Tuple 11 | from jinja2 import Environment, FileSystemLoader 12 | import yaml 13 | import os 14 | from pathlib import Path, PurePath 15 | import subprocess 16 | import sys 17 | import shutil 18 | import multiprocessing 19 | import requests 20 | import re 21 | import datetime 22 | 23 | from edm_tool import bazel 24 | 25 | 26 | log = logging.getLogger("edm") 27 | edm_config_dir_path = Path("~/.config/everest").expanduser().resolve() 28 | edm_config_path = edm_config_dir_path / "edm.yaml" 29 | metadata_timeout_s = 10 30 | 31 | 32 | class LocalDependencyCheckoutError(Exception): 33 | """Exception thrown when a dependency could not be checked 
out.""" 34 | 35 | 36 | def install_bash_completion(path=Path("~/.local/share/bash-completion")): 37 | """Install bash completion to a user provided path.""" 38 | source_bash_completion_file_path = Path(__file__).parent / "edm-completion.bash" 39 | target_bash_completion_dir = path.expanduser() 40 | target_bash_completion_dir_file_path = target_bash_completion_dir / "edm.sh" 41 | bash_completion_in_home = Path("~/.bash_completion").expanduser() 42 | if not target_bash_completion_dir.exists(): 43 | target_bash_completion_dir.expanduser().mkdir(parents=True, exist_ok=True) 44 | shutil.copy(source_bash_completion_file_path, target_bash_completion_dir_file_path) 45 | log.debug("Updated edm bash completion file") 46 | 47 | if not bash_completion_in_home.exists(): 48 | with open(bash_completion_in_home, 'w', encoding='utf-8') as bash_completion_dotfile: 49 | bash_completion_dotfile.write("for bash_completion_file in ~/.local/share/bash-completion/* ; do\n" 50 | " [ -f \"$bash_completion_file\" ] && . $bash_completion_file\n" 51 | "done") 52 | log.info(f"Updated \"{bash_completion_in_home}\" to point to edm bash completion " 53 | f"in \"{target_bash_completion_dir}\"") 54 | else: 55 | log.warning(f"\"{bash_completion_in_home}\" exists, could not automatically install bash-completion") 56 | log.info("Please add the following entry to your .bashrc:") 57 | log.info(f". 
{target_bash_completion_dir}/edm.sh") 58 | 59 | 60 | class Color: 61 | """Represents a subset of terminal color codes for use in log messages.""" 62 | 63 | DEFAULT = "" 64 | CLEAR = "\033[0m" 65 | BLACK = "\033[30m" 66 | GREY = "\033[90m" 67 | WHITE = "\033[37m" 68 | RED = "\033[31m" 69 | GREEN = "\033[32m" 70 | BLUE = "\033[34m" 71 | YELLOW = "\033[33m" 72 | MAGENTA = "\033[35m" 73 | CYAN = "\033[36m" 74 | 75 | @classmethod 76 | def set_none(cls): 77 | """Remove the color codes for no-color mode.""" 78 | Color.DEFAULT = "" 79 | Color.CLEAR = "" 80 | Color.BLACK = "" 81 | Color.GREY = "" 82 | Color.WHITE = "" 83 | Color.RED = "" 84 | Color.GREEN = "" 85 | Color.BLUE = "" 86 | Color.YELLOW = "" 87 | Color.MAGENTA = "" 88 | Color.CYAN = "" 89 | 90 | 91 | class ColorFormatter(logging.Formatter): 92 | """Logging formatter that uses pre-configured colors for different logging levels.""" 93 | 94 | def __init__(self, color=True, formatting_str="[%(name)s]: %(message)s"): 95 | """Initialize the ColorFormatter.""" 96 | super().__init__() 97 | self.color = color 98 | if not color: 99 | Color.set_none() 100 | self.formatting_str = formatting_str 101 | self.colored_formatting_strings = { 102 | logging.DEBUG: self.build_colored_formatting_string(Color.GREY), 103 | logging.INFO: self.build_colored_formatting_string(Color.CLEAR), 104 | logging.WARNING: self.build_colored_formatting_string(Color.YELLOW), 105 | logging.ERROR: self.build_colored_formatting_string(Color.RED), 106 | logging.CRITICAL: self.build_colored_formatting_string(Color.MAGENTA), 107 | } 108 | 109 | def build_colored_formatting_string(self, color: Color) -> str: 110 | """Build a formatting string with the provided color.""" 111 | if self.color: 112 | return f"{color}{self.formatting_str}{Color.CLEAR}" 113 | return f"{self.formatting_str}" 114 | 115 | def format(self, record): 116 | """Format a record with the colored formatter.""" 117 | return 
logging.Formatter(self.colored_formatting_strings[record.levelno]).format(record) 118 | 119 | 120 | def quote(lst: list) -> list: 121 | """Put quotation marks around every list element, which is assumed to be a str.""" 122 | return [f"\"{element}\"" for element in lst] 123 | 124 | 125 | def prettify(lst: list, indent: int) -> str: 126 | """Construct string from list elements with the given indentation.""" 127 | output = "" 128 | space = " " * indent 129 | for out in lst: 130 | if out and out != "\n": 131 | if len(output) > 0: 132 | output += f"\n{space}{out}" 133 | else: 134 | output += f"{space}{out}" 135 | return output 136 | 137 | 138 | def pretty_print(lst: list, indent: int, log_level: int): 139 | """Debug log every list element with the given indentation.""" 140 | space = " " * indent 141 | for out in lst: 142 | if out and out != "\n": 143 | if log_level == logging.DEBUG: 144 | log.debug(f"{space}{out}") 145 | elif log_level == logging.INFO: 146 | log.info(f"{space}{out}") 147 | elif log_level == logging.WARNING: 148 | log.warning(f"{space}{out}") 149 | elif log_level == logging.ERROR: 150 | log.error(f"{space}{out}") 151 | elif log_level == logging.CRITICAL: 152 | log.critical(f"{space}{out}") 153 | else: 154 | log.info(f"{space}{out}") 155 | 156 | 157 | def pretty_print_process(c: subprocess.CompletedProcess, indent: int, log_level: int): 158 | """Pretty print stdout and stderr of a CompletedProcess object.""" 159 | stdout = c.stdout.decode("utf-8").split("\n") 160 | pretty_print(stdout, indent, log_level) 161 | 162 | stderr = c.stderr.decode("utf-8").split("\n") 163 | pretty_print(stderr, indent, log_level) 164 | 165 | 166 | def pattern_matches(string: str, patterns: list) -> bool: 167 | """Return true if one of the patterns match with the string, false otherwise.""" 168 | matches = False 169 | for pattern in patterns: 170 | if PurePath(string).match(pattern): 171 | log.debug(f"Pattern \"{pattern}\" accepts string \"{string}\"") 172 | matches = True 173 | 
break 174 | return matches 175 | 176 | 177 | class GitInfo: 178 | """Provide information about git repositories.""" 179 | 180 | @classmethod 181 | def is_repo(cls, path: Path) -> bool: 182 | """Return true if path is a top-level git repo.""" 183 | try: 184 | result = subprocess.run(["git", "-C", path, "rev-parse", "--git-dir"], 185 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 186 | repo_dir = result.stdout.decode("utf-8").replace("\n", "") 187 | if repo_dir == ".git": 188 | return True 189 | except subprocess.CalledProcessError: 190 | return False 191 | return False 192 | 193 | @classmethod 194 | def is_dirty(cls, path: Path) -> bool: 195 | """Use git diff to check if the provided directory has uncommitted changes, ignoring untracked files.""" 196 | try: 197 | subprocess.run(["git", "-C", path, "diff", "--quiet", "--exit-code"], 198 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 199 | subprocess.run(["git", "-C", path, "diff", "--cached", "--quiet", "--exit-code"], 200 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 201 | return False 202 | except subprocess.CalledProcessError: 203 | return True 204 | 205 | @classmethod 206 | def is_detached(cls, path: Path) -> bool: 207 | """Check if the git repo at path is in detached HEAD state.""" 208 | try: 209 | subprocess.run(["git", "-C", path, "symbolic-ref", "-q", "HEAD"], 210 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 211 | return False 212 | except subprocess.CalledProcessError: 213 | return True 214 | 215 | @classmethod 216 | def fetch(cls, path: Path) -> bool: 217 | """ 218 | Return true if git-fetch was successful, false if not. 219 | 220 | TODO: distinguish between error codes? 221 | """ 222 | log.debug(f"\"{path.name}\": fetching information from remote. 
This might take a few seconds.") 223 | try: 224 | subprocess.run(["git", "-C", path, "fetch"], 225 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 226 | return True 227 | except subprocess.CalledProcessError as result: 228 | log.error(f"\"{path.name}\" Error during git-fetch: {result.returncode}") 229 | return False 230 | 231 | @classmethod 232 | def pull(cls, path: Path) -> bool: 233 | """ 234 | Return true if git-pull was successful, false if not. 235 | 236 | TODO: distinguish between error codes? 237 | """ 238 | log.info(f"\"{path.name}\": pulling from remote. This might take a few seconds.") 239 | try: 240 | subprocess.run(["git", "-C", path, "pull"], 241 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 242 | return True 243 | except subprocess.CalledProcessError as result: 244 | pretty_stderr = prettify(result.stderr.decode("utf-8").split("\n"), 4) 245 | log.error(f"\"{path.name}\" Error during git-pull: {result.returncode}\n{pretty_stderr}") 246 | return False 247 | 248 | @classmethod 249 | def get_behind(cls, path: Path) -> str: 250 | """Return how many commits behind the repo at path is relative to remote.""" 251 | behind = "" 252 | try: 253 | result = subprocess.run(["git", "-C", path, "rev-list", "--count", "HEAD..@{u}"], 254 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 255 | behind = result.stdout.decode("utf-8").replace("\n", "") 256 | except subprocess.CalledProcessError: 257 | return behind 258 | 259 | return behind 260 | 261 | @classmethod 262 | def get_ahead(cls, path: Path) -> str: 263 | """Return how many commits ahead the repo at path is relative to remote.""" 264 | ahead = "" 265 | try: 266 | result = subprocess.run(["git", "-C", path, "rev-list", "--count", "@{u}..HEAD"], 267 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 268 | ahead = result.stdout.decode("utf-8").replace("\n", "") 269 | except subprocess.CalledProcessError: 270 | return ahead 271 | 272 | return ahead 273 | 274 | 
@classmethod 275 | def get_tag(cls, path: Path) -> str: 276 | """Return the current tag of the repo at path, or an empty str.""" 277 | tag = "" 278 | try: 279 | result = subprocess.run(["git", "-C", path, "describe", "--exact-match", "--tags"], 280 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 281 | tag = result.stdout.decode("utf-8").replace("\n", "") 282 | except subprocess.CalledProcessError: 283 | return tag 284 | 285 | return tag 286 | 287 | @classmethod 288 | def get_branch(cls, path: Path) -> str: 289 | """Return the current branch of the repo at path, or an empty str.""" 290 | branch = "" 291 | try: 292 | result = subprocess.run(["git", "-C", path, "symbolic-ref", "--short", "-q", "HEAD"], 293 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 294 | branch = result.stdout.decode("utf-8").replace("\n", "") 295 | except subprocess.CalledProcessError: 296 | return branch 297 | 298 | return branch 299 | 300 | @classmethod 301 | def get_remote_branch(cls, path: Path) -> str: 302 | """Return the remote of the current branch of the repo at path, or an empty str.""" 303 | remote_branch = "" 304 | try: 305 | result = subprocess.run(["git", "-C", path, "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"], 306 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 307 | remote_branch = result.stdout.decode("utf-8").replace("\n", "") 308 | except subprocess.CalledProcessError: 309 | return remote_branch 310 | 311 | return remote_branch 312 | 313 | @classmethod 314 | def get_remote_url(cls, path: Path) -> str: 315 | """Return the remote url of the repo at path, or an empty str.""" 316 | remote_url = "" 317 | try: 318 | result = subprocess.run(["git", "-C", path, "config", "--get", "remote.origin.url"], 319 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 320 | remote_url = result.stdout.decode("utf-8").replace("\n", "") 321 | except subprocess.CalledProcessError: 322 | return remote_url 323 | 324 | return remote_url 
325 | 326 | @classmethod 327 | def get_remote_tags(cls, remote_url: str) -> list: 328 | """Return the remote tags of the repo at path, or an empty list.""" 329 | remote_tags = [] 330 | try: 331 | result = subprocess.run(["git", "-c", "versionsort.suffix=-", "ls-remote", "--tags", "--sort=-v:refname", "--refs", "--quiet", remote_url], 332 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 333 | result_list = result.stdout.decode("utf-8").split("\n") 334 | for entry in result_list: 335 | ref_and_tag = entry.split("\t") 336 | if len(ref_and_tag) > 1: 337 | remote_tags.append(ref_and_tag[1].replace("refs/tags/", "")) 338 | except subprocess.CalledProcessError: 339 | return remote_tags 340 | 341 | return remote_tags 342 | 343 | @classmethod 344 | def get_remote_branches(cls, remote_url: str) -> list: 345 | """Return the remote branches of the repo at path, or an empty list.""" 346 | remote_branches = [] 347 | try: 348 | result = subprocess.run(["git", "ls-remote", "--heads", "--quiet", remote_url], 349 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 350 | result_list = result.stdout.decode("utf-8").split("\n") 351 | for entry in result_list: 352 | ref_and_tag = entry.split("\t") 353 | if len(ref_and_tag) > 1: 354 | remote_branches.append(ref_and_tag[1].replace("refs/heads/", "")) 355 | except subprocess.CalledProcessError: 356 | return remote_branches 357 | 358 | return remote_branches 359 | 360 | @classmethod 361 | def get_current_rev(cls, path: Path) -> str: 362 | """Return the currently checked out ref of the repo at path, or an empty str.""" 363 | rev = "" 364 | try: 365 | result = subprocess.run(["git", "-C", path, "rev-parse", "HEAD"], 366 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 367 | rev = result.stdout.decode("utf-8").replace("\n", "") 368 | except subprocess.CalledProcessError: 369 | return rev 370 | 371 | return rev 372 | 373 | @classmethod 374 | def get_current_short_rev(cls, path: Path) -> str: 375 | """Return 
the currently checked out short ref of the repo at path, or an empty str.""" 376 | rev = "" 377 | try: 378 | result = subprocess.run(["git", "-C", path, "rev-parse", "--short", "HEAD"], 379 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 380 | rev = result.stdout.decode("utf-8").replace("\n", "") 381 | except subprocess.CalledProcessError: 382 | return rev 383 | 384 | return rev 385 | 386 | @classmethod 387 | def is_tag(cls, remote: str, tag: str) -> bool: 388 | """Return True if the given tag can be found on the given remote.""" 389 | try: 390 | subprocess.run(["git", "ls-remote", "--exit-code", remote, f"refs/tags/{tag}"], 391 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 392 | return True 393 | except subprocess.CalledProcessError as called_process_error: 394 | if called_process_error.returncode == 2: 395 | return False 396 | return True 397 | 398 | @classmethod 399 | def get_rev(cls, remote: str, branch: str) -> str: 400 | """Return the rev of the given branch on the given remote or the branch name on error.""" 401 | try: 402 | result = subprocess.run(["git", "ls-remote", "--exit-code", remote, branch], 403 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 404 | rev = result.stdout.decode("utf-8").replace("\n", "") 405 | return rev.split()[0] 406 | except subprocess.CalledProcessError: 407 | return branch 408 | 409 | @classmethod 410 | def get_git_repo_info(cls, repo_path: Path, fetch=False) -> dict: 411 | """ 412 | Return useful information about a repository a the given path. 
413 | 414 | Returns a default dictionary if the path is no git repo 415 | """ 416 | repo_info = { 417 | 'is_repo': False, 418 | 'fetch_worked': None, 419 | 'remote_branch': None, 420 | 'behind': None, 421 | 'ahead': None, 422 | 'tag': None, 423 | 'branch': None, 424 | 'dirty': None, 425 | 'detached': None, 426 | 'rev': None, 427 | 'short_rev': None, 428 | 'url': None, 429 | } 430 | if GitInfo.is_repo(repo_path): 431 | repo_info["is_repo"] = True 432 | if fetch: 433 | repo_info["fetch_worked"] = GitInfo.fetch(repo_path) 434 | repo_info["remote_branch"] = GitInfo.get_remote_branch(repo_path) 435 | repo_info["behind"] = GitInfo.get_behind(repo_path) 436 | repo_info["ahead"] = GitInfo.get_ahead(repo_path) 437 | repo_info["tag"] = GitInfo.get_tag(repo_path) 438 | repo_info["branch"] = GitInfo.get_branch(repo_path) 439 | repo_info["dirty"] = GitInfo.is_dirty(repo_path) 440 | repo_info["detached"] = GitInfo.is_detached(repo_path) 441 | repo_info["rev"] = GitInfo.get_current_rev(repo_path) 442 | repo_info["short_rev"] = GitInfo.get_current_short_rev(repo_path) 443 | repo_info["url"] = GitInfo.get_remote_url(repo_path) 444 | return repo_info 445 | 446 | @classmethod 447 | def get_git_info(cls, path: Path, fetch=False) -> dict: 448 | """ 449 | Return useful information about a repository a the given path. 
450 | 451 | TODO: return type should be a well defined object 452 | Returns an empty dictionary if the path is no git repo 453 | """ 454 | git_info = {} 455 | subdirs = list(path.glob("*/")) 456 | for subdir in subdirs: 457 | subdir_path = Path(subdir) 458 | repo_info = GitInfo.get_git_repo_info(subdir_path, fetch) 459 | 460 | git_info[subdir] = repo_info 461 | return git_info 462 | 463 | @classmethod 464 | def pull_all(cls, path: Path, repos=None) -> dict: 465 | """Pull all repositories in the given path, or a specific list of repos.""" 466 | git_info = {} 467 | subdirs = list(path.glob("*/")) 468 | for subdir in subdirs: 469 | subdir_path = Path(subdir) 470 | if repos is not None and len(repos) > 0 and subdir_path.name not in repos: 471 | log.debug(f"Skipping {subdir_path.name} because it is not in the list of provided repos.") 472 | continue 473 | pull_info = {'is_repo': False} 474 | if GitInfo.is_repo(subdir_path): 475 | pull_info["is_repo"] = True 476 | pull_info["pull_worked"] = GitInfo.pull(subdir_path) 477 | 478 | git_info[subdir] = pull_info 479 | return git_info 480 | 481 | @classmethod 482 | def checkout_rev(cls, checkout_dir: Path, rev: str): 483 | """Check out the given rev in the given checkout_dir""" 484 | try: 485 | result = subprocess.run(["git", "-C", checkout_dir, "checkout", rev], 486 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 487 | pretty_print_process(result, 4, logging.DEBUG) 488 | except subprocess.CalledProcessError as result: 489 | pretty_print_process(result, 4, logging.DEBUG) 490 | 491 | 492 | class EDM: 493 | """Provide dependecy management functionality.""" 494 | 495 | @classmethod 496 | def print_git_info(cls, git_info): 497 | dirty_count = 0 498 | repo_count = 0 499 | for path, info in git_info.items(): 500 | if not info["is_repo"]: 501 | log.debug(f"\"{path.name}\" is not a git repository.") 502 | continue 503 | repo_count += 1 504 | tag_or_branch = "" 505 | if info["tag"]: 506 | tag_or_branch += f" @ tag: 
{info['tag']}" 507 | if info["branch"]: 508 | tag_or_branch += f" @ branch: {info['branch']}" 509 | 510 | remote_info = "" 511 | if info["detached"]: 512 | remote_info = f" [{Color.YELLOW}detached HEAD @ {info['rev']}{Color.CLEAR}]" 513 | else: 514 | if "branch" in info and "remote_branch" in info: 515 | remote_info = (f" [remote: {Color.RED}{info['remote_branch']}{Color.CLEAR}]") 516 | behind_ahead = "" 517 | if "behind" in info and info["behind"] and info["behind"] != "0": 518 | behind_ahead += f"behind {Color.RED}{info['behind']}{Color.CLEAR}" 519 | if "ahead" in info and info["ahead"] and info["ahead"] != "0": 520 | if behind_ahead: 521 | behind_ahead += " " 522 | behind_ahead += f"ahead {Color.GREEN}{info['ahead']}{Color.CLEAR}" 523 | if behind_ahead: 524 | remote_info += f" [{behind_ahead}]" 525 | dirty = f"[{Color.GREEN}clean{Color.CLEAR}]" 526 | if info["dirty"]: 527 | dirty = f"[{Color.RED}dirty{Color.CLEAR}]" 528 | dirty_count += 1 529 | 530 | log.info(f"\"{Color.GREEN}{path.name}{Color.CLEAR}\"{tag_or_branch}{remote_info} {dirty}") 531 | 532 | if dirty_count > 0: 533 | log.info(f"{dirty_count}/{repo_count} repositories are dirty.") 534 | 535 | @classmethod 536 | def show_git_info(cls, working_dir: Path, workspace: str, git_fetch: bool): 537 | """Log information about git repositories.""" 538 | git_info_working_dir = working_dir 539 | if workspace: 540 | git_info_working_dir = Path(workspace).expanduser().resolve() 541 | log.info("Workspace provided, executing git-info in workspace") 542 | log.info(f"Git info for \"{git_info_working_dir}\":") 543 | if git_fetch: 544 | log.info("Using git-fetch to update remote information. 
This might take a few seconds.") 545 | git_info = GitInfo.get_git_info(git_info_working_dir, git_fetch) 546 | EDM.print_git_info(git_info) 547 | 548 | @classmethod 549 | def setup_workspace_from_config(cls, workspace: str, config: str, update: bool, create_vscode_workspace: bool): 550 | """Setup a workspace from the provided config, update an existing workspace if specified.""" 551 | workspace_dir = Path(workspace).expanduser().resolve() 552 | 553 | config_path = Path(config).expanduser().resolve() 554 | if config_path.exists(): 555 | log.info(f"Using config \"{config_path}\"") 556 | else: 557 | log.error(f"Config file \"{config_path}\" does not exists, stopping.") 558 | sys.exit(1) 559 | config = parse_config(config_path) 560 | try: 561 | workspace_checkout = setup_workspace(workspace_dir, config, update) 562 | except LocalDependencyCheckoutError: 563 | log.error("Could not setup workspace. Stopping.") 564 | sys.exit(1) 565 | # copy config into workspace 566 | try: 567 | config_destination_path = workspace_dir / "workspace-config.yaml" 568 | shutil.copyfile(config_path, config_destination_path) 569 | log.info(f"Copied config into \"{config_destination_path}\"") 570 | except shutil.SameFileError: 571 | log.info(f"Did not copy workspace config because source and destination are the same \"{config_path}\"") 572 | 573 | if create_vscode_workspace: 574 | create_vscode_workspace_file(workspace_dir, workspace_checkout) 575 | 576 | @classmethod 577 | def config_from_dependencies(cls, dependencies: dict, external_in_config: bool, include_remotes: list) -> dict: 578 | """Assemble a config from the given dependencies.""" 579 | new_config = {} 580 | if external_in_config: 581 | new_config = {**new_config, **dependencies} 582 | log.debug("Including external dependencies in generated config.") 583 | else: 584 | for name, entry in dependencies.items(): 585 | if pattern_matches(entry["git"], include_remotes): 586 | log.debug(f"Adding \"{name}\" to config. 
") 587 | new_config[name] = entry 588 | else: 589 | log.debug(f"Did not add \"{name}\" to generated config because it is an external dependency.") 590 | 591 | return new_config 592 | 593 | @classmethod 594 | def create_config(cls, working_dir: Path, new_config: dict, external_in_config: bool, include_remotes: list) -> dict: 595 | """Scan all first-level subdirectories in working_dir for git repositories that might have been missed.""" 596 | for subdir in list(working_dir.glob("*/")): 597 | subdir_path = Path(subdir) 598 | name = subdir_path.name 599 | if name in new_config: 600 | log.debug(f"Skipping {name} which already is in config.") 601 | continue 602 | # FIXME: change this when we support alias info for a repo. 603 | # then this name might not be not equal to the dep name anymore 604 | if not subdir_path.is_dir(): 605 | log.debug(f"Skipping {name} because it is not a directory.") 606 | continue 607 | log.debug(f"Checking {subdir_path}: {subdir_path.name}") 608 | 609 | entry = {} 610 | 611 | remote = GitInfo.get_remote_url(subdir_path) 612 | if not remote: 613 | log.warning(f"Skipping {name} because remote could not be determined.") 614 | log.debug(f" remote: {remote}") 615 | if not external_in_config and not pattern_matches(remote, include_remotes): 616 | log.debug(f"Skipping {name} because it is an external dependency.") 617 | continue 618 | entry["git"] = remote 619 | # TODO: check if there already is another config entry with this remote 620 | try: 621 | branch_result = subprocess.run(["git", "-C", subdir_path, "symbolic-ref", "--short", "-q", "HEAD"], 622 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 623 | branch = branch_result.stdout.decode("utf-8").replace("\n", "") 624 | log.debug(f" branch: {branch}") 625 | entry["git_tag"] = branch 626 | except subprocess.CalledProcessError: 627 | try: 628 | tag_result = subprocess.run(["git", "-C", subdir_path, "describe", "--exact-match", "--tags"], 629 | stdout=subprocess.PIPE, 
stderr=subprocess.PIPE, check=True)
                    tag = tag_result.stdout.decode("utf-8").replace("\n", "")
                    log.debug(f" tag: {tag}")
                    entry["git_tag"] = tag
                except subprocess.CalledProcessError:
                    # neither a branch nor an exact tag could be resolved for this repo
                    log.warning(f"Skipping {name} because no branch or tag could be determined.")
                    continue
            new_config[name] = entry

        return new_config

    @classmethod
    def create_snapshot(cls, working_dir: Path, config_path: Path) -> dict:
        """Return the config at config_path augmented with the current remote url, rev and (if any) tag of every repo in working_dir."""
        git_info = GitInfo.get_git_info(working_dir, False)

        config = parse_config(config_path)
        for path, info in git_info.items():
            if not info["is_repo"]:
                log.debug(f"{path.name} is not a repo, path: {path}")
                continue
            if path.name not in config:
                config[path.name] = {}
            config[path.name]["git"] = info["url"]
            config[path.name]["git_rev"] = info["rev"]
            if info["tag"]:
                config[path.name]["git_tag"] = info["tag"]
        return config

    @classmethod
    def write_config(cls, new_config: dict, out_path: str, silent=False):
        """Write the given config to the given path."""
        new_config_path = Path(out_path).expanduser().resolve()
        for config_entry_name, _ in new_config.items():
            if not silent:
                log.info(f"Adding \"{Color.GREEN}{config_entry_name}{Color.CLEAR}\" to config.")
        with open(new_config_path, 'w', encoding='utf-8') as new_config_file:
            yaml.dump(new_config, new_config_file)
        if not silent:
            log.info(f"Successfully saved config \"{new_config_path}\".")

    @classmethod
    def pull(cls, working_dir: Path, repos: list):
        """Pull all repos in working_dir or a restricted list of repos when provided."""
        pull_info = GitInfo.pull_all(working_dir, repos)
        pull_error_count = 0
        repo_count = 0
        for path, info in pull_info.items():
            if info["is_repo"]:
                repo_count += 1
                pulled = f"[{Color.GREEN}pulled{Color.CLEAR}]"
                if not info["pull_worked"]:
                    pulled = f"[{Color.RED}error during git-pull{Color.CLEAR}]"
                    pull_error_count += 1

                log.info(f"\"{Color.GREEN}{path.name}{Color.CLEAR}\"{pulled}")
            else:
                log.debug(f"\"{path.name}\" is not a git repository.")
        if pull_error_count > 0:
            log.info(f"{pull_error_count}/{repo_count} repositories could not be pulled.")

    @classmethod
    def scan_dependencies(cls, working_dir: Path, include_deps: list, files_to_ignore: set = None) -> Tuple[dict, set]:
        """Scan working_dir for dependencies."""
        log.info(f"Scanning \"{working_dir}\" for dependencies.")
        dependencies_files = set(list(working_dir.glob("**/dependencies.yaml")) +
                                 list(working_dir.glob("**/dependencies.yml")))

        if files_to_ignore:
            dependencies_files.difference_update(files_to_ignore)

        dependencies = {}
        for dependencies_file in dependencies_files:
            if dependencies_file.is_file():
                # filter _deps folders
                # NOTE(review): a dependencies file directly inside a top-level "_deps"
                # directory ("_deps/dependencies.yaml") is not matched by "_deps/" — confirm intended
                if not include_deps:
                    relative_path = dependencies_file.relative_to(working_dir).parent.as_posix()
                    if "_deps/" in relative_path:
                        log.info(
                            f"Ignoring dependencies in \"{dependencies_file}\" "
                            f"because this file is located in a \"_deps\" subdirectory.")
                        continue
                log.info(f"Parsing dependencies file: {dependencies_file}")
                with open(dependencies_file, encoding='utf-8') as dep:
                    try:
                        dependencies_yaml = yaml.safe_load(dep)
                        if dependencies_yaml is not None:
                            # later files overwrite entries of the same name
                            dependencies = {**dependencies, **dependencies_yaml}
                    except yaml.YAMLError as e:
                        log.error(f"Error parsing yaml of \"{dependencies_file}\": {e}")

        return (dependencies, dependencies_files)

    @classmethod
    def parse_workspace_files(cls, workspace_files: list) -> dict:
        """Parse the given list of workspace_files and return a workspace dict when exactly one workspace file is in the list"""
        workspace = {}
725 | if len(workspace_files) == 1: 726 | workspace_file = Path(workspace_files[0]).expanduser().resolve() 727 | if workspace_file.is_file(): 728 | log.info(f"Using workspace file: {workspace_file}") 729 | with open(workspace_file, encoding='utf-8') as wsp: 730 | try: 731 | workspace_yaml = yaml.safe_load(wsp) 732 | if workspace_yaml is not None: 733 | workspace = {**workspace, **workspace_yaml} 734 | except yaml.YAMLError as e: 735 | log.error(f"Error parsing yaml of {workspace_file}: {e}") 736 | return workspace 737 | 738 | @classmethod 739 | def parse_workspace_directory(cls, workspace_dir: Path) -> dict: 740 | """Parse the given workspace_dir for possible local dependencies""" 741 | workspace = {} 742 | workspace["local_dependencies"] = {} 743 | workspace["workspace"] = workspace_dir.as_posix() 744 | for entry in workspace_dir.iterdir(): 745 | if not entry.is_dir(): 746 | pass 747 | workspace["local_dependencies"][entry.name] = {} 748 | workspace["local_dependencies"][entry.name] = {"git_tag": GitInfo.get_branch(entry)} 749 | 750 | return workspace 751 | 752 | @classmethod 753 | def checkout_local_dependencies(cls, workspace: dict, workspace_arg: str, dependencies: dict) -> list: 754 | """Checkout local dependencies in the workspace.""" 755 | checkout = [] 756 | if "local_dependencies" in workspace: 757 | workspace_dir = None 758 | # workspace given by command line always takes precedence 759 | if workspace_arg is not None: 760 | workspace_dir = Path(workspace_arg).expanduser().resolve() 761 | log.info(f"Using workspace directory \"{workspace_dir}\" from command line.") 762 | elif "workspace" in workspace: 763 | workspace_dir = Path(workspace["workspace"]).expanduser().resolve() 764 | else: 765 | print("Cannot checkout requested dependencies without a workspace directory, stopping.") 766 | sys.exit(1) 767 | for name, entry in workspace["local_dependencies"].items(): 768 | if name not in dependencies: 769 | log.debug(f"{name}: listed in workspace, but not in 
dependencies. Ignoring.") 770 | continue 771 | checkout_dir = workspace_dir / name 772 | git_tag = None 773 | if "git_tag" in dependencies[name]: 774 | git_tag = dependencies[name]["git_tag"] 775 | if entry is not None and "git_tag" in entry: 776 | git_tag = entry["git_tag"] 777 | checkout.append(checkout_local_dependency( 778 | name, dependencies[name]["git"], git_tag, None, checkout_dir, True)) 779 | 780 | return checkout 781 | 782 | @classmethod 783 | def write_cmake(cls, workspace: dict, checkout: list, dependencies: dict, out_file: Path): 784 | """Generate a CMake file containing the dependencies in the given out_file.""" 785 | templates_path = Path(__file__).parent / "templates" 786 | env = Environment( 787 | loader=FileSystemLoader(templates_path), 788 | trim_blocks=True, 789 | ) 790 | env.filters['quote'] = quote 791 | 792 | cpm_template = env.get_template("cpm.jinja") 793 | render = cpm_template.render({ 794 | "dependencies": dependencies, 795 | "checkout": checkout, 796 | "workspace": workspace}) 797 | 798 | with open(out_file, 'w', encoding='utf-8') as out: 799 | log.info(f"Saving dependencies in: {out_file}") 800 | out.write(render) 801 | 802 | @classmethod 803 | def check_github_key(cls) -> bool: 804 | """Checks if a public key is stored at github.""" 805 | valid = False 806 | try: 807 | subprocess.run(["ssh", "-T", "git@github.com"], 808 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) 809 | except subprocess.CalledProcessError as process_error: 810 | if process_error.returncode == 1: 811 | valid = True 812 | 813 | return valid 814 | 815 | @classmethod 816 | def write_config_from_scanned_dependencies(cls, working_dir: Path, include_deps: list, 817 | external_in_config: bool, include_remotes: list, config_path: Path): 818 | """Writes a config file from the scanned dependencies in working_dir""" 819 | (dependencies, _) = EDM.scan_dependencies(working_dir, include_deps) 820 | new_config = EDM.config_from_dependencies(dependencies, 
external_in_config, include_remotes)
        new_config = EDM.create_config(working_dir, new_config, external_in_config, include_remotes)
        EDM.write_config(new_config, config_path)


def checkout_local_dependency(name: str, git: str, git_tag: str, git_rev: str, checkout_dir: Path, keep_branch=False) -> dict:
    """
    Clone local dependency into checkout_dir.

    If the directory already exists only switch branches if the git repo is not dirty or keep_branch is False
    """
    def clone_dependency_repo(git: str, git_tag: str, checkout_dir: Path) -> None:
        """Clone given git repository at the given git_tag into checkout_dir."""
        git_clone_args = [git, checkout_dir]
        if git_tag:
            git_clone_args = ["--branch", git_tag, git, checkout_dir]
        else:
            log.debug(" No git-tag specified, cloning default branch.")
        git_clone_cmd = ["git", "clone"] + git_clone_args

        try:
            result = subprocess.run(git_clone_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
            pretty_print_process(result, 4, logging.DEBUG)
        except subprocess.CalledProcessError as e:
            error_message = " Error while cloning git repository during local dependency checkout:"
            log.warning(error_message)
            pretty_print(e.stderr.decode().strip().split("\n"), 6, logging.WARNING)
            raise LocalDependencyCheckoutError(error_message) from e

    log.info(f"Setting up dependency \"{Color.GREEN}{name}{Color.CLEAR}\" in workspace")
    log.debug(f" git-remote: \"{git}\"")
    log.debug(f" git-tag: \"{git_tag}\"")
    log.debug(f" git-rev: \"{git_rev}\"")
    log.debug(f" local directory: \"{checkout_dir}\"")
    git_tag_is_git_rev = False
    if checkout_dir.exists():
        log.debug(f" ... the directory for dependency \"{name}\" already exists at \"{checkout_dir}\".")
        # check if git is dirty
        if GitInfo.is_dirty(checkout_dir):
            log.debug(" Repo is dirty, nothing will be done to this repo.")
        elif keep_branch:
            log.debug(" Keeping currently checked out branch.")
        else:
            # if the repo is clean we can safely switch branches
            if git_tag is not None:
                log.debug(f" Repo is not dirty, checking out requested git tag \"{git_tag}\"")
                GitInfo.checkout_rev(checkout_dir, git_tag)
    else:
        try:
            clone_dependency_repo(git, git_tag, checkout_dir)
        except LocalDependencyCheckoutError as e:
            # maybe the given tag was actually a rev?
            if not git_rev:
                # assume git_tag is git_rev
                log.info(f" No git_rev given, but git_tag \"{git_tag}\" might be a git_rev, trying to checkout this rev.")
                git_rev = git_tag
                git_tag = None
                clone_dependency_repo(git, git_tag, checkout_dir)
                git_tag_is_git_rev = True
            elif git_rev and git_tag:
                # both given: retry the clone without the (possibly invalid) tag,
                # the explicit git_rev is checked out below
                log.info(f" Both git_rev and git_tag given, but git_tag \"{git_tag}\" might be a git_rev,"
                         f" trying to checkout git_rev \"{git_rev}\" instead.")
                git_tag = None
                clone_dependency_repo(git, git_tag, checkout_dir)
                git_tag_is_git_rev = True
            else:
                raise e

    if git_rev is not None:
        log.debug(f" Checking out requested git rev \"{git_rev}\"")
        GitInfo.checkout_rev(checkout_dir, git_rev)
        if git_tag_is_git_rev:
            log.info(f" Successfully checked out git_rev \"{git_rev}\" of dependency \"{Color.GREEN}{name}{Color.CLEAR}\"")

    return {"name": name, "path": checkout_dir, "git_tag": git_tag}


def parse_config(path: Path) -> dict:
    """Parse a config file in yaml format at the given path.

    Returns an empty dict if the path is not a file or the yaml is empty/invalid.
    """
    if path.is_file():
        with open(path, encoding='utf-8') as config_file:
            try:
                config_yaml = yaml.safe_load(config_file)
                if config_yaml is not None:
                    return config_yaml
            except yaml.YAMLError as e:
                # NOTE(review): this prints the file object repr, not the path — probably meant {path}
                print(f"Error parsing yaml of {config_file}: {e}")
    return {}


def setup_workspace(workspace_path: Path, config: dict, update=False) -> dict:
    """Setup a workspace at the given workspace_path using the given config.

    NOTE(review): the "update" parameter is currently unused in this function body.
    """
    log.info(f"Setting up workspace \"{workspace_path}\"")
    workspace_checkout = []
    for name, entry in config.items():
        checkout_dir = workspace_path / name
        git_tag = None
        git_rev = None
        if entry is not None:
            if "git_tag" in entry:
                git_tag = entry["git_tag"]
            if "git_rev" in entry:
                git_rev = entry["git_rev"]
        workspace_checkout.append(checkout_local_dependency(name, entry["git"], git_tag, git_rev, checkout_dir))

    log.info("Done.")
    return workspace_checkout


def create_vscode_workspace_file(workspace_path: Path, workspace_checkout: dict):
    """Create a VS Code compatible workspace file at the given workspace_path."""
    vscode_workspace_file_path = workspace_path / f"{workspace_path.name}.code-workspace"

    content = {}
    if vscode_workspace_file_path.exists():
        log.warning(
            f"VS Code workspace file \"{vscode_workspace_file_path}\" exists.")
        log.info("Updating VS Code workspace file.")
        with open(vscode_workspace_file_path, 'r', encoding='utf-8') as ws_file:
            content = json.load(ws_file)
    else:
        log.info(f"Creating VS Code workspace file at: {vscode_workspace_file_path}")
    if "folders" not in content:
        content["folders"] = []
    for entry in workspace_checkout:
        folder = entry["path"].name
        # only add folders that are not yet listed in the workspace file
        if not any(f["path"] == folder for f in content["folders"]):
            log.debug(f"Dependency \"{Color.GREEN}{folder}{Color.GREY}\" added to VS Code workspace file")
            content["folders"].append({"path": folder})
    with open(vscode_workspace_file_path, 'w', encoding='utf-8') as ws_file:
        json.dump(content, ws_file, indent="\t")


def load_edm_config():
    """Load the global edm config from edm_config_path; return None if missing or unparsable."""
    config = None
    if edm_config_path.exists():
        # load config if exists
        log.debug(f"Loading edm config from {edm_config_path}")
        with open(edm_config_path, encoding='utf-8') as edm_config_file:
            try:
                config = yaml.safe_load(edm_config_file)
            except yaml.YAMLError as e:
                log.error(f"Error parsing yaml of \"{edm_config_path}\": {e}")
    return config


def init_handler(args):
    """Handler for the edm init subcommand"""
    working_dir = Path(args.working_dir).expanduser().resolve()

    if args.workspace:
        log.info(f"Using provided workspace path \"{args.workspace}\"")
        working_dir = Path(args.workspace)

    config_path = working_dir / "workspace-config.yaml"

    if args.list:
        tags = GitInfo.get_remote_tags("https://github.com/EVerest/everest-core.git")
        log.info(f"Available everest-core releases: {', '.join(tags)}")
        sys.exit(0)

    if args.release:
        log.info(f"Checking if requested EVerest release \"{args.release}\" is available...")
    elif args.config:
        log.info(f"Using supplied config \"{args.config}\"")
        main_handler(args)
        return
    else:
        log.info("No release specified, checking for most recent stable version...")

    github_key_available = EDM.check_github_key()

    github_https_pefix = "https://github.com/EVerest/"
    github_git_prefix = "git@github.com:EVerest/"

    github_prefix = github_https_pefix

    if github_key_available:
        # prefer ssh remotes when a github ssh key is available
        github_prefix = github_git_prefix

    everest_core = {"name": "everest-core", "repo": github_prefix + "everest-core.git", "release": args.release}
    everest_cmake = {"name": "everest-cmake", "repo": github_prefix + "everest-cmake.git", "release": None}
    everest_dev_environment = {"name": "everest-dev-environment",
"repo": github_prefix + "everest-dev-environment.git", "release": None}
    everest_utils = {"name": "everest-utils", "repo": github_prefix + "everest-utils.git", "release": None}

    for repo in [everest_core, everest_cmake, everest_dev_environment, everest_utils]:
        tags = GitInfo.get_remote_tags(repo["repo"])
        # remote tags arrive newest-first; fall back to "main" when there are none
        latest_tag = tags[0] if len(tags) > 0 else "main"

        if repo["release"]:
            if repo["release"] in tags:
                latest_tag = repo["release"]
                log.info(f"Requested release is available: {repo['release']}")
            else:
                # not a tag — maybe a branch name was requested
                branches = GitInfo.get_remote_branches(repo["repo"])
                if repo["release"] in branches:
                    latest_tag = repo["release"]
                    log.info(f"Requested branch is available: {repo['release']}")
                else:
                    log.error(f"Requested release is NOT available: {repo['release']}")
                    sys.exit(1)

        log.info(f"Using \"{Color.GREEN}{repo['name']}{Color.CLEAR}\" @ {latest_tag}")
        checkout_local_dependency(repo["name"], repo["repo"], latest_tag, None, working_dir / repo["name"], False)

    # now we have the basics, get the rest recursively
    iterations = 10
    old_snapshot = {}
    config = {}
    scanned_dependencies_files = set()
    for i in range(iterations):
        if i > 0:
            # only do recursive parsing if explicitly requested
            (dependencies, dependencies_files) = EDM.scan_dependencies(
                working_dir, args.include_deps, scanned_dependencies_files)
            scanned_dependencies_files.update(dependencies_files)
            new_config = EDM.config_from_dependencies(dependencies, args.external_in_config, args.include_remotes)
            new_config = EDM.create_config(working_dir, new_config, args.external_in_config, args.include_remotes)
            # merge config with new_config, overwrite github https prefix with git prefix
            for name, entry in new_config.items():
                if name not in config:
                    if github_key_available and "git" in entry and entry["git"].startswith(github_https_pefix):
                        entry["git"] = entry["git"].replace(github_https_pefix, github_git_prefix, 1)
                    config[name] = entry
                    checkout_dir = working_dir / name
                    git_tag = None
                    git_rev = None
                    if entry is not None:
                        if "git_tag" in entry:
                            git_tag = entry["git_tag"]
                        if "git_rev" in entry:
                            git_rev = entry["git_rev"]
                    checkout_local_dependency(name, entry["git"], git_tag, git_rev, checkout_dir)
            EDM.write_config(config, config_path, True)
            # EDM.setup_workspace_from_config(working_dir, config_path, False, False)

        # stop as soon as a full pass discovers nothing new
        snapshot = EDM.create_snapshot(working_dir, config_path)
        if snapshot == old_snapshot:
            log.info(f'Stopping recursive workspace setup early after {i+1} loops.')
            break
        old_snapshot = snapshot
    EDM.show_git_info(working_dir, None, False)

    # write config file
    edm_config_dir_path.mkdir(parents=True, exist_ok=True)
    config = load_edm_config()

    if not config:
        log.info("Config is None, creating new config")
        config = {}
        config["edm"] = {}  # for general workspace independent config settings
        config["workspaces"] = {}

    with open(edm_config_path, 'w', encoding='utf-8') as edm_config_file:
        workspace_name = working_dir.name
        config["edm"]["active_workspace"] = workspace_name
        config["workspaces"][workspace_name] = {}
        config["workspaces"][workspace_name]["path"] = working_dir.as_posix()
        yaml.dump(config, edm_config_file)
        log.info(f"Successfully saved edm config \"{edm_config_path}\".")


def list_handler(_args):
    """Handler for the edm list subcommand"""
    log.info("Listing workspaces")
    config = load_edm_config()

    if not config:
        log.info("No edm config found")
        sys.exit(0)

    for workspace_name, workspace_config in config["workspaces"].items():
        log.info(f" {workspace_name} ({workspace_config['path']})")


def rm_handler(args):
    """Handler for the edm rm subcommand"""
    config = load_edm_config()

    if not config:
        log.error("No edm config found")
        sys.exit(0)

    workspace_name = args.workspace_name[0]

    if workspace_name in config["workspaces"]:
        log.info(f"Removing workspace {workspace_name} from config.")
        del config["workspaces"][workspace_name]

    # write config
    with open(edm_config_path, 'w', encoding='utf-8') as edm_config_file:
        config["edm"]["active_workspace"] = None
        yaml.dump(config, edm_config_file)
        log.info(f"Successfully saved edm config \"{edm_config_path}\".")


def git_info_handler(args):
    """Handler for the edm git info subcommand"""
    working_dir = Path(args.working_dir).expanduser().resolve()

    if not args.repo_name:
        log.info("No repo name specified, listing git info for every repo in the current workspace")
        EDM.show_git_info(working_dir, None, True)
    else:
        log.info(f"Only listing git info for {', '.join(args.repo_name)}")
        git_info = {}
        for repo_name in args.repo_name:
            repo_path = working_dir / repo_name
            repo_info = GitInfo.get_git_repo_info(repo_path, True)
            git_info[repo_path] = repo_info
        EDM.print_git_info(git_info)
    sys.exit(0)


def git_pull_handler(args):
    """Handler for the edm git pull subcommand"""
    working_dir = Path(args.working_dir).expanduser().resolve()

    if not args.repo_name:
        log.info("No repo name specified, pulling all repos in the current workspace")
        EDM.pull(working_dir, repos=None)
    else:
        EDM.pull(working_dir, repos=args.repo_name)


def snapshot_handler(args):
    """Handler for the edm snapshot 
subcommand""" 1148 | working_dir = Path(args.working_dir).expanduser().resolve() 1149 | 1150 | config_path = working_dir / "workspace-config.yaml" 1151 | 1152 | if not config_path.exists(): 1153 | log.info(f'Workspace config does not exist, creating a new one at: {config_path}') 1154 | EDM.write_config_from_scanned_dependencies( 1155 | working_dir, args.include_deps, args.external_in_config, args.include_remotes, config_path) 1156 | 1157 | log.info(f"Creating snapshot: {args.snapshot_name}") 1158 | 1159 | iterations = 1 1160 | if args.recursive: 1161 | iterations = args.recursive 1162 | old_snapshot = {} 1163 | for i in range(iterations): 1164 | if i > 0: 1165 | # only do recursive parsing if explicitly requested 1166 | EDM.write_config_from_scanned_dependencies( 1167 | working_dir, args.include_deps, args.external_in_config, args.include_remotes, config_path) 1168 | EDM.setup_workspace_from_config(working_dir, config_path, False, False) 1169 | snapshot = EDM.create_snapshot(working_dir, config_path) 1170 | EDM.write_config(snapshot, args.snapshot_name) 1171 | if snapshot == old_snapshot: 1172 | log.info(f'Stopping recursive snapshot generation early after {i+1} loops.') 1173 | break 1174 | old_snapshot = snapshot 1175 | sys.exit(0) 1176 | 1177 | 1178 | def check_non_local_dependecy(dependency_item): 1179 | name, dependency = dependency_item 1180 | 1181 | if "git" not in dependency or dependency["git"] is None: 1182 | log.warning(f'Dependency "{name}": git is not set') 1183 | return dependency_item 1184 | 1185 | if "git_tag" not in dependency or dependency["git_tag"] is None: 1186 | log.warning(f'Dependency "{name}": git_tag is not set') 1187 | return dependency_item 1188 | 1189 | known_branches = ["main", "master"] 1190 | 1191 | log.debug(f'Dependency "{name}": determining if "{dependency["git_tag"]}" is a tag') 1192 | if dependency["git_tag"] in known_branches or not GitInfo.is_tag(dependency["git"], dependency["git_tag"]): 1193 | log.info(f'Dependency "{name}": 
"{dependency["git_tag"]}" is not a tag, requesting remote rev') 1194 | dependency["git_tag"] = GitInfo.get_rev(dependency["git"], dependency["git_tag"]) 1195 | else: 1196 | log.info(f'Dependency "{name}": "{dependency["git_tag"]}" is a tag') 1197 | 1198 | return dependency_item 1199 | 1200 | 1201 | def check_origin_of_dependencies(dependencies, checkout): 1202 | non_local_dependencies = {} 1203 | 1204 | # handle locally available dependencies and filter out non-local ones 1205 | for name, dependency in dependencies.items(): 1206 | if "git" not in dependency: 1207 | log.info(f'Dependency "{name}": Using package instead of git url') 1208 | continue 1209 | shortcut = False 1210 | for checkout_dep in checkout: 1211 | if checkout_dep["name"] == name: 1212 | shortcut = True 1213 | if shortcut: 1214 | log.info(f'Dependency "{name}": available locally') 1215 | continue 1216 | 1217 | # fall-through 1218 | non_local_dependencies[name] = dependency 1219 | 1220 | with multiprocessing.Pool() as pool: 1221 | modified_dependencies = pool.map(check_non_local_dependecy, non_local_dependencies.items()) 1222 | for name, dependency in modified_dependencies: 1223 | dependencies[name] = dependency 1224 | 1225 | 1226 | def modify_dependencies_yaml(dependencies, modified_dependencies_yaml): 1227 | for name, entry in modified_dependencies_yaml.items(): 1228 | if name not in dependencies: 1229 | if "add" in entry: 1230 | dependencies[name] = {} 1231 | else: 1232 | continue 1233 | dependency = dependencies[name] 1234 | if not entry: 1235 | continue 1236 | 1237 | if "rename" in entry: 1238 | new_name = entry["rename"] 1239 | log.info(f'Dependency "{name}": Renaming to "{new_name}"') 1240 | dependencies[new_name] = dependencies.pop(name) 1241 | name = new_name 1242 | dependency = dependencies[name] 1243 | 1244 | for modification_name, modification_entry in entry.items(): 1245 | if modification_name in dependency: 1246 | if modification_entry: 1247 | log.info(f'Dependency "{name}": Changing 
"{modification_name}" to "{modification_entry}"') 1248 | dependency[modification_name] = modification_entry 1249 | else: 1250 | log.info(f'Dependency "{name}": Deleting "{modification_name}"') 1251 | del dependency[modification_name] 1252 | else: 1253 | if modification_entry: 1254 | log.info(f'Dependency "{name}": Adding "{modification_name}" containing "{modification_entry}"') 1255 | dependency[modification_name] = modification_entry 1256 | 1257 | 1258 | def modify_dependencies(dependencies, modify_dependencies_file): 1259 | log.info(f'Modifying dependencies with file: {modify_dependencies_file}') 1260 | with open(modify_dependencies_file, encoding='utf-8') as modified_dependencies_file: 1261 | try: 1262 | modified_dependencies_yaml = yaml.safe_load(modified_dependencies_file) 1263 | if modified_dependencies_yaml: 1264 | modify_dependencies_yaml(dependencies, modified_dependencies_yaml) 1265 | except yaml.YAMLError as e: 1266 | log.error(f"Error parsing yaml of {modify_dependencies_file}: {e}") 1267 | 1268 | def populate_component(metadata_yaml, key, version): 1269 | meta = {"description": "", "license": "unknown", "name": key} 1270 | if key in metadata_yaml: 1271 | meta_entry = metadata_yaml[key] 1272 | meta['description'] = meta_entry.get('description', '') 1273 | meta['license'] = meta_entry.get('license', 'unknown') 1274 | meta["name"] = meta_entry.get("name", key) 1275 | component = {'name': meta["name"], 'version': version, 1276 | 'description': meta['description'], 'license': meta['license']} 1277 | return component 1278 | 1279 | 1280 | def release_handler(args): 1281 | """Handler for the edm release subcommand""" 1282 | everest_core_path = Path(args.everest_core_dir) 1283 | build_path = Path(args.build_dir) 1284 | release_path = Path(args.out) 1285 | 1286 | metadata_yaml = {} 1287 | metadata_file = os.environ.get('EVEREST_METADATA_FILE', None) 1288 | metadata_url = 
"https://raw.githubusercontent.com/EVerest/everest-dev-environment/main/everest-metadata.yaml" 1289 | 1290 | if not metadata_file: 1291 | metadata_path = build_path / "everest-metadata.yaml" 1292 | if not metadata_path.exists(): 1293 | log.info("No metadata.yaml provided, downloading...") 1294 | try: 1295 | request = requests.get(metadata_url, allow_redirects=True, timeout=metadata_timeout_s) 1296 | 1297 | with open(metadata_path, 'wb') as metadata: 1298 | metadata.write(request.content) 1299 | except requests.exceptions.RequestException as e: 1300 | log.info(f"Could not download metadata file, creating release.json without metadata: {e}") 1301 | else: 1302 | metadata_path = Path(metadata_file) 1303 | if metadata_path.exists(): 1304 | log.info(f"Using metadata file: {metadata_path}") 1305 | with open(metadata_path, encoding='utf-8') as metadata_file: 1306 | metadata_yaml_data = yaml.safe_load(metadata_file) 1307 | if metadata_yaml_data: 1308 | metadata_yaml = metadata_yaml_data 1309 | 1310 | cpm_modules_path = build_path / "CPM_modules" 1311 | everest_core_repo_info = GitInfo.get_git_repo_info(everest_core_path) 1312 | everest_core_repo_info_git_tag = "unknown" 1313 | if everest_core_repo_info["rev"]: 1314 | everest_core_repo_info_git_tag = everest_core_repo_info["rev"] 1315 | if everest_core_repo_info["branch"]: 1316 | everest_core_repo_info_git_tag = everest_core_repo_info["branch"] + "@" + everest_core_repo_info["short_rev"] 1317 | if everest_core_repo_info["tag"]: 1318 | everest_core_repo_info_git_tag = everest_core_repo_info["tag"] 1319 | snapshot_yaml = {"everest-core": {"git_tag": everest_core_repo_info_git_tag}} 1320 | for cpm_module_file_name in sorted(os.listdir(cpm_modules_path)): 1321 | cpm_module_file = cpm_modules_path / cpm_module_file_name 1322 | if not cpm_module_file.is_file(): 1323 | continue 1324 | with open(cpm_module_file, encoding='utf-8', mode='r') as cpm_module: 1325 | cpm_add_package_line = None 1326 | for line in cpm_module: 1327 | if 
line.startswith("CPMAddPackage("): 1328 | cpm_add_package_line = line.strip().replace("CPMAddPackage(\"", "").replace("\")", "")+";" 1329 | break 1330 | name = None 1331 | git_repo = None 1332 | git_tag = None 1333 | source_dir = None 1334 | name_match = re.search("NAME;(.*?);", cpm_add_package_line) 1335 | if name_match: 1336 | name = name_match.group(1) 1337 | git_repo_match = re.search("GIT_REPOSITORY;(.*?);", cpm_add_package_line) 1338 | if git_repo_match: 1339 | git_repo = git_repo_match.group(1) 1340 | git_tag_match = re.search("GIT_TAG;(.*?);", cpm_add_package_line) 1341 | if git_tag_match: 1342 | git_tag = git_tag_match.group(1) 1343 | source_dir_match = re.search("SOURCE_DIR;(.*?);", cpm_add_package_line) 1344 | if source_dir_match: 1345 | source_dir = source_dir_match.group(1) 1346 | 1347 | if not name: 1348 | print(" no NAME found?") 1349 | sys.exit(1) 1350 | if not source_dir and not git_tag: 1351 | print(" no source dir found, cannot determine git tag") 1352 | sys.exit(1) 1353 | 1354 | if not git_repo and source_dir: 1355 | repo_info = GitInfo.get_git_repo_info(source_dir) 1356 | if repo_info["branch"]: 1357 | git_tag = repo_info["branch"] + "@" + repo_info["short_rev"] 1358 | if repo_info["tag"]: 1359 | git_tag = repo_info["tag"] 1360 | if repo_info["url"]: 1361 | git_repo = repo_info["url"] 1362 | 1363 | snapshot_yaml[name] = {"git_tag": git_tag} 1364 | 1365 | d = datetime.datetime.utcnow() 1366 | now = d.isoformat("T") + "Z" 1367 | channel = os.environ.get('EVEREST_UPDATE_CHANNEL', "unknown") 1368 | include_all = os.environ.get('EVEREST_METADATA_INCLUDE_ALL', "no") 1369 | 1370 | release_json = {"channel": channel, "datetime": now, 1371 | "version": snapshot_yaml["everest-core"]["git_tag"], "components": []} 1372 | 1373 | snapshot_yaml = dict(sorted(snapshot_yaml.items(), key=lambda entry: (entry[0].swapcase()))) 1374 | 1375 | for key in snapshot_yaml: 1376 | entry = snapshot_yaml[key] 1377 | component = populate_component(metadata_yaml, key, 
def main_handler(args):
    """Default handler for the flag-based (non-subcommand) edm interface.

    Dispatches to bash-completion installation, git pull/info, workspace setup
    from a config, snapshot creation, config creation or CMake integration,
    depending on the given flags. All paths except the CMake one exit the
    process.
    """
    working_dir = Path(args.working_dir).expanduser().resolve()

    if args.install_bash_completion:
        install_bash_completion()
        sys.exit(0)

    if args.git_pull is not None:
        EDM.pull(working_dir, repos=args.git_pull)
        sys.exit(0)

    if args.git_info:
        EDM.show_git_info(working_dir, args.workspace, args.git_fetch)
        sys.exit(0)

    if not args.config and not args.cmake and not args.create_config and not args.create_snapshot:
        # FIX: message previously omitted --create-snapshot although it is checked
        log.info("No --config, --cmake, --create-config or --create-snapshot parameter given, exiting.")
        sys.exit(0)

    if args.config:
        if not args.workspace:
            log.error("A workspace path must be provided if supplying a config. Stopping.")
            sys.exit(1)

        EDM.setup_workspace_from_config(args.workspace, args.config, False, args.create_vscode_workspace)
        sys.exit(0)

    if args.create_snapshot:
        log.info(f"Creating snapshot: {args.create_snapshot}")
        snapshot = EDM.create_snapshot(working_dir)
        EDM.write_config(snapshot, args.create_snapshot)
        sys.exit(0)

    if not args.cmake and not args.create_config:
        # defensive guard, should be unreachable given the checks above
        # (FIX: replaced the literal "FIXME" error message)
        log.error("Invalid combination of command line parameters given, exiting.")
        sys.exit(1)

    out_file = Path(args.out).expanduser().resolve()

    (dependencies, _) = EDM.scan_dependencies(working_dir, args.include_deps)

    if args.create_config:
        log.info("Creating config")
        new_config = EDM.config_from_dependencies(dependencies, args.external_in_config, args.include_remotes)
        new_config = EDM.create_config(working_dir, new_config, args.external_in_config, args.include_remotes)
        EDM.write_config(new_config, args.create_config)
        sys.exit(0)

    if not args.cmake:
        # FIX: typo "intendend ," -> "intended,"
        log.error("Calling the dependency manager without the --config parameter indicates usage from a CMake script. "
                  "If this is intended, please use the --cmake flag to explicitly request this functionality.")
        sys.exit(1)

    env_workspace = os.environ.get('EVEREST_EDM_WORKSPACE')
    workspace_dir = working_dir.parent
    if env_workspace:
        workspace_dir = Path(env_workspace).expanduser().resolve()
        log.info(f'Using workspace path set in EVEREST_EDM_WORKSPACE environment variable: {workspace_dir}')
    else:
        log.info(f'Using parent directory as workspace path: {workspace_dir}')

    workspace = EDM.parse_workspace_directory(workspace_dir)
    checkout = EDM.checkout_local_dependencies(workspace, args.workspace, dependencies)

    # modify dependencies from environment variable
    env_modify_dependencies = os.environ.get('EVEREST_MODIFY_DEPENDENCIES')
    if env_modify_dependencies:
        modify_dependencies_file = Path(env_modify_dependencies).expanduser().resolve()
        if modify_dependencies_file.is_file():
            modify_dependencies(dependencies, modify_dependencies_file)

    check_origin_of_dependencies(dependencies, checkout)

    EDM.write_cmake(workspace, checkout, dependencies, out_file)
" 1447 | "If this is intendend , please use the --cmake flag to explicitly request this functionality.") 1448 | sys.exit(1) 1449 | 1450 | env_workspace = os.environ.get('EVEREST_EDM_WORKSPACE') 1451 | workspace_dir = working_dir.parent 1452 | if env_workspace: 1453 | workspace_dir = Path(env_workspace).expanduser().resolve() 1454 | log.info(f'Using workspace path set in EVEREST_EDM_WORKSPACE environment variable: {workspace_dir}') 1455 | else: 1456 | log.info(f'Using parent directory as workspace path: {workspace_dir}') 1457 | 1458 | workspace = EDM.parse_workspace_directory(workspace_dir) 1459 | checkout = EDM.checkout_local_dependencies(workspace, args.workspace, dependencies) 1460 | 1461 | # modify dependencies from environment variable 1462 | env_modify_dependencies = os.environ.get('EVEREST_MODIFY_DEPENDENCIES') 1463 | if env_modify_dependencies: 1464 | modify_dependencies_file = Path(env_modify_dependencies).expanduser().resolve() 1465 | if modify_dependencies_file.is_file(): 1466 | modify_dependencies(dependencies, modify_dependencies_file) 1467 | 1468 | check_origin_of_dependencies(dependencies, checkout) 1469 | 1470 | EDM.write_cmake(workspace, checkout, dependencies, out_file) 1471 | 1472 | 1473 | def get_parser(version) -> argparse.ArgumentParser: 1474 | """Return the argument parser containing all command line options.""" 1475 | parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, 1476 | description="Everest Dependency Manager") 1477 | parser.add_argument('--version', action='version', version=f'%(prog)s {version}') 1478 | parser.add_argument( 1479 | "--workspace", metavar='WORKSPACE', 1480 | help="Directory in which source code repositories that are explicity requested are checked out.", 1481 | required=False) 1482 | parser.add_argument("--working_dir", metavar='WORKINGDIR', default=".", 1483 | help="Working directory, default is the current one.", required=False) 1484 | parser.add_argument("--out", metavar='OUTFILENAME', 
default="dependencies.cmake", 1485 | help="Path of the file that will contain the generated CPM cmake information") 1486 | parser.add_argument( 1487 | "--include_deps", action='store_true', 1488 | help="Include dependency files that are stored in \"_deps\" directories. " 1489 | "Given that files in these directories are part of the in-tree source cache of CPM " 1490 | "you probably almost never want to do this.") 1491 | parser.add_argument( 1492 | "--config", metavar='CONFIG', 1493 | help="Path to a config file that contains the repositories that should be checked out into the workspace.", 1494 | required=False) 1495 | parser.add_argument( 1496 | "--create-vscode-workspace", action="store_true", 1497 | help="Create a VS Code workspace by saving a .code-workspace file in the workspace folder.") 1498 | parser.add_argument( 1499 | "--cmake", action="store_true", 1500 | help="Use this flag to indicate that the dependency manager was called from a CMake script.") 1501 | parser.add_argument( 1502 | "--verbose", action="store_true", 1503 | help="Verbose output.") 1504 | parser.add_argument( 1505 | "--nocolor", action="store_true", 1506 | help="No color output.") 1507 | parser.add_argument( 1508 | "--install-bash-completion", action="store_true", 1509 | help="Install bash completion if possible.") 1510 | parser.add_argument( 1511 | "--create-config", metavar='CREATECONFIG', 1512 | help="Creates a config file at the given path containing all dependencies from the working directory.", 1513 | required=False) 1514 | parser.add_argument( 1515 | "--external-in-config", action="store_true", 1516 | help="Include external dependencies in created config file.") 1517 | parser.add_argument( 1518 | "--include-remotes", metavar='INTERNAL', 1519 | help="List of git remotes that are included in a created config file", 1520 | nargs="*", 1521 | default=["git@github.com:EVerest/*", "https://github.com/EVerest/*"], 1522 | required=False) 1523 | parser.add_argument( 1524 | "--create-snapshot", 
1525 | help="Creates a config file at the given path containing all repositories from the working directory.", 1526 | nargs="?", 1527 | const="snapshot.yaml", 1528 | required=False) 1529 | parser.add_argument( 1530 | "--git-info", action="store_true", 1531 | help="Show information of git repositories in working_dir") 1532 | parser.add_argument( 1533 | "--git-fetch", action="store_true", 1534 | help="Use git-fetch to get updated info from remote") 1535 | parser.add_argument( 1536 | "--git-pull", 1537 | help="Use git-pull to pull all git repositories in working_dir", 1538 | nargs="*", 1539 | required=False) 1540 | # TODO(kai): consider implementing interactive mode 1541 | # parser.add_argument("--interactive", action='store_true', 1542 | # help="Interactively ask which repositories should be checked out.") 1543 | 1544 | subparsers = parser.add_subparsers(help='available commands') 1545 | 1546 | init_parser = subparsers.add_parser('init', add_help=True) 1547 | init_parser.add_argument( 1548 | "--config", metavar='CONFIG', 1549 | help="Path to a config file that contains the repositories that should be checked out into the workspace.", 1550 | required=False) 1551 | init_parser.add_argument( 1552 | "--workspace", metavar='WORKSPACE', 1553 | help="Directory in which source code repositories that are explicity requested are checked out.", 1554 | required=False) 1555 | init_parser.add_argument( 1556 | "release", 1557 | help="Release version requested, if empty the most recent stable release is assumed.", 1558 | nargs="?") 1559 | init_parser.set_defaults(action_handler=init_handler) 1560 | init_parser.add_argument( 1561 | "--list", 1562 | action="store_true", 1563 | help="List available everest-core versions.") 1564 | 1565 | list_parser = subparsers.add_parser('list', add_help=True) 1566 | list_parser.set_defaults(action_handler=list_handler) 1567 | 1568 | rm_parser = subparsers.add_parser('rm', add_help=True) 1569 | rm_parser.set_defaults(action_handler=rm_handler) 1570 | 
rm_parser.add_argument( 1571 | "workspace_name", 1572 | help="Name of the workspace to remove", 1573 | nargs=1) 1574 | 1575 | git_parser = subparsers.add_parser('git', add_help=True) 1576 | git_subparsers = git_parser.add_subparsers(help='available git commands') 1577 | 1578 | git_info_parser = git_subparsers.add_parser('info', add_help=True) 1579 | git_info_parser.add_argument( 1580 | "repo_name", 1581 | help="Name of the repo(s) to get info from", 1582 | nargs="*") 1583 | git_info_parser.set_defaults(action_handler=git_info_handler) 1584 | 1585 | git_pull_parser = git_subparsers.add_parser('pull', add_help=True) 1586 | git_pull_parser.add_argument( 1587 | "repo_name", 1588 | help="Name of the repo(s) to pull", 1589 | nargs="*") 1590 | git_pull_parser.set_defaults(action_handler=git_pull_handler) 1591 | 1592 | snapshot_parser = subparsers.add_parser('snapshot', add_help=True) 1593 | snapshot_parser.set_defaults(action_handler=snapshot_handler) 1594 | snapshot_parser.add_argument( 1595 | "snapshot_name", 1596 | help="Name of the snapshot file", 1597 | nargs="?", 1598 | default="snapshot.yaml") 1599 | snapshot_parser.add_argument( 1600 | "--recursive", 1601 | help="Recursively check out the snapshot", 1602 | nargs="?", 1603 | const=10, 1604 | required=False) 1605 | snapshot_parser.add_argument( 1606 | "--config", 1607 | help="Path to a snapshot config.", 1608 | nargs="?", 1609 | const="snapshot-config.yaml", 1610 | required=False) 1611 | 1612 | release_parser = subparsers.add_parser('release', add_help=True) 1613 | release_parser.set_defaults(action_handler=release_handler) 1614 | release_parser.add_argument( 1615 | "--everest-core-dir", 1616 | help="Path to everest-core", 1617 | nargs="?", 1618 | default="everest-core") 1619 | release_parser.add_argument( 1620 | "--build-dir", 1621 | help="Path to everest-core build dir", 1622 | nargs="?", 1623 | default="build") 1624 | release_parser.add_argument( 1625 | "--out", 1626 | help="Path to release.json file", 1627 | 
nargs="?", 1628 | default="release.json") 1629 | bazel_parser = subparsers.add_parser( 1630 | "bazel", 1631 | description="Convert dependencies.yaml file into a file that can be used in Bazel workspace.", 1632 | add_help=True) 1633 | bazel_parser.set_defaults(action_handler=bazel.generate_deps) 1634 | bazel_parser.add_argument( 1635 | "dependencies_yaml", 1636 | type=Path, 1637 | help="Path to dependencies.yaml") 1638 | bazel_parser.add_argument( 1639 | "-b", "--build-file", 1640 | type=str, 1641 | action="append", 1642 | help="Bazel-style label for the build files into the deppendencies. " + 1643 | "The format should be `@//:BUILD..bazel`." + 1644 | " should correspond to the name of the dependency in " + 1645 | "the dependencies.yaml file. This option can be used multiple times." + 1646 | "If not provided, Bazel will search for BUILD file in the repo itself.", 1647 | required=False) 1648 | 1649 | parser.set_defaults(action_handler=main_handler) 1650 | 1651 | return parser 1652 | 1653 | 1654 | def setup_logging(verbose: bool, nocolor: bool): 1655 | """Setup logging, choosing logger level and if colorful log output is requested.""" 1656 | if verbose: 1657 | log.setLevel(level=logging.DEBUG) 1658 | else: 1659 | log.setLevel(level=logging.INFO) 1660 | console_handler = logging.StreamHandler() 1661 | console_handler.setFormatter(ColorFormatter(color=not nocolor)) 1662 | log.addHandler(console_handler) 1663 | 1664 | if not nocolor: 1665 | log.debug( 1666 | "Using \033[1;31mc\033[1;33mo\033[93ml\033[92mo\033[94mr\033[34mf\033[95mu\033[35ml\033[0m \033[1m" 1667 | "output\033[0m") 1668 | 1669 | 1670 | def main(parser: argparse.ArgumentParser): 1671 | """The main entrypoint of edm. 
Provides different functionality based on the given command line arguments.""" 1672 | args = parser.parse_args() 1673 | 1674 | setup_logging(args.verbose, args.nocolor) 1675 | 1676 | if not os.environ.get("CPM_SOURCE_CACHE"): 1677 | log.warning("CPM_SOURCE_CACHE environment variable is not set, this might lead to unintended behavior.") 1678 | 1679 | args.action_handler(args) 1680 | -------------------------------------------------------------------------------- /dependency_manager/src/edm_tool/templates/cpm.jinja: -------------------------------------------------------------------------------- 1 | set(ENV{EVEREST_EDM_WORKSPACE} {{ workspace["workspace"] }}) 2 | set(CPM_USE_NAMED_CACHE_DIRECTORIES ON) 3 | {% for dep in checkout %} 4 | set(CPM_{{ dep["name"] }}_SOURCE "{{ dep["path"] }}") 5 | {% endfor %} 6 | {% for name, value in dependencies.items() %} 7 | if("{{name}}" IN_LIST EVEREST_EXCLUDE_DEPENDENCIES) 8 | message(STATUS "Excluding dependency {{name}}") 9 | {% if "cmake_condition" in value and value["cmake_condition"]|length > 0 %} 10 | elseif({{ value["cmake_condition"] }}) 11 | {% else %} 12 | else() 13 | {% endif %} 14 | {% if value and "git" in value %} 15 | CPMAddPackage( 16 | NAME {{ name }} 17 | GIT_REPOSITORY {{ value["git"] }} 18 | {% if "git_tag" in value %} 19 | GIT_TAG {{ value["git_tag"] }} 20 | {% endif %} 21 | {% if "options" in value and value["options"]|length > 0 %} 22 | OPTIONS 23 | {{value["options"]|quote|join(" ")}} 24 | {% endif %} 25 | {% if "prevent_install" in value and value["prevent_install"] %} 26 | EXCLUDE_FROM_ALL YES 27 | {% endif %} 28 | ) 29 | {% else %} 30 | find_package( 31 | {{ name }} 32 | {% if value and "components" in value and value["components"]|length > 0 %} 33 | COMPONENTS 34 | {{value["components"]|quote|join(" ")}} 35 | {% endif %} 36 | {% if not value or "optional" not in value or not value["optional"] %} 37 | REQUIRED 38 | {% endif %} 39 | ) 40 | {% endif %} 41 | {% if value and "alias" in value %} 42 | 
#!/bin/bash
# Interactively set up the EVerest devcontainer template in a workspace
# directory: asks for target dir and template version, clones the
# everest-dev-environment repo temporarily and copies the template over.

set -e

read -p "Enter the workspace directory (default is the current directory): " WORKSPACE_DIR
if [ -z "$WORKSPACE_DIR" ]; then
    WORKSPACE_DIR="./"
fi
WORKSPACE_DIR=$(realpath -m "$WORKSPACE_DIR")

read -p "Enter the version of the everest-dev-environment (default is 'main'): " VERSION
if [ -z "$VERSION" ]; then
    VERSION="main"
fi

echo "Create the workspace directory '$WORKSPACE_DIR' if it does not exist"
# FIX: quote expansions so paths containing spaces work (ShellCheck SC2086)
mkdir -p "$WORKSPACE_DIR"

if [ "$(ls -A "$WORKSPACE_DIR")" ]; then
    # The workspace directory is not empty, warning do you want to continue?
    read -p "The workspace directory is not empty, do you want to continue? (y/N): " -r
    if [[ $REPLY =~ ^[Nn]$ || $REPLY = "" ]]; then
        echo "Exiting.."
        exit 1
    elif [[ ! $REPLY =~ ^[Yy]$ ]]; then
        echo "Invalid input. Exiting.."
        exit 1
    fi
fi

TMP_DIR=$(mktemp --directory)
echo "Clone the everest-dev-environment repository to the workspace directory with the version $VERSION, temporarily.."
git clone --quiet --depth 1 --single-branch --branch "$VERSION" https://github.com/EVerest/everest-dev-environment.git "$TMP_DIR"

echo "Copy the template devcontainer configuration files to the workspace directory"
# -n: never overwrite files the user already has in the workspace
cp -n -r "$TMP_DIR/devcontainer/template/." "$WORKSPACE_DIR/"

echo "Remove the everest-dev-environment repository"
rm -rf "$TMP_DIR"
-------------------------------------------------------------------------------- 1 | # syntax=docker/dockerfile:1 2 | FROM ghcr.io/everest/everest-ci/dev-env-base:v1.3.2 3 | 4 | # Update the package list 5 | RUN sudo apt update 6 | 7 | # EVerest Development Tool - Dependencies 8 | RUN pip install --break-system-packages \ 9 | docker==7.1.0 10 | # EVerest Development Tool 11 | ARG DEV_ENV_TOOL_VERSION=docker-images-v0.1.0 12 | RUN python3 -m pip install --break-system-packages \ 13 | git+https://github.com/EVerest/everest-dev-environment@${DEV_ENV_TOOL_VERSION}#subdirectory=everest_dev_tool 14 | 15 | RUN echo "echo \"🏔️ 🚘 Welcome to the EVerest development environment!\"" >> ${HOME}/.bashrc 16 | RUN echo "echo \"To initialize the EVerest core repository everest-core in your workspace please run the following command:\"" >> ${HOME}/.bashrc 17 | RUN echo "echo \"everest clone everest-core\"" >> ${HOME}/.bashrc 18 | -------------------------------------------------------------------------------- /devcontainer/template/.devcontainer/general-devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "EVerest - ${localWorkspaceFolderBasename}", 3 | "dockerComposeFile": ["../docker-compose.yml", "./docker-compose.devcontainer.yml"], 4 | "service": "devcontainer", 5 | "runServices": ["devcontainer"], 6 | "remoteUser": "docker", 7 | "workspaceFolder": "/workspace", 8 | "forwardPorts": [ 9 | "mqtt-explorer:4000", 10 | "steve:8180" 11 | ], 12 | "portsAttributes": { 13 | "mqtt-explorer:4000": { 14 | "label": "MQTT Explorer - WebView" 15 | }, 16 | "steve:8180": { 17 | "label": "Steve - WebTool" 18 | } 19 | }, 20 | "otherPortsAttributes": { 21 | "onAutoForward": "notify", 22 | "protocol": "http", 23 | "requireLocalPort": false 24 | }, 25 | "customizations": { 26 | "vscode": { 27 | "settings": { 28 | "terminal.integrated.profiles.linux": { 29 | "bash": { 30 | "path": "/bin/bash", 31 | "icon": "terminal-bash", 
32 | "args": ["-l"] 33 | } 34 | }, 35 | "terminal.integrated.defaultProfile.linux": "bash", 36 | "python.pythonPath": "/usr/bin/python3", 37 | "python.defaultInterpreterPath": "/usr/bin/python3", 38 | "editor.rulers": [79, 120], 39 | // RST/Sphinx language server 40 | "esbonio.sphinx.buildDir": "${workspaceFolder}/everest/docs/_build", 41 | "esbonio.sphinx.confDir": "${workspaceFolder}/everest/docs", 42 | // RST 43 | "restructuredtext.preview.scrollEditorWithPreview": false, 44 | "restructuredtext.pythonRecommendation.disabled": true, 45 | "liveServer.settings.root": "/everest/docs/_build/html" 46 | }, 47 | "extensions": [ 48 | // language support CPP 49 | "ms-vscode.cpptools", 50 | // language support cmake 51 | "twxs.cmake", 52 | "ms-vscode.cmake-tools", 53 | // language support python 54 | "ms-python.python", 55 | // language support restructured text 56 | "lextudio.restructuredtext", 57 | "trond-snekvik.simple-rst", 58 | // doc live server 59 | "ritwickdey.liveserver" 60 | ] 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /devcontainer/template/.devcontainer/general-devcontainer/docker-compose.devcontainer.yml: -------------------------------------------------------------------------------- 1 | networks: 2 | docker-proxy-network: 3 | internal: true 4 | 5 | volumes: 6 | cpm-source-cache: 7 | name: everest-cpm-source-cache 8 | 9 | services: 10 | docker-proxy: 11 | image: tecnativa/docker-socket-proxy:latest 12 | volumes: 13 | - type: bind 14 | source: /var/run/docker.sock 15 | target: /var/run/docker.sock 16 | environment: 17 | - CONTAINERS=1 18 | - IMAGES=1 19 | - POST=1 20 | - NETWORKS=1 21 | - VOLUMES=1 22 | networks: 23 | - docker-proxy-network 24 | 25 | devcontainer: 26 | depends_on: 27 | - docker-proxy 28 | build: 29 | context: ./general-devcontainer 30 | dockerfile: Dockerfile 31 | volumes: 32 | - type: bind 33 | source: .. 
34 | target: /workspace 35 | - type: volume 36 | source: cpm-source-cache 37 | target: /home/docker/.cache/cpm 38 | command: sleep infinity 39 | environment: 40 | MQTT_SERVER_ADDRESS: mqtt-server 41 | MQTT_SERVER_PORT: 1883 42 | DOCKER_HOST: tcp://docker-proxy:2375 43 | CPM_SOURCE_CACHE: /home/docker/.cache/cpm 44 | networks: 45 | - docker-proxy-network 46 | - default 47 | sysctls: 48 | - net.ipv6.conf.all.disable_ipv6=0 49 | -------------------------------------------------------------------------------- /docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.6" 2 | 3 | volumes: 4 | ocpp-db-data: 5 | external: false 6 | node-red-data: 7 | external: false 8 | 9 | networks: 10 | default: 11 | external: true 12 | name: infranet_network 13 | 14 | services: 15 | mqtt-server: 16 | build: images/mosquitto 17 | ports: 18 | - 1883:1883 19 | - 9001:9001 20 | 21 | ocpp-db: 22 | image: mariadb:10.4.30 # pinned to patch-version because https://github.com/steve-community/steve/pull/1213 23 | volumes: 24 | - ocpp-db-data:/var/lib/mysql 25 | ports: 26 | - 13306:3306 27 | environment: 28 | MYSQL_RANDOM_ROOT_PASSWORD: "yes" 29 | MYSQL_DATABASE: ocpp-db 30 | MYSQL_USER: ocpp 31 | MYSQL_PASSWORD: ocpp 32 | 33 | steve: 34 | build: images/steve 35 | ports: 36 | - 8180:8180 37 | - 8443:8443 38 | depends_on: 39 | - ocpp-db 40 | 41 | nodered: 42 | build: images/nodered 43 | volumes: 44 | - node-red-data:/data 45 | depends_on: 46 | - mqtt-server 47 | ports: 48 | - 1880:1880 49 | -------------------------------------------------------------------------------- /docker/images/mosquitto/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM eclipse-mosquitto:2.0.10 2 | 3 | COPY mosquitto.conf /mosquitto/config/mosquitto.conf 4 | -------------------------------------------------------------------------------- /docker/images/mosquitto/mosquitto.conf: 
-------------------------------------------------------------------------------- 1 | # Config file for mosquitto 2 | # 3 | # See mosquitto.conf(5) for more information. 4 | # 5 | # Default values are shown, uncomment to change. 6 | # 7 | # Use the # character to indicate a comment, but only if it is the 8 | # very first character on the line. 9 | 10 | # ================================================================= 11 | # General configuration 12 | # ================================================================= 13 | 14 | # Use per listener security settings. 15 | # 16 | # It is recommended this option be set before any other options. 17 | # 18 | # If this option is set to true, then all authentication and access control 19 | # options are controlled on a per listener basis. The following options are 20 | # affected: 21 | # 22 | # password_file acl_file psk_file auth_plugin auth_opt_* allow_anonymous 23 | # auto_id_prefix allow_zero_length_clientid 24 | # 25 | # Note that if set to true, then a durable client (i.e. with clean session set 26 | # to false) that has disconnected will use the ACL settings defined for the 27 | # listener that it was most recently connected to. 28 | # 29 | # The default behaviour is for this to be set to false, which maintains the 30 | # setting behaviour from previous versions of mosquitto. 31 | #per_listener_settings false 32 | 33 | 34 | # This option controls whether a client is allowed to connect with a zero 35 | # length client id or not. This option only affects clients using MQTT v3.1.1 36 | # and later. If set to false, clients connecting with a zero length client id 37 | # are disconnected. If set to true, clients will be allocated a client id by 38 | # the broker. This means it is only useful for clients with clean session set 39 | # to true. 
40 | #allow_zero_length_clientid true 41 | 42 | # If allow_zero_length_clientid is true, this option allows you to set a prefix 43 | # to automatically generated client ids to aid visibility in logs. 44 | # Defaults to 'auto-' 45 | #auto_id_prefix auto- 46 | 47 | # This option affects the scenario when a client subscribes to a topic that has 48 | # retained messages. It is possible that the client that published the retained 49 | # message to the topic had access at the time they published, but that access 50 | # has been subsequently removed. If check_retain_source is set to true, the 51 | # default, the source of a retained message will be checked for access rights 52 | # before it is republished. When set to false, no check will be made and the 53 | # retained message will always be published. This affects all listeners. 54 | #check_retain_source true 55 | 56 | # QoS 1 and 2 messages will be allowed inflight per client until this limit 57 | # is exceeded. Defaults to 0. (No maximum) 58 | # See also max_inflight_messages 59 | #max_inflight_bytes 0 60 | 61 | # The maximum number of QoS 1 and 2 messages currently inflight per 62 | # client. 63 | # This includes messages that are partway through handshakes and 64 | # those that are being retried. Defaults to 20. Set to 0 for no 65 | # maximum. Setting to 1 will guarantee in-order delivery of QoS 1 66 | # and 2 messages. 67 | #max_inflight_messages 20 68 | 69 | # For MQTT v5 clients, it is possible to have the server send a "server 70 | # keepalive" value that will override the keepalive value set by the client. 71 | # This is intended to be used as a mechanism to say that the server will 72 | # disconnect the client earlier than it anticipated, and that the client should 73 | # use the new keepalive value. 
The max_keepalive option allows you to specify 74 | # that clients may only connect with keepalive less than or equal to this 75 | # value, otherwise they will be sent a server keepalive telling them to use 76 | # max_keepalive. This only applies to MQTT v5 clients. The maximum value 77 | # allowable is 65535. Do not set below 10. 78 | #max_keepalive 65535 79 | 80 | # For MQTT v5 clients, it is possible to have the server send a "maximum packet 81 | # size" value that will instruct the client it will not accept MQTT packets 82 | # with size greater than max_packet_size bytes. This applies to the full MQTT 83 | # packet, not just the payload. Setting this option to a positive value will 84 | # set the maximum packet size to that number of bytes. If a client sends a 85 | # packet which is larger than this value, it will be disconnected. This applies 86 | # to all clients regardless of the protocol version they are using, but v3.1.1 87 | # and earlier clients will of course not have received the maximum packet size 88 | # information. Defaults to no limit. Setting below 20 bytes is forbidden 89 | # because it is likely to interfere with ordinary client operation, even with 90 | # very small payloads. 91 | #max_packet_size 0 92 | 93 | # QoS 1 and 2 messages above those currently in-flight will be queued per 94 | # client until this limit is exceeded. Defaults to 0. (No maximum) 95 | # See also max_queued_messages. 96 | # If both max_queued_messages and max_queued_bytes are specified, packets will 97 | # be queued until the first limit is reached. 98 | #max_queued_bytes 0 99 | 100 | # Set the maximum QoS supported. Clients publishing at a QoS higher than 101 | # specified here will be disconnected. 102 | #max_qos 2 103 | 104 | # The maximum number of QoS 1 and 2 messages to hold in a queue per client 105 | # above those that are currently in-flight. Defaults to 1000. Set 106 | # to 0 for no maximum (not recommended). 107 | # See also queue_qos0_messages. 
108 | # See also max_queued_bytes. 109 | #max_queued_messages 1000 110 | # 111 | # This option sets the maximum number of heap memory bytes that the broker will 112 | # allocate, and hence sets a hard limit on memory use by the broker. Memory 113 | # requests that exceed this value will be denied. The effect will vary 114 | # depending on what has been denied. If an incoming message is being processed, 115 | # then the message will be dropped and the publishing client will be 116 | # disconnected. If an outgoing message is being sent, then the individual 117 | # message will be dropped and the receiving client will be disconnected. 118 | # Defaults to no limit. 119 | #memory_limit 0 120 | 121 | # This option sets the maximum publish payload size that the broker will allow. 122 | # Received messages that exceed this size will not be accepted by the broker. 123 | # The default value is 0, which means that all valid MQTT messages are 124 | # accepted. MQTT imposes a maximum payload size of 268435455 bytes. 125 | #message_size_limit 0 126 | 127 | # This option allows persistent clients (those with clean session set to false) 128 | # to be removed if they do not reconnect within a certain time frame. 129 | # 130 | # This is a non-standard option in MQTT V3.1 but allowed in MQTT v3.1.1. 131 | # 132 | # Badly designed clients may set clean session to false whilst using a randomly 133 | # generated client id. This leads to persistent clients that will never 134 | # reconnect. This option allows these clients to be removed. 135 | # 136 | # The expiration period should be an integer followed by one of h d w m y for 137 | # hour, day, week, month and year respectively. For example 138 | # 139 | # persistent_client_expiration 2m 140 | # persistent_client_expiration 14d 141 | # persistent_client_expiration 1y 142 | # 143 | # The default if not set is to never expire persistent clients. 144 | #persistent_client_expiration 145 | 146 | # Write process id to a file. 
Default is a blank string which means 147 | # a pid file shouldn't be written. 148 | # This should be set to /var/run/mosquitto/mosquitto.pid if mosquitto is 149 | # being run automatically on boot with an init script and 150 | # start-stop-daemon or similar. 151 | #pid_file 152 | 153 | # Set to true to queue messages with QoS 0 when a persistent client is 154 | # disconnected. These messages are included in the limit imposed by 155 | # max_queued_messages and max_queued_bytes 156 | # Defaults to false. 157 | # This is a non-standard option for the MQTT v3.1 spec but is allowed in 158 | # v3.1.1. 159 | #queue_qos0_messages false 160 | 161 | # Set to false to disable retained message support. If a client publishes a 162 | # message with the retain bit set, it will be disconnected if this is set to 163 | # false. 164 | #retain_available true 165 | 166 | # Disable Nagle's algorithm on client sockets. This has the effect of reducing 167 | # latency of individual messages at the potential cost of increasing the number 168 | # of packets being sent. 169 | #set_tcp_nodelay false 170 | 171 | # Time in seconds between updates of the $SYS tree. 172 | # Set to 0 to disable the publishing of the $SYS tree. 173 | #sys_interval 10 174 | 175 | # The MQTT specification requires that the QoS of a message delivered to a 176 | # subscriber is never upgraded to match the QoS of the subscription. Enabling 177 | # this option changes this behaviour. If upgrade_outgoing_qos is set true, 178 | # messages sent to a subscriber will always match the QoS of its subscription. 179 | # This is a non-standard option explicitly disallowed by the spec. 180 | #upgrade_outgoing_qos false 181 | 182 | # When run as root, drop privileges to this user and its primary 183 | # group. 184 | # Set to root to stay as root, but this is not recommended. 185 | # If set to "mosquitto", or left unset, and the "mosquitto" user does not exist 186 | # then it will drop privileges to the "nobody" user instead. 
187 | # If run as a non-root user, this setting has no effect. 188 | # Note that on Windows this has no effect and so mosquitto should be started by 189 | # the user you wish it to run as. 190 | #user mosquitto 191 | 192 | # ================================================================= 193 | # Listeners 194 | # ================================================================= 195 | 196 | # Listen on a port/ip address combination. By using this variable 197 | # multiple times, mosquitto can listen on more than one port. If 198 | # this variable is used and neither bind_address nor port given, 199 | # then the default listener will not be started. 200 | # The port number to listen on must be given. Optionally, an ip 201 | # address or host name may be supplied as a second argument. In 202 | # this case, mosquitto will attempt to bind the listener to that 203 | # address and so restrict access to the associated network and 204 | # interface. By default, mosquitto will listen on all interfaces. 205 | # Note that for a websockets listener it is not possible to bind to a host 206 | # name. 207 | # 208 | # On systems that support Unix Domain Sockets, it is also possible 209 | # to create a # Unix socket rather than opening a TCP socket. In 210 | # this case, the port number should be set to 0 and a unix socket 211 | # path must be provided, e.g. 212 | # listener 0 /tmp/mosquitto.sock 213 | # 214 | # listener port-number [ip address/host name/unix socket path] 215 | listener 1883 216 | 217 | # By default, a listener will attempt to listen on all supported IP protocol 218 | # versions. If you do not have an IPv4 or IPv6 interface you may wish to 219 | # disable support for either of those protocol versions. In particular, note 220 | # that due to the limitations of the websockets library, it will only ever 221 | # attempt to open IPv6 sockets if IPv6 support is compiled in, and so will fail 222 | # if IPv6 is not available. 
223 | # 224 | # Set to `ipv4` to force the listener to only use IPv4, or set to `ipv6` to 225 | # force the listener to only use IPv6. If you want support for both IPv4 and 226 | # IPv6, then do not use the socket_domain option. 227 | # 228 | #socket_domain 229 | 230 | # Bind the listener to a specific interface. This is similar to 231 | # the [ip address/host name] part of the listener definition, but is useful 232 | # when an interface has multiple addresses or the address may change. If used 233 | # with the [ip address/host name] part of the listener definition, then the 234 | # bind_interface option will take priority. 235 | # Not available on Windows. 236 | # 237 | # Example: bind_interface eth0 238 | #bind_interface 239 | 240 | # When a listener is using the websockets protocol, it is possible to serve 241 | # http data as well. Set http_dir to a directory which contains the files you 242 | # wish to serve. If this option is not specified, then no normal http 243 | # connections will be possible. 244 | #http_dir 245 | 246 | # The maximum number of client connections to allow. This is 247 | # a per listener setting. 248 | # Default is -1, which means unlimited connections. 249 | # Note that other process limits mean that unlimited connections 250 | # are not really possible. Typically the default maximum number of 251 | # connections possible is around 1024. 252 | #max_connections -1 253 | 254 | # The listener can be restricted to operating within a topic hierarchy using 255 | # the mount_point option. This is achieved be prefixing the mount_point string 256 | # to all topics for any clients connected to this listener. This prefixing only 257 | # happens internally to the broker; the client will not see the prefix. 258 | #mount_point 259 | 260 | # Choose the protocol to use when listening. 261 | # This can be either mqtt or websockets. 
262 | # Certificate based TLS may be used with websockets, except that only the 263 | # cafile, certfile, keyfile, ciphers, and ciphers_tls13 options are supported. 264 | #protocol mqtt 265 | 266 | # Set use_username_as_clientid to true to replace the clientid that a client 267 | # connected with with its username. This allows authentication to be tied to 268 | # the clientid, which means that it is possible to prevent one client 269 | # disconnecting another by using the same clientid. 270 | # If a client connects with no username it will be disconnected as not 271 | # authorised when this option is set to true. 272 | # Do not use in conjunction with clientid_prefixes. 273 | # See also use_identity_as_username. 274 | #use_username_as_clientid 275 | 276 | # Change the websockets headers size. This is a global option, it is not 277 | # possible to set per listener. This option sets the size of the buffer used in 278 | # the libwebsockets library when reading HTTP headers. If you are passing large 279 | # header data such as cookies then you may need to increase this value. If left 280 | # unset, or set to 0, then the default of 1024 bytes will be used. 281 | #websockets_headers_size 282 | 283 | # ----------------------------------------------------------------- 284 | # Certificate based SSL/TLS support 285 | # ----------------------------------------------------------------- 286 | # The following options can be used to enable certificate based SSL/TLS support 287 | # for this listener. Note that the recommended port for MQTT over TLS is 8883, 288 | # but this must be set manually. 289 | # 290 | # See also the mosquitto-tls man page and the "Pre-shared-key based SSL/TLS 291 | # support" section. Only one of certificate or PSK encryption support can be 292 | # enabled for any listener. 293 | 294 | # Both of certfile and keyfile must be defined to enable certificate based 295 | # TLS encryption. 296 | 297 | # Path to the PEM encoded server certificate. 
298 | #certfile 299 | 300 | # Path to the PEM encoded keyfile. 301 | #keyfile 302 | 303 | # If you wish to control which encryption ciphers are used, use the ciphers 304 | # option. The list of available ciphers can be optained using the "openssl 305 | # ciphers" command and should be provided in the same format as the output of 306 | # that command. This applies to TLS 1.2 and earlier versions only. Use 307 | # ciphers_tls1.3 for TLS v1.3. 308 | #ciphers 309 | 310 | # Choose which TLS v1.3 ciphersuites are used for this listener. 311 | # Defaults to "TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256" 312 | #ciphers_tls1.3 313 | 314 | # If you have require_certificate set to true, you can create a certificate 315 | # revocation list file to revoke access to particular client certificates. If 316 | # you have done this, use crlfile to point to the PEM encoded revocation file. 317 | #crlfile 318 | 319 | # To allow the use of ephemeral DH key exchange, which provides forward 320 | # security, the listener must load DH parameters. This can be specified with 321 | # the dhparamfile option. The dhparamfile can be generated with the command 322 | # e.g. "openssl dhparam -out dhparam.pem 2048" 323 | #dhparamfile 324 | 325 | # By default an TLS enabled listener will operate in a similar fashion to a 326 | # https enabled web server, in that the server has a certificate signed by a CA 327 | # and the client will verify that it is a trusted certificate. The overall aim 328 | # is encryption of the network traffic. By setting require_certificate to true, 329 | # the client must provide a valid certificate in order for the network 330 | # connection to proceed. This allows access to the broker to be controlled 331 | # outside of the mechanisms provided by MQTT. 
332 | #require_certificate false 333 | 334 | # cafile and capath define methods of accessing the PEM encoded 335 | # Certificate Authority certificates that will be considered trusted when 336 | # checking incoming client certificates. 337 | # cafile defines the path to a file containing the CA certificates. 338 | # capath defines a directory that will be searched for files 339 | # containing the CA certificates. For capath to work correctly, the 340 | # certificate files must have ".crt" as the file ending and you must run 341 | # "openssl rehash " each time you add/remove a certificate. 342 | #cafile 343 | #capath 344 | 345 | 346 | # If require_certificate is true, you may set use_identity_as_username to true 347 | # to use the CN value from the client certificate as a username. If this is 348 | # true, the password_file option will not be used for this listener. 349 | #use_identity_as_username false 350 | 351 | # ----------------------------------------------------------------- 352 | # Pre-shared-key based SSL/TLS support 353 | # ----------------------------------------------------------------- 354 | # The following options can be used to enable PSK based SSL/TLS support for 355 | # this listener. Note that the recommended port for MQTT over TLS is 8883, but 356 | # this must be set manually. 357 | # 358 | # See also the mosquitto-tls man page and the "Certificate based SSL/TLS 359 | # support" section. Only one of certificate or PSK encryption support can be 360 | # enabled for any listener. 361 | 362 | # The psk_hint option enables pre-shared-key support for this listener and also 363 | # acts as an identifier for this listener. The hint is sent to clients and may 364 | # be used locally to aid authentication. The hint is a free form string that 365 | # doesn't have much meaning in itself, so feel free to be creative. 366 | # If this option is provided, see psk_file to define the pre-shared keys to be 367 | # used or create a security plugin to handle them. 
368 | #psk_hint 369 | 370 | # When using PSK, the encryption ciphers used will be chosen from the list of 371 | # available PSK ciphers. If you want to control which ciphers are available, 372 | # use the "ciphers" option. The list of available ciphers can be optained 373 | # using the "openssl ciphers" command and should be provided in the same format 374 | # as the output of that command. 375 | #ciphers 376 | 377 | # Set use_identity_as_username to have the psk identity sent by the client used 378 | # as its username. Authentication will be carried out using the PSK rather than 379 | # the MQTT username/password and so password_file will not be used for this 380 | # listener. 381 | #use_identity_as_username false 382 | 383 | listener 9001 384 | protocol websockets 385 | 386 | # ================================================================= 387 | # Persistence 388 | # ================================================================= 389 | 390 | # If persistence is enabled, save the in-memory database to disk 391 | # every autosave_interval seconds. If set to 0, the persistence 392 | # database will only be written when mosquitto exits. See also 393 | # autosave_on_changes. 394 | # Note that writing of the persistence database can be forced by 395 | # sending mosquitto a SIGUSR1 signal. 396 | #autosave_interval 1800 397 | 398 | # If true, mosquitto will count the number of subscription changes, retained 399 | # messages received and queued messages and if the total exceeds 400 | # autosave_interval then the in-memory database will be saved to disk. 401 | # If false, mosquitto will save the in-memory database to disk by treating 402 | # autosave_interval as a time in seconds. 403 | #autosave_on_changes false 404 | 405 | # Save persistent message data to disk (true/false). 406 | # This saves information about all messages, including 407 | # subscriptions, currently in-flight messages and retained 408 | # messages. 
409 | # retained_persistence is a synonym for this option. 410 | #persistence false 411 | 412 | # The filename to use for the persistent database, not including 413 | # the path. 414 | #persistence_file mosquitto.db 415 | 416 | # Location for persistent database. 417 | # Default is an empty string (current directory). 418 | # Set to e.g. /var/lib/mosquitto if running as a proper service on Linux or 419 | # similar. 420 | #persistence_location 421 | 422 | 423 | # ================================================================= 424 | # Logging 425 | # ================================================================= 426 | 427 | # Places to log to. Use multiple log_dest lines for multiple 428 | # logging destinations. 429 | # Possible destinations are: stdout stderr syslog topic file dlt 430 | # 431 | # stdout and stderr log to the console on the named output. 432 | # 433 | # syslog uses the userspace syslog facility which usually ends up 434 | # in /var/log/messages or similar. 435 | # 436 | # topic logs to the broker topic '$SYS/broker/log/', 437 | # where severity is one of D, E, W, N, I, M which are debug, error, 438 | # warning, notice, information and message. Message type severity is used by 439 | # the subscribe/unsubscribe log_types and publishes log messages to 440 | # $SYS/broker/log/M/susbcribe or $SYS/broker/log/M/unsubscribe. 441 | # 442 | # The file destination requires an additional parameter which is the file to be 443 | # logged to, e.g. "log_dest file /var/log/mosquitto.log". The file will be 444 | # closed and reopened when the broker receives a HUP signal. Only a single file 445 | # destination may be configured. 446 | # 447 | # The dlt destination is for the automotive `Diagnostic Log and Trace` tool. 448 | # This requires that Mosquitto has been compiled with DLT support. 449 | # 450 | # Note that if the broker is running as a Windows service it will default to 451 | # "log_dest none" and neither stdout nor stderr logging is available. 
452 | # Use "log_dest none" if you wish to disable logging. 453 | #log_dest stderr 454 | 455 | # Types of messages to log. Use multiple log_type lines for logging 456 | # multiple types of messages. 457 | # Possible types are: debug, error, warning, notice, information, 458 | # none, subscribe, unsubscribe, websockets, all. 459 | # Note that debug type messages are for decoding the incoming/outgoing 460 | # network packets. They are not logged in "topics". 461 | #log_type error 462 | #log_type warning 463 | #log_type notice 464 | #log_type information 465 | 466 | 467 | # If set to true, client connection and disconnection messages will be included 468 | # in the log. 469 | #connection_messages true 470 | 471 | # If using syslog logging (not on Windows), messages will be logged to the 472 | # "daemon" facility by default. Use the log_facility option to choose which of 473 | # local0 to local7 to log to instead. The option value should be an integer 474 | # value, e.g. "log_facility 5" to use local5. 475 | #log_facility 476 | 477 | # If set to true, add a timestamp value to each log message. 478 | #log_timestamp true 479 | 480 | # Set the format of the log timestamp. If left unset, this is the number of 481 | # seconds since the Unix epoch. 482 | # This is a free text string which will be passed to the strftime function. To 483 | # get an ISO 8601 datetime, for example: 484 | # log_timestamp_format %Y-%m-%dT%H:%M:%S 485 | #log_timestamp_format 486 | 487 | # Change the websockets logging level. This is a global option, it is not 488 | # possible to set per listener. This is an integer that is interpreted by 489 | # libwebsockets as a bit mask for its lws_log_levels enum. See the 490 | # libwebsockets documentation for more details. "log_type websockets" must also 491 | # be enabled. 
492 | #websockets_log_level 0 493 | 494 | 495 | # ================================================================= 496 | # Security 497 | # ================================================================= 498 | 499 | # If set, only clients that have a matching prefix on their 500 | # clientid will be allowed to connect to the broker. By default, 501 | # all clients may connect. 502 | # For example, setting "secure-" here would mean a client "secure- 503 | # client" could connect but another with clientid "mqtt" couldn't. 504 | #clientid_prefixes 505 | 506 | # Boolean value that determines whether clients that connect 507 | # without providing a username are allowed to connect. If set to 508 | # false then a password file should be created (see the 509 | # password_file option) to control authenticated client access. 510 | # 511 | # Defaults to false, unless there are no listeners defined in the configuration 512 | # file, in which case it is set to true, but connections are only allowed from 513 | # the local machine. 514 | allow_anonymous true 515 | 516 | # ----------------------------------------------------------------- 517 | # Default authentication and topic access control 518 | # ----------------------------------------------------------------- 519 | 520 | # Control access to the broker using a password file. This file can be 521 | # generated using the mosquitto_passwd utility. If TLS support is not compiled 522 | # into mosquitto (it is recommended that TLS support should be included) then 523 | # plain text passwords are used, in which case the file should be a text file 524 | # with lines in the format: 525 | # username:password 526 | # The password (and colon) may be omitted if desired, although this 527 | # offers very little in the way of security. 528 | # 529 | # See the TLS client require_certificate and use_identity_as_username options 530 | # for alternative authentication options. 
If an auth_plugin is used as well as 531 | # password_file, the auth_plugin check will be made first. 532 | #password_file 533 | 534 | # Access may also be controlled using a pre-shared-key file. This requires 535 | # TLS-PSK support and a listener configured to use it. The file should be text 536 | # lines in the format: 537 | # identity:key 538 | # The key should be in hexadecimal format without a leading "0x". 539 | # If an auth_plugin is used as well, the auth_plugin check will be made first. 540 | #psk_file 541 | 542 | # Control access to topics on the broker using an access control list 543 | # file. If this parameter is defined then only the topics listed will 544 | # have access. 545 | # If the first character of a line of the ACL file is a # it is treated as a 546 | # comment. 547 | # Topic access is added with lines of the format: 548 | # 549 | # topic [read|write|readwrite|deny] <topic> 550 | # 551 | # The access type is controlled using "read", "write", "readwrite" or "deny". 552 | # This parameter is optional (unless <topic> contains a space character) - if 553 | # not given then the access is read/write. <topic> can contain the + or # 554 | # wildcards as in subscriptions. 555 | # 556 | # The "deny" option can be used to explicitly deny access to a topic that would 557 | # otherwise be granted by a broader read/write/readwrite statement. Any "deny" 558 | # topics are handled before topics that grant read/write access. 559 | # 560 | # The first set of topics are applied to anonymous clients, assuming 561 | # allow_anonymous is true. User specific topic ACLs are added after a 562 | # user line as follows: 563 | # 564 | # user <username> 565 | # 566 | # The username referred to here is the same as in password_file. It is 567 | # not the clientid. 568 | # 569 | # 570 | # It is also possible to define ACLs based on pattern substitution within the 571 | # topic.
The patterns available for substitution are: 572 | # 573 | # %c to match the client id of the client 574 | # %u to match the username of the client 575 | # 576 | # The substitution pattern must be the only text for that level of hierarchy. 577 | # 578 | # The form is the same as for the topic keyword, but using pattern as the 579 | # keyword. 580 | # Pattern ACLs apply to all users even if the "user" keyword has previously 581 | # been given. 582 | # 583 | # If using bridges with usernames and ACLs, connection messages can be allowed 584 | # with the following pattern: 585 | # pattern write $SYS/broker/connection/%c/state 586 | # 587 | # pattern [read|write|readwrite] <topic> 588 | # 589 | # Example: 590 | # 591 | # pattern write sensor/%u/data 592 | # 593 | # If an auth_plugin is used as well as acl_file, the auth_plugin check will be 594 | # made first. 595 | #acl_file 596 | 597 | # ----------------------------------------------------------------- 598 | # External authentication and topic access plugin options 599 | # ----------------------------------------------------------------- 600 | 601 | # External authentication and access control can be supported with the 602 | # auth_plugin option. This is a path to a loadable plugin. See also the 603 | # auth_opt_* options described below. 604 | # 605 | # The auth_plugin option can be specified multiple times to load multiple 606 | # plugins. The plugins will be processed in the order that they are specified 607 | # here. If the auth_plugin option is specified alongside either of 608 | # password_file or acl_file then the plugin checks will be made first. 609 | # 610 | #auth_plugin 611 | 612 | # If the auth_plugin option above is used, define options to pass to the 613 | # plugin here as described by the plugin instructions.
All options named 614 | # using the format auth_opt_* will be passed to the plugin, for example: 615 | # 616 | # auth_opt_db_host 617 | # auth_opt_db_port 618 | # auth_opt_db_username 619 | # auth_opt_db_password 620 | 621 | 622 | # ================================================================= 623 | # Bridges 624 | # ================================================================= 625 | 626 | # A bridge is a way of connecting multiple MQTT brokers together. 627 | # Create a new bridge using the "connection" option as described below. Set 628 | # options for the bridges using the remaining parameters. You must specify the 629 | # address and at least one topic to subscribe to. 630 | # 631 | # Each connection must have a unique name. 632 | # 633 | # The address line may have multiple host address and ports specified. See 634 | # below in the round_robin description for more details on bridge behaviour if 635 | # multiple addresses are used. Note that if you use an IPv6 address, then you 636 | # are required to specify a port. 637 | # 638 | # The direction that the topic will be shared can be chosen by 639 | # specifying out, in or both, where the default value is out. 640 | # The QoS level of the bridged communication can be specified with the next 641 | # topic option. The default QoS level is 0, to change the QoS the topic 642 | # direction must also be given. 643 | # 644 | # The local and remote prefix options allow a topic to be remapped when it is 645 | # bridged to/from the remote broker. This provides the ability to place a topic 646 | # tree in an appropriate location. 647 | # 648 | # For more details see the mosquitto.conf man page. 649 | # 650 | # Multiple topics can be specified per connection, but be careful 651 | # not to create any loops. 652 | # 653 | # If you are using bridges with cleansession set to false (the default), then 654 | # you may get unexpected behaviour from incoming topics if you change what 655 | # topics you are subscribing to. 
This is because the remote broker keeps the 656 | # subscription for the old topic. If you have this problem, connect your bridge 657 | # with cleansession set to true, then reconnect with cleansession set to false 658 | # as normal. 659 | #connection 660 | #address [:] [[:]] 661 | #topic [[[out | in | both] qos-level] local-prefix remote-prefix] 662 | 663 | # If you need to have the bridge connect over a particular network interface, 664 | # use bridge_bind_address to tell the bridge which local IP address the socket 665 | # should bind to, e.g. `bridge_bind_address 192.168.1.10` 666 | #bridge_bind_address 667 | 668 | # If a bridge has topics that have "out" direction, the default behaviour is to 669 | # send an unsubscribe request to the remote broker on that topic. This means 670 | # that changing a topic direction from "in" to "out" will not keep receiving 671 | # incoming messages. Sending these unsubscribe requests is not always 672 | # desirable, setting bridge_attempt_unsubscribe to false will disable sending 673 | # the unsubscribe request. 674 | #bridge_attempt_unsubscribe true 675 | 676 | # Set the version of the MQTT protocol to use with for this bridge. Can be one 677 | # of mqttv50, mqttv311 or mqttv31. Defaults to mqttv311. 678 | #bridge_protocol_version mqttv311 679 | 680 | # Set the clean session variable for this bridge. 681 | # When set to true, when the bridge disconnects for any reason, all 682 | # messages and subscriptions will be cleaned up on the remote 683 | # broker. Note that with cleansession set to true, there may be a 684 | # significant amount of retained messages sent when the bridge 685 | # reconnects after losing its connection. 686 | # When set to false, the subscriptions and messages are kept on the 687 | # remote broker, and delivered when the bridge reconnects. 688 | #cleansession false 689 | 690 | # Set the amount of time a bridge using the lazy start type must be idle before 691 | # it will be stopped. 
Defaults to 60 seconds. 692 | #idle_timeout 60 693 | 694 | # Set the keepalive interval for this bridge connection, in 695 | # seconds. 696 | #keepalive_interval 60 697 | 698 | # Set the clientid to use on the local broker. If not defined, this defaults to 699 | # 'local.'. If you are bridging a broker to itself, it is important 700 | # that local_clientid and clientid do not match. 701 | #local_clientid 702 | 703 | # If set to true, publish notification messages to the local and remote brokers 704 | # giving information about the state of the bridge connection. Retained 705 | # messages are published to the topic $SYS/broker/connection//state 706 | # unless the notification_topic option is used. 707 | # If the message is 1 then the connection is active, or 0 if the connection has 708 | # failed. 709 | # This uses the last will and testament feature. 710 | #notifications true 711 | 712 | # Choose the topic on which notification messages for this bridge are 713 | # published. If not set, messages are published on the topic 714 | # $SYS/broker/connection//state 715 | #notification_topic 716 | 717 | # Set the client id to use on the remote end of this bridge connection. If not 718 | # defined, this defaults to 'name.hostname' where name is the connection name 719 | # and hostname is the hostname of this computer. 720 | # This replaces the old "clientid" option to avoid confusion. "clientid" 721 | # remains valid for the time being. 722 | #remote_clientid 723 | 724 | # Set the password to use when connecting to a broker that requires 725 | # authentication. This option is only used if remote_username is also set. 726 | # This replaces the old "password" option to avoid confusion. "password" 727 | # remains valid for the time being. 728 | #remote_password 729 | 730 | # Set the username to use when connecting to a broker that requires 731 | # authentication. 732 | # This replaces the old "username" option to avoid confusion. 
"username" 733 | # remains valid for the time being. 734 | #remote_username 735 | 736 | # Set the amount of time a bridge using the automatic start type will wait 737 | # until attempting to reconnect. 738 | # This option can be configured to use a constant delay time in seconds, or to 739 | # use a backoff mechanism based on "Decorrelated Jitter", which adds a degree 740 | # of randomness to when the restart occurs. 741 | # 742 | # Set a constant timeout of 20 seconds: 743 | # restart_timeout 20 744 | # 745 | # Set backoff with a base (start value) of 10 seconds and a cap (upper limit) of 746 | # 60 seconds: 747 | # restart_timeout 10 60 748 | # 749 | # Defaults to jitter with a base of 5 and cap of 30 750 | #restart_timeout 5 30 751 | 752 | # If the bridge has more than one address given in the address/addresses 753 | # configuration, the round_robin option defines the behaviour of the bridge on 754 | # a failure of the bridge connection. If round_robin is false, the default 755 | # value, then the first address is treated as the main bridge connection. If 756 | # the connection fails, the other secondary addresses will be attempted in 757 | # turn. Whilst connected to a secondary bridge, the bridge will periodically 758 | # attempt to reconnect to the main bridge until successful. 759 | # If round_robin is true, then all addresses are treated as equals. If a 760 | # connection fails, the next address will be tried and if successful will 761 | # remain connected until it fails 762 | #round_robin false 763 | 764 | # Set the start type of the bridge. This controls how the bridge starts and 765 | # can be one of three types: automatic, lazy and once. Note that RSMB provides 766 | # a fourth start type "manual" which isn't currently supported by mosquitto.
767 | # 768 | # "automatic" is the default start type and means that the bridge connection 769 | # will be started automatically when the broker starts and also restarted 770 | # after a short delay (30 seconds) if the connection fails. 771 | # 772 | # Bridges using the "lazy" start type will be started automatically when the 773 | # number of queued messages exceeds the number set with the "threshold" 774 | # parameter. It will be stopped automatically after the time set by the 775 | # "idle_timeout" parameter. Use this start type if you wish the connection to 776 | # only be active when it is needed. 777 | # 778 | # A bridge using the "once" start type will be started automatically when the 779 | # broker starts but will not be restarted if the connection fails. 780 | #start_type automatic 781 | 782 | # Set the number of messages that need to be queued for a bridge with lazy 783 | # start type to be restarted. Defaults to 10 messages. 784 | # Must be less than max_queued_messages. 785 | #threshold 10 786 | 787 | # If try_private is set to true, the bridge will attempt to indicate to the 788 | # remote broker that it is a bridge not an ordinary client. If successful, this 789 | # means that loop detection will be more effective and that retained messages 790 | # will be propagated correctly. Not all brokers support this feature so it may 791 | # be necessary to set try_private to false if your bridge does not connect 792 | # properly. 793 | #try_private true 794 | 795 | # Some MQTT brokers do not allow retained messages. MQTT v5 gives a mechanism 796 | # for brokers to tell clients that they do not support retained messages, but 797 | # this is not possible for MQTT v3.1.1 or v3.1. If you need to bridge to a 798 | # v3.1.1 or v3.1 broker that does not support retained messages, set the 799 | # bridge_outgoing_retain option to false. This will remove the retain bit on 800 | # all outgoing messages to that bridge, regardless of any other setting. 
801 | #bridge_outgoing_retain true 802 | 803 | # If you wish to restrict the size of messages sent to a remote bridge, use the 804 | # bridge_max_packet_size option. This sets the maximum number of bytes for 805 | # the total message, including headers and payload. 806 | # Note that MQTT v5 brokers may provide their own maximum-packet-size property. 807 | # In this case, the smaller of the two limits will be used. 808 | # Set to 0 for "unlimited". 809 | #bridge_max_packet_size 0 810 | 811 | 812 | # ----------------------------------------------------------------- 813 | # Certificate based SSL/TLS support 814 | # ----------------------------------------------------------------- 815 | # Either bridge_cafile or bridge_capath must be defined to enable TLS support 816 | # for this bridge. 817 | # bridge_cafile defines the path to a file containing the 818 | # Certificate Authority certificates that have signed the remote broker 819 | # certificate. 820 | # bridge_capath defines a directory that will be searched for files containing 821 | # the CA certificates. For bridge_capath to work correctly, the certificate 822 | # files must have ".crt" as the file ending and you must run "openssl rehash 823 | # " each time you add/remove a certificate. 824 | #bridge_cafile 825 | #bridge_capath 826 | 827 | 828 | # If the remote broker has more than one protocol available on its port, e.g. 829 | # MQTT and WebSockets, then use bridge_alpn to configure which protocol is 830 | # requested. Note that WebSockets support for bridges is not yet available. 831 | #bridge_alpn 832 | 833 | # When using certificate based encryption, bridge_insecure disables 834 | # verification of the server hostname in the server certificate. This can be 835 | # useful when testing initial server configurations, but makes it possible for 836 | # a malicious third party to impersonate your server through DNS spoofing, for 837 | # example. Use this option in testing only. 
If you need to resort to using this 838 | # option in a production environment, your setup is at fault and there is no 839 | # point using encryption. 840 | #bridge_insecure false 841 | 842 | # Path to the PEM encoded client certificate, if required by the remote broker. 843 | #bridge_certfile 844 | 845 | # Path to the PEM encoded client private key, if required by the remote broker. 846 | #bridge_keyfile 847 | 848 | # ----------------------------------------------------------------- 849 | # PSK based SSL/TLS support 850 | # ----------------------------------------------------------------- 851 | # Pre-shared-key encryption provides an alternative to certificate based 852 | # encryption. A bridge can be configured to use PSK with the bridge_identity 853 | # and bridge_psk options. These are the client PSK identity, and pre-shared-key 854 | # in hexadecimal format with no "0x". Only one of certificate and PSK based 855 | # encryption can be used on one 856 | # bridge at once. 857 | #bridge_identity 858 | #bridge_psk 859 | 860 | 861 | # ================================================================= 862 | # External config files 863 | # ================================================================= 864 | 865 | # External configuration files may be included by using the 866 | # include_dir option. This defines a directory that will be searched 867 | # for config files. All files that end in '.conf' will be loaded as 868 | # a configuration file. It is best to have this as the last option 869 | # in the main file. This option will only be processed from the main 870 | # configuration file. The directory specified must not contain the 871 | # main configuration file. 872 | # Files within include_dir will be loaded sorted in case-sensitive 873 | # alphabetical order, with capital letters ordered first. If this option is 874 | # given multiple times, all of the files from the first instance will be 875 | # processed before the next instance. 
See the man page for examples. 876 | #include_dir 877 | -------------------------------------------------------------------------------- /docker/images/mqtt-explorer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM smeagolworms4/mqtt-explorer:browser-1.0.3 2 | 3 | COPY ./settings.json /mqtt-explorer/config/settings.json 4 | -------------------------------------------------------------------------------- /docker/images/mqtt-explorer/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "ConnectionManager_connections": { 3 | "mqtt-server": { 4 | "configVersion": 1, 5 | "certValidation": true, 6 | "clientId": "mqtt-explorer-e1085971", 7 | "id": "mqtt-server", 8 | "name": "MQTT Server", 9 | "encryption": false, 10 | "subscriptions": [ 11 | { 12 | "topic": "#", 13 | "qos": 0 14 | }, 15 | { 16 | "topic": "$SYS/#", 17 | "qos": 0 18 | } 19 | ], 20 | "type": "mqtt", 21 | "host": "mqtt-server", 22 | "port": 1883, 23 | "protocol": "mqtt" 24 | } 25 | } 26 | } -------------------------------------------------------------------------------- /docker/images/nodered/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nodered/node-red:2.2.3 2 | RUN npm install node-red-dashboard@3.6.5 3 | RUN npm install node-red-contrib-ui-actions@0.1.8 4 | RUN npm install node-red-node-ui-table@0.4.3 5 | RUN npm install node-red-contrib-ui-level@0.1.46 6 | -------------------------------------------------------------------------------- /docker/images/steve/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM maven:3.6.1-jdk-11 2 | 3 | ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 4 | 5 | WORKDIR /steve 6 | 7 | ENV DOCKERIZE_VERSION v0.6.1 8 | RUN wget --no-verbose https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ 9 | && tar -C 
/usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ 10 | && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz 11 | 12 | RUN wget -qO- https://github.com/steve-community/steve/archive/steve-3.6.0.tar.gz | tar xz --strip-components=1 13 | COPY main.properties src/main/resources/config/docker 14 | COPY init.sh . 15 | COPY keystore.jks . 16 | 17 | CMD /steve/init.sh 18 | -------------------------------------------------------------------------------- /docker/images/steve/init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e # exit on any error 3 | dockerize -wait tcp://ocpp-db:3306 -timeout 60s 4 | 5 | if [ ! -f ".buildsuccess" ]; then 6 | mvn clean package -Pdocker -Djdk.tls.client.protocols="TLSv1,TLSv1.1,TLSv1.2" 7 | touch .buildsuccess 8 | fi 9 | 10 | java -jar target/steve.jar -------------------------------------------------------------------------------- /docker/images/steve/keystore.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EVerest/everest-dev-environment/3c10aa4a054984463228a30ddf7f0866e9b3bfa7/docker/images/steve/keystore.jks -------------------------------------------------------------------------------- /docker/images/steve/main.properties: -------------------------------------------------------------------------------- 1 | # Just to be backwards compatible with previous versions, this is set to "steve", 2 | # since there might be already configured chargepoints expecting the older path. 3 | # Otherwise, might as well be changed to something else or be left empty. 
4 | # 5 | context.path = steve 6 | 7 | # Database configuration 8 | # 9 | db.ip = ocpp-db 10 | db.port = 3306 11 | db.schema = ocpp-db 12 | db.user = ocpp 13 | db.password = ocpp 14 | 15 | # Credentials for Web interface access 16 | # 17 | auth.user = admin 18 | auth.password = 1234 19 | 20 | # Jetty configuration 21 | # 22 | server.host = 0.0.0.0 23 | server.gzip.enabled = false 24 | 25 | # Jetty HTTP configuration 26 | # 27 | http.enabled = true 28 | http.port = 8180 29 | 30 | # Jetty HTTPS configuration 31 | # 32 | https.enabled = true 33 | https.port = 8443 34 | keystore.path = /steve/keystore.jks 35 | keystore.password = 123456 36 | 37 | # When the WebSocket/Json charge point opens more than one WebSocket connection, 38 | # we need a mechanism/strategy to select one of them for outgoing requests. 39 | # For allowed values see de.rwth.idsg.steve.ocpp.ws.custom.WsSessionSelectStrategyEnum. 40 | # 41 | ws.session.select.strategy = ALWAYS_LAST 42 | 43 | # if BootNotification messages arrive (SOAP) or WebSocket connection attempts are made (JSON) from unknown charging 44 | # stations, we reject these charging stations, because stations with these chargeBoxIds were NOT inserted into database 45 | # beforehand. by setting this property to true, this behaviour can be modified to automatically insert unknown 46 | # stations into database and accept their requests. 47 | # 48 | # CAUTION: setting this property to true is very dangerous, because we will accept EVERY BootNotification or WebSocket 49 | # connection attempt from ANY sender as long as the sender knows the URL and sends a valid message. 
50 | # 51 | auto.register.unknown.stations = false 52 | 53 | ### DO NOT MODIFY ### 54 | steve.version = ${project.version} 55 | git.describe = ${git.commit.id.describe} 56 | db.sql.logging = false 57 | profile = prod 58 | -------------------------------------------------------------------------------- /everest-complete-readonly.yaml: -------------------------------------------------------------------------------- 1 | RISE-V2G: 2 | git: https://github.com/EVerest/ext-RISE-V2G.git 3 | git_tag: master 4 | options: 5 | - THIRD_PARTY_APP_DST rise_v2g 6 | Josev: 7 | git: https://github.com/EVerest/ext-switchev-iso15118.git 8 | git_tag: everest 9 | options: 10 | - THIRD_PARTY_APP_DST josev 11 | everest-cmake: 12 | git: https://github.com/EVerest/everest-cmake.git 13 | git_tag: main 14 | everest-core: 15 | git: https://github.com/EVerest/everest-core.git 16 | git_tag: main 17 | everest-dev-environment: 18 | git: https://github.com/EVerest/everest-dev-environment.git 19 | git_tag: main 20 | everest-framework: 21 | git: https://github.com/EVerest/everest-framework.git 22 | git_tag: main 23 | everest-utils: 24 | git: https://github.com/EVerest/everest-utils.git 25 | git_tag: main 26 | libfsm: 27 | git: https://github.com/EVerest/libfsm.git 28 | git_tag: main 29 | liblog: 30 | git: https://github.com/EVerest/liblog.git 31 | git_tag: main 32 | options: 33 | - BUILD_EXAMPLES OFF 34 | libmodbus: 35 | git: https://github.com/EVerest/libmodbus.git 36 | git_tag: main 37 | options: 38 | - BUILD_EXAMPLES OFF 39 | libocpp: 40 | git: https://github.com/EVerest/libocpp.git 41 | git_tag: main 42 | libslac: 43 | git: https://github.com/EVerest/libslac.git 44 | git_tag: main 45 | libsunspec: 46 | git: https://github.com/EVerest/libsunspec.git 47 | git_tag: main 48 | options: 49 | - BUILD_EXAMPLES OFF 50 | libtimer: 51 | git: https://github.com/EVerest/libtimer.git 52 | git_tag: main 53 | options: 54 | - BUILD_EXAMPLES OFF 55 | ext-mbedtls: 56 | git: 
https://github.com/EVerest/ext-mbedtls.git 57 | git_tag: mbedtls-2.28.0-trustedCAKey 58 | ext-openv2g: 59 | git: https://github.com/EVerest/ext-openv2g.git 60 | git_tag: everest 61 | -------------------------------------------------------------------------------- /everest-complete.yaml: -------------------------------------------------------------------------------- 1 | RISE-V2G: 2 | git: git@github.com:EVerest/ext-RISE-V2G.git 3 | git_tag: master 4 | options: 5 | - THIRD_PARTY_APP_DST rise_v2g 6 | Josev: 7 | git: git@github.com:EVerest/ext-switchev-iso15118.git 8 | git_tag: everest 9 | options: 10 | - THIRD_PARTY_APP_DST josev 11 | everest-cmake: 12 | git: git@github.com:EVerest/everest-cmake.git 13 | git_tag: main 14 | everest-core: 15 | git: git@github.com:EVerest/everest-core.git 16 | git_tag: main 17 | everest-dev-environment: 18 | git: git@github.com:EVerest/everest-dev-environment.git 19 | git_tag: main 20 | everest-framework: 21 | git: git@github.com:EVerest/everest-framework.git 22 | git_tag: main 23 | everest-utils: 24 | git: git@github.com:EVerest/everest-utils.git 25 | git_tag: main 26 | libfsm: 27 | git: git@github.com:EVerest/libfsm.git 28 | git_tag: main 29 | liblog: 30 | git: git@github.com:EVerest/liblog.git 31 | git_tag: main 32 | options: 33 | - BUILD_EXAMPLES OFF 34 | libmodbus: 35 | git: git@github.com:EVerest/libmodbus.git 36 | git_tag: main 37 | options: 38 | - BUILD_EXAMPLES OFF 39 | libocpp: 40 | git: git@github.com:EVerest/libocpp.git 41 | git_tag: main 42 | libslac: 43 | git: git@github.com:EVerest/libslac.git 44 | git_tag: main 45 | libsunspec: 46 | git: git@github.com:EVerest/libsunspec.git 47 | git_tag: main 48 | options: 49 | - BUILD_EXAMPLES OFF 50 | libtimer: 51 | git: git@github.com:EVerest/libtimer.git 52 | git_tag: main 53 | options: 54 | - BUILD_EXAMPLES OFF 55 | ext-mbedtls: 56 | git: git@github.com:EVerest/ext-mbedtls.git 57 | git_tag: mbedtls-2.28.0-trustedCAKey 58 | ext-openv2g: 59 | git: 
git@github.com:EVerest/ext-openv2g.git 60 | git_tag: everest 61 | -------------------------------------------------------------------------------- /everest-metadata.yaml: -------------------------------------------------------------------------------- 1 | everest-admin-panel: 2 | name: "EVerest admin panel" 3 | description: "A beta-stage frontend for graphically editing the everest config file" 4 | license: "Apache-2.0" 5 | 6 | everest-cmake: 7 | description: "CMake utility scripts for everest" 8 | license: "Apache-2.0" 9 | # dependencies of everest-cmake 10 | cpm: 11 | name: "CPM.cmake" 12 | description: "CMake's missing package manager. A small CMake script for setup-free, cross-platform, reproducible dependency management" 13 | license: "MIT" 14 | code-coverage: 15 | name: "CodeCoverage.cmake" 16 | license: "BSD-3-Clause" 17 | 18 | everest-core: 19 | description: "This is the main part of EVerest containing the actual charge controller logic included in a large set of modules" 20 | license: "Apache-2.0" 21 | # dependencies of everest-core 22 | nanopb: 23 | description: "Protocol Buffers with small code size" 24 | license: "Zlib" 25 | sigslot: 26 | name: "Sigslot, a signal-slot library" 27 | description: "A simple C++14 signal-slots implementation " 28 | license: "MIT" 29 | ext-mbedtls: 30 | name: "Mbed TLS" 31 | description: "An open source, portable, easy to use, readable and flexible SSL library" 32 | license: "Apache-2.0" 33 | boost: 34 | description: "Boost provides free peer-reviewed portable C++ source libraries." 
35 | license: "BSL-1.0 & MIT & Python-2.0" 36 | pugixml: 37 | description: "Light-weight, simple and fast XML parser for C++ with XPath support" 38 | license: "MIT" 39 | Josev: 40 | description: "Implementation of the ISO 15118 Communication Protocol (-2, -20, -8)" 41 | license: "Apache-2.0" 42 | sqlite_cpp: 43 | name: "SQLiteCpp" 44 | description: "SQLiteC++ (SQLiteCpp) is a smart and easy to use C++ SQLite3 wrapper" 45 | license: "MIT" 46 | libcurl: 47 | name: "curl" 48 | description: "A command line tool and library for transferring data with URL syntax, supporting DICT, FILE, FTP, FTPS, GOPHER, GOPHERS, HTTP, HTTPS, IMAP, IMAPS, LDAP, LDAPS, MQTT, POP3, POP3S, RTMP, RTMPS, RTSP, SCP, SFTP, SMB, SMBS, SMTP, SMTPS, TELNET, TFTP, WS and WSS. libcurl offers a myriad of powerful features" 49 | license: "curl" 50 | libcbv2g: 51 | name: "libcbV2G" 52 | description: "libcbV2G - The V2GTP EXI codec library" 53 | license: "Apache-2.0" 54 | everest-utils: 55 | description: "EVerest utilities" 56 | license: "Apache-2.0" 57 | libpcap: 58 | license: "BSD-3-Clause" 59 | libevent: 60 | license: "BSD-3-Clause & MIT" 61 | openssl: 62 | license: "Apache-2.0" 63 | 64 | # from everest-dev-environment 65 | edm: 66 | name: "EDM" 67 | description: "Dependency Manager for EVerest" 68 | license: "Apache-2.0" 69 | # dependencies of edm 70 | jinja2: 71 | name: "Jinja2" 72 | description: "Jinja is a fast, expressive, extensible templating engine" 73 | license: "BSD-3-Clause" 74 | pyyaml: 75 | name: "PyYAML" 76 | description: "PyYAML is a YAML parser and emitter for Python" 77 | license: "MIT" 78 | requests: 79 | name: "Requests" 80 | description: "Requests is a simple, yet elegant, HTTP library." 
81 | license: "Apache-2.0" 82 | 83 | everest-framework: 84 | description: "Provides a mechanism to manage dependencies between different modules communicating with a wrapped MQTT protocol" 85 | license: "Apache-2.0" 86 | # dependencies of everest-framework 87 | catch2: 88 | name: "Catch2" 89 | description: "A modern, C++-native, test framework for unit-tests, TDD and BDD - using C++14, C++17 and later" 90 | license: "BSL-1.0" 91 | date: 92 | name: "Date" 93 | description: "A date and time library based on the C++11/14/17 header" 94 | license: "MIT" 95 | libfmt: 96 | name: "{fmt}" 97 | description: "A modern formatting library" 98 | license: "MIT" 99 | libwebsockets: 100 | name: "Libwebsockets" 101 | description: "Canonical libwebsockets.org networking library" 102 | license: "MIT" 103 | mqttc: 104 | name: "MQTT-C" 105 | description: "A portable MQTT C client for embedded systems and PCs alike." 106 | license: "MIT" 107 | nlohmann_json: 108 | description: "JSON for Modern C++" 109 | license: "MIT" 110 | nlohmann_json_schema_validator: 111 | description: "JSON schema validator for JSON for Modern C++" 112 | license: "MIT" 113 | pybind11: 114 | description: "Seamless operability between C++11 and Python" 115 | license: "BSD-3-Clause" 116 | pybind11_json: 117 | description: "Using nlohmann::json with pybind11" 118 | license: "BSD-3-Clause" 119 | ryml: 120 | name: "Rapid YAML" 121 | description: "Rapid YAML - a library to parse and emit YAML, and do it fast." 
122 | license: "MIT" 123 | libcap: 124 | license: "BSD-3-Clause" 125 | node-addon-api: 126 | license: "MIT" 127 | nodejs: 128 | license: "MIT & ISC & BSD-2-Clause & BSD-3-Clause & Artistic-2.0 & Apache-2.0" 129 | 130 | # from everest-utils 131 | ev-dev-tools: 132 | name: "ev-cli" 133 | description: "EVerest module auto generation" 134 | license: "Apache-2.0" 135 | # dependencies of ev-cli 136 | jsonschema: 137 | description: "jsonschema is an implementation of the JSON Schema specification for Python" 138 | license: "MIT" 139 | stringcase: 140 | description: "String case converter" 141 | license: "MIT" 142 | 143 | # from everest-utils 144 | everest-testing: 145 | name: "EVerest Testing" 146 | description: "This python package provides utilities for testing EVerest with pytest" 147 | license: "Apache-2.0" 148 | # dependencies of everest-testing 149 | pytest: 150 | description: "pytest: simple powerful testing with Python" 151 | license: "MIT" 152 | pytest-asyncio: 153 | description: "Pytest support for asyncio" 154 | license: "Apache-2.0" 155 | python-dateutil: 156 | description: "Extensions to the standard Python datetime module" 157 | license: "Apache-2.0" 158 | paho-mqtt: 159 | description: "MQTT version 5.0/3.1.1 client class" 160 | license: "EPL-2.0" 161 | pyftpdlib: 162 | description: "Very fast asynchronous FTP server library" 163 | license: "MIT" 164 | ocpp: 165 | description: "Python package implementing the JSON version of the Open Charge Point Protocol (OCPP)" 166 | license: "MIT" 167 | websockets: 168 | description: "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" 169 | license: "BSD-3-Clause" 170 | pyOpenSSL: 171 | description: "Python wrapper module around the OpenSSL library" 172 | license: "Apache-2.0" 173 | 174 | libfsm: 175 | description: "A tiny C++14 library for writing maintainable finite state machines" 176 | license: "Apache-2.0" 177 | 178 | liblog: 179 | description: "C++ logging and exceptions library for the EVerest 
framework" 180 | license: "Apache-2.0" 181 | # dependencies of liblog 182 | metamacros: 183 | description: "Macros for metaprogramming" 184 | license: "MIT" 185 | libbacktrace: 186 | description: "A C library that may be linked into a C/C++ program to produce symbolic backtraces" 187 | license: "BSD-3-Clause" 188 | 189 | libmodbus: 190 | description: "This is an implementation of the MODBUS communication protocol" 191 | license: "Apache-2.0" 192 | # dependencies of libmodbus 193 | gtest: 194 | name: "GoogleTest" 195 | description: "GoogleTest - Google Testing and Mocking Framework" 196 | license: "BSD-3-Clause" 197 | 198 | libocpp: 199 | description: "This is a C++ library implementation of OCPP for version 1.6 and 2.0.1" 200 | license: "Apache-2.0" 201 | # dependencies of libocpp 202 | sqlite3: 203 | name: "SQLite" 204 | description: "SQLite is a C-language library that implements a small, fast, self-contained, high-reliability, full-featured, SQL database engine" 205 | license: "SQLite Blessing (Public Domain)" 206 | websocketpp: 207 | name: "WebSocket++" 208 | description: "C++ websocket client/server library" 209 | license: "BSD-3-Clause" 210 | libevse-security: 211 | description: "This is a C++ library for security related operations for charging stations" 212 | license: "Apache-2.0" 213 | 214 | libslac: 215 | description: "Simple ISO15118-3 SLAC library" 216 | license: "Apache-2.0" 217 | # dependencies of libslac 218 | hash-library: 219 | description: "Portable C++ hashing library" 220 | license: "Zlib" 221 | 222 | libtimer: 223 | description: "C++ timer library for the EVerest framework" 224 | license: "Apache-2.0" 225 | -------------------------------------------------------------------------------- /everest_dev_tool/.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | __pycache__ 3 | *.egg-info 4 | -------------------------------------------------------------------------------- 
/everest_dev_tool/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /everest_dev_tool/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "everest_dev_tool" 3 | version = "0.0.1" 4 | description = "This tool provides helpful commands to setup/control your dev environment" 5 | license = { text="Apache-2.0" } 6 | dependencies = [] 7 | 8 | [project.scripts] 9 | everest = "everest_dev_tool:main" 10 | -------------------------------------------------------------------------------- /everest_dev_tool/src/everest_dev_tool/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.0.1" 2 | 3 | from . 
import argparse
import logging
import subprocess

default_logger = logging.getLogger("EVerest's Development Tool - Git Helpers")


def clone_handler(args: argparse.Namespace, log: logging.Logger = default_logger):
    """Clone an EVerest repository from GitHub.

    Builds the remote URL from ``args.organization`` and
    ``args.repository_name`` — SSH by default, HTTPS when ``args.https``
    is set — and checks out ``args.branch``.

    Raises:
        subprocess.CalledProcessError: if ``git clone`` exits non-zero.
    """
    log.debug("Running clone handler")

    # SSH is the default transport; --https switches to anonymous HTTPS.
    url_prefix = "https://github.com/" if args.https else "git@github.com:"
    repository_url = f"{url_prefix}{args.organization}/{args.repository_name}.git"

    subprocess.run(["git", "clone", "-b", args.branch, repository_url], check=True)
import argparse
import logging

from . import services, git_handlers

log = logging.getLogger("EVerest's Development Tool")


def get_parser(version: str) -> argparse.ArgumentParser:
    """Build the top-level argument parser for the ``everest`` dev tool.

    Args:
        version: Version string reported by ``--version``.

    Returns:
        A configured ``argparse.ArgumentParser`` whose parsed namespace
        always carries an ``action_handler`` callable (the bare ``everest``
        invocation defaults to printing the help text).
    """
    # Shared "-v/--verbose" flag. ``add_help=False`` lets this parent be
    # mixed into every (sub)parser without clashing with their own "-h",
    # replacing the previous hand-duplicated add_argument calls.
    verbose_parent = argparse.ArgumentParser(add_help=False)
    verbose_parent.add_argument('-v', '--verbose', action='store_true', help="Verbose output")

    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                     description="EVerest's Development Tool",
                                     parents=[verbose_parent])
    parser.add_argument('--version', action='version', version=f'%(prog)s { version }')
    parser.set_defaults(action_handler=lambda _: parser.print_help())

    subparsers = parser.add_subparsers(help="available commands")

    # Service related commands
    services_parser = subparsers.add_parser("services", help="Service related commands",
                                            add_help=True, parents=[verbose_parent])
    services_subparsers = services_parser.add_subparsers(help="Service related commands")

    start_service_parser = services_subparsers.add_parser("start", help="Start a service",
                                                          add_help=True, parents=[verbose_parent])
    start_service_parser.add_argument("service_name", help="Name of Service to start")
    start_service_parser.set_defaults(action_handler=services.start_service_handler)

    stop_service_parser = services_subparsers.add_parser("stop", help="Stop a service",
                                                         add_help=True, parents=[verbose_parent])
    stop_service_parser.add_argument("service_name", help="Name of Service to stop")
    stop_service_parser.set_defaults(action_handler=services.stop_service_handler)

    services_info_parser = services_subparsers.add_parser("info", help="Show information about the current environment",
                                                          add_help=True, parents=[verbose_parent])
    services_info_parser.set_defaults(action_handler=services.info_handler)

    list_services_parser = services_subparsers.add_parser("list", help="List all available services",
                                                          add_help=True, parents=[verbose_parent])
    list_services_parser.set_defaults(action_handler=services.list_services_handler)

    # Git related commands
    clone_parser = subparsers.add_parser("clone", help="Clone a repository",
                                         add_help=True, parents=[verbose_parent])
    clone_parser.add_argument('--organization', '--org', default="EVerest", help="Github Organization name, default is 'EVerest'")
    clone_parser.add_argument('--branch', '-b', default="main", help="Branch to checkout, default is 'main'")
    clone_parser.add_argument('--https', action='store_true', help="Use HTTPS to clone the repository, default is 'SSH'")
    clone_parser.add_argument("repository_name", help="Name of the repository to clone")
    clone_parser.set_defaults(action_handler=git_handlers.clone_handler)

    return parser


def setup_logging(verbose: bool):
    """Configure the module logger: DEBUG when *verbose*, INFO otherwise."""
    log.setLevel(logging.DEBUG if verbose else logging.INFO)
    console_handler = logging.StreamHandler()
    log.addHandler(console_handler)


def main(parser: argparse.ArgumentParser):
    """Parse CLI arguments and dispatch to the selected ``action_handler``.

    The module logger is attached to the namespace so every handler can
    log through ``args.logger``.
    """
    args = parser.parse_args()
    args.logger = log

    setup_logging(args.verbose)

    args.action_handler(args)
DockerEnvironmentInfo: 12 | container_id: str | None = None 13 | container_name: str | None = None 14 | 15 | container_image: str | None = None 16 | container_image_id: str | None = None 17 | container_image_digest: str | None = None 18 | 19 | compose_files: List[str] | None = None 20 | compose_project_name: str | None = None 21 | 22 | in_docker_container: bool = False 23 | 24 | @dataclass 25 | class DockerComposeCommand: 26 | class Command(enum.Enum): 27 | UP = "up" 28 | DOWN = "down" 29 | PS = "ps" 30 | compose_files: List[str] 31 | project_name: str 32 | command: Command 33 | services: List[str] | None = None 34 | def execute_command(self, log: logging.Logger): 35 | command_list = ["docker", "compose"] 36 | for compose_file in self.compose_files: 37 | command_list.extend(["-f", compose_file]) 38 | command_list.extend(["-p", self.project_name]) 39 | if self.command == DockerComposeCommand.Command.UP: 40 | command_list.extend(["up", "-d"]) 41 | command_list.extend(self.services) 42 | elif self.command == DockerComposeCommand.Command.DOWN: 43 | command_list.extend(["down"]) 44 | command_list.extend(self.services) 45 | elif self.command == DockerComposeCommand.Command.PS: 46 | command_list.extend(["ps"]) 47 | else: 48 | log.error(f"Unknown command {self.command}") 49 | return 50 | log.debug(f"Executing command: {' '.join(command_list)}") 51 | subprocess.run(command_list, check=True) 52 | 53 | @dataclass 54 | class Service: 55 | """Class to represent a service""" 56 | name: str 57 | description: str 58 | start_command: List[str] | DockerComposeCommand 59 | stop_command: List[str] | DockerComposeCommand 60 | 61 | #################### 62 | # Helper functions # 63 | #################### 64 | 65 | def get_docker_environment_info(log: logging.Logger) -> DockerEnvironmentInfo: 66 | dei = DockerEnvironmentInfo() 67 | 68 | # Check if we are running in a docker container 69 | if not os.path.exists("/.dockerenv"): 70 | log.debug("Not running in Docker Container") 71 | 
def get_docker_environment_info(log: logging.Logger) -> DockerEnvironmentInfo:
    """Collect information about the docker environment the tool runs in.

    Returns:
        A ``DockerEnvironmentInfo``; when not inside a container only
        ``in_docker_container`` (False) is meaningful.

    Exits the process (``sys.exit(1)``) when the expected devcontainer
    compose file is missing or the container is not part of a compose
    project.
    """
    dei = DockerEnvironmentInfo()

    # /.dockerenv is created by the docker runtime inside every container.
    if not os.path.exists("/.dockerenv"):
        log.debug("Not running in Docker Container")
        dei.in_docker_container = False
        return dei

    log.debug("Running in Docker Container")
    dei.in_docker_container = True

    # Inside a container the hostname is the (short) container id.
    # check=True so a failing hostname call doesn't silently yield "".
    dei.container_id = subprocess.run(["hostname"], stdout=subprocess.PIPE,
                                      check=True).stdout.decode().strip()
    client = docker.from_env()
    # Hoisted: the container was previously looked up once per attribute.
    container = client.containers.get(dei.container_id)
    dei.container_name = container.name

    # Image information; an untagged (dangling) image has an empty tag list,
    # so guard instead of crashing on tags[0].
    image_tags = container.image.tags
    dei.container_image = image_tags[0] if image_tags else None
    dei.container_image_id = container.image.id
    dei.container_image_digest = client.images.get(dei.container_image_id).id

    # Get the compose information
    if not os.path.exists("/workspace/.devcontainer/docker-compose.yml"):
        log.error("docker-compose.yml not found in /workspace/.devcontainer")
        sys.exit(1)
    dei.compose_files = ["/workspace/.devcontainer/docker-compose.yml"]

    # Check if the container is part of a docker-compose project
    labels = container.attrs["Config"]["Labels"]
    if "com.docker.compose.project" not in labels:
        log.error("Container is not part of a docker-compose project")
        sys.exit(1)
    dei.compose_project_name = labels["com.docker.compose.project"]

    return dei
def get_services(docker_env_info: DockerEnvironmentInfo, log: logging.Logger) -> List[Service]:
    """Return every service the dev tool can manage in this environment.

    Args:
        docker_env_info: Supplies the compose files and project name every
            command targets.
        log: Unused here; kept for signature consistency with the handlers.
    """
    def compose(command: DockerComposeCommand.Command, services: List[str]) -> DockerComposeCommand:
        # All service commands share the same compose files and project.
        return DockerComposeCommand(
            compose_files=docker_env_info.compose_files,
            project_name=docker_env_info.compose_project_name,
            services=services,
            command=command,
        )

    up = DockerComposeCommand.Command.UP
    down = DockerComposeCommand.Command.DOWN
    return [
        Service(
            name="mqtt-server",
            description="MQTT Server",
            start_command=compose(up, ["mqtt-server"]),
            stop_command=compose(down, ["mqtt-server"]),
        ),
        Service(
            name="steve",
            description="OCPP server for development of OCPP 1.6",
            start_command=compose(up, ["steve"]),
            # steve starts its database implicitly; stop takes both down.
            stop_command=compose(down, ["steve", "ocpp-db"]),
        ),
        Service(
            name="mqtt-explorer",
            description="Web based MQTT Client to inspect mqtt traffic",
            start_command=compose(up, ["mqtt-explorer"]),
            stop_command=compose(down, ["mqtt-explorer"]),
        ),
    ]


def get_service_by_name(service_name: str, docker_env_info: DockerEnvironmentInfo,
                        log: logging.Logger) -> Service | None:
    """Look up a service by name.

    Returns:
        The matching ``Service``, or ``None`` when no service with that
        name exists (the previous ``-> Service`` annotation hid this).
    """
    return next((service for service in get_services(docker_env_info, log)
                 if service.name == service_name), None)
def _execute_service_command(command, log: logging.Logger):
    """Run one service command: a DockerComposeCommand or a plain argv list."""
    if isinstance(command, DockerComposeCommand):
        command.execute_command(log)
    else:
        subprocess.run(command, check=True)


def _lookup_service(args: argparse.Namespace, log: logging.Logger):
    """Resolve ``args.service_name`` to a Service, or None (with an error log)."""
    docker_env_info = get_docker_environment_info(log)
    service = get_service_by_name(args.service_name, docker_env_info, log)
    if service is None:
        log.error(f"Service {args.service_name} not found, try 'everest services list' to get a list of available services")
    return service


def start_service_handler(args: argparse.Namespace):
    """CLI handler: start the service named ``args.service_name``."""
    log = args.logger
    service = _lookup_service(args, log)
    if service is None:
        return

    log.info(f"Starting service {service.name}")
    _execute_service_command(service.start_command, log)


def stop_service_handler(args: argparse.Namespace):
    """CLI handler: stop the service named ``args.service_name``."""
    log = args.logger
    service = _lookup_service(args, log)
    if service is None:
        return

    log.info(f"Stopping service {service.name}")
    _execute_service_command(service.stop_command, log)


def list_services_handler(args: argparse.Namespace):
    """CLI handler: print every available service with its description."""
    log = args.logger
    docker_env_info = get_docker_environment_info(log)
    log.info("Available services:")
    for service in get_services(docker_env_info, log):
        log.info(f"{service.name}: {service.description}")
        log.debug(f"Start Command: {service.start_command}")
        log.debug(f"Stop Command: {service.stop_command}")


def info_handler(args: argparse.Namespace):
    """CLI handler: show ``docker compose ps`` for the current project."""
    log = args.logger
    docker_env_info = get_docker_environment_info(log)
    command = DockerComposeCommand(
        compose_files=docker_env_info.compose_files,
        project_name=docker_env_info.compose_project_name,
        command=DockerComposeCommand.Command.PS
    )
    command.execute_command(log)