├── .dockerignore ├── .github └── workflows │ ├── greenhack22.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── Dockerfile ├── LICENSE ├── Makefile ├── Pipfile ├── README.md ├── boagent ├── __init__.py ├── api │ ├── __init__.py │ ├── api.py │ ├── config.py │ ├── exceptions.py │ ├── models.py │ ├── process.py │ └── utils.py ├── hardware │ ├── __init__.py │ └── lshw.py └── public │ ├── assets │ ├── boavizta-logo-4.png │ ├── data.csv │ ├── dygraph.css │ ├── dygraph.min.js │ ├── favicon.ico │ ├── favicon.png │ ├── git-logo.png │ ├── graph.ts │ ├── jquery-3.6.1.min.js │ ├── license.txt │ ├── main.css │ ├── platypus_logo.png │ ├── pureknob.js │ ├── synchronizer.js │ └── table-style.css │ └── index.html ├── boagent_color.svg ├── compose └── development.yaml ├── docker-compose.yaml ├── hardware_cli.py ├── poetry.lock ├── pyproject.toml ├── pytest.ini ├── requirements.txt ├── setup.py ├── setup └── docker-compose.yaml └── tests ├── __init__.py ├── api ├── test_api_integration.py ├── test_api_process.py └── test_api_unit.py ├── hardware ├── test_hardwarecli.py └── test_lshw.py └── mocks ├── boaviztapi_response_not_verbose.json ├── boaviztapi_response_verbose.json ├── formatted_power_data_one_hour.json ├── formatted_scaphandre.json ├── get_metrics_not_verbose.json ├── get_metrics_verbose.json ├── get_metrics_verbose_no_hdd.json ├── hardware_data.json ├── hubblo-ci-01_lshw.json ├── lshw_data.json ├── lshw_data_sudo.json ├── mocks.py ├── nvme_data.json ├── nvme_data_sudo.json ├── power_data.json ├── sudo_lshw_data.json ├── sudo_lshw_data_disks.json └── sync-ce-re_lshw.json /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | venv 3 | -------------------------------------------------------------------------------- /.github/workflows/greenhack22.yml: -------------------------------------------------------------------------------- 1 | name: Publish release 2 | 3 | on: 4 | push: 5 | branches: 
6 | - 'greenhack22' 7 | 8 | jobs: 9 | 10 | docker: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v4 15 | 16 | - name: Login to GitHub Container Registry 17 | uses: docker/login-action@v3 18 | with: 19 | registry: ghcr.io 20 | username: ${{ github.actor }} 21 | password: ${{ secrets.GITHUB_TOKEN }} 22 | 23 | - name: Setup Python 3 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: '3.9' 27 | 28 | - name: Build docker image 29 | run: docker build . --tag ghcr.io/boavizta/boagent:greenhack22 --cache-from ghcr.io/boavizta/boagent:greenhack22 30 | 31 | - name: Push docker image 32 | run: docker push ghcr.io/boavizta/boagent:greenhack22 33 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Publish release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | 9 | docker: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v4 14 | 15 | - name: Login to GitHub Container Registry 16 | uses: docker/login-action@v3 17 | with: 18 | registry: ghcr.io 19 | username: ${{ github.actor }} 20 | password: ${{ secrets.GITHUB_TOKEN }} 21 | 22 | - name: Setup Python 3 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: '3.9' 26 | 27 | - name: Install poetry 28 | run: python -m pip install --upgrade poetry wheel 29 | 30 | - name: Build docker image 31 | run: docker build . 
--tag ghcr.io/boavizta/boagent:$(poetry version -s) 32 | 33 | - name: Push docker image 34 | run: docker push ghcr.io/boavizta/boagent:$(poetry version -s) 35 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Execute tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | paths: 9 | - "boagent/**" 10 | - "tests/**" 11 | - "poetry.lock" 12 | - "pyproject.toml" 13 | pull_request: 14 | branches: 15 | - main 16 | - dev 17 | paths: 18 | - "boagent/**" 19 | - "tests/**" 20 | - "poetry.lock" 21 | - "pyproject.toml" 22 | 23 | jobs: 24 | test: 25 | strategy: 26 | matrix: 27 | version: ["3.10", "3.11"] 28 | runs-on: ubuntu-latest 29 | steps: 30 | - uses: actions/checkout@v4 31 | - name: Python setup 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: ${{ matrix.version }} 35 | - name: Poetry setup 36 | run : python3 -m pip install --upgrade poetry wheel 37 | - name: Install dependencies 38 | run: poetry install 39 | - name: Execute tests 40 | run: poetry run python3 -m pytest 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | _pycache_/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | api/service/server_impact/ref/jupyter_data.ipynb 80 | api/service/server_impact/ref/__pycache__ 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | .python-version 88 | 89 | # pipenv 90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 93 | # install all needed dependencies. 94 | #Pipfile.lock 95 | 96 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 97 | _pypackages_/ 98 | 99 | # Celery stuff 100 | celerybeat-schedule 101 | celerybeat.pid 102 | 103 | # SageMath parsed files 104 | *.sage.py 105 | 106 | # Environments 107 | .env 108 | .venv 109 | env/ 110 | venv/ 111 | ENV/ 112 | env.bak/ 113 | venv.bak/ 114 | 115 | # Spyder project settings 116 | .spyderproject 117 | .spyproject 118 | 119 | # Rope project settings 120 | .ropeproject 121 | 122 | # mkdocs documentation 123 | /site 124 | 125 | # mypy 126 | .mypy_cache/ 127 | .dmypy.json 128 | dmypy.json 129 | 130 | # Pyre type checker 131 | .pyre/ 132 | 133 | # MacOS 134 | .DS_Store 135 | 136 | # IDE 137 | .idea/ 138 | .vscode/ 139 | 140 | *.db 141 | *.svg 142 | *.csv 143 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v2.3.0 4 | hooks: 5 | - id: check-yaml 6 | - id: end-of-file-fixer 7 | - id: trailing-whitespace 8 | - repo: https://github.com/psf/black 9 | rev: 22.10.0 10 | hooks: 11 | - id: black 12 | - repo: https://github.com/PyCQA/flake8 13 | rev: 7.0.0 14 | hooks: 15 | - id: flake8 16 | entry: flake8 --ignore=E501,W503 --per-file-ignores='__init__.py:F401' 17 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-slim 2 | 3 | LABEL org.opencontainers.image.authors="open-source@boavizta.org" 4 | LABEL org.opencontainers.image.description="Docker image for Boagent, a local API & environmental impact monitoring tool." 5 | LABEL org.opencontainers.image.licenses=Apache-2.0 6 | 7 | WORKDIR /home/boagent 8 | 9 | RUN python3 -m pip install --upgrade poetry 10 | 11 | RUN apt update && apt install lshw nvme-cli -y 12 | 13 | COPY pyproject.toml . 
14 | 15 | RUN poetry install 16 | 17 | COPY . . 18 | 19 | EXPOSE 8000 20 | 21 | ENTRYPOINT ["poetry", "run", "uvicorn", "--reload", "boagent.api.api:app", "--host", "0.0.0.0"] 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2022 Boavizta 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | CURRENT_VERSION := $(shell poetry version -s) 2 | SEMVERS := major minor patch 3 | LAST_TAG := $(shell git describe --tags --abbrev=0) 4 | 5 | tag_version: 6 | git commit -m "release: bump to ${CURRENT_VERSION}" pyproject.toml 7 | git tag ${CURRENT_VERSION} 8 | 9 | $(SEMVERS): 10 | poetry version $@ 11 | $(MAKE) tag_version 12 | 13 | release: 14 | git push origin tag ${LAST_TAG} 15 | gh release create --verify-tag ${LAST_TAG} --notes-from-tag 16 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | fastapi = '0.75.2' 8 | uvicorn = '*' 9 | pandas = '*' 10 | aiofile = '*' 11 | mangum = "*" 12 | boaviztapi-sdk='0.1.2' 13 | cpuid='0.0.10' 14 | py-cpuinfo='8.0.0' 15 | dataclasses='0.8' 16 | requests = "*" 17 | sqlalchemy = "*" 18 | 
pydantic = {extras = ["dotenv"], version = "*"} 19 | croniter = "*" 20 | 21 | [dev-packages] 22 | mkdocs = '*' 23 | pytest = '*' 24 | atomicwrites = "*" 25 | mkdocs-material = "*" 26 | httpx = '*' 27 | pytest-asyncio = '*' 28 | requests = '*' 29 | setuptools-pipfile = "==0.4.1" 30 | twine = "==3.2.0" 31 | bump2version = "==1.0.1" 32 | 33 | [requires] 34 | python_version = "3.8" 35 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |

4 |

5 | Local API / sidecar / companion of a running application that computes and gives insights to the application regarding its environmental impacts. 6 |

7 | 8 | --- 9 | 10 | _If no parameters are passed to the API to isolate the application, then the impact of the whole machine is calculated._ 11 | 12 | ## How to use 13 | 14 | This is an API, you could use either your browser, cURL, or call it directly from an application (which is the main usecase). 15 | 16 | Once the API is running, a Swagger interface is available on [localhost:8000/docs](http://localhost:8000/docs). 17 | 18 | 19 | ### Run natively 20 | 21 | Boagent will not be able to return proper responses from its endpoints without root privileges in order to fetch hardware data. 22 | It also needs information from BoaviztAPI and Scaphandre, see the [setup informations](#Setup). 23 | 24 | To run it : 25 | 26 | Without `poetry` 27 | 28 | ``` 29 | apt update && apt install lshw nvme-cli -y 30 | pip3 install -r requirements.txt 31 | cd boagent/api/ 32 | uvicorn api:app --reload 33 | ``` 34 | 35 | With `poetry` 36 | 37 | ``` 38 | apt update && apt install lshw nvme-cli -y 39 | poetry install --only main 40 | poetry run uvicorn --reload boagent.api.api:app 41 | ``` 42 | 43 | ### Run in a docker container 44 | 45 | You could pull the [image](https://github.com/Boavizta/boagent/pkgs/container/boagent) with `docker pull ghcr.io/boavizta/boagent:0.1.0`. 46 | 47 | ### Run in docker-compose (with all the requirements) 48 | 49 | To get started you need docker and docker-compose installed on your machine. On a Debian or Ubuntu environment, run : 50 | 51 | # apt update && apt install -y docker.io docker-compose 52 | 53 | To get the full setup easily, you could run the stack in docker-compose with `docker-compose up -d`. `docker-compose.yml`, at the root of the project will build a Docker image from the source for Boagent, and setup a container for [Scaphandre](#Scaphandre) and another for the [BoaviztAPI](#BoaviztAPI), allowing you to get the full evaluation easily on a physical machine. 
54 | 55 | Please see [Configuration](#Configuration) for the environment variables you can tweak in the Boagent container. 56 | 57 | ### Use `hardware_cli` 58 | 59 | To have an example of the retrieved hardware information by Boagent, you can run `sudo ./hardware_cli.py`. 60 | At the moment, it will output the formatted data for CPU, RAM and storage devices used by Boagent when sending a request to BoaviztAPI. 61 | `sudo ./hardware_cli.py --output-file ` can send the formatted output to a file. 62 | 63 | ## Setup 64 | 65 | ## Linux 66 | 67 | Boagent parses output from `lshw` (a tool listing hardware components and characteristics) and `nvme-cli` (a tool listing information on SSD storage 68 | devices available through NVME interfaces). To get all actually parsed information (and for future developments), Boagent needs those two programs and to execute them with root privileges. 69 | 70 | ### BoaviztAPI 71 | 72 | You need either to use an existing BoaviztAPI endpoint, or to build the BoaviztAPI container image, then run the container locally on port 5000. 73 | 74 | Depending on your setup, specify the endpoint to be used with the environment variable `BOAVIZTAPI_ENDPOINT`, see [Configuration](#Configuration). 75 | 76 | Ensure that the version of BoaviztAPI SDK installed (see `requirements.txt` or `pyproject.toml`) is the same as the version of the API running the endpoint you use. 77 | 78 | ### Scaphandre 79 | 80 | To get power consumption metrics, you need [Scaphandre](https://github.com/hubblo-org/scaphandre) running in the background, with the JSON exporter. 
This will write power metrics to a file, that Boagent will read : 81 | 82 | ``` 83 | scaphandre json -s 5 -f power_data.json 84 | ``` 85 | 86 | ## Configuration 87 | 88 | Boagent can be configured with the following variables : 89 | 90 | - `DEFAULT_LIFETIME`: machine's lifetime used to compute the scope 3 / manufacturing, transport, end-of-life impacts 91 | - `HARDWARE_FILE_PATH`: path to the file containing the hardware list (output from `lshw.py`) 92 | - `POWER_FILE_PATH`: path to the file containing power measurements (output from [Scaphandre](https://github.com/hubblo-org/scaphandre) with JSON exporter) 93 | - `HARDWARE_CLI`: path to the executable file to collect hardware information (`lshw.py` from this project) 94 | - `BOAVIZTAPI_ENDPOINT`: HTTP endpoint to the BoaviztAPI, in the form `http://myendpoint.com:PORTNUMBER` 95 | 96 | You can set those variables in the following order (as interpreted by the tool): 97 | 98 | 1. export the variable in the environment 99 | 2. write it in the .env file in the same folder as `api.py` 100 | 3. rely on default values from `config.py` 101 | 102 | You can check the configuration applied by querying the `/info` route. 103 | 104 | ## How it works 105 | 106 | Currently, Boagent only works for Linux systems. 107 | 108 | Boagent exposes multiple API endpoints, most notably `/query` and `/metrics`. Both will query an instance of [BoaviztAPI](https://doc.api.boavizta.org/) in order to give the environmental impacts 109 | of the received hardware data. `/query` will return a response in JSON format, and `/metrics` will return a response parsable by a Prometheus instance. If needed, both those 110 | endpoints can return data from [Scaphandre](https://github.com/hubblo-org/scaphandre/) and give the energy consumption of components from the queried hardware. 
111 | 112 | Presently, Boagent gets hardware data through a parsing of the output of `lshw`, a common utility available for Linux distributions that lists a lot of information of all 113 | hardware components on a running computer. The code for this `Lshw` class is an adaptation of [netbox-agent](https://github.com/Solvik/netbox-agent)'s implementation. 114 | `lshw`, to get all proper data needed by BoaviztAPI, needs to be executed as a privileged user with `sudo`. Boagent, executed with the available `docker-compose` file, 115 | will run as privileged and will be able to receive the needed hardware data. At the moment, only data for the CPU, RAM and storage (either HDD or SSD) are parsed and sent to BoaviztAPI 116 | in order to calculate impacts. 117 | 118 | Another endpoint, `process_embedded_impacts`, allows calculating the embedded impacts of a process running on the host, in relation to the host components (CPU, RAM and storage). It will give, for all the components, the average, maximum and minimum values between two timestamps for three environmental impact factors : Global Warming Potential (in KgCO2e), Abiotic Depletion Potential (in KgSbeq) and Primary Energy (in microjoules). To get this information, a Linux Process ID has to be provided. 119 | 120 | ## Deeper explanations 121 | 122 | ### Environmental metrics 123 | 124 | This project uses the Life Cycle Assessment (ISO 14040 / 14044) methodology as a reference. 125 | 126 | This way, it is intended to evaluate the impacts on all life cycle phases (extraction, manufacturing, shipping, use, end of life). 
**Today we only evaluate manufacturing and use phases.** 127 | 128 | Here are the impacts considered so far : 129 | 130 | - Green House Gas emissions / Global Warming Potential (see GHG protocol as a reference) 131 | - resources extraction (LCA) / scope 3 (GHG protocol) ✔️ 132 | - use (LCA) / scope 2 (GHG protocol) ✔️ 133 | - manufacturing (LCA) / scope 3 (GHG protocol) ✔️ 134 | - shipping (LCA) / scope 3 (GHG protocol) ❌ 135 | - end of life (LCA) / scope 3 (GHG protocol) ❌ 136 | - Abiotic resources depletion (minerals), criteria called ADP or Abiotic Depletion Potential 137 | - resources extraction (LCA) ✔️ 138 | - use (LCA) ✔️ 139 | - manufacturing (LCA) ✔️ 140 | - shipping (LCA) ❌ 141 | - end of life (LCA) ❌ 142 | - Primary energy usage : PE 143 | - resources extraction (LCA) ✔️ 144 | - use (LCA) ✔️ 145 | - manufacturing (LCA) ✔️ 146 | - shipping (LCA) ❌ 147 | - end of life (LCA) ❌ 148 | -------------------------------------------------------------------------------- /boagent/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Boagent 3 | 4 | Monitoring agent/framework for evaluating the environmental impacts of a machine and its applications, including several to all steps of the life cycle of the machine and service, plus multiple criteria of impacts (not just CO2eq metrics / Global Warming Potential). Part of the efforts of https://boavizta.org/en and https://sdialliance.org/. 
5 | """ 6 | 7 | __version__ = "0.1.0" 8 | __author__ = "Boavizta <open-source@boavizta.org>" 9 | __credits__ = "Boavizta contributors" 10 | -------------------------------------------------------------------------------- /boagent/api/__init__.py: -------------------------------------------------------------------------------- 1 | from .api import ( 2 | build_hardware_data, 3 | format_usage_request, 4 | read_hardware_data, 5 | get_hardware_data, 6 | query_machine_impact_data, 7 | compute_average_consumption, 8 | get_power_data, 9 | get_metrics, 10 | ) 11 | -------------------------------------------------------------------------------- /boagent/api/api.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | from typing import Dict, Any, List, Union 4 | from fastapi import FastAPI, Response, Body, HTTPException 5 | from fastapi.staticfiles import StaticFiles 6 | from fastapi.responses import HTMLResponse 7 | from boaviztapi_sdk.api.server_api import ServerApi 8 | from boaviztapi_sdk.models.server import Server 9 | from boagent.api.exceptions import InvalidPIDException 10 | from boagent.hardware.lshw import Lshw 11 | from .utils import ( 12 | iso8601_or_timestamp_as_timestamp, 13 | format_prometheus_output, 14 | get_boavizta_api_client, 15 | sort_ram, 16 | sort_disks, 17 | ) 18 | 19 | from .config import Settings 20 | from .process import Process 21 | from .models import WorkloadTime, time_workload_example 22 | 23 | settings = Settings() 24 | 25 | HARDWARE_FILE_PATH = settings.hardware_file_path 26 | POWER_DATA_FILE_PATH = settings.power_file_path 27 | PUBLIC_PATH = settings.public_path 28 | ASSETS_PATH = settings.assets_path 29 | DB_PATH = settings.db_path 30 | DEFAULT_LIFETIME = settings.default_lifetime 31 | SECONDS_IN_ONE_YEAR = settings.seconds_in_one_year 32 | HARDWARE_CLI = settings.hardware_cli 33 | AZURE_LOCATION = settings.azure_location 34 | BOAVIZTAPI_ENDPOINT = settings.boaviztapi_endpoint 35 | 
CARBON_AWARE_API_ENDPOINT = settings.carbon_aware_api_endpoint 36 | CARBON_AWARE_API_TOKEN = settings.carbon_aware_api_token 37 | PROJECT_NAME = settings.project_name 38 | PROJECT_VERSION = settings.project_version 39 | PROJECT_DESCRIPTION = settings.project_description 40 | TAGS_METADATA = settings.tags_metadata 41 | 42 | 43 | def configure_static(app): 44 | app.mount("/assets", StaticFiles(directory=ASSETS_PATH), name="assets") 45 | 46 | 47 | def configure_app(): 48 | app = FastAPI( 49 | title=PROJECT_NAME, 50 | version=PROJECT_VERSION, 51 | description=PROJECT_DESCRIPTION, 52 | contact={"name": "Boavizta Members", "url": "https://boavizta.org/en"}, 53 | license_info={"name": "Apache-2.0"}, 54 | openapi_tags=TAGS_METADATA, 55 | ) 56 | configure_static(app) 57 | return app 58 | 59 | 60 | app = configure_app() 61 | 62 | 63 | @app.get("/info", tags=["info"]) 64 | async def info(): 65 | return { 66 | "seconds_in_one_year": SECONDS_IN_ONE_YEAR, 67 | "default_lifetime": DEFAULT_LIFETIME, 68 | "hardware_file_path": HARDWARE_FILE_PATH, 69 | "power_file_path": POWER_DATA_FILE_PATH, 70 | "hardware_cli": HARDWARE_CLI, 71 | "boaviztapi_endpoint": BOAVIZTAPI_ENDPOINT, 72 | } 73 | 74 | 75 | @app.get("/web", tags=["web"], response_class=HTMLResponse) 76 | async def web(): 77 | res = "" 78 | with open("{}/index.html".format(PUBLIC_PATH), "r") as fd: 79 | res = fd.read() 80 | fd.close() 81 | return res 82 | 83 | 84 | @app.get("/metrics", tags=["metrics"]) 85 | async def metrics( 86 | start_time: str = "0.0", 87 | end_time: str = "0.0", 88 | verbose: bool = False, 89 | location: str = "", 90 | measure_power: bool = True, 91 | lifetime: float = DEFAULT_LIFETIME, 92 | fetch_hardware: bool = False, 93 | ): 94 | return Response( 95 | content=format_prometheus_output( 96 | get_metrics( 97 | iso8601_or_timestamp_as_timestamp(start_time), 98 | iso8601_or_timestamp_as_timestamp(end_time), 99 | verbose, 100 | location, 101 | measure_power, 102 | lifetime, 103 | fetch_hardware, 104 | ), 105 
| verbose, 106 | ), 107 | media_type="plain-text", 108 | ) 109 | 110 | 111 | @app.get("/query", tags=["query"]) 112 | async def query( 113 | start_time: str = "0.0", 114 | end_time: str = "0.0", 115 | verbose: bool = False, 116 | location: str = "EEE", 117 | measure_power: bool = True, 118 | lifetime: float = DEFAULT_LIFETIME, 119 | fetch_hardware: bool = False, 120 | ): 121 | """ 122 | start_time: Start time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n 123 | end_time: End time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n 124 | verbose: Get detailled metrics with extra information.\n 125 | location: Country code to configure the local electricity grid to take into account.\n 126 | measure_power: Get electricity consumption metrics from Scaphandre or not.\n 127 | lifetime: Full lifetime of the machine to evaluate.\n 128 | fetch_hardware: Regenerate hardware.json file with current machine hardware or not.\n 129 | """ 130 | return get_metrics( 131 | iso8601_or_timestamp_as_timestamp(start_time), 132 | iso8601_or_timestamp_as_timestamp(end_time), 133 | verbose, 134 | location, 135 | measure_power, 136 | lifetime, 137 | fetch_hardware, 138 | ) 139 | 140 | 141 | @app.post("/query", tags=["query"]) 142 | async def query_with_time_workload( 143 | start_time: str = "0.0", 144 | end_time: str = "0.0", 145 | verbose: bool = False, 146 | location: str = "EEE", 147 | measure_power: bool = True, 148 | lifetime: float = DEFAULT_LIFETIME, 149 | fetch_hardware: bool = False, 150 | time_workload: Union[dict[str, float], dict[str, List[WorkloadTime]]] = Body( 151 | None, example=time_workload_example 152 | ), 153 | ): 154 | """ 155 | start_time: Start time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n 156 | end_time: End time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. 
\n 157 | verbose: Get detailled metrics with extra information.\n 158 | location: Country code to configure the local electricity grid to take into account.\n 159 | measure_power: Get electricity consumption metrics from Scaphandre or not.\n 160 | lifetime: Full lifetime of the machine to evaluate.\n 161 | fetch_hardware: Regenerate hardware.json file with current machine hardware or not.\n 162 | time_workload: Workload percentage for CPU and RAM. Can be a float or a list of dictionaries with format 163 | {"time_percentage": float, "load_percentage": float} 164 | """ 165 | return get_metrics( 166 | iso8601_or_timestamp_as_timestamp(start_time), 167 | iso8601_or_timestamp_as_timestamp(end_time), 168 | verbose, 169 | location, 170 | measure_power, 171 | lifetime, 172 | fetch_hardware, 173 | time_workload, 174 | ) 175 | 176 | 177 | @app.get("/process_embedded_impacts", tags=["process"]) 178 | async def process_embedded_impacts( 179 | process_id: int = 0, 180 | start_time: str = "0.0", 181 | end_time: str = "0.0", 182 | location: str = "EEE", 183 | lifetime: float = DEFAULT_LIFETIME, 184 | fetch_hardware: bool = False, 185 | ): 186 | """ 187 | process_id: The process ID queried to be evaluated for embedded impacts for each available component. \n 188 | start_time: Start time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n 189 | end_time: End time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. 
\n 190 | location: Country code to configure the local electricity grid to take into account.\n 191 | lifetime: Full lifetime of the machine to evaluate.\n 192 | """ 193 | 194 | verbose = True 195 | measure_power = True 196 | 197 | metrics_data = get_metrics( 198 | iso8601_or_timestamp_as_timestamp(start_time), 199 | iso8601_or_timestamp_as_timestamp(end_time), 200 | verbose, 201 | location, 202 | measure_power, 203 | lifetime, 204 | fetch_hardware, 205 | ) 206 | try: 207 | queried_process = Process(metrics_data, process_id) 208 | except InvalidPIDException as invalid_pid: 209 | raise HTTPException(status_code=400, detail=invalid_pid.message) 210 | else: 211 | process_embedded_impact_values = queried_process.embedded_impact_values 212 | json_content = json.dumps(process_embedded_impact_values) 213 | return Response(status_code=200, content=json_content) 214 | 215 | 216 | def get_metrics( 217 | start_time: float, 218 | end_time: float, 219 | verbose: bool, 220 | location: str, 221 | measure_power: bool, 222 | lifetime: float, 223 | fetch_hardware: bool, 224 | time_workload: Union[dict[str, float], dict[str, List[WorkloadTime]], None] = None, 225 | ): 226 | 227 | now: float = time.time() 228 | if start_time and end_time: 229 | ratio = (end_time - start_time) / (lifetime * SECONDS_IN_ONE_YEAR) 230 | else: 231 | ratio = 1.0 232 | if start_time == 0.0: 233 | start_time = now - 3600 234 | if end_time == 0.0: 235 | end_time = now 236 | if end_time - start_time >= lifetime * SECONDS_IN_ONE_YEAR: 237 | lifetime = (end_time - start_time) / float(SECONDS_IN_ONE_YEAR) 238 | 239 | hardware_data = get_hardware_data(fetch_hardware) 240 | 241 | res = {"emissions_calculation_data": {}} 242 | 243 | avg_power = None 244 | 245 | if len(location) < 3 or location == "EEE": 246 | res["location_warning"] = { 247 | "warning_message": "Location is either set as default, or has not been set, and is therefore set to the default BoaviztAPI location. 
" 248 | "Be aware that the presented results can be drastically different due to location. " 249 | "It is recommended that you set the asset location with the corresponding country code, see: https://doc.api.boavizta.org/Explanations/usage/countries/" 250 | } 251 | 252 | if measure_power: 253 | power_data = get_power_data(start_time, end_time) 254 | avg_power = power_data["avg_power"] 255 | if "warning" in power_data: 256 | res["emissions_calculation_data"][ 257 | "energy_consumption_warning" 258 | ] = power_data["warning"] 259 | 260 | boaviztapi_data = query_machine_impact_data( 261 | model={}, 262 | configuration=hardware_data, 263 | usage=format_usage_request( 264 | start_time, end_time, avg_power, location, time_workload 265 | ), 266 | ) 267 | 268 | if measure_power: 269 | res["total_operational_emissions"] = { 270 | "value": boaviztapi_data["impacts"]["gwp"]["use"], 271 | "description": "GHG emissions related to usage, from start_time to end_time.", 272 | "type": "gauge", 273 | "unit": "kg CO2eq", 274 | "long_unit": "kilograms CO2 equivalent", 275 | } 276 | res["total_operational_abiotic_resources_depletion"] = { 277 | "value": boaviztapi_data["impacts"]["adp"]["use"], 278 | "description": "Abiotic Resources Depletion (minerals & metals, ADPe) due to the usage phase.", 279 | "type": "gauge", 280 | "unit": "kgSbeq", 281 | "long_unit": "kilograms Antimony equivalent", 282 | } 283 | res["total_operational_primary_energy_consumed"] = { 284 | "value": boaviztapi_data["impacts"]["pe"]["use"], 285 | "description": "Primary Energy consumed due to the usage phase.", 286 | "type": "gauge", 287 | "unit": "MJ", 288 | "long_unit": "Mega Joules", 289 | } 290 | res["start_time"] = { 291 | "value": start_time, 292 | "description": "Start time for the evaluation, in timestamp format (seconds since 1970)", 293 | "type": "counter", 294 | "unit": "s", 295 | "long_unit": "seconds", 296 | } 297 | res["end_time"] = { 298 | "value": end_time, 299 | "description": "End time for the 
evaluation, in timestamp format (seconds since 1970)", 300 | "type": "counter", 301 | "unit": "s", 302 | "long_unit": "seconds", 303 | } 304 | res["average_power_measured"] = { 305 | "value": avg_power, 306 | "description": "Average power measured from start_time to end_time", 307 | "type": "gauge", 308 | "unit": "W", 309 | "long_unit": "Watts", 310 | } 311 | 312 | """ res["calculated_emissions"] = { 313 | "value": boaviztapi_data["impacts"]["gwp"]["value"] * ratio 314 | + boaviztapi_data["impacts"]["gwp"]["use"]["value"], 315 | "description": "Total Green House Gas emissions calculated for manufacturing and usage phases, between " 316 | "start_time and end_time", 317 | "type": "gauge", 318 | "unit": "kg CO2eq", 319 | "long_unit": "kilograms CO2 equivalent", 320 | } """ 321 | 322 | res["embedded_emissions"] = { 323 | "value": boaviztapi_data["impacts"]["gwp"]["embedded"]["value"] * ratio, 324 | "description": "Embedded carbon emissions (manufacturing phase)", 325 | "type": "gauge", 326 | "unit": "kg CO2eq", 327 | "long_unit": "kilograms CO2 equivalent", 328 | } 329 | res["embedded_abiotic_resources_depletion"] = { 330 | "value": boaviztapi_data["impacts"]["adp"]["embedded"]["value"] * ratio, 331 | "description": "Embedded abiotic ressources consumed (manufacturing phase)", 332 | "type": "gauge", 333 | "unit": "kg Sbeq", 334 | "long_unit": "kilograms ADP equivalent", 335 | } 336 | res["embedded_primary_energy"] = { 337 | "value": boaviztapi_data["impacts"]["pe"]["embedded"]["value"] * ratio, 338 | "description": "Embedded primary energy consumed (manufacturing phase)", 339 | "type": "gauge", 340 | "unit": "MJ", 341 | "long_unit": "Mega Joules", 342 | } 343 | 344 | if verbose: 345 | res["raw_data"] = { 346 | "hardware_data": hardware_data, 347 | "resources_data": "not implemented yet", 348 | "boaviztapi_data": boaviztapi_data, 349 | "start_time": start_time, 350 | "end_time": end_time, 351 | } 352 | res["electricity_carbon_intensity"] = { 353 | "value": 
boaviztapi_data["verbose"]["gwp_factor"]["value"], 354 | "description": "Carbon intensity of the electricity mix. Mix considered : {}".format( 355 | location 356 | ), 357 | "type": "gauge", 358 | "unit": "kg CO2eq / kWh", 359 | "long_unit": "Kilograms CO2 equivalent per KiloWattHour", 360 | } 361 | 362 | if measure_power: 363 | res["raw_data"]["power_data"] = power_data 364 | 365 | return res 366 | 367 | 368 | def format_usage_request( 369 | start_time: float, 370 | end_time: float, 371 | avg_power: Union[float, None] = None, 372 | location: str = "EEE", 373 | time_workload: Union[dict[str, float], dict[str, List[WorkloadTime]], None] = None, 374 | ): 375 | hours_use_time = (end_time - start_time) / 3600.0 376 | kwargs_usage = {"hours_use_time": hours_use_time} 377 | if location: 378 | kwargs_usage["usage_location"] = location 379 | if avg_power: 380 | kwargs_usage["avg_power"] = avg_power 381 | if time_workload: 382 | kwargs_usage["time_workload"] = time_workload 383 | return kwargs_usage 384 | 385 | 386 | def get_power_data(start_time, end_time): 387 | # Get all items of the json list where start_time <= host.timestamp <= end_time 388 | power_data = {} 389 | with open(POWER_DATA_FILE_PATH, "r") as power_data_file: 390 | formatted_data = f"{power_data_file.read()}]" 391 | data = json.loads(formatted_data) 392 | queried_power_data = [ 393 | e for e in data if start_time <= float(e["host"]["timestamp"]) <= end_time 394 | ] 395 | power_data["raw_data"] = queried_power_data 396 | power_data["avg_power"] = compute_average_consumption(queried_power_data) 397 | if end_time - start_time <= 3600: 398 | power_data["warning"] = ( 399 | "The time window is lower than one hour, but the energy consumption estimate is in " 400 | "Watt.Hour. So this is an extrapolation of the power usage profile on one hour. Be " 401 | "careful with this data. 
" 402 | ) 403 | return power_data 404 | 405 | 406 | def compute_average_consumption(power_data) -> float: 407 | # Host energy consumption 408 | total_host = 0.0 409 | avg_host = 0.0 410 | if len(power_data) > 0: 411 | for r in power_data: 412 | total_host += float(r["host"]["consumption"]) 413 | 414 | avg_host = total_host / len(power_data) / 1000000.0 # from microwatts to watts 415 | 416 | return avg_host 417 | 418 | 419 | def get_hardware_data(fetch_hardware: bool): 420 | data = {} 421 | if fetch_hardware: 422 | build_hardware_data() 423 | try: 424 | data = read_hardware_data() 425 | except Exception: 426 | build_hardware_data() 427 | data = read_hardware_data() 428 | return data 429 | 430 | 431 | def read_hardware_data() -> Dict: 432 | with open(HARDWARE_FILE_PATH, "r") as fd: 433 | data = json.load(fd) 434 | return data 435 | 436 | 437 | def build_hardware_data(): 438 | lshw = Lshw() 439 | with open(HARDWARE_FILE_PATH, "w") as hardware_file: 440 | hardware_data = {} 441 | hardware_data["disks"] = lshw.disks 442 | hardware_data["cpus"] = lshw.cpus 443 | hardware_data["rams"] = lshw.memories 444 | json.dump(hardware_data, hardware_file) 445 | 446 | 447 | def query_machine_impact_data( 448 | model: dict[str, str], 449 | configuration: dict[str, dict[str, int]], 450 | usage: dict[str, Any], 451 | ) -> dict: 452 | server_api = ServerApi(get_boavizta_api_client()) 453 | 454 | server_impact = None 455 | 456 | if configuration: 457 | server = Server(usage=usage, configuration=configuration) 458 | server_impact = server_api.server_impact_from_configuration_v1_server_post( 459 | server=server 460 | ) 461 | elif model: 462 | # server = Server(usage=usage, model=model) 463 | # TO IMPLEMENT 464 | # This conditional was based on a previous version of BoaviztAPI, where a server model could 465 | # be sent to /v1/server through a GET method. BoaviztAPI now expects an archetype string to 466 | # return a prerecorded impact from an asset. 
467 | server_impact = server_api.server_impact_from_model_v1_server_get( 468 | archetype="dellR740" 469 | ) 470 | 471 | return server_impact 472 | 473 | 474 | def generate_machine_configuration(hardware_data) -> Dict[str, Any]: 475 | # Either delete or transfer this logic to hardware_cli / lshw 476 | config = { 477 | "cpu": { 478 | "units": len(hardware_data["cpus"]), 479 | "core_units": hardware_data["cpus"][1]["core_units"], 480 | # "family": hardware_data['cpus'][1]['family'] 481 | }, 482 | "ram": sort_ram(hardware_data["rams"]), 483 | "disk": sort_disks(hardware_data["disks"]), 484 | "power_supply": ( 485 | hardware_data["power_supply"] 486 | if "power_supply" in hardware_data 487 | else {"units": 1} 488 | ), 489 | # TODO: if cpu is a small one, guess that power supply is light/average weight of a laptops power supply ? 490 | } 491 | return config 492 | -------------------------------------------------------------------------------- /boagent/api/config.py: -------------------------------------------------------------------------------- 1 | from pydantic_settings import BaseSettings 2 | 3 | 4 | class Settings(BaseSettings): 5 | project_name: str = "boagent" 6 | project_version: str = "0.1.0" 7 | project_description: str = "Boagent is a local API and monitoring agent to help you estimate the environmental impact of your machine, including software activity and hardware embodied impacts." 8 | tags_metadata: list = [ 9 | {"name": "info", "description": "Returns runtime configuration of Boagent."}, 10 | {"name": "web", "description": "Web UI to explore Boagent metrics."}, 11 | { 12 | "name": "csv", 13 | "description": "Internal route. Generates and returns a CSV-formatted dataset with metrics needed by the webUI", 14 | }, 15 | { 16 | "name": "metrics", 17 | "description": "Returns metrics as a Prometheus HTTP exporter.", 18 | }, 19 | { 20 | "name": "query", 21 | "description": "This is the main route. 
Returns metrics in JSON format.", 22 | }, 23 | ] 24 | seconds_in_one_year: int = 31536000 25 | default_lifetime: float = 5.0 26 | hardware_file_path: str = "./hardware_data.json" 27 | power_file_path: str = "./power_data.json" 28 | hardware_cli: str = "./boagent/hardware/hardware_cli.py" 29 | boaviztapi_endpoint: str = "http://localhost:5000" 30 | db_path: str = "../../db/boagent.db" 31 | public_path: str = "./boagent/public" 32 | assets_path: str = "./boagent/public/assets/" 33 | carbon_aware_api_endpoint: str = "https://carbon-aware-api.azurewebsites.net" 34 | carbon_aware_api_token: str = "token" 35 | azure_location: str = "northeurope" 36 | -------------------------------------------------------------------------------- /boagent/api/exceptions.py: -------------------------------------------------------------------------------- 1 | class InvalidPIDException(Exception): 2 | def __init__(self, pid): 3 | self.pid = pid 4 | self.message = f"Process_id {self.pid} has not been found in metrics data. Check the queried PID." 
5 | super().__init__(self.message) 6 | -------------------------------------------------------------------------------- /boagent/api/models.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class WorkloadTime(BaseModel): 5 | time_percentage: float = 0.0 6 | load_percentage: float = 0.0 7 | 8 | 9 | time_workload_example = { 10 | "time_workload": [ 11 | {"time_percentage": 50, "load_percentage": 0}, 12 | {"time_percentage": 25, "load_percentage": 60}, 13 | {"time_percentage": 25, "load_percentage": 100}, 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /boagent/api/process.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from .exceptions import InvalidPIDException 3 | 4 | 5 | class Process: 6 | def __init__(self, metrics_data, pid): 7 | self.metrics_data = metrics_data 8 | self.validate_pid(pid) 9 | self._pid = pid 10 | self.process_info = self.get_process_info() 11 | 12 | def validate_pid(self, value): 13 | 14 | timestamps = [ 15 | timestamp 16 | for timestamp in self.metrics_data["raw_data"]["power_data"]["raw_data"] 17 | ] 18 | consumers = [timestamp["consumers"] for timestamp in timestamps] 19 | pids = set([process["pid"] for consumer in consumers for process in consumer]) 20 | if value in pids: 21 | return value 22 | else: 23 | raise InvalidPIDException(value) 24 | 25 | @property 26 | def pid(self, pid): 27 | """The PID queried in data coming from Scaphandre.""" 28 | return self._pid 29 | 30 | @pid.setter 31 | def pid(self, value): 32 | self._pid = self.validate_pid(value) 33 | 34 | def get_process_info(self): 35 | 36 | timestamps = [ 37 | timestamp 38 | for timestamp in self.metrics_data["raw_data"]["power_data"]["raw_data"] 39 | ] 40 | consumers = [timestamp["consumers"] for timestamp in timestamps] 41 | process_info = [ 42 | process 43 | for consumer 
in consumers 44 | for process in consumer 45 | if process["pid"] == self._pid 46 | ] 47 | return process_info 48 | 49 | @property 50 | def process_name(self): 51 | process_name = self.process_info[0]["exe"].split("/")[-1] 52 | return process_name 53 | 54 | @property 55 | def process_exe(self): 56 | process_exe = self.process_info[0]["exe"] 57 | return process_exe 58 | 59 | def get_total_ram_in_bytes(self): 60 | 61 | ram_data = self.metrics_data["raw_data"]["hardware_data"]["rams"] 62 | total_ram_in_bytes = ( 63 | sum(ram_unit["capacity"] for ram_unit in ram_data) * 1073741824 64 | ) 65 | 66 | return total_ram_in_bytes 67 | 68 | def get_disk_usage_in_bytes(self): 69 | 70 | # Data from Scaphandre can be empty on first returned element in the array 71 | try: 72 | key_for_disk_total_bytes = self.metrics_data["raw_data"]["power_data"][ 73 | "raw_data" 74 | ][0]["host"]["components"]["disks"][0]["disk_total_bytes"] 75 | except IndexError: 76 | key_for_disk_total_bytes = self.metrics_data["raw_data"]["power_data"][ 77 | "raw_data" 78 | ][1]["host"]["components"]["disks"][0]["disk_total_bytes"] 79 | 80 | try: 81 | key_for_disk_available_bytes = self.metrics_data["raw_data"]["power_data"][ 82 | "raw_data" 83 | ][0]["host"]["components"]["disks"][0]["disk_available_bytes"] 84 | except IndexError: 85 | key_for_disk_available_bytes = self.metrics_data["raw_data"]["power_data"][ 86 | "raw_data" 87 | ][1]["host"]["components"]["disks"][0]["disk_available_bytes"] 88 | 89 | disk_total_bytes = int(key_for_disk_total_bytes) 90 | disk_available_bytes = int(key_for_disk_available_bytes) 91 | disk_usage_in_bytes = disk_total_bytes - disk_available_bytes 92 | return disk_usage_in_bytes 93 | 94 | @property 95 | def ram_shares(self): 96 | 97 | process_ram_shares = [ 98 | ( 99 | ( 100 | int(timestamp["resources_usage"]["memory_usage"]) 101 | / self.get_total_ram_in_bytes() 102 | ) 103 | * 100 104 | ) 105 | for timestamp in self.process_info 106 | ] 107 | 108 | return process_ram_shares 109 
| 110 | @property 111 | def cpu_load_shares(self): 112 | 113 | process_cpu_load_shares = [ 114 | float(timestamp["resources_usage"]["cpu_usage"]) 115 | for timestamp in self.process_info 116 | ] 117 | return process_cpu_load_shares 118 | 119 | @property 120 | def storage_shares(self): 121 | process_storage_shares = [ 122 | ( 123 | ( 124 | int(timestamp["resources_usage"]["disk_usage_write"]) 125 | / self.get_disk_usage_in_bytes() 126 | ) 127 | * 100 128 | ) 129 | for timestamp in self.process_info 130 | ] 131 | return process_storage_shares 132 | 133 | def get_component_embedded_impact_shares(self, queried_component, component_shares): 134 | 135 | component = f"{queried_component}-1" 136 | component_impacts_data = self.metrics_data["raw_data"]["boaviztapi_data"][ 137 | "verbose" 138 | ][component]["impacts"] 139 | component_embedded_impact_shares = list() 140 | for impact in component_impacts_data: 141 | impact_embedded_value = component_impacts_data[impact]["embedded"]["value"] 142 | for process_component_share in component_shares: 143 | if process_component_share == 0.0: 144 | component_embedded_impact = ( 145 | f"{impact}_embedded_share", 146 | float(process_component_share), 147 | ) 148 | component_embedded_impact_shares.append(component_embedded_impact) 149 | else: 150 | component_embedded_impact_share = ( 151 | float(impact_embedded_value) * float(process_component_share) 152 | ) / 100 153 | component_embedded_impact = ( 154 | f"{impact}_embedded_share", 155 | float(component_embedded_impact_share), 156 | ) 157 | component_embedded_impact_shares.append(component_embedded_impact) 158 | return component_embedded_impact_shares 159 | 160 | def get_component_embedded_impact_values(self, queried_component): 161 | if queried_component == "cpu": 162 | component_impact_shares = self.get_component_embedded_impact_shares( 163 | "CPU", self.cpu_load_shares 164 | ) 165 | elif queried_component == "ram": 166 | component_impact_shares = 
self.get_component_embedded_impact_shares( 167 | "RAM", self.ram_shares 168 | ) 169 | elif queried_component == "ssd": 170 | component_impact_shares = self.get_component_embedded_impact_shares( 171 | "SSD", self.storage_shares 172 | ) 173 | elif queried_component == "hdd": 174 | component_impact_shares = self.get_component_embedded_impact_shares( 175 | "HDD", self.storage_shares 176 | ) 177 | else: 178 | return "Queried component is not available for evaluation." 179 | 180 | gwp_list = defaultdict(list) 181 | adp_list = defaultdict(list) 182 | pe_list = defaultdict(list) 183 | 184 | for impact_key, impact_value in component_impact_shares: 185 | if impact_key == "gwp_embedded_share": 186 | gwp_list[impact_key].append(impact_value) 187 | if impact_key == "adp_embedded_share": 188 | adp_list[impact_key].append(impact_value) 189 | if impact_key == "pe_embedded_share": 190 | pe_list[impact_key].append(impact_value) 191 | 192 | gwp_average = sum(gwp_list["gwp_embedded_share"]) / len( 193 | gwp_list["gwp_embedded_share"] 194 | ) 195 | adp_average = sum(adp_list["adp_embedded_share"]) / len( 196 | adp_list["adp_embedded_share"] 197 | ) 198 | pe_average = sum(pe_list["pe_embedded_share"]) / len( 199 | pe_list["pe_embedded_share"] 200 | ) 201 | 202 | gwp_max = max(gwp_list["gwp_embedded_share"]) 203 | adp_max = max(adp_list["adp_embedded_share"]) 204 | pe_max = max(pe_list["pe_embedded_share"]) 205 | 206 | gwp_min = min(gwp_list["gwp_embedded_share"]) 207 | adp_min = min(adp_list["adp_embedded_share"]) 208 | pe_min = min(pe_list["pe_embedded_share"]) 209 | 210 | component_embedded_impact_values = { 211 | f"gwp_{queried_component}_average_impact": gwp_average, 212 | f"adp_{queried_component}_average_impact": adp_average, 213 | f"pe_{queried_component}_average_impact": pe_average, 214 | f"gwp_{queried_component}_max_impact": gwp_max, 215 | f"adp_{queried_component}_max_impact": adp_max, 216 | f"pe_{queried_component}_max_impact": pe_max, 217 | 
f"gwp_{queried_component}_min_impact": gwp_min, 218 | f"adp_{queried_component}_min_impact": adp_min, 219 | f"pe_{queried_component}_min_impact": pe_min, 220 | } 221 | return component_embedded_impact_values 222 | 223 | @property 224 | def embedded_impact_values(self): 225 | process_embedded_impact_values = { 226 | "pid": self._pid, 227 | "process_embedded_impacts": {}, 228 | } 229 | components = ["cpu", "ram", "hdd", "ssd"] 230 | 231 | for component in components: 232 | try: 233 | process_component_embedded_impact_values = ( 234 | self.get_component_embedded_impact_values(component) 235 | ) 236 | process_embedded_impact_values["process_embedded_impacts"][ 237 | f"process_{component}_embedded_impact_values" 238 | ] = process_component_embedded_impact_values 239 | except KeyError as absent_component: 240 | print( 241 | f"Queried component is not present in Boagent metrics: {absent_component}" 242 | ) 243 | 244 | return process_embedded_impact_values 245 | -------------------------------------------------------------------------------- /boagent/api/utils.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from boaviztapi_sdk import ApiClient, Configuration 3 | from dateutil import parser 4 | from .config import Settings 5 | from os import PathLike 6 | 7 | settings = Settings() 8 | BOAVIZTAPI_ENDPOINT = settings.boaviztapi_endpoint 9 | 10 | 11 | def sort_ram(items: list): 12 | hash_map = {} 13 | for r in items: 14 | if "manufacturer" in r: 15 | if "{}:{}".format(r["capacity"], r["manufacturer"]) in hash_map: 16 | hash_map["{}:{}".format(r["capacity"], r["manufacturer"])]["units"] += 1 17 | else: 18 | hash_map["{}:{}".format(r["capacity"], r["manufacturer"])] = { 19 | "units": 1, 20 | "manufacturer": r["manufacturer"], 21 | "capacity": r["capacity"], 22 | } 23 | else: 24 | hash_map["{}".format(r["capacity"])] = { 25 | "units": 1, 26 | "capacity": r["capacity"], 27 | } 28 | return [v for k, v in 
hash_map.items()] 29 | 30 | 31 | def sort_disks(items: list): 32 | hash_map = {} 33 | for r in items: 34 | if "{}:{}:{}".format(r["capacity"], r["manufacturer"], r["type"]) in hash_map: 35 | hash_map["{}:{}:{}".format(r["capacity"], r["manufacturer"], r["type"])][ 36 | "units" 37 | ] += 1 38 | else: 39 | hash_map["{}:{}:{}".format(r["capacity"], r["manufacturer"], r["type"])] = { 40 | "units": 1, 41 | "manufacturer": r["manufacturer"], 42 | "capacity": r["capacity"], 43 | "type": r["type"], 44 | } 45 | return [v for k, v in hash_map.items()] 46 | 47 | 48 | def get_boavizta_api_client(): 49 | config = Configuration( 50 | host=BOAVIZTAPI_ENDPOINT, 51 | ) 52 | client = ApiClient(configuration=config) 53 | return client 54 | 55 | 56 | def iso8601_or_timestamp_as_timestamp(iso_time: str) -> float: 57 | """ 58 | Takes an str that's either a timestamp or an iso8601 59 | time. Returns a float that represents a timestamp. 60 | """ 61 | if iso_time == "0.0" or iso_time == "0": 62 | return float(iso_time) 63 | else: 64 | dt = None 65 | try: 66 | dt = parser.parse(iso_time) 67 | print("{} is an iso 8601 datetime".format(iso_time)) 68 | except Exception as e: 69 | print("{} is not an iso 8601 datetime".format(iso_time)) 70 | print("Exception : {}".format(e)) 71 | try: 72 | dt = datetime.fromtimestamp(int(round(float(iso_time)))) 73 | print("{} is a timestamp".format(iso_time)) 74 | except Exception as e: 75 | print("{} is not a timestamp".format(iso_time)) 76 | print("Exception : {}".format(e)) 77 | print("Parser would give : {}".format(parser.parse(iso_time))) 78 | finally: 79 | if dt: 80 | return dt.timestamp() 81 | else: 82 | return float(iso_time) 83 | 84 | 85 | def format_prometheus_output(res, verbose: bool): 86 | response = "" 87 | for k, v in res.items(): 88 | if "value" in v and "type" in v: 89 | if "description" not in v: 90 | v["description"] = "TODO: define me" 91 | if type(v["value"]) is float: 92 | response += format_prometheus_metric( 93 | k, 94 | "{}. 
{}".format( 95 | v["description"], 96 | "In {} ({}).".format(v["long_unit"], v["unit"]), 97 | ), 98 | v["type"], 99 | v["value"], 100 | ) 101 | if type(v["value"]) is dict: 102 | response += format_prometheus_metric( 103 | k, 104 | "{}. {}".format( 105 | v["description"], 106 | "In {} ({}).".format(v["long_unit"], v["unit"]), 107 | ), 108 | v["type"], 109 | v["value"]["value"], 110 | ) 111 | 112 | else: 113 | for x, y in v.items(): 114 | if type(y) is float: 115 | pass 116 | else: 117 | if "value" in y and "type" in y: 118 | if "description" not in y: 119 | y["description"] = "TODO: define me" 120 | response += format_prometheus_metric( 121 | "{}_{}".format(k, x), 122 | "{}. {}".format( 123 | y["description"], 124 | "In {} ({}).".format(y["long_unit"], y["unit"]), 125 | ), 126 | y["type"], 127 | y["value"], 128 | ) 129 | if verbose: 130 | if "boaviztapi_data" in v: 131 | for impact_name, impact_items in v["boaviztapi_data"][ 132 | "impacts" 133 | ].items(): 134 | if "unit" in impact_items: 135 | for value in impact_items["embedded"]: 136 | if value == "warnings": 137 | pass 138 | else: 139 | response += format_prometheus_metric( 140 | "{}".format(f"{impact_name}_total_impact_{value}"), 141 | "{}. 
{}".format( 142 | impact_items["description"], 143 | "In {}".format(impact_items["unit"]), 144 | ), 145 | "{}".format("gauge"), 146 | "{}".format(f"{impact_items['embedded'][value]}"), 147 | ) 148 | 149 | for component_name, component_impacts in v["boaviztapi_data"][ 150 | "verbose" 151 | ].items(): 152 | formatted_component_name = component_name.lower().replace("-", "_") 153 | if "impacts" in component_impacts: 154 | for impact, items in component_impacts["impacts"].items(): 155 | for component_embedded_impact_metric, value in items[ 156 | "embedded" 157 | ].items(): 158 | if component_embedded_impact_metric == "warnings": 159 | pass 160 | else: 161 | response += format_prometheus_metric( 162 | "{}".format( 163 | f"{formatted_component_name}_{impact}_embedded_impact_{component_embedded_impact_metric}" 164 | ), 165 | "{}. {}".format( 166 | items["description"], 167 | "In {}".format(items["unit"]), 168 | ), 169 | "{}".format("gauge"), 170 | "{}".format( 171 | f"{value}", 172 | ), 173 | ) 174 | 175 | return response 176 | 177 | 178 | def format_prometheus_metric( 179 | metric_name, metric_description, metric_type, metric_value 180 | ): 181 | response = """# HELP {} {} 182 | # TYPE {} {} 183 | {} {} 184 | """.format( 185 | metric_name, 186 | metric_description, 187 | metric_name, 188 | metric_type, 189 | metric_name, 190 | metric_value, 191 | ) 192 | return response 193 | 194 | 195 | def filter_date_range(data: list, start_date: datetime, stop_date: datetime) -> list: 196 | 197 | lower_index = 0 198 | upper_index = 0 199 | 200 | start = datetime.timestamp(start_date) 201 | end = datetime.timestamp(stop_date) 202 | 203 | for d in data: 204 | if d["timestamp"] < start: 205 | lower_index += 1 206 | if d["timestamp"] < end: 207 | upper_index += 1 208 | 209 | return data[lower_index:upper_index] 210 | 211 | 212 | def format_scaphandre_json(file: str | PathLike) -> str: 213 | with open(file, "r") as fd: 214 | formatted_scaphandre_json = f"[{fd.read()}]".replace( 215 | 
'{"host"', ',{"host"' 216 | ).replace(',{"host"', '{"host"', 1) 217 | return formatted_scaphandre_json 218 | -------------------------------------------------------------------------------- /boagent/hardware/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/hardware/__init__.py -------------------------------------------------------------------------------- /boagent/hardware/lshw.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is modified code issued from https://github.com/Solvik/netbox-agent/blob/master/netbox_agent/lshw.py, 3 | copyright under Apache-2.0 licence. 4 | """ 5 | 6 | from shutil import which 7 | import subprocess 8 | import json 9 | import sys 10 | import re 11 | import os 12 | 13 | SYS_BLOCK_PATH = "/sys/block" 14 | 15 | 16 | def is_tool(name): 17 | """Check whether `name` is on PATH and marked as executable""" 18 | return which(name) is not None 19 | 20 | 21 | def serialized_lshw_output(): 22 | try: 23 | lshw_output = subprocess.getoutput("lshw -quiet -json 2> /dev/null") 24 | serialized_lshw_output = json.loads(lshw_output) 25 | except json.JSONDecodeError: 26 | raise Exception("lshw does not seem do be executed as root.") 27 | else: 28 | if isinstance(serialized_lshw_output, list): 29 | return serialized_lshw_output[0] 30 | else: 31 | return serialized_lshw_output 32 | 33 | 34 | def serialized_nvme_output(): 35 | nvme_output = subprocess.check_output( 36 | ["nvme", "-list", "-o", "json"], encoding="utf8" 37 | ) 38 | serialized_nvme_output = json.loads(nvme_output) 39 | return serialized_nvme_output 40 | 41 | 42 | class Lshw: 43 | def __init__(self): 44 | if not is_tool("lshw"): 45 | raise Exception("lshw does not seem to be installed.") 46 | self.hw_info = serialized_lshw_output() 47 | self.info = {} 48 | self.memories = [] 49 | self.cpus = [] 50 
| self.power = [] 51 | self.disks = [] 52 | self.gpus = [] 53 | self.motherboard_serial = self.hw_info["children"][0].get("serial", "No S/N") 54 | self.motherboard = self.hw_info["children"][0].get("product", "Motherboard") 55 | 56 | for k in self.hw_info["children"]: 57 | if k["class"] == "power": 58 | self.power.append(k) 59 | 60 | if "children" in k: 61 | for j in k["children"]: 62 | if j["class"] == "generic": 63 | continue 64 | 65 | if j["class"] == "storage": 66 | self.find_storage(j) 67 | 68 | if j["class"] == "memory": 69 | self.find_memories(j) 70 | 71 | if j["class"] == "processor": 72 | self.find_cpus(j) 73 | 74 | if j["class"] == "bridge": 75 | self.walk_bridge(j) 76 | 77 | def get_hw_linux(self, hwclass): 78 | if hwclass == "cpu": 79 | return self.cpus 80 | if hwclass == "gpu": 81 | return self.gpus 82 | """ if hwclass == "network": 83 | return self.interfaces """ 84 | if hwclass == "storage": 85 | return self.disks 86 | if hwclass == "memory": 87 | return self.memories 88 | 89 | """ 90 | def find_network(self, obj): 91 | # Some interfaces do not have device (logical) name (eth0, for 92 | # instance), such as not connected network mezzanine cards in blade 93 | # servers. In such situations, the card will be named `unknown[0-9]`. 
94 | unkn_intfs = [] 95 | for i in self.interfaces: 96 | # newer versions of lshw can return a list of names, see issue #227 97 | if not isinstance(i["name"], list): 98 | if i["name"].startswith("unknown"): 99 | unkn_intfs.push(i) 100 | else: 101 | for j in i["name"]: 102 | if j.startswith("unknown"): 103 | unkn_intfs.push(j) 104 | 105 | unkn_name = "unknown{}".format(len(unkn_intfs)) 106 | self.interfaces.append( 107 | { 108 | "name": obj.get("logicalname", unkn_name), 109 | "macaddress": obj.get("serial", ""), 110 | "serial": obj.get("serial", ""), 111 | "product": obj["product"], 112 | "vendor": obj["vendor"], 113 | "description": obj["description"], 114 | } 115 | ) 116 | """ 117 | 118 | def find_storage(self, obj): 119 | if "children" in obj: 120 | for device in obj["children"]: 121 | if "vendor" in device and "size" in device: 122 | d = { 123 | "units": +1, 124 | "manufacturer": self.check_disk_vendor( 125 | device["vendor"] 126 | ).lower(), 127 | "capacity": device["size"], 128 | "logicalname": device["logicalname"], 129 | "type": self.get_disk_type(device["logicalname"]), 130 | } 131 | self.disks.append(d) 132 | if "configuration" in obj: 133 | if "nvme" in obj["configuration"]["driver"]: 134 | if not is_tool("nvme"): 135 | raise Exception("nvme-cli >= 1.0 does not seem to be installed") 136 | try: 137 | nvme = serialized_nvme_output() 138 | for device in nvme["Devices"]: 139 | d = { 140 | "units": +1, 141 | "logicalname": device["DevicePath"], 142 | "manufacturer": self.check_disk_vendor( 143 | device["ModelNumber"] 144 | ).lower(), 145 | "type": "ssd", 146 | "capacity": device["PhysicalSize"] // 1073741824, 147 | } 148 | self.disks.append(d) 149 | except Exception: 150 | pass 151 | 152 | def find_cpus(self, obj): 153 | if "product" in obj: 154 | self.cpus.append( 155 | { 156 | "units": +1, 157 | "name": obj["product"], 158 | "manufacturer": obj["vendor"], 159 | "core_units": int(obj["configuration"]["cores"]), 160 | } 161 | ) 162 | 163 | def 
find_memories(self, obj): 164 | if "children" not in obj: 165 | # print("not a DIMM memory.") 166 | return 167 | 168 | for dimm in obj["children"]: 169 | if "empty" in dimm["description"]: 170 | continue 171 | 172 | self.memories.append( 173 | { 174 | "units": +1, 175 | "manufacturer": dimm.get("vendor", "N/A"), 176 | "capacity": dimm.get("size", 0) // 2**20 // 1024, 177 | } 178 | ) 179 | 180 | def find_gpus(self, obj): 181 | if "product" in obj: 182 | self.gpus.append( 183 | { 184 | "product": obj["product"], 185 | "vendor": obj["vendor"], 186 | "description": obj["description"], 187 | } 188 | ) 189 | 190 | def walk_bridge(self, obj): 191 | if "children" not in obj: 192 | return 193 | 194 | for bus in obj["children"]: 195 | if bus["class"] == "storage": 196 | self.find_storage(bus) 197 | if bus["class"] == "display": 198 | self.find_gpus(bus) 199 | 200 | if "children" in bus: 201 | for b in bus["children"]: 202 | if b["class"] == "storage": 203 | self.find_storage(b) 204 | if b["class"] == "display": 205 | self.find_gpus(b) 206 | 207 | def check_disk_vendor(self, model_string: str) -> str: 208 | split_model = model_string.split(" ") 209 | vendor = "" 210 | 211 | if len(split_model) == 1: 212 | check_string_for_numbers = bool(re.search("\\d", model_string)) 213 | if check_string_for_numbers: 214 | raise Exception( 215 | "Lshw did not output a parsable manufacturer name for this device." 
216 | ) 217 | else: 218 | return model_string 219 | 220 | model_first_str = split_model[0] 221 | model_second_str = split_model[1] 222 | check_first_string_for_numbers = re.search("\\d", model_first_str) 223 | result = bool(check_first_string_for_numbers) 224 | 225 | if result: 226 | vendor = model_second_str 227 | return vendor 228 | else: 229 | vendor = model_first_str 230 | return vendor 231 | 232 | def get_disk_type(self, dev_path: str) -> str: 233 | 234 | rotational = self.get_rotational_int(dev_path) 235 | 236 | if rotational == 0: 237 | return "ssd" 238 | if rotational == 1: 239 | return "hdd" 240 | if rotational == 2: 241 | return "unknown" 242 | return "unknown" 243 | 244 | def get_rotational_int(self, dev_path: str) -> int: 245 | 246 | device = dev_path.removeprefix("/dev") 247 | 248 | try: 249 | rotational_fp = os.path.realpath( 250 | f"{SYS_BLOCK_PATH}{device}/queue/rotational", strict=True 251 | ) 252 | 253 | except OSError: 254 | sys.stderr.write("Rotational file was not found") 255 | return 2 256 | else: 257 | with open(rotational_fp, "r") as file: 258 | rotational_int = int(file.read()) 259 | return rotational_int 260 | -------------------------------------------------------------------------------- /boagent/public/assets/boavizta-logo-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/boavizta-logo-4.png -------------------------------------------------------------------------------- /boagent/public/assets/data.csv: -------------------------------------------------------------------------------- 1 | Date,gwp_intensity,cpu_load,ram_load,cpu_conso,ram_conso,server_conso,gwp_ram,gwp_cpu,gwp_server,range_type,gwp_fix 2 | 2022/10/21 00:00:00,98,34,12,24,12,46.8,1176,2352,4586.4,1,890 3 | 2022/10/21 00:05:00,96,35,15,22,11,42.9,1056,2112,4118.4,1,890 4 | 2022/10/21 
00:10:00,94,33,18,21,14,45.5,1316,1974,4277,1,890 5 | 2022/10/21 00:15:00,92,23,22,23,16,50.7,1472,2116,4664.4,1,890 6 | 2022/10/21 00:20:00,90,36,29,25,12,48.1,1080,2250,4329,1,890 7 | 2022/10/21 00:25:00,88,44,22,28,15,55.9,1320,2464,4919.2,1,890 8 | 2022/10/21 00:30:00,89,48,34,29,21,65,1869,2581,5785,1,890 9 | 2022/10/21 00:35:00,97,57,34,32,13,58.5,1261,3104,5674.5,1,890 10 | 2022/10/21 00:40:00,67,68,32,35,15,65,1005,2345,4355,1,890 11 | 2022/10/21 00:45:00,66,90,31,23,12,45.5,792,1518,3003,1,890 12 | 2022/10/21 00:50:00,66,87,59,24,11,45.5,726,1584,3003,1,890 13 | 2022/10/21 00:55:00,98,78,70,34,16,65,1568,3332,6370,1,890 14 | 2022/10/21 01:00:00,96,75,99,28,19,61.1,1824,2688,5865.6,2,890 15 | 2022/10/21 01:05:00,94,45,40,27,22,63.7,2068,2538,5987.8,2,890 16 | 2022/10/21 01:10:00,92,55,30,24,18,54.6,1656,2208,5023.2,2,890 17 | 2022/10/21 01:15:00,90,34,22,22,14,46.8,1260,1980,4212,2,890 18 | 2022/10/21 01:20:00,88,35,16,21,11,41.6,968,1848,3660.8,2,890 19 | 2022/10/21 01:25:00,89,33,12,23,12,45.5,1068,2047,4049.5,2,890 20 | 2022/10/21 01:30:00,97,23,15,25,11,46.8,1067,2425,4539.6,2,890 21 | 2022/10/21 01:35:00,67,36,18,28,14,54.6,938,1876,3658.2,1,890 22 | 2022/10/21 01:40:00,66,44,22,29,16,58.5,1056,1914,3861,1,890 23 | 2022/10/21 01:45:00,98,48,29,32,12,57.2,1176,3136,5605.6,1,890 24 | 2022/10/21 01:50:00,96,57,22,35,15,65,1440,3360,6240,1,890 25 | 2022/10/21 01:55:00,94,68,34,23,21,57.2,1974,2162,5376.8,1,890 26 | 2022/10/21 02:00:00,92,90,34,24,13,48.1,1196,2208,4425.2,1,890 27 | 2022/10/21 02:05:00,90,87,32,34,15,63.7,1350,3060,5733,1,890 28 | 2022/10/21 02:10:00,88,78,31,28,12,52,1056,2464,4576,1,890 29 | 2022/10/21 02:15:00,89,75,59,27,11,49.4,979,2403,4396.6,1,890 30 | 2022/10/21 02:20:00,97,45,70,24,16,52,1552,2328,5044,0,890 31 | 2022/10/21 02:25:00,67,55,99,22,19,53.3,1273,1474,3571.1,0,890 32 | 2022/10/21 02:30:00,66,34,40,21,22,55.9,1452,1386,3689.4,0,890 33 | 2022/10/21 02:35:00,66,35,30,23,18,53.3,1188,1518,3517.8,0,890 34 | 2022/10/21 
02:40:00,98,33,22,25,14,50.7,1372,2450,4968.6,0,890 35 | 2022/10/21 02:45:00,96,23,16,28,11,50.7,1056,2688,4867.2,0,890 36 | 2022/10/21 02:50:00,94,36,12,29,12,53.3,1128,2726,5010.2,0,890 37 | 2022/10/21 02:55:00,92,44,15,32,11,55.9,1012,2944,5142.8,0,890 38 | 2022/10/21 03:00:00,90,48,18,35,14,63.7,1260,3150,5733,0,890 39 | 2022/10/21 03:05:00,88,57,22,23,16,50.7,1408,2024,4461.6,1,890 40 | 2022/10/21 03:10:00,89,68,29,24,12,46.8,1068,2136,4165.2,1,890 41 | 2022/10/21 03:15:00,97,90,22,34,15,63.7,1455,3298,6178.9,1,890 42 | 2022/10/21 03:20:00,67,87,34,28,21,63.7,1407,1876,4267.9,1,890 43 | 2022/10/21 03:25:00,66,78,34,27,13,52,858,1782,3432,1,890 44 | 2022/10/21 03:30:00,98,75,32,24,15,50.7,1470,2352,4968.6,1,890 45 | 2022/10/21 03:35:00,96,45,31,22,12,44.2,1152,2112,4243.2,1,890 46 | 2022/10/21 03:40:00,94,55,59,21,11,41.6,1034,1974,3910.4,1,890 47 | 2022/10/21 03:45:00,92,34,70,23,16,50.7,1472,2116,4664.4,1,890 48 | 2022/10/21 03:50:00,90,35,99,25,19,57.2,1710,2250,5148,1,890 49 | 2022/10/21 03:55:00,88,33,40,28,22,65,1936,2464,5720,1,890 50 | 2022/10/21 04:00:00,89,23,30,29,18,61.1,1602,2581,5437.9,1,890 51 | 2022/10/21 04:05:00,97,36,22,32,12,57.2,1164,3104,5548.4,1,890 52 | 2022/10/21 04:10:00,67,44,16,35,11,59.8,737,2345,4006.6,1,890 53 | 2022/10/21 04:15:00,66,48,12,23,14,48.1,924,1518,3174.6,1,890 54 | 2022/10/21 04:20:00,66,57,15,24,16,52,1056,1584,3432,2,890 55 | 2022/10/21 04:25:00,98,68,18,24,12,46.8,1176,2352,4586.4,2,890 56 | 2022/10/21 04:30:00,96,90,22,22,15,48.1,1440,2112,4617.6,2,890 57 | 2022/10/21 04:35:00,94,34,29,21,21,54.6,1974,1974,5132.4,2,890 58 | 2022/10/21 04:40:00,92,35,22,23,13,46.8,1196,2116,4305.6,2,890 59 | 2022/10/21 04:45:00,90,33,34,25,15,52,1350,2250,4680,0,890 60 | 2022/10/21 04:50:00,88,23,34,28,12,52,1056,2464,4576,0,890 61 | 2022/10/21 04:55:00,89,36,32,29,11,52,979,2581,4628,0,890 62 | 2022/10/21 05:00:00,97,44,31,32,16,62.4,1552,3104,6052.8,0,890 
-------------------------------------------------------------------------------- /boagent/public/assets/dygraph.css: -------------------------------------------------------------------------------- 1 | .graph { margin-top: 50px; margin-bottom: 10px; margin-left: auto; margin-right: auto;} 2 | -------------------------------------------------------------------------------- /boagent/public/assets/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/favicon.ico -------------------------------------------------------------------------------- /boagent/public/assets/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/favicon.png -------------------------------------------------------------------------------- /boagent/public/assets/git-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/git-logo.png -------------------------------------------------------------------------------- /boagent/public/assets/graph.ts: -------------------------------------------------------------------------------- 1 | new Dygraph(, "ny-vs-sf.txt", { 2 | legend: 'always', 3 | title: 'NYC vs. 
SF', 4 | showRoller: true, 5 | rollPeriod: 14, 6 | customBars: true, 7 | ylabel: 'Temperature (F)', 8 | }); 9 | -------------------------------------------------------------------------------- /boagent/public/assets/license.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 13 | -------------------------------------------------------------------------------- /boagent/public/assets/main.css: -------------------------------------------------------------------------------- 1 | body{ 2 | margin-right: auto; 3 | margin-left: auto; 4 | width:90%; 5 | } 6 | 7 | a{ 8 | color: white; 9 | text-decoration: none; 10 | } 11 | 12 | h1{ 13 | font-size: 2.5em; 14 | } 15 | 16 | a:hover{ 17 | color: gray; 18 | } 19 | 20 | .title{ 21 | padding: 1%; 22 | background-color: #364049; 23 | border-radius: 6px; 24 | text-align: center; 25 | margin-bottom: 1%; 26 | } 27 | .navbar{ 28 | font-size: 1.4em; 29 | padding: 1%; 30 | } 31 | 32 | .box { 33 | background-color: #364049; 34 | padding:1%; 35 | border-radius: 6px; 36 | height:200px; 37 | } 38 | 39 | 40 | .scores-box{ 41 | background-color: #364049; 42 | border-radius: 6px; 43 | margin-bottom: 3%; 44 | display: flex; 45 | flex-wrap: wrap; 46 | justify-content: space-around; 47 | } 48 | 49 | .score-box { 50 | width:18%; 51 | text-align: center; 52 | } 53 | 54 | #platypus-logo{ 55 | width: 15%; 56 | height: auto; 57 | } 58 | 59 | .box-legend{ 60 | width:90%; 61 | margin-top: 2%; 62 | margin-right: auto; 63 | margin-left: auto; 64 | } 65 | 66 | .impacts_box{ 67 | display: flex; 68 | justify-content: space-between; 69 | } 70 | 71 | .impact_box{ 72 | width: 48%; 73 | background-color: #364049; 74 | border-radius: 6px; 75 | text-align: center; 76 | padding-bottom: 3%; 77 | padding-top: 2%; 78 | 79 | } 80 | 81 | .txt_impact{ 82 | font-size: 3em; 83 | } 84 | 85 | .txt_unit{ 86 | font-size: 1.5em; 87 | } 88 | 89 | 90 | #forkongithub a 91 | { 92 | 
background:#000;color:#fff;text-decoration:none;font-family:arial,sans-serif;text-align:center;font-weight:bold;padding:5px 40px;font-size:1rem;line-height:2rem;position:relative;transition:0.5s;} 93 | 94 | #forkongithub a:hover 95 | { 96 | background:#c11;color:#fff; 97 | } 98 | 99 | #forkongithub a::before, #forkongithub a::after{ 100 | content:""; 101 | width:100%; 102 | display:block; 103 | position:absolute; 104 | top:1px; 105 | left:0; 106 | height:1px; 107 | background:#fff; 108 | } 109 | #forkongithub a::after{ 110 | bottom:1px; 111 | top:auto; 112 | } 113 | 114 | @media screen and (min-width:800px){ 115 | #forkongithub{ 116 | position:absolute; 117 | display:block; 118 | top:0; 119 | right:0; 120 | width:200px; 121 | overflow:hidden; 122 | height:200px; 123 | z-index:9999; 124 | } 125 | #forkongithub a{ 126 | width:200px; 127 | position:absolute; 128 | top:60px; 129 | right:-60px; 130 | transform:rotate(45deg); 131 | -webkit-transform:rotate(45deg); 132 | -ms-transform:rotate(45deg); 133 | -moz-transform:rotate(45deg); 134 | -o-transform:rotate(45deg); 135 | box-shadow:4px 4px 10px rgba(0,0,0,0.8); 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /boagent/public/assets/platypus_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/platypus_logo.png -------------------------------------------------------------------------------- /boagent/public/assets/synchronizer.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Synchronize zooming and/or selections between a set of dygraphs. 
3 | * 4 | * Usage: 5 | * 6 | * var g1 = new Dygraph(...), 7 | * g2 = new Dygraph(...), 8 | * ...; 9 | * var sync = Dygraph.synchronize(g1, g2, ...); 10 | * // charts are now synchronized 11 | * sync.detach(); 12 | * // charts are no longer synchronized 13 | * 14 | * You can set options using the last parameter, for example: 15 | * 16 | * var sync = Dygraph.synchronize(g1, g2, g3, { 17 | * selection: true, 18 | * zoom: true 19 | * }); 20 | * 21 | * The default is to synchronize both of these. 22 | * 23 | * Instead of passing one Dygraph object as each parameter, you may also pass an 24 | * array of dygraphs: 25 | * 26 | * var sync = Dygraph.synchronize([g1, g2, g3], { 27 | * selection: false, 28 | * zoom: true 29 | * }); 30 | * 31 | * You may also set `range: false` if you wish to only sync the x-axis. 32 | * The `range` option has no effect unless `zoom` is true (the default). 33 | */ 34 | (function() { 35 | /* global Dygraph:false */ 36 | 'use strict'; 37 | 38 | var Dygraph; 39 | if (window.Dygraph) { 40 | Dygraph = window.Dygraph; 41 | } else if (typeof(module) !== 'undefined') { 42 | Dygraph = require('../dygraph'); 43 | } 44 | 45 | var synchronize = function(/* dygraphs..., opts */) { 46 | if (arguments.length === 0) { 47 | throw 'Invalid invocation of Dygraph.synchronize(). Need >= 1 argument.'; 48 | } 49 | 50 | var OPTIONS = ['selection', 'zoom', 'range']; 51 | var opts = { 52 | selection: true, 53 | zoom: true, 54 | range: true 55 | }; 56 | var dygraphs = []; 57 | var prevCallbacks = []; 58 | 59 | var parseOpts = function(obj) { 60 | if (!(obj instanceof Object)) { 61 | throw 'Last argument must be either Dygraph or Object.'; 62 | } else { 63 | for (var i = 0; i < OPTIONS.length; i++) { 64 | var optName = OPTIONS[i]; 65 | if (obj.hasOwnProperty(optName)) opts[optName] = obj[optName]; 66 | } 67 | } 68 | }; 69 | 70 | if (arguments[0] instanceof Dygraph) { 71 | // Arguments are Dygraph objects. 
72 | for (var i = 0; i < arguments.length; i++) { 73 | if (arguments[i] instanceof Dygraph) { 74 | dygraphs.push(arguments[i]); 75 | } else { 76 | break; 77 | } 78 | } 79 | if (i < arguments.length - 1) { 80 | throw 'Invalid invocation of Dygraph.synchronize(). ' + 81 | 'All but the last argument must be Dygraph objects.'; 82 | } else if (i == arguments.length - 1) { 83 | parseOpts(arguments[arguments.length - 1]); 84 | } 85 | } else if (arguments[0].length) { 86 | // Invoked w/ list of dygraphs, options 87 | for (var i = 0; i < arguments[0].length; i++) { 88 | dygraphs.push(arguments[0][i]); 89 | } 90 | if (arguments.length == 2) { 91 | parseOpts(arguments[1]); 92 | } else if (arguments.length > 2) { 93 | throw 'Invalid invocation of Dygraph.synchronize(). ' + 94 | 'Expected two arguments: array and optional options argument.'; 95 | } // otherwise arguments.length == 1, which is fine. 96 | } else { 97 | throw 'Invalid invocation of Dygraph.synchronize(). ' + 98 | 'First parameter must be either Dygraph or list of Dygraphs.'; 99 | } 100 | 101 | if (dygraphs.length < 2) { 102 | throw 'Invalid invocation of Dygraph.synchronize(). ' + 103 | 'Need two or more dygraphs to synchronize.'; 104 | } 105 | 106 | var readycount = dygraphs.length; 107 | for (var i = 0; i < dygraphs.length; i++) { 108 | var g = dygraphs[i]; 109 | g.ready( function() { 110 | if (--readycount == 0) { 111 | // store original callbacks 112 | var callBackTypes = ['drawCallback', 'highlightCallback', 'unhighlightCallback']; 113 | for (var j = 0; j < dygraphs.length; j++) { 114 | if (!prevCallbacks[j]) { 115 | prevCallbacks[j] = {}; 116 | } 117 | for (var k = callBackTypes.length - 1; k >= 0; k--) { 118 | prevCallbacks[j][callBackTypes[k]] = dygraphs[j].getFunctionOption(callBackTypes[k]); 119 | } 120 | } 121 | 122 | // Listen for draw, highlight, unhighlight callbacks. 
123 | if (opts.zoom) { 124 | attachZoomHandlers(dygraphs, opts, prevCallbacks); 125 | } 126 | 127 | if (opts.selection) { 128 | attachSelectionHandlers(dygraphs, prevCallbacks); 129 | } 130 | } 131 | }); 132 | } 133 | 134 | return { 135 | detach: function() { 136 | for (var i = 0; i < dygraphs.length; i++) { 137 | var g = dygraphs[i]; 138 | if (opts.zoom) { 139 | g.updateOptions({drawCallback: prevCallbacks[i].drawCallback}); 140 | } 141 | if (opts.selection) { 142 | g.updateOptions({ 143 | highlightCallback: prevCallbacks[i].highlightCallback, 144 | unhighlightCallback: prevCallbacks[i].unhighlightCallback 145 | }); 146 | } 147 | } 148 | // release references & make subsequent calls throw. 149 | dygraphs = null; 150 | opts = null; 151 | prevCallbacks = null; 152 | } 153 | }; 154 | }; 155 | 156 | function arraysAreEqual(a, b) { 157 | if (!Array.isArray(a) || !Array.isArray(b)) return false; 158 | var i = a.length; 159 | if (i !== b.length) return false; 160 | while (i--) { 161 | if (a[i] !== b[i]) return false; 162 | } 163 | return true; 164 | } 165 | 166 | function attachZoomHandlers(gs, syncOpts, prevCallbacks) { 167 | var block = false; 168 | for (var i = 0; i < gs.length; i++) { 169 | var g = gs[i]; 170 | g.updateOptions({ 171 | drawCallback: function(me, initial) { 172 | if (block || initial) return; 173 | block = true; 174 | var opts = { 175 | dateWindow: me.xAxisRange() 176 | }; 177 | if (syncOpts.range) opts.valueRange = me.yAxisRange(); 178 | 179 | for (var j = 0; j < gs.length; j++) { 180 | if (gs[j] == me) { 181 | if (prevCallbacks[j] && prevCallbacks[j].drawCallback) { 182 | prevCallbacks[j].drawCallback.apply(this, arguments); 183 | } 184 | continue; 185 | } 186 | 187 | // Only redraw if there are new options 188 | if (arraysAreEqual(opts.dateWindow, gs[j].getOption('dateWindow')) && 189 | arraysAreEqual(opts.valueRange, gs[j].getOption('valueRange'))) { 190 | continue; 191 | } 192 | 193 | gs[j].updateOptions(opts); 194 | } 195 | block = false; 196 | } 
197 | }, true /* no need to redraw */); 198 | } 199 | } 200 | 201 | function attachSelectionHandlers(gs, prevCallbacks) { 202 | var block = false; 203 | for (var i = 0; i < gs.length; i++) { 204 | var g = gs[i]; 205 | 206 | g.updateOptions({ 207 | highlightCallback: function(event, x, points, row, seriesName) { 208 | if (block) return; 209 | block = true; 210 | var me = this; 211 | for (var i = 0; i < gs.length; i++) { 212 | if (me == gs[i]) { 213 | if (prevCallbacks[i] && prevCallbacks[i].highlightCallback) { 214 | prevCallbacks[i].highlightCallback.apply(this, arguments); 215 | } 216 | continue; 217 | } 218 | var idx = gs[i].getRowForX(x); 219 | if (idx !== null) { 220 | gs[i].setSelection(idx, seriesName); 221 | } 222 | } 223 | block = false; 224 | }, 225 | unhighlightCallback: function(event) { 226 | if (block) return; 227 | block = true; 228 | var me = this; 229 | for (var i = 0; i < gs.length; i++) { 230 | if (me == gs[i]) { 231 | if (prevCallbacks[i] && prevCallbacks[i].unhighlightCallback) { 232 | prevCallbacks[i].unhighlightCallback.apply(this, arguments); 233 | } 234 | continue; 235 | } 236 | gs[i].clearSelection(); 237 | } 238 | block = false; 239 | } 240 | }, true /* no need to redraw */); 241 | } 242 | } 243 | 244 | Dygraph.synchronize = synchronize; 245 | 246 | })(); 247 | -------------------------------------------------------------------------------- /boagent/public/assets/table-style.css: -------------------------------------------------------------------------------- 1 | table { 2 | margin-top: 2%; 3 | margin-left: auto; 4 | margin-right: auto; 5 | border-collapse: collapse; 6 | background: none; 7 | border-radius: 6px; 8 | overflow: hidden; 9 | width: 90%; 10 | position: relative; 11 | } 12 | table * { 13 | position: relative; 14 | } 15 | table td, table th { 16 | padding-left: 8px; 17 | } 18 | table thead tr { 19 | height: 60px; 20 | background:#364049; 21 | font-size: 16px; 22 | } 23 | table tbody tr { 24 | height: 48px; 25 | border-bottom: 1px 
solid #E3F1D5; 26 | } 27 | table tbody tr:last-child { 28 | border: 0; 29 | } 30 | table td, table th { 31 | text-align: left; 32 | } 33 | table td.l, table th.l { 34 | text-align: right; 35 | } 36 | table td.c, table th.c { 37 | text-align: center; 38 | } 39 | table td.r, table th.r { 40 | text-align: center; 41 | } 42 | 43 | @media screen and (max-width: 35.5em) { 44 | table { 45 | display: block; 46 | } 47 | table > *, table tr, table td, table th { 48 | display: block; 49 | } 50 | table thead { 51 | display: none; 52 | } 53 | table tbody tr { 54 | height: auto; 55 | padding: 8px 0; 56 | } 57 | table tbody tr td { 58 | padding-left: 45%; 59 | margin-bottom: 12px; 60 | } 61 | table tbody tr td:last-child { 62 | margin-bottom: 0; 63 | } 64 | table tbody tr td:before { 65 | position: absolute; 66 | font-weight: 700; 67 | width: 40%; 68 | left: 10px; 69 | top: 0; 70 | } 71 | table tbody tr td:nth-child(1):before { 72 | content: "Code"; 73 | } 74 | table tbody tr td:nth-child(2):before { 75 | content: "Stock"; 76 | } 77 | table tbody tr td:nth-child(3):before { 78 | content: "Cap"; 79 | } 80 | table tbody tr td:nth-child(4):before { 81 | content: "Inch"; 82 | } 83 | table tbody tr td:nth-child(5):before { 84 | content: "Box Type"; 85 | } 86 | } 87 | body { 88 | background: #9BC86A; 89 | font: 400 14px 'Calibri','Arial'; 90 | padding: 20px; 91 | } 92 | 93 | blockquote { 94 | color: white; 95 | text-align: center; 96 | } 97 | -------------------------------------------------------------------------------- /boagent/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | Fork me on GitHub 14 |
15 |

PLATYPUS : report, understand and tackle IT impacts

16 |
17 | 23 | 24 |
25 |
26 |

Current intensity (CO2eq./kWh)

27 |
28 |
29 | 30 |
31 |

Current RAM consumption

32 |
33 |
34 | 35 | 36 | 37 |
38 |

Current CPU consumption

39 |
40 |
41 |
42 |

Current electrical consumption

43 |
44 |
45 |
46 | 47 |
48 |
49 |

Yearly operational emissions

50 | 51 | kgCO2eq. 52 |
53 |
54 |

Yearly embedded emissions

55 | 56 | kgCO2eq. 57 |
58 |
59 | 60 |
61 |

Carbon intensity

62 |

Carbon intensity of the electricity grid (production and distribution) in your region expressed in gCO2eq./kWh.

63 |
64 |
65 |
66 |
67 | 68 |
69 |

Server impact

70 |

Greenhouse gas emissions expressed in gCO2eq. Both operational (related to electricity consumption) and embedded (related to manufacture) are reported.

71 |
72 |
73 |
74 |
75 | 76 |
77 |

Server consumption

78 |

Server consumption, in Watts

79 |
80 |
81 |
82 |
83 | 84 |
85 |

Server Ram Usage

86 |

Server RAM usage, in gigabytes

87 |
88 |
89 |
90 |
91 | 92 |
93 |

Server Cpu Usage

94 |

Server Cpu load, in %

95 |
96 |
97 |
98 |
99 | 100 | 101 |
102 |

Recommendations

103 |
104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 |
ModeTimeTypeRecommendationCMD
118 | 119 | 120 | 212 | 295 | 296 | 333 | 334 | 335 | 336 | 337 | -------------------------------------------------------------------------------- /boagent_color.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /compose/development.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | boagent: 3 | entrypoint: [ "/bin/bash", "-c", "cd boagent/api && uvicorn api:app --host 0.0.0.0 --reload" ] 4 | volumes: 5 | - "$PWD:/home/boagent" 6 | 7 | boaviztapi: 8 | build: 9 | context: ../boaviztapi 10 | dockerfile: ../boaviztapi/Dockerfile.dev 11 | volumes: 12 | - "$PWD/../boaviztapi/boaviztapi:/app/boaviztapi" 13 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.2' 2 | 3 | services: 4 | 5 | boagent: 6 | build: 7 | context: . 
8 | environment: 9 | BOAVIZTAPI_ENDPOINT: "http://boaviztapi:5000" 10 | DEFAULT_LIFETIME: 5.0 11 | HARDWARE_FILE_PATH: "/home/boagent/hardware_data.json" 12 | POWER_FILE_PATH: "/app/data/power_data.json" 13 | privileged: true 14 | depends_on: 15 | - boaviztapi 16 | - scaphandre 17 | ports: 18 | - "8000:8000" 19 | networks: 20 | - boagent-network 21 | volumes: 22 | - "/proc:/proc" 23 | - "/sys:/sys:ro" 24 | - "powerdata:/app/data:ro" 25 | - "./db:/app/db" 26 | - "../boaviztapi/boaviztapi:/app/boaviztapi" 27 | - "/etc/crontab:/etc/crontab" 28 | - "./boagent:/home/boagent/boagent" 29 | 30 | scaphandre: 31 | image: hubblo/scaphandre:dev 32 | privileged: true 33 | volumes: 34 | - type: bind 35 | source: /proc 36 | target: /proc 37 | - type: bind 38 | source: /sys/class/powercap 39 | target: /sys/class/powercap 40 | - "powerdata:/app/data:rw" 41 | command: [ "--no-header", "json", "-s", "10", "--resources", "-f", "/app/data/power_data.json" ] 42 | networks: 43 | - boagent-network 44 | 45 | boaviztapi: 46 | image: ghcr.io/boavizta/boaviztapi:1.2.2 47 | ports: 48 | - "5000:5000" 49 | networks: 50 | - boagent-network 51 | 52 | volumes: 53 | powerdata: {} 54 | 55 | networks: 56 | boagent-network: 57 | driver: bridge 58 | ipam: 59 | config: 60 | - subnet: 192.168.33.0/24 61 | -------------------------------------------------------------------------------- /hardware_cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import json 4 | import sys 5 | 6 | from boagent.hardware.lshw import Lshw 7 | from click import command, option, ClickException 8 | 9 | 10 | @command() 11 | @option("--output-file", help="File to output the hardware data to") 12 | def main(output_file): 13 | try: 14 | lshw = Lshw() 15 | 16 | lshw_cpus = lshw.cpus 17 | lshw_ram = lshw.memories 18 | lshw_disks = lshw.disks 19 | except KeyError: 20 | error_message = "Hardware_cli was not executed with privileges, try `sudo ./hardware_cli.py`."
21 | exception = ClickException(error_message) 22 | exception.show() 23 | else: 24 | hardware_data = {} 25 | hardware_data["disks"] = lshw_disks 26 | hardware_data["cpus"] = lshw_cpus 27 | hardware_data["rams"] = lshw_ram 28 | if output_file is not None: 29 | with open(output_file, "w") as fd: 30 | json.dump(hardware_data, fd, indent=4) 31 | else: 32 | json.dump(hardware_data, sys.stdout, indent=4) 33 | return 0 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "boagent" 3 | version = "0.1.1" 4 | description = "Local API to collect and compute data on used device and running applications to give insight on their environmental impacts." 5 | authors = ["Boavizta "] 6 | license = "Apache-2.0" 7 | readme = "README.md" 8 | package-mode = false 9 | 10 | [tool.poetry.dependencies] 11 | python = "^3.10" 12 | boaviztapi-sdk = "^1.2.4" 13 | fastapi = "^0.110.0" 14 | pydantic = "^2.6.4" 15 | pydantic-settings = "^2.2.1" 16 | click = "^8.1.7" 17 | python-dateutil = "^2.9.0.post0" 18 | 19 | [tool.poetry.group.dev.dependencies] 20 | pytest = "^8.0.2" 21 | pre-commit = "^3.6.2" 22 | deptry = "^0.20.0" 23 | httpx = "^0.27.2" 24 | requests = "^2.32.3" 25 | uvicorn = "^0.30.6" 26 | 27 | [build-system] 28 | requires = ["poetry-core"] 29 | build-backend = "poetry.core.masonry.api" 30 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --import-mode=importlib 3 | testpaths = 4 | tests 5 | markers = 6 | query: mark a test for Boagent query endpoint. 7 | database: mark a test a Boagent API route that is dependent on a SQLite database. 
8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiofile==3.8.1 2 | annotated-types==0.6.0 3 | anyio==3.6.2 4 | asgiref==3.7.2 5 | boaviztapi-sdk==1.2.4 6 | caio==0.9.8 7 | certifi==2022.9.24 8 | cfgv==3.4.0 9 | charset-normalizer==2.1.1 10 | click==8.1.3 11 | croniter==1.3.7 12 | dataclasses==0.6 13 | distlib==0.3.8 14 | exceptiongroup==1.2.0 15 | fastapi==0.110.0 16 | filelock==3.13.1 17 | greenlet==1.1.3.post0 18 | h11==0.14.0 19 | httpcore==1.0.4 20 | httpx==0.27.0 21 | identify==2.5.35 22 | idna==3.4 23 | iniconfig==2.0.0 24 | mangum==0.16.0 25 | nodeenv==1.8.0 26 | numpy==1.26.4 27 | packaging==23.2 28 | pandas==1.5.1 29 | platformdirs==4.2.0 30 | pluggy==1.4.0 31 | pre-commit==3.6.2 32 | py-cpuinfo==9.0.0 33 | pydantic==2.6.4 34 | pydantic-settings==2.2.1 35 | pydantic_core==2.16.3 36 | pytest==8.0.2 37 | python-dateutil==2.8.2 38 | python-dotenv==0.21.0 39 | pytz==2022.5 40 | PyYAML==6.0.1 41 | requests==2.28.1 42 | six==1.16.0 43 | sniffio==1.3.0 44 | SQLAlchemy==1.4.42 45 | starlette==0.36.3 46 | tomli==2.0.1 47 | typing_extensions==4.9.0 48 | tzdata==2024.1 49 | urllib3==1.26.12 50 | uvicorn==0.19.0 51 | virtualenv==20.25.1 52 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from setuptools import setup, find_packages 4 | from boagent import __version__ 5 | 6 | py_version = sys.version_info[:2] 7 | if py_version < (3, 9): 8 | raise Exception("api requires Python >= 3.9.") 9 | 10 | with open("README.md", "r") as fh: 11 | long_description = fh.read() 12 | 13 | setup( 14 | name="boagent", 15 | maintainer="Benoit Petit", 16 | maintainer_email="bpetit@hubblo.org", 17 | version=__version__, 18 | packages=find_packages(), 19 | include_package_data=True, 20 | 
description="Monitoring agent/framework for evaluating the environmental impacts of a machine and its applications, including several to all steps of the life cycle of the machine and service, plus multiple criterias of impacts (not just CO2eq metrics / Global Warming Potential). Part of the efforts of https://boavizta.org/en and https://sdialliance.org/.", 21 | use_pipfile=True, 22 | long_description=long_description, 23 | long_description_content_type="text/markdown", 24 | url="https://github.com/Boavizta/boagent", 25 | test_suite="tests", 26 | setup_requires=["setuptools-pipfile"], 27 | keywords=[ 28 | "carbon", 29 | "footprint", 30 | "environment", 31 | "climate", 32 | "co2", 33 | "gwp", 34 | "adp", 35 | "pe", 36 | "energy", 37 | "boagent", 38 | "scaphandre", 39 | "boavizta", 40 | "api", 41 | ], 42 | classifiers=[ 43 | "Programming Language :: Python :: 3.9", 44 | "Intended Audience :: Developers", 45 | "Operating System :: OS Independent", 46 | ], 47 | python_requires=">=3.9", 48 | entry_points=""" """, 49 | ) 50 | -------------------------------------------------------------------------------- /setup/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.2' 2 | 3 | services: 4 | 5 | boagent: 6 | image: ghcr.io/boavizta/boagent:latest 7 | environment: 8 | BOAVIZTAPI_ENDPOINT: "http://boaviztapi:5000" 9 | DEFAULT_LIFETIME: 5.0 10 | HARDWARE_FILE_PATH: "/home/boagent/hardware_data.json" 11 | POWER_FILE_PATH: "/app/data/power_data.json" 12 | depends_on: 13 | - boaviztapi 14 | - scaphandre 15 | ports: 16 | - "8000:8000" 17 | networks: 18 | - boagent-network 19 | volumes: 20 | - "/proc:/proc" 21 | - "/sys:/sys:ro" 22 | - "powerdata:/app/data:ro" 23 | 24 | scaphandre: 25 | image: hubblo/scaphandre:dev 26 | volumes: 27 | - type: bind 28 | source: /proc 29 | target: /proc 30 | - type: bind 31 | source: /sys/class/powercap 32 | target: /sys/class/powercap 33 | - "powerdata:/app/data:rw" 34 | command: [ 
"json", "-s", "10", "-f", "/app/data/power_data.json" ] 35 | networks: 36 | - boagent-network 37 | 38 | boaviztapi: 39 | #image: ghcr.io/boavizta/boaviztapi:0.1.2 40 | image: bpetit/boaviztapi:v0.1.3 41 | ports: 42 | - "5000:5000" 43 | networks: 44 | - boagent-network 45 | 46 | volumes: 47 | powerdata: {} 48 | 49 | networks: 50 | boagent-network: 51 | driver: bridge 52 | ipam: 53 | config: 54 | - subnet: 192.168.33.0/24 55 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/tests/__init__.py -------------------------------------------------------------------------------- /tests/api/test_api_integration.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from datetime import datetime, timedelta 4 | from fastapi.testclient import TestClient 5 | from unittest import TestCase 6 | from unittest.mock import patch 7 | from pytest import mark 8 | from boagent.api.config import Settings 9 | from tests.mocks.mocks import ( 10 | mock_boaviztapi_response_not_verbose, 11 | mock_get_metrics_verbose, 12 | mock_get_metrics_not_verbose, 13 | ) 14 | 15 | # Mock settings for testing environment 16 | settings = Settings( 17 | hardware_file_path="./tests/mocks/hardware_data.json", 18 | db_path="./tests/mocks/boagent.db", 19 | power_file_path="./tests/mocks/power_data.json", 20 | ) 21 | 22 | from boagent.api.api import app # noqa 23 | 24 | NOW_ISO8601 = datetime.now().isoformat() 25 | NOW_ISO8601_MINUS_ONE_MINUTE = datetime.fromisoformat(NOW_ISO8601) - timedelta( 26 | minutes=1 27 | ) 28 | 29 | client = TestClient(app) 30 | 31 | 32 | class ApiEndpointsTest(TestCase): 33 | def setUp(self): 34 | with open( 35 | mock_boaviztapi_response_not_verbose, "r" 36 | ) as boaviztapi_response_file: 37 | 
self.boaviztapi_response_not_verbose = json.load(boaviztapi_response_file) 38 | 39 | with open(mock_get_metrics_not_verbose, "r") as get_metrics_not_verbose_file: 40 | self.get_metrics_not_verbose = json.load(get_metrics_not_verbose_file) 41 | with open(mock_get_metrics_verbose, "r") as get_metrics_verbose_file: 42 | self.get_metrics_verbose = json.load(get_metrics_verbose_file) 43 | 44 | def test_read_info(self): 45 | response = client.get("/info") 46 | assert response.status_code == 200 47 | 48 | def test_read_web(self): 49 | response = client.get("/web") 50 | assert response.status_code == 200 51 | 52 | @patch("boagent.api.api.get_metrics") 53 | def test_read_metrics_with_success(self, mocked_get_metrics): 54 | 55 | mocked_get_metrics.return_value = self.get_metrics_not_verbose 56 | 57 | params = { 58 | "start_time": f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 59 | "end_time": f"{NOW_ISO8601}", 60 | "verbose": "false", 61 | "location": "FRA", 62 | "measure_power": "false", 63 | "lifetime": 5, 64 | "fetch_hardware": "false", 65 | } 66 | 67 | response = client.get("/metrics", params=params) 68 | assert response.status_code == 200 69 | 70 | @patch("boagent.api.api.get_metrics") 71 | def test_read_metrics_with_verbose_with_success(self, mocked_get_metrics): 72 | 73 | mocked_get_metrics.return_value = self.get_metrics_verbose 74 | 75 | params = { 76 | "start_time": f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 77 | "end_time": f"{NOW_ISO8601}", 78 | "verbose": "false", 79 | "location": "FRA", 80 | "measure_power": "false", 81 | "lifetime": 5, 82 | "fetch_hardware": "false", 83 | } 84 | 85 | response = client.get("/metrics", params=params) 86 | assert response.status_code == 200 87 | 88 | @mark.query 89 | @patch("boagent.api.api.get_metrics") 90 | def test_read_query_without_measure_power_and_fetch_hardware_with_success( 91 | self, mocked_get_metrics 92 | ): 93 | 94 | mocked_get_metrics.return_value = self.boaviztapi_response_not_verbose 95 | 96 | params = { 97 | "start_time": 
f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 98 | "end_time": f"{NOW_ISO8601}", 99 | "verbose": "false", 100 | "location": "FRA", 101 | "measure_power": "false", 102 | "lifetime": 5, 103 | "fetch_hardware": "false", 104 | } 105 | 106 | response = client.get("/query", params=params) 107 | assert response.status_code == 200 108 | 109 | @mark.query 110 | @patch("boagent.api.api.get_metrics") 111 | def test_read_query_with_measure_power_with_success(self, mocked_get_metrics): 112 | 113 | mocked_get_metrics.return_value = self.get_metrics_not_verbose 114 | 115 | params = { 116 | "start_time": f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 117 | "end_time": f"{NOW_ISO8601}", 118 | "verbose": "false", 119 | "location": "FRA", 120 | "measure_power": "true", 121 | "lifetime": 5, 122 | "fetch_hardware": "false", 123 | } 124 | 125 | response = client.get("/query", params=params) 126 | assert response.status_code == 200 127 | 128 | @mark.query 129 | @patch("boagent.api.api.get_metrics") 130 | def test_read_query_with_fetch_hardware_with_success(self, mocked_get_metrics): 131 | 132 | mocked_get_metrics.return_value = self.get_metrics_not_verbose 133 | 134 | params = { 135 | "start_time": f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 136 | "end_time": f"{NOW_ISO8601}", 137 | "verbose": "false", 138 | "location": "FRA", 139 | "measure_power": "false", 140 | "lifetime": 5, 141 | "fetch_hardware": "true", 142 | } 143 | 144 | response = client.get("query", params=params) 145 | assert response.status_code == 200 146 | 147 | @mark.query 148 | @patch("boagent.api.api.get_metrics") 149 | def test_read_query_with_measure_power_and_fetch_hardware(self, mocked_get_metrics): 150 | 151 | mocked_get_metrics.return_value = self.boaviztapi_response_not_verbose 152 | 153 | params = { 154 | "start_time": f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 155 | "end_time": f"{NOW_ISO8601}", 156 | "verbose": "false", 157 | "location": "FRA", 158 | "measure_power": "true", 159 | "lifetime": 5, 160 | "fetch_hardware": "true", 161 | } 162 | 163 | 
response = client.get("/query", params=params) 164 | assert response.status_code == 200 165 | 166 | @mark.query 167 | @patch("boagent.api.api.get_metrics") 168 | def test_read_query_with_measure_power_and_fetch_hardware_verbose( 169 | self, mocked_get_metrics 170 | ): 171 | 172 | mocked_get_metrics.return_value = self.get_metrics_verbose 173 | 174 | params = { 175 | "start_time": f"{NOW_ISO8601_MINUS_ONE_MINUTE}", 176 | "end_time": f"{NOW_ISO8601}", 177 | "verbose": "true", 178 | "location": "FRA", 179 | "measure_power": "true", 180 | "lifetime": 5, 181 | "fetch_hardware": "true", 182 | } 183 | 184 | response = client.get("/query", params=params) 185 | assert response.status_code == 200 186 | 187 | @patch("boagent.api.api.get_metrics") 188 | def test_get_process_embedded_impacts_with_success(self, mocked_get_metrics): 189 | mocked_get_metrics.return_value = self.get_metrics_verbose 190 | params = { 191 | "process_id": 3099, 192 | "start_time": "1717500637.2979465", 193 | "end_time": "1717504237.2979465", 194 | "verbose": "true", 195 | "location": "FRA", 196 | "measure_power": "true", 197 | "lifetime": 5, 198 | "fetch_hardware": "true", 199 | } 200 | response = client.get("/process_embedded_impacts", params=params) 201 | assert response.status_code == 200 202 | self.assertIn("pid", response.json()) 203 | self.assertEqual(response.json()["pid"], 3099) 204 | self.assertIn("process_embedded_impacts", response.json()) 205 | self.assertIn( 206 | "process_cpu_embedded_impact_values", 207 | response.json()["process_embedded_impacts"], 208 | ) 209 | self.assertIn( 210 | "process_ram_embedded_impact_values", 211 | response.json()["process_embedded_impacts"], 212 | ) 213 | self.assertIn( 214 | "process_ssd_embedded_impact_values", 215 | response.json()["process_embedded_impacts"], 216 | ) 217 | self.assertIn( 218 | "process_hdd_embedded_impact_values", 219 | response.json()["process_embedded_impacts"], 220 | ) 221 | 222 | @patch("boagent.api.api.get_metrics") 223 | def 
test_get_process_embedded_impacts_with_error_if_pid_not_found_in_metrics_data( 224 | self, mocked_get_metrics 225 | ): 226 | 227 | mocked_get_metrics.return_value = self.get_metrics_verbose 228 | params = { 229 | "process_id": 1234, 230 | "start_time": "1717500637.2979465", 231 | "end_time": "1717504237.2979465", 232 | "verbose": "true", 233 | "location": "FRA", 234 | "measure_power": "true", 235 | "lifetime": 5, 236 | "fetch_hardware": "true", 237 | } 238 | 239 | response = client.get("/process_embedded_impacts", params=params) 240 | error_message = ( 241 | "Process_id 1234 has not been found in metrics data. Check the queried PID." 242 | ) 243 | self.assertEqual(response.status_code, 400) 244 | self.assertIs(error_message in response.text, True) 245 | -------------------------------------------------------------------------------- /tests/api/test_api_process.py: -------------------------------------------------------------------------------- 1 | import json 2 | from unittest import TestCase, TestSuite, TestLoader 3 | from unittest.mock import patch 4 | from boagent.api.api import ( 5 | get_metrics, 6 | ) 7 | from boagent.api.process import Process, InvalidPIDException 8 | from tests.mocks.mocks import ( 9 | mock_hardware_data, 10 | mock_boaviztapi_response_not_verbose, 11 | mock_get_metrics_verbose, 12 | mock_get_metrics_verbose_no_hdd, 13 | ) 14 | 15 | 16 | @patch("boagent.api.api.HARDWARE_FILE_PATH", mock_hardware_data) 17 | class AllocateEmbeddedImpactForProcess(TestCase): 18 | def setUp(self): 19 | 20 | self.start_time = 1710837858 21 | self.end_time = 1710841458 22 | self.verbose = False 23 | self.location = "EEE" 24 | self.measure_power = False 25 | self.lifetime = 5.0 26 | self.fetch_hardware = False 27 | self.pid = 3099 28 | 29 | with open(mock_boaviztapi_response_not_verbose, "r") as boaviztapi_data: 30 | self.boaviztapi_data = json.load(boaviztapi_data) 31 | 32 | with open(mock_get_metrics_verbose, "r") as get_metrics_verbose: 33 | 
self.get_metrics_verbose = json.load(get_metrics_verbose) 34 | 35 | with open(mock_get_metrics_verbose_no_hdd, "r") as get_metrics_verbose_no_hdd: 36 | self.get_metrics_verbose_no_hdd = json.load(get_metrics_verbose_no_hdd) 37 | 38 | self.process = Process(self.get_metrics_verbose, self.pid) 39 | 40 | @patch("boagent.api.api.query_machine_impact_data") 41 | def test_get_total_embedded_impacts_for_host( 42 | self, mocked_query_machine_impact_data 43 | ): 44 | 45 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 46 | 47 | total_embedded_impacts_host = get_metrics( 48 | self.start_time, 49 | self.end_time, 50 | self.verbose, 51 | self.location, 52 | self.measure_power, 53 | self.lifetime, 54 | self.fetch_hardware, 55 | ) 56 | 57 | assert "embedded_emissions" in total_embedded_impacts_host 58 | assert "embedded_abiotic_resources_depletion" in total_embedded_impacts_host 59 | assert "embedded_primary_energy" in total_embedded_impacts_host 60 | 61 | def test_get_process_info(self): 62 | 63 | process_details = self.process.process_info 64 | for process in process_details: 65 | assert type(process) is dict 66 | self.assertEqual(process["pid"], 3099) 67 | self.assertEqual( 68 | process["exe"], "/snap/firefox/4336/usr/lib/firefox/firefox" 69 | ) 70 | assert type(process_details) is list 71 | 72 | def test_get_process_name(self): 73 | 74 | expected_process_name = "firefox" 75 | process_name = self.process.process_name 76 | 77 | self.assertEqual(expected_process_name, process_name) 78 | 79 | def test_get_process_exe(self): 80 | 81 | expected_process_exe = "/snap/firefox/4336/usr/lib/firefox/firefox" 82 | process_exe = self.process.process_exe 83 | 84 | self.assertEqual(expected_process_exe, process_exe) 85 | 86 | def test_validate_pid_with_error_if_process_id_not_in_metrics(self): 87 | 88 | expected_error_message = ( 89 | "Process_id 1234 has not been found in metrics data. Check the queried PID." 
90 | ) 91 | 92 | with self.assertRaises(InvalidPIDException) as context_manager: 93 | self.process = Process(self.get_metrics_verbose, 1234) 94 | 95 | self.assertEqual(context_manager.exception.message, expected_error_message) 96 | 97 | with self.assertRaises(InvalidPIDException) as context_manager: 98 | self.process.pid = 1234 99 | 100 | self.assertEqual(context_manager.exception.message, expected_error_message) 101 | 102 | def test_get_total_ram_in_bytes(self): 103 | 104 | expected_ram_total = 8589934592 105 | total_ram_in_bytes = self.process.get_total_ram_in_bytes() 106 | assert type(total_ram_in_bytes) is int 107 | self.assertEqual(total_ram_in_bytes, expected_ram_total) 108 | 109 | def test_get_process_ram_share_by_timestamp(self): 110 | 111 | expected_ram_shares = [5.918979644775391, 0.0, 5.9177398681640625] 112 | process_ram_shares = self.process.ram_shares 113 | for index, ram_share in enumerate(process_ram_shares): 114 | assert type(ram_share) is float 115 | self.assertEqual(ram_share, expected_ram_shares[index]) 116 | assert type(process_ram_shares) is list 117 | 118 | def test_get_disk_usage_in_bytes(self): 119 | disk_total_bytes = int( 120 | self.get_metrics_verbose["raw_data"]["power_data"]["raw_data"][1]["host"][ 121 | "components" 122 | ]["disks"][0]["disk_total_bytes"] 123 | ) 124 | disk_available_bytes = int( 125 | self.get_metrics_verbose["raw_data"]["power_data"]["raw_data"][1]["host"][ 126 | "components" 127 | ]["disks"][0]["disk_available_bytes"] 128 | ) 129 | expected_disk_usage = disk_total_bytes - disk_available_bytes 130 | disk_usage = self.process.get_disk_usage_in_bytes() 131 | assert type(disk_usage) is int 132 | self.assertEqual(expected_disk_usage, disk_usage) 133 | 134 | def test_get_process_storage_share_by_timestamp(self): 135 | 136 | expected_storage_shares = [0.0, 0.0, 0.0] 137 | process_storage_shares = self.process.storage_shares 138 | for index, storage_share in enumerate(process_storage_shares): 139 | assert 
type(storage_share) is float 140 | self.assertEqual(storage_share, expected_storage_shares[index]) 141 | assert type(process_storage_shares) is list 142 | 143 | def test_get_embedded_impact_share_for_ssd_by_timestamp(self): 144 | 145 | storage_embedded_impact_shares = ( 146 | self.process.get_component_embedded_impact_shares( 147 | "SSD", self.process.storage_shares 148 | ) 149 | ) 150 | 151 | for storage_embedded_impact_share in storage_embedded_impact_shares: 152 | assert type(storage_embedded_impact_share) is tuple 153 | for value in storage_embedded_impact_share: 154 | assert type(storage_embedded_impact_share[1]) is float 155 | assert type(storage_embedded_impact_shares) 156 | 157 | def test_get_embedded_impact_share_for_hdd_by_timestamp(self): 158 | 159 | storage_embedded_impact_shares = ( 160 | self.process.get_component_embedded_impact_shares( 161 | "HDD", self.process.storage_shares 162 | ) 163 | ) 164 | 165 | for storage_embedded_impact_share in storage_embedded_impact_shares: 166 | assert type(storage_embedded_impact_share) is tuple 167 | for value in storage_embedded_impact_share: 168 | assert type(storage_embedded_impact_share[1]) is float 169 | assert type(storage_embedded_impact_shares) 170 | 171 | def test_get_embedded_impact_share_for_ram_by_timestamp(self): 172 | 173 | ram_embedded_impact_shares = self.process.get_component_embedded_impact_shares( 174 | "RAM", self.process.ram_shares 175 | ) 176 | 177 | for ram_embedded_impact_share in ram_embedded_impact_shares: 178 | assert type(ram_embedded_impact_share) is tuple 179 | for value in ram_embedded_impact_share: 180 | assert type(ram_embedded_impact_share[1]) is float 181 | assert type(ram_embedded_impact_shares) is list 182 | 183 | def test_get_process_cpu_load_shares_by_timestamp(self): 184 | 185 | expected_cpu_load_shares = [5.9772415, 5.2776732, 2.9987452] 186 | process_cpu_load_shares = self.process.cpu_load_shares 187 | 188 | for index, cpu_load_share in enumerate(process_cpu_load_shares): 
189 | assert type(cpu_load_share) is float 190 | self.assertEqual(cpu_load_share, expected_cpu_load_shares[index]) 191 | assert type(process_cpu_load_shares) is list 192 | 193 | def test_get_embedded_impact_share_for_cpu_by_timestamp(self): 194 | 195 | cpu_embedded_impact_shares = self.process.get_component_embedded_impact_shares( 196 | "CPU", self.process.cpu_load_shares 197 | ) 198 | 199 | for cpu_embedded_impact_share in cpu_embedded_impact_shares: 200 | assert type(cpu_embedded_impact_share) is tuple 201 | assert type(cpu_embedded_impact_shares) is list 202 | 203 | def test_get_avg_min_max_embedded_impact_shares_for_cpu_and_ram(self): 204 | 205 | impact_criterias = ["gwp", "adp", "pe"] 206 | cpu_embedded_impact_values = self.process.get_component_embedded_impact_values( 207 | "cpu" 208 | ) 209 | ram_embedded_impact_values = self.process.get_component_embedded_impact_values( 210 | "ram" 211 | ) 212 | 213 | assert type(cpu_embedded_impact_values) is dict 214 | assert type(ram_embedded_impact_values) is dict 215 | for criteria in impact_criterias: 216 | assert f"{criteria}_cpu_average_impact" in cpu_embedded_impact_values 217 | assert f"{criteria}_cpu_max_impact" in cpu_embedded_impact_values 218 | assert f"{criteria}_cpu_min_impact" in cpu_embedded_impact_values 219 | assert f"{criteria}_ram_average_impact" in ram_embedded_impact_values 220 | assert f"{criteria}_ram_max_impact" in ram_embedded_impact_values 221 | assert f"{criteria}_ram_min_impact" in ram_embedded_impact_values 222 | 223 | def test_get_embedded_impact_values_with_error_if_invalid_component_queried(self): 224 | 225 | invalid_component_queried = self.process.get_component_embedded_impact_values( 226 | "invalid_component" 227 | ) 228 | assert ( 229 | invalid_component_queried 230 | == "Queried component is not available for evaluation." 
231 | ) 232 | 233 | def test_get_embedded_impact_values_for_ssd(self): 234 | 235 | impact_criterias = ["gwp", "adp", "pe"] 236 | ssd_embedded_impact_values = self.process.get_component_embedded_impact_values( 237 | "ssd" 238 | ) 239 | 240 | assert type(ssd_embedded_impact_values) is dict 241 | for criteria in impact_criterias: 242 | assert f"{criteria}_ssd_average_impact" in ssd_embedded_impact_values 243 | 244 | def test_get_embedded_impact_values_for_hdd(self): 245 | 246 | impact_criterias = ["gwp", "adp", "pe"] 247 | hdd_embedded_impact_values = self.process.get_component_embedded_impact_values( 248 | "hdd" 249 | ) 250 | 251 | assert type(hdd_embedded_impact_values) is dict 252 | for criteria in impact_criterias: 253 | assert f"{criteria}_hdd_average_impact" in hdd_embedded_impact_values 254 | 255 | def test_get_all_components_embedded_impact_values(self): 256 | 257 | process_embedded_impacts = self.process.embedded_impact_values 258 | self.assertIn("process_embedded_impacts", process_embedded_impacts) 259 | self.assertIn("pid", process_embedded_impacts) 260 | self.assertIn( 261 | "process_cpu_embedded_impact_values", 262 | process_embedded_impacts["process_embedded_impacts"], 263 | ) 264 | self.assertIn( 265 | "process_ram_embedded_impact_values", 266 | process_embedded_impacts["process_embedded_impacts"], 267 | ) 268 | self.assertIn( 269 | "process_ssd_embedded_impact_values", 270 | process_embedded_impacts["process_embedded_impacts"], 271 | ) 272 | self.assertIn( 273 | "process_hdd_embedded_impact_values", 274 | process_embedded_impacts["process_embedded_impacts"], 275 | ) 276 | 277 | def test_get_components_embedded_impact_values_with_hdd_absent_from_get_metrics( 278 | self, 279 | ): 280 | self.process = Process(self.get_metrics_verbose_no_hdd, self.pid) 281 | process_embedded_impacts = self.process.embedded_impact_values 282 | self.assertIn("pid", process_embedded_impacts) 283 | self.assertIn("process_embedded_impacts", process_embedded_impacts) 284 | 
self.assertIn( 285 | "process_cpu_embedded_impact_values", 286 | process_embedded_impacts["process_embedded_impacts"], 287 | ) 288 | self.assertIn( 289 | "process_ram_embedded_impact_values", 290 | process_embedded_impacts["process_embedded_impacts"], 291 | ) 292 | self.assertIn( 293 | "process_ssd_embedded_impact_values", 294 | process_embedded_impacts["process_embedded_impacts"], 295 | ) 296 | self.assertNotIn( 297 | "process_hdd_embedded_impact_values", 298 | process_embedded_impacts["process_embedded_impacts"], 299 | ) 300 | 301 | 302 | loader = TestLoader() 303 | suite = TestSuite() 304 | 305 | suite.addTests(loader.loadTestsFromTestCase(AllocateEmbeddedImpactForProcess)) 306 | -------------------------------------------------------------------------------- /tests/api/test_api_unit.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | from unittest import TestCase, TestSuite, TestLoader 5 | from unittest.mock import Mock, patch 6 | 7 | from boagent.api.api import ( 8 | build_hardware_data, 9 | read_hardware_data, 10 | get_hardware_data, 11 | # query_machine_impact_data, 12 | format_usage_request, 13 | compute_average_consumption, 14 | get_power_data, 15 | get_metrics, 16 | ) 17 | from boagent.api.utils import format_prometheus_output 18 | from tests.mocks.mocks import ( 19 | MockLshw, 20 | hardware_data, 21 | mock_power_data, 22 | mock_hardware_data, 23 | mock_boaviztapi_response_not_verbose, 24 | mock_boaviztapi_response_verbose, 25 | mock_formatted_scaphandre, 26 | mock_get_metrics_verbose, 27 | mock_get_metrics_not_verbose, 28 | ) 29 | 30 | mocked_lshw = Mock() 31 | mocked_lshw.return_value = MockLshw() 32 | 33 | 34 | @patch("boagent.api.api.HARDWARE_FILE_PATH", hardware_data) 35 | @patch("boagent.api.api.Lshw", mocked_lshw) 36 | class ReadHardwareDataTest(TestCase): 37 | def test_build_hardware_data(self): 38 | 39 | build_hardware_data() 40 | assert os.path.exists(hardware_data) is True 
41 | 42 | def test_read_hardware_data(self): 43 | 44 | build_hardware_data() 45 | data = read_hardware_data() 46 | assert type(data["cpus"]) is dict 47 | assert type(data["rams"]) is dict 48 | assert type(data["disks"]) is dict 49 | 50 | @patch("boagent.api.api.build_hardware_data") 51 | def test_get_hardware_data_with_fetch_hardware_false(self, mocked_build_hardware): 52 | 53 | # Test case where hardware_data.json is already present on the 54 | # filesystem through previous call to build_hardware_data 55 | 56 | build_hardware_data() 57 | data = get_hardware_data(fetch_hardware=False) 58 | assert type(data) is dict 59 | mocked_build_hardware.assert_not_called() 60 | 61 | def test_get_hardware_data_with_fetch_hardware_true(self): 62 | 63 | data = get_hardware_data(fetch_hardware=True) 64 | assert type(data) is dict 65 | 66 | def tearDown(self) -> None: 67 | os.remove(hardware_data) 68 | 69 | 70 | class FormatUsageRequestTest(TestCase): 71 | def setUp(self) -> None: 72 | self.start_time = 1710837858 73 | self.end_time = 1710841458 74 | 75 | def test_format_usage_request_with_start_and_end_times(self): 76 | 77 | formatted_request = format_usage_request( 78 | start_time=self.start_time, 79 | end_time=self.end_time, 80 | ) 81 | 82 | assert type(formatted_request) is dict 83 | assert "hours_use_time" in formatted_request 84 | 85 | def test_format_usage_request_with_host_avg_consumption_and_location( 86 | self, 87 | ): 88 | 89 | location = "FRA" 90 | avg_power = 120 91 | 92 | formatted_request = format_usage_request( 93 | start_time=self.start_time, 94 | end_time=self.end_time, 95 | location=location, 96 | avg_power=avg_power, 97 | ) 98 | assert type(formatted_request) is dict 99 | assert "avg_power" in formatted_request 100 | assert "usage_location" in formatted_request 101 | 102 | def test_format_usage_request_with_time_workload_as_percentage(self): 103 | 104 | time_workload = {"time_workload": 50.0} 105 | 106 | formatted_request = format_usage_request( 107 | 
start_time=self.start_time, 108 | end_time=self.end_time, 109 | time_workload=time_workload, 110 | ) 111 | 112 | assert type(formatted_request) is dict 113 | assert "time_workload" in formatted_request 114 | 115 | 116 | class ComputeAvgConsumptionTest(TestCase): 117 | def test_compute_average_consumption(self): 118 | 119 | with open(mock_power_data, "r") as power_data_file: 120 | # power_data = f"[{power_data_file.read()}]" 121 | data = json.load(power_data_file) 122 | avg_host = compute_average_consumption(data) 123 | 124 | assert type(avg_host) is float 125 | 126 | 127 | class FormatPrometheusOutput(TestCase): 128 | def setUp(self): 129 | self.get_metrics_response_not_verbose_path = mock_get_metrics_not_verbose 130 | self.get_metrics_response_verbose_path = mock_get_metrics_verbose 131 | self.components = [ 132 | "assembly_1", 133 | "cpu_1", 134 | "ram_1", 135 | "ssd_1", 136 | "power_supply_1", 137 | "case_1", 138 | "motherboard_1", 139 | ] 140 | 141 | def test_format_prometheus_output_with_get_metrics_not_verbose(self): 142 | 143 | with open(mock_get_metrics_not_verbose, "r") as json_response: 144 | response_to_format = json.load(json_response) 145 | 146 | prometheus_output = format_prometheus_output(response_to_format, verbose=False) 147 | 148 | assert type(prometheus_output) is str 149 | assert len(prometheus_output) > 1 150 | assert "TYPE" in prometheus_output 151 | assert "HELP" in prometheus_output 152 | 153 | def test_format_prometheus_output_with_get_metrics_verbose(self): 154 | 155 | with open(mock_get_metrics_verbose, "r") as json_response: 156 | response_to_format = json.load(json_response) 157 | 158 | prometheus_output = format_prometheus_output(response_to_format, verbose=True) 159 | 160 | assert type(prometheus_output) is str 161 | assert len(prometheus_output) > 1 162 | assert "TYPE" in prometheus_output 163 | assert "HELP" in prometheus_output 164 | assert all(component in prometheus_output for component in self.components) 165 | 166 | 167 | class 
GetPowerDataTest(TestCase): 168 | def setUp(self) -> None: 169 | # One-hour interval 170 | self.start_time = 1713776733 171 | self.end_time = 1713780333 172 | # Ten minutes interval 173 | self.short_interval_start_time = 1713776733 174 | self.short_interval_end_time = 1713777333 175 | 176 | self.formatted_scaphandre = f"{mock_formatted_scaphandre}" 177 | 178 | @patch("boagent.api.api.POWER_DATA_FILE_PATH", mock_formatted_scaphandre) 179 | def test_get_power_data(self): 180 | 181 | power_data = get_power_data(self.start_time, self.end_time) 182 | 183 | assert type(power_data) is dict 184 | assert "raw_data" in power_data 185 | assert "avg_power" in power_data 186 | assert type(power_data["avg_power"]) is float 187 | assert power_data["avg_power"] > 0 188 | 189 | @patch("boagent.api.api.POWER_DATA_FILE_PATH", mock_formatted_scaphandre) 190 | def test_get_power_data_with_short_time_interval(self): 191 | 192 | power_data = get_power_data( 193 | self.short_interval_start_time, self.short_interval_end_time 194 | ) 195 | 196 | assert type(power_data) is dict 197 | assert "raw_data" in power_data 198 | assert "avg_power" in power_data 199 | assert "warning" in power_data 200 | 201 | 202 | @patch("boagent.api.api.read_hardware_data") 203 | @patch("boagent.api.api.query_machine_impact_data") 204 | class GetMetricsNotVerboseNoScaphandreTest(TestCase): 205 | def setUp(self) -> None: 206 | self.time_workload_as_percentage = {"time_workload": 70.0} 207 | self.time_workload_as_list_of_dicts = { 208 | "time_workload": [ 209 | {"time_percentage": 50, "load_percentage": 0}, 210 | {"time_percentage": 25, "load_percentage": 60}, 211 | {"time_percentage": 25, "load_percentage": 100}, 212 | ] 213 | } 214 | self.start_time = 1710837858 215 | self.end_time = 1710841458 216 | self.verbose = False 217 | self.location = "FRA" 218 | self.measure_power = False 219 | self.lifetime = 5.0 220 | self.fetch_hardware = False 221 | 222 | with open(mock_boaviztapi_response_not_verbose, "r") as file: 
223 | self.boaviztapi_data = json.load(file) 224 | 225 | with open(mock_hardware_data, "r") as file: 226 | self.hardware_data = json.load(file) 227 | 228 | def test_get_metrics_with_time_workload_as_percentage( 229 | self, mocked_read_hardware_data, mocked_query_machine_impact_data 230 | ): 231 | 232 | metrics = get_metrics( 233 | self.start_time, 234 | self.end_time, 235 | self.verbose, 236 | self.location, 237 | self.measure_power, 238 | self.lifetime, 239 | self.fetch_hardware, 240 | self.time_workload_as_percentage, 241 | ) 242 | 243 | mocked_read_hardware_data.return_value = self.hardware_data 244 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 245 | 246 | assert type(metrics) is dict 247 | assert "emissions_calculation_data" in metrics 248 | assert "embedded_emissions" in metrics 249 | assert "embedded_abiotic_resources_depletion" in metrics 250 | assert "embedded_primary_energy" in metrics 251 | 252 | def test_get_metrics_with_time_workload_as_list_of_dicts( 253 | self, mocked_read_hardware_data, mocked_query_machine_impact_data 254 | ): 255 | 256 | metrics = get_metrics( 257 | self.start_time, 258 | self.end_time, 259 | self.verbose, 260 | self.location, 261 | self.measure_power, 262 | self.lifetime, 263 | self.fetch_hardware, 264 | self.time_workload_as_list_of_dicts, 265 | ) 266 | 267 | mocked_read_hardware_data.return_value = self.hardware_data 268 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 269 | 270 | assert type(metrics) is dict 271 | assert "emissions_calculation_data" in metrics 272 | assert "embedded_emissions" in metrics 273 | assert "embedded_abiotic_resources_depletion" in metrics 274 | assert "embedded_primary_energy" in metrics 275 | 276 | def test_get_metrics_with_default_location( 277 | self, mocked_read_hardware_data, mocked_query_machine_impact_data 278 | ): 279 | 280 | metrics = get_metrics( 281 | self.start_time, 282 | self.end_time, 283 | self.verbose, 284 | "EEE", 285 | 
self.measure_power, 286 | self.lifetime, 287 | self.fetch_hardware, 288 | self.time_workload_as_list_of_dicts, 289 | ) 290 | 291 | mocked_read_hardware_data.return_value = self.hardware_data 292 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 293 | 294 | assert type(metrics) is dict 295 | assert "location_warning" in metrics 296 | 297 | def test_get_metrics_with_no_set_location( 298 | self, mocked_read_hardware_data, mocked_query_machine_impact_data 299 | ): 300 | 301 | empty_location = "" 302 | 303 | metrics = get_metrics( 304 | self.start_time, 305 | self.end_time, 306 | self.verbose, 307 | empty_location, 308 | self.measure_power, 309 | self.lifetime, 310 | self.fetch_hardware, 311 | self.time_workload_as_list_of_dicts, 312 | ) 313 | 314 | mocked_read_hardware_data.return_value = self.hardware_data 315 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 316 | 317 | print(len(empty_location)) 318 | assert type(metrics) is dict 319 | assert "location_warning" in metrics 320 | 321 | 322 | @patch("boagent.api.api.read_hardware_data") 323 | @patch("boagent.api.api.query_machine_impact_data") 324 | class GetMetricsVerboseNoScaphandreTest(TestCase): 325 | def setUp(self) -> None: 326 | self.time_workload_as_percentage = {"time_workload": 70.0} 327 | self.time_workload_as_list_of_dicts = { 328 | "time_workload": [ 329 | {"time_percentage": 50, "load_percentage": 0}, 330 | {"time_percentage": 25, "load_percentage": 60}, 331 | {"time_percentage": 25, "load_percentage": 100}, 332 | ] 333 | } 334 | 335 | self.start_time = 1710837858 336 | self.end_time = 1710841458 337 | self.verbose = True 338 | self.location = "FRA" 339 | self.measure_power = False 340 | self.lifetime = 5.0 341 | self.fetch_hardware = False 342 | 343 | with open(mock_boaviztapi_response_verbose, "r") as file: 344 | self.boaviztapi_data = json.load(file) 345 | 346 | with open(mock_hardware_data, "r") as file: 347 | self.hardware_data = json.load(file) 348 | 349 | 
def test_get_metrics_verbose_with_time_workload_percentage( 350 | self, mocked_read_hardware_data, mocked_query_machine_impact_data 351 | ): 352 | 353 | metrics = get_metrics( 354 | self.start_time, 355 | self.end_time, 356 | self.verbose, 357 | self.location, 358 | self.measure_power, 359 | self.lifetime, 360 | self.fetch_hardware, 361 | self.time_workload_as_percentage, 362 | ) 363 | 364 | mocked_read_hardware_data.return_value = self.hardware_data 365 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 366 | 367 | assert type(metrics) is dict 368 | assert "emissions_calculation_data" in metrics 369 | assert "embedded_emissions" in metrics 370 | assert "embedded_abiotic_resources_depletion" in metrics 371 | assert "embedded_primary_energy" in metrics 372 | assert "raw_data" in metrics 373 | assert "electricity_carbon_intensity" in metrics 374 | 375 | def test_get_metrics_verbose_with_time_workload_as_list_of_dicts( 376 | self, mocked_read_hardware_data, mocked_query_machine_impact_data 377 | ): 378 | 379 | metrics = get_metrics( 380 | self.start_time, 381 | self.end_time, 382 | self.verbose, 383 | self.location, 384 | self.measure_power, 385 | self.lifetime, 386 | self.fetch_hardware, 387 | self.time_workload_as_list_of_dicts, 388 | ) 389 | 390 | mocked_read_hardware_data.return_value = self.hardware_data 391 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 392 | 393 | assert type(metrics) is dict 394 | assert "emissions_calculation_data" in metrics 395 | assert "embedded_emissions" in metrics 396 | assert "embedded_abiotic_resources_depletion" in metrics 397 | assert "embedded_primary_energy" in metrics 398 | assert "raw_data" in metrics 399 | assert "electricity_carbon_intensity" in metrics 400 | 401 | 402 | class GetMetricsVerboseWithScaphandreTest(TestCase): 403 | def setUp(self) -> None: 404 | self.start_time = 1710837858 405 | self.end_time = 1710841458 406 | self.verbose = True 407 | self.location = "FRA" 408 | 
self.measure_power = True 409 | self.lifetime = 5.0 410 | self.fetch_hardware = False 411 | 412 | with open(mock_boaviztapi_response_verbose, "r") as file: 413 | self.boaviztapi_data = json.load(file) 414 | 415 | with open(mock_formatted_scaphandre, "r") as file: 416 | power_data = {} 417 | power_data["raw_data"] = file.read() 418 | power_data["avg_power"] = 11.86 419 | self.power_data = power_data 420 | 421 | with open(mock_hardware_data, "r") as file: 422 | self.hardware_data = json.load(file) 423 | 424 | @patch("boagent.api.api.query_machine_impact_data") 425 | @patch("boagent.api.api.get_power_data") 426 | @patch("boagent.api.api.read_hardware_data") 427 | def test_get_metrics_verbose_with_scaphandre( 428 | self, 429 | mocked_read_hardware_data, 430 | mocked_query_machine_impact_data, 431 | mocked_power_data, 432 | ): 433 | 434 | metrics = get_metrics( 435 | self.start_time, 436 | self.end_time, 437 | self.verbose, 438 | self.location, 439 | self.measure_power, 440 | self.lifetime, 441 | self.fetch_hardware, 442 | ) 443 | 444 | mocked_read_hardware_data.return_value = self.hardware_data 445 | mocked_query_machine_impact_data.return_value = self.boaviztapi_data 446 | mocked_power_data.return_value = self.power_data 447 | 448 | assert type(metrics) is dict 449 | assert "total_operational_emissions" in metrics 450 | assert "total_operational_abiotic_resources_depletion" in metrics 451 | assert "total_operational_primary_energy_consumed" in metrics 452 | assert "start_time" in metrics 453 | assert "end_time" in metrics 454 | assert "average_power_measured" in metrics 455 | assert "raw_data" in metrics 456 | assert "electricity_carbon_intensity" in metrics 457 | assert "power_data" in metrics["raw_data"] 458 | 459 | 460 | loader = TestLoader() 461 | suite = TestSuite() 462 | 463 | suite.addTests(loader.loadTestsFromTestCase(ReadHardwareDataTest)) 464 | suite.addTests(loader.loadTestsFromTestCase(FormatUsageRequestTest)) 465 | 
# Suite assembly (continued): register the remaining TestCase classes so the
# whole module can be executed as one explicit TestSuite.
suite.addTests(loader.loadTestsFromTestCase(ComputeAvgConsumptionTest))
suite.addTests(loader.loadTestsFromTestCase(GetPowerDataTest))
suite.addTests(loader.loadTestsFromTestCase(GetMetricsNotVerboseNoScaphandreTest))
suite.addTests(loader.loadTestsFromTestCase(GetMetricsVerboseNoScaphandreTest))
suite.addTests(loader.loadTestsFromTestCase(GetMetricsVerboseWithScaphandreTest))
--------------------------------------------------------------------------------
/tests/hardware/test_hardwarecli.py:
--------------------------------------------------------------------------------
from json import load
from unittest import TestCase
from os.path import exists
from unittest.mock import Mock, patch
from hardware_cli import main
from click.testing import CliRunner
from tests.mocks.mocks import MockLshw, mock_lshw_data


# Need to use a mock of `lshw` run without `sudo` to reproduce the error case
# where hardware_cli is run without `sudo`.
# Module-level fixtures: parse the non-sudo lshw output once and wire it into
# mocks that stand in for boagent's real hardware probing helpers.
with open(mock_lshw_data) as lshw_json:
    lshw_data = load(lshw_json)

mocked_lshw = Mock()
mocked_lshw.return_value = MockLshw()
mocked_is_tool = Mock()
mocked_is_tool.return_value = True
mocked_serialized_lshw_output = Mock()
mocked_serialized_lshw_output.return_value = lshw_data


class HardwarecliTest(TestCase):
    # Tests for the hardware_cli entry point, driven through Click's CliRunner.

    @patch("hardware_cli.Lshw", mocked_lshw)
    def test_write_hardware_json_file_from_hardware_cli_with_output_file_flag_on(self):
        # --output-file must create the JSON file inside an isolated filesystem.

        runner = CliRunner()
        with runner.isolated_filesystem():
            result_file_path = "hardware_data.json"

            result = runner.invoke(main, ["--output-file", f"./{result_file_path}"])
            assert exists(f"./{result_file_path}") is True

        assert result.exit_code == 0

    @patch("hardware_cli.Lshw", mocked_lshw)
    def test_read_stdout_from_hardware_cli(self):
        # Without flags the CLI prints the probed components on stdout.

        runner = CliRunner()

        result = runner.invoke(main)

        assert result.exit_code == 0
        assert result.output.count("disk") >= 1
        assert result.output.count("ram") >= 1
        assert result.output.count("cpu") >= 1

    @patch("boagent.hardware.lshw.is_tool", mocked_is_tool)
    @patch(
        "boagent.hardware.lshw.serialized_lshw_output", mocked_serialized_lshw_output
    )
    def test_hardware_cli_returns_error_if_not_executed_with_sudo(self):
        # The non-sudo lshw fixture makes the CLI report the privilege error.
        runner = CliRunner()
        result = runner.invoke(main)
        assert (
            result.output.__contains__(
                "Hardware_cli was not executed with privileges, try `sudo ./hardware_cli.py`"
            )
        ) is True
--------------------------------------------------------------------------------
/tests/hardware/test_lshw.py:
--------------------------------------------------------------------------------
from unittest import TestCase
from boagent.hardware.lshw import Lshw
from unittest.mock import Mock, patch
from json import load

from tests.mocks.mocks import mock_sudo_lshw_data, \
mock_lshw_data_disks, mock_nvme_data

# Module-level fixtures: parse the sudo lshw and nvme-cli outputs once and
# expose them through mocks replacing the real shell calls in boagent.hardware.
with open(mock_sudo_lshw_data) as lshw_json:
    lshw_data = load(lshw_json)
with open(mock_nvme_data) as nvme_json:
    nvme_data = load(nvme_json)

mocked_is_tool = Mock()
mocked_is_tool.return_value = True
mocked_serialized_lshw_output = Mock()
mocked_serialized_lshw_output.return_value = lshw_data
mocked_serialized_nvme_output = Mock()
mocked_serialized_nvme_output.return_value = nvme_data


class LshwTest(TestCase):
    # Tests for boagent.hardware.lshw.Lshw against mocked lshw/nvme-cli output.
    # NOTE(review): the @patch decorators only cover the setUp call, i.e. the
    # Lshw() construction; later calls on self.lshw run unpatched -- presumably
    # intentional because parsing happens in __init__. Confirm against Lshw.

    @patch("boagent.hardware.lshw.is_tool", mocked_is_tool)
    @patch(
        "boagent.hardware.lshw.serialized_lshw_output", mocked_serialized_lshw_output
    )
    @patch(
        "boagent.hardware.lshw.serialized_nvme_output", mocked_serialized_nvme_output
    )
    def setUp(self):
        self.lshw = Lshw()
        self.cpu_data = self.lshw.cpus
        self.storage_data = self.lshw.disks
        self.ram_data = self.lshw.memories

    # --- get_hw_linux returns a list per component family -------------------

    def test_read_get_hw_linux_cpu(self):
        cpu_data = self.lshw.get_hw_linux("cpu")

        assert type(cpu_data) is list

    def test_read_get_hw_linux_storage(self):
        storage_data = self.lshw.get_hw_linux("storage")

        assert type(storage_data) is list

    def test_read_get_hw_linux_memory(self):
        memory_data = self.lshw.get_hw_linux("memory")

        assert type(memory_data) is list

    # --- CPU fields as parsed from the sudo lshw fixture --------------------

    def test_read_cpus_vendor(self):

        for cpu in self.cpu_data:
            assert "manufacturer" in cpu
            assert type(cpu["manufacturer"]) is str
            assert cpu["manufacturer"] == "Advanced Micro Devices [AMD]"

    def test_read_cpus_name(self):

        for cpu in self.cpu_data:
            assert "name" in cpu
            assert type(cpu["name"]) is str
            assert cpu["name"] == "AMD Ryzen 5 5600H with Radeon Graphics"

    def test_read_cpus_core_units(self):

        for cpu in self.cpu_data:
            assert "core_units" in cpu
            assert type(cpu["core_units"]) is int
            assert cpu["core_units"] == 6

    def test_read_cpus_units(self):

        for cpu in self.cpu_data:
            assert "units" in cpu
            assert type(cpu["units"]) is int
            assert cpu["units"] == 1

    # --- check_disk_vendor extracts the vendor token from a model string ----

    def test_read_check_disk_vendor_with_correct_model(self):

        model = "LENOVO 123456154"
        result = self.lshw.check_disk_vendor(model)

        assert result == "LENOVO"

    def test_read_check_disk_vendor_with_incorrect_model(self):

        model = "12345121 LENOVO"
        result = self.lshw.check_disk_vendor(model)

        assert result == "LENOVO"

    def test_read_check_disk_vendor_with_one_correct_string_in_model(self):

        model = "LENOVO"
        result = self.lshw.check_disk_vendor(model)

        assert result == "LENOVO"

    def test_read_check_disk_vendor_with_one_incorrect_string_in_model(self):

        model = "12345211"
        with self.assertRaises(Exception):
            self.lshw.check_disk_vendor(model)

    def test_read_check_disk_vendor_with_multiple_strings_in_model(self):

        model = "LENOVO 123456 MODEL"
        result = self.lshw.check_disk_vendor(model)

        assert result == "LENOVO"

    # --- disk fields and rotational-based type detection --------------------

    def test_read_disks_type(self):

        for disk in self.storage_data:
            assert "type" in disk
            assert type(disk["type"]) is str
            assert disk["type"] == "ssd"

    def test_read_disk_dev_name(self):

        for disk in self.storage_data:
            assert "logicalname" in disk
            assert type(disk["logicalname"]) is str
            assert disk["logicalname"] == "/dev/nvme0n1"

    @patch("boagent.hardware.lshw.Lshw.get_rotational_int")
    def test_check_disk_type_is_ssd(self, mocked_get_rotational):

        dev_logicalname = "/dev/ssdonsata"
        mocked_get_rotational.return_value = 0

        disk_type = self.lshw.get_disk_type(dev_logicalname)
        assert disk_type == "ssd"

    @patch("boagent.hardware.lshw.Lshw.get_rotational_int")
    def test_check_disk_type_is_hdd(self, mocked_get_rotational):

        dev_logicalname = "/dev/sdaex"
        mocked_get_rotational.return_value = 1

        disk_type = self.lshw.get_disk_type(dev_logicalname)
        assert disk_type == "hdd"

    def test_int_for_get_rotational_int_when_file_not_found(self):

        # 2 is the fallback returned when the sysfs rotational file is absent.
        dev_erroneous_name = "/dev/thisnameleadstonorotational"
        rotational_int = self.lshw.get_rotational_int(dev_erroneous_name)

        self.assertEqual(rotational_int, 2)

    def test_read_disk_type_when_dev_path_not_found(self):

        dev_erroneous_name = "/dev/thisnamedoesntexist"
        disk_type = self.lshw.get_disk_type(dev_erroneous_name)
        assert disk_type == "unknown"

    # --- tooling prerequisites raise explicit errors ------------------------

    @patch("boagent.hardware.lshw.is_tool")
    def test_check_lshw_is_installed_to_parse_hardware_data_and_raises_error_if_not(
        self, mocked_is_tool
    ):
        mocked_is_tool.return_value = False
        with self.assertRaises(Exception) as context:
            self.lshw.__init__()
        self.assertTrue("lshw does not seem to be installed" in str(context.exception))

    @patch("boagent.hardware.lshw.is_tool")
    def test_check_nvme_cli_is_installed_to_find_storage_and_raises_error_if_not(
        self, mocked_is_tool
    ):
        mocked_is_tool.return_value = False

        with open(mock_lshw_data_disks, "r") as file, self.assertRaises(
            Exception
        ) as nvme_cli_exception:
            data = load(file)
            self.lshw.find_storage(data)

        caught_exception = nvme_cli_exception.exception
        assert str(caught_exception) == "nvme-cli >= 1.0 does not seem to be installed"

    def test_read_disks_manufacturer(self):

        for disk in self.storage_data:
            assert "manufacturer" in disk
            assert type(disk["manufacturer"]) is str
            assert disk["manufacturer"] == "toshiba"

    def test_read_disks_capacity(self):

        for disk in self.storage_data:
            assert "capacity" in disk
            assert type(disk["capacity"]) is int
            assert disk["capacity"] == 238

    def test_read_disks_units(self):

        for disk in self.storage_data:
            assert "units" in disk
            assert type(disk["units"]) is int
            assert disk["units"] == 1

    # --- RAM fields ---------------------------------------------------------

    def test_read_ram_manufacturer(self):

        for ram in self.ram_data:
            assert "manufacturer" in ram
            assert type(ram["manufacturer"]) is str
            assert ram["manufacturer"] == "Samsung"

    def test_read_ram_capacity(self):

        for ram in self.ram_data:
            assert "capacity" in ram
            assert type(ram["capacity"]) is int
            assert ram["capacity"] == 8

    def test_read_ram_units(self):

        for ram in self.ram_data:
            assert "units" in ram
            assert type(ram["units"]) is int
            assert ram["units"] == 1
--------------------------------------------------------------------------------
/tests/mocks/boaviztapi_response_not_verbose.json:
--------------------------------------------------------------------------------
{
    "impacts": {
        "gwp": {
            "unit": "kgCO2eq",
            "description": "Total climate change",
            "embedded": {
                "value": 1200,
                "min": 759.1,
                "max": 1949,
                "warnings": [
                    "End of life is not included in the calculation"
                ]
            },
            "use": {
                "value": 1030,
                "min": 1030,
                "max": 1030
            }
        },
        "adp": {
            "unit": "kgSbeq",
            "description": "Use of minerals and fossil ressources",
            "embedded": {
                "value": 0.155,
                "min": 0.1128,
                "max": 0.2077,
                "warnings": [
                    "End of life is not included in the calculation"
                ]
            },
            "use": {
                "value": 0.0005107,
                "min": 0.0005107,
                "max": 0.0005107
            }
        },
        "pe": {
            "unit": "MJ",
            "description": "Consumption of primary energy",
            "embedded": {
                "value": 15000,
                "min": 10080,
                "max": 25260,
                "warnings": [
                    "End of life is not
included in the calculation" 46 | ] 47 | }, 48 | "use": { 49 | "value": 118700, 50 | "min": 118700, 51 | "max": 118700 52 | } 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /tests/mocks/boaviztapi_response_verbose.json: -------------------------------------------------------------------------------- 1 | { 2 | "impacts": { 3 | "gwp": { 4 | "unit": "kgCO2eq", 5 | "description": "Total climate change", 6 | "embedded": { 7 | "value": 1176, 8 | "min": 1112, 9 | "max": 1176, 10 | "warnings": [ 11 | "End of life is not included in the calculation" 12 | ] 13 | }, 14 | "use": { 15 | "value": 10000, 16 | "min": 526.5, 17 | "max": 34540 18 | } 19 | }, 20 | "adp": { 21 | "unit": "kgSbeq", 22 | "description": "Use of minerals and fossil ressources", 23 | "embedded": { 24 | "value": 0.1492, 25 | "min": 0.1492, 26 | "max": 0.1567, 27 | "warnings": [ 28 | "End of life is not included in the calculation" 29 | ] 30 | }, 31 | "use": { 32 | "value": 0.0016, 33 | "min": 0.0003031, 34 | "max": 0.008107 35 | } 36 | }, 37 | "pe": { 38 | "unit": "MJ", 39 | "description": "Consumption of primary energy", 40 | "embedded": { 41 | "value": 15680, 42 | "min": 14700, 43 | "max": 15680, 44 | "warnings": [ 45 | "End of life is not included in the calculation" 46 | ] 47 | }, 48 | "use": { 49 | "value": 300000, 50 | "min": 297.6, 51 | "max": 14290000, 52 | "warnings": [ 53 | "Uncertainty from technical characteristics is very important. 
Results should be interpreted with caution (see min and max values)" 54 | ] 55 | } 56 | } 57 | }, 58 | "verbose": { 59 | "duration": { 60 | "value": 35040, 61 | "unit": "hours" 62 | }, 63 | "ASSEMBLY-1": { 64 | "impacts": { 65 | "gwp": { 66 | "unit": "kgCO2eq", 67 | "description": "Total climate change", 68 | "embedded": { 69 | "value": 6.68, 70 | "min": 6.68, 71 | "max": 6.68, 72 | "warnings": [ 73 | "End of life is not included in the calculation" 74 | ] 75 | }, 76 | "use": "not implemented" 77 | }, 78 | "adp": { 79 | "unit": "kgSbeq", 80 | "description": "Use of minerals and fossil ressources", 81 | "embedded": { 82 | "value": 1.41e-06, 83 | "min": 1.41e-06, 84 | "max": 1.41e-06, 85 | "warnings": [ 86 | "End of life is not included in the calculation" 87 | ] 88 | }, 89 | "use": "not implemented" 90 | }, 91 | "pe": { 92 | "unit": "MJ", 93 | "description": "Consumption of primary energy", 94 | "embedded": { 95 | "value": 68.6, 96 | "min": 68.6, 97 | "max": 68.6, 98 | "warnings": [ 99 | "End of life is not included in the calculation" 100 | ] 101 | }, 102 | "use": "not implemented" 103 | } 104 | }, 105 | "units": { 106 | "value": 1, 107 | "status": "ARCHETYPE", 108 | "min": 1, 109 | "max": 1 110 | }, 111 | "duration": { 112 | "value": 35040, 113 | "unit": "hours" 114 | } 115 | }, 116 | "CPU-1": { 117 | "impacts": { 118 | "gwp": { 119 | "unit": "kgCO2eq", 120 | "description": "Total climate change", 121 | "embedded": { 122 | "value": 250, 123 | "min": 250, 124 | "max": 250, 125 | "warnings": [ 126 | "End of life is not included in the calculation" 127 | ] 128 | }, 129 | "use": { 130 | "value": 12000, 131 | "min": 701.8, 132 | "max": 34530 133 | } 134 | }, 135 | "adp": { 136 | "unit": "kgSbeq", 137 | "description": "Use of minerals and fossil ressources", 138 | "embedded": { 139 | "value": 0.04087, 140 | "min": 0.04087, 141 | "max": 0.04087, 142 | "warnings": [ 143 | "End of life is not included in the calculation" 144 | ] 145 | }, 146 | "use": { 147 | "value": 
0.002, 148 | "min": 0.000404, 149 | "max": 0.008103 150 | } 151 | }, 152 | "pe": { 153 | "unit": "MJ", 154 | "description": "Consumption of primary energy", 155 | "embedded": { 156 | "value": 3428, 157 | "min": 3428, 158 | "max": 3428, 159 | "warnings": [ 160 | "End of life is not included in the calculation" 161 | ] 162 | }, 163 | "use": { 164 | "value": 400000, 165 | "min": 396.7, 166 | "max": 14280000, 167 | "warnings": [ 168 | "Uncertainty from technical characteristics is very important. Results should be interpreted with caution (see min and max values)" 169 | ] 170 | } 171 | } 172 | }, 173 | "units": { 174 | "value": 2, 175 | "status": "INPUT" 176 | }, 177 | "core_units": { 178 | "value": 24, 179 | "status": "INPUT" 180 | }, 181 | "die_size_per_core": { 182 | "value": 245, 183 | "status": "INPUT", 184 | "unit": "mm2" 185 | }, 186 | "die_size": { 187 | "value": 5880, 188 | "status": "COMPLETED", 189 | "unit": "mm2", 190 | "source": "die_size_per_core*core_units", 191 | "min": 5880, 192 | "max": 5880 193 | }, 194 | "duration": { 195 | "value": 35040, 196 | "unit": "hours" 197 | }, 198 | "avg_power": { 199 | "value": 435.4, 200 | "status": "COMPLETED", 201 | "unit": "W", 202 | "min": 435.4, 203 | "max": 435.4 204 | }, 205 | "time_workload": { 206 | "value": 70, 207 | "status": "INPUT", 208 | "unit": "%" 209 | }, 210 | "usage_location": { 211 | "value": "EEE", 212 | "status": "DEFAULT", 213 | "unit": "CodSP3 - NCS Country Codes - NATO" 214 | }, 215 | "use_time_ratio": { 216 | "value": 1, 217 | "status": "ARCHETYPE", 218 | "unit": "/1", 219 | "min": 1, 220 | "max": 1 221 | }, 222 | "hours_life_time": { 223 | "value": 35040, 224 | "status": "COMPLETED", 225 | "unit": "hours", 226 | "source": "from device", 227 | "min": 35040, 228 | "max": 35040 229 | }, 230 | "params": { 231 | "value": { 232 | "a": 171.2, 233 | "b": 0.0354, 234 | "c": 36.89, 235 | "d": -10.13 236 | }, 237 | "status": "ARCHETYPE" 238 | }, 239 | "gwp_factor": { 240 | "value": 0.38, 241 | "status": 
"DEFAULT", 242 | "unit": "kg CO2eq/kWh", 243 | "source": "https://www.sciencedirect.com/science/article/pii/S0306261921012149", 244 | "min": 0.023, 245 | "max": 1.13161 246 | }, 247 | "adp_factor": { 248 | "value": 6.42317e-08, 249 | "status": "DEFAULT", 250 | "unit": "kg Sbeq/kWh", 251 | "source": "ADEME Base IMPACTS ®", 252 | "min": 1.324e-08, 253 | "max": 2.65575e-07 254 | }, 255 | "pe_factor": { 256 | "value": 12.873, 257 | "status": "DEFAULT", 258 | "unit": "MJ/kWh", 259 | "source": "ADPf / (1-%renewable_energy)", 260 | "min": 0.013, 261 | "max": 468.15 262 | } 263 | }, 264 | "RAM-1": { 265 | "impacts": { 266 | "gwp": { 267 | "unit": "kgCO2eq", 268 | "description": "Total climate change", 269 | "embedded": { 270 | "value": 534.6, 271 | "min": 534.6, 272 | "max": 534.6, 273 | "warnings": [ 274 | "End of life is not included in the calculation" 275 | ] 276 | }, 277 | "use": { 278 | "value": 17000, 279 | "min": 1055, 280 | "max": 51890 281 | } 282 | }, 283 | "adp": { 284 | "unit": "kgSbeq", 285 | "description": "Use of minerals and fossil ressources", 286 | "embedded": { 287 | "value": 0.0338, 288 | "min": 0.0338, 289 | "max": 0.0338, 290 | "warnings": [ 291 | "End of life is not included in the calculation" 292 | ] 293 | }, 294 | "use": { 295 | "value": 0.003, 296 | "min": 0.0006071, 297 | "max": 0.01218 298 | } 299 | }, 300 | "pe": { 301 | "unit": "MJ", 302 | "description": "Consumption of primary energy", 303 | "embedded": { 304 | "value": 6745, 305 | "min": 6745, 306 | "max": 6745, 307 | "warnings": [ 308 | "End of life is not included in the calculation" 309 | ] 310 | }, 311 | "use": { 312 | "value": 1000000, 313 | "min": 596.1, 314 | "max": 21470000 315 | } 316 | } 317 | }, 318 | "units": { 319 | "value": 12, 320 | "status": "INPUT" 321 | }, 322 | "capacity": { 323 | "value": 32, 324 | "status": "INPUT", 325 | "unit": "GB" 326 | }, 327 | "density": { 328 | "value": 1.79, 329 | "status": "INPUT", 330 | "unit": "GB/cm2" 331 | }, 332 | "duration": { 333 | 
"value": 35040, 334 | "unit": "hours" 335 | }, 336 | "avg_power": { 337 | "value": 109.05599999999998, 338 | "status": "COMPLETED", 339 | "unit": "W", 340 | "min": 109.05599999999998, 341 | "max": 109.05599999999998 342 | }, 343 | "time_workload": { 344 | "value": 70, 345 | "status": "INPUT", 346 | "unit": "%" 347 | }, 348 | "usage_location": { 349 | "value": "EEE", 350 | "status": "DEFAULT", 351 | "unit": "CodSP3 - NCS Country Codes - NATO" 352 | }, 353 | "use_time_ratio": { 354 | "value": 1, 355 | "status": "ARCHETYPE", 356 | "unit": "/1", 357 | "min": 1, 358 | "max": 1 359 | }, 360 | "hours_life_time": { 361 | "value": 35040, 362 | "status": "COMPLETED", 363 | "unit": "hours", 364 | "source": "from device", 365 | "min": 35040, 366 | "max": 35040 367 | }, 368 | "params": { 369 | "value": { 370 | "a": 9.088 371 | }, 372 | "status": "COMPLETED", 373 | "source": "(ram_electrical_factor_per_go : 0.284) * (ram_capacity: 32) " 374 | }, 375 | "gwp_factor": { 376 | "value": 0.38, 377 | "status": "DEFAULT", 378 | "unit": "kg CO2eq/kWh", 379 | "source": "https://www.sciencedirect.com/science/article/pii/S0306261921012149", 380 | "min": 0.023, 381 | "max": 1.13161 382 | }, 383 | "adp_factor": { 384 | "value": 6.42317e-08, 385 | "status": "DEFAULT", 386 | "unit": "kg Sbeq/kWh", 387 | "source": "ADEME Base IMPACTS ®", 388 | "min": 1.324e-08, 389 | "max": 2.65575e-07 390 | }, 391 | "pe_factor": { 392 | "value": 12.873, 393 | "status": "DEFAULT", 394 | "unit": "MJ/kWh", 395 | "source": "ADPf / (1-%renewable_energy)", 396 | "min": 0.013, 397 | "max": 468.15 398 | } 399 | }, 400 | "SSD-1": { 401 | "impacts": { 402 | "gwp": { 403 | "unit": "kgCO2eq", 404 | "description": "Total climate change", 405 | "embedded": { 406 | "value": 23.73, 407 | "min": 23.73, 408 | "max": 23.73, 409 | "warnings": [ 410 | "End of life is not included in the calculation" 411 | ] 412 | }, 413 | "use": "not implemented" 414 | }, 415 | "adp": { 416 | "unit": "kgSbeq", 417 | "description": "Use of minerals 
and fossil ressources", 418 | "embedded": { 419 | "value": 0.001061, 420 | "min": 0.001061, 421 | "max": 0.001061, 422 | "warnings": [ 423 | "End of life is not included in the calculation" 424 | ] 425 | }, 426 | "use": "not implemented" 427 | }, 428 | "pe": { 429 | "unit": "MJ", 430 | "description": "Consumption of primary energy", 431 | "embedded": { 432 | "value": 292.7, 433 | "min": 292.7, 434 | "max": 292.7, 435 | "warnings": [ 436 | "End of life is not included in the calculation" 437 | ] 438 | }, 439 | "use": "not implemented" 440 | } 441 | }, 442 | "units": { 443 | "value": 1, 444 | "status": "INPUT" 445 | }, 446 | "capacity": { 447 | "value": 400, 448 | "status": "INPUT", 449 | "unit": "GB" 450 | }, 451 | "density": { 452 | "value": 50.6, 453 | "status": "INPUT", 454 | "unit": "GB/cm2" 455 | }, 456 | "duration": { 457 | "value": 35040, 458 | "unit": "hours" 459 | } 460 | }, 461 | "POWER_SUPPLY-1": { 462 | "impacts": { 463 | "gwp": { 464 | "unit": "kgCO2eq", 465 | "description": "Total climate change", 466 | "embedded": { 467 | "value": 145.3, 468 | "min": 145.3, 469 | "max": 145.3, 470 | "warnings": [ 471 | "End of life is not included in the calculation" 472 | ] 473 | }, 474 | "use": "not implemented" 475 | }, 476 | "adp": { 477 | "unit": "kgSbeq", 478 | "description": "Use of minerals and fossil ressources", 479 | "embedded": { 480 | "value": 0.04963, 481 | "min": 0.04963, 482 | "max": 0.04963, 483 | "warnings": [ 484 | "End of life is not included in the calculation" 485 | ] 486 | }, 487 | "use": "not implemented" 488 | }, 489 | "pe": { 490 | "unit": "MJ", 491 | "description": "Consumption of primary energy", 492 | "embedded": { 493 | "value": 2105, 494 | "min": 2105, 495 | "max": 2105, 496 | "warnings": [ 497 | "End of life is not included in the calculation" 498 | ] 499 | }, 500 | "use": "not implemented" 501 | } 502 | }, 503 | "units": { 504 | "value": 2, 505 | "status": "INPUT" 506 | }, 507 | "unit_weight": { 508 | "value": 2.99, 509 | "status": 
"INPUT", 510 | "unit": "kg" 511 | }, 512 | "duration": { 513 | "value": 35040, 514 | "unit": "hours" 515 | } 516 | }, 517 | "CASE-1": { 518 | "impacts": { 519 | "gwp": { 520 | "unit": "kgCO2eq", 521 | "description": "Total climate change", 522 | "embedded": { 523 | "value": 150, 524 | "min": 85.9, 525 | "max": 150, 526 | "warnings": [ 527 | "End of life is not included in the calculation" 528 | ] 529 | }, 530 | "use": "not implemented" 531 | }, 532 | "adp": { 533 | "unit": "kgSbeq", 534 | "description": "Use of minerals and fossil ressources", 535 | "embedded": { 536 | "value": 0.0202, 537 | "min": 0.0202, 538 | "max": 0.02767, 539 | "warnings": [ 540 | "End of life is not included in the calculation" 541 | ] 542 | }, 543 | "use": "not implemented" 544 | }, 545 | "pe": { 546 | "unit": "MJ", 547 | "description": "Consumption of primary energy", 548 | "embedded": { 549 | "value": 2200, 550 | "min": 1229, 551 | "max": 2200, 552 | "warnings": [ 553 | "End of life is not included in the calculation" 554 | ] 555 | }, 556 | "use": "not implemented" 557 | } 558 | }, 559 | "units": { 560 | "value": 1, 561 | "status": "ARCHETYPE", 562 | "min": 1, 563 | "max": 1 564 | }, 565 | "case_type": { 566 | "value": "rack", 567 | "status": "ARCHETYPE" 568 | }, 569 | "duration": { 570 | "value": 35040, 571 | "unit": "hours" 572 | } 573 | }, 574 | "MOTHERBOARD-1": { 575 | "impacts": { 576 | "gwp": { 577 | "unit": "kgCO2eq", 578 | "description": "Total climate change", 579 | "embedded": { 580 | "value": 66.1, 581 | "min": 66.1, 582 | "max": 66.1, 583 | "warnings": [ 584 | "End of life is not included in the calculation" 585 | ] 586 | }, 587 | "use": "not implemented" 588 | }, 589 | "adp": { 590 | "unit": "kgSbeq", 591 | "description": "Use of minerals and fossil ressources", 592 | "embedded": { 593 | "value": 0.00369, 594 | "min": 0.00369, 595 | "max": 0.00369, 596 | "warnings": [ 597 | "End of life is not included in the calculation" 598 | ] 599 | }, 600 | "use": "not implemented" 601 | 
}, 602 | "pe": { 603 | "unit": "MJ", 604 | "description": "Consumption of primary energy", 605 | "embedded": { 606 | "value": 836, 607 | "min": 836, 608 | "max": 836, 609 | "warnings": [ 610 | "End of life is not included in the calculation" 611 | ] 612 | }, 613 | "use": "not implemented" 614 | } 615 | }, 616 | "units": { 617 | "value": 1, 618 | "status": "ARCHETYPE", 619 | "min": 1, 620 | "max": 1 621 | }, 622 | "duration": { 623 | "value": 35040, 624 | "unit": "hours" 625 | } 626 | }, 627 | "avg_power": { 628 | "value": 724.1264799999999, 629 | "status": "COMPLETED", 630 | "unit": "W", 631 | "min": 653.3471999999998, 632 | "max": 871.1295999999999 633 | }, 634 | "time_workload": { 635 | "value": 70, 636 | "status": "INPUT", 637 | "unit": "%" 638 | }, 639 | "usage_location": { 640 | "value": "FRA", 641 | "status": "INPUT", 642 | "unit": "CodSP3 - NCS Country Codes - NATO" 643 | }, 644 | "use_time_ratio": { 645 | "value": 1, 646 | "status": "ARCHETYPE", 647 | "unit": "/1", 648 | "min": 1, 649 | "max": 1 650 | }, 651 | "hours_life_time": { 652 | "value": 35040, 653 | "status": "COMPLETED", 654 | "unit": "hours", 655 | "source": "from device", 656 | "min": 35040, 657 | "max": 35040 658 | }, 659 | "other_consumption_ratio": { 660 | "value": 0.33, 661 | "status": "ARCHETYPE", 662 | "unit": "ratio /1", 663 | "min": 0.2, 664 | "max": 0.6 665 | }, 666 | "gwp_factor": { 667 | "value": 0.38, 668 | "status": "DEFAULT", 669 | "unit": "kg CO2eq/kWh", 670 | "source": "https://www.sciencedirect.com/science/article/pii/S0306261921012149", 671 | "min": 0.023, 672 | "max": 1.13161 673 | }, 674 | "adp_factor": { 675 | "value": 6.42317e-08, 676 | "status": "DEFAULT", 677 | "unit": "kg Sbeq/kWh", 678 | "source": "ADEME Base IMPACTS ®", 679 | "min": 1.324e-08, 680 | "max": 2.65575e-07 681 | }, 682 | "pe_factor": { 683 | "value": 12.873, 684 | "status": "DEFAULT", 685 | "unit": "MJ/kWh", 686 | "source": "ADPf / (1-%renewable_energy)", 687 | "min": 0.013, 688 | "max": 468.15 689 | }, 690 
| "units": { 691 | "value": 1, 692 | "status": "ARCHETYPE", 693 | "min": 1, 694 | "max": 1 695 | } 696 | } 697 | } 698 | -------------------------------------------------------------------------------- /tests/mocks/get_metrics_not_verbose.json: -------------------------------------------------------------------------------- 1 | {"emissions_calculation_data": {"energy_consumption_warning": "The time window is lower than one hour, but the energy consumption estimate is in Watt.Hour. So this is an extrapolation of the power usage profile on one hour. Be careful with this data. "}, "location_warning": {"warning_message": "Location is either set as default, or has not been set, and is therefore set to the default BoaviztAPI location. Be aware that the presented results can be drastically different due to location. It is recommended that you set the asset location with the corresponding country code, see: https://doc.api.boavizta.org/Explanations/usage/countries/"}, "total_operational_emissions": {"value": {"value": 40.72, "min": 40.72, "max": 40.72}, "description": "GHG emissions related to usage, from start_time to end_time.", "type": "gauge", "unit": "kg CO2eq", "long_unit": "kilograms CO2 equivalent"}, "total_operational_abiotic_resources_depletion": {"value": {"value": 6.883e-06, "min": 6.883e-06, "max": 6.883e-06}, "description": "Abiotic Resources Depletion (minerals & metals, ADPe) due to the usage phase.", "type": "gauge", "unit": "kgSbeq", "long_unit": "kilograms Antimony equivalent"}, "total_operational_primary_energy_consumed": {"value": {"value": 1379.0, "min": 1379.0, "max": 1379.0}, "description": "Primary Energy consumed due to the usage phase.", "type": "gauge", "unit": "MJ", "long_unit": "Mega Joules"}, "start_time": {"value": 1714047030.8907304, "description": "Start time for the evaluation, in timestamp format (seconds since 1970)", "type": "counter", "unit": "s", "long_unit": "seconds"}, "end_time": {"value": 1714050630.8907304, "description": "End 
time for the evaluation, in timestamp format (seconds since 1970)", "type": "counter", "unit": "s", "long_unit": "seconds"}, "average_power_measured": {"value": 3.0582353333333336, "description": "Average power measured from start_time to end_time", "type": "gauge", "unit": "W", "long_unit": "Watts"}, "embedded_emissions": {"value": 900.0, "description": "Embedded carbon emissions (manufacturing phase)", "type": "gauge", "unit": "kg CO2eq", "long_unit": "kilograms CO2 equivalent"}, "embedded_abiotic_resources_depletion": {"value": 0.14, "description": "Embedded abiotic ressources consumed (manufacturing phase)", "type": "gauge", "unit": "kg Sbeq", "long_unit": "kilograms ADP equivalent"}, "embedded_primary_energy": {"value": 13000.0, "description": "Embedded primary energy consumed (manufacturing phase)", "type": "gauge", "unit": "MJ", "long_unit": "Mega Joules"}} -------------------------------------------------------------------------------- /tests/mocks/hardware_data.json: -------------------------------------------------------------------------------- 1 | { 2 | "disks": [ 3 | { 4 | "capacity": 476, 5 | "manufacturer": "samsung", 6 | "type": "ssd" 7 | } 8 | ], 9 | "cpus": [ 10 | { 11 | "vendor": "AuthenticAMD", 12 | "name": "AMD Ryzen 5 5600H with Radeon Graphics \u0000", 13 | "microarch": [ 14 | "x86_64", 15 | "" 16 | ], 17 | "vector_instructions": { 18 | "sse": "Yes", 19 | "sse2": "Yes", 20 | "sse3": "Yes", 21 | "ssse3": "Yes", 22 | "sse4.1": "Yes", 23 | "sse4.2": "Yes", 24 | "sse4a": "Yes", 25 | "avx": "Yes", 26 | "avx2": "--", 27 | "bmi1": "--", 28 | "bmi2": "--" 29 | }, 30 | "cpu_info": { 31 | "python_version": "3.10.12.final.0 (64 bit)", 32 | "cpuinfo_version": [ 33 | 9, 34 | 0, 35 | 0 36 | ], 37 | "cpuinfo_version_string": "9.0.0", 38 | "arch": "X86_64", 39 | "bits": 64, 40 | "count": 12, 41 | "arch_string_raw": "x86_64", 42 | "vendor_id_raw": "AuthenticAMD", 43 | "brand_raw": "AMD Ryzen 5 5600H with Radeon Graphics", 44 | "hz_advertised_friendly": 
"4.2383 GHz", 45 | "hz_actual_friendly": "4.2383 GHz", 46 | "hz_advertised": [ 47 | 4238303000, 48 | 0 49 | ], 50 | "hz_actual": [ 51 | 4238303000, 52 | 0 53 | ], 54 | "model": 80, 55 | "family": 25, 56 | "flags": [ 57 | "3dnowprefetch", 58 | "abm", 59 | "adx", 60 | "aes", 61 | "aperfmperf", 62 | "apic", 63 | "arat", 64 | "avic", 65 | "avx", 66 | "avx2", 67 | "bmi1", 68 | "bmi2", 69 | "bpext", 70 | "cat_l3", 71 | "cdp_l3", 72 | "clflush", 73 | "clflushopt", 74 | "clwb", 75 | "clzero", 76 | "cmov", 77 | "cmp_legacy", 78 | "constant_tsc", 79 | "cpb", 80 | "cppc", 81 | "cpuid", 82 | "cqm", 83 | "cqm_llc", 84 | "cqm_mbm_local", 85 | "cqm_mbm_total", 86 | "cqm_occup_llc", 87 | "cr8_legacy", 88 | "cx16", 89 | "cx8", 90 | "dbx", 91 | "de", 92 | "decodeassists", 93 | "erms", 94 | "extapic", 95 | "extd_apicid", 96 | "f16c", 97 | "flushbyasid", 98 | "fma", 99 | "fpu", 100 | "fsgsbase", 101 | "fsrm", 102 | "fxsr", 103 | "fxsr_opt", 104 | "ht", 105 | "hw_pstate", 106 | "ibpb", 107 | "ibrs", 108 | "ibs", 109 | "invpcid", 110 | "irperf", 111 | "lahf_lm", 112 | "lbrv", 113 | "lm", 114 | "mba", 115 | "mca", 116 | "mce", 117 | "misalignsse", 118 | "mmx", 119 | "mmxext", 120 | "monitor", 121 | "movbe", 122 | "msr", 123 | "mtrr", 124 | "mwaitx", 125 | "nonstop_tsc", 126 | "nopl", 127 | "npt", 128 | "nrip_save", 129 | "nx", 130 | "ospke", 131 | "osvw", 132 | "osxsave", 133 | "overflow_recov", 134 | "pae", 135 | "pat", 136 | "pausefilter", 137 | "pci_l2i", 138 | "pclmulqdq", 139 | "pdpe1gb", 140 | "perfctr_core", 141 | "perfctr_llc", 142 | "perfctr_nb", 143 | "pfthreshold", 144 | "pge", 145 | "pku", 146 | "pni", 147 | "popcnt", 148 | "pqe", 149 | "pqm", 150 | "pse", 151 | "pse36", 152 | "rapl", 153 | "rdpid", 154 | "rdpru", 155 | "rdrand", 156 | "rdrnd", 157 | "rdseed", 158 | "rdt_a", 159 | "rdtscp", 160 | "rep_good", 161 | "sep", 162 | "sha", 163 | "sha_ni", 164 | "skinit", 165 | "smap", 166 | "smca", 167 | "smep", 168 | "ssbd", 169 | "sse", 170 | "sse2", 171 | "sse4_1", 172 | 
"""Shared fixtures for the test suite.

Exposes absolute filesystem paths to the JSON mock files that live next to
this module, plus ``MockLshw``, a hand-built stand-in for the hardware/lshw
reader. Paths are resolved relative to this file so tests work regardless of
the current working directory.
"""

import os

# Directory containing this file (tests/mocks); every fixture path is
# anchored here so tests are CWD-independent.
current_dir = os.path.dirname(__file__)

# NOTE(review): the "../mocks/..." segments are redundant (current_dir already
# *is* the mocks directory) but are kept byte-identical so the constant values
# do not change for any caller comparing path strings.
mock_power_data = os.path.join(current_dir, "../mocks/power_data.json")
mock_hardware_data = os.path.join(current_dir, "../mocks/hardware_data.json")
mock_boaviztapi_response_not_verbose = os.path.join(
    current_dir, "../mocks/boaviztapi_response_not_verbose.json"
)
mock_boaviztapi_response_verbose = os.path.join(
    current_dir, "../mocks/boaviztapi_response_verbose.json"
)
mock_formatted_scaphandre = os.path.join(
    current_dir, "../mocks/formatted_power_data_one_hour.json"
)
mock_formatted_scaphandre_with_processes = os.path.join(
    current_dir, "../mocks/formatted_scaphandre.json"
)
mock_get_metrics_not_verbose = os.path.join(
    current_dir, "../mocks/get_metrics_not_verbose.json"
)
mock_get_metrics_verbose = os.path.join(
    current_dir, "../mocks/get_metrics_verbose.json"
)
mock_get_metrics_verbose_no_hdd = os.path.join(
    current_dir, "../mocks/get_metrics_verbose_no_hdd.json"
)
mock_lshw_data = os.path.join(current_dir, "../mocks/lshw_data.json")
mock_lshw_data_disks = os.path.join(
    current_dir, "../mocks/sudo_lshw_data_disks.json"
)
mock_sudo_lshw_data = os.path.join(current_dir, "../mocks/sudo_lshw_data.json")
mock_nvme_data = os.path.join(current_dir, "../mocks/nvme_data_sudo.json")
hardware_cli = os.path.join(current_dir, "../../boagent/hardware/hardware_cli.py")
hardware_data = os.path.join(current_dir, "../../boagent/api/hardware_data.json")


class MockLshw:
    """Static stand-in for the lshw hardware reader.

    Mimics the attributes the real reader exposes (``cpus``, ``memories``,
    ``disks``) with fixed, known values so tests can assert against them
    without touching actual hardware.
    """

    def __init__(self):
        # One 6-core AMD CPU, matching the hardware_data.json fixture family.
        self.cpus = {
            "cpus": [
                {
                    "units": 1,
                    "name": "AMD Ryzen 5 5600H with Radeon Graphics",
                    "manufacturer": "Advanced Micro Devices [AMD]",
                    "core_units": 6,
                }
            ]
        }
        # Two RAM sticks from different vendors (capacities in GB).
        self.memories = {
            "rams": [
                {"units": 1, "manufacturer": "Samsung", "capacity": 8},
                {"units": 1, "manufacturer": "Kingston", "capacity": 16},
            ]
        }
        # Single NVMe SSD (capacity in GB).
        self.disks = {
            "disks": [
                {
                    "units": 1,
                    "logicalname": "/dev/nvme0n1",
                    "manufacturer": "samsung",
                    "type": "ssd",
                    "capacity": 476,
                }
            ],
        }
-------------------------------------------------------------------------------- /tests/mocks/nvme_data_sudo.json: -------------------------------------------------------------------------------- 1 | { 2 | "Devices" : [ 3 | { 4 | "NameSpace" : 1, 5 | "DevicePath" : "/dev/nvme0n1", 6 | "Firmware" : "5108AGLA", 7 | "Index" : 0, 8 | "ModelNumber" : "KXG6AZNV256G TOSHIBA", 9 | "ProductName" : "Non-Volatile memory controller: Toshiba Corporation XG6 NVMe SSD Controller Satellite Pro", 10 | "SerialNumber" : "39NS11RYTMCQ", 11 | "UsedBytes" : 256060514304, 12 | "MaximumLBA" : 500118192, 13 | "PhysicalSize" : 256060514304, 14 | "SectorSize" : 512 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /tests/mocks/sudo_lshw_data_disks.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "pci:3", 3 | "class": "bridge", 4 | "claimed": true, 5 | "handle": "PCIBUS:0000:04", 6 | "description": "PCI bridge", 7 | "product": "Renoir/Cezanne PCIe GPP Bridge", 8 | "vendor": "Advanced Micro Devices, Inc. 
[AMD]", 9 | "physid": "2.2", 10 | "businfo": "pci@0000:00:02.2", 11 | "version": "00", 12 | "width": 32, 13 | "clock": 33000000, 14 | "configuration": { 15 | "driver": "nvme" 16 | }, 17 | "capabilities": { 18 | "pci": true, 19 | "pm": "Power Management", 20 | "pciexpress": "PCI Express", 21 | "msi": "Message Signalled Interrupts", 22 | "ht": "HyperTransport", 23 | "normal_decode": true, 24 | "bus_master": "bus mastering", 25 | "cap_list": "PCI capabilities listing" 26 | }, 27 | "children": [ 28 | { 29 | "id": "nvme", 30 | "class": "storage", 31 | "claimed": true, 32 | "handle": "PCI:0000:04:00.0", 33 | "description": "NVMe device", 34 | "product": "SAMSUNG MZALQ512HBLU-00BL2", 35 | "vendor": "Samsung Electronics Co Ltd", 36 | "physid": "0", 37 | "businfo": "pci@0000:04:00.0", 38 | "logicalname": "/dev/nvme0", 39 | "version": "7L2QFXM7", 40 | "serial": "S65DNE2R576016", 41 | "width": 64, 42 | "clock": 33000000, 43 | "configuration": { 44 | "driver": "nvme", 45 | "latency": "0", 46 | "nqn": "nqn.1994-11.com.samsung:nvme:PM991a:M.2:S65DNE2R576016", 47 | "state": "live" 48 | }, 49 | "capabilities": { 50 | "nvme": true, 51 | "pm": "Power Management", 52 | "msi": "Message Signalled Interrupts", 53 | "pciexpress": "PCI Express", 54 | "msix": "MSI-X", 55 | "nvm_express": true, 56 | "bus_master": "bus mastering", 57 | "cap_list": "PCI capabilities listing" 58 | }, 59 | "children": [ 60 | { 61 | "id": "namespace:0", 62 | "class": "disk", 63 | "claimed": true, 64 | "description": "NVMe disk", 65 | "physid": "0", 66 | "logicalname": "hwmon3" 67 | }, 68 | { 69 | "id": "namespace:1", 70 | "class": "disk", 71 | "claimed": true, 72 | "description": "NVMe disk", 73 | "physid": "2", 74 | "logicalname": "/dev/ng0n1" 75 | }, 76 | { 77 | "id": "namespace:2", 78 | "class": "disk", 79 | "claimed": true, 80 | "handle": "GUID:1fecaa97-f09a-488d-b516-3b68bbb28ae4", 81 | "description": "NVMe disk", 82 | "physid": "1", 83 | "businfo": "nvme@0:1", 84 | "logicalname": "/dev/nvme0n1", 85 | 
"units": "bytes", 86 | "size": 512110190592, 87 | "configuration": { 88 | "guid": "1fecaa97-f09a-488d-b516-3b68bbb28ae4", 89 | "logicalsectorsize": "512", 90 | "sectorsize": "512", 91 | "wwid": "eui.002538d511132cc1" 92 | }, 93 | "capabilities": { 94 | "gpt-1.00": "GUID Partition Table version 1.00", 95 | "partitioned": "Partitioned disk", 96 | "partitioned:gpt": "GUID partition table" 97 | }, 98 | "children": [ 99 | { 100 | "id": "volume:0", 101 | "class": "volume", 102 | "claimed": true, 103 | "handle": "GUID:917ce9be-0205-4dc9-8a19-b3c9bcbdc615", 104 | "description": "Windows FAT volume", 105 | "vendor": "MSDOS5.0", 106 | "physid": "1", 107 | "businfo": "nvme@0:1,1", 108 | "logicalname": [ 109 | "/dev/nvme0n1p1", 110 | "/boot/efi" 111 | ], 112 | "dev": "259:1", 113 | "version": "FAT32", 114 | "serial": "046d-f274", 115 | "size": 267912192, 116 | "capacity": 272629248, 117 | "configuration": { 118 | "FATs": "2", 119 | "filesystem": "fat", 120 | "label": "SYSTEM_DRV", 121 | "mount.fstype": "vfat", 122 | "mount.options": "rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=iso8859-1,shortname=mixed,errors=remount-ro", 123 | "name": "EFI system partition", 124 | "state": "mounted" 125 | }, 126 | "capabilities": { 127 | "boot": "Contains boot code", 128 | "fat": "Windows FAT", 129 | "initialized": "initialized volume" 130 | } 131 | }, 132 | { 133 | "id": "volume:1", 134 | "class": "volume", 135 | "claimed": true, 136 | "handle": "GUID:f2a882c8-e020-440e-9358-ace449ad1f40", 137 | "description": "EXT4 volume", 138 | "vendor": "Linux", 139 | "physid": "3", 140 | "businfo": "nvme@0:1,3", 141 | "logicalname": "/dev/nvme0n1p3", 142 | "dev": "259:2", 143 | "version": "1.0", 144 | "serial": "970ce5ee-c264-43f9-8f23-7fede276bab0", 145 | "size": 176741679104, 146 | "configuration": { 147 | "created": "2023-08-18 13:59:07", 148 | "filesystem": "ext4", 149 | "lastmountpoint": "/media/virgilisdead/970ce5ee-c264-43f9-8f23-7fede276bab0", 150 | "modified": "2024-02-25 
22:06:18", 151 | "mounted": "2024-02-25 14:12:18", 152 | "state": "clean" 153 | }, 154 | "capabilities": { 155 | "journaled": true, 156 | "extended_attributes": "Extended Attributes", 157 | "large_files": "4GB+ files", 158 | "huge_files": "16TB+ files", 159 | "dir_nlink": "directories with 65000+ subdirs", 160 | "64bit": "64bit filesystem", 161 | "extents": "extent-based allocation", 162 | "ext4": true, 163 | "ext2": "EXT2/EXT3", 164 | "initialized": "initialized volume" 165 | } 166 | }, 167 | { 168 | "id": "volume:2", 169 | "class": "volume", 170 | "claimed": true, 171 | "handle": "GUID:ec13037f-c71f-459b-b81a-e569e040f3ce", 172 | "description": "Windows NTFS volume", 173 | "vendor": "Windows", 174 | "physid": "4", 175 | "businfo": "nvme@0:1,4", 176 | "logicalname": "/dev/nvme0n1p4", 177 | "dev": "259:3", 178 | "version": "3.1", 179 | "serial": "aa3d2e93-fda9-784b-8bcc-aee2e9a65a9f", 180 | "size": 1021312512, 181 | "capacity": 1048575488, 182 | "configuration": { 183 | "clustersize": "4096", 184 | "created": "2021-10-11 12:04:53", 185 | "filesystem": "ntfs", 186 | "label": "WINRE_DRV", 187 | "name": "Basic data partition", 188 | "state": "clean" 189 | }, 190 | "capabilities": { 191 | "boot": "Contains boot code", 192 | "precious": "This partition is required for the platform to function", 193 | "ntfs": "Windows NTFS", 194 | "initialized": "initialized volume" 195 | } 196 | }, 197 | { 198 | "id": "volume:3", 199 | "class": "volume", 200 | "claimed": true, 201 | "handle": "GUID:c969a0be-bf01-4ba4-8533-3b63829a7e57", 202 | "description": "EXT4 volume", 203 | "vendor": "Linux", 204 | "physid": "5", 205 | "businfo": "nvme@0:1,5", 206 | "logicalname": [ 207 | "/dev/nvme0n1p5", 208 | "/", 209 | "/var/snap/firefox/common/host-hunspell" 210 | ], 211 | "dev": "259:4", 212 | "version": "1.0", 213 | "serial": "55ecc6fa-6ec8-48e8-9987-392fe7e08612", 214 | "size": 157286400000, 215 | "configuration": { 216 | "created": "2023-02-25 18:27:04", 217 | "filesystem": "ext4", 218 | 
"lastmountpoint": "/", 219 | "modified": "2024-03-14 12:41:36", 220 | "mount.fstype": "ext4", 221 | "mount.options": "ro,noexec,noatime,errors=remount-ro,stripe=32", 222 | "mounted": "2024-03-14 12:41:36", 223 | "state": "mounted" 224 | }, 225 | "capabilities": { 226 | "journaled": true, 227 | "extended_attributes": "Extended Attributes", 228 | "large_files": "4GB+ files", 229 | "huge_files": "16TB+ files", 230 | "dir_nlink": "directories with 65000+ subdirs", 231 | "recover": "needs recovery", 232 | "64bit": "64bit filesystem", 233 | "extents": "extent-based allocation", 234 | "ext4": true, 235 | "ext2": "EXT2/EXT3", 236 | "initialized": "initialized volume" 237 | } 238 | } 239 | ] 240 | } 241 | ] 242 | } 243 | ] 244 | } 245 | --------------------------------------------------------------------------------