├── .dockerignore
├── .github
└── workflows
│ ├── greenhack22.yml
│ ├── release.yml
│ └── test.yml
├── .gitignore
├── .pre-commit-config.yaml
├── Dockerfile
├── LICENSE
├── Makefile
├── Pipfile
├── README.md
├── boagent
├── __init__.py
├── api
│ ├── __init__.py
│ ├── api.py
│ ├── config.py
│ ├── exceptions.py
│ ├── models.py
│ ├── process.py
│ └── utils.py
├── hardware
│ ├── __init__.py
│ └── lshw.py
└── public
│ ├── assets
│ ├── boavizta-logo-4.png
│ ├── data.csv
│ ├── dygraph.css
│ ├── dygraph.min.js
│ ├── favicon.ico
│ ├── favicon.png
│ ├── git-logo.png
│ ├── graph.ts
│ ├── jquery-3.6.1.min.js
│ ├── license.txt
│ ├── main.css
│ ├── platypus_logo.png
│ ├── pureknob.js
│ ├── synchronizer.js
│ └── table-style.css
│ └── index.html
├── boagent_color.svg
├── compose
└── development.yaml
├── docker-compose.yaml
├── hardware_cli.py
├── poetry.lock
├── pyproject.toml
├── pytest.ini
├── requirements.txt
├── setup.py
├── setup
└── docker-compose.yaml
└── tests
├── __init__.py
├── api
├── test_api_integration.py
├── test_api_process.py
└── test_api_unit.py
├── hardware
├── test_hardwarecli.py
└── test_lshw.py
└── mocks
├── boaviztapi_response_not_verbose.json
├── boaviztapi_response_verbose.json
├── formatted_power_data_one_hour.json
├── formatted_scaphandre.json
├── get_metrics_not_verbose.json
├── get_metrics_verbose.json
├── get_metrics_verbose_no_hdd.json
├── hardware_data.json
├── hubblo-ci-01_lshw.json
├── lshw_data.json
├── lshw_data_sudo.json
├── mocks.py
├── nvme_data.json
├── nvme_data_sudo.json
├── power_data.json
├── sudo_lshw_data.json
├── sudo_lshw_data_disks.json
└── sync-ce-re_lshw.json
/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | venv
3 |
--------------------------------------------------------------------------------
/.github/workflows/greenhack22.yml:
--------------------------------------------------------------------------------
1 | name: Publish greenhack22 release
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'greenhack22'
7 |
8 | jobs:
9 |
10 | docker:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - name: Checkout
14 | uses: actions/checkout@v4
15 |
16 | - name: Login to GitHub Container Registry
17 | uses: docker/login-action@v3
18 | with:
19 | registry: ghcr.io
20 | username: ${{ github.actor }}
21 | password: ${{ secrets.GITHUB_TOKEN }}
22 |
23 | - name: Setup Python 3
24 | uses: actions/setup-python@v5
25 | with:
26 | python-version: '3.9'
27 |
28 | - name: Build docker image
29 | run: docker build . --tag ghcr.io/boavizta/boagent:greenhack22 --cache-from ghcr.io/boavizta/boagent:greenhack22
30 |
31 | - name: Push docker image
32 | run: docker push ghcr.io/boavizta/boagent:greenhack22
33 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Publish release
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 |
9 | docker:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout
13 | uses: actions/checkout@v4
14 |
15 | - name: Login to GitHub Container Registry
16 | uses: docker/login-action@v3
17 | with:
18 | registry: ghcr.io
19 | username: ${{ github.actor }}
20 | password: ${{ secrets.GITHUB_TOKEN }}
21 |
22 | - name: Setup Python 3
23 | uses: actions/setup-python@v5
24 | with:
25 | python-version: '3.9'
26 |
27 | - name: Install poetry
28 | run: python -m pip install --upgrade poetry wheel
29 |
30 | - name: Build docker image
31 | run: docker build . --tag ghcr.io/boavizta/boagent:$(poetry version -s)
32 |
33 | - name: Push docker image
34 | run: docker push ghcr.io/boavizta/boagent:$(poetry version -s)
35 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Execute tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - dev
8 | paths:
9 | - "boagent/**"
10 | - "tests/**"
11 | - "poetry.lock"
12 | - "pyproject.toml"
13 | pull_request:
14 | branches:
15 | - main
16 | - dev
17 | paths:
18 | - "boagent/**"
19 | - "tests/**"
20 | - "poetry.lock"
21 | - "pyproject.toml"
22 |
23 | jobs:
24 | test:
25 | strategy:
26 | matrix:
27 | version: ["3.10", "3.11"]
28 | runs-on: ubuntu-latest
29 | steps:
30 | - uses: actions/checkout@v4
31 | - name: Python setup
32 | uses: actions/setup-python@v5
33 | with:
34 | python-version: ${{ matrix.version }}
35 | - name: Poetry setup
36 |         run: python3 -m pip install --upgrade poetry wheel
37 | - name: Install dependencies
38 | run: poetry install
39 | - name: Execute tests
40 | run: poetry run python3 -m pytest
41 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 | api/service/server_impact/ref/jupyter_data.ipynb
80 | api/service/server_impact/ref/__pycache__
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | .python-version
88 |
89 | # pipenv
90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
93 | # install all needed dependencies.
94 | #Pipfile.lock
95 |
96 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
97 | __pypackages__/
98 |
99 | # Celery stuff
100 | celerybeat-schedule
101 | celerybeat.pid
102 |
103 | # SageMath parsed files
104 | *.sage.py
105 |
106 | # Environments
107 | .env
108 | .venv
109 | env/
110 | venv/
111 | ENV/
112 | env.bak/
113 | venv.bak/
114 |
115 | # Spyder project settings
116 | .spyderproject
117 | .spyproject
118 |
119 | # Rope project settings
120 | .ropeproject
121 |
122 | # mkdocs documentation
123 | /site
124 |
125 | # mypy
126 | .mypy_cache/
127 | .dmypy.json
128 | dmypy.json
129 |
130 | # Pyre type checker
131 | .pyre/
132 |
133 | # MacOS
134 | .DS_Store
135 |
136 | # IDE
137 | .idea/
138 | .vscode/
139 |
140 | *.db
141 | *.svg
142 | *.csv
143 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v2.3.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - repo: https://github.com/psf/black
9 | rev: 22.10.0
10 | hooks:
11 | - id: black
12 | - repo: https://github.com/PyCQA/flake8
13 | rev: 7.0.0
14 | hooks:
15 | - id: flake8
16 | entry: flake8 --ignore=E501,W503 --per-file-ignores='__init__.py:F401'
17 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10-slim
2 |
3 | LABEL org.opencontainers.image.authors="open-source@boavizta.org"
4 | LABEL org.opencontainers.image.description="Docker image for Boagent, a local API & environmental impact monitoring tool."
5 | LABEL org.opencontainers.image.licenses=Apache-2.0
6 |
7 | WORKDIR /home/boagent
8 |
9 | RUN python3 -m pip install --upgrade poetry
10 |
11 | RUN apt update && apt install lshw nvme-cli -y
12 |
13 | COPY pyproject.toml .
14 |
15 | RUN poetry install
16 |
17 | COPY . .
18 |
19 | EXPOSE 8000
20 |
21 | ENTRYPOINT ["poetry", "run", "uvicorn", "--reload", "boagent.api.api:app", "--host", "0.0.0.0"]
22 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2022 Boavizta
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | CURRENT_VERSION := $(shell poetry version -s)
2 | SEMVERS := major minor patch
3 | LAST_TAG := $(shell git describe --tags --abbrev=0)
4 |
5 | tag_version:
6 | git commit -m "release: bump to ${CURRENT_VERSION}" pyproject.toml
7 | git tag ${CURRENT_VERSION}
8 |
9 | $(SEMVERS):
10 | poetry version $@
11 | $(MAKE) tag_version
12 |
13 | release:
14 | git push origin tag ${LAST_TAG}
15 | gh release create --verify-tag ${LAST_TAG} --notes-from-tag
16 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | fastapi = '0.75.2'
8 | uvicorn = '*'
9 | pandas = '*'
10 | aiofile = '*'
11 | mangum = "*"
12 | boaviztapi-sdk='0.1.2'
13 | cpuid='0.0.10'
14 | py-cpuinfo='8.0.0'
15 | dataclasses='0.8'
16 | requests = "*"
17 | sqlalchemy = "*"
18 | pydantic = {extras = ["dotenv"], version = "*"}
19 | croniter = "*"
20 |
21 | [dev-packages]
22 | mkdocs = '*'
23 | pytest = '*'
24 | atomicwrites = "*"
25 | mkdocs-material = "*"
26 | httpx = '*'
27 | pytest-asyncio = '*'
28 | requests = '*'
29 | setuptools-pipfile = "==0.4.1"
30 | twine = "==3.2.0"
31 | bump2version = "==1.0.1"
32 |
33 | [requires]
34 | python_version = "3.8"
35 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Local API / sidecar / companion of a running application that computes and gives insights to the application regarding its environmental impacts.
6 |
7 |
8 | ---
9 |
10 | _If no parameters are passed to the API to isolate the application, then the impact of the whole machine is calculated._
11 |
12 | ## How to use
13 |
14 | This is an API, you could use either your browser, cURL, or call it directly from an application (which is the main usecase).
15 |
16 | Once the API is running, a Swagger interface is available on [localhost:8000/docs](http://localhost:8000/docs).
17 |
18 |
19 | ### Run natively
20 |
21 | Boagent will not be able to return proper responses from its endpoints without root privileges in order to fetch hardware data.
22 | It also needs information from BoaviztAPI and Scaphandre, see the [setup information](#Setup).
23 |
24 | To run it :
25 |
26 | Without `poetry`
27 |
28 | ```
29 | apt update && apt install lshw nvme-cli -y
30 | pip3 install -r requirements.txt
31 | cd boagent/api/
32 | uvicorn api:app --reload
33 | ```
34 |
35 | With `poetry`
36 |
37 | ```
38 | apt update && apt install lshw nvme-cli -y
39 | poetry install --only main
40 | poetry run uvicorn --reload boagent.api.api:app
41 | ```
42 |
43 | ### Run in a docker container
44 |
45 | You could pull the [image](https://github.com/Boavizta/boagent/pkgs/container/boagent) with `docker pull ghcr.io/boavizta/boagent:0.1.0`.
46 |
47 | ### Run in docker-compose (with all the requirements)
48 |
49 | To get started you need docker and docker-compose installed on your machine. On a Debian or Ubuntu environment, run :
50 |
51 | # apt update && apt install -y docker.io docker-compose
52 |
53 | To get the full setup easily, you could run the stack in docker-compose with `docker-compose up -d`. `docker-compose.yml`, at the root of the project will build a Docker image from the source for Boagent, and setup a container for [Scaphandre](#Scaphandre) and another for the [BoaviztAPI](#BoaviztAPI), allowing you to get the full evaluation easily on a physical machine.
54 |
55 | Please see [Configuration](#Configuration) for the environment variables you can tweak in the Boagent container.
56 |
57 | ### Use `hardware_cli`
58 |
59 | To have an example of the retrieved hardware information by Boagent, you can run `sudo ./hardware_cli.py`.
60 | At the moment, it will output the formatted data for CPU, RAM and storage devices used by Boagent when sending a request to BoaviztAPI.
61 | `sudo ./hardware_cli.py --output-file <output-file>` can send the formatted output to a file.
62 |
63 | ## Setup
64 |
65 | ## Linux
66 |
67 | Boagent parses output from `lshw` (a tool listing hardware components and characteristics) and `nvme-cli` (a tool listing information on SSD storage
68 | devices available through NVME interfaces). To get all actually parsed information (and for future developments), Boagent needs those two programs and to execute them with root privileges.
69 |
70 | ### BoaviztAPI
71 |
72 | You need either to use an existing BoaviztAPI endpoint, or to build the BoaviztAPI container image, then run the container locally on port 5000.
73 |
74 | Depending on your setup, specify the endpoint to be used with the environment variable `BOAVIZTAPI_ENDPOINT`, see [Configuration](#Configuration).
75 |
76 | Ensure that the version of BoaviztAPI SDK installed (see `requirements.txt` or `pyproject.toml`) is the same as the version of the API running the endpoint you use.
77 |
78 | ### Scaphandre
79 |
80 | To get power consumption metrics, you need [Scaphandre](https://github.com/hubblo-org/scaphandre) running in the background, with the JSON exporter. This will write power metrics to a file, that Boagent will read :
81 |
82 | ```
83 | scaphandre json -s 5 -f power_data.json
84 | ```
85 |
86 | ## Configuration
87 |
88 | Boagent can be configured with the following variables :
89 |
90 | - `DEFAULT_LIFETIME`: the machine's lifetime, used to compute the scope 3 / manufacturing, transport, end-of-life impacts
91 | - `HARDWARE_FILE_PATH`: path to the file containing the hardware list (output from `lshw.py`)
92 | - `POWER_FILE_PATH`: path to the file containing power measurements (output from [Scaphandre](https://github.com/hubblo-org/scaphandre) with JSON exporter)
93 | - `HARDWARE_CLI`: path to the executable file to collect hardware information (`lshw.py` from this project)
94 | - `BOAVIZTAPI_ENDPOINT`: HTTP endpoint to the BoaviztAPI, in the form `http://myendpoint.com:PORTNUMBER`
95 |
96 | You can set those variables in the following order (as interpreted by the tool):
97 |
98 | 1. export the variable in the environment
99 | 2. write it in the .env file in the same folder as `api.py`
100 | 3. rely on default values from `config.py`
101 |
102 | You can check the configuration applied by querying the `/info` route.
103 |
104 | ## How it works
105 |
106 | Currently, Boagent only works for Linux systems.
107 |
108 | Boagent exposes multiple API endpoints, most notably `/query` and `/metrics`. Both will query an instance of [BoaviztAPI](https://doc.api.boavizta.org/) in order to give the environmental impacts
109 | of the received hardware data. `/query` will return a response in JSON format, and `/metrics` will return a response parsable by a Prometheus instance. If needed, both those
110 | endpoints can return data from [Scaphandre](https://github.com/hubblo-org/scaphandre/) and give the energy consumption of components from the queried hardware.
111 |
112 | Presently, Boagent gets hardware data through a parsing of the output of `lshw`, a common utility available for Linux distributions that lists a lot of information of all
113 | hardware components on a running computer. The code for this `Lshw` class is an adaptation of [netbox-agent](https://github.com/Solvik/netbox-agent)'s implementation.
114 | `lshw`, to get all proper data needed by BoaviztAPI, needs to be executed as a privileged user with `sudo`. Boagent, executed with the available `docker-compose` file,
115 | will run as privileged and will be able to receive the needed hardware data. At the moment, only data for the CPU, RAM and storage (either HDD or SSD) are parsed and sent to BoaviztAPI
116 | in order to calculate impacts.
117 |
118 | Another endpoint, `process_embedded_impacts`, allows to calculate the embedded impacts of a process running on the host, in relation to the host components (CPU, RAM and storage). It will give, for all the components, the average, maximum and minimum values between two timestamps for three environmental impact factors : Global Warming Potential (in KgCO2e), Abiotic Depletion Potential (in KgSbeq) and Primary Energy (in microjoules). To get this information, a Linux Process ID has to be provided.
119 |
120 | ## Deeper explanations
121 |
122 | ### Environmental metrics
123 |
124 | This project uses the Life Cycle Assessment (ISO 14040 / 14044) methodology as a reference.
125 |
126 | This way, it is intended to evaluate the impacts on all life cycle phases (extraction, manufacturing, shipping, use, end of life). **Today we only evaluate manufacturing and use phases.**
127 |
128 | Here are the impacts considered so far :
129 |
130 | - Green House Gas emissions / Global Warming Potential (see GHG protocol as a reference)
131 | - resources extraction (LCA) / scope 3 (GHG protocol) ✔️
132 | - use (LCA) / scope 2 (GHG protocol) ✔️
133 | - manufacturing (LCA) / scope 3 (GHG protocol) ✔️
134 | - shipping (LCA) / scope 3 (GHG protocol) ❌
135 | - end of life (LCA) / scope 3 (GHG protocol) ❌
136 | - Abiotic resources depletion (minerals), criteria called ADP or Abiotic Depletion Potential
137 | - resources extraction (LCA) ✔️
138 | - use (LCA) ✔️
139 | - manufacturing (LCA) ✔️
140 | - shipping (LCA) ❌
141 | - end of life (LCA) ❌
142 | - Primary energy usage : PE
143 | - resources extraction (LCA) ✔️
144 | - use (LCA) ✔️
145 | - manufacturing (LCA) ✔️
146 | - shipping (LCA) ❌
147 | - end of life (LCA) ❌
148 |
--------------------------------------------------------------------------------
/boagent/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Boagent
3 |
4 | Monitoring agent/framework for evaluating the environmental impacts of a machine and its applications, including several to all steps of the life cycle of the machine and service, plus multiple criteria of impacts (not just CO2eq metrics / Global Warming Potential). Part of the efforts of https://boavizta.org/en and https://sdialliance.org/.
5 | """
6 |
7 | __version__ = "0.1.0"
8 | __author__ = "Boavizta "
9 | __credits__ = "Boavizta contributors"
10 |
--------------------------------------------------------------------------------
/boagent/api/__init__.py:
--------------------------------------------------------------------------------
1 | from .api import (
2 | build_hardware_data,
3 | format_usage_request,
4 | read_hardware_data,
5 | get_hardware_data,
6 | query_machine_impact_data,
7 | compute_average_consumption,
8 | get_power_data,
9 | get_metrics,
10 | )
11 |
--------------------------------------------------------------------------------
/boagent/api/api.py:
--------------------------------------------------------------------------------
1 | import json
2 | import time
3 | from typing import Dict, Any, List, Union
4 | from fastapi import FastAPI, Response, Body, HTTPException
5 | from fastapi.staticfiles import StaticFiles
6 | from fastapi.responses import HTMLResponse
7 | from boaviztapi_sdk.api.server_api import ServerApi
8 | from boaviztapi_sdk.models.server import Server
9 | from boagent.api.exceptions import InvalidPIDException
10 | from boagent.hardware.lshw import Lshw
11 | from .utils import (
12 | iso8601_or_timestamp_as_timestamp,
13 | format_prometheus_output,
14 | get_boavizta_api_client,
15 | sort_ram,
16 | sort_disks,
17 | )
18 |
19 | from .config import Settings
20 | from .process import Process
21 | from .models import WorkloadTime, time_workload_example
22 |
settings = Settings()

# Runtime configuration, resolved once at import time from
# boagent.api.config.Settings (pydantic BaseSettings defaults/env vars).
HARDWARE_FILE_PATH = settings.hardware_file_path
POWER_DATA_FILE_PATH = settings.power_file_path
PUBLIC_PATH = settings.public_path
ASSETS_PATH = settings.assets_path
DB_PATH = settings.db_path
DEFAULT_LIFETIME = settings.default_lifetime
SECONDS_IN_ONE_YEAR = settings.seconds_in_one_year
HARDWARE_CLI = settings.hardware_cli
AZURE_LOCATION = settings.azure_location
BOAVIZTAPI_ENDPOINT = settings.boaviztapi_endpoint
CARBON_AWARE_API_ENDPOINT = settings.carbon_aware_api_endpoint
CARBON_AWARE_API_TOKEN = settings.carbon_aware_api_token
PROJECT_NAME = settings.project_name
PROJECT_VERSION = settings.project_version
PROJECT_DESCRIPTION = settings.project_description
TAGS_METADATA = settings.tags_metadata
41 |
42 |
def configure_static(app):
    """Mount the public assets directory on the application under /assets."""
    app.mount("/assets", StaticFiles(directory=ASSETS_PATH), name="assets")
46 |
def configure_app():
    """Build the FastAPI application with project metadata and static assets."""
    application = FastAPI(
        title=PROJECT_NAME,
        version=PROJECT_VERSION,
        description=PROJECT_DESCRIPTION,
        contact={"name": "Boavizta Members", "url": "https://boavizta.org/en"},
        license_info={"name": "Apache-2.0"},
        openapi_tags=TAGS_METADATA,
    )
    configure_static(application)
    return application


app = configure_app()
61 |
62 |
63 | @app.get("/info", tags=["info"])
64 | async def info():
65 | return {
66 | "seconds_in_one_year": SECONDS_IN_ONE_YEAR,
67 | "default_lifetime": DEFAULT_LIFETIME,
68 | "hardware_file_path": HARDWARE_FILE_PATH,
69 | "power_file_path": POWER_DATA_FILE_PATH,
70 | "hardware_cli": HARDWARE_CLI,
71 | "boaviztapi_endpoint": BOAVIZTAPI_ENDPOINT,
72 | }
73 |
74 |
75 | @app.get("/web", tags=["web"], response_class=HTMLResponse)
76 | async def web():
77 | res = ""
78 | with open("{}/index.html".format(PUBLIC_PATH), "r") as fd:
79 | res = fd.read()
80 | fd.close()
81 | return res
82 |
83 |
84 | @app.get("/metrics", tags=["metrics"])
85 | async def metrics(
86 | start_time: str = "0.0",
87 | end_time: str = "0.0",
88 | verbose: bool = False,
89 | location: str = "",
90 | measure_power: bool = True,
91 | lifetime: float = DEFAULT_LIFETIME,
92 | fetch_hardware: bool = False,
93 | ):
94 | return Response(
95 | content=format_prometheus_output(
96 | get_metrics(
97 | iso8601_or_timestamp_as_timestamp(start_time),
98 | iso8601_or_timestamp_as_timestamp(end_time),
99 | verbose,
100 | location,
101 | measure_power,
102 | lifetime,
103 | fetch_hardware,
104 | ),
105 | verbose,
106 | ),
107 | media_type="plain-text",
108 | )
109 |
110 |
111 | @app.get("/query", tags=["query"])
112 | async def query(
113 | start_time: str = "0.0",
114 | end_time: str = "0.0",
115 | verbose: bool = False,
116 | location: str = "EEE",
117 | measure_power: bool = True,
118 | lifetime: float = DEFAULT_LIFETIME,
119 | fetch_hardware: bool = False,
120 | ):
121 | """
122 | start_time: Start time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n
123 | end_time: End time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n
124 | verbose: Get detailled metrics with extra information.\n
125 | location: Country code to configure the local electricity grid to take into account.\n
126 | measure_power: Get electricity consumption metrics from Scaphandre or not.\n
127 | lifetime: Full lifetime of the machine to evaluate.\n
128 | fetch_hardware: Regenerate hardware.json file with current machine hardware or not.\n
129 | """
130 | return get_metrics(
131 | iso8601_or_timestamp_as_timestamp(start_time),
132 | iso8601_or_timestamp_as_timestamp(end_time),
133 | verbose,
134 | location,
135 | measure_power,
136 | lifetime,
137 | fetch_hardware,
138 | )
139 |
140 |
141 | @app.post("/query", tags=["query"])
142 | async def query_with_time_workload(
143 | start_time: str = "0.0",
144 | end_time: str = "0.0",
145 | verbose: bool = False,
146 | location: str = "EEE",
147 | measure_power: bool = True,
148 | lifetime: float = DEFAULT_LIFETIME,
149 | fetch_hardware: bool = False,
150 | time_workload: Union[dict[str, float], dict[str, List[WorkloadTime]]] = Body(
151 | None, example=time_workload_example
152 | ),
153 | ):
154 | """
155 | start_time: Start time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n
156 | end_time: End time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n
157 | verbose: Get detailled metrics with extra information.\n
158 | location: Country code to configure the local electricity grid to take into account.\n
159 | measure_power: Get electricity consumption metrics from Scaphandre or not.\n
160 | lifetime: Full lifetime of the machine to evaluate.\n
161 | fetch_hardware: Regenerate hardware.json file with current machine hardware or not.\n
162 | time_workload: Workload percentage for CPU and RAM. Can be a float or a list of dictionaries with format
163 | {"time_percentage": float, "load_percentage": float}
164 | """
165 | return get_metrics(
166 | iso8601_or_timestamp_as_timestamp(start_time),
167 | iso8601_or_timestamp_as_timestamp(end_time),
168 | verbose,
169 | location,
170 | measure_power,
171 | lifetime,
172 | fetch_hardware,
173 | time_workload,
174 | )
175 |
176 |
177 | @app.get("/process_embedded_impacts", tags=["process"])
178 | async def process_embedded_impacts(
179 | process_id: int = 0,
180 | start_time: str = "0.0",
181 | end_time: str = "0.0",
182 | location: str = "EEE",
183 | lifetime: float = DEFAULT_LIFETIME,
184 | fetch_hardware: bool = False,
185 | ):
186 | """
187 | process_id: The process ID queried to be evaluated for embedded impacts for each available component. \n
188 | start_time: Start time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n
189 | end_time: End time for evaluation. Accepts either UNIX Timestamp or ISO8601 date format. \n
190 | location: Country code to configure the local electricity grid to take into account.\n
191 | lifetime: Full lifetime of the machine to evaluate.\n
192 | """
193 |
194 | verbose = True
195 | measure_power = True
196 |
197 | metrics_data = get_metrics(
198 | iso8601_or_timestamp_as_timestamp(start_time),
199 | iso8601_or_timestamp_as_timestamp(end_time),
200 | verbose,
201 | location,
202 | measure_power,
203 | lifetime,
204 | fetch_hardware,
205 | )
206 | try:
207 | queried_process = Process(metrics_data, process_id)
208 | except InvalidPIDException as invalid_pid:
209 | raise HTTPException(status_code=400, detail=invalid_pid.message)
210 | else:
211 | process_embedded_impact_values = queried_process.embedded_impact_values
212 | json_content = json.dumps(process_embedded_impact_values)
213 | return Response(status_code=200, content=json_content)
214 |
215 |
def get_metrics(
    start_time: float,
    end_time: float,
    verbose: bool,
    location: str,
    measure_power: bool,
    lifetime: float,
    fetch_hardware: bool,
    time_workload: Union[dict[str, float], dict[str, List[WorkloadTime]], None] = None,
):
    """Assemble operational and embedded impact metrics for the host machine.

    start_time / end_time: evaluation window as UNIX timestamps; 0.0 defaults
        to the last hour (see below).
    verbose: include raw hardware/power/BoaviztAPI data in the result.
    location: country code for the electricity mix; short codes or "EEE"
        trigger a default-location warning.
    measure_power: read Scaphandre power data and report usage impacts.
    lifetime: machine lifetime in years, used to allocate embedded impacts.
    fetch_hardware: regenerate the hardware inventory before evaluating.
    time_workload: optional workload profile forwarded to BoaviztAPI.
    Returns a dict of metric entries (value/description/type/unit) plus
    warnings and, when verbose, raw data.
    """

    now: float = time.time()
    # Share of the machine lifetime covered by the evaluation window; scales
    # the embedded (manufacturing) impacts reported below.
    if start_time and end_time:
        ratio = (end_time - start_time) / (lifetime * SECONDS_IN_ONE_YEAR)
    else:
        ratio = 1.0
    # NOTE(review): the defaults and the lifetime clamp below are applied
    # AFTER `ratio` was computed, so `ratio` can be stale (e.g. greater than
    # 1.0 when the window exceeds the configured lifetime) — confirm whether
    # it should be recomputed after these adjustments.
    if start_time == 0.0:
        start_time = now - 3600
    if end_time == 0.0:
        end_time = now
    if end_time - start_time >= lifetime * SECONDS_IN_ONE_YEAR:
        lifetime = (end_time - start_time) / float(SECONDS_IN_ONE_YEAR)

    hardware_data = get_hardware_data(fetch_hardware)

    res = {"emissions_calculation_data": {}}

    avg_power = None

    # A country code is three letters; anything shorter (or the "EEE"
    # sentinel) means the BoaviztAPI default location will be used.
    if len(location) < 3 or location == "EEE":
        res["location_warning"] = {
            "warning_message": "Location is either set as default, or has not been set, and is therefore set to the default BoaviztAPI location. "
            "Be aware that the presented results can be drastically different due to location. "
            "It is recommended that you set the asset location with the corresponding country code, see: https://doc.api.boavizta.org/Explanations/usage/countries/"
        }

    if measure_power:
        power_data = get_power_data(start_time, end_time)
        avg_power = power_data["avg_power"]
        if "warning" in power_data:
            res["emissions_calculation_data"][
                "energy_consumption_warning"
            ] = power_data["warning"]

    boaviztapi_data = query_machine_impact_data(
        model={},
        configuration=hardware_data,
        usage=format_usage_request(
            start_time, end_time, avg_power, location, time_workload
        ),
    )

    # Usage-phase ("operational") impacts are only meaningful when power was
    # actually measured over the window.
    if measure_power:
        res["total_operational_emissions"] = {
            "value": boaviztapi_data["impacts"]["gwp"]["use"],
            "description": "GHG emissions related to usage, from start_time to end_time.",
            "type": "gauge",
            "unit": "kg CO2eq",
            "long_unit": "kilograms CO2 equivalent",
        }
        res["total_operational_abiotic_resources_depletion"] = {
            "value": boaviztapi_data["impacts"]["adp"]["use"],
            "description": "Abiotic Resources Depletion (minerals & metals, ADPe) due to the usage phase.",
            "type": "gauge",
            "unit": "kgSbeq",
            "long_unit": "kilograms Antimony equivalent",
        }
        res["total_operational_primary_energy_consumed"] = {
            "value": boaviztapi_data["impacts"]["pe"]["use"],
            "description": "Primary Energy consumed due to the usage phase.",
            "type": "gauge",
            "unit": "MJ",
            "long_unit": "Mega Joules",
        }
    res["start_time"] = {
        "value": start_time,
        "description": "Start time for the evaluation, in timestamp format (seconds since 1970)",
        "type": "counter",
        "unit": "s",
        "long_unit": "seconds",
    }
    res["end_time"] = {
        "value": end_time,
        "description": "End time for the evaluation, in timestamp format (seconds since 1970)",
        "type": "counter",
        "unit": "s",
        "long_unit": "seconds",
    }
    res["average_power_measured"] = {
        "value": avg_power,
        "description": "Average power measured from start_time to end_time",
        "type": "gauge",
        "unit": "W",
        "long_unit": "Watts",
    }

    # Kept from a previous revision; disabled pending confirmation of the
    # BoaviztAPI response shape it expects.
    """ res["calculated_emissions"] = {
        "value": boaviztapi_data["impacts"]["gwp"]["value"] * ratio
        + boaviztapi_data["impacts"]["gwp"]["use"]["value"],
        "description": "Total Green House Gas emissions calculated for manufacturing and usage phases, between "
        "start_time and end_time",
        "type": "gauge",
        "unit": "kg CO2eq",
        "long_unit": "kilograms CO2 equivalent",
    } """

    # Embedded (manufacturing) impacts, allocated to the window via `ratio`.
    res["embedded_emissions"] = {
        "value": boaviztapi_data["impacts"]["gwp"]["embedded"]["value"] * ratio,
        "description": "Embedded carbon emissions (manufacturing phase)",
        "type": "gauge",
        "unit": "kg CO2eq",
        "long_unit": "kilograms CO2 equivalent",
    }
    res["embedded_abiotic_resources_depletion"] = {
        "value": boaviztapi_data["impacts"]["adp"]["embedded"]["value"] * ratio,
        "description": "Embedded abiotic ressources consumed (manufacturing phase)",
        "type": "gauge",
        "unit": "kg Sbeq",
        "long_unit": "kilograms ADP equivalent",
    }
    res["embedded_primary_energy"] = {
        "value": boaviztapi_data["impacts"]["pe"]["embedded"]["value"] * ratio,
        "description": "Embedded primary energy consumed (manufacturing phase)",
        "type": "gauge",
        "unit": "MJ",
        "long_unit": "Mega Joules",
    }

    if verbose:
        res["raw_data"] = {
            "hardware_data": hardware_data,
            "resources_data": "not implemented yet",
            "boaviztapi_data": boaviztapi_data,
            "start_time": start_time,
            "end_time": end_time,
        }
        res["electricity_carbon_intensity"] = {
            "value": boaviztapi_data["verbose"]["gwp_factor"]["value"],
            "description": "Carbon intensity of the electricity mix. Mix considered : {}".format(
                location
            ),
            "type": "gauge",
            "unit": "kg CO2eq / kWh",
            "long_unit": "Kilograms CO2 equivalent per KiloWattHour",
        }

        # power_data is only bound when measure_power is true (guarded above).
        if measure_power:
            res["raw_data"]["power_data"] = power_data

    return res
366 |
367 |
def format_usage_request(
    start_time: float,
    end_time: float,
    avg_power: Union[float, None] = None,
    location: str = "EEE",
    time_workload: Union[dict[str, float], dict[str, List[WorkloadTime]], None] = None,
):
    """Build the `usage` payload expected by BoaviztAPI.

    Only truthy optional values are included in the payload.
    """
    usage = {"hours_use_time": (end_time - start_time) / 3600.0}
    for key, value in (
        ("usage_location", location),
        ("avg_power", avg_power),
        ("time_workload", time_workload),
    ):
        if value:
            usage[key] = value
    return usage
384 |
385 |
def get_power_data(start_time, end_time):
    """Read Scaphandre reports and average host power over [start_time, end_time].

    Returns a dict with "raw_data" (the matching reports), "avg_power" in
    watts and, for windows of one hour or less, a "warning" about the
    extrapolation of the estimate.
    """
    with open(POWER_DATA_FILE_PATH, "r") as power_data_file:
        # The power file is a stream missing its closing bracket; append it
        # so the whole content parses as one JSON array.
        data = json.loads(f"{power_data_file.read()}]")
    selected_reports = [
        report
        for report in data
        if start_time <= float(report["host"]["timestamp"]) <= end_time
    ]
    power_data = {
        "raw_data": selected_reports,
        "avg_power": compute_average_consumption(selected_reports),
    }
    if end_time - start_time <= 3600:
        power_data["warning"] = (
            "The time window is lower than one hour, but the energy consumption estimate is in "
            "Watt.Hour. So this is an extrapolation of the power usage profile on one hour. Be "
            "careful with this data. "
        )
    return power_data
404 |
405 |
def compute_average_consumption(power_data) -> float:
    """Average host power over a list of Scaphandre reports, in watts.

    Returns 0.0 for an empty list. Scaphandre reports consumption in
    microwatts, hence the division by 1e6.
    """
    if not power_data:
        return 0.0
    total_microwatts = sum(float(report["host"]["consumption"]) for report in power_data)
    return total_microwatts / len(power_data) / 1000000.0
417 |
418 |
def get_hardware_data(fetch_hardware: bool):
    """Return the hardware inventory, optionally regenerating it first.

    When the inventory file is missing or unreadable, it is (re)built once
    before a second read attempt.
    """
    if fetch_hardware:
        build_hardware_data()
    try:
        return read_hardware_data()
    except Exception:
        build_hardware_data()
        return read_hardware_data()
429 |
430 |
def read_hardware_data() -> Dict:
    """Load the hardware inventory JSON from HARDWARE_FILE_PATH."""
    with open(HARDWARE_FILE_PATH, "r") as fd:
        return json.load(fd)
435 |
436 |
def build_hardware_data():
    """Scan the machine with lshw and persist disks/cpus/rams to HARDWARE_FILE_PATH."""
    lshw = Lshw()
    hardware_data = {
        "disks": lshw.disks,
        "cpus": lshw.cpus,
        "rams": lshw.memories,
    }
    with open(HARDWARE_FILE_PATH, "w") as hardware_file:
        json.dump(hardware_data, hardware_file)
445 |
446 |
def query_machine_impact_data(
    model: dict[str, str],
    configuration: dict[str, dict[str, int]],
    usage: dict[str, Any],
) -> dict:
    """Query BoaviztAPI for the impacts of the described server.

    A full hardware configuration takes precedence; otherwise a model falls
    back to a prerecorded archetype lookup.
    """
    server_api = ServerApi(get_boavizta_api_client())
    server_impact = None

    if configuration:
        server = Server(usage=usage, configuration=configuration)
        server_impact = server_api.server_impact_from_configuration_v1_server_post(
            server=server
        )
    elif model:
        # TO IMPLEMENT
        # This conditional was based on a previous version of BoaviztAPI, where a
        # server model could be sent to /v1/server through a GET method. BoaviztAPI
        # now expects an archetype string to return a prerecorded impact from an asset.
        server_impact = server_api.server_impact_from_model_v1_server_get(
            archetype="dellR740"
        )

    return server_impact
472 |
473 |
def generate_machine_configuration(hardware_data) -> Dict[str, Any]:
    """Convert a hardware inventory into a BoaviztAPI server configuration.

    hardware_data: dict with "cpus", "rams", "disks" lists and, optionally,
    a "power_supply" entry.
    """
    # Either delete or transfer this logic to hardware_cli / lshw
    config = {
        "cpu": {
            "units": len(hardware_data["cpus"]),
            # BUG FIX: the first CPU lives at index 0; index 1 raised
            # IndexError on single-CPU machines.
            "core_units": hardware_data["cpus"][0]["core_units"],
            # "family": hardware_data['cpus'][0]['family']
        },
        "ram": sort_ram(hardware_data["rams"]),
        "disk": sort_disks(hardware_data["disks"]),
        "power_supply": (
            hardware_data["power_supply"]
            if "power_supply" in hardware_data
            else {"units": 1}
        ),
        # TODO: if cpu is a small one, guess that power supply is light/average weight of a laptops power supply ?
    }
    return config
492 |
--------------------------------------------------------------------------------
/boagent/api/config.py:
--------------------------------------------------------------------------------
1 | from pydantic_settings import BaseSettings
2 |
3 |
class Settings(BaseSettings):
    """Runtime configuration for Boagent.

    Defaults below can be overridden via environment variables
    (pydantic BaseSettings behavior).
    """

    # Project metadata exposed through the FastAPI application.
    project_name: str = "boagent"
    project_version: str = "0.1.0"
    project_description: str = "Boagent is a local API and monitoring agent to help you estimate the environmental impact of your machine, including software activity and hardware embodied impacts."
    # OpenAPI tag descriptions, grouped by route family.
    tags_metadata: list = [
        {"name": "info", "description": "Returns runtime configuration of Boagent."},
        {"name": "web", "description": "Web UI to explore Boagent metrics."},
        {
            "name": "csv",
            "description": "Internal route. Generates and returns a CSV-formatted dataset with metrics needed by the webUI",
        },
        {
            "name": "metrics",
            "description": "Returns metrics as a Prometheus HTTP exporter.",
        },
        {
            "name": "query",
            "description": "This is the main route. Returns metrics in JSON format.",
        },
    ]
    # Time constants and lifetime default used for impact allocation.
    seconds_in_one_year: int = 31536000
    default_lifetime: float = 5.0
    # File-system locations, relative to the working directory.
    hardware_file_path: str = "./hardware_data.json"
    power_file_path: str = "./power_data.json"
    hardware_cli: str = "./boagent/hardware/hardware_cli.py"
    # External service endpoints.
    boaviztapi_endpoint: str = "http://localhost:5000"
    db_path: str = "../../db/boagent.db"
    public_path: str = "./boagent/public"
    assets_path: str = "./boagent/public/assets/"
    # Carbon Aware API integration settings.
    carbon_aware_api_endpoint: str = "https://carbon-aware-api.azurewebsites.net"
    carbon_aware_api_token: str = "token"
    azure_location: str = "northeurope"
36 |
--------------------------------------------------------------------------------
/boagent/api/exceptions.py:
--------------------------------------------------------------------------------
class InvalidPIDException(Exception):
    """Raised when a queried PID is absent from the collected metrics data."""

    def __init__(self, pid):
        self.pid = pid
        self.message = (
            f"Process_id {self.pid} has not been found in metrics data."
            " Check the queried PID."
        )
        super().__init__(self.message)
6 |
--------------------------------------------------------------------------------
/boagent/api/models.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 |
class WorkloadTime(BaseModel):
    """One segment of a workload profile: a share of time spent at a given load."""

    # Percentage of total time spent at this load level.
    time_percentage: float = 0.0
    # Load percentage applied during that time share.
    load_percentage: float = 0.0


# Example request body for the POST /query route: half of the time idle,
# a quarter at 60% load and a quarter at full load.
time_workload_example = {
    "time_workload": [
        {"time_percentage": 50, "load_percentage": 0},
        {"time_percentage": 25, "load_percentage": 60},
        {"time_percentage": 25, "load_percentage": 100},
    ]
}
16 |
--------------------------------------------------------------------------------
/boagent/api/process.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | from .exceptions import InvalidPIDException
3 |
4 |
class Process:
    """A process tracked by Scaphandre, with its share of embedded impacts.

    Wraps the metrics payload produced by get_metrics (verbose=True,
    measure_power=True) and computes, for one PID, the share of each
    hardware component's embedded impacts attributable to that process.
    """

    def __init__(self, metrics_data, pid):
        # metrics_data: dict containing raw_data.power_data, raw_data.hardware_data
        # and raw_data.boaviztapi_data, as built by get_metrics.
        self.metrics_data = metrics_data
        # Fail fast with InvalidPIDException when the PID is absent.
        self.validate_pid(pid)
        self._pid = pid
        self.process_info = self.get_process_info()

    def validate_pid(self, value):
        """Return `value` if it appears in the power data, else raise InvalidPIDException."""
        timestamps = [
            timestamp
            for timestamp in self.metrics_data["raw_data"]["power_data"]["raw_data"]
        ]
        consumers = [timestamp["consumers"] for timestamp in timestamps]
        pids = set([process["pid"] for consumer in consumers for process in consumer])
        if value in pids:
            return value
        else:
            raise InvalidPIDException(value)

    @property
    def pid(self):
        """The PID queried in data coming from Scaphandre."""
        # BUG FIX: the getter previously declared an extra required `pid`
        # parameter, so every `instance.pid` access raised a TypeError.
        return self._pid

    @pid.setter
    def pid(self, value):
        self._pid = self.validate_pid(value)

    def get_process_info(self):
        """Collect this PID's consumer entries across all Scaphandre timestamps."""
        timestamps = [
            timestamp
            for timestamp in self.metrics_data["raw_data"]["power_data"]["raw_data"]
        ]
        consumers = [timestamp["consumers"] for timestamp in timestamps]
        process_info = [
            process
            for consumer in consumers
            for process in consumer
            if process["pid"] == self._pid
        ]
        return process_info

    @property
    def process_name(self):
        """Basename of the process executable path."""
        process_name = self.process_info[0]["exe"].split("/")[-1]
        return process_name

    @property
    def process_exe(self):
        """Full path of the process executable."""
        process_exe = self.process_info[0]["exe"]
        return process_exe

    def get_total_ram_in_bytes(self):
        """Total installed RAM in bytes (inventory capacities are multiplied by 2**30)."""
        ram_data = self.metrics_data["raw_data"]["hardware_data"]["rams"]
        total_ram_in_bytes = (
            sum(ram_unit["capacity"] for ram_unit in ram_data) * 1073741824
        )
        return total_ram_in_bytes

    def get_disk_usage_in_bytes(self):
        """Used disk space in bytes (total minus available) from host power data."""
        # Data from Scaphandre can be empty on first returned element in the array
        try:
            key_for_disk_total_bytes = self.metrics_data["raw_data"]["power_data"][
                "raw_data"
            ][0]["host"]["components"]["disks"][0]["disk_total_bytes"]
        except IndexError:
            key_for_disk_total_bytes = self.metrics_data["raw_data"]["power_data"][
                "raw_data"
            ][1]["host"]["components"]["disks"][0]["disk_total_bytes"]

        try:
            key_for_disk_available_bytes = self.metrics_data["raw_data"]["power_data"][
                "raw_data"
            ][0]["host"]["components"]["disks"][0]["disk_available_bytes"]
        except IndexError:
            key_for_disk_available_bytes = self.metrics_data["raw_data"]["power_data"][
                "raw_data"
            ][1]["host"]["components"]["disks"][0]["disk_available_bytes"]

        disk_total_bytes = int(key_for_disk_total_bytes)
        disk_available_bytes = int(key_for_disk_available_bytes)
        disk_usage_in_bytes = disk_total_bytes - disk_available_bytes
        return disk_usage_in_bytes

    @property
    def ram_shares(self):
        """Per-timestamp RAM usage of the process as a percentage of total RAM."""
        process_ram_shares = [
            (
                (
                    int(timestamp["resources_usage"]["memory_usage"])
                    / self.get_total_ram_in_bytes()
                )
                * 100
            )
            for timestamp in self.process_info
        ]

        return process_ram_shares

    @property
    def cpu_load_shares(self):
        """Per-timestamp CPU usage percentages reported by Scaphandre."""
        process_cpu_load_shares = [
            float(timestamp["resources_usage"]["cpu_usage"])
            for timestamp in self.process_info
        ]
        return process_cpu_load_shares

    @property
    def storage_shares(self):
        """Per-timestamp disk writes of the process as a percentage of used disk space."""
        process_storage_shares = [
            (
                (
                    int(timestamp["resources_usage"]["disk_usage_write"])
                    / self.get_disk_usage_in_bytes()
                )
                * 100
            )
            for timestamp in self.process_info
        ]
        return process_storage_shares

    def get_component_embedded_impact_shares(self, queried_component, component_shares):
        """Scale a component's embedded impacts by the process usage shares.

        queried_component: BoaviztAPI verbose component prefix (e.g. "CPU").
        component_shares: usage percentages per timestamp for that component.
        Returns a list of (f"{impact}_embedded_share", value) tuples.
        """
        component = f"{queried_component}-1"
        component_impacts_data = self.metrics_data["raw_data"]["boaviztapi_data"][
            "verbose"
        ][component]["impacts"]
        component_embedded_impact_shares = list()
        for impact in component_impacts_data:
            impact_embedded_value = component_impacts_data[impact]["embedded"]["value"]
            for process_component_share in component_shares:
                if process_component_share == 0.0:
                    component_embedded_impact = (
                        f"{impact}_embedded_share",
                        float(process_component_share),
                    )
                    component_embedded_impact_shares.append(component_embedded_impact)
                else:
                    component_embedded_impact_share = (
                        float(impact_embedded_value) * float(process_component_share)
                    ) / 100
                    component_embedded_impact = (
                        f"{impact}_embedded_share",
                        float(component_embedded_impact_share),
                    )
                    component_embedded_impact_shares.append(component_embedded_impact)
        return component_embedded_impact_shares

    def get_component_embedded_impact_values(self, queried_component):
        """Aggregate average/max/min embedded impact shares for one component.

        queried_component: one of "cpu", "ram", "ssd", "hdd".
        NOTE(review): an empty share list leads to ZeroDivisionError in the
        averages below — confirm whether callers can hit that case.
        """
        if queried_component == "cpu":
            component_impact_shares = self.get_component_embedded_impact_shares(
                "CPU", self.cpu_load_shares
            )
        elif queried_component == "ram":
            component_impact_shares = self.get_component_embedded_impact_shares(
                "RAM", self.ram_shares
            )
        elif queried_component == "ssd":
            component_impact_shares = self.get_component_embedded_impact_shares(
                "SSD", self.storage_shares
            )
        elif queried_component == "hdd":
            component_impact_shares = self.get_component_embedded_impact_shares(
                "HDD", self.storage_shares
            )
        else:
            return "Queried component is not available for evaluation."

        gwp_list = defaultdict(list)
        adp_list = defaultdict(list)
        pe_list = defaultdict(list)

        for impact_key, impact_value in component_impact_shares:
            if impact_key == "gwp_embedded_share":
                gwp_list[impact_key].append(impact_value)
            if impact_key == "adp_embedded_share":
                adp_list[impact_key].append(impact_value)
            if impact_key == "pe_embedded_share":
                pe_list[impact_key].append(impact_value)

        gwp_average = sum(gwp_list["gwp_embedded_share"]) / len(
            gwp_list["gwp_embedded_share"]
        )
        adp_average = sum(adp_list["adp_embedded_share"]) / len(
            adp_list["adp_embedded_share"]
        )
        pe_average = sum(pe_list["pe_embedded_share"]) / len(
            pe_list["pe_embedded_share"]
        )

        gwp_max = max(gwp_list["gwp_embedded_share"])
        adp_max = max(adp_list["adp_embedded_share"])
        pe_max = max(pe_list["pe_embedded_share"])

        gwp_min = min(gwp_list["gwp_embedded_share"])
        adp_min = min(adp_list["adp_embedded_share"])
        pe_min = min(pe_list["pe_embedded_share"])

        component_embedded_impact_values = {
            f"gwp_{queried_component}_average_impact": gwp_average,
            f"adp_{queried_component}_average_impact": adp_average,
            f"pe_{queried_component}_average_impact": pe_average,
            f"gwp_{queried_component}_max_impact": gwp_max,
            f"adp_{queried_component}_max_impact": adp_max,
            f"pe_{queried_component}_max_impact": pe_max,
            f"gwp_{queried_component}_min_impact": gwp_min,
            f"adp_{queried_component}_min_impact": adp_min,
            f"pe_{queried_component}_min_impact": pe_min,
        }
        return component_embedded_impact_values

    @property
    def embedded_impact_values(self):
        """Embedded impact values per component for this PID; components missing
        from the BoaviztAPI data are skipped with a console message."""
        process_embedded_impact_values = {
            "pid": self._pid,
            "process_embedded_impacts": {},
        }
        components = ["cpu", "ram", "hdd", "ssd"]

        for component in components:
            try:
                process_component_embedded_impact_values = (
                    self.get_component_embedded_impact_values(component)
                )
                process_embedded_impact_values["process_embedded_impacts"][
                    f"process_{component}_embedded_impact_values"
                ] = process_component_embedded_impact_values
            except KeyError as absent_component:
                print(
                    f"Queried component is not present in Boagent metrics: {absent_component}"
                )

        return process_embedded_impact_values
245 |
--------------------------------------------------------------------------------
/boagent/api/utils.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from boaviztapi_sdk import ApiClient, Configuration
3 | from dateutil import parser
4 | from .config import Settings
5 | from os import PathLike
6 |
# Module-level settings, resolved once at import time.
settings = Settings()
BOAVIZTAPI_ENDPOINT = settings.boaviztapi_endpoint
9 |
10 |
def sort_ram(items: list):
    """Group RAM sticks by (capacity, manufacturer) and count units per group.

    items: list of dicts with at least a "capacity" key and optionally a
    "manufacturer" key. Returns a list of
    {"units", "capacity"[, "manufacturer"]} dicts, in first-seen order.
    """
    hash_map = {}
    for r in items:
        if "manufacturer" in r:
            key = "{}:{}".format(r["capacity"], r["manufacturer"])
            if key in hash_map:
                hash_map[key]["units"] += 1
            else:
                hash_map[key] = {
                    "units": 1,
                    "manufacturer": r["manufacturer"],
                    "capacity": r["capacity"],
                }
        else:
            key = "{}".format(r["capacity"])
            # BUG FIX: duplicates without a manufacturer were previously
            # overwritten with units=1 instead of being counted.
            if key in hash_map:
                hash_map[key]["units"] += 1
            else:
                hash_map[key] = {
                    "units": 1,
                    "capacity": r["capacity"],
                }
    return [v for k, v in hash_map.items()]
29 |
30 |
def sort_disks(items: list):
    """Group disks by (capacity, manufacturer, type) and count units per group.

    items: list of dicts with "capacity", "manufacturer" and "type" keys.
    NOTE(review): a disk missing any of those keys raises KeyError — confirm
    the lshw inventory always provides them.
    """
    grouped = {}
    for disk in items:
        key = "{}:{}:{}".format(disk["capacity"], disk["manufacturer"], disk["type"])
        if key in grouped:
            grouped[key]["units"] += 1
        else:
            grouped[key] = {
                "units": 1,
                "manufacturer": disk["manufacturer"],
                "capacity": disk["capacity"],
                "type": disk["type"],
            }
    return list(grouped.values())
46 |
47 |
def get_boavizta_api_client():
    """Build a BoaviztAPI SDK client pointed at the configured endpoint."""
    configuration = Configuration(
        host=BOAVIZTAPI_ENDPOINT,
    )
    return ApiClient(configuration=configuration)
54 |
55 |
def iso8601_or_timestamp_as_timestamp(iso_time: str) -> float:
    """
    Takes an str that's either a timestamp or an iso8601
    time. Returns a float that represents a timestamp.
    """
    # Fast path for the API's default sentinel values ("0.0"/"0").
    if iso_time == "0.0" or iso_time == "0":
        return float(iso_time)
    else:
        dt = None
        try:
            # First attempt: interpret the string as an ISO 8601 datetime.
            dt = parser.parse(iso_time)
            print("{} is an iso 8601 datetime".format(iso_time))
        except Exception as e:
            print("{} is not an iso 8601 datetime".format(iso_time))
            print("Exception : {}".format(e))
            try:
                # Second attempt: interpret the string as a numeric UNIX timestamp.
                dt = datetime.fromtimestamp(int(round(float(iso_time))))
                print("{} is a timestamp".format(iso_time))
            except Exception as e:
                print("{} is not a timestamp".format(iso_time))
                print("Exception : {}".format(e))
                # NOTE(review): parser.parse already failed above, so this call
                # will usually raise again inside the except block — confirm
                # whether this diagnostic line is intended.
                print("Parser would give : {}".format(parser.parse(iso_time)))
        finally:
            # NOTE(review): returning from `finally` silently discards any
            # exception raised in the except blocks above — confirm intended.
            if dt:
                return dt.timestamp()
            else:
                return float(iso_time)
83 |
84 |
def format_prometheus_output(res, verbose: bool):
    """Render the get_metrics result dict in the Prometheus exposition format.

    res: dict of metric entries; top-level entries with "value"/"type" keys
    become metrics directly, other entries are scanned one level deeper.
    verbose: also emit total and per-component embedded impact metrics from
    the nested BoaviztAPI data.
    """
    response = ""
    for k, v in res.items():
        # Case 1: the entry itself is a metric (has value/type keys).
        if "value" in v and "type" in v:
            if "description" not in v:
                v["description"] = "TODO: define me"
            if type(v["value"]) is float:
                response += format_prometheus_metric(
                    k,
                    "{}. {}".format(
                        v["description"],
                        "In {} ({}).".format(v["long_unit"], v["unit"]),
                    ),
                    v["type"],
                    v["value"],
                )
            # BoaviztAPI sometimes nests the numeric value one level deeper.
            if type(v["value"]) is dict:
                response += format_prometheus_metric(
                    k,
                    "{}. {}".format(
                        v["description"],
                        "In {} ({}).".format(v["long_unit"], v["unit"]),
                    ),
                    v["type"],
                    v["value"]["value"],
                )

        # Case 2: the entry is a container of metrics, one level deep.
        else:
            for x, y in v.items():
                if type(y) is float:
                    pass
                else:
                    if "value" in y and "type" in y:
                        if "description" not in y:
                            y["description"] = "TODO: define me"
                        response += format_prometheus_metric(
                            "{}_{}".format(k, x),
                            "{}. {}".format(
                                y["description"],
                                "In {} ({}).".format(y["long_unit"], y["unit"]),
                            ),
                            y["type"],
                            y["value"],
                        )
        # In verbose mode, also expose the BoaviztAPI totals and the
        # per-component embedded impacts.
        if verbose:
            if "boaviztapi_data" in v:
                for impact_name, impact_items in v["boaviztapi_data"][
                    "impacts"
                ].items():
                    if "unit" in impact_items:
                        for value in impact_items["embedded"]:
                            if value == "warnings":
                                pass
                            else:
                                response += format_prometheus_metric(
                                    "{}".format(f"{impact_name}_total_impact_{value}"),
                                    "{}. {}".format(
                                        impact_items["description"],
                                        "In {}".format(impact_items["unit"]),
                                    ),
                                    "{}".format("gauge"),
                                    "{}".format(f"{impact_items['embedded'][value]}"),
                                )

            # NOTE(review): this loop indexes v["boaviztapi_data"] without the
            # `"boaviztapi_data" in v` guard used above, so it can raise
            # KeyError for entries lacking that key — confirm whether it
            # should be nested under the preceding conditional.
            for component_name, component_impacts in v["boaviztapi_data"][
                "verbose"
            ].items():
                formatted_component_name = component_name.lower().replace("-", "_")
                if "impacts" in component_impacts:
                    for impact, items in component_impacts["impacts"].items():
                        for component_embedded_impact_metric, value in items[
                            "embedded"
                        ].items():
                            if component_embedded_impact_metric == "warnings":
                                pass
                            else:
                                response += format_prometheus_metric(
                                    "{}".format(
                                        f"{formatted_component_name}_{impact}_embedded_impact_{component_embedded_impact_metric}"
                                    ),
                                    "{}. {}".format(
                                        items["description"],
                                        "In {}".format(items["unit"]),
                                    ),
                                    "{}".format("gauge"),
                                    "{}".format(
                                        f"{value}",
                                    ),
                                )

    return response
176 |
177 |
def format_prometheus_metric(
    metric_name, metric_description, metric_type, metric_value
):
    """Render one metric in Prometheus text exposition format.

    :param metric_name: metric identifier used on all three lines.
    :param metric_description: free text for the ``# HELP`` line.
    :param metric_type: Prometheus type (e.g. ``gauge``) for ``# TYPE``.
    :param metric_value: sample value, interpolated via ``str.format``.
    :return: three lines (HELP, TYPE, sample), each newline-terminated.
    """
    lines = [
        "# HELP {} {}".format(metric_name, metric_description),
        "# TYPE {} {}".format(metric_name, metric_type),
        "{} {}".format(metric_name, metric_value),
    ]
    return "\n".join(lines) + "\n"
193 |
194 |
def filter_date_range(data: list, start_date: datetime, stop_date: datetime) -> list:
    """Return the items of ``data`` whose ``timestamp`` lies in
    ``[start_date, stop_date)``.

    :param data: list of dicts each carrying a numeric ``timestamp``
        (seconds since the epoch).
    :param start_date: inclusive lower bound.
    :param stop_date: exclusive upper bound.
    :return: the matching items, in their original order.
    """
    start = datetime.timestamp(start_date)
    end = datetime.timestamp(stop_date)

    # Filter item-by-item: identical to the previous index-slicing
    # approach for timestamp-sorted input, but also correct for
    # unsorted or empty input (the old code silently assumed the list
    # was sorted by timestamp and returned a wrong slice otherwise).
    return [d for d in data if start <= d["timestamp"] < end]
210 |
211 |
212 | def format_scaphandre_json(file: str | PathLike) -> str:
213 | with open(file, "r") as fd:
214 | formatted_scaphandre_json = f"[{fd.read()}]".replace(
215 | '{"host"', ',{"host"'
216 | ).replace(',{"host"', '{"host"', 1)
217 | return formatted_scaphandre_json
218 |
--------------------------------------------------------------------------------
/boagent/hardware/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/hardware/__init__.py
--------------------------------------------------------------------------------
/boagent/hardware/lshw.py:
--------------------------------------------------------------------------------
1 | """
2 | This file is modified code issued from https://github.com/Solvik/netbox-agent/blob/master/netbox_agent/lshw.py,
3 | copyright under Apache-2.0 licence.
4 | """
5 |
6 | from shutil import which
7 | import subprocess
8 | import json
9 | import sys
10 | import re
11 | import os
12 |
13 | SYS_BLOCK_PATH = "/sys/block"
14 |
15 |
def is_tool(name):
    """Check whether `name` is on PATH and marked as executable"""
    located = which(name)
    return located is not None
19 |
20 |
def serialized_lshw_output():
    """Run ``lshw -quiet -json`` and return the parsed report.

    Some lshw versions wrap the report in a single-element list; in
    that case the first element is returned so callers always get the
    top-level mapping.

    :return: the lshw hardware report as a dict.
    :raises Exception: when lshw's output is not valid JSON, which in
        practice happens when lshw is not executed as root (stderr is
        discarded by the ``2> /dev/null`` redirection).
    """
    lshw_output = subprocess.getoutput("lshw -quiet -json 2> /dev/null")
    try:
        serialized_lshw_output = json.loads(lshw_output)
    except json.JSONDecodeError:
        # Fixed typo in the user-facing message ("do be" -> "to be").
        raise Exception("lshw does not seem to be executed as root.")
    if isinstance(serialized_lshw_output, list):
        return serialized_lshw_output[0]
    return serialized_lshw_output
32 |
33 |
def serialized_nvme_output():
    """Return the parsed JSON report of ``nvme -list -o json``.

    :return: nvme-cli's device listing as a dict.
    :raises FileNotFoundError: if the ``nvme`` binary is not installed.
    """
    raw_output = subprocess.check_output(
        ["nvme", "-list", "-o", "json"], encoding="utf8"
    )
    return json.loads(raw_output)
40 |
41 |
class Lshw:
    """Flatten an lshw JSON report into component lists.

    On construction, runs lshw (via ``serialized_lshw_output``) and
    walks the report tree, filling ``cpus``, ``memories``, ``disks``,
    ``gpus`` and ``power`` with simplified dicts. NVMe disks are
    discovered through nvme-cli when lshw reports an nvme storage
    controller.

    :raises Exception: when lshw is not installed.
    """

    def __init__(self):
        if not is_tool("lshw"):
            raise Exception("lshw does not seem to be installed.")
        self.hw_info = serialized_lshw_output()
        self.info = {}
        self.memories = []
        self.cpus = []
        self.power = []
        self.disks = []
        self.gpus = []
        # First child of the report is assumed to hold motherboard
        # identification — TODO confirm this holds on all lshw versions.
        self.motherboard_serial = self.hw_info["children"][0].get("serial", "No S/N")
        self.motherboard = self.hw_info["children"][0].get("product", "Motherboard")

        # One-level walk of the report; deeper nesting is only followed
        # for "bridge" nodes (see walk_bridge).
        for k in self.hw_info["children"]:
            if k["class"] == "power":
                self.power.append(k)

            if "children" in k:
                for j in k["children"]:
                    if j["class"] == "generic":
                        continue

                    if j["class"] == "storage":
                        self.find_storage(j)

                    if j["class"] == "memory":
                        self.find_memories(j)

                    if j["class"] == "processor":
                        self.find_cpus(j)

                    if j["class"] == "bridge":
                        self.walk_bridge(j)

    def get_hw_linux(self, hwclass):
        """Return the collected component list for ``hwclass``.

        :param hwclass: one of "cpu", "gpu", "storage", "memory".
        :return: the matching list, or None for any other value.
        """
        if hwclass == "cpu":
            return self.cpus
        if hwclass == "gpu":
            return self.gpus
        """ if hwclass == "network":
            return self.interfaces """
        if hwclass == "storage":
            return self.disks
        if hwclass == "memory":
            return self.memories

    """
    def find_network(self, obj):
        # Some interfaces do not have device (logical) name (eth0, for
        # instance), such as not connected network mezzanine cards in blade
        # servers. In such situations, the card will be named `unknown[0-9]`.
        unkn_intfs = []
        for i in self.interfaces:
            # newer versions of lshw can return a list of names, see issue #227
            if not isinstance(i["name"], list):
                if i["name"].startswith("unknown"):
                    unkn_intfs.push(i)
            else:
                for j in i["name"]:
                    if j.startswith("unknown"):
                        unkn_intfs.push(j)

        unkn_name = "unknown{}".format(len(unkn_intfs))
        self.interfaces.append(
            {
                "name": obj.get("logicalname", unkn_name),
                "macaddress": obj.get("serial", ""),
                "serial": obj.get("serial", ""),
                "product": obj["product"],
                "vendor": obj["vendor"],
                "description": obj["description"],
            }
        )
    """

    def find_storage(self, obj):
        """Collect disks attached to an lshw storage controller node.

        Regular disks are read from the node's children; NVMe disks are
        listed through nvme-cli when the controller's driver mentions
        "nvme" (nvme-cli failures are silently ignored).

        :param obj: an lshw node of class "storage".
        :raises Exception: when an NVMe controller is found but the
            ``nvme`` binary is not installed.
        """
        if "children" in obj:
            for device in obj["children"]:
                if "vendor" in device and "size" in device:
                    d = {
                        # "units": +1 is just the integer 1 (unary plus).
                        "units": +1,
                        "manufacturer": self.check_disk_vendor(
                            device["vendor"]
                        ).lower(),
                        "capacity": device["size"],
                        "logicalname": device["logicalname"],
                        "type": self.get_disk_type(device["logicalname"]),
                    }
                    self.disks.append(d)
        if "configuration" in obj:
            if "nvme" in obj["configuration"]["driver"]:
                if not is_tool("nvme"):
                    raise Exception("nvme-cli >= 1.0 does not seem to be installed")
                try:
                    nvme = serialized_nvme_output()
                    for device in nvme["Devices"]:
                        d = {
                            "units": +1,
                            "logicalname": device["DevicePath"],
                            "manufacturer": self.check_disk_vendor(
                                device["ModelNumber"]
                            ).lower(),
                            "type": "ssd",
                            # PhysicalSize is in bytes; convert to GiB.
                            "capacity": device["PhysicalSize"] // 1073741824,
                        }
                        self.disks.append(d)
                except Exception:
                    # Best-effort: keep whatever lshw already gave us.
                    pass

    def find_cpus(self, obj):
        """Append a simplified CPU entry from an lshw processor node.

        :param obj: an lshw node of class "processor"; ignored when it
            has no "product" key.
        """
        if "product" in obj:
            self.cpus.append(
                {
                    "units": +1,
                    "name": obj["product"],
                    "manufacturer": obj["vendor"],
                    "core_units": int(obj["configuration"]["cores"]),
                }
            )

    def find_memories(self, obj):
        """Append one entry per populated DIMM of a memory node.

        :param obj: an lshw node of class "memory"; nodes without
            children (not DIMM banks) are ignored.
        """
        if "children" not in obj:
            # print("not a DIMM memory.")
            return

        for dimm in obj["children"]:
            if "empty" in dimm["description"]:
                continue

            self.memories.append(
                {
                    "units": +1,
                    "manufacturer": dimm.get("vendor", "N/A"),
                    # Size is reported in bytes; this converts to GB
                    # (// 2**20 // 1024) — TODO confirm intended unit.
                    "capacity": dimm.get("size", 0) // 2**20 // 1024,
                }
            )

    def find_gpus(self, obj):
        """Append a GPU entry from an lshw display node.

        :param obj: an lshw node of class "display"; ignored when it
            has no "product" key.
        """
        if "product" in obj:
            self.gpus.append(
                {
                    "product": obj["product"],
                    "vendor": obj["vendor"],
                    "description": obj["description"],
                }
            )

    def walk_bridge(self, obj):
        """Explore a PCI bridge node two levels deep for storage and
        display devices.

        :param obj: an lshw node of class "bridge".
        """
        if "children" not in obj:
            return

        for bus in obj["children"]:
            if bus["class"] == "storage":
                self.find_storage(bus)
            if bus["class"] == "display":
                self.find_gpus(bus)

            if "children" in bus:
                for b in bus["children"]:
                    if b["class"] == "storage":
                        self.find_storage(b)
                    if b["class"] == "display":
                        self.find_gpus(b)

    def check_disk_vendor(self, model_string: str) -> str:
        """Extract a vendor name from a disk model string.

        Heuristic: of the first two whitespace-separated tokens, return
        the one that contains no digit (a token with digits is assumed
        to be a model number, not a vendor).

        :param model_string: vendor/model string reported by lshw or
            nvme-cli.
        :return: the vendor token.
        :raises Exception: when the string is a single token containing
            digits (no parsable vendor).
        """
        split_model = model_string.split(" ")
        vendor = ""

        if len(split_model) == 1:
            check_string_for_numbers = bool(re.search("\\d", model_string))
            if check_string_for_numbers:
                raise Exception(
                    "Lshw did not output a parsable manufacturer name for this device."
                )
            else:
                return model_string

        model_first_str = split_model[0]
        model_second_str = split_model[1]
        check_first_string_for_numbers = re.search("\\d", model_first_str)
        result = bool(check_first_string_for_numbers)

        if result:
            vendor = model_second_str
            return vendor
        else:
            vendor = model_first_str
            return vendor

    def get_disk_type(self, dev_path: str) -> str:
        """Classify a block device as "ssd", "hdd" or "unknown" from
        its sysfs rotational flag.

        :param dev_path: device path such as "/dev/sda".
        """

        rotational = self.get_rotational_int(dev_path)

        if rotational == 0:
            return "ssd"
        if rotational == 1:
            return "hdd"
        if rotational == 2:
            return "unknown"
        return "unknown"

    def get_rotational_int(self, dev_path: str) -> int:
        """Read /sys/block/<dev>/queue/rotational for a device.

        :param dev_path: device path such as "/dev/sda"; stripping the
            "/dev" prefix keeps the leading slash, so the concatenation
            below yields e.g. "/sys/block/sda/queue/rotational".
        :return: 0 (non-rotational), 1 (rotational), or 2 when the
            sysfs entry does not exist.
        """

        device = dev_path.removeprefix("/dev")

        try:
            # strict=True makes realpath raise OSError when the path
            # does not exist (Python 3.10+).
            rotational_fp = os.path.realpath(
                f"{SYS_BLOCK_PATH}{device}/queue/rotational", strict=True
            )

        except OSError:
            sys.stderr.write("Rotational file was not found")
            return 2
        else:
            with open(rotational_fp, "r") as file:
                rotational_int = int(file.read())
            return rotational_int
260 |
--------------------------------------------------------------------------------
/boagent/public/assets/boavizta-logo-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/boavizta-logo-4.png
--------------------------------------------------------------------------------
/boagent/public/assets/data.csv:
--------------------------------------------------------------------------------
1 | Date,gwp_intensity,cpu_load,ram_load,cpu_conso,ram_conso,server_conso,gwp_ram,gwp_cpu,gwp_server,range_type,gwp_fix
2 | 2022/10/21 00:00:00,98,34,12,24,12,46.8,1176,2352,4586.4,1,890
3 | 2022/10/21 00:05:00,96,35,15,22,11,42.9,1056,2112,4118.4,1,890
4 | 2022/10/21 00:10:00,94,33,18,21,14,45.5,1316,1974,4277,1,890
5 | 2022/10/21 00:15:00,92,23,22,23,16,50.7,1472,2116,4664.4,1,890
6 | 2022/10/21 00:20:00,90,36,29,25,12,48.1,1080,2250,4329,1,890
7 | 2022/10/21 00:25:00,88,44,22,28,15,55.9,1320,2464,4919.2,1,890
8 | 2022/10/21 00:30:00,89,48,34,29,21,65,1869,2581,5785,1,890
9 | 2022/10/21 00:35:00,97,57,34,32,13,58.5,1261,3104,5674.5,1,890
10 | 2022/10/21 00:40:00,67,68,32,35,15,65,1005,2345,4355,1,890
11 | 2022/10/21 00:45:00,66,90,31,23,12,45.5,792,1518,3003,1,890
12 | 2022/10/21 00:50:00,66,87,59,24,11,45.5,726,1584,3003,1,890
13 | 2022/10/21 00:55:00,98,78,70,34,16,65,1568,3332,6370,1,890
14 | 2022/10/21 01:00:00,96,75,99,28,19,61.1,1824,2688,5865.6,2,890
15 | 2022/10/21 01:05:00,94,45,40,27,22,63.7,2068,2538,5987.8,2,890
16 | 2022/10/21 01:10:00,92,55,30,24,18,54.6,1656,2208,5023.2,2,890
17 | 2022/10/21 01:15:00,90,34,22,22,14,46.8,1260,1980,4212,2,890
18 | 2022/10/21 01:20:00,88,35,16,21,11,41.6,968,1848,3660.8,2,890
19 | 2022/10/21 01:25:00,89,33,12,23,12,45.5,1068,2047,4049.5,2,890
20 | 2022/10/21 01:30:00,97,23,15,25,11,46.8,1067,2425,4539.6,2,890
21 | 2022/10/21 01:35:00,67,36,18,28,14,54.6,938,1876,3658.2,1,890
22 | 2022/10/21 01:40:00,66,44,22,29,16,58.5,1056,1914,3861,1,890
23 | 2022/10/21 01:45:00,98,48,29,32,12,57.2,1176,3136,5605.6,1,890
24 | 2022/10/21 01:50:00,96,57,22,35,15,65,1440,3360,6240,1,890
25 | 2022/10/21 01:55:00,94,68,34,23,21,57.2,1974,2162,5376.8,1,890
26 | 2022/10/21 02:00:00,92,90,34,24,13,48.1,1196,2208,4425.2,1,890
27 | 2022/10/21 02:05:00,90,87,32,34,15,63.7,1350,3060,5733,1,890
28 | 2022/10/21 02:10:00,88,78,31,28,12,52,1056,2464,4576,1,890
29 | 2022/10/21 02:15:00,89,75,59,27,11,49.4,979,2403,4396.6,1,890
30 | 2022/10/21 02:20:00,97,45,70,24,16,52,1552,2328,5044,0,890
31 | 2022/10/21 02:25:00,67,55,99,22,19,53.3,1273,1474,3571.1,0,890
32 | 2022/10/21 02:30:00,66,34,40,21,22,55.9,1452,1386,3689.4,0,890
33 | 2022/10/21 02:35:00,66,35,30,23,18,53.3,1188,1518,3517.8,0,890
34 | 2022/10/21 02:40:00,98,33,22,25,14,50.7,1372,2450,4968.6,0,890
35 | 2022/10/21 02:45:00,96,23,16,28,11,50.7,1056,2688,4867.2,0,890
36 | 2022/10/21 02:50:00,94,36,12,29,12,53.3,1128,2726,5010.2,0,890
37 | 2022/10/21 02:55:00,92,44,15,32,11,55.9,1012,2944,5142.8,0,890
38 | 2022/10/21 03:00:00,90,48,18,35,14,63.7,1260,3150,5733,0,890
39 | 2022/10/21 03:05:00,88,57,22,23,16,50.7,1408,2024,4461.6,1,890
40 | 2022/10/21 03:10:00,89,68,29,24,12,46.8,1068,2136,4165.2,1,890
41 | 2022/10/21 03:15:00,97,90,22,34,15,63.7,1455,3298,6178.9,1,890
42 | 2022/10/21 03:20:00,67,87,34,28,21,63.7,1407,1876,4267.9,1,890
43 | 2022/10/21 03:25:00,66,78,34,27,13,52,858,1782,3432,1,890
44 | 2022/10/21 03:30:00,98,75,32,24,15,50.7,1470,2352,4968.6,1,890
45 | 2022/10/21 03:35:00,96,45,31,22,12,44.2,1152,2112,4243.2,1,890
46 | 2022/10/21 03:40:00,94,55,59,21,11,41.6,1034,1974,3910.4,1,890
47 | 2022/10/21 03:45:00,92,34,70,23,16,50.7,1472,2116,4664.4,1,890
48 | 2022/10/21 03:50:00,90,35,99,25,19,57.2,1710,2250,5148,1,890
49 | 2022/10/21 03:55:00,88,33,40,28,22,65,1936,2464,5720,1,890
50 | 2022/10/21 04:00:00,89,23,30,29,18,61.1,1602,2581,5437.9,1,890
51 | 2022/10/21 04:05:00,97,36,22,32,12,57.2,1164,3104,5548.4,1,890
52 | 2022/10/21 04:10:00,67,44,16,35,11,59.8,737,2345,4006.6,1,890
53 | 2022/10/21 04:15:00,66,48,12,23,14,48.1,924,1518,3174.6,1,890
54 | 2022/10/21 04:20:00,66,57,15,24,16,52,1056,1584,3432,2,890
55 | 2022/10/21 04:25:00,98,68,18,24,12,46.8,1176,2352,4586.4,2,890
56 | 2022/10/21 04:30:00,96,90,22,22,15,48.1,1440,2112,4617.6,2,890
57 | 2022/10/21 04:35:00,94,34,29,21,21,54.6,1974,1974,5132.4,2,890
58 | 2022/10/21 04:40:00,92,35,22,23,13,46.8,1196,2116,4305.6,2,890
59 | 2022/10/21 04:45:00,90,33,34,25,15,52,1350,2250,4680,0,890
60 | 2022/10/21 04:50:00,88,23,34,28,12,52,1056,2464,4576,0,890
61 | 2022/10/21 04:55:00,89,36,32,29,11,52,979,2581,4628,0,890
62 | 2022/10/21 05:00:00,97,44,31,32,16,62.4,1552,3104,6052.8,0,890
--------------------------------------------------------------------------------
/boagent/public/assets/dygraph.css:
--------------------------------------------------------------------------------
/* Center each graph horizontally and give it vertical spacing. */
.graph { margin-top: 50px; margin-bottom: 10px; margin-left: auto; margin-right: auto;}
2 |
--------------------------------------------------------------------------------
/boagent/public/assets/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/favicon.ico
--------------------------------------------------------------------------------
/boagent/public/assets/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/favicon.png
--------------------------------------------------------------------------------
/boagent/public/assets/git-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/git-logo.png
--------------------------------------------------------------------------------
/boagent/public/assets/graph.ts:
--------------------------------------------------------------------------------
1 | new Dygraph(, "ny-vs-sf.txt", {
2 | legend: 'always',
3 | title: 'NYC vs. SF',
4 | showRoller: true,
5 | rollPeriod: 14,
6 | customBars: true,
7 | ylabel: 'Temperature (F)',
8 | });
9 |
--------------------------------------------------------------------------------
/boagent/public/assets/license.txt:
--------------------------------------------------------------------------------
1 |
2 |
3 |
13 |
--------------------------------------------------------------------------------
/boagent/public/assets/main.css:
--------------------------------------------------------------------------------
1 | body{
2 | margin-right: auto;
3 | margin-left: auto;
4 | width:90%;
5 | }
6 |
7 | a{
8 | color: white;
9 | text-decoration: none;
10 | }
11 |
12 | h1{
13 | font-size: 2.5em;
14 | }
15 |
16 | a:hover{
17 | color: gray;
18 | }
19 |
20 | .title{
21 | padding: 1%;
22 | background-color: #364049;
23 | border-radius: 6px;
24 | text-align: center;
25 | margin-bottom: 1%;
26 | }
27 | .navbar{
28 | font-size: 1.4em;
29 | padding: 1%;
30 | }
31 |
32 | .box {
33 | background-color: #364049;
34 | padding:1%;
35 | border-radius: 6px;
36 | height:200px;
37 | }
38 |
39 |
40 | .scores-box{
41 | background-color: #364049;
42 | border-radius: 6px;
43 | margin-bottom: 3%;
44 | display: flex;
45 | flex-wrap: wrap;
46 | justify-content: space-around;
47 | }
48 |
49 | .score-box {
50 | width:18%;
51 | text-align: center;
52 | }
53 |
54 | #platypus-logo{
55 | width: 15%;
56 | height: auto;
57 | }
58 |
59 | .box-legend{
60 | width:90%;
61 | margin-top: 2%;
62 | margin-right: auto;
63 | margin-left: auto;
64 | }
65 |
66 | .impacts_box{
67 | display: flex;
68 | justify-content: space-between;
69 | }
70 |
71 | .impact_box{
72 | width: 48%;
73 | background-color: #364049;
74 | border-radius: 6px;
75 | text-align: center;
76 | padding-bottom: 3%;
77 | padding-top: 2%;
78 |
79 | }
80 |
81 | .txt_impact{
82 | font-size: 3em;
83 | }
84 |
85 | .txt_unit{
86 | font-size: 1.5em;
87 | }
88 |
89 |
90 | #forkongithub a
91 | {
92 | background:#000;color:#fff;text-decoration:none;font-family:arial,sans-serif;text-align:center;font-weight:bold;padding:5px 40px;font-size:1rem;line-height:2rem;position:relative;transition:0.5s;}
93 |
94 | #forkongithub a:hover
95 | {
96 | background:#c11;color:#fff;
97 | }
98 |
99 | #forkongithub a::before, #forkongithub a::after{
100 | content:"";
101 | width:100%;
102 | display:block;
103 | position:absolute;
104 | top:1px;
105 | left:0;
106 | height:1px;
107 | background:#fff;
108 | }
109 | #forkongithub a::after{
110 | bottom:1px;
111 | top:auto;
112 | }
113 |
114 | @media screen and (min-width:800px){
115 | #forkongithub{
116 | position:absolute;
117 | display:block;
118 | top:0;
119 | right:0;
120 | width:200px;
121 | overflow:hidden;
122 | height:200px;
123 | z-index:9999;
124 | }
125 | #forkongithub a{
126 | width:200px;
127 | position:absolute;
128 | top:60px;
129 | right:-60px;
130 | transform:rotate(45deg);
131 | -webkit-transform:rotate(45deg);
132 | -ms-transform:rotate(45deg);
133 | -moz-transform:rotate(45deg);
134 | -o-transform:rotate(45deg);
135 | box-shadow:4px 4px 10px rgba(0,0,0,0.8);
136 | }
137 | }
138 |
--------------------------------------------------------------------------------
/boagent/public/assets/platypus_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Boavizta/boagent/37d403c3aa478a5f0473cc51f79fab7841adbff6/boagent/public/assets/platypus_logo.png
--------------------------------------------------------------------------------
/boagent/public/assets/synchronizer.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Synchronize zooming and/or selections between a set of dygraphs.
3 | *
4 | * Usage:
5 | *
6 | * var g1 = new Dygraph(...),
7 | * g2 = new Dygraph(...),
8 | * ...;
9 | * var sync = Dygraph.synchronize(g1, g2, ...);
10 | * // charts are now synchronized
11 | * sync.detach();
12 | * // charts are no longer synchronized
13 | *
14 | * You can set options using the last parameter, for example:
15 | *
16 | * var sync = Dygraph.synchronize(g1, g2, g3, {
17 | * selection: true,
18 | * zoom: true
19 | * });
20 | *
21 | * The default is to synchronize both of these.
22 | *
23 | * Instead of passing one Dygraph object as each parameter, you may also pass an
24 | * array of dygraphs:
25 | *
26 | * var sync = Dygraph.synchronize([g1, g2, g3], {
27 | * selection: false,
28 | * zoom: true
29 | * });
30 | *
31 | * You may also set `range: false` if you wish to only sync the x-axis.
32 | * The `range` option has no effect unless `zoom` is true (the default).
33 | */
34 | (function() {
35 | /* global Dygraph:false */
36 | 'use strict';
37 |
38 | var Dygraph;
39 | if (window.Dygraph) {
40 | Dygraph = window.Dygraph;
41 | } else if (typeof(module) !== 'undefined') {
42 | Dygraph = require('../dygraph');
43 | }
44 |
// Entry point: accepts either (g1, g2, ..., opts?) or ([g1, g2, ...], opts?).
// Stores each graph's original draw/highlight/unhighlight callbacks so they
// can be chained through and later restored by detach().
var synchronize = function(/* dygraphs..., opts */) {
  if (arguments.length === 0) {
    throw 'Invalid invocation of Dygraph.synchronize(). Need >= 1 argument.';
  }

  var OPTIONS = ['selection', 'zoom', 'range'];
  var opts = {
    selection: true,
    zoom: true,
    range: true
  };
  var dygraphs = [];
  var prevCallbacks = [];

  // Copy only the recognized option keys from the caller's object.
  var parseOpts = function(obj) {
    if (!(obj instanceof Object)) {
      throw 'Last argument must be either Dygraph or Object.';
    } else {
      for (var i = 0; i < OPTIONS.length; i++) {
        var optName = OPTIONS[i];
        if (obj.hasOwnProperty(optName)) opts[optName] = obj[optName];
      }
    }
  };

  if (arguments[0] instanceof Dygraph) {
    // Arguments are Dygraph objects.
    for (var i = 0; i < arguments.length; i++) {
      if (arguments[i] instanceof Dygraph) {
        dygraphs.push(arguments[i]);
      } else {
        break;
      }
    }
    if (i < arguments.length - 1) {
      throw 'Invalid invocation of Dygraph.synchronize(). ' +
            'All but the last argument must be Dygraph objects.';
    } else if (i == arguments.length - 1) {
      parseOpts(arguments[arguments.length - 1]);
    }
  } else if (arguments[0].length) {
    // Invoked w/ list of dygraphs, options
    for (var i = 0; i < arguments[0].length; i++) {
      dygraphs.push(arguments[0][i]);
    }
    if (arguments.length == 2) {
      parseOpts(arguments[1]);
    } else if (arguments.length > 2) {
      throw 'Invalid invocation of Dygraph.synchronize(). ' +
            'Expected two arguments: array and optional options argument.';
    } // otherwise arguments.length == 1, which is fine.
  } else {
    throw 'Invalid invocation of Dygraph.synchronize(). ' +
          'First parameter must be either Dygraph or list of Dygraphs.';
  }

  if (dygraphs.length < 2) {
    throw 'Invalid invocation of Dygraph.synchronize(). ' +
          'Need two or more dygraphs to synchronize.';
  }

  // Wait until every graph is ready before swapping callbacks, so the
  // originals captured in prevCallbacks are the final user-set ones.
  var readycount = dygraphs.length;
  for (var i = 0; i < dygraphs.length; i++) {
    var g = dygraphs[i];
    g.ready( function() {
      if (--readycount == 0) {
        // store original callbacks
        var callBackTypes = ['drawCallback', 'highlightCallback', 'unhighlightCallback'];
        for (var j = 0; j < dygraphs.length; j++) {
          if (!prevCallbacks[j]) {
            prevCallbacks[j] = {};
          }
          for (var k = callBackTypes.length - 1; k >= 0; k--) {
            prevCallbacks[j][callBackTypes[k]] = dygraphs[j].getFunctionOption(callBackTypes[k]);
          }
        }

        // Listen for draw, highlight, unhighlight callbacks.
        if (opts.zoom) {
          attachZoomHandlers(dygraphs, opts, prevCallbacks);
        }

        if (opts.selection) {
          attachSelectionHandlers(dygraphs, prevCallbacks);
        }
      }
    });
  }

  return {
    detach: function() {
      for (var i = 0; i < dygraphs.length; i++) {
        var g = dygraphs[i];
        if (opts.zoom) {
          g.updateOptions({drawCallback: prevCallbacks[i].drawCallback});
        }
        if (opts.selection) {
          g.updateOptions({
            highlightCallback: prevCallbacks[i].highlightCallback,
            unhighlightCallback: prevCallbacks[i].unhighlightCallback
          });
        }
      }
      // release references & make subsequent calls throw.
      dygraphs = null;
      opts = null;
      prevCallbacks = null;
    }
  };
};
155 |
// Shallow element-wise comparison of two arrays.
// Non-array inputs (including null/undefined) never compare equal.
function arraysAreEqual(a, b) {
  if (!Array.isArray(a) || !Array.isArray(b)) return false;
  if (a.length !== b.length) return false;
  for (var idx = 0; idx < a.length; idx++) {
    if (a[idx] !== b[idx]) return false;
  }
  return true;
}
165 |
// Propagate zoom changes (x-range, and y-range when syncOpts.range is set)
// from any graph in `gs` to all the others; `block` prevents the induced
// redraws from re-triggering this handler recursively.
function attachZoomHandlers(gs, syncOpts, prevCallbacks) {
  var block = false;
  for (var i = 0; i < gs.length; i++) {
    var g = gs[i];
    g.updateOptions({
      drawCallback: function(me, initial) {
        if (block || initial) return;
        block = true;
        var opts = {
          dateWindow: me.xAxisRange()
        };
        if (syncOpts.range) opts.valueRange = me.yAxisRange();

        for (var j = 0; j < gs.length; j++) {
          if (gs[j] == me) {
            // Chain through to the graph's original drawCallback.
            if (prevCallbacks[j] && prevCallbacks[j].drawCallback) {
              prevCallbacks[j].drawCallback.apply(this, arguments);
            }
            continue;
          }

          // Only redraw if there are new options
          if (arraysAreEqual(opts.dateWindow, gs[j].getOption('dateWindow')) &&
              arraysAreEqual(opts.valueRange, gs[j].getOption('valueRange'))) {
            continue;
          }

          gs[j].updateOptions(opts);
        }
        block = false;
      }
    }, true /* no need to redraw */);
  }
}
200 |
// Mirror point selection (hover highlight) across all graphs in `gs`;
// `block` prevents the induced setSelection/clearSelection calls from
// re-triggering these handlers recursively.
function attachSelectionHandlers(gs, prevCallbacks) {
  var block = false;
  for (var i = 0; i < gs.length; i++) {
    var g = gs[i];

    g.updateOptions({
      highlightCallback: function(event, x, points, row, seriesName) {
        if (block) return;
        block = true;
        var me = this;
        for (var i = 0; i < gs.length; i++) {
          if (me == gs[i]) {
            // Chain through to the graph's original highlightCallback.
            if (prevCallbacks[i] && prevCallbacks[i].highlightCallback) {
              prevCallbacks[i].highlightCallback.apply(this, arguments);
            }
            continue;
          }
          var idx = gs[i].getRowForX(x);
          if (idx !== null) {
            gs[i].setSelection(idx, seriesName);
          }
        }
        block = false;
      },
      unhighlightCallback: function(event) {
        if (block) return;
        block = true;
        var me = this;
        for (var i = 0; i < gs.length; i++) {
          if (me == gs[i]) {
            if (prevCallbacks[i] && prevCallbacks[i].unhighlightCallback) {
              prevCallbacks[i].unhighlightCallback.apply(this, arguments);
            }
            continue;
          }
          gs[i].clearSelection();
        }
        block = false;
      }
    }, true /* no need to redraw */);
  }
}
243 |
244 | Dygraph.synchronize = synchronize;
245 |
246 | })();
247 |
--------------------------------------------------------------------------------
/boagent/public/assets/table-style.css:
--------------------------------------------------------------------------------
1 | table {
2 | margin-top: 2%;
3 | margin-left: auto;
4 | margin-right: auto;
5 | border-collapse: collapse;
6 | background: none;
7 | border-radius: 6px;
8 | overflow: hidden;
9 | width: 90%;
10 | position: relative;
11 | }
12 | table * {
13 | position: relative;
14 | }
15 | table td, table th {
16 | padding-left: 8px;
17 | }
18 | table thead tr {
19 | height: 60px;
20 | background:#364049;
21 | font-size: 16px;
22 | }
23 | table tbody tr {
24 | height: 48px;
25 | border-bottom: 1px solid #E3F1D5;
26 | }
27 | table tbody tr:last-child {
28 | border: 0;
29 | }
30 | table td, table th {
31 | text-align: left;
32 | }
/* Per-cell alignment helper classes.
   NOTE(review): `.l` right-aligns despite its name — possibly intentional
   for numeric columns; confirm against the markup before changing it. */
table td.l, table th.l {
  text-align: right;
}
table td.c, table th.c {
  text-align: center;
}
/* `.r` previously duplicated `.c` (text-align: center), which made the
   class useless; right-align as its name indicates. */
table td.r, table th.r {
  text-align: right;
}
42 |
43 | @media screen and (max-width: 35.5em) {
44 | table {
45 | display: block;
46 | }
47 | table > *, table tr, table td, table th {
48 | display: block;
49 | }
50 | table thead {
51 | display: none;
52 | }
53 | table tbody tr {
54 | height: auto;
55 | padding: 8px 0;
56 | }
57 | table tbody tr td {
58 | padding-left: 45%;
59 | margin-bottom: 12px;
60 | }
61 | table tbody tr td:last-child {
62 | margin-bottom: 0;
63 | }
64 | table tbody tr td:before {
65 | position: absolute;
66 | font-weight: 700;
67 | width: 40%;
68 | left: 10px;
69 | top: 0;
70 | }
71 | table tbody tr td:nth-child(1):before {
72 | content: "Code";
73 | }
74 | table tbody tr td:nth-child(2):before {
75 | content: "Stock";
76 | }
77 | table tbody tr td:nth-child(3):before {
78 | content: "Cap";
79 | }
80 | table tbody tr td:nth-child(4):before {
81 | content: "Inch";
82 | }
83 | table tbody tr td:nth-child(5):before {
84 | content: "Box Type";
85 | }
86 | }
87 | body {
88 | background: #9BC86A;
89 | font: 400 14px 'Calibri','Arial';
90 | padding: 20px;
91 | }
92 |
93 | blockquote {
94 | color: white;
95 | text-align: center;
96 | }
97 |
--------------------------------------------------------------------------------
/boagent/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 | Fork me on GitHub
14 |
15 |
PLATYPUS : report, understand and tackle IT impacts