├── tracarbon
├── py.typed
├── hardwares
│ ├── data
│ │ └── __init__.py
│ ├── __init__.py
│ ├── cloud_providers.py
│ ├── gpu.py
│ ├── hardware.py
│ ├── energy.py
│ ├── containers.py
│ ├── rapl.py
│ └── sensors.py
├── locations
│ ├── data
│ │ ├── __init__.py
│ │ ├── grid-emissions-factors-aws.csv
│ │ └── co2-emission-intensity-9.exhibit.json
│ ├── __init__.py
│ ├── location.py
│ └── country.py
├── __main__.py
├── emissions
│ ├── __init__.py
│ └── carbon_emissions.py
├── exporters
│ ├── __init__.py
│ ├── stdout.py
│ ├── datadog_exporter.py
│ ├── prometheus_exporter.py
│ ├── json_exporter.py
│ └── exporter.py
├── __init__.py
├── exceptions.py
├── conf.py
├── builder.py
├── cli
│ └── __init__.py
└── general_metrics.py
├── .github
├── CODEOWNERS
├── ISSUE_TEMPLATE
│ ├── config.yml
│ ├── feature_request.md
│ └── bug_report.md
├── pull_request_template.md
├── dependabot.yml
└── workflows
│ ├── codeql-analysis.yml
│ ├── build.yml
│ └── release.yml
├── helm
└── tracarbon
│ ├── templates
│ ├── NOTES.txt
│ ├── serviceaccount.yaml
│ ├── service.yaml
│ ├── _helpers.tpl
│ ├── rbac.yaml
│ └── daemonset.yaml
│ ├── Chart.yaml
│ ├── .helmignore
│ └── values.yaml
├── tests
├── hardwares
│ ├── data
│ │ ├── intel-rapl
│ │ │ ├── intel-raplT0
│ │ │ │ ├── name
│ │ │ │ ├── energy_uj
│ │ │ │ ├── intel-raplT0T0
│ │ │ │ │ ├── name
│ │ │ │ │ ├── energy_uj
│ │ │ │ │ └── max_energy_range_uj
│ │ │ │ ├── intel-raplT0T1
│ │ │ │ │ ├── name
│ │ │ │ │ ├── energy_uj
│ │ │ │ │ └── max_energy_range_uj
│ │ │ │ └── max_energy_range_uj
│ │ │ └── intel-raplT1
│ │ │ │ ├── name
│ │ │ │ ├── energy_uj
│ │ │ │ ├── intel-raplT1T0
│ │ │ │ ├── name
│ │ │ │ ├── energy_uj
│ │ │ │ └── max_energy_range_uj
│ │ │ │ ├── intel-raplT1T1
│ │ │ │ ├── name
│ │ │ │ ├── energy_uj
│ │ │ │ └── max_energy_range_uj
│ │ │ │ └── max_energy_range_uj
│ │ └── intel-rapl2
│ │ │ └── intel-raplT0
│ │ │ ├── energy_uj
│ │ │ ├── name
│ │ │ ├── intel-raplT0T0
│ │ │ ├── name
│ │ │ ├── energy_uj
│ │ │ └── max_energy_range_uj
│ │ │ └── max_energy_range_uj
│ ├── test_gpu.py
│ ├── test_energy.py
│ ├── test_hardware.py
│ ├── test_containers.py
│ ├── test_sensors.py
│ └── test_rapl.py
├── conftest.py
├── test_builder.py
├── exporters
│ ├── test_prometheus_exporter.py
│ ├── test_json_exporter.py
│ └── test_exporter.py
├── locations
│ ├── test_country.py
│ └── test_location.py
├── cli
│ └── test_cli.py
├── carbon_emissions
│ └── test_carbon_emissions.py
└── test_general_metrics.py
├── stubs
├── aiocache.pyi
├── kubernetes
│ ├── __init__.pyi
│ └── client.pyi
├── datadog.pyi
└── psutil.pyi
├── logo.png
├── docs
├── source
│ ├── _static
│ │ └── .gitignore
│ ├── installation.rst
│ ├── index.rst
│ ├── development.rst
│ ├── api_reference.rst
│ ├── _ext
│ │ └── edit_on_github.py
│ ├── conf.py
│ └── usage.rst
├── Makefile
└── make.bat
├── .env
├── .dockerignore
├── .pre-commit-config.yaml
├── Dockerfile
├── Makefile
├── scripts
└── check_data.py
├── .gitignore
├── pyproject.toml
├── CODE_OF_CONDUCT.md
├── README.md
└── LICENSE.txt
/tracarbon/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @fvaleye
2 |
--------------------------------------------------------------------------------
/helm/tracarbon/templates/NOTES.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/data/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tracarbon/locations/data/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/name:
--------------------------------------------------------------------------------
1 | package-0
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/name:
--------------------------------------------------------------------------------
1 | package-1
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/energy_uj:
--------------------------------------------------------------------------------
1 | 20232
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl2/intel-raplT0/energy_uj:
--------------------------------------------------------------------------------
1 | 0.002
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl2/intel-raplT0/name:
--------------------------------------------------------------------------------
1 | package-0
--------------------------------------------------------------------------------
/stubs/aiocache.pyi:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | cached: Any
4 |
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/energy_uj:
--------------------------------------------------------------------------------
1 | 24346753748
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/intel-raplT0T0/name:
--------------------------------------------------------------------------------
1 | core
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/intel-raplT0T1/name:
--------------------------------------------------------------------------------
1 | dram
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/intel-raplT1T0/name:
--------------------------------------------------------------------------------
1 | core
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/intel-raplT1T1/name:
--------------------------------------------------------------------------------
1 | dram
--------------------------------------------------------------------------------
/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fvaleye/tracarbon/HEAD/logo.png
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/intel-raplT1T0/energy_uj:
--------------------------------------------------------------------------------
1 | 3
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/intel-raplT1T1/energy_uj:
--------------------------------------------------------------------------------
1 | 2433
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl2/intel-raplT0/intel-raplT0T0/name:
--------------------------------------------------------------------------------
1 | core
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl2/intel-raplT0/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 70000
--------------------------------------------------------------------------------
/stubs/kubernetes/__init__.pyi:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | config: Any
4 |
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 65532610987
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 65532610987
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl2/intel-raplT0/intel-raplT0T0/energy_uj:
--------------------------------------------------------------------------------
1 | 0.001
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/intel-raplT0T0/energy_uj:
--------------------------------------------------------------------------------
1 | 43725162336
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/intel-raplT0T1/energy_uj:
--------------------------------------------------------------------------------
1 | 2592370025
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl2/intel-raplT0/intel-raplT0T0/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 70000
--------------------------------------------------------------------------------
/stubs/datadog.pyi:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | ThreadStats: Any
4 | initialize: Any
5 |
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/intel-raplT0T0/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 65532610987
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT0/intel-raplT0T1/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 65532610987
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/intel-raplT1T0/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 65532610987
--------------------------------------------------------------------------------
/tests/hardwares/data/intel-rapl/intel-raplT1/intel-raplT1T1/max_energy_range_uj:
--------------------------------------------------------------------------------
1 | 65532610987
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 |
--------------------------------------------------------------------------------
/stubs/kubernetes/client.pyi:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | CustomObjectsApi: Any
4 | CoreV1Api: Any
5 |
--------------------------------------------------------------------------------
/tracarbon/__main__.py:
--------------------------------------------------------------------------------
1 | from tracarbon.cli import main
2 |
3 | if __name__ == "__main__":
4 | main()
5 |
--------------------------------------------------------------------------------
/stubs/psutil.pyi:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | cpu_percent: Any
4 | virtual_memory: Any
5 | cpu_count: Any
6 |
--------------------------------------------------------------------------------
/docs/source/_static/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore everything in this directory
2 | *
3 | # Except this file
4 | !.gitignore
5 |
--------------------------------------------------------------------------------
/tracarbon/locations/__init__.py:
--------------------------------------------------------------------------------
1 | from tracarbon.locations.country import *
2 | from tracarbon.locations.location import *
3 |
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | AIOCACHE_DISABLE=1
2 | TRACARBON_INTERVAL_IN_SECONDS=1
3 | TRACARBON_METRIC_PREFIX_NAME=test
4 | TRACARBON_LOG_LEVEL=INFO
5 | DATADOG_API_KEY=DATADOG_API_KEY
6 | DATADOG_APP_KEY=DATADOG_APP_KEY
7 |
--------------------------------------------------------------------------------
/helm/tracarbon/templates/serviceaccount.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ServiceAccount
3 | metadata:
4 | name: {{ template "tracarbon.name" . }}
5 | labels:
6 | {{- include "labels.common" . | nindent 4 }}
7 |
--------------------------------------------------------------------------------
/tracarbon/emissions/__init__.py:
--------------------------------------------------------------------------------
1 | from tracarbon.emissions.carbon_emissions import CarbonEmission
2 | from tracarbon.emissions.carbon_emissions import CarbonUsage
3 | from tracarbon.emissions.carbon_emissions import CarbonUsageUnit
4 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/__init__.py:
--------------------------------------------------------------------------------
1 | from tracarbon.hardwares.containers import *
2 | from tracarbon.hardwares.energy import *
3 | from tracarbon.hardwares.hardware import *
4 | from tracarbon.hardwares.rapl import *
5 | from tracarbon.hardwares.sensors import *
6 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project or its docs
4 | title: ''
5 | labels: 'enhancement'
6 | ---
7 |
8 | # Description
9 |
10 | **Use Case**
11 |
12 | **Related Issue(s)**
13 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | .venv
3 | .vscode
4 | __pycache__
5 | *.pyc
6 | .DS_Store
7 | .env
8 | # Docker
9 | .dockerignore
10 | Dockerfile
11 | *.log
12 | # Uv
13 | uv.lock
14 | # Tests
15 | .pytest_cache
16 | .coverage
17 | htmlcov
18 | .tox
19 | # MyPy
20 | .mypy_cache
21 |
--------------------------------------------------------------------------------
/tracarbon/exporters/__init__.py:
--------------------------------------------------------------------------------
1 | from tracarbon.exporters.datadog_exporter import *
2 | from tracarbon.exporters.exporter import *
3 | from tracarbon.exporters.json_exporter import *
4 | from tracarbon.exporters.prometheus_exporter import *
5 | from tracarbon.exporters.stdout import *
6 |
--------------------------------------------------------------------------------
/tracarbon/__init__.py:
--------------------------------------------------------------------------------
1 | from tracarbon.builder import *
2 | from tracarbon.conf import *
3 | from tracarbon.emissions import *
4 | from tracarbon.exceptions import *
5 | from tracarbon.general_metrics import *
6 | from tracarbon.hardwares.sensors import *
7 | from tracarbon.locations import *
8 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | # Description
2 | The description of the main changes of your pull request
3 |
4 | # Related Issue(s)
5 |
10 |
11 | # Documentation
12 |
13 |
16 |
--------------------------------------------------------------------------------
/docs/source/installation.rst:
--------------------------------------------------------------------------------
1 | ************
2 | Installation
3 | ************
4 |
5 | Using Pip
6 | =========
7 | .. code-block:: bash
8 |
9 | # Install Tracarbon
10 | pip install 'tracarbon'
11 |
12 | # Install one or more exporters from the list
13 | pip install 'tracarbon[datadog,prometheus,kubernetes]'
14 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # Please see the documentation for all configuration options:
2 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
3 |
4 | version: 2
5 | updates:
6 | - package-ecosystem: "pip"
7 | directory: "/"
8 | schedule:
9 | interval: "weekly"
10 |
--------------------------------------------------------------------------------
/helm/tracarbon/Chart.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v2
2 | name: tracarbon
3 | description: A Helm chart for Tracarbon
4 | maintainers:
5 | - name: fvaleye
6 | url: https://fvaleye.github.io/
7 | icon: https://github.com/fvaleye/tracarbon/blob/main/logo.png
8 | home: https://github.com/fvaleye/tracarbon
9 | type: application
10 | version: 0.2.0
11 | appVersion: "1.16.0"
12 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | Tracarbon documentation
2 | =========================================================
3 | This is the documentation of Tracarbon.
4 |
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 |
9 | installation
10 | usage
11 | api_reference
12 | development
13 |
14 |
15 | Indices and tables
16 | ==================
17 |
18 | * :ref:`genindex`
19 | * :ref:`modindex`
20 | * :ref:`search`
21 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Problems and issues
4 | title: ''
5 | labels: 'bug'
6 | ---
7 | # Environment
8 |
9 | **Tracarbon version**:
10 |
11 |
12 | **Environment**:
13 | - **Source**:
14 | - **OS**:
15 | - **Other**:
16 |
17 | ***
18 | # Bug
19 |
20 | **What happened**:
21 |
22 | **What you expected to happen**:
23 |
24 | **How to reproduce it**:
25 |
26 | **More details**:
27 |
--------------------------------------------------------------------------------
/docs/source/development.rst:
--------------------------------------------------------------------------------
1 | ***********
2 | Development
3 | ***********
4 |
5 | With Docker
6 | ===========
7 |
8 | .. code-block:: bash
9 |
10 | # Inside the root folder
11 | docker build -t tracarbon ./
12 |
13 | # Build with Docker
14 | docker run -it tracarbon bash
15 |
16 | With uv
17 | ===========
18 |
19 | .. code-block:: bash
20 |
21 | # Setup Python
22 | make init
23 |
24 | # List everything
25 | make help
26 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_stages: [pre-commit]
2 | repos:
3 | - repo: https://github.com/pre-commit/pre-commit-hooks
4 | rev: v5.0.0
5 | hooks:
6 | - id: end-of-file-fixer
7 | exclude: tests/hardwares/data/
8 | - id: trailing-whitespace
9 | - repo: https://github.com/astral-sh/ruff-pre-commit
10 | rev: v0.3.2
11 | hooks:
12 | - id: ruff
13 | args: [--fix, --exit-non-zero-on-fix]
14 | - id: ruff-format
15 |
--------------------------------------------------------------------------------
/helm/tracarbon/.helmignore:
--------------------------------------------------------------------------------
1 | # Patterns to ignore when building packages.
2 | # This supports shell glob matching, relative path matching, and
3 | # negation (prefixed with !). Only one pattern per line.
4 | .DS_Store
5 | # Common VCS dirs
6 | .git/
7 | .gitignore
8 | .bzr/
9 | .bzrignore
10 | .hg/
11 | .hgignore
12 | .svn/
13 | # Common backup files
14 | *.swp
15 | *.bak
16 | *.tmp
17 | *.orig
18 | *~
19 | # Various IDEs
20 | .project
21 | .idea/
22 | *.tmproj
23 | .vscode/
24 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim-bookworm
2 |
3 | # Install uv
4 | RUN pip install uv
5 |
6 | COPY . /app
7 | WORKDIR /app
8 |
9 | # Install dependencies.
10 | # The --system flag installs packages into the system’s Python environment.
11 | # The -e flag installs the project in "editable" mode.
12 | # '.[all]' will install datadog, prometheus and kubernetes optional dependencies.
13 | RUN uv pip install -e '.[all]' --system
14 |
15 | # Run tracarbon
16 | ENTRYPOINT ["tracarbon", "run"]
17 |
--------------------------------------------------------------------------------
/helm/tracarbon/values.yaml:
--------------------------------------------------------------------------------
1 | image:
2 | name: florianvaleye/tracarbon
3 | tag: latest
4 |
5 | port: 8081
6 |
7 | resources:
8 | limits:
9 | memory: 150Mi
10 | requests:
11 | cpu: 150m
12 | memory: 100Mi
13 |
14 | tracarbon:
15 | args:
16 | - --exporter-name=Prometheus
17 | - --containers
18 | co2signal_api_key: ""
19 | rapl_available: true
20 | interval_in_seconds: 60
21 | log_level: "INFO"
22 | metric_prefix_name: "tracarbon"
23 |
24 | userID: 0
25 | groupID: 0
26 |
--------------------------------------------------------------------------------
/helm/tracarbon/templates/service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: {{ template "tracarbon.name" . }}
5 | labels:
6 | app.kubernetes.io/name: {{ template "tracarbon.name" . }}
7 | annotations:
8 | prometheus.io/port: "{{ .Values.port }}"
9 | prometheus.io/scheme: "http"
10 | prometheus.io/scrape: "true"
11 | spec:
12 | ports:
13 | - port: {{ .Values.port }}
14 | name: metrics
15 | targetPort: metrics
16 | selector:
17 | app.kubernetes.io/name: {{ template "tracarbon.name" . }}
18 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/tests/hardwares/test_gpu.py:
--------------------------------------------------------------------------------
1 | import shutil
2 |
3 | import pytest
4 |
5 | from tracarbon.exceptions import TracarbonException
6 | from tracarbon.hardwares.gpu import NvidiaGPU
7 |
8 |
9 | def test_get_nvidia_gpu_power_usage(mocker):
10 | gpu_power_usage_returned = "226 W"
11 | gpu_usage_expected = 226
12 | mocker.patch.object(shutil, "which", return_value=True)
13 | mocker.patch.object(NvidiaGPU, "launch_shell_command", return_value=[gpu_power_usage_returned, 0])
14 |
15 | gpu_usage = NvidiaGPU.get_gpu_power_usage()
16 |
17 | assert gpu_usage == gpu_usage_expected
18 |
19 |
20 | def test_get_nvidia_gpu_power_usage_should_throw_error():
21 | with pytest.raises(TracarbonException) as exception:
22 | NvidiaGPU.get_gpu_power_usage()
23 | assert exception.value.args[0] == "Nvidia GPU with nvidia-smi not found in PATH."
24 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/tracarbon/exceptions.py:
--------------------------------------------------------------------------------
1 | class TracarbonException(Exception):
2 | """General Tracarbon Exception."""
3 |
4 | pass
5 |
6 |
7 | class CountryIsMissing(TracarbonException):
8 | """The country is missing."""
9 |
10 | pass
11 |
12 |
13 | class CloudProviderRegionIsMissing(TracarbonException):
14 | """The region of the cloud provider is missing."""
15 |
16 | pass
17 |
18 |
19 | class AWSSensorException(TracarbonException):
20 | """Error in the AWS Sensor Error."""
21 |
22 | pass
23 |
24 |
25 | class HardwareRAPLException(TracarbonException):
26 | """The hardware is not compatible with RAPL."""
27 |
28 | pass
29 |
30 |
31 | class HardwareNoGPUDetectedException(TracarbonException):
32 | """The hardware does not have a GPU."""
33 |
34 | pass
35 |
36 |
37 | class CO2SignalAPIKeyIsMissing(TracarbonException):
38 | """The C02 Signal API key is missing."""
39 |
40 | pass
41 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import pytest
4 | from _pytest.logging import LogCaptureFixture
5 | from loguru import logger
6 |
7 |
8 | def test_some_interaction(monkeypatch):
9 | monkeypatch.setattr("os.getcwd", lambda: "/")
10 |
11 |
12 | ALL = set("darwin linux windows".split())
13 |
14 |
15 | @pytest.fixture(autouse=True)
16 | def no_requests(monkeypatch):
17 | """Remove requests.sessions.Session.request for all tests."""
18 | monkeypatch.delattr("requests.sessions.Session.request")
19 |
20 |
21 | @pytest.fixture
22 | def caplog(caplog: LogCaptureFixture):
23 | handler_id = logger.add(caplog.handler, format="{message}")
24 | yield caplog
25 | logger.remove(handler_id)
26 |
27 |
28 | def pytest_runtest_setup(item):
29 | supported_platforms = ALL.intersection(mark.name for mark in item.iter_markers())
30 | plat = sys.platform
31 | if supported_platforms and plat not in supported_platforms:
32 | pytest.skip("cannot run on platform {}".format(plat))
33 |
--------------------------------------------------------------------------------
/helm/tracarbon/templates/_helpers.tpl:
--------------------------------------------------------------------------------
1 | {{/*
2 | Expand the name of the chart.
3 | */}}
4 | {{- define "tracarbon.name" -}}
5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
6 | {{- end }}
7 |
8 | {{/*
9 | Create chart name and version as used by the chart label.
10 | */}}
11 | {{- define "tracarbon.chart" -}}
12 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
13 | {{- end }}
14 |
15 | {{/*
16 | Common labels
17 | */}}
18 | {{- define "labels.common" -}}
19 | app.kubernetes.io/name: {{ include "tracarbon.name" . | quote }}
20 | app.kubernetes.io/managed-by: {{ .Release.Service }}
21 | helm.sh/chart: {{ include "tracarbon.chart" . }}
22 | {{- end -}}
23 |
24 | {{/*
25 | Create the name of the service account to use
26 | */}}
27 | {{- define "tracarbon.serviceAccountName" -}}
28 | {{- if .Values.serviceAccount.create }}
29 | {{- default (include "tracarbon.fullname" .) .Values.serviceAccount.name }}
30 | {{- else }}
31 | {{- default "default" .Values.serviceAccount.name }}
32 | {{- end }}
33 | {{- end }}
34 |
--------------------------------------------------------------------------------
/tracarbon/locations/data/grid-emissions-factors-aws.csv:
--------------------------------------------------------------------------------
1 | Region,Country,NERC Region,CO2e (metric ton/kWh),Source
2 | us-east-1,United States,SERC,0.000415755,EPA
3 | us-east-2,United States,RFC,0.000440187,EPA
4 | us-west-1,United States,WECC,0.000350861,EPA
5 | us-west-2,United States,WECC,0.000350861,EPA
6 | us-gov-east-1,United States,SERC,0.000415755,EPA
7 | us-gov-west-1,United States,WECC,0.000350861,EPA
8 | af-south-1,South Africa,,0.000928,carbonfootprint.com
9 | ap-east-1,Hong Kong,,0.00081,carbonfootprint.com
10 | ap-south-1,India,,0.000708,carbonfootprint.com
11 | ap-northeast-3,Japan,,0.000506,carbonfootprint.com
12 | ap-northeast-2,South Korea,,0.0005,carbonfootprint.com
13 | ap-southeast-1,Singapore,,0.0004085,EMA Singapore
14 | ap-southeast-2,Australia,,0.00079,carbonfootprint.com
15 | ap-northeast-1,Japan,,0.000506,carbonfootprint.com
16 | ca-central-1,Canada,,0.00013,carbonfootprint.com
17 | cn-north-1,China,,0.000555,carbonfootprint.com
18 | cn-northwest-1,China,,0.000555,carbonfootprint.com
19 | eu-central-1,Germany,,0.000338,EEA
20 | eu-west-1,Ireland,,0.000316,EEA
21 | eu-west-2,England,,0.000228,EEA
22 | eu-south-1,Italy,,0.000233,EEA
23 | eu-west-3,France,,0.000052,EEA
24 | eu-north-1,Sweden,,0.000008,EEA
25 | me-south-1,Bahrain,,0.000732,carbonfootprint.com
26 | sa-east-1,Brazil,,0.000074,carbonfootprint.com
27 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .DEFAULT_GOAL := help
2 |
3 | .PHONY: init
4 | init: ## Init the requirements
5 | $(info --- 🖥 Init dependencies ---)
6 | @uv venv
7 | @uv pip install -e '.[all,dev]'
8 |
9 | .PHONY: format
10 | format: ## Format the code
11 | $(info --- 🐍 Check Python format ---)
12 | pre-commit run -a
13 |
14 | .PHONY: security
15 | security: ## Run security checks
16 | $(info --- 🐍 Security Python ---)
17 | @uv run bandit -c pyproject.toml -r .
18 |
19 | .PHONY: complexity
20 | complexity: ## Run complexity checks
21 | $(info --- 🐍 Complexity Python ---)
22 | @uv run radon cc -a tracarbon/
23 |
24 | .PHONY: test-unit
25 | test-unit: ## Run unit test
26 | $(info --- 🐍 Run Python test-unit ---)
27 | @uv run python -m pytest
28 |
29 | .PHONY: check-data
30 | check-data: ## Check data of Tracarbon
31 | $(info --- 📍 Checking data ---)
32 | @uv run python scripts/check_data.py
33 | @echo "👍"
34 |
35 | .PHONY: build-documentation
36 | build-documentation: ## Build documentation with Sphinx
37 | $(info --- 📚 Run build of the Sphinx documentation ---)
38 | @uv run sphinx-build -Wn -b html -d ./docs/build/doctrees ./docs/source ./docs/build/html
39 |
40 | .PHONY: help
41 | help: ## List the rules
42 | grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
43 |
--------------------------------------------------------------------------------
/scripts/check_data.py:
--------------------------------------------------------------------------------
1 | import urllib.request
2 | from urllib.parse import urlparse
3 |
4 |
def is_valid_url(url: str) -> bool:
    """Return True when *url* uses a plain web scheme (http or https)."""
    return urlparse(url).scheme in {"http", "https"}
8 |
9 |
def check_content_length(url: str, expected_content_length: str) -> bool:
    """
    Verify that the resource at *url* still advertises the expected Content-Length.

    :param url: http(s) URL of the remote dataset
    :param expected_content_length: the Content-Length header value recorded last time
    :return: True when the header matches
    :raises ValueError: when the URL scheme is not http(s) or the content changed
    """
    if not is_valid_url(url):
        raise ValueError(f"Invalid or unsafe URL scheme for URL: {url}")

    # Context manager closes the HTTP response deterministically (the previous
    # code leaked the connection until garbage collection).
    with urllib.request.urlopen(url) as site:  # noqa: S310
        if site.getheader("Content-Length") != expected_content_length:
            raise ValueError(f"This url content changed {url}")
    return True
18 |
19 |
20 | if __name__ == "__main__":
21 | urls = [
22 | {
23 | "url": "https://www.eea.europa.eu/data-and-maps/daviz/co2-emission-intensity-9/download.exhibit",
24 | "content_length": "106078",
25 | },
26 | {
27 | "url": "https://raw.githubusercontent.com/cloud-carbon-footprint/ccf-coefficients/main/data/aws-instances.csv",
28 | "content_length": "160141",
29 | },
30 | {
31 | "url": "https://raw.githubusercontent.com/cloud-carbon-footprint/ccf-coefficients/main/data/grid-emissions-factors-aws.csv",
32 | "content_length": "1204",
33 | },
34 | ]
35 | for url in urls:
36 | check_content_length(url=url["url"], expected_content_length=url["content_length"])
37 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: [ "main" ]
8 | schedule:
9 | - cron: '45 10 * * 1'
10 |
11 | jobs:
12 | analyze:
13 | name: Analyze
14 | runs-on: ubuntu-latest
15 | permissions:
16 | actions: read
17 | contents: read
18 | security-events: write
19 |
20 | strategy:
21 | fail-fast: false
22 | matrix:
23 | language: [ 'python' ]
24 |
25 | steps:
26 | - name: Checkout repository
27 | uses: actions/checkout@v3
28 |
29 | # Initializes the CodeQL tools for scanning.
30 | - name: Initialize CodeQL
31 | uses: github/codeql-action/init@v2
32 | with:
33 | languages: ${{ matrix.language }}
34 | # If you wish to specify custom queries, you can do so here or in a config file.
35 | # By default, queries listed here will override any specified in a config file.
36 | # Prefix the list here with "+" to use these queries and those in the config file.
37 |
38 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
39 | # queries: security-extended,security-and-quality
40 |
41 | - name: Perform CodeQL Analysis
42 | uses: github/codeql-action/analyze@v2
43 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
name: build

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  format:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.9

      - name: Install uv
        run: pip install uv

      - name: Check Python
        run: |
          make init
          pip install pre-commit
          pre-commit install
          make format

      - name: Check Data
        run: make check-data

  unit_test:
    strategy:
      fail-fast: false
      matrix:
        os:
          - macos-13
          - macos-14
          - macos-15
          - windows-2022
          - ubuntu-latest
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        # Aligned with the format job: both jobs now pin setup-python@v5
        # (this job was inconsistently on @v4).
        uses: actions/setup-python@v5
        with:
          python-version: 3.9

      - name: Install uv
        run: pip install uv

      - name: Run unit tests
        run: |
          make init
          make test-unit

      - name: Run security and complexity
        run: |
          make security
          make complexity

      - name: Build Sphinx documentation
        run: |
          make build-documentation
68 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/cloud_providers.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | import requests
4 | from ec2_metadata import EC2Metadata
5 | from ec2_metadata import ec2_metadata
6 | from pydantic import BaseModel
7 |
8 |
class CloudProviders(BaseModel):
    """Base interface for the supported cloud providers."""

    instance_type: str
    region_name: str

    @staticmethod
    def is_running_on_cloud_provider() -> bool:
        """
        Tell whether the current host runs on a known cloud provider.

        :return: True when a known cloud provider is detected
        """
        # AWS is currently the only provider probed.
        return AWS.is_ec2()

    @staticmethod
    def auto_detect() -> Optional["CloudProviders"]:
        """
        Autodetect the cloud provider of the current host.

        :return: the detected cloud provider, or None when none is detected
        """
        if not CloudProviders.is_running_on_cloud_provider():
            return None
        return AWS(
            region_name=ec2_metadata.region,
            instance_type=ec2_metadata.instance_type,
        )
37 |
38 |
class AWS(CloudProviders):
    """The Cloud Provider: AWS."""

    @staticmethod
    def is_ec2() -> bool:
        """
        Probe the EC2 metadata endpoint to detect whether this host is an EC2 instance.

        :return: is a EC2
        """
        try:
            metadata = EC2Metadata()
            # Any response (even an error status) means the endpoint is reachable;
            # only a network failure/timeout indicates we are not on EC2.
            requests.head(metadata.service_url, timeout=1)
            return True
        except Exception:
            return False
55 |
--------------------------------------------------------------------------------
/helm/tracarbon/templates/rbac.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: {{ template "tracarbon.name" . }}
  labels:
    {{- include "labels.common" . | nindent 4 }}
subjects:
  - kind: ServiceAccount
    name: {{ template "tracarbon.name" . }}
    namespace: {{ .Release.Namespace }}
roleRef:
  kind: ClusterRole
  name: {{ template "tracarbon.name" . }}
  apiGroup: rbac.authorization.k8s.io
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ template "tracarbon.name" . }}
  labels:
    {{- include "labels.common" . | nindent 4 }}
    rbac.authorization.k8s.io/aggregate-to-admin: "true"
    rbac.authorization.k8s.io/aggregate-to-edit: "true"
    rbac.authorization.k8s.io/aggregate-to-view: "true"
rules:
  # Read-only access to the resource-metrics API (pod/node usage).
  - apiGroups:
      - metrics.k8s.io
    resources:
      - pods
      - nodes
    verbs:
      - get
      - list
      - watch
  # Direct node metrics endpoint (core API group).
  - apiGroups:
      - ""
    resources:
      - nodes/metrics
    verbs:
      - get
  # Read-only access to core objects.
  - apiGroups:
      - ""
    resources:
      - pods
      - nodes
      - namespaces
      - configmaps
    verbs:
      - get
      - list
      - watch
  # Allow use of the chart's PodSecurityPolicy (legacy extensions API).
  - apiGroups:
      - extensions
    resources:
      - podsecuritypolicies
    resourceNames:
      - {{ .Chart.Name }}
    verbs:
      - "use"
  - apiGroups:
      - ""
    resources:
      - pods
    verbs:
      - list
      - watch
67 |
--------------------------------------------------------------------------------
/tracarbon/exporters/stdout.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 |
3 | from tracarbon.exporters.exporter import Exporter
4 | from tracarbon.exporters.exporter import MetricGenerator
5 |
6 |
class StdoutExporter(Exporter):
    """
    Exporter that writes every generated metric to stdout via the logger.
    """

    async def launch(self, metric_generator: MetricGenerator) -> None:
        """
        Consume the generator and log each metric value.

        :param metric_generator: the metric generator
        """
        try:
            async for metric in metric_generator.generate():
                try:
                    metric_value = await metric.value()
                    logger.debug(f"Generated metric '{metric.name}' with value: {metric_value}")
                    if metric_value is None:
                        logger.debug(f"Skipping metric '{metric.name}' with None value")
                        continue
                    await self.add_metric_to_report(metric=metric, value=metric_value)
                    logger.info(
                        f"Metric name[{metric.format_name(metric_prefix_name=self.metric_prefix_name)}], value[{metric_value}], tags{metric.format_tags()}"
                    )
                except Exception as e:
                    # One failing metric must not abort the rest of the batch.
                    logger.error(f"Error processing metric '{metric.name}': {e}")
        except Exception as e:
            logger.error(f"Error in StdoutExporter.launch: {e}")

    @classmethod
    def get_name(cls) -> str:
        """
        Get the name of the exporter.

        :return: the Exporter's name
        """
        return "Stdout"
44 |
--------------------------------------------------------------------------------
/docs/source/api_reference.rst:
--------------------------------------------------------------------------------
1 | *************
2 | API Reference
3 | *************
4 |
5 | CLI
6 | ===
7 |
8 | .. automodule:: tracarbon.cli
9 | :members:
10 |
11 | Configuration
12 | =============
13 |
14 | .. automodule:: tracarbon.conf
15 | :members:
16 |
17 | Builder
18 | =======
19 |
20 | .. automodule:: tracarbon.builder
21 | :members:
22 |
23 | Hardware
24 | ========
25 |
26 | .. automodule:: tracarbon.hardwares.hardware
27 | :members:
28 |
29 | .. automodule:: tracarbon.hardwares.containers
30 | :members:
31 |
32 | .. automodule:: tracarbon.hardwares.energy
33 | :members:
34 |
35 | .. automodule:: tracarbon.hardwares.gpu
36 | :members:
37 |
38 | .. automodule:: tracarbon.hardwares.rapl
39 | :members:
40 |
41 | .. automodule:: tracarbon.hardwares.sensors
42 | :members:
43 |
44 | Exporters
45 | ==========
46 |
47 | .. automodule:: tracarbon.exporters.exporter
48 | :members:
49 |
50 | .. automodule:: tracarbon.exporters.stdout
51 | :members:
52 |
53 | .. automodule:: tracarbon.exporters.datadog_exporter
54 | :members:
55 |
56 | .. automodule:: tracarbon.exporters.json_exporter
57 | :members:
58 |
59 | .. automodule:: tracarbon.exporters.prometheus_exporter
60 | :members:
61 |
62 | Emissions
63 | =========
64 |
65 | .. automodule:: tracarbon.emissions.carbon_emissions
66 | :members:
67 |
68 | Locations
69 | =========
70 |
71 | .. automodule:: tracarbon.locations.location
72 | :members:
73 |
74 | .. automodule:: tracarbon.locations.country
75 | :members:
76 |
77 | Exceptions
78 | ==========
79 |
80 | .. automodule:: tracarbon.exceptions
81 | :members:
82 |
83 | General Metrics
84 | ===============
85 |
86 | .. automodule:: tracarbon.general_metrics
87 | :members:
88 |
--------------------------------------------------------------------------------
/docs/source/_ext/edit_on_github.py:
--------------------------------------------------------------------------------
1 | """
2 | Sphinx extension to add ReadTheDocs-style "Edit on GitHub" links to the
3 | sidebar.
4 |
5 | Loosely based on https://github.com/astropy/astropy/pull/347
6 | """
7 |
8 | import os
9 | import warnings
10 |
11 | __licence__ = "BSD (3 clause)"
12 |
13 |
def get_github_url(app, view, path):
    """Build a GitHub URL for *path* using the configured project and branch."""
    project = app.config.edit_on_github_project
    branch = app.config.edit_on_github_branch
    return f"https://github.com/{project}/{view}/{branch}/{path}"


def html_page_context(app, pagename, templatename, context, doctree):
    """Inject "show on GitHub" / "edit on GitHub" URLs into the page context."""
    # Only decorate regular documentation pages.
    if templatename != "page.html":
        return

    if not app.config.edit_on_github_project:
        warnings.warn("edit_on_github_project not specified", stacklevel=1)
        return

    rel_path = os.path.relpath(doctree.get("source"), app.builder.srcdir)
    context["show_on_github_url"] = get_github_url(app, "blob", rel_path)
    context["edit_on_github_url"] = get_github_url(app, "edit", rel_path)

    # Extra context keys consumed by sphinx_rtd_theme.
    owner_and_repo = app.config.edit_on_github_project.split("/")
    context["display_github"] = True
    context["github_user"] = owner_and_repo[0]
    context["github_repo"] = owner_and_repo[1]
    context["github_version"] = f"{app.config.edit_on_github_branch}/{app.config.page_source_prefix}/"
43 |
44 |
def setup(app):
    """Register the extension's config values and hook the page-context event."""
    defaults = (
        ("edit_on_github_project", ""),
        ("edit_on_github_branch", "main"),
        ("page_source_prefix", "docs/source"),
    )
    for option, default in defaults:
        app.add_config_value(option, default, True)
    app.connect("html-page-context", html_page_context)
50 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/gpu.py:
--------------------------------------------------------------------------------
1 | import shutil
2 | import subprocess
3 | from abc import ABC
4 | from typing import Tuple
5 |
6 | from pydantic import BaseModel
7 |
8 | from tracarbon.exceptions import HardwareNoGPUDetectedException
9 |
10 |
class GPUInfo(ABC, BaseModel):
    """
    GPU information.
    """

    @classmethod
    def get_gpu_power_usage(cls) -> float:
        """
        Get the GPU power usage in watts.

        :return: the gpu power usage in W
        :raises HardwareNoGPUDetectedException: when no supported GPU is available
        """
        # Nvidia is the only vendor implemented so far; delegate to it directly.
        return NvidiaGPU.get_gpu_power_usage()
24 |
25 |
class NvidiaGPU(GPUInfo):
    """
    Nvidia GPU information, read through the `nvidia-smi` command-line tool.
    """

    @classmethod
    def launch_shell_command(cls) -> Tuple[bytes, int]:
        """
        Run `nvidia-smi` to query the current GPU power draw.

        :return: raw stdout of the command and its return code
        :raises HardwareNoGPUDetectedException: if nvidia-smi is not in PATH
        """
        nvidia_smi_path = shutil.which("nvidia-smi")
        if nvidia_smi_path is None:
            raise HardwareNoGPUDetectedException("Nvidia GPU with nvidia-smi not found in PATH.")

        # subprocess.run waits for completion and collects stdout in one call
        # (simpler and safer than a hand-managed Popen).
        completed = subprocess.run(  # noqa: S603
            [nvidia_smi_path, "--query-gpu=power.draw", "--format=csv,noheader"],
            stdout=subprocess.PIPE,
            check=False,
        )
        return completed.stdout, completed.returncode

    @classmethod
    def get_gpu_power_usage(cls) -> float:
        """
        Get the GPU power usage in watts.

        :return: the gpu power usage in W
        :raises HardwareNoGPUDetectedException: if the nvidia-smi query failed
        """
        gpu_utilization, return_code = cls.launch_shell_command()
        if return_code == 0:
            # Output looks like b"42.42 W"; the first token is the wattage.
            return float(gpu_utilization.split()[0])
        # Plain string: the previous f-string had no placeholders (lint F541).
        raise HardwareNoGPUDetectedException("No Nvidia GPU detected.")
60 |
--------------------------------------------------------------------------------
/tests/test_builder.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from tracarbon.builder import TracarbonBuilder
4 | from tracarbon.builder import TracarbonConfiguration
5 | from tracarbon.builder import TracarbonReport
6 | from tracarbon.exporters import StdoutExporter
7 | from tracarbon.general_metrics import CarbonEmissionGenerator
8 | from tracarbon.locations import Country
9 |
10 |
@pytest.mark.darwin
def test_builder_without_configuration(mocker):
    """Building without explicit settings must fall back to Stdout + detected country."""
    location = "fr"
    mocker.patch.object(Country, "get_current_country", return_value=location)
    builder = TracarbonBuilder()
    expected_exporter = StdoutExporter(
        metric_generators=[CarbonEmissionGenerator(location=Country(name=location, co2g_kwh=51.1))]
    )

    tracarbon = builder.build()

    assert tracarbon.configuration == TracarbonConfiguration()
    # `is` between type objects: we assert the exact classes (E721), not isinstance.
    assert type(tracarbon.exporter) is type(expected_exporter)
    assert type(tracarbon.exporter.metric_generators[0]) is type(expected_exporter.metric_generators[0])
    assert tracarbon.location == Country(name=location, co2g_kwh=51.1)
26 |
27 |
@pytest.mark.darwin
def test_builder_with_configuration():
    """The builder must honour an explicit configuration, exporter and location."""
    configuration = TracarbonConfiguration(co2signal_api_key="API_KEY")
    expected_location = Country(name="fr", co2g_kwh=51.1)
    expected_exporter = StdoutExporter(metric_generators=[CarbonEmissionGenerator(location=expected_location)])
    builder = TracarbonBuilder(configuration=configuration)

    # Same fluent chain as production code, just one call per line.
    builder = builder.with_exporter(exporter=expected_exporter)
    builder = builder.with_location(location=expected_location)
    tracarbon = builder.build()

    assert tracarbon.configuration == configuration
    assert tracarbon.location == expected_location
    assert tracarbon.exporter == expected_exporter
    assert tracarbon.report is not None
41 |
--------------------------------------------------------------------------------
/tracarbon/locations/location.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from abc import abstractmethod
3 | from enum import Enum
4 | from typing import Any
5 | from typing import Dict
6 | from typing import Optional
7 |
8 | import aiohttp
9 | import ujson
10 | from aiocache import cached
11 | from loguru import logger
12 | from pydantic import BaseModel
13 |
14 |
class CarbonIntensitySource(Enum):
    """Origin of the carbon-intensity value: bundled data file or live CO2Signal API."""

    FILE = "file"
    CO2SignalAPI = "CO2SignalAPI"
18 |
19 |
class Location(ABC, BaseModel):
    """
    Generic Location.

    Carries the carbon intensity (co2g/kWh) of a location, sourced either from
    the bundled data file or from the CO2Signal API (see co2g_kwh_source).
    """

    name: str
    # Where the co2g/kWh value comes from; defaults to the bundled file.
    co2g_kwh_source: CarbonIntensitySource = CarbonIntensitySource.FILE
    # API key/URL are only relevant when the source is CO2SignalAPI.
    co2signal_api_key: Optional[str] = None
    co2signal_url: Optional[str] = None
    co2g_kwh: float = 0.0

    @classmethod
    async def request(cls, url: str, headers: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
        """
        Launch an async request.

        :param url: url to request
        :param headers: headers to add to the request
        :return: the response body parsed as JSON
        :raises Exception: re-raises any failure after logging it
        """

        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers) as response:
                try:
                    logger.info(f"Sending request to the url: {url}.")
                    text = await response.text()
                    return ujson.loads(text)
                except Exception as exception:
                    logger.exception(f"Failed to request this url: {url}")
                    raise exception

    @abstractmethod
    @cached()  # type: ignore
    async def get_latest_co2g_kwh(self) -> float:
        """
        Get the latest co2g_kwh for this location.

        :return: the latest co2g_kwh
        """
        pass
60 |
--------------------------------------------------------------------------------
/tests/exporters/test_prometheus_exporter.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import psutil
4 |
5 | from tracarbon import Country
6 | from tracarbon import MetricGenerator
7 | from tracarbon.exporters import Metric
8 | from tracarbon.exporters import PrometheusExporter
9 | from tracarbon.exporters import Tag
10 |
11 |
def test_prometheus_exporter(mocker):
    """End-to-end check of the Prometheus exporter with one mocked memory metric."""
    interval_in_seconds = 1
    memory_value = 70
    # Fake the tuple-like return of psutil.virtual_memory(); the metric reads index [2].
    mock_memory_value = ["0", "0", memory_value]
    mocker.patch.object(psutil, "virtual_memory", return_value=mock_memory_value)
    expected_metric_1 = "gauge:tracarbon_test_metric_1"

    async def get_memory_usage() -> float:
        return psutil.virtual_memory()[2]

    mocker.patch.object(
        Country,
        "get_location",
        return_value=Country(name="fr", co2g_kwh=50.0),
    )
    memory_metric = Metric(
        name="test_metric_1",
        value=get_memory_usage,
        tags=[Tag(key="test", value="tags")],
    )
    metric_generators = [MetricGenerator(metrics=[memory_metric])]
    # NOTE(review): quit=True appears to stop the exporter after one collection
    # cycle — confirm against the Exporter base class.
    exporter = PrometheusExporter(quit=True, metric_generators=metric_generators, metric_prefix_name="tracarbon")
    exporter.start(interval_in_seconds=interval_in_seconds)
    exporter.stop()

    assert str(exporter.prometheus_metrics["tracarbon_test_metric_1"]) == expected_metric_1
    assert exporter.metric_report["test_metric_1"].exporter_name == PrometheusExporter.get_name()
    assert exporter.metric_report["test_metric_1"].metric == memory_metric
    assert exporter.metric_report["test_metric_1"].total > 0
    assert exporter.metric_report["test_metric_1"].average > 0
    assert exporter.metric_report["test_metric_1"].minimum < sys.float_info.max
    assert exporter.metric_report["test_metric_1"].maximum > 0
    assert exporter.metric_report["test_metric_1"].call_count == 1
45 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/hardware.py:
--------------------------------------------------------------------------------
1 | import platform
2 | from typing import Optional
3 |
4 | import psutil
5 | from pydantic import BaseModel
6 |
7 | from tracarbon.hardwares.gpu import GPUInfo
8 |
9 |
class HardwareInfo(BaseModel):
    """
    Hardware information.
    """

    @staticmethod
    def get_platform() -> str:
        """
        Get the platform name.

        :return: the name of the platform
        """
        return platform.system()

    @staticmethod
    def get_number_of_cores(logical: bool = True) -> int:
        """
        Get the number of CPU's cores.

        :param: logical: core as logical included
        :return: the number of CPU's cores
        """
        # NOTE(review): psutil.cpu_count may return None when undetermined — confirm callers handle it.
        return psutil.cpu_count(logical=logical)

    @staticmethod
    def get_cpu_usage(interval: Optional[float] = None) -> float:
        """
        Get the CPU load percentage usage.

        :param interval: the minimal interval to wait between two consecutive measures
        :return: the CPU load in %
        """
        return psutil.cpu_percent(interval=interval)

    @staticmethod
    def get_memory_usage() -> float:
        """
        Get the local memory usage.

        :return: the memory currently in use, in bytes (psutil's `used` field)
        """
        return psutil.virtual_memory().used

    @staticmethod
    def get_memory_total() -> float:
        """
        Get the total physical memory available.

        :return: the total physical memory available, in bytes
        """
        return psutil.virtual_memory().total

    @classmethod
    def get_gpu_power_usage(cls) -> float:
        """
        Get the GPU power usage in watts.

        :return: the gpu power usage in W
        """
        return GPUInfo.get_gpu_power_usage()
70 |
--------------------------------------------------------------------------------
/tracarbon/locations/data/co2-emission-intensity-9.exhibit.json:
--------------------------------------------------------------------------------
1 | {
2 | "countries": [
3 | {
4 | "co2g_kwh": 479.2,
5 | "name": "gr"
6 | },
7 | {
8 | "co2g_kwh": 410.4,
9 | "name": "bg"
10 | },
11 | {
12 | "co2g_kwh": 133.8,
13 | "name": "hr"
14 | },
15 | {
16 | "co2g_kwh": 198.4,
17 | "name": "pt"
18 | },
19 | {
20 | "co2g_kwh": 51.1,
21 | "name": "fr"
22 | },
23 | {
24 | "co2g_kwh": 328.4,
25 | "name": "nl"
26 | },
27 | {
28 | "co2g_kwh": 774.9,
29 | "name": "ee"
30 | },
31 | {
32 | "co2g_kwh": 311.0,
33 | "name": "de"
34 | },
35 | {
36 | "co2g_kwh": 45.4,
37 | "name": "lt"
38 | },
39 | {
40 | "co2g_kwh": 709.8,
41 | "name": "pl"
42 | },
43 | {
44 | "co2g_kwh": 8.8,
45 | "name": "se"
46 | },
47 | {
48 | "co2g_kwh": 213.4,
49 | "name": "it"
50 | },
51 | {
52 | "co2g_kwh": 278.6,
53 | "name": "ie"
54 | },
55 | {
56 | "co2g_kwh": 436.6,
57 | "name": "cz"
58 | },
59 | {
60 | "co2g_kwh": 225.0,
61 | "name": "gb"
62 | },
63 | {
64 | "co2g_kwh": 161.0,
65 | "name": "be"
66 | },
67 | {
68 | "co2g_kwh": 82.4,
69 | "name": "at"
70 | },
71 | {
72 | "co2g_kwh": 106.5,
73 | "name": "lv"
74 | },
75 | {
76 | "co2g_kwh": 379.0,
77 | "name": "mt"
78 | },
79 | {
80 | "co2g_kwh": 216.4,
81 | "name": "hu"
82 | },
83 | {
84 | "co2g_kwh": 68.6,
85 | "name": "fi"
86 | },
87 | {
88 | "co2g_kwh": 101.7,
89 | "name": "sk"
90 | },
91 | {
92 | "co2g_kwh": 109.0,
93 | "name": "dk"
94 | },
95 | {
96 | "co2g_kwh": 217.8,
97 | "name": "si"
98 | },
99 | {
100 | "co2g_kwh": 156.4,
101 | "name": "es"
102 | },
103 | {
104 | "co2g_kwh": 58.5,
105 | "name": "lu"
106 | },
107 | {
108 | "co2g_kwh": 299.5,
109 | "name": "ro"
110 | },
111 | {
112 | "co2g_kwh": 620.9,
113 | "name": "cy"
114 | }
115 | ]
116 | }
117 |
--------------------------------------------------------------------------------
/tracarbon/exporters/datadog_exporter.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Any
3 | from typing import Optional
4 |
5 | from loguru import logger
6 |
7 | from tracarbon.conf import DATADOG_INSTALLED
8 | from tracarbon.exporters.exporter import Exporter
9 | from tracarbon.exporters.exporter import MetricGenerator
10 |
if DATADOG_INSTALLED:
    from datadog import ThreadStats
    from datadog import initialize

    class DatadogExporter(Exporter):
        """
        Datadog exporter for the metrics.
        """

        # Credentials fall back to the DATADOG_API_KEY / DATADOG_APP_KEY env vars.
        api_key: Optional[str] = None
        app_key: Optional[str] = None
        stats: Optional[ThreadStats] = None
        disable_buffering: bool = False
        datadog_flush_interval: int = 10

        def __init__(self, **data: Any) -> None:
            """
            Initialize the Datadog Exporter and start the background stats thread.

            :return:
            """
            super().__init__(**data)
            initialize(
                flush_interval=self.datadog_flush_interval,
                api_key=self.api_key if self.api_key else os.environ["DATADOG_API_KEY"],
                app_key=self.app_key if self.app_key else os.environ["DATADOG_APP_KEY"],
                disable_buffering=self.disable_buffering,
            )
            self.stats = ThreadStats()
            self.stats.start()

        async def launch(self, metric_generator: MetricGenerator) -> None:
            """
            Launch the Datadog exporter with the metrics.

            :param metric_generator: the metric generators
            :return:
            """
            async for metric in metric_generator.generate():
                metric_value = await metric.value()
                # Explicit None check: a bare truthiness test silently dropped
                # legitimate zero-valued metrics (StdoutExporter already uses this form).
                if metric_value is not None:
                    await self.add_metric_to_report(metric=metric, value=metric_value)
                    metric_name = metric.format_name(metric_prefix_name=self.metric_prefix_name)
                    logger.info(
                        f"Sending metric[{metric_name}] with value [{metric_value}] and tags{metric.format_tags()} to Datadog."
                    )
                    self.stats.gauge(metric_name, metric_value, tags=metric.format_tags())

        @classmethod
        def get_name(cls) -> str:
            """
            Get the name of the exporter.

            :return: the Exporter's name
            """
            return "Datadog"
67 |
--------------------------------------------------------------------------------
/tracarbon/conf.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from typing import Any
4 | from typing import Optional
5 |
6 | from dotenv import load_dotenv
7 | from pydantic import BaseModel
8 |
9 |
def check_optional_dependency(name: str) -> bool:
    """Return True when the optional dependency *name* can be imported."""
    import importlib.util

    from loguru import logger

    try:
        importlib.import_module(name)
        return True
    except ImportError:
        logger.debug(f"{name} optional dependency is not installed.")
        return False


# Feature flags for the optional exporter/runtime integrations.
KUBERNETES_INSTALLED = check_optional_dependency(name="kubernetes")
DATADOG_INSTALLED = check_optional_dependency(name="datadog")
PROMETHEUS_INSTALLED = check_optional_dependency(name="prometheus_client")
26 |
27 |
def logger_configuration(level: str) -> None:
    """
    Configure the logger with a single stderr handler at *level*.
    """
    from loguru import logger

    log_format = "{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} {message}"
    handlers = [
        {"sink": sys.stderr, "format": log_format, "level": level},
    ]
    logger.configure(handlers=handlers)  # type: ignore
41 |
42 |
class TracarbonConfiguration(BaseModel):
    """
    The Configuration of Tracarbon.

    Each field can be overridden by a TRACARBON_* environment variable,
    optionally loaded from a dotenv file.
    """

    metric_prefix_name: str
    log_level: str
    interval_in_seconds: int
    co2signal_api_key: str
    co2signal_url: str

    def __init__(
        self,
        metric_prefix_name: str = "tracarbon",
        interval_in_seconds: int = 60,
        log_level: str = "INFO",
        co2signal_api_key: str = "",
        co2signal_url: str = "https://api.co2signal.com/v1/latest?countryCode=",
        env_file_path: Optional[str] = None,
        **data: Any,
    ) -> None:
        """
        Build the configuration, applying environment-variable overrides.

        :param metric_prefix_name: prefix used when formatting exported metric names
        :param interval_in_seconds: delay between two metric collections
        :param log_level: log level passed to logger_configuration
        :param co2signal_api_key: API key for the CO2Signal service (may be empty)
        :param co2signal_url: CO2Signal endpoint (country code is presumably appended by callers)
        :param env_file_path: optional dotenv file to load before reading env vars
        """
        load_dotenv(env_file_path)
        # The log level is resolved first so logging is configured before anything else runs.
        log_level = os.environ.get("TRACARBON_LOG_LEVEL", log_level)
        logger_configuration(level=log_level)
        super().__init__(
            metric_prefix_name=os.environ.get("TRACARBON_METRIC_PREFIX_NAME", metric_prefix_name),
            log_level=log_level,
            # Env values are strings; pydantic coerces them to int at validation time.
            interval_in_seconds=os.environ.get("TRACARBON_INTERVAL_IN_SECONDS", interval_in_seconds),
            co2signal_api_key=os.environ.get("TRACARBON_CO2SIGNAL_API_KEY", co2signal_api_key),
            co2signal_url=os.environ.get("TRACARBON_CO2SIGNAL_URL", co2signal_url),
            **data,
        )
75 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | .idea/
132 |
# Un-ignore .env so the test environment variables file stays tracked
134 | !.env
135 |
--------------------------------------------------------------------------------
/tests/hardwares/test_energy.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | from tracarbon import EnergyUsage
4 | from tracarbon import EnergyUsageUnit
5 | from tracarbon import UsageType
6 | from tracarbon.hardwares import Power
7 |
8 |
def test_power_should_convert_watt_hours_to_co2g():
    # 10.1 Wh at 20.3 gCO2/kWh -> 10.1 / 1000 * 20.3 = 0.20503 gCO2.
    co2g = Power.co2g_from_watts_hour(watts_hour=10.1, co2g_per_kwh=20.3)

    assert co2g == 0.20503
17 |
18 |
def test_energy_should_convert_watt_hours_to_co2g():
    # NOTE(review): despite its name, this test exercises Power.watts_to_watt_hours
    # (watts -> watt-hours over the elapsed duration), not a co2g conversion.
    watts = 45
    # 45 W sustained for 60 s == 45 * 60/3600 = 0.75 Wh.
    watt_hours_expected = 0.75
    one_minute_ago = datetime.datetime.now() - datetime.timedelta(seconds=60)
    previous_energy_measurement_time = one_minute_ago

    watt_hours = Power.watts_to_watt_hours(
        watts=watts, previous_energy_measurement_time=previous_energy_measurement_time
    )

    assert round(watt_hours, 3) == watt_hours_expected
30 |
31 |
def test_energy_should_convert_watts_from_microjoules():
    """4,304,343,000 microjoules should convert to ~4304.343 watts."""
    microjoules = 4304343000
    expected_watts = 4304.343

    converted = Power.watts_from_microjoules(uj=microjoules)

    assert round(converted, 3) == expected_watts
41 |
42 |
def test_energy_usage_with_type_and_conversion():
    """EnergyUsage exposes per-component readings and converts watts to milliwatts."""
    readings = {
        UsageType.HOST: 2.4,
        UsageType.CPU: 0.4,
        UsageType.MEMORY: 2,
        UsageType.GPU: 1,
    }

    energy_usage = EnergyUsage(
        host_energy_usage=readings[UsageType.HOST],
        cpu_energy_usage=readings[UsageType.CPU],
        memory_energy_usage=readings[UsageType.MEMORY],
        gpu_energy_usage=readings[UsageType.GPU],
    )

    # Initial unit is watts; each component round-trips unchanged.
    assert energy_usage.unit == EnergyUsageUnit.WATT
    for usage_type, expected in readings.items():
        assert energy_usage.get_energy_usage_on_type(usage_type) == expected

    energy_usage.convert_unit(EnergyUsageUnit.MILLIWATT)

    # After conversion every component is scaled by 1000.
    assert energy_usage.unit == EnergyUsageUnit.MILLIWATT
    for usage_type, expected in readings.items():
        assert energy_usage.get_energy_usage_on_type(usage_type) == expected * 1000
69 |
--------------------------------------------------------------------------------
/helm/tracarbon/templates/daemonset.yaml:
--------------------------------------------------------------------------------
apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: {{ template "tracarbon.name" . }}
  labels:
    {{- include "labels.common" . | nindent 4 }}
spec:
  updateStrategy:
    type: RollingUpdate
  selector:
    matchLabels:
      app.kubernetes.io/name: {{ template "tracarbon.name" . }}
  template:
    metadata:
      name: {{ template "tracarbon.name" . }}
      labels:
        {{- include "labels.common" . | nindent 8 }}
    spec:
      containers:
      - name: {{ template "tracarbon.name" . }}
        image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
        args:
        {{ range .Values.tracarbon.args }}
        - {{ . }}
        {{ end }}
        env:
        {{- if .Values.tracarbon.co2signal_api_key }}
        - name: TRACARBON_CO2SIGNAL_API_KEY
          value: '{{ .Values.tracarbon.co2signal_api_key }}'
        {{- end }}
        {{- if .Values.tracarbon.co2signal_url }}
        - name: TRACARBON_CO2SIGNAL_URL
          value: '{{ .Values.tracarbon.co2signal_url }}'
        {{- end }}
        - name: TRACARBON_INTERVAL_IN_SECONDS
          value: '{{ .Values.tracarbon.interval_in_seconds }}'
        {{- if .Values.tracarbon.log_level }}
        - name: TRACARBON_LOG_LEVEL
          value: '{{ .Values.tracarbon.log_level }}'
        {{- end }}
        # Fixed copy-paste bug: this guard previously tested
        # .Values.tracarbon.log_level, so the metric prefix env var was
        # gated on the wrong value.
        {{- if .Values.tracarbon.metric_prefix_name }}
        - name: TRACARBON_METRIC_PREFIX_NAME
          value: '{{ .Values.tracarbon.metric_prefix_name }}'
        {{- end }}
        ports:
        - name: metrics
          containerPort: {{ .Values.port }}
        resources:
{{ toYaml .Values.resources | indent 10 }}
        volumeMounts:
        - mountPath: /proc
          name: proc
          readOnly: false
        {{- if eq .Values.tracarbon.rapl_available true }}
        # RAPL energy counters are read from the host's powercap interface.
        - mountPath: /sys/class/powercap
          name: powercap
          readOnly: false
        {{- end }}
      securityContext:
        runAsUser: {{ .Values.userID }}
        runAsGroup: {{ .Values.userGroup }}
      serviceAccountName: {{ template "tracarbon.name" . }}
      tolerations:
      - operator: "Exists"
      volumes:
      - hostPath:
          path: /proc
          type: "Directory"
        name: proc
      {{- if eq .Values.tracarbon.rapl_available true }}
      - hostPath:
          path: /sys/class/powercap
          type: "Directory"
        name: powercap
      {{- end }}
76 |
--------------------------------------------------------------------------------
/tests/exporters/test_json_exporter.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from datetime import datetime
3 |
4 | import psutil
5 | import ujson
6 |
7 | from tracarbon import Country
8 | from tracarbon import MetricGenerator
9 | from tracarbon.exporters import JSONExporter
10 | from tracarbon.exporters import Metric
11 | from tracarbon.exporters import Tag
12 |
13 |
def test_json_exporter_should_write_well_formatted_metrics_in_json_file(mocker, tmpdir):
    """Two start/stop cycles should append two JSON records and fill the report."""
    datetime_mock = mocker.patch("tracarbon.exporters.json_exporter.datetime")
    frozen_now = datetime(2021, 12, 21)
    datetime_mock.utcnow.return_value = frozen_now
    output_file = tmpdir.mkdir("data").join("test.json")
    interval_in_seconds = 1
    memory_percent = 70
    mocker.patch.object(psutil, "virtual_memory", return_value=["0", "0", memory_percent])

    async def read_memory_percent() -> float:
        return psutil.virtual_memory()[2]

    record = {
        "timestamp": str(frozen_now),
        "metric_name": "test_metric_1",
        "metric_value": 70,
        "metric_tags": ["test:tags"],
    }
    # One record per start/stop cycle is expected in the output file.
    expected = [record, dict(record)]

    mocker.patch.object(
        Country,
        "get_location",
        return_value=Country(name="fr", co2g_kwh=50.0),
    )
    memory_metric = Metric(
        name="test_metric_1",
        value=read_memory_percent,
        tags=[Tag(key="test", value="tags")],
    )

    exporter = JSONExporter(
        quit=True,
        metric_generators=[MetricGenerator(metrics=[memory_metric])],
        path=str(output_file),
    )
    exporter.start(interval_in_seconds=interval_in_seconds)
    exporter.stop()
    exporter.start(interval_in_seconds=interval_in_seconds)
    exporter.stop()
    exporter.flush()

    with open(output_file, "r") as json_file:
        assert ujson.load(json_file) == expected

    report = exporter.metric_report["test_metric_1"]
    assert report.exporter_name == JSONExporter.get_name()
    assert report.metric == memory_metric
    assert report.total > 0
    assert report.average > 0
    assert report.minimum < sys.float_info.max
    assert report.maximum > 0
    assert report.call_count == 1
72 |
--------------------------------------------------------------------------------
/tests/locations/test_country.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from tracarbon.locations import CarbonIntensitySource
4 | from tracarbon.locations import Country
5 |
6 |
@pytest.mark.asyncio
async def test_france_location_should_return_latest_known(mocker):
    """The legacy CO2 Signal payload (nested 'data' object) should be parsed."""
    co2_expected = 51.1
    api_payload = {
        "countryCode": "FR",
        "data": {
            "carbonIntensity": co2_expected,
            "datetime": "2017-02-09T08:30:00.000Z",
            "fossilFuelPercentage": 12.028887656434616,
        },
        "status": "ok",
        "units": {"carbonIntensity": "gCO2eq/kWh"},
    }
    mocker.patch.object(Country, "request", return_value=api_payload)

    country = Country(
        name="fr",
        co2signal_api_key="API_KEY",
        co2g_kwh_source=CarbonIntensitySource.CO2SignalAPI,
        co2g_kwh=co2_expected,
    )
    result = await country.get_latest_co2g_kwh()

    assert result == co2_expected
    assert country.name == "fr"
    assert country.co2g_kwh == co2_expected
    assert country.co2g_kwh_source == CarbonIntensitySource.CO2SignalAPI
36 |
37 |
@pytest.mark.asyncio
async def test_france_location_with_recent_api_versions_should_return_latest_known(
    mocker,
):
    """The newer flat CO2 Signal payload should be parsed as well."""
    co2_expected = 83
    api_payload = {
        "zone": "FR",
        "carbonIntensity": co2_expected,
        "datetime": "2023-03-20T17:00:00.000Z",
        "updatedAt": "2023-03-20T16:51:02.892Z",
        "createdAt": "2023-03-17T17:54:01.319Z",
        "emissionFactorType": "lifecycle",
        "isEstimated": True,
        "estimationMethod": "TIME_SLICER_AVERAGE",
    }
    mocker.patch.object(Country, "request", return_value=api_payload)

    country = Country(
        name="fr",
        co2signal_api_key="API_KEY",
        co2g_kwh_source=CarbonIntensitySource.CO2SignalAPI,
        co2g_kwh=co2_expected,
    )
    result = await country.get_latest_co2g_kwh()

    assert result == co2_expected
    assert country.name == "fr"
    assert country.co2g_kwh == co2_expected
    assert country.co2g_kwh_source == CarbonIntensitySource.CO2SignalAPI
69 |
70 |
@pytest.mark.asyncio
async def test_france_location_should_return_taux_co2():
    """Without an API key, the intensity comes from the bundled emission file."""
    co2_expected = 51.1
    country = Country(co2signal_api_key="", name="fr", co2g_kwh=51.1)

    result = await country.get_latest_co2g_kwh()

    assert result == co2_expected
    assert country.name == "fr"
    assert country.co2g_kwh == co2_expected
    assert country.co2g_kwh_source == CarbonIntensitySource.FILE
83 |
--------------------------------------------------------------------------------
/tests/hardwares/test_hardware.py:
--------------------------------------------------------------------------------
1 | import platform
2 | import shutil
3 | from collections import namedtuple
4 |
5 | import psutil
6 | import pytest
7 |
8 | from tracarbon import HardwareInfo
9 | from tracarbon.exceptions import TracarbonException
10 | from tracarbon.hardwares.gpu import NvidiaGPU
11 |
12 |
def test_get_platform_should_return_the_platform():
    """HardwareInfo.get_platform should agree with platform.system()."""
    assert HardwareInfo.get_platform() == platform.system()
19 |
20 |
def test_get_cpu_usage(mocker):
    """get_cpu_usage should surface psutil.cpu_percent unchanged."""
    mocker.patch.object(psutil, "cpu_percent", return_value=10.0)

    assert HardwareInfo.get_cpu_usage() == 10.0
28 |
29 |
def test_get_memory_usage(mocker):
    """get_memory_usage should report the 'used' field of virtual_memory."""
    expected_used = 30.0
    FakeMemory = namedtuple("Memory", "used")
    mocker.patch.object(psutil, "virtual_memory", return_value=FakeMemory(used=expected_used))

    assert HardwareInfo.get_memory_usage() == expected_used
39 |
40 |
def test_get_memory_total(mocker):
    """get_memory_total should report the 'total' field of virtual_memory."""
    expected_total = 300000.0
    FakeMemory = namedtuple("Memory", "total")
    mocker.patch.object(psutil, "virtual_memory", return_value=FakeMemory(total=expected_total))

    assert HardwareInfo.get_memory_total() == expected_total
50 |
51 |
def test_get_cpu_count(mocker):
    """get_number_of_cores should surface psutil.cpu_count."""
    mocker.patch.object(psutil, "cpu_count", return_value=2)

    assert HardwareInfo.get_number_of_cores() == 2
59 |
60 |
def test_get_gpu_power_usage(mocker):
    """A 'NNN W' nvidia-smi reading with exit code 0 should parse to an integer wattage."""
    mocker.patch.object(NvidiaGPU, "launch_shell_command", return_value=["226 W", 0])

    assert HardwareInfo.get_gpu_power_usage() == 226
69 |
70 |
def test_get_gpu_power_usage_with_no_0(mocker):
    """A non-zero shell exit code should raise TracarbonException."""
    mocker.patch.object(shutil, "which", return_value=True)
    mocker.patch.object(NvidiaGPU, "launch_shell_command", return_value=["0 W", -1])

    with pytest.raises(TracarbonException) as exception:
        HardwareInfo.get_gpu_power_usage()
    assert exception.value.args[0] == "No Nvidia GPU detected."
79 |
80 |
def test_get_gpu_power_usage_with_no_gpu():
    """Without nvidia-smi on PATH the call should raise TracarbonException."""
    with pytest.raises(TracarbonException) as exception:
        HardwareInfo.get_gpu_power_usage()
    assert exception.value.args[0] == "Nvidia GPU with nvidia-smi not found in PATH."
85 |
--------------------------------------------------------------------------------
/tracarbon/exporters/prometheus_exporter.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Any
3 | from typing import Dict
4 | from typing import Optional
5 |
6 | from loguru import logger
7 | from pydantic import Field
8 |
9 | from tracarbon.conf import PROMETHEUS_INSTALLED
10 | from tracarbon.exporters.exporter import Exporter
11 | from tracarbon.exporters.exporter import MetricGenerator
12 |
13 | if PROMETHEUS_INSTALLED:
14 | import prometheus_client
15 | from prometheus_client import Gauge
16 | from prometheus_client import start_http_server
17 |
18 | class PrometheusExporter(Exporter):
19 | """
20 | Send the metrics to Prometheus by running an HTTP server for the metrics exposure.
21 | """
22 |
23 | prometheus_metrics: Dict[str, Gauge] = Field(default_factory=dict)
24 | address: Optional[str] = None
25 | port: Optional[int] = None
26 |
27 | def __init__(self, **data: Any) -> None:
28 | super().__init__(**data)
29 | prometheus_client.REGISTRY.unregister(prometheus_client.GC_COLLECTOR)
30 | addr = self.address if self.address else os.environ.get("PROMETHEUS_ADDRESS", "::")
31 | port = self.port if self.port else int(os.environ.get("PROMETHEUS_PORT", 8081))
32 | start_http_server(
33 | addr=addr,
34 | port=port,
35 | )
36 |
37 | async def launch(self, metric_generator: MetricGenerator) -> None:
38 | """
39 | Launch the Prometheus exporter with the metrics.
40 |
41 | :param metric_generator: the metric generator
42 | """
43 | async for metric in metric_generator.generate():
44 | metric_name = metric.format_name(metric_prefix_name=self.metric_prefix_name, separator="_")
45 | if metric_name not in self.prometheus_metrics:
46 | self.prometheus_metrics[metric_name] = Gauge(
47 | metric_name,
48 | f"Tracarbon metric {metric_name}",
49 | [tag.key for tag in metric.tags],
50 | )
51 | metric_value = await metric.value()
52 | if metric_value:
53 | await self.add_metric_to_report(metric=metric, value=metric_value)
54 | logger.info(
55 | f"Sending metric[{metric_name}] with value [{metric_value}] and labels{metric.format_tags()} to Prometheus."
56 | )
57 | self.prometheus_metrics[metric_name].labels(*[tag.value for tag in metric.tags]).set(metric_value)
58 |
59 | @classmethod
60 | def get_name(cls) -> str:
61 | """
62 | Get the name of the exporter.
63 |
64 | :return: the Exporter's name
65 | """
66 | return "Prometheus"
67 |
--------------------------------------------------------------------------------
/tests/cli/test_cli.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from kubernetes import config
3 |
4 | from tracarbon import Country
5 | from tracarbon import EnergyUsage
6 | from tracarbon import Kubernetes
7 | from tracarbon import MacEnergyConsumption
8 | from tracarbon.cli import get_exporter
9 | from tracarbon.cli import run_metrics
10 | from tracarbon.exporters import DatadogExporter
11 | from tracarbon.exporters import StdoutExporter
12 | from tracarbon.hardwares import Container
13 | from tracarbon.hardwares import Pod
14 |
15 |
def test_get_exporter_by_name():
    """get_exporter should map known names to their exporter classes."""
    stdout_exporter = get_exporter(exporter_name="Stdout", metric_generators=[])
    datadog_exporter = get_exporter(exporter_name="Datadog", metric_generators=[])

    assert isinstance(stdout_exporter, StdoutExporter)
    assert isinstance(datadog_exporter, DatadogExporter)
22 |
23 |
def test_get_exporter_by_name_should_raise_error():
    """An unknown exporter name should raise a descriptive ValueError."""
    with pytest.raises(ValueError) as exception:
        get_exporter(exporter_name="unknown", metric_generators=[])

    assert "This exporter is not available in the list:" in exception.value.args[0]
29 |
30 |
@pytest.mark.darwin
def test_run_metrics_should_be_ok(mocker, caplog):
    """End-to-end run of the CLI metric loop with mocked energy readings (macOS-marked)."""
    exporter = "Stdout"
    # Pin the location so no external lookup happens.
    mocker.patch.object(
        Country,
        "get_location",
        return_value=Country(name="fr", co2g_kwh=50.0),
    )
    energy_usage = EnergyUsage(host_energy_usage=60.0)
    mocker.patch.object(config, "load_kube_config", return_value=None)
    mocker.patch.object(MacEnergyConsumption, "get_energy_usage", return_value=energy_usage)

    # First pass: host-level metrics only (no containers flag).
    run_metrics(exporter_name=exporter, running=False)

    assert "Metric name[test.carbon_emission_host]" in caplog.text
    assert "Metric name[test.energy_consumption_host]" in caplog.text
    assert "units:co2g" in caplog.text
    assert "units:watts" in caplog.text

    # Second pass: per-component readings plus a fake Kubernetes pod,
    # exercising the containers code path.
    energy_usage = EnergyUsage(cpu_energy_usage=15.0, memory_energy_usage=12.0)
    mocker.patch.object(MacEnergyConsumption, "get_energy_usage", return_value=energy_usage)
    mocker.patch.object(
        Kubernetes,
        "get_pods_usage",
        return_value=[
            Pod(
                name="pod_name",
                namespace="default",
                containers=[Container(name="container_name", cpu_usage="1", memory_usage=2)],
            )
        ],
    )

    run_metrics(exporter_name=exporter, running=False, containers=True)

    assert "Metric name[test.carbon_emission_kubernetes_total]" in caplog.text
    assert "Metric name[test.energy_consumption_kubernetes_total]" in caplog.text
    assert "units:co2mg" in caplog.text
    assert "units:milliwatts" in caplog.text
    assert "start_time" in caplog.text
    assert "end_time" in caplog.text
    assert "Report" in caplog.text
73 |
--------------------------------------------------------------------------------
/tests/locations/test_location.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from tracarbon.exceptions import CloudProviderRegionIsMissing
4 | from tracarbon.exceptions import CountryIsMissing
5 | from tracarbon.hardwares import CloudProviders
6 | from tracarbon.locations import AWSLocation
7 | from tracarbon.locations import Country
8 | from tracarbon.locations import Location
9 |
10 |
@pytest.mark.asyncio
async def get_current_country(mocker):
    # NOTE(review): the name lacks the `test_` prefix, so pytest never
    # collects this test. Before renaming/enabling it, confirm two other
    # points: `request` is patched with a plain string rather than a
    # callable/mock, and the call below is not awaited despite the asyncio
    # marker.
    location_expected = "fr"

    mocker.patch.object(Location, "request", location_expected)
    location = Location.get_current_country()

    assert location_expected == location
19 |
20 |
def test_country_location(mocker):
    """Outside a cloud provider, the location comes from the detected country code."""
    mocker.patch.object(
        CloudProviders,
        "is_running_on_cloud_provider",
        return_value=False,
    )
    mocker.patch.object(Country, "get_current_country", return_value="be")

    country = Country.get_location()

    assert country.name == "be"
    assert country.co2g_kwh == 161.0
36 |
37 |
def test_unknown_location(mocker):
    """A country code missing from the emission file should raise CountryIsMissing."""
    mocker.patch.object(
        CloudProviders,
        "is_running_on_cloud_provider",
        return_value=False,
    )
    mocker.patch.object(Country, "get_current_country", return_value="ze")

    with pytest.raises(CountryIsMissing) as exception:
        Country.get_location()

    assert exception.value.args[0] == "The country [ze] is not in the co2 emission file."
51 |
52 |
def test_world_emission_should_get_country():
    """from_eu_file should build the Country for a known alpha-2 code."""
    expected = Country(name="fr", co2g_kwh=51.1)

    country = Country.from_eu_file(country_code_alpha_iso_2="fr")

    assert country == expected
64 |
65 |
def test_world_emission_should_raise_error_when_country_is_missing():
    """from_eu_file should raise CountryIsMissing for an unknown code."""
    unknown_code = "zf"

    with pytest.raises(CountryIsMissing) as exception:
        Country.from_eu_file(country_code_alpha_iso_2=unknown_code)

    assert exception.value.args[0] == f"The country [{unknown_code}] is not in the co2 emission file."
73 |
74 |
def test_aws_location_should_return_an_error_if_region_not_exists():
    """An unknown AWS region should raise CloudProviderRegionIsMissing."""
    unknown_region = "zf"

    with pytest.raises(CloudProviderRegionIsMissing) as exception:
        AWSLocation(region_name=unknown_region)
    assert exception.value.args[0] == f"The region [{unknown_region}] is not in the AWS grid emissions factors file."
81 |
82 |
def test_aws_location_should_return_ok_if_region_exists():
    """A known AWS region should resolve its name, intensity, and source."""
    location = AWSLocation(region_name="eu-west-1")

    assert location.name == "AWS(eu-west-1)"
    assert location.co2g_kwh == 316.0
    assert location.co2g_kwh_source.value == "file"
91 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | import toml
17 |
18 | sys.path.insert(0, os.path.abspath("../.."))
19 | sys.path.insert(0, os.path.abspath("./_ext"))
20 |
21 |
def get_release_version() -> str:
    """
    Read the published release version from the repository's pyproject.toml.

    :return: the version string declared under the [project] table
    """
    return toml.load("../../pyproject.toml")["project"]["version"]
30 |
31 |
# -- Project information -----------------------------------------------------

project = "tracarbon"
copyright = "2023 Tracarbon contributors"
author = "Florian Valeye"
version = get_release_version()  # single-sourced from pyproject.toml

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc", "edit_on_github"]
autodoc_typehints = "description"
nitpicky = True
# External classes autodoc cannot resolve; silenced so nitpicky builds stay green.
nitpick_ignore = [
    ("py:class", "datetime.datetime"),
    ("py:class", "datadog.threadstats.base.ThreadStats"),
    ("py:class", "threading.Event"),
    ("py:class", "prometheus_client.metrics.Gauge"),
    ("py:class", "kubernetes.client.api.custom_objects_api.CustomObjectsApi"),
    ("py:class", "ConfigDict"),
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]


# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "pydata_sphinx_theme"
html_logo = "../../logo.png"
html_favicon = "../../logo.png"
html_theme_options = {
    "external_links": [],
    "github_url": "https://github.com/fvaleye/tracarbon",
    "icon_links": [],
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# Settings consumed by the local edit_on_github extension (see _ext/).
edit_on_github_project = "fvaleye/tracarbon"
edit_on_github_branch = "main"
page_source_prefix = "python/docs/source"
88 |
--------------------------------------------------------------------------------
/tests/exporters/test_exporter.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import psutil
4 | import pytest
5 |
6 | from tracarbon import Country
7 | from tracarbon import MetricGenerator
8 | from tracarbon.exporters import Metric
9 | from tracarbon.exporters import StdoutExporter
10 | from tracarbon.exporters import Tag
11 |
12 |
def test_exporters_should_run_and_print_the_metrics(mocker, caplog):
    """One start/stop cycle should log the metric and populate the report."""
    async def read_memory_percent() -> float:
        return psutil.virtual_memory()[2]

    mocker.patch.object(
        Country,
        "get_location",
        return_value=Country(name="fr", co2g_kwh=50.0),
    )
    memory_metric = Metric(
        name="test_metric_1",
        value=read_memory_percent,
        tags=[Tag(key="test", value="tags")],
    )
    exporter = StdoutExporter(
        quit=True,
        metric_generators=[MetricGenerator(metrics=[memory_metric])],
    )

    exporter.start(interval_in_seconds=1)
    exporter.stop()

    assert memory_metric.name in caplog.text
    assert str(memory_metric.value) in caplog.text
    assert str(memory_metric.tags) in caplog.text
    report = exporter.metric_report["test_metric_1"]
    assert report.exporter_name == StdoutExporter.get_name()
    assert report.metric == memory_metric
    assert report.total > 0
    assert report.average > 0
    assert report.minimum < sys.float_info.max
    assert report.maximum > 0
    assert report.call_count == 1
    assert report.last_report_time is not None
    assert report.average_interval_in_seconds is None
47 |
48 |
def test_metric_name_and_tags_format():
    """format_name honours the optional prefix; format_tags joins key and value."""
    async def read_memory_percent() -> float:
        return psutil.virtual_memory()[2]

    metric = Metric(
        name="test_metric_2",
        value=read_memory_percent,
        tags=[Tag(key="test", value="tags")],
    )

    assert metric.format_name(metric_prefix_name="tracarbon", separator="_") == "tracarbon_test_metric_2"
    assert metric.format_name(separator="_") == "test_metric_2"
    assert metric.format_tags(separator=":") == ["test:tags"]
69 |
70 |
@pytest.mark.asyncio
async def test_metric_generator_generate():
    """MetricGenerator.generate should yield the configured metric first."""
    async def read_memory_percent() -> float:
        return psutil.virtual_memory()[2]

    generator = MetricGenerator(
        metrics=[
            Metric(
                name="test_metric_2",
                value=read_memory_percent,
                tags=[Tag(key="test", value="tags")],
            )
        ]
    )

    first_metric = await generator.generate().__anext__()

    assert first_metric.name == "test_metric_2"
86 |
--------------------------------------------------------------------------------
/docs/source/usage.rst:
--------------------------------------------------------------------------------
1 | *****
2 | Usage
3 | *****
4 |
5 | Tracarbon
6 | =========
7 |
8 | 1. Set the environment variable or directly set the configuration.
9 | 2. Choose :class:`.Exporter` with your list of :class:`.Metric`.
10 | 3. Launch Tracarbon!
11 |
12 | Run the CLI
13 | ===========
14 |
15 | Run Tracarbon CLI with the default Stdout exporter and the CO2 Signal API:
16 |
17 | >>> TRACARBON_CO2SIGNAL_API_KEY=API_KEY tracarbon run
18 |
19 | Run Tracarbon CLI with the default Stdout exporter without the CO2 Signal API:
20 |
21 | >>> tracarbon run
22 |
23 | Run Tracarbon CLI with the default Stdout exporter with a specified location:
24 |
25 | >>> tracarbon run --country-code-alpha-iso-2 fr
26 |
27 | Run Tracarbon CLI with the Datadog exporter:
28 |
29 | >>> TRACARBON_CO2SIGNAL_API_KEY=API_KEY DATADOG_API_KEY=DATADOG_API_KEY DATADOG_APP_KEY=DATADOG_APP_KEY tracarbon run --exporter-name Datadog
30 |
31 | Run Tracarbon CLI on Linux hardware with Kubernetes and send the metrics to Prometheus:
32 |
33 | >>> tracarbon run --exporter-name Prometheus --containers
34 |
35 | Run the code
36 | ============
37 | >>> from tracarbon import TracarbonBuilder, TracarbonConfiguration
38 | >>>
39 | >>> configuration = TracarbonConfiguration(co2signal_api_key="API_KEY") # Your configuration
40 | >>> tracarbon = TracarbonBuilder(configuration=configuration).build()
41 | >>> tracarbon.start()
42 | >>> # Your code
43 | >>> tracarbon.stop()
44 | >>>
45 | >>> with tracarbon:
46 | >>> # Your code
47 | >>>
48 | >>> report = tracarbon.report # Get the report
49 |
50 | Run the code with general metrics
51 | =================================
52 | >>> from tracarbon import TracarbonBuilder, TracarbonConfiguration
53 | >>> from tracarbon.exporters import StdoutExporter
54 | >>> from tracarbon.general_metrics import CarbonEmissionGenerator, EnergyConsumptionGenerator
55 | >>>
56 | >>> configuration = TracarbonConfiguration(co2signal_api_key="API_KEY") # Your configuration
57 | >>> metric_generators = [EnergyConsumptionGenerator(), CarbonEmissionGenerator()]
58 | >>> exporter = StdoutExporter(metric_generators=metric_generators) # Your exporter
59 | >>> tracarbon = TracarbonBuilder(configuration=configuration).with_exporter(exporter=exporter).build()
60 | >>> tracarbon.start()
61 | >>> # Your code
62 | >>> tracarbon.stop()
63 | >>>
64 | >>> with tracarbon:
65 | >>> # Your code
66 | >>>
67 | >>> report = tracarbon.report # Get the report
68 |
69 | Run the code with a custom configuration
70 | =========================================
71 | >>> from tracarbon import TracarbonBuilder, TracarbonConfiguration
72 | >>> from tracarbon.exporters import StdoutExporter, MetricGenerator, Metric, Tag
73 | >>> from tracarbon.emissions import CarbonEmission
74 | >>>
75 | >>> configuration = TracarbonConfiguration(co2signal_api_key="API_KEY") # Your configuration
76 | >>> metric_generators = [MetricGenerator(metrics=[Metric(name="custom_metric", value=CustomClass().run, tags=[Tag(key="key", value="value")])])] # Your custom metrics
77 | >>> exporter = StdoutExporter(metric_generators=metric_generators) # Your exporter
78 | >>> tracarbon = TracarbonBuilder(configuration=configuration).with_exporter(exporter=exporter).build()
79 | >>> tracarbon.start()
80 | >>> # Your code
81 | >>> tracarbon.stop()
82 | >>>
83 | >>> with tracarbon:
84 | >>> # Your code
85 | >>>
86 | >>> report = tracarbon.report # Get the report
87 |
--------------------------------------------------------------------------------
/tests/carbon_emissions/test_carbon_emissions.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from tracarbon import CarbonEmission
4 | from tracarbon import CarbonUsage
5 | from tracarbon import CarbonUsageUnit
6 | from tracarbon import EnergyUsage
7 | from tracarbon import LinuxEnergyConsumption
8 | from tracarbon import MacEnergyConsumption
9 | from tracarbon import UsageType
10 | from tracarbon.locations import Country
11 |
12 |
@pytest.mark.asyncio
@pytest.mark.darwin
async def test_carbon_emission_should_run_to_convert_watt_hours_to_co2g_on_mac(mocker):
    """CarbonEmission on macOS converts the measured watts into CO2 grams for every usage type."""
    co2g_per_kwh = 20.0
    # 60 W over 1 s at 20 gCO2/kWh -> 60 / 3600 / 1000 * 20 = 0.0003333... gCO2
    expected_usage = 0.0003333333333333334
    co2_expected = CarbonUsage(
        host_carbon_usage=expected_usage,
        cpu_carbon_usage=expected_usage,
        memory_carbon_usage=expected_usage,
        gpu_carbon_usage=expected_usage,
    )
    # Distinct name for the wattage: previously `energy_usage` was bound to a float
    # and immediately rebound to the EnergyUsage object, obscuring the intent.
    watts = 60.0
    energy_usage = EnergyUsage(
        host_energy_usage=watts,
        cpu_energy_usage=watts,
        memory_energy_usage=watts,
        gpu_energy_usage=watts,
    )
    name_alpha_iso_2 = "fr"
    mocker.patch.object(Country, "get_latest_co2g_kwh", return_value=co2g_per_kwh)
    mocker.patch.object(MacEnergyConsumption, "get_energy_usage", return_value=energy_usage)
    carbon_emission = CarbonEmission(
        location=Country(name=name_alpha_iso_2, co2g_kwh=co2g_per_kwh),
    )

    co2g = await carbon_emission.get_co2_usage()

    assert co2g == co2_expected
41 |
42 |
@pytest.mark.asyncio
@pytest.mark.linux
async def test_carbon_emission_should_run_to_convert_watt_hours_to_co2g_on_linux(
    mocker,
):
    """CarbonEmission on Linux converts the host energy usage into CO2 grams."""
    co2g_per_kwh = 20.0
    country_code = "fr"
    expected = CarbonUsage(
        host_carbon_usage=0.0003333333333333334,
    )
    measured_energy = EnergyUsage(host_energy_usage=60.0)
    mocker.patch.object(Country, "get_latest_co2g_kwh", return_value=co2g_per_kwh)
    mocker.patch.object(LinuxEnergyConsumption, "get_energy_usage", return_value=measured_energy)
    emission = CarbonEmission(
        location=Country(name=country_code, co2g_kwh=co2g_per_kwh),
    )

    result = await emission.get_co2_usage()

    assert result == expected
63 |
64 |
def test_carbon_usage_with_type_and_conversion():
    """CarbonUsage exposes the per-type values and converts between CO2 units."""
    usages = {
        UsageType.HOST: 12.4,
        UsageType.CPU: 8.4,
        UsageType.MEMORY: 3,
        UsageType.GPU: 1,
    }

    carbon_usage = CarbonUsage(
        host_carbon_usage=usages[UsageType.HOST],
        cpu_carbon_usage=usages[UsageType.CPU],
        memory_carbon_usage=usages[UsageType.MEMORY],
        gpu_carbon_usage=usages[UsageType.GPU],
    )

    # Before conversion: values come back unchanged, unit is grams.
    for usage_type, value in usages.items():
        assert carbon_usage.get_carbon_usage_on_type(usage_type) == value
    assert carbon_usage.unit == CarbonUsageUnit.CO2_G

    carbon_usage.convert_unit(CarbonUsageUnit.CO2_MG)

    # After conversion: every value is scaled by 1000, unit is milligrams.
    for usage_type, value in usages.items():
        assert carbon_usage.get_carbon_usage_on_type(usage_type) == value * 1000
    assert carbon_usage.unit == CarbonUsageUnit.CO2_MG
91 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "tracarbon"
3 | authors = [{name = "Florian Valeye", email = "fvaleye@github.com"}]
4 | version = "0.8.0"
5 | description = "Tracarbon is a Python library that tracks your device's energy consumption and calculates your carbon emissions."
6 | readme = "README.md"
7 | license = "Apache-2.0"
8 | requires-python = ">=3.9"
9 | keywords = ["energy", "sustainability", "energy-consumption", "electricity-consumption", "energy-efficiency", "carbon-footprint", "carbon-emissions"]
10 | classifiers = [
11 | "Development Status :: 3 - Alpha",
12 | "Programming Language :: Python :: 3 :: Only"
13 | ]
14 | dependencies = [
15 | "loguru>=0.6,<0.8",
16 | "aiohttp>=3.9.3,<4.0.0",
17 | "aiocache>=0.12.1,<0.13.0",
18 | "aiofiles>=23.2,<26.0",
19 | "psutil>=5.9.8",
20 | "ujson>=5.9.0,<6.0.0",
21 | "msgpack>=1.1.1,<2.0.0",
22 | "pydantic>=1.10.7,<3.0.0",
23 | "typer>=0.7,<0.21",
24 | "ec2-metadata>=2.14.0,<3.0.0",
25 | "python-dotenv>=0.21,<1.3",
26 | "asyncer>=0.0.5,<0.0.11",
27 | ]
28 |
29 | [project.optional-dependencies]
30 | datadog = ["datadog>=0.44,<0.53"]
31 | prometheus = ["prometheus-client>=0.16,<0.24"]
32 | kubernetes = ["kubernetes>=26.1,<35.0"]
33 | dev = [
34 | "mypy>=1.10,<2.0",
35 | "ruff>=0.11.13,<0.15.0",
36 | "pytest>=8.4.0,<9.0.0",
37 | "pytest-mock>=3.14.0,<4.0.0",
38 | "pytest-asyncio>=0.24.0,<1.3.0",
39 | "pytest-cov>=5.0.0,<8.0.0",
40 | "pytest-xdist>=3.6.1,<4.0.0",
41 | "pytest-clarity>=1.0.1,<2.0.0",
42 | "sphinx>=7.4.7,<8.0.0",
43 | "pydata-sphinx-theme>=0.14.4,<0.17.0",
44 | "toml>=0.10.2,<0.11.0",
45 | "types-ujson>=5.10.0,<6.0.0",
46 | "datadog>=0.44,<0.53",
47 | "prometheus-client>=0.16,<0.24",
48 | "types-requests>=2.32.4,<3.0.0",
49 | "bandit>=1.7.9,<2.0.0",
50 | "radon>=6.0.1,<7.0.0",
51 | "types-aiofiles>=24.1.0,<26.0.0",
52 | "kubernetes>=26.1,<35.0",
53 | "autodoc_pydantic==2.2.0",
54 | "uv",
55 | "pre-commit>=3.7.0,<5.0.0"
56 | ]
57 | all = [
58 | "tracarbon[datadog]",
59 | "tracarbon[prometheus]",
60 | "tracarbon[kubernetes]",
61 | ]
62 |
63 | [build-system]
64 | requires = ["setuptools>=61.0"]
65 | build-backend = "setuptools.build_meta"
66 |
67 | [tool.setuptools]
68 | include-package-data = true
69 |
70 | [tool.setuptools.packages.find]
71 | where = ["."]
72 | include = ["tracarbon*"]
73 |
74 | [project.scripts]
75 | tracarbon = "tracarbon.cli:main"
76 |
77 | [project.urls]
78 | documentation = "https://fvaleye.github.io"
79 | repository = "https://github.com/fvaleye/tracarbon"
80 |
81 | [tool.mypy]
82 | files = "tracarbon/"
83 | exclude = "venv,^tests"
84 | mypy_path = "./stubs"
85 | disallow_any_generics = true
86 | disallow_subclassing_any = true
87 | disallow_untyped_calls = true
88 | disallow_untyped_defs = true
89 | disallow_incomplete_defs = true
90 | check_untyped_defs = true
91 | disallow_untyped_decorators = true
92 | no_implicit_optional = true
93 | warn_redundant_casts = true
94 | warn_unused_ignores = true
95 | warn_return_any = false
96 | implicit_reexport = true
97 | strict_equality = true
98 | strict_optional = false
99 |
100 | [tool.bandit]
101 | skips = ["B404", "B607", "B602", "B603"]
102 | exclude_dirs = ["tests", "scripts", ".venv"]
103 |
104 | [tool.ruff]
105 | fix = true
106 | line-length = 120
107 |
108 | [tool.ruff.lint]
109 | select = ["I", "S", "B"]
110 | ignore = ["B023", "S603"]
111 |
112 | [tool.ruff.lint.per-file-ignores] # Don’t apply ruff rules to our tests
113 | "**/tests/*" = ["S"]
114 |
115 | [tool.ruff.lint.isort]
116 | force-single-line = true
117 |
118 | [tool.pytest.ini_options]
119 | addopts = "--cov=tracarbon -v --asyncio-mode=auto"
120 | markers = [
121 | "darwin",
122 | "windows",
123 | "linux"
124 | ]
125 | testpaths = [
126 | "tests",
127 | ]
128 |
--------------------------------------------------------------------------------
/tracarbon/builder.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from typing import Dict
3 | from typing import Optional
4 |
5 | from pydantic import BaseModel
6 | from pydantic import ConfigDict
7 | from pydantic import Field
8 |
9 | from tracarbon.conf import TracarbonConfiguration
10 | from tracarbon.exporters import Exporter
11 | from tracarbon.exporters import MetricReport
12 | from tracarbon.exporters import StdoutExporter
13 | from tracarbon.general_metrics import CarbonEmissionGenerator
14 | from tracarbon.locations import Country
15 | from tracarbon.locations import Location
16 |
17 |
class TracarbonReport(BaseModel):
    """
    Tracarbon report to store running statistics.
    """

    # Wall-clock timestamps set by Tracarbon.start()/stop(); None until then.
    start_time: Optional[datetime.datetime] = None
    end_time: Optional[datetime.datetime] = None
    # Snapshot of the exporter's metric reports, copied in Tracarbon.stop().
    metric_report: Dict[str, MetricReport] = Field(default_factory=dict)
    # MetricReport is not a pydantic model, so arbitrary types must be allowed.
    model_config = ConfigDict(arbitrary_types_allowed=True)
27 |
28 |
class Tracarbon:
    """
    Tracarbon instance: runs the exporter at the configured interval and records a report.

    Usable either through explicit start()/stop() calls or as a context manager.
    """

    configuration: TracarbonConfiguration
    exporter: Exporter
    location: Location
    report: TracarbonReport

    def __init__(
        self,
        configuration: TracarbonConfiguration,
        exporter: Optional[Exporter],
        location: Optional[Location],
    ) -> None:
        """
        :param configuration: the Tracarbon configuration
        :param exporter: the exporter publishing the metrics; TracarbonBuilder.build()
            always provides one, hence no None-check here
        :param location: the location used for the carbon intensity; same remark
        """
        self.configuration = configuration
        self.exporter = exporter
        self.location = location
        self.report = TracarbonReport()

    def __enter__(self) -> "Tracarbon":
        self.start()
        return self

    # Renamed parameters (was `type`, which shadows the builtin) — dunder is
    # called positionally by the interpreter, so this is backward-compatible.
    def __exit__(self, exc_type, exc_value, traceback) -> None:  # type: ignore
        # Always stop the exporter, even when the managed block raised.
        self.stop()

    def start(self) -> None:
        """
        Start Tracarbon: record the start time and launch the exporter loop.
        """
        # NOTE(review): naive local time; consider timezone-aware timestamps.
        self.report.start_time = datetime.datetime.now()
        self.exporter.start(interval_in_seconds=self.configuration.interval_in_seconds)

    def stop(self) -> None:
        """
        Stop Tracarbon: snapshot the exporter's metric report, record the end time,
        then stop the exporter.
        """
        self.report.metric_report = self.exporter.metric_report
        self.report.end_time = datetime.datetime.now()
        self.exporter.stop()
71 |
72 |
class TracarbonBuilder(BaseModel):
    """
    Fluent builder assembling a Tracarbon instance from its configuration.
    """

    exporter: Optional[Exporter] = None
    location: Optional[Location] = None
    configuration: TracarbonConfiguration = TracarbonConfiguration()

    def with_location(self, location: Location) -> "TracarbonBuilder":
        """
        Set the location to use.

        :param location: the location
        :return: this builder, for chaining
        """
        self.location = location
        return self

    def with_exporter(self, exporter: Exporter) -> "TracarbonBuilder":
        """
        Set the exporter to use.

        :param exporter: the exporter
        :return: this builder, for chaining
        """
        self.exporter = exporter
        return self

    def build(self) -> Tracarbon:
        """
        Build the Tracarbon instance, filling in defaults for any missing part:
        the location is auto-detected and a stdout carbon-emission exporter is used.
        """
        location = self.location
        if not location:
            location = Country.get_location(
                co2signal_api_key=self.configuration.co2signal_api_key,
                co2signal_url=self.configuration.co2signal_url,
            )
            self.location = location
        exporter = self.exporter
        if not exporter:
            exporter = StdoutExporter(metric_generators=[CarbonEmissionGenerator(location=location)])
            self.exporter = exporter
        return Tracarbon(
            configuration=self.configuration,
            exporter=exporter,
            location=location,
        )
117 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release to PyPI
2 |
3 | on:
4 | push:
5 | tags: ["v*"]
6 |
7 | defaults:
8 | run:
9 | working-directory: ./
10 |
11 | jobs:
12 | validate-git-tag:
13 | name: Validate the git tag
14 | runs-on: ubuntu-22.04
15 | steps:
16 | - uses: actions/checkout@v3
      - name: compare git tag with pyproject metadata
18 | run: |
19 | TAG=${GITHUB_REF##*/}
20 | CURR_VER=$( grep version pyproject.toml | head -n 1 | awk '{print $3}' | tr -d '"' )
21 | if [[ "${TAG}" != "v${CURR_VER}" ]]; then
22 | echo "Pyproject metadata has version set to ${CURR_VER}, but got pushed tag ${TAG}."
23 | exit 1
24 | fi
25 |
26 | release-pypi-ubuntu:
27 | needs: validate-git-tag
28 | runs-on: ubuntu-latest
29 | steps:
30 | - uses: actions/checkout@v3
31 |
32 | - uses: actions/setup-python@v5
33 | with:
34 | python-version: 3.9
35 |
36 | - name: Install uv and twine
37 | run: pip install uv twine
38 |
39 | - name: Setup
40 | run: make init
41 |
42 | - name: Build
43 | run: uv build
44 |
45 | - name: Publish
46 | run: twine upload dist/*
47 | env:
48 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
49 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
50 |
51 | release-documentation:
52 | needs: release-pypi-ubuntu
53 | runs-on: ubuntu-latest
54 | steps:
55 | - uses: actions/checkout@v3
56 |
57 | - uses: actions/setup-python@v5
58 | with:
59 | python-version: 3.9
60 |
61 | - name: Install uv
62 | run: pip install uv
63 |
64 | - name: Setup
65 | run: make init
66 |
67 | - name: Build and Publish Sphinx documentation
68 | run: |
69 | echo "Generate the new documentation"
70 | make build-documentation
71 | mv docs/build ~/build
72 |
73 | echo "Configure git"
74 | git config --global user.name 'Github Action'
75 | git config --global user.email 'fvaleye@users.noreply.github.com'
76 | git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/$GITHUB_REPOSITORY
77 |
78 | echo "Commit the new changes in the gh-pages"
79 | git reset --hard HEAD
80 | git clean -d -fx .
81 | git fetch
82 | git checkout gh-pages
83 | cp -avr ~/build/html/. ./documentation
84 |
85 | git status
86 | git add ./documentation
87 | git commit -m "Publish the new documentation for ${GITHUB_REF_NAME}"
88 | git push origin gh-pages
89 |
90 | release-docker:
91 | needs: release-pypi-ubuntu
92 | runs-on: ubuntu-latest
93 | steps:
94 | - name: Checkout
95 | uses: actions/checkout@v3
96 |
97 | - name: Set up QEMU
98 | uses: docker/setup-qemu-action@v2
99 |
100 | - name: Set up Docker Buildx
101 | uses: docker/setup-buildx-action@v2
102 |
103 | - name: Docker meta
104 | id: meta
105 | uses: docker/metadata-action@v4
106 | with:
107 | images: |
108 | florianvaleye/tracarbon
109 | tags: |
110 | type=ref,event=branch
111 | type=semver,pattern={{version}}
112 |
113 | - name: Login to DockerHub
114 | uses: docker/login-action@v2
115 | with:
116 | username: ${{ secrets.DOCKERHUB_USERNAME }}
117 | password: ${{ secrets.DOCKERHUB_TOKEN }}
118 |
119 | - name: Build and push
120 | uses: docker/build-push-action@v4
121 | with:
122 | context: .
123 | push: true
124 | tags: ${{ steps.meta.outputs.tags }}
125 | labels: ${{ steps.meta.outputs.labels }}
126 |
--------------------------------------------------------------------------------
/tests/hardwares/test_containers.py:
--------------------------------------------------------------------------------
1 | from kubernetes import config
2 | from kubernetes.client import CoreV1Api
3 | from kubernetes.client import CustomObjectsApi
4 | from kubernetes.client import V1Namespace
5 | from kubernetes.client import V1NamespaceList
6 | from kubernetes.client import V1ObjectMeta
7 |
8 | from tracarbon import HardwareInfo
9 | from tracarbon.hardwares.containers import Container
10 | from tracarbon.hardwares.containers import Kubernetes
11 | from tracarbon.hardwares.containers import Pod
12 |
13 |
def test_get_pods_usage(mocker):
    """Kubernetes.get_pods_usage turns the metrics API payload into Pod/Container usage fractions."""
    # Raw response shape of the metrics.k8s.io custom-objects API (PodMetricsList).
    return_value = {
        "kind": "PodMetricsList",
        "apiVersion": "metrics.k8s.io/v1beta1",
        "metadata": {},
        "items": [
            {
                "metadata": {
                    "name": "grafana-5745b58656-8q4q8",
                    "namespace": "default",
                    "creationTimestamp": "2023-01-09T08:01:49Z",
                    "labels": {
                        "app.kubernetes.io/instance": "grafana",
                        "app.kubernetes.io/name": "grafana",
                        "pod-template-hash": "5745b58656",
                    },
                },
                "timestamp": "2023-01-09T08:01:44Z",
                "window": "15s",
                "containers": [
                    {
                        "name": "grafana",
                        "usage": {"cpu": "1000m", "memory": "500Mi"},
                    }
                ],
            },
            {
                "metadata": {
                    "name": "shorty-5469f85799-n4k2x",
                    "namespace": "default",
                    "creationTimestamp": "2023-01-09T08:01:49Z",
                    "labels": {
                        "app.kubernetes.io/instance": "shorty",
                        "app.kubernetes.io/name": "shorty",
                        "pod-template-hash": "5469f85799",
                    },
                },
                "timestamp": "2023-01-09T08:01:31Z",
                "window": "18s",
                "containers": [{"name": "shorty", "usage": {"cpu": "380444n", "memory": "3304Ki"}}],
            },
            {
                "metadata": {
                    "name": "subnet-router",
                    "namespace": "default",
                    "creationTimestamp": "2023-01-09T08:01:49Z",
                    "labels": {"app": "tailscale"},
                },
                "timestamp": "2023-01-09T08:01:35Z",
                "window": "15s",
                "containers": [
                    {
                        "name": "tailscale",
                        "usage": {"cpu": "14016200n", "memory": "14912Ki"},
                    }
                ],
            },
        ],
    }
    # Host totals the container usage is normalized against: 2 cores, 1e9 bytes of memory.
    number_of_cores = 2
    mocker.patch.object(HardwareInfo, "get_number_of_cores", return_value=number_of_cores)
    memory_total = 1000000000
    mocker.patch.object(HardwareInfo, "get_memory_total", return_value=memory_total)
    mocker.patch.object(CustomObjectsApi, "list_namespaced_custom_object", return_value=return_value)
    # Only the "default" namespace exists in the fake cluster.
    mocker.patch.object(
        CoreV1Api,
        "list_namespace",
        return_value=V1NamespaceList(items=[V1Namespace(metadata=V1ObjectMeta(name="default"))]),
    )
    mocker.patch.object(config, "load_kube_config", return_value=None)
    # Expected fractions, e.g. 1000m CPU on 2 cores -> 0.5; 500Mi -> 500 * 1e6 / 1e9 = 0.5
    # (the implementation uses decimal factors for Ki/Mi — these expectations match that).
    pods_usage_expected = [
        Pod(
            name="grafana-5745b58656-8q4q8",
            namespace="default",
            containers=[Container(name="grafana", cpu_usage=0.5, memory_usage=0.5)],
        ),
        Pod(
            name="shorty-5469f85799-n4k2x",
            namespace="default",
            containers=[Container(name="shorty", cpu_usage=0.000190222, memory_usage=0.003304)],
        ),
        Pod(
            name="subnet-router",
            namespace="default",
            containers=[Container(name="tailscale", cpu_usage=0.0070081, memory_usage=0.014912)],
        ),
    ]

    kubernetes = Kubernetes()
    pods_usage = list(kubernetes.get_pods_usage())

    assert pods_usage == pods_usage_expected
106 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/energy.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from enum import Enum
3 | from typing import ClassVar
4 | from typing import Optional
5 |
6 | from pydantic import BaseModel
7 |
8 |
class EnergyUsageUnit(Enum):
    """
    Energy usage unit.

    Units supported by EnergyUsage.convert_unit().
    """

    WATT = "watts"
    MILLIWATT = "milliwatts"
16 |
17 |
class UsageType(Enum):
    """
    Usage type: the hardware component an energy or carbon value refers to.
    """

    HOST = "host"
    CPU = "cpu"
    MEMORY = "memory"
    GPU = "gpu"
27 |
28 |
class EnergyUsage(BaseModel):
    """
    Energy report in watts.

    Per-component energy usage; only the host value is mandatory, the
    component values default to None when a sensor cannot provide them.
    """

    host_energy_usage: float = 0.0
    cpu_energy_usage: Optional[float] = None
    memory_energy_usage: Optional[float] = None
    gpu_energy_usage: Optional[float] = None
    unit: EnergyUsageUnit = EnergyUsageUnit.WATT

    def get_energy_usage_on_type(self, usage_type: UsageType) -> Optional[float]:
        """
        Get the energy usage based on the type.

        :param usage_type: the type of energy to return
        :return: the energy of the type, or None for an unknown type
        """
        # Mapping instead of an if/elif chain; .get returns None for unknown types.
        return {
            UsageType.HOST: self.host_energy_usage,
            UsageType.CPU: self.cpu_energy_usage,
            UsageType.MEMORY: self.memory_energy_usage,
            UsageType.GPU: self.gpu_energy_usage,
        }.get(usage_type)

    def convert_unit(self, unit: EnergyUsageUnit) -> None:
        """
        Convert the EnergyUsage values in place to the requested unit.

        :param unit: the target energy usage unit for the conversion
        """
        if self.unit == unit:
            return

        # With exactly two units, an unequal pair is always watt <-> milliwatt,
        # so a single scaling helper replaces the two duplicated branches.
        to_watt = unit == EnergyUsageUnit.WATT

        def _scaled(value: float) -> float:
            # milliwatts -> watts divides by 1000; watts -> milliwatts multiplies.
            return value / 1000 if to_watt else value * 1000

        self.host_energy_usage = _scaled(self.host_energy_usage)
        self.cpu_energy_usage = _scaled(self.cpu_energy_usage) if self.cpu_energy_usage is not None else None
        self.memory_energy_usage = _scaled(self.memory_energy_usage) if self.memory_energy_usage is not None else None
        self.gpu_energy_usage = _scaled(self.gpu_energy_usage) if self.gpu_energy_usage is not None else None
        self.unit = unit
79 |
80 |
class Power(BaseModel):
    """Power utility: conversions between energy and power units."""

    # 1 J = 1_000_000 µJ (and 1 W sustained for 1 s consumes 1 J).
    MICROJOULES_TO_WATT_FACTOR: ClassVar[int] = 1000000
    # 1 kWh = 1000 Wh.
    WH_TO_KWH_FACTOR: ClassVar[int] = 1000
    # 1 h = 3600 s.
    SECONDS_TO_HOURS_FACTOR: ClassVar[int] = 3600

    @staticmethod
    def watts_to_watt_hours(watts: float, previous_energy_measurement_time: Optional[datetime] = None) -> float:
        """
        Convert current watts to watt-hours (Wh) using the previous energy measurement time.

        Without a previous measurement time, a one-second interval is assumed.

        :param watts: the power in W
        :param previous_energy_measurement_time: the previous measurement time
        :return: the energy in watt-hours (Wh)
        """
        now = datetime.now()
        if previous_energy_measurement_time:
            time_difference_in_seconds = (now - previous_energy_measurement_time).total_seconds()
        else:
            time_difference_in_seconds = 1
        return watts * (time_difference_in_seconds / Power.SECONDS_TO_HOURS_FACTOR)

    @staticmethod
    def co2g_from_watts_hour(watts_hour: float, co2g_per_kwh: float) -> float:
        """
        Calculate the CO2g generated using watt-hours and the carbon intensity in CO2g/kWh.

        :param watts_hour: the energy in watt-hours (Wh)
        :param co2g_per_kwh: the carbon intensity in gCO2/kWh
        :return: the CO2g generated by the energy consumption
        """
        return (watts_hour / Power.WH_TO_KWH_FACTOR) * co2g_per_kwh

    @staticmethod
    def watts_from_microjoules(
        uj: float,
    ) -> float:
        """
        Get watts from microjoules.

        NOTE(review): dividing µJ by 1e6 yields joules; this equals average watts
        only when the energy was accumulated over one second — confirm callers
        sample at one-second intervals.

        :param uj: energy in microjoules
        :return: watts
        """
        return uj / Power.MICROJOULES_TO_WATT_FACTOR
124 |
--------------------------------------------------------------------------------
/tracarbon/exporters/json_exporter.py:
--------------------------------------------------------------------------------
1 | import atexit
2 | import os
3 | from datetime import datetime
4 | from typing import Any
5 |
6 | import aiofiles
7 | import ujson
8 |
9 | from tracarbon.exporters.exporter import Exporter
10 | from tracarbon.exporters.exporter import MetricGenerator
11 |
12 |
class JSONExporter(Exporter):
    """
    Write the metrics to a local JSON file.

    Each exported metric becomes one element of a JSON array; flush() closes
    the array so the file stays a valid JSON document across runs.
    """

    # Output file path; __init__ fills in a per-day default when empty.
    path: str = ""
    # Indentation passed to ujson.dumps for each metric object.
    indent: int = 4

    def __init__(self, **data: Any) -> None:
        # Default the path to a dated file name when none was provided.
        if "path" not in data or not data.get("path"):
            data["path"] = datetime.now().strftime("tracarbon_export_%d_%m_%Y.json")
        super().__init__(**data)
        # Register flush at exit so the array is closed even without an explicit stop.
        atexit.register(self.flush)

    def _strip_trailing_closing_bracket(self) -> None:
        """
        If the JSON file ends with a closing bracket, truncate it so we can append
        new elements and keep a valid JSON array across multiple runs.
        """
        if not os.path.isfile(self.path):
            return
        try:
            with open(self.path, "rb+") as file:
                file.seek(0, os.SEEK_END)
                end = file.tell()
                # Move backwards to find last non-whitespace char
                while end > 0:
                    end -= 1
                    file.seek(end)
                    ch = file.read(1)
                    if ch not in b" \t\r\n":
                        break
                # `end` is the offset of that char, so truncating there drops the bracket.
                # NOTE(review): if the file exists but is empty, `ch` is unbound here and
                # the resulting NameError is silently swallowed by the except below.
                if ch == b"]":
                    file.truncate(end)
        except Exception as exc:
            # Log and continue; we can still write a fresh array
            from loguru import logger

            logger.debug(f"JSONExporter: could not strip trailing bracket for {self.path}: {exc}")

    def flush(self) -> None:
        """
        Close the JSON array if needed by appending a closing bracket.
        """
        if not os.path.isfile(self.path):
            return
        try:
            with open(self.path, "rb+") as file:
                file.seek(0, os.SEEK_END)
                size = file.tell()
                if size == 0:
                    # Write empty array
                    file.write(b"[]")
                    return
                # Check if already closed
                pos = size
                last = None
                while pos > 0:
                    pos -= 1
                    file.seek(pos)
                    ch = file.read(1)
                    if ch not in b" \t\r\n":
                        last = ch
                        break
                if last != b"]":
                    # Writes at the current position (just past the last non-whitespace
                    # byte), overwriting up to two trailing whitespace bytes rather than
                    # appending at EOF — any extra trailing whitespace remains, which is
                    # still valid JSON.
                    file.write(b"\n]")
        except Exception as exc:
            from loguru import logger

            logger.debug(f"JSONExporter: flush failed for {self.path}: {exc}")

    async def launch(self, metric_generator: MetricGenerator) -> None:
        """
        Launch the JSON exporter with the metrics.

        :param metric_generator: the metric generator
        """
        # Ensure we can append to an existing closed array from a previous run
        self._strip_trailing_closing_bracket()
        async for metric in metric_generator.generate():
            metric_value = await metric.value()
            if metric_value is not None:
                await self.add_metric_to_report(metric=metric, value=metric_value)
                file_exists = os.path.isfile(self.path)
                async with aiofiles.open(self.path, "a+") as file:
                    # Separate elements with a comma; open the array on the first write.
                    if file_exists and os.path.getsize(self.path) > 0:
                        await file.write(f",{os.linesep}")
                    else:
                        await file.write(f"[{os.linesep}")
                    # NOTE(review): datetime.utcnow() is naive and deprecated since
                    # Python 3.12 — consider datetime.now(timezone.utc).
                    await file.write(
                        ujson.dumps(
                            {
                                "timestamp": str(datetime.utcnow()),
                                "metric_name": metric.format_name(metric_prefix_name=self.metric_prefix_name),
                                "metric_value": metric_value,
                                "metric_tags": metric.format_tags(),
                            },
                            indent=self.indent,
                        )
                    )

    @classmethod
    def get_name(cls) -> str:
        """
        Get the name of the exporter.

        :return: the Exporter's name
        """
        return "JSON"
123 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/containers.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from typing import Iterator
3 | from typing import List
4 | from typing import Optional
5 |
6 | from pydantic import BaseModel
7 |
8 | from tracarbon.conf import KUBERNETES_INSTALLED
9 | from tracarbon.exceptions import TracarbonException
10 | from tracarbon.hardwares.hardware import HardwareInfo
11 |
12 | if KUBERNETES_INSTALLED:
13 | from kubernetes import config
14 | from kubernetes.client import CoreV1Api
15 | from kubernetes.client import CustomObjectsApi
16 |
    class Container(BaseModel):
        """
        Container of Kubernetes.
        """

        name: str
        cpu_usage: float  # in percentage of the total CPU
        memory_usage: float  # in percentage of the total memory

        def __init__(self, **data: Any) -> None:
            """
            Initialize the Container values based on cpu and memory usages.

            Kubernetes quantity strings (e.g. "380444n", "1000m", "3304Ki", "500Mi")
            are normalized to a fraction of the host's total CPU/memory.
            NOTE(review): Ki/Mi/Gi are binary (1024-based) suffixes in the Kubernetes
            resource model, but decimal (1000-based) factors are used below — confirm
            whether this approximation is intended (the unit tests rely on it).
            """
            cores = HardwareInfo.get_number_of_cores()
            memory_total = HardwareInfo.get_memory_total()
            if isinstance(data["cpu_usage"], str):
                # n = nanocores, u = microcores, m = millicores; divide by the
                # core count to get a fraction of the whole machine.
                if "n" in data["cpu_usage"]:
                    data["cpu_usage"] = (float(data["cpu_usage"].replace("n", "")) / 1000000000) / cores
                elif "u" in data["cpu_usage"]:
                    data["cpu_usage"] = (float(data["cpu_usage"].replace("u", "")) / 1000000) / cores
                elif "m" in data["cpu_usage"]:
                    data["cpu_usage"] = (float(data["cpu_usage"].replace("m", "")) / 1000) / cores

            if isinstance(data["memory_usage"], str):
                if "Ki" in data["memory_usage"]:
                    data["memory_usage"] = (float(data["memory_usage"].replace("Ki", "")) * 1000) / memory_total
                elif "Mi" in data["memory_usage"]:
                    data["memory_usage"] = (float(data["memory_usage"].replace("Mi", "")) * 1000000) / memory_total
                elif "Gi" in data["memory_usage"]:
                    data["memory_usage"] = (float(data["memory_usage"].replace("Gi", "")) * 1000000000) / memory_total

            super().__init__(**data)
49 |
    class Pod(BaseModel):
        """
        Pod for Kubernetes.
        """

        name: str  # pod name from the metrics metadata
        namespace: str  # namespace the pod runs in
        containers: List[Container]  # per-container normalized usage
58 |
59 | class Kubernetes(BaseModel):
60 | """
61 | Kubernetes client.
62 | """
63 |
64 | namespaces: Optional[List[str]] = None
65 | api: CustomObjectsApi
66 | group: str = "metrics.k8s.io"
67 | version: str = "v1beta1"
68 |
69 | model_config = {
70 | "arbitrary_types_allowed": True,
71 | }
72 |
73 | def __init__(self, **data: Any) -> None:
74 | try:
75 | config.load_incluster_config()
76 | except Exception:
77 | config.load_kube_config()
78 |
79 | if "api" not in data:
80 | data["api"] = CustomObjectsApi()
81 | super().__init__(**data)
82 |
83 | def refresh_namespaces(self) -> None:
84 | """
85 | Refresh the names of the namespaces.
86 | """
87 | self.namespaces = [item.metadata.name for item in CoreV1Api().list_namespace().items]
88 |
89 | def get_pods_usage(self, namespace: Optional[str] = None) -> Iterator[Pod]:
90 | """
91 | Get Pods with usage.
92 |
93 | :param: namespaces: list of namespaces for getting the pods.
94 | :return: an iterator of the pods
95 | """
96 | self.refresh_namespaces()
97 | if namespace and self.namespaces and namespace not in self.namespaces:
98 | raise TracarbonException(
99 | ValueError(
100 | f"The Kubernetes namespace {namespace} is not available in the namespaces {self.namespaces}."
101 | )
102 | )
103 | for n in self.namespaces:
104 | if namespace and namespace != n:
105 | continue
106 |
107 | resource = self.api.list_namespaced_custom_object(
108 | group=self.group,
109 | version=self.version,
110 | namespace=n,
111 | plural="pods",
112 | )
113 | for pod in resource["items"]:
114 | yield Pod(
115 | name=pod["metadata"]["name"],
116 | namespace=pod["metadata"]["namespace"],
117 | containers=[
118 | Container(
119 | name=container["name"],
120 | cpu_usage=container["usage"]["cpu"],
121 | memory_usage=container["usage"]["memory"],
122 | )
123 | for container in pod["containers"]
124 | ],
125 | )
126 |
--------------------------------------------------------------------------------
/tests/hardwares/test_sensors.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import requests
3 |
4 | from tracarbon import RAPL
5 | from tracarbon import AWSEC2EnergyConsumption
6 | from tracarbon import EnergyConsumption
7 | from tracarbon import LinuxEnergyConsumption
8 | from tracarbon import TracarbonException
9 | from tracarbon.hardwares import EnergyUsage
10 | from tracarbon.hardwares import HardwareInfo
11 | from tracarbon.hardwares import WindowsEnergyConsumption
12 | from tracarbon.hardwares.cloud_providers import AWS
13 |
14 |
@pytest.mark.darwin
def test_get_platform_should_return_the_platform_energy_consumption_mac():
    """On macOS the platform sensor reads the battery adapter power via ioreg."""
    consumption = EnergyConsumption.from_platform()

    expected_command = """ioreg -rw0 -a -c AppleSmartBattery | plutil -extract '0.BatteryData.AdapterPower' raw -"""
    assert consumption.shell_command == expected_command
    assert consumption.init is False
24 |
25 |
def test_get_platform_should_raise_exception():
    """An unknown platform is rejected with a TracarbonException."""
    with pytest.raises(TracarbonException) as exception:
        EnergyConsumption.from_platform(platform="unknown")
    error_message = exception.value.args[0]
    assert error_message == "This unknown hardware is not yet implemented."
30 |
31 |
def test_is_ec2_should_return_false_on_exception():
    """Outside EC2 the metadata probe fails, so is_ec2 reports False."""
    result = AWS.is_ec2()
    assert result is False
34 |
35 |
@pytest.mark.asyncio
async def test_aws_sensor_with_gpu_should_return_energy_consumption(mocker):
    """A GPU instance profile adds the GPU power draw to the host energy usage."""
    sensor = AWSEC2EnergyConsumption(instance_type="p2.8xlarge")

    # Static profile values loaded for the p2.8xlarge instance type.
    assert sensor.cpu_idle == 15.55
    assert sensor.cpu_at_10 == 44.38
    assert sensor.cpu_at_50 == 91.28
    assert sensor.cpu_at_100 == 124.95
    assert sensor.memory_idle == 97.6
    assert sensor.memory_at_10 == 146.4
    assert sensor.memory_at_50 == 195.2
    assert sensor.memory_at_100 == 292.8
    assert sensor.has_gpu is True
    assert sensor.delta_full_machine == 25.8

    mocker.patch.object(HardwareInfo, "get_cpu_usage", return_value=50)
    mocker.patch.object(HardwareInfo, "get_memory_usage", return_value=50)
    gpu_power = 1805.4
    mocker.patch.object(HardwareInfo, "get_gpu_power_usage", return_value=gpu_power)
    expected_host_usage = sensor.cpu_at_50 + sensor.memory_at_50 + sensor.delta_full_machine + gpu_power

    energy_usage = await sensor.get_energy_usage()

    assert energy_usage.host_energy_usage == expected_host_usage
62 |
63 |
@pytest.mark.asyncio
async def test_aws_sensor_without_gpu_should_return_energy_consumption(mocker):
    """A CPU-only instance type must load its power profile without any GPU share."""
    sensor = AWSEC2EnergyConsumption(instance_type="m5.8xlarge")

    # Expected power profile of an m5.8xlarge, attribute by attribute.
    expected_profile = {
        "cpu_idle": 19.29,
        "cpu_at_10": 48.88,
        "cpu_at_50": 114.57,
        "cpu_at_100": 159.33,
        "memory_idle": 19.27,
        "memory_at_10": 30.8,
        "memory_at_50": 79.37,
        "memory_at_100": 127.94,
        "delta_full_machine": 32.0,
    }
    for attribute, expected in expected_profile.items():
        assert getattr(sensor, attribute) == expected
    assert sensor.has_gpu is False

    mocker.patch.object(HardwareInfo, "get_cpu_usage", return_value=50)
    mocker.patch.object(HardwareInfo, "get_memory_usage", return_value=50)
    expected_host_energy = sensor.cpu_at_50 + sensor.memory_at_50 + sensor.delta_full_machine

    energy_usage = await sensor.get_energy_usage()

    assert energy_usage.host_energy_usage == expected_host_energy
86 |
87 |
def test_aws_sensor_should_return_error_when_instance_type_is_missing():
    """An instance type absent from the power profile data must raise."""
    with pytest.raises(TracarbonException):
        AWSEC2EnergyConsumption(instance_type="fefe")
93 |
94 |
def test_is_ec2_should_return_true(mocker):
    """When the metadata endpoint answers, AWS.is_ec2 must report True."""
    mocker.patch.object(requests, "head", return_value=None)

    assert AWS.is_ec2() is True
103 |
104 |
@pytest.mark.asyncio
async def test_get_platform_should_return_the_platform_energy_consumption_linux_error(
    mocker,
):
    """Without RAPL support, the Linux sensor must fail with an explicit message."""
    expected_message = "This Linux hardware is not yet supported."
    mocker.patch.object(RAPL, "is_rapl_compatible", return_value=False)

    with pytest.raises(TracarbonException) as exception:
        await LinuxEnergyConsumption().get_energy_usage()
    assert exception.value.args[0] == expected_message
114 |
115 |
@pytest.mark.asyncio
async def test_get_platform_should_return_the_platform_energy_consumption_linux(mocker):
    """With RAPL available, the Linux sensor must forward the RAPL energy report."""
    expected_usage = EnergyUsage(host_energy_usage_watts=1.8)
    mocker.patch.object(RAPL, "is_rapl_compatible", return_value=True)
    mocker.patch.object(RAPL, "get_energy_report", return_value=expected_usage)

    results = await LinuxEnergyConsumption().get_energy_usage()

    assert results == expected_usage
129 |
130 |
@pytest.mark.asyncio
async def test_get_platform_should_return_the_platform_energy_consumption_windows_error():
    """The Windows sensor is not implemented and must raise on use."""
    expected_message = "This Windows hardware is not yet supported."

    with pytest.raises(TracarbonException) as exception:
        await WindowsEnergyConsumption().get_energy_usage()
    assert exception.value.args[0] == expected_message
136 |
--------------------------------------------------------------------------------
/tracarbon/cli/__init__.py:
--------------------------------------------------------------------------------
1 | import time
2 | from typing import List
3 | from typing import Optional
4 |
5 | import typer
6 | from loguru import logger
7 |
8 | from tracarbon.builder import TracarbonBuilder
9 | from tracarbon.conf import KUBERNETES_INSTALLED
10 | from tracarbon.exporters import Exporter
11 | from tracarbon.exporters import MetricGenerator
12 | from tracarbon.general_metrics import CarbonEmissionGenerator
13 | from tracarbon.general_metrics import EnergyConsumptionGenerator
14 | from tracarbon.locations import Country
15 |
16 | app = typer.Typer()
17 |
18 |
@app.command(help="List the exporters")
def list_exporters(displayed: bool = True) -> List[str]:
    """
    List all the exporters available.

    :param displayed: log the exporter names when True
    :return: the names of every registered Exporter subclass
    """
    exporters = [exporter_class.get_name() for exporter_class in Exporter.__subclasses__()]
    if displayed:
        logger.info(f"Available Exporters: {exporters}")
    return exporters
28 |
29 |
def get_exporter(
    exporter_name: str,
    metric_generators: List[MetricGenerator],
    tracarbon_builder: Optional[TracarbonBuilder] = None,
) -> Exporter:
    """
    Get the exporter based on the name with its metrics.

    :param exporter_name: the name of the exporter
    :param metric_generators: the list of the metrics generators
    :param tracarbon_builder: the configuration of Tracarbon
    :return: the configured exporter
    :raises ValueError: when the exporter name is not in the available list
    """
    if not tracarbon_builder:
        tracarbon_builder = TracarbonBuilder()
    exporters = list_exporters(displayed=False)
    if exporter_name not in exporters:
        raise ValueError(f"This exporter is not available in the list: {exporters}")

    try:
        # Membership was checked above, so the lookup normally succeeds.
        selected_exporter = next(cls for cls in Exporter.__subclasses__() if cls.get_name() == exporter_name)
    except Exception:
        logger.exception("This exporter initiation failed.")
        # Bare raise re-raises the active exception with its traceback intact.
        raise
    return selected_exporter(
        metric_generators=metric_generators, metric_prefix_name=tracarbon_builder.configuration.metric_prefix_name
    )  # type: ignore
57 |
58 |
def add_containers_generator(location: Country) -> List[MetricGenerator]:
    """
    Add metric generators for containers if available

    :param location: the country for the metric generators of containers
    :return: the list of metric generators for containers
    :raises ImportError: when the kubernetes optional dependency is missing
    """
    # Guard clause: fail fast when the optional dependency is absent.
    if not KUBERNETES_INSTALLED:
        raise ImportError("kubernetes optional dependency is not installed")

    # Imported lazily so the module loads without the kubernetes extra.
    from tracarbon.general_metrics import CarbonEmissionKubernetesGenerator
    from tracarbon.general_metrics import EnergyConsumptionKubernetesGenerator

    generators: List[MetricGenerator] = [
        EnergyConsumptionKubernetesGenerator(location=location),
        CarbonEmissionKubernetesGenerator(location=location),
    ]
    return generators
76 |
77 |
def run_metrics(
    exporter_name: str,
    country_code_alpha_iso_2: Optional[str] = None,
    running: bool = True,
    containers: bool = False,
) -> None:
    """
    Run the metrics with the selected exporter

    :param exporter_name: the exporter name to run
    :param country_code_alpha_iso_2: the alpha iso2 country name where it's running
    :param running: keep running the metrics
    :param containers: activate the containers feature
    :return:
    """
    tracarbon_builder = TracarbonBuilder()
    location = Country.get_location(
        co2signal_api_key=tracarbon_builder.configuration.co2signal_api_key,
        co2signal_url=tracarbon_builder.configuration.co2signal_url,
        country_code_alpha_iso_2=country_code_alpha_iso_2,
    )
    metric_generators: List[MetricGenerator] = [
        EnergyConsumptionGenerator(location=location),
        CarbonEmissionGenerator(
            location=location,
        ),
    ]
    if containers:
        metric_generators.extend(add_containers_generator(location=location))

    tracarbon = None
    try:
        exporter = get_exporter(
            exporter_name=exporter_name,
            metric_generators=metric_generators,
            tracarbon_builder=tracarbon_builder,
        )
        tracarbon = tracarbon_builder.with_location(location=location).with_exporter(exporter=exporter).build()
        # The module-level loguru logger is already in scope; the previous
        # redundant local ``from loguru import logger`` was removed.
        logger.info("Tracarbon CLI started.")
        with tracarbon:
            # Busy-wait loop; ``running=False`` allows a single no-op pass in tests.
            while running:
                time.sleep(tracarbon_builder.configuration.interval_in_seconds)
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the CLI: exit quietly.
        pass
    except Exception as e:
        logger.exception(f"Error in Tracarbon execution: {e}")

    if tracarbon:
        logger.info(f"Tracarbon CLI exited. Tracarbon report: {tracarbon.report}")
    else:
        logger.info("Tracarbon CLI exited with errors during initialization.")
131 |
132 |
@app.command()
def run(
    exporter_name: str = "Stdout",
    country_code_alpha_iso_2: Optional[str] = None,
    containers: bool = False,
) -> None:
    """
    Run Tracarbon.

    :param exporter_name: the exporter name to run (defaults to Stdout)
    :param country_code_alpha_iso_2: the alpha iso2 country name where it's running
    :param containers: activate the containers feature
    """
    run_metrics(
        exporter_name=exporter_name,
        country_code_alpha_iso_2=country_code_alpha_iso_2,
        containers=containers,
    )
147 |
148 |
def main() -> None:
    """Entry point for the Tracarbon command-line interface."""
    app()


if __name__ == "__main__":
    main()
155 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | https://fvaleye.github.io/.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/tests/hardwares/test_rapl.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import pathlib
3 |
4 | import pytest
5 |
6 | from tracarbon import RAPL
7 | from tracarbon.hardwares import EnergyUsageUnit
8 | from tracarbon.hardwares import RAPLResult
9 |
10 |
@pytest.mark.linux
@pytest.mark.darwin
def test_is_rapl_compatible(tmpdir):
    """RAPL compatibility is just the existence of the powercap directory."""
    assert RAPL().is_rapl_compatible() is False

    rapl_directory = tmpdir.mkdir("intel-rapl")

    assert RAPL(path=str(rapl_directory)).is_rapl_compatible() is True
19 |
20 |
@pytest.mark.asyncio
async def test_get_rapl_power_usage():
    """Read the RAPL fixture tree and check every domain reports its energy."""
    path = f"{pathlib.Path(__file__).parent.resolve()}/data/intel-rapl"
    rapl_separator_for_windows = "T"

    rapl_results = await RAPL(path=path, rapl_separator=rapl_separator_for_windows).get_rapl_power_usage()

    def by_energy_uj(rapl_result: RAPLResult) -> float:
        # energy_uj is declared as float on RAPLResult; the previous ``-> str``
        # annotation was wrong.
        return rapl_result.energy_uj

    rapl_results.sort(key=by_energy_uj)
    # (name, energy_uj) pairs expected in ascending energy order.
    expected = [
        ("T1T0-core", 3.0),
        ("T1T1-dram", 2433.0),
        ("T1-package-1", 20232.0),
        ("T0T1-dram", 2592370025.0),
        ("T0-package-0", 24346753748.0),
        ("T0T0-core", 43725162336.0),
    ]
    for rapl_result, (name, energy_uj) in zip(rapl_results, expected):
        assert rapl_result.name == name
        assert rapl_result.energy_uj == energy_uj
44 |
45 |
@pytest.mark.asyncio
@pytest.mark.linux
@pytest.mark.darwin
async def test_get_rapl_power_wrap_around_when_0():
    """When a counter wrapped around to 0, the max energy range must back the delta."""
    path = f"{pathlib.Path(__file__).parent.resolve()}/data/intel-rapl2"
    two_seconds_ago = datetime.datetime.now() - datetime.timedelta(seconds=2)
    rapl_separator_for_windows = "T"
    previous_results = {
        "T0-package-0": RAPLResult(
            name="T0-package-0", energy_uj=2, max_energy_uj=70000, timestamp=two_seconds_ago
        ),
        "T0T0-core": RAPLResult(
            name="T0T0-core", energy_uj=1, max_energy_uj=70000, timestamp=two_seconds_ago
        ),
    }
    rapl = RAPL(path=path, rapl_separator=rapl_separator_for_windows, rapl_results=previous_results)
    host_energy_usage_expected = 35
    cpu_energy_usage_expected = 35

    energy_report = await rapl.get_energy_report()
    energy_report.convert_unit(EnergyUsageUnit.MILLIWATT)
    assert round(energy_report.host_energy_usage, 0) == host_energy_usage_expected
    assert round(energy_report.cpu_energy_usage, 0) == cpu_energy_usage_expected
    assert energy_report.memory_energy_usage is None
69 |
70 |
@pytest.mark.asyncio
@pytest.mark.linux
@pytest.mark.darwin
async def test_get_total_uj_one_call():
    """Energy deltas over one minute must convert to the expected milliwatts."""
    path = f"{pathlib.Path(__file__).parent.resolve()}/data/intel-rapl2"
    rapl_separator_for_windows = "T"
    one_minute_ago = datetime.datetime.now() - datetime.timedelta(seconds=60)
    previous_results = {
        "T0-package-0": RAPLResult(
            name="T0-package-0", energy_uj=50000, max_energy_uj=70000, timestamp=one_minute_ago
        ),
        "T0T0-core": RAPLResult(
            name="T0T0-core", energy_uj=40000, max_energy_uj=70000, timestamp=one_minute_ago
        ),
    }
    rapl = RAPL(path=path, rapl_separator=rapl_separator_for_windows, rapl_results=previous_results)
    host_energy_usage_expected = 0.33
    cpu_energy_usage_expected = 0.5

    energy_report = await rapl.get_energy_report()
    energy_report.convert_unit(EnergyUsageUnit.MILLIWATT)
    assert round(energy_report.host_energy_usage, 2) == host_energy_usage_expected
    assert round(energy_report.cpu_energy_usage, 2) == cpu_energy_usage_expected
    assert energy_report.memory_energy_usage is None
94 |
95 |
@pytest.mark.asyncio
@pytest.mark.linux
@pytest.mark.darwin
async def test_results_with_two_packages_are_correctly_computed():
    """Energy deltas from two CPU packages must be summed per component."""
    path = f"{pathlib.Path(__file__).parent.resolve()}/data/intel-rapl"
    rapl_separator_for_windows = "T"

    one_milliwatt = 60000
    shared_max_energy_uj = 65532610987

    one_minute_ago = datetime.datetime.now() - datetime.timedelta(seconds=60)
    # Previous counter values, one milliwatt-minute behind the fixture files.
    previous_energies = {
        "T0-package-0": 24346753748 - one_milliwatt,
        "T0T0-core": 43725162336 - one_milliwatt,
        "T0T1-dram": 2592370025 - one_milliwatt,
        "T1-package-1": 20232 - one_milliwatt,
        "T1T0-core": 65532610987 - one_milliwatt + 3,
        "T1T1-dram": 2433 - one_milliwatt,
    }
    rapl_results = {
        name: RAPLResult(
            name=name, energy_uj=energy_uj, max_energy_uj=shared_max_energy_uj, timestamp=one_minute_ago
        )
        for name, energy_uj in previous_energies.items()
    }

    rapl = RAPL(path=path, rapl_separator=rapl_separator_for_windows, rapl_results=rapl_results)

    host_energy_usage_expected = 4
    cpu_energy_usage_expected = 2
    memory_energy_usage_expected = 2

    energy_report = await rapl.get_energy_report()
    energy_report.convert_unit(EnergyUsageUnit.MILLIWATT)
    assert round(energy_report.host_energy_usage, 2) == host_energy_usage_expected
    assert round(energy_report.cpu_energy_usage, 2) == cpu_energy_usage_expected
    assert round(energy_report.memory_energy_usage, 2) == memory_energy_usage_expected
    assert energy_report.gpu_energy_usage is None
139 |
--------------------------------------------------------------------------------
/tracarbon/locations/country.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import importlib.resources
3 | from typing import Any
4 | from typing import Optional
5 |
6 | import requests
7 | import ujson
8 | from aiocache import cached
9 | from loguru import logger
10 |
11 | from tracarbon.exceptions import CloudProviderRegionIsMissing
12 | from tracarbon.exceptions import CO2SignalAPIKeyIsMissing
13 | from tracarbon.exceptions import CountryIsMissing
14 | from tracarbon.hardwares import CloudProviders
15 | from tracarbon.locations.location import CarbonIntensitySource
16 | from tracarbon.locations.location import Location
17 |
18 |
class Country(Location):
    """
    Country definition.

    A location whose carbon intensity (CO2g/kWh) comes either from the bundled
    EU emission-intensity file or, when an API key is configured, from the
    CO2 Signal API.
    """

    @classmethod
    def from_eu_file(cls, country_code_alpha_iso_2: str) -> "Country":
        """
        Get the country from the file.

        :param country_code_alpha_iso_2: the alpha_iso_2 name of the country
        :return: the country built from its entry in the bundled file
        :raises CountryIsMissing: when the country is not listed in the file
        """
        resource_file = importlib.resources.files("tracarbon.locations.data").joinpath(
            "co2-emission-intensity-9.exhibit.json"
        )
        with resource_file.open("r") as json_file:
            countries_values = ujson.load(json_file)["countries"]
            for country in countries_values:
                # Input is lowercased for the comparison — presumably the file
                # stores names as lowercase alpha-iso-2 codes; verify against the data file.
                if country_code_alpha_iso_2.lower() == country["name"]:
                    return cls.model_validate(country)
        raise CountryIsMissing(f"The country [{country_code_alpha_iso_2}] is not in the co2 emission file.")

    @classmethod
    def get_current_country(cls, url: str = "https://ipinfo.io/json", timeout: int = 300) -> str:
        """
        Get the client's country using an internet access.

        :param url: the url to fetch the country from IP
        :param timeout: the timeout for the request
        :return: the client's country alpha_iso_2 name.
        :raises Exception: re-raised from the underlying HTTP request on failure
        """
        try:
            logger.debug(f"Send request to this url: {url}, timeout {timeout}s")
            text = requests.get(url, timeout=timeout).text
            content_json = ujson.loads(text)
            return content_json["country"]
        except Exception as exception:
            # Log the failing URL before propagating; the caller decides how to react.
            logger.error(f"Failed to request this url: {url}")
            raise exception

    @classmethod
    def get_location(
        cls,
        co2signal_api_key: Optional[str] = None,
        co2signal_url: Optional[str] = None,
        country_code_alpha_iso_2: Optional[str] = None,
    ) -> "Country":
        """
        Get the current location automatically: on cloud provider or a country.

        :param country_code_alpha_iso_2: the alpha iso 2 country name.
        :param co2signal_api_key: api key for fetching CO2 Signal API.
        :param co2signal_url: api url for fetching CO2 Signal API endpoint.
        :return: the country
        """
        # Cloud Providers: prefer the provider region when one is detected.
        # AWSLocation is defined later in this module.
        cloud_provider = CloudProviders.auto_detect()
        if cloud_provider:
            return AWSLocation(region_name=cloud_provider.region_name)

        # Local: resolve the country from the caller or via IP geolocation.
        if not country_code_alpha_iso_2:
            country_code_alpha_iso_2 = cls.get_current_country()
        if co2signal_api_key:
            # With an API key, carbon intensity is fetched live from CO2 Signal.
            return cls(
                co2signal_api_key=co2signal_api_key,
                co2signal_url=co2signal_url,
                name=country_code_alpha_iso_2,
                co2g_kwh_source=CarbonIntensitySource.CO2SignalAPI,
            )
        return cls.from_eu_file(country_code_alpha_iso_2=country_code_alpha_iso_2)

    # Cache the API answer for one hour to limit calls to CO2 Signal.
    @cached(
        ttl=3600,
    )  # type: ignore
    async def get_latest_co2g_kwh(self) -> float:
        """
        Get the latest CO2g_kwh for the Location from https://www.co2signal.com/.

        :return: the latest CO2g_kwh
        """
        # File-backed locations keep their static value; no API call needed.
        if self.co2g_kwh_source == CarbonIntensitySource.FILE:
            return self.co2g_kwh

        logger.info(f"Request the latest carbon intensity in Co2g/kwh for your country {self.name}.")
        if not self.co2signal_api_key:
            raise CO2SignalAPIKeyIsMissing()
        url = f"{self.co2signal_url}{self.name}"
        response = {}
        try:
            response = await self.request(
                url=url,
                headers={"auth-token": self.co2signal_api_key},
            )
            logger.debug(f"Response from the {url}: {response}.")
            if "data" in response:
                response = response["data"]
            self.co2g_kwh = float(response["carbonIntensity"])
            logger.info(f"The latest carbon intensity of your country {self.name} is: {self.co2g_kwh} CO2g/kwh.")
        except Exception:
            # Deliberate best-effort: on any API failure, keep the last known value.
            logger.error(
                f'Failed to get the latest carbon intensity of your country {self.name} {response if response else ""}.'
                f"Please check your API configuration."
                f"Fallback to use the last known CO2g/kWh of your location {self.co2g_kwh}"
            )
        return self.co2g_kwh

    def __hash__(self) -> int:
        # Hash on the country name so locations can key caches and sets.
        return hash(self.name)
129 |
130 |
class AWSLocation(Country):
    """
    AWS Location.

    Resolves the carbon intensity (CO2g/kWh) of an AWS region from the bundled
    grid-emissions-factors file.
    """

    def __init__(self, region_name: str, **data: Any) -> None:
        """
        :param region_name: the AWS region name, e.g. "eu-west-1"
        :raises CloudProviderRegionIsMissing: when the region is absent from the file
        """
        resource_file = importlib.resources.files("tracarbon.locations.data").joinpath("grid-emissions-factors-aws.csv")
        co2g_kwh = None
        with resource_file.open("r") as csv_file:
            reader = csv.reader(csv_file)
            for row in reader:
                if row[0] == region_name:
                    # Presumably the file stores metric tons CO2eq/kWh (× 1e6 → grams)
                    # — TODO confirm against the data file.
                    co2g_kwh = float(row[3]) * 1000000
                    # Region names are expected to be unique: stop at the first match.
                    break
        super().__init__(name=f"AWS({region_name})", co2g_kwh=co2g_kwh, **data)
        # Compare against None explicitly so a legitimate 0.0 intensity would not
        # be mistaken for a missing region (the previous truthiness test was wrong).
        if co2g_kwh is None:
            raise CloudProviderRegionIsMissing(
                f"The region [{region_name}] is not in the AWS grid emissions factors file."
            )

    @cached()  # type: ignore
    async def get_latest_co2g_kwh(self) -> float:
        """
        Get the latest co2g_kwh for AWS.

        :return: the latest co2g_kwh
        """
        # The file value is static, so it is cached indefinitely.
        return self.co2g_kwh

    async def get_co2g_kwh(self) -> float:
        """
        Get the Co2g per kwh.

        :return: the co2g/kwh value
        """
        return self.co2g_kwh
166 |
--------------------------------------------------------------------------------
/tracarbon/emissions/carbon_emissions.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from enum import Enum
3 | from typing import Any
4 | from typing import Optional
5 |
6 | from loguru import logger
7 | from pydantic import BaseModel
8 |
9 | from tracarbon.hardwares import EnergyConsumption
10 | from tracarbon.hardwares import Power
11 | from tracarbon.hardwares import Sensor
12 | from tracarbon.hardwares.energy import EnergyUsage
13 | from tracarbon.hardwares.energy import EnergyUsageUnit
14 | from tracarbon.hardwares.energy import UsageType
15 | from tracarbon.locations import Country
16 | from tracarbon.locations import Location
17 |
18 |
class CarbonUsageUnit(Enum):
    """
    Carbon usage unit.
    """

    CO2_G = "co2g"  # grams of CO2 equivalent
    CO2_MG = "co2mg"  # milligrams of CO2 equivalent
26 |
27 |
class CarbonUsage(BaseModel):
    """
    Carbon Usage of the different types.

    Component usages (cpu/memory/gpu) are None when that component was not
    measured; the host usage always carries a value.
    """

    host_carbon_usage: float = 0.0
    cpu_carbon_usage: Optional[float] = None
    memory_carbon_usage: Optional[float] = None
    gpu_carbon_usage: Optional[float] = None
    unit: CarbonUsageUnit = CarbonUsageUnit.CO2_G

    def get_carbon_usage_on_type(self, usage_type: UsageType) -> Optional[float]:
        """
        Get the carbon usage based on the type.

        :param usage_type: the type of energy to return
        :return: the carbon of the type, or None for an unknown type
        """
        if usage_type == UsageType.CPU:
            return self.cpu_carbon_usage
        elif usage_type == UsageType.GPU:
            return self.gpu_carbon_usage
        elif usage_type == UsageType.HOST:
            return self.host_carbon_usage
        elif usage_type == UsageType.MEMORY:
            return self.memory_carbon_usage
        return None

    def convert_unit(self, unit: CarbonUsageUnit) -> None:
        """
        Convert the carbon usage in place to the requested unit.

        Bug fix: component usages are tested against None instead of truthiness,
        so a legitimate 0.0 value is converted rather than silently replaced by
        None.

        :param unit: the carbon usage unit for the conversion
        """
        if self.unit != unit:
            if unit == CarbonUsageUnit.CO2_G and self.unit == CarbonUsageUnit.CO2_MG:
                self.host_carbon_usage = self.host_carbon_usage / 1000
                self.cpu_carbon_usage = self.cpu_carbon_usage / 1000 if self.cpu_carbon_usage is not None else None
                self.memory_carbon_usage = (
                    self.memory_carbon_usage / 1000 if self.memory_carbon_usage is not None else None
                )
                self.gpu_carbon_usage = self.gpu_carbon_usage / 1000 if self.gpu_carbon_usage is not None else None
                self.unit = CarbonUsageUnit.CO2_G
            elif unit == CarbonUsageUnit.CO2_MG and self.unit == CarbonUsageUnit.CO2_G:
                self.host_carbon_usage = self.host_carbon_usage * 1000
                self.cpu_carbon_usage = self.cpu_carbon_usage * 1000 if self.cpu_carbon_usage is not None else None
                self.memory_carbon_usage = (
                    self.memory_carbon_usage * 1000 if self.memory_carbon_usage is not None else None
                )
                self.gpu_carbon_usage = self.gpu_carbon_usage * 1000 if self.gpu_carbon_usage is not None else None
                self.unit = CarbonUsageUnit.CO2_MG
75 |
76 |
class CarbonEmission(Sensor):
    """
    Carbon Metric sensor in watts per second to calculate the CO2g/kwh emitted.
    """

    location: Location
    energy_consumption: EnergyConsumption
    # Timestamp of the previous measurement; None on the first run.
    previous_energy_consumption_time: Optional[datetime] = None

    def __init__(self, **data: Any) -> None:
        """
        Initialize the sensor, auto-detecting the location and the platform
        energy consumption when they are not provided.
        """
        if "location" not in data:
            data["location"] = Country.get_location()

        if "energy_consumption" not in data:
            data["energy_consumption"] = EnergyConsumption.from_platform()

        super().__init__(**data)

    async def get_energy_usage(self) -> EnergyUsage:
        """
        Generate energy usage.

        :return: the generated energy usage.
        """
        return await self.energy_consumption.get_energy_usage()

    def _carbon_usage_from_watts(self, watts: float, co2g_per_kwh: float) -> float:
        """
        Convert a power reading to grams of CO2 using the elapsed time since
        the previous measurement.

        :param watts: the power reading in watts
        :param co2g_per_kwh: the carbon intensity of the location in CO2g/kWh
        :return: the carbon emitted in grams of CO2
        """
        return Power.co2g_from_watts_hour(
            Power.watts_to_watt_hours(
                watts=watts,
                previous_energy_measurement_time=self.previous_energy_consumption_time,
            ),
            co2g_per_kwh=co2g_per_kwh,
        )

    async def get_co2_usage(self) -> CarbonUsage:
        """
        Run the Carbon Emission sensor and get the carbon emission generated.

        :return: the carbon usage.
        """
        energy_usage = await self.get_energy_usage()
        energy_usage.convert_unit(unit=EnergyUsageUnit.WATT)
        logger.debug(f"Energy consumption run: {energy_usage}W")

        co2g_per_kwh = await self.location.get_latest_co2g_kwh()
        logger.debug(f"Carbon Emission of the location: {co2g_per_kwh}g CO2 eq/kWh")
        # The conversion below was previously triplicated inline; it is now a
        # single helper applied per available component.
        host_carbon_usage = self._carbon_usage_from_watts(
            watts=energy_usage.host_energy_usage, co2g_per_kwh=co2g_per_kwh
        )
        cpu_carbon_usage = 0.0
        memory_carbon_usage = 0.0
        gpu_carbon_usage = 0.0
        if energy_usage.cpu_energy_usage:
            cpu_carbon_usage = self._carbon_usage_from_watts(
                watts=energy_usage.cpu_energy_usage, co2g_per_kwh=co2g_per_kwh
            )
        if energy_usage.memory_energy_usage:
            memory_carbon_usage = self._carbon_usage_from_watts(
                watts=energy_usage.memory_energy_usage, co2g_per_kwh=co2g_per_kwh
            )
        if energy_usage.gpu_energy_usage:
            gpu_carbon_usage = self._carbon_usage_from_watts(
                watts=energy_usage.gpu_energy_usage, co2g_per_kwh=co2g_per_kwh
            )
        # All components were measured against the same previous timestamp;
        # only now is it advanced for the next run.
        self.previous_energy_consumption_time = datetime.now()
        return CarbonUsage(
            host_carbon_usage=host_carbon_usage,
            cpu_carbon_usage=cpu_carbon_usage if cpu_carbon_usage > 0 else None,
            memory_carbon_usage=(memory_carbon_usage if memory_carbon_usage > 0 else None),
            gpu_carbon_usage=gpu_carbon_usage if gpu_carbon_usage > 0 else None,
            unit=CarbonUsageUnit.CO2_G,
        )
157 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/rapl.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | from datetime import datetime
4 | from pathlib import Path
5 | from typing import Dict
6 | from typing import List
7 | from typing import Optional
8 |
9 | import aiofiles
10 | from loguru import logger
11 | from pydantic import BaseModel
12 | from pydantic import Field
13 |
14 | from tracarbon.exceptions import HardwareRAPLException
15 | from tracarbon.hardwares.energy import EnergyUsage
16 | from tracarbon.hardwares.energy import Power
17 |
18 |
class RAPLResult(BaseModel):
    """
    RAPL result after reading the RAPL registry.
    """

    # RAPL domain name read from the `name` file, prefixed with the zone
    # identifier extracted from the directory name (see get_rapl_power_usage).
    name: str
    # Last energy counter value read from `energy_uj`, in microjoules.
    energy_uj: float
    # Counter maximum read from `max_energy_range_uj`, in microjoules;
    # used to compensate for counter wrap-around.
    max_energy_uj: float
    # Time at which the counter was read.
    timestamp: datetime
28 |
29 |
class RAPL(BaseModel):
    """
    RAPL to read energy consumption with Intel hardware.
    """

    # Powercap sysfs root exposing the RAPL domains.
    path: str = "/sys/class/powercap/intel-rapl"
    # Separator between zone components in a directory name (e.g. intel-rapl:0:0).
    rapl_separator: str = ":"
    # Last result per domain name, used to compute deltas between two reads.
    rapl_results: Dict[str, RAPLResult] = Field(default_factory=dict)
    # Cached list of domain directories to read; filled lazily.
    file_list: List[str] = Field(default_factory=list)

    def is_rapl_compatible(self) -> bool:
        """
        Check if the path of the hardware for reading RAPL energy measurements exists.

        :return: if the RAPL files path exists
        """
        return os.path.exists(self.path)

    def get_rapl_files_list(self) -> None:
        """
        Get the list of files containing RAPL energy measurements.
        Raise an error if the hardware is not compatible with RAPL.

        :raises ValueError: if the RAPL path does not exist.
        """
        if not self.is_rapl_compatible():
            raise ValueError(f"Path {self.path} does not exist for reading RAPL energy measurements")
        logger.debug("The hardware is RAPL compatible.")
        intel_rapl_regex = re.compile("intel-rapl")
        for directory_path, directory_names, _filenames in os.walk(self.path, topdown=True):
            # Prune non intel-rapl sub-directories in place so os.walk does not
            # descend into them. Slice assignment is required here: the previous
            # implementation removed entries while iterating the same list,
            # which silently skips elements.
            directory_names[:] = [directory for directory in directory_names if intel_rapl_regex.search(directory)]
            current_directory = directory_path.split("/")[-1]
            # Keep only zone directories (at least one separator in the name,
            # e.g. "intel-rapl:0" or "intel-rapl:0:0"), not the sysfs root.
            if len(current_directory.split(self.rapl_separator)) >= 2:
                self.file_list.append(directory_path)
        logger.debug(f"The RAPL file list collected: {self.file_list}.")

    async def get_rapl_power_usage(self) -> List[RAPLResult]:
        """
        Read the RAPL energy measurements files on paths provided.

        If energy_uj wrapped past max_energy_range_uj, the kernel resets it to 0;
        in that case the max_energy_range_uj constant is needed to rebuild the delta.

        :return: a list of the RAPL results.
        :raises HardwareRAPLException: if any RAPL file cannot be read or parsed.
        """
        rapl_results = []
        try:
            if not self.file_list:
                self.get_rapl_files_list()
            for file_path in self.file_list:
                # Keep the zone identifier (e.g. ":0:1") so that domains with
                # the same name in different packages stay distinct.
                name_prefix = Path(file_path).name.replace("intel-rapl", "")
                async with aiofiles.open(f"{file_path}/name", "r") as rapl_name:
                    name = await rapl_name.read()
                    name = f"{name_prefix}-{name}"
                async with aiofiles.open(f"{file_path}/energy_uj", "r") as rapl_energy:
                    energy_uj = float(await rapl_energy.read())
                async with aiofiles.open(f"{file_path}/max_energy_range_uj", "r") as rapl_max_energy:
                    max_energy_uj = float(await rapl_max_energy.read())
                rapl_results.append(
                    RAPLResult(
                        name=name,
                        energy_uj=energy_uj,
                        max_energy_uj=max_energy_uj,
                        timestamp=datetime.now(),
                    )
                )
        except Exception as exception:
            logger.exception("The RAPL read encountered an issue.")
            raise HardwareRAPLException(exception) from exception
        logger.debug(f"The RAPL results: {rapl_results}.")
        return rapl_results

    async def get_energy_report(self) -> EnergyUsage:
        """
        Get the energy report based on RAPL.

        :return: the energy usage report of the RAPL measurements
        """
        rapl_results = await self.get_rapl_power_usage()
        host_energy_usage_watts = 0.0
        cpu_energy_usage_watts = 0.0
        memory_energy_usage_watts = 0.0
        gpu_energy_usage_watts = 0.0
        for rapl_result in rapl_results:
            # On first sight of a domain, fall back to the current result so
            # the delta (and thus the reported power) is 0 for this round.
            previous_rapl_result = self.rapl_results.get(rapl_result.name, rapl_result)
            # Round to the nearest second to make calculation stable over small IO delays
            time_difference_seconds = round((rapl_result.timestamp - previous_rapl_result.timestamp).total_seconds())
            if time_difference_seconds <= 0:
                time_difference_seconds = 1
            energy_uj = rapl_result.energy_uj
            if previous_rapl_result.energy_uj > rapl_result.energy_uj:
                logger.debug(
                    f"Wrap-around detected in RAPL {rapl_result.name}. The current RAPL energy value ({rapl_result.energy_uj}) is lower than previous value ({previous_rapl_result.energy_uj})."
                )
                # The counter wrapped: add its maximum range to restore a monotonic delta.
                energy_uj = energy_uj + rapl_result.max_energy_uj
            watts = Power.watts_from_microjoules((energy_uj - previous_rapl_result.energy_uj) / time_difference_seconds)
            self.rapl_results[rapl_result.name] = rapl_result
            # Host-level usage aggregates the package and DRAM domains.
            if "package" in rapl_result.name or "ram" in rapl_result.name:
                host_energy_usage_watts += watts
            # Fixed: a plain `"core" in name` also matched "uncore", so the
            # uncore domain was double-counted into the CPU bucket.
            if ("core" in rapl_result.name and "uncore" not in rapl_result.name) or "cpu" in rapl_result.name:
                cpu_energy_usage_watts += watts
            if "ram" in rapl_result.name:
                memory_energy_usage_watts += watts
            if "uncore" in rapl_result.name:
                gpu_energy_usage_watts += watts
        energy_usage_report = EnergyUsage(
            host_energy_usage=host_energy_usage_watts,
            cpu_energy_usage=(cpu_energy_usage_watts if cpu_energy_usage_watts > 0 else None),
            memory_energy_usage=(memory_energy_usage_watts if memory_energy_usage_watts > 0 else None),
            gpu_energy_usage=(gpu_energy_usage_watts if gpu_energy_usage_watts > 0 else None),
        )
        logger.debug(f"The usage energy report measured with RAPL is {energy_usage_report}.")
        return energy_usage_report
144 |
--------------------------------------------------------------------------------
/tracarbon/exporters/exporter.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 | from abc import ABCMeta
4 | from abc import abstractmethod
5 | from datetime import datetime
6 | from threading import Event
7 | from threading import Timer
8 | from typing import AsyncGenerator
9 | from typing import Awaitable
10 | from typing import Callable
11 | from typing import Dict
12 | from typing import List
13 | from typing import Optional
14 |
15 | from asyncer import asyncify
16 | from loguru import logger
17 | from pydantic import BaseModel
18 | from pydantic import ConfigDict
19 | from pydantic import Field
20 |
21 | from tracarbon.hardwares.hardware import HardwareInfo
22 | from tracarbon.locations import Location
23 |
24 |
class Tag(BaseModel):
    """
    Tag for a metric.
    """

    # The tag's key.
    key: str
    # The tag's value.
    value: str
32 |
33 |
class Metric(BaseModel):
    """
    Global metric to use for the exporters.
    """

    name: str
    value: Callable[[], Awaitable[float]]
    tags: List[Tag] = Field(default_factory=list)

    def format_name(self, metric_prefix_name: Optional[str] = None, separator: str = ".") -> str:
        """
        Format the name of the metric with a prefix and separator.

        :param metric_prefix_name: the prefix to insert before the separator and the name.
        :param separator: the separator to use between the prefix and the name.
        """
        # Without a prefix the metric name is used as-is.
        if not metric_prefix_name:
            return self.name
        return f"{metric_prefix_name}{separator}{self.name}"

    def format_tags(self, separator: str = ":") -> List[str]:
        """
        Format tags with a separator.

        :param separator: the separator to insert between the key and value.
        """
        return [separator.join((tag.key, tag.value)) for tag in self.tags]
61 |
62 |
class MetricReport(BaseModel):
    """
    MetricReport is a report of the generated metrics.
    """

    # Name of the exporter that produced this report.
    exporter_name: str
    # The metric this report aggregates values for.
    metric: "Metric"
    # Smoothed interval between two consecutive reports, in seconds.
    average_interval_in_seconds: Optional[float] = None
    # Timestamp of the most recent reported value.
    last_report_time: Optional[datetime] = None
    # Sum of every reported value.
    total: float = 0.0
    # Mean of the reported values (total / call_count).
    average: float = 0.0
    # Smallest reported value; starts at float max so the first value replaces it.
    minimum: float = sys.float_info.max
    # Largest reported value seen so far.
    maximum: float = 0.0
    # Number of values reported.
    call_count: int = 0

    model_config = ConfigDict(arbitrary_types_allowed=True)
79 |
80 |
class MetricGenerator(BaseModel):
    """
    MetricGenerator generates metrics for the Exporter.
    """

    # The metrics this generator yields.
    metrics: List[Metric]
    # Platform name detected from the hardware.
    platform: str = HardwareInfo.get_platform()
    # Optional location attached to the generator (not used by this base class).
    location: Optional[Location] = None

    async def generate(self) -> AsyncGenerator[Metric, None]:
        """
        Generate a metric.

        :return: an asynchronous generator yielding each configured metric.
        """
        for metric in self.metrics:
            yield metric
96 |
97 |
class Exporter(BaseModel, metaclass=ABCMeta):
    """The Exporter interface."""

    # Generators producing the metrics this exporter publishes.
    metric_generators: List[MetricGenerator]
    # Event used to signal the rescheduling timer chain to stop.
    event: Optional[Event] = None
    # Flag set by stop() so no new timer is scheduled.
    stopped: bool = False
    # Optional prefix inserted before every metric name.
    metric_prefix_name: Optional[str] = None
    # Aggregated reports, keyed by metric name.
    metric_report: Dict[str, MetricReport] = Field(default_factory=dict)

    model_config = ConfigDict(arbitrary_types_allowed=True)

    @abstractmethod
    async def launch(self, metric_generator: "MetricGenerator") -> None:
        """
        Launch the exporter.
        Add the metric generator to the metric reporter.

        :param metric_generator: the metric generator
        """
        pass

    def start(self, interval_in_seconds: int) -> None:
        """
        Start the exporter and a dedicated timer configured with the configured timeout.

        :param: interval_in_seconds: the interval for the timer
        """
        self.stopped = False
        if not self.event:
            self.event = Event()

        def _run() -> None:
            # Run one export pass, then re-arm a one-shot daemon Timer so the
            # loop keeps going until stop() sets the flag and the event.
            asyncio.run(self._launch_all())
            if self.event and not self.stopped and not self.event.is_set():
                timer = Timer(interval_in_seconds, _run, [])
                timer.daemon = True
                timer.start()

        # Reset the aggregated reports before the first pass of this run.
        self.metric_report = dict()
        _run()

    def stop(self) -> None:
        """
        Stop the exporter and the associated timer.

        :return:
        """
        self.stopped = True
        if self.event:
            self.event.set()

    async def _launch_all(self) -> None:
        """
        Launch the exporter with all the metric generators.
        """
        for metric_generator in self.metric_generators:
            logger.debug(f"Running MetricGenerator[{metric_generator}].")
            await self.launch(metric_generator=metric_generator)

    async def add_metric_to_report(self, metric: "Metric", value: float) -> "MetricReport":
        """
        Add the generated metric to the report asynchronously.

        :param metric: the metric to add
        :param value: the metric value to add
        :return: the updated report for this metric
        """

        def add_metric_to_report() -> MetricReport:
            # Create the report lazily on first sight of this metric name.
            if metric.name not in self.metric_report:
                self.metric_report[metric.name] = MetricReport(exporter_name=self.get_name(), metric=metric)
            metric_report = self.metric_report[metric.name]
            now = datetime.now()
            if metric_report.last_report_time:
                # Smooth the interval: seed with the first observed gap, then
                # average the running value with each new gap.
                time_difference_in_s = (now - metric_report.last_report_time).total_seconds()
                metric_report.average_interval_in_seconds = (
                    time_difference_in_s
                    if not metric_report.average_interval_in_seconds
                    else (metric_report.average_interval_in_seconds + time_difference_in_s) / 2
                )

            metric_report.last_report_time = now
            metric_report.total += value
            metric_report.call_count += 1
            metric_report.average = metric_report.total / metric_report.call_count
            if value < metric_report.minimum:
                metric_report.minimum = value
            if value > metric_report.maximum:
                metric_report.maximum = value
            return metric_report

        # asyncify runs the synchronous update in a worker thread.
        return await asyncify(add_metric_to_report)()

    @classmethod
    @abstractmethod
    def get_name(cls) -> str:
        """
        Get the name of the exporter.

        :return: the Exporter's name
        """
        pass
200 |
--------------------------------------------------------------------------------
/tracarbon/hardwares/sensors.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import csv
3 | import importlib.resources
4 | from abc import ABC
5 | from abc import abstractmethod
6 | from typing import Any
7 |
8 | from loguru import logger
9 | from pydantic import BaseModel
10 | from pydantic import ConfigDict
11 |
12 | from tracarbon.exceptions import AWSSensorException
13 | from tracarbon.exceptions import TracarbonException
14 | from tracarbon.hardwares import EnergyUsage
15 | from tracarbon.hardwares.cloud_providers import CloudProviders
16 | from tracarbon.hardwares.hardware import HardwareInfo
17 | from tracarbon.hardwares.rapl import RAPL
18 |
19 |
class Sensor(ABC, BaseModel):
    """
    The Sensor contract.
    """

    # Allow field types that pydantic cannot validate natively.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    @abstractmethod
    async def get_energy_usage(self) -> EnergyUsage:
        """
        Run the sensor and generate energy usage in watt.

        :return: the generated energy usage.
        """
        pass
35 |
36 |
class EnergyConsumption(Sensor):
    """
    A sensor to calculate the energy consumption.
    """

    init: bool = False

    @staticmethod
    def from_platform(
        platform: str = HardwareInfo.get_platform(),
    ) -> "EnergyConsumption":
        """
        Get the energy consumption from the local platform or cloud provider.

        :return: the Energy Consumption
        """
        # A detected cloud provider takes precedence over the local platform.
        cloud_provider = CloudProviders.auto_detect()
        if cloud_provider:
            return AWSEC2EnergyConsumption(instance_type=cloud_provider.instance_type)

        # Dispatch on the platform name for local hardware.
        sensors_by_platform = {
            "Darwin": MacEnergyConsumption,
            "Linux": LinuxEnergyConsumption,
            "Windows": WindowsEnergyConsumption,
        }
        sensor_class = sensors_by_platform.get(platform)
        if sensor_class is None:
            raise TracarbonException(f"This {platform} hardware is not yet implemented.")
        return sensor_class()

    @abstractmethod
    async def get_energy_usage(self) -> EnergyUsage:
        """
        Run the sensor and generate energy usage.

        :return: the generated energy usage.
        """
        pass
75 |
76 |
class MacEnergyConsumption(EnergyConsumption):
    """
    Energy Consumption of the Mac in watts; only works when the Mac is plugged
    into a wall adapter.
    """

    # Reads the adapter power from the battery data in the I/O registry.
    shell_command: str = """ioreg -rw0 -a -c AppleSmartBattery | plutil -extract '0.BatteryData.AdapterPower' raw -"""

    async def get_energy_usage(self) -> EnergyUsage:
        """
        Run the sensor and generate energy usage.

        :return: the generated energy usage.
        """
        process = await asyncio.create_subprocess_shell(self.shell_command, stdout=asyncio.subprocess.PIPE)
        stdout, _stderr = await process.communicate()
        adapter_power_watts = float(stdout)
        return EnergyUsage(host_energy_usage=adapter_power_watts)
94 |
95 |
class LinuxEnergyConsumption(EnergyConsumption):
    """
    Energy Consumption of a Linux device: https://github.com/fvaleye/tracarbon/issues/1
    """

    # RAPL reader used when the powercap sysfs interface is available.
    rapl: RAPL = RAPL()

    async def get_energy_usage(self) -> EnergyUsage:
        """
        Run the sensor and generate energy usage.

        :return: the generated energy usage.
        :raises TracarbonException: if the hardware does not expose RAPL.
        """
        if self.rapl.is_rapl_compatible():
            return await self.rapl.get_energy_report()
        # Needless f-string prefix removed (no placeholders, ruff F541).
        raise TracarbonException("This Linux hardware is not yet supported.")
113 |
class WindowsEnergyConsumption(EnergyConsumption):
    """
    Energy Consumption of a Windows device: https://github.com/fvaleye/tracarbon/issues/2
    """

    async def get_energy_usage(self) -> EnergyUsage:
        """
        Run the sensor and generate energy usage.

        :return: the generated energy usage.
        :raises TracarbonException: always; Windows is not yet supported.
        """
        raise TracarbonException("This Windows hardware is not yet supported.")
126 |
127 |
class AWSEC2EnergyConsumption(EnergyConsumption):
    """
    The AWS EC2 Energy Consumption.

    Power profiles (idle / 10% / 50% / 100% load) for CPU and memory are loaded
    from the bundled aws-instances.csv dataset for the given instance type.
    """

    cpu_idle: float
    cpu_at_10: float
    cpu_at_50: float
    cpu_at_100: float
    memory_idle: float
    memory_at_10: float
    memory_at_50: float
    memory_at_100: float
    has_gpu: bool
    delta_full_machine: float

    @staticmethod
    def _to_float(value: str) -> float:
        # The dataset uses a comma as the decimal separator.
        return float(value.replace(",", "."))

    def __init__(self, instance_type: str, **data: Any) -> None:
        """
        Load the power profile of the instance type from the AWS instances dataset.

        :param instance_type: the EC2 instance type to look up in the dataset.
        :raises AWSSensorException: if the instance type is missing from the
            dataset or the file cannot be read.
        """
        resource_file = importlib.resources.files("tracarbon.hardwares.data").joinpath("aws-instances.csv")
        try:
            with resource_file.open("r") as csvfile:
                reader = csv.reader(csvfile)
                to_float = AWSEC2EnergyConsumption._to_float

                for row in reader:
                    if row[0] == instance_type:
                        # Column indices follow the cloud-carbon-coefficients layout.
                        data["cpu_idle"] = to_float(row[14])
                        data["cpu_at_10"] = to_float(row[15])
                        data["cpu_at_50"] = to_float(row[16])
                        data["cpu_at_100"] = to_float(row[17])
                        data["memory_idle"] = to_float(row[18])
                        data["memory_at_10"] = to_float(row[19])
                        data["memory_at_50"] = to_float(row[20])
                        data["memory_at_100"] = to_float(row[21])
                        data["has_gpu"] = to_float(row[22]) > 0
                        data["delta_full_machine"] = to_float(row[26])
                        super().__init__(
                            **data,
                        )
                        return
                raise AWSSensorException(f"The AWS instance type [{instance_type}] is missing from the aws instances file.")
        except Exception as exception:
            logger.exception("Error in the AWSSensor")
            raise AWSSensorException(exception) from exception

    @staticmethod
    def _watts_for_usage(usage: float, idle: float, at_10: float, at_50: float, at_100: float) -> float:
        # Map a usage percentage to the nearest measured power profile step.
        if usage >= 90:
            return at_100
        if usage >= 50:
            return at_50
        if usage >= 10:
            return at_10
        return idle

    async def get_energy_usage(self) -> EnergyUsage:
        """
        Run the sensor and generate energy usage.

        :return: the generated energy usage.
        """
        cpu_watts = self._watts_for_usage(
            HardwareInfo.get_cpu_usage(), self.cpu_idle, self.cpu_at_10, self.cpu_at_50, self.cpu_at_100
        )
        logger.debug(f"CPU: {cpu_watts}W")

        memory_watts = self._watts_for_usage(
            HardwareInfo.get_memory_usage(), self.memory_idle, self.memory_at_10, self.memory_at_50, self.memory_at_100
        )
        logger.debug(f"Memory: {memory_watts}W")

        gpu_watts = 0.0
        if self.has_gpu:
            gpu_watts = HardwareInfo.get_gpu_power_usage()
            # Fixed: this debug line previously mislabeled the GPU reading as "CPU".
            logger.debug(f"GPU: {gpu_watts}W")

        total_watts = cpu_watts + memory_watts + gpu_watts + self.delta_full_machine
        logger.debug(f"Total including the delta of the full machine: {total_watts}W")
        return EnergyUsage(
            host_energy_usage=total_watts,
            cpu_energy_usage=cpu_watts,
            memory_energy_usage=memory_watts,
            gpu_energy_usage=gpu_watts,
        )
212 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | 
4 | [](https://pypi.org/project/tracarbon/)
5 | [](https://fvaleye.github.io/tracarbon)
6 | [](https://github.com/fvaleye/tracarbon/blob/main/LICENSE.txt)
7 |
8 | ## 📌 Overview
9 |
10 | Tracarbon is a Python library that tracks your device's energy consumption and calculates your carbon emissions.
11 |
12 | It detects your location and your device automatically before starting to export measurements to an exporter.
13 | It can be used as a CLI with predefined metrics, or programmatically through the API by defining the metrics that you want.
14 |
15 | Read more in this [article](https://medium.com/@florian.valeye/tracarbon-track-your-devices-carbon-footprint-fb051fcc9009).
16 |
17 | ## 📦 Where to get it
18 |
19 | ```sh
20 | # Install Tracarbon
21 | pip install tracarbon
22 | ```
23 |
24 | ```sh
25 | # Install one or more exporters from the list
26 | pip install 'tracarbon[datadog,prometheus,kubernetes]'
27 | ```
28 |
29 | ### 🔌 Devices: energy consumption
30 |
31 | | **Devices** | **Description** |
32 | | ----------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
33 | | Mac | ✅ Global energy consumption of your Mac (must be plugged into a wall adapter). |
34 | | Linux | ⚠️ Only with [RAPL](https://web.eece.maine.edu/~vweaver/projects/rapl/). See [#1](https://github.com/fvaleye/tracarbon/issues/1). It works with containers on [Kubernetes](https://kubernetes.io/) using the [Metric API](https://kubernetes.io/docs/tasks/debug/debug-cluster/resource-metrics-pipeline/#metrics-api) if available. |
35 | | Windows | ❌ Not yet implemented. See [#184](https://github.com/hubblo-org/scaphandre/pull/184). |
36 |
37 | | **Cloud Provider** | **Description** |
38 | | ------------------ | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
39 | | AWS | ✅ Use the hardware's usage with the EC2 instances carbon emissions datasets of [cloud-carbon-coefficients](https://github.com/cloud-carbon-footprint/ccf-coefficients/blob/main/data/aws-instances.csv). |
40 | | GCP | ❌ Not yet implemented. |
41 | | Azure | ❌ Not yet implemented. |
42 |
43 | ## 📡 Exporters
44 |
45 | | **Exporter** | **Description** |
46 | | ------------ | :-------------------------------: |
47 | | Stdout | Print the metrics in Stdout. |
48 | | JSON | Write the metrics in a JSON file. |
49 | | Prometheus | Send the metrics to Prometheus. |
50 | | Datadog | Send the metrics to Datadog. |
51 |
52 | ### 🗺️ Locations
53 |
54 | | **Location** | **Description** | **Source** |
55 | | ------------ | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------ |
56 | | Worldwide | Get the latest co2g/kwh in near real-time using the CO2Signal or ElectricityMaps APIs. See [here](http://api.electricitymap.org/v3/zones) for the list of available zones. | [CO2Signal API](https://www.co2signal.com) or [ElectricityMaps](https://static.electricitymaps.com/api/docs/index.html) |
57 | | Europe | Static file created from the European Environment Agency Emission for the co2g/kwh in European countries. | [EEA website](https://www.eea.europa.eu/data-and-maps/daviz/co2-emission-intensity-9#tab-googlechartid_googlechartid_googlechartid_googlechartid_chart_11111) |
58 | | AWS | Static file of the AWS Grid emissions factors. | [cloud-carbon-coefficients](https://github.com/cloud-carbon-footprint/cloud-carbon-coefficients/blob/main/data/grid-emissions-factors-aws.csv) |
59 |
60 | ### ⚙️ Configuration
61 |
62 | The environment variables can be set from an environment file `.env`.
63 |
64 | | **Parameter** | **Description** |
65 | | ----------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
66 | | TRACARBON_CO2SIGNAL_API_KEY | The api key received from [CO2Signal](https://www.co2signal.com) or [ElectricityMaps](https://static.electricitymaps.com/api/docs/index.html). |
67 | | TRACARBON_CO2SIGNAL_URL | The url of [CO2Signal](https://docs.co2signal.com/#get-latest-by-country-code) is the default endpoint to retrieve the last known state of the zone, but it could be changed to [ElectricityMaps](https://static.electricitymaps.com/api/docs/index.html#live-carbon-intensity). |
68 | | TRACARBON_METRIC_PREFIX_NAME | The prefix to use in all the metrics name. |
69 | | TRACARBON_INTERVAL_IN_SECONDS | The interval in seconds to wait between the metrics evaluation. |
70 | | TRACARBON_LOG_LEVEL | The level to use for displaying the logs. |
71 |
72 | ## 🔎 Usage
73 |
74 | **Request your API key**
75 |
76 | - Go to [CO2Signal](https://www.co2signal.com/) and get your free API key for non-commercial use, or go to [ElectricityMaps](https://static.electricitymaps.com/api/docs/index.html) for commercial use.
77 | - This API is used to retrieve the last known carbon intensity (in gCO2eq/kWh) of electricity consumed in your location.
78 | - Set your API key in the environment variables, in the `.env` file or directly in the configuration.
79 | - If you would like to start without an API key, that is possible: the carbon intensity will be loaded statically from a file.
80 | - Launch Tracarbon 🚀
81 |
82 | **Command Line**
83 |
84 | ```sh
85 | tracarbon run
86 | ```
87 |
88 | **API**
89 |
90 | ```python
91 | from tracarbon import TracarbonBuilder, TracarbonConfiguration
92 |
93 | configuration = TracarbonConfiguration() # Your configuration
94 | tracarbon = TracarbonBuilder(configuration=configuration).build()
95 | tracarbon.start()
96 | # Your code
97 | tracarbon.stop()
98 |
99 | with tracarbon:
100 | # Your code
101 |
102 | report = tracarbon.report() # Get the report
103 | ```
104 |
105 | ## 💻 Development
106 |
107 | **Local: using uv**
108 |
109 | ```sh
110 | make init
111 | make test-unit
112 | ```
113 |
114 | ## 🛡️ Licence
115 |
116 | [Apache License 2.0](https://raw.githubusercontent.com/fvaleye/tracarbon/main/LICENSE.txt)
117 |
118 | ## 📚 Documentation
119 |
120 | The documentation is hosted here: https://fvaleye.github.io/tracarbon/documentation
121 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (2023) Florian Valeye and a number of other contributors. All rights reserved.
2 |
3 |
4 | Apache License
5 | Version 2.0, January 2004
6 | http://www.apache.org/licenses/
7 |
8 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
9 |
10 | 1. Definitions.
11 |
12 | "License" shall mean the terms and conditions for use, reproduction,
13 | and distribution as defined by Sections 1 through 9 of this document.
14 |
15 | "Licensor" shall mean the copyright owner or entity authorized by
16 | the copyright owner that is granting the License.
17 |
18 | "Legal Entity" shall mean the union of the acting entity and all
19 | other entities that control, are controlled by, or are under common
20 | control with that entity. For the purposes of this definition,
21 | "control" means (i) the power, direct or indirect, to cause the
22 | direction or management of such entity, whether by contract or
23 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
24 | outstanding shares, or (iii) beneficial ownership of such entity.
25 |
26 | "You" (or "Your") shall mean an individual or Legal Entity
27 | exercising permissions granted by this License.
28 |
29 | "Source" form shall mean the preferred form for making modifications,
30 | including but not limited to software source code, documentation
31 | source, and configuration files.
32 |
33 | "Object" form shall mean any form resulting from mechanical
34 | transformation or translation of a Source form, including but
35 | not limited to compiled object code, generated documentation,
36 | and conversions to other media types.
37 |
38 | "Work" shall mean the work of authorship, whether in Source or
39 | Object form, made available under the License, as indicated by a
40 | copyright notice that is included in or attached to the work
41 | (an example is provided in the Appendix below).
42 |
43 | "Derivative Works" shall mean any work, whether in Source or Object
44 | form, that is based on (or derived from) the Work and for which the
45 | editorial revisions, annotations, elaborations, or other modifications
46 | represent, as a whole, an original work of authorship. For the purposes
47 | of this License, Derivative Works shall not include works that remain
48 | separable from, or merely link (or bind by name) to the interfaces of,
49 | the Work and Derivative Works thereof.
50 |
51 | "Contribution" shall mean any work of authorship, including
52 | the original version of the Work and any modifications or additions
53 | to that Work or Derivative Works thereof, that is intentionally
54 | submitted to Licensor for inclusion in the Work by the copyright owner
55 | or by an individual or Legal Entity authorized to submit on behalf of
56 | the copyright owner. For the purposes of this definition, "submitted"
57 | means any form of electronic, verbal, or written communication sent
58 | to the Licensor or its representatives, including but not limited to
59 | communication on electronic mailing lists, source code control systems,
60 | and issue tracking systems that are managed by, or on behalf of, the
61 | Licensor for the purpose of discussing and improving the Work, but
62 | excluding communication that is conspicuously marked or otherwise
63 | designated in writing by the copyright owner as "Not a Contribution."
64 |
65 | "Contributor" shall mean Licensor and any individual or Legal Entity
66 | on behalf of whom a Contribution has been received by Licensor and
67 | subsequently incorporated within the Work.
68 |
69 | 2. Grant of Copyright License. Subject to the terms and conditions of
70 | this License, each Contributor hereby grants to You a perpetual,
71 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
72 | copyright license to reproduce, prepare Derivative Works of,
73 | publicly display, publicly perform, sublicense, and distribute the
74 | Work and such Derivative Works in Source or Object form.
75 |
76 | 3. Grant of Patent License. Subject to the terms and conditions of
77 | this License, each Contributor hereby grants to You a perpetual,
78 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
79 | (except as stated in this section) patent license to make, have made,
80 | use, offer to sell, sell, import, and otherwise transfer the Work,
81 | where such license applies only to those patent claims licensable
82 | by such Contributor that are necessarily infringed by their
83 | Contribution(s) alone or by combination of their Contribution(s)
84 | with the Work to which such Contribution(s) was submitted. If You
85 | institute patent litigation against any entity (including a
86 | cross-claim or counterclaim in a lawsuit) alleging that the Work
87 | or a Contribution incorporated within the Work constitutes direct
88 | or contributory patent infringement, then any patent licenses
89 | granted to You under this License for that Work shall terminate
90 | as of the date such litigation is filed.
91 |
92 | 4. Redistribution. You may reproduce and distribute copies of the
93 | Work or Derivative Works thereof in any medium, with or without
94 | modifications, and in Source or Object form, provided that You
95 | meet the following conditions:
96 |
97 | (a) You must give any other recipients of the Work or
98 | Derivative Works a copy of this License; and
99 |
100 | (b) You must cause any modified files to carry prominent notices
101 | stating that You changed the files; and
102 |
103 | (c) You must retain, in the Source form of any Derivative Works
104 | that You distribute, all copyright, patent, trademark, and
105 | attribution notices from the Source form of the Work,
106 | excluding those notices that do not pertain to any part of
107 | the Derivative Works; and
108 |
109 | (d) If the Work includes a "NOTICE" text file as part of its
110 | distribution, then any Derivative Works that You distribute must
111 | include a readable copy of the attribution notices contained
112 | within such NOTICE file, excluding those notices that do not
113 | pertain to any part of the Derivative Works, in at least one
114 | of the following places: within a NOTICE text file distributed
115 | as part of the Derivative Works; within the Source form or
116 | documentation, if provided along with the Derivative Works; or,
117 | within a display generated by the Derivative Works, if and
118 | wherever such third-party notices normally appear. The contents
119 | of the NOTICE file are for informational purposes only and
120 | do not modify the License. You may add Your own attribution
121 | notices within Derivative Works that You distribute, alongside
122 | or as an addendum to the NOTICE text from the Work, provided
123 | that such additional attribution notices cannot be construed
124 | as modifying the License.
125 |
126 | You may add Your own copyright statement to Your modifications and
127 | may provide additional or different license terms and conditions
128 | for use, reproduction, or distribution of Your modifications, or
129 | for any such Derivative Works as a whole, provided Your use,
130 | reproduction, and distribution of the Work otherwise complies with
131 | the conditions stated in this License.
132 |
133 | 5. Submission of Contributions. Unless You explicitly state otherwise,
134 | any Contribution intentionally submitted for inclusion in the Work
135 | by You to the Licensor shall be under the terms and conditions of
136 | this License, without any additional terms or conditions.
137 | Notwithstanding the above, nothing herein shall supersede or modify
138 | the terms of any separate license agreement you may have executed
139 | with Licensor regarding such Contributions.
140 |
141 | 6. Trademarks. This License does not grant permission to use the trade
142 | names, trademarks, service marks, or product names of the Licensor,
143 | except as required for reasonable and customary use in describing the
144 | origin of the Work and reproducing the content of the NOTICE file.
145 |
146 | 7. Disclaimer of Warranty. Unless required by applicable law or
147 | agreed to in writing, Licensor provides the Work (and each
148 | Contributor provides its Contributions) on an "AS IS" BASIS,
149 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
150 | implied, including, without limitation, any warranties or conditions
151 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
152 | PARTICULAR PURPOSE. You are solely responsible for determining the
153 | appropriateness of using or redistributing the Work and assume any
154 | risks associated with Your exercise of permissions under this License.
155 |
156 | 8. Limitation of Liability. In no event and under no legal theory,
157 | whether in tort (including negligence), contract, or otherwise,
158 | unless required by applicable law (such as deliberate and grossly
159 | negligent acts) or agreed to in writing, shall any Contributor be
160 | liable to You for damages, including any direct, indirect, special,
161 | incidental, or consequential damages of any character arising as a
162 | result of this License or out of the use or inability to use the
163 | Work (including but not limited to damages for loss of goodwill,
164 | work stoppage, computer failure or malfunction, or any and all
165 | other commercial damages or losses), even if such Contributor
166 | has been advised of the possibility of such damages.
167 |
168 | 9. Accepting Warranty or Additional Liability. While redistributing
169 | the Work or Derivative Works thereof, You may choose to offer,
170 | and charge a fee for, acceptance of support, warranty, indemnity,
171 | or other liability obligations and/or rights consistent with this
172 | License. However, in accepting such obligations, You may act only
173 | on Your own behalf and on Your sole responsibility, not on behalf
174 | of any other Contributor, and only if You agree to indemnify,
175 | defend, and hold each Contributor harmless for any liability
176 | incurred by, or claims asserted against, such Contributor by reason
177 | of your accepting any such warranty or additional liability.
178 |
179 | END OF TERMS AND CONDITIONS
180 |
--------------------------------------------------------------------------------
/tests/test_general_metrics.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from kubernetes import config
3 |
4 | from tracarbon import CarbonEmission
5 | from tracarbon import CarbonUsage
6 | from tracarbon import EnergyConsumption
7 | from tracarbon import EnergyUsage
8 | from tracarbon import HardwareInfo
9 | from tracarbon import Kubernetes
10 | from tracarbon import MacEnergyConsumption
11 | from tracarbon.exporters import Tag
12 | from tracarbon.general_metrics import CarbonEmissionGenerator
13 | from tracarbon.general_metrics import CarbonEmissionKubernetesGenerator
14 | from tracarbon.general_metrics import EnergyConsumptionGenerator
15 | from tracarbon.general_metrics import EnergyConsumptionKubernetesGenerator
16 | from tracarbon.hardwares import Container
17 | from tracarbon.hardwares import Pod
18 | from tracarbon.locations.country import Country
19 |
20 |
@pytest.mark.asyncio
async def test_carbon_emission_metric(mocker):
    """The carbon generator must emit one tagged metric per usage type, in order."""
    location_name = "fr"
    energy_usage = EnergyUsage(cpu_energy_usage=12.0, memory_energy_usage=4.0)
    mocker.patch.object(Country, "get_current_country", return_value=location_name)
    mocker.patch.object(CarbonEmission, "get_energy_usage", return_value=energy_usage)
    location = Country(name=location_name, co2g_kwh=51.1)
    generator = CarbonEmissionGenerator(location=location).generate()

    # Metrics are emitted in a fixed order: host, cpu, memory, gpu.
    for suffix in ("host", "cpu", "memory", "gpu"):
        carbon_emission_metric = await generator.__anext__()
        assert carbon_emission_metric.name == f"carbon_emission_{suffix}"
        assert carbon_emission_metric.tags[1] == Tag(key="location", value=location_name)
        assert carbon_emission_metric.tags[2] == Tag(key="source", value=location.co2g_kwh_source.value)
        assert carbon_emission_metric.tags[3] == Tag(key="units", value="co2g")
54 |
55 |
@pytest.mark.asyncio
async def test_energy_consumption_metric(mocker):
    """The energy generator must emit one tagged metric per usage type, in order."""
    location_name = "fr"
    energy_usage = EnergyUsage(cpu_energy_usage=12.0, memory_energy_usage=4.0)
    mocker.patch.object(EnergyConsumption, "from_platform", return_value=MacEnergyConsumption())
    mocker.patch.object(Country, "get_current_country", return_value=location_name)
    mocker.patch.object(MacEnergyConsumption, "get_energy_usage", return_value=energy_usage)
    location = Country(name=location_name, co2g_kwh=51.1)
    generator = EnergyConsumptionGenerator(location=location).generate()

    # Metrics are emitted in a fixed order: host, cpu, memory, gpu.
    for suffix in ("host", "cpu", "memory", "gpu"):
        energy_consumption_metric = await generator.__anext__()
        assert energy_consumption_metric.name == f"energy_consumption_{suffix}"
        assert energy_consumption_metric.tags[1] == Tag(key="location", value=location_name)
        assert energy_consumption_metric.tags[2] == Tag(key="units", value="watts")
86 |
87 |
@pytest.mark.asyncio
async def test_energy_consumption_kubernetes_generator(mocker):
    """Per-container energy metrics (total/cpu/memory) carry the expected values and tags."""
    location_name = "fr"
    mocker.patch.object(EnergyConsumption, "from_platform", return_value=MacEnergyConsumption())
    mocker.patch.object(config, "load_kube_config", return_value=None)
    mocker.patch.object(Country, "get_current_country", return_value=location_name)
    mocker.patch.object(HardwareInfo, "get_memory_total", return_value=101200121856)
    mocker.patch.object(HardwareInfo, "get_number_of_cores", return_value=2)
    mocker.patch.object(
        MacEnergyConsumption,
        "get_energy_usage",
        return_value=EnergyUsage(cpu_energy_usage=12.0, memory_energy_usage=4.0),
    )
    container_name = "grafana"
    pod_name = "grafana-5745b58656-8q4q8"
    namespace = "default"
    pod = Pod(
        name=pod_name,
        namespace=namespace,
        containers=[
            Container(
                name=container_name,
                cpu_usage="825800n",
                memory_usage="46472Ki",
            )
        ],
    )
    mocker.patch.object(Kubernetes, "get_pods_usage", return_value=[pod])

    location = Country(name=location_name, co2g_kwh=51.1)
    async_generator = EnergyConsumptionKubernetesGenerator(location=location, platform="Darwin").generate()

    # Every metric of the single container shares the same tag list.
    expected_tags = [
        f"pod_name:{pod_name}",
        f"pod_namespace:{namespace}",
        f"container_name:{container_name}",
        "platform:Darwin",
        "containers:kubernetes",
        f"location:{location_name}",
        "units:milliwatts",
    ]
    expectations = [
        ("energy_consumption_kubernetes_total", 6.7916),
        ("energy_consumption_kubernetes_cpu", 4.9548),
        ("energy_consumption_kubernetes_memory", 1.8368),
    ]
    for expected_name, milliwatts_expected in expectations:
        metric = await async_generator.__anext__()
        assert round(await metric.value(), 4) == milliwatts_expected
        assert metric.name == expected_name
        assert metric.format_tags() == expected_tags
165 |
166 |
@pytest.mark.asyncio
async def test_carbon_emission_kubernetes_generator(mocker):
    """Per-container carbon metrics (total/cpu/memory) carry the expected values and tags."""
    location_name = "fr"
    mocker.patch.object(EnergyConsumption, "from_platform", return_value=MacEnergyConsumption())
    mocker.patch.object(config, "load_kube_config", return_value=None)
    mocker.patch.object(Country, "get_current_country", return_value=location_name)
    mocker.patch.object(HardwareInfo, "get_memory_total", return_value=1000000000)
    mocker.patch.object(HardwareInfo, "get_number_of_cores", return_value=2)
    mocker.patch.object(
        CarbonEmission,
        "get_co2_usage",
        return_value=CarbonUsage(cpu_carbon_usage=0.2, memory_carbon_usage=0.1),
    )
    container_name = "grafana"
    pod_name = "grafana-5745b58656-8q4q8"
    namespace = "default"
    pod = Pod(
        name=pod_name,
        namespace=namespace,
        containers=[
            Container(
                name=container_name,
                cpu_usage="2000m",
                memory_usage="1Gi",
            )
        ],
    )
    mocker.patch.object(Kubernetes, "get_pods_usage", return_value=[pod])

    location = Country(name=location_name, co2g_kwh=55)
    async_generator = CarbonEmissionKubernetesGenerator(location=location, platform="Darwin").generate()

    # Every metric of the single container shares the same tag list.
    expected_tags = [
        f"pod_name:{pod_name}",
        f"pod_namespace:{namespace}",
        f"container_name:{container_name}",
        "platform:Darwin",
        "containers:kubernetes",
        f"location:{location_name}",
        "source:file",
        "units:co2mg",
    ]
    expectations = [
        ("carbon_emission_kubernetes_total", 300.00),
        ("carbon_emission_kubernetes_cpu", 200.00),
        ("carbon_emission_kubernetes_memory", 100.00),
    ]
    for expected_name, carbon_usage_expected in expectations:
        metric = await async_generator.__anext__()
        assert round(await metric.value(), 4) == carbon_usage_expected
        assert metric.name == expected_name
        assert metric.format_tags() == expected_tags
247 |
--------------------------------------------------------------------------------
/tracarbon/general_metrics.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from typing import AsyncGenerator
3 | from typing import Optional
4 |
5 | from tracarbon.conf import KUBERNETES_INSTALLED
6 | from tracarbon.emissions import CarbonEmission
7 | from tracarbon.emissions import CarbonUsageUnit
8 | from tracarbon.exporters import Metric
9 | from tracarbon.exporters import MetricGenerator
10 | from tracarbon.exporters import Tag
11 | from tracarbon.hardwares import EnergyConsumption
12 | from tracarbon.hardwares import EnergyUsageUnit
13 | from tracarbon.hardwares import UsageType
14 | from tracarbon.locations import Country
15 | from tracarbon.locations import Location
16 |
17 |
class EnergyConsumptionGenerator(MetricGenerator):
    """
    Energy consumption generator for energy consumption.

    Emits one metric per ``UsageType`` tagged with the platform, the
    location name, and the energy unit.
    """

    # Platform-specific sensor used to read the energy usage.
    energy_consumption: EnergyConsumption

    def __init__(self, location: Optional[Location] = None, **data: Any) -> None:
        """
        Create the generator, defaulting the sensor to the current
        platform's implementation and the location to the detected country.
        """
        if "energy_consumption" not in data:
            data["energy_consumption"] = EnergyConsumption.from_platform()
        if not location:
            location = Country.get_location()
        super().__init__(location=location, metrics=[], **data)

    async def generate(self) -> AsyncGenerator[Metric, None]:
        """
        Generate a metric for energy consumption.

        :return: an async generator of the metrics
        """
        energy_usage = await self.energy_consumption.get_energy_usage()

        for usage_type in UsageType:
            # Bind the loop variable as a default argument: the metric value
            # callable is awaited lazily (after yield), and a plain closure
            # over `usage_type` would make every collected metric report the
            # last UsageType once the loop has finished (late binding).
            async def energy_consumption_by_usage_type(usage_type: UsageType = usage_type) -> float:
                """
                Get the energy usage for the bound usage type.
                """
                return energy_usage.get_energy_usage_on_type(usage_type=usage_type)

            yield Metric(
                name=f"energy_consumption_{usage_type.value}",
                value=energy_consumption_by_usage_type,
                tags=[
                    Tag(key="platform", value=self.platform),
                    Tag(key="location", value=self.location.name),
                    Tag(key="units", value=energy_usage.unit.value),
                ],
            )
57 |
58 |
class CarbonEmissionGenerator(MetricGenerator):
    """
    Carbon emission generator to generate carbon emissions.

    Emits one metric per ``UsageType`` tagged with the platform, the
    location, the CO2 data source, and the carbon unit.
    """

    # Converter from energy usage to CO2 usage for the location.
    carbon_emission: CarbonEmission
    # Optional API key for the CO2 Signal service.
    co2signal_api_key: Optional[str] = None

    def __init__(self, location: Optional[Location] = None, **data: Any) -> None:
        """
        Create the generator, defaulting the location to the detected
        country and building the CarbonEmission from it when not provided.
        """
        if not location:
            location = Country.get_location()
        if "carbon_emission" not in data:
            data["carbon_emission"] = CarbonEmission(
                co2signal_api_key=(
                    data["co2signal_api_key"] if "co2signal_api_key" in data else location.co2signal_api_key
                ),
                co2signal_url=(data["co2signal_url"] if "co2signal_url" in data else location.co2signal_url),
                location=location,
            )
        super().__init__(location=location, metrics=[], **data)

    async def generate(self) -> AsyncGenerator[Metric, None]:
        """
        Generate metrics for the carbon emission.

        :return: an async generator of the metrics
        """
        carbon_usage = await self.carbon_emission.get_co2_usage()

        for usage_type in UsageType:
            # Bind the loop variable as a default argument: the metric value
            # callable is awaited lazily (after yield), and a plain closure
            # over `usage_type` would make every collected metric report the
            # last UsageType once the loop has finished (late binding).
            async def get_carbon_emission_by_usage_type(usage_type: UsageType = usage_type) -> float:
                """
                Get the carbon usage for the bound usage type.
                """
                return carbon_usage.get_carbon_usage_on_type(usage_type=usage_type)

            yield Metric(
                name=f"carbon_emission_{usage_type.value}",
                value=get_carbon_emission_by_usage_type,
                tags=[
                    Tag(key="platform", value=self.platform),
                    Tag(key="location", value=self.location.name),
                    Tag(key="source", value=self.location.co2g_kwh_source.value),
                    Tag(key="units", value=carbon_usage.unit.value),
                ],
            )
106 |
107 |
108 | if KUBERNETES_INSTALLED:
109 | from tracarbon.hardwares.containers import Kubernetes
110 |
111 | class EnergyConsumptionKubernetesGenerator(MetricGenerator):
112 | """
113 | Energy consumption generator for energy consumption of the containers.
114 | """
115 |
116 | energy_consumption: EnergyConsumption
117 | kubernetes: Kubernetes
118 |
119 | def __init__(self, **data: Any) -> None:
120 | if "energy_consumption" not in data:
121 | data["energy_consumption"] = EnergyConsumption.from_platform()
122 | if "kubernetes" not in data:
123 | data["kubernetes"] = Kubernetes()
124 | super().__init__(metrics=[], **data)
125 |
126 | async def generate(self) -> AsyncGenerator[Metric, None]:
127 | """
128 | Generate metrics for the energy consumption with Kubernetes.
129 |
130 | :return: an async generator of the metrics
131 | """
132 | energy_usage = await self.energy_consumption.get_energy_usage()
133 | energy_usage.convert_unit(unit=EnergyUsageUnit.MILLIWATT)
134 | for pod in self.kubernetes.get_pods_usage():
135 | for container in pod.containers:
136 |
137 | async def get_pod_memory_energy_consumption() -> Optional[float]:
138 | """
139 | Get the memory energy consumption of the pod.
140 | """
141 | return container.memory_usage * energy_usage.memory_energy_usage
142 |
143 | async def get_pod_cpu_energy_consumption() -> Optional[float]:
144 | """
145 | Get the CPU energy consumption of the pod.
146 | """
147 | return container.cpu_usage * energy_usage.cpu_energy_usage
148 |
149 | async def get_pod_total_energy_consumption() -> Optional[float]:
150 | """
151 | Get the total energy consumption of the pod.
152 | """
153 | total = await get_pod_memory_energy_consumption() + await get_pod_cpu_energy_consumption()
154 | return total
155 |
156 | tags = [
157 | Tag(key="pod_name", value=pod.name),
158 | Tag(key="pod_namespace", value=pod.namespace),
159 | Tag(key="container_name", value=container.name),
160 | Tag(key="platform", value=self.platform),
161 | Tag(key="containers", value="kubernetes"),
162 | Tag(key="location", value=self.location.name),
163 | Tag(key="units", value=energy_usage.unit.value),
164 | ]
165 |
166 | yield Metric(
167 | name="energy_consumption_kubernetes_total",
168 | value=get_pod_total_energy_consumption,
169 | tags=tags,
170 | )
171 | yield Metric(
172 | name="energy_consumption_kubernetes_cpu",
173 | value=get_pod_cpu_energy_consumption,
174 | tags=tags,
175 | )
176 | yield Metric(
177 | name="energy_consumption_kubernetes_memory",
178 | value=get_pod_memory_energy_consumption,
179 | tags=tags,
180 | )
181 |
182 | class CarbonEmissionKubernetesGenerator(MetricGenerator):
183 | """
184 | Carbon emission generator to generate carbon emissions of the containers.
185 | """
186 |
187 | carbon_emission: CarbonEmission
188 | kubernetes: Kubernetes
189 | co2signal_api_key: Optional[str] = None
190 |
191 | def __init__(self, location: Location, **data: Any) -> None:
192 | if "carbon_emission" not in data:
193 | data["carbon_emission"] = CarbonEmission(
194 | co2signal_api_key=(
195 | data["co2signal_api_key"] if "co2signal_api_key" in data else location.co2signal_api_key
196 | ),
197 | co2signal_url=(data["co2signal_url"] if "co2signal_url" in data else location.co2signal_url),
198 | location=location,
199 | )
200 | if "kubernetes" not in data:
201 | data["kubernetes"] = Kubernetes()
202 | super().__init__(location=location, metrics=[], **data)
203 |
204 | async def generate(self) -> AsyncGenerator[Metric, None]:
205 | """
206 | Generate metrics for the carbon emission with Kubernetes.
207 |
208 | :return: an async generator of the metrics
209 | """
210 | carbon_usage = await self.carbon_emission.get_co2_usage()
211 | carbon_usage.convert_unit(unit=CarbonUsageUnit.CO2_MG)
212 |
213 | for pod in self.kubernetes.get_pods_usage():
214 | for container in pod.containers:
215 |
216 | async def get_cpu_pod_carbon_emission() -> Optional[float]:
217 | """
218 | Get the CPU carbon emission of the pod.
219 | """
220 | return container.cpu_usage * carbon_usage.cpu_carbon_usage
221 |
222 | async def get_memory_pod_carbon_emission() -> Optional[float]:
223 | """
224 | Get the memory carbon emission of the pod.
225 | """
226 | return container.memory_usage * carbon_usage.memory_carbon_usage
227 |
228 | async def get_total_pod_carbon_emission() -> Optional[float]:
229 | """
230 | Get the total carbon emission of the pod.
231 | """
232 | total = await get_cpu_pod_carbon_emission() + await get_memory_pod_carbon_emission()
233 | return total
234 |
235 | tags = [
236 | Tag(key="pod_name", value=pod.name),
237 | Tag(key="pod_namespace", value=pod.namespace),
238 | Tag(key="container_name", value=container.name),
239 | Tag(key="platform", value=self.platform),
240 | Tag(key="containers", value="kubernetes"),
241 | Tag(key="location", value=self.location.name),
242 | Tag(key="source", value=self.location.co2g_kwh_source.value),
243 | Tag(key="units", value=carbon_usage.unit.value),
244 | ]
245 | yield Metric(
246 | name="carbon_emission_kubernetes_total",
247 | value=get_total_pod_carbon_emission,
248 | tags=tags,
249 | )
250 | yield Metric(
251 | name="carbon_emission_kubernetes_cpu",
252 | value=get_cpu_pod_carbon_emission,
253 | tags=tags,
254 | )
255 | yield Metric(
256 | name="carbon_emission_kubernetes_memory",
257 | value=get_memory_pod_carbon_emission,
258 | tags=tags,
259 | )
260 |
--------------------------------------------------------------------------------