├── tests ├── __init__.py ├── commands │ ├── __init__.py │ └── test_logs.py └── cortisollib │ ├── __init__.py │ ├── test_estimators.py │ ├── test_calculators.py │ ├── test_log_cost_estimator.py │ ├── test_hooks.py │ └── test_readers.py ├── cortisol ├── __init__.py ├── commands │ ├── __init__.py │ └── logs.py ├── cortisollib │ ├── __init__.py │ ├── estimators.py │ ├── templates │ │ └── cli_loadtest.py.j2 │ ├── users.py │ ├── calculators.py │ ├── readers.py │ ├── log_cost_estimator.py │ └── hooks.py └── main.py ├── docs ├── static │ ├── .nojekyll │ └── img │ │ ├── favicon.ico │ │ ├── mg_medium.ico │ │ ├── docusaurus.png │ │ ├── favicon-copy.ico │ │ ├── cortisol_h_large.png │ │ ├── cortisol_medium.png │ │ ├── cortisol_h_large_w.png │ │ ├── gh-action-cortisol.png │ │ ├── getting-started-results.png │ │ ├── logo.svg │ │ ├── undraw_docusaurus_tree.svg │ │ ├── undraw_docusaurus_mountain.svg │ │ └── undraw_docusaurus_react.svg ├── cortisol_h_large.png ├── cortisol_h_large_w.png ├── babel.config.js ├── blog │ ├── 2021-08-26-welcome │ │ ├── docusaurus-plushie-banner.jpeg │ │ └── index.md │ ├── 2019-05-28-first-blog-post.md │ ├── authors.yml │ ├── 2021-08-01-mdx-blog-post.mdx │ └── 2019-05-29-long-blog-post.md ├── src │ ├── pages │ │ ├── markdown-page.md │ │ └── index.module.css │ ├── components │ │ └── HomepageFeatures │ │ │ ├── styles.module.css │ │ │ └── index.js │ └── css │ │ └── custom.css ├── .gitignore ├── README.md ├── sidebars.js ├── docs │ ├── best-practices.md │ ├── how-tos.md │ ├── intro.md │ ├── configuration.md │ ├── CI-visibility.md │ ├── writing-a-cortisolfile.md │ └── getting-started.md ├── package.json ├── cortisol │ └── src │ │ └── pages │ │ └── index.js └── docusaurus.config.js ├── examples ├── cortisolfile.py ├── config.yml └── config.json ├── pyproject.toml ├── .github └── workflows │ ├── release.yml │ └── ci.yml ├── CONTRIBUTING.md ├── .gitignore ├── README.md └── LICENSE /tests/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cortisol/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/static/.nojekyll: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cortisol/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cortisol/cortisollib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/cortisollib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/cortisol_h_large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/cortisol_h_large.png -------------------------------------------------------------------------------- /docs/cortisol_h_large_w.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/cortisol_h_large_w.png -------------------------------------------------------------------------------- /docs/static/img/favicon.ico: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/favicon.ico -------------------------------------------------------------------------------- /docs/static/img/mg_medium.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/mg_medium.ico -------------------------------------------------------------------------------- /docs/static/img/docusaurus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/docusaurus.png -------------------------------------------------------------------------------- /docs/babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [require.resolve('@docusaurus/core/lib/babel/preset')], 3 | }; 4 | -------------------------------------------------------------------------------- /docs/static/img/favicon-copy.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/favicon-copy.ico -------------------------------------------------------------------------------- /docs/static/img/cortisol_h_large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/cortisol_h_large.png -------------------------------------------------------------------------------- /docs/static/img/cortisol_medium.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/cortisol_medium.png -------------------------------------------------------------------------------- 
/docs/static/img/cortisol_h_large_w.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/cortisol_h_large_w.png -------------------------------------------------------------------------------- /docs/static/img/gh-action-cortisol.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/gh-action-cortisol.png -------------------------------------------------------------------------------- /docs/static/img/getting-started-results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/static/img/getting-started-results.png -------------------------------------------------------------------------------- /docs/blog/2021-08-26-welcome/docusaurus-plushie-banner.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CortisolAI/cortisol/HEAD/docs/blog/2021-08-26-welcome/docusaurus-plushie-banner.jpeg -------------------------------------------------------------------------------- /docs/src/pages/markdown-page.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Markdown page example 3 | --- 4 | 5 | # Markdown page example 6 | 7 | You don't need React to write simple standalone pages. 
8 | -------------------------------------------------------------------------------- /docs/src/components/HomepageFeatures/styles.module.css: -------------------------------------------------------------------------------- 1 | .features { 2 | display: flex; 3 | align-items: center; 4 | padding: 2rem 0; 5 | width: 100%; 6 | } 7 | 8 | .featureSvg { 9 | height: 200px; 10 | width: 200px; 11 | } 12 | -------------------------------------------------------------------------------- /examples/cortisolfile.py: -------------------------------------------------------------------------------- 1 | from locust import task 2 | 3 | from cortisol.cortisollib.users import CortisolHttpUser 4 | 5 | 6 | class WebsiteUser(CortisolHttpUser): 7 | @task 8 | def my_task(self): 9 | self.client.get("/") 10 | -------------------------------------------------------------------------------- /examples/config.yml: -------------------------------------------------------------------------------- 1 | host: "http://127.0.0.1:8080" 2 | log-file: "/app/playground_app.log" 3 | users: 10 4 | spawn-rate: 5 5 | run-time: "10s" 6 | cortisol-file: "./examples/cortisolfile.py" 7 | container-id: "1212aa67e530af75b3310e1e5b30261b36844a6748df1d321088c4d48a20ebd0" -------------------------------------------------------------------------------- /examples/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "host": "http://127.0.0.1:8080", 3 | "log-file": "/app/playground_app.log", 4 | "users": 10, 5 | "spawn-rate": 10, 6 | "run-time": "10s", 7 | "cortisol-file": "./examples/cortisolfile.py", 8 | "container-id": "1212aa67e530af75b3310e1e5b30261b36844a6748df1d321088c4d48a20ebd0" 9 | } -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | # Dependencies 2 | /node_modules 3 | 4 | # Production 5 | /build 6 | 7 | # Generated files 8 | 
.docusaurus 9 | .cache-loader 10 | 11 | # Misc 12 | .DS_Store 13 | .env.local 14 | .env.development.local 15 | .env.test.local 16 | .env.production.local 17 | 18 | npm-debug.log* 19 | yarn-debug.log* 20 | yarn-error.log* 21 | -------------------------------------------------------------------------------- /docs/blog/2019-05-28-first-blog-post.md: -------------------------------------------------------------------------------- 1 | --- 2 | slug: first-blog-post 3 | title: First Blog Post 4 | authors: 5 | name: Gao Wei 6 | title: Docusaurus Core Team 7 | url: https://github.com/wgao19 8 | image_url: https://github.com/wgao19.png 9 | tags: [hola, docusaurus] 10 | --- 11 | 12 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 13 | -------------------------------------------------------------------------------- /docs/src/pages/index.module.css: -------------------------------------------------------------------------------- 1 | /** 2 | * CSS files with the .module.css suffix will be treated as CSS modules 3 | * and scoped locally. 
4 | */ 5 | 6 | .heroBanner { 7 | padding: 4rem 0; 8 | text-align: center; 9 | position: relative; 10 | overflow: hidden; 11 | } 12 | 13 | @media screen and (max-width: 996px) { 14 | .heroBanner { 15 | padding: 2rem; 16 | } 17 | } 18 | 19 | .buttons { 20 | display: flex; 21 | align-items: center; 22 | justify-content: center; 23 | } 24 | -------------------------------------------------------------------------------- /docs/blog/authors.yml: -------------------------------------------------------------------------------- 1 | endi: 2 | name: Endilie Yacop Sucipto 3 | title: Maintainer of Docusaurus 4 | url: https://github.com/endiliey 5 | image_url: https://github.com/endiliey.png 6 | 7 | yangshun: 8 | name: Yangshun Tay 9 | title: Front End Engineer @ Facebook 10 | url: https://github.com/yangshun 11 | image_url: https://github.com/yangshun.png 12 | 13 | slorber: 14 | name: Sébastien Lorber 15 | title: Docusaurus maintainer 16 | url: https://sebastienlorber.com 17 | image_url: https://github.com/slorber.png 18 | -------------------------------------------------------------------------------- /docs/blog/2021-08-01-mdx-blog-post.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | slug: mdx-blog-post 3 | title: MDX Blog Post 4 | authors: [slorber] 5 | tags: [docusaurus] 6 | --- 7 | 8 | Blog posts support [Docusaurus Markdown features](https://docusaurus.io/docs/markdown-features), such as [MDX](https://mdxjs.com/). 9 | 10 | :::tip 11 | 12 | Use the power of React to create interactive blog posts. 13 | 14 | ```js 15 | 16 | ``` 17 | 18 | 19 | 20 | ::: 21 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Website 2 | 3 | This Docs website is built using [Docusaurus 2](https://docusaurus.io/). 
4 | 5 | ### Local Development 6 | 7 | ``` 8 | $ npm start 9 | ``` 10 | 11 | This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. 12 | 13 | ### Deployment as Github Page 14 | 15 | ``` 16 | $ GIT_USER= yarn deploy 17 | ``` 18 | 19 | If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. 20 | -------------------------------------------------------------------------------- /cortisol/main.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import typer 4 | 5 | from cortisol.commands.logs import app as log_commands 6 | 7 | _LOGO = """ 8 | ____ _ _ _ 9 | / ___|___ _ __| |_(_)___ ___ | | 10 | | | / _ \| '__| __| / __|/ _ \| | 11 | | |__| (_) | | | |_| \__ \ (_) | | 12 | \____\___/|_| \__|_|___/\___/|_| 13 | 14 | """ 15 | 16 | app = typer.Typer() 17 | 18 | 19 | @app.callback() 20 | def display_logo(): 21 | typer.echo(_LOGO) 22 | 23 | 24 | app.add_typer(log_commands, name="logs") 25 | 26 | 27 | if __name__ == "__main__": 28 | app() 29 | -------------------------------------------------------------------------------- /tests/cortisollib/test_estimators.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from cortisol.cortisollib.estimators import linear_extrapolator 4 | 5 | 6 | class TestLinearExtrapolator(unittest.TestCase): 7 | def test_extrapolation(self): 8 | value_to_extrapolate = 1000 9 | test_run_time = 3600 # 1 hour in seconds 10 | expected_extrapolated_value = (2592000.0 * value_to_extrapolate) / test_run_time 11 | 12 | extrapolated_value = linear_extrapolator(value_to_extrapolate, test_run_time) 13 | self.assertAlmostEqual( 14 | extrapolated_value, 15 | expected_extrapolated_value, 16 | places=10, 17 | msg="Extrapolation result is not as expected", 18 | ) 19 | 
-------------------------------------------------------------------------------- /docs/blog/2021-08-26-welcome/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | slug: welcome 3 | title: Welcome 4 | authors: [slorber, yangshun] 5 | tags: [facebook, hello, docusaurus] 6 | --- 7 | 8 | [Docusaurus blogging features](https://docusaurus.io/docs/blog) are powered by the [blog plugin](https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-content-blog). 9 | 10 | Simply add Markdown files (or folders) to the `blog` directory. 11 | 12 | Regular blog authors can be added to `authors.yml`. 13 | 14 | The blog post date can be extracted from filenames, such as: 15 | 16 | - `2019-05-30-welcome.md` 17 | - `2019-05-30-welcome/index.md` 18 | 19 | A blog post folder can be convenient to co-locate blog post images: 20 | 21 | ![Docusaurus Plushie](./docusaurus-plushie-banner.jpeg) 22 | 23 | The blog supports tags as well! 24 | 25 | **And if you don't want a blog**: just delete this directory, and use `blog: false` in your Docusaurus config. 
26 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "cortisol" 3 | version = "0.3.0" 4 | description = "Accurately forecast log costs pre-production with Cortisol for Datadog, New Relic, and Grafana 💰📉" 5 | authors = ["pm3310 ", "dvarelas "] 6 | license = "Apache License 2.0" 7 | readme = "README.md" 8 | 9 | [tool.poetry.scripts] 10 | cortisol = "cortisol.main:app" 11 | 12 | [tool.poetry.dependencies] 13 | python = "^3.8" 14 | typer = {extras = ["all"], version = "^0.9.0"} 15 | pyyaml = "^6.0.1" 16 | docker = "^6.1.3" 17 | jinja2 = "^3.1.2" 18 | prettytable = "^3.8.0" 19 | locust = "^2.16.0" 20 | 21 | [tool.poetry.group.dev.dependencies] 22 | pytest = "^7.4.0" 23 | black = "^23.7.0" 24 | pytest-cov = "^4.1.0" 25 | 26 | [build-system] 27 | requires = ["poetry-core"] 28 | build-backend = "poetry.core.masonry.api" 29 | 30 | [tool.coverage.run] 31 | omit = [ 32 | "cortisol/main.py" 33 | ] 34 | -------------------------------------------------------------------------------- /docs/sidebars.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Creating a sidebar enables you to: 3 | - create an ordered group of docs 4 | - render a sidebar for each doc of that group 5 | - provide next/previous navigation 6 | 7 | The sidebars can be generated from the filesystem, or explicitly defined here. 8 | 9 | Create as many sidebars as you want. 
10 | */ 11 | 12 | // @ts-check 13 | 14 | /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ 15 | const sidebars = { 16 | // By default, Docusaurus generates a sidebar from the docs folder structure 17 | tutorialSidebar: [{type: 'autogenerated', dirName: '.'}], 18 | 19 | // But you can create a sidebar manually 20 | /* 21 | tutorialSidebar: [ 22 | 'intro', 23 | 'hello', 24 | { 25 | type: 'category', 26 | label: 'Tutorial', 27 | items: ['tutorial-basics/create-a-document'], 28 | }, 29 | ], 30 | */ 31 | }; 32 | 33 | module.exports = sidebars; 34 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | release: 4 | types: 5 | - created 6 | 7 | jobs: 8 | publish: 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | python-version: ['3.11'] 13 | poetry-version: [1.5.1] 14 | os: [ubuntu-latest] 15 | runs-on: ${{ matrix.os }} 16 | steps: 17 | - uses: actions/checkout@v2 18 | - uses: actions/setup-python@v2 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | - name: Run image 22 | uses: abatilo/actions-poetry@v2.1.6 23 | with: 24 | poetry-version: ${{ matrix.poetry-version }} 25 | - name: Install dependencies 26 | run: poetry install 27 | - name: Publish 28 | env: 29 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 30 | run: | 31 | poetry config pypi-token.pypi $PYPI_TOKEN 32 | poetry publish --build 33 | -------------------------------------------------------------------------------- /cortisol/cortisollib/estimators.py: -------------------------------------------------------------------------------- 1 | def linear_extrapolator(value: float, run_time: float): 2 | """ 3 | Linearly extrapolate a value over a specific run time. 4 | 5 | This function extrapolates a given value over a specified run time of one month (30 days). 
6 | The extrapolated value is calculated by dividing the given value by the run time in seconds, 7 | and then multiplying it by the total number of seconds in 30 days (2592000 seconds). 8 | 9 | Args: 10 | value (float): The value to be extrapolated. 11 | run_time (float): The duration of the run time in seconds. 12 | 13 | Returns: 14 | float: The extrapolated value over the specified run time. 15 | 16 | Example: 17 | value_to_extrapolate = 1000 18 | test_run_time = 3600 # 1 hour in seconds 19 | extrapolated_value = linear_extrapolator(value_to_extrapolate, test_run_time) 20 | """ 21 | extrapolated_value = (2592000.0 * value) / run_time 22 | return extrapolated_value 23 | -------------------------------------------------------------------------------- /cortisol/cortisollib/templates/cli_loadtest.py.j2: -------------------------------------------------------------------------------- 1 | from time import time 2 | from pathlib import Path 3 | from locust.env import Environment 4 | from locust import task, events, HttpUser, between 5 | 6 | from cortisol.cortisollib.readers import log_file_size_reader 7 | from cortisol.cortisollib.hooks import on_quit, on_request, stats, on_init 8 | 9 | 10 | @events.init_command_line_parser.add_listener 11 | def _(parser): 12 | parser.add_argument("--log-file", type=str, default="/app/playground_app.log") 13 | parser.add_argument( 14 | "--container-id", 15 | type=str, 16 | env_var="CONTAINER_ID", 17 | default="1212aa67e530af75b3310e1e5b30261b36844a6748df1d321088c4d48a20ebd0", 18 | ) 19 | parser.add_argument( 20 | "--stats-file", 21 | type=Path, 22 | env_var="STATS_FILE", 23 | nargs="?", 24 | default=None, 25 | ) 26 | 27 | 28 | {{ cortisolfile }} 29 | 30 | 31 | events.init.add_listener(on_init) 32 | events.quitting.add_listener(on_quit) 33 | events.request.add_listener(on_request) 34 | 35 | environment = Environment(user_classes={{ user_classes }}, events=events) 36 | 
-------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## General Guidelines 2 | - Every new functionality should be accompanied with solid unit tests. We haven't set any threshold regarding code coverage % as we want to be pragmatic. 3 | - Every bug that is fixed should come with related unit tests. 4 | - Regarding coding style, we follow PEP8 by leveraging [black](https://pypi.org/project/black/) 5 | 6 | ## Branching Model 7 | - Standard Fork & Pull Request Workflow is used in this project 8 | - Every new functionality should be created in a branch (from main branch) with name format `feature/new-functionality-name` 9 | - Every bug should be fixed in a branch (from main branch) with name format `fix/bug-name` 10 | - All branches will be merged to main branch 11 | 12 | ## Setup Environment 13 | - [Poetry](https://python-poetry.org/) is used as a dependency management tool 14 | - Run `poetry install --with dev` to install dependencies 15 | - Ready to rock! 16 | 17 | ## Testing Locally 18 | - `poetry run pytest --cov .`: Runs tests with code coverage 19 | 20 | ## Linting Locally 21 | - `poetry run black . --check` to check if there's a formatting issue 22 | - `poetry run black .` to fix the formatting issue -------------------------------------------------------------------------------- /docs/docs/best-practices.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Best practices 3 | sidebar_position: 5 4 | --- 5 | 6 | ## Best practices 7 | 8 | Here are three best practices to follow when using Cortisol: 9 | 10 | 1. Cortisol uses the volume of the logged data from the run to project and approximate 11 | its value over a month using linear extrapolation. To obtain a precise estimation, 12 | it's essential to set the `--run-time` to an adequate duration. 
13 | The `--spawn-rate` influences `--run-time`, 14 | as a higher rate of user spawning results in a shorter `--run-time`. 15 | 16 | 2. Create scenarios that accurately replicate the user actions or API calls 17 | that generate log entries. If certain actions trigger specific log types or levels, 18 | ensure your cortisolfile defines those actions accordingly. 19 | The goal is to generate log entries in a way that mirrors real-world usage. 20 | 21 | 3. Gradually vary the intensity of the load during testing to observe how the volume of logs changes. 22 | Start with a lower load and increase it step by step. This will help you identify thresholds 23 | where log volume might start to spike significantly or where certain log types become more frequent. -------------------------------------------------------------------------------- /docs/src/css/custom.css: -------------------------------------------------------------------------------- 1 | /** 2 | * Any CSS included here will be global. The classic template 3 | * bundles Infima by default. Infima is a CSS framework designed to 4 | * work well for content-centric websites. 5 | */ 6 | 7 | /* You can override the default Infima variables here. */ 8 | :root { 9 | --ifm-color-primary: #2e8555; 10 | --ifm-color-primary-dark: #29784c; 11 | --ifm-color-primary-darker: #277148; 12 | --ifm-color-primary-darkest: #205d3b; 13 | --ifm-color-primary-light: #33925d; 14 | --ifm-color-primary-lighter: #359962; 15 | --ifm-color-primary-lightest: #3cad6e; 16 | --ifm-code-font-size: 95%; 17 | --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); 18 | } 19 | 20 | /* For readability concerns, you should choose a lighter palette in dark mode. 
*/ 21 | [data-theme='dark'] { 22 | --ifm-color-primary: #25c2a0; 23 | --ifm-color-primary-dark: #21af90; 24 | --ifm-color-primary-darker: #1fa588; 25 | --ifm-color-primary-darkest: #1a8870; 26 | --ifm-color-primary-light: #29d5b0; 27 | --ifm-color-primary-lighter: #32d8b4; 28 | --ifm-color-primary-lightest: #4fddbf; 29 | --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3); 30 | } 31 | -------------------------------------------------------------------------------- /docs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cortisol", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "docusaurus": "docusaurus", 7 | "start": "docusaurus start", 8 | "build": "docusaurus build", 9 | "swizzle": "docusaurus swizzle", 10 | "deploy": "docusaurus deploy", 11 | "clear": "docusaurus clear", 12 | "serve": "docusaurus serve", 13 | "write-translations": "docusaurus write-translations", 14 | "write-heading-ids": "docusaurus write-heading-ids" 15 | }, 16 | "dependencies": { 17 | "@docusaurus/core": "2.4.1", 18 | "@docusaurus/preset-classic": "2.4.1", 19 | "@mdx-js/react": "^1.6.22", 20 | "clsx": "^1.2.1", 21 | "prism-react-renderer": "^1.3.5", 22 | "react": "^17.0.2", 23 | "react-dom": "^17.0.2" 24 | }, 25 | "devDependencies": { 26 | "@docusaurus/module-type-aliases": "2.4.1" 27 | }, 28 | "browserslist": { 29 | "production": [ 30 | ">0.5%", 31 | "not dead", 32 | "not op_mini all" 33 | ], 34 | "development": [ 35 | "last 1 chrome version", 36 | "last 1 firefox version", 37 | "last 1 safari version" 38 | ] 39 | }, 40 | "engines": { 41 | "node": ">=16.14" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /docs/cortisol/src/pages/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import clsx from 'clsx'; 3 | import Link from '@docusaurus/Link'; 4 | import useDocusaurusContext from 
'@docusaurus/useDocusaurusContext'; 5 | import Layout from '@theme/Layout'; 6 | import HomepageFeatures from '@site/src/components/HomepageFeatures'; 7 | 8 | import styles from './index.module.css'; 9 | 10 | function HomepageHeader() { 11 | const {siteConfig} = useDocusaurusContext(); 12 | return ( 13 |
14 |
15 |

{siteConfig.title}

16 |

{siteConfig.tagline}

17 |
18 | 21 | Docusaurus Tutorial - 5min ⏱️ 22 | 23 |
24 |
25 |
26 | ); 27 | } 28 | 29 | export default function Home() { 30 | const {siteConfig} = useDocusaurusContext(); 31 | return ( 32 | 35 | 36 |
37 | 38 |
39 |
40 | ); 41 | } 42 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push] 3 | 4 | jobs: 5 | test: 6 | strategy: 7 | fail-fast: false 8 | matrix: 9 | python-version: ['3.11'] 10 | poetry-version: [1.5.1] 11 | os: [ubuntu-latest] 12 | runs-on: ${{ matrix.os }} 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-python@v2 16 | with: 17 | python-version: ${{ matrix.python-version }} 18 | - name: Run image 19 | uses: abatilo/actions-poetry@v2.1.6 20 | with: 21 | poetry-version: ${{ matrix.poetry-version }} 22 | - name: Install dependencies 23 | run: poetry install 24 | - name: Run tests 25 | run: poetry run pytest --cov . 26 | code-quality: 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | python-version: ['3.11'] 31 | poetry-version: [1.5.1] 32 | os: [ubuntu-latest] 33 | runs-on: ${{ matrix.os }} 34 | steps: 35 | - uses: actions/checkout@v2 36 | - uses: actions/setup-python@v2 37 | with: 38 | python-version: ${{ matrix.python-version }} 39 | - name: Run image 40 | uses: abatilo/actions-poetry@v2.1.6 41 | with: 42 | poetry-version: ${{ matrix.poetry-version }} 43 | - name: Install dependencies 44 | run: poetry install --with dev 45 | - name: Run black 46 | run: poetry run black . 
--check -------------------------------------------------------------------------------- /docs/src/components/HomepageFeatures/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import clsx from 'clsx'; 3 | import styles from './styles.module.css'; 4 | 5 | const FeatureList = [ 6 | { 7 | title: 'Easy to Use', 8 | Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default, 9 | description: ( 10 | <> 11 | Docusaurus was designed from the ground up to be easily installed and 12 | used to get your website up and running quickly. 13 | 14 | ), 15 | }, 16 | { 17 | title: 'Focus on What Matters', 18 | Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default, 19 | description: ( 20 | <> 21 | Docusaurus lets you focus on your docs, and we'll do the chores. Go 22 | ahead and move your docs into the docs directory. 23 | 24 | ), 25 | }, 26 | { 27 | title: 'Powered by React', 28 | Svg: require('@site/static/img/undraw_docusaurus_react.svg').default, 29 | description: ( 30 | <> 31 | Extend or customize your website layout by reusing React. Docusaurus can 32 | be extended while reusing the same header and footer. 33 | 34 | ), 35 | }, 36 | ]; 37 | 38 | function Feature({Svg, title, description}) { 39 | return ( 40 |
41 |
42 | 43 |
44 |
45 |

{title}

46 |

{description}

47 |
48 |
49 | ); 50 | } 51 | 52 | export default function HomepageFeatures() { 53 | return ( 54 |
55 |
56 |
57 | {FeatureList.map((props, idx) => ( 58 | 59 | ))} 60 |
61 |
62 |
63 | ); 64 | } 65 | -------------------------------------------------------------------------------- /tests/cortisollib/test_calculators.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from cortisol.cortisollib.calculators import ( 3 | datadog_log_cost_calculator, 4 | grafana_log_cost_calculator, 5 | new_relic_log_cost_calculator, 6 | gcp_cloud_logging_log_cost_calculator, 7 | format_bytes, 8 | ) 9 | 10 | 11 | class TestLogCostCalculators(unittest.TestCase): 12 | def test_datadog_log_cost_calculator(self): 13 | size_gb = 5.0 14 | expected_cost = size_gb * 0.1 15 | n_retained_logs = 0.0 16 | self.assertEqual( 17 | datadog_log_cost_calculator(size_gb, n_retained_logs), expected_cost 18 | ) 19 | 20 | def test_grafana_log_cost_calculator(self): 21 | size_gb = 103.0 22 | expected_cost = (size_gb - 100.0) * 0.5 23 | self.assertEqual(grafana_log_cost_calculator(size_gb), expected_cost) 24 | 25 | def test_new_log_log_cost_calculator(self): 26 | size_gb = 103.0 27 | expected_cost = (size_gb - 100.0) * 0.3 28 | self.assertEqual(new_relic_log_cost_calculator(size_gb), expected_cost) 29 | 30 | def test_gcp_cloud_logging_log_cost_calculator(self): 31 | size_gb = 103.0 32 | expected_cost = (size_gb - 50.0) * 0.5 + size_gb * 0.01 33 | self.assertEqual(gcp_cloud_logging_log_cost_calculator(size_gb), expected_cost) 34 | 35 | def test_format_bytes(self): 36 | file_size_bytes = 2147483648 # 2 GB in bytes 37 | expected_size_gb = 2.0 38 | result = format_bytes(file_size_bytes) 39 | self.assertEqual(expected_size_gb, result) 40 | 41 | def test_datadog_log_cost_calculator_zero_size(self): 42 | size_gb = 0.0 43 | n_retained_logs = 0.0 44 | expected_cost = 0.0 45 | self.assertEqual( 46 | datadog_log_cost_calculator(size_gb, n_retained_logs), expected_cost 47 | ) 48 | 49 | def test_format_bytes_large_size(self): 50 | file_size_bytes = 128 * 1024**5 # 128 TB in bytes 51 | expected_size_gb = 128 * 1024**2 52 | result = 
format_bytes(file_size_bytes) 53 | print(result) 54 | print(expected_size_gb) 55 | self.assertEqual(expected_size_gb, result) 56 | -------------------------------------------------------------------------------- /docs/docs/how-tos.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: How-to guides 3 | sidebar_position: 7 4 | --- 5 | 6 | ## Estimating log costs from stdout and stderr streams 7 | 8 | Often, we find ourselves using software like [fluent-bit](https://fluentbit.io/) to capture the stdout and stderr of 9 | our applications and forward them directly to our chosen observability provider. 10 | Many cloud providers offer this functionality as part of their managed Kubernetes offerings. 11 | When you use such a setup, you might not specify a file to store your logs within your logger. 12 | But fear not, with cortisol, you can still easily estimate the cost of these logs. Here's how you can do it! 13 | 14 | ### Local service 15 | 16 | Assuming that you're already familiar with our [Your first log cost estimation guide](./getting-started.md/#your-first-log-cost-estimation), 17 | the only change you need to make is to redirect the stdout and stderr streams to a file of your choice. 
18 | 19 | You can do this with a simple command: 20 | ``` 21 | python -m app.main >> /path/to/my_redirected_logs.log 22 | ``` 23 | 24 | Once you've done this, you can use cortisol just as you normally would: 25 | 26 | ```terminal 27 | cortisol logs cost-estimate --host http://127.0.0.1:8080 --users 10 --spawn-rate 5 --run-time 10s --cortisol-file cortisolfile.py --cortisol-file ./examples/cortisolfile.py --log-file /path/to/my_redirected_logs.log 28 | ``` 29 | 30 | ### Local service with docker 31 | 32 | If you're already familiar with our [Your first log cost estimation with docker guide](./getting-started.md/#your-first-log-cost-estimation-with-docker), 33 | the only change you need to make is to redirect the stdout and stderr streams from your Docker container to a file outside of it. 34 | 35 | Here's how you can do it: 36 | 1. First, make sure you have the Docker CLI installed. 37 | 38 | 2. Open a new terminal and run the following command, replacing the `container-id` with your own: 39 | ```terminal 40 | docker logs -f {container-id} >> /path/to/my_redirected_logs.log 41 | ``` 42 | 3. Now, run cortisol as usual: 43 | ```terminal 44 | cortisol logs cost-estimate --host http://127.0.0.1:8080 --users 10 --spawn-rate 5 --run-time 10s --cortisol-file cortisolfile.py --cortisol-file ./examples/cortisolfile.py --log-file /path/to/my_redirected_logs.log 45 | ``` 46 | There's no need to specify a `container-id` in the `cost-estimate` command because the log file is outside of the container. Enjoy easy log cost estimation with cortisol! 
-------------------------------------------------------------------------------- /docs/docusaurus.config.js: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | // Note: type annotations allow type checking and IDEs autocompletion 3 | 4 | const lightCodeTheme = require('prism-react-renderer/themes/github'); 5 | const darkCodeTheme = require('prism-react-renderer/themes/dracula'); 6 | 7 | /** @type {import('@docusaurus/types').Config} */ 8 | const config = { 9 | title: 'Cortisol', 10 | tagline: 'The observability cost inspector', 11 | favicon: 'img/favicon.ico', 12 | 13 | // Set the production url of your site here 14 | url: 'https://your-docusaurus-test-site.com', 15 | // Set the // pathname under which your site is served 16 | // For GitHub pages deployment, it is often '//' 17 | baseUrl: '/cortisol/', 18 | 19 | // GitHub pages deployment config. 20 | // If you aren't using GitHub pages, you don't need these. 21 | organizationName: 'CortisolAI', // Usually your GitHub org/user name. 22 | projectName: 'cortisol', // Usually your repo name. 23 | 24 | onBrokenLinks: 'throw', 25 | onBrokenMarkdownLinks: 'warn', 26 | 27 | // Even if you don't use internalization, you can use this field to set useful 28 | // metadata like html lang. For example, if your site is Chinese, you may want 29 | // to replace "en" with "zh-Hans". 30 | i18n: { 31 | defaultLocale: 'en', 32 | locales: ['en'], 33 | }, 34 | 35 | presets: [ 36 | [ 37 | 'classic', 38 | /** @type {import('@docusaurus/preset-classic').Options} */ 39 | ({ 40 | docs: { 41 | routeBasePath: '/', 42 | sidebarPath: require.resolve('./sidebars.js'), 43 | // Please change this to your repo. 44 | // Remove this to remove the "edit this page" links. 
45 | editUrl: 46 | 'https://github.com/CortisolAI/cortisol/docs', 47 | }, 48 | theme: { 49 | customCss: require.resolve('./src/css/custom.css'), 50 | }, 51 | }), 52 | ], 53 | ], 54 | 55 | themeConfig: 56 | /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ 57 | ({ 58 | // Replace with your project's social card 59 | image: 'img/cortisol_medium.png', 60 | navbar: { 61 | title: 'Cortisol', 62 | items: [], 63 | }, 64 | footer: { 65 | style: 'dark', 66 | links: [], 67 | copyright: `Copyright © ${new Date().getFullYear()} CortisolAI. Built with Docusaurus.`, 68 | }, 69 | prism: { 70 | theme: lightCodeTheme, 71 | darkTheme: darkCodeTheme, 72 | }, 73 | colorMode: { 74 | defaultMode: 'light', 75 | disableSwitch: true, 76 | respectPrefersColorScheme: false, 77 | }, 78 | }), 79 | }; 80 | 81 | module.exports = config; 82 | -------------------------------------------------------------------------------- /docs/docs/intro.md: -------------------------------------------------------------------------------- 1 | --- 2 | slug: / 3 | title: What is Cortisol? 4 | sidebar_position: 1 5 | --- 6 | 7 | # Cortisol 8 | 9 | ![Cortisol](/img/cortisol_h_large.png) 10 | 11 | Let's discover **Cortisol in less than 5 minutes**. 12 | 13 | ## What is Cortisol? 14 | 15 | Cortisol is an open-source command-line tool designed specifically for web services. It offers easy-to-use cost estimation and forecasting capabilities tailored to main observability tools like [Datadog](https://www.datadoghq.com/), [New Relic](https://newrelic.com/), [Grafana](https://grafana.com/) and [GCP Cloud Logging](https://cloud.google.com/logging). Cortisol assists users in planning and optimizing their log costs before deploying their web services. It operates on a foundation inspired by [Locust](https://locust.io/), allowing users to define user behavior using a regular Python script 💰📉. 16 | 17 | ### How does it work? 
18 | 19 | Cortisol seamlessly harnesses the power of [Locust](https://locust.io/) for load testing. Users simply provide a standard Python script that outlines the anticipated user behavior on their web service. Cortisol, in turn, processes the load test's log file to project monthly log costs. It achieves this by referencing the public pricing figures of [Datadog](https://www.datadoghq.com/), [New Relic](https://newrelic.com/), [Grafana](https://grafana.com/) and [GCP Cloud Logging](https://cloud.google.com/logging). 20 | 21 | ### Name & Background 22 | 23 | Picture this: the world of observability is brimming with fantastic tools like Grafana, Datadog, and New Relic. But here's the catch – costs can sneak up on you, catching you off guard or, worse, too late. And don't get us started on those log costs; they can go from "too little" to "what?!" in no time, even for the simplest web services. We wanted a tool that's right there in the thick of your software development workflow, because programming should always be a joyful ride. 24 | 25 | In Cortisol, you define the behaviour of your users using Python code, much like you would in load testing scenarios. Various load testing scenarios result in logs of different sizes, leading to differing costs. Having this information beforehand can assist you in more effective budgeting and, potentially, in eliminating unnecessary log statements from your code. 26 | 27 | Cortisol takes its name from the steroid hormone that helps our bodies respond to stress by increasing alertness, boosting energy, and regulating metabolism. It also plays a role in controlling blood sugar levels, reducing inflammation, and supporting the immune system. 
28 | 29 | ### Authors 30 | 31 | - Dionysis Varelas ([@dvarelas](https://github.com/dvarelas) on Github) 32 | - Pavlos Mitsoulis ([@pm330](https://github.com/pm3310) on Github) 33 | - Narek Verdian ([@narek](https://www.linkedin.com/in/narek/) on LinkedIn) 34 | -------------------------------------------------------------------------------- /docs/docs/configuration.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Configuration 3 | sidebar_position: 4 4 | --- 5 | 6 | ## Configuration 7 | 8 | ### Log Cost Estimate 9 | 10 | #### Name 11 | 12 | Forecast log costs 13 | 14 | #### Synopsis 15 | 16 | cortisol logs cost-estimate --host HOST --log-file LOG_FILE --users NUM_USERS --spawn-rate SPAWN_RATE --run-time RUN_TIME -cortisol-file CORTISOL_PYTHON_FILE 17 | 18 | #### Description 19 | 20 | Forecast log costs pre-production with Cortisol for [Datadog](https://www.datadoghq.com/), [New Relic](https://newrelic.com/), [Grafana](https://grafana.com/) and [GCP Cloud Logging](https://cloud.google.com/logging) 21 | 22 | ### Example 23 | 24 | cortisol logs cost-estimate --host http://10.20.31.32:8000 --users 100 --spawn-rate 30 --run-time 20m -cortisol-file some_cortisol_file.py 25 | 26 | #### Required Flags - Option 1 27 | 28 | `-f, --cortisol-file PATH` Path to the CORTISOL_FILE 29 | 30 | `-h, --host TEXT` Host in the following format: http://10.20.31.32 or http://10.20.31.32:8000 31 | 32 | `-l, --log-file PATH` Path to log file 33 | 34 | `-u, --users INTEGER` Peak number of concurrent users 35 | 36 | `-r, --spawn-rate INTEGER` Rate to spawn users at (users per second) 37 | 38 | `-t, --run-time TEXT` Stop after the specified amount of time, e.g. (50, 30s, 200m, 5h, 2h30m, etc.). Default unit in seconds. 
39 | 40 | #### Required Flags - Option 2 41 | 42 | All the latter options plus the following in case your application run in a Docker container: 43 | 44 | `-c, --container-id TEXT` Optional docker container id where your application runs 45 | 46 | #### Required Flags - Option 3 47 | 48 | `--config PATH` Path to config file (YAML or JSON) containing the long version of flags from option 1 49 | 50 | #### Optional Flags 51 | 52 | `--stats-file PATH` Path where to store the cortisol statistics output as a csv 53 | 54 | Here's a YAML example: 55 | 56 | ```YAML 57 | host: "http://10.20.31.32:8000" 58 | log-file: "/path/to/logfile" 59 | users: 100 60 | spawn-rate: 30 61 | run-time: "20m" 62 | cortisol-file: "some_cortisol_file.py" 63 | stats-file: "cortisol_stats.csv" 64 | ``` 65 | 66 | Here's a YAML example with docker container id: 67 | 68 | ```YAML 69 | host: "http://10.20.31.32:8000" 70 | log-file: "/path/to/logfile" 71 | users: 100 72 | spawn-rate: 30 73 | run-time: "20m" 74 | cortisol-file: "some_cortisol_file.py" 75 | container-id: "80f1bc1e7feb" 76 | stats-file: "cortisol_stats.csv" 77 | ``` 78 | 79 | and a JSON example: 80 | 81 | ```JSON 82 | { 83 | "host": "http://10.20.31.32:8000", 84 | "log_file": "/path/to/logfile", 85 | "users": 100, 86 | "spawn_rate": 30, 87 | "run_time": "20m", 88 | "cortisol_file": "some_cortisol_file.py", 89 | "container_id": "80f1bc1e7feb", 90 | "stats-file": "cortisol_stats.csv" 91 | } 92 | ``` 93 | -------------------------------------------------------------------------------- /tests/commands/test_logs.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from unittest.mock import Mock, patch 4 | from typer.testing import CliRunner 5 | import yaml 6 | 7 | from cortisol.commands.logs import app 8 | 9 | runner = CliRunner() 10 | 11 | 12 | def test_cost_estimate_yaml_config_file_only_happy_case(): 13 | data = { 14 | "host": "http://10.20.31.32:8000", 15 | "log-file": 
"/path/to/logfile", 16 | "users": 100, 17 | "spawn-rate": 5, 18 | "run-time": "10s", 19 | "cortisol-file": "some_cortisol_file.py", 20 | } 21 | 22 | with runner.isolated_filesystem(): 23 | with open("config.yml", "w") as file: 24 | yaml.dump(data, file) 25 | 26 | with patch("cortisol.commands.logs.get_cost_estimate"): 27 | result = runner.invoke(app, ["--config", "config.yml"]) 28 | 29 | assert result.exit_code == 0 30 | 31 | 32 | def test_cost_estimate_json_config_file_only_happy_case(): 33 | data = { 34 | "host": "http://10.20.31.32:8000", 35 | "log-file": "/path/to/logfile", 36 | "users": 100, 37 | "spawn-rate": 5, 38 | "run-time": "10s", 39 | "cortisol-file": "some_cortisol_file.py", 40 | "container-id": "80f1bc1e7feb", 41 | } 42 | 43 | with runner.isolated_filesystem(): 44 | with open("config.json", "w") as file: 45 | json.dump(data, file) 46 | 47 | with patch("cortisol.commands.logs.get_cost_estimate"): 48 | result = runner.invoke(app, ["--config", "config.json"]) 49 | 50 | assert result.exit_code == 0 51 | 52 | 53 | def test_cost_estimate_configs_in_args_happy_case(): 54 | with patch("cortisol.commands.logs.get_cost_estimate"): 55 | result = runner.invoke( 56 | app, 57 | [ 58 | "-h", 59 | "http://10.20.31.32:8000", 60 | "-l", 61 | "/path/to/logfile", 62 | "-u", 63 | "100", 64 | "-r", 65 | "30", 66 | "-t", 67 | "1h20m", 68 | "-f", 69 | "some_cortisol_file.py", 70 | ], 71 | ) 72 | 73 | assert result.exit_code == 0 74 | 75 | 76 | def test_cost_estimate_no_args_given(): 77 | result = runner.invoke(app, []) 78 | 79 | assert result.exit_code == 1 80 | 81 | 82 | def test_cost_estimate_one_missing_arg(): 83 | result = runner.invoke( 84 | app, 85 | [ 86 | "-h", 87 | "http://10.20.31.32:8000", 88 | "-l", 89 | "/path/to/logfile", 90 | "-u", 91 | "100", 92 | "-r", 93 | "30", 94 | "-t", 95 | "1h20m", 96 | ], 97 | ) 98 | 99 | assert result.exit_code == 1 100 | -------------------------------------------------------------------------------- 
/docs/blog/2019-05-29-long-blog-post.md: -------------------------------------------------------------------------------- 1 | --- 2 | slug: long-blog-post 3 | title: Long Blog Post 4 | authors: endi 5 | tags: [hello, docusaurus] 6 | --- 7 | 8 | This is the summary of a very long blog post, 9 | 10 | Use a `` comment to limit blog post size in the list view. 11 | 12 | 13 | 14 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 15 | 16 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 17 | 18 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 19 | 20 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 21 | 22 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 23 | 24 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 25 | 26 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 27 | 28 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 29 | 30 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. 
Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 31 | 32 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 33 | 34 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 35 | 36 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 37 | 38 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 39 | 40 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 41 | 42 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet 43 | 44 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. 
Lorem ipsum dolor sit amet 45 | -------------------------------------------------------------------------------- /cortisol/cortisollib/users.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from time import time 3 | from locust import HttpUser, between, task 4 | from locust.user import User 5 | from typing import Optional, Dict 6 | 7 | from urllib3 import PoolManager 8 | 9 | from locust.clients import HttpSession 10 | from locust.exception import LocustError 11 | 12 | from cortisol.cortisollib.readers import log_file_size_reader, count_log_entries 13 | 14 | 15 | class CortisolHttpUser(User): 16 | """ 17 | Represents an HTTP "user" which is to be spawned and attack the system that is to be load tested. 18 | 19 | The behaviour of this user is defined by its tasks. Tasks can be declared either directly on the 20 | class by using the :py:func:`@task decorator ` on methods, or by setting 21 | the :py:attr:`tasks attribute `. 22 | 23 | This class creates a *client* attribute on instantiation which is an HTTP client with support 24 | for keeping a user session between requests. 25 | """ 26 | 27 | abstract = True 28 | """If abstract is True, the class is meant to be subclassed, and users will not choose this locust during a test""" 29 | 30 | pool_manager: Optional[PoolManager] = None 31 | """Connection pool manager to use. If not given, a new manager is created per single user.""" 32 | 33 | wait_time = between(2, 5) 34 | start_time = "" 35 | initial_log_volume = 0 36 | initial_log_entries = 0 37 | 38 | def __init__(self, *args, **kwargs): 39 | super().__init__(*args, **kwargs) 40 | if self.host is None: 41 | raise LocustError( 42 | "You must specify the base host. Either in the host attribute in the User class, or on the command line using the --host option." 
43 | ) 44 | 45 | self.client = HttpSession( 46 | base_url=self.host, 47 | request_event=self.environment.events.request, 48 | user=self, 49 | pool_manager=self.pool_manager, 50 | ) 51 | """ 52 | Instance of HttpSession that is created upon instantiation of Locust. 53 | The client supports cookies, and therefore keeps the session between HTTP requests. 54 | """ 55 | self.client.trust_env = False 56 | 57 | def on_start(self): 58 | self.start_time = time() 59 | self.initial_log_volume = log_file_size_reader( 60 | file_path=self.environment.parsed_options.log_file, 61 | container_id=self.environment.parsed_options.container_id, 62 | on_start=True, 63 | ) 64 | self.initial_log_entries = count_log_entries( 65 | file_path=self.environment.parsed_options.log_file, 66 | container_id=self.environment.parsed_options.container_id, 67 | on_start=True, 68 | ) 69 | 70 | def context(self) -> Dict: 71 | default_context = { 72 | "log_file": self.environment.parsed_options.log_file, 73 | "container_id": self.environment.parsed_options.container_id, 74 | "start_time": self.start_time, 75 | "initial_log_volume": self.initial_log_volume, 76 | "initial_log_entries": self.initial_log_entries, 77 | "stats_file": self.environment.parsed_options.stats_file, 78 | } 79 | return default_context 80 | -------------------------------------------------------------------------------- /cortisol/cortisollib/calculators.py: -------------------------------------------------------------------------------- 1 | def datadog_log_cost_calculator(logs_in_gb, log_events_in_million): 2 | """ 3 | Calculate the estimated annual cost of logging using Datadog, including any free tier. 4 | 5 | This function calculates the estimated annual cost of logging using the Datadog platform, 6 | considering the amount of data logs in gigabytes and the number of log events in millions. 7 | 8 | Args: 9 | logs_in_gb (float): Amount of data logs in gigabytes. 10 | log_events_in_million (int): Number of log events in millions. 
11 | 12 | Returns: 13 | float: Estimated annual cost of logging in USD for log ingestion plus log retention/storage. 14 | 15 | Note: 16 | The estimated cost is returned based on the assumption that the Datadog Pro Plan is chosen, billed annually and a 30 day retention is chosen. 17 | """ 18 | cost_per_gb = logs_in_gb * 0.1 19 | cost_per_event_million = log_events_in_million * 2.5 20 | return cost_per_gb + cost_per_event_million 21 | 22 | 23 | def grafana_log_cost_calculator(logs_in_gb): 24 | """ 25 | Calculate the estimated cost of logs in USD currency based on the given amount of logs in gigabytes, 26 | assuming the Grafana Cloud Pro Plan is chosen and including the free tier. 27 | 28 | Args: 29 | logs_in_gb (float): The amount of logs in gigabytes. 30 | 31 | Returns: 32 | float: The estimated cost of the logs in USD currency for the Grafana Cloud Pro Plan. 33 | 34 | Example: 35 | >>> grafana_log_cost_calculator(100) 36 | 50.0 37 | 38 | Note: 39 | The return value is calculated based on the pricing of the Grafana Cloud Pro Plan and does consider the free tier. 40 | """ 41 | if logs_in_gb <= 100: 42 | return 0.0 43 | 44 | return (logs_in_gb - 100.0) * 0.5 45 | 46 | 47 | def new_relic_log_cost_calculator(logs_in_gb): 48 | """ 49 | Calculate the estimated cost of logs in USD currency based on the given amount of logs in gigabytes, 50 | assuming the New Relic Pro Plan is chosen and including the free tier. 51 | Args: 52 | logs_in_gb (float): The amount of logs in gigabytes. 53 | Returns: 54 | float: The estimated cost of the logs in USD currency for the New Relic Pro Plan. 55 | Example: 56 | >>> new_relic_log_cost_calculator(120) 57 | 36.0 58 | Note: 59 | The return value is calculated based on the pricing of the New Relic Pro Plan and does consider the free tier. 
60 | """ 61 | if logs_in_gb <= 100: 62 | return 0.0 63 | return (logs_in_gb - 100.0) * 0.3 64 | 65 | 66 | def format_bytes(file_size): 67 | """ 68 | Converts file size from Bytes to GB 69 | 70 | Args: 71 | file_size (float): file size in bytes 72 | Returns: 73 | float: File size in GB 74 | """ 75 | k = 1024 76 | file_size_in_gb = file_size / k**3 # Directly convert to GB (1024^3) 77 | return file_size_in_gb 78 | 79 | 80 | def gcp_cloud_logging_log_cost_calculator(logs_in_gb): 81 | """ 82 | Calculate the estimated annual cost of logging using GCP Cloud Logging, including the free tier. 83 | This function calculates the estimated annual cost of logging using the Cloud Logging platform, 84 | considering the amount of data logs in gigabytes. 85 | Args: 86 | logs_in_gb (float): Amount of data logs in gigabytes. 87 | Returns: 88 | float: Estimated annual cost of logging in USD for log ingestion plus log retention/storage. 89 | Note: 90 | The estimated cost is returned based on the assumption that logs are going to be retainted more than 30 days. 91 | No cost is incurred with default retention period https://cloud.google.com/logging/quotas#logs_retention_periods. 92 | Google Commitment Agreement is not taken into account. 
93 | """ 94 | if logs_in_gb <= 50: 95 | return logs_in_gb * 0.01 96 | 97 | return (logs_in_gb - 50.0) * 0.5 + logs_in_gb * 0.01 98 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include 
Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | .idea/ 161 | .DS_Store 162 | 163 | cortisol/cortisollib/templates/locustfile.py -------------------------------------------------------------------------------- /docs/docs/CI-visibility.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Continuous Integration Visibility 3 | sidebar_position: 6 4 | --- 5 | 6 | ## Continuous Integration Visibility 7 | 8 | Streamline your CI/CD pipelines with detailed visibility into your expected log costs. 9 | 10 | There are examples below on how to enable Cortisol in your CI/CD pipeline. 11 | 12 | ## Cortisol as a Github Action 13 | 14 | For more information about Github Action, please refer [here](https://github.com/features/actions). 
15 | 16 | Let's suppose that you want to run Cortisol as a step in a Github action for this FastAPI [repository](https://github.com/CortisolAI/getting-started-example). 17 | 18 | It's very simple! 19 | 20 | 1. Fork this [repository](https://github.com/CortisolAI/getting-started-example) 21 | 2. Create a file named `cortisolfile.py` at the root of the repository with the following content: 22 | ```Python 23 | from locust import task 24 | 25 | from cortisol.cortisollib.users import CortisolHttpUser 26 | 27 | 28 | class WebsiteUser(CortisolHttpUser): 29 | @task 30 | def my_task(self): 31 | self.client.get("/") 32 | ``` 33 | 3. Create a file named `my_config.yaml` at the root of the repository with the following content: 34 | ```YAML 35 | host: "http://127.0.0.1:8080" 36 | log-file: "cortisol_app.log" 37 | users: 10 38 | spawn-rate: 5 39 | run-time: "10s" 40 | cortisol-file: "cortisolfile.py" 41 | ``` 42 | 4. Create in this repository the following path `.github/workflows/` 43 | 5. Save under `.github/workflows/` a file named `main.yml` with the following content: 44 | 45 | ```YAML 46 | name: Main Workflow 47 | 48 | on: 49 | push: 50 | branches: 51 | - main # Replace with the branch you want to trigger on 52 | 53 | jobs: 54 | build: 55 | runs-on: ubuntu-latest 56 | 57 | steps: 58 | - name: Checkout Repository 59 | uses: actions/checkout@v2 60 | 61 | - name: Set up Python # Replace with your desired programming language. 62 | uses: actions/setup-python@v4 63 | with: 64 | python-version: "3.10" # Replace with your desired Python version. 65 | 66 | - name: Install Dependencies # Replace with your desired dependency management tool. 
67 | run: | 68 | pip install -r requirements.txt 69 | 70 | - name: Run FastAPI Server in the background 71 | run: | 72 | nohup python -m app.main & 73 | 74 | - name: Verify server is running 75 | run: | 76 | curl http://127.0.0.1:8080 77 | 78 | - name: Cortisol log costs pre-production 79 | run: | 80 | cortisol logs cost-estimate --config my_config.yaml 81 | ``` 82 | 6. Push the code changes to your `main` branch. 83 | 7. You should see in the Actions tab of your forked repository this action being in progress. Click on it. Once it's finished you should something like that: 84 | 85 | ![GH-Action](/img/gh-action-cortisol.png) 86 | 87 | Let's explain the steps in this Github Action workflow: 88 | 89 | 1. **Checkout Repository**: This step checks out the source code repository into the runner's workspace. It uses the `actions/checkout` action with version `v2`. 90 | 2. **Set up Python**: This step sets up the Python environment on the runner. It specifies the desired Python version, which is version `3.10` in this case. 91 | 3. **Install Dependencies**: This step installs Python dependencies from a `requirements.txt` file using the `pip` package manager. The `requirements.txt` file contains the `cortisol` library. 92 | 4. **Run FastAPI Server in the background**: This step starts a FastAPI server in the background. The nohup command allows the server to keep running after this step is completed and, more importantly, it doesn't block the entire Github Action. 93 | 5. **Verify server is running**: This step uses `curl` to make an HTTP request to the FastAPI server to verify that it is running and responsive. 94 | 6. **Cortisol log costs pre-production**: This step runs Cortisol with the arguments `logs cost-estimate --config my_config.yaml`. It estimates log costs pre-production. 
95 | -------------------------------------------------------------------------------- /tests/cortisollib/test_log_cost_estimator.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from unittest.mock import Mock, patch 4 | from pathlib import Path 5 | 6 | from cortisol.cortisollib.log_cost_estimator import ( 7 | render_locustfile, 8 | render_locust_command, 9 | get_cost_estimate, 10 | _get_classes_extending_httpuser, 11 | ) 12 | 13 | _FILE_DIR_PATH = os.path.normpath( 14 | os.path.join(os.path.dirname(__file__), "..", "..", "cortisol", "cortisollib") 15 | ) 16 | 17 | 18 | class TestLibLogs(unittest.TestCase): 19 | def setUp(self): 20 | # Create temporary files for testing 21 | self.host = "http://127.0.0.1:8080" 22 | self.num_users = 100 23 | self.spawn_rate = 10 24 | self.run_time = "10m" 25 | self.container_id = "123mock" 26 | self.cortisol_file = Path("test_cortisol.py") 27 | self.log_file = Path("test_log.txt") 28 | 29 | def tearDown(self): 30 | # Clean up temporary files after testing 31 | self.cortisol_file.unlink(missing_ok=True) 32 | self.log_file.unlink(missing_ok=True) 33 | 34 | def test_render_locustfile(self): 35 | # Create a temporary cortisol input file for testing 36 | cortisol_input = "user_data: test" 37 | self.cortisol_file.write_text(cortisol_input) 38 | 39 | rendered_content = render_locustfile(self.cortisol_file) 40 | 41 | # Check if the rendered content contains the expected string 42 | self.assertIn("user_data: test", rendered_content) 43 | 44 | def test_render_locust_command(self): 45 | # Define test input values 46 | 47 | # Expected command 48 | expected_command = [ 49 | "locust", 50 | "-f", 51 | os.path.join(_FILE_DIR_PATH, "./templates/locustfile.py"), 52 | "--headless", 53 | "--host", 54 | self.host, 55 | "--users", 56 | str(self.num_users), 57 | "--spawn-rate", 58 | str(self.spawn_rate), 59 | "--run-time", 60 | str(self.run_time), 61 | "--container-id", 62 | 
self.container_id, 63 | "--log-file", 64 | Path(self.log_file), 65 | ] 66 | 67 | result = render_locust_command( 68 | host=self.host, 69 | log_file=Path(self.log_file), 70 | num_users=self.num_users, 71 | spawn_rate=self.spawn_rate, 72 | run_time=self.run_time, 73 | container_id=self.container_id, 74 | ) 75 | 76 | # Assert the result matches the expected command 77 | self.assertEqual(result, expected_command) 78 | 79 | @patch("cortisol.cortisollib.readers.log_file_size_reader") 80 | @patch("cortisol.cortisollib.log_cost_estimator.subprocess") 81 | @patch("cortisol.cortisollib.log_cost_estimator.render_locustfile") 82 | def test_get_cost_estimate( 83 | self, mock_render_locustfile, mock_subprocess, mock_log_file_size_reader 84 | ): 85 | process_mock = Mock( 86 | stdout=b"mocked_stdout", stderr=b"mocked_stderr", returncode=0 87 | ) 88 | mock_subprocess.Popen.return_value.communicate.return_value = ( 89 | process_mock.stdout, 90 | process_mock.stderr, 91 | ) 92 | mock_subprocess.Popen.return_value.returncode = process_mock.returncode 93 | mock_log_file_size_reader.return_value = 1024 94 | mock_render_locustfile.return_value = 0 95 | 96 | cortisol_input = "user_data: test" 97 | self.cortisol_file.write_text(cortisol_input) 98 | 99 | result = get_cost_estimate( 100 | cortisol_file=self.cortisol_file, 101 | host=self.host, 102 | log_file=self.log_file, 103 | num_users=self.num_users, 104 | spawn_rate=self.spawn_rate, 105 | run_time=self.run_time, 106 | container_id=self.container_id, 107 | ) 108 | 109 | self.assertEqual(result, 0) 110 | 111 | def test_single_class(self): 112 | code = """ 113 | class MyUser(CortisolHttpUser): 114 | pass 115 | """ 116 | result = _get_classes_extending_httpuser(code) 117 | self.assertEqual(result, "[MyUser]") 118 | 119 | def test_multiple_classes(self): 120 | code = """ 121 | class MyUser(CortisolHttpUser): 122 | pass 123 | 124 | class AnotherUser(CortisolHttpUser): 125 | pass 126 | """ 127 | result = _get_classes_extending_httpuser(code) 
128 | self.assertEqual(result, "[MyUser, AnotherUser]") 129 | -------------------------------------------------------------------------------- /tests/cortisollib/test_hooks.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from pathlib import Path 4 | from unittest.mock import patch 5 | import prettytable 6 | from locust.env import Environment 7 | from unittest.mock import MagicMock 8 | from cortisol.cortisollib.hooks import ( 9 | on_init, 10 | on_request, 11 | on_quit, 12 | create_results_table, 13 | colorize, 14 | add_symbol, 15 | ) 16 | 17 | 18 | class TestFormattingFunctions(unittest.TestCase): 19 | def test_colorize(self): 20 | value = "Some Text" 21 | key = "log-volume" 22 | expected_colorized_value = "\033[38;2;255;255;255mSome Text\033[0m" 23 | colorized_value = colorize(value, key) 24 | self.assertEqual(colorized_value, expected_colorized_value) 25 | 26 | def test_add_symbol(self): 27 | key = "log-volume" 28 | value = 150.0 29 | expected_formatted_value = "150.0 GiB" 30 | formatted_value = add_symbol(key, value) 31 | self.assertEqual(formatted_value, expected_formatted_value) 32 | 33 | 34 | class TestHooks(unittest.TestCase): 35 | def setUp(self): 36 | self.environment = MagicMock(spec=Environment) 37 | runner_mock = MagicMock() 38 | self.environment.runner = runner_mock 39 | 40 | def test_create_results_table(self): 41 | obs_stats = { 42 | "logs": { 43 | "log-volume": 100, 44 | "datadog-cost": 50, 45 | "grafana-cost": 30, 46 | } 47 | } 48 | table = create_results_table(obs_stats) 49 | self.assertIsInstance(table, prettytable.prettytable.PrettyTable) 50 | # You can add more assertions here based on the expected table content 51 | 52 | def test_on_init(self): 53 | on_init(self.environment) 54 | self.assertEqual({}, self.environment.runner.stats.custom_stats) 55 | 56 | def test_on_quit_table(self): 57 | obs_stats = { 58 | "stats_file": None, 59 | "n_requests": 10, 60 | "logs": { 61 | 
"log-volume": 100, 62 | "datadog-cost": 50, 63 | "grafana-cost": 30, 64 | "new-relic-cost": 30, 65 | "gcp-cloud-logging-cost": 30, 66 | }, 67 | } 68 | self.environment.runner.stats.custom_stats = obs_stats 69 | result = on_quit(self.environment) 70 | self.assertIsInstance(result, prettytable.prettytable.PrettyTable) 71 | 72 | def test_on_quit_stats_file(self): 73 | stats_file_path = "cortisol_test_stats_file_exists.csv" 74 | obs_stats = { 75 | "stats_file": stats_file_path, 76 | "n_requests": 10, 77 | "logs": { 78 | "log-volume": 100, 79 | "datadog-cost": 50, 80 | "grafana-cost": 30, 81 | "new-relic-cost": 30, 82 | "gcp-cloud-logging-cost": 30, 83 | }, 84 | } 85 | self.environment.runner.stats.custom_stats = obs_stats 86 | _ = on_quit(self.environment) 87 | if not Path(stats_file_path).resolve().is_file(): 88 | raise AssertionError("File does not exist: %s" % str(stats_file_path)) 89 | os.remove(stats_file_path) 90 | 91 | @patch("cortisol.cortisollib.readers.docker_log_file_size_reader") 92 | @patch("cortisol.cortisollib.readers.docker_count_log_entries") 93 | def test_on_request( 94 | self, mock_docker_count_log_entries, mock_docker_log_file_size_reader 95 | ): 96 | keys = [ 97 | "log-volume", 98 | "datadog-cost", 99 | "grafana-cost", 100 | "new-relic-cost", 101 | "gcp-cloud-logging-cost", 102 | ] 103 | 104 | context = { 105 | "log_file": Path("/app/playground_app.log"), 106 | "container_id": "test_container_id", 107 | "start_time": 1, 108 | "initial_log_volume": 0, 109 | "initial_log_entries": 0, 110 | "stats_file": "file.csv", 111 | } 112 | mock_docker_log_file_size_reader.return_value = 1024 113 | mock_docker_count_log_entries.return_value = 3 114 | 115 | stats = on_request( 116 | request_type="test_request_type", 117 | name="test_request", 118 | response_time=1, 119 | response_length=1, 120 | exception=None, 121 | context=context, 122 | ) 123 | 124 | for key in keys: 125 | self.assertIn(key, stats["logs"], f"{key} not found in the dictionary") 126 | 
-------------------------------------------------------------------------------- /docs/docs/writing-a-cortisolfile.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Writing a cortisolfile 3 | sidebar_position: 3 4 | --- 5 | 6 | ## Writing a cortisolfile 7 | 8 | Cortisol has been inspired by [Locust](https://docs.locust.io/en/stable/writing-a-locustfile.html) on defining the behaviour of users. 9 | 10 | In fact, there is only one difference between a cortisol file and a locust file. The user defined class in the cortisol file must extend the `CortisolHttpUser`. The rest is exactly the same. 11 | 12 | Let's look at a realistic example that is slightly modified from the original [Locust example](https://docs.locust.io/en/stable/writing-a-locustfile.html). 13 | 14 | ```python 15 | import time 16 | from cortisol.cortisollib.users import CortisolHttpUser 17 | from locust import task, between 18 | 19 | class QuickstartUser(CortisolHttpUser): 20 | wait_time = between(1, 5) 21 | 22 | @task 23 | def hello_world(self): 24 | self.client.get("/hello") 25 | self.client.get("/world") 26 | 27 | @task(3) 28 | def view_items(self): 29 | for item_id in range(10): 30 | self.client.get(f"/item?id={item_id}", name="/item") 31 | time.sleep(1) 32 | 33 | def on_start(self): 34 | self.client.post("/login", json={"username":"foo", "password":"bar"}) 35 | ``` 36 | 37 | **Let's break it down** 38 | 39 | 40 | ```python 41 | import time 42 | from cortisol.cortisollib.users import CortisolHttpUser 43 | from locust import task, between 44 | ``` 45 | 46 | A cortisol file is just a normal Python module, it can import code from other files or packages. 47 | 48 | ```python 49 | class QuickstartUser(CortisolHttpUser): 50 | ``` 51 | 52 | Here we define a class for the users that we will be simulating. 
It inherits from 53 | `CortisolHttpUser ` which gives each user a ``client`` attribute, 54 | which is an instance of `HttpSession ` behind the scenes, that 55 | can be used to make HTTP requests to the target system that we want to load test. When a test starts, 56 | locust will create an instance of this class for every user that it simulates, and each of these 57 | users will start running within their own green gevent thread. 58 | 59 | For a file to be a valid cortisolfile it must contain at least one class inheriting from :py:class:`CortisolHttpUser `. 60 | 61 | Behind the scenes, the class `CortisolHttpUser ` has extended the `User ` so that it can read and process the logs that are written during the load test that happen in the background. 62 | 63 | ```python 64 | wait_time = between(1, 5) 65 | ``` 66 | 67 | Our class defines a ``wait_time`` that will make the simulated users wait between 1 and 5 seconds after each task (see below) 68 | is executed. For more info see [wait-time](https://docs.locust.io/en/stable/writing-a-locustfile.html#wait-time). 69 | 70 | ```python 71 | @task 72 | def hello_world(self): 73 | ... 74 | ``` 75 | 76 | Methods decorated with ``@task`` are the core of your cortisol file. For every running user, 77 | Locust creates a greenlet (micro-thread), that will call those methods. 78 | 79 | ```python 80 | @task 81 | def hello_world(self): 82 | self.client.get("/hello") 83 | self.client.get("/world") 84 | 85 | @task(3) 86 | def view_items(self): 87 | ... 88 | ``` 89 | 90 | We've declared two tasks by decorating two methods with ``@task``, one of which has been given a higher weight (3). 91 | When our ``QuickstartUser`` runs it'll pick one of the declared tasks - in this case either ``hello_world`` or 92 | ``view_items`` - and execute it. Tasks are picked at random, but you can give them different weighting. The above 93 | configuration will make Locust three times more likely to pick ``view_items`` than ``hello_world``. 
When a task has 94 | finished executing, the User will then sleep during its wait time (in this case between 1 and 5 seconds). 95 | After its wait time it'll pick a new task and keep repeating that. 96 | 97 | Note that only methods decorated with ``@task`` will be picked, so you can define your own internal helper methods any way you like. 98 | 99 | ```python 100 | self.client.get("/hello") 101 | ``` 102 | 103 | ```python 104 | @task(3) 105 | def view_items(self): 106 | for item_id in range(10): 107 | self.client.get(f"/item?id={item_id}", name="/item") 108 | time.sleep(1) 109 | ``` 110 | 111 | In the ``view_items`` task we load 10 different URLs by using a variable query parameter. 112 | -------------------------------------------------------------------------------- /cortisol/commands/logs.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | 4 | import typer 5 | import yaml 6 | 7 | from cortisol.cortisollib.log_cost_estimator import get_cost_estimate 8 | 9 | app = typer.Typer() 10 | 11 | 12 | def _config_reader(file_path: Path): 13 | try: 14 | file_content = file_path.read_text() 15 | 16 | try: 17 | data = yaml.safe_load(file_content) 18 | return data 19 | except yaml.YAMLError: 20 | # If parsing as YAML fails, try parsing as JSON 21 | import pdb 22 | 23 | pdb.set_trace() 24 | data = json.loads(file_content) 25 | return data 26 | except json.JSONDecodeError: 27 | raise ValueError("Invalid YAML or JSON format in the input file.") 28 | except FileNotFoundError: 29 | raise FileNotFoundError(f"File not found at path: {file_path}") 30 | 31 | 32 | def _check_keys_in_file(file_path: Path): 33 | data = _config_reader(file_path) 34 | 35 | keys_to_check = [ 36 | "cortisol-file", 37 | "host", 38 | "log-file", 39 | "run-time", 40 | "spawn-rate", 41 | "users", 42 | ] 43 | missing_keys = [key for key in keys_to_check if key not in data] 44 | 45 | if missing_keys: 46 | raise KeyError(f"Required 
keys are missing in the input file: {missing_keys}") 47 | 48 | 49 | @app.command() 50 | def cost_estimate( 51 | cortisol_file: Path = typer.Option( 52 | None, "-f", "--cortisol-file", help="Path to the CORTISOL_FILE" 53 | ), 54 | host: str = typer.Option( 55 | None, 56 | "-h", 57 | "--host", 58 | help="Host in the following format: http://10.20.31.32 or http://10.20.31.32:8000", 59 | ), 60 | log_file: Path = typer.Option(None, "-l", "--log-file", help="Path to log file"), 61 | num_users: int = typer.Option( 62 | None, "-u", "--users", help="Peak number of concurrent users" 63 | ), 64 | spawn_rate: int = typer.Option( 65 | None, "-r", "--spawn-rate", help="Rate to spawn users at (users per second)" 66 | ), 67 | run_time: str = typer.Option( 68 | None, 69 | "-t", 70 | "--run-time", 71 | help="Stop after the specified amount of time, e.g. (50, 30s, 200m, 5h, 2h30m, etc.). Default unit in seconds.", 72 | ), 73 | container_id: str = typer.Option( 74 | None, 75 | "-c", 76 | "--container-id", 77 | help="Optional docker container id where your application runs", 78 | ), 79 | config: Path = typer.Option(None, "--config", help="Path to config file"), 80 | stats_file: Path = typer.Option( 81 | None, 82 | "-s", 83 | "--stats-file", 84 | help="Optional stats file path where the stats will be stored as csv", 85 | ), 86 | ): 87 | """ 88 | Forecast log costs pre-production with Cortisol for Datadog, New Relic, and Grafana 89 | """ 90 | if not config and any( 91 | var is None 92 | for var in (cortisol_file, host, log_file, num_users, spawn_rate, run_time) 93 | ): 94 | typer.echo( 95 | "Option '--config' is required or the following options '-f' / '--cortisol-file', " 96 | "'-l' / '--log-file', '-h' / '--host', '-u' / '--users', '-r' / '--spawn-rate', '-t' / '--run-time" 97 | "'-c' / '--container-id' is required only if your application runs in a Docker container" 98 | ) 99 | raise typer.Abort() 100 | 101 | if config: 102 | try: 103 | _check_keys_in_file(config) 104 | data = 
_config_reader(config) 105 | cortisol_file = data["cortisol-file"] 106 | host = data["host"] 107 | log_file = data["log-file"] 108 | num_users = data["users"] 109 | spawn_rate = data["spawn-rate"] 110 | run_time = data["run-time"] 111 | container_id = data.get("container-id", "") 112 | stats_file = data.get("stats-file", None) 113 | except (FileNotFoundError, ValueError, KeyError) as e: 114 | typer.echo(str(e)) 115 | raise typer.Abort() 116 | 117 | typer.echo("Cost estimate command in the making") 118 | if not container_id: 119 | container_id = "" 120 | 121 | get_cost_estimate( 122 | cortisol_file=cortisol_file, 123 | host=host, 124 | log_file=log_file, 125 | num_users=num_users, 126 | spawn_rate=spawn_rate, 127 | run_time=run_time, 128 | container_id=container_id, 129 | stats_file=stats_file, 130 | ) 131 | 132 | 133 | if __name__ == "__main__": 134 | app() 135 | -------------------------------------------------------------------------------- /docs/docs/getting-started.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Getting Started 3 | sidebar_position: 2 4 | --- 5 | 6 | ## Installation 7 | 8 | ### Prerequisities 9 | 10 | Cortisol requires one of the following Python versions: 3.8, 3.9, 3.10 or 3.11 11 | 12 | ### Install Cortisol 13 | 14 | At the command line: 15 | 16 | pip install cortisol 17 | 18 | If you have an Apple M1 CPU, we suggest installing using Poetry as a dependency management. Otherwise, the underline gevent library may not work. 19 | 20 | ## Your first log cost estimation 21 | 22 | Let's dive right in and get our hands dirty with Cortisol! As an integral part of your software development workflow, Cortisol CLI brings predictability to managing log costs. 23 | 24 | First things first! We need a RESTful service and so you'll need to do the following steps: 25 | 26 | 1. Clone this example repo https://github.com/CortisolAI/getting-started-example 27 | 2. `cd getting-started-example` 28 | 3. 
`mkvirtualenv getting-started-cortisol` 29 | 4. `python -m app.main` which will make the service available at `http://127.0.0.1:8080/` 30 | 31 | And, now, it's time to create your first cortisol file. Copy and paste the following in a file named `cortisolfile.py` in the root path of getting-started-example repo: 32 | 33 | ```python 34 | from locust import task 35 | 36 | from cortisol.cortisollib.users import CortisolHttpUser 37 | 38 | 39 | class WebsiteUser(CortisolHttpUser): 40 | @task 41 | def my_task(self): 42 | self.client.get("/") 43 | 44 | ``` 45 | 46 | Here we define a class for the users that we will be simulating. It must always inherit from `CortisolHttpUser `. 47 | 48 | We've declared 1 task by decorating two methods with ``@task``. The `my_task` method calls the root path of the restful service that just returns a simple JSON response. 49 | 50 | You can define multiple tasks for each resource of your web service. 51 | 52 | Almost there! It's time to run the cortisol command and get your first log cost estimates. Before we do that, make sure you know the root path of where the getting-started-example repo is located. For illustration purposes, let's assume the path is `/some/path/getting-started-example/`. 53 | 54 | Switch to another terminal window, enable the `getting-started-cortisol` virtual env and run the following command in the terminal: 55 | 56 | ```terminal 57 | cortisol logs cost-estimate --host http://127.0.0.1:8080 --users 10 --spawn-rate 5 --run-time 10s --cortisol-file cortisolfile.py --log-file cortisol_app.log 58 | ``` 59 | 60 | You'll get some results after 10 seconds that look like these ones: 61 | 62 | ![Cortisol](/img/getting-started-results.png) 63 | 64 | Before we dive into the results, let's understand what load testing ran in the background in order to receive the latter log cost estimates. 
65 | 66 | - `--host http://127.0.0.1:8080` the FAST API runs at `http://127.0.0.1:8080` 67 | - `--users 10` The peak number of concurrent users is 10 68 | - `--spawn-rate 5` Spawn 5 users per second 69 | - `--run-time 10s` Stop after 10 seconds 70 | - `--cortisol-file cortisolfile.py` path to cortisolfile 71 | - `--log-file cortisol_app.log` path to where logs are saved 72 | 73 | Let's get back to the results. The total log volume per month of running this FAST API with the defined user behaviour in the cortisolfile and in the cortisol command arguments is going to be available at the top of the table. The log costs per observability tool are per month and are explained below: 74 | 75 | - Datadog: 30 day log retention and billed annually 76 | - Grafana: Cloud Pro plan is chosen 77 | - New Relic: Pro plan is chosen 78 | - GCP Cloud Logging: based on the pricing on their website 79 | 80 | Please, note that free tiers have been included to all the costs above 81 | 82 | 83 | ## Your first log cost estimation with Docker 84 | 85 | Let's do the same but run the FAST API in a Docker container. 86 | 87 | 1. Clone this example repo https://github.com/CortisolAI/getting-started-example 88 | 2. `cd getting-started-example` 89 | 3. `make build` to build the Docker image 90 | 4. `make run` to run the container. The printed container ID is important. This command will make the service available at `http://127.0.0.1:8080/` 91 | 92 | On another terminal window: 93 | 94 | 1. Create a virtualenv `mkvirtualenv getting-started-cortisol` 95 | 2. `pip install cortisol` 96 | 97 | You will need to pass the container ID, and just define the log file name. 
No need to pass the entire path to the log file: 98 | 99 | ```terminal 100 | cortisol logs cost-estimate --host http://127.0.0.1:8080 --users 10 --spawn-rate 5 --run-time 10s --cortisol-file cortisolfile.py --container-id d3a45b9e27ca03b52d2fe9d4c7c55f8254829555c96c6b79bc950caaf33719f8 --cortisol-file ./examples/cortisolfile.py --log-file cortisol_app.log 101 | ``` 102 | 103 | And, you'll get some results in 10 seconds. 104 | 105 | Make sure to run `make stop` and `make clean` to stop/delete the Docker image and container. 106 | -------------------------------------------------------------------------------- /tests/cortisollib/test_readers.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from unittest import TestCase 3 | from collections import namedtuple 4 | import os 5 | import docker 6 | from unittest.mock import patch, MagicMock, mock_open 7 | from cortisol.cortisollib.readers import ( 8 | docker_log_file_size_reader, 9 | local_log_file_size_reader, 10 | log_file_size_reader, 11 | local_count_log_entries, 12 | docker_count_log_entries, 13 | count_log_entries, 14 | ) 15 | 16 | ExecResult = namedtuple("ExecResult", "exit_code,output") 17 | 18 | 19 | class TestFileSizeFunctions(TestCase): 20 | @patch("docker.from_env") 21 | def test_docker_log_file_size_reader_success(self, mock_docker): 22 | # Mock the Docker client and container object 23 | mock_container = mock_docker.return_value.containers.get.return_value 24 | mock_stream = MagicMock(spec=docker.types.daemon.CancellableStream) 25 | mock_stream.output = b"123 /app/playground_app.log\n" 26 | mock_stream.__iter__.return_value = [b"123 /app/playground_app.log\n"] 27 | 28 | mock_container.exec_run.return_value = ExecResult( 29 | exit_code=0, 30 | output=mock_stream, # Mock the output as CancellableStream 31 | ) 32 | 33 | # Test file size reading from Docker container 34 | container_id = "test_container_id" 35 | file_path = 
Path("/app/playground_app.log") 36 | result = docker_log_file_size_reader(container_id, file_path) 37 | self.assertEqual(result, 123) 38 | 39 | def test_local_file_size_reader_success(self): 40 | # Test file size reading from the local file system 41 | file_path = Path("test_file.txt") 42 | with open(file_path, "wb") as file: 43 | file.write(b"This is a test file.") 44 | 45 | result = local_log_file_size_reader(file_path) 46 | self.assertEqual(result, 20) 47 | 48 | # Clean up the test file 49 | os.remove(file_path) 50 | 51 | def test_local_file_size_reader_file_not_found(self): 52 | # Test handling of FileNotFoundError for local file reader 53 | file_path = Path("non_existent_file.txt") 54 | with self.assertRaises(FileNotFoundError): 55 | local_log_file_size_reader(file_path) 56 | 57 | @patch("docker.from_env") 58 | def test_log_file_size_reader_with_container_id(self, mock_docker): 59 | # Test get_file_size when container_id is provided 60 | mock_container = mock_docker.return_value.containers.get.return_value 61 | mock_stream = MagicMock(spec=docker.types.daemon.CancellableStream) 62 | mock_stream.output = b"456 /app/playground_app.log\n" 63 | mock_stream.__iter__.return_value = [b"456 /app/playground_app.log\n"] 64 | 65 | mock_container.exec_run.return_value = ExecResult( 66 | exit_code=0, 67 | output=mock_stream, # Mock the output as CancellableStream 68 | ) 69 | 70 | container_id = "test_container_id" 71 | file_path = Path("/app/playground_app.log") 72 | result = log_file_size_reader(file_path, container_id) 73 | self.assertEqual(result, 456) 74 | 75 | def test_log_file_size_reader_without_container_id(self): 76 | # Test get_file_size without container_id (use local_file_size_reader) 77 | file_path = Path("test_file.txt") 78 | with open(file_path, "wb") as file: 79 | file.write(b"This is a test file.") 80 | 81 | result = local_log_file_size_reader(file_path) 82 | self.assertEqual(result, 20) 83 | 84 | # Clean up the test file 85 | os.remove(file_path) 86 | 87 
| 88 | class TestLogEntryCount(TestCase): 89 | def test_local_count_log_entries(self): 90 | with patch("builtins.open", mock_open(read_data="line1\nline2\nline3")): 91 | file_path = Path("test.log") 92 | entry_count = local_count_log_entries(file_path) 93 | self.assertEqual(3e-6, entry_count) 94 | 95 | @patch("docker.from_env") 96 | def test_docker_count_log_entries(self, mock_docker): 97 | mock_container = mock_docker.return_value.containers.get.return_value 98 | mock_container.exec_run.return_value = ExecResult( 99 | exit_code=0, 100 | output=b"line1\nline2\nline3", # Mock the output as CancellableStream 101 | ) 102 | 103 | container_id = "test-container" 104 | file_path = Path("/app/test.log") 105 | entry_count = docker_count_log_entries(container_id, file_path) 106 | self.assertEqual(3e-6, entry_count) 107 | 108 | def test_count_log_entries_local(self): 109 | with patch("builtins.open", mock_open(read_data="line1\nline2\nline3")): 110 | file_path = Path("test.log") 111 | entry_count = count_log_entries(file_path) 112 | self.assertEqual(3e-6, entry_count) 113 | 114 | @patch("docker.from_env") 115 | def test_count_log_entries_docker(self, mock_docker): 116 | mock_container = mock_docker.return_value.containers.get.return_value 117 | mock_container.exec_run.return_value = ExecResult( 118 | exit_code=0, 119 | output=b"line1\nline2\nline3", # Mock the output as CancellableStream 120 | ) 121 | 122 | container_id = "test-container" 123 | file_path = Path("/app/test.log") 124 | entry_count = count_log_entries(file_path, container_id) 125 | self.assertEqual(3e-6, entry_count) 126 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Cortisol](docs/cortisol_h_large.png#gh-light-mode-only) 2 | ![Cortisol](docs/cortisol_h_large_w.png#gh-dark-mode-only) 3 | 4 |

5 | Cortisol — accurately forecast log costs pre-production. 6 | </p>

7 |

8 | 9 | Test 10 | 11 |

12 | 13 | # cortisol 14 | 15 | Cortisol is an open-source command-line tool designed specifically for web services. It offers easy-to-use cost estimation and forecasting capabilities tailored to main observability tools like [Datadog](https://www.datadoghq.com/), [New Relic](https://newrelic.com/), [Grafana](https://grafana.com/) and [GCP Cloud Logging](https://cloud.google.com/logging). Cortisol assists users in planning and optimizing their log costs before deploying their web services. It operates on a foundation inspired by [Locust](https://locust.io/), allowing users to define user behavior using a regular Python script 💰📉. 16 | 17 | For detailed reference to Cortisol commands please go to: [Read the Docs](https://cortisolai.github.io/cortisol/) 18 | 19 | ## Installation 20 | 21 | ### Prerequisites 22 | 23 | Cortisol requires one of the following Python versions: 3.8, 3.9, 3.10 or 3.11 24 | 25 | ### Install cortisol 26 | 27 | At the command line: 28 | 29 | pip install cortisol 30 | 31 | If you have an Apple M1 CPU, we suggest installing using Poetry as a dependency management. Otherwise, the underline gevent library may not work. 32 | 33 | ## Getting started 34 | 35 | First things first! We need a RESTful service and so you'll need to do the following steps: 36 | 37 | 1. Clone this example repo https://github.com/CortisolAI/getting-started-example 38 | 2. `cd getting-started-example` 39 | 3. `mkvirtualenv getting-started-cortisol` 40 | 4. `python -m app.main` which will make the service available at `http://127.0.0.1:8080/` 41 | 42 | And, now, it's time to create your first cortisol file. 
Copy and paste the following in a file named `cortisolfile.py`: 43 | 44 | ```python 45 | from locust import task 46 | 47 | from cortisol.cortisollib.users import CortisolHttpUser 48 | 49 | 50 | class WebsiteUser(CortisolHttpUser): 51 | @task 52 | def my_task(self): 53 | self.client.get("/") 54 | 55 | ``` 56 | 57 | Go to the virtualenv where the cortisol library is installed and run the following command in the terminal. Make sure to change the base path for the `--log-file` argument: 58 | 59 | ```terminal 60 | cortisol logs cost-estimate --host http://127.0.0.1:8080 --users 10 --spawn-rate 5 --run-time 10s --cortisol-file cortisolfile.py --log-file /some/path/getting-started-example/cortisol_app.log 61 | ``` 62 | 63 | ## Commands 64 | 65 | ### Log Cost Estimate 66 | 67 | #### Name 68 | 69 | Forecast log costs 70 | 71 | #### Synopsis 72 | 73 | cortisol logs cost-estimate --host HOST --log-file LOG_FILE --users NUM_USERS --spawn-rate SPAWN_RATE --run-time RUN_TIME -cortisol-file CORTISOL_PYTHON_FILE 74 | 75 | #### Description 76 | 77 | Forecast log costs pre-production with Cortisol for Datadog, New Relic, and Grafana 78 | 79 | ### Example 80 | 81 | cortisol logs cost-estimate --host http://10.20.31.32:8000 --users 10 --spawn-rate 5 --run-time 10s --cortisol-file ./examples/cortisolfile.py --log-file /app/playground_app.log 82 | 83 | #### Required Flags - Option 1 84 | 85 | `-f, --cortisol-file PATH` Path to the CORTISOL_FILE 86 | 87 | `-h, --host TEXT` Host in the following format: http://10.20.31.32 or http://10.20.31.32:8000 88 | 89 | `-l, --log-file PATH` Path to log file 90 | 91 | `-u, --users INTEGER` Peak number of concurrent users 92 | 93 | `-r, --spawn-rate INTEGER` Rate to spawn users at (users per second) 94 | 95 | `-t, --run-time TEXT` Stop after the specified amount of time, e.g. (50, 30s, 200m, 5h, 2h30m, etc.). Default unit in seconds. 
96 | 97 | #### Required Flags - Option 2 98 | 99 | All the latter options plus the following in case your application run in a Docker container: 100 | 101 | `-c, --container-id TEXT` Optional docker container id where your application runs 102 | 103 | ##### Example 104 | cortisol logs cost-estimate --host http://127.0.0.1:8080 --users 100 --spawn-rate 5 --run-time 10s --cortisol-file ./examples/cortisolfile.py --log-file /app/playground_app.log --container-id 1212aa67e530af75b3310e1e5b30261b36844a6748df1d321088c4d48a20ebd0 105 | 106 | 107 | #### Required Flags - Option 3 108 | 109 | `--config PATH` Path to config file (YAML or JSON) containing the long version of flags from option 1 110 | 111 | #### Optional Flags 112 | 113 | `--stats-file PATH` Path where to store the cortisol statistics output as a csv 114 | 115 | Here's a YAML example: 116 | 117 | ```YAML 118 | host: "http://10.20.31.32:8000" 119 | log-file: "/path/to/logfile" 120 | users: 100 121 | spawn-rate: 30 122 | run-time: "20m" 123 | cortisol-file: "some_cortisol_file.py" 124 | stats-file: "cortisol_stats.csv" 125 | ``` 126 | 127 | Here's a YAML example with docker container id: 128 | 129 | ```YAML 130 | host: "http://10.20.31.32:8000" 131 | log-file: "/path/to/logfile" 132 | users: 100 133 | spawn-rate: 30 134 | run-time: "20m" 135 | cortisol-file: "some_cortisol_file.py" 136 | container-id: "80f1bc1e7feb" 137 | stats-file: "cortisol_stats.csv" 138 | ``` 139 | 140 | and a JSON example: 141 | 142 | ```JSON 143 | { 144 | "host": "http://10.20.31.32:8000", 145 | "log_file": "/path/to/logfile", 146 | "users": 100, 147 | "spawn_rate": 30, 148 | "run_time": "20m", 149 | "cortisol_file": "some_cortisol_file.py", 150 | "container_id": "80f1bc1e7feb", 151 | "stats-file": "cortisol_stats.csv" 152 | } 153 | ``` 154 | -------------------------------------------------------------------------------- /docs/static/img/logo.svg: -------------------------------------------------------------------------------- 1 | 
def local_log_file_size_reader(file_path: Path):
    """
    Read the size of a log file from a locally running service.

    Args:
        file_path (Path): Path to the log file.

    Returns:
        int: The size of the log file in bytes.

    Raises:
        FileNotFoundError: If the specified file does not exist.

    Example:
        log_file_path = Path("path/to/log_file.log")
        log_file_size = local_log_file_size_reader(log_file_path)
    """
    # EAFP: ask the OS directly instead of a racy exists() pre-check
    # followed by getsize(); the file could vanish between the two calls.
    try:
        return os.path.getsize(file_path)
    except FileNotFoundError:
        # Preserve the original error-message contract for callers that
        # inspect the text.
        raise FileNotFoundError("Error while accessing file: File not found")
def docker_log_file_size_reader(container_id: str, file_path: Path):
    """
    Read the size of a log file that lives inside a Docker container.

    Runs `du -b <file_path>` inside the container and parses the byte
    count from the command output.

    Args:
        container_id (str): Identifier of the Docker container.
        file_path (Path): Path to the log file within the container.

    Returns:
        int: The size of the log file in bytes.

    Raises:
        Exception: If the Docker client cannot start, the container is
            not found, or the exec call fails.

    Example:
        size = docker_log_file_size_reader("my-container", Path("/app/app.log"))
    """
    try:
        client = docker.from_env()
    except docker.errors.DockerException as e:
        raise Exception("Cannot start docker client") from e

    try:
        target = client.containers.get(container_id)
        result = target.exec_run(
            ["du", "-b", str(file_path)], stdout=True, stderr=True, stream=True
        )
        # The streamed output arrives in chunks; stitch them back together.
        raw_output = b"".join(result.output)
        # `du -b` prints "<bytes>\t<path>"; the first field is the size.
        return int(raw_output.split()[0])

    except docker.errors.NotFound:
        raise Exception("Container not found")

    except docker.errors.APIError as e:
        raise Exception(f"Error while executing command in the container {e}")
def local_count_log_entries(file_path):
    """
    Count the number of log entries in a local log file.

    Counts the lines in the file, streaming it instead of loading the
    whole file into memory.

    Args:
        file_path (Path): Path to the log file.

    Returns:
        float: The number of log entries in the file in millions;
            0 if the file does not exist.

    Example:
        entry_count = local_count_log_entries("path/to/log_file.log")
    """
    try:
        with open(file_path, "r") as file:
            # Stream line-by-line instead of materializing the whole file
            # with readlines(); the resulting count is identical.
            total_lines = sum(1 for _ in file)
        return total_lines / 1000000
    except FileNotFoundError:
        print(f"Log file '{file_path}' not found.")
        return 0
def docker_count_log_entries(container_id, file_path):
    """
    Count the number of log entries in a log file within a Docker container.

    Reads the log file out of the container with `cat` and counts its lines.

    Args:
        container_id (str): Identifier of the Docker container.
        file_path (Path): Path to the log file within the container.

    Returns:
        float: The number of log entries in the file in millions;
            0 if the container is not found.

    Raises:
        Exception: If the Docker client cannot start.

    Example:
        entry_count = docker_count_log_entries("my-container", "/app/log_file.log")
    """
    try:
        docker_client = docker.from_env()
    except docker.errors.DockerException as e:
        raise Exception("Cannot start docker client") from e

    try:
        container = docker_client.containers.get(container_id)
        log_data = container.exec_run(["cat", file_path]).output
        # splitlines() ignores the trailing newline, so the count matches
        # local_count_log_entries(); the previous split("\n") over-counted
        # by one for newline-terminated files.
        log_entries = log_data.decode("utf-8").splitlines()
        return len(log_entries) / 1000000
    except docker.errors.NotFound:
        print(f"Container '{container_id}' not found.")
        return 0
def animation_process(timeout, done):
    """
    Render a terminal spinner with a countdown until *done* is set.

    Prints a braille-style spinner plus the remaining time (MM:SS) on a
    single carriage-returned line, refreshing roughly ten times a second.

    Args:
        timeout: Total expected duration in seconds; used for the countdown.
        done: A threading.Event that stops the animation when set.
    """
    frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
    started_at = time.time()
    while True:
        elapsed = time.time() - started_at
        if done.is_set():
            break
        remaining = max(0.0, timeout - elapsed)
        # divmod on whole seconds gives the same MM:SS as the separate
        # int-division/modulo pair.
        minutes, seconds = divmod(int(remaining), 60)
        frame = frames[int(elapsed * 10) % len(frames)]
        print(
            f"{frame} Hold tight cortisol is estimating your monthly log cost {frame} Remaining time: {minutes:02d}:{seconds:02d}",
            end="\r",
        )
        time.sleep(0.1)
def _get_classes_extending_httpuser(code):
    """
    Find every class in *code* that extends CortisolHttpUser.

    Parses the user's cortisolfile source and collects the names of all
    classes whose base list mentions CortisolHttpUser, either as a bare
    name (`class A(CortisolHttpUser)`) or as a dotted attribute
    (`class A(users.CortisolHttpUser)`) — the latter was previously
    missed because it parses as ast.Attribute, not ast.Name.

    Args:
        code (str): Python source text of the user's cortisolfile.

    Returns:
        str: A bracketed, comma-separated list literal of class names,
            e.g. "[WebsiteUser, ApiUser]", suitable for templating.
    """
    tree = ast.parse(code)

    class_names = []
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            for base in node.bases:
                if isinstance(base, ast.Name) and base.id == "CortisolHttpUser":
                    class_names.append(node.name)
                elif (
                    isinstance(base, ast.Attribute)
                    and base.attr == "CortisolHttpUser"
                ):
                    class_names.append(node.name)

    return "[" + ", ".join(class_names) + "]"
def render_locustfile(cortisol_file: Path):
    """
    Render the Locustfile template with the user's cortisolfile and save it.

    Reads the Jinja2 load-test template shipped with the package, merges
    in the raw source of *cortisol_file* plus the names of its
    CortisolHttpUser subclasses, writes the result next to the template
    as locustfile.py, and returns the rendered text.

    Args:
        cortisol_file (Path): Path to the user's cortisolfile.

    Returns:
        str: The rendered Locustfile content.

    Example:
        rendered = render_locustfile(Path("path/to/cortisolfile.py"))
    """
    template_path = os.path.join(_FILE_DIR_PATH, "./templates/cli_loadtest.py.j2")
    with open(template_path, "r") as template_file:
        template = Template(template_file.read())

    with open(cortisol_file, "r") as user_input_file:
        user_input = user_input_file.read()
    user_classes = _get_classes_extending_httpuser(user_input)

    rendered_content = template.render(
        cortisolfile=user_input, user_classes=user_classes
    )

    output_path = os.path.join(_FILE_DIR_PATH, "./templates/locustfile.py")
    with open(output_path, "w") as merged_file:
        merged_file.write(rendered_content)

    return rendered_content
def render_locust_command(
    host: str,
    log_file: Path,
    num_users: int,
    spawn_rate: int,
    run_time: str,
    container_id: str,
    stats_file: Path = None,
):
    """
    Build the argv list for a headless Locust load-test run.

    Args:
        host (str): Host to load test, e.g. http://10.21.32.33.
        log_file (Path): Path to the log file Locust output refers to.
        num_users (int): Number of concurrent users/clients to simulate.
        spawn_rate (int): The rate at which new users are spawned per second.
        run_time (str): The duration of the load test run (e.g. '10m').
        container_id (str): Identifier for the Docker container, if applicable.
        stats_file (Path): Optional. File path where the cortisol stats
            will be stored as csv.

    Returns:
        List[str]: A list representing the command for running the load test.

    Example:
        command = render_locust_command("http://x", log, 100, 10, "15m", "")
        # Execute the command using subprocess or other method
    """
    locustfile_path = os.path.join(_FILE_DIR_PATH, "./templates/locustfile.py")
    command = [
        "locust",
        "-f", locustfile_path,
        "--headless",
        "--host", host,
        "--users", str(num_users),
        "--spawn-rate", str(spawn_rate),
        "--run-time", str(run_time),
        "--container-id", container_id,
        "--log-file", log_file,
    ]

    # --stats-file is only appended when the caller asked for a csv dump.
    if stats_file:
        command.extend(["--stats-file", stats_file])

    return command
def _parse_run_time_seconds(run_time: str) -> float:
    """
    Convert a locust-style run-time string into seconds.

    Accepts a bare number (seconds — the CLI docs state seconds is the
    default unit), a single-unit value such as "30s", "200m" or "5h",
    and combined forms such as "2h30m". The previous parser rejected
    everything except "Xm" / "Xs" even though the documentation
    advertises "50", "5h" and "2h30m".

    Args:
        run_time (str): The run-time expression to parse.

    Returns:
        float: The total duration in seconds.

    Raises:
        ValueError: If the string is empty or contains an unknown unit.
    """
    text = run_time.strip()
    unit_seconds = {"h": 3600.0, "m": 60.0, "s": 1.0}
    total = 0.0
    digits = ""
    parsed_any = False
    for ch in text:
        if ch.isdigit() or ch == ".":
            digits += ch
        elif ch in unit_seconds and digits:
            total += float(digits) * unit_seconds[ch]
            digits = ""
            parsed_any = True
        else:
            raise ValueError(
                "Invalid runtime format. Use e.g. '50', '30s', '200m', '5h' or '2h30m'."
            )
    if digits:
        # Trailing bare digits default to seconds.
        total += float(digits)
        parsed_any = True
    if not parsed_any:
        raise ValueError(
            "Invalid runtime format. Use e.g. '50', '30s', '200m', '5h' or '2h30m'."
        )
    return total


def get_cost_estimate(
    cortisol_file: Path,
    host: str,
    log_file: Path,
    num_users: int,
    spawn_rate: int,
    run_time: str,
    container_id: str = "",
    stats_file: Path = None,
):
    """
    Calculate the estimated cost of logs.

    Renders the Locustfile from *cortisol_file*, builds the locust
    command, runs it headless while showing a countdown spinner, prints
    the subprocess output, and returns the subprocess return code.

    Args:
        cortisol_file (Path): Path to the cortisolfile with the load test scenario.
        host (str): Host to load test, e.g. http://10.21.32.33.
        log_file (Path): Path to the log file where Locust logs will be stored.
        num_users (int): Number of concurrent users/clients to simulate.
        spawn_rate (int): The rate at which new users are spawned per second.
        run_time (str): The duration of the load test run (e.g. '10m').
        container_id (str): Identifier for the Docker container, if applicable.
        stats_file (Path): Optional. File path where the cortisol stats
            will be stored as csv.

    Returns:
        int: The return code of the subprocess that executed the load test.

    Raises:
        ValueError: If *run_time* cannot be parsed.
        TimeoutError: If the subprocess outlives the run-time budget.
        KeyboardInterrupt: Re-raised after terminating the subprocess.
    """
    render_locustfile(cortisol_file)

    command = render_locust_command(
        host, log_file, num_users, spawn_rate, run_time, container_id, stats_file
    )

    # Validate the run time *before* spawning the subprocess so a bad
    # value no longer leaves an orphaned locust process behind.
    total_seconds = _parse_run_time_seconds(run_time)

    done = threading.Event()
    process = None
    animation_thread = None
    try:
        process = subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
        )

        animation_thread = threading.Thread(
            target=animation_process,
            args=(total_seconds, done),
        )
        animation_thread.start()

        try:
            # Small grace period on top of the configured run time.
            output, error = process.communicate(timeout=total_seconds + 2)
        except subprocess.TimeoutExpired as e:
            process.kill()
            output, error = process.communicate()
            stderr_output = error.strip()
            print(stderr_output)
            raise TimeoutError(stderr_output) from e

        if process.returncode == 0:
            print(output)
        else:
            print(error.strip())

    except KeyboardInterrupt as e:
        if process is not None:
            process.terminate()
            _, error = process.communicate()
            stderr_output = error.strip()
            print(stderr_output)
            raise KeyboardInterrupt(stderr_output) from e
        raise KeyboardInterrupt from e

    finally:
        # Always stop the spinner thread and make sure the subprocess is gone.
        if process and process.poll() is None:
            process.terminate()
        if animation_thread is not None:
            done.set()
            animation_thread.join()

    return process.returncode
def colorize(value, key):
    """
    Wrap *value* in ANSI 24-bit color escape codes chosen by *key*.

    Each known key maps to a fixed brand hex color; the value is returned
    surrounded by the matching truecolor escape sequence and a reset.

    Args:
        value (str): The text to colorize.
        key (str): One of the known stat keys (e.g. "log-volume").

    Returns:
        str: The colorized value with ANSI escape codes.

    Raises:
        KeyError: If *key* is not a known stat key.

    Example:
        print(colorize("Some Text", "log-volume"))
    """
    colors = {
        "log-volume": "#FFFFFF",
        "datadog-cost": "#774aa4",
        "grafana-cost": "#ffa500",
        "new-relic-cost": "#1CE783",
        "gcp-cloud-logging-cost": "#4285F4",
    }
    hex_color = colors[key]
    # Decode "#RRGGBB" into its three channel bytes.
    r, g, b = (int(hex_color[i:i + 2], 16) for i in (1, 3, 5))
    return f"\033[38;2;{r};{g};{b}m{value}\033[0m"
def add_symbol(key, value):
    """
    Format a numeric stat with its unit or currency symbol.

    The value is rounded to two decimals and rendered with "GiB" for
    log volume or a leading "$" for the cost keys.

    Args:
        key (str): One of the known stat keys (e.g. "log-volume").
        value (float): The numeric value to format.

    Returns:
        str: The value with the symbol or unit added.

    Raises:
        KeyError: If *key* is not a known stat key.

    Example:
        print(add_symbol("log-volume", 150))
    """
    templates = {
        "log-volume": "{} GiB",
        "datadog-cost": "${}",
        "grafana-cost": "${}",
        "new-relic-cost": "${}",
        "gcp-cloud-logging-cost": "${}",
    }
    return templates[key].format(round(value, 2))
def on_quit(environment, **kwargs):
    """
    Print the final observability statistics when the load test ends.

    Reads the custom stats accumulated on the runner, prints the request
    count and a formatted results table, and — when a stats file was
    configured — dumps a one-row csv with the run id, timestamp, request
    count, and every log metric.

    Args:
        environment (Environment): The Locust environment object.
        **kwargs: Additional keyword arguments.

    Returns:
        PrettyTable: The table that was printed.
    """
    obs_stats = environment.runner.stats.custom_stats
    print("\n")
    print(f"Cortisol sent {obs_stats['n_requests']} requests to your service")
    print("\n")
    print("Observability Statistics")
    print("*----LOGS----*")
    table = create_results_table(obs_stats)
    print(table)

    stats_path = obs_stats["stats_file"]
    if stats_path:
        log_stats = obs_stats["logs"]
        header = ["run_id", "timestamp", "n_requests", *log_stats.keys()]
        values = [uuid.uuid1(), int(time()), obs_stats["n_requests"], *log_stats.values()]
        with open(stats_path, "w") as f:
            writer = csv.writer(f, delimiter=",")
            writer.writerow(header)
            writer.writerow(values)
    return table


# Module-level accumulator shared between the event hooks below.
stats = {}


def on_init(environment, **kwargs):
    """
    Attach the shared custom-stats dict to the Locust runner.

    Called during environment initialization so the other hooks and the
    quit handler all read and write the same accumulator.

    Args:
        environment (Environment): The Locust environment object.
        **kwargs: Additional keyword arguments.
    """
    environment.runner.stats.custom_stats = stats
def on_request(
    request_type, name, response_time, response_length, exception, context, **kwargs
):
    """
    Update the custom observability stats on every request.

    Measures how much the monitored log file grew (in bytes and in
    entries) since the test started, extrapolates that growth to a
    monthly figure, converts it into per-vendor cost estimates, and
    stores everything in the shared module-level `stats` dict.

    Args:
        request_type (str): Type of the HTTP request (GET, POST, etc.).
        name (str): Name of the request.
        response_time (float): Response time of the request in milliseconds.
        response_length (int): Length of the response in bytes.
        exception (Exception or None): Exception raised during the request, if any.
        context (dict): Must carry "log_file", "container_id", "start_time",
            "initial_log_volume", "initial_log_entries", and "stats_file".
        **kwargs: Additional keyword arguments.

    Returns:
        dict: The updated shared stats accumulator.
    """
    stats.setdefault(
        "logs",
        {
            "log-volume": 0,
            "datadog-cost": 0,
            "grafana-cost": 0,
            "new-relic-cost": 0,
            "gcp-cloud-logging-cost": 0,
        },
    )
    stats.setdefault("n_requests", 0)
    stats.setdefault("stats_file", context["stats_file"])

    log_file = context["log_file"]
    container_id = context["container_id"]

    # Growth since the test began, in bytes and in (millions of) entries.
    delta_size = log_file_size_reader(log_file, container_id) - context["initial_log_volume"]
    delta_entries = count_log_entries(log_file, container_id) - context["initial_log_entries"]
    size_for_extrapolation = format_bytes(delta_size)

    # NOTE(review): time() is in seconds, so the /1000 implies start_time
    # arrives in milliseconds — confirm against the template that fires
    # this event before changing it.
    elapsed = (time() - context["start_time"]) / 1000

    projected_size = linear_extrapolator(size_for_extrapolation, elapsed)
    projected_entries = linear_extrapolator(delta_entries, elapsed) / 30

    logs = stats["logs"]
    logs["log-volume"] = projected_size
    logs["datadog-cost"] = datadog_log_cost_calculator(projected_size, projected_entries)
    logs["grafana-cost"] = grafana_log_cost_calculator(projected_size)
    logs["new-relic-cost"] = new_relic_log_cost_calculator(projected_size)
    logs["gcp-cloud-logging-cost"] = gcp_cloud_logging_log_cost_calculator(projected_size)
    stats["n_requests"] += 1

    return stats
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /docs/static/img/undraw_docusaurus_mountain.svg: -------------------------------------------------------------------------------- 1 | 2 | Easy to Use 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 
172 | -------------------------------------------------------------------------------- /docs/static/img/undraw_docusaurus_react.svg: -------------------------------------------------------------------------------- 1 | 2 | Powered by React 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | --------------------------------------------------------------------------------