├── .github ├── dependabot.yaml ├── images │ ├── banner-dark.svg │ └── banner-light.svg └── workflows │ └── ci.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── examples ├── client_example.py ├── logger_example.py └── structlog_example.py ├── pyproject.toml ├── src └── axiom_py │ ├── __init__.py │ ├── annotations.py │ ├── client.py │ ├── datasets.py │ ├── logging.py │ ├── query │ ├── __init__.py │ ├── aggregation.py │ ├── filter.py │ ├── options.py │ ├── query.py │ └── result.py │ ├── structlog.py │ ├── tokens.py │ ├── users.py │ ├── util.py │ └── version.py ├── tests ├── __init__.py ├── helpers.py ├── test_annotations.py ├── test_client.py ├── test_datasets.py └── test_logger.py └── uv.lock /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | - package-ecosystem: github-actions 8 | directory: / 9 | schedule: 10 | interval: daily 11 | -------------------------------------------------------------------------------- /.github/images/banner-light.svg: -------------------------------------------------------------------------------- (SVG banner markup omitted)
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | tags: 11 | - "v*" 12 | 13 | jobs: 14 | lint: 15 | name: Lint 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | - uses: actions/setup-python@v5 20 | with: 21 | python-version: ${{ matrix.python }} 22 | - name: Install uv 23 | uses: astral-sh/setup-uv@v3 24 | - run: uv run ruff check 25 | - run: uv run ruff format --check 26 | 27 | test-integration: 28 | name: Test Integration 29 | runs-on: ubuntu-latest 30 | # run integration tests on PRs originating in the upstream repo (non-forks only) 31 | if: github.repository == 'axiomhq/axiom-py' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) 32 | needs: lint 33 | strategy: 34 | fail-fast: true 35 | matrix: 36 | python: ["3.8", "3.9", "3.10", "3.11", "3.12"] 37 | steps: 38 | - uses: actions/checkout@v4 39 | - uses: actions/setup-python@v5 40 | with: 41 | python-version: ${{ matrix.python }} 42 | - name: Install uv 43 | uses: astral-sh/setup-uv@v5 44 | - name: Test against development 45 | run: uv run pytest 46 | env: 47 | AXIOM_URL: ${{ secrets.TESTING_DEV_API_URL }} 48 | AXIOM_TOKEN: ${{ secrets.TESTING_DEV_TOKEN }} 49 | AXIOM_ORG_ID: ${{ secrets.TESTING_DEV_ORG_ID }} 50 | - name: Test against staging 51 | run: uv run pytest 52 | env: 53 | AXIOM_URL: ${{ secrets.TESTING_STAGING_API_URL }} 54 | AXIOM_TOKEN: ${{ secrets.TESTING_STAGING_TOKEN }} 55 | AXIOM_ORG_ID: ${{ secrets.TESTING_STAGING_ORG_ID }} 56 | 57 | publish: 58 | name: Publish on PyPi 59 | runs-on: ubuntu-latest 60 | if: github.repository == 'axiomhq/axiom-py' && startsWith(github.ref, 'refs/tags') 61 | needs: 62 | - test-integration 63 | steps: 64 | - uses: actions/checkout@v4 65 | - name: Install uv 66 | uses: astral-sh/setup-uv@v5 67 | - uses: actions/setup-python@v5 68 | with: 69 | python-version-file: "pyproject.toml" 70 | - run: uv build 71 | - run: uvx twine upload dist/* 72 | env: 73 | TWINE_USERNAME: "__token__" 74 | TWINE_PASSWORD: "${{ secrets.PYPI_TOKEN }}" 75 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # Poetry lockfiles 132 | poetry.lock 133 | 134 | # VSCode 135 | .vscode/ 136 | 137 | # JetBrains 138 | .idea/ 139 | 140 | # Direnv 141 | /.envrc 142 | /.direnv 143 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: local 5 | hooks: 6 | - id: ruff-check 7 | name: ruff check --fix 8 | entry: uv run ruff check --fix 9 | language: system 10 | types: [python] 11 | - id: ruff-format 12 | name: ruff format 13 | entry: uv run ruff format 14 | language: system 15 | types: [python] 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Axiom, Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > [!WARNING] 2 | > Version [v0.9.0](https://github.com/axiomhq/axiom-py/releases/tag/v0.9.0) removes the aggregation operation enum, see [#158](https://github.com/axiomhq/axiom-py/pull/158). 3 | 4 | # axiom-py [![CI][ci_badge]][ci] [![PyPI version][pypi_badge]][pypi] [![Python version][version_badge]][pypi] 5 | 6 | ```py 7 | import axiom_py 8 | 9 | client = axiom_py.Client() 10 | 11 | client.ingest_events(dataset="DATASET_NAME", events=[{"foo": "bar"}, {"bar": "baz"}]) 12 | client.query(r"['DATASET_NAME'] | where foo == 'bar' | limit 100") 13 | ``` 14 | 15 | ## Install 16 | 17 | ```sh 18 | pip install axiom-py 19 | ``` 20 | 21 | ## Documentation 22 | 23 | Read documentation on [axiom.co/docs/guides/python](https://axiom.co/docs/guides/python). 24 | 25 | ## License 26 | 27 | [MIT](./LICENSE) 28 | 29 | 30 | 31 | [ci]: https://github.com/axiomhq/axiom-py/actions/workflows/ci.yml 32 | [ci_badge]: https://img.shields.io/github/actions/workflow/status/axiomhq/axiom-py/ci.yml?branch=main&ghcache=unused 33 | [pypi]: https://pypi.org/project/axiom-py/ 34 | [pypi_badge]: https://img.shields.io/pypi/v/axiom-py.svg 35 | [version_badge]: https://img.shields.io/pypi/pyversions/axiom-py.svg 36 | -------------------------------------------------------------------------------- /examples/client_example.py: -------------------------------------------------------------------------------- 1 | from axiom_py import Client 2 | 3 | 4 | def main(): 5 | client = Client() 6 | dataset_name = "my-dataset" 7 | 8 | # Get current user 9 | print(client.users.current()) 10 | 11 | # List datasets 12 | res = client.datasets.get_list() 13 | for dataset in res: 14 | print(dataset.name) 15 | 16 | # Create a dataset 17 | client.datasets.create(dataset_name, "A description.") 18 | 19 | # Ingest events 20 | client.ingest_events(dataset_name, [{"foo": "bar"}]) 21 | 22 | # Query events 23 | res = client.query(f"['{dataset_name}'] | where status == 500") 24 | for match in res.matches: 25 | print(match.data) 26 | 27 | # Delete the dataset 28 | client.datasets.delete(dataset_name) 29 | 30 | 31 | if __name__ == "__main__": 32 | main() 33 | -------------------------------------------------------------------------------- /examples/logger_example.py: -------------------------------------------------------------------------------- 1 | import axiom_py 2 | from axiom_py.logging import AxiomHandler 3 | import logging 4 | 5 | 6 | def main(): 7 | # Add Axiom handler to root logger 8 | client = axiom_py.Client() 9 | handler = AxiomHandler(client, "my-dataset") 10 | logging.getLogger().addHandler(handler) 11 | 12 | # Get logger and log something 13 | logger = logging.getLogger(__name__) 14 | logger.setLevel(logging.INFO) 15 | logger.info("Hello world") 16 | 17 | 18 | if __name__ == "__main__": 19 | main() 20 | 
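The examples above cover ingest and an unbounded APL query; a time-bounded, tabular query goes through `AplOptions`. A minimal sketch — the dataset name `my-dataset` and the one-hour window are assumptions, not part of the repository's examples:

```py
from datetime import datetime, timedelta, timezone

from axiom_py import AplOptions, AplResultFormat, Client

client = Client()

# Query the last hour and ask for the tabular result format.
opts = AplOptions(
    start_time=datetime.now(timezone.utc) - timedelta(hours=1),
    end_time=datetime.now(timezone.utc),
    format=AplResultFormat.Tabular,
)
res = client.query("['my-dataset'] | where foo == 'bar'", opts)

# Tabular results come back as tables; events() iterates the rows.
for table in res.tables or []:
    for event in table.events():
        print(event)
```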
-------------------------------------------------------------------------------- /examples/structlog_example.py: -------------------------------------------------------------------------------- 1 | from axiom_py import Client 2 | from axiom_py.structlog import AxiomProcessor 3 | import structlog 4 | 5 | 6 | def main(): 7 | client = Client() 8 | 9 | structlog.configure( 10 | processors=[ 11 | structlog.contextvars.merge_contextvars, 12 | structlog.processors.add_log_level, 13 | structlog.processors.StackInfoRenderer(), 14 | structlog.dev.set_exc_info, 15 | structlog.processors.TimeStamper(fmt="iso", key="_time"), 16 | AxiomProcessor(client, "my-dataset"), 17 | structlog.dev.ConsoleRenderer(), 18 | ] 19 | ) 20 | 21 | log = structlog.get_logger() 22 | log.info("hello", who="world") 23 | 24 | 25 | if __name__ == "__main__": 26 | main() 27 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "axiom-py" 3 | version = "0.9.0" 4 | description = "Official bindings for the Axiom API" 5 | readme = "README.md" 6 | requires-python = ">=3.8" 7 | dependencies = [ 8 | "iso8601>=1.0.2", 9 | "requests>=2.32.3", 10 | "requests-toolbelt>=1.0.0", 11 | "ujson>=5.10.0", 12 | "dacite>=1.8.1", 13 | "pyhumps>=3.8.0", 14 | "ndjson>=0.3.1", 15 | ] 16 | license = { file = "LICENSE" } 17 | classifiers = [ 18 | "Development Status :: 4 - Beta", 19 | "Intended Audience :: Developers", 20 | "License :: OSI Approved :: MIT License", 21 | "Topic :: System :: Logging", 22 | 23 | "Programming Language :: Python :: 3.8", 24 | "Programming Language :: Python :: 3.9", 25 | "Programming Language :: Python :: 3.10", 26 | "Programming Language :: Python :: 3.11", 27 | "Programming Language :: Python :: 3.12", 28 | ] 29 | 30 | [project.urls] 31 | Homepage = "https://axiom.co" 32 | Repository = "https://github.com/axiomhq/axiom-py.git" 33 | Issues = "https://github.com/axiomhq/axiom-py/issues" 34 | 35 | [build-system] 36 | requires = ["hatchling"] 37 | build-backend = "hatchling.build" 38 | 39 | [tool.ruff] 40 | line-length = 79 # PEP 8 41 | 42 | [tool.uv] 43 | dev-dependencies = [ 44 | "ruff>=0.6.4", 45 | "pytest>=8.3.2", 46 | "responses>=0.25.3", 47 | "rfc3339>=6.2", 48 | "iso8601>=1.0.2", 49 | "pre-commit>=3.5.0", 50 | ] 51 | -------------------------------------------------------------------------------- /src/axiom_py/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Axiom Python Client 3 | """ 4 | 5 | from .client import ( 6 | AxiomError, 7 | IngestFailure, 8 | IngestStatus, 9 | IngestOptions, 10 | AplResultFormat, 11 | ContentType, 12 | ContentEncoding, 13 | WrongQueryKindException, 14 | AplOptions, 15 | Client, 16 | ) 17 | from .datasets import ( 18 | Dataset, 19 | TrimRequest, 20 | DatasetsClient, 21 | ) 22 | from .annotations import ( 23 | Annotation, 24 | AnnotationCreateRequest, 25 | AnnotationUpdateRequest, 26 | AnnotationsClient, 27 | ) 28 | 29 | _all_ = [ 30 | AxiomError, 31 | IngestFailure, 32 | IngestStatus, 33 | IngestOptions, 34 | AplResultFormat, 35 | ContentType, 36 | ContentEncoding, 37 | WrongQueryKindException, 38 | AplOptions, 39 | Client, 40 | Dataset, 41 | TrimRequest, 42 | DatasetsClient, 43 | Annotation, 44 | AnnotationCreateRequest, 45 | AnnotationUpdateRequest, 46 | AnnotationsClient, 47 | ] 48 | -------------------------------------------------------------------------------- 
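Everything in this export list can be imported straight from the package root. One common use is catching `AxiomError`, which the client's response hook raises for any 4xx/5xx response (see `client.py` further down). A small sketch, with a made-up dataset name:

```py
from axiom_py import AxiomError, Client

client = Client()
try:
    client.datasets.get("does-not-exist")
except AxiomError as e:
    # status is the HTTP status code, message the server-side error text.
    print(f"request failed ({e.status}): {e.message}")
```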
/src/axiom_py/annotations.py: -------------------------------------------------------------------------------- 1 | """This package provides annotation models and methods as well as an AnnotationsClient""" 2 | 3 | import ujson 4 | from requests import Session 5 | from typing import List, Optional 6 | from dataclasses import dataclass, asdict, field 7 | from datetime import datetime 8 | from urllib.parse import urlencode 9 | from .util import from_dict 10 | 11 | 12 | @dataclass 13 | class Annotation: 14 | """Represents an Axiom annotation""" 15 | 16 | id: str = field(init=False) 17 | datasets: List[str] 18 | time: datetime 19 | endTime: Optional[datetime] 20 | title: Optional[str] 21 | description: Optional[str] 22 | url: Optional[str] 23 | type: str 24 | 25 | 26 | @dataclass 27 | class AnnotationCreateRequest: 28 | """Request used to create an annotation""" 29 | 30 | datasets: List[str] 31 | time: Optional[datetime] 32 | endTime: Optional[datetime] 33 | title: Optional[str] 34 | description: Optional[str] 35 | url: Optional[str] 36 | type: str 37 | 38 | 39 | @dataclass 40 | class AnnotationUpdateRequest: 41 | """Request used to update an annotation""" 42 | 43 | datasets: Optional[List[str]] 44 | time: Optional[datetime] 45 | endTime: Optional[datetime] 46 | title: Optional[str] 47 | description: Optional[str] 48 | url: Optional[str] 49 | type: Optional[str] 50 | 51 | 52 | class AnnotationsClient: # pylint: disable=R0903 53 | """AnnotationsClient has methods to manipulate annotations.""" 54 | 55 | session: Session 56 | 57 | def __init__(self, session: Session): 58 | self.session = session 59 | 60 | def get(self, id: str) -> Annotation: 61 | """ 62 | Get a annotation by id. 63 | 64 | See https://axiom.co/docs/restapi/endpoints/getAnnotation 65 | """ 66 | path = "/v2/annotations/%s" % id 67 | res = self.session.get(path) 68 | decoded_response = res.json() 69 | return from_dict(Annotation, decoded_response) 70 | 71 | def create(self, req: AnnotationCreateRequest) -> Annotation: 72 | """ 73 | Create an annotation with the given properties. 74 | 75 | See https://axiom.co/docs/restapi/endpoints/createAnnotation 76 | """ 77 | path = "/v2/annotations" 78 | res = self.session.post(path, data=ujson.dumps(asdict(req))) 79 | annotation = from_dict(Annotation, res.json()) 80 | return annotation 81 | 82 | def list( 83 | self, 84 | datasets: List[str] = [], 85 | start: Optional[datetime] = None, 86 | end: Optional[datetime] = None, 87 | ) -> List[Annotation]: 88 | """ 89 | List all annotations. 90 | 91 | See https://axiom.co/docs/restapi/endpoints/getAnnotations 92 | """ 93 | query_params = {} 94 | if len(datasets) > 0: 95 | query_params["datasets"] = ",".join(datasets) 96 | if start is not None: 97 | query_params["start"] = start.isoformat() 98 | if end is not None: 99 | query_params["end"] = end.isoformat() 100 | path = f"/v2/annotations?{urlencode(query_params, doseq=True)}" 101 | 102 | res = self.session.get(path) 103 | 104 | annotations = [] 105 | for record in res.json(): 106 | ds = from_dict(Annotation, record) 107 | annotations.append(ds) 108 | 109 | return annotations 110 | 111 | def update(self, id: str, req: AnnotationUpdateRequest) -> Annotation: 112 | """ 113 | Update an annotation with the given properties. 
114 | 115 | See https://axiom.co/docs/restapi/endpoints/updateAnnotation 116 | """ 117 | path = "/v2/annotations/%s" % id 118 | res = self.session.put(path, data=ujson.dumps(asdict(req))) 119 | annotation = from_dict(Annotation, res.json()) 120 | return annotation 121 | 122 | def delete(self, id: str): 123 | """ 124 | Deletes an annotation with the given id. 125 | 126 | See https://axiom.co/docs/restapi/endpoints/deleteAnnotation 127 | """ 128 | path = "/v2/annotations/%s" % id 129 | self.session.delete(path) 130 | -------------------------------------------------------------------------------- /src/axiom_py/client.py: -------------------------------------------------------------------------------- 1 | """Client provides an easy-to use client library to connect to Axiom.""" 2 | 3 | import ndjson 4 | import atexit 5 | import gzip 6 | import ujson 7 | import os 8 | 9 | from enum import Enum 10 | from humps import decamelize 11 | from typing import Optional, List, Dict, Callable 12 | from dataclasses import dataclass, field, asdict 13 | from datetime import datetime 14 | from requests_toolbelt.sessions import BaseUrlSession 15 | from requests.adapters import HTTPAdapter, Retry 16 | from .datasets import DatasetsClient 17 | from .query import ( 18 | QueryLegacy, 19 | QueryResult, 20 | QueryOptions, 21 | QueryLegacyResult, 22 | QueryKind, 23 | ) 24 | from .annotations import AnnotationsClient 25 | from .users import UsersClient 26 | from .version import __version__ 27 | from .util import from_dict, handle_json_serialization, is_personal_token 28 | from .tokens import TokensClient 29 | 30 | 31 | AXIOM_URL = "https://api.axiom.co" 32 | 33 | 34 | @dataclass 35 | class IngestFailure: 36 | """The ingestion failure of a single event""" 37 | 38 | timestamp: datetime 39 | error: str 40 | 41 | 42 | @dataclass 43 | class IngestStatus: 44 | """The status after an event ingestion operation""" 45 | 46 | ingested: int 47 | failed: int 48 | failures: List[IngestFailure] 49 | processed_bytes: int 50 | blocks_created: int 51 | wal_length: int 52 | 53 | 54 | @dataclass 55 | class IngestOptions: 56 | """IngestOptions specifies the optional parameters for the Ingest and 57 | IngestEvents method of the Datasets service.""" 58 | 59 | # timestamp field defines a custom field to extract the ingestion timestamp 60 | # from. Defaults to `_time`. 61 | timestamp_field: str = field(default="_time") 62 | # timestamp format defines a custom format for the TimestampField. 63 | # The reference time is `Mon Jan 2 15:04:05 -0700 MST 2006`, as specified 64 | # in https://pkg.go.dev/time/?tab=doc#Parse. 65 | timestamp_format: Optional[str] = field(default=None) 66 | # CSV delimiter is the delimiter that separates CSV fields. Only valid when 67 | # the content to be ingested is CSV formatted. 
68 | CSV_delimiter: Optional[str] = field(default=None) 69 | 70 | 71 | class AplResultFormat(Enum): 72 | """The result format of an APL query.""" 73 | 74 | Legacy = "legacy" 75 | Tabular = "tabular" 76 | 77 | 78 | class ContentType(Enum): 79 | """ContentType describes the content type of the data to ingest.""" 80 | 81 | JSON = "application/json" 82 | NDJSON = "application/x-ndjson" 83 | CSV = "text/csv" 84 | 85 | 86 | class ContentEncoding(Enum): 87 | """ContentEncoding describes the content encoding of the data to ingest.""" 88 | 89 | IDENTITY = "1" 90 | GZIP = "gzip" 91 | 92 | 93 | class WrongQueryKindException(Exception): 94 | pass 95 | 96 | 97 | @dataclass 98 | class AplOptions: 99 | """AplOptions specifies the optional parameters for the apl query method.""" 100 | 101 | # Start time for the interval to query. 102 | start_time: Optional[datetime] = field(default=None) 103 | # End time for the interval to query. 104 | end_time: Optional[datetime] = field(default=None) 105 | # The result format. 106 | format: AplResultFormat = field(default=AplResultFormat.Legacy) 107 | # Cursor is the query cursor. It should be set to the Cursor returned with 108 | # a previous query result if it was partial. 109 | cursor: Optional[str] = field(default=None) 110 | # IncludeCursor will return the Cursor as part of the query result, if set 111 | # to true. 112 | includeCursor: bool = field(default=False) 113 | # The query limit. 114 | limit: Optional[int] = field(default=None) 115 | 116 | 117 | class AxiomError(Exception): 118 | """This exception is raised on request errors.""" 119 | 120 | status: int 121 | message: str 122 | 123 | @dataclass 124 | class Response: 125 | message: str 126 | error: Optional[str] 127 | 128 | def __init__(self, status: int, res: Response): 129 | message = res.error if res.error is not None else res.message 130 | super().__init__(f"API error {status}: {message}") 131 | 132 | self.status = status 133 | self.message = message 134 | 135 | 136 | def raise_response_error(res): 137 | if res.status_code >= 400: 138 | try: 139 | error_res = from_dict(AxiomError.Response, res.json()) 140 | except Exception: 141 | # Response is not in the Axiom JSON format, create generic error 142 | # message 143 | error_res = AxiomError.Response(message=res.reason, error=None) 144 | 145 | raise AxiomError(res.status_code, error_res) 146 | 147 | 148 | class Client: # pylint: disable=R0903 149 | """The client class allows you to connect to Axiom.""" 150 | 151 | datasets: DatasetsClient 152 | users: UsersClient 153 | annotations: AnnotationsClient 154 | tokens: TokensClient 155 | is_closed: bool = False # track if the client has been closed (for tests) 156 | before_shutdown_funcs: List[Callable] = [] 157 | 158 | def __init__( 159 | self, 160 | token: Optional[str] = None, 161 | org_id: Optional[str] = None, 162 | url_base: Optional[str] = None, 163 | ): 164 | # fallback to env variables if token, org_id or url are not provided 165 | if token is None: 166 | token = os.getenv("AXIOM_TOKEN") 167 | if org_id is None: 168 | org_id = os.getenv("AXIOM_ORG_ID") 169 | if url_base is None: 170 | url_base = AXIOM_URL 171 | 172 | # set exponential retries 173 | retries = Retry( 174 | total=3, backoff_factor=2, status_forcelist=[500, 502, 503, 504] 175 | ) 176 | 177 | self.session = BaseUrlSession(url_base.rstrip("/")) 178 | self.session.mount("http://", HTTPAdapter(max_retries=retries)) 179 | self.session.mount("https://", HTTPAdapter(max_retries=retries)) 180 | # hook on responses, raise error when response is not 
successfull 181 | self.session.hooks = { 182 | "response": lambda r, *args, **kwargs: raise_response_error(r) 183 | } 184 | self.session.headers.update( 185 | { 186 | "Authorization": "Bearer %s" % token, 187 | # set a default Content-Type header, can be overriden by requests. 188 | "Content-Type": "application/json", 189 | "User-Agent": f"axiom-py/{__version__}", 190 | } 191 | ) 192 | 193 | # if there is an organization id passed, 194 | # set it in the header 195 | if org_id: 196 | self.session.headers.update({"X-Axiom-Org-Id": org_id}) 197 | 198 | self.datasets = DatasetsClient(self.session) 199 | self.users = UsersClient(self.session, is_personal_token(token)) 200 | self.annotations = AnnotationsClient(self.session) 201 | self.tokens = TokensClient(self.session) 202 | 203 | # wrap shutdown hook in a lambda passing in self as a ref 204 | atexit.register(self.shutdown_hook) 205 | 206 | def before_shutdown(self, func: Callable): 207 | self.before_shutdown_funcs.append(func) 208 | 209 | def shutdown_hook(self): 210 | for func in self.before_shutdown_funcs: 211 | func() 212 | self.session.close() 213 | self.is_closed = True 214 | 215 | def ingest( 216 | self, 217 | dataset: str, 218 | payload: bytes, 219 | contentType: ContentType, 220 | enc: ContentEncoding, 221 | opts: Optional[IngestOptions] = None, 222 | ) -> IngestStatus: 223 | """ 224 | Ingest the payload into the named dataset and returns the status. 225 | 226 | See https://axiom.co/docs/restapi/endpoints/ingestIntoDataset 227 | """ 228 | path = "/v1/datasets/%s/ingest" % dataset 229 | 230 | # set headers 231 | headers = { 232 | "Content-Type": contentType.value, 233 | "Content-Encoding": enc.value, 234 | } 235 | # prepare query params 236 | params = self._prepare_ingest_options(opts) 237 | 238 | # override the default header and set the value from the passed parameter 239 | res = self.session.post( 240 | path, data=payload, headers=headers, params=params 241 | ) 242 | status_snake = decamelize(res.json()) 243 | return from_dict(IngestStatus, status_snake) 244 | 245 | def ingest_events( 246 | self, 247 | dataset: str, 248 | events: List[dict], 249 | opts: Optional[IngestOptions] = None, 250 | ) -> IngestStatus: 251 | """ 252 | Ingest the events into the named dataset and returns the status. 253 | 254 | See https://axiom.co/docs/restapi/endpoints/ingestIntoDataset 255 | """ 256 | # encode request payload to NDJSON 257 | content = ndjson.dumps( 258 | events, default=handle_json_serialization 259 | ).encode("UTF-8") 260 | gzipped = gzip.compress(content) 261 | 262 | return self.ingest( 263 | dataset, gzipped, ContentType.NDJSON, ContentEncoding.GZIP, opts 264 | ) 265 | 266 | def query_legacy( 267 | self, id: str, query: QueryLegacy, opts: QueryOptions 268 | ) -> QueryLegacyResult: 269 | """ 270 | Executes the given structured query on the dataset identified by its id. 
271 | 272 | See https://axiom.co/docs/restapi/endpoints/queryDataset 273 | """ 274 | if not opts.saveAsKind or (opts.saveAsKind == QueryKind.APL): 275 | raise WrongQueryKindException( 276 | "invalid query kind %s: must be %s or %s" 277 | % (opts.saveAsKind, QueryKind.ANALYTICS, QueryKind.STREAM) 278 | ) 279 | 280 | path = "/v1/datasets/%s/query" % id 281 | payload = ujson.dumps(asdict(query), default=handle_json_serialization) 282 | params = self._prepare_query_options(opts) 283 | res = self.session.post(path, data=payload, params=params) 284 | result = from_dict(QueryLegacyResult, res.json()) 285 | query_id = res.headers.get("X-Axiom-History-Query-Id") 286 | result.savedQueryID = query_id 287 | return result 288 | 289 | def apl_query( 290 | self, apl: str, opts: Optional[AplOptions] = None 291 | ) -> QueryResult: 292 | """ 293 | Executes the given apl query on the dataset identified by its id. 294 | 295 | See https://axiom.co/docs/restapi/endpoints/queryApl 296 | """ 297 | return self.query(apl, opts) 298 | 299 | def query( 300 | self, apl: str, opts: Optional[AplOptions] = None 301 | ) -> QueryResult: 302 | """ 303 | Executes the given apl query on the dataset identified by its id. 304 | 305 | See https://axiom.co/docs/restapi/endpoints/queryApl 306 | """ 307 | path = "/v1/datasets/_apl" 308 | payload = ujson.dumps( 309 | self._prepare_apl_payload(apl, opts), 310 | default=handle_json_serialization, 311 | ) 312 | params = self._prepare_apl_options(opts) 313 | res = self.session.post(path, data=payload, params=params) 314 | result = from_dict(QueryResult, res.json()) 315 | query_id = res.headers.get("X-Axiom-History-Query-Id") 316 | result.savedQueryID = query_id 317 | 318 | return result 319 | 320 | def _prepare_query_options(self, opts: QueryOptions) -> Dict[str, object]: 321 | """returns the query options as a Dict, handles any renaming for key fields.""" 322 | if opts is None: 323 | return {} 324 | params = {} 325 | if opts.streamingDuration: 326 | params["streaming-duration"] = ( 327 | opts.streamingDuration.seconds.__str__() + "s" 328 | ) 329 | if opts.saveAsKind: 330 | params["saveAsKind"] = opts.saveAsKind.value 331 | 332 | params["nocache"] = opts.nocache.__str__() 333 | 334 | return params 335 | 336 | def _prepare_ingest_options( 337 | self, opts: Optional[IngestOptions] 338 | ) -> Dict[str, object]: 339 | """the query params for ingest api are expected in a format 340 | that couldn't be defined as a variable name because it has a dash. 
341 | As a work around, we create the params dict manually.""" 342 | 343 | if opts is None: 344 | return {} 345 | 346 | params = {} 347 | if opts.timestamp_field: 348 | params["timestamp-field"] = opts.timestamp_field 349 | if opts.timestamp_format: 350 | params["timestamp-format"] = opts.timestamp_format 351 | if opts.CSV_delimiter: 352 | params["csv-delimiter"] = opts.CSV_delimiter 353 | 354 | return params 355 | 356 | def _prepare_apl_options( 357 | self, opts: Optional[AplOptions] 358 | ) -> Dict[str, object]: 359 | """Prepare the apl query options for the request.""" 360 | params: Dict[str, object] = {"format": AplResultFormat.Legacy.value} 361 | 362 | if opts is not None: 363 | if opts.format: 364 | params["format"] = opts.format.value 365 | if opts.limit is not None: 366 | params["request"] = {"limit": opts.limit} 367 | 368 | return params 369 | 370 | def _prepare_apl_payload( 371 | self, apl: str, opts: Optional[AplOptions] 372 | ) -> Dict[str, object]: 373 | """Prepare the apl query options for the request.""" 374 | params = {} 375 | params["apl"] = apl 376 | 377 | if opts is not None: 378 | if opts.start_time is not None: 379 | params["startTime"] = opts.start_time 380 | if opts.end_time is not None: 381 | params["endTime"] = opts.end_time 382 | if opts.cursor is not None: 383 | params["cursor"] = opts.cursor 384 | if opts.includeCursor: 385 | params["includeCursor"] = opts.includeCursor 386 | 387 | return params 388 | -------------------------------------------------------------------------------- /src/axiom_py/datasets.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package provides dataset models and methods as well as a DatasetClient 3 | """ 4 | 5 | import ujson 6 | from requests import Session 7 | from typing import List 8 | from dataclasses import dataclass, asdict, field 9 | from datetime import timedelta 10 | from .util import from_dict 11 | 12 | 13 | @dataclass 14 | class Dataset: 15 | """Represents an Axiom dataset""" 16 | 17 | id: str = field(init=False) 18 | name: str 19 | description: str 20 | who: str 21 | created: str 22 | 23 | 24 | @dataclass 25 | class DatasetCreateRequest: 26 | """Request used to create a dataset""" 27 | 28 | name: str 29 | description: str 30 | 31 | 32 | @dataclass 33 | class DatasetUpdateRequest: 34 | """Request used to update a dataset""" 35 | 36 | description: str 37 | 38 | 39 | @dataclass 40 | class TrimRequest: 41 | """ 42 | MaxDuration marks the oldest timestamp an event can have before getting 43 | deleted. 44 | """ 45 | 46 | maxDuration: str 47 | 48 | 49 | class DatasetsClient: # pylint: disable=R0903 50 | """DatasetsClient has methods to manipulate datasets.""" 51 | 52 | session: Session 53 | 54 | def __init__(self, session: Session): 55 | self.session = session 56 | 57 | def get(self, id: str) -> Dataset: 58 | """ 59 | Get a dataset by id. 60 | 61 | See https://axiom.co/docs/restapi/endpoints/getDataset 62 | """ 63 | path = "/v1/datasets/%s" % id 64 | res = self.session.get(path) 65 | decoded_response = res.json() 66 | return from_dict(Dataset, decoded_response) 67 | 68 | def create(self, name: str, description: str = "") -> Dataset: 69 | """ 70 | Create a dataset with the given properties. 
71 | 72 | See https://axiom.co/docs/restapi/endpoints/createDataset 73 | """ 74 | path = "/v1/datasets" 75 | res = self.session.post( 76 | path, 77 | data=ujson.dumps( 78 | asdict( 79 | DatasetCreateRequest( 80 | name=name, 81 | description=description, 82 | ) 83 | ) 84 | ), 85 | ) 86 | ds = from_dict(Dataset, res.json()) 87 | return ds 88 | 89 | def get_list(self) -> List[Dataset]: 90 | """ 91 | List all available datasets. 92 | 93 | See https://axiom.co/docs/restapi/endpoints/getDatasets 94 | """ 95 | path = "/v1/datasets" 96 | res = self.session.get(path) 97 | 98 | datasets = [] 99 | for record in res.json(): 100 | ds = from_dict(Dataset, record) 101 | datasets.append(ds) 102 | 103 | return datasets 104 | 105 | def update(self, id: str, new_description: str) -> Dataset: 106 | """ 107 | Update a dataset with the given properties. 108 | 109 | See https://axiom.co/docs/restapi/endpoints/updateDataset 110 | """ 111 | path = "/v1/datasets/%s" % id 112 | res = self.session.put( 113 | path, 114 | data=ujson.dumps( 115 | asdict( 116 | DatasetUpdateRequest( 117 | description=new_description, 118 | ) 119 | ) 120 | ), 121 | ) 122 | ds = from_dict(Dataset, res.json()) 123 | return ds 124 | 125 | def delete(self, id: str): 126 | """ 127 | Deletes a dataset with the given id. 128 | 129 | See https://axiom.co/docs/restapi/endpoints/deleteDataset 130 | """ 131 | path = "/v1/datasets/%s" % id 132 | self.session.delete(path) 133 | 134 | def trim(self, id: str, maxDuration: timedelta): 135 | """ 136 | Trim the dataset identified by its id to a given length. The max 137 | duration given will mark the oldest timestamp an event can have. 138 | Older ones will be deleted from the dataset. 139 | 140 | See https://axiom.co/docs/restapi/endpoints/trimDataset 141 | """ 142 | path = "/v1/datasets/%s/trim" % id 143 | # prepare request payload and format masDuration to append time unit at 144 | # the end, e.g `1s` 145 | req = TrimRequest(f"{maxDuration.seconds}s") 146 | self.session.post(path, data=ujson.dumps(asdict(req))) 147 | -------------------------------------------------------------------------------- /src/axiom_py/logging.py: -------------------------------------------------------------------------------- 1 | """Logging contains the AxiomHandler and related methods to do with logging.""" 2 | 3 | from threading import Timer 4 | from logging import Handler, NOTSET, getLogger, WARNING 5 | import time 6 | 7 | from .client import Client 8 | 9 | 10 | class AxiomHandler(Handler): 11 | """A logging handler that sends logs to Axiom.""" 12 | 13 | client: Client 14 | dataset: str 15 | buffer: list 16 | interval: int 17 | last_flush: float 18 | timer: Timer 19 | 20 | def __init__(self, client: Client, dataset: str, level=NOTSET, interval=1): 21 | super().__init__() 22 | # Set urllib3 logging level to warning, check: 23 | # https://github.com/axiomhq/axiom-py/issues/23 24 | # This is a temp solution that would stop requests library from 25 | # flooding the logs with debug messages 26 | getLogger("urllib3").setLevel(WARNING) 27 | self.client = client 28 | self.dataset = dataset 29 | self.buffer = [] 30 | self.interval = interval 31 | self.last_flush = time.monotonic() 32 | 33 | # We use a threading.Timer to make sure we flush every second, even 34 | # if no more logs are emitted. 
35 | self.timer = Timer(self.interval, self.flush) 36 | 37 | # Make sure we flush before the client shuts down 38 | def before_shutdown(): 39 | self.flush() 40 | self.timer.cancel() 41 | 42 | client.before_shutdown(before_shutdown) 43 | 44 | def emit(self, record): 45 | """Emit sends a log to Axiom.""" 46 | self.buffer.append(record.__dict__) 47 | if ( 48 | len(self.buffer) >= 1000 49 | or time.monotonic() - self.last_flush > self.interval 50 | ): 51 | self.flush() 52 | 53 | # Restart timer 54 | self.timer.cancel() 55 | self.timer = Timer(self.interval, self.flush) 56 | self.timer.start() 57 | 58 | def flush(self): 59 | """Flush sends all logs in the buffer to Axiom.""" 60 | 61 | self.last_flush = time.monotonic() 62 | 63 | if len(self.buffer) == 0: 64 | return 65 | 66 | local_buffer, self.buffer = self.buffer, [] 67 | self.client.ingest_events(self.dataset, local_buffer) 68 | -------------------------------------------------------------------------------- /src/axiom_py/query/__init__.py: -------------------------------------------------------------------------------- 1 | from .query import QueryKind, Order, VirtualField, Projection, QueryLegacy 2 | from .options import QueryOptions 3 | from .filter import FilterOperation, BaseFilter, Filter 4 | from .aggregation import Aggregation 5 | from .result import ( 6 | MessagePriority, 7 | Message, 8 | QueryStatus, 9 | Entry, 10 | EntryGroupAgg, 11 | EntryGroup, 12 | Interval, 13 | Timeseries, 14 | QueryLegacyResult, 15 | QueryResult, 16 | ) 17 | 18 | __all__ = ( 19 | QueryKind, 20 | Order, 21 | VirtualField, 22 | Projection, 23 | QueryLegacy, 24 | QueryOptions, 25 | FilterOperation, 26 | BaseFilter, 27 | Filter, 28 | Aggregation, 29 | MessagePriority, 30 | Message, 31 | QueryStatus, 32 | Entry, 33 | EntryGroupAgg, 34 | EntryGroup, 35 | Interval, 36 | Timeseries, 37 | QueryLegacyResult, 38 | QueryResult, 39 | ) 40 | -------------------------------------------------------------------------------- /src/axiom_py/query/aggregation.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field as dataclass_field 2 | from typing import Any 3 | 4 | 5 | @dataclass 6 | class Aggregation: 7 | """Aggregation performed as part of a query.""" 8 | 9 | op: str 10 | field: str = dataclass_field(default="") 11 | alias: str = dataclass_field(default="") 12 | argument: Any = dataclass_field(default="") 13 | -------------------------------------------------------------------------------- /src/axiom_py/query/filter.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List 3 | from dataclasses import dataclass, field as dataclass_field 4 | 5 | 6 | class FilterOperation(Enum): 7 | """A FilterOperatuib can be applied on queries to filter based on different conditions.""" 8 | 9 | EMPTY = "" 10 | AND = "and" 11 | OR = "or" 12 | NOT = "not" 13 | 14 | # Works for strings and numbers. 15 | EQUAL = "==" 16 | NOT_EQUAL = "!=" 17 | EXISTS = "exists" 18 | NOT_EXISTS = "not-exists" 19 | 20 | # Only works for numbers. 21 | GREATER_THAN = ">" 22 | GREATER_THAN_EQUAL = ">=" 23 | LESS_THAN = "<" 24 | LESS_THAN_EQUAL = "<=" 25 | 26 | # Only works for strings. 27 | STARTS_WITH = "starts-with" 28 | NOT_STARTS_WITH = "not-starts-with" 29 | ENDS_WITH = "ends-with" 30 | NOT_ENDS_WITH = "not-ends-with" 31 | REGEXP = "regexp" 32 | NOT_REGEXP = "not-regexp" 33 | 34 | # Works for strings and arrays. 
35 | CONTAINS = "contains" 36 | NOT_CONTAINS = "not-contains" 37 | 38 | 39 | @dataclass 40 | class BaseFilter: 41 | op: FilterOperation 42 | field: str 43 | value: any 44 | caseSensitive: bool = dataclass_field(default=False) 45 | 46 | 47 | @dataclass 48 | class Filter(BaseFilter): 49 | children: List[BaseFilter] = dataclass_field(default=lambda: []) 50 | -------------------------------------------------------------------------------- /src/axiom_py/query/options.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from dataclasses import dataclass, field 3 | from .query import QueryKind 4 | 5 | 6 | @dataclass 7 | class QueryOptions: 8 | # StreamingDuration of a query. 9 | streamingDuration: timedelta = field(default=None) 10 | # NoCache omits the query cache. 11 | nocache: bool = field(default=False) 12 | # SaveKind saves the query on the server with the given query kind. The ID 13 | # of the saved query is returned with the query result as part of the 14 | # response. `query.APL` is not a valid kind for this field. 15 | saveAsKind: QueryKind = field(default=QueryKind.ANALYTICS) 16 | -------------------------------------------------------------------------------- /src/axiom_py/query/query.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List, Optional 3 | from dataclasses import dataclass, field 4 | from datetime import datetime 5 | from .aggregation import Aggregation 6 | from .filter import Filter 7 | 8 | 9 | class QueryKind(Enum): 10 | """represents kind of query""" 11 | 12 | ANALYTICS = "analytics" 13 | STREAM = "stream" 14 | APL = "apl" 15 | 16 | 17 | @dataclass 18 | class Order: 19 | """Order specifies the order a queries result will be in.""" 20 | 21 | # Field to order on. Must be present in `GroupBy` or used by an 22 | # aggregation. 23 | field: str 24 | # Desc specifies if the field is ordered ascending or descending. 25 | desc: bool 26 | 27 | 28 | @dataclass 29 | class VirtualField: 30 | """ 31 | A VirtualField is not part of a dataset and its value is derived from an 32 | expression. Aggregations, filters and orders can reference this field like 33 | any other field. 34 | """ 35 | 36 | # Alias the virtual field is referenced by. 37 | alias: str 38 | # Expression which specifies the virtual fields value. 39 | expr: str 40 | 41 | 42 | @dataclass 43 | class Projection: 44 | """A Projection is a field that is projected to the query result.""" 45 | 46 | # Field to project to the query result. 47 | field: str 48 | # Alias to reference the projected field by. Optional. 49 | alias: str 50 | 51 | 52 | @dataclass 53 | class QueryLegacy: 54 | """represents a query that gets executed on a dataset.""" 55 | 56 | # start time of the query. Required. 57 | startTime: datetime 58 | # end time of the query. Required. 59 | endTime: datetime 60 | # resolution of the queries graph. Valid values are the queries time 61 | # range / 100 at maximum and / 1000 at minimum. Use zero value for 62 | # serve-side auto-detection. 63 | resolution: str = field(default="auto") 64 | # Aggregations performed as part of the query. 65 | aggregations: Optional[List[Aggregation]] = field( 66 | default_factory=lambda: [] 67 | ) 68 | # GroupBy is a list of field names to group the query result by. Only valid 69 | # when at least one aggregation is specified. 
70 | groupBy: Optional[List[str]] = field(default_factory=lambda: []) 71 | # Filter applied on the queried results. 72 | filter: Optional[Filter] = field(default=None) 73 | # Order is a list of order rules that specify the order of the query 74 | # result. 75 | order: Optional[List[Order]] = field(default_factory=lambda: []) 76 | # Limit the amount of results returned from the query. 77 | limit: int = field(default=10) 78 | # VirtualFields is a list of virtual fields that can be referenced by 79 | # aggregations, filters and orders. 80 | virtualFields: Optional[List[VirtualField]] = field( 81 | default_factory=lambda: [] 82 | ) 83 | # Projections is a list of projections that can be referenced by 84 | # aggregations, filters and orders. Leaving it empty projects all available 85 | # fields to the query result. 86 | project: Optional[List[Projection]] = field(default_factory=lambda: []) 87 | # Cursor is the query cursor. It should be set to the Cursor returned with 88 | # a previous query result if it was partial. 89 | cursor: str = field(default=None) 90 | # IncludeCursor will return the Cursor as part of the query result, if set 91 | # to true. 92 | includeCursor: bool = field(default=False) 93 | # ContinuationToken is used to get more results of a previous query. It is 94 | # not valid for starred queries or otherwise stored queries. 95 | continuationToken: str = field(default=None) 96 | -------------------------------------------------------------------------------- /src/axiom_py/query/result.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from datetime import datetime 3 | from typing import List, Dict, Optional, Union 4 | from enum import Enum 5 | 6 | from .query import QueryLegacy 7 | 8 | 9 | class MessagePriority(Enum): 10 | """Message priorities represents the priority of a message associated with a query.""" 11 | 12 | TRACE = "trace" 13 | DEBUG = "debug" 14 | INFO = "info" 15 | WARN = "warn" 16 | ERROR = "error" 17 | FATAL = "fatal" 18 | 19 | 20 | @dataclass 21 | class Message: 22 | """a message associated with a query result.""" 23 | 24 | # priority of the message. 25 | priority: MessagePriority 26 | # describes how often a message of this type was raised by the query. 27 | count: int 28 | # code of the message. 29 | code: str 30 | # a human readable text representation of the message. 31 | msg: str 32 | 33 | 34 | @dataclass 35 | class QueryStatus: 36 | """the status of a query result.""" 37 | 38 | # the duration it took the query to execute. 39 | elapsedTime: int 40 | # the amount of blocks that have been examined by the query. 41 | blocksExamined: int 42 | # the amount of rows that have been examined by the query. 43 | rowsExamined: int 44 | # the amount of rows that matched the query. 45 | rowsMatched: int 46 | # the amount of groups returned by the query. 47 | numGroups: int 48 | # describes if the query result is a partial result. 49 | isPartial: bool 50 | # ContinuationToken is populated when isPartial is true and must be passed 51 | # to the next query request to retrieve the next result set. 52 | continuationToken: Optional[str] = field(default=None) 53 | # describes if the query result is estimated. 54 | isEstimate: Optional[bool] = field(default=None) 55 | # the timestamp of the oldest block examined. 56 | minBlockTime: Optional[str] = field(default=None) 57 | # the timestamp of the newest block examined. 
58 | maxBlockTime: Optional[str] = field(default=None) 59 | # messages associated with the query. 60 | messages: List[Message] = field(default_factory=lambda: []) 61 | # row id of the newest row, as seen server side. 62 | maxCursor: Optional[str] = field(default=None) 63 | # row id of the oldest row, as seen server side. 64 | minCursor: Optional[str] = field(default=None) 65 | 66 | 67 | @dataclass 68 | class Entry: 69 | """Entry is an event that matched a query and is thus part of the result set.""" 70 | 71 | # the time the event occurred. Matches SysTime if not specified 72 | # during ingestion. 73 | _time: str 74 | # the time the event was recorded on the server. 75 | _sysTime: str 76 | # the unique ID of the event row. 77 | _rowId: str 78 | # contains the raw data of the event (with filters and aggregations 79 | # applied). 80 | data: Dict[str, object] 81 | 82 | 83 | @dataclass 84 | class EntryGroupAgg: 85 | """an aggregation which is part of a group of queried events.""" 86 | 87 | # alias is the aggregations alias. If it wasn't specified at query time, it 88 | # is the uppercased string representation of the aggregation operation. 89 | value: object 90 | op: str = field(default="") 91 | # value is the result value of the aggregation. 92 | 93 | 94 | @dataclass 95 | class EntryGroup: 96 | """a group of queried event.""" 97 | 98 | # the unique id of the group. 99 | id: int 100 | # group maps the fieldnames to the unique values for the entry. 101 | group: Dict[str, object] 102 | # aggregations of the group. 103 | aggregations: List[EntryGroupAgg] 104 | 105 | 106 | @dataclass 107 | class Interval: 108 | """the interval of queried time series.""" 109 | 110 | # startTime of the interval. 111 | startTime: datetime 112 | # endTime of the interval. 113 | endTime: datetime 114 | # groups of the interval. 115 | groups: Optional[List[EntryGroup]] 116 | 117 | 118 | @dataclass 119 | class Timeseries: 120 | """Timeseries are queried time series.""" 121 | 122 | # the intervals that build a time series. 123 | series: List[Interval] 124 | # totals of the time series. 125 | totals: Optional[List[EntryGroup]] 126 | 127 | 128 | @dataclass 129 | class QueryLegacyResult: 130 | """Result is the result of a query.""" 131 | 132 | # Status of the query result. 133 | status: QueryStatus 134 | # Matches are the events that matched the query. 135 | matches: List[Entry] 136 | # Buckets are the time series buckets. 137 | buckets: Timeseries 138 | # savedQueryID is the ID of the query that generated this result when it 139 | # was saved on the server. This is only set when the query was send with 140 | # the `saveAsKind` option specified. 141 | savedQueryID: Optional[str] = field(default=None) 142 | 143 | 144 | @dataclass 145 | class Source: 146 | name: str 147 | 148 | 149 | @dataclass 150 | class Order: 151 | desc: bool 152 | field: str 153 | 154 | 155 | @dataclass 156 | class Group: 157 | name: str 158 | 159 | 160 | @dataclass 161 | class Range: 162 | # Start is the starting time the query is limited by. 163 | start: datetime 164 | # End is the ending time the query is limited by. 165 | end: datetime 166 | # Field specifies the field name on which the query range was restricted. 167 | field: str 168 | 169 | 170 | @dataclass 171 | class Aggregation: 172 | # Args specifies any non-field arguments for the aggregation. 173 | args: Optional[List[object]] 174 | # Fields specifies the names of the fields this aggregation is computed on. 175 | fields: Optional[List[str]] 176 | # Name is the system name of the aggregation. 
177 | name: str 178 | 179 | 180 | @dataclass 181 | class Field: 182 | name: str 183 | type: str 184 | agg: Optional[Aggregation] 185 | 186 | 187 | @dataclass 188 | class Bucket: 189 | # Field specifies the field used to create buckets on. 190 | field: str 191 | # An integer or float representing the fixed bucket size. 192 | size: Union[int, float] 193 | 194 | 195 | @dataclass 196 | class Table: 197 | buckets: Optional[Bucket] 198 | # Columns contain a series of arrays with the raw result data. 199 | # The columns here line up with the fields in the Fields array. 200 | columns: Optional[List[List[object]]] 201 | # Fields contain information about the fields included in these results. 202 | # The order of the fields match up with the order of the data in Columns. 203 | fields: List[Field] 204 | # Groups specifies which grouping operations has been performed on the 205 | # results. 206 | groups: List[Group] 207 | # Name is the name assigned to this table. Defaults to "0". 208 | # The name "_totals" is reserved for system use. 209 | name: str 210 | # Order echoes the ordering clauses that was used to sort the results. 211 | order: List[Order] 212 | range: Optional[Range] 213 | # Sources contain the names of the datasets that contributed data to these 214 | # results. 215 | sources: List[Source] 216 | 217 | def events(self): 218 | return ColumnIterator(self) 219 | 220 | 221 | class ColumnIterator: 222 | table: Table 223 | i: int = 0 224 | 225 | def __init__(self, table: Table): 226 | self.table = table 227 | 228 | def __iter__(self): 229 | return self 230 | 231 | def __next__(self): 232 | if ( 233 | self.table.columns is None 234 | or len(self.table.columns) == 0 235 | or self.i >= len(self.table.columns[0]) 236 | ): 237 | raise StopIteration 238 | 239 | event = {} 240 | for j, f in enumerate(self.table.fields): 241 | event[f.name] = self.table.columns[j][self.i] 242 | 243 | self.i += 1 244 | return event 245 | 246 | 247 | @dataclass 248 | class QueryResult: 249 | """Result is the result of apl query.""" 250 | 251 | request: Optional[QueryLegacy] 252 | # Status of the apl query result. 253 | status: QueryStatus 254 | # Matches are the events that matched the apl query. 255 | matches: Optional[List[Entry]] 256 | # Buckets are the time series buckets. 257 | buckets: Optional[Timeseries] 258 | # Tables is populated in tabular queries. 259 | tables: Optional[List[Table]] 260 | # Dataset names are the datasets that were used in the apl query. 261 | dataset_names: List[str] = field(default_factory=lambda: []) 262 | # savedQueryID is the ID of the apl query that generated this result when it 263 | # was saved on the server. This is only set when the apl query was send with 264 | # the `saveAsKind` option specified. 
265 | savedQueryID: Optional[str] = field(default=None) 266 | -------------------------------------------------------------------------------- /src/axiom_py/structlog.py: -------------------------------------------------------------------------------- 1 | """Structlog contains the AxiomProcessor for structlog.""" 2 | 3 | from typing import List 4 | import time 5 | import atexit 6 | 7 | from .client import Client 8 | 9 | 10 | class AxiomProcessor: 11 | """A processor for sending structlogs to Axiom.""" 12 | 13 | client: Client 14 | dataset: str 15 | buffer: List[object] 16 | interval: int 17 | last_run: float 18 | 19 | def __init__(self, client: Client, dataset: str, interval=1): 20 | self.client = client 21 | self.dataset = dataset 22 | self.buffer = [] 23 | self.last_run = time.monotonic() 24 | self.interval = interval 25 | 26 | atexit.register(self.flush) 27 | 28 | def flush(self): 29 | self.last_run = time.monotonic() 30 | if len(self.buffer) == 0: 31 | return 32 | self.client.ingest_events(self.dataset, self.buffer) 33 | self.buffer = [] 34 | 35 | def __call__(self, logger: object, method_name: str, event_dict: object): 36 | self.buffer.append(event_dict.copy()) 37 | if ( 38 | len(self.buffer) >= 1000 39 | or time.monotonic() - self.last_run > self.interval 40 | ): 41 | self.flush() 42 | return event_dict 43 | -------------------------------------------------------------------------------- /src/axiom_py/tokens.py: -------------------------------------------------------------------------------- 1 | import ujson 2 | from dataclasses import dataclass, field, asdict 3 | from requests import Session 4 | from typing import Literal, Optional, List, Dict 5 | from datetime import datetime 6 | from .util import from_dict, handle_json_serialization 7 | 8 | Action = Literal["create", "read", "update", "delete"] 9 | 10 | 11 | @dataclass 12 | class TokenDatasetCapabilities: 13 | # pylint: disable=unsubscriptable-object 14 | """ 15 | TokenDatasetCapabilities describes the dataset-level permissions 16 | which a token can be assigned. 17 | Each token can have multiple dataset-level capability objects; 18 | one per dataset. 19 | """ 20 | 21 | # Ability to ingest data. Optional. 22 | ingest: Optional[List[Action]] = field(default=None) 23 | # Ability to query data. Optional. 24 | query: Optional[List[Action]] = field(default=None) 25 | # Ability to use starred queries. Optional. 26 | starredQueries: Optional[List[Action]] = field(default=None) 27 | # Ability to use virtual fields. Optional. 28 | virtualFields: Optional[List[Action]] = field(default=None) 29 | # Trim capability. Optional 30 | trim: Optional[List[Action]] = field(default=None) 31 | # Vacuum capability. Optional 32 | vacuum: Optional[List[Action]] = field(default=None) 33 | # Data management capability. Optional. 34 | data: Optional[List[Action]] = field(default=None) 35 | # Share capability. Optional. 36 | share: Optional[List[Action]] = field(default=None) 37 | 38 | 39 | @dataclass 40 | class TokenOrganizationCapabilities: 41 | # pylint: disable=unsubscriptable-object 42 | """ 43 | TokenOrganizationCapabilities describes the org-level permissions 44 | which a token can be assigned. 45 | """ 46 | 47 | # Ability to use annotations. Optional. 48 | annotations: Optional[List[Action]] = field(default=None) 49 | # Ability to use api tokens. Optional. 50 | apiTokens: Optional[List[Action]] = field(default=None) 51 | # Audit log capability. Optional. 52 | auditLog: Optional[List[Action]] = field(default=None) 53 | # Ability to access billing. 
Optional. 54 | billing: Optional[List[Action]] = field(default=None) 55 | # Ability to use dashboards. Optional. 56 | dashboards: Optional[List[Action]] = field(default=None) 57 | # Ability to use datasets. Optional. 58 | datasets: Optional[List[Action]] = field(default=None) 59 | # Ability to use endpoints. Optional. 60 | endpoints: Optional[List[Action]] = field(default=None) 61 | # Ability to use flows. Optional. 62 | flows: Optional[List[Action]] = field(default=None) 63 | # Ability to use integrations. Optional. 64 | integrations: Optional[List[Action]] = field(default=None) 65 | # Ability to use monitors. Optional. 66 | monitors: Optional[List[Action]] = field(default=None) 67 | # Ability to use notifiers. Optional. 68 | notifiers: Optional[List[Action]] = field(default=None) 69 | # Ability to use role-based access controls. Optional. 70 | rbac: Optional[List[Action]] = field(default=None) 71 | # Ability to use shared access keys. Optional. 72 | sharedAccessKeys: Optional[List[Action]] = field(default=None) 73 | # Ability to use users. Optional. 74 | users: Optional[List[Action]] = field(default=None) 75 | # Ability to use views. Optional. 76 | views: Optional[List[Action]] = field(default=None) 77 | 78 | 79 | @dataclass 80 | class ApiToken: 81 | """ 82 | Token contains the response from a call to POST /tokens. 83 | It includes the API token itself, and an ID which can be used to reference it later. 84 | """ 85 | 86 | id: str 87 | name: str 88 | description: Optional[str] 89 | expiresAt: Optional[datetime] 90 | datasetCapabilities: Optional[Dict[str, TokenDatasetCapabilities]] 91 | orgCapabilities: Optional[TokenOrganizationCapabilities] 92 | samlAuthenticated: bool = field(default=False) 93 | 94 | 95 | @dataclass 96 | class CreateTokenRequest: 97 | # pylint: disable=unsubscriptable-object 98 | """ 99 | CreateTokenRequest describes the set of input parameters that the 100 | POST /tokens API accepts. 101 | """ 102 | 103 | # Name for the token. Required. 104 | name: str 105 | # Description for the API token. Optional. 106 | description: Optional[str] = field(default=None) 107 | # Expiration date for the API token. Optional. 108 | expiresAt: Optional[str] = field(default=None) 109 | # The token's dataset-level capabilities. Keyed on dataset name. Optional. 110 | datasetCapabilities: Optional[Dict[str, TokenDatasetCapabilities]] = field( 111 | default=None 112 | ) 113 | # The token's organization-level capabilities. Optional. 114 | orgCapabilities: Optional[TokenOrganizationCapabilities] = field( 115 | default=None 116 | ) 117 | 118 | 119 | @dataclass 120 | class CreateTokenResponse(ApiToken): 121 | """ 122 | CreateTokenResponse describes the set of output parameters that the 123 | POST /tokens API returns. 124 | """ 125 | 126 | token: str = "" 127 | 128 | 129 | @dataclass 130 | class RegenerateTokenRequest: 131 | # pylint: disable=unsubscriptable-object 132 | """ 133 | RegenerateTokenRequest describes the set of input parameters that the 134 | POST /tokens/{id}/regenerate API accepts.
135 | """ 136 | 137 | existingTokenExpiresAt: datetime 138 | newTokenExpiresAt: datetime 139 | 140 | 141 | class TokensClient: # pylint: disable=R0903 142 | """TokensClient has methods to manipulate tokens.""" 143 | 144 | session: Session 145 | 146 | def __init__(self, session: Session): 147 | self.session = session 148 | 149 | def list(self) -> List[ApiToken]: 150 | """List all API tokens.""" 151 | res = self.session.get("/v2/tokens") 152 | tokens = [] 153 | for record in res.json(): 154 | ds = from_dict(ApiToken, record) 155 | tokens.append(ds) 156 | return tokens 157 | 158 | def create(self, req: CreateTokenRequest) -> CreateTokenResponse: 159 | """Creates a new API token with permissions specified in a TokenAttributes object.""" 160 | res = self.session.post( 161 | "/v2/tokens", 162 | data=ujson.dumps(asdict(req), default=handle_json_serialization), 163 | ) 164 | 165 | # Return the new token and ID. 166 | token = from_dict(CreateTokenResponse, res.json()) 167 | return token 168 | 169 | def get(self, token_id: str) -> ApiToken: 170 | """Get an API token using its ID string.""" 171 | res = self.session.get(f"/v2/tokens/{token_id}") 172 | token = from_dict(ApiToken, res.json()) 173 | return token 174 | 175 | def regenerate( 176 | self, token_id: str, req: RegenerateTokenRequest 177 | ) -> ApiToken: 178 | """Regenerate an API token using its ID string.""" 179 | res = self.session.post( 180 | f"/v2/tokens/{token_id}/regenerate", 181 | data=ujson.dumps(asdict(req), default=handle_json_serialization), 182 | ) 183 | token = from_dict(ApiToken, res.json()) 184 | return token 185 | 186 | def delete(self, token_id: str) -> None: 187 | """Delete an API token using its ID string.""" 188 | self.session.delete(f"/v2/tokens/{token_id}") 189 | -------------------------------------------------------------------------------- /src/axiom_py/users.py: -------------------------------------------------------------------------------- 1 | from .util import from_dict 2 | from dataclasses import dataclass 3 | from requests import Session 4 | from typing import Optional 5 | 6 | 7 | @dataclass 8 | class Role: 9 | id: str 10 | name: str 11 | 12 | 13 | @dataclass 14 | class User: 15 | """An authenticated axiom user.""" 16 | 17 | id: str 18 | name: str 19 | email: str 20 | role: Role 21 | 22 | 23 | class UsersClient: 24 | """The UsersClient is a client for the Axiom Users service.""" 25 | 26 | has_personal_token: bool 27 | 28 | def __init__(self, session: Session, has_personal_token: bool): 29 | self.session = session 30 | self.has_personal_token = has_personal_token 31 | 32 | def current(self) -> Optional[User]: 33 | """ 34 | Get the current authenticated user. 35 | If your token is not a personal token, this will return None. 
36 | 37 | See https://axiom.co/docs/restapi/endpoints/getCurrentUser 38 | """ 39 | if not self.has_personal_token: 40 | return None 41 | 42 | res = self.session.get("/v2/user") 43 | user = from_dict(User, res.json()) 44 | return user 45 | -------------------------------------------------------------------------------- /src/axiom_py/util.py: -------------------------------------------------------------------------------- 1 | import dacite 2 | import iso8601 3 | from enum import Enum 4 | from uuid import UUID 5 | from typing import Type, TypeVar 6 | from datetime import datetime, timedelta 7 | 8 | from .query import QueryKind 9 | from .query.result import MessagePriority 10 | from .query.filter import FilterOperation 11 | 12 | 13 | T = TypeVar("T") 14 | 15 | 16 | def _convert_string_to_datetime(val: str) -> datetime: 17 | d = iso8601.parse_date(val) 18 | return d 19 | 20 | 21 | def _convert_string_to_timedelta(val: str) -> timedelta: 22 | if val == "0": 23 | return timedelta(seconds=0) 24 | 25 | exp = "^([0-9]?)([a-z])$" 26 | import re 27 | 28 | found = re.search(exp, val) 29 | if not found: 30 | raise Exception(f"failed to parse timedelta field from value {val}") 31 | 32 | v = int(found.groups()[0]) 33 | unit = found.groups()[1] 34 | 35 | if unit == "s": 36 | return timedelta(seconds=v) 37 | elif unit == "m": 38 | return timedelta(minutes=v) 39 | elif unit == "h": 40 | return timedelta(hours=v) 41 | elif unit == "d": 42 | return timedelta(days=v) 43 | else: 44 | raise Exception(f"failed to parse timedelta field from value {val}") 45 | 46 | 47 | def from_dict(data_class: Type[T], data) -> T: 48 | cfg = dacite.Config( 49 | type_hooks={ 50 | QueryKind: QueryKind, 51 | datetime: _convert_string_to_datetime, 52 | FilterOperation: FilterOperation, 53 | MessagePriority: MessagePriority, 54 | timedelta: _convert_string_to_timedelta, 55 | } 56 | ) 57 | 58 | return dacite.from_dict(data_class=data_class, data=data, config=cfg) 59 | 60 | 61 | def handle_json_serialization(obj): 62 | if isinstance(obj, datetime): 63 | return obj.isoformat("T") + "Z" 64 | elif isinstance(obj, timedelta): 65 | return str(obj.seconds) + "s" 66 | elif isinstance(obj, Enum): 67 | return obj.value 68 | elif isinstance(obj, UUID): 69 | return str(obj) 70 | 71 | 72 | def is_personal_token(token: str): 73 | return token.startswith("xapt-") 74 | -------------------------------------------------------------------------------- /src/axiom_py/version.py: -------------------------------------------------------------------------------- 1 | """The current version""" 2 | 3 | __version__ = "0.9.0" 4 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/axiomhq/axiom-py/f86ed195e5ba25cde85d6151cee0ad81473df5b3/tests/__init__.py -------------------------------------------------------------------------------- /tests/helpers.py: -------------------------------------------------------------------------------- 1 | """This module contains helper functions for tests.""" 2 | 3 | import random 4 | from datetime import datetime 5 | 6 | 7 | def get_random_name() -> str: 8 | random_string = "" 9 | 10 | for _ in range(10): 11 | random_integer = random.randint(97, 122) 12 | # Keep appending random characters using chr(x) 13 | random_string += chr(random_integer) 14 | 15 | return random_string 16 | 17 | 18 | def parse_time(txt: str) -> datetime: 19 | return datetime.strptime(txt, 
"%Y-%m-%dT%H:%M:%S.%f") 20 | -------------------------------------------------------------------------------- /tests/test_annotations.py: -------------------------------------------------------------------------------- 1 | """This module contains the tests for the AnnotationsClient.""" 2 | 3 | import os 4 | 5 | import unittest 6 | from logging import getLogger 7 | from .helpers import get_random_name 8 | from axiom_py import ( 9 | Client, 10 | AnnotationCreateRequest, 11 | AnnotationUpdateRequest, 12 | ) 13 | 14 | 15 | class TestAnnotations(unittest.TestCase): 16 | client: Client 17 | dataset_name: str 18 | 19 | @classmethod 20 | def setUpClass(cls): 21 | cls.logger = getLogger() 22 | 23 | cls.client = Client( 24 | os.getenv("AXIOM_TOKEN"), 25 | os.getenv("AXIOM_ORG_ID"), 26 | os.getenv("AXIOM_URL"), 27 | ) 28 | 29 | # create dataset 30 | cls.dataset_name = get_random_name() 31 | cls.client.datasets.create( 32 | cls.dataset_name, "test_annotations.py (dataset_name)" 33 | ) 34 | 35 | def test_happy_path_crud(self): 36 | """ 37 | Test the happy path of creating, reading, updating, and deleting an 38 | annotation. 39 | """ 40 | # Create annotation 41 | req = AnnotationCreateRequest( 42 | datasets=[self.dataset_name], 43 | type="test", 44 | time=None, 45 | endTime=None, 46 | title=None, 47 | description=None, 48 | url=None, 49 | ) 50 | created_annotation = self.client.annotations.create(req) 51 | self.logger.debug(created_annotation) 52 | 53 | # Get annotation 54 | annotation = self.client.annotations.get(created_annotation.id) 55 | self.logger.debug(annotation) 56 | assert annotation.id == created_annotation.id 57 | 58 | # List annotations 59 | annotations = self.client.annotations.list( 60 | datasets=[self.dataset_name] 61 | ) 62 | self.logger.debug(annotations) 63 | assert len(annotations) == 1 64 | 65 | # Update 66 | newTitle = "Update title" 67 | updateReq = AnnotationUpdateRequest( 68 | datasets=None, 69 | type=None, 70 | time=None, 71 | endTime=None, 72 | title=newTitle, 73 | description=None, 74 | url=None, 75 | ) 76 | updated_annotation = self.client.annotations.update( 77 | annotation.id, updateReq 78 | ) 79 | self.logger.debug(updated_annotation) 80 | assert updated_annotation.title == newTitle 81 | 82 | # Delete 83 | self.client.annotations.delete(annotation.id) 84 | 85 | @classmethod 86 | def tearDownClass(cls): 87 | """Delete datasets""" 88 | cls.client.datasets.delete(cls.dataset_name) 89 | -------------------------------------------------------------------------------- /tests/test_client.py: -------------------------------------------------------------------------------- 1 | """This module contains the tests for the axiom client.""" 2 | 3 | import sys 4 | import os 5 | import unittest 6 | from unittest.mock import patch 7 | import gzip 8 | import uuid 9 | 10 | import ujson 11 | import rfc3339 12 | import responses 13 | from logging import getLogger 14 | from datetime import datetime, timedelta 15 | 16 | from .helpers import get_random_name 17 | from axiom_py import ( 18 | AxiomError, 19 | Client, 20 | AplOptions, 21 | AplResultFormat, 22 | ContentEncoding, 23 | ContentType, 24 | IngestOptions, 25 | WrongQueryKindException, 26 | ) 27 | from axiom_py.query import ( 28 | QueryLegacy, 29 | QueryOptions, 30 | QueryKind, 31 | Filter, 32 | Order, 33 | VirtualField, 34 | Projection, 35 | FilterOperation, 36 | Aggregation, 37 | ) 38 | from axiom_py.tokens import ( 39 | CreateTokenRequest, 40 | TokenOrganizationCapabilities, 41 | RegenerateTokenRequest, 42 | ) 43 | 44 | 45 | class 
TestClient(unittest.TestCase): 46 | client: Client 47 | 48 | @classmethod 49 | def setUpClass(cls): 50 | cls.logger = getLogger() 51 | cls.client = Client( 52 | os.getenv("AXIOM_TOKEN"), 53 | os.getenv("AXIOM_ORG_ID"), 54 | os.getenv("AXIOM_URL"), 55 | ) 56 | cls.dataset_name = get_random_name() 57 | cls.logger.info( 58 | f"generated random dataset name is: {cls.dataset_name}" 59 | ) 60 | events_time_format = "%d/%b/%Y:%H:%M:%S +0000" 61 | # create events to ingest and query 62 | time = datetime.utcnow() - timedelta(minutes=1) 63 | time_formatted = time.strftime(events_time_format) 64 | cls.logger.info(f"time_formatted: {time_formatted}") 65 | cls.events = [ 66 | { 67 | "_time": time_formatted, 68 | "remote_ip": "93.180.71.3", 69 | "remote_user": "-", 70 | "request": "GET /downloads/product_1 HTTP/1.1", 71 | "response": 304, 72 | "bytes": 0, 73 | "referrer": "-", 74 | "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)", 75 | }, 76 | { 77 | "_time": time_formatted, 78 | "remote_ip": "93.180.71.3", 79 | "remote_user": "-", 80 | "request": "GET /downloads/product_1 HTTP/1.1", 81 | "response": 304, 82 | "bytes": 0, 83 | "referrer": "-", 84 | "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)", 85 | }, 86 | ] 87 | # create dataset to test the client 88 | cls.client.datasets.create( 89 | cls.dataset_name, "create a dataset to test the python client" 90 | ) 91 | 92 | @responses.activate 93 | def test_retries(self): 94 | axiomUrl = os.getenv("AXIOM_URL") or "" 95 | url = axiomUrl + "/v1/datasets/test" 96 | responses.add(responses.GET, url, status=500) 97 | responses.add(responses.GET, url, status=502) 98 | responses.add( 99 | responses.GET, 100 | url, 101 | status=200, 102 | json={"name": "test", "description": "", "who": "", "created": ""}, 103 | ) 104 | 105 | self.client.datasets.get("test") 106 | assert len(responses.calls) == 3 107 | 108 | def test_step001_ingest(self): 109 | """Tests the ingest endpoint""" 110 | data: bytes = ujson.dumps(self.events).encode() 111 | payload = gzip.compress(data) 112 | opts = IngestOptions( 113 | "_time", 114 | "2/Jan/2006:15:04:05 +0000", 115 | # CSV_delimiter obviously not valid for JSON, but perfectly fine to 116 | # test for its presence in this test. 
117 | ";", 118 | ) 119 | res = self.client.ingest( 120 | self.dataset_name, 121 | payload=payload, 122 | contentType=ContentType.JSON, 123 | enc=ContentEncoding.GZIP, 124 | opts=opts, 125 | ) 126 | self.logger.debug(res) 127 | 128 | assert ( 129 | res.ingested == 2 130 | ), f"expected ingested count to equal 2, found {res.ingested}" 131 | self.logger.info("ingested 2 events successfully.") 132 | 133 | def test_step002_ingest_events(self): 134 | """Tests the ingest_events method""" 135 | time = datetime.utcnow() - timedelta(hours=1) 136 | time_formatted = rfc3339.format(time) 137 | 138 | res = self.client.ingest_events( 139 | dataset=self.dataset_name, 140 | events=[ 141 | {"foo": "bar", "_time": time_formatted}, 142 | {"bar": "baz", "_time": time_formatted}, 143 | ], 144 | ) 145 | self.logger.debug(res) 146 | 147 | assert ( 148 | res.ingested == 2 149 | ), f"expected ingested count to equal 2, found {res.ingested}" 150 | 151 | def test_step004_query(self): 152 | """Test querying a dataset""" 153 | # query the events we ingested in step2 154 | startTime = datetime.utcnow() - timedelta(minutes=2) 155 | endTime = datetime.utcnow() 156 | 157 | q = QueryLegacy(startTime=startTime, endTime=endTime) 158 | opts = QueryOptions( 159 | streamingDuration=timedelta(seconds=60), 160 | nocache=True, 161 | saveAsKind=QueryKind.ANALYTICS, 162 | ) 163 | qr = self.client.query_legacy(self.dataset_name, q, opts) 164 | 165 | self.assertIsNotNone(qr.savedQueryID) 166 | self.assertEqual(len(qr.matches), len(self.events)) 167 | 168 | def test_step005_apl_query(self): 169 | """Test apl query""" 170 | # query the events we ingested in step2 171 | startTime = datetime.utcnow() - timedelta(minutes=2) 172 | endTime = datetime.utcnow() 173 | 174 | apl = "['%s']" % self.dataset_name 175 | opts = AplOptions( 176 | start_time=startTime, 177 | end_time=endTime, 178 | format=AplResultFormat.Legacy, 179 | ) 180 | qr = self.client.query(apl, opts) 181 | 182 | self.assertEqual(len(qr.matches), len(self.events)) 183 | 184 | def test_step005_apl_query_messages(self): 185 | """Test an APL query with messages""" 186 | startTime = datetime.utcnow() - timedelta(minutes=2) 187 | endTime = datetime.utcnow() 188 | 189 | apl = "['%s'] | where true" % self.dataset_name 190 | opts = AplOptions( 191 | start_time=startTime, 192 | end_time=endTime, 193 | format=AplResultFormat.Legacy, 194 | ) 195 | qr = self.client.query(apl, opts) 196 | # "where clause always evaluates to TRUE, which will include all data" 197 | self.assertEqual(len(qr.status.messages), 1) 198 | self.assertEqual( 199 | qr.status.messages[0].msg, 200 | "line: 1, col: 24: where clause always evaluates to TRUE, which will include all data", 201 | ) 202 | self.assertEqual( 203 | qr.status.messages[0].code, 204 | "apl_whereclausealwaysevaluatestoTRUEwhichwillincludealldata_1", 205 | ) 206 | 207 | def test_step005_apl_query_tabular(self): 208 | """Test apl query (tabular)""" 209 | # query the events we ingested in step2 210 | startTime = datetime.utcnow() - timedelta(minutes=2) 211 | endTime = datetime.utcnow() 212 | 213 | apl = "['%s']" % self.dataset_name 214 | opts = AplOptions( 215 | start_time=startTime, 216 | end_time=endTime, 217 | format=AplResultFormat.Tabular, 218 | ) 219 | qr = self.client.query(apl, opts) 220 | 221 | events = list(qr.tables[0].events()) 222 | self.assertEqual(len(events), len(self.events)) 223 | 224 | def test_step005_wrong_query_kind(self): 225 | """Test wrong query kind""" 226 | startTime = datetime.utcnow() - timedelta(minutes=2) 227 | endTime = 
datetime.utcnow() 228 | opts = QueryOptions( 229 | streamingDuration=timedelta(seconds=60), 230 | nocache=True, 231 | saveAsKind=QueryKind.APL, 232 | ) 233 | q = QueryLegacy(startTime, endTime) 234 | 235 | try: 236 | self.client.query_legacy(self.dataset_name, q, opts) 237 | except WrongQueryKindException: 238 | self.logger.info( 239 | "passing kind apl to query raised exception as expected" 240 | ) 241 | return 242 | 243 | self.fail("was excepting WrongQueryKindException") 244 | 245 | def test_step005_complex_query(self): 246 | """Test complex query""" 247 | startTime = datetime.utcnow() - timedelta(minutes=2) 248 | endTime = datetime.utcnow() 249 | aggregations = [ 250 | Aggregation(alias="event_count", op="count", field="*") 251 | ] 252 | q = QueryLegacy(startTime, endTime, aggregations=aggregations) 253 | q.groupBy = ["success", "remote_ip"] 254 | q.filter = Filter(FilterOperation.EQUAL, "response", 304) 255 | q.order = [ 256 | Order("success", True), 257 | Order("remote_ip", False), 258 | ] 259 | q.virtualFields = [VirtualField("success", "response < 400")] 260 | q.project = [Projection("remote_ip", "ip")] 261 | 262 | res = self.client.query_legacy(self.dataset_name, q, QueryOptions()) 263 | 264 | # self.assertEqual(len(self.events), res.status.rowsExamined) 265 | self.assertEqual(len(self.events), res.status.rowsMatched) 266 | 267 | if res.buckets.totals and len(res.buckets.totals): 268 | agg = res.buckets.totals[0].aggregations[0] 269 | self.assertEqual("event_count", agg.op) 270 | 271 | def test_api_tokens(self): 272 | """Test creating and deleting an API token""" 273 | token_name = f"PytestToken-{uuid.uuid4()}" 274 | create_req = CreateTokenRequest( 275 | name=token_name, 276 | orgCapabilities=TokenOrganizationCapabilities(apiTokens=["read"]), 277 | ) 278 | token = self.client.tokens.create(create_req) 279 | 280 | self.assertEqual(token_name, token.name) 281 | assert token.id 282 | assert token.token 283 | 284 | tokens = self.client.tokens.list() 285 | assert tokens 286 | 287 | token = self.client.tokens.get(token.id) 288 | self.assertEqual(token_name, token.name) 289 | 290 | self.client.tokens.regenerate( 291 | token.id, 292 | RegenerateTokenRequest( 293 | existingTokenExpiresAt=datetime.now() + timedelta(days=1), 294 | newTokenExpiresAt=datetime.now() + timedelta(days=2), 295 | ), 296 | ) 297 | 298 | # (An exception will be raised if the delete call is not successful.) 299 | self.client.tokens.delete(token.id) 300 | 301 | @patch("sys.exit") 302 | def test_client_shutdown_atexit(self, mock_exit): 303 | """Test client shutdown atexit""" 304 | # Use the mock to test the firing mechanism 305 | self.assertEqual(self.client.is_closed, False) 306 | sys.exit() 307 | mock_exit.assert_called_once() 308 | # Use the hook implementation to assert the client is closed closed 309 | self.client.shutdown_hook() 310 | self.assertEqual(self.client.is_closed, True) 311 | 312 | @classmethod 313 | def tearDownClass(cls): 314 | """A teardown that checks if the dataset still exists and deletes it, 315 | otherwise we might run into zombie datasets on failures.""" 316 | cls.logger.info("cleaning up after TestClient...") 317 | try: 318 | ds = cls.client.datasets.get(cls.dataset_name) 319 | if ds: 320 | cls.client.datasets.delete(cls.dataset_name) 321 | cls.logger.info( 322 | "dataset (%s) was not deleted as part of the test, deleting it now." 
323 | % cls.dataset_name 324 | ) 325 | except AxiomError as e: 326 | # nothing to do here, since the dataset doesn't exist 327 | cls.logger.warning(e) 328 | cls.logger.info("finish cleaning up after TestClient") 329 | -------------------------------------------------------------------------------- /tests/test_datasets.py: -------------------------------------------------------------------------------- 1 | """This module contains the tests for the DatasetsClient.""" 2 | 3 | import os 4 | 5 | import unittest 6 | from typing import List, Dict 7 | from logging import getLogger 8 | from datetime import timedelta 9 | from .helpers import get_random_name 10 | from axiom_py import Client, AxiomError 11 | 12 | 13 | class TestDatasets(unittest.TestCase): 14 | dataset_name: str 15 | events: List[Dict[str, object]] 16 | client: Client 17 | events_time_format = "%d/%b/%Y:%H:%M:%S +0000" 18 | 19 | @classmethod 20 | def setUpClass(cls): 21 | cls.logger = getLogger() 22 | 23 | cls.dataset_name = get_random_name() 24 | cls.logger.info( 25 | f"generated random dataset name is: {cls.dataset_name}" 26 | ) 27 | 28 | cls.client = Client( 29 | os.getenv("AXIOM_TOKEN"), 30 | os.getenv("AXIOM_ORG_ID"), 31 | os.getenv("AXIOM_URL"), 32 | ) 33 | 34 | def test_step001_create(self): 35 | """Tests create dataset endpoint""" 36 | res = self.client.datasets.create( 37 | self.dataset_name, "create a dataset to test the python client" 38 | ) 39 | self.logger.debug(res) 40 | assert res.name == self.dataset_name 41 | 42 | def test_step002_get(self): 43 | """Tests get dataset endpoint""" 44 | dataset = self.client.datasets.get(self.dataset_name) 45 | self.logger.debug(dataset) 46 | 47 | assert dataset.name == self.dataset_name 48 | 49 | def test_step003_list(self): 50 | """Tests list datasets endpoint""" 51 | datasets = self.client.datasets.get_list() 52 | self.logger.debug(datasets) 53 | 54 | assert len(datasets) > 0 55 | 56 | def test_step004_update(self): 57 | """Tests update dataset endpoint""" 58 | newDescription = "updated name through test" 59 | ds = self.client.datasets.update(self.dataset_name, newDescription) 60 | 61 | assert ds.description == newDescription 62 | 63 | def test_step005_trim(self): 64 | """Tests dataset trim endpoint""" 65 | self.client.datasets.trim(self.dataset_name, timedelta(seconds=1)) 66 | 67 | def test_step999_delete(self): 68 | """Tests delete dataset endpoint""" 69 | 70 | self.client.datasets.delete(self.dataset_name) 71 | try: 72 | dataset = self.client.datasets.get(self.dataset_name) 73 | 74 | self.assertIsNone( 75 | dataset, 76 | f"expected test dataset (%{self.dataset_name}) to be deleted", 77 | ) 78 | except AxiomError as e: 79 | # the get method returns 404 error if dataset doesn't exist, so 80 | # that means that our tests passed, otherwise, it should fail. 81 | if e.status != 404: 82 | self.fail(e) 83 | 84 | @classmethod 85 | def tearDownClass(cls): 86 | """A teardown that checks if the dataset still exists and deletes it, 87 | otherwise we might run into zombie datasets on failures.""" 88 | cls.logger.info("cleaning up after TestDatasets...") 89 | try: 90 | ds = cls.client.datasets.get(cls.dataset_name) 91 | if ds: 92 | cls.client.datasets.delete(cls.dataset_name) 93 | cls.logger.info( 94 | "dataset (%s) was not deleted as part of the test, deleting it now." 
95 | % cls.dataset_name 96 | ) 97 | except AxiomError as e: 98 | # nothing to do here, since the dataset doesn't exist 99 | cls.logger.warning(e) 100 | cls.logger.info("finish cleaning up after TestDatasets") 101 | -------------------------------------------------------------------------------- /tests/test_logger.py: -------------------------------------------------------------------------------- 1 | """This module contains test for the logging Handler.""" 2 | 3 | import os 4 | import logging 5 | import unittest 6 | import time 7 | 8 | from .helpers import get_random_name 9 | from axiom_py import Client 10 | from axiom_py.logging import AxiomHandler 11 | 12 | 13 | class TestLogger(unittest.TestCase): 14 | def test_log(self): 15 | """Tests the logger""" 16 | client = Client( 17 | os.getenv("AXIOM_TOKEN"), 18 | os.getenv("AXIOM_ORG_ID"), 19 | os.getenv("AXIOM_URL"), 20 | ) 21 | # Create a dataset for that purpose 22 | dataset_name = get_random_name() 23 | client.datasets.create( 24 | dataset_name, "A dataset to test axiom-py logger" 25 | ) 26 | 27 | axiom_handler = AxiomHandler(client, dataset_name, interval=1.0) 28 | 29 | logger = logging.getLogger() 30 | logger.addHandler(axiom_handler) 31 | 32 | logger.warning("This is a log!") 33 | 34 | # This log shouldn't be ingested yet 35 | res = client.apl_query(dataset_name) 36 | self.assertEqual(0, res.status.rowsExamined) 37 | 38 | # Flush events 39 | axiom_handler.flush() 40 | 41 | # Wait a bit for the ingest to finish 42 | time.sleep(0.5) 43 | 44 | # Now we should have a log 45 | res = client.apl_query(dataset_name) 46 | self.assertEqual(1, res.status.rowsExamined) 47 | 48 | logger.warning( 49 | "This log should be ingested without any subsequent call" 50 | ) 51 | 52 | # Wait for the background flush. 53 | time.sleep(1.5) 54 | 55 | # Now we should have two logs 56 | res = client.apl_query(dataset_name) 57 | self.assertEqual(2, res.status.rowsExamined) 58 | 59 | # Cleanup created dataset 60 | client.datasets.delete(dataset_name) 61 | --------------------------------------------------------------------------------
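For orientation after reading the snapshot above, here is a minimal sketch of how the main pieces shown in this dump (the `Client`, the `client.tokens` sub-client, and the structlog `AxiomProcessor`) fit together. Only the constructor and method signatures are taken from the files above; the dataset name, token name, and the exact structlog processor chain are illustrative assumptions rather than part of the library source.

```python
import os

import structlog

from axiom_py import Client
from axiom_py.structlog import AxiomProcessor
from axiom_py.tokens import CreateTokenRequest, TokenOrganizationCapabilities

# Construct the client the same way the test suite does: token, org ID, URL.
client = Client(
    os.getenv("AXIOM_TOKEN"),
    os.getenv("AXIOM_ORG_ID"),
    os.getenv("AXIOM_URL"),
)

# Create a narrowly scoped API token, mirroring test_api_tokens.
# "example-token" is a hypothetical name chosen for this sketch.
created = client.tokens.create(
    CreateTokenRequest(
        name="example-token",
        orgCapabilities=TokenOrganizationCapabilities(apiTokens=["read"]),
    )
)
print(created.id, created.token)

# Route structlog events to Axiom. "my-dataset" is a placeholder and the
# processor ordering is an assumption; AxiomProcessor buffers events and
# flushes after 1000 events, after `interval` seconds, or at exit.
structlog.configure(
    processors=[
        structlog.processors.add_log_level,
        structlog.processors.TimeStamper(fmt="iso", key="_time"),
        AxiomProcessor(client, "my-dataset", interval=1),
        structlog.dev.ConsoleRenderer(),
    ]
)
structlog.get_logger().info("hello from axiom-py")
```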