├── .gitattributes
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
└── workflows
│ ├── artifacts.yml
│ ├── build.yml
│ └── release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── _typos.toml
├── awpy
├── __init__.py
├── cli.py
├── constants.py
├── converters.py
├── data
│ ├── __init__.py
│ ├── map_data.py
│ └── utils.py
├── demo.py
├── nav.py
├── parsers
│ ├── __init__.py
│ ├── bomb.py
│ ├── clock.py
│ ├── events.py
│ ├── grenades.py
│ ├── rounds.py
│ ├── ticks.py
│ └── utils.py
├── plot
│ ├── __init__.py
│ ├── nav.py
│ ├── plot.py
│ └── utils.py
├── spawn.py
├── stats
│ ├── __init__.py
│ ├── adr.py
│ ├── kast.py
│ └── rating.py
├── vector.py
└── visibility.py
├── docs
├── Makefile
├── conf.py
├── examples
│ ├── demo_stats.ipynb
│ ├── nav.ipynb
│ ├── parse_demo.ipynb
│ ├── parse_demo_cli.ipynb
│ ├── plot_demo.ipynb
│ └── visibility.ipynb
├── getting-started
│ ├── faq.rst
│ ├── installation.rst
│ └── license.rst
├── index.rst
├── make.bat
└── modules
│ ├── cli.rst
│ ├── data.rst
│ ├── demo.rst
│ ├── nav.rst
│ ├── parser_output.rst
│ ├── plot.rst
│ ├── stats.rst
│ └── visibility.rst
├── pyproject.toml
├── scripts
├── generate-maps.ps1
├── generate-navs.ps1
├── generate-spawns.ps1
├── generate-tris.ps1
└── update-latest-patch.ps1
├── tests
├── __init__.py
├── conftest.py
├── de_dust2.json
├── test_cli.py
├── test_data.json
├── test_demo.py
├── test_nav.py
├── test_stats.py
└── test_visibility.py
└── uv.lock
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.dem filter=lfs diff=lfs merge=lfs -text
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create an Awpy bug report
4 | title: ""
5 | labels: ""
6 | assignees: ""
7 | ---
8 |
9 | **Describe the bug**
10 | A description of what the bug is.
11 |
12 | **Include Information to Reproduce**
13 | Please include any code, as well as information on your OS or computing environment.
14 |
15 | **Screenshots**
16 | If applicable, add screenshots to help explain your problem.
17 |
18 | **Additional context**
19 | Add any other context about the problem here.
20 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an Awpy feature
4 | title: ""
5 | labels: ""
6 | assignees: ""
7 | ---
8 |
9 | **Is your feature request related to a problem? Please describe.**
10 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
11 |
12 | **Describe the solution you'd like**
13 | A clear and concise description of what you want to happen.
14 |
15 | **Additional context**
16 | Add any other context or screenshots about the feature request here.
17 |
--------------------------------------------------------------------------------
/.github/workflows/artifacts.yml:
--------------------------------------------------------------------------------
1 | # This workflow will create game artifacts used in Awpy
2 |
3 | name: artifacts
4 |
5 | on:
6 | schedule:
7 | - cron: "0 0 * * *" # runs daily at midnight UTC
8 | pull_request:
9 | branches: [main]
10 | workflow_dispatch:
11 |
12 | env:
13 | PYTHON_VERSION: 3.13
14 | SOURCE2_VIEWER_URL: https://github.com/ValveResourceFormat/ValveResourceFormat/releases/download/11.1/cli-windows-x64.zip
15 | CS2_APP_ID: 730
16 |
17 | jobs:
18 | generate-artifacts:
19 | runs-on: windows-latest
20 | steps:
21 | - name: Checkout awpy library
22 | uses: actions/checkout@v4
23 |
24 | - name: Install uv
25 | uses: astral-sh/setup-uv@v5
26 |
27 | - name: Set up Python
28 | run: |
29 | uv python install ${{ env.PYTHON_VERSION }}
30 |
31 | - name: Install awpy
32 | shell: bash
33 | run: |
34 | uv sync --group dev --group test
35 | uv tool install .
36 |
37 | - name: Setup SteamCMD
38 | uses: CyberAndrii/setup-steamcmd@v1
39 |
40 | - name: Get Counter-Strike 2 Version
41 | run: |
42 | $appInfoOutput = steamcmd +login anonymous +app_info_print 730 +logoff +quit | Out-String
43 | if ($appInfoOutput -match '"branches"\s*\{\s*"public"\s*\{\s*"buildid"\s*"(\d+)"\s*"timeupdated"\s*"(\d+)"') {
44 | $latestPatchId = $matches[1]
45 | $latestPatchTimestamp = $matches[2]
46 | # Export the variables for later steps:
47 | echo "LATEST_PATCH_ID=$latestPatchId" >> $env:GITHUB_ENV
48 | echo "LATEST_PATCH_TIMESTAMP=$latestPatchTimestamp" >> $env:GITHUB_ENV
49 | } else {
50 | throw "Error: buildid and timestamp not found in CS2 app info output."
51 | }
52 | Write-Host "Current Counter-Strike 2 Version: $latestPatchId (Updated: $latestPatchTimestamp)"
53 |
54 | - name: Update Latest Patch
55 | id: update_patch
56 | run: |
57 | # Run the update script and capture its output (expected "true" or "false")
58 | $wasPatchUpdated = .\scripts\update-latest-patch.ps1
59 | Write-Host "Patch needs to be updated? $wasPatchUpdated"
60 | # Set a step output so subsequent steps can conditionally run
61 | echo "wasPatchUpdated=$wasPatchUpdated" >> $env:GITHUB_OUTPUT
62 |
63 | - name: Install Counter-Strike 2
64 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
65 | run: |
66 | $csDir = Join-Path (Get-Location) "cs_go"
67 | steamcmd +force_install_dir $csDir +login anonymous +app_update ${{ env.CS2_APP_ID }} +quit
68 |
69 | - name: Install Source2Viewer-CLI
70 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
71 | run: |
72 | Invoke-WebRequest -Uri $env:SOURCE2_VIEWER_URL -OutFile ".\cli-windows-x64.zip"
73 | Expand-Archive -Path .\cli-windows-x64.zip -DestinationPath . -Force
74 |
75 | - name: Update data module to latest patch
76 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
77 | run: |
78 | Get-ChildItem -Force
79 | .\scripts\update-latest-patch.ps1
80 |
81 | - name: Generate Triangles
82 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
83 | run: |
84 | Get-ChildItem -Force
85 | .\scripts\generate-tris.ps1 -inputPath "cs_go\game\csgo\maps"
86 |
87 | - name: Generate Map Data
88 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
89 | run: |
90 | Get-ChildItem -Force
91 | .\scripts\generate-maps.ps1 -inputPath "cs_go\game\csgo\pak01_dir.vpk"
92 |
93 | - name: Generate Nav
94 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
95 | run: |
96 | Get-ChildItem -Force
97 | .\scripts\generate-navs.ps1 -inputPath "cs_go\game\csgo\maps"
98 |
99 | - name: Generate Spawns
100 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
101 | run: |
102 | Get-ChildItem -Force
103 | .\scripts\generate-spawns.ps1 -inputPath "cs_go\game\csgo\maps"
104 |
105 | - name: Upload Artifacts to R2
106 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
107 | uses: cloudflare/wrangler-action@v3
108 | with:
109 | apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
110 | command: |
111 | r2 object put "awpy/${{ env.LATEST_PATCH_ID }}/maps.zip" --file=maps.zip
112 | r2 object put "awpy/${{ env.LATEST_PATCH_ID }}/navs.zip" --file=navs.zip
113 | r2 object put "awpy/${{ env.LATEST_PATCH_ID }}/tris.zip" --file=tris.zip
114 |
115 | - name: Log R2 Upload Message
116 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
117 | run: |
118 | echo "Uploading artifacts for patch $LATEST_PATCH_ID"
119 |
120 | - name: Push changes and create pull request
121 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
122 | run: |
123 | git config user.name "github-actions[bot]"
124 | git config user.email "github-actions[bot]@users.noreply.github.com"
125 | # Create and switch to a new branch
126 | git checkout -B artifacts/${{ env.LATEST_PATCH_ID }}
127 | # Fetch main and reset the working directory to match main so that only desired changes remain
128 | git fetch origin main
129 | git reset --hard origin/main
130 | # Run formatting only on awpy/data/__init__.py:
131 | uv run ruff format awpy/data/__init__.py
132 | # Stage and commit only the file you want to update
133 | git add awpy/data/__init__.py
134 | git commit -m "Update data with latest patch info (${{ env.LATEST_PATCH_ID }})"
135 | # Push the new branch
136 | git push --set-upstream origin artifacts/${{ env.LATEST_PATCH_ID }}
137 |
138 | - name: Open Pull Request
139 | if: steps.update_patch.outputs.wasPatchUpdated == 'true'
140 | uses: peter-evans/create-pull-request@v4
141 | with:
142 | token: ${{ secrets.GITHUB_TOKEN }}
143 | branch: artifacts/${{ env.LATEST_PATCH_ID }}
144 | base: main
145 | title: "artifacts/${{ env.LATEST_PATCH_ID }}"
146 | body: "This PR updates the artifacts for patch ${{ env.LATEST_PATCH_ID }}."
147 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | # This workflow will run the Awpy test suite
2 |
3 | name: build
4 |
5 | on:
6 | pull_request:
7 | branches: [main]
8 | paths:
9 | - "awpy/**"
10 | - "tests/**"
11 | - "pyproject.toml"
12 |
13 | jobs:
14 | build:
15 | runs-on: ${{ matrix.os }}
16 | strategy:
17 | fail-fast: false
18 | matrix:
19 | os: [ubuntu-latest, macos-latest, windows-latest]
20 | python-version: ["3.11", "3.12", "3.13"]
21 | timeout-minutes: 30
22 | steps:
23 | - name: Checkout awpy library
24 | uses: actions/checkout@v4
25 |
26 | - name: Cache test demos
27 | id: cache-demos
28 | uses: actions/cache@v4
29 | env:
30 | cache-name: cache-demos
31 | with:
32 | # demos are downloaded to and looked for in `{repo}/tests`
33 | path: ${{ github.workspace }}/tests/*.dem
34 | # Invalidate the cache if the file containing the demo urls has changed.
35 | key: cache-test-demos-${{ hashFiles('**/test_data.json') }}
36 | # Care with this: If a demo changes but the name remains the same
37 | # then this could cause issues. So do not do that!
38 | restore-keys: cache-test-demos-
39 |
40 | - name: Install uv
41 | uses: astral-sh/setup-uv@v5
42 |
43 | - name: Set up Python
44 | run: |
45 | uv python install ${{ matrix.python-version }}
46 |
47 | - name: Install awpy
48 | shell: bash
49 | run: |
50 | uv sync --group dev --group test
51 | uv tool install .
52 |
53 | - name: Formatting + Lint
54 | shell: bash
55 | run: |
56 | uv run ruff check . --fix --exit-zero
57 | uv run ruff check .
58 |
59 | - name: Test
60 | shell: bash
61 | run: |
62 | uv run awpy get maps
63 | uv run awpy get navs
64 | uv run awpy get tris
65 | uv run coverage run -m pytest --durations=10
66 | uv run coverage report -m
67 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: release
2 |
3 | on:
4 | push:
5 | tags:
6 | - "release/*.*.*"
7 |
8 | jobs:
9 | build-and-release:
10 | runs-on: ubuntu-latest
11 | permissions:
12 | id-token: write
13 |
14 | steps:
15 | - name: Checkout Awpy library
16 | uses: actions/checkout@v4
17 |
18 | - name: Install uv
19 | uses: astral-sh/setup-uv@v5
20 |
21 | - name: Set up Python
22 | run: |
23 | uv python install 3.11
24 |
25 | - name: Build Awpy
26 | run: |
27 | uv build
28 |
29 | - name: Publish to TestPyPI
30 | run: |
31 | uv publish --publish-url https://test.pypi.org/legacy/ --check-url https://test.pypi.org/simple/ --trusted-publishing always --verbose
32 |
33 | - name: Publish to PyPI
34 | run: |
35 | uv publish --check-url https://pypi.org/simple/ --trusted-publishing always --verbose
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | # For a library or package, you might want to ignore these files since the code is
86 | # intended to run in multiple environments; otherwise, check them in:
87 | # .python-version
88 |
89 | # pipenv
90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
93 | # install all needed dependencies.
94 | #Pipfile.lock
95 |
96 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
97 | __pypackages__/
98 |
99 | # Celery stuff
100 | celerybeat-schedule
101 | celerybeat.pid
102 |
103 | # SageMath parsed files
104 | *.sage.py
105 |
106 | # Environments
107 | .env
108 | .venv
109 | env/
110 | venv/
111 | ENV/
112 | env.bak/
113 | venv.bak/
114 | .DS_Store
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # counter strike files
138 | *.dem
139 | *.zip
140 | *.nav
141 | *.vphys
142 | *.tri
143 |
144 | # vscode
145 | *code-workspace
146 | .vscode/
147 |
148 | # windows executable
149 | *.exe
150 | *.dll
151 |
152 | # exceptions txt files
153 | *.txt
154 |
155 | # github actions
156 | cs_go/
157 | node_modules/
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | exclude: docs/
2 | repos:
3 | - repo: 'https://github.com/pre-commit/pre-commit-hooks'
4 | rev: v5.0.0
5 | hooks:
6 | - id: check-yaml
7 | language: python
8 | - id: check-json
9 | language: python
10 | - id: end-of-file-fixer
11 | language: python
12 | - id: trailing-whitespace
13 | language: python
14 | - id: check-docstring-first
15 | language: python
16 | - id: requirements-txt-fixer
17 | language: python
18 | - id: check-merge-conflict
19 | language: python
20 | - id: check-toml
21 | language: python
22 | - id: check-xml
23 | language: python
24 | - id: check-ast
25 | language: python
26 | - id: check-builtin-literals
27 | language: python
28 | - repo: 'https://github.com/charliermarsh/ruff-pre-commit'
29 | rev: v0.11.2
30 | hooks:
31 | - id: ruff
32 | args:
33 | - '--fix'
34 | - '--exit-non-zero-on-fix'
35 | - repo: https://github.com/astral-sh/uv-pre-commit
36 | rev: 0.6.9
37 | hooks:
38 | - id: uv-lock
39 | - repo: https://github.com/crate-ci/typos
40 | rev: v1.30.2
41 | hooks:
42 | - id: typos
43 | args: []
44 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | version: 2
6 |
7 | build:
8 | os: "ubuntu-22.04"
9 | tools:
10 | python: "3.11"
11 | commands:
12 | - asdf plugin add uv
13 | - asdf install uv latest
14 | - asdf global uv latest
15 | - uv sync --all-extras --group docs
16 | - uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html
17 |
18 | sphinx:
19 | configuration: docs/conf.py
20 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Awpy
2 |
3 | Hi! We're happy to have you here. Thank you for considering making a contribution to Awpy. Contributions come in various forms -- from bug reports, to writing example documentation, to proposing and implementing new features. This is a guide on how to make contributions to Awpy. If you have any bugs or feature requests, please use [GitHub Issues](https://github.com/pnxenopoulos/awpy/issues). Otherwise, please keep reading.
4 |
5 | ## Python code
6 |
7 | To make sure that Awpy code stays readable and works correctly, we are making use of a variety of helpful tools.
8 |
9 | We use the following tools:
10 |
11 | - [uv](https://docs.astral.sh/uv/): An extremely fast Python package and project manager, written in Rust.
12 | - [ruff](https://github.com/astral-sh/ruff): An extremely fast Python linter, written in Rust.
13 | - [pytest](https://docs.pytest.org): A mature full-featured Python testing tool.
14 | - [pre-commit](https://pre-commit.com/): A framework for managing and maintaining multi-language pre-commit hooks.
15 |
16 | Please install these tools before you begin to develop. After you've installed `uv`, you can run
17 |
18 | ```shell
19 | uv sync --all-groups
20 | uv tool install .
21 | ```
22 |
23 | To install the dependencies. If you want to run Awpy cli commands, you'd do `uv run awpy ...`. To run other commands, you can do `uv run ruff format .` or `uv run pytest .`
24 |
25 | To install the pre-commit hooks, you can run `uv run pre-commit install`
26 |
27 | After you have made your changes locally, use these tools to surface bugs or code smells by running the following:
28 |
29 | ```shell
30 | uv run pre-commit run --all-files --show-diff-on-failure # ruff, typos, uv
31 | uv run coverage run -m pytest --durations=10 # python tests
32 | uv run coverage report -m # produces text-based coverage report
33 | ```
34 |
35 | The `coverage run -m pytest --durations=10` command, which runs the Python tests, can take a while as it has to not only download a handful of demos but also parse them. These tests *must pass* for a pull request to be merged into the main branch.
36 |
37 | If you do not have the time or bandwidth, you can omit running the test command locally, since Github actions will run them, as well.
38 |
39 | ### Testing/Coverage
40 |
41 | If you are fixing a bug or adding a new feature, we highly recommend you add [unit tests](https://en.wikipedia.org/wiki/Unit_testing) that cover the original bug or your new functionality. If you are new to writing unit tests, look at the aforementioned link, or check out the [tests](tests) directory in Awpy to see how our existing tests are built.
42 |
43 | If you are adding a test that requires a specific demo, please let us know so that we can include them in our set of testing demos, which is located in the [test_data.json](/tests/test_data.json) file.
44 |
45 | You need `pandoc` to build the documentation locally. On Linux, you can install it with `sudo apt install pandoc`.
46 |
47 | ### Game Updates
48 | During game updates, we may need to update .nav, .tri, map images and map data. The scripts to do these are located in `scripts/` and depend on the [Source2Viewer CLI](https://valveresourceformat.github.io/). Specifically, you can look at the `artifacts.yml` workflow to see how this process is automated.
49 |
50 | ## Thanks
51 | With all this, you are now ready to make contributions to Awpy. Many users depend on your contributions. We look forward to your help!
52 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020-2025 Peter Xenopoulos
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
Awpy
3 |
4 | [](https://discord.gg/W34XjsSs2H) [](https://pepy.tech/project/awpy) [](https://github.com/pnxenopoulos/awpy/actions/workflows/build.yml) [](https://github.com/pnxenopoulos/awpy/actions/workflows/artifacts.yml) [](https://awpy.readthedocs.io/en/latest/?badge=latest) [](https://github.com/pnxenopoulos/awpy/blob/main/LICENSE)
5 |
6 |
7 |
8 | **Counter-Strike 2 Demo Parsing, Analytics and Visualization in Python**
9 |
10 | - :computer: Parse Counter-Strike demos in Python or with a command-line interface
11 | - :skull: Access tick-level player and event data, like kills, damages and more
12 | - :chart: Calculate popular statistics, such as ADR, KAST and Rating
13 | - :mag_right: Determine player visibility in microseconds
14 | - :earth_americas: Parse navigation meshes (.nav) and calculate distance metrics
15 | - 🎞️ Visualize Counter-Strike data, including animated round gifs and heatmaps
16 | - :speaker: Active [Discord](https://discord.gg/W34XjsSs2H) community
17 |
18 | ## Installation
19 |
20 | To install Awpy, you can run
21 |
22 | ```
23 | pip install awpy
24 | ```
25 |
26 | > [!NOTE]
27 | > Awpy requires [Python](https://www.python.org/downloads/) >= 3.11. To update the library, just run `pip install --upgrade awpy`. To check your current Awpy version, run `pip show awpy`. If you want to see what data is available for download, run `awpy get --help`.
28 |
29 | > [!TIP]
30 | > Don't worry if you get stuck, visit [our Discord](https://discord.gg/W34XjsSs2H) for help.
31 |
32 | ## Example Code
33 |
34 | Using Awpy is easy. Just find a demo you want to analyze and use the example below to get started. You can easily find demos on [HLTV](https://hltv.org), [FACEIT](https://faceit.com) or [CS2Stats](https://csstats.gg/).
35 |
36 | ```python
37 | from awpy import Demo
38 |
39 | # Create and parse demo
40 | dem = Demo("g2-vs-navi.dem")
41 | dem.parse()
42 |
43 | # Access various dictionaries & dataframes
44 | dem.header
45 | dem.rounds
46 | dem.grenades
47 | dem.kills
48 | dem.damages
49 | dem.bomb
50 | dem.smokes
51 | dem.infernos
52 | dem.shots
53 | dem.footsteps
54 | dem.ticks
55 |
56 | # The dataframes are Polars dataframes
57 | # to transform to Pandas, just do .to_pandas()
58 | dem.ticks.to_pandas()
59 | ```
60 |
61 | > [!TIP]
62 | > Want to learn more about the parser output? Visit the [parser primer](https://awpy.readthedocs.io/en/latest/modules/parser_output.html) in our documentation!
63 |
64 | ### Help! The parser doesn't work or returns weird data
65 |
66 | Counter-Strike demos can be a bit troublesome. It is likely you'll see increased error rates in POV demos. To help us address parsing issues, please open a bug report in our [Github issues](https://github.com/pnxenopoulos/awpy/issues). Additionally, you can reach out in [our Discord](https://discord.gg/3JrhKYcEKW). We appreciate any help in identifying bugs. We use [LaihoE's demoparser](https://github.com/LaihoE/demoparser) as a backend, so you may also check there for any open issues.
67 |
68 | ## Examples and Projects
69 |
70 | Take a look at the following Jupyter notebooks provided in our `examples/` directory. These will help you get started parsing and analyzing Counter-Strike data.
71 |
72 | - [Parsing a CS2 demofile](https://awpy.readthedocs.io/en/latest/examples/parse_demo.html)
73 | - [Parsing a CS2 demofile through command-line](https://awpy.readthedocs.io/en/latest/examples/parse_demo_cli.html)
74 | - [Calculating ADR, KAST% and Rating](https://awpy.readthedocs.io/en/latest/examples/demo_stats.html)
75 | - [Plotting CS2 demos](https://awpy.readthedocs.io/en/latest/examples/plot_demo.html)
76 | - [Calculating visibility from CS2 demos](https://awpy.readthedocs.io/en/latest/examples/visibility.html)
77 | - [Parsing CS2 `.nav` files](https://awpy.readthedocs.io/en/latest/examples/nav.html)
78 |
79 | If you use the parser for any public analysis, we kindly ask you to link to the Awpy repository, so that others may know how you parsed, analyzed or visualized your data. If you have a paper or project that uses the parser, please let us know in Discord so we can add it to our growing list!
80 |
81 | > [!IMPORTANT]
82 | > If you use Awpy, we'd love if you could link back to our repo!
83 |
84 | ## Contributing
85 |
86 | We welcome any contributions from the community, no matter the skill-level. You can visit our [issue page](https://github.com/pnxenopoulos/awpy/issues) to see what issues are still open, the [Awpy project](https://github.com/users/pnxenopoulos/projects/5) for a different view of project priorities, or you can message us on Discord. Some examples of where you can make a difference are in documentation, quality assurance, developing new features, or creating unique content with Awpy. You can see more examples of community content [here](https://awpy.readthedocs.io/en/latest/projects.html). If you are interested in contributing to Awpy, learn more [here](https://github.com/pnxenopoulos/awpy/blob/main/CONTRIBUTING.md).
87 |
88 | > [!TIP]
89 | > We are happy to walk through those that want to contribute, no matter your skill level. There are a diverse set of ways one can contribute to Awpy. We welcome first-time contributors!
90 |
91 | ## Acknowledgments
92 |
93 | The name "Awpy" is due to [Nick Wan](https://www.twitch.tv/nickwan_datasci) -- we recommend his stream for sports data analytics enthusiasts.
94 |
95 | Awpy was first built on the amazing work done in the [demoinfocs-golang](https://github.com/markus-wa/demoinfocs-golang) Golang library. We now rely on [demoparser2](https://github.com/LaihoE/demoparser) for parsing, which is another fantastic parsing project, built specifically for Python.
96 |
97 | Awpy's team includes JanEric, adi and hojlund, who you can find in the Awpy Discord. Their work, among others, is crucial to Awpy's continued success! To contribute to Awpy, please visit [CONTRIBUTING](https://github.com/pnxenopoulos/awpy/blob/main/CONTRIBUTING.md).
98 |
--------------------------------------------------------------------------------
/_typos.toml:
--------------------------------------------------------------------------------
1 | [default.extend-words]
2 | # Don't correct the acronym ot for overtime
3 | ot = "ot"
4 |
5 |
6 | [default]
7 | extend-ignore-identifiers-re = [
8 | # Don't correct the name MAKARxJESUS
9 | "MAKAR.*",
10 | # Don't correct the id in notebook
11 | ".*ba355586.*",
12 | ]
13 |
--------------------------------------------------------------------------------
/awpy/__init__.py:
--------------------------------------------------------------------------------
"""Provides data parsing, analytics and visualization capabilities for Counter-Strike 2 data."""

from awpy.demo import Demo
from awpy.nav import Nav
from awpy.spawn import Spawns

# NOTE(review): keep __version__ in sync with the version declared in pyproject.toml.
__version__ = "2.0.2"
# Public API re-exported at package top level.
__all__ = ["Demo", "Nav", "Spawns"]
--------------------------------------------------------------------------------
/awpy/cli.py:
--------------------------------------------------------------------------------
1 | """Command-line interface for Awpy."""
2 |
3 | from pathlib import Path
4 | from typing import Literal
5 |
6 | import click
7 | from loguru import logger
8 |
9 | import awpy.data
10 | import awpy.data.map_data
11 | import awpy.data.utils
12 | from awpy import Demo, Nav, Spawns
13 | from awpy.visibility import VphysParser
14 |
15 |
@click.group(name="awpy")
def awpy_cli() -> None:
    """Root command group for the Awpy CLI; subcommands register via @awpy_cli.command."""
19 |
20 |
@awpy_cli.command(
    name="get",
    help=f"""
Get Counter-Strike 2 resources like parsed map data or nav meshes. Available choices: {awpy.data.POSSIBLE_ARTIFACTS}""", # noqa: E501
)
@click.argument("resource_type", type=click.Choice(awpy.data.POSSIBLE_ARTIFACTS))
@click.option("--patch", type=int, help="Patch number to fetch the resources.", default=awpy.data.CURRENT_BUILD_ID)
def get(resource_type: Literal["maps", "navs", "tris"], *, patch: int = awpy.data.CURRENT_BUILD_ID) -> None:
    """Download a resource artifact for a given patch into the Awpy data directory.

    Args:
        resource_type: Which artifact to fetch (one of awpy.data.POSSIBLE_ARTIFACTS).
        patch: Build id of the game patch to fetch artifacts for.

    Raises:
        NotImplementedError: If the resource type is not a known artifact.
            (Defensive only -- click.Choice already restricts the value, so this
            branch should be unreachable via the CLI.)
    """
    awpy.data.utils.create_data_dir_if_not_exists()

    if resource_type in awpy.data.POSSIBLE_ARTIFACTS:
        awpy.data.utils.fetch_resource(resource_type, patch)
    else:
        resource_not_impl_err_msg = f"Resource type {resource_type} is not yet implemented."
        raise NotImplementedError(resource_not_impl_err_msg)
37 |
38 |
@awpy_cli.command(name="artifacts", help="Information on Awpy artifacts.")
def artifacts() -> None:
    """Print information on Awpy artifacts."""
    print("Current patch:", awpy.data.CURRENT_BUILD_ID)
    # One summary line (plus a blank separator line) per available patch.
    for build_id, info in awpy.data.AVAILABLE_PATCHES.items():
        summary = f"Patch {build_id} ({info['datetime']}, {info['url']}). Available artifacts: {info['available']}\n"  # noqa: E501
        print(summary)
47 |
48 |
@awpy_cli.command(name="parse", help="Parse a Counter-Strike 2 demo (.dem) file.")
@click.argument("demo_path", type=click.Path(exists=True))
@click.option("--outpath", type=click.Path(), help="Path to save the compressed demo.")
@click.option("--events", multiple=True, help="List of events to parse.")
@click.option("--player-props", multiple=True, help="List of player properties to include.")
@click.option("--other-props", multiple=True, help="List of other properties to include.")
@click.option("--verbose", is_flag=True, default=False, help="Enable verbose mode.")
def parse_demo(
    demo_path: Path,
    *,
    outpath: Path | None = None,
    events: tuple[str] | None = None,
    player_props: tuple[str] | None = None,
    other_props: tuple[str] | None = None,
    verbose: bool = False,
) -> None:
    """Parse a demo file given its path, then write the compressed output."""

    def _as_list(opt: tuple[str] | None) -> list[str] | None:
        # NOTE(review): only the first occurrence of a repeated option is used;
        # values are expected as one comma-separated string -- confirm intended.
        return opt[0].split(",") if opt else None

    demo_file = Path(demo_path)  # Pathify
    demo = Demo(path=demo_file, verbose=verbose)
    demo.parse(
        events=_as_list(events),
        player_props=_as_list(player_props),
        other_props=_as_list(other_props),
    )
    demo.compress(outpath=outpath)
74 |
75 |
@awpy_cli.command(name="spawn", help="Parse spawns from a Counter-Strike 2 .vent file.", hidden=True)
@click.argument("vent_file", type=click.Path(exists=True))
@click.option("--outpath", type=click.Path(), help="Path to save the spawns.")
def parse_spawn(vent_file: Path, *, outpath: Path | None = None) -> None:
    """Parse spawn points from a .vents file and write them to JSON.

    Args:
        vent_file: Path to the .vents file to parse.
        outpath: Destination JSON path. Defaults to the input path with a .json suffix.
    """
    vent_file = Path(vent_file)
    if not outpath:
        outpath = vent_file.with_suffix(".json")
    spawns_data = Spawns.from_vents_file(vent_file)
    spawns_data.to_json(path=outpath)
    # Log the actual destination. Previously this always logged the default
    # `<vent_file>.json` path, even when --outpath pointed elsewhere.
    logger.success(f"Spawns file saved to {outpath}, {spawns_data}")
87 |
88 |
@awpy_cli.command(name="nav", help="Parse a Counter-Strike 2 .nav file.", hidden=True)
@click.argument("nav_file", type=click.Path(exists=True))
@click.option("--outpath", type=click.Path(), help="Path to save the nav file.")
def parse_nav(nav_file: Path, *, outpath: Path | None = None) -> None:
    """Parse a nav mesh file and write it to JSON.

    Args:
        nav_file: Path to the .nav file to parse.
        outpath: Destination JSON path. Defaults to the input path with a .json suffix.
    """
    nav_file = Path(nav_file)
    nav_mesh = Nav.from_path(path=nav_file)
    if not outpath:
        outpath = nav_file.with_suffix(".json")
    nav_mesh.to_json(path=outpath)
    # Log the actual destination. Previously this always logged the default
    # `<nav_file>.json` path, even when --outpath pointed elsewhere.
    logger.success(f"Nav mesh saved to {outpath}, {nav_mesh}")
100 |
101 |
@awpy_cli.command(name="mapdata", help="Parse Counter-Strike 2 map images.", hidden=True)
@click.argument("overview_dir", type=click.Path(exists=True))
def parse_mapdata(overview_dir: Path) -> None:
    """Parse radar overview files in a directory into a map-data JSON file.

    Args:
        overview_dir: Directory containing the per-map overview files.

    Raises:
        NotADirectoryError: If ``overview_dir`` is not a directory.
    """
    overview_dir = Path(overview_dir)
    if not overview_dir.is_dir():
        overview_dir_err_msg = f"{overview_dir} is not a directory."
        raise NotADirectoryError(overview_dir_err_msg)
    map_data = awpy.data.map_data.map_data_from_vdf_files(overview_dir)
    awpy.data.map_data.update_map_data_file(map_data, "map-data.json")
    # Fix: keep the log in sync with the actual output filename
    # ("map-data.json", not the previously-logged "map_data.json").
    logger.success("Map data saved to map-data.json")
113 |
114 |
@awpy_cli.command(name="tri", help="Parse triangles (*.tri) from a .vphys file.", hidden=True)
@click.argument("vphys_file", type=click.Path(exists=True))
@click.option("--outpath", type=click.Path(), help="Path to save the parsed triangle.")
def generate_tri(vphys_file: Path, *, outpath: Path | None = None) -> None:
    """Convert a .vphys physics file into a .tri triangle file."""
    parser = VphysParser(Path(vphys_file))
    parser.to_tri(path=outpath)
    # NOTE(review): when --outpath is omitted this logs "None"; VphysParser.to_tri
    # presumably chooses its own default destination in that case -- confirm.
    logger.success(f"Tri file saved to {outpath}")
124 |
--------------------------------------------------------------------------------
/awpy/constants.py:
--------------------------------------------------------------------------------
1 | """Module for constants in Counter-Strike 2."""
2 |
3 | # Sides
4 | CT_SIDE = "ct"
5 | T_SIDE = "t"
6 |
7 | # Server
8 | DEFAULT_SERVER_TICKRATE = 128
9 |
10 | # Rounds
11 | DEFAULT_FREEZE_TIME_IN_SECS = 20.0
12 | DEFAULT_ROUND_TIME_IN_SECS = 115.0
13 | DEFAULT_BOMB_TIME_IN_SECS = 40.0
14 |
15 | # Grenades
16 | DEFAULT_SMOKE_DURATION_IN_SECS = 20.0
17 | DEFAULT_INFERNO_DURATION_IN_SECS = 7.03125
18 |
--------------------------------------------------------------------------------
/awpy/converters.py:
--------------------------------------------------------------------------------
1 | """Converters for index-based fields."""
2 |
3 | import awpy.constants
4 |
5 | # Taken from https://github.com/markus-wa/demoinfocs-golang/blob/master/pkg/demoinfocs/events/events.go#L423
6 | HITGROUP_MAP = {
7 | 0: "generic",
8 | 1: "head",
9 | 2: "chest",
10 | 3: "stomach",
11 | 4: "left arm",
12 | 5: "right arm",
13 | 6: "left leg",
14 | 7: "right leg",
15 | 8: "neck",
16 | 10: "gear",
17 | }
18 |
19 | # Taken from https://github.com/markus-wa/demoinfocs-golang/blob/master/pkg/demoinfocs/events/events.go
20 | ROUND_END_REASON_MAP = {
21 | 0: "still in progress",
22 | 1: "target bombed",
23 | 2: "vip escaped",
24 | 3: "vip killed",
25 | 4: "t escaped",
26 | 5: "ct stopped escape",
27 | 6: "t stopped",
28 | 7: "bomb defused",
29 | 8: "t eliminated",
30 | 9: "ct eliminated",
31 | 10: "draw",
32 | 11: "hostages rescued",
33 | 12: "target saved",
34 | 13: "hostages not rescued",
35 | 14: "t not escaped",
36 | 15: "vip not escaped",
37 | 16: "game start",
38 | 17: "t surrender",
39 | 18: "ct surrender",
40 | 19: "t planted",
41 | 20: "ct reached hostage",
42 | }
43 |
44 | # Taken from https://github.com/markus-wa/demoinfocs-golang/blob/master/pkg/demoinfocs/common/common.go#L20
45 | TEAM_MAP = {0: "unassigned", 1: "spectator", 2: awpy.constants.T_SIDE, 3: awpy.constants.CT_SIDE}
46 |
--------------------------------------------------------------------------------
/awpy/data/__init__.py:
--------------------------------------------------------------------------------
1 | """Module to hold Counter-Strike 2 data."""
2 |
3 | import datetime
4 | import pathlib
5 |
6 | # Data directories
7 | AWPY_DATA_DIR = pathlib.Path.home() / ".awpy"
8 | MAPS_DIR = AWPY_DATA_DIR / "maps"
9 | NAVS_DIR = AWPY_DATA_DIR / "navs"
10 | TRIS_DIR = AWPY_DATA_DIR / "tris"
11 |
12 | # Uses build id from https://steamdb.info/app/730/patchnotes/
13 | POSSIBLE_ARTIFACTS = ["maps", "navs", "tris"]
14 | CURRENT_BUILD_ID = 17595823
15 | AVAILABLE_PATCHES = {
16 | 17595823: {
17 | "url": "https://steamdb.info/patchnotes/17595823/",
18 | "datetime": datetime.datetime.fromtimestamp(1741128785, datetime.UTC),
19 | "available": POSSIBLE_ARTIFACTS,
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/awpy/data/map_data.py:
--------------------------------------------------------------------------------
1 | """Module to create the map data."""
2 |
3 | import contextlib
4 | import json
5 | from pathlib import Path
6 | from typing import TypedDict
7 |
8 | with contextlib.suppress(ImportError, ModuleNotFoundError):
9 | import vdf # This is only used for map data parsing
10 |
11 | from loguru import logger
12 |
# Path we attempt to read; pre-bind it so the warning in the handler cannot hit
# a NameError when the `awpy.data` import itself fails (previously the name was
# only assigned inside the try block).
map_data_file = None
try:
    from awpy.data import MAPS_DIR

    map_data_file = MAPS_DIR / "map-data.json"
    # Use a distinct name for the handle so a failed json.load still logs the
    # *path* (the original shadowed the path variable with the file object).
    with open(map_data_file) as map_data_fp:
        MAP_DATA = json.load(map_data_fp)
except Exception as _e:  # best-effort load: fall back to empty map data
    logger.warning(f"Failed to load map data from {map_data_file}.")
    MAP_DATA = {}
22 |
23 |
class MapData(TypedDict):
    """Type of the data for a map. `pos_x` is upper left world coordinate."""

    pos_x: int  # world X of the radar overview's upper-left corner
    pos_y: int  # world Y of the radar overview's upper-left corner
    scale: float  # presumably world units per radar pixel -- confirm against plotting code
    rotate: int | None  # optional rotation from the overview file; None when absent
    zoom: float | None  # optional zoom factor from the overview file; None when absent
    lower_level_max_units: float  # max altitude of the lower level; -1000000 sentinel when no verticalsections
33 |
34 |
def map_data_from_vdf_files(vdf_folder: Path) -> dict[str, MapData]:
    """Generate the map data from a vdf file.

    Args:
        vdf_folder: Directory of per-map overview files in vdf format.

    Returns:
        Mapping of map name (file stem) to its parsed ``MapData``, sorted by name.
    """
    # Annotation fixed: this accumulates many maps, not a single MapData.
    new_map_data: dict[str, MapData] = {}
    for vdf_file in vdf_folder.iterdir():
        # Skip vanity and previews
        if vdf_file.stem.endswith("vanity") or "_preview_" in vdf_file.stem:
            continue
        parsed_data = vdf.loads(vdf_file.read_text())
        # The vdf root key must match the file stem (i.e. the map name).
        if vdf_file.stem not in parsed_data:
            print(f"Skipping {vdf_file.stem} because the file name is not a valid key.")
            print(f"Keys: {list(parsed_data.keys())}")
            continue
        map_data = parsed_data[vdf_file.stem]
        new_map_data[vdf_file.stem] = {
            "pos_x": int(float(map_data["pos_x"])),  # float() first: values may be written like "123.0"
            "pos_y": int(float(map_data["pos_y"])),
            "scale": float(map_data["scale"]),
            "rotate": int(rotate) if (rotate := map_data.get("rotate")) else None,
            "zoom": float(zoom) if (zoom := map_data.get("zoom")) else None,
            "lower_level_max_units": float(
                map_data.get("verticalsections", {})
                .get("lower", {})
                .get("AltitudeMax", -1000000)  # Use instead of infinity
            ),
        }
    return dict(sorted(new_map_data.items()))
61 |
62 |
def update_map_data_file(new_map_data: dict[str, MapData], filepath: Path) -> None:
    """Write the map data dictionary to ``filepath`` as JSON with a trailing newline."""
    serialized = json.dumps(new_map_data)
    with open(filepath, "w") as out_file:
        out_file.write(serialized)
        out_file.write("\n")
68 |
--------------------------------------------------------------------------------
/awpy/data/utils.py:
--------------------------------------------------------------------------------
1 | """Manages data folders for Awpy."""
2 |
3 | import zipfile
4 |
5 | import requests
6 | import tqdm
7 | from loguru import logger
8 |
9 | from awpy.data import AWPY_DATA_DIR
10 |
11 |
def create_data_dir_if_not_exists() -> None:
    """Create the awpy data directory when it is missing; no-op otherwise."""
    if AWPY_DATA_DIR.exists():
        return
    AWPY_DATA_DIR.mkdir(parents=True, exist_ok=True)
    logger.debug(f"Created awpy data directory at {AWPY_DATA_DIR}")
18 |
19 |
def fetch_resource(resource: str, patch: int, filetype: str = ".zip") -> None:
    """Download a resource from the Awpy mirror, extracting it when zipped.

    The download URL is built from the resource name, patch number, and file
    extension, and the file is streamed to disk with a tqdm progress bar. For
    ``.zip`` resources, the archive is downloaded into its own directory, a
    ``.patch`` marker file recording the patch number is written there, the
    archive is extracted in place, and the archive itself is deleted afterwards.

    Args:
        resource (str): Name of the resource to fetch.
        patch (int): Patch/build number to include in the URL.
        filetype (str, optional): File extension of the resource. Defaults to ".zip".

    Raises:
        requests.HTTPError: If the HTTP GET does not return an OK status.
    """
    is_archive = filetype == ".zip"

    # Zipped resources get their own subdirectory; others land in the data root.
    download_target = AWPY_DATA_DIR / f"{resource}{filetype}"
    if is_archive:
        extract_dir = AWPY_DATA_DIR / resource
        extract_dir.mkdir(parents=True, exist_ok=True)
        download_target = extract_dir / f"{resource}{filetype}"

    # Stream the resource to disk, updating the progress bar per chunk.
    resource_url = f"https://awpycs.com/{patch}/{resource}{filetype}"
    response = requests.get(resource_url, stream=True, timeout=300)
    if not response.ok:
        bad_resp_err_msg = f"Failed to fetch {resource_url}: {response.status_code}"
        raise requests.HTTPError(bad_resp_err_msg)
    total_size = int(response.headers.get("content-length", 0))
    with (
        tqdm.tqdm(total=total_size, unit="B", unit_scale=True) as progress,
        open(download_target, "wb") as out_file,
    ):
        for chunk in response.iter_content(1024):
            progress.update(len(chunk))
            out_file.write(chunk)

    if is_archive:
        # Record which patch this directory's contents belong to.
        patch_marker = extract_dir / ".patch"
        with open(patch_marker, "w") as marker:
            marker.write(str(patch))
        logger.info(f"Wrote patch number {patch} to {patch_marker}")

        # Extract, then remove the archive; keep the archive if extraction fails.
        try:
            with zipfile.ZipFile(download_target, "r") as archive:
                archive.extractall(extract_dir)
            logger.success(f"Extracted contents of {download_target} to {extract_dir}")
        except zipfile.BadZipFile as e:
            logger.error(f"Failed to unzip {download_target}: {e}")
            return
        download_target.unlink()
77 |
--------------------------------------------------------------------------------
/awpy/parsers/__init__.py:
--------------------------------------------------------------------------------
1 | """Module for specific parsing functions."""
2 |
--------------------------------------------------------------------------------
/awpy/parsers/bomb.py:
--------------------------------------------------------------------------------
1 | """Module to parse bomb location and status."""
2 |
3 | import polars as pl
4 |
5 | import awpy.parsers.utils
6 |
7 |
def _bomb_event_frame(
    event_df: pl.DataFrame,
    event_label: str,
    valid_ticks: pl.Series,
    *,
    bombsite_from_place: bool = False,
) -> pl.DataFrame:
    """Normalize one bomb event DataFrame to (tick, event, X, Y, Z, steamid, name, bombsite)."""
    if event_df.is_empty():
        return pl.DataFrame()
    # Plant/defuse events carry the site name in "user_place"; the rest have no bombsite.
    bombsite_expr = pl.col("user_place") if bombsite_from_place else pl.lit(None)
    return (
        event_df.with_columns(pl.lit(event_label).alias("event"))
        .sort("tick")
        .filter(pl.col("tick").is_in(valid_ticks))
        .select(
            [
                pl.col("tick"),
                pl.col("event"),
                pl.col("user_X").alias("X"),
                pl.col("user_Y").alias("Y"),
                pl.col("user_Z").alias("Z"),
                pl.col("user_steamid").alias("steamid"),
                pl.col("user_name").alias("name"),
                bombsite_expr.alias("bombsite").cast(pl.Utf8),
            ]
        )
    )


def parse_bomb(events: dict[str, pl.DataFrame], valid_ticks: pl.Series) -> pl.DataFrame:
    """Create a unified bomb-event timeline by combining bomb event data.

    Combines "bomb_dropped", "bomb_pickup", "bomb_planted", "bomb_exploded",
    and "bomb_defused" events (each optional; missing ones are treated as
    empty) into a single DataFrame with one row per event, normalized to a
    shared schema by ``_bomb_event_frame``.

    Args:
        events (dict[str, pl.DataFrame]): Parsed event DataFrames. Each bomb event
            DataFrame, when present, must contain at least the columns
            ["tick", "user_name", "user_steamid", "user_X", "user_Y", "user_Z"];
            planted/defused events must also contain "user_place".
        valid_ticks (pl.Series): Tick values to keep; events at other ticks are dropped.

    Returns:
        pl.DataFrame: Rows sorted by tick with columns
            tick, event, X, Y, Z, steamid, name, bombsite.
            "event" is one of "drop", "pickup", "plant", "detonate", "defuse";
            "bombsite" is the site for plant/defuse events and null otherwise.
            An empty DataFrame is returned when no bomb events survive filtering.
    """
    # (event key in `events`, output label, whether bombsite comes from "user_place")
    event_specs = [
        ("bomb_dropped", "drop", False),
        ("bomb_pickup", "pickup", False),
        ("bomb_planted", "plant", True),
        ("bomb_exploded", "detonate", False),
        ("bomb_defused", "defuse", True),
    ]

    frames = []
    for event_name, label, from_place in event_specs:
        event_df = awpy.parsers.utils.get_event_from_parsed_events(events, event_name, empty_if_not_found=True)
        frame = _bomb_event_frame(event_df, label, valid_ticks, bombsite_from_place=from_place)
        if not frame.is_empty():
            frames.append(frame)

    # Robustness fix: pl.concat([]) raises, so return an empty frame when no
    # bomb events exist (e.g. a demo with no bomb interactions).
    if not frames:
        return pl.DataFrame()
    return pl.concat(frames).sort("tick")
171 |
--------------------------------------------------------------------------------
/awpy/parsers/clock.py:
--------------------------------------------------------------------------------
1 | """Module for time and clock parsing functions."""
2 |
3 | import math
4 |
5 | import polars as pl
6 |
7 | import awpy.constants
8 | import awpy.parsers.utils
9 |
10 |
def parse_times(df_to_add_times: pl.DataFrame, rounds_df: pl.DataFrame, tick_col: str = "tick") -> pl.DataFrame:
    """Attach phase-change timing columns to a tick-indexed DataFrame.

    Melts the round phase columns ("start", "freeze_end", "official_end",
    "bomb_plant") of ``rounds_df`` into a long table of phase-change ticks,
    then asof-joins it against ``df_to_add_times`` so each row is matched with
    the most recent phase change at or before its tick.

    Args:
        df_to_add_times (pl.DataFrame): Tick-level data; must contain ``tick_col``.
        rounds_df (pl.DataFrame): Round data with the phase-change columns above.
        tick_col (str, optional): Name of the tick column. Defaults to "tick".

    Returns:
        pl.DataFrame: ``df_to_add_times`` (sorted by tick) plus:
            - "ticks_since_phase_change": ticks elapsed since the last phase
              change (falls back to the tick value itself when no phase change
              precedes the row).
            - "last_phase_change": which phase column that change came from.
    """
    phase_change_cols = ["start", "freeze_end", "official_end", "bomb_plant"]

    # Long-format table of every non-null phase-change tick, sorted as
    # join_asof requires, with the time cast to Int32 for the join.
    phase_changes = (
        rounds_df.select(phase_change_cols)
        .melt(value_name="phase_time")  # yields "variable" + "phase_time"
        .filter(pl.col("phase_time").is_not_null())
        .sort("phase_time")
        .with_columns(pl.col("phase_time").cast(pl.Int32))
    )

    # join_asof also requires the left side sorted by the join key.
    sorted_ticks = df_to_add_times.sort(tick_col)
    augmented = sorted_ticks.join_asof(phase_changes, left_on=tick_col, right_on="phase_time", strategy="backward")

    # Elapsed ticks since the matched phase change; when no phase change was
    # found the difference is null and we fall back to the tick itself.
    augmented = augmented.with_columns(
        (pl.col(tick_col) - pl.col("phase_time")).fill_null(pl.col(tick_col)).alias("ticks_since_phase_change")
    )

    return augmented.rename({"variable": "last_phase_change"}).drop("phase_time")
70 |
71 |
def apply_clock_column(
    df: pl.DataFrame,
    ticks_since_phase_change_col: str = "ticks_since_phase_change",
    tick_rate: int = awpy.constants.DEFAULT_SERVER_TICKRATE,
    freeze_time: float = awpy.constants.DEFAULT_FREEZE_TIME_IN_SECS,
    round_time: float = awpy.constants.DEFAULT_ROUND_TIME_IN_SECS,
    bomb_time: float = awpy.constants.DEFAULT_BOMB_TIME_IN_SECS,
) -> pl.DataFrame:
    """Add a 'clock' column with the remaining time in the current phase.

    The DataFrame must contain:
        - "last_phase_change": one of "start", "freeze_end", or "bomb_plant".
        - ``ticks_since_phase_change_col``: ticks elapsed since that phase change.

    The remaining time is computed as:
        1. Map "last_phase_change" to a phase duration: "start" -> freeze_time,
           "freeze_end" -> round_time, "bomb_plant" -> bomb_time.
        2. remaining_ticks = duration_in_seconds * tick_rate - ticks_since_phase_change,
           clamped at 0.
        3. Convert remaining ticks to seconds (rounding up) and format as MM:SS.

    Args:
        df (pl.DataFrame): Input DataFrame with the columns described above.
        ticks_since_phase_change_col (str, optional): Column with ticks since the
            last phase change. Defaults to "ticks_since_phase_change".
        tick_rate (int, optional): Server tick rate. Defaults to
            awpy.constants.DEFAULT_SERVER_TICKRATE (consistency fix: previously a
            hard-coded 128, the same value).
        freeze_time (float, optional): Freeze phase duration in seconds.
        round_time (float, optional): Round phase duration in seconds.
        bomb_time (float, optional): Bomb phase duration in seconds.

    Returns:
        pl.DataFrame: The input DataFrame with an added 'clock' column (MM:SS).

    Raises:
        ValueError: If required columns are missing, if a row's
            "last_phase_change" is not a recognized phase, or if a row's tick
            count since the phase change is negative.
    """
    # Verify required columns exist.
    awpy.parsers.utils.validate_required_columns(df, {"last_phase_change", ticks_since_phase_change_col})

    # Phase label -> phase duration in seconds.
    phase_mapping = {
        "start": freeze_time,
        "freeze_end": round_time,
        "bomb_plant": bomb_time,
    }

    def _compute_clock_for_row(row: dict) -> str:
        phase = row["last_phase_change"]
        ticks_since = row[ticks_since_phase_change_col]

        if phase not in phase_mapping:
            invalid_phase_change_err_msg = (
                f"Invalid last_phase_change value: {phase}. Expected one of {list(phase_mapping.keys())}."
            )
            raise ValueError(invalid_phase_change_err_msg)
        if ticks_since < 0:
            raise ValueError("ticks_since_phase_change cannot be negative.")  # noqa: EM101

        # Maximum duration of the current phase, in ticks.
        max_time_ticks = phase_mapping[phase] * tick_rate

        # Remaining ticks, clamped to 0 once time is up.
        remaining_ticks = max(0, max_time_ticks - ticks_since)

        # Convert to whole seconds (rounding up) and format as MM:SS.
        remaining_seconds = math.ceil(remaining_ticks / tick_rate)
        minutes = remaining_seconds // 60
        seconds = remaining_seconds % 60
        return f"{int(minutes):02}:{int(seconds):02}"

    # Build a struct of the needed columns and map row-wise to the clock string.
    return df.with_columns(
        pl.struct([pl.col(ticks_since_phase_change_col), pl.col("last_phase_change")])
        .map_elements(_compute_clock_for_row, return_dtype=pl.String)
        .alias("clock")
    )
169 |
--------------------------------------------------------------------------------
/awpy/parsers/events.py:
--------------------------------------------------------------------------------
1 | """Module for event parsing functions."""
2 |
3 | import polars as pl
4 |
5 | import awpy.converters
6 | import awpy.parsers.utils
7 |
8 |
def parse_kills(df: pl.DataFrame) -> pl.DataFrame:
    """Standardize kill event data.

    Renames "user_"-prefixed columns to "victim_" and maps the numeric
    "hitgroup" ids to their human-readable names via HITGROUP_MAP (unknown ids
    are passed through unchanged).

    Args:
        df (pl.DataFrame): Raw kill events with "user_"-prefixed columns and a
            "hitgroup" column.

    Returns:
        pl.DataFrame: Kill events with "victim_" columns and a mapped "hitgroup".
    """
    renamed = awpy.parsers.utils.rename_columns_with_affix(df, old_affix="user_", new_affix="victim_")
    hitgroup_names = pl.col("hitgroup").map_elements(
        lambda hg: awpy.converters.HITGROUP_MAP.get(hg, hg), return_dtype=pl.String
    )
    return renamed.with_columns(hitgroup_names.alias("hitgroup"))
29 |
30 |
def parse_damages(df: pl.DataFrame) -> pl.DataFrame:
    """Standardize damage event data.

    Renames "user_"-prefixed columns to "victim_", maps the numeric "hitgroup"
    ids via HITGROUP_MAP (unknown ids pass through), and adds
    "dmg_health_real": the damage actually applied, capped at the victim's
    remaining health.

    Args:
        df (pl.DataFrame): Raw damage events with "user_"-prefixed columns plus
            "dmg_health" and "hitgroup".

    Returns:
        pl.DataFrame: Damage events with "victim_" columns, a mapped
        "hitgroup", and the computed "dmg_health_real" column.
    """
    renamed = awpy.parsers.utils.rename_columns_with_affix(df, old_affix="user_", new_affix="victim_")
    hitgroup_names = pl.col("hitgroup").map_elements(
        lambda hg: awpy.converters.HITGROUP_MAP.get(hg, hg), return_dtype=pl.String
    )
    # Damage cannot exceed the victim's remaining health.
    effective_damage = (
        pl.when(pl.col("dmg_health") > pl.col("victim_health"))
        .then(pl.col("victim_health"))
        .otherwise(pl.col("dmg_health"))
        .alias("dmg_health_real")
    )
    return renamed.with_columns(hitgroup_names, effective_damage)
55 |
56 |
def parse_footsteps(df: pl.DataFrame) -> pl.DataFrame:
    """Standardize footstep event data.

    Renames "user_"-prefixed columns to "player_" so footstep events follow
    the library-wide naming convention for player data.

    Args:
        df (pl.DataFrame): Raw footstep events with "user_"-prefixed columns.

    Returns:
        pl.DataFrame: Footstep events with "player_"-prefixed columns.
    """
    standardized = awpy.parsers.utils.rename_columns_with_affix(df, old_affix="user_", new_affix="player_")
    return standardized
70 |
71 |
def parse_shots(df: pl.DataFrame) -> pl.DataFrame:
    """Standardize shot event data.

    Renames "user_"-prefixed columns to "player_" so shot events follow the
    library-wide naming convention for player data.

    Args:
        df (pl.DataFrame): Raw shot events with "user_"-prefixed columns.

    Returns:
        pl.DataFrame: Shot events with "player_"-prefixed columns.
    """
    standardized = awpy.parsers.utils.rename_columns_with_affix(df, old_affix="user_", new_affix="player_")
    return standardized
85 |
--------------------------------------------------------------------------------
/awpy/parsers/grenades.py:
--------------------------------------------------------------------------------
1 | """Module for grenade (inferno and smoke) parsing."""
2 |
3 | from collections import defaultdict
4 |
5 | import polars as pl
6 |
7 | import awpy.parsers.utils
8 |
9 |
def parse_timed_grenade_entity(
    start_df: pl.DataFrame, end_df: pl.DataFrame, max_duration_ticks: int | None = None
) -> pl.DataFrame:
    """Match timed grenade start events (e.g. smokes or infernos) to their end events.

    Each start event (row of ``start_df``) is paired with the earliest end event
    (row of ``end_df``) for the same grenade (matched on 'entityid') whose tick
    is strictly greater than the start tick. When no such end event exists, or
    when the resulting duration exceeds ``max_duration_ticks``, the grenade's
    ``end_tick`` is null.

    The resulting DataFrame has one row per grenade start event with columns:
        - entity_id: The grenade's entity id.
        - start_tick: The tick at which the grenade event started.
        - end_tick: The tick at which it ended (null if unmatched or too long).
        - All columns from the start event that begin with "thrower_".
        - X, Y, Z: The coordinates associated with the grenade event.

    Parameters:
        start_df (pl.DataFrame): DataFrame of grenade start events.
        end_df (pl.DataFrame): DataFrame of grenade end events.
        max_duration_ticks (int, optional): Maximum allowed duration in ticks
            (end_tick - start_tick). Longer durations null out end_tick.
            Defaults to None (no filtering).

    Returns:
        pl.DataFrame: A DataFrame with one row per grenade event, as described above.

    Raises:
        KeyError: If any required columns are missing in start_df or end_df.
    """
    # Validate that both inputs carry the columns this function reads.
    awpy.parsers.utils.validate_required_columns(
        start_df, {"entityid", "tick", "user_name", "user_steamid", "x", "y", "z"}, "start_df"
    )
    awpy.parsers.utils.validate_required_columns(end_df, {"entityid", "tick"}, "end_df")

    # Standardize start-event columns: user_* -> thrower_*, then canonical names.
    # A row index is attached first for traceability.
    starts = awpy.parsers.utils.rename_columns_with_affix(
        start_df.with_row_index("start_idx"), old_affix="user_", new_affix="thrower_"
    ).rename({"entityid": "entity_id", "tick": "start_tick", "x": "X", "y": "Y", "z": "Z"})

    # Index end-event ticks by entity id, sorted ascending so the earliest
    # match can be read off with a single forward scan.
    ends_by_entity = defaultdict(list)
    for end_row in end_df.rename({"tick": "end_tick"}).to_dicts():
        ends_by_entity[end_row["entityid"]].append(end_row["end_tick"])
    for end_ticks in ends_by_entity.values():
        end_ticks.sort()

    output_rows = []
    for start_row in starts.to_dicts():
        entity_id = start_row["entity_id"]
        start_tick = start_row["start_tick"]

        # Ticks are sorted, so the first tick strictly after start_tick is the
        # earliest valid end event for this grenade.
        end_tick = next((t for t in ends_by_entity.get(entity_id, []) if t > start_tick), None)

        # Discard matches that exceed the duration cap, when one is given.
        if end_tick is not None and max_duration_ticks is not None and (end_tick - start_tick) > max_duration_ticks:
            end_tick = None

        record = {"entity_id": entity_id, "start_tick": start_tick, "end_tick": end_tick}
        # Carry over thrower metadata and the grenade coordinates.
        record.update({key: value for key, value in start_row.items() if key.startswith("thrower_")})
        record.update({axis: start_row[axis] for axis in ("X", "Y", "Z")})
        output_rows.append(record)

    return pl.DataFrame(output_rows)
107 |
--------------------------------------------------------------------------------
/awpy/parsers/rounds.py:
--------------------------------------------------------------------------------
1 | """Module for round parsing functions."""
2 |
3 | import polars as pl
4 |
5 | import awpy.constants
6 | import awpy.converters
7 | import awpy.parsers.utils
8 |
9 |
10 | def _find_valid_round_indices(rounds_df: pl.DataFrame, full_sequence: list[str]) -> list[int]:
11 | """Identify indices in the rounds DataFrame that form a valid round sequence.
12 |
13 | A valid sequence is defined as either:
14 | - A full sequence matching: ["start", "freeze_end", "end", "official_end"]
15 | - An incomplete sequence matching either:
16 | ["start", "freeze_end", "end"],
17 | ["start", "end", "official_end"], or
18 | ["start", "end"] (typically occurring when there's a surrender vote)
19 | - If at the end of the DataFrame only the first 3 events of full_sequence are present
20 | (i.e. ["start", "freeze_end", "end"]), consider that valid.
21 |
22 | Args:
23 | rounds_df: DataFrame containing event rows with an "event" column.
24 | full_sequence: The expected full sequence of events.
25 |
26 | Returns:
27 | A list of row indices in rounds_df that are part of a valid sequence.
28 | """
29 | valid_indices = []
30 | sequence_length_full = len(full_sequence) # Expected full sequence length (4)
31 | alt_sequence1 = ["start", "freeze_end", "end"]
32 | alt_sequence2 = ["start", "end", "official_end"]
33 | alt_sequence3 = ["start", "end"] # For surrender vote
34 |
35 | n_rows = len(rounds_df)
36 | for i in range(n_rows):
37 | # Extract a slice of events; if we're near the end, this may be shorter than full_sequence.
38 | current_sequence = rounds_df["event"].slice(i, sequence_length_full).to_list()
39 |
40 | # 1. Check for a complete round sequence.
41 | if current_sequence == full_sequence:
42 | valid_indices.extend(range(i, i + sequence_length_full))
43 | # 2. Check for a 3-event sequence: ["start", "freeze_end", "end"].
44 | elif current_sequence == alt_sequence1:
45 | valid_indices.extend(range(i, i + len(alt_sequence1)))
46 | # 3. Check for a 3-event sequence: ["start", "end", "official_end"].
47 | elif len(current_sequence) >= len(alt_sequence2) and current_sequence[: len(alt_sequence2)] == alt_sequence2:
48 | valid_indices.extend(range(i, i + len(alt_sequence2)))
49 | # 4. Check for a 2-event sequence: ["start", "end"].
50 | elif len(current_sequence) == len(alt_sequence3) and current_sequence == alt_sequence3:
51 | valid_indices.extend(range(i, i + len(alt_sequence3)))
52 | # 5. Lastly, if we're at the very end and only have 3 events, check if they match the first three events of full
53 | elif (
54 | len(current_sequence) < sequence_length_full
55 | and len(current_sequence) == 3
56 | and current_sequence == full_sequence[:3]
57 | ):
58 | valid_indices.extend(range(i, i + len(current_sequence)))
59 |
60 | return valid_indices
61 |
62 |
def _add_bomb_plant_info(rounds_df: pl.DataFrame, bomb_plants: pl.DataFrame) -> pl.DataFrame:
    """Add bomb plant tick and site information to the rounds DataFrame.

    For each round, this function looks for bomb plant events occurring between
    the round's start and end ticks. It then adds two new columns:
        - bomb_plant: The tick at which the bomb was planted (null if none).
        - bomb_site: "bombsite_a" or "bombsite_b" based on the site value, or
          "not_planted" if no bomb plant occurred.

    The two columns are added even when `bomb_plants` is empty, so the output
    schema is identical whether or not any plants occurred in the demo.

    Args:
        rounds_df: DataFrame containing round information with "start" and "end" columns.
        bomb_plants: DataFrame of bomb planted events.

    Returns:
        Updated rounds_df with additional bomb plant information.
    """
    n_rounds = len(rounds_df)
    bomb_plant_ticks = [None] * n_rounds
    bomb_plant_sites = ["not_planted"] * n_rounds

    # Only search for plant events when any exist. (Previously an empty
    # bomb_plants DataFrame returned rounds_df unchanged, which silently
    # dropped the bomb_plant/bomb_site columns and made the output schema
    # inconsistent with the documented contract.)
    if not bomb_plants.is_empty():
        for i in range(n_rounds):
            start_tick = rounds_df["start"][i]
            end_tick = rounds_df["end"][i]
            # Filter bomb plant events that occur within the current round's tick range.
            plant_events = bomb_plants.filter((pl.col("tick") >= start_tick) & (pl.col("tick") <= end_tick))
            if len(plant_events) > 0:
                # Use the first bomb plant event for this round.
                bomb_plant_ticks[i] = plant_events["tick"][0]
                # NOTE(review): site id 220 is assumed to denote bombsite A and
                # everything else bombsite B — confirm against the demo
                # parser's site encoding.
                bomb_plant_sites[i] = "bombsite_a" if plant_events["site"][0] == 220 else "bombsite_b"

    # Add the bomb plant information as new columns.
    return rounds_df.with_columns(
        [
            pl.Series(name="bomb_plant", values=bomb_plant_ticks),
            pl.Series(name="bomb_site", values=bomb_plant_sites),
        ]
    )
104 |
105 |
def create_round_df(events: dict[str, pl.DataFrame]) -> pl.DataFrame:
    """Create a consolidated rounds DataFrame from a dictionary of event DataFrames.

    This function processes round events (start, freeze_end, end, official_end) from the input,
    filters and validates the event sequence, and pivots the data to create a structured round
    DataFrame. It also integrates bomb plant events to add bomb planting tick and site information.

    Args:
        events (dict[str, pl.DataFrame]): A dictionary containing event DataFrames with keys:
            - "round_start"
            - "round_end"
            - "round_officially_ended"
            - "round_freeze_end"
            Optionally:
            - "bomb_planted"

    Returns:
        pl.DataFrame: A DataFrame representing consolidated round information, one row per
        round, with a tick column per round phase, winner/reason details, and bomb plant data.

    Raises:
        KeyError: If any required event key is missing from the events dictionary.
        ValueError: If no valid round sequences are found in the processed event data.
    """
    # Retrieve required event DataFrames.
    round_start = awpy.parsers.utils.get_event_from_parsed_events(events, "round_start")
    round_end = awpy.parsers.utils.get_event_from_parsed_events(events, "round_end")
    round_end_official = awpy.parsers.utils.get_event_from_parsed_events(events, "round_officially_ended")
    round_freeze_end = awpy.parsers.utils.get_event_from_parsed_events(events, "round_freeze_end")

    # Retrieve optional bomb planted events; default to empty DataFrame if missing.
    bomb_plants = events.get("bomb_planted", pl.DataFrame())

    # Extract only the 'event' and 'tick' columns from each round event DataFrame.
    event_dfs = [
        round_start[["event", "tick"]],
        round_freeze_end[["event", "tick"]],
        round_end[["event", "tick"]],
        round_end_official[["event", "tick"]],
    ]

    # Concatenate event DataFrames and filter out rows with tick==0 (unless event is "start").
    rounds_df = pl.concat(event_dfs).filter(~((pl.col("tick") == 0) & (pl.col("event") != "start")))

    # Define an enumeration for event types. NOTE: the declaration order of the
    # Enum categories doubles as the sort order for events that share a tick,
    # so "official_end" deliberately sorts before the "start" of the next round.
    round_events_enum = pl.Enum(["official_end", "start", "freeze_end", "end"])

    # Reconstruct the DataFrame with the event type override, remove duplicates, and sort.
    rounds_df = (
        pl.DataFrame(rounds_df, schema_overrides={"event": round_events_enum})
        .unique(subset=["tick", "event"])
        .sort(by=["tick", "event"])
    )

    # Define the expected full event sequence.
    expected_full_sequence = ["start", "freeze_end", "end", "official_end"]

    # Identify the indices of rows that form valid round sequences.
    valid_indices = _find_valid_round_indices(rounds_df, expected_full_sequence)
    if not valid_indices:
        no_valid_sequences_err_msg = "No valid round sequences found in the event data."
        raise ValueError(no_valid_sequences_err_msg)

    # Filter the DataFrame to include only rows that are part of valid round sequences.
    rounds_df = rounds_df[valid_indices]

    # Create a round number column by cumulatively summing "start" events.
    rounds_df = rounds_df.with_columns(round_num=(pl.col("event") == "start").cast(pl.Int8).cum_sum())

    # Pivot the DataFrame so that each round is one row with columns for each event type.
    rounds_df = rounds_df.pivot(index="round_num", on="event", values="tick", aggregate_function="first")

    # Join additional round details (such as winner and reason) from the round_end events.
    rounds_df = rounds_df.join(round_end[["tick", "winner", "reason"]], left_on="end", right_on="tick")

    # Replace winner values with the project-wide side constants. The
    # replacements run in this order on purpose: "CT" is rewritten first so
    # that, by the time the bare "T" pattern runs, no "CT" value remains to be
    # mangled by it.
    rounds_df = (
        rounds_df.with_columns(
            pl.col("winner").str.replace("CT", awpy.constants.CT_SIDE),
        )
        .with_columns(
            pl.col("winner").str.replace("TERRORIST", awpy.constants.T_SIDE),
        )
        .with_columns(
            pl.col("winner").str.replace("T", awpy.constants.T_SIDE),
        )
    )

    # Replace round number with row index (starting at 1) and coalesce official_end data
    # (rounds lacking an official_end fall back to their end tick).
    rounds_df = (
        rounds_df.drop("round_num")
        .with_columns(pl.coalesce(pl.col("official_end"), pl.col("end")).alias("official_end"))
        .with_row_index("round_num", offset=1)
    )

    # Integrate bomb plant information into the rounds DataFrame.
    return _add_bomb_plant_info(rounds_df, bomb_plants)
202 |
203 |
def apply_round_num(df: pl.DataFrame, rounds_df: pl.DataFrame, tick_col: str = "tick") -> pl.DataFrame:
    """Assign a round number to each event based on its tick value.

    Each row of `df` is matched to the round of `rounds_df` whose interval
    [start, official_end] contains the row's tick. Rows whose tick falls inside
    no round get a null "round_num".

    Implementation: an asof join (strategy "backward") on the rounds' 'start'
    column finds the latest round that began at or before the event tick; the
    match is then discarded when the tick lies beyond that round's
    'official_end'.

    Args:
        df (pl.DataFrame): Input DataFrame containing an event tick column.
        rounds_df (pl.DataFrame): DataFrame containing round boundaries with at
            least the columns:
                - 'round_num': The round number.
                - 'start': The starting tick of the round.
                - 'official_end': The official ending tick of the round.
            This DataFrame should be sorted in ascending order by 'start'.
        tick_col (str, optional): Name of the tick column in `df`. Defaults to "tick".

    Returns:
        pl.DataFrame: A new DataFrame with all original columns plus a
        "round_num" column indicating the round in which each event occurs
        (null when no round matches).
    """
    round_bounds = rounds_df.select(["round_num", "start", "official_end"])

    # Latest round whose start tick is <= the event tick.
    joined = df.join_asof(round_bounds, left_on=tick_col, right_on="start", strategy="backward")

    # Null out matches where the event happened after the round was over.
    within_round = pl.col(tick_col) <= pl.col("official_end")
    joined = joined.with_columns(
        pl.when(within_round).then(pl.col("round_num")).otherwise(None).alias("round_num")
    )

    return joined.drop(["start", "official_end"])
246 |
--------------------------------------------------------------------------------
/awpy/parsers/ticks.py:
--------------------------------------------------------------------------------
1 | """Module for tick parsing functions."""
2 |
3 | import polars as pl
4 |
5 |
def get_valid_ticks(tick_df: pl.DataFrame) -> pl.Series:
    """Get the valid ticks from a tick dataframe.

    Keeps only ticks recorded while the match is live: the match must have
    started and the tick must not fall in a warmup period, a terrorist/CT
    timeout, a technical timeout, or a waiting-for-resume pause. Duplicate
    ticks are dropped.

    Args:
        tick_df: A DataFrame containing tick data along with boolean columns
            indicating various match periods.

    Returns:
        A sorted pl.Series of the unique tick values that meet the filtering criteria.
    """
    # Every one of these pause flags must be False for a tick to count.
    pause_flags = [
        "is_warmup_period",
        "is_terrorist_timeout",
        "is_ct_timeout",
        "is_technical_timeout",
        "is_waiting_for_resume",
    ]
    live_mask = pl.col("is_match_started")
    for flag in pause_flags:
        live_mask = live_mask & (~pl.col(flag))

    # Deduplicate and return the surviving ticks in ascending order.
    return tick_df.filter(live_mask).select("tick").unique(subset=["tick"])["tick"].sort()
35 |
--------------------------------------------------------------------------------
/awpy/parsers/utils.py:
--------------------------------------------------------------------------------
1 | """Module for parsing utils."""
2 |
3 | import polars as pl
4 |
5 |
def get_event_from_parsed_events(
    events: dict[str, pl.DataFrame], key: str, *, empty_if_not_found: bool = False
) -> pl.DataFrame:
    """Retrieve a required event DataFrame from the events dictionary.

    Args:
        events: Dictionary of event DataFrames.
        key: The key for the required event.
        empty_if_not_found: If True, return an empty DataFrame if the event is not found.

    Returns:
        The corresponding polars DataFrame for the event.

    Raises:
        KeyError: If the event key is missing or its value is None and
            `empty_if_not_found` is False.
    """
    found = events.get(key)
    if found is not None:
        return found
    if empty_if_not_found:
        return pl.DataFrame()
    missing_key_err_msg = f"Required event '{key}' is missing from the events dictionary."
    raise KeyError(missing_key_err_msg)
29 |
30 |
def validate_required_columns(df: pl.DataFrame, required_columns: set[str], df_name: str = "DataFrame") -> None:
    """Validate that the given DataFrame contains all required columns.

    Parameters:
        df (pl.DataFrame): The DataFrame to validate.
        required_columns (set[str]): A set of column names that must be present
            in the DataFrame.
        df_name (str, optional): Name of the DataFrame for error messaging.
            Defaults to "DataFrame".

    Raises:
        KeyError: If one or more required columns are missing.
    """
    present = set(df.columns)
    missing = required_columns - present
    if not missing:
        return
    missing_col_err_msg = f"{df_name} is missing required columns: {missing}"
    raise KeyError(missing_col_err_msg)
48 |
49 |
def get_columns_with_prefix(df: pl.DataFrame, prefix: str) -> list[str]:
    """Return the DataFrame's column names that start with the given prefix.

    Parameters:
        df (pl.DataFrame): The input DataFrame.
        prefix (str): The prefix to filter column names.

    Returns:
        list[str]: Column names beginning with `prefix`, in their original order.
    """
    return [name for name in df.columns if name.startswith(prefix)]
61 |
62 |
def rename_columns_with_affix(
    df: pl.DataFrame,
    old_affix: str,
    new_affix: str,
    *,
    is_prefix: bool = True,
) -> pl.DataFrame:
    """Rename columns by replacing old_affix with new_affix for a Polars DataFrame.

    If is_prefix is True, the function replaces a prefix; otherwise, it replaces a suffix.
    Columns without the affix are left untouched.

    Args:
        df (pl.DataFrame): DataFrame whose columns are to be renamed.
        old_affix (str): Old affix to be replaced.
        new_affix (str): New affix to replace the old one.
        is_prefix (bool, optional): If True, perform prefix replacement, else suffix. Defaults to True.

    Returns:
        pl.DataFrame: DataFrame with renamed columns.
    """
    if is_prefix:
        mapping = {col: new_affix + col[len(old_affix) :] for col in df.columns if col.startswith(old_affix)}
    else:
        mapping = {col: col[: -len(old_affix)] + new_affix for col in df.columns if col.endswith(old_affix)}
    return df.rename(mapping)
90 |
91 |
def fix_common_names(df: pl.DataFrame) -> pl.DataFrame:
    """Fixes common column name values and data types.

    Applied transformations:
        1. Columns ending in "last_place_name" are renamed to end in "place".
        2. Columns ending in "steamid" are cast to UInt64.
        3. Columns ending in "team_name" have their values mapped
           "CT" -> "ct" and "TERRORIST" -> "t" (other values pass through).
        4. Columns ending in "team_name" are renamed to end in "side".
        5. Columns ending in "armor_value" are renamed to end in "armor".

    Args:
        df (pl.DataFrame): DataFrame to fix.

    Returns:
        pl.DataFrame: DataFrame with fixed column names and data types.
    """
    # last_place_name -> place
    fixed = rename_columns_with_affix(df, old_affix="last_place_name", new_affix="place", is_prefix=False)

    # The steamid and team_name column sets are disjoint, so one pass suffices.
    for column in fixed.columns:
        if column.endswith("steamid"):
            # steamid columns -> unsigned 64-bit integers
            fixed = fixed.with_columns(pl.col(column).cast(pl.UInt64))
        elif column.endswith("team_name"):
            # CT -> ct, TERRORIST -> t
            fixed = fixed.with_columns(
                pl.col(column).map_elements(lambda x: {"CT": "ct", "TERRORIST": "t"}.get(x, x), return_dtype=pl.String)
            )

    # team_name -> side
    fixed = rename_columns_with_affix(fixed, old_affix="team_name", new_affix="side", is_prefix=False)

    # armor_value -> armor
    return rename_columns_with_affix(fixed, old_affix="armor_value", new_affix="armor", is_prefix=False)
121 |
--------------------------------------------------------------------------------
/awpy/plot/__init__.py:
--------------------------------------------------------------------------------
"""Awpy plotting module."""

# Default matplotlib marker styles keyed by the kind of object being drawn:
# the two player sides ("ct", "t"), the bomb, and grenade effects
# ("smoke", "fire"). Each entry supplies the marker shape, color, and size.
PLOT_SETTINGS = {
    "ct": {
        "marker": "o",
        "color": "tab:cyan",
        "size": 8,
    },
    "t": {
        "marker": "o",
        "color": "tab:olive",
        "size": 8,
    },
    "bomb": {
        "marker": "x",
        "color": "tab:orange",
        "size": 8,
    },
    "smoke": {
        "marker": "o",
        "color": "tab:gray",
        "size": 12,
    },
    "fire": {
        "marker": "o",
        "color": "tab:red",
        "size": 12,
    },
}

# NOTE(review): imported below PLOT_SETTINGS rather than at the top of the
# file — presumably so awpy.plot.plot can import this module (for the settings
# above) without a circular-import failure; confirm before reordering.
from awpy.plot.plot import gif, heatmap, plot

__all__ = ["gif", "heatmap", "plot"]
34 |
--------------------------------------------------------------------------------
/awpy/plot/nav.py:
--------------------------------------------------------------------------------
1 | """Module for plotting navigation mesh tiles on a map."""
2 |
3 | from pathlib import Path
4 |
5 | import matplotlib.pyplot as plt
6 | from loguru import logger
7 | from matplotlib import patches
8 | from matplotlib.axes import Axes
9 | from matplotlib.figure import Figure
10 |
11 | import awpy.data
12 | import awpy.nav
13 | import awpy.plot
14 | import awpy.plot.utils
15 | import awpy.vector
16 |
17 |
def _tile_polygon(area_corners: list[awpy.vector.Vector3], map_name: str) -> list[tuple[float, float]]:
    """Converts a Nav Area's corner coordinates to pixel coordinates.

    Args:
        area_corners (list[awpy.vector.Vector3]): List of corner positions
            (see NavArea.corners for more info).
        map_name (str): The map name used for coordinate conversion.

    Returns:
        list: List of (x, y) pixel coordinates, one per corner.
    """
    pixel_corners = []
    for corner in area_corners:
        # game_to_pixel yields (x, y, z); only x and y are needed for the 2D plot.
        pixel_corners.append(awpy.plot.utils.game_to_pixel(map_name, (corner.x, corner.y, corner.z))[0:2])
    return pixel_corners
30 |
31 |
def _plot_tile(
    axis: Axes, polygon: list[tuple[float, float]], edgecolor: str, facecolor: str, linewidth: int = 1
) -> None:
    """Adds a single tile patch to the given axis.

    Args:
        axis (matplotlib.axes.Axes): The matplotlib axis that receives the tile.
        polygon (list[tuple[float, float]]): (x, y) pixel coordinates of the tile's corners.
        edgecolor (str): Color of the tile's border.
        facecolor (str): Fill color of the tile.
        linewidth (int, optional): Width of the tile's border. Defaults to 1.

    Returns:
        None

    Example:
        >>> _plot_tile(ax, [(0, 0), (1, 0), (1, 1), (0, 1)], edgecolor="blue", facecolor="red")
    """
    tile_patch = patches.Polygon(
        polygon,
        linewidth=linewidth,
        edgecolor=edgecolor,
        facecolor=facecolor,
    )
    axis.add_patch(tile_patch)
52 |
53 |
def _plot_all_tiles(map_name: str, axis: Axes, default_fill: str = "None") -> None:
    """Plots every navigation-mesh tile with a yellow outline and optional fill.

    Args:
        map_name (str): The name of the map whose nav mesh is drawn.
        axis (matplotlib.axes.Axes): The matplotlib axis to plot the tiles on.
        default_fill (str, optional): Fill color for the tiles; the string
            "None" means no fill. Defaults to "None".

    Returns:
        None

    Example:
        >>> _plot_all_tiles(map_name, ax, default_fill="gray")
    """
    nav_mesh = awpy.nav.Nav.from_json(awpy.data.NAVS_DIR / f"{map_name}.json")
    for nav_area in nav_mesh.areas.values():
        corners = _tile_polygon(nav_area.corners, map_name)
        _plot_tile(axis, corners, edgecolor="yellow", facecolor=default_fill)
72 |
73 |
def _plot_selected_tiles(map_name: str, axis: Axes, selected_tiles: list[int]) -> None:
    """Plots all tiles on the map, highlighting the selected ones.

    Coloring rules:
        - Tiles not in `selected_tiles` get a yellow outline and no fill.
        - Selected tiles are filled red with black outlines.
        - When several tiles are selected (representing a path), the first and
          last tiles are instead filled green with black outlines, marking the
          path's endpoints.

    Args:
        map_name (str): The name of the map for plotting.
        axis (matplotlib.axes.Axes): The matplotlib axis to plot the tiles on.
        selected_tiles (list[int]): List of tile IDs to highlight. Can represent
            a path if multiple tiles are included.

    Returns:
        None

    Example:
        >>> selected = [101, 102, 103]
        >>> _plot_selected_tiles(map_name, ax, selected)
    """
    # Set for O(1) membership tests.
    selected_set = set(selected_tiles)
    # Endpoint highlighting only applies when a multi-tile path was passed.
    is_path = len(selected_tiles) > 1
    endpoints = {selected_tiles[0], selected_tiles[-1]} if is_path else set()

    nav_mesh = awpy.nav.Nav.from_json(awpy.data.NAVS_DIR / f"{map_name}.json")
    for tile_id, nav_area in nav_mesh.areas.items():
        corners = _tile_polygon(nav_area.corners, map_name)
        if tile_id not in selected_set:
            edgecolor, facecolor = "yellow", "None"
        elif tile_id in endpoints:
            edgecolor, facecolor = "black", "green"
        else:
            edgecolor, facecolor = "black", "red"
        _plot_tile(axis, corners, edgecolor=edgecolor, facecolor=facecolor)
118 |
119 |
def plot_map_tiles(
    map_name: str,
    outpath: str | Path | None = None,
    dpi: int = 300,
    fill: str | None = None,
    figure_size: tuple[float, float] = (19, 21),
) -> tuple[Figure, Axes]:
    """Plots all navigation mesh tiles for a given map.

    This function overlays navigation mesh tiles onto a specified map. Tiles
    are drawn with a yellow outline and the given fill color. The resulting
    plot can either be displayed or saved to a file.

    Args:
        map_name (str): The name of the map to plot.
        outpath (str | pathlib.Path | None, optional): The file path to save the plotted image.
            Accepts both string and Path objects. If None, the figure will be displayed instead
            of saved. Defaults to None.
        dpi (int, optional): Dots per inch for the saved figure. Higher values result in
            better image quality. Defaults to 300.
        fill (str, optional): The fill color for the tiles. When None (the default),
            tiles are drawn with no fill.
        figure_size (tuple[float, float], optional): Tuple representing the figure size in inches
            (width, height). Defaults to `(19, 21)`.

    Returns:
        tuple[Figure, Axes]: Matplotlib Figure and Axes objects.

    Example:
        >>> plot_map_tiles(
        ...     map_name="de_dust2",
        ...     outpath="./maps/tiles_de_dust2.png",
        ...     dpi=800,
        ...     fill="blue",
        ...     figure_size=(15, 20)
        ... )
        # Saves the plot to './maps/tiles_de_dust2.png'

        >>> plot_map_tiles(
        ...     map_name="de_dust2",
        ...     fill="green"
        ... )
        # Displays the plot with the default figure size
    """
    fig, axis = awpy.plot.plot(map_name=map_name)
    fig.set_size_inches(*figure_size)
    # BUGFIX: a `fill` of None used to reach matplotlib as facecolor=None,
    # which draws matplotlib's *default* fill rather than no fill at all.
    # Normalize None to the string "None" so the documented default
    # (unfilled tiles) actually happens.
    _plot_all_tiles(map_name, axis, default_fill="None" if fill is None else fill)

    # Handle outpath for both str and Path inputs
    if outpath is not None:
        outpath = Path(outpath)
        outpath.parent.mkdir(parents=True, exist_ok=True)  # Ensure parent directory exists
        plt.savefig(outpath, bbox_inches="tight", dpi=dpi)
        logger.debug(f"The visualization has been saved at {outpath.resolve()}")

    return fig, axis
176 |
177 |
def plot_map_tiles_selected(
    map_name: str,
    selected_tiles: list,
    outpath: str | Path | None = None,
    dpi: int = 300,
    figure_size: tuple[float, float] = (19, 21),
) -> tuple[Figure, Axes]:
    """Plots navigation mesh tiles for a given map with selected tiles highlighted.

    Overlays the navigation mesh onto the specified map and highlights
    `selected_tiles`. Non-selected tiles get a yellow outline; selected tiles
    are filled red with black outlines. If multiple tiles are selected (e.g., a
    path), the first and last tiles are filled green with black outlines to
    denote the source and destination.

    Args:
        map_name (str): The name of the map to plot.
        selected_tiles (list): List of tile IDs to be highlighted on the map.
        outpath (str | pathlib.Path | None, optional): The file path to save the plotted image.
            Accepts both string and Path objects. If None, the figure will be displayed
            instead of saved. Defaults to None.
        dpi (int, optional): Dots per inch for the saved figure. Higher values result in
            better image quality. Defaults to 300.
        figure_size (tuple[float, float], optional): Tuple representing the figure size in inches
            (width, height). Defaults to `(19, 21)`.

    Returns:
        tuple[Figure, Axes]: Matplotlib Figure and Axes objects.

    Example:
        >>> plot_map_tiles_selected(
        ...     map_name="de_dust2",
        ...     selected_tiles=[5, 12, 18],
        ...     outpath="./maps/selected_tiles_de_dust2.png",
        ...     dpi=800,
        ...     figure_size=(15, 20)
        ... )
        # Saves the plot to './maps/selected_tiles_de_dust2.png'

        >>> plot_map_tiles_selected(
        ...     map_name="de_dust2",
        ...     selected_tiles=[5, 12, 18]
        ... )
        # Displays the plot with the default figure size
    """
    fig, axis = awpy.plot.plot(map_name=map_name)
    fig.set_size_inches(*figure_size)
    _plot_selected_tiles(map_name, axis, selected_tiles)

    if outpath is None:
        return fig, axis

    # Accept both str and Path for the output location.
    save_path = Path(outpath)
    save_path.parent.mkdir(parents=True, exist_ok=True)  # Ensure parent directory exists
    plt.savefig(save_path, bbox_inches="tight", dpi=dpi)
    logger.debug(f"The visualization has been saved at {save_path.resolve()}")

    return fig, axis
241 |
--------------------------------------------------------------------------------
/awpy/plot/utils.py:
--------------------------------------------------------------------------------
1 | """Utilities for plotting and visualization."""
2 |
3 | import warnings
4 | from typing import Literal
5 |
6 | import awpy.data.map_data
7 |
8 |
9 | # Position function courtesy of PureSkill.gg
def game_to_pixel_axis(map_name: str, position: float, axis: Literal["x", "y"]) -> float:
    """Transforms a CS2-coord value to a pixel-coord in the X or Y-axis.

    Args:
        map_name (str): Map to search
        position (float): X or Y coordinate
        axis (str): Either "x" or "y"

    Returns:
        float: Transformed position

    Raises:
        ValueError: Raises a ValueError if axis not 'x' or 'y'
    """
    normalized_axis = axis.lower()
    if normalized_axis not in ("x", "y"):
        msg = f"'axis' has to be 'x' or 'y', not {normalized_axis}"
        raise ValueError(msg)

    map_metadata = awpy.data.map_data.MAP_DATA[map_name]
    origin = map_metadata["pos_" + normalized_axis]
    scale = map_metadata["scale"]

    # The radar image's Y-axis points opposite to the game's Y-axis.
    if normalized_axis == "x":
        return (position - origin) / scale
    return (origin - position) / scale
34 |
35 |
def pixel_to_game_axis(map_name: str, position: float, axis: Literal["x", "y"]) -> float:
    """Transforms a pixel-coord value to a CS2-coord in the X or Y-axis.

    Args:
        map_name (str): Map to search
        position (float): X or Y coordinate
        axis (str): Either "x" or "y"

    Returns:
        float: Transformed position

    Raises:
        ValueError: Raises a ValueError if axis not 'x' or 'y'
    """
    normalized_axis = axis.lower()
    if normalized_axis not in ("x", "y"):
        msg = f"'axis' has to be 'x' or 'y', not {normalized_axis}"
        raise ValueError(msg)

    map_metadata = awpy.data.map_data.MAP_DATA[map_name]
    origin = map_metadata["pos_" + normalized_axis]
    scale = map_metadata["scale"]

    # Inverse of game_to_pixel_axis; Y is flipped relative to the radar image.
    if normalized_axis == "x":
        return position * scale + origin
    return origin - position * scale
60 |
61 |
def game_to_pixel(map_name: str, position: tuple[float, float, float]) -> tuple[float, float, float]:
    """Transforms a `(X, Y, Z)` CS2-coord to pixel coord.

    Args:
        map_name (str): Map to transform coordinates.
        position (tuple): (X,Y,Z) coordinates.

    Returns:
        Tuple[float, float, float]: Transformed coordinates (X,Y,Z).
    """
    # Z is a world height, not a radar-image coordinate, so it passes through.
    pixel_x = game_to_pixel_axis(map_name, position[0], "x")
    pixel_y = game_to_pixel_axis(map_name, position[1], "y")
    return (pixel_x, pixel_y, position[2])
77 |
78 |
def pixel_to_game(map_name: str, position: tuple[float, float, float]) -> tuple[float, float, float]:
    """Transforms a `(X, Y, Z)` pixel coord to CS2-coord.

    Args:
        map_name (str): Map to transform coordinates.
        position (tuple): (X,Y,Z) coordinates.

    Returns:
        Tuple[float, float, float]: Transformed coordinates (X,Y,Z).
    """
    # Z is carried through unchanged; only X and Y live in pixel space.
    game_x = pixel_to_game_axis(map_name, position[0], "x")
    game_y = pixel_to_game_axis(map_name, position[1], "y")
    return (game_x, game_y, position[2])
94 |
95 |
def is_position_on_lower_level(map_name: str, position: tuple[float, float, float]) -> bool:
    """Check if a position is on a lower level of a map.

    Args:
        map_name (str): Map to check the position level.
        position (Tuple[float, float, float]): (X,Y,Z) coordinates.

    Returns:
        bool: True if the position on the lower level, False otherwise.
    """
    # Anything at or below the map's lower-level ceiling counts as "lower".
    lower_level_ceiling = awpy.data.map_data.MAP_DATA[map_name]["lower_level_max_units"]
    return position[2] <= lower_level_ceiling
108 |
109 |
def position_transform_axis(map_name: str, position: float, axis: Literal["x", "y"]) -> float:
    """Deprecated alias for `game_to_pixel_axis`.

    Emits a DeprecationWarning, then forwards all arguments unchanged.
    Please update your code to avoid future deprecation.
    """
    deprecation_message = (
        "Deprecation warning: Function position_transform_axis() has been "
        "renamed to game_to_pixel_axis(). Please update your code to avoid "
        "future deprecation."
    )
    warnings.warn(deprecation_message, DeprecationWarning, stacklevel=2)
    return game_to_pixel_axis(map_name, position, axis)
126 |
127 |
def position_revert_axis(map_name: str, position: float, axis: Literal["x", "y"]) -> float:
    """Deprecated alias for `pixel_to_game_axis`.

    Emits a DeprecationWarning, then forwards all arguments unchanged.
    Please update your code to avoid future deprecation.
    """
    deprecation_message = (
        "Deprecation warning: Function position_revert_axis() has been "
        "renamed to pixel_to_game_axis(). Please update your code to avoid "
        "future deprecation."
    )
    warnings.warn(deprecation_message, DeprecationWarning, stacklevel=2)
    return pixel_to_game_axis(map_name, position, axis)
144 |
145 |
def position_transform(map_name: str, position: tuple[float, float, float]) -> tuple[float, float, float]:
    """Deprecated alias for `game_to_pixel`.

    Emits a DeprecationWarning, then forwards all arguments unchanged.
    Please update your code to avoid future deprecation.
    """
    deprecation_message = (
        "Deprecation warning: Function position_transform() has been renamed "
        "to game_to_pixel(). Please update your code to avoid future "
        "deprecation."
    )
    warnings.warn(deprecation_message, DeprecationWarning, stacklevel=2)
    return game_to_pixel(map_name, position)
162 |
163 |
def position_revert(map_name: str, position: tuple[float, float, float]) -> tuple[float, float, float]:
    """Deprecated alias for `pixel_to_game`.

    Emits a DeprecationWarning, then forwards all arguments unchanged.
    Please update your code to avoid future deprecation.
    """
    deprecation_message = (
        "Deprecation warning: Function position_revert() has been renamed to "
        "pixel_to_game(). Please update your code to avoid future "
        "deprecation."
    )
    warnings.warn(deprecation_message, DeprecationWarning, stacklevel=2)
    return pixel_to_game(map_name, position)
180 |
--------------------------------------------------------------------------------
/awpy/spawn.py:
--------------------------------------------------------------------------------
1 | """Module to parse .vents files to get map spawns."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | import re
7 | from dataclasses import dataclass
8 | from typing import TYPE_CHECKING
9 |
10 | if TYPE_CHECKING:
11 | import pathlib
12 |
13 | import awpy.vector
14 |
15 | VentsValue = str | int | float | bool | tuple[float, ...]
16 |
17 |
@dataclass
class Spawns:
    """Spawns of a map.

    Attributes:
        CT: Counter-Terrorist spawn points.
        T: Terrorist spawn points.
    """

    CT: list[awpy.vector.Vector3]
    T: list[awpy.vector.Vector3]

    def to_dict(self) -> dict[str, list[dict[str, float]]]:
        """Converts the spawns to a dictionary of x/y/z mappings per side."""
        return {
            "CT": [{"x": ct.x, "y": ct.y, "z": ct.z} for ct in self.CT],
            "T": [{"x": t.x, "y": t.y, "z": t.z} for t in self.T],
        }

    def to_json(self, path: str | pathlib.Path) -> None:
        """Writes the spawns data to a JSON file.

        Args:
            path: Path to the JSON file to write.
        """
        spawns_dict = self.to_dict()
        with open(path, "w", encoding="utf-8") as json_file:
            json.dump(spawns_dict, json_file)
            json_file.write("\n")

    @staticmethod
    def from_vents_content(vents_content: str) -> Spawns:
        """Parse the content of a vents file into Spawns information.

        Args:
            vents_content (str): The content of the .vents file.

        Returns:
            Spawns: A Spawns object with the parsed data.
        """
        parsed_data = parse_vents_file_to_dict(vents_content)

        return filter_vents_data(parsed_data)

    @staticmethod
    def from_vents_file(vents_file: str | pathlib.Path) -> Spawns:
        """Parse the content of a vents file into Spawns information.

        Args:
            vents_file (str | pathlib.Path): The path to the .vents file.

        Returns:
            Spawns: A Spawns object with the parsed data.
        """
        # Read explicitly as UTF-8 so parsing does not depend on the platform's
        # default locale encoding (and matches the UTF-8 writer in to_json).
        with open(vents_file, encoding="utf-8") as f:
            return Spawns.from_vents_content(f.read())
69 |
70 |
def parse_vents_file_to_dict(file_content: str) -> dict[int, dict[str, VentsValue]]:
    """Parse the content of a .vents file into a dictionary.

    The file is a sequence of ``====<id>====`` headers, each followed by
    ``key value`` lines. Values are coerced to bool, int, float, or a tuple
    of floats where the text allows; otherwise they stay strings.

    Args:
        file_content (str): The content of the .vents file.

    Returns:
        dict[int, dict[str, VentsValue]]: Mapping of block id to its key/value pairs.
            Blocks with no key/value lines are omitted.
    """
    block_header_pattern = re.compile(r"^====(\d+)====$")
    int_pattern = re.compile(r"^-?\d+$")
    float_pattern = re.compile(r"^-?\d*\.\d+$")
    # Components may be ints or floats so origins like "-496 -1088 64" parse too.
    tuple_pattern = re.compile(r"^-?\d*\.?\d+(?:\s+-?\d*\.?\d+)+$")

    parsed_data: dict[int, dict[str, VentsValue]] = {}
    block_id = 0
    block_content: dict[str, VentsValue] = {}

    for line in file_content.splitlines():
        if match := block_header_pattern.match(line):
            # A new block starts: store the previous block before resetting.
            # (Previously only the final block survived the loop.)
            if block_content:
                parsed_data[block_id] = block_content
            block_id = int(match.group(1))
            block_content = {}
            continue

        if not line.strip():
            continue
        try:
            key, value = line.split(maxsplit=1)
        except ValueError:
            # Line has a key but no value part; skip it.
            continue
        key = key.strip()
        value = value.strip()

        # Attempt to parse the value, most specific form first.
        if value in ("True", "False"):
            value = value == "True"  # Convert to boolean
        elif int_pattern.match(value):
            value = int(value)  # Convert to integer
        elif float_pattern.match(value):
            value = float(value)  # Convert to float
        elif tuple_pattern.match(value):
            value = tuple(map(float, value.split()))  # Convert to tuple of floats

        block_content[key] = value

    # Store the trailing block (the loop only stores on seeing the next header).
    if block_content:
        parsed_data[block_id] = block_content

    return parsed_data
115 |
116 |
def filter_vents_data(data: dict[int, dict[str, VentsValue]]) -> Spawns:
    """Filter the data to get the positions."""
    ct_spawns: list[awpy.vector.Vector3] = []
    t_spawns: list[awpy.vector.Vector3] = []

    # Dispatch each entity classname to the spawn list it belongs to.
    spawn_lists = {
        "info_player_counterterrorist": ct_spawns,
        "info_player_terrorist": t_spawns,
    }

    for properties in data.values():
        target = spawn_lists.get(properties.get("classname"))
        if target is None:
            continue
        # Only enabled, priority-0 spawns are the "real" competitive spawns.
        if not properties.get("enabled") or properties.get("priority") != 0:
            continue
        x, y, z = properties["origin"]
        target.append(awpy.vector.Vector3(x=x, y=y, z=z))

    return Spawns(CT=ct_spawns, T=t_spawns)
139 |
--------------------------------------------------------------------------------
/awpy/stats/__init__.py:
--------------------------------------------------------------------------------
1 | """Analytics module to calculate player statistics."""
2 |
3 | from awpy.stats.adr import adr
4 | from awpy.stats.kast import calculate_trades, kast
5 | from awpy.stats.rating import impact, rating
6 |
7 | __all__ = ["adr", "calculate_trades", "impact", "kast", "rating"]
8 |
--------------------------------------------------------------------------------
/awpy/stats/adr.py:
--------------------------------------------------------------------------------
1 | """Calculates Average Damage Per Round."""
2 |
3 | import polars as pl
4 |
5 | import awpy.constants
6 | import awpy.demo
7 |
8 |
def adr(
    demo: awpy.demo.Demo,
    *,
    team_dmg: bool = False,
    self_dmg: bool = True,
) -> pl.DataFrame:
    """Calculates Average Damage Per Round (ADR) for each player.

    Args:
        demo (awpy.demo.Demo): A parsed demo object which has a Polars DataFrame in `demo.damages`.
        team_dmg (bool, optional): If True, remove team damage events (i.e. when the attacker and victim
            are on the same side). Defaults to False.
        self_dmg (bool, optional): If True, remove self damage events (i.e. when `attacker_name` is missing).
            Defaults to True.

    Returns:
        pl.DataFrame: A DataFrame containing columns: name, steamid, side, n_rounds, dmg, adr.
    """
    damages = demo.damages.clone()

    # Optionally drop team-damage and self-damage rows before aggregating.
    if team_dmg:
        damages = damages.filter(pl.col("attacker_side") != pl.col("victim_side"))
    if self_dmg:
        damages = damages.filter(pl.col("attacker_name").is_not_null())

    # Aggregate total damage per player: overall first, then each side.
    per_side_frames = []
    for side_label, side_value in (
        ("all", None),
        ("ct", awpy.constants.CT_SIDE),
        ("t", awpy.constants.T_SIDE),
    ):
        subset = damages if side_value is None else damages.filter(pl.col("attacker_side") == side_value)
        per_side_frames.append(
            subset.group_by(["attacker_name", "attacker_steamid"])
            .agg(pl.col("dmg_health_real").sum().alias("dmg"))
            .with_columns(pl.lit(side_label).alias("side"))
        )

    # Stack the per-side aggregates and rename for joining with round totals.
    damage_agg = pl.concat(per_side_frames).rename({"attacker_name": "name", "attacker_steamid": "steamid"})

    # ADR = total damage / number of rounds played (per side).
    adr_df = damage_agg.join(demo.player_round_totals, on=["name", "steamid", "side"], how="inner")
    adr_df = adr_df.with_columns((pl.col("dmg") / pl.col("n_rounds")).alias("adr"))

    return adr_df.select(["name", "steamid", "side", "n_rounds", "dmg", "adr"])
78 |
--------------------------------------------------------------------------------
/awpy/stats/kast.py:
--------------------------------------------------------------------------------
1 | """Calculates the Kill, Assist, Survival, Trade %."""
2 |
3 | import polars as pl
4 |
5 | import awpy.constants
6 | import awpy.demo
7 |
8 |
def calculate_trades(demo: awpy.demo.Demo, trade_length_in_seconds: float = 5.0) -> pl.DataFrame:
    """Calculates if kills are trades.

    A trade is a kill where the attacker of a player who recently died was
    killed shortly after the initial victim was killed.

    Args:
        demo (awpy.demo.Demo): A parsed Demo.
        trade_length_in_seconds (float, optional): Length of trade time in
            seconds. Defaults to 5.0.

    Returns:
        pl.DataFrame: The input DataFrame with an additional boolean column `was_traded`
            indicating whether the kill was traded.
    """
    # Maximum tick distance for a kill to count as a trade.
    trade_window_ticks = demo.tickrate * trade_length_in_seconds

    # Temporary row index so specific rows can be flagged at the end.
    kills = demo.kills.with_row_index("row_idx")
    traded_row_ids: list[int] = []

    # Check trade conditions round by round, kill by kill.
    for round_num in kills.select("round_num").unique().to_series().to_list():
        round_kills = kills.filter(pl.col("round_num") == round_num)
        for kill in round_kills.to_dicts():
            # All kills in this round inside the window ending at this kill.
            window = round_kills.filter(
                (pl.col("tick") >= (kill["tick"] - trade_window_ticks)) & (pl.col("tick") <= kill["tick"])
            )
            # The traded kill is the latest kill in the window whose attacker
            # is the current victim (i.e. the victim got a kill, then died).
            matching_rows = [w["row_idx"] for w in window.to_dicts() if w["attacker_name"] == kill["victim_name"]]
            if matching_rows:
                traded_row_ids.append(matching_rows[-1])

    # Flag the traded rows and drop the temporary index column.
    kills = kills.with_columns(pl.col("row_idx").is_in(list(set(traded_row_ids))).alias("was_traded"))
    return kills.drop("row_idx")
59 |
60 |
def kast(demo: awpy.demo.Demo, trade_length_in_seconds: float = 3.0) -> pl.DataFrame:
    """Calculates Kill-Assist-Survival-Trade % (KAST) using Polars.

    A round counts toward a player's KAST if they got a kill, an assist,
    survived the round, or their death was traded.

    Args:
        demo (awpy.demo.Demo): A parsed Awpy demo with kills and ticks as Polars DataFrames.
        trade_length_in_seconds (float, optional): Length of trade time in seconds. Defaults to 3.0.

    Returns:
        pl.DataFrame: A DataFrame of player info with KAST statistics. The returned DataFrame
            contains the following columns:
            - name: The player's name.
            - steamid: The player's Steam ID.
            - side: The team ("all", "ct", or "t").
            - kast_rounds: Number of rounds contributing to KAST.
            - n_rounds: Total rounds played.
            - kast: The KAST percentage.

    Raises:
        ValueError: If kills or ticks are missing in the parsed demo
            (presumably raised by the Demo accessors; not raised directly here).
    """
    # Mark trade kills
    kills_with_trades = calculate_trades(demo, trade_length_in_seconds)

    # --- Kills & Assists ---
    # Each frame below is one (player, round) row per round in which the player
    # made that contribution; .unique() collapses multi-kill rounds to one row.

    # Total kills
    kills_total = (
        kills_with_trades.select(["attacker_name", "attacker_steamid", "round_num"])
        .unique()
        .rename({"attacker_name": "name", "attacker_steamid": "steamid"})
    )
    kills_ct = (
        kills_with_trades.filter(pl.col("attacker_side") == awpy.constants.CT_SIDE)
        .select(["attacker_name", "attacker_steamid", "round_num"])
        .unique()
        .rename({"attacker_name": "name", "attacker_steamid": "steamid"})
    )
    kills_t = (
        kills_with_trades.filter(pl.col("attacker_side") == awpy.constants.T_SIDE)
        .select(["attacker_name", "attacker_steamid", "round_num"])
        .unique()
        .rename({"attacker_name": "name", "attacker_steamid": "steamid"})
    )

    # Total assists
    assists_total = (
        kills_with_trades.select(["assister_name", "assister_steamid", "round_num"])
        .unique()
        .rename({"assister_name": "name", "assister_steamid": "steamid"})
    )
    assists_ct = (
        kills_with_trades.filter(pl.col("assister_side") == awpy.constants.CT_SIDE)
        .select(["assister_name", "assister_steamid", "round_num"])
        .unique()
        .rename({"assister_name": "name", "assister_steamid": "steamid"})
    )
    assists_t = (
        kills_with_trades.filter(pl.col("assister_side") == awpy.constants.T_SIDE)
        .select(["assister_name", "assister_steamid", "round_num"])
        .unique()
        .rename({"assister_name": "name", "assister_steamid": "steamid"})
    )

    # --- Trades ---
    # A traded death counts toward KAST for the *victim* of the initial kill.

    trades_total = (
        kills_with_trades.filter(pl.col("was_traded"))
        .select(["victim_name", "victim_steamid", "round_num"])
        .unique()
        .rename({"victim_name": "name", "victim_steamid": "steamid"})
    )
    trades_ct = (
        kills_with_trades.filter((pl.col("victim_side") == awpy.constants.CT_SIDE) & (pl.col("was_traded")))
        .select(["victim_name", "victim_steamid", "round_num"])
        .unique()
        .rename({"victim_name": "name", "victim_steamid": "steamid"})
    )
    trades_t = (
        kills_with_trades.filter((pl.col("victim_side") == awpy.constants.T_SIDE) & (pl.col("was_traded")))
        .select(["victim_name", "victim_steamid", "round_num"])
        .unique()
        .rename({"victim_name": "name", "victim_steamid": "steamid"})
    )

    # --- Survivals ---
    # Get the last tick of each round per player, then only keep those with health > 0.
    survivals = demo.ticks.sort("tick").group_by(["name", "steamid", "round_num"]).tail(1).filter(pl.col("health") > 0)
    survivals_total = survivals.select(["name", "steamid", "round_num"]).unique()
    # Depending on your data, team names might be lowercase; adjust as needed.
    survivals_ct = (
        survivals.filter(pl.col("side") == awpy.constants.CT_SIDE).select(["name", "steamid", "round_num"]).unique()
    )
    survivals_t = (
        survivals.filter(pl.col("side") == awpy.constants.T_SIDE).select(["name", "steamid", "round_num"]).unique()
    )

    # --- Tabulate KAST ---
    # Overall ("all"): combine kills, assists, trades, and survivals.
    # NOTE: this frame's "side" column comes from the joined player_round_totals
    # rows (already filtered to "all"), so no literal side column is added here.
    # NOTE(review): pl.count(...) is deprecated in newer Polars in favor of
    # pl.len() — confirm against the pinned Polars version before changing.
    total_kast = (
        pl.concat([kills_total, assists_total, trades_total, survivals_total])
        .unique()
        .drop_nulls()
        .group_by(["name", "steamid"])
        .agg(pl.count("round_num").alias("kast_rounds"))
        .join(demo.player_round_totals.filter(pl.col("side") == "all"), on=["name", "steamid"], how="left")
        .with_columns((pl.col("kast_rounds") * 100 / pl.col("n_rounds")).alias("kast"))
    )

    # ct side
    ct_kast = (
        pl.concat([kills_ct, assists_ct, trades_ct, survivals_ct])
        .unique()
        .drop_nulls()
        .group_by(["name", "steamid"])
        .agg(pl.count("round_num").alias("kast_rounds"))
        .join(
            demo.player_round_totals.filter(pl.col("side") == awpy.constants.CT_SIDE),
            on=["name", "steamid"],
            how="left",
        )
        .with_columns((pl.col("kast_rounds") * 100 / pl.col("n_rounds")).alias("kast"))
        .with_columns(pl.lit(awpy.constants.CT_SIDE).alias("side"))
    )

    # t side
    t_kast = (
        pl.concat([kills_t, assists_t, trades_t, survivals_t])
        .unique()
        .drop_nulls()
        .group_by(["name", "steamid"])
        .agg(pl.count("round_num").alias("kast_rounds"))
        .join(
            demo.player_round_totals.filter(pl.col("side") == awpy.constants.T_SIDE), on=["name", "steamid"], how="left"
        )
        .with_columns((pl.col("kast_rounds") * 100 / pl.col("n_rounds")).alias("kast"))
        .with_columns(pl.lit(awpy.constants.T_SIDE).alias("side"))
    )

    # Combine all KAST stats
    # The concat requires the three frames to share column names and order.
    kast_df = pl.concat([total_kast, ct_kast, t_kast])
    return kast_df.select(["name", "steamid", "side", "kast_rounds", "n_rounds", "kast"])
202 |
--------------------------------------------------------------------------------
/awpy/stats/rating.py:
--------------------------------------------------------------------------------
1 | """Calculate impact and rating (like in HLTV)."""
2 |
3 | import polars as pl
4 |
5 | import awpy.constants
6 | import awpy.demo
7 | from awpy.stats import adr, kast
8 |
9 |
def impact(
    demo: awpy.demo.Demo,
    kills_coef: float = 2.13,
    assists_coef: float = 0.42,
    intercept: float = -0.41,
) -> pl.DataFrame:
    """Calculates impact rating using Polars.

    Impact is a linear function of per-round kills and assists:
    ``impact = kills_coef * (kills / n_rounds) + assists_coef * (assists / n_rounds) + intercept``.

    Args:
        demo (awpy.demo.Demo): A parsed Awpy demo with kills and ticks as Polars DataFrames.
        kills_coef (float, optional): Coefficient for kills in the impact formula. Defaults to 2.13.
        assists_coef (float, optional): Coefficient for assists in the impact formula. Defaults to 0.42.
        intercept (float, optional): Intercept in the impact formula. Defaults to -0.41.

    Returns:
        pl.DataFrame: A DataFrame of player info with impact rating. The DataFrame contains:
            - name (str): The player's name.
            - steamid (str): The player's Steam ID.
            - side (str): The team ("all", "ct", or "t").
            - impact (float): The calculated impact rating.
    """
    # --- KILLS ---

    # Total kills (all)
    kills_total = (
        demo.kills.group_by(["attacker_name", "attacker_steamid"])
        .agg(pl.count("attacker_name").alias("kills"))
        .with_columns(pl.lit("all").alias("side"))
        .rename({"attacker_name": "name", "attacker_steamid": "steamid"})
    )
    # Kills for CT side
    kills_ct = (
        demo.kills.filter(pl.col("attacker_side") == awpy.constants.CT_SIDE)
        .group_by(["attacker_name", "attacker_steamid"])
        .agg(pl.count("attacker_name").alias("kills"))
        .with_columns(pl.lit(awpy.constants.CT_SIDE).alias("side"))
        .rename({"attacker_name": "name", "attacker_steamid": "steamid"})
    )
    # Kills for TERRORIST side
    kills_t = (
        demo.kills.filter(pl.col("attacker_side") == awpy.constants.T_SIDE)
        .group_by(["attacker_name", "attacker_steamid"])
        .agg(pl.count("attacker_name").alias("kills"))
        .with_columns(pl.lit(awpy.constants.T_SIDE).alias("side"))
        .rename({"attacker_name": "name", "attacker_steamid": "steamid"})
    )

    # --- ASSISTS ---

    # Total assists (all)
    assists_total = (
        demo.kills.group_by(["assister_name", "assister_steamid"])
        .agg(pl.count("assister_name").alias("assists"))
        .with_columns(pl.lit("all").alias("side"))
        .rename({"assister_name": "name", "assister_steamid": "steamid"})
    )
    # Assists for CT side
    assists_ct = (
        demo.kills.filter(pl.col("assister_side") == awpy.constants.CT_SIDE)
        .group_by(["assister_name", "assister_steamid"])
        .agg(pl.count("assister_name").alias("assists"))
        .with_columns(pl.lit(awpy.constants.CT_SIDE).alias("side"))
        .rename({"assister_name": "name", "assister_steamid": "steamid"})
    )
    # Assists for TERRORIST side
    assists_t = (
        demo.kills.filter(pl.col("assister_side") == awpy.constants.T_SIDE)
        .group_by(["assister_name", "assister_steamid"])
        .agg(pl.count("assister_name").alias("assists"))
        .with_columns(pl.lit(awpy.constants.T_SIDE).alias("side"))
        .rename({"assister_name": "name", "assister_steamid": "steamid"})
    )

    # --- Merge Kills & Assists with Rounds ---
    # Use the shared side constants (previously hard-coded "ct"/"t" literals)
    # so these filters stay consistent with how kast() filters the same column.
    # Left joins keep players with zero kills/assists; fill_null(0) zeroes them.

    stats_total = (
        demo.player_round_totals.filter(pl.col("side") == "all")
        .join(kills_total, on=["name", "steamid"], how="left", suffix="_kills")
        .join(assists_total, on=["name", "steamid"], how="left", suffix="_assists")
        .fill_null(0)
    )
    stats_ct = (
        demo.player_round_totals.filter(pl.col("side") == awpy.constants.CT_SIDE)
        .join(kills_ct, on=["name", "steamid"], how="left", suffix="_kills")
        .join(assists_ct, on=["name", "steamid"], how="left", suffix="_assists")
        .fill_null(0)
    )
    stats_t = (
        demo.player_round_totals.filter(pl.col("side") == awpy.constants.T_SIDE)
        .join(kills_t, on=["name", "steamid"], how="left", suffix="_kills")
        .join(assists_t, on=["name", "steamid"], how="left", suffix="_assists")
        .fill_null(0)
    )

    # Combine the stats for all teams
    impact_df = pl.concat([stats_total, stats_ct, stats_t])
    # Calculate impact = 2.13*(kills/n_rounds) + 0.42*(assists/n_rounds) - 0.41
    impact_df = impact_df.with_columns(
        (
            kills_coef * (pl.col("kills") / pl.col("n_rounds"))
            + assists_coef * (pl.col("assists") / pl.col("n_rounds"))
            + intercept
        ).alias("impact")
    )

    return impact_df.select(["name", "steamid", "side", "impact"])
119 |
120 |
def rating(
    demo: awpy.demo.Demo,
    kast_coef: float = 0.0073,
    kills_coef: float = 0.3591,
    deaths_coef: float = -0.5329,
    impact_coef: float = 0.2372,
    adr_coef: float = 0.0032,
    intercept: float = 0.1587,
) -> pl.DataFrame:
    """Calculates player rating (similar to HLTV) using Polars.

    Args:
        demo (awpy.demo.Demo): A parsed Awpy demo with kills and ticks as Polars DataFrames.
        kast_coef (float, optional): Coefficient for KAST in the rating formula. Defaults to 0.0073.
        kills_coef (float, optional): Coefficient for kills in the rating formula. Defaults to 0.3591.
        deaths_coef (float, optional): Coefficient for deaths in the rating formula. Defaults to -0.5329.
        impact_coef (float, optional): Coefficient for impact in the rating formula. Defaults to 0.2372.
        adr_coef (float, optional): Coefficient for ADR in the rating formula. Defaults to 0.0032.
        intercept (float, optional): Intercept in the rating formula. Defaults to 0.1587.

    Returns:
        pl.DataFrame: A DataFrame of player info with additional columns:
            - n_rounds (int): Total rounds played.
            - impact (float): Impact rating.
            - rating (float): The overall calculated rating.
    """

    def _per_side_counts(name_col: str, steamid_col: str, side_col: str, alias: str) -> pl.DataFrame:
        """Count kill rows per player for the "all", CT, and T sides."""
        frames = []
        for side_label, side_value in (
            ("all", None),
            (awpy.constants.CT_SIDE, awpy.constants.CT_SIDE),
            (awpy.constants.T_SIDE, awpy.constants.T_SIDE),
        ):
            rows = demo.kills if side_value is None else demo.kills.filter(pl.col(side_col) == side_value)
            frames.append(
                rows.group_by([name_col, steamid_col])
                .agg(pl.count(name_col).alias(alias))
                .with_columns(pl.lit(side_label).alias("side"))
                .rename({name_col: "name", steamid_col: "steamid"})
            )
        return pl.concat(frames)

    # Per-player kill and death counts, overall and per side.
    kills = _per_side_counts("attacker_name", "attacker_steamid", "attacker_side", "kills")
    deaths = _per_side_counts("victim_name", "victim_steamid", "victim_side", "deaths")

    # Component statistics from the sibling stat functions.
    kast_df = kast(demo)
    adr_df = adr(demo)
    impact_df = impact(demo)

    # Assemble one row per (player, side) with every rating component.
    # NOTE(review): all joins after the first default to inner joins, so a
    # player missing any component on a side is dropped — confirm intended.
    rating_df = (
        demo.player_round_totals.join(
            kast_df.select(["name", "steamid", "side", "kast"]),
            on=["name", "steamid", "side"],
            how="left",
        )
        .join(
            adr_df.select(["name", "steamid", "side", "adr"]),
            on=["name", "steamid", "side"],
        )
        .join(
            impact_df.select(["name", "steamid", "side", "impact"]),
            on=["name", "steamid", "side"],
        )
        .join(kills, on=["name", "steamid", "side"])
        .join(deaths, on=["name", "steamid", "side"])
    )

    # Weighted HLTV-style rating formula.
    rating_df = rating_df.with_columns(
        (
            kast_coef * pl.col("kast")
            + kills_coef * (pl.col("kills") / pl.col("n_rounds"))
            + deaths_coef * (pl.col("deaths") / pl.col("n_rounds"))
            + impact_coef * pl.col("impact")
            + adr_coef * pl.col("adr")
            + intercept
        ).alias("rating")
    )

    return rating_df.select(["name", "steamid", "side", "n_rounds", "impact", "rating"])
236 |
--------------------------------------------------------------------------------
/awpy/vector.py:
--------------------------------------------------------------------------------
1 | """Simple Vector3 representation to represent 3D points."""
2 |
3 | from __future__ import annotations # Enables postponed evaluation of type hints
4 |
5 | from dataclasses import dataclass
6 | from typing import Self, TypedDict
7 |
8 | import numpy.typing as npt
9 |
10 |
class Vector3Dict(TypedDict):
    """Typed dictionary for Vector3.

    Plain-dict counterpart of :class:`Vector3` with the same three fields.
    """

    # X coordinate
    x: float
    # Y coordinate
    y: float
    # Z coordinate
    z: float
17 |
18 |
19 | @dataclass
20 | class Vector3:
21 | """A 3D vector representation.
22 |
23 | Attributes:
24 | x: X coordinate.
25 | y: Y coordinate.
26 | z: Z coordinate.
27 | """
28 |
29 | x: float
30 | y: float
31 | z: float
32 |
33 | @classmethod
34 | def from_input(cls, value: Vector3 | tuple | list | npt.NDArray) -> Vector3:
35 | """Creates a Vector3 instance from various input types.
36 |
37 | Args:
38 | value (Vector3 | tuple | list | np.ndarray): Input to be
39 | coerced into a Vector3.
40 |
41 | Returns:
42 | Vector3: A Vector3 instance.
43 | """
44 | if isinstance(value, cls):
45 | return value
46 | if isinstance(value, tuple | list) and len(value) == 3:
47 | return cls(*value)
48 | if isinstance(value, npt.NDArray) and value.shape == (3,):
49 | return cls(*value.tolist())
50 | erroneous_input_msg = "Input must be a Vector3, tuple, list of length 3, or a numpy array of shape (3,)"
51 | raise ValueError(erroneous_input_msg)
52 |
53 | def __sub__(self, other: Vector3) -> Vector3:
54 | """Subtract two vectors."""
55 | return Vector3(self.x - other.x, self.y - other.y, self.z - other.z)
56 |
57 | def __add__(self, other: Vector3) -> Vector3:
58 | """Add two vectors."""
59 | return Vector3(self.x + other.x, self.y + other.y, self.z + other.z)
60 |
61 | def dot(self, other: Vector3) -> float:
62 | """Compute dot product."""
63 | return self.x * other.x + self.y * other.y + self.z * other.z
64 |
65 | def cross(self, other: Vector3) -> Vector3:
66 | """Compute cross product."""
67 | return Vector3(
68 | self.y * other.z - self.z * other.y,
69 | self.z * other.x - self.x * other.z,
70 | self.x * other.y - self.y * other.x,
71 | )
72 |
73 | def length(self) -> float:
74 | """Compute vector length."""
75 | return (self.x * self.x + self.y * self.y + self.z * self.z) ** 0.5
76 |
77 | def normalize(self) -> Vector3:
78 | """Return normalized vector."""
79 | length = self.length()
80 | if length == 0:
81 | return Vector3(0, 0, 0)
82 | return Vector3(self.x / length, self.y / length, self.z / length)
83 |
84 | def to_dict(self) -> Vector3Dict:
85 | """Convert Vector3 to dictionary."""
86 | return {"x": self.x, "y": self.y, "z": self.z}
87 |
88 | @classmethod
89 | def from_dict(cls, data: Vector3Dict) -> Vector3:
90 | """Create a Vector3 instance from a dictionary."""
91 | return cls(data["x"], data["y"], data["z"])
92 |
93 | def to_tuple(self) -> tuple[float, float, float]:
94 | """Convert Vector3 to tuple."""
95 | return (self.x, self.y, self.z)
96 |
97 | @classmethod
98 | def from_tuple(cls, data: tuple[float, float, float]) -> Self:
99 | """Create a Vector3 instance from a tuple."""
100 | return cls(data[0], data[1], data[2])
101 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SOURCEDIR = .
BUILDDIR = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
# NOTE: recipe lines must be indented with a literal tab character.
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# Make the repository root importable so autodoc can find the `awpy` package.
sys.path.insert(0, os.path.abspath(".."))

# -- Imports -----------------------------------------------------------------
# Imported for its side effect of registering the theme with Sphinx
# (required by older Sphinx versions; also listed in `extensions` below).
import sphinx_rtd_theme

# -- Project information -----------------------------------------------------

project = "Awpy"
# NOTE: shadowing the `copyright` builtin is the standard Sphinx convention.
copyright = "2024, Peter Xenopoulos"
author = "Peter Xenopoulos"

# The short X.Y version
version = "2.0"
# The full version, including alpha/beta/rc tags
release = "2.0.2"


# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.coverage",
    "sphinx.ext.mathjax",
    "sphinx.ext.viewcode",
    "sphinx.ext.githubpages",
    "sphinx.ext.napoleon",
    "sphinx.ext.autodoc",
    "sphinx_rtd_theme",
    "sphinx.ext.autosectionlabel",
    "nbsphinx",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = "awpy_docs"


# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, "awpy.tex", "awpy Documentation", "Peter Xenopoulos", "manual"),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "awpy", "awpy Documentation", [author], 1)]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "awpy",
        "awpy Documentation",
        author,
        "awpy",
        "One line description of project.",
        "Miscellaneous",
    ),
]


# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]


# -- Extension configuration -------------------------------------------------
--------------------------------------------------------------------------------
/docs/examples/parse_demo_cli.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Using the Awpy CLI\n",
8 | "\n",
9 | "Awpy also comes with the capability of parsing demos through the command line interface. After installing Awpy, you can run the following:"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "\u001b[32m2025-02-17 11:28:57.866\u001b[0m | \u001b[34m\u001b[1mDEBUG \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mparse\u001b[0m:\u001b[36m214\u001b[0m - \u001b[34m\u001b[1mStarting to parse spirit-vs-natus-vincere-m2-dust2.dem\u001b[0m\n",
22 | "\u001b[32m2025-02-17 11:29:01.834\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mparse\u001b[0m:\u001b[36m258\u001b[0m - \u001b[32m\u001b[1mFinished parsing spirit-vs-natus-vincere-m2-dust2.dem, took 3.97 seconds\u001b[0m\n",
23 | "\u001b[32m2025-02-17 11:29:01.834\u001b[0m | \u001b[34m\u001b[1mDEBUG \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mcompress\u001b[0m:\u001b[36m573\u001b[0m - \u001b[34m\u001b[1mStarting to compress parsed spirit-vs-natus-vincere-m2-dust2.dem\u001b[0m\n",
24 | "\u001b[32m2025-02-17 11:29:02.013\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mcompress\u001b[0m:\u001b[36m605\u001b[0m - \u001b[32m\u001b[1mCompressed demo data saved to /home/xeno/awpy/docs/examples/spirit-vs-natus-vincere-m2-dust2.zip, took 0.18 seconds\u001b[0m\n",
25 | "kills.parquet\n",
26 | "damages.parquet\n",
27 | "footsteps.parquet\n",
28 | "shots.parquet\n",
29 | "grenades.parquet\n",
30 | "infernos.parquet\n",
31 | "smokes.parquet\n",
32 | "bomb.parquet\n",
33 | "ticks.parquet\n",
34 | "rounds.parquet\n",
35 | "header.json\n"
36 | ]
37 | }
38 | ],
39 | "source": [
40 | "!awpy parse spirit-vs-natus-vincere-m2-dust2.dem\n",
41 | "\n",
42 | "import zipfile\n",
43 | "\n",
44 | "\n",
45 | "def list_zip_contents(zip_path: str) -> list[str]:\n",
46 | " with zipfile.ZipFile(zip_path, \"r\") as zip_ref:\n",
47 | " contents = zip_ref.namelist()\n",
48 | " return contents\n",
49 | "\n",
50 | "\n",
51 | "zip_path = \"spirit-vs-natus-vincere-m2-dust2.zip\"\n",
52 | "contents = list_zip_contents(zip_path)\n",
53 | "for file_name in contents:\n",
54 | " print(file_name)"
55 | ]
56 | },
57 | {
58 | "cell_type": "markdown",
59 | "metadata": {},
60 | "source": [
61 | "This will write a zipped file of the parsed demo data (as `.parquet` files). Keep in mind, Awpy doesn't currently write all events, just the main ones that are exposed as properties on the `Demo` class after parsing."
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "### Passing options via the command-line\n",
69 | "\n",
70 | "You can explore the Awpy cli options by running `awpy parse --help`. Below, we show an example of how to use the most important flags."
71 | ]
72 | },
73 | {
74 | "cell_type": "code",
75 | "execution_count": 2,
76 | "metadata": {},
77 | "outputs": [
78 | {
79 | "name": "stdout",
80 | "output_type": "stream",
81 | "text": [
82 | "\u001b[32m2025-02-17 11:29:08.982\u001b[0m | \u001b[34m\u001b[1mDEBUG \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mparse\u001b[0m:\u001b[36m214\u001b[0m - \u001b[34m\u001b[1mStarting to parse spirit-vs-natus-vincere-m2-dust2.dem\u001b[0m\n",
83 | "\u001b[32m2025-02-17 11:29:13.061\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mparse\u001b[0m:\u001b[36m258\u001b[0m - \u001b[32m\u001b[1mFinished parsing spirit-vs-natus-vincere-m2-dust2.dem, took 4.08 seconds\u001b[0m\n",
84 | "\u001b[32m2025-02-17 11:29:13.061\u001b[0m | \u001b[34m\u001b[1mDEBUG \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mcompress\u001b[0m:\u001b[36m573\u001b[0m - \u001b[34m\u001b[1mStarting to compress parsed spirit-vs-natus-vincere-m2-dust2.dem\u001b[0m\n",
85 | "\u001b[32m2025-02-17 11:29:13.506\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mawpy.demo\u001b[0m:\u001b[36mcompress\u001b[0m:\u001b[36m605\u001b[0m - \u001b[32m\u001b[1mCompressed demo data saved to /home/xeno/awpy/docs/examples/spirit-vs-natus-vincere-m2-dust2.zip, took 0.44 seconds\u001b[0m\n"
86 | ]
87 | }
88 | ],
89 | "source": [
90 | "!awpy parse spirit-vs-natus-vincere-m2-dust2.dem --player-props X,Y,Z,health --verbose"
91 | ]
92 | }
93 | ],
94 | "metadata": {
95 | "kernelspec": {
96 | "display_name": "3.11.8",
97 | "language": "python",
98 | "name": "python3"
99 | },
100 | "language_info": {
101 | "codemirror_mode": {
102 | "name": "ipython",
103 | "version": 3
104 | },
105 | "file_extension": ".py",
106 | "mimetype": "text/x-python",
107 | "name": "python",
108 | "nbconvert_exporter": "python",
109 | "pygments_lexer": "ipython3",
110 | "version": "3.11.8"
111 | }
112 | },
113 | "nbformat": 4,
114 | "nbformat_minor": 2
115 | }
116 |
--------------------------------------------------------------------------------
/docs/examples/visibility.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Calculating Visibility in Counter-Strike 2\n",
8 | "\n",
9 | "Knowing whether or not two players are visible to each other opens up exciting analysis opportunities. It is also somewhat difficult to calculate, and existing in game flags like `isSpotted` can be unreliable. Awpy provides a fast approach to determining visibility in Counter-Strike 2 (CS2). While our approach is not 100% foolproof, it is a start and applicable in many cases.\n",
10 | "\n",
11 | "Triangles are the fundamental building blocks of 3D models, including those used in Counter-Strike maps. In computer graphics, complex surfaces are often broken down into smaller, flat polygons, typically triangles, because they are computationally efficient to render and manipulate. A Counter-Strike map consists of many such triangles forming walls, floors, objects, and other geometry. These triangles are used not only for visual rendering but also for collision detection and gameplay mechanics, such as determining visibility and movement constraints.\n",
12 | "\n",
13 | "To determine if two points are visible, our approach checks whether the straight line segment connecting them intersects with any of the triangles in the map. This process involves representing the map's triangles in a bounding volume hierarchy (BVH) tree, which organizes the triangles into nested groups to optimize intersection tests. The algorithm first queries the BVH to quickly identify potential collision candidates. Then, it performs precise intersection tests between the line segment and the relevant triangles. If no intersection is found, the points are visible to each other; otherwise, they are not. This approach balances accuracy and performance, making it suitable for real-time applications in games."
14 | ]
15 | },
16 | {
17 | "cell_type": "markdown",
18 | "metadata": {},
19 | "source": [
20 | "### Acquiring `.tri` files\n",
21 | "Awpy parses CS2 maps and produces `.tri` files, which are binary files containing the triangle information. Across all maps, the compressed `.tri` files are about 20 megabytes, so they are ultimately still quite small. To download the `.tri` files to your awpy data directory, you can run `awpy get tris` to get the relevant files. Below, we show where the files are located."
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 1,
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "name": "stdout",
31 | "output_type": "stream",
32 | "text": [
33 | "C:\\Users\\pnxen\\.awpy\\tris\\.patch\n",
34 | "C:\\Users\\pnxen\\.awpy\\tris\\ar_baggage.tri\n",
35 | "C:\\Users\\pnxen\\.awpy\\tris\\ar_shoots.tri\n",
36 | "C:\\Users\\pnxen\\.awpy\\tris\\cs_italy.tri\n",
37 | "C:\\Users\\pnxen\\.awpy\\tris\\cs_office.tri\n",
38 | "C:\\Users\\pnxen\\.awpy\\tris\\de_ancient.tri\n",
39 | "C:\\Users\\pnxen\\.awpy\\tris\\de_anubis.tri\n",
40 | "C:\\Users\\pnxen\\.awpy\\tris\\de_dust2.tri\n",
41 | "C:\\Users\\pnxen\\.awpy\\tris\\de_inferno.tri\n",
42 | "C:\\Users\\pnxen\\.awpy\\tris\\de_mirage.tri\n",
43 | "C:\\Users\\pnxen\\.awpy\\tris\\de_nuke.tri\n",
44 | "C:\\Users\\pnxen\\.awpy\\tris\\de_overpass.tri\n",
45 | "C:\\Users\\pnxen\\.awpy\\tris\\de_train.tri\n",
46 | "C:\\Users\\pnxen\\.awpy\\tris\\de_vertigo.tri\n",
47 | "C:\\Users\\pnxen\\.awpy\\tris\\lobby_mapveto.tri\n"
48 | ]
49 | }
50 | ],
51 | "source": [
52 | "# Import the Awpy data directory\n",
53 | "from awpy.data import TRIS_DIR\n",
54 | "\n",
55 | "# List files in the data directory\n",
56 | "for file in TRIS_DIR.iterdir():\n",
57 | " print(file)"
58 | ]
59 | },
60 | {
61 | "cell_type": "markdown",
62 | "metadata": {},
63 | "source": [
64 | "### Constructing the `VisibilityChecker`\n",
65 | "\n",
66 | "To get started, you must first create a `VisibilityChecker`. You can do so by passing a path to a `.tri` file or you can pass in a list of `awpy.visibility.Triangle` objects. The below options can take about 30 seconds or so."
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": 2,
72 | "metadata": {},
73 | "outputs": [
74 | {
75 | "name": "stdout",
76 | "output_type": "stream",
77 | "text": [
78 | "VisibilityChecker(n_triangles=326265)\n"
79 | ]
80 | }
81 | ],
82 | "source": [
83 | "from awpy.visibility import VisibilityChecker\n",
84 | "\n",
85 | "de_dust2_tri = TRIS_DIR / \"de_dust2.tri\"\n",
86 | "\n",
87 | "# Create VC object with a file path\n",
88 | "vc = VisibilityChecker(path=de_dust2_tri)\n",
89 | "\n",
90 | "# Create VC object with a list of triangles\n",
91 | "tris = VisibilityChecker.read_tri_file(de_dust2_tri)\n",
92 | "vc = VisibilityChecker(triangles=tris)\n",
93 | "print(vc)"
94 | ]
95 | },
96 | {
97 | "cell_type": "markdown",
98 | "metadata": {},
99 | "source": [
100 | "### Calculating Visibility\n",
101 | "\n",
102 | "We can now calculate visibility (yes/no) rather simply. We just need to provide two points, which we can do using a tuple for each.\n"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 3,
108 | "metadata": {},
109 | "outputs": [
110 | {
111 | "name": "stdout",
112 | "output_type": "stream",
113 | "text": [
114 | "T spawn 1 is visible from T spawn 2: True\n",
115 | "T spawn 1 is visible from CT spawn: False\n"
116 | ]
117 | }
118 | ],
119 | "source": [
120 | "t_spawn_pos_1 = (-680, 834, 180)\n",
121 | "t_spawn_pos_2 = (-1349, 814, 180)\n",
122 | "ct_spawn_pos = (15, 2168, -65)\n",
123 | "\n",
124 | "print(f\"T spawn 1 is visible from T spawn 2: {vc.is_visible(t_spawn_pos_1, t_spawn_pos_2)}\")\n",
125 | "print(f\"T spawn 1 is visible from CT spawn: {vc.is_visible(t_spawn_pos_1, ct_spawn_pos)}\")"
126 | ]
127 | },
128 | {
129 | "cell_type": "markdown",
130 | "metadata": {},
131 | "source": [
132 | "Keep in mind that, due to the BVH we create, these calculations are _fast_. The visibility calculation for two points that are visible will always take the longest. For ones that aren't visible, it should be much shorter. Below, we show that confirming that you cannot see CT spawn from T spawn is roughly 3x faster than confirming that you can see one T spawn position from another."
133 | ]
134 | },
135 | {
136 | "cell_type": "code",
137 | "execution_count": 4,
138 | "metadata": {},
139 | "outputs": [
140 | {
141 | "name": "stdout",
142 | "output_type": "stream",
143 | "text": [
144 | "177 μs ± 5 μs per loop (mean ± std. dev. of 7 runs, 10,000 loops each)\n"
145 | ]
146 | }
147 | ],
148 | "source": [
149 | "%timeit vc.is_visible(t_spawn_pos_1, t_spawn_pos_2)"
150 | ]
151 | },
152 | {
153 | "cell_type": "code",
154 | "execution_count": 5,
155 | "metadata": {},
156 | "outputs": [
157 | {
158 | "name": "stdout",
159 | "output_type": "stream",
160 | "text": [
161 | "65.4 μs ± 2.24 μs per loop (mean ± std. dev. of 7 runs, 10,000 loops each)\n"
162 | ]
163 | }
164 | ],
165 | "source": [
166 | "%timeit vc.is_visible(t_spawn_pos_1, ct_spawn_pos)"
167 | ]
168 | },
169 | {
170 | "cell_type": "markdown",
171 | "metadata": {},
172 | "source": [
173 | "Most of the time is really spent creating the BVH tree. Below is the time to do so for the smallest and largest maps:"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 6,
179 | "metadata": {},
180 | "outputs": [
181 | {
182 | "name": "stdout",
183 | "output_type": "stream",
184 | "text": [
185 | "744 ms ± 37.7 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)\n"
186 | ]
187 | }
188 | ],
189 | "source": [
190 | "%timeit VisibilityChecker(path=TRIS_DIR / \"de_mirage.tri\")"
191 | ]
192 | },
193 | {
194 | "cell_type": "code",
195 | "execution_count": 7,
196 | "metadata": {},
197 | "outputs": [
198 | {
199 | "name": "stdout",
200 | "output_type": "stream",
201 | "text": [
202 | "9.62 s ± 175 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)\n"
203 | ]
204 | }
205 | ],
206 | "source": [
207 | "%timeit VisibilityChecker(path=TRIS_DIR / \"de_inferno.tri\")"
208 | ]
209 | },
210 | {
211 | "cell_type": "markdown",
212 | "metadata": {},
213 | "source": [
214 | "If you want to check positions in a local server, you can set `sv_cheats 1` and then type `getpos` in your game's console."
215 | ]
216 | },
217 | {
218 | "cell_type": "markdown",
219 | "metadata": {},
220 | "source": [
221 | "### Pitfalls\n",
222 | "\n",
223 | "Keep in mind that things like smokes, flashes, and props may impact visibility.\n"
224 | ]
225 | }
226 | ],
227 | "metadata": {
228 | "kernelspec": {
229 | "display_name": ".venv",
230 | "language": "python",
231 | "name": "python3"
232 | },
233 | "language_info": {
234 | "codemirror_mode": {
235 | "name": "ipython",
236 | "version": 3
237 | },
238 | "file_extension": ".py",
239 | "mimetype": "text/x-python",
240 | "name": "python",
241 | "nbconvert_exporter": "python",
242 | "pygments_lexer": "ipython3",
243 | "version": "3.13.2"
244 | }
245 | },
246 | "nbformat": 4,
247 | "nbformat_minor": 2
248 | }
249 |
--------------------------------------------------------------------------------
/docs/getting-started/faq.rst:
--------------------------------------------------------------------------------
1 | Frequently Asked Questions (FAQs)
2 | =================================
3 |
4 | This is a nonexhaustive list of frequently asked questions.
5 |
6 | **Q:** What can I do with Awpy?
You can use Awpy to parse, analyze and visualize Counter-Strike 2 demo files (demos).
8 |
9 | **Q:** How do I install Awpy?
10 | You can install Awpy in Python by running ``pip install awpy``.
11 |
12 | **Q:** The parser returns weird rounds or data!
13 | Please note that Awpy parses, and cleans, data from the demo file assuming the demo is from a competitive Counter-Strike 2 match (e.g., from HLTV, FACEIT or competitive matchmaking).
14 |
15 | **Q:** Where can I get documentation for the parsed data?
16 | Please look at :doc:`../modules/parser_output`.
17 |
18 | **Q:** Is Awpy available in other languages?
19 | Awpy is only available in Python. However, you can use a :doc:`../modules/cli` to parse demos with Awpy via a command-line interface.
20 |
21 | **Q:** I want to parse everything in the demo / I have specific parsing needs, can Awpy help?
    Probably not -- Awpy is focused on competitive play and analysis. Awpy is built on the very flexible `demoparser2 library <https://github.com/LaihoE/demoparser>`_. You may still find Awpy useful for visualization.
23 |
24 | **Q:** How can I contribute to Awpy?
    We are always looking for people to help improve Awpy, no matter the skill-level. Please reach out on `Discord <https://discord.gg/W34XjsSs2H>`_ if you are interested.
26 |
27 | **Q:** How should I attribute Awpy in my work?
28 | If you're posting on a casual site, like Twitter, you can simply link back to the Awpy Github repository. If you're working on an academic paper, you can cite `Analyzing the Differences between Professional and Amateur Esports through Win Probability `_.
--------------------------------------------------------------------------------
/docs/getting-started/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | To install Awpy, you must have Python >= 3.11, and you can simply run
5 |
6 | ``pip install awpy``
7 |
8 | To check your current Awpy version, run
9 |
10 | ``pip show awpy``
11 |
12 | To see what data Awpy makes available for download (e.g., `.tri` files for world geometry, parsed nav files, etc.), you can run
13 |
14 | ``awpy get --help``
15 |
--------------------------------------------------------------------------------
/docs/getting-started/license.rst:
--------------------------------------------------------------------------------
1 | License and Acknowledgments
2 | ===========================
3 |
Awpy uses an MIT License, which you can view `here <https://github.com/pnxenopoulos/awpy/blob/main/LICENSE>`_.
5 |
Awpy is made possible by the `demoparser2 library <https://github.com/LaihoE/demoparser>`_, also under the MIT license.
7 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. awpy documentation master file, created by
2 | sphinx-quickstart on Sun Jan 30 21:22:52 2022.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Awpy
7 | ===================
8 | |Discord| |Github| |Build| |License|
9 |
10 | .. |Discord| image:: https://img.shields.io/discord/868146581419999232?color=blue&label=Discord&logo=discord
11 | :target: https://discord.gg/W34XjsSs2H
12 |
13 | .. |Github| image:: https://img.shields.io/badge/github-repo-yellowgreen
14 | :target: https://github.com/pnxenopoulos/awpy
15 |
16 | .. |Build| image:: https://github.com/pnxenopoulos/awpy/actions/workflows/build.yml/badge.svg
17 | :target: https://github.com/pnxenopoulos/awpy/actions/workflows/build.yml
18 |
19 | .. |Artifacts| image:: https://github.com/pnxenopoulos/awpy/actions/workflows/artifacts.yml/badge.svg
20 | :target: https://github.com/pnxenopoulos/awpy/actions/workflows/artifacts.yml
21 |
22 | .. |License| image:: https://img.shields.io/badge/license-MIT-lightgrey
23 | :target: https://github.com/pnxenopoulos/awpy/blob/main/LICENSE
24 |
25 | `Awpy` (GitHub_) allows a user to parse, analyze and visualize Counter-Strike 2 demos, specifically those from competitive Counter-Strike (e.g., demos from HLTV, FACEIT, and competitive matchmaking). To install Awpy, run ``pip install awpy`` (Python >= 3.11). Please join the Discord_ server if you would like to discuss Awpy or esports analytics. You can get started with the following example:
26 |
27 | .. _GitHub: https://github.com/pnxenopoulos/awpy
28 | .. _Discord: https://discord.gg/W34XjsSs2H
29 |
30 |
31 | .. code-block:: python
32 |
33 | from awpy import Demo
34 |
35 | # Construct and then parse a demo
36 | dem = Demo("natus-vincere-vs-virtus-pro-m1-overpass.dem")
37 | dem.parse()
38 |
39 | # Access various dictionaries & Polars dataframes
40 | dem.header
41 | dem.rounds
42 | dem.grenades
43 | dem.kills
44 | dem.damages
45 | dem.bomb
46 | dem.smokes
47 | dem.infernos
48 | dem.shots
49 | dem.ticks
50 |
51 | # If you need to change to a Pandas dataframe, you can do
52 | dem.ticks.to_pandas()
53 |
54 | You can take a look at the :doc:`examples/parse_demo` to see how to parse a demo and access the data.
55 |
56 | .. Hidden TOCs
57 |
58 | .. toctree::
59 | :caption: Getting Started
60 | :maxdepth: 2
61 | :hidden:
62 |
63 | getting-started/installation
64 | getting-started/faq
65 | getting-started/license
66 | modules/parser_output
67 |
68 | .. toctree::
69 | :caption: Example Notebooks
70 | :maxdepth: 2
71 | :hidden:
72 |
73 | examples/parse_demo
74 | examples/parse_demo_cli
75 | examples/demo_stats
76 | examples/plot_demo
77 | examples/visibility
78 | examples/nav
79 |
80 | .. toctree::
81 | :caption: Documentation
82 | :maxdepth: 2
83 | :hidden:
84 |
85 | modules/cli
86 | modules/data
87 | modules/demo
88 | modules/nav
89 | modules/plot
90 | modules/stats
91 | modules/visibility
92 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

REM Change to this script's directory so relative paths resolve.
pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

REM Probe sphinx-build; errorlevel 9009 means the command was not found.
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

REM Forward the requested target to Sphinx's "make mode".
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd
36 |
--------------------------------------------------------------------------------
/docs/modules/cli.rst:
--------------------------------------------------------------------------------
1 | Command-Line Interface
2 | =================================
3 |
4 | You can also use Awpy in command-line form. This is useful for automating tasks or for quick one-off operations. To get started, you can view the example below, which closely mirrors the parse demo example in the documentation.
5 |
6 | .. code-block:: bash
7 |
8 | awpy parse natus-vincere-vs-virtus-pro-m1-overpass.dem --player-props X,Y,Z --other-props is_bomb_planted --verbose
--------------------------------------------------------------------------------
/docs/modules/data.rst:
--------------------------------------------------------------------------------
1 | Data
2 | ===========
3 |
4 | Awpy packages data artifacts (map images, parsed navigation meshes, map triangles, etc.) separately from the code.
5 | These artifacts are liable to change with game updates. We try to maintain as much history as we can, but we can't guarantee that we have all the data for all game versions.
6 | Data is stored on `awpycs.com` as `awpycs.com/{patch}/{artifact}.{filetype}`. Most things like map images, parsed navigation meshes and map triangles are stored as `.zip` files.
7 | The triangles are the largest (roughly 20MB), but most are a few MB compressed.
8 | To see what artifacts are available and what the current patch is, you can run
9 |
10 | .. code-block:: bash
11 |
12 | awpy artifacts
13 |
14 | To get a specific artifact, you can run something like the following, which will grab all the triangles for the current patch.
15 |
16 | .. code-block:: bash
17 |
18 | awpy get tris
19 |
20 | If you want to specify a patch, you can do so with the `--patch` flag.
21 |
22 | .. code-block:: bash
23 |
24 | awpy get tris --patch 123456789
25 |
26 | The data is stored in the Awpy directory, which is `$HOME/.awpy`.
--------------------------------------------------------------------------------
/docs/modules/demo.rst:
--------------------------------------------------------------------------------
1 | Demo
2 | ====================
3 |
4 | This module contains the Demo class, which is used to parse and hold demo file data.
5 |
6 | awpy.demo
7 | ------------------------------
8 |
9 | .. automodule:: awpy.demo
10 | :members:
11 | :undoc-members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/modules/nav.rst:
--------------------------------------------------------------------------------
1 | Nav
2 | ====================
3 |
4 | This module contains the Nav class, which is used to represent navigation meshes in Counter-Strike 2.
5 |
6 | awpy.nav
7 | ------------------------------
8 |
9 | .. automodule:: awpy.nav
10 | :members:
11 | :undoc-members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/modules/plot.rst:
--------------------------------------------------------------------------------
1 | Visualization & Plotting
2 | ========================
3 |
4 | This module contains functions for visualizing Counter-Strike 2 demo files.
5 |
6 | awpy.plot
7 | ------------------------------
8 |
9 | .. automodule:: awpy.plot
10 | :members:
11 | :undoc-members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/modules/stats.rst:
--------------------------------------------------------------------------------
1 | Stats
2 | ====================
3 |
4 | This module contains functions for calculating statistics, like ADR, KAST and Rating, on Counter-Strike 2 demo files.
5 |
6 | awpy.stats
7 | ------------------------------
8 |
9 | .. automodule:: awpy.stats
10 | :members:
11 | :undoc-members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/modules/visibility.rst:
--------------------------------------------------------------------------------
1 | Visibility
2 | ====================
3 |
4 | This module contains functions for calculating visibility in Counter-Strike 2 demos.
5 |
6 | awpy.visibility
7 | ------------------------------
8 |
9 | .. automodule:: awpy.visibility
10 | :members:
11 | :undoc-members:
12 | :show-inheritance:
13 | :exclude-members: Edge, Triangle
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "awpy"
3 | version = "2.0.2"
4 | description = "Counter-Strike 2 demo parsing, analysis and visualization"
5 | readme = "README.md"
6 | authors = [
7 | { name = "Peter Xenopoulos", email = "pnxenopoulos@gmail.com" }
8 | ]
9 | keywords = ["counter-strike 2", "counter-strike", "counter strike 2", "counter strike", "cs2", "csgo", "esports", "sports-analytics"]
10 | classifiers = [
11 | "License :: OSI Approved :: MIT License",
12 | "Intended Audience :: Science/Research",
13 |     "Programming Language :: Python :: 3.11",
15 | "Programming Language :: Python :: 3.12",
16 | "Programming Language :: Python :: 3.13",
17 | "Topic :: Games/Entertainment",
18 | "Topic :: Games/Entertainment :: First Person Shooters",
19 | "Topic :: Scientific/Engineering :: Information Analysis"
20 | ]
21 | requires-python = ">=3.11,<3.14"
22 | dependencies = [
23 | "click>=8.1.8",
24 | "demoparser2>=0.38.0",
25 | "loguru>=0.7.3",
26 | "matplotlib>=3.10.0",
27 | "networkx>=3.4.2",
28 | "numpy>=2.0.0",
29 | "pillow>=11.1.0",
30 | "polars>=1.22.0",
31 | "requests>=2.32.3",
32 | "scipy>=1.15.0",
33 | "tqdm>=4.67.1",
34 | ]
35 |
36 | [project.urls]
37 | Homepage = "https://awpycs.com"
38 | Repository = "https://github.com/pnxenopoulos/awpy"
39 | Docs = "https://awpy.readthedocs.io/en/latest/?badge=latest"
40 |
41 | [project.scripts]
42 | awpy = "awpy.cli:awpy_cli"
43 |
44 | [build-system]
45 | requires = ["hatchling"]
46 | build-backend = "hatchling.build"
47 |
48 | [dependency-groups]
49 | dev = [
50 | "mypy>=1.15.0",
51 | "pre-commit>=4.2.0",
52 | "ruff>=0.11.2",
53 | "vdf>=3.4",
54 | ]
55 | test = [
56 | "coverage>=7.7.1",
57 | "pytest>=8.3.5",
58 | ]
59 | docs = [
60 | "ipykernel>=6.29.5",
61 | "nbsphinx>=0.9.6",
62 | "sphinx>=8.1.3",
63 | "sphinx-rtd-theme>=3.0.2",
64 | ]
65 |
66 | # Test & Coverage config
67 | [tool.coverage.run]
68 | branch = true
69 | source = ["awpy"]
70 |
71 | [tool.pytest.ini_options]
72 | testpaths = ["tests"]
73 |
74 | # Setuptools
75 | [tool.setuptools]
76 | include-package-data = true
77 | packages = ["awpy"]
78 |
79 | # Ruff config
80 | [tool.ruff]
81 | exclude = [
82 | ".bzr",
83 | ".direnv",
84 | ".eggs",
85 | ".git",
86 | ".hg",
87 | ".mypy_cache",
88 | ".nox",
89 | ".pants.d",
90 | ".pytype",
91 | ".ruff_cache",
92 | ".svn",
93 | ".tox",
94 | ".venv",
95 | "__pypackages__",
96 | "_build",
97 | "buck-out",
98 | "build",
99 | "dist",
100 | "node_modules",
101 | "venv",
102 | "docs"
103 | ]
104 | line-length = 120
105 |
106 | [tool.ruff.lint]
107 | select = [
108 | "E",
109 | "F",
110 | "B",
111 | "W",
112 | "I",
113 | "N",
114 | "D",
115 | "UP",
116 | "YTT",
117 | "ANN",
118 | "S",
119 | "BLE",
120 | "FBT",
121 | "A",
122 | "C4",
123 | "DTZ",
124 | "T10",
125 | "EXE",
126 | "ISC",
127 | "ICN",
128 | "G",
129 | "INP",
130 | "PIE",
131 | "PYI",
132 | "PT",
133 | "Q",
134 | "RSE",
135 | "RET",
136 | "SLF",
137 | "SIM",
138 | "TID",
139 | "TCH",
140 | "ARG",
141 | "ERA",
142 | "PD",
143 | "PGH",
144 | "PLC",
145 | "PLE",
146 | "PLR",
147 | "PLW",
148 | "TRY",
149 | "NPY",
150 | "RUF",
151 | "EM"
152 | ]
153 | ignore = ["D208", "T20", "PTH", "TRY003", "BLE001", "PLR2004", "UP007", "ISC001"]
154 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
155 |
156 | [tool.ruff.lint.pydocstyle]
157 | convention = "google"
158 |
159 | [tool.ruff.lint.pylint]
160 | max-args = 17
161 |
162 | [tool.ruff.lint.per-file-ignores]
163 | "__init__.py" = ["E402", "F401"]
164 | "tests/test_*.py" = ["ANN201", "S101", "SLF001", "PLR2004"]
165 |
--------------------------------------------------------------------------------
/scripts/generate-maps.ps1:
--------------------------------------------------------------------------------
1 | # Note: On Windows you must keep the .dll from the .zip in the same directory as the .exe
2 | param(
3 | [Parameter(Mandatory=$false)]
4 | [string]$inputPath = "C:\Program Files (x86)\Steam\steamapps\common\Counter-Strike Global Offensive\game\csgo\pak01_dir.vpk",
5 |
6 | [Parameter(Mandatory=$false)]
7 | [string]$outputPath = "."
8 | )
9 |
10 | # Define the fixed command and filters
11 | $exePath = ".\Source2Viewer-CLI.exe"
12 | $folderFilter = "panorama/images/overheadmaps/"
13 | $extensionFilter = "vtex_c"
14 |
15 | # Run the command to extract the files
16 | & $exePath -i $inputPath -f $folderFilter -e $extensionFilter -o $outputPath -d
17 |
18 | # Define the source directory (where the extracted files are)
19 | $sourceDir = Join-Path $outputPath $folderFilter
20 |
21 | # Define the target directory for the renamed files ("maps")
22 | $targetDir = Join-Path $outputPath "maps"
23 |
24 | # If the target directory doesn't exist, create it.
25 | if (-not (Test-Path $targetDir)) {
26 | New-Item -ItemType Directory -Path $targetDir | Out-Null
27 | }
28 |
29 | # Check if the source directory exists before processing.
30 | if (Test-Path $sourceDir) {
31 | # Process each file ending with "_radar_psd.png"
32 | Get-ChildItem -Path $sourceDir -Filter "*_radar_psd.png" | ForEach-Object {
33 | # Skip files with undesired substrings.
34 | if ($_.Name -like "*_preview*" -or $_.Name -like "*_vanity*") {
35 | return
36 | }
37 |
38 | # Define the new file name by replacing "_radar_psd.png" with ".png"
39 | $newFileName = $_.Name -replace "_radar_psd\.png$", ".png"
40 |
41 | # Rename the file within the source directory.
42 | Rename-Item -Path $_.FullName -NewName $newFileName
43 |
44 | # Define full paths for the renamed file and its destination.
45 | $currentPath = Join-Path $sourceDir $newFileName
46 | $destinationPath = Join-Path $targetDir $newFileName
47 |
48 | Write-Host "Moving file: $currentPath" -ForegroundColor Green
49 | Write-Host "To: $destinationPath" -ForegroundColor Green
50 |
51 | # Move the file to the target directory.
52 | Move-Item -Path $currentPath -Destination $destinationPath -Force
53 | }
54 |
55 | # Optionally, remove the 'panorama' folder if it's no longer needed.
56 | $panoramaPath = Join-Path $outputPath "panorama"
57 | if (Test-Path $panoramaPath) {
58 | Remove-Item -Path $panoramaPath -Recurse -Force
59 | }
60 | } else {
61 | Write-Host "Source directory '$sourceDir' does not exist." -ForegroundColor Red
62 | exit
63 | }
64 |
65 | # Generate map data
66 | $resourceFolder = "resource/overviews/"
67 | & $exePath -i $inputPath -f $resourceFolder -e "txt" -o $outputPath -d
68 | $tempOutputDir = Join-Path -Path $outputPath -ChildPath $resourceFolder
69 | uv run awpy mapdata $tempOutputDir
70 | Move-Item -Path "map-data.json" -Destination $targetDir -Force
71 |
72 | # Create a zip archive of the final files in the target directory.
73 | $zipPath = Join-Path $outputPath "maps.zip"
74 | Compress-Archive -Path (Join-Path $targetDir "*") -DestinationPath $zipPath -Force
75 | Write-Host "Zip archive created at: $zipPath" -ForegroundColor Cyan
76 |
77 | # Function to compute a hash based on the actual contents of all files in a directory.
function Get-DirectoryContentHash {
    param(
        [Parameter(Mandatory=$true)]
        [string]$DirectoryPath,
        [string]$Algorithm = "SHA256"
    )
    # Retrieve all files recursively and sort them by full path so the hash is order-stable.
    $files = Get-ChildItem -Path $DirectoryPath -File -Recurse | Sort-Object FullName

    $hasher = [System.Security.Cryptography.HashAlgorithm]::Create($Algorithm)
    $ms = New-Object System.IO.MemoryStream
    try {
        foreach ($file in $files) {
            # Include the file's relative path (as bytes) so that names affect the hash.
            $relativePath = $file.FullName.Substring($DirectoryPath.Length).TrimStart('\')
            $pathBytes = [System.Text.Encoding]::UTF8.GetBytes($relativePath)
            $ms.Write($pathBytes, 0, $pathBytes.Length)

            # Include the file's content.
            $fileBytes = [System.IO.File]::ReadAllBytes($file.FullName)
            $ms.Write($fileBytes, 0, $fileBytes.Length)
        }

        $ms.Position = 0
        $hashBytes = $hasher.ComputeHash($ms)
        # Render the digest as an uppercase hex string without dashes.
        return [BitConverter]::ToString($hashBytes) -replace '-', ''
    }
    finally {
        # HashAlgorithm and MemoryStream are IDisposable; release them deterministically
        # (the original leaked both until garbage collection).
        $ms.Dispose()
        $hasher.Dispose()
    }
}
106 |
107 | # Compute and print the hash of the zip archive using SHA256.
108 | $fileHash = Get-FileHash -Path $zipPath -Algorithm SHA256
109 | Write-Host "Zip file hash (SHA256): $($fileHash.Hash)" -ForegroundColor Cyan
110 |
111 | # Compute and print the hash of the contents of the output directory (the .tri files).
112 | $contentHash = Get-DirectoryContentHash -DirectoryPath $targetDir
113 | Write-Host "Combined content hash of output files: $contentHash" -ForegroundColor Cyan
114 |
--------------------------------------------------------------------------------
/scripts/generate-navs.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$false)]
3 | [string]$inputPath = "C:\Program Files (x86)\Steam\steamapps\common\Counter-Strike Global Offensive\game\csgo\maps",
4 |
5 | # The output directory where the final .json files will be placed. Defaults to a folder named "nav".
6 | [Parameter(Mandatory=$false)]
7 | [string]$outputDirectory = (Join-Path (Get-Location).Path "nav")
8 | )
9 |
10 | # Ensure the output directory ("nav") exists; create it if it doesn't.
11 | if (-not (Test-Path $outputDirectory)) {
12 | New-Item -ItemType Directory -Path $outputDirectory | Out-Null
13 | }
14 |
15 | # Verify the input path exists.
16 | if (-not (Test-Path $inputPath)) {
17 | Write-Host "The specified directory does not exist: $inputPath" -ForegroundColor Red
18 | exit
19 | }
20 |
21 | # Process each .vpk file, excluding files with unwanted substrings.
22 | Get-ChildItem -Path $inputPath -Filter "*.vpk" | Where-Object {
23 | $_.Name -notlike "*_preview*" -and $_.Name -notlike "*_vanity*" -and $_.Name -notlike "*lobby_*"
24 | } | ForEach-Object {
25 | $filePath = $_.FullName
26 | $fileNameWithoutExtension = $_.BaseName
27 |
28 | Write-Host "Processing file: $filePath" -ForegroundColor Green
29 |
30 | # Create a temporary output directory in the system temp folder.
31 | $tempOutputDir = Join-Path ([System.IO.Path]::GetTempPath()) ([System.Guid]::NewGuid().ToString())
32 | New-Item -ItemType Directory -Path $tempOutputDir | Out-Null
33 |
34 | # Run Source2Viewer-CLI to generate the .nav file into the temporary folder.
35 | .\Source2Viewer-CLI.exe -i $filePath -e "nav" -o $tempOutputDir -d
36 |
37 | # Construct the expected path for the generated .nav file.
38 | $navFilePath = Join-Path -Path $tempOutputDir -ChildPath "maps\$fileNameWithoutExtension.nav"
39 | if (-not (Test-Path $navFilePath)) {
40 | Write-Host "Error: Expected nav file not found for $fileNameWithoutExtension" -ForegroundColor Red
41 | Remove-Item -Path $tempOutputDir -Recurse -Force
42 | return
43 | }
44 |
45 | # Run the awpy nav command to create a .json file.
46 | $jsonTempPath = Join-Path -Path $tempOutputDir -ChildPath "$fileNameWithoutExtension.json"
47 | Write-Host "Running awpy nav on: $navFilePath" -ForegroundColor Yellow
48 | uv run awpy nav $navFilePath --outpath $jsonTempPath
49 |
50 | if (Test-Path $jsonTempPath) {
51 | $finalJsonPath = Join-Path -Path $outputDirectory -ChildPath "$fileNameWithoutExtension.json"
52 | Move-Item -Path $jsonTempPath -Destination $finalJsonPath -Force
53 | Write-Host "Output saved as: $finalJsonPath" -ForegroundColor Cyan
54 | } else {
55 | Write-Host "Error: .json output not created for $fileNameWithoutExtension" -ForegroundColor Red
56 | }
57 |
58 | # Clean up the temporary directory.
59 | Remove-Item -Path $tempOutputDir -Recurse -Force
60 | }
61 |
62 | # Create a zip archive of the output JSON files.
63 | $zipPath = Join-Path (Split-Path $outputDirectory) "navs.zip"
64 | Compress-Archive -Path (Join-Path $outputDirectory "*") -DestinationPath $zipPath -Force
65 |
66 | if (Test-Path $zipPath) {
67 | Write-Host "Zip file created at: $zipPath" -ForegroundColor Green
68 | } else {
69 | Write-Host "Error: Zip file was not created." -ForegroundColor Red
70 | }
71 |
72 | # Function to compute a hash based on the contents of all files in a directory.
function Get-DirectoryContentHash {
    param(
        [Parameter(Mandatory=$true)]
        [string]$DirectoryPath,
        [string]$Algorithm = "SHA256"
    )
    # Get all files recursively and sort them by their full path for consistency.
    $files = Get-ChildItem -Path $DirectoryPath -File -Recurse | Sort-Object FullName

    # Initialize the hasher and a memory stream to accumulate the data.
    $hasher = [System.Security.Cryptography.HashAlgorithm]::Create($Algorithm)
    $ms = New-Object System.IO.MemoryStream
    try {
        foreach ($file in $files) {
            # Write the file's relative path (as bytes) so identical contents under different names hash differently.
            $relativePath = $file.FullName.Substring($DirectoryPath.Length).TrimStart('\')
            $pathBytes = [System.Text.Encoding]::UTF8.GetBytes($relativePath)
            $ms.Write($pathBytes, 0, $pathBytes.Length)

            # Write the file's contents.
            $fileBytes = [System.IO.File]::ReadAllBytes($file.FullName)
            $ms.Write($fileBytes, 0, $fileBytes.Length)
        }

        # Compute the hash of the combined stream and render it as dash-free hex.
        $ms.Position = 0
        $hashBytes = $hasher.ComputeHash($ms)
        return [BitConverter]::ToString($hashBytes) -replace '-', ''
    }
    finally {
        # HashAlgorithm and MemoryStream are IDisposable; release them deterministically
        # (the original leaked both until garbage collection).
        $ms.Dispose()
        $hasher.Dispose()
    }
}
105 |
106 | # Compute and print the hash of the zip archive using SHA256.
107 | $fileHash = Get-FileHash -Path $zipPath -Algorithm SHA256
108 | Write-Host "Zip file hash (SHA256): $($fileHash.Hash)" -ForegroundColor Cyan
109 |
110 | # Compute and print the hash of the contents of the output directory (ignoring zip metadata).
111 | $contentHash = Get-DirectoryContentHash -DirectoryPath $outputDirectory
112 | Write-Host "Combined content hash of output files: $contentHash" -ForegroundColor Cyan
113 |
--------------------------------------------------------------------------------
/scripts/generate-spawns.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$false)]
3 | [string]$inputPath = "C:\Program Files (x86)\Steam\steamapps\common\Counter-Strike Global Offensive\game\csgo\maps",
4 |
5 | # Final output folder for spawn JSONs; defaults to a folder named "spawns" in the current directory.
6 | [Parameter(Mandatory=$false)]
7 | [string]$outputDirectory = (Join-Path (Get-Location).Path "spawns")
8 | )
9 |
10 | # Ensure the output directory exists; create it if it doesn't.
11 | if (-not (Test-Path $outputDirectory)) {
12 | New-Item -ItemType Directory -Path $outputDirectory | Out-Null
13 | }
14 |
15 | # Verify the input path exists.
16 | if (-not (Test-Path $inputPath)) {
17 | Write-Host "The specified directory does not exist: $inputPath" -ForegroundColor Red
18 | exit
19 | }
20 |
21 | # Process each .vpk file found in the input directory, skipping those with unwanted substrings.
22 | Get-ChildItem -Path $inputPath -Filter "*.vpk" | Where-Object {
23 | $_.Name -notlike "*_preview*" -and $_.Name -notlike "*_vanity*" -and $_.Name -notlike "*lobby_*" -and $_.Name -notlike "*graphics_*"
24 | } | ForEach-Object {
25 | $filePath = $_.FullName
26 | $fileNameWithoutExtension = $_.BaseName
27 |
28 | Write-Host "Processing file: $filePath" -ForegroundColor Green
29 |
30 | # Create a temporary output directory in the system's temp folder.
31 | $tempOutputDir = Join-Path ([System.IO.Path]::GetTempPath()) ([System.Guid]::NewGuid().ToString())
32 | New-Item -ItemType Directory -Path $tempOutputDir | Out-Null
33 |
34 | # Run Source2Viewer-CLI to generate the vents file into the temporary folder.
35 | .\Source2Viewer-CLI.exe -i $filePath -e "vents_c" -o $tempOutputDir -d
36 |
37 | # Construct the expected path for the generated vents file.
38 | $ventsFilePath = Join-Path -Path $tempOutputDir -ChildPath "maps\$fileNameWithoutExtension\entities\default_ents.vents"
39 | if (-not (Test-Path $ventsFilePath)) {
40 | Write-Host "Error: Expected vents file not found for $fileNameWithoutExtension" -ForegroundColor Red
41 | Remove-Item -Path $tempOutputDir -Recurse -Force
42 | return
43 | }
44 |
45 | # Run the awpy spawn command to create a .json file, outputting to a temporary file.
46 | $spawnJsonTempPath = Join-Path -Path $tempOutputDir -ChildPath "$fileNameWithoutExtension.json"
47 | Write-Host "Running awpy spawns on: $ventsFilePath" -ForegroundColor Yellow
48 | uv run awpy spawn $ventsFilePath --outpath $spawnJsonTempPath
49 |
50 | if (Test-Path $spawnJsonTempPath) {
51 | $finalJsonPath = Join-Path -Path $outputDirectory -ChildPath "$fileNameWithoutExtension.json"
52 | Move-Item -Path $spawnJsonTempPath -Destination $finalJsonPath -Force
53 | Write-Host "Output saved as: $finalJsonPath" -ForegroundColor Cyan
54 | } else {
55 | Write-Host "Error: .json output not created for $fileNameWithoutExtension" -ForegroundColor Red
56 | }
57 |
58 | # Clean up the temporary directory.
59 | Remove-Item -Path $tempOutputDir -Recurse -Force
60 | }
61 |
62 | # Create a zip archive of the output spawn JSON files.
63 | $zipPath = Join-Path (Split-Path $outputDirectory) "spawns.zip"
64 | Compress-Archive -Path (Join-Path $outputDirectory "*") -DestinationPath $zipPath -Force
65 |
66 | if (Test-Path $zipPath) {
67 | Write-Host "Zip file created at: $zipPath" -ForegroundColor Green
68 | } else {
69 | Write-Host "Error: Zip file was not created." -ForegroundColor Red
70 | }
71 |
72 | # Function to compute a hash based on the actual contents of all files in a directory.
function Get-DirectoryContentHash {
    param(
        [Parameter(Mandatory=$true)]
        [string]$DirectoryPath,
        [string]$Algorithm = "SHA256"
    )
    # Retrieve all files recursively and sort them by full path for consistency.
    $files = Get-ChildItem -Path $DirectoryPath -File -Recurse | Sort-Object FullName

    $hasher = [System.Security.Cryptography.HashAlgorithm]::Create($Algorithm)
    $ms = New-Object System.IO.MemoryStream
    try {
        foreach ($file in $files) {
            # Include the file's relative path (as bytes) so that file names affect the hash.
            $relativePath = $file.FullName.Substring($DirectoryPath.Length).TrimStart('\')
            $pathBytes = [System.Text.Encoding]::UTF8.GetBytes($relativePath)
            $ms.Write($pathBytes, 0, $pathBytes.Length)

            # Include the file's content.
            $fileBytes = [System.IO.File]::ReadAllBytes($file.FullName)
            $ms.Write($fileBytes, 0, $fileBytes.Length)
        }

        $ms.Position = 0
        $hashBytes = $hasher.ComputeHash($ms)
        # Render the digest as an uppercase hex string without dashes.
        return [BitConverter]::ToString($hashBytes) -replace '-', ''
    }
    finally {
        # HashAlgorithm and MemoryStream are IDisposable; release them deterministically
        # (the original leaked both until garbage collection).
        $ms.Dispose()
        $hasher.Dispose()
    }
}
101 |
102 | # Compute and print the hash of the zip archive using SHA256.
103 | $fileHash = Get-FileHash -Path $zipPath -Algorithm SHA256
104 | Write-Host "Zip file hash (SHA256): $($fileHash.Hash)" -ForegroundColor Cyan
105 |
106 | # Compute and print the hash of the contents of the output directory (the spawn JSON files).
107 | $contentHash = Get-DirectoryContentHash -DirectoryPath $outputDirectory
108 | Write-Host "Combined content hash of output files: $contentHash" -ForegroundColor Cyan
109 |
--------------------------------------------------------------------------------
/scripts/generate-tris.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$false)]
3 | [string]$inputPath = "C:\Program Files (x86)\Steam\steamapps\common\Counter-Strike Global Offensive\game\csgo\maps",
4 |
5 | # Final output folder for .tri files; defaults to a folder named "tri" in the current directory.
6 | [Parameter(Mandatory=$false)]
7 | [string]$outputDirectory = (Join-Path (Get-Location).Path "tri")
8 | )
9 |
10 | # Ensure the output directory exists; create it if it doesn't.
11 | if (-not (Test-Path $outputDirectory)) {
12 | New-Item -ItemType Directory -Path $outputDirectory | Out-Null
13 | }
14 |
15 | # Verify the input path exists.
16 | if (-not (Test-Path $inputPath)) {
17 | Write-Host "The specified directory does not exist: $inputPath" -ForegroundColor Red
18 | exit
19 | }
20 |
21 | # Process each .vpk file, excluding files with unwanted substrings.
22 | Get-ChildItem -Path $inputPath -Filter "*.vpk" | Where-Object {
23 | $_.Name -notlike "*_preview*" -and $_.Name -notlike "*_vanity*" -and $_.Name -notlike "*lobby_*" -and $_.Name -notlike "*graphics_*"
24 | } | ForEach-Object {
25 | $filePath = $_.FullName
26 | $fileNameWithoutExtension = $_.BaseName
27 |
28 | Write-Host "Processing file: $filePath" -ForegroundColor Green
29 |
30 | # Create a temporary output directory in the system temp folder.
31 | $tempOutputDir = Join-Path ([System.IO.Path]::GetTempPath()) ([System.Guid]::NewGuid().ToString())
32 | New-Item -ItemType Directory -Path $tempOutputDir | Out-Null
33 |
34 | # Run Source2Viewer-CLI and capture stdout
35 | $output = .\Source2Viewer-CLI.exe -i $filePath --block "PHYS" -f "maps/$fileNameWithoutExtension/world_physics.vmdl_c" 2>&1
36 |
37 | # Convert output to an array of lines
38 | $outputLines = $output -split "`r?`n"
39 |
40 | # Find the index where the actual data starts
41 | $startIndex = $outputLines.IndexOf('--- Data for block "PHYS" ---') + 1
42 |
43 | if ($startIndex -eq 0 -or $startIndex -ge $outputLines.Count) {
44 | Write-Host "Error: Expected PHYS data block not found for $fileNameWithoutExtension" -ForegroundColor Red
45 | return
46 | }
47 |
48 | # Extract the relevant lines after the marker
49 | $physData = $outputLines[$startIndex..($outputLines.Count - 1)] -join "`n"
50 |
51 | # Define the expected output path
52 | $vphysFilePath = Join-Path -Path $tempOutputDir -ChildPath "maps\$fileNameWithoutExtension\world_physics.vphys"
53 |
54 | # Ensure the output directory exists
55 | $parentDir = Split-Path -Path $vphysFilePath -Parent
56 | if (!(Test-Path $parentDir)) {
57 | New-Item -ItemType Directory -Path $parentDir -Force | Out-Null
58 | }
59 |
60 | # Write the extracted data to the file
61 | $physData | Out-File -FilePath $vphysFilePath -Encoding utf8
62 |
63 | if (-not (Test-Path $vphysFilePath)) {
64 | Write-Host "Error: Expected vphys file not found for $fileNameWithoutExtension" -ForegroundColor Red
65 | Remove-Item -Path $tempOutputDir -Recurse -Force
66 | return
67 | }
68 |
69 | # Run the awpy tri command to generate a .tri file, outputting to a temporary file.
70 | $triTempPath = Join-Path -Path $tempOutputDir -ChildPath "$fileNameWithoutExtension.tri"
71 | Write-Host "Running awpy generate-tri on: $vphysFilePath" -ForegroundColor Yellow
72 | uv run awpy tri $vphysFilePath --outpath $triTempPath
73 |
74 | if (Test-Path $triTempPath) {
75 | $finalTriPath = Join-Path -Path $outputDirectory -ChildPath "$fileNameWithoutExtension.tri"
76 | Move-Item -Path $triTempPath -Destination $finalTriPath -Force
77 | Write-Host "Output saved as: $finalTriPath" -ForegroundColor Cyan
78 | } else {
79 | Write-Host "Error: .tri output not created for $fileNameWithoutExtension" -ForegroundColor Red
80 | }
81 |
82 | # Clean up the temporary directory.
83 | Remove-Item -Path $tempOutputDir -Recurse -Force
84 | }
85 |
86 | # Create a zip archive of the output .tri files.
87 | $zipPath = Join-Path (Split-Path $outputDirectory) "tris.zip"
88 | Compress-Archive -Path (Join-Path $outputDirectory "*") -DestinationPath $zipPath -Force
89 |
90 | if (Test-Path $zipPath) {
91 | Write-Host "Zip file created at: $zipPath" -ForegroundColor Green
92 | } else {
93 | Write-Host "Error: Zip file was not created." -ForegroundColor Red
94 | }
95 |
96 | # Function to compute a hash based on the actual contents of all files in a directory.
function Get-DirectoryContentHash {
    param(
        [Parameter(Mandatory=$true)]
        [string]$DirectoryPath,
        [string]$Algorithm = "SHA256"
    )
    # Retrieve all files recursively and sort them by full path for consistency.
    $files = Get-ChildItem -Path $DirectoryPath -File -Recurse | Sort-Object FullName

    $hasher = [System.Security.Cryptography.HashAlgorithm]::Create($Algorithm)
    $ms = New-Object System.IO.MemoryStream
    try {
        foreach ($file in $files) {
            # Include the file's relative path (as bytes) so that names affect the hash.
            $relativePath = $file.FullName.Substring($DirectoryPath.Length).TrimStart('\')
            $pathBytes = [System.Text.Encoding]::UTF8.GetBytes($relativePath)
            $ms.Write($pathBytes, 0, $pathBytes.Length)

            # Include the file's content.
            $fileBytes = [System.IO.File]::ReadAllBytes($file.FullName)
            $ms.Write($fileBytes, 0, $fileBytes.Length)
        }

        $ms.Position = 0
        $hashBytes = $hasher.ComputeHash($ms)
        # Render the digest as an uppercase hex string without dashes.
        return [BitConverter]::ToString($hashBytes) -replace '-', ''
    }
    finally {
        # HashAlgorithm and MemoryStream are IDisposable; release them deterministically
        # (the original leaked both until garbage collection).
        $ms.Dispose()
        $hasher.Dispose()
    }
}
125 |
126 | # Compute and print the hash of the zip archive using SHA256.
127 | $fileHash = Get-FileHash -Path $zipPath -Algorithm SHA256
128 | Write-Host "Zip file hash (SHA256): $($fileHash.Hash)" -ForegroundColor Cyan
129 |
130 | # Compute and print the hash of the contents of the output directory (the .tri files).
131 | $contentHash = Get-DirectoryContentHash -DirectoryPath $outputDirectory
132 | Write-Host "Combined content hash of output files: $contentHash" -ForegroundColor Cyan
133 |
--------------------------------------------------------------------------------
/scripts/update-latest-patch.ps1:
--------------------------------------------------------------------------------
1 | # Path to the file you want to update
2 | $initFile = "awpy\data\__init__.py"
3 |
4 | # Read the file contents once.
5 | $contents = Get-Content $initFile -Raw
6 |
7 | # Extract the current build id using a regex.
8 | $currentBuildIdMatch = [regex]::Match($contents, 'CURRENT_BUILD_ID\s*=\s*(\d+)')
9 | if ($currentBuildIdMatch.Success -and $currentBuildIdMatch.Groups[1].Value -eq $env:LATEST_PATCH_ID) {
10 | # If the build ids are the same, no update is needed.
11 | Write-Output $false
12 | exit
13 | }
14 |
15 | # Update the CURRENT_BUILD_ID line.
16 | $contents = $contents -replace 'CURRENT_BUILD_ID\s*=\s*\d+', "CURRENT_BUILD_ID = $env:LATEST_PATCH_ID"
17 |
18 | # Define the new patch entry as a multiline string.
19 | $newPatch = @"
20 | ${env:LATEST_PATCH_ID}: {
21 | "url": "https://steamdb.info/patchnotes/${env:LATEST_PATCH_ID}/",
22 | "datetime": datetime.datetime.fromtimestamp(${env:LATEST_PATCH_TIMESTAMP}, datetime.UTC),
23 | "available": POSSIBLE_ARTIFACTS
24 | }
25 | "@.Trim()
26 |
27 | # Insert the new patch entry into the AVAILABLE_PATCHES dictionary.
28 | $pattern = '(?s)(AVAILABLE_PATCHES\s*=\s*\{)(.*?)(\})'
29 | $contents = [regex]::Replace($contents, $pattern, {
30 | param($match)
31 | # If the patch key already exists, return the original match.
32 | if ($match.Groups[2].Value -match ${env:LATEST_PATCH_ID}) {
33 | return $match.Value
34 | } else {
35 | # Always add the new patch first, followed by existing content (if any)
36 | $existingContent = $match.Groups[2].Value.Trim()
37 | if ($existingContent -eq "") {
38 | $insert = "`n$newPatch`n"
39 | } else {
40 | $insert = "`n$newPatch,`n$existingContent"
41 | }
42 | return $match.Groups[1].Value + $insert + $match.Groups[3].Value
43 | }
44 | })
45 |
46 | # Write the updated content back to the file.
47 | Set-Content $initFile -Value $contents
48 |
49 | # Return true to indicate a change was made.
50 | Write-Output $true
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Module for testing Awpy."""
2 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """Awpy test configuration."""
2 |
3 | import json
4 | import os
5 | import pathlib
6 |
7 | import pytest
8 | import requests
9 |
10 | import awpy.demo
11 |
12 |
@pytest.fixture(scope="session", autouse=True)
def setup():  # noqa: ANN201
    """Sets up testing environment by downloading demofiles."""
    with open("tests/test_data.json", encoding="utf-8") as manifest_file:
        manifest = json.load(manifest_file)
    for file_id, meta in manifest.items():
        target = pathlib.Path(f"tests/{file_id}{meta['extension']}")
        # Only download files that are not already cached locally.
        if not target.exists():
            _get_test_file(url=meta["url"], path=target)
22 |
23 |
@pytest.fixture(scope="session", autouse=True)
def teardown():  # noqa: ANN201
    """Cleans testing environment by deleting all .dem and .json files."""
    # Run after the whole test session finishes.
    yield
    generated_suffixes = (".json", ".dem", ".zip", ".nav")
    for entry in os.listdir():
        if entry.endswith(generated_suffixes):
            os.remove(entry)
31 |
32 |
@pytest.fixture(scope="session")
def parsed_hltv_demo() -> awpy.demo.Demo:
    """Session-scoped fixture providing a parsed HLTV Demo object.

    https://www.hltv.org/matches/2378917/vitality-vs-spirit-iem-katowice-2025
    """
    demo = awpy.demo.Demo(path="tests/vitality-vs-spirit-m2-nuke.dem")
    demo.parse()
    return demo
42 |
43 |
@pytest.fixture(scope="session")
def parsed_faceit_demo() -> awpy.demo.Demo:
    """Session-scoped fixture providing a parsed FACEIT Demo object.

    https://www.faceit.com/en/cs2/room/1-efdaace4-2fd4-4884-babf-1a5a2c83e344
    """
    demo = awpy.demo.Demo(path="tests/1-efdaace4-2fd4-4884-babf-1a5a2c83e344.dem")
    demo.parse()
    return demo
53 |
54 |
@pytest.fixture(scope="session")
def parsed_mm_demo() -> awpy.demo.Demo:
    """Session-scoped fixture providing a parsed Matchmaking Demo object.

    https://csstats.gg/match/249286425
    """
    demo = awpy.demo.Demo(path="tests/match730_003736456444682174484_1173793269_201.dem")
    demo.parse()
    return demo
64 |
65 |
def _get_test_file(url: str, path: pathlib.Path) -> None:
    """Sends a request to get a demofile from the object storage.

    Skips the download if the file already exists at `path`.

    Args:
        url (str): Link to demo.
        path (pathlib.Path): Filepath to write.

    Raises:
        requests.HTTPError: If the download request returns an error status.
    """
    if path.exists():
        return
    request = requests.get(url, timeout=100)
    # Fail loudly on an HTTP error instead of silently writing an error
    # page's body to a .dem/.nav file, which would break tests downstream.
    request.raise_for_status()
    path.write_bytes(request.content)
77 |
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | """Test the cli functions."""
2 |
3 | import json
4 | import os
5 | import pathlib
6 | import zipfile
7 |
8 | import pytest
9 | from click.testing import CliRunner
10 |
11 | import awpy.cli
12 |
13 |
class TestCommandLine:
    """Tests the Awpy command line interface."""

    @pytest.fixture(autouse=True)
    def setup_runner(self, setup):  # noqa: ANN001, ARG002
        """Setup CLI runner. `setup` arg is the pytest setup fixture."""
        self.runner = CliRunner()

    def test_parse_nav_invalid_filepath(self):
        """Test the nav command with an invalid filepath."""
        outcome = self.runner.invoke(awpy.cli.parse_nav, ["xyz.nav"])
        assert outcome.exit_code != 0
        assert isinstance(outcome.exception, SystemExit)

    def test_parse_nav(self):
        """Test that the nav command produces a json file."""
        outcome = self.runner.invoke(awpy.cli.parse_nav, ["tests/de_dust2.nav"])
        assert outcome.exit_code == 0

        # The command should have written the parsed nav next to the input.
        assert os.path.exists("tests/de_dust2.json")

    def test_parse_demo_invalid_filepath(self):
        """Test the parse command with an invalid filepath."""
        outcome = self.runner.invoke(awpy.cli.parse_demo, ["xyz.dem"])
        assert outcome.exit_code != 0
        assert isinstance(outcome.exception, SystemExit)

    def test_parse_demo_zip_creation(self):
        """Test that the parse command produces a zip file."""
        outcome = self.runner.invoke(awpy.cli.parse_demo, ["tests/vitality-vs-spirit-m2-nuke.dem"])
        assert outcome.exit_code == 0

        archive_name = "vitality-vs-spirit-m2-nuke.zip"
        assert os.path.exists(archive_name)

        with zipfile.ZipFile(archive_name, "r") as zipf:
            members = zipf.namelist()

            # Every expected artifact must appear somewhere in the archive.
            expected_files = [
                "kills.parquet",
                "damages.parquet",
                "footsteps.parquet",
                "shots.parquet",
                "grenades.parquet",
                "smokes.parquet",
                "infernos.parquet",
                "bomb.parquet",
                "ticks.parquet",
                "rounds.parquet",
                "header.json",
            ]
            archived_names = {pathlib.Path(member).name for member in members}
            assert all(pathlib.Path(name).name in archived_names for name in expected_files)

            # The archive must contain at least one parquet (event) file.
            parquet_members = [member for member in members if member.endswith(".parquet")]
            assert len(parquet_members) > 0

            # Spot-check one file's content.
            with zipf.open("header.json") as f:
                header = json.load(f)
                assert header["map_name"] == "de_nuke"
76 |
--------------------------------------------------------------------------------
/tests/test_data.json:
--------------------------------------------------------------------------------
1 | {
2 | "match730_003736456444682174484_1173793269_201": {
3 | "url": "https://figshare.com/ndownloader/files/52456259",
4 | "extension": ".dem"
5 | },
6 | "1-efdaace4-2fd4-4884-babf-1a5a2c83e344": {
7 | "url": "https://figshare.com/ndownloader/files/52455215",
8 | "extension": ".dem"
9 | },
10 | "vitality-vs-spirit-m2-nuke": {
11 | "url": "https://figshare.com/ndownloader/files/52429013",
12 | "extension": ".dem"
13 | },
14 | "de_dust2": {
15 | "url": "https://figshare.com/ndownloader/files/51487988",
16 | "extension": ".nav"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/tests/test_demo.py:
--------------------------------------------------------------------------------
1 | """Test the Demo object."""
2 |
3 | import json
4 | import os
5 | import pathlib
6 | import zipfile
7 |
8 | import polars as pl
9 | import pytest
10 |
11 | import awpy.demo
12 |
13 |
class TestDemo:
    """Tests the Demo object."""

    def test_invalid_filepath(self):
        """Test the Demo object with an invalid filepath."""
        with pytest.raises(FileNotFoundError):
            awpy.demo.Demo("xyz.dem")

    def test_hltv_demo(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the Demo object with an HLTV demo."""
        assert parsed_hltv_demo.header["map_name"] == "de_nuke"

    def test_faceit_demo(self, parsed_faceit_demo: awpy.demo.Demo):
        """Test the Demo object with a FACEIT demo."""
        assert parsed_faceit_demo.header["map_name"] == "de_mirage"

    def test_mm_demo(self, parsed_mm_demo: awpy.demo.Demo):
        """Test the Demo object with an MM demo."""
        assert parsed_mm_demo.header["map_name"] == "de_ancient"

    def test_compress(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test that the demo is zipped."""
        parsed_hltv_demo.compress()

        archive_name = "vitality-vs-spirit-m2-nuke.zip"
        assert os.path.exists(archive_name)

        with zipfile.ZipFile(archive_name, "r") as zipf:
            members = zipf.namelist()

            # Every expected artifact must appear somewhere in the archive.
            expected_files = [
                "kills.parquet",
                "damages.parquet",
                "footsteps.parquet",
                "shots.parquet",
                "grenades.parquet",
                "smokes.parquet",
                "infernos.parquet",
                "bomb.parquet",
                "ticks.parquet",
                "rounds.parquet",
                "header.json",
            ]
            archived_names = {pathlib.Path(member).name for member in members}
            assert all(pathlib.Path(name).name in archived_names for name in expected_files)

            # The archive must contain at least one parquet (event) file.
            parquet_members = [member for member in members if member.endswith(".parquet")]
            assert len(parquet_members) > 0

            # Spot-check the header contents.
            with zipf.open("header.json") as f:
                header = json.load(f)
                assert header["map_name"] == "de_nuke"

    def test_hltv_ticks_end_official_end(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the ticks DataFrame for an HLTV demo (end to official end)."""
        # Skip the final round, which has no post-round window to check.
        ends = parsed_hltv_demo.rounds["end"].to_list()[:-1]
        official_ends = parsed_hltv_demo.rounds["official_end"].to_list()[:-1]
        for end_tick, official_end_tick in zip(ends, official_ends, strict=False):
            window = parsed_hltv_demo.ticks.filter(
                pl.col("tick") >= end_tick,
                pl.col("tick") <= official_end_tick,
            )
            assert not window.is_empty()

    def test_hltv_ticks_start_freeze(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the ticks DataFrame for an HLTV demo (start to freeze end)."""
        starts = parsed_hltv_demo.rounds["start"].to_list()
        freeze_ends = parsed_hltv_demo.rounds["freeze_end"].to_list()
        for start_tick, freeze_end_tick in zip(starts, freeze_ends, strict=False):
            window = parsed_hltv_demo.ticks.filter(
                pl.col("tick") >= start_tick,
                pl.col("tick") < freeze_end_tick,
            )
            assert not window.is_empty()

    def test_hltv_ticks_freeze_end(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the ticks DataFrame for an HLTV demo (freeze end to end)."""
        freeze_ends = parsed_hltv_demo.rounds["freeze_end"].to_list()
        ends = parsed_hltv_demo.rounds["end"].to_list()
        for freeze_end_tick, end_tick in zip(freeze_ends, ends, strict=False):
            window = parsed_hltv_demo.ticks.filter(
                pl.col("tick") >= freeze_end_tick,
                pl.col("tick") < end_tick,
            )
            assert not window.is_empty()

    def test_hltv_rounds(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the rounds DataFrame for an HLTV demo."""
        expected_reasons = [
            "ct_killed",
            "ct_killed",
            "ct_killed",
            "t_killed",
            "ct_killed",
            "bomb_exploded",
            "t_killed",
            "time_ran_out",
            "bomb_exploded",
            "ct_killed",
            "ct_killed",
            "t_killed",
            "t_killed",
            "t_killed",
            "bomb_exploded",
            "t_killed",
            "t_killed",
            "t_killed",
        ]
        assert parsed_hltv_demo.rounds["reason"].to_list() == expected_reasons

    def test_hltv_kills(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the kills DataFrame for an HLTV demo."""
        # Total kills
        assert len(parsed_hltv_demo.kills) == 111

    def test_hltv_damages(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the damages DataFrame for an HLTV demo."""
        assert not parsed_hltv_demo.damages.is_empty()

    def test_faceit_rounds(self, parsed_faceit_demo: awpy.demo.Demo):
        """Test the rounds DataFrame for a FACEIT demo."""
        assert len(parsed_faceit_demo.rounds) == 24

    def test_faceit_kills(self, parsed_faceit_demo: awpy.demo.Demo):
        """Test the kills DataFrame for a FACEIT demo."""
        # Only count kills across teams (exclude teamkills/suicides).
        enemy_kills = parsed_faceit_demo.kills.filter(pl.col("attacker_side") != pl.col("victim_side"))
        assert len(enemy_kills) == 165

    def test_mm_rounds(self, parsed_mm_demo: awpy.demo.Demo):
        """Test the rounds DataFrame for an MM demo."""
        expected_reasons = [
            "ct_killed",
            "t_killed",
            "t_killed",
            "t_killed",
            "t_killed",
            "t_killed",
            "ct_killed",
            "t_surrender",
        ]
        assert parsed_mm_demo.rounds["reason"].to_list() == expected_reasons

    def test_mm_kills(self, parsed_mm_demo: awpy.demo.Demo):
        """Test the kills DataFrame for an MM demo."""
        # Only count kills across teams (exclude teamkills/suicides).
        enemy_kills = parsed_mm_demo.kills.filter(pl.col("attacker_side") != pl.col("victim_side"))
        assert len(enemy_kills) == 42
155 |
--------------------------------------------------------------------------------
/tests/test_nav.py:
--------------------------------------------------------------------------------
1 | """Test the awpy.nav module."""
2 |
3 | import json
4 |
5 | import pytest
6 |
7 | import awpy.data
8 | import awpy.nav
9 |
10 |
@pytest.fixture
def parsed_nav():
    """Fixture returning the Nav mesh parsed from the de_dust2 nav file."""
    nav_mesh = awpy.nav.Nav.from_path(path="tests/de_dust2.nav")
    return nav_mesh
15 |
16 |
class TestNav:
    """Tests the Nav object."""

    def test_invalid_filepath(self):
        """Test that a missing nav file raises FileNotFoundError."""
        with pytest.raises(FileNotFoundError):
            awpy.nav.Nav.from_path("xyz.nav")

    def test_nav_areas(self, parsed_nav: awpy.nav.Nav):
        """Test that the parsed de_dust2 nav mesh has the expected area count."""
        # Docstring previously said "Test the Demo object with an HLTV demo"
        # (copy-paste error) — this test checks the Nav area count.
        assert len(parsed_nav.areas) == 2248

    def test_nav_json(self):
        """Test the Nav object from a JSON file."""
        nav_as_json_path = awpy.data.NAVS_DIR / "de_dust2.json"

        # Explicit encoding for cross-platform consistency (matches conftest).
        with open(nav_as_json_path, encoding="utf-8") as nav:
            nav_as_json = json.load(nav)
            assert len(nav_as_json["areas"]) == 2248
36 |
--------------------------------------------------------------------------------
/tests/test_stats.py:
--------------------------------------------------------------------------------
1 | """Test the stats module."""
2 |
3 | import polars as pl
4 | import pytest
5 |
6 | import awpy.demo
7 | import awpy.stats
8 |
9 |
@pytest.fixture
def adr_results(parsed_hltv_demo: awpy.demo.Demo) -> pl.DataFrame:
    """Fixture that returns ADR results filtered for the 'all' side."""
    all_rows = awpy.stats.adr(parsed_hltv_demo, team_dmg=True)
    return all_rows.filter(pl.col("side") == "all")
15 |
16 |
@pytest.fixture
def kast_results(parsed_hltv_demo: awpy.demo.Demo) -> pl.DataFrame:
    """Fixture that returns KAST results filtered for the 'all' side."""
    # Docstring previously said "ADR results" (copy-paste error).
    kast_df = awpy.stats.kast(parsed_hltv_demo)
    return kast_df.filter(pl.col("side") == "all")
22 |
23 |
class TestStats:
    """Tests the stats module."""

    @pytest.mark.parametrize(
        ("name", "expected_adr"),
        [
            ("ZywOo", 127.0),
            ("ropz", 93.3),
            ("apEX", 84.4),
            ("flameZ", 70.8),
            ("mezii", 37.8),
            ("donk", 93.9),
            ("zont1x", 56.1),
            ("magixx", 59.1),
            ("sh1ro", 34.0),
            ("chopper", 26.3),
        ],
    )
    def test_adr(self, adr_results: pl.DataFrame, name: str, expected_adr: float):
        """Test the ADR function. Compares to HLTV."""
        rows = adr_results.filter(pl.col("name") == name)

        # Exactly one record per player is expected.
        assert len(rows) == 1, f"Expected one record for {name}, got {len(rows)}"

        # Compare against the HLTV-reported value at one decimal place.
        adr_value = round(rows.select("adr").item(), 1)
        assert adr_value == expected_adr, f"ADR for {name} is {adr_value}, expected {expected_adr}"

    @pytest.mark.parametrize(
        ("name", "expected_kast"),
        [
            ("ZywOo", 88.9),
            ("ropz", 83.3),
            ("apEX", 66.7),
            ("flameZ", 72.2),
            ("mezii", 83.3),
            ("donk", 66.7),
            ("zont1x", 50.0),
            ("magixx", 44.4),
            ("sh1ro", 50.0),
            ("chopper", 33.3),
        ],
    )
    def test_kast(self, kast_results: pl.DataFrame, name: str, expected_kast: float):
        """Test the KAST function. Compares to HLTV."""
        rows = kast_results.filter(pl.col("name") == name)

        # Exactly one record per player is expected.
        assert len(rows) == 1, f"Expected one record for {name}, got {len(rows)}"

        # Compare against the HLTV-reported value at one decimal place.
        kast_value = round(rows.select("kast").item(), 1)
        assert kast_value == expected_kast, f"KAST for {name} is {kast_value}, expected {expected_kast}"

    def test_rating(self, parsed_hltv_demo: awpy.demo.Demo):
        """Test the rating function. Checks that ordering is correct."""
        rating_df = awpy.stats.rating(parsed_hltv_demo).filter(pl.col("side") == "all").sort("rating")
        assert len(rating_df) == 10, f"Expected 10 players, got {len(rating_df)}"

        # Sorted ascending by rating, so the best-rated player comes last.
        expected_order = [
            "chopper",
            "sh1ro",
            "magixx",
            "zont1x",
            "mezii",
            "flameZ",
            "donk",
            "apEX",
            "ropz",
            "ZywOo",
        ]
        assert rating_df["name"].to_list() == expected_order
100 |
--------------------------------------------------------------------------------
/tests/test_visibility.py:
--------------------------------------------------------------------------------
1 | """Test the visibility module."""
2 |
3 | import pytest
4 | from click.testing import CliRunner
5 |
6 | import awpy.cli
7 | import awpy.data
8 | import awpy.vector
9 | import awpy.visibility
10 |
11 |
def check_visibility_brute_force(
    start: awpy.vector.Vector3 | tuple | list,
    end: awpy.vector.Vector3 | tuple | list,
    triangles: list[awpy.visibility.Triangle],
) -> bool:
    """Check visibility by testing against all triangles directly."""
    origin = awpy.vector.Vector3.from_input(start)
    target = awpy.vector.Vector3.from_input(end)

    # Build the ray from origin to target.
    ray = target - origin
    segment_length = ray.length()

    # Coincident points are trivially visible to each other.
    if segment_length < 1e-6:
        return True

    ray = ray.normalize()

    # The segment is blocked as soon as any triangle is hit within its length.
    intersect = awpy.visibility.VisibilityChecker._ray_triangle_intersection
    for triangle in triangles:
        hit_distance = intersect(None, origin, ray, triangle)
        if hit_distance is not None and hit_distance <= segment_length:
            return False

    return True
37 |
38 |
class TestVisibility:
    """Tests the Awpy calculation functions."""

    @pytest.fixture(autouse=True)
    def setup_runner(self):
        """Setup CLI runner."""
        self.runner = CliRunner()
        self.runner.invoke(awpy.cli.get, ["usd", "de_dust2"])

    def test_basic_visibility(self):
        """Tests basic visibility for de_dust2."""
        tri_path = awpy.data.TRIS_DIR / "de_dust2.tri"
        triangles = awpy.visibility.VisibilityChecker.read_tri_file(tri_path)
        checker = awpy.visibility.VisibilityChecker(path=tri_path)

        # Structured as (point1, point2, expected_visibility)
        cases = [
            (
                (-651, -831, 179),  # t_spawn_pos_1
                (-992, -766, 181),  # t_spawn_pos_2
                True,
            ),
            (
                (-651, -831, 179),  # t_spawn_pos_1
                (15, 2168, -65),  # ct_spawn_pos
                False,
            ),
            (
                (-485.90, 1737.51, -60.28),  # mid_doors_ct
                (-489.97, 1532.02, -61.08),  # mid_doors_t
                False,
            ),
            (
                (-515.23, 2251.36, -55.76),  # ct_spawn_towards_b
                (1318.11, 2027.95, 62.41),  # long_a_near_site
                True,
            ),
            (
                (195.87492752075195, 2467.874755859375, -52.5000057220459),
                (-860.0001831054688, -733.0000610351562, 190.00000254313153),
                False,
            ),
        ]

        for start, end, expected in cases:
            # Evaluate both implementations in both directions.
            bvh_result = checker.is_visible(start, end)
            brute_force_result = check_visibility_brute_force(start, end, triangles)
            bvh_result_reverse = checker.is_visible(end, start)
            brute_force_result_reverse = check_visibility_brute_force(end, start, triangles)

            # Both methods must agree with the expected outcome and each other.
            assert bvh_result == expected, f"BVH visibility from {start} to {end} failed"
            assert brute_force_result == expected, f"Brute force visibility from {start} to {end} failed"
            assert bvh_result == brute_force_result, f"BVH and brute force results differ for {start} to {end}"

            # Visibility must be symmetric in both implementations.
            assert bvh_result == bvh_result_reverse, f"BVH visibility not symmetric for {start} and {end}"
            assert brute_force_result == brute_force_result_reverse, (
                f"Brute force visibility not symmetric for {start} and {end}"
            )
102 |
--------------------------------------------------------------------------------