├── .flake8 ├── .gitattributes ├── .github └── workflows │ └── backend.yaml ├── .gitignore ├── .gitmodules ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── api ├── .flake8 ├── .gitignore ├── .readthedocs.yaml ├── DOCS.md ├── README.md ├── docs │ ├── Makefile │ ├── conf.py │ ├── index.rst │ ├── make.bat │ └── requirements.txt ├── poetry.lock ├── pyproject.toml ├── requirements.txt ├── statbotics │ ├── __init__.py │ ├── constants.py │ ├── main.py │ └── validate.py └── tests │ ├── __init__.py │ ├── test_framework.py │ └── test_teams.py ├── backend ├── .env-template ├── .gitignore ├── deploy │ ├── api_app.yaml │ ├── api_cloudbuild.yaml │ ├── cors.json │ ├── data_app.yaml │ ├── data_cloudbuild.yaml │ ├── dispatch.yaml │ ├── notes.txt │ ├── site_app.yaml │ └── site_cloudbuild.yaml ├── main.py ├── package.json ├── pyproject.toml ├── requirements.txt ├── root.crt └── src │ ├── api │ ├── __init__.py │ ├── event.py │ ├── match.py │ ├── query.py │ ├── router.py │ ├── team.py │ ├── team_event.py │ ├── team_match.py │ ├── team_year.py │ └── year.py │ ├── breakdown.py │ ├── constants.py │ ├── data │ ├── avg.py │ ├── epa │ │ ├── agg.py │ │ ├── calc.py │ │ ├── main.py │ │ └── metrics.py │ ├── main.py │ ├── router.py │ ├── tba.py │ ├── utils.py │ └── wins.py │ ├── db │ ├── functions │ │ ├── __init__.py │ │ ├── clear_year.py │ │ ├── noteworthy_matches.py │ │ ├── remove_teams_no_events.py │ │ ├── upcoming_matches.py │ │ └── update_teams.py │ ├── inspect.py │ ├── main.py │ ├── models │ │ ├── __init__.py │ │ ├── create.py │ │ ├── etag.py │ │ ├── event.py │ │ ├── main.py │ │ ├── match.py │ │ ├── team.py │ │ ├── team_event.py │ │ ├── team_match.py │ │ ├── team_year.py │ │ ├── types.py │ │ └── year.py │ ├── read │ │ ├── __init__.py │ │ ├── etag.py │ │ ├── event.py │ │ ├── main.py │ │ ├── match.py │ │ ├── team.py │ │ ├── team_event.py │ │ ├── team_match.py │ │ ├── team_year.py │ │ └── year.py │ └── write │ │ ├── main.py │ │ └── template.py │ ├── google │ └── storage.py │ ├── models │ ├── epa │ │ ├── breakdown.py │ │ ├── constants.py │ │ ├── init.py │ │ ├── main.py │ │ ├── math.py │ │ └── unitless.py │ ├── template.py │ └── types.py │ ├── site │ ├── event.py │ ├── helper.py │ ├── match.py │ ├── router.py │ ├── team.py │ └── team_year.py │ ├── tba │ ├── breakdown.py │ ├── clean_data.py │ ├── constants.py │ ├── main.py │ ├── read_tba.py │ ├── types.py │ └── utils.py │ ├── types │ └── enums.py │ └── utils │ ├── alru_cache.py │ ├── decorators.py │ ├── hypothetical.py │ └── utils.py ├── frontend ├── .eslintrc.json ├── .gitignore ├── .vscode │ └── settings.json ├── README.md ├── assets │ ├── code.PNG │ ├── data.PNG │ ├── logo512.png │ ├── logo_border.png │ └── website.PNG ├── docs │ └── bugs.MD ├── next.config.js ├── package.json ├── postcss.config.js ├── public │ ├── 2022carv_sos.png │ ├── 2022hop_sos.png │ ├── 2022tur_sos.png │ ├── bubble.png │ ├── circ_favicon.ico │ ├── exponnorm.png │ ├── favicon.ico │ ├── match.png │ ├── og_spline.png │ ├── spline.png │ ├── tba.png │ ├── team.png │ ├── thrifty.png │ └── wcp.png ├── src │ ├── api │ │ ├── event.tsx │ │ ├── events.tsx │ │ ├── header.tsx │ │ ├── match.tsx │ │ ├── matches.tsx │ │ ├── storage.tsx │ │ ├── team.tsx │ │ └── teams.tsx │ ├── components │ │ ├── Figures │ │ │ ├── Bar.tsx │ │ │ ├── Bubble.tsx │ │ │ ├── EventLine.tsx │ │ │ ├── Line.tsx │ │ │ ├── Scatter.tsx │ │ │ ├── TeamLine.tsx │ │ │ ├── YearLine.tsx │ │ │ └── shared.tsx │ │ ├── MatchTable.tsx │ │ ├── Table │ │ │ ├── BreakdownTable.tsx │ │ │ ├── InsightsTable.tsx │ │ │ ├── MatchBreakdown.tsx │ │ │ ├── 
Table.tsx │ │ │ └── shared.tsx │ │ ├── columns.tsx │ │ ├── filter.tsx │ │ ├── filterBar.tsx │ │ ├── filterConstants.tsx │ │ ├── multiSelect.tsx │ │ └── utils.tsx │ ├── constants.tsx │ ├── layouts │ │ ├── blogLayout.tsx │ │ └── siteLayout.tsx │ ├── pages │ │ ├── 404.tsx │ │ ├── _app.tsx │ │ ├── blog │ │ │ ├── epa │ │ │ │ └── index.tsx │ │ │ ├── index.tsx │ │ │ ├── intro │ │ │ │ └── index.tsx │ │ │ ├── models │ │ │ │ └── index.tsx │ │ │ ├── sos │ │ │ │ └── index.tsx │ │ │ └── v2 │ │ │ │ └── index.tsx │ │ ├── compare │ │ │ └── index.tsx │ │ ├── docs │ │ │ ├── python.tsx │ │ │ └── rest.tsx │ │ ├── event │ │ │ └── [event_id].tsx │ │ ├── events │ │ │ └── index.tsx │ │ ├── index.tsx │ │ ├── match │ │ │ └── [match_id].tsx │ │ ├── matches │ │ │ └── index.tsx │ │ ├── team │ │ │ └── [team] │ │ │ │ ├── [year].tsx │ │ │ │ └── index.tsx │ │ └── teams │ │ │ └── index.tsx │ ├── pagesContent │ │ ├── blog │ │ │ ├── intro │ │ │ │ └── main.tsx │ │ │ └── shared │ │ │ │ ├── data.tsx │ │ │ │ ├── table.tsx │ │ │ │ └── utils.tsx │ │ ├── compare │ │ │ ├── multiYear.tsx │ │ │ ├── singleYear.tsx │ │ │ └── tabs.tsx │ │ ├── context.tsx │ │ ├── event │ │ │ └── [event_id] │ │ │ │ ├── alliances.tsx │ │ │ │ ├── figures.tsx │ │ │ │ ├── insightsTable.tsx │ │ │ │ ├── matches.tsx │ │ │ │ ├── simulation.tsx │ │ │ │ ├── sos.tsx │ │ │ │ ├── tabs.tsx │ │ │ │ └── worker.ts │ │ ├── events │ │ │ ├── insightsTable.tsx │ │ │ ├── shared.tsx │ │ │ ├── summary.tsx │ │ │ └── tabs.tsx │ │ ├── match │ │ │ └── [match_id] │ │ │ │ ├── imageRow.tsx │ │ │ │ ├── summary.tsx │ │ │ │ ├── table.tsx │ │ │ │ └── video.tsx │ │ ├── matches │ │ │ ├── noteworthy.tsx │ │ │ ├── tabs.tsx │ │ │ └── upcoming.tsx │ │ ├── navbar.tsx │ │ ├── shared │ │ │ ├── layout.tsx │ │ │ ├── notFound.tsx │ │ │ └── tabs.tsx │ │ ├── team │ │ │ ├── figures.tsx │ │ │ ├── main.tsx │ │ │ ├── overview.tsx │ │ │ ├── summaryOverview.tsx │ │ │ ├── summaryTabs.tsx │ │ │ ├── tabs.tsx │ │ │ └── types.tsx │ │ └── teams │ │ │ ├── breakdownTable.tsx │ │ │ ├── figures.tsx │ │ │ ├── insightsTable.tsx │ │ │ └── tabs.tsx │ ├── styles │ │ ├── base.css │ │ └── globals.css │ ├── types │ │ ├── api.tsx │ │ └── data.tsx │ └── utils.tsx ├── tailwind.config.js ├── tsconfig.json └── yarn.lock ├── new └── frontend │ ├── .eslintrc.json │ ├── .gitignore │ ├── assets │ ├── code.PNG │ ├── data.PNG │ ├── logo512.png │ ├── logo_border.png │ └── website.PNG │ ├── next.config.js │ ├── package.json │ ├── postcss.config.js │ ├── public │ ├── 2022carv_sos.png │ ├── 2022hop_sos.png │ ├── 2022tur_sos.png │ ├── bubble.png │ ├── circ_favicon.ico │ ├── exponnorm.png │ ├── favicon.ico │ ├── match.png │ ├── og_spline.png │ ├── spline.png │ ├── tba.png │ ├── team.png │ └── thrifty.png │ ├── src │ ├── api │ │ ├── events.tsx │ │ ├── header.tsx │ │ ├── matches.tsx │ │ ├── storage.tsx │ │ └── teams.tsx │ ├── components │ │ ├── blog │ │ │ ├── data.tsx │ │ │ ├── pageLayout.tsx │ │ │ └── utils.tsx │ │ ├── figures │ │ │ ├── axisOptions.tsx │ │ │ ├── bubbles.tsx │ │ │ ├── lineChart.tsx │ │ │ └── yearLine.tsx │ │ ├── filterBar.tsx │ │ ├── matchTable.tsx │ │ ├── queryHandler.tsx │ │ ├── select.tsx │ │ └── tables │ │ │ ├── eventsTable.tsx │ │ │ ├── teamYearsBreakdownTable.tsx │ │ │ ├── teamYearsTable.tsx │ │ │ └── templates │ │ │ ├── epa.tsx │ │ │ ├── locations.tsx │ │ │ ├── misc.tsx │ │ │ ├── record.tsx │ │ │ └── table.tsx │ ├── contexts │ │ ├── dataContext.tsx │ │ ├── locationContext.tsx │ │ └── preferencesContext.tsx │ ├── layout │ │ ├── footer.tsx │ │ ├── header.tsx │ │ └── tabs.tsx │ ├── pages │ │ ├── _app.tsx │ │ ├── _document.tsx 
│ │ ├── blog │ │ │ ├── epa │ │ │ │ └── index.tsx │ │ │ ├── index.tsx │ │ │ ├── intro │ │ │ │ └── index.tsx │ │ │ ├── models │ │ │ │ └── index.tsx │ │ │ ├── sos │ │ │ │ └── index.tsx │ │ │ ├── table.tsx │ │ │ └── v2 │ │ │ │ └── index.tsx │ │ ├── docs │ │ │ ├── python │ │ │ │ └── index.tsx │ │ │ └── rest │ │ │ │ └── index.tsx │ │ ├── events │ │ │ └── index.tsx │ │ ├── index.tsx │ │ ├── matches │ │ │ ├── filterBar.tsx │ │ │ ├── index.tsx │ │ │ ├── noteworthy.tsx │ │ │ └── upcoming.tsx │ │ ├── team │ │ │ └── [team] │ │ │ │ ├── [year] │ │ │ │ └── index.tsx │ │ │ │ └── index.tsx │ │ └── teams │ │ │ └── index.tsx │ ├── styles │ │ └── globals.css │ ├── types │ │ └── api.tsx │ └── utils │ │ ├── constants.tsx │ │ ├── events.tsx │ │ ├── filterOptions.tsx │ │ ├── formatting.tsx │ │ ├── geography.tsx │ │ ├── no-ssr.tsx │ │ └── utils.tsx │ ├── tailwind.config.ts │ ├── tsconfig.json │ └── yarn.lock ├── pyrightconfig.json └── scripts ├── 2023 ├── Add Champs Teams.ipynb ├── Component EPAs.ipynb ├── Contest.ipynb ├── Misc.ipynb ├── Qualification Likelihood.ipynb └── contest_responses.csv ├── .gitignore ├── README.md ├── api └── Examples.ipynb ├── baselines └── Baseline Metrics.ipynb ├── exploration ├── High Score Progression.ipynb ├── Nonlinear Sum EPA.ipynb ├── Past Season Impact.ipynb ├── Simulation.ipynb ├── Year Normalized EPA vs. Unitless EPA.ipynb └── figures │ ├── nonlinear_1678_2023.png │ ├── nonlinear_2056_2023.png │ └── nonlinear_254_2022.png └── pyproject.toml /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | ignore = E501, E203, W503 -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ipynb linguist-documentation -------------------------------------------------------------------------------- /.github/workflows/backend.yaml: -------------------------------------------------------------------------------- 1 | name: CI-Backend 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Set up Python 3.11 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: 3.11 19 | - name: Linting 20 | run: | 21 | cd backend 22 | python -m pip install --upgrade pip 23 | python -m pip install virtualenv 24 | python -m virtualenv .venv 25 | source .venv/bin/activate 26 | pip install -r requirements.txt 27 | python -m black . --check --diff 28 | python -m flake8 . 
--exclude=./.venv/ --ignore=E501,E203,W503 --max-line-length=88 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | cockroach-data 2 | 3 | .DS_store -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "csvs"] 2 | path = csvs 3 | url = https://github.com/avgupta456/statbotics-csvs.git 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "editor.formatOnSave": true, 4 | "editor.formatOnSaveMode": "file", 5 | "python.defaultInterpreterPath": "./backend/.venv/bin", 6 | "python.analysis.typeCheckingMode": "strict", 7 | "python.analysis.diagnosticMode": "workspace", 8 | "[python]": { 9 | "editor.defaultFormatter": "ms-python.black-formatter" 10 | }, 11 | "flake8.interpreter": [ 12 | "./backend/.venv/bin/python" 13 | ], 14 | "flake8.args": [ 15 | "--ignore=E501,E203,W503", 16 | "--max-line-length=88" 17 | ], 18 | "[typescript]": { 19 | "editor.defaultFormatter": "esbenp.prettier-vscode" 20 | }, 21 | "files.exclude": { 22 | "**/.git": true, 23 | "**/__pycache__": true, 24 | "**/.venv": true, 25 | "**/.pytest_cache": true, 26 | "**/.coverage": true, 27 | "**/build": true, 28 | "**/node_modules": true, 29 | "**/cockroach-data": true 30 | } 31 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Abhijit Gupta 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 |
--------------------------------------------------------------------------------
/api/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 88
3 | max-complexity = 100
4 | select = B,C,E,F,W,T
5 | ignore = E203, W503, E501
--------------------------------------------------------------------------------
/api/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | dist
3 | _build
--------------------------------------------------------------------------------
/api/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 |   os: ubuntu-22.04
10 |   tools:
11 |     python: "3.12"
12 |
13 | # Build documentation in the docs/ directory with Sphinx
14 | sphinx:
15 |   configuration: api/docs/conf.py
16 |
17 | # Build documentation with MkDocs
18 | #mkdocs:
19 | # configuration: mkdocs.yml
20 |
21 | # Optionally build your docs in additional formats such as PDF
22 | formats:
23 |   - pdf
24 |
25 | # Optionally set the version of Python and requirements required to build your docs
26 | python:
27 |   install:
28 |     - requirements: api/docs/requirements.txt
29 |
--------------------------------------------------------------------------------
/api/DOCS.md:
--------------------------------------------------------------------------------
1 | # Statbotics API
2 |
3 | The Statbotics Python API queries the Statbotics backend and returns data in a JSON format. It is distributed via PyPI.
4 |
5 | ## Testing
6 |
7 | Requires Python 3.8+.
8 |
9 | ```
10 | pip install pytest
11 | pytest tests/
12 | ```
13 |
14 | ## Documentation
15 |
16 | Requires Python 3.8+. Production builds are triggered directly on the Read the Docs website. You may need to comment out cache control for the build to succeed.
17 |
18 | ```
19 | pip install sphinx
20 | pip install sphinx_rtd_theme
21 | cd docs
22 | make html
23 | ```
24 |
25 | ## Deployment
26 |
27 | Requires Python 3.8+. Requires a PyPI account.
28 |
29 | 1. Update `version` in `pyproject.toml` and `docs/conf.py`
30 | 2. Delete old distributions in `/dist`
31 | 3. Run the following scripts:
32 |
33 | ```
34 | pip install build twine
35 | python -m build
36 | twine upload dist/*
37 | ```
38 |
--------------------------------------------------------------------------------
/api/README.md:
--------------------------------------------------------------------------------
1 | # Statbotics API
2 |
3 | Statbotics.io aims to modernize FRC data analytics through developing and distributing cutting-edge metrics and analysis. This Python API makes Expected Points Added (EPA) statistics just a few Python lines away! Currently we support queries on teams, years, events, and matches. Read below for usage and documentation.
4 |
5 | Visit https://statbotics.io for more content!
6 |
7 | ## Usage
8 |
9 | With Python>=3.8 and pip installed, run
10 |
11 | ```
12 | pip install statbotics==3.0.0
13 | ```
14 |
15 | Then in a Python file, create a Statbotics object and get started!
16 |
17 | ```
18 | import statbotics
19 |
20 | sb = statbotics.Statbotics()
21 | print(sb.get_team(254))
22 |
23 | >> {'team': 254, 'name': 'The Cheesy Poofs', 'country': 'USA', 'state': 'CA', 'district': None, 'rookie_year': 1999, 'active': True, 'record': {'wins': 808, 'losses': 160, 'ties': 8, 'count': 976, 'winrate': 0.832}, 'norm_epa': {'current': 1909.0, 'recent': 1904.0, 'mean': 1894.0, 'max': 2058.0}}
24 | ```
25 |
26 | Read below for more methods!
27 |
28 | ## API Reference
29 |
30 | Visit https://statbotics.readthedocs.io/en/latest/
31 |
32 | ## Contribute
33 |
34 | If you are interested in contributing, reach out to Abhijit Gupta (avgupta456@gmail.com)
35 |
36 | ## Support
37 |
38 | If you are having issues, please let us know. We welcome issues and pull requests.
39 |
40 | ## License
41 |
42 | The project is licensed under the MIT license.
43 |
--------------------------------------------------------------------------------
/api/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/api/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | import sphinx_rtd_theme
16 |
17 |
18 | sys.path.insert(0, os.path.abspath("./.."))
19 |
20 | # -- Project information -----------------------------------------------------
21 |
22 | project = "Statbotics"
23 | copyright = "2025, Abhijit Gupta"
24 | author = "Abhijit Gupta"
25 |
26 | # The full version, including alpha/beta/rc tags
27 | release = "3.0.0"
28 |
29 |
30 | # -- General configuration ---------------------------------------------------
31 |
32 | # Add any Sphinx extension module names here, as strings. They can be
33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
34 | # ones.
35 | extensions = ["sphinx.ext.autodoc", "sphinx_rtd_theme"]
36 |
37 | pygments_style = "sphinx"
38 |
39 | # Add any paths that contain templates here, relative to this directory.
40 | templates_path = ["_templates"]
41 | master_doc = "index"
42 |
43 | # List of patterns, relative to source directory, that match files and
44 | # directories to ignore when looking for source files.
45 | # This pattern also affects html_static_path and html_extra_path.
46 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
47 |
48 |
49 | # -- Options for HTML output -------------------------------------------------
50 |
51 | # The theme to use for HTML and HTML Help pages. See the documentation for
52 | # a list of builtin themes.
53 | #
54 | html_theme = "sphinx_rtd_theme"
55 | html_theme_options = {
56 |     "collapse_navigation": False,
57 |     "navigation_depth": 2,
58 | }
59 |
60 | # Add any paths that contain custom static files (such as style sheets) here,
61 | # relative to this directory. They are copied after the builtin static files,
62 | # so a file named "default.css" will overwrite the builtin "default.css".
63 | html_static_path = ["_static"]
64 |
65 | autodoc_member_order = "bysource"
66 |
--------------------------------------------------------------------------------
/api/docs/index.rst:
--------------------------------------------------------------------------------
1 | Statbotics
2 | ==========
3 |
4 | Statbotics.io aims to modernize FRC data analytics through developing and distributing cutting-edge metrics and analysis. This Python API makes Expected Points Added (EPA) statistics just a few Python lines away!
5 | Currently we support queries on teams, years, events, and matches. Read below for usage and documentation.
6 |
7 | Visit https://statbotics.io for more content!
8 |
9 | Usage
10 | -----
11 |
12 | With Python>=3.8 and pip installed, run
13 |
14 | .. code-block:: bash
15 |
16 |     pip install statbotics==3.0.0
17 |
18 | Then in a Python file, create a Statbotics object and get started!
19 |
20 | .. code-block:: python
21 |
22 |     import statbotics
23 |
24 |     sb = statbotics.Statbotics()
25 |     print(sb.get_team(254))
26 |
27 |     >> {'team': 254, 'name': 'The Cheesy Poofs', 'country': 'USA', 'state': 'CA', 'district': None, 'rookie_year': 1999, 'active': True, 'record': {'wins': 808, 'losses': 160, 'ties': 8, 'count': 976, 'winrate': 0.832}, 'norm_epa': {'current': 1909.0, 'recent': 1904.0, 'mean': 1894.0, 'max': 2058.0}}
28 |
29 | Read below for more methods!
30 |
31 | API Reference
32 | -------------
33 | .. autoclass:: statbotics.main.Statbotics
34 |     :members: get_team, get_teams, get_year, get_years, get_team_year, get_team_years, get_event, get_events, get_team_event, get_team_events, get_match, get_matches, get_team_match, get_team_matches
35 |
36 | Contribute
37 | ----------
38 |
39 | If you are interested in contributing, reach out to Abhijit Gupta (avgupta456@gmail.com). Source code is available at github.com/avgupta456/statbotics.
40 |
41 | Support
42 | -------
43 |
44 | If you are having issues, please let us know. We welcome issues and pull requests at github.com/avgupta456/statbotics.
45 |
46 | License
47 | -------
48 |
49 | The project is licensed under the MIT license.
50 |
--------------------------------------------------------------------------------
/api/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | 	set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | 	echo.
18 | 	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | 	echo.installed, then set the SPHINXBUILD environment variable to point
20 | 	echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | 	echo.may add the Sphinx directory to PATH.
22 | 	echo.
23 | 	echo.If you don't have Sphinx installed, grab it from
24 | 	echo.http://sphinx-doc.org/
25 | 	exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/api/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx-rtd-theme==3.0.2
2 | requests==2.28.2
3 | cachecontrol==0.12.11
--------------------------------------------------------------------------------
/api/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 88
3 | include = '\.pyi?$'
4 | exclude = '''
5 | /(
6 |     \.git
7 |   | \.hg
8 |   | \.mypy_cache
9 |   | \.tox
10 |   | \.venv
11 |   | _build
12 |   | buck-out
13 |   | build
14 |   | dist
15 | )/
16 | '''
17 |
18 | [tool.poetry]
19 | name = "statbotics"
20 | version = "3.0.0"
21 | description = "Modernizing FRC Data Analytics"
22 | authors = ["Abhijit Gupta <avgupta456@gmail.com>"]
23 | license = "MIT License"
24 |
25 | readme = 'README.md' # Markdown files are supported
26 |
27 | repository = "https://github.com/avgupta456/statbotics"
28 | homepage = "https://statbotics.io"
29 |
30 | keywords = ['FIRST', 'robotics', 'data', 'analysis', 'ranking']
31 |
32 | [tool.poetry.dependencies]
33 | python = "^3.8"
34 | requests = "^2.28.2"
35 | cachecontrol = "^0.12.11"
36 |
37 | [tool.poetry.dev-dependencies]
38 |
39 | [build-system]
40 | requires = ["poetry>=0.12"]
41 | build-backend = "poetry.masonry.api"
42 |
--------------------------------------------------------------------------------
/api/requirements.txt:
--------------------------------------------------------------------------------
1 | requests==2.28.2
2 | cachecontrol==0.12.11
--------------------------------------------------------------------------------
/api/statbotics/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import *
--------------------------------------------------------------------------------
/api/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/api/tests/__init__.py
--------------------------------------------------------------------------------
/api/tests/test_framework.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import warnings
3 |
4 | from statbotics import main
5 |
6 |
7 | class TestFramework(unittest.TestCase):
8 |     def setUp(self):
9 |         warnings.simplefilter("ignore", ResourceWarning)
10 |
11 |     def tearDown(self):
12 |         warnings.simplefilter("default", ResourceWarning)
13 |
14 |     def test_create_statbotics(self):
15 |         sb = main.Statbotics()
16 |         return sb
17 |
--------------------------------------------------------------------------------
/backend/.env-template:
-------------------------------------------------------------------------------- 1 | PROD=False 2 | 3 | CRDB_USER=abhijit 4 | CRDB_PWD=REDACTED 5 | CRDB_HOST=statbotics-5256.5xj.gcp-us-central1.cockroachlabs.cloud:26257 6 | -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | poetry.lock 3 | __pycache__ 4 | cache -------------------------------------------------------------------------------- /backend/deploy/api_app.yaml: -------------------------------------------------------------------------------- 1 | service: default 2 | runtime: python311 3 | entrypoint: gunicorn -w 1 -t 600 -k uvicorn.workers.UvicornWorker main:app 4 | 5 | instance_class: F1 6 | 7 | automatic_scaling: 8 | min_instances: 0 9 | max_instances: 1 10 | 11 | env_variables: 12 | LOCAL_DB: false 13 | -------------------------------------------------------------------------------- /backend/deploy/api_cloudbuild.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: node:10.15.1 3 | entrypoint: npm 4 | args: ["install"] 5 | dir: "backend" 6 | - name: node:10.15.1 7 | entrypoint: npm 8 | args: ["run", "create-env"] 9 | dir: "backend" 10 | env: 11 | - "CRDB_USER=abhijit" 12 | - "CRDB_PWD=${_CRDB_PWD}" 13 | - "CRDB_HOST=statbotics-5256.5xj.gcp-us-central1.cockroachlabs.cloud:26257" 14 | - "PROD=True" 15 | - name: "gcr.io/cloud-builders/gcloud" 16 | args: ["app", "deploy", "--appyaml", "./deploy/api_app.yaml"] 17 | dir: "backend" 18 | -------------------------------------------------------------------------------- /backend/deploy/cors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "origin": ["http://localhost:3000", "https://statbotics.io", "https://www.statbotics.io"], 4 | "responseHeader": ["Content-Type", "Cache-Control"], 5 | "method": ["GET", "HEAD", "OPTIONS"], 6 | "maxAgeSeconds": 3600 7 | } 8 | ] -------------------------------------------------------------------------------- /backend/deploy/data_app.yaml: -------------------------------------------------------------------------------- 1 | service: data 2 | runtime: python311 3 | entrypoint: gunicorn -w 1 -t 1200 -k uvicorn.workers.UvicornWorker main:app 4 | 5 | instance_class: B4 6 | 7 | # exactly one instance 8 | basic_scaling: 9 | max_instances: 1 10 | 11 | #set to minimums 12 | resources: 13 | cpu: 1 14 | memory_gb: 1 15 | disk_size_gb: 10 16 | 17 | env_variables: 18 | LOCAL_DB: false 19 | -------------------------------------------------------------------------------- /backend/deploy/data_cloudbuild.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: node:10.15.1 3 | entrypoint: npm 4 | args: ["install"] 5 | dir: "backend" 6 | - name: node:10.15.1 7 | entrypoint: npm 8 | args: ["run", "create-env"] 9 | dir: "backend" 10 | env: 11 | - "CRDB_USER=abhijit" 12 | - "CRDB_PWD=${_CRDB_PWD}" 13 | - "CRDB_HOST=statbotics-5256.5xj.gcp-us-central1.cockroachlabs.cloud:26257" 14 | - "PROD=True" 15 | - name: "gcr.io/cloud-builders/gcloud" 16 | args: ["app", "deploy", "--appyaml", "./deploy/data_app.yaml"] 17 | dir: "backend" 18 | -------------------------------------------------------------------------------- /backend/deploy/dispatch.yaml: -------------------------------------------------------------------------------- 1 | dispatch: 2 | - url: "*/v3/data/*" 3 | service: data 
4 | 5 | - url: "*/v3/site/*" 6 | service: site 7 | 8 | - url: "*/.*" 9 | service: default 10 | -------------------------------------------------------------------------------- /backend/deploy/notes.txt: -------------------------------------------------------------------------------- 1 | gsutil cors set cors.json gs://site_dev_v1 2 | gsutil cors set cors.json gs://site_v1 -------------------------------------------------------------------------------- /backend/deploy/site_app.yaml: -------------------------------------------------------------------------------- 1 | service: site 2 | runtime: python311 3 | entrypoint: gunicorn -w 1 -t 600 -k uvicorn.workers.UvicornWorker main:app 4 | 5 | instance_class: F1 6 | 7 | automatic_scaling: 8 | min_instances: 0 9 | max_instances: 1 10 | 11 | env_variables: 12 | LOCAL_DB: false 13 | -------------------------------------------------------------------------------- /backend/deploy/site_cloudbuild.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: node:10.15.1 3 | entrypoint: npm 4 | args: ["install"] 5 | dir: "backend" 6 | - name: node:10.15.1 7 | entrypoint: npm 8 | args: ["run", "create-env"] 9 | dir: "backend" 10 | env: 11 | - "CRDB_USER=abhijit" 12 | - "CRDB_PWD=${_CRDB_PWD}" 13 | - "CRDB_HOST=statbotics-5256.5xj.gcp-us-central1.cockroachlabs.cloud:26257" 14 | - "PROD=True" 15 | - name: "gcr.io/cloud-builders/gcloud" 16 | args: ["app", "deploy", "--appyaml", "./deploy/site_app.yaml"] 17 | dir: "backend" 18 | -------------------------------------------------------------------------------- /backend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "backend", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "start": "poetry run uvicorn main:app --reload --port=8000", 7 | "set-reqs": "poetry lock && poetry export -f requirements.txt --output requirements.txt --without-hashes --with dev", 8 | "isort": "poetry run isort . --src-path=./src --skip=./.venv --multi-line=3 --trailing-comma --line-length=88 --combine-as --ensure-newline-before-comments", 9 | "create-env": "printenv > .env", 10 | "free-port": "sudo lsof -t -i tcp:8000 | xargs kill -9", 11 | "lint": "poetry run black . --check --diff && poetry run flake8 . --exclude=./.venv/ && poetry run pyright . 
--venvpath=./.venv/" 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /backend/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "backend" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Abhijit Gupta "] 6 | 7 | [tool.poetry.dependencies] 8 | python = ">=3.9,<3.12" 9 | fastapi = "^0.101.1" 10 | python-dotenv = "^1.0.0" 11 | uvicorn = "^0.23.2" 12 | gunicorn = "^21.2.0" 13 | requests = "^2.31.0" 14 | SQLAlchemy = "^2.0.20" 15 | sqlalchemy-cockroachdb = "^2.0.1" 16 | psycopg2 = "^2.9.7" 17 | attrs = "^23.1.0" 18 | scipy = "^1.11.1" 19 | CacheControl = "^0.13.1" 20 | typer = {extras = ["all"], version = "^0.9.0"} 21 | black = "^23.7.0" 22 | flake8 = "^6.1.0" 23 | isort = "^5.12.0" 24 | pyright = "^1.1.324" 25 | numpy = "1.26.4" 26 | google-cloud-storage = "^3.1.0" 27 | 28 | [tool.poetry.group.dev.dependencies] 29 | pyinstrument = "^4.6.1" 30 | vulture = "^2.10" 31 | 32 | [build-system] 33 | requires = ["poetry-core>=1.0.0"] 34 | build-backend = "poetry.core.masonry.api" 35 | -------------------------------------------------------------------------------- /backend/requirements.txt: -------------------------------------------------------------------------------- 1 | annotated-types==0.7.0 2 | anyio==4.9.0 3 | attrs==23.2.0 4 | black==23.12.1 5 | cachecontrol==0.13.1 6 | cachetools==5.5.2 7 | certifi==2025.1.31 8 | charset-normalizer==3.4.1 9 | click==8.1.8 10 | colorama==0.4.6 11 | exceptiongroup==1.2.2 12 | fastapi==0.101.1 13 | flake8==6.1.0 14 | google-api-core==2.24.2 15 | google-auth==2.38.0 16 | google-cloud-core==2.4.3 17 | google-cloud-storage==3.1.0 18 | google-crc32c==1.7.1 19 | google-resumable-media==2.7.2 20 | googleapis-common-protos==1.69.2 21 | greenlet==3.1.1 22 | gunicorn==21.2.0 23 | h11==0.14.0 24 | idna==3.10 25 | isort==5.13.2 26 | markdown-it-py==3.0.0 27 | mccabe==0.7.0 28 | mdurl==0.1.2 29 | msgpack==1.1.0 30 | mypy-extensions==1.0.0 31 | nodeenv==1.9.1 32 | numpy==1.26.4 33 | packaging==24.2 34 | pathspec==0.12.1 35 | platformdirs==4.3.7 36 | proto-plus==1.26.1 37 | protobuf==6.30.2 38 | psycopg2==2.9.10 39 | pyasn1-modules==0.4.2 40 | pyasn1==0.6.1 41 | pycodestyle==2.11.1 42 | pydantic-core==2.33.1 43 | pydantic==2.11.2 44 | pyflakes==3.1.0 45 | pygments==2.19.1 46 | pyinstrument==4.7.3 47 | pyright==1.1.398 48 | python-dotenv==1.1.0 49 | requests==2.32.3 50 | rich==13.9.4 51 | rsa==4.9 52 | scipy==1.13.1 53 | shellingham==1.5.4 54 | sniffio==1.3.1 55 | sqlalchemy-cockroachdb==2.0.2 56 | sqlalchemy==2.0.40 57 | starlette==0.27.0 58 | tomli==2.2.1 59 | typer[all]==0.9.4 60 | typing-extensions==4.13.1 61 | typing-inspection==0.4.0 62 | urllib3==2.3.0 63 | uvicorn==0.23.2 64 | vulture==2.14 65 | -------------------------------------------------------------------------------- /backend/src/api/__init__.py: -------------------------------------------------------------------------------- 1 | from src.api.event import get_event_cached, get_events_cached 2 | from src.api.match import get_match_cached, get_matches_cached 3 | from src.api.team import get_team_cached, get_teams_cached 4 | from src.api.team_event import get_team_event_cached, get_team_events_cached 5 | from src.api.team_match import get_team_match_cached, get_team_matches_cached 6 | from src.api.team_year import get_team_year_cached, get_team_years_cached 7 | from src.api.year import get_year_cached, get_years_cached 8 | 9 | __all__ = [ 10 | "get_event_cached", 11 | 
"get_events_cached", 12 | "get_match_cached", 13 | "get_matches_cached", 14 | "get_team_event_cached", 15 | "get_team_events_cached", 16 | "get_team_match_cached", 17 | "get_team_matches_cached", 18 | "get_team_year_cached", 19 | "get_team_years_cached", 20 | "get_team_cached", 21 | "get_teams_cached", 22 | "get_year_cached", 23 | "get_years_cached", 24 | ] 25 | -------------------------------------------------------------------------------- /backend/src/api/query.py: -------------------------------------------------------------------------------- 1 | from fastapi import Query 2 | 3 | from src.constants import CURR_YEAR 4 | 5 | active_query = Query(None, description="Whether the team has played in the last year.") 6 | 7 | ascending_query = Query( 8 | None, 9 | description="Whether to sort the returned values in ascending order. Default is ascending", 10 | ) 11 | 12 | country_query = Query( 13 | None, description="Capitalized country name, e.g. `USA` or `Canada`." 14 | ) 15 | 16 | district_query = Query( 17 | None, 18 | description="One of [`fma`, `fnc`, `fsc`, `fit`, `fin`, `fim`, `ne`, `chs`, `ont`, `pnw`, `pch`, `isr`]", 19 | ) 20 | 21 | elim_query = Query(None, description="Whether the match is an elimination match.") 22 | 23 | event_query = Query(None, description="Event key, e.g. `2019ncwak`.") 24 | 25 | event_type_query = Query( 26 | None, 27 | description="One of [`regional`, `district`, `district_cmp`, `cmp_division`, or `cmp_finals`].", 28 | ) 29 | 30 | limit_query = Query( 31 | None, 32 | ge=1, 33 | le=1000, 34 | description="Maximum number of events to return. Default is 1000.", 35 | ) 36 | 37 | match_query = Query(None, description="Match key, e.g. `2019ncwak_f1m1`.") 38 | 39 | metric_query = Query( 40 | None, 41 | description="How to sort the returned values. Any column in the table is valid.", 42 | ) 43 | 44 | offset_query = Query(None, ge=0, description="Offset from the first result to return.") 45 | state_query = Query(None, description="Capitalized two-letter state code, e.g. `NC`.") 46 | 47 | team_query = Query( 48 | None, ge=0, lt=100000, description="Team number (no prefix), e.g. 5511." 49 | ) 50 | 51 | week_query = Query( 52 | None, ge=0, le=8, description="Week of the competition season. 
8 is CMP" 53 | ) 54 | 55 | year_query = Query(None, ge=2002, le=CURR_YEAR, description="Four-digit year") 56 | -------------------------------------------------------------------------------- /backend/src/api/router.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from src.api.event import router as event_router 4 | from src.api.match import router as match_router 5 | from src.api.team import router as team_router 6 | from src.api.team_event import router as team_event_router 7 | from src.api.team_match import router as team_match_router 8 | from src.api.team_year import router as team_year_router 9 | from src.api.year import router as year_router 10 | 11 | router = APIRouter() 12 | router.include_router(year_router, tags=["Year"]) 13 | router.include_router(team_router, tags=["Team"]) 14 | router.include_router(team_year_router, tags=["TeamYear"]) 15 | router.include_router(event_router, tags=["Event"]) 16 | router.include_router(team_event_router, tags=["TeamEvent"]) 17 | router.include_router(match_router, tags=["Match"]) 18 | router.include_router(team_match_router, tags=["TeamMatch"]) 19 | 20 | 21 | @router.get("/") 22 | async def read_root(): 23 | return {"name": "API V3 Router"} 24 | -------------------------------------------------------------------------------- /backend/src/api/year.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Any, Dict, List, Optional, Tuple 3 | 4 | from fastapi import APIRouter, Response 5 | 6 | from src.api.query import ascending_query, limit_query, metric_query, offset_query 7 | from src.db.models import Year 8 | from src.db.read import get_year, get_years 9 | from src.utils.alru_cache import alru_cache 10 | from src.utils.decorators import ( 11 | async_fail_gracefully_plural, 12 | async_fail_gracefully_singular, 13 | ) 14 | 15 | router = APIRouter() 16 | 17 | 18 | @router.get("/") 19 | async def read_root_year(): 20 | return {"name": "Year V3 Router"} 21 | 22 | 23 | @alru_cache(ttl=timedelta(minutes=2)) 24 | async def get_year_cached( 25 | year: int, no_cache: bool = False 26 | ) -> Tuple[bool, Optional[Year]]: 27 | return (True, get_year(year=year)) 28 | 29 | 30 | @alru_cache(ttl=timedelta(minutes=2)) 31 | async def get_years_cached( 32 | metric: Optional[str] = None, 33 | ascending: Optional[bool] = None, 34 | limit: Optional[int] = None, 35 | offset: Optional[int] = None, 36 | site: bool = False, 37 | no_cache: bool = False, 38 | ) -> Tuple[bool, List[Year]]: 39 | if not site: 40 | limit = min(limit or 1000, 1000) 41 | 42 | return ( 43 | True, 44 | get_years(metric=metric, ascending=ascending, limit=limit, offset=offset), 45 | ) 46 | 47 | 48 | @router.get( 49 | "/year/{year}", 50 | summary="Query a single year", 51 | description="Returns a single Year object. Requires a four-digit year, e.g. `2019`.", 52 | ) 53 | @async_fail_gracefully_singular 54 | async def read_year( 55 | response: Response, 56 | year: int, 57 | ) -> Dict[str, Any]: 58 | year_obj: Optional[Year] = await get_year_cached(year=year) 59 | if year_obj is None: 60 | raise Exception("Year not found") 61 | 62 | return year_obj.to_dict() 63 | 64 | 65 | @router.get( 66 | "/years", 67 | summary="Query multiple years", 68 | response_description="Returns a list of Years since 2002. 
Older data is not available.", 69 | ) 70 | @async_fail_gracefully_plural 71 | async def read_years( 72 | response: Response, 73 | metric: Optional[str] = metric_query, 74 | ascending: Optional[bool] = ascending_query, 75 | limit: Optional[int] = limit_query, 76 | offset: Optional[int] = offset_query, 77 | ) -> List[Dict[str, Any]]: 78 | years: List[Year] = await get_years_cached( 79 | metric=metric, ascending=ascending, limit=limit, offset=offset 80 | ) 81 | return [year.to_dict() for year in years] 82 | -------------------------------------------------------------------------------- /backend/src/constants.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import List 3 | 4 | # GLOBAL 5 | 6 | PROD = os.getenv("PROD", "False") == "True" 7 | 8 | # 8001 emulates the data server 9 | BACKEND_URL = "https://api.statbotics.io" if PROD else "http://localhost:8001" 10 | 11 | # DB 12 | 13 | CRDB_USER = os.getenv("CRDB_USER", "") 14 | CRDB_PWD = os.getenv("CRDB_PWD", "") 15 | CRDB_HOST = os.getenv("CRDB_HOST", "") 16 | 17 | CONN_STR = ( 18 | ( 19 | "cockroachdb://" 20 | + CRDB_USER 21 | + ":" 22 | + CRDB_PWD 23 | + "@" 24 | + CRDB_HOST 25 | + "/statbotics3?sslmode=verify-full&sslrootcert=root.crt" 26 | ) 27 | if PROD 28 | else "cockroachdb://root@localhost:26257/statbotics3?sslmode=disable" 29 | ) 30 | 31 | # API 32 | 33 | AUTH_KEY_BLACKLIST: List[str] = [] 34 | 35 | # CONFIG 36 | 37 | CURR_YEAR = 2025 38 | CURR_WEEK = 8 39 | 40 | # MISC 41 | 42 | EPS = 1e-6 43 | -------------------------------------------------------------------------------- /backend/src/data/epa/calc.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from typing import Dict 3 | 4 | from src.data.utils import objs_type 5 | from src.db.models import Event, TeamEvent, TeamMatch, TeamYear 6 | from src.models.epa.main import EPA 7 | from src.utils.utils import get_team_event_key, get_team_match_key, get_team_year_key 8 | 9 | 10 | def process_year( 11 | objs: objs_type, all_team_years: Dict[int, Dict[int, TeamYear]] 12 | ) -> objs_type: 13 | year = objs[0] 14 | team_years = objs[1] 15 | events = objs[2] 16 | team_events = objs[3] 17 | matches = objs[4] 18 | team_matches = objs[5] 19 | 20 | match_events: Dict[str, Event] = {} 21 | match_team_matches: Dict[str, Dict[int, TeamMatch]] = defaultdict(dict) 22 | match_team_events: Dict[str, Dict[int, TeamEvent]] = defaultdict(dict) 23 | match_team_years: Dict[str, Dict[int, TeamYear]] = defaultdict(dict) 24 | for match in matches.values(): 25 | match_events[match.key] = events[match.event] 26 | red_teams, blue_teams = match.get_teams() 27 | for team in red_teams + blue_teams: 28 | team_match_key = get_team_match_key(team, match.key) 29 | match_team_matches[match.key][team] = team_matches[team_match_key] 30 | team_event_key = get_team_event_key(team, match.event) 31 | match_team_events[match.key][team] = team_events[team_event_key] 32 | team_year_key = get_team_year_key(team, match.year) 33 | match_team_years[match.key][team] = team_years[team_year_key] 34 | 35 | model = EPA() 36 | 37 | model.start_season(year, all_team_years, team_years) 38 | for curr_match in sorted(matches.values(), key=lambda m: m.time): 39 | curr_event = match_events[curr_match.key] 40 | curr_team_matches = match_team_matches[curr_match.key] 41 | curr_team_events = match_team_events[curr_match.key] 42 | curr_team_years = match_team_years[curr_match.key] 43 | model.process_match( 44 | 
curr_match, 45 | curr_event, 46 | curr_team_matches, 47 | curr_team_events, 48 | curr_team_years, 49 | ) 50 | 51 | # Records TeamEvent EPA stats if no matches played yet 52 | for team_event in team_events.values(): 53 | if team_event.qual_count == 0: 54 | model.post_record_team(team_event.team, None, team_event, None) 55 | 56 | return objs 57 | -------------------------------------------------------------------------------- /backend/src/data/epa/main.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from typing import Dict, List 3 | 4 | from src.constants import CURR_YEAR 5 | from src.data.epa.agg import process_year as process_year_agg 6 | from src.data.epa.calc import process_year as process_year_calc 7 | from src.data.epa.metrics import process_year as process_year_metrics 8 | from src.data.utils import objs_type 9 | from src.db.models import Team, TeamYear 10 | from src.utils.utils import r 11 | 12 | 13 | # MAIN FUNCTION 14 | def process_year( 15 | objs: objs_type, all_team_years: Dict[int, Dict[int, TeamYear]] 16 | ) -> objs_type: 17 | objs = process_year_calc(objs, all_team_years) 18 | objs = process_year_agg(objs) 19 | objs = process_year_metrics(objs) 20 | 21 | return objs 22 | 23 | 24 | def post_process( 25 | teams: List[Team], all_team_years: Dict[int, Dict[int, TeamYear]] 26 | ) -> List[Team]: 27 | team_team_years: Dict[int, List[TeamYear]] = defaultdict(list) 28 | for team_years in all_team_years.values(): 29 | for team_year in team_years.values(): 30 | team_team_years[team_year.team].append(team_year) 31 | 32 | for team in teams: 33 | years: Dict[int, float] = {} 34 | 35 | for team_year in team_team_years[team.team]: 36 | if team_year.norm_epa is not None: 37 | years[team_year.year] = team_year.norm_epa 38 | 39 | keys, values = years.keys(), years.values() 40 | 41 | # get recent epas (last four years) 42 | recent: List[float] = [] 43 | for year in range(CURR_YEAR - 4, CURR_YEAR + 1): 44 | if year in keys: 45 | recent.append(years[year]) 46 | r_y, y = len(recent), len(keys) 47 | 48 | team.norm_epa = None if y == 0 else r(years[max(keys)]) 49 | team.norm_epa_recent = None if r_y == 0 else r(sum(recent) / r_y) 50 | team.norm_epa_mean = None if y == 0 else r(sum(values) / y) 51 | team.norm_epa_max = None if y == 0 else r(max(values)) 52 | 53 | return teams 54 | -------------------------------------------------------------------------------- /backend/src/data/router.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from fastapi import APIRouter, BackgroundTasks 3 | 4 | from src.constants import BACKEND_URL, CURR_YEAR 5 | from src.data.main import reset_all_years, update_curr_year 6 | from src.data.tba import check_year_partial as check_year_partial_tba 7 | from src.db.read import get_etags as get_etags_db, get_events as get_events_db 8 | 9 | data_router = APIRouter() 10 | site_router = APIRouter() 11 | 12 | 13 | @data_router.get("/") 14 | async def read_root(): 15 | return {"name": "Data Router"} 16 | 17 | 18 | @data_router.get("/reset_all_years") 19 | async def reset_all_years_endpoint(): 20 | # return {"status": "skipped"} 21 | reset_all_years() 22 | return {"status": "success"} 23 | 24 | 25 | @data_router.get("/reset_curr_year") 26 | async def reset_curr_year_endpoint(): 27 | update_curr_year(partial=False) 28 | return {"status": "success"} 29 | 30 | 31 | @data_router.get("/update_curr_year") 32 | async def update_curr_year_endpoint(): 33 | 
update_curr_year(partial=True) 34 | return {"status": "success"} 35 | 36 | 37 | def update_curr_year_background(): 38 | requests.get(f"{BACKEND_URL}/v3/data/update_curr_year") 39 | 40 | 41 | @site_router.get("/update_curr_year") 42 | async def update_curr_year_site_endpoint(background_tasks: BackgroundTasks): 43 | event_objs = get_events_db(year=CURR_YEAR) 44 | etags = get_etags_db(CURR_YEAR) 45 | is_new_data = check_year_partial_tba(CURR_YEAR, event_objs, etags) 46 | if not is_new_data: 47 | return {"status": "skipped"} 48 | 49 | background_tasks.add_task(update_curr_year_background) 50 | return {"status": "backgrounded"} 51 | -------------------------------------------------------------------------------- /backend/src/db/functions/__init__.py: -------------------------------------------------------------------------------- 1 | from src.db.functions.clear_year import clear_year 2 | from src.db.functions.noteworthy_matches import get_noteworthy_matches 3 | from src.db.functions.remove_teams_no_events import remove_teams_with_no_events 4 | from src.db.functions.upcoming_matches import get_upcoming_matches 5 | from src.db.functions.update_teams import update_team_districts 6 | 7 | __all__ = [ 8 | "clear_year", 9 | "get_noteworthy_matches", 10 | "remove_teams_with_no_events", 11 | "get_upcoming_matches", 12 | "update_team_districts", 13 | ] 14 | -------------------------------------------------------------------------------- /backend/src/db/functions/clear_year.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import Session as SessionType 2 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 3 | 4 | from src.db.main import Session 5 | from src.db.models.etag import ETagORM 6 | from src.db.models.event import EventORM 7 | from src.db.models.match import MatchORM 8 | from src.db.models.team_event import TeamEventORM 9 | from src.db.models.team_match import TeamMatchORM 10 | from src.db.models.team_year import TeamYearORM 11 | from src.db.models.year import YearORM 12 | 13 | 14 | def clear_year(year: int) -> None: 15 | def callback(session: SessionType): 16 | # delete all data from a given year 17 | for table in [ 18 | ETagORM, 19 | YearORM, 20 | TeamYearORM, 21 | EventORM, 22 | TeamEventORM, 23 | MatchORM, 24 | TeamMatchORM, 25 | ]: 26 | session.query(table).filter(table.year == year).delete( 27 | synchronize_session=False 28 | ) 29 | 30 | run_transaction(Session, callback) 31 | -------------------------------------------------------------------------------- /backend/src/db/functions/remove_teams_no_events.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from sqlalchemy.orm import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.constants import CURR_YEAR 7 | from src.db.main import Session 8 | from src.db.models.team import TeamORM 9 | from src.db.models.team_event import TeamEventORM 10 | from src.db.models.team_year import TeamYearORM 11 | 12 | 13 | def remove_teams_with_no_events() -> None: 14 | def callback(session: SessionType): 15 | teams: List[TeamORM] = [ 16 | x[0] 17 | for x in session.query(TeamEventORM.team).group_by(TeamEventORM.team).all() 18 | ] 19 | 20 | # Filter teamYears with no events 21 | session.query(TeamYearORM).filter( 22 | (TeamYearORM.team.notin_(teams)) & (TeamYearORM.year < CURR_YEAR) 23 | ).delete() 24 | 25 | # Filter teams with no events 26 | 
session.query(TeamORM).filter( 27 | (TeamORM.team.notin_(teams)) & (TeamORM.rookie_year < CURR_YEAR) 28 | ).delete() 29 | 30 | run_transaction(Session, callback) 31 | -------------------------------------------------------------------------------- /backend/src/db/functions/update_teams.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import Session as SessionType 2 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 3 | 4 | from src.db.main import Session 5 | from src.db.models.team import TeamORM 6 | from src.db.models.team_year import TeamYearORM 7 | 8 | 9 | def update_team_districts() -> None: 10 | def callback(session: SessionType): 11 | session.query(TeamORM).update( 12 | { 13 | TeamORM.district: session.query(TeamYearORM.district) # type: ignore 14 | .filter(TeamYearORM.team == TeamORM.team) 15 | .order_by(TeamYearORM.year.desc()) 16 | .limit(1) 17 | .as_scalar() 18 | }, 19 | synchronize_session=False, 20 | ) 21 | 22 | run_transaction(Session, callback) 23 | -------------------------------------------------------------------------------- /backend/src/db/inspect.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import inspect 2 | 3 | from src.db.main import engine 4 | 5 | 6 | def print_all_tables(): 7 | schema = "public" 8 | inspector = inspect(engine) 9 | for table_name in inspector.get_table_names(schema): 10 | print(f"{table_name}") 11 | for column in inspector.get_columns(table_name, schema): 12 | print("Column: %s" % column) 13 | print() 14 | -------------------------------------------------------------------------------- /backend/src/db/main.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import create_engine 2 | from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass, sessionmaker 3 | 4 | from src.constants import CONN_STR 5 | 6 | engine = create_engine(CONN_STR) 7 | 8 | Session = sessionmaker(bind=engine) 9 | 10 | 11 | # Only for type hints, doesn't enable slots 12 | # Mirror to avoid intermediate commits to DB 13 | class Base(MappedAsDataclass, DeclarativeBase): 14 | pass 15 | 16 | 17 | def clean_db() -> None: 18 | Base.metadata.drop_all(bind=engine) 19 | Base.metadata.create_all(engine) 20 | -------------------------------------------------------------------------------- /backend/src/db/models/__init__.py: -------------------------------------------------------------------------------- 1 | from src.db.models.create import match_dict_to_objs 2 | from src.db.models.etag import ETag 3 | from src.db.models.event import Event 4 | from src.db.models.main import Model 5 | from src.db.models.match import Match 6 | from src.db.models.team import Team 7 | from src.db.models.team_event import TeamEvent 8 | from src.db.models.team_match import TeamMatch 9 | from src.db.models.team_year import TeamYear 10 | from src.db.models.year import Year 11 | 12 | __all__ = [ 13 | "Model", 14 | "ETag", 15 | "Event", 16 | "Match", 17 | "TeamEvent", 18 | "TeamMatch", 19 | "TeamYear", 20 | "Team", 21 | "Year", 22 | "match_dict_to_objs", 23 | ] 24 | -------------------------------------------------------------------------------- /backend/src/db/models/etag.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Integer, String 2 | from sqlalchemy.orm import mapped_column 3 | 4 | from src.db.main import Base 5 | from src.db.models.main import Model, ModelORM, 
generate_attr_class 6 | from src.db.models.types import MI, MOS, MS 7 | 8 | 9 | class ETagORM(Base, ModelORM): 10 | """DECLARATION""" 11 | 12 | __tablename__ = "etags" 13 | year: MI = mapped_column(Integer, index=True) 14 | path: MS = mapped_column(String, index=True, primary_key=True) 15 | etag: MOS = mapped_column(String) 16 | 17 | 18 | _ETag = generate_attr_class("ETag", ETagORM) 19 | 20 | 21 | class ETag(_ETag, Model): 22 | def pk(self: "ETag") -> str: 23 | return self.path 24 | 25 | def __hash__(self: "ETag") -> int: 26 | return hash(self.pk()) 27 | -------------------------------------------------------------------------------- /backend/src/db/models/main.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Type, TypeVar 2 | 3 | import attr 4 | from sqlalchemy import inspect 5 | 6 | 7 | class ModelORM: 8 | __table__: Any 9 | 10 | 11 | class Model: 12 | T1 = TypeVar("T1") 13 | 14 | @classmethod 15 | def from_dict(cls: Type[T1], dict: Dict[str, Any]) -> T1: 16 | dict = {k: dict.get(k, None) for k in cls.__slots__} # type: ignore 17 | return cls(**dict) 18 | 19 | def to_dict(self) -> Dict[str, Any]: 20 | return attr.asdict(self) 21 | 22 | def sort(self) -> Any: 23 | raise NotImplementedError() 24 | 25 | def pk(self) -> str: 26 | raise NotImplementedError() 27 | 28 | def __hash__(self) -> int: 29 | return hash(self.pk()) 30 | 31 | def __eq__(self, other: Any) -> bool: 32 | if not isinstance(other, Model): 33 | return False 34 | return self.pk() == other.pk() 35 | 36 | def __str__(self): 37 | return self.__repr__() 38 | 39 | 40 | T2 = TypeVar("T2", bound=ModelORM) 41 | 42 | 43 | def generate_attr_class(name: str, sqlalchemy_model: Type[T2]) -> Type[T2]: 44 | columns = inspect(sqlalchemy_model).columns # type: ignore 45 | 46 | fields = { 47 | c.name: attr.ib(default=None if c.default is None else c.default.arg) 48 | for c in columns 49 | } 50 | 51 | return attr.make_class( # type: ignore 52 | name, attrs=fields, bases=(Model,), auto_attribs=True, slots=True 53 | ) 54 | 55 | 56 | TModelORM = TypeVar("TModelORM", bound=ModelORM) 57 | TModel = TypeVar("TModel", bound=Model) 58 | -------------------------------------------------------------------------------- /backend/src/db/models/types.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Optional 2 | 3 | from sqlalchemy.orm import Mapped 4 | 5 | MF = Mapped[float] 6 | MOF = Mapped[Optional[float]] 7 | 8 | MI = Mapped[int] 9 | MOI = Mapped[Optional[int]] 10 | 11 | MB = Mapped[bool] 12 | MOB = Mapped[Optional[bool]] 13 | 14 | MS = Mapped[str] 15 | MOS = Mapped[Optional[str]] 16 | 17 | 18 | def values_callable(x: Any) -> Any: 19 | return [e.value for e in x] 20 | -------------------------------------------------------------------------------- /backend/src/db/read/__init__.py: -------------------------------------------------------------------------------- 1 | from src.db.read.etag import get_etags, get_num_etags 2 | from src.db.read.event import get_event, get_events, get_num_events 3 | from src.db.read.match import get_match, get_matches, get_num_matches 4 | from src.db.read.team import get_num_teams, get_team, get_teams 5 | from src.db.read.team_event import get_num_team_events, get_team_event, get_team_events 6 | from src.db.read.team_match import ( 7 | get_num_team_matches, 8 | get_team_match, 9 | get_team_matches, 10 | ) 11 | from src.db.read.team_year import get_num_team_years, get_team_year, get_team_years 12 | 
from src.db.read.year import get_num_years, get_year, get_years 13 | 14 | __all__ = [ 15 | "get_etags", 16 | "get_num_etags", 17 | "get_event", 18 | "get_events", 19 | "get_num_events", 20 | "get_match", 21 | "get_matches", 22 | "get_num_matches", 23 | "get_team_event", 24 | "get_team_events", 25 | "get_num_team_events", 26 | "get_team_match", 27 | "get_team_matches", 28 | "get_num_team_matches", 29 | "get_team_year", 30 | "get_team_years", 31 | "get_num_team_years", 32 | "get_team", 33 | "get_teams", 34 | "get_num_teams", 35 | "get_year", 36 | "get_years", 37 | "get_num_years", 38 | ] 39 | -------------------------------------------------------------------------------- /backend/src/db/read/etag.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.etag import ETag, ETagORM 8 | 9 | 10 | def get_etags(year: Optional[int] = None, path: Optional[str] = None) -> List[ETag]: 11 | def callback(session: SessionType): 12 | data = session.query(ETagORM) 13 | if year is not None: 14 | data = data.filter(ETagORM.year == year) 15 | if path is not None: 16 | data = data.filter(ETagORM.path == path) 17 | out_data: List[ETagORM] = data.all() 18 | return [ETag.from_dict(x.__dict__) for x in out_data] 19 | 20 | return run_transaction(Session, callback) # type: ignore 21 | 22 | 23 | def get_num_etags() -> int: 24 | def callback(session: SessionType) -> int: 25 | return session.query(ETagORM).count() 26 | 27 | return run_transaction(Session, callback) # type: ignore 28 | -------------------------------------------------------------------------------- /backend/src/db/read/event.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.event import Event, EventORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_event(event_id: str) -> Optional[Event]: 12 | def callback(session: SessionType): 13 | data = session.query(EventORM).filter(EventORM.key == event_id).first() 14 | if data is None: 15 | return None 16 | return Event.from_dict(data.__dict__) 17 | 18 | return run_transaction(Session, callback) # type: ignore 19 | 20 | 21 | def get_events( 22 | year: Optional[int] = None, 23 | country: Optional[str] = None, 24 | state: Optional[str] = None, 25 | district: Optional[str] = None, 26 | type: Optional[str] = None, 27 | week: Optional[int] = None, 28 | metric: Optional[str] = None, 29 | ascending: Optional[bool] = None, 30 | limit: Optional[int] = None, 31 | offset: Optional[int] = None, 32 | ) -> List[Event]: 33 | @common_filters(EventORM, Event, metric, ascending, limit, offset) 34 | def callback(session: SessionType): 35 | data = session.query(EventORM) 36 | if year is not None: 37 | data = data.filter(EventORM.year == year) 38 | if country is not None: 39 | data = data.filter(EventORM.country == country) 40 | if state is not None: 41 | data = data.filter(EventORM.state == state) 42 | if district is not None: 43 | data = data.filter(EventORM.district == district) 44 | if type is not None: 45 | data = data.filter(EventORM.type == type) 46 | if week is not None: 47 | 
data = data.filter(EventORM.week == week) 48 | 49 | return data 50 | 51 | return run_transaction(Session, callback) # type: ignore 52 | 53 | 54 | def get_num_events() -> int: 55 | def callback(session: SessionType) -> int: 56 | return session.query(EventORM).count() 57 | 58 | return run_transaction(Session, callback) # type: ignore 59 | -------------------------------------------------------------------------------- /backend/src/db/read/main.py: -------------------------------------------------------------------------------- 1 | from typing import Any, List, Optional, Type, TypeVar 2 | 3 | from src.db.models.main import Model, ModelORM 4 | 5 | T = TypeVar("T") 6 | 7 | 8 | def common_filters( 9 | model_orm: Type[ModelORM], 10 | model: Type[Model], 11 | metric: Optional[str], 12 | ascending: Optional[bool], 13 | limit: Optional[int], 14 | offset: Optional[int], 15 | ) -> Any: 16 | def decorator(func: Any) -> Any: 17 | def wrapper(*args: Any, **kwargs: Any) -> Any: 18 | data = func(*args, **kwargs) 19 | 20 | if metric is not None: 21 | data = data.filter(model_orm.__dict__[metric] != None) # noqa: E711 22 | if ascending is not None and ascending: 23 | data = data.order_by(model_orm.__dict__[metric].asc()) 24 | else: 25 | data = data.order_by(model_orm.__dict__[metric].desc()) 26 | if limit is not None: 27 | data = data.limit(limit) 28 | if offset is not None: 29 | data = data.offset(offset) 30 | out_data: List[model_orm] = data.all() # type: ignore 31 | 32 | return [model.from_dict(x.__dict__) for x in out_data] 33 | 34 | return wrapper 35 | 36 | return decorator 37 | -------------------------------------------------------------------------------- /backend/src/db/read/match.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.match import Match, MatchORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_match(match: str) -> Optional[Match]: 12 | def callback(session: SessionType): 13 | data = session.query(MatchORM).filter(MatchORM.key == match).first() 14 | 15 | if data is None: 16 | return None 17 | 18 | return Match.from_dict(data.__dict__) 19 | 20 | return run_transaction(Session, callback) # type: ignore 21 | 22 | 23 | def get_matches( 24 | team: Optional[int] = None, 25 | year: Optional[int] = None, 26 | event: Optional[str] = None, 27 | week: Optional[int] = None, 28 | elim: Optional[bool] = None, 29 | metric: Optional[str] = None, 30 | ascending: Optional[bool] = None, 31 | limit: Optional[int] = None, 32 | offset: Optional[int] = None, 33 | ) -> List[Match]: 34 | @common_filters(MatchORM, Match, metric, ascending, limit, offset) 35 | def callback(session: SessionType): 36 | data = session.query(MatchORM) 37 | if team is not None: 38 | data = data.filter( 39 | (MatchORM.red_1 == team) 40 | | (MatchORM.red_2 == team) 41 | | (MatchORM.red_3 == team) 42 | | (MatchORM.blue_1 == team) 43 | | (MatchORM.blue_2 == team) 44 | | (MatchORM.blue_3 == team) 45 | ) 46 | if year is not None: 47 | data = data.filter(MatchORM.year == year) 48 | if event is not None: 49 | data = data.filter(MatchORM.event == event) 50 | if week is not None: 51 | data = data.filter(MatchORM.week == week) 52 | if elim is not None: 53 | data = data.filter(MatchORM.elim == elim) 54 | 55 | return data 56 | 57 | return 
run_transaction(Session, callback) # type: ignore 58 | 59 | 60 | def get_num_matches() -> int: 61 | def callback(session: SessionType) -> int: 62 | return session.query(MatchORM).count() 63 | 64 | return run_transaction(Session, callback) # type: ignore 65 | -------------------------------------------------------------------------------- /backend/src/db/read/team.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.team import Team, TeamORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_team(team: int) -> Optional[Team]: 12 | def callback(session: SessionType): 13 | out_data = session.query(TeamORM).filter(TeamORM.team == team).first() 14 | return Team.from_dict(out_data.__dict__) if out_data else None 15 | 16 | return run_transaction(Session, callback) # type: ignore 17 | 18 | 19 | def get_teams( 20 | country: Optional[str] = None, 21 | state: Optional[str] = None, 22 | district: Optional[str] = None, 23 | active: Optional[bool] = None, 24 | metric: Optional[str] = None, 25 | ascending: Optional[bool] = None, 26 | limit: Optional[int] = None, 27 | offset: Optional[int] = None, 28 | ) -> List[Team]: 29 | @common_filters(TeamORM, Team, metric, ascending, limit, offset) 30 | def callback(session: SessionType): 31 | data = session.query(TeamORM) 32 | if country is not None: 33 | data = data.filter(TeamORM.country == country) 34 | if state is not None: 35 | data = data.filter(TeamORM.state == state) 36 | if district is not None: 37 | data = data.filter(TeamORM.district == district) 38 | if active is not None: 39 | data = data.filter(TeamORM.active == active) 40 | 41 | return data 42 | 43 | return run_transaction(Session, callback) # type: ignore 44 | 45 | 46 | def get_num_teams() -> int: 47 | def callback(session: SessionType) -> int: 48 | return session.query(TeamORM).count() 49 | 50 | return run_transaction(Session, callback) # type: ignore 51 | -------------------------------------------------------------------------------- /backend/src/db/read/team_event.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.team_event import TeamEvent, TeamEventORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_team_event(team: int, event: str) -> Optional[TeamEvent]: 12 | def callback(session: SessionType): 13 | data = session.query(TeamEventORM).filter( 14 | TeamEventORM.team == team, TeamEventORM.event == event 15 | ) 16 | out_data: Optional[TeamEventORM] = data.first() 17 | if out_data is None: 18 | return None 19 | return TeamEvent.from_dict(out_data.__dict__) 20 | 21 | return run_transaction(Session, callback) # type: ignore 22 | 23 | 24 | def get_team_events( 25 | team: Optional[int] = None, 26 | year: Optional[int] = None, 27 | event: Optional[str] = None, 28 | country: Optional[str] = None, 29 | state: Optional[str] = None, 30 | district: Optional[str] = None, 31 | type: Optional[str] = None, 32 | week: Optional[int] = None, 33 | metric: Optional[str] = None, 34 | ascending: Optional[bool] = None, 35 | limit: Optional[int] = 
None, 36 | offset: Optional[int] = None, 37 | ) -> List[TeamEvent]: 38 | @common_filters(TeamEventORM, TeamEvent, metric, ascending, limit, offset) 39 | def callback(session: SessionType): 40 | data = session.query(TeamEventORM) 41 | if team is not None: 42 | data = data.filter(TeamEventORM.team == team) 43 | if year is not None: 44 | data = data.filter(TeamEventORM.year == year) 45 | if event is not None: 46 | data = data.filter(TeamEventORM.event == event) 47 | if country is not None: 48 | data = data.filter(TeamEventORM.country == country) 49 | if state is not None: 50 | data = data.filter(TeamEventORM.state == state) 51 | if district is not None: 52 | data = data.filter(TeamEventORM.district == district) 53 | if type is not None: 54 | data = data.filter(TeamEventORM.type == type) 55 | if week is not None: 56 | data = data.filter(TeamEventORM.week == week) 57 | 58 | return data 59 | 60 | return run_transaction(Session, callback) # type: ignore 61 | 62 | 63 | def get_num_team_events() -> int: 64 | def callback(session: SessionType) -> int: 65 | return session.query(TeamEventORM).count() 66 | 67 | return run_transaction(Session, callback) # type: ignore 68 | -------------------------------------------------------------------------------- /backend/src/db/read/team_match.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.team_match import TeamMatch, TeamMatchORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_team_match(team: int, match: str) -> Optional[TeamMatch]: 12 | def callback(session: SessionType): 13 | data = session.query(TeamMatchORM).filter( 14 | TeamMatchORM.team == team, TeamMatchORM.match == match 15 | ) 16 | out_data: Optional[TeamMatchORM] = data.first() 17 | if out_data is None: 18 | return None 19 | return TeamMatch.from_dict(out_data.__dict__) 20 | 21 | return run_transaction(Session, callback) # type: ignore 22 | 23 | 24 | def get_team_matches( 25 | team: Optional[int] = None, 26 | year: Optional[int] = None, 27 | event: Optional[str] = None, 28 | week: Optional[int] = None, 29 | match: Optional[str] = None, 30 | elim: Optional[bool] = None, 31 | metric: Optional[str] = None, 32 | ascending: Optional[bool] = None, 33 | limit: Optional[int] = None, 34 | offset: Optional[int] = None, 35 | ) -> List[TeamMatch]: 36 | @common_filters(TeamMatchORM, TeamMatch, metric, ascending, limit, offset) 37 | def callback(session: SessionType): 38 | data = session.query(TeamMatchORM) 39 | if team is not None: 40 | data = data.filter(TeamMatchORM.team == team) 41 | if year is not None: 42 | data = data.filter(TeamMatchORM.year == year) 43 | if event is not None: 44 | data = data.filter(TeamMatchORM.event == event) 45 | if week is not None: 46 | data = data.filter(TeamMatchORM.week == week) 47 | if match is not None: 48 | data = data.filter(TeamMatchORM.match == match) 49 | if elim is not None: 50 | data = data.filter(TeamMatchORM.elim == elim) 51 | 52 | return data 53 | 54 | return run_transaction(Session, callback) # type: ignore 55 | 56 | 57 | def get_num_team_matches() -> int: 58 | def callback(session: SessionType) -> int: 59 | return session.query(TeamMatchORM).count() 60 | 61 | return run_transaction(Session, callback) # type: ignore 62 | 
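# --- Added example (illustrative, not part of the original team_match.py) ---
# The getters above follow the repo-wide read-layer pattern: build a filtered
# query inside a callback, let `common_filters` (src/db/read/main.py) apply
# metric ordering, limit, and offset, and execute it via `run_transaction`.
# A minimal usage sketch, assuming CONN_STR points at a reachable database with
# 2023 data loaded; team 254 and the `time` metric are illustrative choices:
if __name__ == "__main__":
    # Ten most recent 2023 team-match rows for team 254, newest first.
    recent = get_team_matches(team=254, year=2023, metric="time", ascending=False, limit=10)
    print(get_num_team_matches(), [tm.match for tm in recent])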
-------------------------------------------------------------------------------- /backend/src/db/read/team_year.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.team_year import TeamYear, TeamYearORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_team_year(team: int, year: int) -> Optional[TeamYear]: 12 | def callback(session: SessionType): 13 | data = session.query(TeamYearORM).filter( 14 | TeamYearORM.team == team, TeamYearORM.year == year 15 | ) 16 | out_data: Optional[TeamYearORM] = data.first() 17 | if out_data is None: 18 | return None 19 | return TeamYear.from_dict(out_data.__dict__) 20 | 21 | return run_transaction(Session, callback) # type: ignore 22 | 23 | 24 | def get_team_years( 25 | team: Optional[int] = None, 26 | teams: Optional[List[str]] = None, 27 | year: Optional[int] = None, 28 | country: Optional[str] = None, 29 | state: Optional[str] = None, 30 | district: Optional[str] = None, 31 | metric: Optional[str] = None, 32 | ascending: Optional[bool] = None, 33 | limit: Optional[int] = None, 34 | offset: Optional[int] = None, 35 | ) -> List[TeamYear]: 36 | @common_filters(TeamYearORM, TeamYear, metric, ascending, limit, offset) 37 | def callback(session: SessionType): 38 | data = session.query(TeamYearORM) 39 | if team is not None: 40 | data = data.filter(TeamYearORM.team == team) 41 | if teams is not None: 42 | data = data.filter(TeamYearORM.team.in_(teams)) 43 | if year is not None: 44 | data = data.filter(TeamYearORM.year == year) 45 | if country is not None: 46 | data = data.filter(TeamYearORM.country == country) 47 | if state is not None: 48 | data = data.filter(TeamYearORM.state == state) 49 | if district is not None: 50 | data = data.filter(TeamYearORM.district == district) 51 | 52 | return data 53 | 54 | return run_transaction(Session, callback) # type: ignore 55 | 56 | 57 | def get_num_team_years() -> int: 58 | def callback(session: SessionType) -> int: 59 | return session.query(TeamYearORM).count() 60 | 61 | return run_transaction(Session, callback) # type: ignore 62 | -------------------------------------------------------------------------------- /backend/src/db/read/year.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from sqlalchemy.orm.session import Session as SessionType 4 | from sqlalchemy_cockroachdb import run_transaction # type: ignore 5 | 6 | from src.db.main import Session 7 | from src.db.models.year import Year, YearORM 8 | from src.db.read.main import common_filters 9 | 10 | 11 | def get_year(year: int) -> Optional[Year]: 12 | def callback(session: SessionType): 13 | data = session.query(YearORM).filter(YearORM.year == year).first() 14 | if data is None: 15 | return None 16 | return Year.from_dict(data.__dict__) 17 | 18 | return run_transaction(Session, callback) # type: ignore 19 | 20 | 21 | def get_years( 22 | metric: Optional[str] = None, 23 | ascending: Optional[bool] = None, 24 | limit: Optional[int] = None, 25 | offset: Optional[int] = None, 26 | ) -> List[Year]: 27 | @common_filters(YearORM, Year, metric, ascending, limit, offset) 28 | def callback(session: SessionType): 29 | return session.query(YearORM) 30 | 31 | return run_transaction(Session, callback) # type: ignore 
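# Added note (illustrative, not part of the original year.py): `get_years` relies on the
# shared `common_filters` decorator from src/db/read/main.py, which drops rows where the
# requested metric is NULL, orders by that metric (ascending or descending), applies
# limit/offset, and converts each ORM row into its attrs mirror via `Year.from_dict`.
# A hedged example call, assuming a populated database and that `score_mean` is a
# column on YearORM:
#
#     highest_scoring = get_years(metric="score_mean", ascending=False, limit=5)
#     print([y.year for y in highest_scoring])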
32 | 33 | 34 | def get_num_years() -> int: 35 | def callback(session: SessionType) -> int: 36 | return session.query(YearORM).count() 37 | 38 | return run_transaction(Session, callback) # type: ignore 39 | -------------------------------------------------------------------------------- /backend/src/db/write/main.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from src.db.models.etag import ETag, ETagORM 4 | from src.db.models.event import Event, EventORM 5 | from src.db.models.match import Match, MatchORM 6 | from src.db.models.team import Team, TeamORM 7 | from src.db.models.team_event import TeamEvent, TeamEventORM 8 | from src.db.models.team_match import TeamMatch, TeamMatchORM 9 | from src.db.models.team_year import TeamYear, TeamYearORM 10 | from src.db.models.year import Year, YearORM 11 | from src.db.write.template import update_template 12 | 13 | 14 | def update_etags(items: List[ETag], only_insert: bool = False) -> None: 15 | return update_template(ETagORM, ETag)(items, only_insert) 16 | 17 | 18 | def update_events(items: List[Event], only_insert: bool = False) -> None: 19 | return update_template(EventORM, Event)(items, only_insert) 20 | 21 | 22 | def update_matches(items: List[Match], only_insert: bool = False) -> None: 23 | return update_template(MatchORM, Match)(items, only_insert) 24 | 25 | 26 | def update_teams(items: List[Team], only_insert: bool = False) -> None: 27 | return update_template(TeamORM, Team)(items, only_insert) 28 | 29 | 30 | def update_years(items: List[Year], only_insert: bool = False) -> None: 31 | return update_template(YearORM, Year)(items, only_insert) 32 | 33 | 34 | def update_team_events(items: List[TeamEvent], only_insert: bool = False) -> None: 35 | return update_template(TeamEventORM, TeamEvent)(items, only_insert) 36 | 37 | 38 | def update_team_matches(items: List[TeamMatch], only_insert: bool = False) -> None: 39 | return update_template(TeamMatchORM, TeamMatch)(items, only_insert) 40 | 41 | 42 | def update_team_years(items: List[TeamYear], only_insert: bool = False) -> None: 43 | return update_template(TeamYearORM, TeamYear)(items, only_insert) 44 | -------------------------------------------------------------------------------- /backend/src/models/epa/constants.py: -------------------------------------------------------------------------------- 1 | NORM_MEAN = 1500 2 | NORM_SD = 250 3 | INIT_PENALTY = 0.2 4 | 5 | YEAR_ONE_WEIGHT = 0.7 6 | MEAN_REVERSION = 0.4 7 | 8 | ELIM_WEIGHT = 1 / 3 9 | -------------------------------------------------------------------------------- /backend/src/models/epa/init.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | from typing import Optional, Tuple 3 | 4 | from src.constants import EPS 5 | from src.db.models import TeamYear, Year 6 | from src.models.epa.constants import ( 7 | INIT_PENALTY, 8 | MEAN_REVERSION, 9 | NORM_MEAN, 10 | NORM_SD, 11 | YEAR_ONE_WEIGHT, 12 | ) 13 | from src.models.epa.math import SkewNormal, inv_unit_sigmoid 14 | 15 | 16 | @lru_cache(maxsize=None) 17 | def get_constants(year: Year) -> Tuple[int, float, float]: 18 | num_teams = 2 if year.year <= 2004 else 3 19 | curr_mean = year.no_foul_mean or year.score_mean or 0 20 | curr_sd = year.score_sd or 0 21 | 22 | return num_teams, curr_mean, curr_sd 23 | 24 | 25 | def norm_epa_to_next_season_epa( 26 | norm_epa: float, curr_mean: float, curr_sd: float, curr_num_teams: int 27 | ) -> float: 28 | return 
max( 29 | curr_mean / curr_num_teams + curr_sd * (norm_epa - NORM_MEAN) / NORM_SD, 0 30 | ) 31 | 32 | 33 | def get_init_epa( 34 | year: Year, team_year_1: Optional[TeamYear], team_year_2: Optional[TeamYear] 35 | ) -> SkewNormal: 36 | num_teams, year_mean, year_sd = get_constants(year) 37 | 38 | INIT_EPA = NORM_MEAN - INIT_PENALTY * NORM_SD 39 | norm_epa_1 = norm_epa_2 = INIT_EPA 40 | if team_year_1 is not None and team_year_1.norm_epa is not None: 41 | norm_epa_1 = team_year_1.norm_epa 42 | if team_year_2 is not None and team_year_2.norm_epa is not None: 43 | norm_epa_2 = team_year_2.norm_epa 44 | 45 | prev_norm_epa = YEAR_ONE_WEIGHT * norm_epa_1 + (1 - YEAR_ONE_WEIGHT) * norm_epa_2 46 | curr_norm_epa = (1 - MEAN_REVERSION) * prev_norm_epa + MEAN_REVERSION * INIT_EPA 47 | 48 | curr_epa_z_score = (curr_norm_epa - NORM_MEAN) / NORM_SD 49 | 50 | # enforces starting EPA >= 0 51 | curr_epa_z_score = max(-year_mean / num_teams / year_sd, curr_epa_z_score) 52 | 53 | mean = year.get_mean_components() 54 | sd_frac = (year_sd or 0) / (year_mean or 1) 55 | sd = mean * sd_frac 56 | 57 | if year.year >= 2016: 58 | # For ranking points, take inv sigmoid since later we will apply sigmoid 59 | mean[4] = max(-1, inv_unit_sigmoid(max(EPS, min(1 - EPS, mean[4])))) 60 | mean[5] = max(-1, inv_unit_sigmoid(max(EPS, min(1 - EPS, mean[5])))) 61 | mean[6] = max(-1, inv_unit_sigmoid(max(EPS, min(1 - EPS, mean[6])))) 62 | 63 | curr_epa_mean = mean / num_teams + sd * curr_epa_z_score 64 | curr_epa_sd = sd / num_teams 65 | 66 | return SkewNormal(curr_epa_mean, curr_epa_sd, 0) 67 | -------------------------------------------------------------------------------- /backend/src/models/epa/unitless.py: -------------------------------------------------------------------------------- 1 | # type: ignore 2 | 3 | from bisect import bisect_left 4 | from typing import Callable, List 5 | 6 | from scipy.stats import expon, exponnorm 7 | 8 | from src.models.epa.constants import NORM_MEAN, NORM_SD 9 | 10 | 11 | def epa_to_unitless_epa(epa: float, mean: float, sd: float) -> float: 12 | return NORM_MEAN + NORM_SD * (epa - mean / 3) / sd 13 | 14 | 15 | # For converting EPA to Norm EPA 16 | distrib = exponnorm(1.6, -0.3, 0.2) 17 | 18 | 19 | def get_epa_to_norm_epa_func(year_epas: List[float]) -> Callable[[float], float]: 20 | desc_sorted_epas = sorted(year_epas, reverse=True) 21 | total_N, cutoff_N = len(desc_sorted_epas), int(len(desc_sorted_epas) / 10) 22 | exponnorm_disrib = expon_distrib = None 23 | if total_N > 0: 24 | exponnorm_disrib = exponnorm(*exponnorm.fit(desc_sorted_epas)) 25 | if cutoff_N > 0: 26 | expon_distrib = expon(*expon.fit(desc_sorted_epas[:cutoff_N])) 27 | 28 | sorted_epas = desc_sorted_epas[::-1] 29 | 30 | def _get_norm_epa(epa: float) -> float: 31 | i = total_N - bisect_left(sorted_epas, epa) 32 | exponnorm_value: float = exponnorm_disrib.cdf(epa) 33 | percentile = exponnorm_value 34 | if i < cutoff_N: 35 | expon_value: float = expon_distrib.cdf(epa) 36 | expon_value = 1 - cutoff_N / total_N * (1 - expon_value) 37 | # Linearly interpolate between the two distributions from 10% to 5% 38 | expon_frac = min(1, 2 * (cutoff_N - i) / cutoff_N) 39 | percentile = expon_frac * expon_value + (1 - expon_frac) * exponnorm_value 40 | out: float = distrib.ppf(percentile) 41 | return NORM_MEAN + NORM_SD * out 42 | 43 | # get quantiles of year_epas, and linearly interpolate between norm_epas 44 | quantiles = [sorted_epas[((total_N - 1) * i) // 100] for i in range(101)] 45 | quantile_norm_epas = [_get_norm_epa(epa) for epa in 
quantiles] 46 | 47 | def get_norm_epa(epa: float) -> float: 48 | i = bisect_left(quantiles, epa) 49 | if i == 0: 50 | return quantile_norm_epas[0] 51 | if i == 101: 52 | return quantile_norm_epas[100] 53 | 54 | x0 = quantiles[i - 1] 55 | x1 = quantiles[i] 56 | y0 = quantile_norm_epas[i - 1] 57 | y1 = quantile_norm_epas[i] 58 | 59 | return y0 + (y1 - y0) * (epa - x0) / (x1 - x0) 60 | 61 | return get_norm_epa 62 | -------------------------------------------------------------------------------- /backend/src/models/types.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Optional 2 | 3 | 4 | class AlliancePred: 5 | score: float 6 | breakdown: Any 7 | rp_1: Optional[float] 8 | rp_2: Optional[float] 9 | rp_3: Optional[float] 10 | 11 | def __init__( 12 | self, 13 | score: float, 14 | breakdown: Any, 15 | rp_1: Optional[float] = None, 16 | rp_2: Optional[float] = None, 17 | rp_3: Optional[float] = None, 18 | ): 19 | self.score = score 20 | self.breakdown = breakdown 21 | self.rp_1 = rp_1 22 | self.rp_2 = rp_2 23 | self.rp_3 = rp_3 24 | 25 | def __repr__(self): 26 | return f"Pred({self.score} {self.rp_1} {self.rp_2} {self.rp_3})" 27 | 28 | 29 | class MatchPred: 30 | win_prob: float 31 | red_score: float 32 | blue_score: float 33 | red_rp_1: Optional[float] 34 | blue_rp_1: Optional[float] 35 | red_rp_2: Optional[float] 36 | blue_rp_2: Optional[float] 37 | red_rp_3: Optional[float] 38 | blue_rp_3: Optional[float] 39 | 40 | def __init__( 41 | self, 42 | win_prob: float, 43 | red_pred: AlliancePred, 44 | blue_pred: AlliancePred, 45 | ): 46 | self.win_prob = win_prob 47 | self.red_score = red_pred.score 48 | self.blue_score = blue_pred.score 49 | self.red_rp_1 = red_pred.rp_1 50 | self.blue_rp_1 = blue_pred.rp_1 51 | self.red_rp_2 = red_pred.rp_2 52 | self.blue_rp_2 = blue_pred.rp_2 53 | self.red_rp_3 = red_pred.rp_3 54 | self.blue_rp_3 = blue_pred.rp_3 55 | 56 | 57 | class Attribution: 58 | epa: Any 59 | 60 | def __init__(self, epa: Optional[Any] = None): 61 | self.epa = epa 62 | 63 | def __repr__(self): 64 | return f"Attribution({self.epa})" 65 | -------------------------------------------------------------------------------- /backend/src/site/helper.py: -------------------------------------------------------------------------------- 1 | import io 2 | import json 3 | import zlib 4 | from typing import Any 5 | 6 | from fastapi.responses import StreamingResponse 7 | 8 | 9 | # TODO: implement load testing and find optimal compression that doesn't make 10 | # the backend compute constrainted (ran into problems during 2024 season) 11 | def compress(x: Any) -> StreamingResponse: 12 | return StreamingResponse( 13 | io.BytesIO(zlib.compress(json.dumps(x).encode(), level=1)), 14 | media_type="application/octet-stream", 15 | ) 16 | -------------------------------------------------------------------------------- /backend/src/site/router.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from src.site.event import router as event_router 4 | from src.site.match import router as match_router 5 | from src.site.team import router as team_router 6 | from src.site.team_year import router as team_year_router 7 | 8 | router = APIRouter() 9 | router.include_router(match_router, tags=["match"]) 10 | router.include_router(event_router, tags=["event"]) 11 | router.include_router(team_year_router, tags=["team_year"]) 12 | router.include_router(team_router, tags=["team"]) 13 | 14 | 15 | 
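# Added note (illustrative, not part of the original router.py): the routers included
# above can return payloads wrapped by `compress` from src/site/helper.py, which streams
# a zlib-compressed JSON body. The client-side inverse is a plain inflate + JSON parse;
# a minimal round-trip sketch of that scheme using only the standard library:
#
#     import json, zlib
#     blob = zlib.compress(json.dumps({"hello": "world"}).encode(), level=1)
#     assert json.loads(zlib.decompress(blob)) == {"hello": "world"}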
@router.get("/") 16 | async def read_root(): 17 | return {"name": "Site V3 Router"} 18 | -------------------------------------------------------------------------------- /backend/src/site/team_year.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional 2 | 3 | from fastapi import APIRouter 4 | from fastapi.responses import Response 5 | 6 | from src.api import get_team_matches_cached, get_team_years_cached, get_year_cached 7 | from src.constants import CURR_YEAR 8 | from src.db.models import TeamMatch, TeamYear, Year 9 | 10 | # from src.site.helper import compress 11 | from src.utils.decorators import ( 12 | async_fail_gracefully_plural, 13 | async_fail_gracefully_singular, 14 | ) 15 | 16 | router = APIRouter() 17 | 18 | 19 | def _read_team_years( 20 | year: int, year_obj: Year, team_years: List[TeamYear] 21 | ) -> Dict[str, Any]: 22 | team_years = [x for x in team_years if x.count > 0 or year >= CURR_YEAR] 23 | 24 | return { 25 | "team_years": [x.to_dict() for x in team_years], 26 | "year": year_obj.to_dict(), 27 | } 28 | 29 | 30 | @router.get("/team_years/{year}") 31 | @async_fail_gracefully_singular 32 | async def read_team_years( 33 | response: Response, 34 | year: int, 35 | limit: Optional[int] = None, 36 | metric: Optional[str] = None, 37 | no_cache: bool = False, 38 | ) -> Any: 39 | year_obj: Optional[Year] = await get_year_cached(year=year, no_cache=no_cache) 40 | if year_obj is None: 41 | raise Exception("Year not found") 42 | 43 | team_years: List[TeamYear] = await get_team_years_cached( 44 | year=year, limit=limit, metric=metric, site=True, no_cache=no_cache 45 | ) 46 | 47 | return _read_team_years(year, year_obj, team_years) 48 | 49 | 50 | @router.get("/team_year/{year}/{team}/matches") 51 | @async_fail_gracefully_plural 52 | async def read_team_matches( 53 | response: Response, year: int, team: int, no_cache: bool = False 54 | ) -> Any: 55 | team_matches: List[TeamMatch] = await get_team_matches_cached( 56 | team=team, year=year, no_cache=no_cache 57 | ) 58 | 59 | team_matches = sorted(team_matches, key=lambda x: x.time) 60 | 61 | out = [x.to_dict() for x in team_matches] 62 | 63 | return out 64 | -------------------------------------------------------------------------------- /backend/src/tba/clean_data.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from src.tba.constants import CANADA_MAPPING, DISTRICT_MAPPING, USA_MAPPING 4 | from src.types.enums import CompLevel 5 | 6 | 7 | def clean_state(state: str) -> Optional[str]: 8 | if state in USA_MAPPING: 9 | return USA_MAPPING[state] 10 | if state in CANADA_MAPPING: 11 | return CANADA_MAPPING[state] 12 | if state in USA_MAPPING.values(): 13 | return state 14 | if state in CANADA_MAPPING.values(): 15 | return state 16 | return None 17 | 18 | 19 | def clean_district(district: Optional[str]) -> Optional[str]: 20 | if district in DISTRICT_MAPPING: 21 | return DISTRICT_MAPPING[district] 22 | return district 23 | 24 | 25 | def get_match_time( 26 | comp_level: CompLevel, set_number: int, match_number: int, event_time: int 27 | ) -> int: 28 | match_time = event_time # start value 29 | if comp_level == CompLevel.QUAL: 30 | match_time += match_number 31 | elif comp_level == CompLevel.EIGHTH: 32 | match_time += 200 + 10 * set_number + match_number 33 | elif comp_level == CompLevel.QUARTER: 34 | match_time += 300 + 10 * set_number + match_number 35 | elif comp_level == CompLevel.SEMI: 36 
| match_time += 400 + 10 * set_number + match_number 37 | elif comp_level == CompLevel.FINAL: 38 | match_time += 500 + match_number 39 | else: 40 | raise ValueError("Invalid comp_level: " + comp_level) 41 | return match_time 42 | -------------------------------------------------------------------------------- /backend/src/tba/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Any, Optional, Tuple, Union 3 | 4 | from requests import Session 5 | 6 | from src.tba.constants import AUTH_KEY 7 | from src.tba.utils import dump_cache, load_cache 8 | 9 | read_prefix = "https://www.thebluealliance.com/api/v3/" 10 | 11 | session = Session() 12 | session.headers.update({"X-TBA-Auth-Key": AUTH_KEY, "X-TBA-Auth-Id": ""}) 13 | 14 | 15 | def _get_tba( 16 | url: str, etag: Optional[str] = None 17 | ) -> Tuple[Union[Any, bool], Optional[str]]: 18 | if etag is not None: 19 | session.headers.update({"If-None-Match": etag}) 20 | response = session.get(read_prefix + url) 21 | if response.status_code == 304: 22 | return True, etag 23 | elif response.status_code == 200: 24 | return response.json(), response.headers.get("ETag") 25 | else: 26 | response = session.get(read_prefix + url) 27 | if response.status_code == 200: 28 | return response.json(), response.headers.get("ETag") 29 | return False, None 30 | 31 | 32 | def get_tba( 33 | url: str, etag: Optional[str] = None, cache: bool = True 34 | ) -> Tuple[Union[Any, bool], Optional[str]]: 35 | if cache and os.path.exists("cache/" + url + "/data.p"): 36 | # Cache Hit 37 | return load_cache("cache/" + url), None 38 | 39 | data, new_etag = _get_tba(url, etag) 40 | 41 | # Either Etag or Invalid 42 | if type(data) is bool: 43 | return data, new_etag 44 | 45 | # Cache Miss 46 | dump_cache("cache/" + url, data) 47 | return data, new_etag 48 | -------------------------------------------------------------------------------- /backend/src/tba/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from typing import Any 4 | 5 | 6 | def dump(path: str, data: Any): 7 | try: 8 | if not os.path.exists(path): 9 | os.makedirs(path) 10 | with open(path, "wb") as f: 11 | pickle.dump(data, f) 12 | except OSError: 13 | pass 14 | 15 | 16 | def load(file: str): 17 | with open(file, "rb") as f: 18 | return pickle.load(f) 19 | 20 | 21 | def dump_cache(path: str, data: Any): 22 | try: 23 | if not os.path.exists(path): 24 | os.makedirs(path) 25 | with open(path + "/data.p", "wb") as f: 26 | pickle.dump(data, f) 27 | except OSError: 28 | pass 29 | 30 | 31 | def load_cache(file: str): 32 | with open(file + "/data.p", "rb") as f: 33 | return pickle.load(f) 34 | -------------------------------------------------------------------------------- /backend/src/types/enums.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class MatchWinner(str, Enum): 5 | RED = "red" 6 | BLUE = "blue" 7 | TIE = "tie" 8 | 9 | 10 | class MatchStatus(str, Enum): 11 | UPCOMING = "Upcoming" 12 | COMPLETED = "Completed" 13 | 14 | 15 | class CompLevel(str, Enum): 16 | INVALID = "invalid" 17 | QUAL = "qm" 18 | EIGHTH = "ef" 19 | QUARTER = "qf" 20 | SEMI = "sf" 21 | FINAL = "f" 22 | 23 | 24 | class EventStatus(str, Enum): 25 | INVALID = "Invalid" 26 | UPCOMING = "Upcoming" 27 | ONGOING = "Ongoing" 28 | COMPLETED = "Completed" 29 | 30 | 31 | class EventType(str, Enum): 32 | INVALID = "invalid" 33 | REGIONAL = 
"regional" 34 | DISTRICT = "district" 35 | DISTRICT_CMP = "district_cmp" 36 | CHAMPS_DIV = "champs_div" 37 | EINSTEIN = "einstein" 38 | 39 | def is_champs(self: "EventType") -> bool: 40 | return self in (EventType.CHAMPS_DIV, EventType.EINSTEIN) 41 | -------------------------------------------------------------------------------- /backend/src/utils/alru_cache.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from functools import wraps 3 | from typing import ( 4 | Any, 5 | Awaitable, 6 | Callable, 7 | Dict, 8 | FrozenSet, 9 | List, 10 | ParamSpec, 11 | Tuple, 12 | TypeVar, 13 | ) 14 | 15 | Param = ParamSpec("Param") 16 | TOutput = TypeVar("TOutput") 17 | 18 | TKey = Tuple[Tuple[Any, ...], FrozenSet[Tuple[str, Any]]] 19 | 20 | 21 | def alru_cache(max_size: int = 8, ttl: timedelta = timedelta(minutes=1)): 22 | def decorator( 23 | func: Callable[Param, Awaitable[Tuple[bool, TOutput]]] 24 | ) -> Callable[Param, Awaitable[TOutput]]: 25 | cache: Dict[TKey, Tuple[datetime, TOutput]] = {} 26 | keys: List[TKey] = [] 27 | 28 | def in_cache(key: TKey) -> bool: 29 | # key not in cache 30 | if key not in cache: 31 | return False 32 | 33 | # key in cache but expired 34 | if datetime.now() - cache[key][0] > ttl: 35 | return False 36 | 37 | # key in cache and not expired 38 | return True 39 | 40 | def update_cache_and_return(key: TKey, flag: bool, value: TOutput) -> TOutput: 41 | # if flag = False, do not update cache and return value 42 | if not flag: 43 | return value 44 | 45 | # if flag = True, update cache 46 | now = datetime.now() 47 | cache[key] = (now, value) 48 | keys.append(key) 49 | 50 | # remove oldest key if cache is full 51 | if len(keys) > max_size: 52 | try: 53 | # Should not raise KeyError, but just in case 54 | del cache[keys.pop(0)] 55 | except KeyError: 56 | # Already deleted by another thread 57 | pass 58 | 59 | # return value from cache 60 | return value # equal to cache[key][1] 61 | 62 | @wraps(func) 63 | async def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> TOutput: 64 | key: TKey = tuple(args), frozenset( 65 | [(k, v) for k, v in kwargs.items() if k not in ["no_cache"]] 66 | ) 67 | if "no_cache" in kwargs and kwargs["no_cache"]: 68 | (flag, value) = await func(*args, **kwargs) 69 | return update_cache_and_return(key, flag, value) 70 | 71 | if in_cache(key): 72 | return cache[key][1] 73 | 74 | (flag, value) = await func(*args, **kwargs) 75 | return update_cache_and_return(key, flag, value) 76 | 77 | return wrapper 78 | 79 | return decorator 80 | -------------------------------------------------------------------------------- /backend/src/utils/decorators.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from functools import wraps 3 | from typing import Any, Callable, Dict, List 4 | 5 | from fastapi import Response, status 6 | 7 | 8 | def async_fail_gracefully_singular(func: Callable[..., Any]): 9 | @wraps(func) # needed to play nice with FastAPI decorator 10 | async def wrapper( 11 | response: Response, *args: List[Any], **kwargs: Dict[str, Any] 12 | ) -> Any: 13 | try: 14 | return await func(response, *args, **kwargs) 15 | except Exception as e: 16 | logging.exception(e) 17 | response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR 18 | return {} 19 | 20 | return wrapper 21 | 22 | 23 | def async_fail_gracefully_plural(func: Callable[..., Any]): 24 | @wraps(func) # needed to play nice with FastAPI decorator 25 | async def 
wrapper( 26 | response: Response, *args: List[Any], **kwargs: Dict[str, Any] 27 | ) -> Any: 28 | try: 29 | return await func(response, *args, **kwargs) 30 | except Exception as e: 31 | logging.exception(e) 32 | response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR 33 | return [] 34 | 35 | return wrapper 36 | -------------------------------------------------------------------------------- /backend/src/utils/hypothetical.py: -------------------------------------------------------------------------------- 1 | from typing import Any, List, Tuple 2 | 3 | import requests 4 | 5 | chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_" 6 | 7 | 8 | def compress(year: int, teams: List[int], match: int) -> str: 9 | year_binary = bin(year - 2000)[2:] 10 | match_binary = bin(match)[2:] 11 | prefix = year_binary.rjust(6, "0") + match_binary.rjust(7, "0") 12 | 13 | teams = sorted(teams) 14 | teams_binary = [int(bin(team)[2:]) for team in teams] 15 | lengths = [len(str(team)) for team in teams_binary] 16 | length_counts = [0 for _ in range(20)] 17 | for length in lengths: 18 | length_counts[length] += 1 19 | pos_lengths_binary = "".join(["1" if x > 0 else "0" for x in length_counts]) 20 | lengths_binary = "".join([bin(x)[2:].rjust(7, "0") for x in length_counts if x > 0]) 21 | 22 | binary_string = ( 23 | prefix 24 | + pos_lengths_binary 25 | + lengths_binary 26 | + "".join([str(team) for team in teams_binary]) 27 | ) 28 | 29 | binary_string += "0" * (6 - len(binary_string) % 6) 30 | 31 | string = "" 32 | for i in range(0, len(binary_string), 6): 33 | string += chars[int(binary_string[i : i + 6], 2)] 34 | 35 | return string 36 | 37 | 38 | def decompress(string: str) -> Tuple[int, List[int], int]: 39 | binary_string = "" 40 | for char in string: 41 | binary_string += bin(chars.index(char))[2:].rjust(6, "0") 42 | 43 | prefix = binary_string[:13] 44 | year = int(prefix[:6], 2) + 2000 45 | match = int(prefix[6:], 2) 46 | 47 | pos_lengths_binary = binary_string[13:33] 48 | lengths_binary = binary_string[33 : 33 + 7 * pos_lengths_binary.count("1")] 49 | teams_binary = binary_string[33 + 7 * pos_lengths_binary.count("1") :] 50 | 51 | lengths: List[int] = [] 52 | for i in range(len(pos_lengths_binary)): 53 | if pos_lengths_binary[i] == "1": 54 | lengths.append(int(lengths_binary[:7], 2)) 55 | lengths_binary = lengths_binary[7:] 56 | else: 57 | lengths.append(0) 58 | 59 | teams: List[int] = [] 60 | for i in range(len(lengths)): 61 | for _ in range(lengths[i]): 62 | teams.append(int(teams_binary[:i], 2)) 63 | teams_binary = teams_binary[i:] 64 | 65 | return year, teams, match 66 | 67 | 68 | def get_cheesy_schedule(num_teams: int, matches_per_team: int) -> List[Any]: 69 | data = requests.get( 70 | f"https://raw.githubusercontent.com/Team254/cheesy-arena/main/schedules/{num_teams}_{matches_per_team}.csv" 71 | ) 72 | lines = data.text.split("\n") 73 | lines = [[int(x) for x in line.split(",")[::2]] for line in lines[:-1]] 74 | return lines 75 | -------------------------------------------------------------------------------- /backend/src/utils/utils.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | 4 | def get_team_year_key(team: int, year: int) -> str: 5 | return str(team) + "_" + str(year) 6 | 7 | 8 | def get_team_event_key(team: int, event: str) -> str: 9 | return str(team) + "_" + event 10 | 11 | 12 | def get_team_match_key(team: int, match: str) -> str: 13 | return str(team) + "_" + match 14 | 15 | 16 | def get_match_name(key: str) 
-> str: 17 | if "_" in key: 18 | key = key.split("_")[1] 19 | 20 | if "qm" in key: 21 | return "Qual " + key.split("qm")[1] 22 | elif "ef" in key: 23 | set_num = key.split("ef")[1].split("m")[0] 24 | match_num = key.split("ef")[1].split("m")[1] 25 | return "Eighths " + set_num + " Match " + match_num 26 | elif "qf" in key: 27 | set_num = key.split("qf")[1].split("m")[0] 28 | match_num = key.split("qf")[1].split("m")[1] 29 | return "Quarters " + set_num + " Match " + match_num 30 | elif "sf" in key: 31 | set_num = key.split("sf")[1].split("m")[0] 32 | match_num = key.split("sf")[1].split("m")[1] 33 | return "Semis " + set_num + " Match " + match_num 34 | elif "f" in key: 35 | return "Finals Match " + key.split("f")[1].split("m")[1] 36 | 37 | raise Exception("Invalid match key") 38 | 39 | 40 | def get_match_number(key: str) -> int: 41 | if "_" in key: 42 | key = key.split("_")[1] 43 | 44 | if "qm" in key: 45 | return int(key.split("qm")[1]) 46 | elif "qf" in key: 47 | set_num = key.split("qf")[1].split("m")[0] 48 | match_num = key.split("qf")[1].split("m")[1] 49 | return 100 + 10 * int(set_num) + int(match_num) 50 | elif "sf" in key: 51 | set_num = key.split("sf")[1].split("m")[0] 52 | match_num = key.split("sf")[1].split("m")[1] 53 | return 200 + 10 * int(set_num) + int(match_num) 54 | elif "f" in key: 55 | return 300 + int(key.split("f")[1].split("m")[1]) 56 | 57 | raise Exception("Invalid match key") 58 | 59 | 60 | def r(x: float, n: int = 0) -> float: 61 | return int(x * (10**n) + 0.5) / (10**n) 62 | 63 | 64 | def is_uuid(s: str) -> bool: 65 | try: 66 | uuid.UUID(s) 67 | return True 68 | except ValueError: 69 | return False 70 | -------------------------------------------------------------------------------- /frontend/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "next/core-web-vitals" 3 | } 4 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | .pnpm-debug.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | 38 | # tailwind 39 | src/app/dist.css 40 | -------------------------------------------------------------------------------- /frontend/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "node_modules\\typescript\\lib", 3 | "typescript.enablePromptUseWorkspaceTsdk": true 4 | } 5 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # Statbotics Frontend 2 | 3 | The Statbotics frontend is a NextJS 13 (Beta, experimental `app/`) project that displays EPA data and allows users to explore and compare teams, events, and matches. The tech stack also includes TypeScript, Tailwind CSS, Nivo, and React Table. The frontend is deployed on Vercel. 4 | 5 | ## Setup 6 | 7 | Requires Node 18+. 8 | 9 | 1. 
Install dependencies: `yarn install` 10 | 2. Run the development server: `yarn dev` 11 | 3. Open the app at `localhost:3000` 12 | 4. Build the app: `yarn build` 13 | 5. Run the production server: `yarn start` 14 | 15 | ## Deployment 16 | 17 | The frontend is deployed on Vercel. Vercel automatically deploys the frontend upon pushing to the `master` branch. 18 | 19 | ## Structure 20 | 21 | The frontend is structured as follows: 22 | 23 | - `app/`: The NextJS app pages. 24 | - `(docs)`: The documentation pages, including the blog and API docs. 25 | - `(site)`: The main pages, including the team, event, and match pages. 26 | - `assets/`: Static assets (not used in production). 27 | - `components/`: React components including Figures, Tables, Filters, and more. 28 | - `docs/`: Informal documentation for the frontend (not used in production). 29 | - `public/`: Static assets used in production. 30 | 31 | ## Contributing 32 | 33 | Contributions are welcome! Please open an issue or pull request. 34 | -------------------------------------------------------------------------------- /frontend/assets/code.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/assets/code.PNG -------------------------------------------------------------------------------- /frontend/assets/data.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/assets/data.PNG -------------------------------------------------------------------------------- /frontend/assets/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/assets/logo512.png -------------------------------------------------------------------------------- /frontend/assets/logo_border.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/assets/logo_border.png -------------------------------------------------------------------------------- /frontend/assets/website.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/assets/website.PNG -------------------------------------------------------------------------------- /frontend/docs/bugs.MD: -------------------------------------------------------------------------------- 1 | ## Random Bugs (For posterity) 2 | 3 | - ~~NextJS 13 is Beta, 13.07 causes some issues with the build process and turbo but 13.06 works fine. 13.1 is out and provides better support for TailwindCSS but has a bug and fails in production. 13.1.1.canary fixes this. Simultaneously, react-table 8.7.4 has a bug in production regarding undefined variable. Downgrading to 8.7.0 fixes this with 13.0.6, but causes an issue (only in development) with NextJS 13.1.1.canary where there is a duplicate variable in the minified code. 13.1.1.canary with 8.7.4 does not resolve the react-table bug. Currently the best solution is to use NextJS 13.0.6 and react-table 8.7.0. 
Ideally we would use the latest versions of both.~~ (Updated to NextJS 13.2.1 and react-table 8.7.9) 4 | - NextJS fetch() has an issue above 15kb for NodeJS 16. Upgrading to NodeJS 18 changes the backend to undici which does not have this limitation. However, there is a Windows bug with localhost and we must switch to 127.0.0.1. If this continues to be an issue, can always use axios instead. 5 | - ~~Back button in dev mode does not work, instead causing the website to be downloaded as a file. This does not seem to occur in production, so we can likely ignore this.~~ (Resolved by NextJS 13.2.1) 6 | - Switched from Turbopack to Webpack to facilitate wasm-pack. Ended up not using wasm-pack, but Webpack handles TailwindCSS better right now so we will keep it. If we can later update to NextJS 13.1 or higher, we can switch back to Turbopack. (Update: Turbopack still fails with NextJS 13.2.1) 7 | - ~~Does not scroll to top of page after navigating to a new page. Relatively new NextJS issue, tracking here: https://github.com/vercel/next.js/issues/42492~~ (Resolved by NextJS 13.2.1) 8 | - Markdown LaTeX renders slowly in development mode, causing a hydration error. This is not a problem in production and does not interfere with local testing, so no workaround is implemented. 9 | - React-csv 2.2.0 fails in production (downloads entire website HTML). Downgrading to 2.0.3 solves this issue. 10 | -------------------------------------------------------------------------------- /frontend/next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = { 3 | reactStrictMode: false, 4 | images: { 5 | domains: [ 6 | // imgur 7 | "i.imgur.com", 8 | // instagram, through TBA 9 | "www.thebluealliance.com", 10 | ], 11 | }, 12 | env: { 13 | PROD: process.env.PROD || "false", 14 | }, 15 | }; 16 | 17 | module.exports = nextConfig; 18 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "NODE_OPTIONS=--max_old_space_size=3072 next dev", 7 | "dev-windows": "set NODE_OPTIONS=--max_old_space_size=3072 && next dev", 8 | "build": "next build", 9 | "start": "next start", 10 | "lint": "next lint", 11 | "prettier": "prettier --write src" 12 | }, 13 | "dependencies": { 14 | "@nivo/bar": "^0.80.0", 15 | "@nivo/core": "^0.80.0", 16 | "@nivo/line": "^0.80.0", 17 | "@nivo/pie": "^0.80.0", 18 | "@nivo/scatterplot": "^0.80.0", 19 | "@nivo/tooltip": "^0.80.0", 20 | "@tanstack/react-table": "^8.7.9", 21 | "@types/node": "18.11.17", 22 | "@types/react": "18.0.26", 23 | "@types/react-dom": "18.0.9", 24 | "daisyui": "^2.46.1", 25 | "gaussian": "^1.3.0", 26 | "highcharts": "^10.3.2", 27 | "highcharts-more": "^0.1.7", 28 | "highcharts-react-official": "^3.1.0", 29 | "idb-keyval": "^6.2.1", 30 | "next": "^13.5.5", 31 | "pako": "^2.1.0", 32 | "prop-types": "^15.8.1", 33 | "react": "^18.2.0", 34 | "react-csv": "2.0.3", 35 | "react-debounce-input": "^3.3.0", 36 | "react-dom": "^18.2.0", 37 | "react-icons": "^4.7.1", 38 | "react-markdown": "^8.0.4", 39 | "react-range": "^1.8.14", 40 | "react-select": "^5.7.0", 41 | "react-windowed-select": "^5.1.0", 42 | "regression": "^2.0.1", 43 | "rehype-katex": "^6.0.2", 44 | "remark-math": "^5.1.1", 45 | "sharp": "^0.31.3", 46 | "typescript": "4.9.4" 47 | }, 48 | 
"devDependencies": { 49 | "@tailwindcss/typography": "^0.5.8", 50 | "@trivago/prettier-plugin-sort-imports": "^4.0.0", 51 | "@types/pako": "^2.0.3", 52 | "autoprefixer": "^10.4.13", 53 | "eslint": "8.30.0", 54 | "eslint-config-next": "13.0.6", 55 | "postcss": "^8.4.20", 56 | "prettier": "^2.8.1", 57 | "tailwindcss": "^3.2.4" 58 | }, 59 | "prettier": { 60 | "plugins": [ 61 | "@trivago/prettier-plugin-sort-imports" 62 | ], 63 | "printWidth": 100, 64 | "tabWidth": 2, 65 | "useTabs": false, 66 | "semi": true, 67 | "importOrder": [ 68 | "^react", 69 | "^next", 70 | "^@", 71 | "^#", 72 | "^[./]" 73 | ], 74 | "importOrderSeparation": true, 75 | "importOrderSortSpecifiers": true 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /frontend/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /frontend/public/2022carv_sos.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/2022carv_sos.png -------------------------------------------------------------------------------- /frontend/public/2022hop_sos.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/2022hop_sos.png -------------------------------------------------------------------------------- /frontend/public/2022tur_sos.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/2022tur_sos.png -------------------------------------------------------------------------------- /frontend/public/bubble.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/bubble.png -------------------------------------------------------------------------------- /frontend/public/circ_favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/circ_favicon.ico -------------------------------------------------------------------------------- /frontend/public/exponnorm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/exponnorm.png -------------------------------------------------------------------------------- /frontend/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/favicon.ico -------------------------------------------------------------------------------- /frontend/public/match.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/match.png -------------------------------------------------------------------------------- /frontend/public/og_spline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/og_spline.png -------------------------------------------------------------------------------- /frontend/public/spline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/spline.png -------------------------------------------------------------------------------- /frontend/public/tba.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/tba.png -------------------------------------------------------------------------------- /frontend/public/team.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/team.png -------------------------------------------------------------------------------- /frontend/public/thrifty.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/thrifty.png -------------------------------------------------------------------------------- /frontend/public/wcp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avgupta456/statbotics/bfcd4b18171b2ab0a32a5e545966a66412bd424f/frontend/public/wcp.png -------------------------------------------------------------------------------- /frontend/src/api/event.tsx: -------------------------------------------------------------------------------- 1 | import { APITeamMatch } from "../types/api"; 2 | import { EventData } from "../types/data"; 3 | import query, { version } from "./storage"; 4 | 5 | export async function getEvent(event: string): Promise { 6 | const urlSuffix = `/event/${event}`; 7 | const storageKey = `event_${event}_${version}`; 8 | 9 | return query(storageKey, urlSuffix, true, 0, 60); // 1 minute 10 | } 11 | 12 | export async function getTeamEventTeamMatches( 13 | team: number, 14 | event: string 15 | ): Promise { 16 | const urlSuffix = `/event/${event}/team_matches/${team}`; 17 | const storageKey = `event_${event}_team_matches_${team}_${version}`; 18 | 19 | return query(storageKey, urlSuffix, false, 0, 60); // 1 minute 20 | } 21 | -------------------------------------------------------------------------------- /frontend/src/api/events.tsx: -------------------------------------------------------------------------------- 1 | import { CURR_YEAR } from "../constants"; 2 | import { APIEvent, APIYear } from "../types/api"; 3 | import query, { version } from "./storage"; 4 | 5 | // eslint-disable-next-line import/prefer-default-export 6 | export async function getYearEvents(year: number): Promise<{ year: APIYear; events: APIEvent[] }> { 7 | const urlSuffix = `/events/${year}`; 8 | const storageKey = `events_${year}_${version}`; 9 | return query(storageKey, urlSuffix, 
year === CURR_YEAR, 0, year === CURR_YEAR ? 60 : 60 * 60); // 1 minute / 1 hour 10 | } 11 | -------------------------------------------------------------------------------- /frontend/src/api/header.tsx: -------------------------------------------------------------------------------- 1 | import { APIShortEvent, APIShortTeam } from "../types/api"; 2 | import query, { version } from "./storage"; 3 | 4 | export async function getAllTeams(): Promise { 5 | return query(`full_team_list_${version}`, "/teams/all", true, 1000, 60 * 60 * 24 * 7); // 1 week expiry 6 | } 7 | 8 | export async function getAllEvents(): Promise { 9 | return query(`full_event_list_${version}`, "/events/all", true, 1000, 60 * 60 * 24 * 7); // 1 week expiry 10 | } 11 | -------------------------------------------------------------------------------- /frontend/src/api/match.tsx: -------------------------------------------------------------------------------- 1 | import { MatchData } from "../types/data"; 2 | import query, { version } from "./storage"; 3 | 4 | export async function getMatch(match: string): Promise { 5 | const urlSuffix = `/match/${match}`; 6 | const storageKey = `match_${match}_${version}`; 7 | 8 | return query(storageKey, urlSuffix, false, 0, 60); // 1 minute 9 | } 10 | -------------------------------------------------------------------------------- /frontend/src/api/matches.tsx: -------------------------------------------------------------------------------- 1 | import { CURR_YEAR } from "../constants"; 2 | import query, { version } from "./storage"; 3 | 4 | export async function getNoteworthyMatches( 5 | year: number, 6 | country: string | null, 7 | state: string | null, 8 | district: string | null, 9 | elim: string | null, 10 | week: number | null 11 | ) { 12 | let suffix = `/noteworthy_matches/${year}`; 13 | let storageKey = `noteworthy_matches_${year}_${version}`; 14 | 15 | let suffixes = []; 16 | if (country) { 17 | suffixes.push(`country=${country}`); 18 | storageKey += `_${country}`; 19 | } 20 | if (state) { 21 | suffixes.push(`state=${state}`); 22 | storageKey += `_${state}`; 23 | } 24 | if (district) { 25 | suffixes.push(`district=${district}`); 26 | storageKey += `_${district}`; 27 | } 28 | if (elim) { 29 | suffixes.push(`elim=${elim}`); 30 | storageKey += `_${elim}`; 31 | } 32 | if (week) { 33 | suffixes.push(`week=${week}`); 34 | storageKey += `_${week}`; 35 | } 36 | 37 | suffix += suffixes.length > 0 ? `?${suffixes.join("&")}` : ""; 38 | 39 | return query(storageKey, suffix, year === CURR_YEAR, 0, year === CURR_YEAR ? 
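// Filters are appended both to the request path (as query-string parameters) and to the
// IndexedDB cache key, so each filter combination is cached independently. For example
// (hypothetical filter values), country "USA" and week 3 in 2023 would request
//   /noteworthy_matches/2023?country=USA&week=3
// and cache it under noteworthy_matches_2023_v3_USA_3.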
60 : 60 * 60); // 1 minute / 1 hour 40 | } 41 | 42 | export async function getUpcomingMatches( 43 | country: string | null, 44 | state: string | null, 45 | district: string | null, 46 | elim: string | null, 47 | filterMatches: string | null, 48 | sortMatches: string 49 | ) { 50 | let suffix = `/upcoming_matches?limit=20&metric=${sortMatches}`; 51 | let storageKey = `upcoming_matches_${sortMatches}_${version}`; 52 | if (filterMatches) { 53 | suffix += `&minutes=${filterMatches}`; 54 | storageKey += `_${filterMatches}`; 55 | } 56 | if (country) { 57 | suffix += `&country=${country}`; 58 | storageKey += `_${country}`; 59 | } 60 | if (state) { 61 | suffix += `&state=${state}`; 62 | storageKey += `_${state}`; 63 | } 64 | if (district) { 65 | suffix += `&district=${district}`; 66 | storageKey += `_${district}`; 67 | } 68 | if (elim) { 69 | suffix += `&elim=${elim}`; 70 | storageKey += `_${elim}`; 71 | } 72 | return query(storageKey, suffix, true, 0, 60); // 1 minute 73 | } 74 | -------------------------------------------------------------------------------- /frontend/src/api/storage.tsx: -------------------------------------------------------------------------------- 1 | import { del, get, set } from "idb-keyval"; 2 | import pako from "pako"; 3 | 4 | import { BACKEND_URL, BUCKET_URL } from "../constants"; 5 | import { log, round } from "../utils"; 6 | 7 | export const version = "v3"; 8 | 9 | async function setWithExpiry(key: string, value: any, ttl: number) { 10 | const now = new Date(); 11 | 12 | try { 13 | await set(`${key}_expiry`, now.getTime() + 1000 * ttl); 14 | await set(key, value); 15 | } catch (e: any) { 16 | log("Error setting", e); 17 | } 18 | } 19 | 20 | async function getWithExpiry(key: string) { 21 | const expiry = await get(`${key}_expiry`); 22 | if (!expiry) { 23 | return null; 24 | } 25 | const now = new Date(); 26 | if (now.getTime() > expiry) { 27 | await del(`${key}_expiry`); 28 | await del(key); 29 | return null; 30 | } 31 | return get(key); 32 | } 33 | 34 | export function decompress(buffer: any) { 35 | const strData = pako.inflate(buffer, { to: "string" }); 36 | const data = JSON.parse(strData); 37 | return data; 38 | } 39 | 40 | async function query( 41 | storageKey: string, 42 | apiPath: string, 43 | checkBucket: boolean, 44 | minLength: number, 45 | expiry: number 46 | ) { 47 | const cacheData = await getWithExpiry(storageKey); 48 | if (cacheData && (minLength === 0 || cacheData?.length > minLength)) { 49 | log(`Used Local Storage: ${storageKey}`); 50 | return cacheData; 51 | } 52 | 53 | const start = performance.now(); 54 | 55 | let buffer = null; 56 | try { 57 | if (!checkBucket) { 58 | throw new Error("Skip bucket check"); 59 | } 60 | const fileName = apiPath.replace("?", ".").replace("&", "."); 61 | const res = await fetch(`${BUCKET_URL}${fileName}?t=${Date.now() / 1000 / 60}`, { 62 | next: { revalidate: 0 }, 63 | headers: { 64 | "Cache-Control": "no-cache", 65 | "Content-Type": "application/octet-stream", 66 | }, 67 | }); 68 | log(`${fileName} (bucket) took ${round(performance.now() - start, 0)}ms`); 69 | if (res.ok) { 70 | buffer = decompress(await res.arrayBuffer()); 71 | } else { 72 | throw new Error(`Failed to fetch from bucket: ${res.status}`); 73 | } 74 | } catch (e) { 75 | const res = await fetch(`${BACKEND_URL}${apiPath}`, { next: { revalidate: 0 } }); 76 | log(`${apiPath} (backend) took ${round(performance.now() - start, 0)}ms`); 77 | if (res.ok) { 78 | buffer = await res.json(); 79 | } 80 | } 81 | 82 | if (buffer) { 83 | await setWithExpiry(storageKey, 
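// Lookup order for every query: (1) IndexedDB, if a non-expired entry exists (and, when
// minLength > 0, only if the cached array is longer than minLength); (2) the static storage
// bucket, whose compressed payloads are decoded by decompress() above, tried only when
// checkBucket is true; (3) the backend API as a plain-JSON fallback. Whatever is fetched is
// written back to IndexedDB here with the caller-supplied TTL in seconds; if both fetches
// fail, query() resolves to undefined.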
buffer, expiry); 84 | return buffer; 85 | } 86 | } 87 | 88 | export default query; 89 | -------------------------------------------------------------------------------- /frontend/src/api/teams.tsx: -------------------------------------------------------------------------------- 1 | import { CURR_YEAR } from "../constants"; 2 | import { APITeamMatch, APITeamYear, APIYear } from "../types/api"; 3 | import query, { version } from "./storage"; 4 | 5 | export async function getYearTeamYears( 6 | year: number, 7 | limit?: number | null 8 | ): Promise<{ 9 | year: APIYear; 10 | team_years: APITeamYear[]; 11 | }> { 12 | let urlSuffix = `/team_years/${year}`; 13 | let storageKey = `team_years_${year}_${version}`; 14 | if (limit) { 15 | urlSuffix += `?limit=${limit}&metric=epa`; 16 | storageKey += `_${limit}`; 17 | } 18 | storageKey += "_v3"; 19 | 20 | return query(storageKey, urlSuffix, year === CURR_YEAR, 0, year === CURR_YEAR ? 60 : 60 * 60); // 1 minute / 1 hour 21 | } 22 | 23 | export async function getTeamYearTeamMatches( 24 | year: number, 25 | teamNum: number 26 | ): Promise { 27 | const urlSuffix = `/team_year/${year}/${teamNum}/matches`; 28 | const storageKey = `team_year_matches_${year}_${teamNum}_${version}`; 29 | 30 | return query(storageKey, urlSuffix, false, 0, year === CURR_YEAR ? 60 : 60 * 60); // 1 minute / 1 hour 31 | } 32 | -------------------------------------------------------------------------------- /frontend/src/components/Figures/Scatter.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import { line } from "d3-shape"; 4 | import regression from "regression"; 5 | 6 | import React from "react"; 7 | 8 | import { ResponsiveScatterPlot } from "@nivo/scatterplot"; 9 | 10 | import { round } from "../../utils"; 11 | 12 | const BestFitLine = ({ nodes, xScale, yScale }) => { 13 | nodes.sort((a, b) => a.data["x"] - b.data["x"]); 14 | const points = nodes.map(function (x, i) { 15 | return [x.data["x"], x.data["y"]]; 16 | }); 17 | 18 | let results1 = regression.logarithmic(points); 19 | let results2 = regression.linear(points); 20 | let results = results1; 21 | if (Math.abs(results2["r2"]) > Math.abs(results1["r2"])) { 22 | results = results2; 23 | } 24 | 25 | const lineGenerator = line() 26 | .x((x) => xScale(x.data["x"])) 27 | .y((x) => yScale(results.predict(x.data["x"])[1])); 28 | 29 | return ( 30 | 36 | ); 37 | }; 38 | 39 | const ScatterPlot = ({ data, axis }: { data: any[]; axis: string }) => { 40 | return ( 41 |
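// BestFitLine is injected as a custom nivo layer (see the layers prop below): it fits both a
// logarithmic and a linear regression to the plotted points and renders whichever has the
// larger |r^2|, using the chart's xScale/yScale to position the path.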
42 | { 48 | return ( 49 |
50 |
{`Team ${node["data"]["id"]}`}
51 |
{`Rank ${node["data"]["x"]}`}
52 |
{`${axis}: ${round( 53 | parseFloat(node["data"]["y"].toString()) 54 | )}`}
55 |
56 | ); 57 | }} 58 | colors={"rgb(55,126,184)"} 59 | blendMode="multiply" 60 | axisTop={null} 61 | axisRight={null} 62 | axisBottom={{ 63 | tickSize: 5, 64 | tickPadding: 5, 65 | tickRotation: 0, 66 | legend: "Actual Rank", 67 | legendPosition: "middle", 68 | legendOffset: 46, 69 | }} 70 | axisLeft={{ 71 | tickSize: 5, 72 | tickPadding: 5, 73 | tickRotation: 0, 74 | legend: `${axis}`, 75 | legendPosition: "middle", 76 | legendOffset: -60, 77 | }} 78 | useMesh={false} 79 | legends={[]} 80 | layers={["grid", "axes", BestFitLine, "nodes", "markers", "mesh", "legends", "annotations"]} 81 | /> 82 |
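// Example usage (hypothetical data; nivo expects series of the form { id, data: [{ x, y }] },
// and the tooltip above additionally reads an id field on each point):
//   <ScatterPlot
//     axis="EPA"
//     data={[{ id: "2023", data: [{ id: 254, x: 1, y: 95.4 }, { id: 1323, x: 2, y: 88.1 }] }]}
//   />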
83 | ); 84 | }; 85 | 86 | export default ScatterPlot; 87 | -------------------------------------------------------------------------------- /frontend/src/components/Figures/shared.tsx: -------------------------------------------------------------------------------- 1 | import { RP_NAMES } from "../../constants"; 2 | import { APITeamEvent, APITeamMatch, APITeamYear } from "../../types/api"; 3 | 4 | export type LineData = { 5 | id: string | number; 6 | data: { x: number; label: string; y: number }[]; 7 | }; 8 | 9 | export const getYAxisOptions = (year: number) => [ 10 | { 11 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 12 | teamYear?.epa?.breakdown?.total_points ?? 0, 13 | matchAccessor: (teamMatch: APITeamMatch) => teamMatch?.epa?.breakdown?.total_points ?? 0, 14 | value: "total_epa", 15 | label: "Total EPA", 16 | }, 17 | ...(year >= 2016 18 | ? [ 19 | { 20 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 21 | teamYear?.epa?.breakdown?.auto_points ?? 0, 22 | matchAccessor: (teamMatch: APITeamMatch) => teamMatch?.epa?.breakdown?.auto_points ?? 0, 23 | value: "auto_epa", 24 | label: "Auto EPA", 25 | }, 26 | { 27 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 28 | teamYear?.epa?.breakdown?.teleop_points ?? 0, 29 | matchAccessor: (teamMatch: APITeamMatch) => teamMatch?.epa?.breakdown?.teleop_points ?? 0, 30 | value: "teleop_epa", 31 | label: "Teleop EPA", 32 | }, 33 | { 34 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 35 | teamYear?.epa?.breakdown?.endgame_points ?? 0, 36 | matchAccessor: (teamMatch: APITeamMatch) => 37 | teamMatch?.epa?.breakdown?.endgame_points ?? 0, 38 | value: "endgame_epa", 39 | label: "Endgame EPA", 40 | }, 41 | { 42 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 43 | teamYear?.epa?.breakdown?.rp_1 ?? 0, 44 | matchAccessor: (teamMatch: APITeamMatch) => teamMatch?.epa?.breakdown?.rp_1 ?? 0, 45 | value: "rp_1_epa", 46 | label: `${RP_NAMES[year][0]} EPA`, 47 | }, 48 | { 49 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 50 | teamYear?.epa?.breakdown?.rp_2 ?? 0, 51 | matchAccessor: (teamMatch: APITeamMatch) => teamMatch?.epa?.breakdown?.rp_2 ?? 0, 52 | value: "rp_2_epa", 53 | label: `${RP_NAMES[year][1]} EPA`, 54 | }, 55 | ] 56 | : []), 57 | ...(year >= 2025 58 | ? [ 59 | { 60 | yearAccessor: (teamYear: APITeamYear | APITeamEvent) => 61 | teamYear?.epa?.breakdown?.rp_3 ?? 0, 62 | matchAccessor: (teamMatch: APITeamMatch) => teamMatch?.epa?.breakdown?.rp_3 ?? 
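// getYAxisOptions gates the selectable y-axis metrics by season: every year exposes Total EPA,
// seasons from 2016 onward add the auto/teleop/endgame breakdowns plus two ranking-point
// estimates, and 2025 onward adds a third ranking point (rp_3), with RP labels taken from
// RP_NAMES.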
0, 63 | value: "rp_3_epa", 64 | label: `${RP_NAMES[year][2]} EPA`, 65 | }, 66 | ] 67 | : []), 68 | ]; 69 | -------------------------------------------------------------------------------- /frontend/src/components/multiSelect.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { components } from "react-select"; 3 | 4 | import { Category10Colors } from "../constants"; 5 | 6 | export const Option = ({ children, ...props }) => { 7 | const { onMouseMove, onMouseOver, ...rest } = props.innerProps; 8 | const newProps: any = Object.assign(props, { innerProps: rest }); 9 | return {children}; 10 | }; 11 | 12 | export const multiSelectStyles = (getIndex: any) => ({ 13 | menu: (provided) => ({ ...provided, zIndex: 9999 }), 14 | multiValue: (styles, { data }: { data: any }) => { 15 | const index = getIndex(data.value) % Category10Colors.length; 16 | return { 17 | ...styles, 18 | backgroundColor: Category10Colors[index], 19 | }; 20 | }, 21 | multiValueLabel: (styles, { data }) => ({ 22 | ...styles, 23 | color: "#eee", 24 | }), 25 | multiValueRemove: (styles, { data }) => ({ 26 | ...styles, 27 | color: "#eee", 28 | ":hover": { 29 | color: "black", 30 | }, 31 | }), 32 | }); 33 | -------------------------------------------------------------------------------- /frontend/src/components/utils.tsx: -------------------------------------------------------------------------------- 1 | import { round } from "../utils"; 2 | 3 | export const compLevelFullNames = { 4 | qm: "Qualifications", 5 | ef: "Eighth Finals", 6 | qf: "Quarterfinals", 7 | sf: "Semifinals", 8 | f: "Finals", 9 | }; 10 | 11 | export const compLevelShortNames = { 12 | qm: "Quals", 13 | ef: "Eighths", 14 | qf: "Quarters", 15 | sf: "Semis", 16 | f: "Finals", 17 | }; 18 | 19 | export const formatMatch = (compLevel: string, matchNum: number, setNum: number) => { 20 | let displayMatch = `${compLevelShortNames[compLevel]} ${matchNum}`; 21 | if (compLevel !== "qm") { 22 | displayMatch = `${compLevelShortNames[compLevel]} ${setNum}-${matchNum}`; 23 | } 24 | 25 | return displayMatch; 26 | }; 27 | -------------------------------------------------------------------------------- /frontend/src/layouts/blogLayout.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const PageLayout = ({ 4 | title, 5 | lead, 6 | children, 7 | }: { 8 | title: string; 9 | lead: string; 10 | children: React.ReactNode; 11 | }) => { 12 | return ( 13 |
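// PageLayout (blogLayout) wraps a blog post with its title and lead paragraph.
// Example usage (hypothetical post component):
//   <PageLayout title="Strength of Schedule" lead="How can we use EPA to measure it?">
//     <PostBody />
//   </PageLayout>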
14 |
15 |

{title}

16 |

{lead}

17 | {children} 18 |
19 |
20 | ); 21 | }; 22 | 23 | export default PageLayout; 24 | -------------------------------------------------------------------------------- /frontend/src/layouts/siteLayout.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import React, { useState } from "react"; 4 | 5 | import { CURR_YEAR } from "../constants"; 6 | import { AppContext } from "../pagesContent/context"; 7 | import { EventsData, TeamYearsData } from "../types/data"; 8 | 9 | export default function Layout({ children }: { children: React.ReactNode }) { 10 | const [teamYearDataDict, setTeamYearDataDict] = useState<{ [key: number]: TeamYearsData }>({}); 11 | const [eventDataDict, setEventDataDict] = useState<{ [key: number]: EventsData }>({}); 12 | const [year, setYear] = useState(CURR_YEAR); 13 | 14 | return ( 15 | 25 | {children} 26 | 27 | ); 28 | } 29 | -------------------------------------------------------------------------------- /frontend/src/pages/404.tsx: -------------------------------------------------------------------------------- 1 | // pages/404.tsx 2 | import React, { useEffect } from "react"; 3 | 4 | import { useRouter } from "next/router"; 5 | 6 | import NotFound from "../pagesContent/shared/notFound"; 7 | 8 | const NotFoundPage = () => { 9 | const router = useRouter(); 10 | const path = router.asPath.split("/").filter(Boolean); 11 | 12 | useEffect(() => { 13 | if (path.length === 1) { 14 | const firstPath = path[0]; 15 | 16 | if (/^\d+$/.test(firstPath)) { 17 | router.replace(`/team/${firstPath}`); 18 | } else if (firstPath.includes("_")) { 19 | router.replace(`/match/${firstPath}`); 20 | } else { 21 | router.replace(`/event/${firstPath}`); 22 | } 23 | } 24 | }, [path, router]); 25 | 26 | return ; 27 | }; 28 | 29 | export default NotFoundPage; 30 | -------------------------------------------------------------------------------- /frontend/src/pages/blog/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import Link from "next/link"; 4 | 5 | export const metadata = { 6 | title: "Blog - Statbotics", 7 | }; 8 | 9 | const Page = () => { 10 | const posts = [ 11 | { 12 | title: "Statbotics V2", 13 | lead: "Statbotics has gone through some major changes this season. Let's take a look at what's new!", 14 | url: "/blog/v2", 15 | date: "2023-01-09", 16 | }, 17 | { 18 | title: "The EPA Model", 19 | lead: "The Expected Points Added (EPA) model builds upon the Elo rating system, but transforms ratings to point units and makes several modifications.", 20 | url: "/blog/epa", 21 | date: "2023-01-09", 22 | }, 23 | { 24 | title: "Evaluating FRC Rating Models", 25 | lead: "How do you choose between FRC rating models? We compare several models on three characteristics: predictive power, interpretability, and accessibility.", 26 | url: "/blog/models", 27 | date: "2023-01-09", 28 | }, 29 | { 30 | title: "The EPA Model: A Gentle Introduction", 31 | lead: "The math behind EPA can be a bit confusing. This page will help you understand the EPA model, how it works, and how to best use it. No math required!", 32 | url: "/blog/intro", 33 | date: "2023-03-16", 34 | }, 35 | { 36 | title: "Strength of Schedule", 37 | lead: "How can we use EPA to measure strength of schedule? We propose three metrics and briefly explore some results.", 38 | url: "/blog/sos", 39 | date: "2023-03-23", 40 | }, 41 | ]; 42 | 43 | return ( 44 |
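// posts is declared oldest-first and reversed at render time, so the newest entry
// ("Strength of Schedule", 2023-03-23) is listed first. Array.prototype.reverse() mutates in
// place, but the array is rebuilt on every render, so the mutation is harmless here.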
45 |
46 |
Statbotics Blog
47 |
48 | {posts.reverse().map((post) => ( 49 |
50 | 51 |
52 |
{post.title}
53 |
{post.date}
54 |
{post.lead}
55 |
56 | 57 |
58 | ))} 59 |
60 |
61 |
62 | ); 63 | }; 64 | 65 | export default Page; 66 | -------------------------------------------------------------------------------- /frontend/src/pages/blog/intro/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import PageContent from "../../../pagesContent/blog/intro/main"; 4 | 5 | export const metadata = { 6 | title: "The EPA Model: A Gentle Introduction - Statbotics", 7 | }; 8 | 9 | const Page = () => { 10 | return ; 11 | }; 12 | 13 | export default Page; 14 | -------------------------------------------------------------------------------- /frontend/src/pages/compare/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import SiteLayout from "../../layouts/siteLayout"; 4 | import Tabs from "../../pagesContent/compare/tabs"; 5 | 6 | export const metadata = { 7 | title: "Compare Teams - Statbotics", 8 | }; 9 | 10 | const Page = () => { 11 | return ( 12 | 13 | 14 | 15 | ); 16 | }; 17 | 18 | export default Page; 19 | -------------------------------------------------------------------------------- /frontend/src/pages/docs/python.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | export const metadata = { 4 | title: "Python API - Statbotics", 5 | }; 6 | 7 | const Page = () => { 8 | return