├── .env.example
├── .gitignore
├── .pre-commit-config.yaml
├── CODEOWNERS
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── extras
│   ├── db.sql
│   └── nginx.conf
├── migrations
│   ├── 001-country-row
│   │   ├── README.md
│   │   ├── go.mod
│   │   └── main.go
│   ├── 002-replay-prefixes
│   │   ├── README.md
│   │   └── main.py
│   ├── 003-delete-scorev2-scores
│   │   ├── README.md
│   │   └── migration.sql
│   ├── 004-remove-invalid-osu
│   │   └── main.py
│   ├── 005-add-pp-caps
│   │   └── migration.sql
│   ├── 006-add-s3-storage
│   │   ├── README.md
│   │   └── main.py
│   └── README.md
├── requirements
│   ├── dev.txt
│   └── main.txt
├── scripts
│   ├── await_service.sh
│   ├── bootstrap.sh
│   └── run_app.sh
└── ussr
    ├── app
    │   ├── __init__.py
    │   ├── api
    │   │   ├── __init__.py
    │   │   ├── coins.py
    │   │   ├── direct.py
    │   │   ├── error.py
    │   │   ├── lastfm.py
    │   │   ├── leaderboards.py
    │   │   ├── pp.py
    │   │   ├── rate.py
    │   │   ├── replays.py
    │   │   ├── score_sub.py
    │   │   ├── screenshots.py
    │   │   └── seasonals.py
    │   ├── constants
    │   │   ├── __init__.py
    │   │   ├── lastfm.py
    │   │   ├── leaderboard_type.py
    │   │   ├── mode.py
    │   │   ├── mods.py
    │   │   ├── privileges.py
    │   │   ├── ranked_status.py
    │   │   └── score_status.py
    │   ├── init_api.py
    │   ├── models
    │   │   ├── __init__.py
    │   │   ├── achievement.py
    │   │   ├── beatmap.py
    │   │   ├── score.py
    │   │   ├── stats.py
    │   │   └── user.py
    │   ├── objects
    │   │   ├── __init__.py
    │   │   ├── binary.py
    │   │   ├── leaderboard.py
    │   │   ├── oppai.py
    │   │   └── path.py
    │   ├── redis.py
    │   ├── state
    │   │   ├── __init__.py
    │   │   ├── cache.py
    │   │   ├── services.py
    │   │   └── storage.py
    │   ├── usecases
    │   │   ├── __init__.py
    │   │   ├── beatmap.py
    │   │   ├── clans.py
    │   │   ├── countries.py
    │   │   ├── discord.py
    │   │   ├── leaderboards.py
    │   │   ├── password.py
    │   │   ├── performance.py
    │   │   ├── pp_cap.py
    │   │   ├── privileges.py
    │   │   ├── score.py
    │   │   ├── stats.py
    │   │   ├── user.py
    │   │   ├── usernames.py
    │   │   ├── verified.py
    │   │   └── whitelist.py
    │   └── utils.py
    ├── logger.py
    ├── main.py
    └── settings.py
/.env.example:
--------------------------------------------------------------------------------
1 | # HTTP Configuration
2 | HTTP_PORT=2003
3 |
4 | # MySQL Database Configuration
5 | MYSQL_HOST=
6 | MYSQL_PORT=2002
7 | MYSQL_USER=
8 | MYSQL_DATABASE=
9 | MYSQL_PASSWORD=
10 |
11 | # Redis Configuration
12 | REDIS_HOST=
13 | REDIS_PORT=
14 | REDIS_PASSWORD=
15 | REDIS_DB=0
16 |
17 | # MeiliSearch Configuration
18 | MEILI_DIRECT=false
19 | MEILI_URL=http://localhost:2004
20 | MEILI_KEY=
21 |
22 | # Directories and URLs
23 | DATA_BEATMAP_DIRECTORY=/path/to/maps
24 | DATA_SCREENSHOT_DIRECTORY=/path/to/screenshots
25 | DATA_REPLAY_DIRECTORY=/path/to/replays
26 |
27 | # API Configuration
28 | API_KEYS_POOL= # can be empty or a comma-separated list of keys, e.g., key1,key2,key3
29 | API_FALLBACK_URL=https://catboy.best/osu
30 | API_OSU_FALLBACK_URL=https://catboy.best/api
31 | DIRECT_URL=https://catboy.best/api
32 |
33 | # Server Information
34 | SRV_URL=https://ussr.pl/
35 | SRV_NAME=RealistikOsu
36 | SRV_VERIFIED_BADGE=1000
37 | BOT_USER_ID=999
38 | CUSTOM_CLIENTS=false
39 |
40 | # Discord Configuration
41 | DISCORD_FIRST_PLACE=
42 | DISCORD_ADMIN_HOOK=
43 |
44 | # WebSocket Configuration
45 | WS_WRITE_KEY=randomise_this
46 |
47 | # Performance Service Configuration
48 | PERFORMANCE_SERVICE_URL=
49 |
50 | # S3 Configuration
51 | S3_ENABLED=false
52 | S3_BUCKET=
53 | S3_REGION=
54 | S3_ENDPOINT=
55 | S3_ACCESS_KEY=
56 | S3_SECRET_KEY=
57 |
--------------------------------------------------------------------------------
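The repository's actual `ussr/settings.py` is not part of this dump, but since `python-dotenv` is pinned in `requirements/main.txt` and the application imports a top-level `settings` module, loading this file probably looks roughly like the sketch below. The variable names come from `.env.example`; the `_bool` helper and the default values are illustrative, not taken from the real code.

```python
# Illustrative sketch only -- not the repository's real ussr/settings.py.
from __future__ import annotations

import os

from dotenv import load_dotenv

load_dotenv()  # read variables from a local `.env` file into the environment


def _bool(name: str, default: str = "false") -> bool:
    # Hypothetical helper for parsing boolean-ish values such as MEILI_DIRECT.
    return os.environ.get(name, default).lower() in ("1", "true", "yes")


HTTP_PORT = int(os.environ.get("HTTP_PORT", "2003"))
DATA_BEATMAP_DIRECTORY = os.environ.get("DATA_BEATMAP_DIRECTORY", ".data/maps")
DIRECT_URL = os.environ.get("DIRECT_URL", "https://catboy.best/api")
USE_MEILI_DIRECT = _bool("MEILI_DIRECT")
S3_ENABLED = _bool("S3_ENABLED")
```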
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | .vscode/
132 | test_data/
133 | .data/
134 | *.sum
135 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # See https://pre-commit.com for more information
2 | # See https://pre-commit.com/hooks.html for more hooks
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v4.5.0
6 | hooks:
7 | - id: check-ast
8 | - id: check-builtin-literals
9 | - id: check-yaml
10 | - id: debug-statements
11 | - id: end-of-file-fixer
12 | - id: requirements-txt-fixer
13 | - id: trailing-whitespace
14 | - repo: https://github.com/psf/black
15 | rev: 24.1.1
16 | hooks:
17 | - id: black
18 | - repo: https://github.com/asottile/pyupgrade
19 | rev: v3.15.0
20 | hooks:
21 | - id: pyupgrade
22 | args: [--py37-plus, --keep-runtime-typing]
23 | - repo: https://github.com/asottile/reorder-python-imports
24 | rev: v3.12.0
25 | hooks:
26 | - id: reorder-python-imports
27 | args: [--py37-plus, --add-import, 'from __future__ import annotations']
28 | - repo: https://github.com/asottile/add-trailing-comma
29 | rev: v3.1.0
30 | hooks:
31 | - id: add-trailing-comma
32 | - repo: https://github.com/asottile/blacken-docs
33 | rev: 1.16.0
34 | hooks:
35 | - id: blacken-docs
36 | additional_dependencies: [black==22.1.0]
37 |
38 | default_language_version:
39 | python: python3.9
40 |
--------------------------------------------------------------------------------
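To use these hooks locally (the `pre-commit` tool itself is listed in `requirements/dev.txt`), the standard workflow is:

```sh
pip install -r requirements/dev.txt   # installs pre-commit alongside the runtime deps
pre-commit install                    # register the git hook so checks run on every commit
pre-commit run --all-files            # optionally run every hook across the whole repository once
```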
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @RealistikDash @lenforiee
2 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9
2 |
3 | ENV PYTHONUNBUFFERED=1
4 |
5 | WORKDIR /app
6 |
7 | # Requirements
8 | COPY ./requirements/main.txt /app/requirements.txt
9 | RUN python3.9 -m pip install -r /app/requirements.txt
10 |
11 | # Scripts
12 | COPY ./scripts /app/scripts
13 |
14 | # Application.
15 | COPY ./ussr /app/ussr
16 |
17 | RUN chmod +x -R /app/scripts
18 | ENTRYPOINT [ "/app/scripts/bootstrap.sh" ]
19 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make
2 | build:
3 | docker build -t ussr:latest .
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # USSR
2 | Ultimate Score Server for RealistikOsu (well, not just us, but it makes the acronym work).
3 | *Also I wonder how long this name will last*.
4 |
5 |
6 | 
7 |
8 | ## What is this?
9 |
10 | The primary objective of the USSR is to serve as an almost drag-and-drop replacement for Ripple's [LETS](https://github.com/osuripple/lets) as a score server. For existing server owners, this means:
11 | - Usage of the usual Ripple database schema.
12 | - Full usage of the Ripple Redis API (pubsubs, keys etc).
13 | - Full support of the Ripple JSON API.
14 |
15 | All of this while also MASSIVELY improving upon LETS in the following areas:
16 | - Massive performance gains
17 | - Significantly more efficient with the database and other resources
18 | - Modern asynchronous Python architecture
19 | - Maintainable, modifiable codebase
20 |
21 | ## Real-world examples of USSR instances
22 | Don't believe us? There are plenty of servers already running their own instances of USSR!
23 |
24 | - [RealistikOsu!](https://ussr.pl/)
25 | - [Akatsuki](https://akatsuki.pw)
26 | - [Fuquila](https://fuquila.net/)
27 | - [Nekosu](https://nksu.gg/)
28 |
--------------------------------------------------------------------------------
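A rough quick-start sketch assembled from the files in this dump (`.env.example`, the `Makefile`, and the Docker `ENTRYPOINT`). The port mapping assumes the default `HTTP_PORT=2003`, and `SERVICE_READINESS_TIMEOUT` is passed explicitly because `scripts/bootstrap.sh` expects it but `.env.example` does not define it:

```sh
cp .env.example .env   # then fill in the MySQL, Redis, and mirror credentials
make build             # builds the ussr:latest image via the Makefile
docker run --env-file .env \
    -e SERVICE_READINESS_TIMEOUT=60 \
    -p 2003:2003 \
    ussr:latest
```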
/extras/nginx.conf:
--------------------------------------------------------------------------------
1 | # This just forwards non-ssl traffic to the SSL endpoints.
2 | server {
3 | listen 80;
4 | server_name osu.ussr.pl;
5 | return 301 https://osu.ussr.pl$request_uri;
6 | }
7 |
8 | # Main config for the score server.
9 | upstream ussr {
10 | server 127.0.0.1:2137 fail_timeout=0;
11 | }
12 |
13 | server {
14 | server_name osu.ussr.pl;
15 | listen 443 ssl;
16 | ssl_certificate /home/rosu/certs/fullchain.pem;
17 | ssl_certificate_key /home/rosu/certs/privkey.pem;
18 |
19 | # The osu! endpoints.
20 | location /web/ {
21 | proxy_set_header Host $host;
22 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
23 | proxy_set_header X-Forwarded-Proto https;
24 | proxy_set_header X-Real-IP $remote_addr;
25 | proxy_pass http://ussr;
26 | }
27 |
28 | # Another osu! endpoint which, for some reason, does not fall under /web/.
29 | # Perhaps could make it into a straight up Nginx redirect later.
30 | location /difficulty-rating {
31 | proxy_set_header Host $host;
32 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
33 | proxy_set_header X-Forwarded-Proto https;
34 | proxy_set_header X-Real-IP $remote_addr;
35 | proxy_pass http://ussr;
36 | }
37 |
38 | # Screenshots are all handled by Nginx for performance.
39 | location ~ ^/ss/(.*) {
40 | root /home/rosu/screenshots;
41 | add_header content-type "image/png";
42 | try_files /$1 =404;
43 | }
44 |
45 | # Beatmap Downloads handled by the beatmap mirror.
46 | location ~ ^/d/(.*) {
47 | return 301 https://catboy.best/d/$1;
48 | }
49 |
50 | # Optional: Redirect rest of the connections to our main domain. Don't do if
51 | # you use osu.domain as your main domain.
52 | location / {
53 | return 301 https://ussr.pl$request_uri;
54 | }
55 | }
56 |
57 | # For switcher support, just copy the config above and set the name to osu.ppy.sh
58 |
--------------------------------------------------------------------------------
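Per the comment at the end of the config above, switcher support is just a copy of the main server block with the name changed to `osu.ppy.sh`. A trimmed sketch, showing only the `/web/` location and reusing the same placeholder certificate paths as above:

```nginx
server {
    server_name osu.ppy.sh;
    listen 443 ssl;
    ssl_certificate /home/rosu/certs/fullchain.pem;
    ssl_certificate_key /home/rosu/certs/privkey.pem;

    location /web/ {
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto https;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_pass http://ussr;
    }
}
```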
/migrations/001-country-row/README.md:
--------------------------------------------------------------------------------
1 | # Country Row Migrator
2 |
3 | A database migration utility for the updated user country storage.
4 | 
5 |
6 | ### Pre-setup
7 |
8 | Firstly, you will have to run the query below to add the `country` column to your database.
9 |
10 | ```sql
11 | ALTER TABLE `users` ADD `country` VARCHAR(2) NOT NULL DEFAULT 'XX' AFTER `ban_reason`;
12 | ```
13 |
14 | Then, open the `main.go` file in an editor of your choice and set the SQL credentials for your database.
15 |
16 | ### Setting up the migrator
17 | Before you can run the migrator, you must install its dependencies.
18 |
19 | Run this command to install the required Go modules:
20 | ```sh
21 | go get
22 | ```
23 |
24 | ### Running the migrator
25 | Finally, you can run the migrator using the command
26 | ```sh
27 | go run .
28 | ```
29 |
30 | This will immediately start the migration utility.
31 |
--------------------------------------------------------------------------------
/migrations/001-country-row/go.mod:
--------------------------------------------------------------------------------
1 | module main
2 |
3 | go 1.17
4 |
5 | require github.com/go-sql-driver/mysql v1.6.0
6 |
7 | require github.com/jmoiron/sqlx v1.3.4
8 |
--------------------------------------------------------------------------------
/migrations/001-country-row/main.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | _ "github.com/go-sql-driver/mysql"
5 | "github.com/jmoiron/sqlx"
6 |
7 | "fmt"
8 | "log"
9 | "reflect"
10 | "sync"
11 | )
12 |
13 | // Edit these variables only.
14 | var SQL_HOST = "localhost"
15 | var SQL_USER = "root"
16 | var SQL_PASSWORD = ""
17 | var SQL_PORT = "3306"
18 | var SQL_DATABASE = "ripple"
19 |
20 | // Statistics variables, don't edit.
21 | var USERS_COUNT = 0
22 |
23 | var DB *sqlx.DB
24 |
25 | func updateCountry(idx int, usersList []int) {
26 |
27 | log.Println(fmt.Sprintf("[Thread #%d] Starting job...", idx))
28 |
29 | for _, userID := range usersList {
30 | var country string
31 | err := DB.Get(&country, "SELECT country FROM users_stats WHERE id = ?", userID)
32 | if err != nil {
33 | log.Println(err)
34 | continue
35 | }
36 | if country == "" {
37 | country = "XX"
38 | }
39 | _, err = DB.Exec("UPDATE users SET country = ? WHERE id = ?", country, userID)
40 | if err != nil {
41 | log.Println(err)
42 | continue
43 | }
44 | log.Println(fmt.Sprintf("[Thread #%d] Updated user id: %d", idx, userID))
45 | USERS_COUNT += 1 // NOTE: incremented from multiple goroutines without synchronisation; the final count may under-report slightly.
46 | }
47 | }
48 |
49 | func SplitToChunks(slice interface{}, chunkSize int) interface{} {
50 | sliceType := reflect.TypeOf(slice)
51 | sliceVal := reflect.ValueOf(slice)
52 | length := sliceVal.Len()
53 | if sliceType.Kind() != reflect.Slice {
54 | panic("parameter must be []T")
55 | }
56 | n := 0
57 | if length%chunkSize > 0 {
58 | n = 1
59 | }
60 | SST := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length/chunkSize+n)
61 | st, ed := 0, 0
62 | for st < length {
63 | ed = st + chunkSize
64 | if ed > length {
65 | ed = length
66 | }
67 | SST = reflect.Append(SST, sliceVal.Slice(st, ed))
68 | st = ed
69 | }
70 | return SST.Interface()
71 | }
72 |
73 | func main() {
74 | log.Println("Starting country field migrator!")
75 |
76 | log.Println("Connecting to database...")
77 | // initialise db
78 | dbDSN := fmt.Sprintf("%s:%s@(%s:%s)/%s", SQL_USER, SQL_PASSWORD, SQL_HOST, SQL_PORT, SQL_DATABASE)
79 | DB = sqlx.MustConnect("mysql", dbDSN+"?parseTime=true&allowNativePasswords=true")
80 | log.Println("Connection Initialised!")
81 |
82 | var userIDList []int
83 |
84 | log.Println("Fetching user IDs...")
85 | // get user IDs
86 | err := DB.Select(&userIDList, "SELECT id FROM users")
87 | if err != nil {
88 | panic(err)
89 | }
90 |
91 | var wg sync.WaitGroup
92 |
93 | // Start threads.
94 | for index, userChunk := range SplitToChunks(userIDList, 1000).([][]int) {
95 | wg.Add(1)
96 | go func(idx int, chunk []int) {
97 | defer wg.Done()
98 | updateCountry(idx+1, chunk)
99 | }(index, userChunk)
100 | }
101 | // Wait till all threads are finished.
102 | wg.Wait()
103 |
104 | log.Println(fmt.Sprintf("[Main] Finished. Initial Users: %d // Users updated: %d", len(userIDList), USERS_COUNT))
105 | }
106 |
--------------------------------------------------------------------------------
/migrations/002-replay-prefixes/README.md:
--------------------------------------------------------------------------------
1 | # Replay Prefix Adder
2 | This is a simple utility made to fix a bug caused by the 19/6/22 rewrite where new replays
3 | would be named incorrectly.
4 |
5 | ## Requirements
6 | This migration has the same exact requirements as USSR itself.
7 | - Python >=3.8
8 | - A previously generated USSR config with correct paths set.
9 |
10 | ## Running the migrator
11 | To run this migrator, just run the command
12 | ```sh
13 | python3 main.py
14 | ```
15 | replacing `python3` with your corresponding python executable.
16 |
17 | ## Important note!
18 | Do not move `main.py` from its directory! The migration makes a few assumptions based on the
19 | current working directory, so the paths it uses may not match up if it is moved.
20 |
--------------------------------------------------------------------------------
/migrations/002-replay-prefixes/main.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import glob
4 | import json
5 | import logging
6 | import os
7 | from typing import Any
8 |
9 | CONFIG_PATH = "config.json"
10 | REPLAY_PREFIXES = (
11 | "",
12 | "_relax",
13 | "_ap",
14 | )
15 |
16 |
17 | def load_json(path: str) -> dict[str, Any]:
18 | with open(path) as f:
19 | return json.load(f)
20 |
21 |
22 | def set_cwd() -> None:
23 | """Sets the CWD to the root USSR dir."""
24 |
25 | os.chdir("../../")
26 |
27 |
28 | def determine_full_path(path: str) -> str:
29 | return os.path.join(os.getcwd(), path) if not path.startswith("/") else path
30 |
31 |
32 | def main() -> int:
33 | logging.basicConfig(
34 | level=logging.INFO,
35 | )
36 | set_cwd()
37 | if not os.path.exists(CONFIG_PATH):
38 | logging.error(
39 | "The config file was not found! Make sure you have generated "
40 | "it prior to using this migration.",
41 | )
42 | return 1
43 |
44 | config = load_json(CONFIG_PATH)
45 | logging.info("Config successfully loaded!")
46 |
47 | data_dir = determine_full_path(config["data_dir"])
48 | if not os.path.exists(data_dir):
49 | logging.error("The data directory within the config file was not found!")
50 | return 1
51 |
52 | # Search for misnamed replays.
53 | for prefix in REPLAY_PREFIXES:
54 | replay_path = os.path.join(data_dir, "replays" + prefix)
55 | logging.info(f"Setting CWD to {replay_path}")
56 | os.chdir(replay_path)
57 |
58 | # Find all replays to rename.
59 | rename_replays = filter(
60 | lambda x: not x.startswith("replay_"),
61 | glob.glob("*.osr"),
62 | )
63 |
64 | for replay_name in rename_replays:
65 | new_name = f"replay_{replay_name}"
66 | logging.info(f"Renaming {replay_name} -> {new_name}")
67 | os.rename(replay_name, new_name)
68 |
69 | return 0
70 |
71 |
72 | if __name__ == "__main__":
73 | raise SystemExit(main())
74 |
--------------------------------------------------------------------------------
/migrations/003-delete-scorev2-scores/README.md:
--------------------------------------------------------------------------------
1 | # ScoreV2 score remover
2 | This simple migration corrects a bug in USSR where scores set with the "ScoreV2" mod were allowed to be submitted.
3 | ScoreV2 completely changes the scoring mechanics, disrupting all score-based leaderboards.
4 |
--------------------------------------------------------------------------------
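For context, the `536870912` constant filtered on in `migration.sql` below is the ScoreV2 bit of the osu! mods bitmask, i.e. `1 << 29`. A minimal Python illustration of the same check; plain integers are used here rather than the repository's `Mods` flag class, whose member names are not shown in this dump:

```python
SCOREV2 = 1 << 29  # == 536870912, the ScoreV2 mod bit


def is_scorev2(mods: int) -> bool:
    """Mirrors the SQL filter `mods & 536870912`."""
    return bool(mods & SCOREV2)


assert is_scorev2(536870912)
assert not is_scorev2(0)
```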
/migrations/003-delete-scorev2-scores/migration.sql:
--------------------------------------------------------------------------------
1 | -- Delete all scores with ScoreV2
2 | DELETE FROM scores WHERE mods & 536870912;
3 | DELETE FROM scores_relax WHERE mods & 536870912;
4 | DELETE FROM scores_ap WHERE mods & 536870912;
5 |
--------------------------------------------------------------------------------
/migrations/004-remove-invalid-osu/main.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import glob
4 | import json
5 | import logging
6 | import os
7 | from typing import Any
8 |
9 | CONFIG_PATH = "config.json"
10 |
11 |
12 | def load_json(path: str) -> dict[str, Any]:
13 | with open(path) as f:
14 | return json.load(f)
15 |
16 |
17 | def set_cwd() -> None:
18 | """Sets the CWD to the root USSR dir."""
19 |
20 | os.chdir("../../")
21 |
22 |
23 | def determine_full_path(path: str) -> str:
24 | return os.path.join(os.getcwd(), path) if not path.startswith("/") else path
25 |
26 |
27 | def main() -> int:
28 | logging.basicConfig(
29 | level=logging.INFO,
30 | )
31 | set_cwd()
32 | if not os.path.exists(CONFIG_PATH):
33 | logging.error(
34 | "The config file was not found! Make sure you have generated "
35 | "it prior to using this migration.",
36 | )
37 | return 1
38 |
39 | config = load_json(CONFIG_PATH)
40 | logging.info("Config successfully loaded!")
41 |
42 | data_dir = determine_full_path(config["data_dir"])
43 | if not os.path.exists(data_dir):
44 | logging.error("The data directory within the config file was not found!")
45 | return 1
46 |
47 | for path in glob.glob(os.path.join(data_dir, "maps", "*.osu")):
48 | with open(path, "rb") as f:
49 | if b"osu file format" not in f.read():
50 | logging.info(f"Removing invalid .osu file: {path}")
51 | os.remove(path)
52 |
53 |
54 | if __name__ == "__main__":
55 | raise SystemExit(main())
56 |
--------------------------------------------------------------------------------
/migrations/005-add-pp-caps/migration.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE pp_limits (
2 | mode tinyint(1) NOT NULL,
3 | relax tinyint(1) NOT NULL,
4 | pp int NOT NULL,
5 | flashlight_pp int NOT NULL
6 | );
7 |
8 | INSERT INTO pp_limits (mode, relax, pp, flashlight_pp)
9 | VALUES
10 | (0, 0, 700, 500),
11 | (0, 1, 1400, 1000),
12 | (0, 2, 650, 650), -- autopilot needs more thought one day
13 | (1, 0, 700, 500),
14 | (1, 1, 1200, 1000),
15 | (2, 0, 700, 500),
16 | (2, 1, 1000, 800),
17 | (3, 0, 1200, 600);
18 |
--------------------------------------------------------------------------------
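For reference, going by the inline comment above, the `relax` column distinguishes vanilla (0), relax (1), and autopilot (2) scores, while `mode` is the standard osu! game-mode index (0 = osu!, 1 = taiko, 2 = catch, 3 = mania). An illustrative lookup of the caps for relax osu!standard:

```sql
-- Illustrative query: fetch the PP caps for relax osu!standard.
SELECT pp, flashlight_pp
FROM pp_limits
WHERE mode = 0 AND relax = 1;
-- Returns 1400, 1000 with the values inserted above.
```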
/migrations/006-add-s3-storage/README.md:
--------------------------------------------------------------------------------
1 | # Replay mover
2 | This migration moves all replays into a single folder as part of the S3 storage implementation.
3 |
--------------------------------------------------------------------------------
/migrations/006-add-s3-storage/main.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import glob
4 | import json
5 | import logging
6 | import os
7 | from typing import Any
8 |
9 | CONFIG_PATH = "config.json"
10 |
11 |
12 | def load_json(path: str) -> dict[str, Any]:
13 | with open(path) as f:
14 | return json.load(f)
15 |
16 |
17 | def set_cwd() -> None:
18 | """Sets the CWD to the root USSR dir."""
19 |
20 | os.chdir("../../")
21 |
22 |
23 | def determine_full_path(path: str) -> str:
24 | return os.path.join(os.getcwd(), path) if not path.startswith("/") else path
25 |
26 |
27 | def main() -> int:
28 | logging.basicConfig(
29 | level=logging.INFO,
30 | )
31 | set_cwd()
32 | if not os.path.exists(CONFIG_PATH):
33 | logging.error(
34 | "The config file was not found! Make sure you have generated "
35 | "it prior to using this migration.",
36 | )
37 | return 1
38 |
39 | config = load_json(CONFIG_PATH)
40 | logging.info("Config successfully loaded!")
41 |
42 | data_dir = determine_full_path(config["data_dir"])
43 | if not os.path.exists(data_dir):
44 | logging.error("The data directory within the config file was not found!")
45 | return 1
46 |
47 | for folder in ("replays_relax", "replays_ap"):
48 | if not os.path.exists(os.path.join(data_dir, folder)):
49 | continue
50 |
51 | for path in glob.glob(os.path.join(data_dir, folder, "*.osr")):
52 | file = os.path.basename(path)
53 | os.rename(path, os.path.join(data_dir, "replays", file))
54 | logging.info(f"Moved {file}!")
55 |
56 |
57 | if __name__ == "__main__":
58 | raise SystemExit(main())
59 |
--------------------------------------------------------------------------------
/migrations/README.md:
--------------------------------------------------------------------------------
1 | ## What is this?
2 | This is a set of tools for updating older USSR databases (pre-20/6/22) to the schema used by modern USSR.
3 | Among other things, these migration utilities are responsible for moving user country data from the `users_stats` table to the `users` table.
4 | This change resolves a consistency error within the schema, alongside improving the performance of some queries which require the country alongside
5 | user data.
6 |
7 | Every folder has its own README so you won't get confused running it.
8 |
9 | ## Requirements
10 |
11 | The country-row migration utility requires `golang >= 1.17`, as it is written in Go with performance in mind; the remaining migrations require only Python (>= 3.8) or plain SQL.
12 |
--------------------------------------------------------------------------------
/requirements/dev.txt:
--------------------------------------------------------------------------------
1 | -r main.txt
2 | pre-commit
3 |
--------------------------------------------------------------------------------
/requirements/main.txt:
--------------------------------------------------------------------------------
1 | aiobotocore == 2.5.2
2 | aiodns
3 | aiohttp
4 | aioredis==2.0.1
5 | # databases likes to say that there's no extra asyncmy.
6 | asyncmy==0.2.2
7 | bcrypt==4.0.1
8 | Brotli
9 | cchardet
10 | databases[asyncmy]==0.5.5
11 | ddtrace
12 | fastapi
13 | meilisearch-python-async
14 | orjson
15 | py3rijndael
16 | python-dotenv
17 | python-json-logger==2.0.7
18 | python-multipart
19 | # Pinned as it breaks databases
20 | sqlalchemy==1.4.41
21 | types-aiobotocore[s3] == 2.5.2
22 | uvicorn
23 | uvloop
24 |
--------------------------------------------------------------------------------
/scripts/await_service.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -uo pipefail
3 |
4 | await_service()
5 | {
6 | local start_ts=$(date +%s)
7 | while [ $(date +%s) -lt $((start_ts + $3)) ];
8 | do
9 | (echo -n > /dev/tcp/$1/$2) > /dev/null 2>&1
10 | if [[ $? -eq 0 ]]; then
11 | break
12 | fi
13 | sleep 1
14 | done
15 | local end_ts=$(date +%s)
16 |
17 | if [ $(date +%s) -ge $((start_ts + $3)) ]; then
18 | echo "Timeout occurred while waiting for $1:$2 to become available"
19 | exit 1
20 | fi
21 |
22 | echo "$1:$2 is available after $((end_ts - start_ts)) seconds"
23 | }
24 |
25 | if [[ $# -ne 3 ]]; then
26 | echo "Usage: $0 "
27 | exit 1
28 | fi
29 |
30 | await_service $1 $2 $3
31 |
--------------------------------------------------------------------------------
/scripts/bootstrap.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | echo "Waiting for services to become available..."
5 |
6 | ./scripts/await_service.sh $MYSQL_HOST $MYSQL_PORT $SERVICE_READINESS_TIMEOUT
7 | ./scripts/await_service.sh $REDIS_HOST $REDIS_PORT $SERVICE_READINESS_TIMEOUT
8 |
9 | exec /app/scripts/run_app.sh
10 |
--------------------------------------------------------------------------------
/scripts/run_app.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | echo "Starting server..."
5 |
6 | cd /app/ussr/
7 |
8 | python3.9 main.py
9 |
--------------------------------------------------------------------------------
/ussr/app/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from . import api
4 | from . import constants
5 | from . import init_api
6 | from . import models
7 | from . import objects
8 | from . import redis
9 | from . import state
10 | from . import usecases
11 | from . import utils
12 |
--------------------------------------------------------------------------------
/ussr/app/api/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import settings
4 | from app.models.user import User
5 | from app.usecases.user import authenticate_user
6 | from fastapi import APIRouter
7 | from fastapi import Depends
8 | from fastapi import Path
9 | from fastapi import Query
10 | from fastapi import Request
11 | from fastapi import Response
12 | from fastapi.responses import ORJSONResponse
13 | from fastapi.responses import RedirectResponse
14 |
15 | from . import coins
16 | from . import direct
17 | from . import error
18 | from . import lastfm
19 | from . import leaderboards
20 | from . import pp
21 | from . import rate
22 | from . import replays
23 | from . import score_sub
24 | from . import screenshots
25 | from . import seasonals
26 |
27 | router = APIRouter(default_response_class=Response)
28 |
29 | router.add_api_route("/web/osu-osz2-getscores.php", leaderboards.get_leaderboard)
30 | router.add_api_route(
31 | "/web/osu-submit-modular-selector.php",
32 | score_sub.submit_score,
33 | methods=["POST"],
34 | )
35 |
36 | router.add_api_route(
37 | "/web/osu-screenshot.php",
38 | screenshots.upload_screenshot,
39 | methods=["POST"],
40 | )
41 |
42 | router.add_api_route("/web/osu-getreplay.php", replays.get_replay)
43 | router.add_api_route("/web/replays/{score_id}", replays.get_full_replay)
44 |
45 | if settings.USE_MEILI_DIRECT:
46 | router.add_api_route("/web/osu-search.php", direct.osu_direct_meili)
47 | else:
48 | router.add_api_route("/web/osu-search.php", direct.osu_direct_cheesegull)
49 |
50 | router.add_api_route("/web/osu-search-set.php", direct.beatmap_card)
51 | router.add_api_route("/d/{set_id}", direct.download_map)
52 |
53 | router.add_api_route("/web/osu-getseasonal.php", seasonals.get_seasonals)
54 |
55 | router.add_api_route("/web/lastfm.php", lastfm.lastfm, methods=["POST"])
56 |
57 | router.add_api_route(
58 | "/web/osu-error.php",
59 | error.error,
60 | methods=["POST"],
61 | )
62 |
63 | router.add_api_route("/web/osu-rate.php", rate.rate_map)
64 |
65 | router.add_api_route("/web/coins.php", coins.coins)
66 |
67 | router.add_api_route("/api/v1/pp", pp.calculate_pp)
68 |
69 |
70 | @router.get("/web/bancho-connect.php")
71 | async def bancho_connect():
72 | return b""
73 |
74 |
75 | @router.get("/p/doyoureallywanttoaskpeppy")
76 | async def peppy():
77 | return b"This is a peppy skill issue, please ignore."
78 |
79 |
80 | async def osu_redirect(request: Request, _: int = Path(...)):
81 | return RedirectResponse(
82 | url=f"https://osu.ppy.sh{request['path']}",
83 | status_code=301,
84 | )
85 |
86 |
87 | for pattern in (
88 | "/beatmapsets/{_}",
89 | "/beatmaps/{_}",
90 | "/community/forums/topics/{_}",
91 | "/web/maps/{_}",
92 | ):
93 | router.get(pattern)(osu_redirect)
94 |
95 |
96 | @router.post("/difficulty-rating")
97 | async def difficulty_rating(request: Request):
98 | return RedirectResponse(
99 | url=f"https://osu.ppy.sh{request['path']}",
100 | status_code=307,
101 | )
102 |
103 |
104 | @router.get("/web/osu-getfriends.php")
105 | async def get_friends(
106 | user: User = Depends(authenticate_user(Query, "u", "h")),
107 | ):
108 | return "\n".join(map(str, user.friends))
109 |
110 |
111 | @router.get("/api/v1/status")
112 | async def status_handler():
113 | return ORJSONResponse(
114 | {"status": 200, "server_status": 1},
115 | )
116 |
--------------------------------------------------------------------------------
/ussr/app/api/coins.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state.services
4 | import logger
5 | from app.models.user import User
6 | from app.usecases.user import authenticate_user
7 | from fastapi import Depends
8 | from fastapi import Query
9 |
10 |
11 | async def coins(
12 | user: User = Depends(authenticate_user(Query, "u", "h")),
13 | action: str = Query(...),
14 | count: int = Query(..., alias="c"),
15 | checksum: str = Query(..., alias="cs"),
16 | ):
17 | if action not in ("earn", "use", "recharge"):
18 | return str(user.coins)
19 |
20 | if action == "earn":
21 | logger.info(f"{user} has earned a coin.")
22 | user.coins += 1
23 | elif action == "use":
24 | logger.info(f"{user} has used a coin.")
25 | user.coins -= 1
26 | else: # recharge
27 | if user.coins <= 0:
28 | logger.info(f"{user} has recharged their coins.")
29 | user.coins = 1
30 |
31 | await app.state.services.database.execute(
32 | "UPDATE users SET coins = :coins WHERE id = :id",
33 | {"coins": user.coins, "id": user.id},
34 | )
35 |
36 | return str(user.coins)
37 |
--------------------------------------------------------------------------------
/ussr/app/api/direct.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import re
4 | from typing import Any
5 | from typing import Optional
6 | from urllib.parse import unquote_plus
7 |
8 | import app.state.services
9 | import app.usecases.beatmap
10 | import settings
11 | from app.constants.ranked_status import RankedStatus
12 | from app.models.user import User
13 | from app.usecases.user import authenticate_user
14 | from fastapi import Depends
15 | from fastapi import Path
16 | from fastapi import Query
17 | from fastapi import status
18 | from fastapi.responses import RedirectResponse
19 |
20 | USING_CHIMU = "https://api.chimu.moe/v1" == settings.DIRECT_URL
21 | CHIMU_SPELL = "SetId" if USING_CHIMU else "SetID"
22 |
23 | DIRECT_SET_INFO_FMTSTR = (
24 | "{chimu_spell}.osz|{Artist}|{Title}|{Creator}|"
25 | "{RankedStatus}|10.0|{LastUpdate}|{chimu_spell}|"
26 | "0|{HasVideo}|0|0|0|{diffs}"
27 | )
28 |
29 | DIRECT_MAP_INFO_FMTSTR = (
30 | "[{DifficultyRating:.2f}⭐] {DiffName} "
31 | "{{cs: {CS} / od: {OD} / ar: {AR} / hp: {HP}}}@{Mode}"
32 | )
33 |
34 | DIRECT_MAP_INFO_FMTSTR_MEILI = "{difficulty} od: {od} / ar: {ar}@{mode}"
35 |
36 | DIRECT_SET_INFO_FMTSTR_MEILI = (
37 | "{chimu_spell}.osz|{Artist}|{Title}|{Creator}|"
38 | "{RankedStatus}|{Rating}|0|{chimu_spell}|"
39 | "0|0|0|0|0|{diffs}"
40 | )
41 |
42 |
43 | async def osu_direct_meili(
44 | user: User = Depends(authenticate_user(Query, "u", "h")),
45 | ranked_status: int = Query(..., alias="r", ge=0, le=8),
46 | query: str = Query(..., alias="q"),
47 | mode: int = Query(..., alias="m", ge=-1, le=3),
48 | page_num: int = Query(..., alias="p"),
49 | ):
50 | # Special filters
51 | order = "play_count:desc"
52 | if query == "Top Rated":
53 | order = "avg_rating:desc"
54 | query = ""
55 | elif query == "Newest":
56 | order = "id:desc"
57 | query = ""
58 | elif query == "Newest":
59 | query = ""
60 |
61 | filters = []
62 |
63 | if ranked_status != 4:
64 | status = RankedStatus.from_direct(ranked_status)
65 | filters.append(f"status={status.value}")
66 |
67 | if mode != -1:
68 | filters.append(f"modes={mode}")
69 |
70 | index = app.state.services.meili.index("beatmaps")
71 | search_res = await index.search(
72 | query,
73 | offset=page_num * 100,
74 | limit=100,
75 | filter=filters,
76 | sort=[order],
77 | )
78 |
79 | # Response building
80 | res = [str(search_res.estimated_total_hits)]
81 |
82 | for beatmap_set in search_res.hits:
83 | diff_str = ",".join(
84 | DIRECT_MAP_INFO_FMTSTR_MEILI.format(
85 | difficulty=child["difficulty"],
86 | od=child["od"],
87 | ar=child["ar"],
88 | mode=child["mode"],
89 | )
90 | for child in beatmap_set["children"]
91 | )
92 | res.append(
93 | DIRECT_SET_INFO_FMTSTR_MEILI.format(
94 | chimu_spell=beatmap_set["id"],
95 | Artist=beatmap_set["artist"].replace("|", ""),
96 | Title=beatmap_set["title"].replace("|", ""),
97 | Creator=beatmap_set["creator"],
98 | RankedStatus=RankedStatus(beatmap_set["status"]).osu_direct,
99 | Rating=beatmap_set["avg_rating"],
100 | diffs=diff_str,
101 | ),
102 | )
103 |
104 | return "\n".join(res).encode()
105 |
106 |
107 | async def osu_direct_cheesegull(
108 | user: User = Depends(authenticate_user(Query, "u", "h")),
109 | ranked_status: int = Query(..., alias="r", ge=0, le=8),
110 | query: str = Query(..., alias="q"),
111 | mode: int = Query(..., alias="m", ge=-1, le=3),
112 | page_num: int = Query(..., alias="p"),
113 | ):
114 | search_url = f"{settings.DIRECT_URL}/search"
115 |
116 | params: dict[str, Any] = {"amount": 101, "offset": page_num}
117 |
118 | if unquote_plus(query) not in ("Newest", "Top Rated", "Most Played"):
119 | params["query"] = query
120 |
121 | if mode != -1:
122 | params["mode"] = mode
123 |
124 | if ranked_status != 4:
125 | params["status"] = RankedStatus.from_direct(ranked_status).osu_api
126 |
127 | async with app.state.services.http.get(search_url, params=params) as response:
128 | if response.status != status.HTTP_200_OK:
129 | return b"-1\nFailed to retrieve data from the beatmap mirror."
130 |
131 | result = await response.json()
132 |
133 | result_len = len(result)
134 | ret = [f"{'101' if result_len == 100 else result_len}"]
135 |
136 | for bmap in result:
137 | if not bmap["ChildrenBeatmaps"]:
138 | continue
139 |
140 | diff_sorted_maps = sorted(
141 | bmap["ChildrenBeatmaps"],
142 | key=lambda x: x["DifficultyRating"],
143 | )
144 |
145 | diffs_str = ",".join(
146 | DIRECT_MAP_INFO_FMTSTR.format(**bm) for bm in diff_sorted_maps
147 | )
148 |
149 | # Apparently beatmap names can contain the | character. Remove it as it
150 | # messes up the format. TODO: Look if I can urlencode it instead.
151 | bmap["Title"] = bmap["Title"].replace("|", "")
152 | bmap["Artist"] = bmap["Artist"].replace("|", "")
153 | bmap["Creator"] = bmap["Creator"].replace("|", "")
154 |
155 | ret.append(
156 | DIRECT_SET_INFO_FMTSTR.format(
157 | **bmap,
158 | diffs=diffs_str,
159 | chimu_spell=bmap[CHIMU_SPELL],
160 | ),
161 | )
162 |
163 | return "\n".join(ret).encode()
164 |
165 |
166 | async def beatmap_card(
167 | user: User = Depends(authenticate_user(Query, "u", "h")),
168 | map_set_id: Optional[int] = Query(None, alias="s"),
169 | map_id: Optional[int] = Query(None, alias="b"),
170 | ):
171 | if not map_set_id:
172 | bmap = await app.usecases.beatmap.fetch_by_id(map_id)
173 | if not bmap:
174 | return
175 |
176 | map_set_id = bmap.set_id
177 |
178 | url = f"{settings.DIRECT_URL}/{'set' if USING_CHIMU else 's'}/{map_set_id}"
179 | async with app.state.services.http.get(url) as response:
180 | if not response or response.status != 200:
181 | return
182 |
183 | result = await response.json()
184 |
185 | json_data = result["data"] if USING_CHIMU else result
186 |
187 | return (
188 | "{chimu_spell}.osz|{Artist}|{Title}|{Creator}|"
189 | "{RankedStatus}|10.0|{LastUpdate}|{chimu_spell}|"
190 | "0|0|0|0|0".format(**json_data, chimu_spell=json_data[CHIMU_SPELL])
191 | ).encode()
192 |
193 |
194 | async def download_map(set_id: str = Path(...)):
195 | domain = settings.DIRECT_URL.split("/")[2]
196 |
197 | return RedirectResponse(
198 | url=f"https://{domain}/d/{set_id}",
199 | status_code=status.HTTP_301_MOVED_PERMANENTLY,
200 | )
201 |
--------------------------------------------------------------------------------
/ussr/app/api/error.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import time
5 | from typing import Optional
6 |
7 | import app.state
8 | import app.usecases
9 | import logger
10 | from app.models.user import User
11 | from fastapi import Form
12 |
13 |
14 | async def log_user_error(
15 | user: User,
16 | stacktrace: Optional[str],
17 | config: str,
18 | version: str,
19 | exe_hash: str,
20 | ):
21 | await app.state.services.database.execute(
22 | "INSERT INTO client_err_logs (user_id, timestamp, traceback, config, osu_ver, osu_hash) "
23 | "VALUES (:id, :timestamp, :trace, :cfg, :ver, :hash)",
24 | {
25 | "id": user.id,
26 | "timestamp": int(time.time()),
27 | "trace": stacktrace,
28 | "cfg": config,
29 | "ver": version,
30 | "hash": exe_hash,
31 | },
32 | )
33 |
34 |
35 | async def error(
36 | user_id: int = Form(..., alias="i"),
37 | stacktrace: Optional[str] = Form(None),
38 | config: str = Form(...),
39 | version: str = Form(...),
40 | exehash: str = Form(...),
41 | ):
42 | user = await app.usecases.user.fetch_db_id(user_id)
43 | if not user:
44 | return
45 |
46 | logger.info(f"{user} has experienced a client exception!")
47 | asyncio.create_task(log_user_error(user, stacktrace, config, version, exehash))
48 |
--------------------------------------------------------------------------------
/ussr/app/api/lastfm.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import time
5 |
6 | import app.state
7 | import app.usecases
8 | import logger
9 | from app.constants.lastfm import LastFMFlags
10 | from app.models.user import User
11 | from app.usecases.user import authenticate_user
12 | from fastapi import Depends
13 | from fastapi import Query
14 |
15 | # Down to earth explanations for each flag to be understandable to
16 | # the average admin.
17 | _flag_expl = {
18 | LastFMFlags.TIMEWARP: "[LIKELY] Timewarp flag triggered (audio is desynced from expected position)! "
19 | "May be caused by lag on the user's end.",
20 | LastFMFlags.INCORRECT_MOD_VALUE: "[MIXED] The score's mod value didn't match enabled mods (possible "
21 | "sign of a mod remover such as Hidden remover).",
22 | LastFMFlags.MULTIPLE_OSU_CLIENTS: "[MIXED] The user had multiple instances of osu! open.",
23 | LastFMFlags.CHECKSUM_FAIL: "[LIKELY] The score related memory has been edited in a weird manner.",
24 | LastFMFlags.FLASHLIGHT_CHECKSUM_FAIL: "[UNKNOWN] FL checksum fail; its occurrence is unknown.",
25 | LastFMFlags.FLASHLIGHT_REMOVER: "[CERTAIN] User is using a flashlight remover.",
26 | LastFMFlags.WINDOW_OVERLAY: "[LIKELY] A transparent window is overlaying the osu! client.",
27 | LastFMFlags.FAST_PRESS: "[LIKELY] User is consistently hitting notes with a low latency in mania.",
28 | LastFMFlags.MOUSE_DISCREPENCY: "[LIKELY] Something is altering the mouse position info "
29 | "received by the game.",
30 | LastFMFlags.KB_DISCREPENCY: "[LIKELY] Something is altering the keyboard presses received by the game.",
31 | LastFMFlags.LF_FLAG_PRESENT: "[UNKNOWN] LF flag is present. The occurrence of this is unknown.",
32 | LastFMFlags.OSU_DEBUGGED: "[LIKELY] osu! is being debugged. Console attached to the process "
33 | "has been detected.",
34 | LastFMFlags.EXTRA_THREADS: "[LIKELY] A foreign thread has been detected attached to osu! This is a method "
35 | "usually used by cheats to run.",
36 | LastFMFlags.HQOSU_ASSEMBLY: "[CERTAIN] The HQOsu assembly has been detected.",
37 | LastFMFlags.HQOSU_FILE: "[MIXED] The presence of HQOsu files has been detected.",
38 | LastFMFlags.HQ_RELIFE: "[MIXED] HQOsu Relife traces found in registry. This means that the user has used the "
39 | "multiaccounting tool in the past, but may not be using it now.",
40 | LastFMFlags.AQN_SQL2LIB: "[CERTAIN] Ancient AQN library SQL2Lib detected.",
41 | LastFMFlags.AQN_LIBEAY32: "[CERTAIN] Use of ancient AQN version detected through library libeay32.dll",
42 | LastFMFlags.AQN_MENU_SOUND: "[CERTAIN] Use of ancient AQN version detected through menu sound.",
43 | }
44 |
45 | # Same as above but with ints to lookup.
46 | _flag_ints = {flag.value: expl for flag, expl in _flag_expl.items()}
47 |
48 |
49 | def get_flag_explanation(flag: LastFMFlags) -> list[str]:
50 | """Returns a list of strings explaining the meaning of all triggered
51 | flags."""
52 |
53 | flag_int = flag.value
54 |
55 | # Iterate over every single bit of `flag_int` and look up the meaning.
56 | res = []
57 | cur_bit = 0b1
58 | while cur_bit <= flag_int:
59 | if flag_int & cur_bit:
60 | text_append = _flag_ints.get(cur_bit)
61 |
62 | # The flag doesn't have an explanation available, add a repr.
63 | if not text_append:
64 | text_append = f"Undocumented Flag: {LastFMFlags(cur_bit)!r}"
65 |
66 | res.append(text_append)
67 |
68 | cur_bit <<= 1
69 |
70 | return res
71 |
72 |
73 | async def log_lastfm_flag(user_id: int, flag: int, flag_text: str) -> None:
74 | await app.state.services.database.execute(
75 | "INSERT INTO lastfm_flags (user_id, timestamp, flag_enum, flag_text) VALUES "
76 | "(:id, :timestamp, :flag, :flag_str)",
77 | {
78 | "id": user_id,
79 | "timestamp": int(time.time()),
80 | "flag": flag,
81 | "flag_str": flag_text,
82 | },
83 | )
84 |
85 |
86 | async def lastfm(
87 | user: User = Depends(authenticate_user(Query, "u", "h")),
88 | map_id_or_anticheat_flag: str = Query(..., alias="b"),
89 | ):
90 | if not map_id_or_anticheat_flag.startswith("a"):
91 | return b"-3"
92 |
93 | flags = LastFMFlags(int(map_id_or_anticheat_flag.removeprefix("a")))
94 | expl_str = "\n".join(get_flag_explanation(flags))
95 |
96 | asyncio.create_task(log_lastfm_flag(user.id, flags.value, expl_str))
97 |
98 | logger.info(f"{user} has been flagged with {flags!r}!\n{expl_str}")
99 | return b"-3"
100 |
--------------------------------------------------------------------------------
/ussr/app/api/leaderboards.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import time
4 | from typing import Optional
5 | from urllib.parse import unquote
6 |
7 | import app.state
8 | import app.usecases
9 | import app.utils
10 | import logger
11 | from app.constants.leaderboard_type import LeaderboardType
12 | from app.constants.mode import Mode
13 | from app.constants.mods import Mods
14 | from app.constants.privileges import Privileges
15 | from app.models.score import Score
16 | from app.models.user import User
17 | from app.usecases.user import authenticate_user
18 | from fastapi import Depends
19 | from fastapi import Query
20 |
21 | CUR_LB_VER = 4
22 |
23 |
24 | async def get_leaderboard(
25 | user: User = Depends(authenticate_user(Query, "us", "ha")),
26 | requesting_from_editor_song_select: bool = Query(..., alias="s"),
27 | leaderboard_version: int = Query(..., alias="vv"),
28 | leaderboard_type_arg: int = Query(..., alias="v", ge=0, le=4),
29 | map_md5: str = Query(..., alias="c", min_length=32, max_length=32),
30 | map_filename: str = Query(..., alias="f"),
31 | mode_arg: int = Query(..., alias="m", ge=0, le=3),
32 | map_set_id: int = Query(..., alias="i", ge=-1, le=2_147_483_647),
33 | mods_arg: int = Query(..., alias="mods", ge=0, le=2_147_483_647),
34 | map_package_hash: str = Query(..., alias="h"), # TODO: what to do with this?
35 | aqn_files_found: bool = Query(..., alias="a"), # TODO: what to do with this?
36 | leaderboard_pp: Optional[bool] = Query(None, alias="pp"),
37 | ):
38 | start = time.perf_counter_ns()
39 |
40 | if map_md5 in app.state.cache.UNSUBMITTED:
41 | return b"-1|false"
42 | elif map_md5 in app.state.cache.REQUIRES_UPDATE:
43 | return b"1|false"
44 |
45 | mode = Mode.from_lb(mode_arg, mods_arg)
46 | mods = Mods(mods_arg)
47 |
48 | if leaderboard_version != CUR_LB_VER:
49 | await app.usecases.user.restrict_user(
50 | user,
51 | "Bypassing client version protections.",
52 | "The leaderboard version for the current known latest osu! client is "
53 | f"{CUR_LB_VER}, but the client sent {leaderboard_version}. (leaderboard gate)",
54 | )
55 |
56 | has_set_id = map_set_id > 0
57 | if has_set_id:
58 | await app.usecases.beatmap.fetch_by_set_id(map_set_id)
59 |
60 | beatmap = await app.usecases.beatmap.fetch_by_md5(map_md5)
61 | if beatmap and beatmap.deserves_update:
62 | beatmap = await app.usecases.beatmap.update_beatmap(beatmap)
63 |
64 | if not beatmap:
65 | if has_set_id:
66 | app.state.cache.UNSUBMITTED.add(map_md5)
67 | app.usecases.beatmap.SET_CACHE.pop(map_set_id, None)
68 |
69 | return b"-1|false"
70 |
71 | filename = unquote(map_filename)
72 | if has_set_id:
73 | for bmap in app.usecases.beatmap.SET_CACHE[map_set_id]:
74 | if bmap.filename == filename:
75 | map_exists = True
76 | break
77 | else:
78 | map_exists = False
79 | else:
80 | map_exists = await app.state.services.database.fetch_val(
81 | "SELECT 1 FROM beatmaps WHERE file_name = :filename",
82 | {"filename": filename},
83 | )
84 |
85 | if map_exists:
86 | app.state.cache.REQUIRES_UPDATE.add(map_md5)
87 | return b"1|false"
88 | else:
89 | if map_md5 not in app.state.cache.UNSUBMITTED:
90 | app.state.cache.UNSUBMITTED.add(map_md5)
91 |
92 | return b"-1|false"
93 |
94 | if not beatmap.has_leaderboard and not user.privileges & Privileges.USER_DONOR:
95 | return f"{beatmap.status.value}|false".encode()
96 |
97 | response_lines: list[str] = []
98 |
99 | if requesting_from_editor_song_select:
100 | response_lines.append(beatmap.osu_string(score_count=0, rating=beatmap.rating))
101 | else:
102 | # real leaderboard, let's get some scores!
103 | leaderboard = await app.usecases.leaderboards.fetch(beatmap, mode)
104 |
105 | response_lines.append(
106 | beatmap.osu_string(
107 | score_count=len(leaderboard),
108 | rating=beatmap.rating,
109 | ),
110 | )
111 |
112 | personal_best = await leaderboard.find_user_score(user.id)
113 | if personal_best:
114 | response_lines.append(
115 | personal_best["score"].osu_string(
116 | user.name, personal_best["rank"], show_pp=leaderboard_pp,
117 | ),
118 | )
119 | else:
120 | response_lines.append("")
121 |
122 | leaderboard_type = LeaderboardType(leaderboard_type_arg)
123 |
124 | scores: list[Score] = []
125 | for score in await leaderboard.get_unrestricted_scores(user.id):
126 | if len(scores) >= 100: # max 100 scores on lb
127 | break
128 |
129 | if leaderboard_type == LeaderboardType.MODS and score.mods != mods:
130 | continue
131 |
132 | score_country = await app.usecases.countries.get_country(score.user_id)
133 | if (
134 | leaderboard_type == LeaderboardType.COUNTRY
135 | and score_country != user.country
136 | ):
137 | continue
138 |
139 | if (
140 | leaderboard_type == LeaderboardType.FRIENDS
141 | and score.user_id not in user.friends
142 | ):
143 | continue
144 |
145 | scores.append(score)
146 |
147 | # this double loop probably seems pointless
148 | # however it's necessary to be able to limit score count and get accurate ranking at the same time
149 | for idx, score in enumerate(scores):
150 | if score.user_id == user.id:
151 | displayed_name = user.name
152 | else:
153 | score_clan = await app.usecases.clans.fetch(score.user_id)
154 | score_username = await app.usecases.usernames.fetch(
155 | score.user_id,
156 | )
157 |
158 | if score_clan:
159 | displayed_name = f"[{score_clan}] {score_username}"
160 | else:
161 | displayed_name = score_username
162 |
163 | response_lines.append(
164 | score.osu_string(displayed_name, rank=idx + 1, show_pp=leaderboard_pp),
165 | )
166 |
167 | end = time.perf_counter_ns()
168 | formatted_time = app.utils.format_time(end - start)
169 | logger.info(
170 | f"Served {user} leaderboard for {beatmap.song_name} in {formatted_time}",
171 | )
172 |
173 | return "\n".join(response_lines).encode()
174 |
--------------------------------------------------------------------------------
/ussr/app/api/pp.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Optional
4 |
5 | import app.usecases
6 | import logger
7 | import settings
8 | from app.constants.mode import Mode
9 | from app.constants.mods import Mods
10 | from app.objects.path import Path
11 | from app.usecases.performance import PerformanceScore
12 | from fastapi import Query
13 | from fastapi import status
14 | from fastapi.responses import ORJSONResponse
15 |
16 | COMMON_PP_PERCENTAGES = (
17 | 100.0,
18 | 99.0,
19 | 98.0,
20 | 95.0,
21 | )
22 |
23 | MAPS_PATH = Path(settings.DATA_BEATMAP_DIRECTORY)
24 |
25 |
26 | async def calculate_pp(
27 | beatmap_id: int = Query(..., alias="b"),
28 | mods_arg: int = Query(0, alias="m"),
29 | mode_arg: int = Query(0, alias="g", ge=0, le=3),
30 | acc: Optional[float] = Query(None, alias="a"),
31 | combo: int = Query(0, alias="max_combo"),
32 | ):
33 | mods = Mods(mods_arg)
34 | mode = Mode.from_lb(mode_arg, mods_arg)
35 |
36 | use_common_pp_percentages = acc is None
37 |
38 | beatmap = await app.usecases.beatmap.id_from_api(beatmap_id, should_save=False)
39 | if not beatmap:
40 | return ORJSONResponse(
41 | content={"message": "Invalid/non-existent beatmap id."},
42 | status_code=status.HTTP_400_BAD_REQUEST,
43 | )
44 |
45 | combo = combo if combo else beatmap.max_combo
46 |
47 | file_path = MAPS_PATH / f"{beatmap.id}.osu"
48 | if not await app.usecases.performance.check_local_file(
49 | file_path,
50 | beatmap.id,
51 | beatmap.md5,
52 | ):
53 | return ORJSONResponse(
54 | content={"message": "Invalid/non-existent beatmap id."},
55 | status_code=status.HTTP_400_BAD_REQUEST,
56 | )
57 |
58 | star_rating = pp_result = 0.0
59 | if use_common_pp_percentages:
60 | pp_requests: list[PerformanceScore] = [
61 | {
62 | "beatmap_id": beatmap.id,
63 | "mode": mode.as_vn,
64 | "mods": mods,
65 | "max_combo": combo,
66 | "accuracy": accuracy,
67 | "miss_count": 0,
68 | }
69 | for accuracy in COMMON_PP_PERCENTAGES
70 | ]
71 |
72 | pp_result = [
73 | pp
74 | for pp, _ in await app.usecases.performance.calculate_performances(
75 | pp_requests,
76 | )
77 | ]
78 | else:
79 | pp_result, star_rating = await app.usecases.performance.calculate_performance(
80 | beatmap.id,
81 | mode,
82 | mods,
83 | combo,
84 | acc,
85 | 0, # miss count
86 | )
87 |
88 | logger.info(f"Handled PP calculation API request for {beatmap.song_name}!")
89 |
90 | return ORJSONResponse(
91 | {
92 | "status": 200,
93 | "message": "ok",
94 | "song_name": beatmap.song_name,
95 | "pp": pp_result,
96 | "length": beatmap.hit_length,
97 | "stars": star_rating, # TODO is this wrong for common values?
98 | "ar": beatmap.ar,
99 | "bpm": beatmap.bpm,
100 | },
101 | )
102 |
--------------------------------------------------------------------------------
/ussr/app/api/rate.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Optional
4 |
5 | import app.state
6 | import app.usecases
7 | import logger
8 | from app.models.beatmap import Beatmap
9 | from app.models.user import User
10 | from app.usecases.user import authenticate_user
11 | from fastapi import Depends
12 | from fastapi import Query
13 |
14 |
15 | async def check_user_rated(user: User, beatmap: Beatmap):
16 | return await app.state.services.database.fetch_val(
17 | "SELECT 1 FROM beatmaps_rating WHERE user_id = :uid AND beatmap_md5 = :md5",
18 | {"uid": user.id, "md5": beatmap.md5},
19 | )
20 |
21 |
22 | async def add_rating(user_id: int, map_md5: str, rating: int) -> float:
23 | await app.state.services.database.execute(
24 | "INSERT INTO beatmaps_rating (user_id, rating, beatmap_md5) VALUES (:id, :rating, :md5)",
25 | {"id": user_id, "rating": rating, "md5": map_md5},
26 | )
27 |
28 | new_rating = await app.state.services.database.fetch_val(
29 | "SELECT AVG(rating) FROM beatmaps_rating WHERE beatmap_md5 = :md5",
30 | {"md5": map_md5},
31 | )
32 |
33 | await app.state.services.database.execute(
34 | "UPDATE beatmaps SET rating = :rating WHERE beatmap_md5 = :md5",
35 | {"rating": new_rating, "md5": map_md5},
36 | )
37 |
38 | return new_rating
39 |
40 |
41 | async def rate_map(
42 | user: User = Depends(authenticate_user(Query, "u", "p")),
43 | map_md5: str = Query(..., alias="c"),
44 | rating: Optional[int] = Query(None, alias="v", ge=1, le=10),
45 | ):
46 | beatmap = await app.usecases.beatmap.fetch_by_md5(map_md5)
47 | if not beatmap:
48 | return b"no exist"
49 |
50 | if not beatmap.has_leaderboard:
51 | return b"not ranked"
52 |
53 | if await check_user_rated(user, beatmap):
54 | return f"alreadyvoted\n{beatmap.rating}".encode()
55 |
56 | if rating:
57 | new_rating = await add_rating(user.id, map_md5, rating)
58 | beatmap.rating = new_rating
59 |
60 | logger.info(
61 | f"{user} has rated {beatmap.song_name} with rating {rating} (new average: {new_rating:.2f})",
62 | )
63 | return f"alreadyvoting\n{new_rating:.2f}".encode()
64 | else:
65 | return b"ok"
66 |
--------------------------------------------------------------------------------
/ussr/app/api/replays.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 |
5 | import app.state
6 | import app.usecases
7 | import app.utils
8 | import logger
9 | from app.constants.mode import Mode
10 | from app.models.score import Score
11 | from fastapi import Path
12 | from fastapi import Query
13 | from fastapi import Response
14 | from fastapi.responses import FileResponse
15 |
16 |
17 | async def get_replay(
18 | score_id: int = Query(..., alias="c"),
19 | ):
20 | mode_rep = Mode.from_offset(score_id)
21 |
22 | db_score = await app.state.services.database.fetch_one(
23 | f"SELECT mods, play_mode, userid FROM {mode_rep.scores_table} WHERE id = :id",
24 | {"id": score_id},
25 | )
26 |
27 | if not db_score:
28 | logger.error(f"Requested non-existent replay ID {score_id}")
29 | return b"error: no"
30 |
31 | mode = Mode.from_lb(db_score["play_mode"], db_score["mods"])
32 | replay_bytes = await app.state.services.replay_storage.load(
33 | f"replay_{score_id}.osr",
34 | )
35 |
36 | if not replay_bytes:
37 | logger.error(f"Requested replay ID {score_id}, but no file could be found.")
38 | return b"error: no"
39 |
40 | asyncio.create_task(
41 | app.usecases.user.increment_replays_watched(db_score["userid"], mode),
42 | )
43 |
44 | logger.info(f"Served replay ID {score_id}")
45 | return Response(content=replay_bytes)
46 |
47 |
48 | def _make_not_found_resp(text: str) -> Response:
49 | return Response(
50 | content=text,
51 | media_type="text/plain",
52 | status_code=404,
53 | )
54 |
55 |
56 | async def get_full_replay(
57 | score_id: int = Path(...),
58 | ):
59 | mode_rep = Mode.from_offset(score_id)
60 |
61 | db_score = await app.state.services.database.fetch_one(
62 | f"SELECT * FROM {mode_rep.scores_table} WHERE id = :id",
63 | {"id": score_id},
64 | )
65 | if not db_score:
66 | return _make_not_found_resp("Score not found!")
67 |
68 | score = Score.from_dict(db_score)
69 |
70 | replay = await app.usecases.score.build_full_replay(score)
71 | if not replay:
72 | return _make_not_found_resp("Replay not found!")
73 |
74 | beatmap = await app.usecases.beatmap.fetch_by_md5(score.map_md5)
75 | if not beatmap:
76 | return _make_not_found_resp("Beatmap not found!")
77 |
78 | username = await app.usecases.usernames.fetch(score.user_id)
79 | if not username:
80 | return _make_not_found_resp("User not found!")
81 |
82 | filename = f"{username} - {beatmap.song_name} ({score_id}).osr"
83 |
84 | logger.info(f"Serving compiled replay ID {score_id}")
85 | return Response(
86 | content=bytes(replay.buffer),
87 | media_type="application/octet-stream",
88 | headers={"Content-Disposition": f"attachment; filename={filename}"},
89 | )
90 |
--------------------------------------------------------------------------------
/ussr/app/api/score_sub.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import time
5 | from base64 import b64decode
6 | from copy import copy
7 | from datetime import datetime
8 | from typing import NamedTuple
9 | from typing import Optional
10 | from typing import TypeVar
11 | from typing import Union
12 |
13 | import app.state
14 | import app.usecases.whitelist
15 | import app.utils
16 | import logger
17 | import settings
18 | from app.constants.mode import Mode
19 | from app.constants.mods import Mods
20 | from app.constants.privileges import Privileges
21 | from app.constants.ranked_status import RankedStatus
22 | from app.constants.score_status import ScoreStatus
23 | from app.models.score import Score
24 | from app.objects.path import Path
25 | from app.usecases.user import restrict_user
26 | from fastapi import File
27 | from fastapi import Form
28 | from fastapi import Header
29 | from fastapi import Request
30 | from fastapi.datastructures import FormData
31 | from py3rijndael import Pkcs7Padding
32 | from py3rijndael import RijndaelCbc
33 | from starlette.datastructures import UploadFile as StarletteUploadFile
34 |
35 |
36 | class ScoreData(NamedTuple):
37 | score_data_b64: bytes
38 | replay_file: StarletteUploadFile
39 |
40 |
41 | async def parse_form(score_data: FormData) -> Optional[ScoreData]:
42 | try:
43 | score_parts = score_data.getlist("score")
44 | assert len(score_parts) == 2, "Invalid score data"
45 |
46 |         score_data_b64 = score_parts[0]
47 | assert isinstance(score_data_b64, str), "Invalid score data"
48 |         replay_file = score_parts[1]
49 | assert isinstance(replay_file, StarletteUploadFile), "Invalid replay data"
50 | except AssertionError as exc:
51 | logger.warning(f"Failed to validate score multipart data: ({exc.args[0]})")
52 | return None
53 | else:
54 | return ScoreData(
55 | score_data_b64.encode(),
56 | replay_file,
57 | )
58 |
59 |
60 | class ScoreClientData(NamedTuple):
61 | score_data: list[str]
62 | client_hash_decoded: str
63 |
64 |
65 | def decrypt_score_data(
66 | score_data_b64: bytes,
67 | client_hash_b64: bytes,
68 | iv_b64: bytes,
69 | osu_version: str,
70 | ) -> tuple[list[str], str]:
71 | aes = RijndaelCbc(
72 | key=f"osu!-scoreburgr---------{osu_version}".encode(),
73 | iv=b64decode(iv_b64),
74 | padding=Pkcs7Padding(32),
75 | block_size=32,
76 | )
77 |
78 | score_data = aes.decrypt(b64decode(score_data_b64)).decode().split(":")
79 | client_hash_decoded = aes.decrypt(b64decode(client_hash_b64)).decode()
80 |
81 | return score_data, client_hash_decoded
82 |
83 |
84 | MAPS_PATH = Path(settings.DATA_BEATMAP_DIRECTORY)
85 |
86 |
87 | T = TypeVar("T", bound=Union[int, float])
88 |
89 |
90 | def chart_entry(name: str, before: Optional[T], after: T) -> str:
91 | return f"{name}Before:{before or ''}|{name}After:{after}"
92 |
93 |
94 | async def submit_score(
95 | request: Request,
96 | token: Optional[str] = Header(None),
97 | user_agent: str = Header(...),
98 | exited_out: bool = Form(..., alias="x"),
99 | fail_time: int = Form(..., alias="ft"),
100 | visual_settings_b64: bytes = Form(..., alias="fs"),
101 | updated_beatmap_hash: str = Form(..., alias="bmk"),
102 | storyboard_md5: Optional[str] = Form(None, alias="sbk"),
103 | iv_b64: bytes = Form(..., alias="iv"),
104 | unique_ids: str = Form(..., alias="c1"),
105 | score_time: int = Form(..., alias="st"),
106 | password_md5: str = Form(..., alias="pass"),
107 | osu_version: str = Form(..., alias="osuver"),
108 | client_hash_b64: bytes = Form(..., alias="s"),
109 | fl_cheat_screenshot: Optional[bytes] = File(None, alias="i"),
110 | ):
111 | start = time.perf_counter_ns()
112 |
113 | score_params = await parse_form(await request.form())
114 | if not score_params:
115 | return
116 |
117 | score_data_b64, replay_file = score_params
118 | score_data, _ = decrypt_score_data(
119 | score_data_b64,
120 | client_hash_b64,
121 | iv_b64,
122 | osu_version,
123 | )
124 |
125 | username = score_data[1].rstrip()
126 | if not (user := await app.usecases.user.auth_user(username, password_md5)):
127 | return # empty resp tells osu to retry
128 |
129 | beatmap_md5 = score_data[0]
130 | if not (beatmap := await app.usecases.beatmap.fetch_by_md5(beatmap_md5)):
131 | return b"error: beatmap"
132 |
133 | # Prohibit bot users from submitting scores.
134 | if user.privileges & Privileges.BOT_USER:
135 | return b"error: no"
136 |
137 | score = Score.from_submission(score_data[2:], beatmap_md5, user)
138 | leaderboard = await app.usecases.leaderboards.fetch(beatmap, score.mode)
139 |
140 | score.acc = app.usecases.score.calculate_accuracy(score)
141 | score.quit = exited_out
142 |
143 | await app.usecases.user.update_latest_activity(user.id)
144 |
145 | if not score.mods.rankable:
146 | return b"error: no"
147 |
148 | # This can be unreliable with devserver.
149 | # if not token and not config.custom_clients:
150 | # await app.usecases.user.restrict_user(
151 | # user,
152 | # "Tampering with osu!auth.",
153 | # "The client has not sent an anticheat token to the server, meaning "
154 | # "that they either have disabled the anticheat, or are using a custom/older "
155 | # "client. (score submit gate)",
156 | # )
157 |
158 | if user_agent != "osu!":
159 | await app.usecases.user.restrict_user(
160 | user,
161 | "Score submitter or other external client behaviour emulator",
162 | "The expected user-agent header for an osu! client is 'osu!', while "
163 | f"the client sent '{user_agent}'. (score submit gate)",
164 | )
165 |
166 | if score.mods.conflict:
167 | await app.usecases.user.restrict_user(
168 | user,
169 | "Illegal score mod combination.",
170 | "The user attempted to submit a score with the mod combination "
171 | f"+{score.mods!r}, which contains mutually exclusive/illegal mods. "
172 | "(score submit gate)",
173 | )
174 |
175 | osu_file_path = MAPS_PATH / f"{beatmap.id}.osu"
176 | if await app.usecases.performance.check_local_file(
177 | osu_file_path,
178 | beatmap.id,
179 | beatmap.md5,
180 | ):
181 | await app.usecases.performance.calculate_score(score, beatmap.id)
182 |
183 | if score.passed:
184 | old_best = await leaderboard.find_user_score(user.id)
185 |
186 | if old_best:
187 | score.old_best = old_best["score"]
188 |
189 | if score.old_best:
190 | score.old_best.rank = old_best["rank"]
191 |
192 | app.usecases.score.calculate_status(score)
193 | elif score.quit:
194 | score.status = ScoreStatus.QUIT
195 | else:
196 | score.status = ScoreStatus.FAILED
197 |
198 | score.time_elapsed = score_time if score.passed else fail_time
199 |
200 | if await app.state.services.database.fetch_val(
201 | (
202 | f"SELECT 1 FROM {score.mode.scores_table} WHERE userid = :id AND beatmap_md5 = :md5 AND score = :score "
203 | "AND play_mode = :mode AND mods = :mods"
204 | ),
205 | {
206 | "id": user.id,
207 | "md5": beatmap.md5,
208 | "score": score.score,
209 | "mode": score.mode.as_vn,
210 | "mods": score.mods.value,
211 | },
212 | ):
213 | # duplicate score detected
214 | return b"error: no"
215 |
216 | # update most played
217 | await app.state.services.database.execute(
218 | """\
219 | INSERT INTO users_beatmap_playcount (user_id, beatmap_id, game_mode, playcount)
220 | VALUES (:user_id, :beatmap_id, :game_mode, 1)
221 | ON DUPLICATE KEY UPDATE playcount = playcount + 1
222 | """,
223 | {
224 | "user_id": user.id,
225 | "beatmap_id": beatmap.id,
226 | "game_mode": score.mode.as_vn,
227 | },
228 | )
229 |
230 | if (
231 | beatmap.gives_pp
232 | and score.passed
233 | and score.pp
234 | > await app.usecases.pp_cap.get_pp_cap(
235 | score.mode,
236 | score.mods & Mods.FLASHLIGHT != 0,
237 | )
238 | ):
239 | # Separated from the previous clause to only call the pp cap function
240 | # when necessary.
241 | if not await app.usecases.verified.get_verified(
242 | user.id,
243 | ) and not await app.usecases.whitelist.is_whitelisted(user.id):
244 | await restrict_user(
245 | user,
246 |                 "Surpassing PP cap as unverified!",
247 | "The user attempted to submit a score with PP higher than the "
248 | f"PP cap. {beatmap.song_name} +{score.mods!r} ({score.pp:.2f}pp)"
249 | f" ID: {score.id} (score submit gate)",
250 | )
251 |
252 | if score.status == ScoreStatus.BEST:
253 | await app.state.services.database.execute(
254 | f"UPDATE {score.mode.scores_table} SET completed = 2 WHERE completed = 3 AND beatmap_md5 = :md5 AND userid = :id AND play_mode = :mode",
255 | {"md5": beatmap.md5, "id": user.id, "mode": score.mode.as_vn},
256 | )
257 |
258 | score.id = await app.state.services.database.execute(
259 | (
260 | f"INSERT INTO {score.mode.scores_table} (beatmap_md5, userid, score, max_combo, full_combo, mods, 300_count, 100_count, 50_count, katus_count, "
261 | "gekis_count, misses_count, time, play_mode, completed, accuracy, pp, playtime) VALUES "
262 | "(:beatmap_md5, :userid, :score, :max_combo, :full_combo, :mods, :300_count, :100_count, :50_count, :katus_count, "
263 | ":gekis_count, :misses_count, :time, :play_mode, :completed, :accuracy, :pp, :playtime)"
264 | ),
265 | score.db_dict,
266 | )
267 |
268 | if score.passed:
269 | replay_data = await replay_file.read()
270 |
271 | if len(replay_data) < 24:
272 | await restrict_user(
273 | user,
274 | "Score submit without replay.",
275 | "The user attempted to submit a completed score without a replay "
276 | "attached. This should NEVER happen and means they are likely using "
277 | "a replay editor. (score submit gate)",
278 | )
279 | else:
280 | await app.state.services.replay_storage.save(
281 | f"replay_{score.id}.osr",
282 | replay_data,
283 | )
284 |
285 | asyncio.create_task(app.usecases.beatmap.increment_playcount(beatmap))
286 | asyncio.create_task(app.usecases.user.increment_playtime(score, beatmap))
287 |
288 | stats = await app.usecases.stats.fetch(user.id, score.mode)
289 | assert stats is not None
290 |
291 | old_stats = copy(stats)
292 |
293 | stats.playcount += 1
294 | stats.total_score += score.score
295 | stats.total_hits += score.n300 + score.n100 + score.n50
296 |
297 | if score.passed and beatmap.has_leaderboard:
298 | if beatmap.status == RankedStatus.RANKED:
299 | stats.ranked_score += score.score
300 |
301 | if score.old_best and score.status == ScoreStatus.BEST:
302 | stats.ranked_score -= score.old_best.score
303 |
304 | if stats.max_combo < score.max_combo:
305 | stats.max_combo = score.max_combo
306 |
307 | if score.status == ScoreStatus.BEST and score.pp:
308 | await app.usecases.stats.full_recalc(stats)
309 |
310 | await leaderboard.add_score(score)
311 |
312 | await app.usecases.stats.save(stats)
313 |
314 | if (
315 | score.status == ScoreStatus.BEST
316 | and not user.privileges.is_restricted
317 | and old_stats.pp != stats.pp
318 | ):
319 | await app.usecases.stats.update_rank(stats)
320 |
321 | await app.usecases.stats.refresh_stats(user.id)
322 |
323 | if score.status == ScoreStatus.BEST:
324 | score.rank = await leaderboard.find_score_rank(score.user_id, score.id)
325 | elif score.status == ScoreStatus.SUBMITTED:
326 | score.rank = await leaderboard.whatif_placement(
327 | user.id,
328 | score.pp if score.mode > Mode.MANIA else score.score,
329 | )
330 |
331 | if (
332 | score.rank == 1
333 | and score.status == ScoreStatus.BEST
334 | and beatmap.has_leaderboard
335 | and not user.privileges.is_restricted
336 | ):
337 | asyncio.create_task(
338 | app.usecases.score.handle_first_place(
339 | score,
340 | beatmap,
341 | user,
342 | old_stats,
343 | stats,
344 | ),
345 | )
346 |
347 | asyncio.create_task(app.utils.notify_new_score(score.id))
348 |
349 | if score.old_best:
350 | beatmap_ranking_chart = (
351 | chart_entry("rank", score.old_best.rank, score.rank),
352 | chart_entry("rankedScore", score.old_best.score, score.score),
353 | chart_entry("totalScore", score.old_best.score, score.score),
354 | chart_entry("maxCombo", score.old_best.max_combo, score.max_combo),
355 | chart_entry("accuracy", round(score.old_best.acc, 2), round(score.acc, 2)),
356 | chart_entry("pp", round(score.old_best.pp, 2), round(score.pp, 2)),
357 | )
358 | else:
359 | beatmap_ranking_chart = (
360 | chart_entry("rank", None, score.rank),
361 | chart_entry("rankedScore", None, score.score),
362 | chart_entry("totalScore", None, score.score),
363 | chart_entry("maxCombo", None, score.max_combo),
364 | chart_entry("accuracy", None, round(score.acc, 2)),
365 | chart_entry("pp", None, round(score.pp, 2)),
366 | )
367 |
368 | overall_ranking_chart = (
369 | chart_entry("rank", old_stats.rank, stats.rank),
370 | chart_entry("rankedScore", old_stats.ranked_score, stats.ranked_score),
371 | chart_entry("totalScore", old_stats.total_score, stats.total_score),
372 | chart_entry("maxCombo", old_stats.max_combo, stats.max_combo),
373 | chart_entry("accuracy", round(old_stats.accuracy, 2), round(stats.accuracy, 2)),
374 | chart_entry("pp", old_stats.pp, stats.pp),
375 | )
376 |
377 | new_achievements: list[str] = []
378 | if score.passed and beatmap.has_leaderboard and not user.privileges.is_restricted:
379 | new_achievements = await app.usecases.score.unlock_achievements(score, stats)
380 |
381 | achievements_str = "/".join(new_achievements)
382 |
383 | submission_charts = [
384 | f"beatmapId:{beatmap.id}",
385 | f"beatmapSetId:{beatmap.set_id}",
386 | f"beatmapPlaycount:{beatmap.plays}",
387 | f"beatmapPasscount:{beatmap.passes}",
388 | f"approvedDate:{datetime.utcfromtimestamp(beatmap.last_update).strftime('%Y-%m-%d %H:%M:%S')}",
389 | "\n",
390 | "chartId:beatmap",
391 | f"chartUrl:{beatmap.set_url}",
392 | "chartName:Beatmap Ranking",
393 | *beatmap_ranking_chart,
394 | f"onlineScoreId:{score.id}",
395 | "\n",
396 | "chartId:overall",
397 | f"chartUrl:{user.url}",
398 | "chartName:Overall Ranking",
399 | *overall_ranking_chart,
400 | f"achievements-new:{achievements_str}",
401 | ]
402 |
403 | end = time.perf_counter_ns()
404 | formatted_time = app.utils.format_time(end - start)
405 | logger.info(
406 | f"{user} submitted a {score.pp:.2f}pp {score.mode!r} score on {beatmap.song_name} in {formatted_time}",
407 | )
408 |
409 | return "|".join(submission_charts).encode()
410 |
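For reference, `chart_entry` above builds the `<name>Before:`/`<name>After:` pairs that make up the two ranking charts in the submission response. A standalone sketch of its output (the helper is copied verbatim from above; the sample values are made up):

    def chart_entry(name, before, after):
        return f"{name}Before:{before or ''}|{name}After:{after}"

    print(chart_entry("rank", None, 5))       # rankBefore:|rankAfter:5
    print(chart_entry("pp", 312.52, 327.19))  # ppBefore:312.52|ppAfter:327.19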
--------------------------------------------------------------------------------
/ussr/app/api/screenshots.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import random
4 | import string
5 |
6 | import app.state
7 | import app.utils
8 | import logger
9 | import settings
10 | from app.models.user import User
11 | from app.objects.path import Path
12 | from app.usecases.user import authenticate_user
13 | from fastapi import Depends
14 | from fastapi import File
15 | from fastapi import Form
16 | from fastapi import Header
17 | from fastapi import Query
18 | from fastapi import UploadFile
19 |
20 | SS_DELAY = 10 # Seconds per screenshot.
21 | FS_LIMIT = 500_000 # Rosu screenshots don't exceed this.
22 | ERR_RESP = "https://c.ussr.pl/" # We do a lil trolley.
23 | SS_NAME_LEN = 8
24 |
25 | SS_PATH = Path(settings.DATA_SCREENSHOT_DIRECTORY)
26 |
27 |
28 | async def is_ratelimit(ip: str) -> bool:
29 | """Checks if an IP is ratelimited from taking screenshots. If not,
30 | it establishes the limit in Redis."""
31 |
32 | rl_key = "ussr:ss_limit:" + ip
33 | if await app.state.services.redis.get(rl_key):
34 | return True
35 |
36 | await app.state.services.redis.setex(rl_key, SS_DELAY, 1)
37 | return False
38 |
39 |
40 | AV_CHARS = string.ascii_letters + string.digits
41 |
42 |
43 | def gen_rand_str(length: int) -> str:
44 |     return "".join(random.choice(AV_CHARS) for _ in range(length))
45 |
46 |
47 | async def upload_screenshot(
48 | user: User = Depends(authenticate_user(Form, "u", "p")),
49 | screenshot_file: UploadFile = File(None, alias="ss"),
50 | user_agent: str = Header(...),
51 | x_forwarded_for: str = Header(...),
52 | ):
53 | if not await app.utils.check_online(user.id):
54 | logger.error(f"{user} tried to upload a screenshot while offline")
55 | return ERR_RESP
56 |
57 | if user_agent != "osu!":
58 | logger.error(f"{user} tried to upload a screenshot using a bot")
59 | return ERR_RESP
60 |
61 | if await is_ratelimit(x_forwarded_for):
62 | logger.error(f"{user} tried to upload a screenshot while ratelimited")
63 | return ERR_RESP
64 |
65 | content = await screenshot_file.read()
66 |     if len(content) > FS_LIMIT:
67 | return ERR_RESP
68 |
69 | if content[6:10] in (b"JFIF", b"Exif"):
70 | ext = "jpeg"
71 | elif content.startswith(b"\211PNG\r\n\032\n"):
72 | ext = "png"
73 | else:
74 | logger.error(f"{user} tried to upload unknown extension file")
75 | return ERR_RESP
76 |
77 | while True:
78 | file_name = f"{gen_rand_str(SS_NAME_LEN)}.{ext}"
79 |
80 | ss_path = SS_PATH / file_name
81 | if not ss_path.exists():
82 | break
83 |
84 | ss_path.write_bytes(content)
85 |
86 | logger.info(f"{user} has uploaded screenshot {file_name}")
87 | return file_name
88 |
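The extension check above keys off the JFIF/Exif marker at bytes 6-10 for JPEG and the standard 8-byte PNG signature. A standalone restatement of that logic (`sniff_extension` is a hypothetical name; the byte checks are the ones used above):

    from typing import Optional

    def sniff_extension(content: bytes) -> Optional[str]:
        # JPEGs carry "JFIF" or "Exif" at offset 6; PNGs start with a fixed signature.
        if content[6:10] in (b"JFIF", b"Exif"):
            return "jpeg"
        if content.startswith(b"\x89PNG\r\n\x1a\n"):
            return "png"
        return None

    assert sniff_extension(b"\x89PNG\r\n\x1a\n" + b"\x00" * 8) == "png"
    assert sniff_extension(b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01") == "jpeg"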
--------------------------------------------------------------------------------
/ussr/app/api/seasonals.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state
4 | from fastapi.responses import ORJSONResponse
5 |
6 |
7 | async def get_seasonals():
8 | db_seasonals = await app.state.services.database.fetch_all(
9 | "SELECT url FROM seasonal_bg WHERE enabled = 1",
10 | )
11 |
12 | return ORJSONResponse([seasonal["url"] for seasonal in db_seasonals])
13 |
--------------------------------------------------------------------------------
/ussr/app/constants/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from . import lastfm
4 | from . import leaderboard_type
5 | from . import mode
6 | from . import mods
7 | from . import privileges
8 | from . import ranked_status
9 | from . import score_status
10 |
--------------------------------------------------------------------------------
/ussr/app/constants/lastfm.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from enum import IntFlag
4 | 
5 | 
6 | class LastFMFlags(IntFlag):
7 | """Bitwise enum flags for osu's LastFM anticheat flags (aka `BadFlags`)."""
8 |
9 | # 2016 Anticheat (from source)
10 | TIMEWARP = (
11 | 1 << 1
12 | ) # Saw this one get triggered during intense lag. Compares song speed to time elapsed.
13 | INCORRECT_MOD_VALUE = 1 << 2 # Cheat attempted to alter the mod values incorrectly
14 | MULTIPLE_OSU_CLIENTS = 1 << 3
15 | CHECKSUM_FAIL = 1 << 4 # Cheats that modify memory to unrealistic values.
16 | FLASHLIGHT_CHECKSUM_FAIL = 1 << 5
17 |
18 | # These 2 are server side
19 | OSU_EXE_CHECKSUM_FAIL = 1 << 6
20 | MISSING_PROCESS = 1 << 7
21 |
22 | FLASHLIGHT_REMOVER = 1 << 8 # Checks actual pixels on the screen
23 | AUTOSPIN_HACK = 1 << 9 # Unused in 2016 src
24 | WINDOW_OVERLAY = 1 << 10 # There is a transparent window overlaying osu (cheat uis)
25 |     FAST_PRESS = 1 << 11  # Mania only; not fully understood.
26 |
27 | # These check if there is something altering the cursor pos/kb being received
28 | # through comparing the raw input.
29 | MOUSE_DISCREPENCY = 1 << 12
30 | KB_DISCREPENCY = 1 << 13
31 |
32 | # These are taken from `gulag` https://github.com/cmyui/gulag/blob/master/constants/clientflags.py
33 | # They relate to the new 2019 lastfm extension introducing measures against AQN and HQOsu.
34 | LF_FLAG_PRESENT = 1 << 14
35 | OSU_DEBUGGED = 1 << 15 # A console attached to the osu process is running.
36 | EXTRA_THREADS = (
37 | 1 << 16
38 | )  # osu! cheats usually create a new thread to run the cheat; this aims to detect them.
39 |
40 | # HQOsu specific ones.
41 | HQOSU_ASSEMBLY = 1 << 17
42 | HQOSU_FILE = 1 << 18
43 | HQ_RELIFE = 1 << 19 # Detects registry edits left by Relife
44 |
45 | # (Outdated) AQN detection methods
46 | AQN_SQL2LIB = 1 << 20
47 | AQN_LIBEAY32 = 1 << 21
48 | AQN_MENU_SOUND = 1 << 22
49 |
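Because the values are bit flags, a single reported integer can carry several of them at once; decoding is just a matter of testing each bit. A quick sketch, assuming the module imports as `app.constants.lastfm`:

    from app.constants.lastfm import LastFMFlags

    reported = LastFMFlags.LF_FLAG_PRESENT | LastFMFlags.HQOSU_ASSEMBLY
    triggered = [flag.name for flag in LastFMFlags if reported & flag]
    print(triggered)  # ['LF_FLAG_PRESENT', 'HQOSU_ASSEMBLY']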
--------------------------------------------------------------------------------
/ussr/app/constants/leaderboard_type.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from enum import IntEnum
4 |
5 |
6 | class LeaderboardType(IntEnum):
7 | LOCAL = 0
8 | TOP = 1
9 | MODS = 2
10 | FRIENDS = 3
11 | COUNTRY = 4
12 |
--------------------------------------------------------------------------------
/ussr/app/constants/mode.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from enum import IntEnum
4 | from functools import cached_property
5 |
6 | from app.constants.mods import Mods
7 |
8 | mode_str = (
9 | "osu!std",
10 | "osu!taiko",
11 | "osu!catch",
12 | "osu!mania",
13 | "std!rx",
14 | "taiko!rx",
15 | "catch!rx",
16 | "std!ap",
17 | )
18 |
19 | RELAX_OFFSET = 1_073_741_823
20 | AP_OFFSET = 2_000_000_000
21 |
22 |
23 | class Mode(IntEnum):
24 | STD = 0
25 | TAIKO = 1
26 | CATCH = 2
27 | MANIA = 3
28 |
29 | STD_RX = 4
30 | TAIKO_RX = 5
31 | CATCH_RX = 6
32 | STD_AP = 7
33 |
34 | def __repr__(self) -> str:
35 | return mode_str[self.value]
36 |
37 | @cached_property
38 | def as_vn(self) -> int:
39 | if self.value in (0, 4, 7):
40 | return 0
41 | elif self.value in (1, 5):
42 | return 1
43 | elif self.value in (2, 6):
44 | return 2
45 | else:
46 | return self.value
47 |
48 | @cached_property
49 | def relax(self) -> bool:
50 | return self.value > 3 and self.value != 7
51 |
52 | @cached_property
53 | def autopilot(self) -> bool:
54 | return self.value == 7
55 |
56 | @cached_property
57 | def scores_table(self) -> str:
58 | if self.relax:
59 | return "scores_relax"
60 |
61 | if self.autopilot:
62 | return "scores_ap"
63 |
64 | return "scores"
65 |
66 | @cached_property
67 | def stats_table(self) -> str:
68 | if self.relax:
69 | return "rx_stats"
70 |
71 | if self.autopilot:
72 | return "ap_stats"
73 |
74 | return "users_stats"
75 |
76 | @cached_property
77 | def stats_prefix(self) -> str:
78 | mode_vn = self.as_vn
79 |
80 | return {
81 | Mode.STD: "std",
82 | Mode.TAIKO: "taiko",
83 | Mode.CATCH: "ctb",
84 | Mode.MANIA: "mania",
85 | }[mode_vn]
86 |
87 | @cached_property
88 | def redis_leaderboard(self) -> str:
89 | suffix = ""
90 |
91 | if self.relax:
92 | suffix = "_relax"
93 | elif self.autopilot:
94 | suffix = "_ap"
95 |
96 | return f"leaderboard{suffix}"
97 |
98 | @cached_property
99 | def relax_int(self) -> int:
100 | if self.relax:
101 | return 1
102 |
103 | if self.autopilot:
104 | return 2
105 |
106 | return 0
107 |
108 | @cached_property
109 | def relax_str(self) -> str:
110 | if self.relax:
111 | return "RX"
112 |
113 | if self.autopilot:
114 | return "AP"
115 |
116 | return "VN"
117 |
118 | @cached_property
119 | def sort(self) -> str:
120 | return "pp" if self.value > 3 else "score"
121 |
122 | @classmethod
123 | def from_offset(cls, score_id: int) -> Mode:
124 | # IMPORTANT NOTE: this does not return the correct MODE, just the correct vn/rx/ap representation
125 | if RELAX_OFFSET < score_id < AP_OFFSET:
126 | return Mode.STD_RX
127 | elif score_id > AP_OFFSET:
128 | return Mode.STD_AP
129 |
130 | return Mode.STD
131 |
132 | @classmethod
133 | def from_lb(cls, mode: int, mods: int) -> Mode:
134 | if mods & Mods.RELAX:
135 | if mode == 3:
136 | return Mode.MANIA
137 |
138 | return Mode(mode + 4)
139 | elif mods & Mods.AUTOPILOT:
140 | if mode != 0:
141 | return Mode.STD
142 |
143 | return Mode.STD_AP
144 |
145 | return Mode(mode)
146 |
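A few spot checks of the mappings above, assuming the package imports resolve; they show how a leaderboard mode/mods pair and a score ID offset translate into `Mode` members and table names:

    from app.constants.mode import Mode
    from app.constants.mods import Mods

    assert Mode.from_lb(0, Mods.RELAX) is Mode.STD_RX
    assert Mode.from_lb(3, Mods.RELAX) is Mode.MANIA       # mania has no relax variant
    assert Mode.STD_RX.as_vn == 0
    assert Mode.STD_RX.scores_table == "scores_relax"

    # Score ID offsets only distinguish vanilla/relax/autopilot, not the actual mode.
    assert Mode.from_offset(1_073_741_824) is Mode.STD_RX
    assert Mode.from_offset(2_000_000_001) is Mode.STD_AP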
--------------------------------------------------------------------------------
/ussr/app/constants/mods.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from enum import IntFlag
4 |
5 |
6 | class Mods(IntFlag):
7 | NOMOD = 0
8 | NOFAIL = 1 << 0
9 | EASY = 1 << 1
10 | TOUCHSCREEN = 1 << 2
11 | HIDDEN = 1 << 3
12 | HARDROCK = 1 << 4
13 | SUDDENDEATH = 1 << 5
14 | DOUBLETIME = 1 << 6
15 | RELAX = 1 << 7
16 | HALFTIME = 1 << 8
17 | NIGHTCORE = 1 << 9
18 | FLASHLIGHT = 1 << 10
19 | AUTOPLAY = 1 << 11
20 | SPUNOUT = 1 << 12
21 | AUTOPILOT = 1 << 13
22 | PERFECT = 1 << 14
23 | KEY4 = 1 << 15
24 | KEY5 = 1 << 16
25 | KEY6 = 1 << 17
26 | KEY7 = 1 << 18
27 | KEY8 = 1 << 19
28 | FADEIN = 1 << 20
29 | RANDOM = 1 << 21
30 | CINEMA = 1 << 22
31 | TARGET = 1 << 23
32 | KEY9 = 1 << 24
33 | KEYCOOP = 1 << 25
34 | KEY1 = 1 << 26
35 | KEY3 = 1 << 27
36 | KEY2 = 1 << 28
37 | SCOREV2 = 1 << 29
38 | MIRROR = 1 << 30
39 |
40 | SPEED_MODS = DOUBLETIME | NIGHTCORE | HALFTIME
41 | GAME_CHANGING = RELAX | AUTOPILOT
42 |
43 | UNRANKED = SCOREV2 | AUTOPLAY | TARGET
44 |
45 | def __repr__(self) -> str:
46 | if not self.value:
47 | return "NM"
48 |
49 | _str = ""
50 |
51 | for mod in Mods:
52 | if self.value & mod and (m := str_mods.get(mod)):
53 | _str += m
54 |
55 | if self.value & Mods.NIGHTCORE:
56 | _str = _str.replace("DT", "")
57 | if self.value & Mods.PERFECT:
58 | _str = _str.replace("SD", "")
59 |
60 | return _str
61 |
62 | @classmethod
63 | def convert_str(cls, mods: str) -> Mods:
64 |         _mods = cls.NOMOD  # in case there's none to match
65 |
66 | if not mods or mods == "NM":
67 | return _mods
68 |
69 | split_mods = [mods[char : char + 2].upper() for char in range(0, len(mods), 2)]
70 |
71 | for mod in split_mods:
72 | if mod not in mods_str:
73 | continue
74 |
75 | _mods |= mods_str[mod]
76 |
77 | return _mods
78 |
79 | @property
80 | def rankable(self) -> bool:
81 | """Checks if the mod combo is rank-worthy."""
82 |
83 | return self & UNRANKABLE_MODS == Mods.NOMOD
84 |
85 | @property
86 | def conflict(self) -> bool:
87 | """Anticheat measure to check for illegal mod combos."""
88 |
89 | if self & Mods.DOUBLETIME and self & Mods.HALFTIME:
90 | return True
91 | elif self & Mods.NIGHTCORE and not self & Mods.DOUBLETIME:
92 | return True
93 | elif self & Mods.EASY and self & Mods.HARDROCK:
94 | return True
95 |
96 | return False
97 |
98 |
99 | UNRANKABLE_MODS = Mods.AUTOPLAY | Mods.SCOREV2
100 |
101 | str_mods = {
102 | Mods.NOFAIL: "NF",
103 | Mods.EASY: "EZ",
104 | Mods.TOUCHSCREEN: "TD",
105 | Mods.HIDDEN: "HD",
106 | Mods.HARDROCK: "HR",
107 | Mods.SUDDENDEATH: "SD",
108 | Mods.DOUBLETIME: "DT",
109 | Mods.RELAX: "RX",
110 | Mods.HALFTIME: "HT",
111 | Mods.NIGHTCORE: "NC",
112 | Mods.FLASHLIGHT: "FL",
113 | Mods.AUTOPLAY: "AU",
114 | Mods.SPUNOUT: "SO",
115 | Mods.AUTOPILOT: "AP",
116 | Mods.PERFECT: "PF",
117 | Mods.FADEIN: "FI",
118 | Mods.RANDOM: "RN",
119 | Mods.CINEMA: "CN",
120 | Mods.TARGET: "TP",
121 | Mods.SCOREV2: "V2",
122 | Mods.MIRROR: "MR",
123 | Mods.KEY1: "1K",
124 | Mods.KEY2: "2K",
125 | Mods.KEY3: "3K",
126 | Mods.KEY4: "4K",
127 | Mods.KEY5: "5K",
128 | Mods.KEY6: "6K",
129 | Mods.KEY7: "7K",
130 | Mods.KEY8: "8K",
131 | Mods.KEY9: "9K",
132 | Mods.KEYCOOP: "CO",
133 | }
134 |
135 | mods_str = {
136 | "NF": Mods.NOFAIL,
137 | "EZ": Mods.EASY,
138 | "TD": Mods.TOUCHSCREEN,
139 | "HD": Mods.HIDDEN,
140 | "HR": Mods.HARDROCK,
141 | "SD": Mods.SUDDENDEATH,
142 | "DT": Mods.DOUBLETIME,
143 | "RX": Mods.RELAX,
144 | "HT": Mods.HALFTIME,
145 | "NC": Mods.NIGHTCORE,
146 | "FL": Mods.FLASHLIGHT,
147 | "AU": Mods.AUTOPLAY,
148 | "SO": Mods.SPUNOUT,
149 | "AP": Mods.AUTOPILOT,
150 | "PF": Mods.PERFECT,
151 | "FI": Mods.FADEIN,
152 | "RN": Mods.RANDOM,
153 | "CN": Mods.CINEMA,
154 | "TP": Mods.TARGET,
155 | "V2": Mods.SCOREV2,
156 | "MR": Mods.MIRROR,
157 | "1K": Mods.KEY1,
158 | "2K": Mods.KEY2,
159 | "3K": Mods.KEY3,
160 | "4K": Mods.KEY4,
161 | "5K": Mods.KEY5,
162 | "6K": Mods.KEY6,
163 | "7K": Mods.KEY7,
164 | "8K": Mods.KEY8,
165 | "9K": Mods.KEY9,
166 | "CO": Mods.KEYCOOP,
167 | }
168 |
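A short round trip through the conversion tables above, assuming `app.constants.mods` imports cleanly:

    from app.constants.mods import Mods

    mods = Mods.convert_str("HDDT")
    assert mods == Mods.HIDDEN | Mods.DOUBLETIME
    assert repr(mods) == "HDDT"
    assert mods.rankable and not mods.conflict

    # Nightcore without DoubleTime counts as a conflicting (illegal) combination.
    assert Mods.convert_str("NC").conflict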
--------------------------------------------------------------------------------
/ussr/app/constants/privileges.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from enum import IntFlag
4 |
5 |
6 | class Privileges(IntFlag):
7 | """Bitwise enumerations for Ripple privileges."""
8 |
9 | USER_PUBLIC = 1
10 | USER_NORMAL = 2 << 0
11 | USER_DONOR = 2 << 1
12 | ADMIN_ACCESS_RAP = 2 << 2
13 | ADMIN_MANAGE_USERS = 2 << 3
14 | ADMIN_BAN_USERS = 2 << 4
15 | ADMIN_SILENCE_USERS = 2 << 5
16 | ADMIN_WIPE_USERS = 2 << 6
17 | ADMIN_MANAGE_BEATMAPS = 2 << 7
18 | ADMIN_MANAGE_SERVERS = 2 << 8
19 | ADMIN_MANAGE_SETTINGS = 2 << 9
20 | ADMIN_MANAGE_BETAKEYS = 2 << 10
21 | ADMIN_MANAGE_REPORTS = 2 << 11
22 | ADMIN_MANAGE_DOCS = 2 << 12
23 | ADMIN_MANAGE_BADGES = 2 << 13
24 | ADMIN_VIEW_RAP_LOGS = 2 << 14
25 | ADMIN_MANAGE_PRIVILEGES = 2 << 15
26 | ADMIN_SEND_ALERTS = 2 << 16
27 | ADMIN_CHAT_MOD = 2 << 17
28 | ADMIN_KICK_USERS = 2 << 18
29 | USER_PENDING_VERIFICATION = 2 << 19
30 | USER_TOURNAMENT_STAFF = 2 << 20
31 |     ADMIN_CAKER = 2 << 21
32 | BOT_USER = 1 << 30
33 |
34 | @property
35 | def is_restricted(self) -> bool:
36 | """Checks if user is restricted."""
37 | return (
38 | (self & Privileges.USER_NORMAL) and not (self & Privileges.USER_PUBLIC)
39 | ) or self.is_banned
40 |
41 | @property
42 | def is_banned(self) -> bool:
43 | """Checks if user is banned."""
44 | return self & Privileges.USER_NORMAL == 0
45 |
46 | def has_privilege(self, priv: Privileges) -> bool:
47 |         """Returns whether the privilege flag contains a given privilege.
48 | 
49 |         Note:
50 |             This is a check for a **single** privilege. If multiple are
51 |             included, the presence of any one of them will result in
52 |             `True` being returned.
53 | """
54 |
55 | return self & priv != 0
56 |
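The restricted/banned distinction above comes down to the `USER_PUBLIC` and `USER_NORMAL` bits. A small illustration, assuming the module imports as `app.constants.privileges`:

    from app.constants.privileges import Privileges

    privs = Privileges.USER_NORMAL      # normal but not public
    assert privs.is_restricted and not privs.is_banned

    privs |= Privileges.USER_PUBLIC     # normal + public
    assert not privs.is_restricted

    assert Privileges(0).is_banned      # no USER_NORMAL bit at all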
--------------------------------------------------------------------------------
/ussr/app/constants/ranked_status.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import functools
4 | from enum import IntEnum
5 |
6 |
7 | class RankedStatus(IntEnum):
8 | NOT_SUBMITTED = -1
9 | PENDING = 0
10 | UPDATE_AVAILABLE = 1
11 | RANKED = 2
12 | APPROVED = 3
13 | QUALIFIED = 4
14 | LOVED = 5
15 |
16 | @functools.cached_property
17 | def osu_api(self) -> int:
18 | return {
19 | self.PENDING: 0,
20 | self.RANKED: 1,
21 | self.APPROVED: 2,
22 | self.QUALIFIED: 3,
23 | self.LOVED: 4,
24 | }[self]
25 |
26 | @classmethod
27 | @functools.cache
28 | def from_osu_api(cls, osu_api_status: int) -> RankedStatus:
29 | return {
30 | -2: cls.PENDING, # graveyard
31 | -1: cls.PENDING, # wip
32 | 0: cls.PENDING,
33 | 1: cls.RANKED,
34 | 2: cls.APPROVED,
35 | 3: cls.QUALIFIED,
36 | 4: cls.LOVED,
37 | }.get(osu_api_status, cls.UPDATE_AVAILABLE)
38 |
39 | @classmethod
40 | @functools.cache
41 | def from_direct(cls, direct_status: int) -> RankedStatus:
42 | return {
43 | 0: cls.RANKED,
44 | 2: cls.PENDING,
45 | 3: cls.QUALIFIED,
46 | 5: cls.PENDING, # graveyard
47 | 7: cls.RANKED, # played before
48 | 8: cls.LOVED,
49 | }.get(direct_status, cls.UPDATE_AVAILABLE)
50 |
51 | @functools.cached_property
52 | def osu_direct(self) -> int:
53 | return {
54 | self.PENDING: 0,
55 | self.RANKED: 1,
56 | self.APPROVED: 2,
57 | self.QUALIFIED: 3,
58 | self.LOVED: 4,
59 | }.get(self, 0)
60 |
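A couple of spot checks on the osu!api conversion above; unknown API statuses deliberately fall back to `UPDATE_AVAILABLE` (assuming the module imports as `app.constants.ranked_status`):

    from app.constants.ranked_status import RankedStatus

    assert RankedStatus.from_osu_api(-2) is RankedStatus.PENDING   # graveyard
    assert RankedStatus.from_osu_api(1) is RankedStatus.RANKED
    assert RankedStatus.from_osu_api(99) is RankedStatus.UPDATE_AVAILABLE
    assert RankedStatus.RANKED.osu_api == 1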
--------------------------------------------------------------------------------
/ussr/app/constants/score_status.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from enum import IntEnum
4 |
5 |
6 | class ScoreStatus(IntEnum):
7 | QUIT = 0
8 | FAILED = 1
9 | SUBMITTED = 2
10 | BEST = 3
11 |
--------------------------------------------------------------------------------
/ussr/app/init_api.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import pprint
5 |
6 | import aiohttp
7 | import app.redis
8 | import app.state
9 | import app.usecases
10 | import logger
11 | import orjson
12 | import settings
13 | from fastapi import FastAPI
14 | from fastapi import status
15 | from fastapi.encoders import jsonable_encoder
16 | from fastapi.exceptions import RequestValidationError
17 | from fastapi.requests import Request
18 | from fastapi.responses import ORJSONResponse
19 | from fastapi.responses import Response
20 | from starlette.middleware.base import RequestResponseEndpoint
21 |
22 |
23 | def init_events(asgi_app: FastAPI) -> None:
24 | @asgi_app.on_event("startup")
25 | async def on_startup() -> None:
26 | # TODO: maybe not here?
27 | if not settings.API_KEYS_POOL:
28 | logger.warning(
29 | "No osu!api v1 keys in the pool! Using fallback API v1 + osu.",
30 | )
31 |
32 | await app.state.services.database.connect()
33 | await app.state.services.redis.initialize()
34 |
35 | if settings.S3_ENABLED:
36 | await app.state.services.replay_storage.connect()
37 |
38 | app.state.services.http = aiohttp.ClientSession(
39 | json_serialize=lambda x: orjson.dumps(x).decode(),
40 | )
41 |
42 | await app.state.cache.init_cache()
43 | await app.redis.initialise_pubsubs()
44 |
45 | logger.info("Server has started!")
46 |
47 | @asgi_app.on_event("shutdown")
48 | async def on_shutdown() -> None:
49 | await app.state.cancel_tasks()
50 |
51 | await app.state.services.database.disconnect()
52 | await app.state.services.redis.close()
53 |
54 | if settings.S3_ENABLED:
55 | await app.state.services.replay_storage.disconnect()
56 |
57 | await app.state.services.http.close()
58 |
59 | logger.info("Server has shutdown!")
60 |
61 | @asgi_app.middleware("http")
62 | async def http_middleware(
63 | request: Request,
64 | call_next: RequestResponseEndpoint,
65 | ) -> Response:
66 | try:
67 | return await call_next(request)
68 | except RuntimeError as err:
69 | if err.args[0] == "No response returned.":
70 | return Response("skill issue")
71 |
72 | raise err
73 |
74 | @asgi_app.exception_handler(RequestValidationError)
75 | async def handle_validation_error(
76 | request: Request,
77 | e: RequestValidationError,
78 | ) -> Response:
79 | return ORJSONResponse(
80 | content=jsonable_encoder(e.errors()),
81 | status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
82 | )
83 |
84 |
85 | def init_fastapi() -> FastAPI:
86 | asgi_app = FastAPI()
87 |
88 | init_events(asgi_app)
89 |
90 | import app.api
91 |
92 | asgi_app.include_router(app.api.router)
93 |
94 | return asgi_app
95 |
96 |
97 | asgi_app = init_fastapi()
98 |
--------------------------------------------------------------------------------
/ussr/app/models/___init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from . import achievement
4 | from . import beatmap
5 | from . import score
6 | from . import stats
7 | from . import user
8 |
--------------------------------------------------------------------------------
/ussr/app/models/achievement.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass
4 | from typing import Callable
5 |
6 |
7 | @dataclass
8 | class Achievement:
9 | id: int
10 | file: str
11 | name: str
12 | desc: str
13 | cond: Callable
14 |
15 | @property
16 | def full_name(self) -> str:
17 | return f"{self.file}+{self.name}+{self.desc}"
18 |
--------------------------------------------------------------------------------
/ussr/app/models/beatmap.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import time
4 | from dataclasses import dataclass
5 | from typing import Optional
6 |
7 | import settings
8 | from app.constants.mode import Mode
9 | from app.constants.ranked_status import RankedStatus
10 |
11 | ONE_DAY = 86_400
12 |
13 |
14 | @dataclass
15 | class Beatmap:
16 | md5: str
17 | id: int
18 | set_id: int
19 |
20 | song_name: str
21 |
22 | status: RankedStatus
23 |
24 | plays: int
25 | passes: int
26 | mode: Mode
27 |
28 | od: float
29 | ar: float
30 |
31 | difficulty_std: float
32 | difficulty_taiko: float
33 | difficulty_ctb: float
34 | difficulty_mania: float
35 |
36 | hit_length: int
37 |
38 | last_update: int = 0
39 |
40 | max_combo: int = 0
41 | bpm: int = 0
42 | filename: str = ""
43 | frozen: bool = False
44 | rating: Optional[float] = None
45 |
46 | @property
47 | def url(self) -> str:
48 | # i hate this
49 | server_url = settings.PS_DOMAIN.replace("https://", "").replace("http://", "")
50 |
51 | return f"https://osu.{server_url}/beatmaps/{self.id}"
52 |
53 | @property
54 | def set_url(self) -> str:
55 | # i hate this
56 | server_url = settings.PS_DOMAIN.replace("https://", "").replace("http://", "")
57 |
58 | return f"https://osu.{server_url}/beatmapsets/{self.set_id}"
59 |
60 | @property
61 | def embed(self) -> str:
62 | return f"[{self.url} {self.song_name}]"
63 |
64 | @property
65 | def gives_pp(self) -> bool:
66 | return self.status in (RankedStatus.RANKED, RankedStatus.APPROVED)
67 |
68 | @property
69 | def has_leaderboard(self) -> bool:
70 | return self.status >= RankedStatus.RANKED
71 |
72 | @property
73 | def deserves_update(self) -> bool:
74 |         """Checks whether an update attempt should be made for the map.
75 |         This is true when the map is not ranked/approved/loved and at least
76 |         a day has passed since it was last checked."""
77 |
78 | return (
79 | self.status
80 | not in (RankedStatus.RANKED, RankedStatus.APPROVED, RankedStatus.LOVED)
81 | and self.last_update < int(time.time()) - ONE_DAY
82 | and self.id < 999999999
83 | )
84 |
85 | def osu_string(self, score_count: int, rating: float) -> str:
86 | return (
87 | f"{int(self.status)}|false|{self.id}|{self.set_id}|{score_count}|0|\n" # |0| = featured artist bs
88 | f"0\n{self.song_name}\n{rating:.1f}" # 0 = offset
89 | )
90 |
91 | @property
92 | def db_dict(self) -> dict:
93 | return {
94 | "beatmap_md5": self.md5,
95 | "beatmap_id": self.id,
96 | "beatmapset_id": self.set_id,
97 | "song_name": self.song_name,
98 | "ranked": self.status.value,
99 | "playcount": self.plays,
100 | "passcount": self.passes,
101 | "mode": self.mode.value,
102 | "od": self.od,
103 | "ar": self.ar,
104 | "difficulty_std": self.difficulty_std,
105 | "difficulty_taiko": self.difficulty_taiko,
106 | "difficulty_ctb": self.difficulty_ctb,
107 | "difficulty_mania": self.difficulty_mania,
108 | "hit_length": self.hit_length,
109 | "latest_update": self.last_update,
110 | "max_combo": self.max_combo,
111 | "bpm": self.bpm,
112 | "file_name": self.filename,
113 | "ranked_status_freezed": self.frozen,
114 | "rating": self.rating,
115 | }
116 |
117 | @classmethod
118 | def from_dict(cls, result: dict) -> Beatmap:
119 | return cls(
120 | md5=result["beatmap_md5"],
121 | id=result["beatmap_id"],
122 | set_id=result["beatmapset_id"],
123 | song_name=result["song_name"],
124 | status=RankedStatus(result["ranked"]),
125 | plays=result["playcount"],
126 | passes=result["passcount"],
127 | mode=Mode(result["mode"]),
128 | od=result["od"],
129 | ar=result["ar"],
130 | difficulty_std=result["difficulty_std"],
131 | difficulty_taiko=result["difficulty_taiko"],
132 | difficulty_ctb=result["difficulty_ctb"],
133 | difficulty_mania=result["difficulty_mania"],
134 | hit_length=result["hit_length"],
135 | last_update=result["latest_update"],
136 | max_combo=result["max_combo"],
137 | bpm=result["bpm"],
138 | filename=result["file_name"],
139 | frozen=result["ranked_status_freezed"],
140 | rating=result["rating"],
141 | )
142 |
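For context, `osu_string` appears to be the beatmap header line used when assembling leaderboard responses. A sketch of its output with entirely made-up field values (assumes the package's `settings` module loads, since it is imported for the URL helpers):

    from app.constants.mode import Mode
    from app.constants.ranked_status import RankedStatus
    from app.models.beatmap import Beatmap

    bmap = Beatmap(
        md5="a" * 32, id=123, set_id=45, song_name="Artist - Title [Insane]",
        status=RankedStatus.RANKED, plays=0, passes=0, mode=Mode.STD,
        od=8.0, ar=9.0, difficulty_std=5.2, difficulty_taiko=0.0,
        difficulty_ctb=0.0, difficulty_mania=0.0, hit_length=180,
    )
    print(bmap.osu_string(score_count=3, rating=9.5))
    # 2|false|123|45|3|0|
    # 0
    # Artist - Title [Insane]
    # 9.5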
--------------------------------------------------------------------------------
/ussr/app/models/score.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import time
4 | from dataclasses import dataclass
5 | from typing import Optional
6 |
7 | from app.constants.mode import Mode
8 | from app.constants.mods import Mods
9 | from app.constants.score_status import ScoreStatus
10 | from app.models.user import User
11 |
12 |
13 | @dataclass
14 | class Score:
15 | id: int
16 | map_md5: str
17 |
18 | user_id: int
19 |
20 | mode: Mode
21 | mods: Mods
22 |
23 | pp: float
24 | sr: float
25 |
26 | score: int
27 | max_combo: int
28 | acc: float
29 |
30 | n300: int
31 | n100: int
32 | n50: int
33 | nmiss: int
34 | ngeki: int
35 | nkatu: int
36 |
37 | passed: bool
38 | quit: bool
39 | full_combo: bool
40 | status: ScoreStatus
41 |
42 | time: int
43 | time_elapsed: int
44 |
45 | rank: int = 0
46 | old_best: Optional[Score] = None
47 |
48 | def osu_string(
49 | self,
50 | username: str,
51 | rank: int,
52 | show_pp: Optional[bool] = None,
53 | ) -> str:
54 | score = self.score
55 |
56 | if show_pp:
57 | score = int(self.pp)
58 | elif show_pp is None:
59 | if self.mode > Mode.MANIA:
60 | score = int(self.pp)
61 |
62 | return (
63 | f"{self.id}|{username}|{score}|{self.max_combo}|{self.n50}|{self.n100}|{self.n300}|{self.nmiss}|"
64 | f"{self.nkatu}|{self.ngeki}|{int(self.full_combo)}|{int(self.mods)}|{self.user_id}|{rank}|{self.time}|"
65 | "1" # has replay
66 | )
67 |
68 | @property
69 | def db_dict(self) -> dict:
70 | return {
71 | "beatmap_md5": self.map_md5,
72 | "userid": self.user_id,
73 | "score": self.score,
74 | "max_combo": self.max_combo,
75 | "full_combo": self.full_combo,
76 | "mods": self.mods.value,
77 | "300_count": self.n300,
78 | "100_count": self.n100,
79 | "50_count": self.n50,
80 | "katus_count": self.nkatu,
81 | "gekis_count": self.ngeki,
82 | "misses_count": self.nmiss,
83 | "time": self.time,
84 | "play_mode": self.mode.as_vn,
85 | "completed": self.status.value,
86 | "accuracy": self.acc,
87 | "pp": self.pp,
88 | "playtime": self.time_elapsed,
89 | }
90 |
91 | @classmethod
92 | def from_dict(cls, result: dict) -> Score:
93 | return cls(
94 | id=result["id"],
95 | map_md5=result["beatmap_md5"],
96 | user_id=result["userid"],
97 | score=result["score"],
98 | max_combo=result["max_combo"],
99 | full_combo=result["full_combo"],
100 | mods=Mods(result["mods"]),
101 | n300=result["300_count"],
102 | n100=result["100_count"],
103 | n50=result["50_count"],
104 | nkatu=result["katus_count"],
105 | ngeki=result["gekis_count"],
106 | nmiss=result["misses_count"],
107 | time=int(result["time"]),
108 | mode=Mode.from_lb(result["play_mode"], result["mods"]),
109 | status=ScoreStatus(result["completed"]),
110 | acc=result["accuracy"],
111 | pp=result["pp"],
112 | sr=0.0, # irrelevant in this case
113 | time_elapsed=result["playtime"],
114 | passed=result["completed"] > ScoreStatus.FAILED,
115 | quit=result["completed"] == ScoreStatus.QUIT,
116 | )
117 |
118 | @classmethod
119 | def from_submission(cls, data: list[str], map_md5: str, user: User) -> Score:
120 | return Score(
121 | id=0, # set later
122 | map_md5=map_md5,
123 | user_id=user.id,
124 | mode=Mode.from_lb(int(data[13]), int(data[11])),
125 | mods=Mods(int(data[11])),
126 | pp=0.0, # set later
127 | sr=0.0, # set later
128 | score=int(data[7]),
129 | max_combo=int(data[8]),
130 | acc=0.0, # set later
131 | n300=int(data[1]),
132 | n100=int(data[2]),
133 | n50=int(data[3]),
134 | nmiss=int(data[6]),
135 | ngeki=int(data[4]),
136 | nkatu=int(data[5]),
137 | passed=data[12] == "True",
138 | quit=False, # set later
139 | full_combo=data[9] == "True",
140 | status=ScoreStatus.FAILED, # set later
141 | time=int(time.time()),
142 | time_elapsed=0, # set later
143 | )
144 |
--------------------------------------------------------------------------------
/ussr/app/models/stats.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass
4 |
5 | from app.constants.mode import Mode
6 |
7 |
8 | @dataclass
9 | class Stats:
10 | user_id: int
11 | mode: Mode
12 |
13 | ranked_score: int
14 | total_score: int
15 | pp: float
16 | rank: int
17 | country_rank: int
18 | accuracy: float
19 | playcount: int
20 | max_combo: int
21 | total_hits: int
22 |
--------------------------------------------------------------------------------
/ussr/app/models/user.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass
4 |
5 | import settings
6 | from app.constants.privileges import Privileges
7 |
8 |
9 | @dataclass
10 | class User:
11 | id: int
12 | name: str
13 | privileges: Privileges
14 | friends: list[int]
15 | password_bcrypt: str
16 | country: str
17 | coins: int
18 |
19 | def __repr__(self) -> str:
20 | return f"<{self.name} ({self.id})>"
21 |
22 | @property
23 | def url(self) -> str:
24 | # i hate this
25 | server_url = settings.PS_DOMAIN.replace("https://", "").replace("http://", "")
26 |
27 | return f"https://{server_url}/u/{self.id}"
28 |
29 | @property
30 | def embed(self) -> str:
31 | return f"[{self.url} {self.name}]"
32 |
--------------------------------------------------------------------------------
/ussr/app/objects/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from . import binary
4 | from . import leaderboard
5 | from . import oppai
6 | from . import path
7 |
--------------------------------------------------------------------------------
/ussr/app/objects/binary.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import struct
4 | from typing import Union
5 |
6 |
7 | class BinaryWriter:
8 | """A binary writer used for serialisation. Primarily includes osu!'s types."""
9 |
10 | def __init__(self) -> None:
11 | self.buffer = bytearray()
12 |
13 | def write_uleb128(self, value: int) -> None:
14 | """Write a uleb128 value to the buffer."""
15 | if value == 0:
16 | self.buffer.append(0)
17 | return
18 |
19 | ret = bytearray()
20 |
21 | while value > 0:
22 | ret.append(value & 0b01111111)
23 | value >>= 7
24 | if value != 0:
25 | ret[-1] |= 0b10000000
26 |
27 | self.buffer.extend(ret)
28 |
29 |     def write_u64_le(self, value: int) -> BinaryWriter:
30 |         """Write a 64-bit unsigned integer to the buffer."""
31 |         self.buffer += struct.pack("<Q", value)
32 |         return self
33 | 
34 |     def write_i64_le(self, value: int) -> BinaryWriter:
35 |         """Write a 64-bit integer to the buffer."""
36 |         self.buffer += struct.pack("<q", value)
37 |         return self
38 | 
39 |     def write_i32_le(self, value: int) -> BinaryWriter:
40 |         """Write a 32-bit integer to the buffer."""
41 |         self.buffer += struct.pack("<i", value)
42 |         return self
43 | 
44 |     def write_u32_le(self, value: int) -> BinaryWriter:
45 |         """Write a 32-bit unsigned integer to the buffer."""
46 |         self.buffer += struct.pack("<I", value)
47 |         return self
48 | 
49 |     def write_i16_le(self, value: int) -> BinaryWriter:
50 |         """Write a 16-bit integer to the buffer."""
51 |         self.buffer += struct.pack("<h", value)
52 |         return self
53 | 
54 |     def write_u16_le(self, value: int) -> BinaryWriter:
55 |         """Write a 16-bit unsigned integer to the buffer."""
56 |         self.buffer += struct.pack("<H", value)
57 |         return self
58 | 
59 |     def write_i8_le(self, value: int) -> BinaryWriter:
60 |         """Write a 8-bit integer to the buffer."""
61 |         self.buffer += struct.pack("<b", value)
62 |         return self
63 | 
64 |     def write_u8_le(self, value: int) -> BinaryWriter:
65 |         """Write a 8-bit unsigned integer to the buffer."""
66 |         self.buffer += struct.pack("<B", value)
67 |         return self
68 | 
69 |     def write_raw(self, data: bytes) -> BinaryWriter:
70 |         """Write raw data to the buffer."""
71 |         self.buffer += data
72 |         return self
73 |
74 | def write_osu_string(self, string: str) -> BinaryWriter:
75 | """Write an osu! protocol style string.
76 | An osu! protocol string consists of an 'exists' byte, followed
77 | by a uleb128 length, followed by the string itself.
78 | """
79 | if string:
80 | self.buffer.append(0xB)
81 | str_encoded = string.encode("utf-8")
82 | self.write_uleb128(len(str_encoded))
83 | self.write_raw(str_encoded)
84 | else:
85 | self.buffer.append(0)
86 | return self
87 |
--------------------------------------------------------------------------------
/ussr/app/objects/leaderboard.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass
4 | from dataclasses import field
5 | from typing import Optional
6 | from typing import TYPE_CHECKING
7 | from typing import TypedDict
8 | from typing import Union
9 |
10 | from app.constants.mode import Mode
11 |
12 | if TYPE_CHECKING:
13 | from app.models.score import Score
14 |
15 |
16 | class UserScore(TypedDict):
17 | score: Score
18 | rank: int
19 |
20 |
21 | @dataclass
22 | class Leaderboard:
23 | mode: Mode
24 | scores: list[Score] = field(default_factory=list)
25 |
26 | def __len__(self) -> int:
27 | return len(self.scores)
28 |
29 | def remove_score_index(self, index: int) -> None:
30 | self.scores.pop(index)
31 |
32 | async def find_user_score(
33 | self,
34 | user_id: int,
35 | unrestricted: bool = True,
36 | ) -> Optional[UserScore]:
37 | if unrestricted:
38 | unrestricted_scores = await self.get_unrestricted_scores(user_id)
39 | else:
40 | unrestricted_scores = self.scores
41 |
42 | for idx, score in enumerate(unrestricted_scores):
43 | if score.user_id == user_id:
44 | return {
45 | "score": score,
46 | "rank": idx + 1,
47 | }
48 |
49 | async def find_score_rank(self, user_id: int, score_id: int) -> int:
50 | unrestricted_scores = await self.get_unrestricted_scores(user_id)
51 |
52 | for idx, score in enumerate(unrestricted_scores):
53 | if score.id == score_id:
54 | return idx + 1
55 |
56 | return 0
57 |
58 | async def get_unrestricted_scores(
59 | self,
60 | user_id: int,
61 | include_self: bool = True,
62 | ) -> list[Score]:
63 |         import app.usecases.privileges  # hotfix until we fix the code.
64 | 
65 |         scores = []
66 | 
67 |         for score in self.scores:
68 |             user_privilege = await app.usecases.privileges.fetch(score.user_id)
69 | if user_privilege.is_restricted and not (
70 | score.user_id == user_id and include_self
71 | ):
72 | continue
73 |
74 | scores.append(score)
75 |
76 | return scores
77 |
78 | async def remove_user(self, user_id: int) -> None:
79 | result = await self.find_user_score(user_id, unrestricted=False)
80 |
81 | if result is not None:
82 | self.scores.remove(result["score"])
83 |
84 | def sort(self) -> None:
85 | if self.mode > Mode.MANIA:
86 | sort = lambda score: score.pp
87 | else:
88 | sort = lambda score: score.score
89 |
90 | self.scores = sorted(self.scores, key=sort, reverse=True)
91 |
92 | async def whatif_placement(
93 | self,
94 | user_id: int,
95 | sort_value: Union[int, float],
96 | ) -> int:
97 | unrestricted_scores = await self.get_unrestricted_scores(user_id)
98 |
99 | for idx, score in enumerate(unrestricted_scores):
100 | if self.mode > Mode.MANIA:
101 | sort_key = score.pp
102 | else:
103 | sort_key = score.score
104 |
105 | if sort_value > sort_key:
106 | return idx + 1
107 |
108 |         return len(unrestricted_scores) + 1  # would place below every current score
109 |
110 | async def add_score(self, score: Score) -> None:
111 | await self.remove_user(score.user_id)
112 |
113 | self.scores.append(score)
114 | self.sort()
115 |
--------------------------------------------------------------------------------
/ussr/app/objects/oppai.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import ctypes
4 | import functools
5 | from types import TracebackType
6 | from typing import Optional
7 | from typing import Type
8 |
9 | from app.objects.path import Path
10 |
11 |
12 | class OppaiWrapper:
13 | """Lightweight wrapper around franc[e]sco's c89 oppai-ng library.
14 | Made by cmyui https://github.com/cmyui/cmyui_pkg/blob/master/cmyui/osu/oppai_ng.py
15 | """
16 |
17 | __slots__ = ("static_lib", "_ez")
18 |
19 | def __init__(self, lib_path: str) -> None:
20 | self.static_lib = self.load_static_library(lib_path)
21 | self._ez = 0
22 |
23 | def __enter__(self) -> OppaiWrapper:
24 | self._ez = self.static_lib.ezpp_new()
25 | return self
26 |
27 | def __exit__(
28 | self,
29 | exc_type: Optional[Type[BaseException]],
30 | exc_value: Optional[BaseException],
31 | traceback: Optional[TracebackType],
32 | ) -> bool:
33 | self.static_lib.ezpp_free(self._ez)
34 | self._ez = 0
35 | return False
36 |
37 | # Screw proper with usage.
38 | def set_static_lib(self):
39 | """Creates a static lib binding for the object.
40 | Note:
41 | Remember to call this before using the object.
42 | Remember to free the static lib using `free_static_lib` after you're done.
43 | """
44 |
45 | self._ez = self.static_lib.ezpp_new()
46 |
47 | def free_static_lib(self):
48 | """Frees the static lib for the object."""
49 | self.static_lib.ezpp_free(self._ez)
50 |
51 | # NOTE: probably the only function you'll need to use
52 | def configure(
53 | self,
54 | mode: int = 0,
55 | acc: float = 0,
56 | mods: int = 0,
57 | combo: int = 0,
58 | nmiss: int = 0,
59 | ) -> None:
60 | """Convenience wrapper so you don't have to
61 | think about the order for clobbering stuff"""
62 | if self._ez == 0:
63 | raise RuntimeError("OppaiWrapper used before oppai-ng initialization!")
64 |
65 | if mode:
66 | self.set_mode(mode)
67 | if mods:
68 | self.set_mods(mods)
69 | if nmiss:
70 | self.set_nmiss(nmiss)
71 | if combo:
72 | self.set_combo(combo)
73 | if acc:
74 | self.set_accuracy_percent(acc) # n50, n100s?
75 |
76 | # NOTE: all of the 1-1 oppai-ng api functions below will assume the library
77 | # has been loaded successfully and ezpp_new has been called.
78 |
79 | def calculate(self, osu_file_path: Path) -> None: # ezpp()
80 | osu_file_path_bytestr = str(osu_file_path).encode()
81 | self.static_lib.ezpp(self._ez, osu_file_path_bytestr)
82 |
83 | def calculate_data(self, osu_file_contents: bytes) -> None: # ezpp_data()
84 | self.static_lib.ezpp_data(self._ez, osu_file_contents, len(osu_file_contents))
85 |
86 | def calculate_dup(self, osu_file_path: Path) -> None: # ezpp_dup()
87 | osu_file_path_bytestr = str(osu_file_path).encode()
88 | self.static_lib.ezpp_dup(self._ez, osu_file_path_bytestr)
89 |
90 | def calculate_data_dup(self, osu_file_contents: bytes) -> None: # ezpp_data_dup()
91 | self.static_lib.ezpp_data_dup(
92 | self._ez,
93 | osu_file_contents,
94 | len(osu_file_contents),
95 | )
96 |
97 | # get stuff
98 |
99 | def get_pp(self) -> float:
100 | return self.static_lib.ezpp_pp(self._ez)
101 |
102 | def get_sr(self) -> float:
103 | return self.static_lib.ezpp_stars(self._ez)
104 |
105 | def get_mode(self) -> int:
106 | return self.static_lib.ezpp_mode(self._ez)
107 |
108 | def get_combo(self) -> int:
109 | return self.static_lib.ezpp_combo(self._ez)
110 |
111 | def get_max_combo(self) -> int:
112 | return self.static_lib.ezpp_max_combo(self._ez)
113 |
114 | def get_mods(self) -> int:
115 | return self.static_lib.ezpp_mods(self._ez)
116 |
117 | def get_score_version(self) -> int:
118 | return self.static_lib.ezpp_score_version(self._ez)
119 |
120 | def get_aim_stars(self) -> float:
121 | return self.static_lib.ezpp_aim_stars(self._ez)
122 |
123 | def get_speed_stars(self) -> float:
124 | return self.static_lib.ezpp_speed_stars(self._ez)
125 |
126 | def get_aim_pp(self) -> float:
127 | return self.static_lib.ezpp_aim_pp(self._ez)
128 |
129 | def get_speed_pp(self) -> float:
130 | return self.static_lib.ezpp_speed_pp(self._ez)
131 |
132 | def get_accuracy_percent(self) -> float:
133 | return self.static_lib.ezpp_accuracy_percent(self._ez)
134 |
135 | def get_n300(self) -> int:
136 | return self.static_lib.ezpp_n300(self._ez)
137 |
138 | def get_n100(self) -> int:
139 | return self.static_lib.ezpp_n100(self._ez)
140 |
141 | def get_n50(self) -> int:
142 | return self.static_lib.ezpp_n50(self._ez)
143 |
144 | def get_nmiss(self) -> int:
145 | return self.static_lib.ezpp_nmiss(self._ez)
146 |
147 | def get_title(self) -> bytes:
148 | return self.static_lib.ezpp_title(self._ez)
149 |
150 | def get_title_unicode(self) -> bytes:
151 | return self.static_lib.ezpp_title_unicode(self._ez)
152 |
153 | def get_artist(self) -> bytes:
154 | return self.static_lib.ezpp_artist(self._ez)
155 |
156 | def get_artist_unicode(self) -> bytes:
157 | return self.static_lib.ezpp_artist_unicode(self._ez)
158 |
159 | def get_creator(self) -> bytes:
160 | return self.static_lib.ezpp_creator(self._ez)
161 |
162 | def get_version(self) -> bytes:
163 | return self.static_lib.ezpp_version(self._ez)
164 |
165 | def get_ncircles(self) -> int:
166 | return self.static_lib.ezpp_ncircles(self._ez)
167 |
168 | def get_nsliders(self) -> int:
169 | return self.static_lib.ezpp_nsliders(self._ez)
170 |
171 | def get_nspinners(self) -> int:
172 | return self.static_lib.ezpp_nspinners(self._ez)
173 |
174 | def get_nobjects(self) -> int:
175 | return self.static_lib.ezpp_nobjects(self._ez)
176 |
177 | def get_ar(self) -> float:
178 | return self.static_lib.ezpp_ar(self._ez)
179 |
180 | def get_cs(self) -> float:
181 | return self.static_lib.ezpp_cs(self._ez)
182 |
183 | def get_od(self) -> float:
184 | return self.static_lib.ezpp_od(self._ez)
185 |
186 | def get_hp(self) -> float:
187 | return self.static_lib.ezpp_hp(self._ez)
188 |
189 | def get_odms(self) -> float:
190 | return self.static_lib.ezpp_odms(self._ez)
191 |
192 | def get_autocalc(self) -> int:
193 | return self.static_lib.ezpp_autocalc(self._ez)
194 |
195 | def get_time_at(self, i: int) -> float:
196 | return self.static_lib.ezpp_time_at(self._ez, i)
197 |
198 | def get_strain_at(self, i: int, difficulty_type: int) -> float:
199 | return self.static_lib.ezpp_strain_at(self._ez, i, difficulty_type)
200 |
201 | def get_ntiming_points(self) -> int:
202 | return self.static_lib.ezpp_ntiming_points(self._ez)
203 |
204 | def get_timing_time(self, i: int) -> float:
205 | return self.static_lib.ezpp_timing_time(self._ez, i)
206 |
207 | def get_timing_ms_per_beat(self, i: int) -> float:
208 | return self.static_lib.ezpp_timing_ms_per_beat(self._ez, i)
209 |
210 | def get_timing_change(self, i: int) -> int:
211 | return self.static_lib.ezpp_timing_change(self._ez, i)
212 |
213 | # set stuff
214 | # NOTE: the order you call these in matters due to
215 | # memory clobbering (for example setting misscount
216 | # will reset the internal accuracy_percent). they're
217 | # all documented here, but you can use the configure()
218 | # method in the main api above if you don't want to
219 | # think about this level of abstraction.
220 |
221 | def set_aim_stars(self, aim_stars: float) -> None:
222 | self.static_lib.ezpp_set_aim_stars(self._ez, aim_stars)
223 |
224 | def set_speed_stars(self, speed_stars: float) -> None:
225 | self.static_lib.ezpp_set_speed_stars(self._ez, speed_stars)
226 |
227 | def set_base_ar(self, ar: float) -> None:
228 | self.static_lib.ezpp_set_base_ar(self._ez, ar)
229 |
230 | def set_base_od(self, od: float) -> None:
231 | self.static_lib.ezpp_set_base_od(self._ez, od)
232 |
233 | def set_base_cs(self, cs: float) -> None:
234 | # NOTE: will force map re-parse
235 | self.static_lib.ezpp_set_base_cs(self._ez, cs)
236 |
237 | def set_base_hp(self, hp: float) -> None:
238 | self.static_lib.ezpp_set_base_hp(self._ez, hp)
239 |
240 | def set_mode_override(self, mode: int) -> None:
241 | # NOTE: will force map re-parse
242 | self.static_lib.ezpp_set_mode_override(self._ez, mode)
243 |
244 | def set_mode(self, mode: int) -> None:
245 | self.static_lib.ezpp_set_mode(self._ez, mode)
246 |
247 | def set_mods(self, mods: int) -> None:
248 | # NOTE: will force map re-parse for
249 | # hr, ez, dt, nc and ht.
250 | self.static_lib.ezpp_set_mods(self._ez, mods)
251 |
252 | def set_combo(self, combo: int) -> None:
253 | self.static_lib.ezpp_set_combo(self._ez, combo)
254 |
255 | def set_nmiss(self, nmiss: int) -> None:
256 | # NOTE: will force map re-parse &
257 | # clobber accuracy_percent
258 | # (i think the map re-parse can be removed,
259 | # and will talk to franc[e]sco about it)
260 | self.static_lib.ezpp_set_nmiss(self._ez, nmiss)
261 |
262 | def set_score_version(self, score_version: int) -> None:
263 | self.static_lib.ezpp_set_score_version(self._ez, score_version)
264 |
265 | def set_accuracy_percent(self, accuracy: float) -> None:
266 | self.static_lib.ezpp_set_accuracy_percent(self._ez, accuracy)
267 |
268 | def set_accuracy(self, n100: int, n50: int) -> None:
269 | self.static_lib.ezpp_set_accuracy(self._ez, n100, n50)
270 |
271 | def set_end(self, end: int) -> None:
272 | # NOTE: will force map re-parse &
273 | # clobber accuracy_percent
274 | self.static_lib.ezpp_set_end(self._ez, end)
275 |
276 | def set_end_time(self, end_time: float) -> None:
277 | # NOTE: will force map re-parse &
278 | # clobber accuracy_percent
279 | self.static_lib.ezpp_set_end_time(self._ez, end_time)
280 |
281 | @staticmethod
282 | @functools.cache
283 | def load_static_library(lib_path: str) -> ctypes.CDLL:
284 | """Load the oppai-ng static library,
285 | and register c types to it's api."""
286 | static_lib = ctypes.cdll.LoadLibrary(lib_path)
287 |
288 | if not static_lib:
289 | raise RuntimeError(f"Failed to load {lib_path}.")
290 |
291 | # main api
292 |
293 | ezpp_new = static_lib.ezpp_new
294 | ezpp_new.argtypes = ()
295 | ezpp_new.restype = ctypes.c_int
296 |
297 | ezpp_free = static_lib.ezpp_free
298 |         ezpp_free.argtypes = (ctypes.c_int,)
299 | ezpp_free.restype = ctypes.c_void_p
300 |
301 | ezpp = static_lib.ezpp
302 | ezpp.argtypes = (ctypes.c_int, ctypes.c_char_p)
303 | ezpp.restype = ctypes.c_int
304 |
305 | ezpp_data = static_lib.ezpp_data
306 | ezpp_data.argtypes = (ctypes.c_int, ctypes.c_char_p, ctypes.c_int)
307 | ezpp_data.restype = ctypes.c_int
308 |
309 | ezpp_dup = static_lib.ezpp_dup
310 | ezpp_dup.argtypes = (ctypes.c_int, ctypes.c_char_p)
311 | ezpp_dup.restype = ctypes.c_int
312 |
313 | ezpp_data_dup = static_lib.ezpp_data_dup
314 | ezpp_data_dup.argtypes = (ctypes.c_int, ctypes.c_char_p, ctypes.c_int)
315 |
316 | # getting internals
317 |
318 | ezpp_pp = static_lib.ezpp_pp
319 | ezpp_pp.argtypes = (ctypes.c_int,)
320 | ezpp_pp.restype = ctypes.c_float
321 |
322 | ezpp_stars = static_lib.ezpp_stars
323 | ezpp_stars.argtypes = (ctypes.c_int,)
324 | ezpp_stars.restype = ctypes.c_float
325 |
326 | ezpp_mode = static_lib.ezpp_mode
327 | ezpp_mode.argtypes = (ctypes.c_int,)
328 | ezpp_mode.restype = ctypes.c_int
329 |
330 | ezpp_combo = static_lib.ezpp_combo
331 | ezpp_combo.argtypes = (ctypes.c_int,)
332 | ezpp_combo.restype = ctypes.c_int
333 |
334 | ezpp_max_combo = static_lib.ezpp_max_combo
335 | ezpp_max_combo.argtypes = (ctypes.c_int,)
336 | ezpp_max_combo.restype = ctypes.c_int
337 |
338 | ezpp_mods = static_lib.ezpp_mods
339 | ezpp_mods.argtypes = (ctypes.c_int,)
340 | ezpp_mods.restype = ctypes.c_int
341 |
342 | ezpp_score_version = static_lib.ezpp_score_version
343 | ezpp_score_version.argtypes = (ctypes.c_int,)
344 | ezpp_score_version.restype = ctypes.c_int
345 |
346 | ezpp_aim_stars = static_lib.ezpp_aim_stars
347 | ezpp_aim_stars.argtypes = (ctypes.c_int,)
348 | ezpp_aim_stars.restype = ctypes.c_float
349 |
350 | ezpp_speed_stars = static_lib.ezpp_speed_stars
351 | ezpp_speed_stars.argtypes = (ctypes.c_int,)
352 | ezpp_speed_stars.restype = ctypes.c_float
353 |
354 | ezpp_aim_pp = static_lib.ezpp_aim_pp
355 | ezpp_aim_pp.argtypes = (ctypes.c_int,)
356 | ezpp_aim_pp.restype = ctypes.c_float
357 |
358 | ezpp_speed_pp = static_lib.ezpp_speed_pp
359 | ezpp_speed_pp.argtypes = (ctypes.c_int,)
360 | ezpp_speed_pp.restype = ctypes.c_float
361 |
362 | ezpp_acc_pp = static_lib.ezpp_acc_pp
363 | ezpp_acc_pp.argtypes = (ctypes.c_int,)
364 | ezpp_acc_pp.restype = ctypes.c_float
365 |
366 | ezpp_accuracy_percent = static_lib.ezpp_accuracy_percent
367 | ezpp_accuracy_percent.argtypes = (ctypes.c_int,)
368 | ezpp_accuracy_percent.restype = ctypes.c_float
369 |
370 | ezpp_n300 = static_lib.ezpp_n300
371 | ezpp_n300.argtypes = (ctypes.c_int,)
372 | ezpp_n300.restype = ctypes.c_int
373 |
374 | ezpp_n100 = static_lib.ezpp_n100
375 | ezpp_n100.argtypes = (ctypes.c_int,)
376 | ezpp_n100.restype = ctypes.c_int
377 |
378 | ezpp_n50 = static_lib.ezpp_n50
379 | ezpp_n50.argtypes = (ctypes.c_int,)
380 | ezpp_n50.restype = ctypes.c_int
381 |
382 | ezpp_nmiss = static_lib.ezpp_nmiss
383 | ezpp_nmiss.argtypes = (ctypes.c_int,)
384 | ezpp_nmiss.restype = ctypes.c_int
385 |
386 | ezpp_title = static_lib.ezpp_title
387 | ezpp_title.argtypes = (ctypes.c_int,)
388 | ezpp_title.restype = ctypes.c_char_p
389 |
390 | ezpp_title_unicode = static_lib.ezpp_title_unicode
391 | ezpp_title_unicode.argtypes = (ctypes.c_int,)
392 | ezpp_title_unicode.restype = ctypes.c_char_p
393 |
394 | ezpp_artist = static_lib.ezpp_artist
395 | ezpp_artist.argtypes = (ctypes.c_int,)
396 | ezpp_artist.restype = ctypes.c_char_p
397 |
398 | ezpp_artist_unicode = static_lib.ezpp_artist_unicode
399 | ezpp_artist_unicode.argtypes = (ctypes.c_int,)
400 | ezpp_artist_unicode.restype = ctypes.c_char_p
401 |
402 | ezpp_creator = static_lib.ezpp_creator
403 | ezpp_creator.argtypes = (ctypes.c_int,)
404 | ezpp_creator.restype = ctypes.c_char_p
405 |
406 | ezpp_version = static_lib.ezpp_version
407 | ezpp_version.argtypes = (ctypes.c_int,)
408 | ezpp_version.restype = ctypes.c_char_p
409 |
410 | ezpp_ncircles = static_lib.ezpp_ncircles
411 | ezpp_ncircles.argtypes = (ctypes.c_int,)
412 | ezpp_ncircles.restype = ctypes.c_int
413 |
414 | ezpp_nsliders = static_lib.ezpp_nsliders
415 | ezpp_nsliders.argtypes = (ctypes.c_int,)
416 | ezpp_nsliders.restype = ctypes.c_int
417 |
418 | ezpp_nspinners = static_lib.ezpp_nspinners
419 | ezpp_nspinners.argtypes = (ctypes.c_int,)
420 | ezpp_nspinners.restype = ctypes.c_int
421 |
422 | ezpp_nobjects = static_lib.ezpp_nobjects
423 | ezpp_nobjects.argtypes = (ctypes.c_int,)
424 | ezpp_nobjects.restype = ctypes.c_int
425 |
426 | ezpp_ar = static_lib.ezpp_ar
427 | ezpp_ar.argtypes = (ctypes.c_int,)
428 | ezpp_ar.restype = ctypes.c_float
429 |
430 | ezpp_cs = static_lib.ezpp_cs
431 | ezpp_cs.argtypes = (ctypes.c_int,)
432 | ezpp_cs.restype = ctypes.c_float
433 |
434 | ezpp_od = static_lib.ezpp_od
435 | ezpp_od.argtypes = (ctypes.c_int,)
436 | ezpp_od.restype = ctypes.c_float
437 |
438 | ezpp_hp = static_lib.ezpp_hp
439 | ezpp_hp.argtypes = (ctypes.c_int,)
440 | ezpp_hp.restype = ctypes.c_float
441 |
442 | ezpp_odms = static_lib.ezpp_odms
443 | ezpp_odms.argtypes = (ctypes.c_int,)
444 | ezpp_odms.restype = ctypes.c_float
445 |
446 | ezpp_autocalc = static_lib.ezpp_autocalc
447 | ezpp_autocalc.argtypes = (ctypes.c_int,)
448 | ezpp_autocalc.restype = ctypes.c_int
449 |
450 | ezpp_time_at = static_lib.ezpp_time_at
451 | ezpp_time_at.argtypes = (ctypes.c_int, ctypes.c_int)
452 | ezpp_time_at.restype = ctypes.c_float
453 |
454 | ezpp_strain_at = static_lib.ezpp_strain_at
455 | ezpp_strain_at.argtypes = (ctypes.c_int, ctypes.c_int, ctypes.c_int)
456 | ezpp_strain_at.restype = ctypes.c_float
457 |
462 | ezpp_ntiming_points = static_lib.ezpp_ntiming_points
463 | ezpp_ntiming_points.argtypes = (ctypes.c_int,)
464 | ezpp_ntiming_points.restype = ctypes.c_int
465 |
466 | ezpp_timing_time = static_lib.ezpp_timing_time
467 | ezpp_timing_time.argtypes = (ctypes.c_int, ctypes.c_int)
468 | ezpp_timing_time.restype = ctypes.c_float
469 |
470 | ezpp_timing_ms_per_beat = static_lib.ezpp_timing_ms_per_beat
471 | ezpp_timing_ms_per_beat.argtypes = (ctypes.c_int, ctypes.c_int)
472 | ezpp_timing_ms_per_beat.restype = ctypes.c_float
473 |
474 | ezpp_timing_change = static_lib.ezpp_timing_change
475 | ezpp_timing_change.argtypes = (ctypes.c_int, ctypes.c_int)
476 | ezpp_timing_change.restype = ctypes.c_int
477 |
478 | # setting internals
479 |
480 | ezpp_set_aim_stars = static_lib.ezpp_set_aim_stars
481 | ezpp_set_aim_stars.argtypes = (ctypes.c_int, ctypes.c_float)
482 | ezpp_set_aim_stars.restype = ctypes.c_void_p
483 |
484 | ezpp_set_speed_stars = static_lib.ezpp_set_speed_stars
485 | ezpp_set_speed_stars.argtypes = (ctypes.c_int, ctypes.c_float)
486 | ezpp_set_speed_stars.restype = ctypes.c_void_p
487 |
488 | ezpp_set_base_ar = static_lib.ezpp_set_base_ar
489 | ezpp_set_base_ar.argtypes = (ctypes.c_int, ctypes.c_float)
490 | ezpp_set_base_ar.restype = ctypes.c_void_p
491 |
492 | ezpp_set_base_od = static_lib.ezpp_set_base_od
493 | ezpp_set_base_od.argtypes = (ctypes.c_int, ctypes.c_float)
494 | ezpp_set_base_od.restype = ctypes.c_void_p
495 |
496 | ezpp_set_base_hp = static_lib.ezpp_set_base_hp
497 | ezpp_set_base_hp.argtypes = (ctypes.c_int, ctypes.c_float)
498 | ezpp_set_base_hp.restype = ctypes.c_void_p
499 |
500 | ezpp_set_mode = static_lib.ezpp_set_mode
501 | ezpp_set_mode.argtypes = (ctypes.c_int, ctypes.c_int)
502 | ezpp_set_mode.restype = ctypes.c_void_p
503 |
504 | ezpp_set_combo = static_lib.ezpp_set_combo
505 | ezpp_set_combo.argtypes = (ctypes.c_int, ctypes.c_int)
506 | ezpp_set_combo.restype = ctypes.c_void_p
507 |
508 | ezpp_set_score_version = static_lib.ezpp_set_score_version
509 | ezpp_set_score_version.argtypes = (ctypes.c_int, ctypes.c_int)
510 | ezpp_set_score_version.restype = ctypes.c_void_p
511 |
512 | ezpp_set_accuracy_percent = static_lib.ezpp_set_accuracy_percent
513 | ezpp_set_accuracy_percent.argtypes = (ctypes.c_int, ctypes.c_float)
514 | ezpp_set_accuracy_percent.restype = ctypes.c_void_p
515 |
516 | ezpp_set_autocalc = static_lib.ezpp_set_autocalc
517 | ezpp_set_autocalc.argtypes = (ctypes.c_int, ctypes.c_int)
518 | ezpp_set_autocalc.restype = ctypes.c_void_p
519 |
520 | # forces map re-parse for map-changing mods
521 | # (this is an implementation detail of oppai-ng)
522 |
523 | ezpp_set_mods = static_lib.ezpp_set_mods
524 | ezpp_set_mods.argtypes = (ctypes.c_int, ctypes.c_int)
525 | ezpp_set_mods.restype = ctypes.c_void_p
526 |
527 | # forces map re-parse
528 |
529 | ezpp_set_base_cs = static_lib.ezpp_set_base_cs
530 | ezpp_set_base_cs.argtypes = (ctypes.c_int, ctypes.c_float)
531 | ezpp_set_base_cs.restype = ctypes.c_void_p
532 |
533 | ezpp_set_mode_override = static_lib.ezpp_set_mode_override
534 | ezpp_set_mode_override.argtypes = (ctypes.c_int, ctypes.c_int)
535 | ezpp_set_mode_override.restype = ctypes.c_void_p
536 |
537 | # forces map re-parse & clobbers acc
538 |
539 | ezpp_set_nmiss = static_lib.ezpp_set_nmiss
540 | ezpp_set_nmiss.argtypes = (ctypes.c_int, ctypes.c_int)
541 | ezpp_set_nmiss.restype = ctypes.c_void_p
542 |
543 | ezpp_set_end = static_lib.ezpp_set_end
544 | ezpp_set_end.argtypes = (ctypes.c_int, ctypes.c_int)
545 | ezpp_set_end.restype = ctypes.c_void_p
546 |
547 | ezpp_set_end_time = static_lib.ezpp_set_end_time
548 | ezpp_set_end_time.argtypes = (ctypes.c_int, ctypes.c_float)
549 | ezpp_set_end_time.restype = ctypes.c_void_p
550 |
551 | ezpp_set_accuracy = static_lib.ezpp_set_accuracy
552 | ezpp_set_accuracy.argtypes = (ctypes.c_int, ctypes.c_int, ctypes.c_int)
553 | ezpp_set_accuracy.restype = ctypes.c_void_p
554 |
555 | return static_lib
556 |
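Usage sketch (editor's illustration, not part of the repository): one way the wrapper above might be driven as a context manager. The shared-library and beatmap paths below are placeholders.

    # assumes liboppai has been compiled to a shared object at this path
    with OppaiWrapper("./liboppai.so") as calc:
        calc.configure(mode=0, mods=64, combo=727, nmiss=1, acc=98.5)
        calc.calculate("/path/to/map.osu")  # anything str() can render is accepted
        pp, stars = calc.get_pp(), calc.get_sr()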
--------------------------------------------------------------------------------
/ussr/app/objects/path.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import glob
4 | import os
5 | from typing import Union
6 |
7 |
8 | class Path:
9 | def __init__(self, file_path: str) -> None:
10 | self._path = file_path
11 |
12 | @staticmethod
13 | def cwd() -> Path:
14 | return Path(os.getcwd())
15 |
16 | def __str__(self) -> str:
17 | return self._path
18 |
19 | def __truediv__(self, other_path: Union[str, Path]) -> Path:
20 | if isinstance(other_path, str):
21 | return Path(os.path.join(self._path, other_path))
22 |
23 |         # joining two Path objects: combine their underlying string paths
24 | return Path(os.path.join(self._path, other_path._path))
25 |
26 | def exists(self) -> bool:
27 | return os.path.exists(self._path)
28 |
29 | def read_bytes(self) -> bytes:
30 | if not self.exists():
31 | raise FileNotFoundError
32 |
33 | with open(self._path, "rb") as f:
34 | file_bytes = f.read()
35 |
36 | return file_bytes
37 |
38 | def read_text(self) -> str:
39 | if not self.exists():
40 | raise FileNotFoundError
41 |
42 | with open(self._path) as f:
43 | file_contents = f.read()
44 |
45 | return file_contents
46 |
47 | def write_bytes(self, content: bytes) -> None:
48 | with open(self._path, "wb") as f:
49 | f.write(content)
50 |
51 | def write_text(self, content: str) -> None:
52 | with open(self._path, "w") as f:
53 | f.write(content)
54 |
55 | def mkdir(
56 | self,
57 | mode: int = 0o777,
58 | parents: bool = False,
59 | exist_ok: bool = False,
60 | ) -> None:
61 | if self.exists():
62 | if not exist_ok:
63 | raise FileExistsError
64 |
65 | return
66 |
67 | if parents:
68 | os.makedirs(self._path, mode)
69 | else:
70 | os.mkdir(self._path, mode)
71 |
72 | def glob(self, pattern: str, recursive: bool = False) -> list[Path]:
73 | file_paths = glob.glob(os.path.join(self._path, pattern), recursive=recursive)
74 | if not file_paths:
75 | return []
76 |
77 | return [Path(file_path) for file_path in file_paths]
78 |
79 | def rglob(self, pattern: str) -> list[Path]:
80 | return self.glob(f"**/{pattern}", recursive=True)
81 |
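Usage sketch (editor's illustration): how this minimal Path class might be used; the directory and file names are placeholders.

    replay_dir = Path.cwd() / "data" / "replays"
    replay_dir.mkdir(parents=True, exist_ok=True)
    (replay_dir / "1.osr").write_bytes(b"...")  # placeholder content
    replays = replay_dir.rglob("*.osr")         # -> list[Path]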
--------------------------------------------------------------------------------
/ussr/app/redis.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | from typing import Awaitable
5 | from typing import Callable
6 | from typing import TypedDict
7 |
8 | import aioredis.client
9 | import app.state
10 | import app.usecases
11 | import logger
12 | import orjson
13 | from app.constants.mode import Mode
14 | from app.constants.ranked_status import RankedStatus
15 |
16 | PUBSUB_HANDLER = Callable[[str], Awaitable[None]]
17 |
18 |
19 | def register_pubsub(channel: str):
20 | def decorator(handler: PUBSUB_HANDLER):
21 | app.state.PUBSUBS[channel] = handler
22 |         return handler
23 | return decorator
24 |
25 |
26 | class UsernameChange(TypedDict):
27 | userID: str
28 |
29 |
30 | @register_pubsub("ussr:refresh_bmap")
31 | async def handle_beatmap_status_change(payload: str) -> None:
32 | """Pubsub to handle beatmap status changes
33 |
34 | This pubsub should be called when a beatmap's status updates
35 | so that the cache can accordingly refresh.
36 |
37 | It should be published with the payload being the beatmap's md5.
38 | """
39 |
40 | cached_beatmap = app.usecases.beatmap.md5_from_cache(payload)
41 | if not cached_beatmap:
42 | return
43 |
44 | new_beatmap = await app.usecases.beatmap.md5_from_database(payload)
45 | if new_beatmap is None:
46 | return
47 |
48 | if new_beatmap.status != cached_beatmap.status:
49 | # map's status changed, reflect it
50 | cached_beatmap.status = new_beatmap.status
51 |
52 | # reflect changes in the cache
53 | app.usecases.beatmap.MD5_CACHE[cached_beatmap.md5] = cached_beatmap
54 | app.usecases.beatmap.ID_CACHE[cached_beatmap.id] = cached_beatmap
55 | app.usecases.beatmap.add_to_set_cache(cached_beatmap)
56 |
57 | logger.info(f"Updated {cached_beatmap.song_name} in cache!")
58 |
59 |
60 | @register_pubsub("ussr:recalculate_user")
61 | async def handle_user_recalculate(payload: str) -> None:
62 | user_id = int(payload)
63 |
64 | for mode in Mode:
65 | stats = await app.usecases.stats.fetch(user_id, mode)
66 | if stats is None:
67 | logger.warning(
68 | f"Attempted to recalculate stats for user {user_id} but they don't exist!",
69 | )
70 | return
71 | await app.usecases.stats.full_recalc(stats)
72 | await app.usecases.stats.update_rank(stats)
73 | await app.usecases.stats.save(stats)
74 |
75 | logger.info(f"Recalculated user ID {user_id}")
76 |
77 |
78 | @register_pubsub("ussr:recalculate_user_full")
79 | async def handle_user_recalculate_full(payload: str) -> None:
80 | user_id = int(payload)
81 |
82 | for mode in Mode:
83 | stats = await app.usecases.stats.fetch(user_id, mode)
84 | if stats is None:
85 | logger.warning(
86 | f"Attempted to recalculate stats for user {user_id} but they don't exist!",
87 | )
88 | return
89 | await app.usecases.stats.full_recalc(stats)
90 | await app.usecases.stats.update_rank(stats)
91 | await app.usecases.stats.calc_playcount(stats)
92 | await app.usecases.stats.calc_max_combo(stats)
93 | await app.usecases.stats.calc_total_score(stats)
94 | await app.usecases.stats.calc_ranked_score(stats)
95 | await app.usecases.stats.save(stats)
96 |
97 | logger.info(f"Recalculated user ID {user_id}")
98 |
99 |
100 | class RedisMessage(TypedDict):
101 | channel: bytes
102 | data: bytes
103 |
104 |
105 | async def loop_pubsubs(pubsub: aioredis.client.PubSub) -> None:
106 | while True:
107 | try:
108 | message: RedisMessage = await pubsub.get_message(
109 | ignore_subscribe_messages=True,
110 | timeout=1.0,
111 | )
112 | if message is not None:
113 | if handler := app.state.PUBSUBS.get(message["channel"].decode()):
114 | await handler(message["data"].decode())
115 |
116 | await asyncio.sleep(0.01)
117 | except asyncio.TimeoutError:
118 | pass
119 |
120 |
121 | async def initialise_pubsubs() -> None:
122 | pubsub = app.state.services.redis.pubsub()
123 |     await pubsub.subscribe(*app.state.PUBSUBS.keys())
124 |
125 | pubsub_loop = asyncio.create_task(loop_pubsubs(pubsub))
126 | app.state.tasks.add(pubsub_loop)
127 |
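Usage sketch (editor's illustration): the registered channels are plain Redis pub/sub channels, so a refresh can presumably be triggered from redis-cli or, from within an async context in the server, via the shared client; `beatmap_md5` is a placeholder.

    # redis-cli:  PUBLISH ussr:refresh_bmap <beatmap_md5>
    await app.state.services.redis.publish("ussr:refresh_bmap", beatmap_md5)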
--------------------------------------------------------------------------------
/ussr/app/state/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | from typing import Awaitable
5 | from typing import Callable
6 | 
7 | import logger
8 | 
9 | from . import cache
10 | from . import services
11 | 
12 | PUBSUB_HANDLER = Callable[[str], Awaitable[None]]
13 | 
14 | PUBSUBS: dict[str, PUBSUB_HANDLER] = {}
15 | 
16 | tasks: set[asyncio.Task] = set()
17 |
18 |
19 | async def cancel_tasks() -> None:
20 | logger.info(f"Cancelling {len(tasks)} tasks.")
21 |
22 | for task in tasks:
23 | task.cancel()
24 |
25 | await asyncio.gather(*tasks, return_exceptions=True)
26 |
27 | loop = asyncio.get_running_loop()
28 | for task in tasks:
29 | if not task.cancelled():
30 | if exception := task.exception():
31 | loop.call_exception_handler(
32 | {
33 | "message": "unhandled exception during loop shutdown",
34 | "exception": exception,
35 | "task": task,
36 | },
37 | )
38 |
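Usage sketch (editor's illustration): how a background task might be tracked so that cancel_tasks() can tear it down on shutdown; `some_coroutine` is a placeholder name.

    task = asyncio.create_task(some_coroutine())
    app.state.tasks.add(task)       # tracked for shutdown
    # later, e.g. from a shutdown hook:
    await app.state.cancel_tasks()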
--------------------------------------------------------------------------------
/ussr/app/state/cache.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state
4 | from app.models.achievement import Achievement
5 |
6 | UNSUBMITTED: set[str] = set()
7 | REQUIRES_UPDATE: set[str] = set()
8 | ACHIEVEMENTS: list[Achievement] = []
9 |
10 |
11 | async def init_cache() -> None:
12 | db_achievements = await app.state.services.database.fetch_all(
13 | "SELECT * FROM ussr_achievements",
14 | )
15 |
16 | for achievement in db_achievements:
17 | condition = eval(f"lambda score, mode_vn, stats: {achievement['cond']}")
18 | ACHIEVEMENTS.append(
19 | Achievement(
20 | id=achievement["id"],
21 | file=achievement["file"],
22 | name=achievement["name"],
23 | desc=achievement["desc"],
24 | cond=condition,
25 | ),
26 | )
27 |
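Editor's note with a hypothetical example: the `cond` column holds a Python expression which is compiled into a lambda over (score, mode_vn, stats). The expression below is illustrative only; the real conditions live in the ussr_achievements table.

    cond = "mode_vn == 0 and score.full_combo"
    condition = eval(f"lambda score, mode_vn, stats: {cond}")
    unlocked = condition(score, score.mode.as_vn, stats)  # -> bool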
--------------------------------------------------------------------------------
/ussr/app/state/services.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from urllib.parse import quote
4 |
5 | import aiohttp
6 | import aioredis
7 | import databases
8 | import meilisearch_python_async
9 | import settings
10 |
11 | from .storage import AbstractStorage
12 | from .storage import LocalStorage
13 | from .storage import S3Storage
14 |
15 | redis: aioredis.Redis = aioredis.from_url(
16 | f"redis://:{settings.REDIS_PASSWORD}@{settings.REDIS_HOST}:{settings.REDIS_PORT}/{settings.REDIS_DB}",
17 | )
18 |
19 | url = databases.DatabaseURL(
20 | "mysql+asyncmy://{username}:{password}@{host}:{port}/{db}".format(
21 | username=settings.MYSQL_USER,
22 | password=quote(settings.MYSQL_PASSWORD),
23 | host=settings.MYSQL_HOST,
24 | port=settings.MYSQL_PORT,
25 | db=settings.MYSQL_DATABASE,
26 | ),
27 | )
28 | database: databases.Database = databases.Database(url)
29 | meili = meilisearch_python_async.Client(
30 | url=settings.MEILI_URL,
31 | api_key=settings.MEILI_KEY,
32 | )
33 | replay_storage: AbstractStorage
34 |
35 | if settings.S3_ENABLED:
36 | replay_storage = S3Storage(
37 | settings.S3_REGION,
38 | settings.S3_ENDPOINT,
39 | settings.S3_ACCESS_KEY,
40 | settings.S3_SECRET_KEY,
41 | settings.S3_BUCKET,
42 | retries=10,
43 | timeout=5,
44 | )
45 | else:
46 | replay_storage = LocalStorage(settings.DATA_REPLAY_DIRECTORY)
47 |
48 | http: aiohttp.ClientSession
49 |
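The `http` session is only declared above; a sketch (editor's illustration) of how it is presumably assigned during application startup — the hook name is an assumption, see init_api.py for the actual wiring.

    import aiohttp
    import app.state.services

    async def on_startup() -> None:
        app.state.services.http = aiohttp.ClientSession()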
--------------------------------------------------------------------------------
/ussr/app/state/storage.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import os
5 | from abc import ABC
6 | from abc import abstractmethod
7 |
8 | import logger
9 | from aiobotocore.config import AioConfig
10 | from aiobotocore.session import get_session
11 | from types_aiobotocore_s3 import S3Client
12 |
13 |
14 | class AbstractStorage(ABC):
15 | @abstractmethod
16 | async def load(self, key: str) -> bytes | None:
17 | """Loads a binary file from long-term storage."""
18 | ...
19 |
20 | @abstractmethod
21 | async def save(self, key: str, data: bytes) -> None:
22 | """Saves a binary file to long-term storage. It is not guaranteed
23 | that the file will be available immediately after this method."""
24 | ...
25 |
26 |
27 | class LocalStorage(AbstractStorage):
28 | def __init__(self, root: str) -> None:
29 | self._root = root
30 |
31 | def __ensure_subdirectories(self, key: str) -> None:
32 | if "/" not in key:
33 | return
34 |
35 | directory = os.path.dirname(f"{self._root}/{key}")
36 | os.makedirs(directory, exist_ok=True)
37 |
38 | async def load(self, key: str) -> bytes | None:
39 | location = f"{self._root}/{key}"
40 | if not os.path.exists(location):
41 | return None
42 |
43 | with open(location, "rb") as file:
44 | return file.read()
45 |
46 | async def save(self, key: str, data: bytes) -> None:
47 | self.__ensure_subdirectories(key)
48 | location = f"{self._root}/{key}"
49 |
50 | with open(location, "wb") as file:
51 | file.write(data)
52 |
53 |
54 | class S3Storage(AbstractStorage):
55 | def __init__(
56 | self,
57 | region: str,
58 | endpoint: str,
59 | access_key: str,
60 | secret_key: str,
61 | bucket: str,
62 | timeout: int,
63 | retries: int,
64 | ) -> None:
65 |         # NOTE: botocore's Config takes connect_timeout/read_timeout
66 |         #       rather than a single `timeout` argument.
67 |         boto_config = AioConfig(connect_timeout=timeout, read_timeout=timeout)
68 | self._s3_creator = get_session().create_client(
69 | "s3",
70 | region_name=region,
71 | endpoint_url=endpoint,
72 | aws_access_key_id=access_key,
73 | aws_secret_access_key=secret_key,
74 | config=boto_config,
75 | )
76 | self._s3 = None
77 | self._bucket = bucket
78 | self._retries = retries
79 |
80 | async def connect(self) -> None:
81 | self._s3 = await self._s3_creator.__aenter__()
82 |
83 | async def disconnect(self) -> None:
84 | await self._s3_creator.__aexit__(None, None, None)
85 | self._s3 = None
86 |
87 | async def __save_file(self, key: str, data: bytes) -> None:
88 | assert self._s3 is not None
89 |
90 | await self._s3.put_object(
91 | Bucket=self._bucket,
92 | Key=key,
93 | Body=data,
94 | )
95 |
96 | async def __save(self, key: str, data: bytes) -> None:
97 | for i in range(self._retries):
98 | try:
99 | await self.__save_file(key, data)
100 | return
101 | except Exception as e:
102 | sleep_time = i * 2
103 | logger.error(str(e))
104 | logger.warning(
105 | f"Failed to save {key} to S3. Retrying in {sleep_time}s...",
106 | )
107 | await asyncio.sleep(sleep_time)
108 |
109 | logger.error(
110 | f"Failed to save {key} to S3 after {self._retries} retries.",
111 | )
112 |
113 | async def save(self, key: str, data: bytes) -> None:
114 | if self._s3 is None:
115 | raise RuntimeError("The S3 client has not been connected!")
116 |
117 |         asyncio.create_task(self.__save(key, data))  # fire-and-forget; __save retries internally
118 |
119 | async def load(self, key: str) -> bytes | None:
120 | if self._s3 is None:
121 | raise RuntimeError("The S3 client has not been connected!")
122 |
123 | try:
124 | response = await self._s3.get_object(
125 | Bucket=self._bucket,
126 | Key=key,
127 | )
128 | except self._s3.exceptions.NoSuchKey:
129 | return None
130 |
131 | return await response["Body"].read()
132 |
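Usage sketch (editor's illustration): both backends share the AbstractStorage interface and are awaited from within an async context; keys and contents below are placeholders, and the S3 backend additionally needs an explicit connect() before use.

    storage: AbstractStorage = LocalStorage("/path/to/replays")
    await storage.save("replays/1.osr", b"...")
    data = await storage.load("replays/1.osr")  # -> bytes | None

    # S3 variant (credentials are placeholders):
    # s3 = S3Storage(region, endpoint, access_key, secret_key, bucket, timeout=5, retries=10)
    # await s3.connect()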
--------------------------------------------------------------------------------
/ussr/app/usecases/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from . import beatmap
4 | from . import clans
5 | from . import countries
6 | from . import discord
7 | from . import leaderboards
8 | from . import password
9 | from . import performance
10 | from . import pp_cap
11 | from . import privileges
12 | from . import score
13 | from . import stats
14 | from . import user
15 | from . import usernames
16 | from . import verified
17 | from . import whitelist
18 |
--------------------------------------------------------------------------------
/ussr/app/usecases/beatmap.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import random
5 | import time
6 | from typing import Any
7 | from typing import Optional
8 |
9 | import app.state
10 | import settings
11 | from app.constants.mode import Mode
12 | from app.constants.ranked_status import RankedStatus
13 | from app.models.beatmap import Beatmap
14 |
15 | MD5_CACHE: dict[str, Beatmap] = {}
16 | ID_CACHE: dict[int, Beatmap] = {}
17 | SET_CACHE: dict[int, list[Beatmap]] = {}
18 |
19 |
20 | async def update_beatmap(beatmap: Beatmap) -> Optional[Beatmap]:
21 | if not beatmap.deserves_update:
22 | return beatmap
23 |
24 | new_beatmap = await id_from_api(beatmap.id)
25 | if new_beatmap:
26 | # handle deleting the old beatmap etc.
27 |
28 | if new_beatmap.md5 != beatmap.md5:
29 | # delete any instances of the old map
30 | MD5_CACHE.pop(beatmap.md5, None)
31 |
32 | asyncio.create_task(
33 | app.state.services.database.execute(
34 | "DELETE FROM beatmaps WHERE beatmap_md5 = :old_md5",
35 | {"old_md5": beatmap.md5},
36 | ),
37 | )
38 |
39 | if beatmap.frozen:
40 | # if the previous version is status frozen, we should force the old status on the new version
41 | new_beatmap.status = beatmap.status
42 | else:
43 | # it's now unsubmitted!
44 | asyncio.create_task(
45 | app.state.services.database.execute(
46 | "DELETE FROM beatmaps WHERE beatmap_md5 = :old_md5",
47 | {"old_md5": beatmap.md5},
48 | ),
49 | )
50 |
51 | return None
52 |
53 |     # refresh the metadata timestamp on the new version
54 | new_beatmap.last_update = int(time.time())
55 |
56 |     asyncio.create_task(save(new_beatmap))  # persist the refreshed map in the background
57 | MD5_CACHE[new_beatmap.md5] = new_beatmap
58 | ID_CACHE[new_beatmap.id] = new_beatmap
59 |
60 | return new_beatmap
61 |
62 |
63 | async def fetch_by_md5(md5: str) -> Optional[Beatmap]:
64 | if beatmap := md5_from_cache(md5):
65 | return beatmap
66 |
67 | if beatmap := await md5_from_database(md5):
68 | MD5_CACHE[md5] = beatmap
69 | ID_CACHE[beatmap.id] = beatmap
70 |
71 | return beatmap
72 |
73 | if beatmap := await md5_from_api(md5):
74 | MD5_CACHE[md5] = beatmap
75 | ID_CACHE[beatmap.id] = beatmap
76 |
77 | return beatmap
78 |
79 |
80 | async def fetch_by_id(id: int) -> Optional[Beatmap]:
81 | if beatmap := id_from_cache(id):
82 | return beatmap
83 |
84 | if beatmap := await id_from_database(id):
85 | MD5_CACHE[beatmap.md5] = beatmap
86 | ID_CACHE[beatmap.id] = beatmap
87 |
88 | return beatmap
89 |
90 | if beatmap := await id_from_api(id):
91 | MD5_CACHE[beatmap.md5] = beatmap
92 | ID_CACHE[beatmap.id] = beatmap
93 |
94 | return beatmap
95 |
96 |
97 | async def fetch_by_set_id(set_id: int) -> Optional[list[Beatmap]]:
98 | if beatmaps := set_from_cache(set_id):
99 | return beatmaps
100 |
101 | if beatmaps := await set_from_database(set_id):
102 | for beatmap in beatmaps:
103 | MD5_CACHE[beatmap.md5] = beatmap
104 | ID_CACHE[beatmap.id] = beatmap
105 |
106 | add_to_set_cache(beatmap)
107 |
108 | return beatmaps
109 |
110 | if beatmaps := await set_from_api(set_id):
111 | for beatmap in beatmaps:
112 | MD5_CACHE[beatmap.md5] = beatmap
113 | ID_CACHE[beatmap.id] = beatmap
114 |
115 | add_to_set_cache(beatmap)
116 |
117 | return beatmaps
118 |
119 |
120 | def add_to_set_cache(beatmap: Beatmap) -> None:
121 | if set_list := SET_CACHE.get(beatmap.set_id):
122 | for _map in set_list:
123 | if _map.id == beatmap.id or _map.md5 == beatmap.md5:
124 | set_list.remove(_map)
125 |
126 | set_list.append(beatmap)
127 | else:
128 | SET_CACHE[beatmap.set_id] = [beatmap]
129 |
130 |
131 | def set_from_cache(set_id: int) -> Optional[list[Beatmap]]:
132 | return SET_CACHE.get(set_id)
133 |
134 |
135 | def md5_from_cache(md5: str) -> Optional[Beatmap]:
136 | return MD5_CACHE.get(md5)
137 |
138 |
139 | def id_from_cache(id: int) -> Optional[Beatmap]:
140 | return ID_CACHE.get(id)
141 |
142 |
143 | async def md5_from_database(md5: str) -> Optional[Beatmap]:
144 | db_result = await app.state.services.database.fetch_one(
145 | "SELECT * FROM beatmaps WHERE beatmap_md5 = :md5",
146 | {"md5": md5},
147 | )
148 |
149 | if not db_result:
150 | return None
151 |
152 | return Beatmap.from_dict(db_result)
153 |
154 |
155 | async def id_from_database(id: int) -> Optional[Beatmap]:
156 | db_result = await app.state.services.database.fetch_one(
157 | "SELECT * FROM beatmaps WHERE beatmap_id = :id",
158 | {"id": id},
159 | )
160 |
161 | if not db_result:
162 | return None
163 |
164 | return Beatmap.from_dict(db_result)
165 |
166 |
167 | async def set_from_database(set_id: int) -> Optional[list[Beatmap]]:
168 | db_results = await app.state.services.database.fetch_all(
169 | "SELECT * FROM beatmaps WHERE beatmapset_id = :id",
170 | {"id": set_id},
171 | )
172 |
173 | if not db_results:
174 | return None
175 |
176 | return [Beatmap.from_dict(db_result) for db_result in db_results]
177 |
178 |
179 | async def save(beatmap: Beatmap) -> None:
180 | await app.state.services.database.execute(
181 | (
182 | "REPLACE INTO beatmaps (beatmap_id, beatmapset_id, beatmap_md5, song_name, ar, od, mode, rating, "
183 | "difficulty_std, difficulty_taiko, difficulty_ctb, difficulty_mania, max_combo, hit_length, bpm, playcount, "
184 | "passcount, ranked, latest_update, ranked_status_freezed, file_name) VALUES (:beatmap_id, :beatmapset_id, :beatmap_md5, :song_name, "
185 | ":ar, :od, :mode, :rating, :difficulty_std, :difficulty_taiko, :difficulty_ctb, :difficulty_mania, :max_combo, :hit_length, :bpm, "
186 | ":playcount, :passcount, :ranked, :latest_update, :ranked_status_freezed, :file_name)"
187 | ),
188 | beatmap.db_dict,
189 | )
190 |
191 |
192 | GET_BEATMAP_URL = "https://old.ppy.sh/api/get_beatmaps"
193 | GET_BEATMAP_FALLBACK_URL = settings.API_FALLBACK_URL + "/get_beatmaps"
194 |
195 |
196 | async def _make_get_beatmaps_request(args: dict[str, Any]) -> Optional[list[Beatmap]]:
197 | url = GET_BEATMAP_FALLBACK_URL
198 | if settings.API_KEYS_POOL:
199 | url = GET_BEATMAP_URL
200 | args["k"] = random.choice(settings.API_KEYS_POOL)
201 |
202 | async with app.state.services.http.get(
203 | url,
204 | params=args,
205 | ) as response:
206 | if not response or response.status != 200:
207 | return None
208 |
209 | response_json = await response.json()
210 | if not response_json:
211 | return None
212 |
213 | return parse_from_osu_api(response_json)
214 |
215 |
216 | async def md5_from_api(md5: str) -> Optional[Beatmap]:
217 | beatmaps = await _make_get_beatmaps_request(
218 | {"h": md5},
219 | )
220 |
221 | if beatmaps is None:
222 | return None
223 |
224 | for beatmap in beatmaps:
225 | asyncio.create_task(save(beatmap))
226 | add_to_set_cache(beatmap)
227 |
228 | for beatmap in beatmaps:
229 | if beatmap.md5 == md5:
230 | return beatmap
231 |
232 |
233 | async def id_from_api(id: int, should_save: bool = True) -> Optional[Beatmap]:
234 | beatmaps = await _make_get_beatmaps_request(
235 | {"b": id},
236 | )
237 |
238 | if beatmaps is None:
239 | return None
240 |
241 | if should_save:
242 | for beatmap in beatmaps:
243 | asyncio.create_task(save(beatmap))
244 | add_to_set_cache(beatmap)
245 |
246 | for beatmap in beatmaps:
247 | if beatmap.id == id:
248 | return beatmap
249 |
250 |
251 | async def set_from_api(id: int) -> Optional[list[Beatmap]]:
252 | beatmaps = await _make_get_beatmaps_request(
253 | {"s": id},
254 | )
255 |
256 | if beatmaps is None:
257 | return None
258 |
259 | for beatmap in beatmaps:
260 | asyncio.create_task(save(beatmap))
261 | add_to_set_cache(beatmap)
262 |
263 | return beatmaps
264 |
265 |
266 | IGNORED_BEATMAP_CHARS = dict.fromkeys(map(ord, r':\/*<>?"|'), None)
267 |
268 | FROZEN_STATUSES = (RankedStatus.RANKED, RankedStatus.APPROVED, RankedStatus.LOVED)
269 |
270 |
271 | def parse_from_osu_api(
272 | response_json_list: list[dict],
273 | frozen: bool = False,
274 | ) -> list[Beatmap]:
275 | maps = []
276 |
277 | for response_json in response_json_list:
278 | md5 = response_json["file_md5"]
279 | id = int(response_json["beatmap_id"])
280 | set_id = int(response_json["beatmapset_id"])
281 |
282 | filename = ("{artist} - {title} ({creator}) [{version}].osu").format(
283 | **response_json,
284 | )
285 |
286 | song_name = (
287 | ("{artist} - {title} [{version}]")
288 | .format(**response_json)
289 | .translate(IGNORED_BEATMAP_CHARS)
290 | )
291 |
292 | hit_length = int(response_json["hit_length"])
293 |
294 | if _max_combo := response_json.get("max_combo"):
295 | max_combo = int(_max_combo)
296 | else:
297 | max_combo = 0
298 |
299 | ranked_status = RankedStatus.from_osu_api(int(response_json["approved"]))
300 | if ranked_status in FROZEN_STATUSES:
301 | frozen = True # beatmaps are always frozen when ranked/approved/loved
302 |
303 | mode = Mode(int(response_json["mode"]))
304 |
305 | if _bpm := response_json.get("bpm"):
306 | bpm = round(float(_bpm))
307 | else:
308 | bpm = 0
309 |
310 | od = float(response_json["diff_overall"])
311 | ar = float(response_json["diff_approach"])
312 |
313 | maps.append(
314 | Beatmap(
315 | md5=md5,
316 | id=id,
317 | set_id=set_id,
318 | song_name=song_name,
319 | status=ranked_status,
320 | plays=0,
321 | passes=0,
322 | mode=mode,
323 | od=od,
324 | ar=ar,
325 | difficulty_std=0.0,
326 | difficulty_taiko=0.0,
327 | difficulty_ctb=0.0,
328 | difficulty_mania=0.0,
329 | hit_length=hit_length,
330 | last_update=int(time.time()),
331 | max_combo=max_combo,
332 | bpm=bpm,
333 | filename=filename,
334 | frozen=frozen,
335 | rating=10.0,
336 | ),
337 | )
338 |
339 | return maps
340 |
341 |
342 | async def increment_playcount(beatmap: Beatmap, passcount: bool = True) -> None:
343 | beatmap.plays += 1
344 | if passcount:
345 | beatmap.passes += 1
346 |
347 | await app.state.services.database.execute(
348 | "UPDATE beatmaps SET passcount = :pass, playcount = :play WHERE beatmap_md5 = :md5",
349 | {"play": beatmap.plays, "pass": beatmap.passes, "md5": beatmap.md5},
350 | )
351 |
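Usage sketch (editor's illustration): callers go through the fetch_* helpers, which fall back from cache to database to the osu! API; `some_md5` is a placeholder and the calls run inside an async context.

    beatmap = await app.usecases.beatmap.fetch_by_md5(some_md5)
    if beatmap is not None:
        await app.usecases.beatmap.increment_playcount(beatmap, passcount=False)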
--------------------------------------------------------------------------------
/ussr/app/usecases/clans.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 |
5 | import app.state
6 | import logger
7 |
8 |
9 | async def fetch(user_id: int) -> str:
10 | clan_tag = await app.state.services.database.fetch_val(
11 | "SELECT tag FROM user_clans LEFT JOIN clans ON user_clans.clan = clans.id WHERE user = :id",
12 | {"id": user_id},
13 | )
14 |
15 | if not clan_tag:
16 | return ""
17 | return clan_tag
18 |
--------------------------------------------------------------------------------
/ussr/app/usecases/countries.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state
4 |
5 |
6 | async def get_country(user_id: int) -> str:
7 | country = await app.state.services.database.fetch_val(
8 | "SELECT country FROM users WHERE id = :id",
9 | {"id": user_id},
10 | )
11 |
12 | if not country:
13 | return "XX" # xd
14 |
15 | return country
16 |
--------------------------------------------------------------------------------
/ussr/app/usecases/discord.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import traceback
5 | from typing import Optional
6 |
7 | import app.state
8 | import logger
9 | import settings
10 | from app.models.beatmap import Beatmap
11 | from app.models.score import Score
12 | from app.models.stats import Stats
13 | from app.models.user import User
14 |
15 |
16 | # This portion is based off cmyui's discord hooks code
17 | # https://github.com/cmyui/cmyui_pkg/blob/master/cmyui/discord/webhook.py
18 | class Footer:
19 | def __init__(self, text: str, **kwargs) -> None:
20 | self.text = text
21 | self.icon_url = kwargs.get("icon_url")
22 | self.proxy_icon_url = kwargs.get("proxy_icon_url")
23 |
24 |
25 | class Image:
26 | def __init__(self, **kwargs) -> None:
27 | self.url = kwargs.get("url")
28 | self.proxy_url = kwargs.get("proxy_url")
29 | self.height = kwargs.get("height")
30 | self.width = kwargs.get("width")
31 |
32 |
33 | class Thumbnail:
34 | def __init__(self, **kwargs) -> None:
35 | self.url = kwargs.get("url")
36 | self.proxy_url = kwargs.get("proxy_url")
37 | self.height = kwargs.get("height")
38 | self.width = kwargs.get("width")
39 |
40 |
41 | class Video:
42 | def __init__(self, **kwargs) -> None:
43 | self.url = kwargs.get("url")
44 | self.height = kwargs.get("height")
45 | self.width = kwargs.get("width")
46 |
47 |
48 | class Provider:
49 | def __init__(self, **kwargs) -> None:
50 | self.url = kwargs.get("url")
51 | self.name = kwargs.get("name")
52 |
53 |
54 | class Author:
55 | def __init__(self, **kwargs) -> None:
56 | self.name = kwargs.get("name")
57 | self.url = kwargs.get("url")
58 | self.icon_url = kwargs.get("icon_url")
59 | self.proxy_icon_url = kwargs.get("proxy_icon_url")
60 |
61 |
62 | class Field:
63 | def __init__(self, name: str, value: str, inline: bool = False) -> None:
64 | self.name = name
65 | self.value = value
66 | self.inline = inline
67 |
68 |
69 | class Embed:
70 | def __init__(self, **kwargs) -> None:
71 | self.title = kwargs.get("title")
72 | self.type = kwargs.get("type")
73 | self.description = kwargs.get("description")
74 | self.url = kwargs.get("url")
75 | self.timestamp = kwargs.get("timestamp") # datetime
76 | self.color = kwargs.get("color", 0x000000)
77 |
78 | self.footer: Optional[Footer] = kwargs.get("footer")
79 | self.image: Optional[Image] = kwargs.get("image")
80 | self.thumbnail: Optional[Thumbnail] = kwargs.get("thumbnail")
81 | self.video: Optional[Video] = kwargs.get("video")
82 | self.provider: Optional[Provider] = kwargs.get("provider")
83 | self.author: Optional[Author] = kwargs.get("author")
84 |
85 | self.fields: list[Field] = kwargs.get("fields", [])
86 |
87 | def set_footer(self, **kwargs) -> None:
88 | self.footer = Footer(**kwargs)
89 |
90 | def set_image(self, **kwargs) -> None:
91 | self.image = Image(**kwargs)
92 |
93 | def set_thumbnail(self, **kwargs) -> None:
94 | self.thumbnail = Thumbnail(**kwargs)
95 |
96 | def set_video(self, **kwargs) -> None:
97 | self.video = Video(**kwargs)
98 |
99 | def set_provider(self, **kwargs) -> None:
100 | self.provider = Provider(**kwargs)
101 |
102 | def set_author(self, **kwargs) -> None:
103 | self.author = Author(**kwargs)
104 |
105 | def add_field(self, name: str, value: str, inline: bool = False) -> None:
106 | self.fields.append(Field(name, value, inline))
107 |
108 |
109 | class Webhook:
110 | """A class to represent a single-use Discord webhook."""
111 |
112 | __slots__ = ("url", "content", "username", "avatar_url", "tts", "file", "embeds")
113 |
114 | def __init__(self, url: str, **kwargs) -> None:
115 | self.url = url
116 | self.content = kwargs.get("content")
117 | self.username = kwargs.get("username")
118 | self.avatar_url = kwargs.get("avatar_url")
119 | self.tts = kwargs.get("tts")
120 | self.file = kwargs.get("file")
121 | self.embeds: list[Embed] = kwargs.get("embeds", [])
122 |
123 | def add_embed(self, embed: Embed) -> None:
124 | self.embeds.append(embed)
125 |
126 | @property
127 | def json(self):
128 | if not any([self.content, self.file, self.embeds]):
129 | raise Exception(
130 | "Webhook must contain atleast one " "of (content, file, embeds).",
131 | )
132 |
133 | if self.content and len(self.content) > 2000:
134 |             raise Exception("Webhook content must be under 2000 characters.")
135 |
136 | payload = {"embeds": []}
137 |
138 | for key in ("content", "username", "avatar_url", "tts", "file"):
139 | if (val := getattr(self, key)) is not None:
140 | payload[key] = val
141 |
142 | for embed in self.embeds:
143 | embed_payload = {}
144 |
145 | # simple params
146 | for key in ("title", "type", "description", "url", "timestamp", "color"):
147 | if val := getattr(embed, key):
148 | embed_payload[key] = val
149 |
150 | # class params, must turn into dict
151 | for key in ("footer", "image", "thumbnail", "video", "provider", "author"):
152 | if val := getattr(embed, key):
153 | embed_payload[key] = val.__dict__
154 |
155 | if embed.fields:
156 | embed_payload["fields"] = [f.__dict__ for f in embed.fields]
157 |
158 | payload["embeds"].append(embed_payload)
159 |
160 | return payload
161 |
162 | async def post(self) -> None:
163 | """Post the webhook in JSON format."""
164 |
165 | async with app.state.services.http.post(
166 | self.url,
167 | json=self.json,
168 | ) as req:
169 | if req.status != 204:
170 | logger.error(f"Failed sending webhook with response code {req.status}")
171 |
172 |
173 | async def wrap_hook(hook: str, embed: Embed) -> None:
174 | """Handles sending the webhook to discord."""
175 |
176 | logger.info("Sending Discord webhook!")
177 |
178 | try:
179 | wh = Webhook(hook, tts=False, username="USSR Score Server")
180 | wh.add_embed(embed)
181 | await wh.post()
182 | except Exception:
183 | logger.error(
184 | "Failed sending Discord webhook with exception " + traceback.format_exc(),
185 | )
186 |
187 |
188 | def schedule_hook(hook: Optional[str], embed: Embed):
189 | """Performs a hook execution in a non-blocking manner."""
190 |
191 | if not hook:
192 | return
193 |
194 | loop = asyncio.get_event_loop()
195 | loop.create_task(wrap_hook(hook, embed))
196 |
197 | logger.debug("Scheduled the performing of a discord webhook!")
198 |
199 |
200 | EDIT_COL = "4360181"
201 | EDIT_ICON = "https://cdn3.iconfinder.com/data/icons/bold-blue-glyphs-free-samples/32/Info_Circle_Symbol_Information_Letter-512.png"
202 |
203 | admin_hook = settings.DISCORD_ADMIN_HOOK or None
204 | first_hook = settings.DISCORD_FIRST_PLACE or None
205 |
206 |
207 | async def log_user_edit(
208 | user: User,
209 | action: str,
210 | reason: str,
211 | ) -> None:
212 | """Logs a user edit action to the admin webhook."""
213 |
214 | embed = Embed(title="User Edited!", color=EDIT_COL)
215 | embed.description = (
216 | f"{user.name} ({user.id}) has just been {action}" f" for {reason}!"
217 | )
218 | embed.set_author(name="USSR Score Server", icon_url=EDIT_ICON)
219 | embed.set_footer(text="This is an automated action performed by the server.")
220 |
221 | schedule_hook(admin_hook, embed)
222 |
223 |
224 | async def log_first_place(
225 | score: Score,
226 | user: User,
227 | beatmap: Beatmap,
228 | old_stats: Stats,
229 | new_stats: Stats,
230 | ) -> None:
231 | """Logs a user's first place to the first place webhook."""
232 |
233 | pp_gained = new_stats.pp - old_stats.pp
234 |
235 | # Heavily inspired by Ainu's webhook style.
236 | embed = Embed(color=0x0F97FF)
237 | embed.set_footer(text="USSR Score Server")
238 | embed.set_author(name=f"[{score.mode.relax_str}] New #1 score set by {user.name}!")
239 | embed.add_field(
240 | name=f"Score pp: {score.pp:.2f}pp",
241 | value=(
242 | f"Gained: {pp_gained:.2f}pp"
243 | if pp_gained >= 0
244 | else f"Lost: {pp_gained:.2f}pp"
245 | ),
246 | )
247 | embed.add_field(
248 | name=f"Global Rank: {new_stats.rank}",
249 | value=f"[__[Download Map]({settings.PS_DOMAIN}/d/{beatmap.set_id})__]",
250 | )
251 | embed.add_field(
252 | name=f"Played by: {user.name}",
253 | value=f"[__[User Profile]({settings.PS_DOMAIN}/u/{user.id})__]",
254 | )
255 |
256 | embed.set_image(
257 | url=f"https://assets.ppy.sh/beatmaps/{beatmap.set_id}/covers/cover.jpg",
258 | )
259 |
260 | schedule_hook(first_hook, embed)
261 |
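Usage sketch (editor's illustration): building an embed and handing it to schedule_hook, which silently does nothing when the corresponding webhook URL is unset. This is intended to run inside the server's event loop; the field contents are placeholders.

    embed = Embed(title="Example Notification", color=0x0F97FF)
    embed.add_field(name="User", value="Example", inline=True)
    embed.set_footer(text="USSR Score Server")
    schedule_hook(admin_hook, embed)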
--------------------------------------------------------------------------------
/ussr/app/usecases/leaderboards.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state.services
4 | from app.constants.mode import Mode
5 | from app.models.beatmap import Beatmap
6 | from app.models.score import Score
7 | from app.objects.leaderboard import Leaderboard
8 |
9 |
10 | async def fetch(beatmap: Beatmap, mode: Mode) -> Leaderboard:
11 | leaderboard = Leaderboard(mode)
12 |
13 | db_scores = await app.state.services.database.fetch_all(
14 | f"SELECT * FROM {mode.scores_table} WHERE beatmap_md5 = :md5 AND play_mode = :mode AND completed = 3",
15 | {
16 | "md5": beatmap.md5,
17 | "mode": mode.as_vn,
18 | },
19 | )
20 |
21 | for db_score in db_scores:
22 | score = Score.from_dict(db_score)
23 | leaderboard.scores.append(score)
24 |
25 | leaderboard.sort()
26 | return leaderboard
27 |
--------------------------------------------------------------------------------
/ussr/app/usecases/password.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 |
5 | import bcrypt
6 |
7 | CACHE: dict[str, str] = {}
8 |
9 |
10 | async def verify_password(plain_password: str, hashed_password: str) -> bool:
11 | if hashed_password in CACHE:
12 | return CACHE[hashed_password] == plain_password
13 |
14 | result = await asyncio.to_thread(
15 | bcrypt.checkpw,
16 | plain_password.encode(),
17 | hashed_password.encode(),
18 | )
19 |
20 | if result:
21 | CACHE[hashed_password] = plain_password
22 |
23 | return result
24 |
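Usage sketch (editor's illustration): verifying a freshly generated bcrypt hash; subsequent checks for the same hash are answered from the in-memory cache.

    import asyncio
    import bcrypt

    hashed = bcrypt.hashpw(b"example", bcrypt.gensalt()).decode()
    assert asyncio.run(verify_password("example", hashed))  # now cached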
--------------------------------------------------------------------------------
/ussr/app/usecases/performance.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import hashlib
4 | from typing import TypedDict
5 |
6 | import app.state
7 | import settings
8 | from app.constants.mode import Mode
9 | from app.models.score import Score
10 | from app.objects.path import Path
11 |
12 | OSU_BASE_URL = "https://old.ppy.sh/osu"
13 | if not settings.API_KEYS_POOL:
14 | OSU_BASE_URL = settings.API_OSU_FALLBACK_URL
15 |
16 |
17 | async def check_local_file(osu_file_path: Path, map_id: int, map_md5: str) -> bool:
18 | if (
19 | not osu_file_path.exists()
20 | or hashlib.md5(osu_file_path.read_bytes()).hexdigest() != map_md5
21 | ):
22 | async with app.state.services.http.get(
23 | f"{OSU_BASE_URL}/{map_id}",
24 | ) as response:
25 | if response.status != 200:
26 | return False
27 |
28 | osu_file_path.write_bytes(await response.read())
29 |
30 | return True
31 |
32 |
33 | class PerformanceScore(TypedDict):
34 | beatmap_id: int
35 | mode: int
36 | mods: int
37 | max_combo: int
38 | accuracy: float
39 | miss_count: int
40 |
41 |
42 | async def calculate_performances(
43 | scores: list[PerformanceScore],
44 | ) -> list[tuple[float, float]]:
45 | async with app.state.services.http.post(
46 | f"{settings.PERFORMANCE_SERVICE_URL}/api/v1/calculate",
47 | json=scores,
48 | ) as resp:
49 | if resp.status != 200:
50 | return [(0.0, 0.0)] * len(scores)
51 |
52 | data = await resp.json()
53 | return [(result["pp"], result["stars"]) for result in data]
54 |
55 |
56 | # TODO: split sr & pp calculations
57 | async def calculate_performance(
58 | beatmap_id: int,
59 | mode: Mode,
60 | mods: int,
61 | max_combo: int,
62 | acc: float,
63 | nmiss: int,
64 | ) -> tuple[float, float]:
65 | async with app.state.services.http.post(
66 | f"{settings.PERFORMANCE_SERVICE_URL}/api/v1/calculate",
67 | json=[
68 | {
69 | "beatmap_id": beatmap_id,
70 | "mode": mode.as_vn,
71 | "mods": mods,
72 | "max_combo": max_combo,
73 | "accuracy": acc,
74 | "miss_count": nmiss,
75 | },
76 | ],
77 | ) as resp:
78 | if resp.status != 200:
79 | return 0.0, 0.0
80 |
81 | data = (await resp.json())[0]
82 | return data["pp"], data["stars"]
83 |
84 |
85 | async def calculate_score(score: Score, beatmap_id: int) -> None:
86 | score.pp, score.sr = await calculate_performance(
87 | beatmap_id,
88 | score.mode,
89 | score.mods,
90 | score.max_combo,
91 | score.acc,
92 | score.nmiss,
93 | )
94 |
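Usage sketch (editor's illustration): the batch helper takes a list of PerformanceScore dicts and returns (pp, stars) pairs in the same order; all values below are placeholders and the call runs inside an async context.

    scores: list[PerformanceScore] = [
        {
            "beatmap_id": 75,
            "mode": 0,
            "mods": 64,
            "max_combo": 300,
            "accuracy": 98.5,
            "miss_count": 1,
        },
    ]
    results = await calculate_performances(scores)  # -> [(pp, stars), ...]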
--------------------------------------------------------------------------------
/ussr/app/usecases/pp_cap.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import logging
4 | from typing import NamedTuple
5 |
6 | import app.state
7 | from app.constants.mode import Mode
8 |
9 |
10 | class CapValue(NamedTuple):
11 | pp: int
12 | flashlight_pp: int
13 |
14 |
15 | async def get_pp_cap(mode: Mode, flashlight: bool) -> int:
16 | pp_cap = await fetch(mode)
17 | assert pp_cap is not None
18 |
19 | if flashlight:
20 | return pp_cap.flashlight_pp
21 |
22 | return pp_cap.pp
23 |
24 |
25 | async def fetch(mode: Mode) -> CapValue:
26 | pp_cap = await app.state.services.database.fetch_one(
27 | f"SELECT pp, flashlight_pp FROM pp_limits WHERE mode = :mode and relax = :relax",
28 | {"mode": mode.as_vn, "relax": mode.relax},
29 | )
30 |
31 | if not pp_cap:
32 | return CapValue(0, 0)
33 |
34 | return CapValue(pp_cap["pp"], pp_cap["flashlight_pp"])
35 |
--------------------------------------------------------------------------------
/ussr/app/usecases/privileges.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state.services
4 | from app.constants.privileges import Privileges
5 |
6 |
7 | async def fetch(user_id: int) -> Privileges:
8 | db_privilege = await app.state.services.database.fetch_val(
9 | "SELECT privileges FROM users WHERE id = :id",
10 | {"id": user_id},
11 | )
12 |
13 | if not db_privilege:
14 |         return Privileges(2)  # no privileges row found; assume restricted
15 |
16 | return Privileges(db_privilege)
17 |
--------------------------------------------------------------------------------
/ussr/app/usecases/score.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import hashlib
5 | from typing import Optional
6 |
7 | import app.state
8 | import app.usecases
9 | import app.utils
10 | from app.constants.mods import Mods
11 | from app.constants.score_status import ScoreStatus
12 | from app.models.beatmap import Beatmap
13 | from app.models.score import Score
14 | from app.models.stats import Stats
15 | from app.models.user import User
16 | from app.objects.binary import BinaryWriter
17 |
18 |
19 | def calculate_accuracy(score: Score) -> float:
20 | vanilla_mode = score.mode.as_vn
21 |
22 | n300 = score.n300
23 | n100 = score.n100
24 | n50 = score.n50
25 |
26 | ngeki = score.ngeki
27 | nkatu = score.nkatu
28 |
29 | nmiss = score.nmiss
30 |
31 | if vanilla_mode == 0: # osu!
32 | total = n300 + n100 + n50 + nmiss
33 |
34 | if total == 0:
35 | return 0.0
36 |
37 | return (
38 | 100.0 * ((n300 * 300.0) + (n100 * 100.0) + (n50 * 50.0)) / (total * 300.0)
39 | )
40 |
41 | elif vanilla_mode == 1: # osu!taiko
42 | total = n300 + n100 + nmiss
43 |
44 | if total == 0:
45 | return 0.0
46 |
47 | return 100.0 * ((n100 * 0.5) + n300) / total
48 |
49 | elif vanilla_mode == 2: # osu!catch
50 | total = n300 + n100 + n50 + nkatu + nmiss
51 |
52 | if total == 0:
53 | return 0.0
54 |
55 | return 100.0 * (n300 + n100 + n50) / total
56 |
57 | elif vanilla_mode == 3: # osu!mania
58 | total = n300 + n100 + n50 + ngeki + nkatu + nmiss
59 |
60 | if total == 0:
61 | return 0.0
62 |
63 | return (
64 | 100.0
65 | * (
66 | (n50 * 50.0)
67 | + (n100 * 100.0)
68 | + (nkatu * 200.0)
69 | + ((n300 + ngeki) * 300.0)
70 | )
71 | / (total * 300.0)
72 | )
73 |
74 | else:
75 | raise ValueError(
76 | "The score has a vanilla mode for which accuracy calculation is "
77 | "unsupported.",
78 | )
79 |
80 |
81 | def calculate_status(score: Score) -> None:
82 | if score.old_best:
83 | if score.pp > score.old_best.pp:
84 | score.status = ScoreStatus.BEST
85 | score.old_best.status = ScoreStatus.SUBMITTED
86 | elif score.pp == score.old_best.pp and score.score > score.old_best.score:
87 | # spin to win!
88 | score.status = ScoreStatus.BEST
89 | score.old_best.status = ScoreStatus.SUBMITTED
90 | else:
91 | score.status = ScoreStatus.SUBMITTED
92 | else:
93 | score.status = ScoreStatus.BEST
94 |
95 |
96 | async def unlock_achievements(score: Score, stats: Stats) -> list[str]:
97 | new_achievements: list[str] = []
98 |
99 | user_achievements = await app.usecases.user.fetch_achievements(score.user_id)
100 | for achievement in app.state.cache.ACHIEVEMENTS:
101 | if achievement.id in user_achievements:
102 | continue
103 |
104 | if achievement.cond(score, score.mode.as_vn, stats):
105 | new_achievements.append(achievement.full_name)
106 |
107 | # db insertion is not required immediately, let's run it in the bg!
108 | asyncio.create_task(
109 | app.usecases.user.unlock_achievement(score.user_id, achievement.id),
110 | )
111 |
112 | return new_achievements
113 |
114 |
115 | def get_non_computed_playtime(score: Score, beatmap: Beatmap) -> int:
116 | if score.passed:
117 | return beatmap.hit_length
118 |
119 | return score.time_elapsed // 1000
120 |
121 |
122 | def get_computed_playtime(score: Score, beatmap: Beatmap) -> int:
123 | if score.passed:
124 | return beatmap.hit_length
125 |
126 | value = score.time_elapsed
127 | if score.mods & Mods.DOUBLETIME:
128 | value //= 1.5
129 | elif score.mods & Mods.HALFTIME:
130 | value //= 0.75
131 |
132 | if beatmap.hit_length and value > beatmap.hit_length * 1.33:
133 | return 0
134 |
135 | return int(value)  # the DT/HT divisions above can leave a float
136 |
137 |
138 | async def handle_first_place(
139 | score: Score,
140 | beatmap: Beatmap,
141 | user: User,
142 | old_stats: Stats,
143 | new_stats: Stats,
144 | ) -> None:
145 | await app.state.services.database.execute(
146 | "DELETE FROM first_places WHERE beatmap_md5 = :md5 AND mode = :mode AND relax = :rx",
147 | {"md5": score.map_md5, "mode": score.mode.as_vn, "rx": score.mode.relax_int},
148 | )
149 |
150 | await app.state.services.database.execute(
151 | (
152 | "INSERT INTO first_places (score_id, user_id, score, max_combo, full_combo, "
153 | "mods, 300_count, 100_count, 50_count, ckatus_count, cgekis_count, miss_count, "
154 | "timestamp, mode, completed, accuracy, pp, play_time, beatmap_md5, relax) VALUES "
155 | "(:score_id, :user_id, :score, :max_combo, :full_combo, "
156 | ":mods, :300_count, :100_count, :50_count, :ckatus_count, :cgekis_count, :miss_count, "
157 | ":timestamp, :mode, :completed, :accuracy, :pp, :play_time, :beatmap_md5, :relax)"
158 | ),
159 | {
160 | "score_id": score.id,
161 | "user_id": score.user_id,
162 | "score": score.score,
163 | "max_combo": score.max_combo,
164 | "full_combo": score.full_combo,
165 | "mods": score.mods.value,
166 | "300_count": score.n300,
167 | "100_count": score.n100,
168 | "50_count": score.n50,
169 | "ckatus_count": score.nkatu,
170 | "cgekis_count": score.ngeki,
171 | "miss_count": score.nmiss,
172 | "timestamp": score.time,
173 | "mode": score.mode.as_vn,
174 | "completed": score.status.value,
175 | "accuracy": score.acc,
176 | "pp": score.pp,
177 | "play_time": score.time,
178 | "beatmap_md5": score.map_md5,
179 | "relax": score.mode.relax_int,
180 | },
181 | )
182 |
183 | msg = f"[{score.mode.relax_str}] User {user.embed} has submitted a #1 place on {beatmap.embed} +{score.mods!r} ({score.pp:.2f}pp)"
184 | await app.utils.announce(msg)
185 |
186 | await app.usecases.discord.log_first_place(
187 | score,
188 | user,
189 | beatmap,
190 | old_stats,
191 | new_stats,
192 | )
193 |
194 |
195 | OSU_VERSION = 20211103
196 |
197 |
198 | async def build_full_replay(score: Score) -> Optional[BinaryWriter]:
199 | replay_bytes = await app.state.services.replay_storage.load(
200 | f"replay_{score.id}.osr",
201 | )
202 | if not replay_bytes:
203 | return
204 |
205 | username = await app.usecases.usernames.fetch(score.user_id)
206 | if not username:
207 | return
208 |
209 | replay_md5 = hashlib.md5(
210 | "{}p{}o{}o{}t{}a{}r{}e{}y{}o{}u{}{}{}".format(
211 | score.n100 + score.n300,
212 | score.n50,
213 | score.ngeki,
214 | score.nkatu,
215 | score.nmiss,
216 | score.map_md5,
217 | score.max_combo,
218 | "true" if score.full_combo else "false",
219 | username,
220 | score.score,
221 | 0,
222 | score.mods.value,
223 | "true",
224 | ).encode(),
225 | ).hexdigest()
226 |
227 | return (
228 | BinaryWriter()
229 | .write_u8_le(score.mode.as_vn)
230 | .write_i32_le(OSU_VERSION)
231 | .write_osu_string(score.map_md5)
232 | .write_osu_string(username)
233 | .write_osu_string(replay_md5)
234 | .write_i16_le(score.n300)
235 | .write_i16_le(score.n100)
236 | .write_i16_le(score.n50)
237 | .write_i16_le(score.ngeki)
238 | .write_i16_le(score.nkatu)
239 | .write_i16_le(score.nmiss)
240 | .write_i32_le(score.score)
241 | .write_i16_le(score.max_combo)
242 | .write_u8_le(score.full_combo)
243 | .write_i32_le(score.mods.value)
244 | .write_u8_le(0)
245 | .write_i64_le(app.utils.ts_to_utc_ticks(score.time))
246 | .write_i32_le(len(replay_bytes))
247 | .write_raw(replay_bytes)
248 | .write_i64_le(score.id)
249 | )
250 |
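A worked example of the osu! standard branch of calculate_accuracy, using hypothetical hit counts:

    n300, n100, n50, nmiss = 200, 10, 2, 3
    total = n300 + n100 + n50 + nmiss  # 215 hit objects
    acc = 100.0 * ((n300 * 300.0) + (n100 * 100.0) + (n50 * 50.0)) / (total * 300.0)
    assert round(acc, 2) == 94.73  # 61100 of a possible 64500 judgement points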
--------------------------------------------------------------------------------
/ussr/app/usecases/stats.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass
4 | from typing import NamedTuple
5 | from typing import Optional
6 |
7 | import app.state
8 | import app.usecases
9 | from app.constants.mode import Mode
10 | from app.models.stats import Stats
11 |
12 |
13 | class StatsInfo(NamedTuple):
14 | user_id: int
15 | mode: Mode
16 |
17 |
18 | async def fetch(user_id: int, mode: Mode) -> Optional[Stats]:
19 | db_stats = await app.state.services.database.fetch_one(
20 | (
21 | "SELECT ranked_score_{m} ranked_score, total_score_{m} total_score, pp_{m} pp, avg_accuracy_{m} accuracy, "
22 | "playcount_{m} playcount, max_combo_{m} max_combo, total_hits_{m} total_hits FROM {s} WHERE id = :id"
23 | ).format(m=mode.stats_prefix, s=mode.stats_table),
24 | {"id": user_id},
25 | )
26 |
27 | if not db_stats:
28 | return None
29 |
30 | global_rank, country_rank = await get_redis_rank(user_id, mode)
31 |
32 | return Stats(
33 | user_id=user_id,
34 | mode=mode,
35 | ranked_score=db_stats["ranked_score"],
36 | total_score=db_stats["total_score"],
37 | pp=db_stats["pp"],
38 | rank=global_rank,
39 | country_rank=country_rank,
40 | accuracy=db_stats["accuracy"],
41 | playcount=db_stats["playcount"],
42 | max_combo=db_stats["max_combo"],
43 | total_hits=db_stats["total_hits"],
44 | )
45 |
46 |
47 | class RankInfo(NamedTuple):
48 | global_rank: int
49 | country_rank: int
50 |
51 |
52 | async def get_redis_rank(user_id: int, mode: Mode) -> RankInfo:
53 | redis_global_rank = await app.state.services.redis.zrevrank(
54 | f"ripple:{mode.redis_leaderboard}:{mode.stats_prefix}",
55 | user_id,
56 | )
57 | global_rank = int(redis_global_rank) + 1 if redis_global_rank is not None else 0
58 |
59 | country = await app.usecases.countries.get_country(user_id)
60 | redis_country_rank = await app.state.services.redis.zrevrank(
61 | f"ripple:{mode.redis_leaderboard}:{mode.stats_prefix}:{country.lower()}",
62 | user_id,
63 | )
64 | country_rank = int(redis_country_rank) + 1 if redis_country_rank is not None else 0
65 |
66 | return RankInfo(global_rank, country_rank)
67 |
68 |
69 | async def full_recalc(stats: Stats) -> None:
70 | db_scores = await app.state.services.database.fetch_all(
71 | f"SELECT s.accuracy, s.pp FROM {stats.mode.scores_table} s RIGHT JOIN beatmaps b USING(beatmap_md5) "
72 | "WHERE s.completed = 3 AND s.play_mode = :mode AND b.ranked IN (3, 2) AND s.userid = :id ORDER BY s.pp DESC LIMIT 100",
73 | {"mode": stats.mode.as_vn, "id": stats.user_id},
74 | )
75 |
76 | total_acc = 0.0
77 | total_pp = 0.0
78 | last_idx = 0
79 |
80 | for idx, score in enumerate(db_scores):
81 | total_pp += score["pp"] * (0.95**idx)
82 | total_acc += score["accuracy"] * (0.95**idx)
83 | last_idx = idx
84 |
85 | stats.accuracy = (total_acc * (100.0 / (20 * (1 - 0.95 ** (last_idx + 1))))) / 100
86 | stats.pp = total_pp + await calc_bonus(stats)
87 |
88 |
89 | async def calc_bonus(stats: Stats) -> float:
90 | count = await app.state.services.database.fetch_val(
91 | (
92 | f"SELECT COUNT(*) FROM {stats.mode.scores_table} s RIGHT JOIN beatmaps b USING(beatmap_md5) "
93 | "WHERE b.ranked IN (2, 3) AND s.completed = 3 AND s.play_mode = :mode AND s.userid = :id LIMIT 25397"
94 | ),
95 | {
96 | "mode": stats.mode.as_vn,
97 | "id": stats.user_id,
98 | },
99 | )
100 |
101 | stats._cur_bonus_pp = 416.6667 * (1 - (0.995 ** min(1000, count)))
102 | return stats._cur_bonus_pp
103 |
104 |
105 | async def save(stats: Stats) -> None:
106 | await app.state.services.database.execute(
107 | (
108 | "UPDATE {t} SET ranked_score_{m} = :ranked_score, total_score_{m} = :total_score, pp_{m} = :pp, avg_accuracy_{m} = :accuracy, "
109 | "playcount_{m} = :playcount, max_combo_{m} = :max_combo, total_hits_{m} = :total_hits WHERE id = :id"
110 | ).format(t=stats.mode.stats_table, m=stats.mode.stats_prefix),
111 | {
112 | "ranked_score": stats.ranked_score,
113 | "total_score": stats.total_score,
114 | "pp": stats.pp,
115 | "accuracy": stats.accuracy,
116 | "playcount": stats.playcount,
117 | "max_combo": stats.max_combo,
118 | "total_hits": stats.total_hits,
119 | "id": stats.user_id,
120 | },
121 | )
122 |
123 |
124 | async def update_rank(stats: Stats) -> None:
125 | mode = stats.mode
126 |
127 | await app.state.services.redis.zadd(
128 | f"ripple:{mode.redis_leaderboard}:{mode.stats_prefix}",
129 | {stats.user_id: stats.pp},
130 | )
131 |
132 | country = await app.usecases.countries.get_country(stats.user_id)
133 | await app.state.services.redis.zadd(
134 | f"ripple:{mode.redis_leaderboard}:{mode.stats_prefix}:{country.lower()}",
135 | {stats.user_id: stats.pp},
136 | )
137 |
138 | stats.rank, stats.country_rank = await get_redis_rank(stats.user_id, mode)
139 |
140 |
141 | async def refresh_stats(user_id: int) -> None:
142 | await app.state.services.redis.publish("peppy:update_cached_stats", user_id)
143 |
144 |
145 | async def calc_playcount(stats: Stats) -> int:
146 | stats.playcount = await app.state.services.database.fetch_val(
147 | f"SELECT COUNT(*) FROM {stats.mode.scores_table} WHERE userid = :id AND play_mode = :mode",
148 | {"id": stats.user_id, "mode": stats.mode.as_vn},
149 | )
150 |
151 | return stats.playcount
152 |
153 |
154 | async def calc_max_combo(stats: Stats) -> int:
155 | stats.max_combo = (
156 | await app.state.services.database.fetch_val(
157 | f"SELECT MAX(max_combo) FROM {stats.mode.scores_table} WHERE userid = :id "
158 | "AND play_mode = :mode",
159 | {"id": stats.user_id, "mode": stats.mode.as_vn},
160 | )
161 | or 0
162 | )
163 |
164 | return stats.max_combo
165 |
166 |
167 | async def calc_total_score(stats: Stats) -> int:
168 | stats.total_score = (
169 | await app.state.services.database.fetch_val(
170 | f"SELECT SUM(score) FROM {stats.mode.scores_table} WHERE userid = :id "
171 | "AND play_mode = :mode",
172 | {"id": stats.user_id, "mode": stats.mode.as_vn},
173 | )
174 | or 0
175 | )
176 |
177 | return stats.total_score
178 |
179 |
180 | async def calc_ranked_score(stats: Stats) -> int:
181 | stats.ranked_score = (
182 | await app.state.services.database.fetch_val(
183 | f"SELECT SUM(s.score) FROM {stats.mode.scores_table} s INNER JOIN beatmaps b "
184 | "ON s.beatmap_md5 = b.beatmap_md5 WHERE s.userid = :id "
185 | "AND s.play_mode = :mode AND s.completed = 3 AND b.ranked IN (2, 3)",
186 | {"id": stats.user_id, "mode": stats.mode.as_vn},
187 | )
188 | or 0
189 | )
190 |
191 | return stats.ranked_score
192 |
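An illustration of the weighting used by full_recalc and calc_bonus above: each best score contributes pp * 0.95^index, and bonus pp follows 416.6667 * (1 - 0.995^min(1000, n)), where n is the player's ranked score count. The numbers below are made up:

    scores_pp = [400.0, 350.0, 300.0]  # hypothetical top plays, best first
    weighted = sum(pp * (0.95 ** idx) for idx, pp in enumerate(scores_pp))
    n = 3  # hypothetical count of ranked, completed scores
    bonus = 416.6667 * (1 - 0.995 ** min(1000, n))
    assert round(weighted, 2) == 1003.25  # 400 + 332.5 + 270.75
    assert round(bonus, 2) == 6.22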
--------------------------------------------------------------------------------
/ussr/app/usecases/user.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import time
4 | from typing import Any
5 | from typing import Awaitable
6 | from typing import Callable
7 | from typing import Optional
8 |
9 | import app.state.services
10 | import app.usecases.discord
11 | import app.usecases.password
12 | import app.usecases.privileges
13 | import app.usecases.score
14 | import app.utils
15 | import logger
16 | import settings
17 | from app.constants.mode import Mode
18 | from app.constants.privileges import Privileges
19 | from app.models.beatmap import Beatmap
20 | from app.models.score import Score
21 | from app.models.user import User
22 | from fastapi import HTTPException
23 |
24 |
25 | async def fetch_db(username: str) -> Optional[User]:
26 | safe_name = app.utils.make_safe(username)
27 |
28 | db_user = await app.state.services.database.fetch_one(
29 | "SELECT * FROM users WHERE username_safe = :safe_name",
30 | {"safe_name": safe_name},
31 | )
32 |
33 | if not db_user:
34 | return None
35 |
36 | country = await app.state.services.database.fetch_val(
37 | "SELECT country FROM users WHERE id = :id",
38 | {"id": db_user["id"]},
39 | )
40 |
41 | db_friends = await app.state.services.database.fetch_all(
42 | "SELECT user2 FROM users_relationships WHERE user1 = :id",
43 | {"id": db_user["id"]},
44 | )
45 |
46 | friends = [relationship["user2"] for relationship in db_friends]
47 |
48 | return User(
49 | id=db_user["id"],
50 | name=db_user["username"],
51 | privileges=Privileges(db_user["privileges"]),
52 | friends=friends,
53 | password_bcrypt=db_user["password_md5"],
54 | country=country,
55 | coins=db_user["coins"],
56 | )
57 |
58 |
59 | async def fetch_db_id(user_id: int) -> Optional[User]:
60 | db_user = await app.state.services.database.fetch_one(
61 | "SELECT * FROM users WHERE id = :id",
62 | {"id": user_id},
63 | )
64 |
65 | if not db_user:
66 | return None
67 |
68 | country = await app.state.services.database.fetch_val(
69 | "SELECT country FROM users WHERE id = :id",
70 | {"id": db_user["id"]},
71 | )
72 |
73 | db_friends = await app.state.services.database.fetch_all(
74 | "SELECT user2 FROM users_relationships WHERE user1 = :id",
75 | {"id": db_user["id"]},
76 | )
77 |
78 | friends = [relationship["user2"] for relationship in db_friends]
79 |
80 | return User(
81 | id=db_user["id"],
82 | name=db_user["username"],
83 | privileges=Privileges(db_user["privileges"]),
84 | friends=friends,
85 | password_bcrypt=db_user["password_md5"],
86 | country=country,
87 | coins=db_user["coins"],
88 | )
89 |
90 |
91 | # Direct-call variant used outside of FastAPI dependencies.
92 | async def auth_user(username: str, password: str) -> Optional[User]:
93 | user = await fetch_db(username)
94 | if not user:
95 | return None
96 |
97 | correct_password = await app.usecases.password.verify_password(
98 | password,
99 | user.password_bcrypt,
100 | )
101 | if not correct_password:
102 | return None
103 |
104 | return user
105 |
106 |
107 | # FastAPI dependency factory: builds an auth dependency from the given parameter source (e.g. Query or Form).
108 | def authenticate_user(
109 | param_function: Callable[..., Any],
110 | name_arg: str = "u",
111 | password_arg: str = "p",
112 | error_text: Optional[Any] = None,
113 | ) -> Callable[[str, str], Awaitable[User]]:
114 | async def wrapper(
115 | username: str = param_function(..., alias=name_arg),
116 | password: str = param_function(..., alias=password_arg),
117 | ) -> User:
118 | user = await fetch_db(username)
119 | if not user:
120 | raise HTTPException(
121 | status_code=401,
122 | detail=error_text,
123 | )
124 |
125 | correct_password = await app.usecases.password.verify_password(
126 | password,
127 | user.password_bcrypt,
128 | )
129 | if not correct_password:
130 | raise HTTPException(
131 | status_code=401,
132 | detail=error_text,
133 | )
134 |
135 | return user
136 |
137 | return wrapper
138 |
139 |
140 | async def remove_from_leaderboard(user: User) -> None:
141 | uid = str(user.id)
142 |
143 | for mode in ("std", "taiko", "ctb", "mania"):
144 | await app.state.services.redis.zrem(f"ripple:leaderboard:{mode}", uid)
145 | await app.state.services.redis.zrem(f"ripple:leaderboard_relax:{mode}", uid)
146 | await app.state.services.redis.zrem(f"ripple:leaderboard_ap:{mode}", uid)
147 |
148 | if user.country and (c := user.country.lower()) != "xx":
149 | await app.state.services.redis.zrem(f"ripple:leaderboard:{mode}:{c}", uid)
150 |
151 | await app.state.services.redis.zrem(
152 | f"ripple:leaderboard_relax:{mode}:{c}",
153 | uid,
154 | )
155 |
156 | await app.state.services.redis.zrem(
157 | f"ripple:leaderboard_ap:{mode}:{c}",
158 | uid,
159 | )
160 |
161 |
162 | async def notify_ban(user: User) -> None:
163 | await app.state.services.redis.publish("peppy:ban", user.id)
164 |
165 |
166 | async def insert_ban_log(user: User, summary: str, detail: str) -> None:
167 | """Inserts a ban log into the database.
168 |
169 | Note:
170 | This function prefixes the detail with `"USSR Autoban: "` before
171 | inserting it into the database.
172 | """
173 |
174 | # Prefix the detail so the log entry is identifiable as a USSR autoban.
175 | detail = "USSR Autoban: " + detail
176 |
177 | await app.state.services.database.execute(
178 | "INSERT INTO ban_logs (from_id, to_id, summary, detail) VALUES (:from_id, :to_id, :summary, :detail)",
179 | {
180 | "from_id": settings.PS_BOT_USER_ID,
181 | "to_id": user.id,
182 | "summary": summary,
183 | "detail": detail,
184 | },
185 | )
186 |
187 |
188 | DEFAULT_SUMMARY = "No summary available."
189 | DEFAULT_DETAIL = "No detail available."
190 |
191 |
192 | async def restrict_user(
193 | user: User,
194 | summary: str = DEFAULT_SUMMARY,
195 | detail: str = DEFAULT_DETAIL,
196 | ) -> None:
197 | if user.privileges.is_restricted:
198 | return
199 |
200 | user.privileges = user.privileges & ~Privileges.USER_PUBLIC
201 | await app.state.services.database.execute(
202 | "UPDATE users SET privileges = :new_priv, ban_datetime = :ban_time, ban_reason = :ban_reason WHERE id = :id",
203 | {
204 | "new_priv": user.privileges.value,
205 | "ban_time": int(time.time()),
206 | "ban_reason": summary,
207 | "id": user.id,
208 | },
209 | )
210 |
211 | await insert_ban_log(user, summary, detail)
212 | await notify_ban(user)
213 | await remove_from_leaderboard(user)
214 |
215 | await app.usecases.discord.log_user_edit(user, "restricted", summary)
216 | logger.info(f"{user} has been restricted for {summary}!")
217 |
218 |
219 | async def fetch_achievements(user_id: int) -> list[int]:
220 | db_achievements = await app.state.services.database.fetch_all(
221 | "SELECT achievement_id FROM users_achievements WHERE user_id = :id",
222 | {"id": user_id},
223 | )
224 |
225 | return [ach["achievement_id"] for ach in db_achievements]
226 |
227 |
228 | async def unlock_achievement(user_id: int, ach_id: int) -> None:
229 | await app.state.services.database.execute(
230 | "INSERT INTO users_achievements (achievement_id, user_id, time) VALUES (:aid, :uid, :timestamp)",
231 | {"aid": ach_id, "uid": user_id, "timestamp": int(time.time())},
232 | )
233 |
234 |
235 | async def increment_playtime(score: Score, beatmap: Beatmap) -> None:
236 | await app.state.services.database.execute(
237 | f"UPDATE {score.mode.stats_table} SET playtime_{score.mode.stats_prefix} = playtime_{score.mode.stats_prefix} + :new WHERE id = :id",
238 | {
239 | "new": app.usecases.score.get_non_computed_playtime(score, beatmap),
240 | "id": score.user_id,
241 | },
242 | )
243 |
244 |
245 | async def increment_replays_watched(user_id: int, mode: Mode) -> None:
246 | await app.state.services.database.execute(
247 | "UPDATE users_stats SET replays_watched_{0} = replays_watched_{0} + 1 WHERE id = :id".format(
248 | mode.stats_prefix,
249 | ),
250 | {
251 | "id": user_id,
252 | },
253 | )
254 |
255 |
256 | async def update_latest_activity(user_id: int) -> None:
257 | await app.state.services.database.execute(
258 | "UPDATE users SET latest_activity = UNIX_TIMESTAMP() WHERE id = :id",
259 | {
260 | "id": user_id,
261 | },
262 | )
263 |
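A sketch of how the authenticate_user factory above is consumed as a FastAPI dependency; the route path and the choice of Query as the parameter source are illustrative assumptions, not taken from this repository's routers:

    from fastapi import APIRouter, Depends, Query

    from app.models.user import User
    from app.usecases.user import authenticate_user

    router = APIRouter()

    # Reads ?u=<username>&p=<password> from the query string and yields a User,
    # raising a 401 with the given error text if authentication fails.
    auth = authenticate_user(Query, error_text="error: pass")

    @router.get("/example")  # hypothetical endpoint
    async def example_handler(user: User = Depends(auth)) -> str:
        return f"hello {user.name}"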
--------------------------------------------------------------------------------
/ussr/app/usecases/usernames.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state
4 |
5 |
6 | async def fetch(user_id: int) -> str:
7 | username = await app.state.services.database.fetch_val(
8 | "SELECT username FROM users WHERE id = :id",
9 | {"id": user_id},
10 | )
11 |
12 | if not username:
13 | return "" # xd
14 |
15 | return username
16 |
--------------------------------------------------------------------------------
/ussr/app/usecases/verified.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state
4 | import settings
5 |
6 |
7 | async def get_verified(user_id: int) -> bool:
8 | return bool(await app.state.services.database.fetch_val(
9 | "SELECT 1 FROM user_badges WHERE user = :uid AND badge = :bid",
10 | {"uid": user_id, "bid": settings.PS_VERIFIED_BADGE},
11 | ))
12 |
--------------------------------------------------------------------------------
/ussr/app/usecases/whitelist.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import app.state
4 |
5 |
6 | async def get_whitelisted_users() -> list[int]:
7 | results_db = app.state.services.database.iterate(
8 | "SELECT user_id FROM whitelist",
9 | )
10 |
11 | return [result["user_id"] async for result in results_db]
12 |
13 |
14 | async def is_whitelisted(user_id: int) -> bool:
15 | return bool(await app.state.services.database.fetch_val(
16 | "SELECT 1 FROM whitelist WHERE user_id = :uid",
17 | {"uid": user_id},
18 | ))
19 |
--------------------------------------------------------------------------------
/ussr/app/utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Optional
4 | from typing import Union
5 |
6 | import app.state
7 | import orjson
8 |
9 |
10 | def make_safe(username: str) -> str:
11 | return username.rstrip().lower().replace(" ", "_")
12 |
13 |
14 | _TIME_ORDER_SUFFIXES = ["ns", "μs", "ms", "s"]
15 |
16 |
17 | def format_time(time: Union[int, float]) -> str:
18 | for suffix in _TIME_ORDER_SUFFIXES:
19 | # Stop at the largest supported unit ("s") instead of dividing past it.
20 | if time < 1000 or suffix == _TIME_ORDER_SUFFIXES[-1]:
21 | break
22 | time /= 1000
23 |
24 | return f"{time:.2f}{suffix}" # type: ignore
25 |
26 |
27 | async def channel_message(channel: str, message: str) -> None:
28 | msg = orjson.dumps(
29 | {
30 | "to": channel,
31 | "message": message,
32 | },
33 | )
34 |
35 | await app.state.services.redis.publish("peppy:bot_msg", msg)
36 |
37 |
38 | async def announce(message: str) -> None:
39 | await channel_message("#announce", message)
40 |
41 |
42 | async def notify_new_score(score_id: int) -> None:
43 | await app.state.services.redis.publish("api:score_submission", score_id)
44 |
45 |
46 | async def check_online(user_id: int, ip: Optional[str] = None) -> bool:
47 | key = f"peppy:sessions:{user_id}"
48 |
49 | if ip:
50 | return await app.state.services.redis.sismember(key, ip)
51 |
52 | return await app.state.services.redis.exists(key)
53 |
54 |
55 | def ts_to_utc_ticks(ts: int) -> int:
56 | """Converts a UNIX timestamp to a UTC ticks. Equivalent to the reverse of
57 | C#'s `DateTime.ToUniversalTime().Ticks`.
58 | """
59 |
60 | return int(ts * 1e7) + 0x89F7FF5F7B58000
61 |
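A quick sanity check on the conversion above: the hexadecimal constant is the .NET tick count at the Unix epoch, so the function is just a scale by 1e7 plus that fixed offset:

    from app.utils import ts_to_utc_ticks

    assert 0x89F7FF5F7B58000 == 621_355_968_000_000_000  # .NET ticks at 1970-01-01 UTC
    assert ts_to_utc_ticks(0) == 0x89F7FF5F7B58000
    assert ts_to_utc_ticks(1) - ts_to_utc_ticks(0) == 10_000_000  # one second = 1e7 ticks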
--------------------------------------------------------------------------------
/ussr/logger.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import logging
4 | import os
5 | import sys
6 | import time
7 | from enum import IntEnum
8 | from typing import Any
9 | from typing import Optional
10 |
11 | from pythonjsonlogger import jsonlogger
12 |
13 | DEBUG = "debug" in sys.argv
14 | __all__ = (
15 | "info",
16 | "error",
17 | "warning",
18 | "debug",
19 | )
20 |
21 | logger = logging.getLogger()
22 | logger.setLevel(logging.DEBUG)
23 |
24 | # Attach a single JSON handler; calling logging.basicConfig here as well would
25 | # register a second plain-text handler and duplicate every record.
26 | logHandler = logging.StreamHandler()
27 | formatter = jsonlogger.JsonFormatter()
28 | logHandler.setFormatter(formatter)
29 | logger.addHandler(logHandler)
30 |
31 |
32 | def info(text: str, *, extra: Optional[dict[str, Any]] = None):
33 | logger.info(text, extra=extra)
34 |
35 |
36 | def error(text: str, *, extra: Optional[dict[str, Any]] = None):
37 | logger.error(text, extra=extra)
38 |
39 |
40 | def warning(text: str, *, extra: Optional[dict[str, Any]] = None):
41 | logger.warning(text, extra=extra)
42 |
43 |
44 | def debug(text: str, *, extra: Optional[dict[str, Any]] = None):
45 | logger.debug(text, extra=extra)
46 |
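With the JSON formatter attached above, each helper emits one JSON object per record and merges any extra fields into it. A short usage sketch (the field names are arbitrary examples):

    import logger

    logger.info("score submitted", extra={"user_id": 1000, "score_id": 123456})
    # Emits a single JSON line roughly of the form:
    # {"message": "score submitted", "user_id": 1000, "score_id": 123456}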
--------------------------------------------------------------------------------
/ussr/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3.9
2 | from __future__ import annotations
3 |
4 | import logging
5 | import sys
6 |
7 | import app.utils
8 | import ddtrace
9 | import logger
10 | import settings
11 | import uvicorn
12 | import uvloop
13 |
14 | uvloop.install()
15 |
16 | DEBUG = "debug" in sys.argv
17 |
18 |
19 | def main() -> int:
20 | ddtrace.patch_all()
21 |
22 | uvicorn.run(
23 | "app.init_api:asgi_app",
24 | reload=DEBUG,
25 | log_level=logging.WARNING,
26 | server_header=False,
27 | date_header=False,
28 | host="0.0.0.0",
29 | port=settings.HTTP_PORT,
30 | )
31 |
32 | return 0
33 |
34 |
35 | if __name__ == "__main__":
36 | raise SystemExit(main())
37 |
--------------------------------------------------------------------------------
/ussr/settings.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import os
4 |
5 | from dotenv import load_dotenv
6 |
7 | load_dotenv()
8 |
9 | _BOOLEAN_STRINGS = ("true", "1", "yes")
10 |
11 |
12 | def _parse_bool(value: str) -> bool:
13 | return value.strip().lower() in _BOOLEAN_STRINGS
14 |
15 |
16 | def _parse_string_list(value: str) -> list[str]:
17 | return [item.strip() for item in value.split(",") if item.strip()]
18 |
19 |
20 | # HTTP Configuration
21 | HTTP_PORT = int(os.environ["HTTP_PORT"])
22 |
23 | # MySQL Database Configuration
24 | MYSQL_HOST = os.environ["MYSQL_HOST"]
25 | MYSQL_PORT = int(os.environ["MYSQL_PORT"])
26 | MYSQL_USER = os.environ["MYSQL_USER"]
27 | MYSQL_DATABASE = os.environ["MYSQL_DATABASE"]
28 | MYSQL_PASSWORD = os.environ["MYSQL_PASSWORD"]
29 |
30 | # Redis Configuration
31 | REDIS_HOST = os.environ["REDIS_HOST"]
32 | REDIS_PORT = int(os.environ["REDIS_PORT"])
33 | REDIS_PASSWORD = os.environ["REDIS_PASSWORD"]
34 | REDIS_DB = int(os.environ["REDIS_DB"])
35 |
36 | # MeiliSearch Configuration
37 | USE_MEILI_DIRECT = _parse_bool(os.environ["MEILI_DIRECT"])
38 | MEILI_URL = os.environ["MEILI_URL"]
39 | MEILI_KEY = os.environ["MEILI_KEY"]
40 |
41 | # Directories and URLs
42 | DATA_BEATMAP_DIRECTORY = os.environ["DATA_BEATMAP_DIRECTORY"]
43 | DATA_SCREENSHOT_DIRECTORY = os.environ["DATA_SCREENSHOT_DIRECTORY"]
44 | DATA_REPLAY_DIRECTORY = os.environ["DATA_REPLAY_DIRECTORY"]
45 |
46 | # API Configuration
47 | API_KEYS_POOL = _parse_string_list(os.environ["API_KEYS_POOL"])
48 | API_FALLBACK_URL = os.environ["API_FALLBACK_URL"]
49 | API_OSU_FALLBACK_URL = os.environ["API_OSU_FALLBACK_URL"]
50 | DIRECT_URL = os.environ["DIRECT_URL"]
51 |
52 | # Server Information
53 | PS_DOMAIN = os.environ["SRV_URL"]
54 | PS_NAME = os.environ["SRV_NAME"]
55 | PS_VERIFIED_BADGE = int(os.environ["SRV_VERIFIED_BADGE"])
56 | PS_BOT_USER_ID = int(os.environ["BOT_USER_ID"])
57 | PS_ALLOW_CUSTOM_CLIENTS = _parse_bool(os.environ["CUSTOM_CLIENTS"])
58 |
59 | # Discord Configuration
60 | DISCORD_FIRST_PLACE = os.environ["DISCORD_FIRST_PLACE"]
61 | DISCORD_ADMIN_HOOK = os.environ["DISCORD_ADMIN_HOOK"]
62 |
63 | # WebSocket Configuration
64 | WS_WRITE_KEY = os.environ["WS_WRITE_KEY"]
65 |
66 | # Performance Service Configuration
67 | PERFORMANCE_SERVICE_URL = os.environ["PERFORMANCE_SERVICE_URL"]
68 |
69 | # S3 Configuration
70 | S3_ENABLED = _parse_bool(os.environ["S3_ENABLED"])
71 | S3_BUCKET = os.environ["S3_BUCKET"]
72 | S3_REGION = os.environ["S3_REGION"]
73 | S3_ENDPOINT = os.environ["S3_ENDPOINT"]
74 | S3_ACCESS_KEY = os.environ["S3_ACCESS_KEY"]
75 | S3_SECRET_KEY = os.environ["S3_SECRET_KEY"]
76 |
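A few quick checks of the parse helpers above; the inputs mirror the loose formats allowed in .env.example and could be dropped at the bottom of this module as a smoke test:

    assert _parse_bool("true") and _parse_bool(" Yes ") and _parse_bool("1")
    assert not _parse_bool("false") and not _parse_bool("")
    assert _parse_string_list("key1, key2,key3") == ["key1", "key2", "key3"]
    assert _parse_string_list("") == []  # an empty API_KEYS_POOL yields no keys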
--------------------------------------------------------------------------------